UNPKG

3.59 MB · Source Map (JSON)
1{"version":3,"file":"tf.js","sources":["../node_modules/@tensorflow/tfjs-core/src/canvas_util.ts","../node_modules/@tensorflow/tfjs-core/src/device_util.ts","../node_modules/@tensorflow/tfjs-core/src/util.ts","../node_modules/@tensorflow/tfjs-core/src/profiler.ts","../node_modules/@tensorflow/tfjs-core/src/tensor_format.ts","../node_modules/@tensorflow/tfjs-core/src/tensor.ts","../node_modules/@tensorflow/tfjs-core/src/tape.ts","../node_modules/@tensorflow/tfjs-core/src/tensor_util.ts","../node_modules/@tensorflow/tfjs-core/src/engine.ts","../node_modules/@tensorflow/tfjs-core/src/environment_util.ts","../node_modules/@tensorflow/tfjs-core/src/environment.ts","../node_modules/@tensorflow/tfjs-core/src/gradients.ts","../node_modules/@tensorflow/tfjs-core/src/globals.ts","../node_modules/@tensorflow/tfjs-core/src/log.ts","../node_modules/@tensorflow/tfjs-core/src/ops/array_ops_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/axis_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/concat_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/gather_nd_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/reduce_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/scatter_nd_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/segment_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/slice_util.ts","../node_modules/@tensorflow/tfjs-core/src/tensor_util_env.ts","../node_modules/@tensorflow/tfjs-core/src/ops/operation.ts","../node_modules/@tensorflow/tfjs-core/src/ops/softmax.ts","../node_modules/@tensorflow/tfjs-core/src/ops/complex_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/tensor_ops.ts","../node_modules/@tensorflow/tfjs-core/src/types.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/backend.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/backend_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/complex_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/non_max_suppression_impl.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/split_shared.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/topk_impl.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/argminmax_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/avg_pool_backprop_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/ops/broadcast_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/batchnorm_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/batchnorm_packed_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/binaryop_complex_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/binaryop_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/clip_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/complex_abs_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/concat_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/conv_backprop_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/conv_backprop_gpu_depthwise.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/conv_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/conv_gpu_depthwise.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/crop_and_resize_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/shader_compiler_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/shader_compiler.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/cumsum_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/w
ebgl/depth_to_space_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/encode_float_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/fft_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/from_pixels_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/gather_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/gather_nd_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/tex_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/webgl_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/gpgpu_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/gpgpu_context.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/gpgpu_math.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/im2col_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/lrn_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/lrn_grad_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/max_pool_backprop_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/mulmat_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/mulmat_packed_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/multinomial_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/onehot_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/packing_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/pack_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/pad_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/pool_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/reduce_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/reshape_packed_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/resize_bilinear_backprop_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/resize_bilinear_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/resize_nearest_neighbor_backprop_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/resize_nearest_neighbor_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/reverse_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/scatter_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/segment_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/select_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/slice_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/strided_slice_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/texture_manager.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/tile_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/transpose_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/ops/erf_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/selu_util.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/unaryop_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/webgl/unpack_gpu.ts","../node_modules/@tensorflow/tfjs-core/src/ops/concat_split.ts","../node_modules/@tensorflow/tfjs-core/node_modules/seedrandom/lib/alea.js","../node_modules/@tensorflow/tfjs-core/node_modules/seedrandom/lib/xor128.js","../node_modules/@tensorflow/tfjs-core/node_modules/seedrandom/lib/xorwow.js","../node_modules/@tensorflow/tfjs-core/node_modules/seedrandom/lib/xorshift7.js","../node_modules/@tensorflow/tfjs-core/node_modules/seedrandom/lib/xor4096.js","../node_modules/@tensorf
low/tfjs-core/node_modules/seedrandom/lib/tychei.js","../node_modules/@tensorflow/tfjs-core/node_modules/seedrandom/seedrandom.js","../node_modules/@tensorflow/tfjs-core/node_modules/seedrandom/index.js","../node_modules/@tensorflow/tfjs-core/src/ops/rand.ts","../node_modules/@tensorflow/tfjs-core/src/ops/array_ops.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/where_impl.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/backend_webgl.ts","../node_modules/@tensorflow/tfjs-core/src/ops/unary_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/batchnorm.ts","../node_modules/@tensorflow/tfjs-core/src/ops/conv_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/matmul.ts","../node_modules/@tensorflow/tfjs-core/src/ops/conv.ts","../node_modules/@tensorflow/tfjs-core/src/ops/reverse.ts","../node_modules/@tensorflow/tfjs-core/src/ops/pool.ts","../node_modules/@tensorflow/tfjs-core/src/ops/slice.ts","../node_modules/@tensorflow/tfjs-core/src/ops/reduction_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/compare.ts","../node_modules/@tensorflow/tfjs-core/src/ops/binary_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/logical_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/relu_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/transpose.ts","../node_modules/@tensorflow/tfjs-core/src/ops/lrn.ts","../node_modules/@tensorflow/tfjs-core/src/ops/norm.ts","../node_modules/@tensorflow/tfjs-core/src/ops/segment_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/lstm.ts","../node_modules/@tensorflow/tfjs-core/src/ops/moving_average.ts","../node_modules/@tensorflow/tfjs-core/src/ops/strided_slice.ts","../node_modules/@tensorflow/tfjs-core/src/ops/topk.ts","../node_modules/@tensorflow/tfjs-core/src/ops/scatter_nd.ts","../node_modules/@tensorflow/tfjs-core/src/ops/spectral_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/sparse_to_dense_util.ts","../node_modules/@tensorflow/tfjs-core/src/ops/sparse_to_dense.ts","../node_modules/@tensorflow/tfjs-core/src/ops/gather_nd.ts","../node_modules/@tensorflow/tfjs-core/src/ops/loss_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/linalg_ops.ts","../node_modules/@tensorflow/tfjs-core/src/ops/image_ops.ts","../node_modules/@tensorflow/tfjs-core/src/kernels/backend_cpu.ts","../node_modules/@tensorflow/tfjs-core/src/browser_util.ts","../node_modules/@tensorflow/tfjs-core/src/io/types.ts","../node_modules/@tensorflow/tfjs-core/src/io/io_utils.ts","../node_modules/@tensorflow/tfjs-core/src/io/router_registry.ts","../node_modules/@tensorflow/tfjs-core/src/io/model_management.ts","../node_modules/@tensorflow/tfjs-core/src/io/indexed_db.ts","../node_modules/@tensorflow/tfjs-core/src/io/local_storage.ts","../node_modules/@tensorflow/tfjs-core/src/io/browser_files.ts","../node_modules/@tensorflow/tfjs-core/src/io/weights_loader.ts","../node_modules/@tensorflow/tfjs-core/src/io/browser_http.ts","../node_modules/@tensorflow/tfjs-core/src/io/passthrough.ts","../node_modules/@tensorflow/tfjs-core/src/io/io.ts","../node_modules/@tensorflow/tfjs-core/src/ops/confusion_matrix.ts","../node_modules/@tensorflow/tfjs-core/src/serialization.ts","../node_modules/@tensorflow/tfjs-core/src/test_util.ts","../node_modules/@tensorflow/tfjs-core/src/version.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/optimizer.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/adadelta_optimizer.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/adagrad_optimizer.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/adam_optimi
zer.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/adamax_optimizer.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/sgd_optimizer.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/momentum_optimizer.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/rmsprop_optimizer.ts","../node_modules/@tensorflow/tfjs-core/src/optimizers/optimizer_constructors.ts","../node_modules/@tensorflow/tfjs-core/src/train.ts","../node_modules/@tensorflow/tfjs-core/src/index.ts","../node_modules/@tensorflow/tfjs-layers/src/backend/common.ts","../node_modules/@tensorflow/tfjs-layers/src/backend/state.ts","../node_modules/@tensorflow/tfjs-layers/src/errors.ts","../node_modules/@tensorflow/tfjs-layers/src/utils/generic_utils.ts","../node_modules/@tensorflow/tfjs-layers/src/constraints.ts","../node_modules/@tensorflow/tfjs-layers/src/exports_constraints.ts","../node_modules/@tensorflow/tfjs-layers/src/common.ts","../node_modules/@tensorflow/tfjs-layers/src/utils/math_utils.ts","../node_modules/@tensorflow/tfjs-layers/src/backend/tfjs_backend.ts","../node_modules/@tensorflow/tfjs-layers/src/initializers.ts","../node_modules/@tensorflow/tfjs-layers/src/exports_initializers.ts","../node_modules/@tensorflow/tfjs-layers/src/utils/types_utils.ts","../node_modules/@tensorflow/tfjs-layers/src/utils/variable_utils.ts","../node_modules/@tensorflow/tfjs-layers/src/variables.ts","../node_modules/@tensorflow/tfjs-layers/src/engine/topology.ts","../node_modules/@tensorflow/tfjs-layers/src/engine/input_layer.ts","../node_modules/@tensorflow/tfjs-layers/src/base_callbacks.ts","../node_modules/@tensorflow/tfjs-layers/src/logs.ts","../node_modules/@tensorflow/tfjs-layers/src/losses.ts","../node_modules/@tensorflow/tfjs-layers/src/metrics.ts","../node_modules/@tensorflow/tfjs-layers/src/optimizers.ts","../node_modules/@tensorflow/tfjs-layers/src/utils/layer_utils.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/serialization.ts","../node_modules/@tensorflow/tfjs-layers/src/utils/serialization_utils.ts","../node_modules/@tensorflow/tfjs-layers/src/version.ts","../node_modules/@tensorflow/tfjs-layers/src/engine/container.ts","../node_modules/@tensorflow/tfjs-layers/src/engine/executor.ts","../node_modules/@tensorflow/tfjs-layers/src/engine/training_dataset.ts","../node_modules/@tensorflow/tfjs-layers/src/engine/training_tensors.ts","../node_modules/@tensorflow/tfjs-layers/src/engine/training.ts","../node_modules/@tensorflow/tfjs-layers/src/models.ts","../node_modules/@tensorflow/tfjs-layers/src/exports.ts","../node_modules/@tensorflow/tfjs-layers/src/activations.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/advanced_activations.ts","../node_modules/@tensorflow/tfjs-layers/src/regularizers.ts","../node_modules/@tensorflow/tfjs-layers/src/utils/conv_utils.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/convolutional.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/convolutional_depthwise.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/core.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/embeddings.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/merge.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/normalization.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/padding.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/pooling.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/recurrent.ts","../node_modules/@tensorflow/tfjs-layers/src/layers/wrappers.ts","../node_modules/@tensorflow/tfjs-layers/src/exports_layers.ts","../node_mo
dules/@tensorflow/tfjs-layers/src/exports_metrics.ts","../node_modules/@tensorflow/tfjs-layers/src/exports_regularizers.ts","../node_modules/@tensorflow/tfjs-layers/src/callbacks.ts","../node_modules/@tensorflow/tfjs-converter/node_modules/@protobufjs/aspromise/index.js","../node_modules/@tensorflow/tfjs-converter/node_modules/@protobufjs/base64/index.js","../node_modules/@tensorflow/tfjs-converter/node_modules/@protobufjs/eventemitter/index.js","../node_modules/@tensorflow/tfjs-converter/node_modules/@protobufjs/float/index.js","../node_modules/@tensorflow/tfjs-converter/node_modules/@protobufjs/inquire/index.js","../node_modules/@tensorflow/tfjs-converter/node_modules/@protobufjs/utf8/index.js","../node_modules/@tensorflow/tfjs-converter/node_modules/@protobufjs/pool/index.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/util/longbits.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/util/minimal.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/writer.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/writer_buffer.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/reader.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/reader_buffer.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/rpc/service.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/rpc.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/src/index-minimal.js","../node_modules/@tensorflow/tfjs-converter/node_modules/protobufjs/minimal.js","../node_modules/@tensorflow/tfjs-converter/src/data/compiled_api.js","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/utils.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/arithmetic.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/basic_math.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/control.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/convolution.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/creation.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/dynamic.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/evaluation.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/graph.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/image.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/logical.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/matrices.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/normalization.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/reduction.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/slice_join.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/op_list/transformation.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/operation_mapper.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/arithmetic_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/basic_math_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/executor/tensor_array.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/control_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/convolution_executor.ts","../node_module
s/@tensorflow/tfjs-converter/src/operations/executors/creation_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/dynamic_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/evaluation_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/graph_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/image_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/logical_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/matrices_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/normalization_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/reduction_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/slice_join_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/executors/transformation_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/operations/operation_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/executor/execution_context.ts","../node_modules/@tensorflow/tfjs-converter/src/executor/graph_executor.ts","../node_modules/@tensorflow/tfjs-converter/src/executor/frozen_model.ts","../node_modules/@tensorflow/tfjs-converter/src/version.ts","../src/version.ts","../src/index.ts"],"sourcesContent":["/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nconst contexts: {[key: string]: WebGLRenderingContext} = {};\n\nconst WEBGL_ATTRIBUTES: WebGLContextAttributes = {\n alpha: false,\n antialias: false,\n premultipliedAlpha: false,\n preserveDrawingBuffer: false,\n depth: false,\n stencil: false,\n failIfMajorPerformanceCaveat: true\n};\n\nexport function getWebGLContext(webGLVersion: number): WebGLRenderingContext {\n if (!(webGLVersion in contexts)) {\n const canvas = document.createElement('canvas');\n canvas.addEventListener('webglcontextlost', ev => {\n ev.preventDefault();\n delete contexts[webGLVersion];\n }, false);\n contexts[webGLVersion] = getWebGLRenderingContext(webGLVersion);\n }\n const gl = contexts[webGLVersion];\n if (gl.isContextLost()) {\n delete contexts[webGLVersion];\n return getWebGLContext(webGLVersion);\n }\n\n gl.disable(gl.DEPTH_TEST);\n gl.disable(gl.STENCIL_TEST);\n gl.disable(gl.BLEND);\n gl.disable(gl.DITHER);\n gl.disable(gl.POLYGON_OFFSET_FILL);\n gl.disable(gl.SAMPLE_COVERAGE);\n gl.enable(gl.SCISSOR_TEST);\n gl.enable(gl.CULL_FACE);\n gl.cullFace(gl.BACK);\n\n return contexts[webGLVersion];\n}\n\nfunction getWebGLRenderingContext(webGLVersion: number): WebGLRenderingContext {\n if (webGLVersion !== 1 && webGLVersion !== 2) {\n throw new Error('Cannot get WebGL rendering context, WebGL is disabled.');\n }\n\n const canvas = document.createElement('canvas');\n if (webGLVersion === 1) {\n return 
(canvas.getContext('webgl', WEBGL_ATTRIBUTES) ||\n canvas.getContext('experimental-webgl', WEBGL_ATTRIBUTES)) as\n WebGLRenderingContext;\n }\n return canvas.getContext('webgl2', WEBGL_ATTRIBUTES) as WebGLRenderingContext;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport function isMobile(): boolean {\n // tslint:disable-next-line:no-any\n const a = navigator.userAgent || navigator.vendor || (window as any).opera;\n // tslint:disable-next-line:max-line-length\n return /(android|bb\\d+|meego).+mobile|avantgo|bada\\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i\n .test(a) ||\n // tslint:disable-next-line:max-line-length\n /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\\-(n|u)|c55\\/|capi|ccwa|cdm\\-|cell|chtm|cldc|cmd\\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\\-s|devi|dica|dmob|do(c|p)o|ds(12|\\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\\-|_)|g1 u|g560|gene|gf\\-5|g\\-mo|go(\\.w|od)|gr(ad|un)|haie|hcit|hd\\-(m|p|t)|hei\\-|hi(pt|ta)|hp( i|ip)|hs\\-c|ht(c(\\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\\-(20|go|ma)|i230|iac( |\\-|\\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\\/)|klon|kpt |kwc\\-|kyo(c|k)|le(no|xi)|lg( g|\\/(k|l|u)|50|54|\\-[a-w])|libw|lynx|m1\\-w|m3ga|m50\\/|ma(te|ui|xo)|mc(01|21|ca)|m\\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\\-2|po(ck|rt|se)|prox|psio|pt\\-g|qa\\-a|qc(07|12|21|32|60|\\-[2-7]|i\\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\\-|oo|p\\-)|sdk\\/|se(c(\\-|0|1)|47|mc|nd|ri)|sgh\\-|shar|sie(\\-|m)|sk\\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\\-|v\\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\\-|tdg\\-|tel(i|m)|tim\\-|t\\-mo|to(pl|sh)|ts(70|m\\-|m3|m5)|tx\\-9|up(\\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\\-|your|zeto|zte\\-/i\n .test(a.substr(0, 4));\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ArrayData, DataType, DataTypeMap, FlatVector, RecursiveArray, RegularArray, TensorLike, TypedArray} from './types';\n\n/** Shuffles the array using Fisher-Yates algorithm. */\n// tslint:disable-next-line:no-any\nexport function shuffle(array: any[]|Uint32Array|Int32Array|\n Float32Array): void {\n let counter = array.length;\n let temp = 0;\n let index = 0;\n // While there are elements in the array\n while (counter > 0) {\n // Pick a random index\n index = (Math.random() * counter) | 0;\n // Decrease counter by 1\n counter--;\n // And swap the last element with it\n temp = array[counter];\n array[counter] = array[index];\n array[index] = temp;\n }\n}\n\n/** Clamps a value to a specified range. */\nexport function clamp(min: number, x: number, max: number): number {\n return Math.max(min, Math.min(x, max));\n}\n\nexport function nearestLargerEven(val: number): number {\n return val % 2 === 0 ? val : val + 1;\n}\n\nexport function sum(arr: number[]): number {\n let sum = 0;\n for (let i = 0; i < arr.length; i++) {\n sum += arr[i];\n }\n return sum;\n}\n\n/**\n * Returns a sample from a uniform [a, b) distribution.\n *\n * @param a The minimum support (inclusive).\n * @param b The maximum support (exclusive).\n * @return A pseudorandom number on the half-open interval [a,b).\n */\nexport function randUniform(a: number, b: number) {\n const r = Math.random();\n return (b * r) + (1 - r) * a;\n}\n\n/** Returns the squared Euclidean distance between two vectors. */\nexport function distSquared(a: FlatVector, b: FlatVector): number {\n let result = 0;\n for (let i = 0; i < a.length; i++) {\n const diff = Number(a[i]) - Number(b[i]);\n result += diff * diff;\n }\n return result;\n}\n\nexport function assert(expr: boolean, msg: string|(() => string)) {\n if (!expr) {\n throw new Error(typeof msg === 'string' ? 
msg : msg());\n }\n}\n\nexport function assertShapesMatch(\n shapeA: number[], shapeB: number[], errorMessagePrefix = ''): void {\n assert(\n arraysEqual(shapeA, shapeB),\n errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\n\nexport function assertNonNull(a: TensorLike): void {\n assert(\n a != null,\n `The input to the tensor constructor must be a non-null value.`);\n}\n\n// NOTE: We explicitly type out what T extends instead of any so that\n// util.flatten on a nested array of number doesn't try to infer T as a\n// number[][], causing us to explicitly type util.flatten<number>().\nexport function flatten<T extends number|boolean|Promise<number>|string>(\n arr: T|RecursiveArray<T>, ret: T[] = []): T[] {\n if (Array.isArray(arr)) {\n for (let i = 0; i < arr.length; ++i) {\n flatten(arr[i], ret);\n }\n } else {\n ret.push(arr as T);\n }\n return ret;\n}\n\nexport function sizeFromShape(shape: number[]): number {\n if (shape.length === 0) {\n // Scalar.\n return 1;\n }\n let size = shape[0];\n for (let i = 1; i < shape.length; i++) {\n size *= shape[i];\n }\n return size;\n}\n\nexport function isScalarShape(shape: number[]): boolean {\n return shape.length === 0;\n}\n\nexport function arraysEqual(n1: FlatVector, n2: FlatVector) {\n if (n1 === n2) {\n return true;\n }\n if (n1 == null || n2 == null) {\n return false;\n }\n\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n}\n\nexport function isInt(a: number): boolean {\n return a % 1 === 0;\n}\n\nexport function tanh(x: number): number {\n // tslint:disable-next-line:no-any\n if ((Math as any).tanh != null) {\n // tslint:disable-next-line:no-any\n return (Math as any).tanh(x);\n }\n if (x === Infinity) {\n return 1;\n } else if (x === -Infinity) {\n return -1;\n } else {\n const e2x = Math.exp(2 * x);\n return (e2x - 1) / (e2x + 1);\n }\n}\n\nexport function sizeToSquarishShape(size: number): [number, number] {\n for (let a = Math.floor(Math.sqrt(size)); a > 1; --a) {\n if (size % a === 0) {\n return [a, size / a];\n }\n }\n return [1, size];\n}\n\nexport function createShuffledIndices(n: number): Uint32Array {\n const shuffledIndices = new Uint32Array(n);\n for (let i = 0; i < n; ++i) {\n shuffledIndices[i] = i;\n }\n shuffle(shuffledIndices);\n return shuffledIndices;\n}\n\nexport function rightPad(a: string, size: number): string {\n if (size <= a.length) {\n return a;\n }\n return a + ' '.repeat(size - a.length);\n}\n\nexport function repeatedTry(\n checkFn: () => boolean, delayFn = (counter: number) => 0,\n maxCounter?: number): Promise<void> {\n return new Promise<void>((resolve, reject) => {\n let tryCount = 0;\n\n const tryFn = () => {\n if (checkFn()) {\n resolve();\n return;\n }\n\n tryCount++;\n\n const nextBackoff = delayFn(tryCount);\n\n if (maxCounter != null && tryCount >= maxCounter) {\n reject();\n return;\n }\n setTimeout(tryFn, nextBackoff);\n };\n\n tryFn();\n });\n}\n\n/**\n * Given the full size of the array and a shape that may contain -1 as the\n * implicit dimension, returns the inferred shape where -1 is replaced.\n * E.g. 
For shape=[2, -1, 3] and size=24, it will return [2, 4, 3].\n *\n * @param shape The shape, which may contain -1 in some dimension.\n * @param size The full size (number of elements) of the array.\n * @return The inferred shape where -1 is replaced with the inferred size.\n */\nexport function inferFromImplicitShape(\n shape: number[], size: number): number[] {\n let shapeProd = 1;\n let implicitIdx = -1;\n\n for (let i = 0; i < shape.length; ++i) {\n if (shape[i] >= 0) {\n shapeProd *= shape[i];\n } else if (shape[i] === -1) {\n if (implicitIdx !== -1) {\n throw Error(\n `Shapes can only have 1 implicit size. ` +\n `Found -1 at dim ${implicitIdx} and dim ${i}`);\n }\n implicitIdx = i;\n } else if (shape[i] < 0) {\n throw Error(`Shapes can not be < 0. Found ${shape[i]} at dim ${i}`);\n }\n }\n\n if (implicitIdx === -1) {\n if (size > 0 && size !== shapeProd) {\n throw Error(`Size(${size}) must match the product of shape ${shape}`);\n }\n return shape;\n }\n\n if (shapeProd === 0) {\n throw Error(\n `Cannot infer the missing size in [${shape}] when ` +\n `there are 0 elements`);\n }\n if (size % shapeProd !== 0) {\n throw Error(\n `The implicit shape can't be a fractional number. ` +\n `Got ${size} / ${shapeProd}`);\n }\n\n const newShape = shape.slice();\n newShape[implicitIdx] = size / shapeProd;\n return newShape;\n}\n\n/** Reduces the shape by removing all dimensions of shape 1. */\nexport function squeezeShape(shape: number[], axis?: number[]):\n {newShape: number[], keptDims: number[]} {\n const newShape: number[] = [];\n const keptDims: number[] = [];\n let j = 0;\n for (let i = 0; i < shape.length; ++i) {\n if (axis != null) {\n if (axis[j] === i && shape[i] !== 1) {\n throw new Error(\n `Can't squeeze axis ${i} since its dim '${shape[i]}' is not 1`);\n }\n if ((axis[j] == null || axis[j] > i) && shape[i] === 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n if (axis[j] <= i) {\n j++;\n }\n }\n if (shape[i] !== 1) {\n newShape.push(shape[i]);\n keptDims.push(i);\n }\n }\n return {newShape, keptDims};\n}\n\nexport function getTypedArrayFromDType<D extends DataType>(\n dtype: D, size: number): DataTypeMap[D] {\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n } else if (dtype === 'int32') {\n values = new Int32Array(size);\n } else if (dtype === 'bool') {\n values = new Uint8Array(size);\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\n\nexport function checkComputationForNaN<D extends DataType>(\n vals: DataTypeMap[D], dtype: D, name: string): void {\n if (dtype !== 'float32') {\n // Only floating point computations will generate NaN values\n return;\n }\n for (let i = 0; i < vals.length; i++) {\n if (isNaN(vals[i])) {\n throw Error(`The result of the '${name}' has NaNs.`);\n }\n }\n}\n\nexport function checkConversionForNaN<D extends DataType>(\n vals: DataTypeMap[D]|number[], dtype: D): void {\n if (dtype === 'float32') {\n // NaN is valid for floating point conversions\n return;\n }\n\n for (let i = 0; i < vals.length; i++) {\n if (isNaN(vals[i])) {\n throw Error(`NaN is not a valid value for dtype: '${dtype}'.`);\n }\n }\n}\n\n/**\n * Returns true if the new type can't encode the old type without loss of\n * precision.\n */\nexport function hasEncodingLoss(oldType: DataType, newType: DataType): boolean {\n if (newType === 'complex64') {\n return false;\n }\n if (newType === 'float32' && oldType !== 'complex64') {\n return false;\n }\n if (newType === 'int32' && oldType !== 'float32' 
&& oldType !== 'complex64') {\n return false;\n }\n if (newType === 'bool' && oldType === 'bool') {\n return false;\n }\n return true;\n}\n\nfunction copyTypedArray<D extends DataType>(\n array: DataTypeMap[D]|number[]|boolean[], dtype: D,\n debugMode: boolean): DataTypeMap[D] {\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(array as number[]);\n } else if (dtype === 'int32') {\n if (debugMode) {\n checkConversionForNaN(array as number[], dtype);\n }\n return new Int32Array(array as number[]);\n } else if (dtype === 'bool') {\n const bool = new Uint8Array(array.length);\n for (let i = 0; i < bool.length; ++i) {\n if (Math.round(array[i] as number) !== 0) {\n bool[i] = 1;\n }\n }\n return bool;\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n\nexport function isTypedArray(a: TypedArray|number|boolean|RegularArray<number>|\n RegularArray<boolean>): boolean {\n return a instanceof Float32Array || a instanceof Int32Array ||\n a instanceof Uint8Array;\n}\n\nexport function bytesPerElement(dtype: DataType): number {\n if (dtype === 'float32' || dtype === 'int32') {\n return 4;\n } else if (dtype === 'complex64') {\n return 8;\n } else if (dtype === 'bool') {\n return 1;\n } else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n\nexport function isFunction(f: Function) {\n return !!(f && f.constructor && f.call && f.apply);\n}\n\nexport function nearestDivisor(size: number, start: number): number {\n for (let i = start; i < size; ++i) {\n if (size % i === 0) {\n return i;\n }\n }\n return size;\n}\n\nexport function computeStrides(shape: number[]): number[] {\n const rank = shape.length;\n if (rank < 2) {\n return [];\n }\n\n // Last dimension has implicit stride of 1, thus having D-1 (instead of D)\n // strides.\n const strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * shape[i + 1];\n }\n return strides;\n}\n\nexport function toTypedArray<D extends DataType>(\n a: ArrayData<D>, dtype: D, debugMode: boolean): DataTypeMap[D] {\n if (noConversionNeeded(a, dtype)) {\n return a as DataTypeMap[D];\n }\n if (Array.isArray(a)) {\n a = flatten(a as number[]);\n }\n return copyTypedArray(a, dtype, debugMode);\n}\n\nfunction noConversionNeeded<D extends DataType>(\n a: ArrayData<D>, dtype: D): boolean {\n return (a instanceof Float32Array && dtype === 'float32') ||\n (a instanceof Int32Array && dtype === 'int32') ||\n (a instanceof Uint8Array && dtype === 'bool');\n}\n\nexport function makeOnesTypedArray<D extends DataType>(\n size: number, dtype: D): DataTypeMap[D] {\n const array = makeZerosTypedArray(size, dtype);\n for (let i = 0; i < array.length; i++) {\n array[i] = 1;\n }\n return array;\n}\n\nexport function makeZerosTypedArray<D extends DataType>(\n size: number, dtype: D): DataTypeMap[D] {\n if (dtype == null || dtype === 'float32' || dtype === 'complex64') {\n return new Float32Array(size);\n } else if (dtype === 'int32') {\n return new Int32Array(size);\n } else if (dtype === 'bool') {\n return new Uint8Array(size);\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\n\n/**\n * Returns the current high-resolution real time in milliseconds. 
It is\n * relative to an arbitrary time in the past.\n */\nexport function now(): number {\n if (typeof performance !== 'undefined') {\n return performance.now();\n } else if (typeof process !== 'undefined') {\n const time = process.hrtime();\n return time[0] * 1000 + time[1] / 1000000;\n } else {\n throw new Error(\n 'Cannot measure time in this environment. You should run tf.js ' +\n 'in the browser or in Node.js');\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {BackendTimer} from './kernels/backend';\nimport {Tensor} from './tensor';\nimport {TypedArray} from './types';\nimport * as util from './util';\n\nexport class Profiler {\n constructor(private backendTimer: BackendTimer, private logger?: Logger) {\n if (logger == null) {\n this.logger = new Logger();\n }\n }\n\n profileKernel<T extends Tensor|Tensor[]>(name: string, f: () => T | Tensor[]):\n T {\n let result: T|Tensor[];\n const holdResultWrapperFn = () => {\n result = f();\n };\n const timer = this.backendTimer.time(holdResultWrapperFn);\n\n const results: Tensor[] =\n Array.isArray(result) ? result : [result] as Tensor[];\n results.forEach(r => {\n const vals = r.dataSync();\n util.checkComputationForNaN(vals, r.dtype, name);\n\n timer.then(timing => {\n let extraInfo = '';\n if (timing.getExtraProfileInfo != null) {\n extraInfo = timing.getExtraProfileInfo();\n }\n\n this.logger.logKernelProfile(name, r, vals, timing.kernelMs, extraInfo);\n });\n });\n\n return result as T;\n }\n}\n\nexport class Logger {\n logKernelProfile(\n name: string, result: Tensor, vals: TypedArray, timeMs: number,\n extraInfo?: string) {\n const time = util.rightPad(`${timeMs}ms`, 9);\n const paddedName = util.rightPad(name, 25);\n const rank = result.rank;\n const size = result.size;\n const shape = util.rightPad(result.shape.toString(), 14);\n\n console.log(\n `%c${paddedName}\\t%c${time}\\t%c${rank}D ${shape}\\t%c${size}\\t%c${\n extraInfo}`,\n 'font-weight:bold', 'color:red', 'color:blue', 'color: orange',\n 'color: green');\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {DataType, TypedArray} from './types';\nimport {computeStrides, rightPad, sizeFromShape} from './util';\n\n// Maximum number of values before we decide to show ellipsis.\nconst FORMAT_LIMIT_NUM_VALS = 20;\n// Number of first and last values to show when displaying a, b,...,y, z.\nconst FORMAT_NUM_FIRST_LAST_VALS = 3;\n// Number of significant digits to show.\nconst FORMAT_NUM_SIG_DIGITS = 7;\n\nexport function tensorToString(\n vals: TypedArray, shape: number[], dtype: DataType, verbose: boolean) {\n const strides = computeStrides(shape);\n const padPerCol = computeMaxSizePerColumn(vals, shape, dtype, strides);\n const rank = shape.length;\n const valsLines = subTensorToString(vals, shape, dtype, strides, padPerCol);\n const lines = ['Tensor'];\n if (verbose) {\n lines.push(` dtype: ${dtype}`);\n lines.push(` rank: ${rank}`);\n lines.push(` shape: [${shape}]`);\n lines.push(` values:`);\n }\n lines.push(valsLines.map(l => ' ' + l).join('\\n'));\n return lines.join('\\n');\n}\n\nfunction computeMaxSizePerColumn(\n vals: TypedArray, shape: number[], dtype: DataType,\n strides: number[]): number[] {\n const n = sizeFromShape(shape);\n const numCols = strides[strides.length - 1];\n const padPerCol = new Array(numCols).fill(0);\n const rank = shape.length;\n const valuesOrTuples =\n dtype === 'complex64' ? createComplexTuples(vals) : vals;\n\n if (rank > 1) {\n for (let row = 0; row < n / numCols; row++) {\n const offset = row * numCols;\n for (let j = 0; j < numCols; j++) {\n padPerCol[j] = Math.max(\n padPerCol[j], valToString(valuesOrTuples[offset + j], 0).length);\n }\n }\n }\n return padPerCol;\n}\n\nfunction valToString(val: number|[number, number], pad: number) {\n let valStr: string;\n if (Array.isArray(val)) {\n valStr = `${parseFloat(val[0].toFixed(FORMAT_NUM_SIG_DIGITS))} + ` +\n `${parseFloat(val[1].toFixed(FORMAT_NUM_SIG_DIGITS))}j`;\n } else {\n valStr = parseFloat(val.toFixed(FORMAT_NUM_SIG_DIGITS)).toString();\n }\n\n return rightPad(valStr, pad);\n}\n\nfunction subTensorToString(\n vals: TypedArray, shape: number[], dtype: DataType, strides: number[],\n padPerCol: number[], isLast = true): string[] {\n const storagePerElement = dtype === 'complex64' ? 
2 : 1;\n\n const size = shape[0];\n const rank = shape.length;\n if (rank === 0) {\n if (dtype === 'complex64') {\n const complexTuple = createComplexTuples(vals);\n return [valToString(complexTuple[0], 0)];\n }\n return [vals[0].toString()];\n }\n\n if (rank === 1) {\n if (size > FORMAT_LIMIT_NUM_VALS) {\n const firstValsSize = FORMAT_NUM_FIRST_LAST_VALS * storagePerElement;\n\n let firstVals: Array<number|[number, number]> =\n Array.from(vals.subarray(0, firstValsSize));\n let lastVals: Array<number|[number, number]> = Array.from(vals.subarray(\n size - FORMAT_NUM_FIRST_LAST_VALS * storagePerElement, size));\n if (dtype === 'complex64') {\n firstVals = createComplexTuples(firstVals as number[]);\n lastVals = createComplexTuples(lastVals as number[]);\n }\n\n return [\n '[' + firstVals.map((x, i) => valToString(x, padPerCol[i])).join(', ') +\n ', ..., ' +\n lastVals\n .map(\n (x, i) => valToString(\n x, padPerCol[size - FORMAT_NUM_FIRST_LAST_VALS + i]))\n .join(', ') +\n ']'\n ];\n }\n const displayVals: Array<number|[number, number]> =\n dtype === 'complex64' ? createComplexTuples(vals) : Array.from(vals);\n\n return [\n '[' + displayVals.map((x, i) => valToString(x, padPerCol[i])).join(', ') +\n ']'\n ];\n }\n\n // The array is rank 2 or more.\n const subshape = shape.slice(1);\n const substrides = strides.slice(1);\n const stride = strides[0] * storagePerElement;\n const lines: string[] = [];\n if (size > FORMAT_LIMIT_NUM_VALS) {\n for (let i = 0; i < FORMAT_NUM_FIRST_LAST_VALS; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(\n vals.subarray(start, end), subshape, dtype, substrides, padPerCol,\n false /* isLast */));\n }\n lines.push('...');\n for (let i = size - FORMAT_NUM_FIRST_LAST_VALS; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(\n vals.subarray(start, end), subshape, dtype, substrides, padPerCol,\n i === size - 1 /* isLast */));\n }\n } else {\n for (let i = 0; i < size; i++) {\n const start = i * stride;\n const end = start + stride;\n lines.push(...subTensorToString(\n vals.subarray(start, end), subshape, dtype, substrides, padPerCol,\n i === size - 1 /* isLast */));\n }\n }\n const sep = rank === 2 ? ',' : '';\n lines[0] = '[' + lines[0] + sep;\n for (let i = 1; i < lines.length - 1; i++) {\n lines[i] = ' ' + lines[i] + sep;\n }\n let newLineSep = ',\\n';\n for (let i = 2; i < rank; i++) {\n newLineSep += '\\n';\n }\n lines[lines.length - 1] =\n ' ' + lines[lines.length - 1] + ']' + (isLast ? '' : newLineSep);\n return lines;\n}\n\nfunction createComplexTuples(vals: number[]|\n TypedArray): Array<[number, number]> {\n const complexTuples: Array<[number, number]> = [];\n for (let i = 0; i < vals.length; i += 2) {\n complexTuples.push([vals[i], vals[i + 1]] as [number, number]);\n }\n return complexTuples;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {tensorToString} from './tensor_format';\nimport {DataType, Rank, ShapeMap, TensorLike, TypedArray} from './types';\nimport * as util from './util';\nimport {computeStrides} from './util';\n\n/** @hidden */\nexport interface TensorData {\n dataId?: DataId;\n values?: TypedArray;\n}\n\n/**\n * A mutable object, similar to `tf.Tensor`, that allows users to set values\n * at locations before converting to an immutable `tf.Tensor`.\n *\n * See `tf.buffer` for creating a tensor buffer.\n */\n/** @doc {heading: 'Tensors', subheading: 'Classes'} */\nexport class TensorBuffer<R extends Rank> {\n size: number;\n shape: ShapeMap[R];\n strides: number[];\n values: TypedArray;\n\n constructor(shape: ShapeMap[R], public dtype: DataType, values?: TypedArray) {\n this.shape = shape.slice();\n this.size = util.sizeFromShape(shape);\n\n if (values != null) {\n const n = values.length;\n util.assert(\n n === this.size,\n `Length of values '${n}' does not match the size ` +\n `inferred by the shape '${this.size}'.`);\n }\n if (dtype === 'complex64') {\n throw new Error(\n `complex64 dtype TensorBuffers are not supported. 
Please create ` +\n `a TensorBuffer for the real and imaginary parts separately and ` +\n `call tf.complex(real, imag).`);\n }\n this.values = values ||\n util.getTypedArrayFromDType(dtype, util.sizeFromShape(this.shape));\n this.strides = computeStrides(shape);\n }\n\n /**\n * Sets a value in the buffer at a given location.\n *\n * @param value The value to set.\n * @param locs The location indices.\n */\n /** @doc {heading: 'Tensors', subheading: 'Creation'} */\n set(value: number, ...locs: number[]) {\n if (locs.length === 0) {\n locs = [0];\n }\n util.assert(\n locs.length === this.rank,\n `The number of provided coordinates (${locs.length}) must ` +\n `match the rank (${this.rank})`);\n\n const index = this.locToIndex(locs);\n this.values[index] = value;\n }\n\n /**\n * Returns the value in the buffer at the provided location.\n *\n * @param locs The location indices.\n */\n /** @doc {heading: 'Tensors', subheading: 'Creation'} */\n get(...locs: number[]): number {\n if (locs.length === 0) {\n locs = [0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return this.values[index];\n }\n\n locToIndex(locs: number[]): number {\n if (this.rank === 0) {\n return 0;\n } else if (this.rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return index;\n }\n\n indexToLoc(index: number): number[] {\n if (this.rank === 0) {\n return [];\n } else if (this.rank === 1) {\n return [index];\n }\n const locs: number[] = new Array(this.shape.length);\n for (let i = 0; i < locs.length - 1; ++i) {\n locs[i] = Math.floor(index / this.strides[i]);\n index -= locs[i] * this.strides[i];\n }\n locs[locs.length - 1] = index;\n return locs;\n }\n\n get rank() {\n return this.shape.length;\n }\n\n /**\n * Creates an immutable `tf.Tensor` object from the buffer.\n */\n /** @doc {heading: 'Tensors', subheading: 'Creation'} */\n toTensor(): Tensor<R> {\n return Tensor.make(this.shape, {values: this.values}, this.dtype);\n }\n}\n\nexport interface TensorTracker {\n registerTensor(t: Tensor): void;\n disposeTensor(t: Tensor): void;\n write(dataId: DataId, values: TypedArray): void;\n read(dataId: DataId): Promise<TypedArray>;\n readSync(dataId: DataId): TypedArray;\n registerVariable(v: Variable): void;\n nextTensorId(): number;\n nextVariableId(): number;\n}\n\n/**\n * The Tensor class calls into this handler to delegate chaining operations.\n */\nexport interface OpHandler {\n cast<T extends Tensor>(x: T, dtype: DataType): T;\n buffer<R extends Rank>(\n shape: ShapeMap[R], dtype: DataType,\n values?: TypedArray): TensorBuffer<R>;\n print<T extends Tensor>(x: T, verbose: boolean): void;\n reshape<R2 extends Rank>(x: Tensor, shape: ShapeMap[R2]): Tensor<R2>;\n expandDims<R2 extends Rank>(x: Tensor, axis: number): Tensor<R2>;\n cumsum<T extends Tensor>(\n x: Tensor, axis: number, exclusive: boolean, reverse: boolean): T;\n squeeze<T extends Tensor>(x: Tensor, axis?: number[]): T;\n clone<T extends Tensor>(x: T): T;\n tile<T extends Tensor>(x: T, reps: number[]): T;\n gather<T extends Tensor>(x: T, indices: Tensor1D, axis: number): T;\n matMul<T extends Tensor>(\n a: T, b: T, transposeA: boolean, transposeB: boolean): T;\n dot(t1: Tensor, t2: Tensor): Tensor;\n norm(\n x: Tensor, ord: number|'euclidean'|'fro', axis: number|number[],\n keepDims: boolean): Tensor;\n slice<R extends Rank, T extends Tensor<R>>(\n x: T, begin: 
number|number[], size?: number|number[]): T;\n split<T extends Tensor>(\n x: T, numOrSizeSplits: number[]|number, axis?: number): T[];\n reverse<T extends Tensor>(x: T, axis?: number|number[]): T;\n concat<T extends Tensor>(tensors: T[], axis: number): T;\n stack<T extends Tensor>(tensors: T[], axis: number): Tensor;\n unstack<T extends Tensor>(value: T, axis: number): Tensor[];\n pad<T extends Tensor>(\n x: T, paddings: Array<[number, number]>, constantValue: number): T;\n batchNormalization<R extends Rank>(\n x: Tensor<R>, mean: Tensor<R>|Tensor1D, variance: Tensor<R>|Tensor1D,\n varianceEpsilon: number, scale?: Tensor<R>|Tensor1D,\n offset?: Tensor<R>|Tensor1D): Tensor<R>;\n all<T extends Tensor>(x: Tensor, axis: number|number[], keepDims: boolean): T;\n any<T extends Tensor>(x: Tensor, axis: number|number[], keepDims: boolean): T;\n logSumExp<T extends Tensor>(\n x: Tensor, axis: number|number[], keepDims: boolean): T;\n sum<T extends Tensor>(x: Tensor, axis: number|number[], keepDims: boolean): T;\n prod<T extends Tensor>(x: Tensor, axis: number|number[], keepDims: boolean):\n T;\n mean<T extends Tensor>(x: Tensor, axis: number|number[], keepDims: boolean):\n T;\n min<T extends Tensor>(x: Tensor, axis: number|number[], keepDims: boolean): T;\n max<T extends Tensor>(x: Tensor, axis: number|number[], keepDims: boolean): T;\n argMin<T extends Tensor>(x: Tensor, axis: number): T;\n argMax<T extends Tensor>(x: Tensor, axis: number): T;\n add<T extends Tensor>(a: Tensor, b: Tensor): T;\n addStrict<T extends Tensor>(a: T, b: T): T;\n atan2<T extends Tensor>(a: Tensor, b: Tensor): T;\n sub<T extends Tensor>(a: Tensor, b: Tensor): T;\n subStrict<T extends Tensor>(a: T, b: T): T;\n pow<T extends Tensor>(base: T, exp: Tensor): T;\n powStrict<T extends Tensor>(base: T, exp: Tensor): T;\n mul<T extends Tensor>(a: Tensor, b: Tensor): T;\n mulStrict<T extends Tensor>(a: T, b: T): T;\n div<T extends Tensor>(a: Tensor, b: Tensor): T;\n floorDiv<T extends Tensor>(a: Tensor, b: Tensor): T;\n divStrict<T extends Tensor>(a: T, b: T): T;\n mod<T extends Tensor>(a: Tensor, b: Tensor): T;\n modStrict<T extends Tensor>(a: T, b: T): T;\n minimum<T extends Tensor>(a: Tensor, b: Tensor): T;\n minimumStrict<T extends Tensor>(a: T, b: T): T;\n maximum<T extends Tensor>(a: Tensor, b: Tensor): T;\n maximumStrict<T extends Tensor>(a: T, b: T): T;\n squaredDifference<T extends Tensor>(a: Tensor, b: Tensor): T;\n squaredDifferenceStrict<T extends Tensor>(a: T, b: T): T;\n transpose<T extends Tensor>(x: T, perm?: number[]): T;\n logicalNot<T extends Tensor>(x: T): T;\n logicalAnd<T extends Tensor>(a: Tensor, b: Tensor): T;\n logicalOr<T extends Tensor>(a: Tensor, b: Tensor): T;\n logicalXor<T extends Tensor>(a: Tensor, b: Tensor): T;\n where<T extends Tensor>(condition: Tensor, a: T, b: T): T;\n notEqual<T extends Tensor>(a: Tensor, b: Tensor): T;\n notEqualStrict<T extends Tensor>(a: T, b: T): T;\n less<T extends Tensor>(a: Tensor, b: Tensor): T;\n lessStrict<T extends Tensor>(a: T, b: T): T;\n equal<T extends Tensor>(a: Tensor, b: Tensor): T;\n equalStrict<T extends Tensor>(a: T, b: T): T;\n lessEqual<T extends Tensor>(a: Tensor, b: Tensor): T;\n lessEqualStrict<T extends Tensor>(a: T, b: T): T;\n greater<T extends Tensor>(a: Tensor, b: Tensor): T;\n greaterStrict<T extends Tensor>(a: T, b: T): T;\n greaterEqual<T extends Tensor>(a: Tensor, b: Tensor): T;\n greaterEqualStrict<T extends Tensor>(a: T, b: T): T;\n neg<T extends Tensor>(x: T): T;\n ceil<T extends Tensor>(x: T): T;\n floor<T extends Tensor>(x: T): T;\n 
sign<T extends Tensor>(x: T): T;\n round<T extends Tensor>(x: T): T;\n exp<T extends Tensor>(x: T): T;\n expm1<T extends Tensor>(x: T): T;\n log<T extends Tensor>(x: T): T;\n log1p<T extends Tensor>(x: T): T;\n sqrt<T extends Tensor>(x: T): T;\n rsqrt<T extends Tensor>(x: T): T;\n square<T extends Tensor>(x: T): T;\n reciprocal<T extends Tensor>(x: T): T;\n abs<T extends Tensor>(x: T): T;\n clipByValue<T extends Tensor>(\n x: T, clipValueMin: number, clipValueMax: number): T;\n sigmoid<T extends Tensor>(x: T): T;\n logSigmoid<T extends Tensor>(x: T): T;\n softplus<T extends Tensor>(x: T): T;\n zerosLike<T extends Tensor>(x: T): T;\n onesLike<T extends Tensor>(x: T): T;\n sin<T extends Tensor>(x: T): T;\n cos<T extends Tensor>(x: T): T;\n tan<T extends Tensor>(x: T): T;\n asin<T extends Tensor>(x: T): T;\n acos<T extends Tensor>(x: T): T;\n atan<T extends Tensor>(x: T): T;\n sinh<T extends Tensor>(x: T): T;\n cosh<T extends Tensor>(x: T): T;\n tanh<T extends Tensor>(x: T): T;\n asinh<T extends Tensor>(x: T): T;\n acosh<T extends Tensor>(x: T): T;\n atanh<T extends Tensor>(x: T): T;\n erf<T extends Tensor>(x: T): T;\n step<T extends Tensor>(x: T, alpha: number): T;\n relu<T extends Tensor>(x: T): T;\n elu<T extends Tensor>(x: T): T;\n selu<T extends Tensor>(x: T): T;\n leakyRelu<T extends Tensor>(x: T, alpha: number): T;\n prelu<T extends Tensor>(x: T, alpha: T): T;\n softmax<T extends Tensor>(logits: T, dim: number): T;\n logSoftmax<T extends Tensor>(logits: T, axis: number): T;\n image: {\n resizeBilinear<T extends Tensor3D|Tensor4D>(\n images: T, size: [number, number], alignCorners: boolean): T;\n resizeNearestNeighbor<T extends Tensor3D|Tensor4D>(\n images: T, size: [number, number], alignCorners: boolean): T;\n };\n conv1d<T extends Tensor2D|Tensor3D>(\n x: T, filter: Tensor3D, stride: number, pad: 'valid'|'same'|number,\n dataFormat: 'NWC'|'NCW', dilation: number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T;\n conv2d<T extends Tensor3D|Tensor4D>(\n x: T, filter: Tensor4D, strides: [number, number]|number,\n pad: 'valid'|'same'|number, dataFormat: 'NHWC'|'NCHW',\n dilations: [number, number]|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T;\n conv2dTranspose<T extends Tensor3D|Tensor4D>(\n x: T, filter: Tensor4D,\n outputShape: [number, number, number, number]|[number, number, number],\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T;\n depthwiseConv2d<T extends Tensor3D|Tensor4D>(\n x: T, filter: Tensor4D, strides: [number, number]|number,\n pad: 'valid'|'same'|number, dataFormat: 'NHWC'|'NCHW',\n dilations: [number, number]|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T;\n separableConv2d<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, depthwiseFilter: Tensor4D|TensorLike,\n pointwiseFilter: Tensor4D|TensorLike, strides: [number, number]|number,\n pad: 'valid'|'same', dilation: [number, number]|number,\n dataFormat: 'NHWC'|'NCHW'): T;\n maxPool<T extends Tensor3D|Tensor4D>(\n x: T, filterSize: [number, number]|number,\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T;\n avgPool<T extends Tensor3D|Tensor4D>(\n x: T, filterSize: [number, number]|number,\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T;\n pool<T extends Tensor3D|Tensor4D>(\n input: T, windowShape: [number, number]|number, poolingType: 'avg'|'max',\n padding: 'valid'|'same'|number, diationRate?: [number, number]|number,\n 
strides?: [number, number]|number): T;\n localResponseNormalization<T extends Tensor3D|Tensor4D>(\n x: T, depthRadius: number, bias: number, alpha: number, beta: number): T;\n unsortedSegmentSum<T extends Tensor>(\n x: T, segmentIds: Tensor1D, numSegments: number): T;\n batchToSpaceND<T extends Tensor>(\n x: T, blockShape: number[], crops: number[][]): T;\n spaceToBatchND<T extends Tensor>(\n x: T, blockShape: number[], paddings: number[][]): T;\n topk<T extends Tensor>(x: T, k: number, sorted: boolean):\n {values: T, indices: T};\n stridedSlice<T extends Tensor>(\n x: T, begin: number[], end: number[], strides: number[],\n beginMask: number, endMask: number): T;\n depthToSpace(x: Tensor4D, blockSize: number, dataFormat: string): Tensor4D;\n spectral: {fft(x: Tensor): Tensor; ifft(x: Tensor): Tensor;};\n}\n\n// For tracking tensor creation and disposal.\nlet trackerFn: () => TensorTracker = null;\n// Used by chaining methods to call into ops.\nlet opHandler: OpHandler = null;\n\n/**\n * An external consumer can register itself as the tensor tracker. This way\n * the Tensor class can notify the tracker for every tensor created and\n * disposed.\n */\nexport function setTensorTracker(fn: () => TensorTracker) {\n trackerFn = fn;\n}\n\n/**\n * An external consumer can register itself as the op handler. This way the\n * Tensor class can have chaining methods that call into ops via the op handler.\n */\nexport function setOpHandler(handler: OpHandler) {\n opHandler = handler;\n}\n\n/**\n * We wrap data id since we use weak map to avoid memory leaks.\n * Since we have our own memory management, we have a reference counter\n * mapping a tensor to its data, so there is always a pointer (even if that\n * data is otherwise garbage collectable).\n * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/\n * Global_Objects/WeakMap\n */\nexport type DataId = object; // object instead of {} to force non-primitive.\n\n/**\n * A `tf.Tensor` object represents an immutable, multidimensional array of\n * numbers that has a shape and a data type.\n *\n * See `tf.tensor` for details on how to create a `tf.Tensor`.\n */\n/** @doc {heading: 'Tensors', subheading: 'Classes'} */\nexport class Tensor<R extends Rank = Rank> {\n /** Unique id of this tensor. */\n readonly id: number;\n /**\n * Id of the bucket holding the data for this tensor. Multiple arrays can\n * point to the same bucket (e.g. when calling array.reshape()).\n */\n dataId: DataId;\n /** The shape of the tensor. */\n readonly shape: ShapeMap[R];\n /** Number of elements in the tensor. */\n readonly size: number;\n /** The data type for the array. */\n readonly dtype: DataType;\n /** The rank type for the array (see `Rank` enum). */\n readonly rankType: R;\n\n /**\n * Number of elements to skip in each dimension when indexing. See\n * https://docs.scipy.org/doc/numpy/reference/generated/\\\n * numpy.ndarray.strides.html\n */\n readonly strides: number[];\n\n protected constructor(\n shape: ShapeMap[R], dtype: DataType, values?: TypedArray,\n dataId?: DataId) {\n this.shape = shape.slice();\n this.dtype = dtype || 'float32';\n this.size = util.sizeFromShape(shape);\n if (values != null) {\n util.assert(\n this.size === values.length,\n `Based on the provided shape, [${shape}], and dtype ` +\n `${this.dtype}, the tensor should have ` +\n `${this.size} values but has ${values.length}`);\n }\n\n this.strides = computeStrides(shape);\n this.dataId = dataId != null ? 
dataId : {};\n this.id = trackerFn().nextTensorId();\n this.rankType = (this.rank < 5 ? this.rank.toString() : 'higher') as R;\n trackerFn().registerTensor(this);\n if (values != null) {\n trackerFn().write(this.dataId, values);\n }\n }\n\n /**\n * Makes a new tensor with the provided shape and values. Values should be in\n * a flat array.\n */\n static make<T extends Tensor<R>, D extends DataType = 'float32',\n R extends Rank = Rank>(\n shape: ShapeMap[R], data: TensorData, dtype?: D): T {\n return new Tensor(shape, dtype, data.values, data.dataId) as T;\n }\n\n /** Flatten a Tensor to a 1D array. */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n flatten(): Tensor1D {\n this.throwIfDisposed();\n return this.as1D();\n }\n\n /** Converts a size-1 `tf.Tensor` to a `tf.Scalar`. */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n asScalar(): Scalar {\n this.throwIfDisposed();\n util.assert(this.size === 1, 'The array must have only 1 element.');\n return this.reshape<Rank.R0>([]);\n }\n\n /** Converts a `tf.Tensor` to a `tf.Tensor1D`. */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n as1D(): Tensor1D {\n this.throwIfDisposed();\n return this.reshape<Rank.R1>([this.size]);\n }\n\n /**\n * Converts a `tf.Tensor` to a `tf.Tensor2D`.\n *\n * @param rows Number of rows in `tf.Tensor2D`.\n * @param columns Number of columns in `tf.Tensor2D`.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n as2D(rows: number, columns: number): Tensor2D {\n this.throwIfDisposed();\n return this.reshape<Rank.R2>([rows, columns]);\n }\n\n /**\n * Converts a `tf.Tensor` to a `tf.Tensor3D`.\n *\n * @param rows Number of rows in `tf.Tensor3D`.\n * @param columns Number of columns in `tf.Tensor3D`.\n * @param depth Depth of `tf.Tensor3D`.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n as3D(rows: number, columns: number, depth: number): Tensor3D {\n this.throwIfDisposed();\n return this.reshape<Rank.R3>([rows, columns, depth]);\n }\n\n /**\n * Converts a `tf.Tensor` to a `tf.Tensor4D`.\n *\n * @param rows Number of rows in `tf.Tensor4D`.\n * @param columns Number of columns in `tf.Tensor4D`.\n * @param depth Depth of `tf.Tensor4D`.\n * @param depth2 4th dimension of `tf.Tensor4D`.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n as4D(rows: number, columns: number, depth: number, depth2: number): Tensor4D {\n this.throwIfDisposed();\n return this.reshape<Rank.R4>([rows, columns, depth, depth2]);\n }\n\n /**\n * Casts a `tf.Tensor` to a specified dtype.\n *\n * @param dtype Data-type to cast the tensor to.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n asType<T extends this>(this: T, dtype: DataType): T {\n this.throwIfDisposed();\n return opHandler.cast(this, dtype) as T;\n }\n\n get rank(): number {\n return this.shape.length;\n }\n\n /**\n * Returns the value in the tensor at the provided location.\n * If using WebGL backend, this is a blocking call.\n * Prefer calling the `async data()[flatIndex]` method instead.\n *\n * @param locs The location indices.\n */\n get(...locs: number[]) {\n util.assert(\n locs.length === this.rank,\n 'Number of coordinates in get() must match the rank of the tensor');\n util.assert(\n this.dtype !== 'complex64',\n 'Tensor.get() is not supported for complex64 tensors yet.');\n this.throwIfDisposed();\n if (locs.length === 0) {\n locs = [0];\n }\n let index = locs[locs.length - 1];\n for (let i = 0; i < locs.length - 1; ++i) {\n index += this.strides[i] * locs[i];\n }\n return 
this.dataSync()[index];\n }\n\n /** Returns a `tf.TensorBuffer` that holds the underlying data. */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n buffer(): TensorBuffer<R> {\n return opHandler.buffer(this.shape, this.dtype, this.dataSync());\n }\n\n /**\n * Asynchronously downloads the values from the `tf.Tensor`. Returns a promise\n * of `TypedArray` that resolves when the computation has finished.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n async data(): Promise<TypedArray> {\n this.throwIfDisposed();\n return trackerFn().read(this.dataId);\n }\n\n /**\n * Synchronously downloads the values from the `tf.Tensor`. This blocks the UI\n * thread until the values are ready, which can cause performance issues.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n dataSync(): TypedArray {\n this.throwIfDisposed();\n return trackerFn().readSync(this.dataId);\n }\n\n /**\n * Disposes `tf.Tensor` from memory.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n dispose(): void {\n if (this.isDisposed) {\n return;\n }\n trackerFn().disposeTensor(this);\n this.isDisposedInternal = true;\n }\n\n private isDisposedInternal = false;\n get isDisposed(): boolean {\n return this.isDisposedInternal;\n }\n\n private throwIfDisposed() {\n if (this.isDisposed) {\n throw new Error(`Tensor is disposed.`);\n }\n }\n\n /** Casts the array to type `float32` */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n toFloat<T extends this>(this: T): T {\n return this.asType('float32');\n }\n\n /** Casts the array to type `int32` */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n toInt() {\n return this.asType('int32');\n }\n\n /** Casts the array to type `bool` */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n toBool() {\n return this.asType('bool');\n }\n\n /**\n * Prints the `tf.Tensor`. See `tf.print` for details.\n *\n * @param verbose Whether to print verbose information about the tensor,\n * including dtype and size.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n print(verbose = false): void {\n return opHandler.print(this, verbose);\n }\n\n /**\n * Reshapes the tensor into the provided shape.\n * See `tf.reshape` for more details.\n *\n * @param newShape An array of integers defining the output tensor shape.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n reshape<R2 extends Rank>(newShape: ShapeMap[R2]): Tensor<R2> {\n this.throwIfDisposed();\n return opHandler.reshape(this, newShape);\n }\n\n /**\n * Reshapes the tensor into the shape of the provided tensor.\n *\n * @param x The tensor of required shape.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n reshapeAs<T extends Tensor>(x: T): T {\n this.throwIfDisposed();\n return this.reshape(x.shape) as T;\n }\n\n /**\n * Returns a `tf.Tensor` that has expanded rank, by inserting a dimension\n * into the tensor's shape. See `tf.expandDims` for details.\n *\n * @param axis The dimension index at which to insert shape of 1. Defaults to\n * 0 (the first dimension).\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n expandDims<R2 extends Rank>(axis = 0): Tensor<R2> {\n return opHandler.expandDims(this, axis);\n }\n\n /**\n * Returns the cumulative sum of the `tf.Tensor` along `axis`.\n *\n * @param axis The axis along which to sum. Optional. Defaults to 0.\n * @param exclusive Whether to perform exclusive cumulative sum. Defaults to\n * false. 
If set to true then the sum of each tensor entry does not include\n * its own value, but only the values previous to it along the specified\n * axis.\n * @param reverse Whether to sum in the opposite direction. Defaults to\n * false.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n cumsum<T extends Tensor>(axis = 0, exclusive = false, reverse = false): T {\n return opHandler.cumsum(this, axis, exclusive, reverse);\n }\n\n /**\n * Returns a `tf.Tensor` with dimensions of size 1 removed from the shape.\n * See `tf.squeeze` for more details.\n *\n * @param axis A list of numbers. If specified, only squeezes the\n * dimensions listed. The dimension index starts at 0. It is an error to\n * squeeze a dimension that is not 1.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n squeeze<T extends Tensor>(axis?: number[]): T {\n this.throwIfDisposed();\n return opHandler.squeeze(this, axis);\n }\n\n /** Returns a copy of the tensor. See `tf.clone` for details. */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n clone<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.clone(this);\n }\n\n /** Returns a human-readable description of the tensor. Useful for logging. */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n toString(verbose = false): string {\n const vals = this.dataSync();\n return tensorToString(vals, this.shape, this.dtype, verbose);\n }\n\n // Below is chain API that is not exposed to docs to avoid repetition. To\n // expose a method, move it above this comment and add @doc and jsdoc.\n\n tile<T extends this>(this: T, reps: number[]): T {\n this.throwIfDisposed();\n return opHandler.tile(this, reps) as T;\n }\n\n gather<T extends this>(this: T, indices: Tensor1D, axis = 0): T {\n this.throwIfDisposed();\n return opHandler.gather(this, indices, axis) as T;\n }\n\n matMul<T extends Tensor>(\n this: T, b: T, transposeA = false, transposeB = false): T {\n this.throwIfDisposed();\n return opHandler.matMul(this, b, transposeA, transposeB);\n }\n dot(b: Tensor): Tensor {\n this.throwIfDisposed();\n return opHandler.dot(this, b);\n }\n norm(\n ord: number|'euclidean'|'fro' = 'euclidean', axis: number|number[] = null,\n keepDims = false): Tensor {\n this.throwIfDisposed();\n return opHandler.norm(this, ord, axis, keepDims);\n }\n slice<T extends Tensor<R>>(\n this: T, begin: number|number[], size?: number|number[]): T {\n this.throwIfDisposed();\n return opHandler.slice(this, begin, size);\n }\n reverse<T extends Tensor>(this: T, axis?: number|number[]): T {\n this.throwIfDisposed();\n return opHandler.reverse(this, axis);\n }\n concat<T extends Tensor>(this: T, x: T, axis = 0): T {\n this.throwIfDisposed();\n return opHandler.concat([this, x], axis);\n }\n split<T extends Tensor>(this: T, numOrSizeSplits: number[]|number, axis = 0):\n T[] {\n this.throwIfDisposed();\n return opHandler.split(this, numOrSizeSplits, axis);\n }\n stack(x: Tensor, axis = 0): Tensor {\n return opHandler.stack([this, x], axis);\n }\n unstack(x: Tensor, axis = 0): Tensor[] {\n return opHandler.unstack(this, axis);\n }\n pad<T extends Tensor>(\n this: T, paddings: Array<[number, number]>, constantValue = 0): T {\n return opHandler.pad(this, paddings, constantValue);\n }\n batchNormalization(\n mean: Tensor<R>|Tensor1D, variance: Tensor<R>|Tensor1D,\n varianceEpsilon = .001, scale?: Tensor<R>|Tensor1D,\n offset?: Tensor<R>|Tensor1D): Tensor<R> {\n this.throwIfDisposed();\n return opHandler.batchNormalization(\n this, mean, variance, 
varianceEpsilon, scale, offset);\n }\n\n // Reduction ops.\n all<T extends Tensor>(axis: number|number[] = null, keepDims = false): T {\n this.throwIfDisposed();\n return opHandler.all(this, axis, keepDims);\n }\n any<T extends Tensor>(axis: number|number[] = null, keepDims = false): T {\n this.throwIfDisposed();\n return opHandler.any(this, axis, keepDims);\n }\n logSumExp<T extends Tensor>(axis: number|number[] = null, keepDims = false):\n T {\n this.throwIfDisposed();\n return opHandler.logSumExp(this, axis, keepDims);\n }\n sum<T extends Tensor>(axis: number|number[] = null, keepDims = false): T {\n this.throwIfDisposed();\n return opHandler.sum(this, axis, keepDims);\n }\n prod<T extends Tensor>(axis: number|number[] = null, keepDims = false): T {\n this.throwIfDisposed();\n return opHandler.prod(this, axis, keepDims);\n }\n mean<T extends Tensor>(axis: number|number[] = null, keepDims = false): T {\n this.throwIfDisposed();\n return opHandler.mean(this, axis, keepDims);\n }\n min<T extends Tensor>(axis: number|number[] = null, keepDims = false): T {\n this.throwIfDisposed();\n return opHandler.min(this, axis, keepDims);\n }\n max<T extends Tensor>(axis: number|number[] = null, keepDims = false): T {\n this.throwIfDisposed();\n return opHandler.max(this, axis, keepDims);\n }\n argMin<T extends Tensor>(axis: number = null): T {\n this.throwIfDisposed();\n return opHandler.argMin(this, axis);\n }\n argMax<T extends Tensor>(axis: number = null): T {\n this.throwIfDisposed();\n return opHandler.argMax(this, axis);\n }\n\n // Transformations\n cast<T extends this>(dtype: DataType): T {\n this.throwIfDisposed();\n return opHandler.cast(this as T, dtype) as T;\n }\n\n // Binary ops.\n\n add<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.add(this, x);\n }\n addStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.addStrict(this, x) as T;\n }\n atan2<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.atan2(this, x) as T;\n }\n sub<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.sub(this, x);\n }\n subStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.subStrict(this, x) as T;\n }\n pow<T extends Tensor>(this: T, exp: Tensor): T {\n this.throwIfDisposed();\n return opHandler.pow(this, exp);\n }\n powStrict(exp: Tensor): Tensor<R> {\n this.throwIfDisposed();\n return opHandler.powStrict(this, exp);\n }\n mul<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.mul(this, x);\n }\n mulStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.mulStrict(this, x) as T;\n }\n div<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.div(this, x);\n }\n floorDiv<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.floorDiv(this, x);\n }\n divStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.divStrict(this, x) as T;\n }\n minimum<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.minimum(this, x);\n }\n minimumStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.minimumStrict(this, x) as T;\n }\n maximum<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.maximum(this, x);\n }\n maximumStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.maximumStrict(this, x) as T;\n }\n mod<T 
extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.mod(this, x);\n }\n modStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.modStrict(this, x) as T;\n }\n squaredDifference<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.squaredDifference(this, x);\n }\n squaredDifferenceStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.squaredDifferenceStrict(this, x) as T;\n }\n transpose<T extends Tensor>(this: T, perm?: number[]): T {\n this.throwIfDisposed();\n return opHandler.transpose(this, perm);\n }\n\n // Compare ops.\n\n notEqual<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.notEqual(this, x);\n }\n notEqualStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.notEqualStrict(this, x) as T;\n }\n less<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.less(this, x);\n }\n lessStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.lessStrict(this, x) as T;\n }\n equal<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.equal(this, x);\n }\n equalStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.equalStrict(this, x) as T;\n }\n lessEqual<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.lessEqual(this, x);\n }\n lessEqualStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.lessEqualStrict(this, x) as T;\n }\n greater<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.greater(this, x);\n }\n greaterStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.greaterStrict(this, x) as T;\n }\n greaterEqual<T extends Tensor>(x: Tensor): T {\n this.throwIfDisposed();\n return opHandler.greaterEqual(this, x);\n }\n greaterEqualStrict<T extends this>(this: T, x: T): T {\n this.throwIfDisposed();\n return opHandler.greaterEqualStrict(this, x) as T;\n }\n\n // Logical ops.\n logicalAnd(x: Tensor): Tensor {\n this.throwIfDisposed();\n return opHandler.logicalAnd(this, x);\n }\n logicalOr(x: Tensor): Tensor {\n this.throwIfDisposed();\n return opHandler.logicalOr(this, x);\n }\n logicalNot<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.logicalNot(this);\n }\n logicalXor(x: Tensor): Tensor {\n this.throwIfDisposed();\n return opHandler.logicalXor(this, x);\n }\n where(condition: Tensor, x: Tensor): Tensor {\n this.throwIfDisposed();\n return opHandler.where(condition, this, x);\n }\n\n // Unary ops.\n neg<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.neg(this);\n }\n ceil<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.ceil(this);\n }\n floor<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.floor(this);\n }\n sign<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.sign(this);\n }\n exp<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.exp(this);\n }\n expm1<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.expm1(this);\n }\n log<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.log(this);\n }\n log1p<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.log1p(this);\n }\n sqrt<T extends Tensor>(this: T): T {\n 
this.throwIfDisposed();\n return opHandler.sqrt(this);\n }\n rsqrt<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.rsqrt(this);\n }\n square<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.square(this);\n }\n reciprocal<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.reciprocal(this);\n }\n abs<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.abs(this);\n }\n clipByValue(min: number, max: number): Tensor<R> {\n this.throwIfDisposed();\n return opHandler.clipByValue(this, min, max);\n }\n relu<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.relu(this);\n }\n elu<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.elu(this);\n }\n selu<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.selu(this);\n }\n leakyRelu(alpha = 0.2): Tensor<R> {\n this.throwIfDisposed();\n return opHandler.leakyRelu(this, alpha);\n }\n prelu(alpha: Tensor<R>): Tensor<R> {\n this.throwIfDisposed();\n return opHandler.prelu(this, alpha);\n }\n sigmoid<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.sigmoid(this);\n }\n logSigmoid<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.logSigmoid(this);\n }\n softplus<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.softplus(this);\n }\n zerosLike<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.zerosLike(this);\n }\n onesLike<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.onesLike(this);\n }\n sin<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.sin(this);\n }\n cos<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.cos(this);\n }\n tan<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.tan(this);\n }\n asin<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.asin(this);\n }\n acos<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.acos(this);\n }\n atan<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.atan(this);\n }\n sinh<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.sinh(this);\n }\n cosh<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.cosh(this);\n }\n tanh<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.tanh(this);\n }\n asinh<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.asinh(this);\n }\n acosh<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.acosh(this);\n }\n atanh<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.atanh(this);\n }\n erf<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.erf(this);\n }\n round<T extends Tensor>(this: T): T {\n this.throwIfDisposed();\n return opHandler.round(this);\n }\n step<T extends Tensor>(this: T, alpha = 0.0): T {\n this.throwIfDisposed();\n return opHandler.step(this, alpha);\n }\n softmax<T extends this>(this: T, dim = -1): T {\n this.throwIfDisposed();\n return opHandler.softmax(this, dim) as T;\n }\n logSoftmax<T extends this>(this: T, axis = -1): T {\n this.throwIfDisposed();\n return opHandler.logSoftmax(this, axis) as T;\n }\n\n // Image ops.\n resizeBilinear<T extends Tensor3D|Tensor4D>(\n this: T, newShape2D: [number, number], 
alignCorners = false): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.image.resizeBilinear(this, newShape2D, alignCorners);\n }\n\n resizeNearestNeighbor<T extends Tensor3D|Tensor4D>(\n this: T, newShape2D: [number, number], alignCorners = false): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.image.resizeNearestNeighbor(\n this, newShape2D, alignCorners);\n }\n\n // Convolutions.\n conv1d<T extends Tensor2D|Tensor3D>(\n this: T, filter: Tensor3D, stride: number, pad: 'valid'|'same'|number,\n dataFormat: 'NWC'|'NCW' = 'NWC', dilation = 1,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.conv1d(\n this, filter, stride, pad, dataFormat, dilation, dimRoundingMode);\n }\n conv2d<T extends Tensor3D|Tensor4D>(\n this: T, filter: Tensor4D, strides: [number, number]|number,\n pad: 'valid'|'same'|number, dataFormat: 'NHWC'|'NCHW' = 'NHWC',\n dilations: [number, number]|number = [1, 1],\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.conv2d(\n this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n }\n conv2dTranspose<T extends Tensor3D|Tensor4D>(\n this: T, filter: Tensor4D,\n outputShape: [number, number, number, number]|[number, number, number],\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.conv2dTranspose(\n this, filter, outputShape, strides, pad, dimRoundingMode);\n }\n depthwiseConv2D<T extends Tensor3D|Tensor4D>(\n this: T, filter: Tensor4D, strides: [number, number]|number,\n pad: 'valid'|'same'|number, dataFormat: 'NHWC'|'NCHW' = 'NHWC',\n dilations: [number, number]|number = [1, 1],\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.depthwiseConv2d(\n this, filter, strides, pad, dataFormat, dilations, dimRoundingMode);\n }\n\n separableConv2d<T extends Tensor3D|Tensor4D>(\n this: T|TensorLike, depthwiseFilter: Tensor4D|TensorLike,\n pointwiseFilter: Tensor4D|TensorLike, strides: [number, number]|number,\n pad: 'valid'|'same', dilation: [number, number]|number = [1, 1],\n dataFormat: 'NHWC'|'NCHW' = 'NHWC'): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.separableConv2d(\n this, depthwiseFilter, pointwiseFilter, strides, pad, dilation,\n dataFormat);\n }\n\n // Pooling.\n avgPool<T extends Tensor3D|Tensor4D>(\n this: T, filterSize: [number, number]|number,\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.avgPool(this, filterSize, strides, pad, dimRoundingMode);\n }\n maxPool<T extends Tensor3D|Tensor4D>(\n this: T, filterSize: [number, number]|number,\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n (this as Tensor).throwIfDisposed();\n return opHandler.maxPool(this, filterSize, strides, pad, dimRoundingMode);\n }\n localResponseNormalization<T extends Tensor3D|Tensor4D>(\n this: T, radius = 5, bias = 1, alpha = 1, beta = 0.5): T {\n return opHandler.localResponseNormalization(\n this, radius, bias, alpha, beta);\n }\n pool<T extends Tensor3D|Tensor4D>(\n this: T, windowShape: [number, number]|number, poolingType: 'max'|'avg',\n padding: 'valid'|'same'|number, dilationRate?: [number, number]|number,\n strides?: [number, number]|number): T {\n (this as 
Tensor).throwIfDisposed();\n return opHandler.pool(\n this, windowShape, poolingType, padding, dilationRate, strides);\n }\n\n variable(trainable = true, name?: string, dtype?: DataType): Variable<R> {\n this.throwIfDisposed();\n return Variable.variable(this, trainable, name, dtype);\n }\n\n unsortedSegmentSum<T extends Tensor>(\n this: T, segmentIds: Tensor1D, numSegments: number): T {\n this.throwIfDisposed();\n return opHandler.unsortedSegmentSum(this, segmentIds, numSegments);\n }\n\n batchToSpaceND<T extends Tensor>(\n this: T, blockShape: number[], crops: number[][]): T {\n this.throwIfDisposed();\n return opHandler.batchToSpaceND(this, blockShape, crops);\n }\n\n spaceToBatchND<T extends Tensor>(\n this: T, blockShape: number[], paddings: number[][]): T {\n this.throwIfDisposed();\n return opHandler.spaceToBatchND(this, blockShape, paddings);\n }\n\n topk<T extends Tensor>(this: T, k = 1, sorted = true):\n {values: T, indices: T} {\n this.throwIfDisposed();\n return opHandler.topk(this, k, sorted);\n }\n\n stridedSlice<T extends Tensor>(\n this: T, begin: number[], end: number[], strides: number[], beginMask = 0,\n endMask = 0): T {\n this.throwIfDisposed();\n return opHandler.stridedSlice(\n this, begin, end, strides, beginMask, endMask);\n }\n\n depthToSpace(this: Tensor4D, blockSize: number, dataFormat: 'NHWC'|'NCHW'):\n Tensor4D {\n this.throwIfDisposed();\n return opHandler.depthToSpace(this, blockSize, dataFormat);\n }\n\n fft(this: Tensor): Tensor {\n this.throwIfDisposed();\n return opHandler.spectral.fft(this);\n }\n\n ifft(this: Tensor): Tensor {\n this.throwIfDisposed();\n return opHandler.spectral.ifft(this);\n }\n}\nObject.defineProperty(Tensor, Symbol.hasInstance, {\n value: (instance: Tensor) => {\n return !!instance && instance.shape != null && instance.dtype != null;\n }\n});\n\n/** @doclink Tensor */\nexport type Scalar = Tensor<Rank.R0>;\n/** @doclink Tensor */\nexport type Tensor1D = Tensor<Rank.R1>;\n/** @doclink Tensor */\nexport type Tensor2D = Tensor<Rank.R2>;\n/** @doclink Tensor */\nexport type Tensor3D = Tensor<Rank.R3>;\n/** @doclink Tensor */\nexport type Tensor4D = Tensor<Rank.R4>;\n/** @doclink Tensor */\nexport type Tensor5D = Tensor<Rank.R5>;\n/** @doclink Tensor */\nexport type Tensor6D = Tensor<Rank.R6>;\n\n/**\n * A mutable `tf.Tensor`, useful for persisting state, e.g. for training.\n */\n/** @doc {heading: 'Tensors', subheading: 'Classes'} */\nexport class Variable<R extends Rank = Rank> extends Tensor<R> {\n name: string;\n\n /**\n * Private constructor since we cannot add logic before calling `super()`.\n * Instead, we expose static `Variable.variable` method below, which will be\n * added to global namespace.\n */\n private constructor(\n initialValue: Tensor<R>, public trainable = true, name?: string) {\n super(\n initialValue.shape, initialValue.dtype, null /* values */,\n initialValue.dataId);\n this.name = name;\n if (this.name == null) {\n this.name = trackerFn().nextVariableId().toString();\n }\n try {\n trackerFn().registerVariable(this);\n } catch (ex) {\n trackerFn().disposeTensor(this);\n throw ex;\n }\n }\n\n /**\n * Creates a new variable with the provided initial value.\n * ```js\n * const x = tf.variable(tf.tensor([1, 2, 3]));\n * x.assign(tf.tensor([4, 5, 6]));\n *\n * x.print();\n * ```\n *\n * @param initialValue Initial value for the tensor.\n * @param trainable If true, optimizers are allowed to update it.\n * @param name Name of the variable. 
Defaults to a unique id.\n * @param dtype If set, initialValue will be converted to the given type.\n */\n /** @doc {heading: 'Tensors', subheading: 'Creation'} */\n static variable<R extends Rank>(\n initialValue: Tensor<R>, trainable = true, name?: string,\n dtype?: DataType): Variable<R> {\n if (dtype != null && dtype !== initialValue.dtype) {\n initialValue = initialValue.asType(dtype) as Tensor<R>;\n }\n return new Variable(initialValue, trainable, name);\n }\n\n /**\n * Assign a new `tf.Tensor` to this variable. The new `tf.Tensor` must have\n * the same shape and dtype as the old `tf.Tensor`.\n *\n * @param newValue New tensor to be assigned to this variable.\n */\n /** @doc {heading: 'Tensors', subheading: 'Classes'} */\n assign(newValue: Tensor<R>): void {\n if (newValue.dtype !== this.dtype) {\n throw new Error(\n `dtype of the new value (${newValue.dtype}) and ` +\n `previous value (${this.dtype}) must match`);\n }\n if (!util.arraysEqual(newValue.shape, this.shape)) {\n throw new Error(\n `shape of the new value (${newValue.shape}) and ` +\n `previous value (${this.shape}) must match`);\n }\n trackerFn().disposeTensor(this);\n this.dataId = newValue.dataId;\n trackerFn().registerTensor(this);\n }\n}\nObject.defineProperty(Variable, Symbol.hasInstance, {\n value: (instance: Variable) => {\n return instance instanceof Tensor && instance.assign != null &&\n instance.assign instanceof Function;\n }\n});\n\nconst variable = Variable.variable;\nexport {variable};\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor} from './tensor';\nimport {NamedTensorMap} from './tensor_types';\nimport * as util from './util';\n\nexport interface TapeNode {\n id: number;\n name: string;\n outputs: Tensor[];\n inputs: NamedTensorMap;\n // Optional params, defined only for ops with gradient impl.\n gradient?: (dy: Tensor|Tensor[]) => NamedGradientMap;\n}\n\nexport type NamedGradientMap = {\n [inputName: string]: () => Tensor;\n};\n\n/**\n * Computes a list of TapeNodes that connect x to y, filtering everything else\n * out and preserving the order of the original tape elements.\n *\n * @param tape The tape elements to filter.\n * @param xs The input Tensors.\n * @param y The output Tensor.\n */\nexport function getFilteredNodesXToY(\n tape: TapeNode[], xs: Tensor[], y: Tensor): TapeNode[] {\n // Forward pass to compute all the nodes and Tensors that are transitively a\n // function of x.\n const tensorsFromX: {[tensorId: number]: boolean} = {};\n const nodesFromX: {[nodeId: number]: boolean} = {};\n for (let i = 0; i < xs.length; i++) {\n tensorsFromX[xs[i].id] = true;\n }\n\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n for (const inputName in nodeInputs) {\n const input = nodeInputs[inputName];\n\n let anyInputFromX = false;\n for (let j = 0; j < xs.length; j++) {\n 
if (tensorsFromX[input.id]) {\n node.outputs.forEach(output => tensorsFromX[output.id] = true);\n anyInputFromX = true;\n nodesFromX[node.id] = true;\n break;\n }\n }\n\n if (anyInputFromX) {\n break;\n }\n }\n }\n\n // Backward pass to find all of the nodes and Tensors that lead to y.\n const tensorsLeadToY: {[tensorId: number]: boolean} = {};\n tensorsLeadToY[y.id] = true;\n const nodesToY: {[nodeId: number]: boolean} = {};\n\n for (let i = tape.length - 1; i >= 0; i--) {\n const node = tape[i];\n const nodeInputs = node.inputs;\n\n // If any of the outputs lead to y, mark all of the inputs as leading to y.\n for (let j = 0; j < node.outputs.length; j++) {\n if (tensorsLeadToY[node.outputs[j].id]) {\n for (const inputName in nodeInputs) {\n tensorsLeadToY[nodeInputs[inputName].id] = true;\n nodesToY[node.id] = true;\n }\n break;\n }\n }\n }\n\n // Return the paths that come from x and lead to y.\n const filteredTape: TapeNode[] = [];\n for (let i = 0; i < tape.length; i++) {\n const node = tape[i];\n\n if (nodesFromX[node.id] && nodesToY[node.id]) {\n // Prune the inputs from the node that aren't a function of x.\n const prunedInputs: {[inputName: string]: Tensor} = {};\n for (const inputName in node.inputs) {\n const nodeInput = node.inputs[inputName];\n if (tensorsFromX[nodeInput.id]) {\n prunedInputs[inputName] = nodeInput;\n }\n }\n\n // Copy the node and overwrite inputsAndArgs to the pruned version.\n const prunedNode = Object.assign({}, node) as TapeNode;\n prunedNode.inputs = prunedInputs;\n prunedNode.outputs = node.outputs;\n\n filteredTape.push(prunedNode);\n }\n }\n\n return filteredTape;\n}\n\n/**\n * Backpropagate gradients through the filtered TapeNodes.\n *\n * @param tensorAccumulatedGradientMap A map of Tensor to its gradient. This map\n * is mutated by this method.\n * @param filteredTape The filtered TapeNodes to backprop through.\n */\nexport function backpropagateGradients(\n tensorAccumulatedGradientMap: {[tensorId: number]: Tensor},\n filteredTape: TapeNode[]) {\n // Walk the tape backward and keep a map of Tensor to its gradient.\n for (let i = filteredTape.length - 1; i >= 0; i--) {\n const node = filteredTape[i];\n\n const dys: Tensor[] = [];\n node.outputs.forEach(o => {\n const gradTensor = tensorAccumulatedGradientMap[o.id];\n if (gradTensor != null) {\n dys.push(gradTensor);\n } else {\n // This particular output is not in the back-propagation subgraph, so it\n // does not affect the final output, thus we put zeros for its dy.\n const dy = Tensor.make(\n o.shape, {values: util.makeZerosTypedArray(o.size, o.dtype)},\n o.dtype);\n dys.push(dy);\n }\n });\n\n if (node.gradient == null) {\n throw new Error(\n `Cannot compute gradient: gradient function not found ` +\n `for ${node.name}.`);\n }\n\n // Backprop dy through this node and accumulate gradients over the inputs.\n const inputGradients =\n // Grad functions of ops with single outputs expect a dy, while ops\n // with multiple outputs expect dys (array of dy).\n node.gradient(node.outputs.length === 1 ? dys[0] : dys);\n for (const inputName in node.inputs) {\n if (!(inputName in inputGradients)) {\n throw new Error(\n `Cannot backprop through input ${inputName}. ` +\n `Available gradients found: ${Object.keys(inputGradients)}.`);\n }\n\n // Call the gradient function.\n const dx = inputGradients[inputName]();\n const x = node.inputs[inputName];\n if (!util.arraysEqual(dx.shape, x.shape)) {\n throw new Error(\n `Error in gradient for op ${node.name}. 
The gradient of input ` +\n `'${inputName}' has shape '${dx.shape}', which does not match ` +\n `the shape of the input '${x.shape}'`);\n }\n\n if (tensorAccumulatedGradientMap[x.id] == null) {\n tensorAccumulatedGradientMap[x.id] = dx;\n } else {\n const curGradient = tensorAccumulatedGradientMap[x.id];\n tensorAccumulatedGradientMap[x.id] = curGradient.add(dx);\n curGradient.dispose();\n }\n }\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor} from './tensor';\nimport {NamedTensorMap, TensorContainer, TensorContainerArray} from './tensor_types';\nimport {assert} from './util';\n\nexport function assertTypesMatch(a: Tensor, b: Tensor): void {\n assert(\n a.dtype === b.dtype,\n `The dtypes of the first(${a.dtype}) and` +\n ` second(${b.dtype}) input must match`);\n}\n\nexport function isTensorInList(tensor: Tensor, tensorList: Tensor[]): boolean {\n for (let i = 0; i < tensorList.length; i++) {\n if (tensorList[i].id === tensor.id) {\n return true;\n }\n }\n return false;\n}\n\nexport function flattenNameArrayMap(\n nameArrayMap: Tensor|NamedTensorMap, keys?: string[]): Tensor[] {\n const xs: Tensor[] = [];\n if (nameArrayMap instanceof Tensor) {\n xs.push(nameArrayMap);\n } else {\n const xMap = nameArrayMap as {[xName: string]: Tensor};\n for (let i = 0; i < keys.length; i++) {\n xs.push(xMap[keys[i]]);\n }\n }\n return xs;\n}\n\nexport function unflattenToNameArrayMap(\n keys: string[], flatArrays: Tensor[]): NamedTensorMap {\n if (keys.length !== flatArrays.length) {\n throw new Error(\n `Cannot unflatten Tensor[], keys and arrays are not of same length.`);\n }\n const result: NamedTensorMap = {};\n for (let i = 0; i < keys.length; i++) {\n result[keys[i]] = flatArrays[i];\n }\n return result;\n}\n\n/**\n * Extracts any `Tensor`s found within the provided object.\n *\n * @param container an object that may be a `Tensor` or may directly contain\n * `Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. In general it\n * is safe to pass any object here, except that `Promise`s are not\n * supported.\n * @returns An array of `Tensors` found within the passed object. If the\n * argument is simply a `Tensor`, a list containing that `Tensor` is\n * returned. 
If the object is not a `Tensor` or does not\n * contain `Tensors`, an empty list is returned.\n */\nexport function getTensorsInContainer(result: TensorContainer): Tensor[] {\n const list: Tensor[] = [];\n const seen = new Set<{}|void>();\n walkTensorContainer(result, list, seen);\n return list;\n}\n\nfunction walkTensorContainer(\n container: TensorContainer, list: Tensor[], seen: Set<{}|void>): void {\n if (container == null) {\n return;\n }\n if (container instanceof Tensor) {\n list.push(container);\n return;\n }\n if (!isIterable(container)) {\n return;\n }\n // Iteration over keys works also for arrays.\n const iterable = container as TensorContainerArray;\n for (const k in iterable) {\n const val = iterable[k];\n if (!seen.has(val)) {\n seen.add(val);\n walkTensorContainer(val, list, seen);\n }\n }\n}\n\n// tslint:disable-next-line:no-any\nfunction isIterable(obj: any): boolean {\n return Array.isArray(obj) || typeof obj === 'object';\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {BackendTimingInfo, DataMover, KernelBackend} from './kernels/backend';\nimport {Profiler} from './profiler';\nimport {backpropagateGradients, getFilteredNodesXToY, NamedGradientMap, TapeNode} from './tape';\nimport {DataId, Tensor, Tensor3D, Variable} from './tensor';\nimport {NamedTensorMap, NamedVariableMap, TensorContainer} from './tensor_types';\nimport {getTensorsInContainer, isTensorInList} from './tensor_util';\nimport {DataType, TypedArray} from './types';\nimport * as util from './util';\nimport {makeOnesTypedArray, now, sizeFromShape} from './util';\n\n/**\n * A function that computes an output. The save function is for saving tensors\n * computed in the forward pass, that we need in the backward pass.\n */\nexport type ForwardFunc<T> =\n (backend: KernelBackend, save?: <S extends Tensor>(tensor: S) => S) => T;\n\n/**\n * @docalias (a: Tensor, b: Tensor,...) 
=> {\n * value: Tensor,\n * gradFunc: (dy: Tensor) => Tensor | Tensor[]\n * }\n */\nexport type CustomGradientFunc<T extends Tensor> = (...args: Tensor[]) => {\n value: T, gradFunc: (dy: T) => Tensor | Tensor[];\n};\n\nexport type MemoryInfo = {\n numTensors: number; numDataBuffers: number; numBytes: number;\n unreliable?: boolean;\n};\n\ntype KernelProfile = {\n name: string; bytesAdded: number; totalBytesSnapshot: number;\n tensorsAdded: number;\n totalTensorsSnapshot: number;\n inputShapes: number[][];\n outputShape: number[] | number[][];\n};\n\nexport type ProfileInfo = {\n newBytes: number; newTensors: number; peakBytes: number;\n kernels: KernelProfile[];\n result: TensorContainer;\n};\n\nexport interface TimingInfo extends BackendTimingInfo {\n wallMs: number;\n}\n\n/** @docalias Function */\nexport type ScopeFn<T extends TensorContainer> = () => T;\n\nexport interface TensorManager {\n registerTensor(a: Tensor): void;\n registerVariable(v: Variable): void;\n disposeTensor(a: Tensor): void;\n memory(): {numDataBuffers: number; numBytes: number;};\n}\n\ninterface ScopeState {\n track: Tensor[];\n name: string;\n}\n\nexport class Engine implements TensorManager, DataMover {\n // Public since optimizers will use it.\n registeredVariables: NamedVariableMap = {};\n\n private nextTapeNodeId = 0;\n private numBytes = 0;\n private numTensors = 0;\n private numDataBuffers = 0;\n\n private profiling = false;\n private activeProfile: ProfileInfo;\n\n private activeTape: TapeNode[];\n private gradientScopeCount = 0;\n private customGradientDepth = 0;\n\n // Keep Tensors that parallel the tapes.\n private activeScope: ScopeState;\n private scopeStack: ScopeState[];\n private keepTensors: Set<number> = new Set();\n private profiler: Profiler;\n\n private tensorInfo = new WeakMap<DataId, {\n backend: KernelBackend,\n dtype: DataType,\n shape: number[],\n refCount: number\n }>();\n\n constructor(\n public backend: KernelBackend, public safeMode: boolean,\n private debugMode: () => boolean) {\n // Create a default outer scope.\n this.activeScope = {track: [], name: 'default scope'};\n this.scopeStack = [this.activeScope];\n this.profiler = new Profiler(backend);\n this.activeProfile =\n {newBytes: 0, newTensors: 0, peakBytes: 0, kernels: [], result: null};\n }\n\n moveData(dataId: DataId) {\n this.write(dataId, this.readSync(dataId));\n }\n\n tidy<T extends TensorContainer>(\n nameOrFn: string|ScopeFn<T>, fn?: ScopeFn<T>, gradMode = false): T {\n // gradMode Primarily for internal use during backprop\n // If true, will start a tape if it is the outermost tidy.\n\n let name: string = null;\n if (fn == null) {\n // Called with only 1 argument.\n if (typeof nameOrFn !== 'function') {\n throw new Error('Please provide a function to tidy()');\n }\n fn = nameOrFn;\n } else {\n // Called with 2 arguments.\n if (typeof nameOrFn !== 'string' && !(nameOrFn instanceof String)) {\n throw new Error(\n 'When calling with two arguments, the first argument ' +\n 'to tidy() must be a string');\n }\n if (typeof fn !== 'function') {\n throw new Error(\n 'When calling with two arguments, the 2nd argument ' +\n 'to tidy() must be a function');\n }\n name = nameOrFn as string;\n // TODO(nsthorat,smilkov): Do operation logging and performance\n // profiling.\n }\n let result: T;\n return this.scopedRun(\n () => this.startScope(name, gradMode),\n () => this.endScope(result, gradMode), () => {\n result = fn();\n if (result instanceof Promise) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n return result;\n 
});\n }\n\n private scopedRun<T>(start: () => void, end: () => void, f: () => T): T {\n start();\n try {\n const res = f();\n end();\n return res;\n } catch (ex) {\n end();\n throw ex;\n }\n }\n\n private static nextTensorId = 0;\n nextTensorId(): number {\n return Engine.nextTensorId++;\n }\n\n private static nextVariableId = 0;\n nextVariableId(): number {\n return Engine.nextVariableId++;\n }\n\n runKernel<T extends Tensor|Tensor[], I extends NamedTensorMap>(\n forwardFunc: ForwardFunc<T>,\n inputs: I,\n backwardsFunc?: (dy: T, saved: Tensor[]) => {[P in keyof I]: () => I[P]},\n ): T {\n let result: T;\n const saved: Tensor[] = [];\n const saveFunc = <T extends Tensor>(x: T): T => {\n saved.push(x);\n return x;\n };\n const scopeName = this.activeScope.name;\n const startingBytecount = this.numBytes;\n const startingNumTensors = this.numTensors;\n\n // Stop recording to a tape when running a kernel.\n this.scopedRun(\n () => this.customGradientDepth++, () => this.customGradientDepth--,\n () => {\n if (!this.debugMode()) {\n result = forwardFunc(this.backend, saveFunc);\n } else {\n result = this.profiler.profileKernel(\n scopeName, () => forwardFunc(this.backend, saveFunc));\n }\n });\n\n if (this.shouldRecord()) {\n const tapeNode: TapeNode = {\n id: this.nextTapeNodeId++,\n name: scopeName,\n inputs,\n outputs: Array.isArray(result) ? result : [result] as Tensor[]\n };\n if (backwardsFunc != null) {\n tapeNode.gradient =\n ((dy: T) => backwardsFunc(dy, saved)) as (dy: Tensor) =>\n NamedGradientMap;\n }\n this.activeTape.push(tapeNode);\n }\n\n if (this.profiling) {\n this.activeProfile.kernels.push({\n name: scopeName,\n bytesAdded: this.numBytes - startingBytecount,\n totalBytesSnapshot: this.numBytes,\n tensorsAdded: this.numTensors - startingNumTensors,\n totalTensorsSnapshot: this.numTensors,\n inputShapes: Object.keys(inputs).map(key => inputs[key].shape),\n outputShape: Array.isArray(result) ?\n (result as Tensor[]).map(item => (item as Tensor).shape) :\n (result as Tensor).shape\n });\n }\n\n return result;\n }\n\n // TensorManager implementation.\n\n registerTensor(a: Tensor|Variable): void {\n const refCount = this.tensorInfo.has(a.dataId) ?\n this.tensorInfo.get(a.dataId).refCount :\n 0;\n this.numTensors++;\n if (refCount === 0) {\n this.numDataBuffers++;\n\n // Don't count bytes for complex numbers as they are counted by their\n // components.\n if (a.dtype !== 'complex64') {\n this.numBytes +=\n util.sizeFromShape(a.shape) * util.bytesPerElement(a.dtype);\n }\n this.tensorInfo.set(\n a.dataId,\n {backend: this.backend, dtype: a.dtype, shape: a.shape, refCount: 0});\n this.backend.register(a.dataId, a.shape, a.dtype);\n }\n this.tensorInfo.get(a.dataId).refCount++;\n if (!(a instanceof Variable)) {\n this.track(a);\n }\n }\n\n registerVariable(v: Variable) {\n if (this.registeredVariables[v.name] != null) {\n throw new Error(`Variable with name ${v.name} was already registered`);\n }\n this.registeredVariables[v.name] = v;\n }\n\n disposeTensor(a: Tensor): void {\n if (!this.tensorInfo.has(a.dataId)) {\n return;\n }\n if (this.keepTensors.has(a.id)) {\n this.keepTensors.delete(a.id);\n }\n this.numTensors--;\n const refCount = this.tensorInfo.get(a.dataId).refCount;\n if (refCount <= 1) {\n const info = this.tensorInfo.get(a.dataId);\n info.backend.disposeData(a.dataId);\n this.numDataBuffers--;\n // Don't count bytes for complex numbers as they are counted by their\n // components.\n if (a.dtype !== 'complex64') {\n this.numBytes -=\n util.sizeFromShape(a.shape) * 
util.bytesPerElement(a.dtype);\n }\n this.tensorInfo.delete(a.dataId);\n } else {\n this.tensorInfo.get(a.dataId).refCount--;\n }\n // TODO(nsthorat): Construct an error and save the stack trace for\n // debugging when in debug mode. Creating a stack trace is too expensive\n // to do unconditionally.\n }\n\n disposeVariables(): void {\n for (const varName in this.registeredVariables) {\n const v = this.registeredVariables[varName];\n this.disposeTensor(v);\n delete this.registeredVariables[varName];\n }\n }\n\n memory(): MemoryInfo {\n const info = this.backend.memory() as MemoryInfo;\n info.numTensors = this.numTensors;\n info.numDataBuffers = this.numDataBuffers;\n info.numBytes = this.numBytes;\n return info;\n }\n\n async profile(query: () => TensorContainer): Promise<ProfileInfo> {\n this.profiling = true;\n\n const startBytes = this.numBytes;\n const startNumTensors = this.numTensors;\n\n this.activeProfile.kernels = [];\n this.activeProfile.result = query();\n\n this.profiling = false;\n\n this.activeProfile.peakBytes =\n Math.max(...this.activeProfile.kernels.map(d => d.totalBytesSnapshot));\n this.activeProfile.newBytes = this.numBytes - startBytes;\n this.activeProfile.newTensors = this.numTensors - startNumTensors;\n return this.activeProfile;\n }\n\n private shouldRecord(): boolean {\n return this.activeTape != null && this.customGradientDepth === 0;\n }\n\n private addTapeNode(\n inputs: Tensor[], result: Tensor,\n gradientsFunc: (dy: Tensor) => Tensor[]): void {\n const inputsMap: NamedTensorMap = {};\n inputs.forEach((input, idx) => {\n inputsMap[idx] = input;\n });\n\n const gradient = (dy: Tensor) => {\n const res = gradientsFunc(dy);\n const resMap: NamedGradientMap = {};\n res.forEach((r, idx) => {\n resMap[idx] = () => r;\n });\n return resMap;\n };\n\n const tapeNode: TapeNode = {\n id: this.nextTapeNodeId++,\n name: this.activeScope.name,\n inputs: inputsMap,\n outputs: [result],\n gradient\n };\n this.activeTape.push(tapeNode);\n }\n\n keep<T extends Tensor>(result: T): T {\n if (this.scopeStack.length === 1 && this.safeMode) {\n throw new Error(\n 'Safe mode is ON. Enclose all tensor operations inside tf.tidy(): ' +\n 'tf.tidy(() => {...}) to avoid memory leaks.');\n }\n this.keepTensors.add(result.id);\n return result;\n }\n\n /**\n * Start a scope. Use this with endScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n startScope(name?: string, gradientsMode = false) {\n if (gradientsMode && this.gradientScopeCount === 0) {\n this.activeTape = [];\n }\n if (gradientsMode) {\n this.gradientScopeCount++;\n }\n\n const scopeInfo: ScopeState = {track: [], name: 'unnamed scope'};\n if (name) {\n scopeInfo.name = name;\n }\n this.scopeStack.push(scopeInfo);\n this.activeScope = scopeInfo;\n }\n\n /**\n * End a scope. 
Use this with startScope() to achieve the same functionality\n * as scope() without the need for a function closure.\n */\n endScope(result?: TensorContainer, gradientsMode = false) {\n if (gradientsMode) {\n this.gradientScopeCount--;\n if (this.gradientScopeCount === 0) {\n this.activeTape = null;\n }\n }\n\n const tensorsToKeep = new Set(this.keepTensors);\n\n const tensorsToTrackInParent = getTensorsInContainer(result);\n tensorsToTrackInParent.forEach(tensor => tensorsToKeep.add(tensor.id));\n\n // Dispose the arrays tracked in this scope.\n for (let i = 0; i < this.activeScope.track.length; i++) {\n const tensor = this.activeScope.track[i];\n if (tensorsToKeep.has(tensor.id)) {\n continue;\n }\n\n if (this.activeTape != null) {\n tensorsToTrackInParent.push(tensor);\n } else {\n tensor.dispose();\n }\n }\n\n const oldScope = this.scopeStack.pop();\n this.activeScope = this.scopeStack.length === 0 ?\n {track: [], name: 'default scope'} :\n this.scopeStack[this.scopeStack.length - 1];\n\n // Track the current result in the parent scope.\n tensorsToTrackInParent.forEach(tensor => {\n // Only track the tensor if was allocated in the inner scope and is not\n // globally kept.\n if (!this.keepTensors.has(tensor.id) &&\n isTensorInList(tensor, oldScope.track)) {\n this.track(tensor);\n }\n });\n }\n\n /**\n * Returns gradients of `f` with respect to each of the `xs`. The gradients\n * returned are of the same length as `xs`, but some might be null if `f` was\n * not a function of that `x`. It also takes optional dy to multiply the\n * gradient, which defaults to `1`.\n */\n gradients<T extends Tensor>(\n f: () => T, xs: Tensor[], dy?: T,\n allowNoGradients = false): {value: T, grads: Tensor[]} {\n util.assert(xs.length > 0, 'gradients() received an empty list of xs.');\n\n return this.tidy('gradients', () => {\n const y = f();\n util.assert(\n y instanceof Tensor,\n 'The result y returned by f() must be a tensor.');\n // Filter out the nodes that don't connect x => y.\n const filteredTape = getFilteredNodesXToY(this.activeTape, xs, y);\n if (!allowNoGradients && filteredTape.length === 0 && xs.length > 0) {\n throw new Error(\n 'Cannot compute gradient of y=f(x) with respect to x. Make sure ' +\n 'that the f you passed encloses all operations that lead from x ' +\n 'to y.');\n }\n\n const accumulatedGradientMap: {[tensorId: number]: Tensor} = {};\n accumulatedGradientMap[y.id] = (dy == null) ? ones(y.shape) : dy;\n\n // Backprop gradients through the filtered nodes.\n backpropagateGradients(accumulatedGradientMap, filteredTape);\n\n const grads = xs.map(x => accumulatedGradientMap[x.id]);\n return {value: y, grads};\n }, true /* gradientsMode */);\n }\n\n customGrad<T extends Tensor>(f: CustomGradientFunc<T>):\n (...args: Tensor[]) => T {\n util.assert(\n util.isFunction(f),\n 'The f passed in customGrad(f) must be a function.');\n return (...inputs: Tensor[]): T => {\n util.assert(\n inputs.every(t => t instanceof Tensor),\n 'The args passed in customGrad(f)(x1, x2,...) 
must all be tensors');\n\n let gradientsFunc: (dy: T) => Tensor | Tensor[];\n let result: T;\n this.scopedRun(\n () => this.customGradientDepth++, () => this.customGradientDepth--,\n () => {\n const gradientsMode = true;\n result = this.tidy(f.name, () => {\n const {value, gradFunc} = f(...inputs);\n util.assert(\n value instanceof Tensor,\n 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.value` is a tensor');\n util.assert(\n util.isFunction(gradFunc),\n 'The function f passed in customGrad(f) must return an ' +\n 'object where `obj.gradFunc` is a function.');\n gradientsFunc = gradFunc;\n return value;\n }, gradientsMode);\n });\n\n if (this.shouldRecord()) {\n const gradFunc = (dy: T): Tensor[] => {\n const res = gradientsFunc(dy);\n const grads: Tensor[] = Array.isArray(res) ? res : [res];\n util.assert(\n grads.length === inputs.length,\n 'The function f passed in customGrad(f) must return an object ' +\n 'where `obj.gradFunc` is a function that returns the same ' +\n 'number of tensors as inputs passed to f(...).');\n util.assert(\n grads.every(t => t instanceof Tensor),\n 'The function f passed in customGrad(f) must return an object ' +\n 'where `obj.gradFunc` is a function that returns a list of ' +\n 'only tensors.');\n return grads;\n };\n this.addTapeNode(inputs, result, gradFunc);\n }\n return result;\n };\n }\n\n // Forwarding to backend.\n write(dataId: DataId, values: TypedArray): void {\n const info = this.tensorInfo.get(dataId);\n if (this.backend !== info.backend) {\n // Delete the tensor from the old backend and move it to the new backend.\n info.backend.disposeData(dataId);\n info.backend = this.backend;\n this.backend.register(dataId, info.shape, info.dtype);\n }\n this.backend.write(dataId, values);\n }\n readSync(dataId: DataId): TypedArray {\n // Route the read to the correct backend.\n const info = this.tensorInfo.get(dataId);\n return info.backend.readSync(dataId);\n }\n read(dataId: DataId): Promise<TypedArray> {\n // Route the read to the correct backend.\n const info = this.tensorInfo.get(dataId);\n return info.backend.read(dataId);\n }\n fromPixels(\n pixels: ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement,\n numChannels: number): Tensor3D {\n return this.backend.fromPixels(pixels, numChannels);\n }\n async time(query: () => void): Promise<TimingInfo> {\n const start = now();\n const timingInfo = await this.backend.time(query) as TimingInfo;\n timingInfo.wallMs = now() - start;\n return timingInfo;\n }\n\n /**\n * Tracks a Tensor in the current scope to be automatically cleaned up\n * when the current scope ends, and returns the value.\n *\n * @param result The Tensor to track in the current scope.\n */\n private track<T extends Tensor>(result: T): T {\n if (this.scopeStack.length === 1 && this.safeMode) {\n throw new Error(\n 'Safe mode is ON. Enclose all tensor operations inside tf.tidy(): ' +\n 'tf.tidy(() => {op();...}); to avoid memory leaks.');\n }\n this.activeScope.track.push(result);\n return result;\n }\n}\n\nfunction ones(shape: number[]): Tensor {\n const values = makeOnesTypedArray(sizeFromShape(shape), 'float32');\n return Tensor.make(shape, {values});\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
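The engine's customGrad path above asserts that `gradFunc` returns exactly one gradient tensor per input. A minimal sketch of a two-input custom gradient, assuming the public `tf.customGrad` and `tf.grads` exports of this bundle (the op and values are illustrative, not taken from the source above):

```js
// A custom multiply op whose gradient is supplied manually. gradFunc must
// return one tensor per input, as the engine asserts above.
const customMul = tf.customGrad((a, b) => ({
  value: a.mul(b),
  // d/da sum(a * b * dy) = dy * b,  d/db = dy * a  -- two inputs, two gradients.
  gradFunc: dy => [dy.mul(b), dy.mul(a)]
}));

const a = tf.tensor1d([1, 2]);
const b = tf.tensor1d([3, 4]);
const [da, db] = tf.grads((a, b) => customMul(a, b))([a, b]);
da.print();  // expected ≈ [3, 4]  (i.e. b)
db.print();  // expected ≈ [1, 2]  (i.e. a)
```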
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {getWebGLContext} from './canvas_util';\n\nexport interface Features {\n // Whether to enable debug mode.\n 'DEBUG'?: boolean;\n // Whether we are in a browser (as versus, say, node.js) environment.\n 'IS_BROWSER'?: boolean;\n // Whether we are in the Node.js environment.\n 'IS_NODE'?: boolean;\n // Whether packed WebGL kernels lazily unpack their outputs.\n 'WEBGL_LAZILY_UNPACK'?: boolean;\n // Whether the WebGL backend will sometimes forward ops to the CPU.\n 'WEBGL_CPU_FORWARD'?: boolean;\n // Whether we will pack the batchnormalization op.\n 'WEBGL_PACK_BATCHNORMALIZATION'?: boolean;\n // Whether we will use the im2col algorithm to speed up convolutions.\n 'WEBGL_CONV_IM2COL'?: boolean;\n // Whether we will perform memory paging.\n 'WEBGL_PAGING_ENABLED'?: boolean;\n // The maximum texture dimension.\n 'WEBGL_MAX_TEXTURE_SIZE'?: number;\n // The disjoint_query_timer extension version.\n // 0: disabled, 1: EXT_disjoint_timer_query, 2:\n // EXT_disjoint_timer_query_webgl2.\n // In Firefox with WebGL 2.0,\n // EXT_disjoint_timer_query_webgl2 is not available, so we must use the\n // WebGL 1.0 extension.\n 'WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'?: number;\n // Whether the timer object from the disjoint_query_timer extension gives\n // timing information that is reliable.\n 'WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE'?: boolean;\n // 0: No WebGL, 1: WebGL 1.0, 2: WebGL 2.0.\n 'WEBGL_VERSION'?: number;\n // True if WebGL is supported.\n 'HAS_WEBGL'?: boolean;\n // Whether rendering to float32 textures is enabled. If disabled, renders to\n // float16 textures.\n 'WEBGL_RENDER_FLOAT32_ENABLED'?: boolean;\n // Whether downloading float textures is enabled. If disabled, uses IEEE 754\n // encoding of the float32 values to 4 uint8 when downloading.\n 'WEBGL_DOWNLOAD_FLOAT_ENABLED'?: boolean;\n // Whether the fence API is available.\n 'WEBGL_FENCE_API_ENABLED'?: boolean;\n // Tensors with size <= than this will be uploaded as uniforms, not textures.\n 'WEBGL_SIZE_UPLOAD_UNIFORM'?: number;\n 'BACKEND'?: string;\n // Test precision for unit tests. 
This is decreased when we can't render\n // float32 textures.\n 'TEST_EPSILON'?: number;\n 'IS_CHROME'?: boolean;\n // True if running unit tests.\n 'IS_TEST'?: boolean;\n // Smallest positive value used to make ops like division and log numerically\n // stable.\n 'EPSILON'?: number;\n // True when the environment is \"production\" where we disable safety checks\n // to gain performance.\n 'PROD'?: boolean;\n // Whether to do sanity checks when inferring a shape from user-provided\n // values, used when creating a new tensor.\n 'TENSORLIKE_CHECK_SHAPE_CONSISTENCY'?: boolean;\n}\n\nexport enum Type {\n NUMBER,\n BOOLEAN,\n STRING\n}\n\nexport const URL_PROPERTIES: URLProperty[] = [\n {name: 'DEBUG', type: Type.BOOLEAN},\n {name: 'IS_BROWSER', type: Type.BOOLEAN},\n {name: 'WEBGL_LAZILY_UNPACK', type: Type.BOOLEAN},\n {name: 'WEBGL_CPU_FORWARD', type: Type.BOOLEAN},\n {name: 'WEBGL_PACK_BATCHNORMALIZATION', type: Type.BOOLEAN},\n {name: 'WEBGL_CONV_IM2COL', type: Type.BOOLEAN},\n {name: 'WEBGL_MAX_TEXTURE_SIZE', type: Type.NUMBER},\n {name: 'WEBGL_PAGING_ENABLED', type: Type.BOOLEAN},\n {name: 'WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION', type: Type.NUMBER},\n {name: 'WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE', type: Type.BOOLEAN},\n {name: 'WEBGL_VERSION', type: Type.NUMBER},\n {name: 'WEBGL_RENDER_FLOAT32_ENABLED', type: Type.BOOLEAN},\n {name: 'WEBGL_DOWNLOAD_FLOAT_ENABLED', type: Type.BOOLEAN},\n {name: 'WEBGL_FENCE_API_ENABLED', type: Type.BOOLEAN},\n {name: 'WEBGL_SIZE_UPLOAD_UNIFORM', type: Type.NUMBER},\n {name: 'BACKEND', type: Type.STRING},\n {name: 'EPSILON', type: Type.NUMBER},\n {name: 'PROD', type: Type.BOOLEAN},\n {name: 'TENSORLIKE_CHECK_SHAPE_CONSISTENCY', type: Type.BOOLEAN},\n];\n\nexport interface URLProperty {\n name: keyof Features;\n type: Type;\n}\n\nexport function isWebGLVersionEnabled(webGLVersion: 1|2) {\n try {\n const gl = getWebGLContext(webGLVersion);\n if (gl != null) {\n return true;\n }\n } catch (e) {\n return false;\n }\n return false;\n}\n\nlet MAX_TEXTURE_SIZE: number;\n// Caching MAX_TEXTURE_SIZE here because the environment gets reset between\n// unit tests and we don't want to constantly query the WebGLContext for\n// MAX_TEXTURE_SIZE.\nexport function getWebGLMaxTextureSize(webGLVersion: number): number {\n if (MAX_TEXTURE_SIZE == null) {\n const gl = getWebGLContext(webGLVersion);\n MAX_TEXTURE_SIZE = gl.getParameter(gl.MAX_TEXTURE_SIZE);\n }\n return MAX_TEXTURE_SIZE;\n}\n\nexport function getWebGLDisjointQueryTimerVersion(webGLVersion: number):\n number {\n if (webGLVersion === 0) {\n return 0;\n }\n\n let queryTimerVersion: number;\n const gl = getWebGLContext(webGLVersion);\n\n if (hasExtension(gl, 'EXT_disjoint_timer_query_webgl2') &&\n webGLVersion === 2) {\n queryTimerVersion = 2;\n } else if (hasExtension(gl, 'EXT_disjoint_timer_query')) {\n queryTimerVersion = 1;\n } else {\n queryTimerVersion = 0;\n }\n return queryTimerVersion;\n}\n\nexport function isRenderToFloatTextureEnabled(webGLVersion: number): boolean {\n if (webGLVersion === 0) {\n return false;\n }\n\n const gl = getWebGLContext(webGLVersion);\n\n if (webGLVersion === 1) {\n if (!hasExtension(gl, 'OES_texture_float')) {\n return false;\n }\n } else {\n if (!hasExtension(gl, 'EXT_color_buffer_float')) {\n return false;\n }\n }\n\n const isFrameBufferComplete =\n createFloatTextureAndBindToFramebuffer(gl, webGLVersion);\n return isFrameBufferComplete;\n}\n\nexport function isDownloadFloatTextureEnabled(webGLVersion: number): boolean {\n if (webGLVersion === 0) {\n return 
false;\n }\n\n const gl = getWebGLContext(webGLVersion);\n\n if (webGLVersion === 1) {\n if (!hasExtension(gl, 'OES_texture_float')) {\n return false;\n }\n if (!hasExtension(gl, 'WEBGL_color_buffer_float')) {\n return false;\n }\n } else {\n if (!hasExtension(gl, 'EXT_color_buffer_float')) {\n return false;\n }\n }\n\n const isFrameBufferComplete =\n createFloatTextureAndBindToFramebuffer(gl, webGLVersion);\n return isFrameBufferComplete;\n}\n\nexport function isWebGLFenceEnabled(webGLVersion: number) {\n if (webGLVersion !== 2) {\n return false;\n }\n const gl = getWebGLContext(webGLVersion);\n\n // tslint:disable-next-line:no-any\n const isEnabled = (gl as any).fenceSync != null;\n return isEnabled;\n}\n\nexport function isChrome() {\n return typeof navigator !== 'undefined' && navigator != null &&\n navigator.userAgent != null && /Chrome/.test(navigator.userAgent) &&\n /Google Inc/.test(navigator.vendor);\n}\n\n// Expects flags from URL in the format ?tfjsflags=FLAG1:1,FLAG2:true.\nconst TENSORFLOWJS_FLAGS_PREFIX = 'tfjsflags';\nexport function getFeaturesFromURL(): Features {\n const features: Features = {};\n\n if (typeof window === 'undefined' || typeof window.location === 'undefined' ||\n typeof window.location.search === 'undefined') {\n return features;\n }\n\n const urlParams = getQueryParams(window.location.search);\n if (TENSORFLOWJS_FLAGS_PREFIX in urlParams) {\n const urlFlags: {[key: string]: string} = {};\n\n const keyValues = urlParams[TENSORFLOWJS_FLAGS_PREFIX].split(',');\n keyValues.forEach(keyValue => {\n const [key, value] = keyValue.split(':') as [string, string];\n urlFlags[key] = value;\n });\n\n URL_PROPERTIES.forEach(urlProperty => {\n if (urlProperty.name in urlFlags) {\n console.log(\n `Setting feature override from URL ${urlProperty.name}: ` +\n `${urlFlags[urlProperty.name]}`);\n if (urlProperty.type === Type.NUMBER) {\n features[urlProperty.name] = +urlFlags[urlProperty.name];\n } else if (urlProperty.type === Type.BOOLEAN) {\n features[urlProperty.name] = urlFlags[urlProperty.name] === 'true';\n } else if (urlProperty.type === Type.STRING) {\n // tslint:disable-next-line:no-any\n features[urlProperty.name] = urlFlags[urlProperty.name] as any;\n } else {\n console.warn(`Unknown URL param: ${urlProperty.name}.`);\n }\n }\n });\n }\n\n return features;\n}\n\nfunction hasExtension(gl: WebGLRenderingContext, extensionName: string) {\n const ext = gl.getExtension(extensionName);\n return ext != null;\n}\n\nfunction createFloatTextureAndBindToFramebuffer(\n gl: WebGLRenderingContext, webGLVersion: number): boolean {\n const frameBuffer = gl.createFramebuffer();\n const texture = gl.createTexture();\n\n gl.bindTexture(gl.TEXTURE_2D, texture);\n\n // tslint:disable-next-line:no-any\n const internalFormat = webGLVersion === 2 ? 
(gl as any).RGBA32F : gl.RGBA;\n gl.texImage2D(\n gl.TEXTURE_2D, 0, internalFormat, 1, 1, 0, gl.RGBA, gl.FLOAT, null);\n\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);\n\n const isFrameBufferComplete =\n gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;\n\n gl.bindTexture(gl.TEXTURE_2D, null);\n gl.bindFramebuffer(gl.FRAMEBUFFER, null);\n gl.deleteTexture(texture);\n gl.deleteFramebuffer(frameBuffer);\n\n return isFrameBufferComplete;\n}\n\nexport function getQueryParams(queryString: string): {[key: string]: string} {\n const params = {};\n queryString.replace(/[?&]([^=?&]+)(?:=([^&]*))?/g, (s, ...t) => {\n decodeParam(params, t[0], t[1]);\n return t.join('=');\n });\n return params;\n}\n\nfunction decodeParam(\n params: {[key: string]: string}, name: string, value?: string) {\n params[decodeURIComponent(name)] = decodeURIComponent(value || '');\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as device_util from './device_util';\nimport {Engine, MemoryInfo, ProfileInfo, ScopeFn, TimingInfo} from './engine';\nimport {Features, getFeaturesFromURL, getWebGLDisjointQueryTimerVersion, getWebGLMaxTextureSize, isChrome, isDownloadFloatTextureEnabled, isRenderToFloatTextureEnabled, isWebGLFenceEnabled, isWebGLVersionEnabled} from './environment_util';\nimport {KernelBackend} from './kernels/backend';\nimport {DataId, setTensorTracker, Tensor, TensorTracker} from './tensor';\nimport {TensorContainer} from './tensor_types';\nimport {getTensorsInContainer} from './tensor_util';\n\nconst EPSILON_FLOAT16 = 1e-3;\nconst TEST_EPSILON_FLOAT16 = 1e-1;\n\nconst EPSILON_FLOAT32 = 1e-7;\nconst TEST_EPSILON_FLOAT32 = 1e-3;\n\nexport class Environment {\n private features: Features = {};\n private globalEngine: Engine;\n private registry:\n {[id: string]: {backend: KernelBackend, priority: number}} = {};\n backendName: string;\n\n constructor(features?: Features) {\n if (features != null) {\n this.features = features;\n }\n\n if (this.get('DEBUG')) {\n console.warn(\n 'Debugging mode is ON. The output of every math call will ' +\n 'be downloaded to CPU and checked for NaNs. ' +\n 'This significantly impacts performance.');\n }\n }\n\n /**\n * Sets the backend (cpu, webgl, etc) responsible for creating tensors and\n * executing operations on those tensors.\n *\n * Note this disposes the current backend, if any, as well as any tensors\n * associated with it. A new backend is initialized, even if it is of the\n * same type as the previous one.\n *\n * @param backendName The name of the backend. Currently supports\n * `'webgl'|'cpu'` in the browser, and `'tensorflow'` under node.js\n * (requires tfjs-node).\n * @param safeMode Defaults to false. 
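The URL-flag machinery above accepts overrides of the form `?tfjsflags=FLAG1:value1,FLAG2:value2`. A hedged sketch of how that plays out (flag values are illustrative; reading them back assumes the environment object is exposed by the bundle, e.g. as `tf.ENV`):

```js
// Loading a page as https://example.com/?tfjsflags=DEBUG:true,WEBGL_VERSION:1
// makes getFeaturesFromURL() return {DEBUG: true, WEBGL_VERSION: 1}:
//   getQueryParams('?tfjsflags=DEBUG:true,WEBGL_VERSION:1')
//     -> {tfjsflags: 'DEBUG:true,WEBGL_VERSION:1'}
//   and each KEY:value pair is then coerced via URL_PROPERTIES (BOOLEAN/NUMBER/STRING).

// Reading the resulting feature values back (assumes tf.ENV is exported):
console.log(tf.ENV.get('DEBUG'));          // true
console.log(tf.ENV.get('WEBGL_VERSION'));  // 1
```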
In safe mode, you are forced to\n * construct tensors and call math operations inside a `tidy()` which\n * will automatically clean up intermediate tensors.\n */\n /** @doc {heading: 'Environment'} */\n static setBackend(backendName: string, safeMode = false) {\n if (!(backendName in ENV.registry)) {\n throw new Error(`Backend name '${backendName}' not found in registry`);\n }\n ENV.engine.backend = ENV.findBackend(backendName);\n ENV.backendName = backendName;\n }\n\n /**\n * Returns the current backend name (cpu, webgl, etc). The backend is\n * responsible for creating tensors and executing operations on those tensors.\n */\n /** @doc {heading: 'Environment'} */\n static getBackend(): string {\n ENV.initEngine();\n return ENV.backendName;\n }\n\n /**\n * Disposes all variables kept in the backend engine.\n */\n /** @doc {heading: 'Environment'} */\n static disposeVariables(): void {\n ENV.engine.disposeVariables();\n }\n\n /**\n * Returns memory info at the current time in the program. The result is an\n * object with the following properties:\n *\n * - `numBytes`: Number of bytes allocated (undisposed) at this time.\n * - `numTensors`: Number of unique tensors allocated.\n * - `numDataBuffers`: Number of unique data buffers allocated\n * (undisposed) at this time, which is ≤ the number of tensors\n * (e.g. `a.reshape(newShape)` makes a new Tensor that shares the same\n * data buffer with `a`).\n * - `unreliable`: `Optional` `boolean`:\n * - On WebGL, not present (always reliable).\n * - On CPU, true. Due to automatic garbage collection, these numbers\n * represent undisposed tensors, i.e. not wrapped in `tidy()`, or\n * lacking a call to `tensor.dispose()`.\n */\n /** @doc {heading: 'Performance', subheading: 'Memory'} */\n static memory(): MemoryInfo {\n return ENV.engine.memory();\n }\n\n /**\n * Executes the provided function `f()` and returns a promise that resolves\n * with information about the function's memory use:\n * - `newBytes`: the number of new bytes allocated\n * - `newTensors`: the number of new tensors created\n * - `peakBytes`: the peak number of bytes allocated\n * - `kernels`: an array of objects for each kernel involved that reports\n * their input and output shapes, number of bytes used, and number of new\n * tensors created.\n *\n * ```js\n * const profile = await tf.profile(() => {\n * const x = tf.tensor1d([1, 2, 3]);\n * let x2 = x.square();\n * x2.dispose();\n * x2 = x.square();\n * x2.dispose();\n * return x;\n * });\n *\n * console.log(`newBytes: ${profile.newBytes}`);\n * console.log(`newTensors: ${profile.newTensors}`);\n * console.log(`byte usage over all kernels: ${profile.kernels.map(k =>\n * k.totalBytesSnapshot)}`);\n * ```\n *\n */\n /** @doc {heading: 'Performance', subheading: 'Profile'} */\n static profile(f: () => TensorContainer): Promise<ProfileInfo> {\n return ENV.engine.profile(f);\n }\n\n /**\n * Executes the provided function `fn` and after it is executed, cleans up all\n * intermediate tensors allocated by `fn` except those returned by `fn`.\n * `fn` must not return a Promise (async functions not allowed). The returned\n * result can be a complex object.\n *\n * Using this method helps avoid memory leaks. 
In general, wrap calls to\n * operations in `tf.tidy` for automatic memory cleanup.\n *\n * When in safe mode, you must enclose all `tf.Tensor` creation and ops\n * inside a `tf.tidy` to prevent memory leaks.\n *\n * ```js\n * // y = 2 ^ 2 + 1\n * const y = tf.tidy(() => {\n * // a, b, and one will be cleaned up when the tidy ends.\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n * const b = a.square();\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * y.print();\n * ```\n *\n * @param nameOrFn The name of the closure, or the function to execute.\n * If a name is provided, the 2nd argument should be the function.\n * If debug mode is on, the timing and the memory usage of the function\n * will be tracked and displayed on the console using the provided name.\n * @param fn The function to execute.\n */\n /** @doc {heading: 'Performance', subheading: 'Memory'} */\n static tidy<T extends TensorContainer>(\n nameOrFn: string|ScopeFn<T>, fn?: ScopeFn<T>, gradMode = false): T {\n return ENV.engine.tidy(nameOrFn, fn, gradMode);\n }\n\n /**\n * Disposes any `tf.Tensor`s found within the provided object.\n *\n * @param container an object that may be a `tf.Tensor` or may directly\n * contain `tf.Tensor`s, such as a `Tensor[]` or `{key: Tensor, ...}`. If\n * the object is not a `tf.Tensor` or does not contain `Tensors`, nothing\n * happens. In general it is safe to pass any object here, except that\n * `Promise`s are not supported.\n */\n /** @doc {heading: 'Performance', subheading: 'Memory'} */\n static dispose(container: TensorContainer) {\n const tensors = getTensorsInContainer(container);\n tensors.forEach(tensor => tensor.dispose());\n }\n\n /**\n * Keeps a `tf.Tensor` generated inside a `tf.tidy` from being disposed\n * automatically.\n *\n * ```js\n * let b;\n * const y = tf.tidy(() => {\n * const one = tf.scalar(1);\n * const a = tf.scalar(2);\n *\n * // b will not be cleaned up by the tidy. 
a and one will be cleaned up\n * // when the tidy ends.\n * b = tf.keep(a.square());\n *\n * console.log('numTensors (in tidy): ' + tf.memory().numTensors);\n *\n * // The value returned inside the tidy function will return\n * // through the tidy, in this case to the variable y.\n * return b.add(one);\n * });\n *\n * console.log('numTensors (outside tidy): ' + tf.memory().numTensors);\n * console.log('y:');\n * y.print();\n * console.log('b:');\n * b.print();\n * ```\n *\n * @param result The tensor to keep from being disposed.\n */\n /** @doc {heading: 'Performance', subheading: 'Memory'} */\n static keep<T extends Tensor>(result: T): T {\n return ENV.engine.keep(result);\n }\n\n /**\n * Executes `f()` and returns a promise that resolves with timing\n * information.\n *\n * The result is an object with the following properties:\n *\n * - `wallMs`: Wall execution time.\n * - `kernelMs`: Kernel execution time, ignoring data transfer.\n * - On `WebGL` The following additional properties exist:\n * - `uploadWaitMs`: CPU blocking time on texture uploads.\n * - `downloadWaitMs`: CPU blocking time on texture downloads (readPixels).\n *\n * ```js\n * const x = tf.randomNormal([20, 20]);\n * const time = await tf.time(() => x.matMul(x));\n *\n * console.log(`kernelMs: ${time.kernelMs}, wallTimeMs: ${time.wallMs}`);\n * ```\n *\n * @param f The function to execute and time.\n */\n /** @doc {heading: 'Performance', subheading: 'Timing'} */\n static time(f: () => void): Promise<TimingInfo> {\n return ENV.engine.time(f);\n }\n\n get<K extends keyof Features>(feature: K): Features[K] {\n if (feature in this.features) {\n return this.features[feature];\n }\n\n this.features[feature] = this.evaluateFeature(feature);\n\n return this.features[feature];\n }\n\n getFeatures(): Features {\n return this.features;\n }\n\n set<K extends keyof Features>(feature: K, value: Features[K]): void {\n this.features[feature] = value;\n }\n\n private getBestBackendName(): string {\n if (Object.keys(this.registry).length === 0) {\n throw new Error('No backend found in registry.');\n }\n const sortedBackends = Object.keys(this.registry)\n .map(name => {\n return {name, entry: this.registry[name]};\n })\n .sort((a, b) => {\n // Highest priority comes first.\n return b.entry.priority - a.entry.priority;\n });\n return sortedBackends[0].name;\n }\n\n private evaluateFeature<K extends keyof Features>(feature: K): Features[K] {\n if (feature === 'DEBUG') {\n return false;\n } else if (feature === 'IS_BROWSER') {\n return typeof window !== 'undefined';\n } else if (feature === 'IS_NODE') {\n return (typeof process !== 'undefined') &&\n (typeof process.versions.node !== 'undefined');\n } else if (feature === 'IS_CHROME') {\n return isChrome();\n } else if (feature === 'WEBGL_CPU_FORWARD') {\n return true;\n } else if (feature === 'WEBGL_PACK_BATCHNORMALIZATION') {\n return false;\n } else if (feature === 'WEBGL_LAZILY_UNPACK') {\n return false;\n } else if (feature === 'WEBGL_CONV_IM2COL') {\n return false;\n } else if (feature === 'WEBGL_PAGING_ENABLED') {\n return this.get('IS_BROWSER') && !this.get('PROD');\n } else if (feature === 'WEBGL_MAX_TEXTURE_SIZE') {\n return getWebGLMaxTextureSize(this.get('WEBGL_VERSION'));\n } else if (feature === 'IS_TEST') {\n return false;\n } else if (feature === 'BACKEND') {\n return this.getBestBackendName();\n } else if (feature === 'WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') {\n const webGLVersion = this.get('WEBGL_VERSION');\n\n if (webGLVersion === 0) {\n return 0;\n }\n return 
getWebGLDisjointQueryTimerVersion(webGLVersion);\n } else if (feature === 'WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_RELIABLE') {\n return this.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0 &&\n !device_util.isMobile();\n } else if (feature === 'HAS_WEBGL') {\n return this.get('WEBGL_VERSION') > 0;\n } else if (feature === 'WEBGL_VERSION') {\n if (isWebGLVersionEnabled(2)) {\n return 2;\n } else if (isWebGLVersionEnabled(1)) {\n return 1;\n }\n return 0;\n } else if (feature === 'WEBGL_RENDER_FLOAT32_ENABLED') {\n return isRenderToFloatTextureEnabled(this.get('WEBGL_VERSION'));\n } else if (feature === 'WEBGL_DOWNLOAD_FLOAT_ENABLED') {\n return isDownloadFloatTextureEnabled(this.get('WEBGL_VERSION'));\n } else if (feature === 'WEBGL_FENCE_API_ENABLED') {\n return isWebGLFenceEnabled(this.get('WEBGL_VERSION'));\n } else if (feature === 'WEBGL_SIZE_UPLOAD_UNIFORM') {\n // Use uniform uploads only when 32bit floats are supported. In 16bit\n // environments there are problems with comparing a 16bit texture value\n // with a 32bit uniform value.\n const useUniforms = this.get('WEBGL_RENDER_FLOAT32_ENABLED');\n return useUniforms ? 4 : 0;\n } else if (feature === 'TEST_EPSILON') {\n return this.backend.floatPrecision() === 32 ? TEST_EPSILON_FLOAT32 :\n TEST_EPSILON_FLOAT16;\n } else if (feature === 'EPSILON') {\n return this.backend.floatPrecision() === 32 ? EPSILON_FLOAT32 :\n EPSILON_FLOAT16;\n } else if (feature === 'PROD') {\n return false;\n } else if (feature === 'TENSORLIKE_CHECK_SHAPE_CONSISTENCY') {\n return !this.get('PROD');\n }\n throw new Error(`Unknown feature ${feature}.`);\n }\n\n setFeatures(features: Features) {\n this.features = Object.assign({}, features);\n }\n\n reset() {\n this.features = getFeaturesFromURL();\n if (this.globalEngine != null) {\n this.globalEngine = null;\n }\n }\n\n get backend(): KernelBackend {\n return this.engine.backend;\n }\n\n findBackend(name: string): KernelBackend {\n if (!(name in this.registry)) {\n return null;\n }\n return this.registry[name].backend;\n }\n\n /**\n * Registers a global backend. The registration should happen when importing\n * a module file (e.g. when importing `backend_webgl.ts`), and is used for\n * modular builds (e.g. custom tfjs bundle with only webgl support).\n *\n * @param factory The backend factory function. When called, it should\n * return an instance of the backend.\n * @param priority The priority of the backend (higher = more important).\n * In case multiple backends are registered, the priority is used to find\n * the best backend. Defaults to 1.\n * @return False if the creation/registration failed. True otherwise.\n */\n registerBackend(\n name: string, factory: () => KernelBackend, priority = 1,\n setTensorTrackerFn?: (f: () => TensorTracker) => void): boolean {\n if (name in this.registry) {\n console.warn(\n `${name} backend was already registered. 
Reusing existing backend`);\n if (setTensorTrackerFn != null) {\n setTensorTrackerFn(() => this.engine);\n }\n return false;\n }\n try {\n const backend = factory();\n backend.setDataMover(\n {moveData: (dataId: DataId) => this.engine.moveData(dataId)});\n this.registry[name] = {backend, priority};\n return true;\n } catch (err) {\n console.warn(`Registration of backend ${name} failed`);\n console.warn(err.stack || err.message);\n return false;\n }\n }\n\n removeBackend(name: string): void {\n if (!(name in this.registry)) {\n throw new Error(`${name} backend not found in registry`);\n }\n this.registry[name].backend.dispose();\n delete this.registry[name];\n }\n\n get engine(): Engine {\n this.initEngine();\n return this.globalEngine;\n }\n\n private initEngine() {\n if (this.globalEngine == null) {\n this.backendName = this.get('BACKEND');\n const backend = this.findBackend(this.backendName);\n this.globalEngine =\n new Engine(backend, false /* safeMode */, () => this.get('DEBUG'));\n }\n }\n}\n\nfunction getGlobalNamespace(): {ENV: Environment} {\n // tslint:disable-next-line:no-any\n let ns: any;\n if (typeof (window) !== 'undefined') {\n ns = window;\n } else if (typeof (process) !== 'undefined') {\n ns = process;\n } else {\n throw new Error('Could not find a global object');\n }\n return ns;\n}\n\nfunction getOrMakeEnvironment(): Environment {\n const ns = getGlobalNamespace();\n if (ns.ENV == null) {\n ns.ENV = new Environment(getFeaturesFromURL());\n setTensorTracker(() => ns.ENV.engine);\n }\n return ns.ENV;\n}\n\nexport let ENV = getOrMakeEnvironment();\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {CustomGradientFunc, ScopeFn} from './engine';\nimport {ENV} from './environment';\nimport {Scalar, Tensor, Variable} from './tensor';\nimport {NamedTensorMap, TensorContainer} from './tensor_types';\nimport * as util from './util';\n\n/**\n * Create a new gradient scope. Similar to scope, but forces all inner scopes\n * to not clean up so that gradient operations can be used inside of this\n * scope.\n * @param nameOrScopeFn The name of the scope, or the function to execute.\n * If a name is provided, the 2nd argument should be the function.\n * If a name is provided, and debug mode is on, the timing and the memory\n * usage of the function will be tracked and displayed on the console\n * using the provided name.\n * @param scopeFn The function to execute.\n */\nfunction gradScope<T extends TensorContainer>(\n nameOrScopeFn: string|ScopeFn<T>, scopeFn?: ScopeFn<T>): T {\n return ENV.engine.tidy(nameOrScopeFn, scopeFn, true /* gradScope */);\n}\n\n/**\n * Provided `f(x)`, returns another function `g(x, dy?)`, which gives the\n * gradient of `f(x)` with respect to `x`.\n *\n * If `dy` is provided, the gradient of `f(x).mul(dy).sum()` with respect to\n * `x` is computed instead. 
`f(x)` must take a single tensor `x` and return a\n * single tensor `y`. If `f()` takes multiple inputs, use `tf.grads` instead.\n *\n * ```js\n * // f(x) = x ^ 2\n * const f = x => x.square();\n * // f'(x) = 2x\n * const g = tf.grad(f);\n *\n * const x = tf.tensor1d([2, 3]);\n * g(x).print();\n * ```\n *\n * ```js\n * // f(x) = x ^ 3\n * const f = x => x.pow(tf.scalar(3, 'int32'));\n * // f'(x) = 3x ^ 2\n * const g = tf.grad(f);\n * // f''(x) = 6x\n * const gg = tf.grad(g);\n *\n * const x = tf.tensor1d([2, 3]);\n * gg(x).print();\n * ```\n *\n * @param f The function f(x), to compute gradient for.\n */\n/** @doc {heading: 'Training', subheading: 'Gradients'} */\nfunction grad<I extends Tensor, O extends Tensor>(f: (x: I) => O): (\n x: I, dy?: O) => I {\n util.assert(util.isFunction(f), 'The f passed in grad(f) must be a function');\n return (x: I, dy?: O): I => {\n util.assert(\n x instanceof Tensor, 'The x passed in grad(f)(x) must be a tensor');\n util.assert(\n dy == null || dy instanceof Tensor,\n 'The dy passed in grad(f)(x, dy) must be a tensor');\n return ENV.engine.tidy(() => {\n const {value, grads} = ENV.engine.gradients(() => f(x), [x], dy);\n if (dy != null) {\n util.assertShapesMatch(\n value.shape, dy.shape,\n 'The shape of dy passed in grad(f)(x, dy) must match the shape ' +\n 'returned by f(x)');\n }\n checkGrads(grads);\n return grads[0] as I;\n });\n };\n}\n\n/**\n * Provided `f(x1, x2,...)`, returns another function `g([x1, x2,...], dy?)`,\n * which gives an array of gradients of `f()` with respect to each input\n * [`x1`,`x2`,...].\n *\n * If `dy` is passed when calling `g()`, the gradient of\n * `f(x1,...).mul(dy).sum()` with respect to each input is computed instead.\n * The provided `f` must take one or more tensors and return a single tensor\n * `y`. If `f()` takes a single input, we recommend using `tf.grad` instead.\n *\n * ```js\n * // f(a, b) = a * b\n * const f = (a, b) => a.mul(b);\n * // df / da = b, df / db = a\n * const g = tf.grads(f);\n *\n * const a = tf.tensor1d([2, 3]);\n * const b = tf.tensor1d([-2, -3]);\n * const [da, db] = g([a, b]);\n * console.log('da');\n * da.print();\n * console.log('db');\n * db.print();\n * ```\n *\n * @param f The function `f(x1, x2,...)` to compute gradients for.\n */\n/** @doc {heading: 'Training', subheading: 'Gradients'} */\nfunction grads<O extends Tensor>(f: (...args: Tensor[]) => O): (\n args: Tensor[], dy?: O) => Tensor[] {\n util.assert(\n util.isFunction(f), 'The f passed in grads(f) must be a function');\n return (args: Tensor[], dy?: O): Tensor[] => {\n util.assert(\n Array.isArray(args) && args.every(arg => arg instanceof Tensor),\n 'The args passed in grads(f)(args) must be an array of tensors');\n util.assert(\n dy == null || dy instanceof Tensor,\n 'The dy passed in grads(f)(args, dy) must be a tensor');\n return ENV.engine.tidy(() => {\n const {value, grads} = ENV.engine.gradients(() => f(...args), args, dy);\n if (dy != null) {\n util.assertShapesMatch(\n value.shape, dy.shape,\n 'The shape of dy passed in grads(f)([x1,...], dy) must ' +\n 'match the shape returned by f([x1,...])');\n }\n checkGrads(grads);\n return grads;\n });\n };\n}\n\n/**\n * Like `tf.grad`, but also returns the value of `f()`. 
Useful when `f()`\n * returns a metric you want to show.\n *\n * The result is a rich object with the following properties:\n * - grad: The gradient of `f(x)` w.r.t `x` (result of `tf.grad`).\n * - value: The value returned by `f(x)`.\n *\n * ```js\n * // f(x) = x ^ 2\n * const f = x => x.square();\n * // f'(x) = 2x\n * const g = tf.valueAndGrad(f);\n *\n * const x = tf.tensor1d([2, 3]);\n * const {value, grad} = g(x);\n *\n * console.log('value');\n * value.print();\n * console.log('grad');\n * grad.print();\n * ```\n */\n/** @doc {heading: 'Training', subheading: 'Gradients'} */\nfunction valueAndGrad<I extends Tensor, O extends Tensor>(f: (x: I) => O): (\n x: I, dy?: O) => {\n value: O;\n grad: I;\n} {\n util.assert(\n util.isFunction(f), 'The f passed in valueAndGrad(f) must be a function');\n return (x: I, dy?: O) => {\n util.assert(\n x instanceof Tensor,\n 'The x passed in valueAndGrad(f)(x) must be a tensor');\n util.assert(\n dy == null || dy instanceof Tensor,\n 'The dy passed in valueAndGrad(f)(x, dy) must be a tensor');\n const {grads, value} = ENV.engine.gradients(() => f(x), [x], dy);\n checkGrads(grads);\n return {grad: grads[0] as I, value: value as O};\n };\n}\n\n/**\n * Like `tf.grads`, but returns also the value of `f()`. Useful when `f()`\n * returns a metric you want to show.\n *\n * The result is a rich object with the following properties:\n * - grads: The gradients of `f()` w.r.t each input (result of `tf.grads`).\n * - value: The value returned by `f(x)`.\n *\n * ```js\n * // f(a, b) = a * b\n * const f = (a, b) => a.mul(b);\n * // df/da = b, df/db = a\n * const g = tf.valueAndGrads(f);\n *\n * const a = tf.tensor1d([2, 3]);\n * const b = tf.tensor1d([-2, -3]);\n * const {value, grads} = g([a, b]);\n *\n * const [da, db] = grads;\n *\n * console.log('value');\n * value.print();\n *\n * console.log('da');\n * da.print();\n * console.log('db');\n * db.print();\n * ```\n */\n/** @doc {heading: 'Training', subheading: 'Gradients'} */\nfunction valueAndGrads<O extends Tensor>(f: (...args: Tensor[]) => O): (\n args: Tensor[], dy?: O) => {\n grads: Tensor[];\n value: O;\n} {\n util.assert(\n util.isFunction(f),\n 'The f passed in valueAndGrads(f) must be a function');\n return (args: Tensor[], dy?: O) => {\n util.assert(\n Array.isArray(args) && args.every(arg => arg instanceof Tensor),\n 'The args passed in valueAndGrads(f)(args) must be array of tensors');\n util.assert(\n dy == null || dy instanceof Tensor,\n 'The dy passed in valueAndGrads(f)(args, dy) must be a tensor');\n const res = ENV.engine.gradients(() => f(...args), args, dy);\n if (dy != null) {\n util.assertShapesMatch(\n res.value.shape, dy.shape,\n 'The shape of dy passed in valueAndGrads(f)([x1,...], dy) must ' +\n 'match the shape returned by f([x1,...])');\n }\n checkGrads(res.grads);\n return res;\n };\n}\n\n/**\n * Computes and returns the gradient of f(x) with respect to the list of\n * trainable variables provided by `varList`. If no list is provided, it\n * defaults to all trainable variables.\n *\n * ```js\n * const a = tf.variable(tf.tensor1d([3, 4]));\n * const b = tf.variable(tf.tensor1d([5, 6]));\n * const x = tf.tensor1d([1, 2]);\n *\n * // f(a, b) = a * x ^ 2 + b * x\n * const f = () => a.mul(x.square()).add(b.mul(x)).sum();\n * // df/da = x ^ 2, df/db = x\n * const {value, grads} = tf.variableGrads(f);\n *\n * Object.keys(grads).forEach(varName => grads[varName].print());\n * ```\n *\n * @param f The function to execute. 
f() should return a scalar.\n * @param varList The list of trainable variables. Defaults to all variables.\n */\n/** @doc {heading: 'Training', subheading: 'Gradients'} */\nfunction variableGrads(f: () => Scalar, varList?: Variable[]):\n {value: Scalar, grads: NamedTensorMap} {\n util.assert(\n util.isFunction(f),\n 'The f passed in variableGrads(f) must be a function');\n util.assert(\n varList == null ||\n Array.isArray(varList) && varList.every(v => v instanceof Variable),\n 'The varList passed in variableGrads(f, varList) must be an array ' +\n 'of variables');\n if (varList == null) {\n // Get all of the trainable variables.\n varList = [];\n for (const varName in ENV.engine.registeredVariables) {\n varList.push(ENV.engine.registeredVariables[varName]);\n }\n }\n // Prune non-trainable variables.\n const originalVarCount = varList.length;\n varList = varList.filter(variable => variable.trainable);\n util.assert(\n varList.length > 0,\n `variableGrads() expects at least one of the input variables to be ` +\n `trainable, but none of the ${originalVarCount} variables is ` +\n `trainable.`);\n\n const allowNoGradients = true;\n const {value, grads} =\n ENV.engine.gradients(f, varList, null, allowNoGradients);\n\n util.assert(\n grads.some(g => g != null),\n 'Cannot find a connection between any variable and the result of the ' +\n 'loss function y=f(x). Please make sure the operations that use ' +\n 'variables are inside the function f passed to minimize().');\n util.assert(\n value.rank === 0,\n `The f passed in variableGrads(f) must return a scalar, but it ` +\n `returned a rank-${value.rank} tensor`);\n\n const namedGrads: NamedTensorMap = {};\n varList.forEach((v, i) => {\n if (grads[i] != null) {\n namedGrads[v.name] = grads[i];\n }\n });\n return {value, grads: namedGrads};\n}\n\n/**\n * Overrides the gradient computation of a function `f`.\n *\n * Takes a function\n * `f(...inputs) => {value: Tensor, gradFunc: dy => Tensor[]}` and returns\n * another function `g(...inputs)` which takes the same inputs as `f`. When\n * called, `g` returns `f().value`. In backward mode, custom gradients with\n * respect to each input of `f` are computed using `f().gradFunc`.\n *\n * ```js\n * const customOp = tf.customGrad(x => {\n * // Override gradient of our custom x ^ 2 op to be dy * abs(x);\n * return {value: x.square(), gradFunc: dy => [dy.mul(x.abs())]};\n * });\n *\n * const x = tf.tensor1d([-1, -2, 3]);\n * const dx = tf.grad(x => customOp(x));\n *\n * console.log(`f(x):`);\n * customOp(x).print();\n * console.log(`f'(x):`);\n * dx(x).print();\n * ```\n *\n * @param f The function to evaluate in forward mode, which should return\n * `{value: Tensor, gradFunc: (dy) => Tensor[]}`, where `gradFunc` returns\n * the custom gradients of `f` with respect to its inputs.\n */\n/** @doc {heading: 'Training', subheading: 'Gradients'} */\nfunction customGrad<T extends Tensor>(f: CustomGradientFunc<T>):\n (...args: Tensor[]) => T {\n return ENV.engine.customGrad(f);\n}\n\nfunction checkGrads(grads: Tensor[]) {\n const numNullGradients = grads.filter(g => g == null).length;\n if (numNullGradients > 0) {\n throw new Error(\n `Cannot compute gradient of y=f(x) with respect to x. Make sure that\n the f you passed encloses all operations that lead from x to y.`);\n }\n}\n\nexport {\n gradScope,\n customGrad,\n variableGrads,\n valueAndGrad,\n valueAndGrads,\n grad,\n grads,\n};\n","/**\n * @license\n * Copyright 2018 Google Inc. 
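`grad` and `grads` above take an optional `dy`, in which case the gradient of `f(x).mul(dy).sum()` is computed instead of using an implicit `dy` of ones. A small sketch with illustrative values, assuming the `tf.grad` export documented above:

```js
// f(x) = x^2, so with an upstream gradient dy the result is 2 * x * dy.
const f = x => x.square();
const g = tf.grad(f);

const x = tf.tensor1d([2, 3]);
const dy = tf.tensor1d([10, 20]);  // must match the shape of f(x)

g(x).print();      // implicit dy of ones: ≈ [4, 6]
g(x, dy).print();  // weighted by dy:      ≈ [40, 120]
```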
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Environment} from './environment';\nexport {customGrad, grad, grads, valueAndGrad, valueAndGrads, variableGrads} from './gradients';\n\nexport const tidy = Environment.tidy;\nexport const keep = Environment.keep;\nexport const dispose = Environment.dispose;\nexport const time = Environment.time;\nexport const profile = Environment.profile;","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from './environment';\n\nexport function warn(...msg: Array<{}>): void {\n if (!ENV.get('IS_TEST')) {\n console.warn(...msg);\n }\n}\n\nexport function log(...msg: Array<{}>): void {\n if (!ENV.get('IS_TEST')) {\n console.log(...msg);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
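The `tidy`/`keep`/`dispose`/`time`/`profile` re-exports above surface the engine helpers as top-level functions. `dispose` (documented earlier) accepts plain containers of tensors; a minimal sketch, assuming the `tf` namespace of this bundle:

```js
// tf.dispose finds and disposes every tensor in the container it is given.
const a = tf.scalar(1);
const b = tf.scalar(2);

tf.dispose([a, b]);                // a Tensor[] container
tf.dispose({bias: tf.scalar(3)});  // a {key: Tensor} container

console.log(tf.memory().numTensors);  // 0, if nothing else is still allocated
```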
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * Gets the new shape of the input Tensor after it's been reshaped\n * to:\n * [blockShape[0], ..., blockShape[M-1], batch / prod(blockShape),\n * inputShape[1], ..., inputShape[N-1]]\n *\n * See step 1: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshaped(\n inputShape: number[], blockShape: number[], prod: number,\n batchToSpace = true): number[] {\n let reshaped: number[] = [];\n if (batchToSpace) {\n reshaped = reshaped.concat(blockShape.slice(0));\n reshaped.push(inputShape[0] / prod);\n reshaped = reshaped.concat(inputShape.slice(1));\n } else {\n reshaped = reshaped.concat(inputShape[0]);\n const spatialLength = blockShape.length;\n for (let i = 0; i < spatialLength; ++i) {\n reshaped =\n reshaped.concat([inputShape[i + 1] / blockShape[i], blockShape[i]]);\n }\n reshaped = reshaped.concat(inputShape.slice(spatialLength + 1));\n }\n return reshaped;\n}\n\n/**\n * Gets the permutation that will transpose the dimensions of the\n * reshaped tensor to shape:\n *\n * [batch / prod(block_shape),inputShape[1], blockShape[0], ...,\n * inputShape[M], blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * see step 2: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getPermuted(\n reshapedRank: number, blockShapeRank: number,\n batchToSpace = true): number[] {\n const permuted = [];\n if (batchToSpace) {\n permuted.push(blockShapeRank);\n for (let i = blockShapeRank + 1; i < reshapedRank; ++i) {\n if (i <= 2 * blockShapeRank) {\n permuted.push(i);\n permuted.push(i - (blockShapeRank + 1));\n } else {\n permuted.push(i);\n }\n }\n } else {\n const permutedBeforeBatch = [];\n const permutedAfterBatch = [];\n for (let i = 1; i < reshapedRank; ++i) {\n if (i >= blockShapeRank * 2 + 1 || i % 2 === 1) {\n permutedAfterBatch.push(i);\n } else {\n permutedBeforeBatch.push(i);\n }\n }\n permuted.push(...permutedBeforeBatch);\n permuted.push(0);\n permuted.push(...permutedAfterBatch);\n }\n return permuted;\n}\n\n/**\n * Gets the shape of the reshaped and permuted input Tensor before any cropping\n * is applied. 
The new shape will be:\n *\n * [batch / prod(blockShape),inputShape[1] * blockShape[0], ...,\n * inputShape[M] * blockShape[M-1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 3: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getReshapedPermuted(\n inputShape: number[], blockShape: number[], prod: number,\n batchToSpace = true): number[] {\n const reshapedPermuted = [];\n\n if (batchToSpace) {\n reshapedPermuted.push(inputShape[0] / prod);\n } else {\n reshapedPermuted.push(inputShape[0] * prod);\n }\n\n for (let i = 1; i < inputShape.length; ++i) {\n if (i <= blockShape.length) {\n if (batchToSpace) {\n reshapedPermuted.push(blockShape[i - 1] * inputShape[i]);\n } else {\n reshapedPermuted.push(inputShape[i] / blockShape[i - 1]);\n }\n } else {\n reshapedPermuted.push(inputShape[i]);\n }\n }\n\n return reshapedPermuted;\n}\n\n/**\n * Converts the crops argument into the beginning coordinates of a slice\n * operation.\n */\nexport function getSliceBeginCoords(\n crops: number[][], blockShape: number): number[] {\n const sliceBeginCoords = [0];\n for (let i = 0; i < blockShape; ++i) {\n sliceBeginCoords.push(crops[i][0]);\n }\n return sliceBeginCoords;\n}\n\n/**\n * Converts the crops argument into the size of a slice operation. When\n * combined with getSliceBeginCoords this function allows the reshaped and\n * permuted Tensor to be cropped to its final output shape of:\n *\n * inputShape[1] * blockShape[0] - crops[0,0] - crops[0,1], ...,\n * inputShape[M] * blockShape[M-1] -crops[M-1,0] -\n * crops[M-1,1],inputShape[M+1], ..., inputShape[N-1]]\n *\n * See step 4: https://www.tensorflow.org/api_docs/python/tf/batch_to_space_nd\n */\nexport function getSliceSize(\n uncroppedShape: number[], crops: number[][], blockShape: number): number[] {\n const sliceSize = uncroppedShape.slice(0, 1);\n for (let i = 0; i < blockShape; ++i) {\n sliceSize.push(uncroppedShape[i + 1] - crops[i][0] - crops[i][1]);\n }\n\n return sliceSize;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
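A hand-traced example of the batch-to-space shape helpers above, for a hypothetical input of shape [4, 2, 2, 1] with blockShape [2, 2] (so prod = 4); the constants below are the values the helpers would return:

```js
// batchToSpace = true, inputShape = [4, 2, 2, 1], blockShape = [2, 2], prod = 4.

// getReshaped: [...blockShape, batch / prod, ...inputShape.slice(1)]
const reshaped = [2, 2, 1, 2, 2, 1];

// getPermuted(6, 2): interleaves each spatial dim with its block dim and keeps
// the trailing (non-spatial) dims in place.
const permuted = [2, 3, 0, 4, 1, 5];

// getReshapedPermuted: [batch / prod, inputShape[1] * blockShape[0],
//                       inputShape[2] * blockShape[1], inputShape[3]]
const reshapedPermuted = [1, 4, 4, 1];

// With zero crops, getSliceBeginCoords([[0, 0], [0, 0]], 2) gives the slice origin.
const sliceBegin = [0, 0, 0];
```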
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as util from '../util';\n\n/**\n * Returns true if the axis specifies the inner most dimensions of the\n * array.\n */\nexport function axesAreInnerMostDims(axes: number[], rank: number): boolean {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n}\n\nexport function combineLocations(\n outputLoc: number[], reduceLoc: number[], axes: number[]): number[] {\n const rank = outputLoc.length + reduceLoc.length;\n const loc = [];\n let outIdx = 0;\n let reduceIdx = 0;\n   for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n loc.push(outputLoc[outIdx++]);\n } else {\n loc.push(reduceLoc[reduceIdx++]);\n }\n }\n return loc;\n}\n\nexport function computeOutAndReduceShapes(\n aShape: number[], axes: number[]): [number[], number[]] {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outShape.push(aShape[dim]);\n }\n }\n const reduceShape = axes.map(dim => aShape[dim]);\n return [outShape, reduceShape];\n}\n\nexport function expandShapeToKeepDim(\n shape: number[], axes: number[]): number[] {\n const reduceSubShape = axes.map(x => 1);\n return combineLocations(shape, reduceSubShape, axes);\n}\n\nexport function parseAxisParam(\n axis: number|number[], shape: number[]): number[] {\n const rank = shape.length;\n\n // Normalize input\n axis = axis == null ? shape.map((s, i) => i) : [].concat(axis);\n\n // Check for valid range\n util.assert(\n axis.every(ax => ax >= -rank && ax < rank),\n `All values in axis param must be in range [-${rank}, ${rank}) but ` +\n `got axis ${axis}`);\n\n // Check for only integers\n util.assert(\n axis.every(ax => util.isInt(ax)),\n `All values in axis param must be integers but ` +\n `got axis ${axis}`);\n\n // Handle negative axis.\n return axis.map(a => a < 0 ? rank + a : a);\n}\n\nexport function assertAxesAreInnerMostDims(\n msg: string, axes: number[], rank: number): void {\n util.assert(\n axesAreInnerMostDims(axes, rank),\n `${msg} supports only inner-most axes for now. ` +\n `Got axes ${axes} and rank-${rank} input.`);\n}\n\n/**\n * Returns the axes permutation to be used with `tf.transpose`, if such\n * permutation is necessary. Otherwise it returns null. This method is used by\n * operations that operate only on inner-most axes.\n */\nexport function getAxesPermutation(axes: number[], rank: number): number[]|\n null {\n if (axesAreInnerMostDims(axes, rank)) {\n return null;\n }\n const result: number[] = [];\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n result.push(i);\n }\n }\n axes.forEach(axis => result.push(axis));\n return result;\n}\n\n/** Returns the axes permutation that undoes the original permutation. 
*/\nexport function getUndoAxesPermutation(axes: number[]): number[] {\n return axes.map((axis, i) => [i, axis])\n .sort((a, b) => a[1] - b[1])\n .map(x => x[0]);\n}\n\nexport function getInnerMostAxes(numAxes: number, rank: number): number[] {\n const res: number[] = [];\n for (let i = rank - numAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as util from '../util';\n\nexport function assertParamsConsistent(shapes: number[][], axis: number) {\n const rank = shapes[0].length;\n shapes.forEach((shape, i) => {\n util.assert(\n shape.length === rank,\n `Error in concat${rank}D: rank of tensors[${i}] must be the same ` +\n `as the rank of the rest (${rank})`);\n });\n\n util.assert(\n axis >= 0 && axis < rank,\n `Error in concat${rank}D: axis must be between 0 and ${rank - 1}.`);\n\n const firstShape = shapes[0];\n shapes.forEach((shape, i) => {\n for (let r = 0; r < rank; r++) {\n util.assert(\n (r === axis) || (shape[r] === firstShape[r]),\n `Error in concat${rank}D: Shape of tensors[${i}] (${shape}) ` +\n `does not match the shape of the rest (${firstShape}) ` +\n `along the non-concatenated axis ${i}.`);\n }\n });\n}\n\nexport function computeOutShape(shapes: number[][], axis: number): number[] {\n const outputShape = shapes[0].slice();\n for (let i = 1; i < shapes.length; i++) {\n outputShape[axis] += shapes[i][axis];\n }\n return outputShape;\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
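Hand-traced examples of the axis and concat helpers above, with illustrative shapes and axes:

```js
// Reducing axis 1 of a [2, 3, 4] tensor:
const outAndReduce = [[2, 4], [3]];  // computeOutAndReduceShapes([2, 3, 4], [1])

// Axis 1 is not inner-most for rank 3, so a transpose is needed first:
const perm = [0, 2, 1];      // getAxesPermutation([1], 3)
const undoPerm = [0, 2, 1];  // getUndoAxesPermutation(perm) -- its own inverse here

// Concatenating shapes [2, 3] and [2, 4] along axis 1:
const concatShape = [2, 7];  // computeOutShape([[2, 3], [2, 4]], 1)
```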
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport {Tensor} from '../tensor';\nimport {computeStrides} from '../util';\n\n/**\n * Validate gather nd inputs.\n *\n * @param tensor The tensor contains the source values.\n * @param indices The tensor contains the indices to slice the source.\n *\n * @returns [resultShape, numUpdates, sliceSize, strides]\n */\nexport function prepareAndValidate(\n tensor: Tensor, indices: Tensor): [number[], number, number, number[]] {\n if (tensor.rank < 1) {\n throw new Error(\n 'tf.gatherND() expects the input to be rank 1 or higher,' +\n ` but the rank was ${tensor.rank}.`);\n }\n if (indices.rank < 1) {\n throw new Error(\n 'tf.gatherND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error(\n 'tf.gatherND() expects the indices to be int32 type,' +\n ` but the dtype was ${indices.dtype}.`);\n }\n if (indices.shape[indices.rank - 1] > tensor.rank) {\n throw new Error(\n 'index innermost dimension length must be <= tensor rank; saw: ' +\n `${indices.shape[indices.rank - 1]} vs. ${tensor.rank}`);\n }\n\n if (tensor.size === 0) {\n throw new Error(\n 'Requested more than 0 entries, but input is empty.' +\n ` Input shape: ${tensor.shape}.`);\n }\n\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n\n // The result shape is\n // indices.shape[:-1] + params.shape[indices.shape[-1]:]\n let nResult = 1;\n for (let i = 0; i < indicesShape.length - 1; ++i) {\n nResult *= indicesShape[i];\n }\n\n const inputShape = tensor.shape;\n\n const resultShape = indicesShape.slice();\n resultShape.pop();\n\n let sliceSize = 1;\n for (let i = sliceRank; i < tensor.rank; ++i) {\n sliceSize *= inputShape[i];\n resultShape.push(inputShape[i]);\n }\n\n const strides =\n [...computeStrides(tensor.shape).map(stride => stride / sliceSize),\n 1].slice(0, sliceRank);\n\n return [resultShape, nResult, sliceSize, strides];\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
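A hand-traced run of `prepareAndValidate` above, for a hypothetical int32 indices tensor of shape [2, 2] gathering from a source tensor of shape [5, 4, 3]:

```js
// indices.shape = [2, 2]  =>  sliceRank = 2: each index row addresses the first
// two dimensions of the [5, 4, 3] source and copies a contiguous slice of 3 values.
const resultShape = [2, 3];  // indices.shape[:-1] + tensor.shape[sliceRank:]
const numSlices   = 2;       // product of indices.shape[:-1]
const sliceSize   = 3;       // product of tensor.shape[sliceRank:]
const strides     = [4, 1];  // computeStrides([5, 4, 3]) / sliceSize, first sliceRank entries
```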
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * Inputs of size above this threshold will be parallelized by calling multiple\n * shader programs.\n */\nimport {nearestDivisor} from '../util';\n\nexport const PARALLELIZE_THRESHOLD = 30;\n\nexport interface ReduceInfo {\n windowSize: number;\n batchSize: number;\n inSize: number;\n}\n\nexport function computeOptimalWindowSize(inSize: number): number {\n if (inSize <= PARALLELIZE_THRESHOLD) {\n return inSize;\n }\n return nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport {Tensor} from '../tensor';\nimport {computeStrides, sizeFromShape} from '../util';\n\n/**\n * Check whether updates.shape = indices.shape[:batchDim] +\n * shape[sliceDim:]\n *\n * @param x The input tensor.\n */\nexport function validateUpdateShape(\n shape: number[], indices: Tensor, updates: Tensor) {\n const sliceDim = (indices.rank > 1) ? indices.shape[indices.rank - 1] : 1;\n const batchDim = (indices.rank > 1) ? indices.rank - 1 : 1;\n\n const shapeError = 'Must have updates.shape = indices.shape[:batchDim] + ' +\n `shape[sliceDim:], got updates.shape: ${updates.shape}` +\n `, indices.shape: ${indices.shape}, shape: ${shape}` +\n `, sliceDim: ${sliceDim}, and batchDim: ${batchDim}.`;\n\n if (updates.rank < batchDim) {\n throw new Error(shapeError + ` update.rank < ${batchDim}. 
`);\n }\n if (shape.length < sliceDim + (updates.rank - batchDim)) {\n throw new Error(\n shapeError +\n ` Output shape length < ${sliceDim + (updates.rank - batchDim)}`);\n }\n if (updates.rank !== batchDim + shape.length - sliceDim) {\n throw new Error(\n shapeError + ` update.rank != ${batchDim + shape.length - sliceDim}`);\n }\n for (let d = 0; d < batchDim; ++d) {\n if (updates.shape[d] !== indices.shape[d]) {\n throw new Error(\n shapeError +\n ` updates.shape[${d}] (${updates.shape[d]}) != indices.shape[${d}] (${\n indices.shape[d]}).`);\n }\n }\n for (let d = 0; d < updates.rank - batchDim; ++d) {\n if (updates.shape[d + batchDim] !== shape[d + sliceDim]) {\n throw new Error(\n shapeError +\n ` updates.shape[${d + batchDim}] (${\n updates.shape[d + batchDim]}) != shape[${d + batchDim}] (${\n shape[d + batchDim]})`);\n }\n }\n}\n\nexport interface ScatterShapeInfo {\n sliceRank: number;\n numUpdates: number;\n sliceSize: number;\n strides: number[];\n outputSize: number;\n}\n/**\n * Validate scatter nd inputs.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n */\nexport function validateInput(\n updates: Tensor, indices: Tensor, shape: number[]) {\n if (indices.rank < 1) {\n throw new Error(\n 'tf.scatterND() expects the indices to be rank 1 or higher,' +\n ` but the rank was ${indices.rank}.`);\n }\n if (updates.rank < 1) {\n throw new Error(\n 'tf.scatterND() expects the updates to be rank 1 or higher,' +\n ` but the rank was ${updates.rank}.`);\n }\n if (indices.dtype !== 'int32') {\n throw new Error(`The dtype of 'indices' should be int32, but got dtype: ${\n indices.dtype}`);\n }\n if (shape.length < 1) {\n throw new Error(\n `Output rank must be greater or equal to 1, but got shape: ${shape}`);\n }\n\n if (shape.length === 0) {\n if (indices.size === 0) {\n throw new Error(`Indices specified for empty output. indices shape: ${\n indices.shape}`);\n }\n if (updates.size === 0) {\n throw new Error(`Updates specified for empty output. updates shape: ${\n updates.shape}`);\n }\n }\n\n validateUpdateShape(shape, indices, updates);\n}\n\n/**\n * Calculate the shape information for the output.\n *\n * @param update The tensor contains the update values.\n * @param indices The tensor contains the indices for the update values.\n * @param shape The shape of the output tensor.\n *\n * @returns ScatterShapeInfo\n */\nexport function calculateShapes(\n updates: Tensor, indices: Tensor, shape: number[]): ScatterShapeInfo {\n // Calculate the number of dimensions in indices\n const sliceRank = (indices.rank > 1) ? indices.shape[indices.rank - 1] : 1;\n\n // Calculate the number of elements that make up each slice of our updated\n // tensor. This allows us to work with flattened tensors and copy over whole\n // slices at a time.\n const totalNd = shape.length;\n\n let sliceSize = 1;\n for (let i = sliceRank; i < totalNd; ++i) {\n sliceSize *= shape[i];\n }\n\n const safeSliceDim = (sliceRank < 1) ? 1 : sliceRank;\n const numUpdates = indices.size / safeSliceDim;\n\n const outputStrides = [...computeStrides(shape), 1];\n const strides = outputStrides.slice(\n outputStrides.length - sliceRank, outputStrides.length);\n const outputSize = sizeFromShape(shape);\n return {sliceRank, numUpdates, sliceSize, strides, outputSize};\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
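calculateShapes above flattens the scatter into `numUpdates` copies of `sliceSize` contiguous elements. A usage sketch of the tf.scatterND call these checks support:

```js
// Four rank-1 indices into an output of shape [8]; here sliceRank = 1,
// sliceSize = 1, numUpdates = 4, strides = [1], outputSize = 8.
const indices = tf.tensor2d([[4], [3], [1], [7]], [4, 1], 'int32');
const updates = tf.tensor1d([9, 10, 11, 12]);
tf.scatterND(indices, updates, [8]).print();
// [0, 11, 0, 10, 9, 0, 0, 12]
```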
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {nearestDivisor} from '../util';\nimport {PARALLELIZE_THRESHOLD} from './reduce_util';\n\nexport interface SegOpInfo {\n windowSize: number;\n batchSize: number;\n inSize: number;\n numSegments: number;\n}\n\nexport function segOpComputeOptimalWindowSize(\n inSize: number, numSegments: number): number {\n let done = false;\n let res;\n\n if (inSize <= PARALLELIZE_THRESHOLD) {\n res = inSize;\n done = true;\n } else {\n res = nearestDivisor(inSize, Math.floor(Math.sqrt(inSize)));\n }\n\n while (!done) {\n if (res > numSegments || res === inSize) {\n done = true;\n break;\n } else {\n res = nearestDivisor(inSize, res + 1);\n }\n }\n return res;\n}\n\nexport function computeOutShape(\n aShape: number[], axis: number, numSegments: number): number[] {\n const outShape = [];\n const rank = aShape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (dim !== axis) {\n outShape.push(aShape[dim]);\n } else {\n outShape.push(numSegments);\n }\n }\n return outShape;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor} from '../tensor';\nimport * as util from '../util';\n\nexport function assertParamsValid(\n input: Tensor, begin: number[], size: number[]): void {\n util.assert(\n input.rank === begin.length,\n `Error in slice${input.rank}D: Length of begin ${begin} must ` +\n `match the rank of the array (${input.rank}).`);\n util.assert(\n input.rank === size.length,\n `Error in slice${input.rank}D: Length of size ${size} must ` +\n `match the rank of the array (${input.rank}).`);\n\n for (let i = 0; i < input.rank; ++i) {\n util.assert(\n begin[i] + size[i] <= input.shape[i],\n `Error in slice${input.rank}D: begin[${i}] + size[${i}] ` +\n `(${begin[i] + size[i]}) would overflow input.shape[${i}] (${\n input.shape[i]})`);\n }\n}\n\n/**\n * Calculate the start index and output tensor shape for strided slice op.\n * @returns array of [startIndex, size, shrinkAxis]\n */\nexport function getStridedSlicedInfo(\n shape: number[], begin: number[], end: number[], strides: number[],\n beginMask = 0, endMask = 0, ellipsisMask = 0, newAxisMask = 0,\n shrinkAxisMask = 0): [number[], number[], number[]] {\n if (ellipsisMask !== 0) {\n throw new 
Error('ellipsis mask is not yet supported');\n }\n if (newAxisMask !== 0) {\n throw new Error('new axis mask is not yet supported');\n }\n // Note that the axis orders are reversed for runtime ops, so the indices,\n // strides and masks must be as well too.\n const startIndex: number[] = [];\n const endIndex: number[] = [];\n const shrinkAxis: number[] = [];\n for (let i = 0; i < shape.length; i++) {\n startIndex[i] = startForAxis(beginMask, begin, strides, shape, i);\n endIndex[i] = stopForAxis(endMask, end, strides, shape, i);\n // When shrinking an axis, use startIndex + 1 for endIndex.\n // Check the axis bit from right of shrinkAxisMask\n if (shrinkAxisMask & 1 << i) {\n endIndex[i] = startIndex[i] + 1;\n shrinkAxis.push(i);\n }\n }\n\n let size = new Array(shape.length).fill(0);\n size = size.map((d, i) => {\n let count = 0;\n for (let start = startIndex[i];\n !(strides[i] > 0 ? start >= endIndex[i] : start <= endIndex[i]);\n start += strides[i]) {\n count += 1;\n }\n return count;\n });\n\n return [startIndex, size, shrinkAxis];\n}\n\nexport function startForAxis(\n beginMask: number, startIndices: number[], strides: number[],\n inputShape: number[], axis: number): number {\n // Begin with the specified index\n let start = startIndices[axis];\n\n // Check the axis bit from right of beginMask\n if (beginMask & 1 << axis) {\n if (strides[axis] > 0) {\n // Forward iteration - use the first element. These values will get\n // clamped below (Note: We could have set them to 0 and axis_size-1, but\n // use lowest() and max() to maintain symmetry with StopForAxis())\n start = Number.MIN_SAFE_INTEGER;\n } else {\n // Backward iteration - use the last element.\n start = Number.MAX_SAFE_INTEGER;\n }\n }\n\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (start < 0) {\n start += axisSize;\n }\n\n // Clamping\n start = util.clamp(0, start, axisSize - 1);\n\n return start;\n}\n\nexport function stopForAxis(\n endMask: number, stopIndices: number[], strides: number[],\n inputShape: number[], axis: number): number {\n // Begin with the specified index\n let stop = stopIndices[axis];\n\n // Check the axis bit from right of endMask\n if (endMask & (1 << axis)) {\n if (strides[axis] > 0) {\n // Forward iteration - use the last element. These values will get\n // clamped below\n stop = Number.MAX_SAFE_INTEGER;\n } else {\n // Backward iteration - use the first element.\n stop = Number.MIN_SAFE_INTEGER;\n }\n }\n\n // Handle negative indices\n const axisSize = inputShape[axis];\n if (stop < 0) {\n stop += axisSize;\n }\n\n // Clamping\n // Because the end index points one past the last element, we need slightly\n // different clamping ranges depending on the direction.\n if (strides[axis] > 0) {\n // Forward iteration\n stop = util.clamp(0, stop, axisSize);\n } else {\n // Backward iteration\n stop = util.clamp(-1, stop, axisSize - 1);\n }\n\n return stop;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
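The two utility modules above feed tf.unsortedSegmentSum and tf.stridedSlice respectively; short usage sketches (values are illustrative):

```js
// segment_util: computeOutShape([4], 0, 3) -> [3]; each segment id buckets
// the corresponding element of x.
const x = tf.tensor1d([1, 2, 3, 4]);
const segmentIds = tf.tensor1d([1, 2, 0, 1], 'int32');
tf.unsortedSegmentSum(x, segmentIds, 3).print();  // [3, 5, 2]

// slice_util: begin [0, 1], end [2, 3], strides [1, 1] keeps rows 0..1 and
// columns 1..2 (end indices are exclusive, as stopForAxis documents).
const t = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);
tf.stridedSlice(t, [0, 1], [2, 3], [1, 1]).print();  // [[2, 3], [5, 6]]
```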
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from './environment';\nimport {Tensor} from './tensor';\nimport {ArrayData, DataType, RegularArray, TensorLike, TypedArray} from './types';\nimport {assert, isTypedArray, toTypedArray} from './util';\n\nexport function inferShape(val: TypedArray|number|boolean|RegularArray<number>|\n RegularArray<boolean>): number[] {\n let firstElem: typeof val = val;\n\n if (isTypedArray(val)) {\n return [(val as TypedArray).length];\n }\n if (!Array.isArray(val)) {\n return []; // Scalar.\n }\n const shape: number[] = [];\n\n while (firstElem instanceof Array) {\n shape.push(firstElem.length);\n firstElem = firstElem[0];\n }\n if (val instanceof Array && ENV.get('TENSORLIKE_CHECK_SHAPE_CONSISTENCY')) {\n deepAssertShapeConsistency(val, shape, []);\n }\n\n return shape;\n}\n\nfunction deepAssertShapeConsistency(\n val: number|boolean|RegularArray<number>|RegularArray<boolean>,\n shape: number[], indices: number[]) {\n indices = indices || [];\n if (!(val instanceof Array)) {\n assert(\n shape.length === 0,\n () => `Element arr[${indices.join('][')}] is a primitive, ` +\n `but should be an array of ${shape[0]} elements`);\n return;\n }\n assert(\n shape.length > 0,\n () => `Element arr[${indices.join('][')}] should be a primitive, ` +\n `but is an array of ${val.length} elements`);\n assert(\n val.length === shape[0],\n () => `Element arr[${indices.join('][')}] should have ${shape[0]} ` +\n `elements, but has ${val.length} elements`);\n const subShape = shape.slice(1);\n for (let i = 0; i < val.length; ++i) {\n deepAssertShapeConsistency(val[i], subShape, indices.concat(i));\n }\n}\n\nexport function convertToTensor<T extends Tensor>(\n x: T|TensorLike, argName: string, functionName: string,\n dtype: DataType = 'float32'): T {\n dtype = dtype || 'float32';\n if (x instanceof Tensor) {\n return x;\n }\n if (!isTypedArray(x) && !Array.isArray(x) && typeof x !== 'number' &&\n typeof x !== 'boolean') {\n throw new Error(\n `Argument '${argName}' passed to '${functionName}' must be a ` +\n `Tensor or TensorLike, but got ${x.constructor.name}`);\n }\n const inferredShape = inferShape(x);\n if (!isTypedArray(x) && !Array.isArray(x)) {\n x = [x] as number[];\n }\n return Tensor.make(\n inferredShape,\n {values: toTypedArray(x as ArrayData<DataType>, dtype, ENV.get('DEBUG'))},\n dtype);\n}\n\nexport function convertToTensorArray<T extends Tensor>(\n arg: T[]|TensorLike[], argName: string, functionName: string): T[] {\n if (!Array.isArray(arg)) {\n throw new Error(\n `Argument ${argName} passed to ${functionName} must be a ` +\n '`Tensor[]` or `TensorLike[]`');\n }\n const tensors = arg as T[];\n return tensors.map(\n (t, i) => convertToTensor(t, `${argName}[${i}]`, functionName));\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
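convertToTensor and inferShape are what let every op accept plain arrays, typed arrays, and scalars in place of Tensors. A small illustrative sketch:

```js
// Both arguments are TensorLike; convertToTensor wraps them before the
// kernel runs. inferShape walks the first element at each nesting level,
// so [[1, 2], [3, 4]] is reported as shape [2, 2].
tf.add([1, 2, 3], 4).print();         // [5, 6, 7]
tf.square([[1, 2], [3, 4]]).print();  // [[1, 4], [9, 16]]
```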
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\n\n/**\n * Used for wrapping functions that perform math operations on\n * Tensors. The function will be wrapped in a named scope that cleans all\n * memory usage after the function is done.\n */\nexport function op<T extends Function>(f: {[name: string]: T}): T {\n const keys = Object.keys(f);\n if (keys.length !== 1) {\n throw new Error(\n `Please provide an object with a single key ` +\n `(operation name) mapping to a function. Got an object with ` +\n `${keys.length} keys.`);\n }\n\n let opName = keys[0];\n const fn = f[opName];\n\n // Strip the underscore from the end of the function name.\n if (opName.endsWith('_')) {\n opName = opName.substring(0, opName.length - 1);\n }\n\n // tslint:disable-next-line:no-any\n const f2 = (...args: any[]) => {\n ENV.engine.startScope(opName);\n try {\n const result = fn(...args);\n if (result instanceof Promise) {\n console.error('Cannot return a Promise inside of tidy.');\n }\n ENV.engine.endScope(result);\n return result;\n } catch (ex) {\n ENV.engine.endScope(null);\n throw ex;\n }\n };\n Object.defineProperty(f2, 'name', {value: opName, configurable: true});\n\n // tslint:disable-next-line:no-any\n return f2 as any as T;\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {customGrad} from '../gradients';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {op} from './operation';\n\n/**\n * Computes the softmax normalized vector given the logits.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.softmax().print(); // or tf.softmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.softmax().print(); // or tf.softmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. 
Defaults to `-1`\n * which indicates the last dimension.\n */\n/** @doc {heading: 'Operations', subheading: 'Normalization'} */\nfunction softmax_<T extends Tensor>(logits: T|TensorLike, dim = -1): T {\n const $logits = convertToTensor(logits, 'logits', 'softmax');\n\n if (dim === -1) {\n dim = $logits.rank - 1;\n }\n if (dim !== $logits.rank - 1) {\n throw Error(\n 'Softmax along a non-last dimension is not yet supported. ' +\n `Logits was rank ${$logits.rank} and dim was ${dim}`);\n }\n\n const customOp = customGrad(logits => {\n // Do it in log space for numerical stability.\n // exp(X - logSumExp(X))\n const keepDims = true;\n const lse = logits.logSumExp([dim], keepDims);\n const logResult = logits.toFloat().sub(lse);\n const y = logResult.exp() as T;\n\n const gradFunc = (dy: T) => {\n const dyTimesY = dy.mul(y);\n const keepDims = true;\n return dyTimesY.sub(dyTimesY.sum([dim], keepDims).mul(y));\n };\n\n return {value: y, gradFunc};\n });\n\n return customOp($logits);\n}\n\n/**\n * Computes the log softmax.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * ```js\n * const a = tf.tensor2d([2, 4, 6, 1, 2, 3], [2, 3]);\n *\n * a.logSoftmax().print(); // or tf.logSoftmax(a)\n * ```\n *\n * @param logits The logits array.\n * @param axis The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n */\n/** @doc {heading: 'Operations', subheading: 'Normalization'} */\nfunction logSoftmax_<T extends Tensor>(logits: T|TensorLike, axis = -1): T {\n const $logits = convertToTensor(logits, 'logits', 'logSoftmax');\n\n if (axis === -1) {\n axis = $logits.rank - 1;\n }\n if (axis !== $logits.rank - 1) {\n throw Error(\n 'Log Softmax along a non-last dimension is not yet supported. ' +\n `Logits was rank ${$logits.rank} and axis was ${axis}`);\n }\n\n const customOp = customGrad(logits => {\n const keepDims = true;\n const xMax = logits.max(axis, true);\n const shifted = logits.sub(xMax);\n const value =\n shifted.toFloat().sub(shifted.exp().sum(axis, keepDims).log()) as T;\n\n const gradFunc = (dy: T) => {\n const softmax = value.exp();\n return dy.sub(dy.sum(axis, keepDims).mul(softmax));\n };\n\n return {value, gradFunc};\n });\n\n return customOp($logits);\n}\n\nexport const softmax = op({softmax_});\nexport const logSoftmax = op({logSoftmax_});\n","/**\n * @license\n * Copyright 2018 Google LLC. 
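Both ops above are wrapped with op(), so their intermediates are scoped and cleaned up, and their gradients come from the customGrad closures: for softmax the backward pass is dy·y − (Σ dy·y)·y. A quick sanity sketch of that formula (not part of the library's own tests): because softmax outputs always sum to 1, the gradient of their sum is identically zero.

```js
const x = tf.tensor1d([1, 2, 3]);
// d/dx sum(softmax(x)) should be ~0 everywhere, since the sum is constant 1.
const dsum = tf.grad(z => tf.softmax(z).sum());
dsum(x).print();  // [0, 0, 0] up to float error
```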
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport {op} from './operation';\n\n/**\n * Converts two real numbers to a complex number.\n *\n * Given a tensor `real` representing the real part of a complex number, and a\n * tensor `imag` representing the imaginary part of a complex number, this\n * operation returns complex numbers elementwise of the form [r0, i0, r1, i1],\n * where r represents the real part and i represents the imag part.\n *\n * The input tensors real and imag must have the same shape.\n *\n * ```js\n * const real = tf.tensor1d([2.25, 3.25]);\n * const imag = tf.tensor1d([4.75, 5.75]);\n * const complex = tf.complex(real, imag);\n *\n * complex.print();\n * ```\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction complex_<T extends Tensor>(real: T|TensorLike, imag: T|TensorLike): T {\n const $real = convertToTensor(real, 'real', 'complex');\n const $imag = convertToTensor(imag, 'imag', 'complex');\n util.assertShapesMatch(\n $real.shape, $imag.shape,\n `real and imag shapes, ${$real.shape} and ${$imag.shape}, ` +\n `must match in call to tf.complex().`);\n\n return ENV.engine.runKernel(\n backend => backend.complex($real, $imag), {$real, $imag}) as T;\n}\n\n/**\n * Returns the real part of a complex (or real) tensor.\n *\n * Given a tensor input, this operation returns a tensor of type float that is\n * the real part of each element in input considered as a complex number.\n *\n * If the input is real, it simply makes a clone.\n *\n * ```js\n * const x = tf.complex([-2.25, 3.25], [4.75, 5.75]);\n * tf.real(x).print();\n * ```\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction real_<T extends Tensor>(input: T|TensorLike): T {\n const $input = convertToTensor(input, 'input', 'real');\n\n return ENV.engine.runKernel(backend => backend.real($input), {$input}) as T;\n}\n\n/**\n * Returns the imaginary part of a complex (or real) tensor.\n *\n * Given a tensor input, this operation returns a tensor of type float that is\n * the imaginary part of each element in input considered as a complex number.\n * If input is real, a tensor of all zeros is returned.\n *\n * ```js\n * const x = tf.complex([-2.25, 3.25], [4.75, 5.75]);\n * tf.imag(x).print();\n * ```\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction imag_<T extends Tensor>(input: T|TensorLike): T {\n const $input = convertToTensor(input, 'input', 'imag');\n\n return ENV.engine.runKernel(backend => backend.imag($input), {$input}) as T;\n}\n\nexport const complex = op({complex_});\nexport const real = op({real_});\nexport const imag = op({imag_});\n","/**\n * @license\n * Copyright 2018 Google LLC. 
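A short sketch tying the three ops together. The `abs()` line is an assumption: it relies on the complex-aware abs path (backend.complexAbs) being wired up for complex64 inputs, as the backend listing later in this bundle suggests.

```js
const z = tf.complex([3, 30], [4, 40]);
tf.real(z).print();  // [3, 30]
tf.imag(z).print();  // [4, 40]
z.abs().print();     // [5, 50] -- sqrt(re^2 + im^2), assuming complexAbs is used
```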
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Scalar, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D, Tensor5D, Tensor6D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {inferShape} from '../tensor_util_env';\nimport {TensorLike, TensorLike1D, TensorLike2D, TensorLike3D, TensorLike4D, TensorLike5D, TensorLike6D} from '../types';\nimport {ArrayData, DataType, Rank, ShapeMap} from '../types';\nimport {assertNonNull, assertShapesMatch, getTypedArrayFromDType, isTypedArray, makeOnesTypedArray, makeZerosTypedArray, sizeFromShape, toTypedArray} from '../util';\nimport {complex} from './complex_ops';\nimport {op} from './operation';\n\n/**\n * Creates a `tf.Tensor` with the provided values, shape and dtype.\n *\n * ```js\n * // Pass an array of values to create a vector.\n * tf.tensor([1, 2, 3, 4]).print();\n * ```\n *\n * ```js\n * // Pass a nested array of values to make a matrix or a higher\n * // dimensional tensor.\n * tf.tensor([[1, 2], [3, 4]]).print();\n * ```\n *\n * ```js\n * // Pass a flat array and specify a shape yourself.\n * tf.tensor([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction tensor<R extends Rank>(\n values: TensorLike, shape?: ShapeMap[R],\n dtype: DataType = 'float32'): Tensor<R> {\n if (dtype === 'complex64') {\n throw new Error(\n `Cannot construct a complex64 tensor directly. ` +\n `Please use tf.complex(real, imag).`);\n }\n if (!isTypedArray(values) && !Array.isArray(values) &&\n typeof values !== 'number' && typeof values !== 'boolean') {\n throw new Error(\n 'values passed to tensor(values) must be an ' +\n 'array of numbers or booleans, or a TypedArray');\n }\n const inferredShape = inferShape(values);\n if (shape != null && inferredShape.length !== 1) {\n assertShapesMatch(\n shape, inferredShape,\n `Error creating a new Tensor. ` +\n `Inferred shape (${inferredShape}) does not match the ` +\n `provided shape (${shape}). 
`);\n }\n if (!isTypedArray(values) && !Array.isArray(values)) {\n values = [values] as number[];\n }\n shape = shape || inferredShape;\n return Tensor.make(\n shape, {\n values:\n toTypedArray(values as ArrayData<DataType>, dtype, ENV.get('DEBUG'))\n },\n dtype);\n}\n\n/**\n * Creates rank-0 `tf.Tensor` (scalar) with the provided value and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.scalar` as it makes the code more readable.\n *\n * ```js\n * tf.scalar(3.14).print();\n * ```\n *\n * @param value The value of the scalar.\n * @param dtype The data type.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction scalar(\n value: number|boolean|[number, number],\n dtype: DataType = 'float32'): Scalar {\n if ((isTypedArray(value) || Array.isArray(value)) && dtype !== 'complex64') {\n throw new Error(\n 'Error creating a new Scalar: value must be a primitive ' +\n '(number|boolean)');\n }\n return tensor(value, [], dtype);\n}\n\n/**\n * Creates rank-1 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor1d` as it makes the code more readable.\n *\n * ```js\n * tf.tensor1d([1, 2, 3]).print();\n * ```\n *\n * @param values The values of the tensor. Can be array of numbers,\n * or a `TypedArray`.\n * @param dtype The data type.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction tensor1d(values: TensorLike1D, dtype: DataType = 'float32'): Tensor1D {\n assertNonNull(values);\n const inferredShape = inferShape(values);\n if (inferredShape.length !== 1) {\n throw new Error('tensor1d() requires values to be a flat/TypedArray');\n }\n return tensor(values, inferredShape as [number], dtype);\n}\n\n/**\n * Creates rank-2 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor2d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor2d([[1, 2], [3, 4]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. 
If not provided, it is inferred from\n * `values`.\n * @param dtype The data type.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction tensor2d(\n values: TensorLike2D, shape?: [number, number],\n dtype: DataType = 'float32'): Tensor2D {\n assertNonNull(values);\n if (shape != null && shape.length !== 2) {\n throw new Error('tensor2d() requires shape to have two numbers');\n }\n const inferredShape = inferShape(values);\n if (inferredShape.length !== 2 && inferredShape.length !== 1) {\n throw new Error(\n 'tensor2d() requires values to be number[][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error(\n 'tensor2d() requires shape to be provided when `values` ' +\n 'are a flat/TypedArray');\n }\n shape = shape || inferredShape as [number, number];\n return tensor(values, shape, dtype);\n}\n\n/**\n * Creates rank-3 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor3d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor3d([[[1], [2]], [[3], [4]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor3d([1, 2, 3, 4], [2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. If not provided, it is inferred from\n * `values`.\n * @param dtype The data type.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction tensor3d(\n values: TensorLike3D, shape?: [number, number, number],\n dtype: DataType = 'float32'): Tensor3D {\n assertNonNull(values);\n if (shape != null && shape.length !== 3) {\n throw new Error('tensor3d() requires shape to have three numbers');\n }\n const inferredShape = inferShape(values);\n if (inferredShape.length !== 3 && inferredShape.length !== 1) {\n throw new Error(\n 'tensor3d() requires values to be number[][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error(\n 'tensor3d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n shape = shape || inferredShape as [number, number, number];\n return tensor(values, shape, dtype);\n}\n\n/**\n * Creates rank-4 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor4d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor4d([[[[1], [2]], [[3], [4]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. 
If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction tensor4d(\n values: TensorLike4D, shape?: [number, number, number, number],\n dtype: DataType = 'float32'): Tensor4D {\n assertNonNull(values);\n if (shape != null && shape.length !== 4) {\n throw new Error('tensor4d() requires shape to have four numbers');\n }\n const inferredShape = inferShape(values);\n if (inferredShape.length !== 4 && inferredShape.length !== 1) {\n throw new Error(\n 'tensor4d() requires values to be number[][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error(\n 'tensor4d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n shape = shape || inferredShape as [number, number, number, number];\n return tensor(values, shape, dtype);\n}\n\n/**\n * Creates rank-5 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor5d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor5d([[[[[1], [2]], [[3], [4]]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor5d([1, 2, 3, 4, 5, 6, 7, 8], [1, 2, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction tensor5d(\n values: TensorLike5D, shape?: [number, number, number, number, number],\n dtype: DataType = 'float32'): Tensor5D {\n assertNonNull(values);\n if (shape != null && shape.length !== 5) {\n throw new Error('tensor5d() requires shape to have five numbers');\n }\n const inferredShape = inferShape(values);\n if (inferredShape.length !== 5 && inferredShape.length !== 1) {\n throw new Error(\n 'tensor5d() requires values to be ' +\n 'number[][][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error(\n 'tensor5d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n shape = shape || inferredShape as [number, number, number, number, number];\n return tensor(values, shape, dtype);\n}\n\n/**\n * Creates rank-6 `tf.Tensor` with the provided values, shape and dtype.\n *\n * The same functionality can be achieved with `tf.tensor`, but in general\n * we recommend using `tf.tensor6d` as it makes the code more readable.\n *\n * ```js\n * // Pass a nested array.\n * tf.tensor6d([[[[[[1],[2]],[[3],[4]]],[[[5],[6]],[[7],[8]]]]]]).print();\n * ```\n * ```js\n * // Pass a flat array and specify a shape.\n * tf.tensor6d([1, 2, 3, 4, 5, 6, 7, 8], [1, 1, 2, 2, 2, 1]).print();\n * ```\n *\n * @param values The values of the tensor. Can be nested array of numbers,\n * or a flat array, or a `TypedArray`.\n * @param shape The shape of the tensor. Optional. 
If not provided,\n * it is inferred from `values`.\n * @param dtype The data type.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction tensor6d(\n values: TensorLike6D,\n shape?: [number, number, number, number, number, number],\n dtype: DataType = 'float32'): Tensor6D {\n assertNonNull(values);\n if (shape != null && shape.length !== 6) {\n throw new Error('tensor6d() requires shape to have six numbers');\n }\n const inferredShape = inferShape(values);\n if (inferredShape.length !== 6 && inferredShape.length !== 1) {\n throw new Error(\n 'tensor6d() requires values to be number[][][][] or flat/TypedArray');\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error(\n 'tensor6d() requires shape to be provided when `values` ' +\n 'are a flat array');\n }\n shape = shape ||\n inferredShape as [number, number, number, number, number, number];\n return tensor(values, shape, dtype);\n}\n\n/**\n * Creates a `tf.Tensor` with all elements set to 1.\n *\n * ```js\n * tf.ones([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The type of an element in the resulting tensor. Defaults to\n * 'float'.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction ones<R extends Rank>(\n shape: ShapeMap[R], dtype: DataType = 'float32'): Tensor<R> {\n if (dtype === 'complex64') {\n const real = ones(shape, 'float32');\n const imag = ones(shape, 'float32');\n return complex(real, imag);\n }\n const values = makeOnesTypedArray(sizeFromShape(shape), dtype);\n return Tensor.make(shape, {values}, dtype);\n}\n\n/**\n * Creates a `tf.Tensor` with all elements set to 0.\n *\n * ```js\n * tf.zeros([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The type of an element in the resulting tensor. Can\n * be 'float32', 'int32' or 'bool'. Defaults to 'float'.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction zeros<R extends Rank>(\n shape: ShapeMap[R], dtype: DataType = 'float32'): Tensor<R> {\n if (dtype === 'complex64') {\n const real = zeros(shape, 'float32');\n const imag = zeros(shape, 'float32');\n return complex(real, imag);\n }\n const values = makeZerosTypedArray(sizeFromShape(shape), dtype);\n return Tensor.make(shape, {values}, dtype);\n}\n\n/**\n * Creates a `tf.Tensor` filled with a scalar value.\n *\n * ```js\n * tf.fill([2, 2], 4).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param value The scalar value to fill the tensor with.\n * @param dtype The type of an element in the resulting tensor. 
Defaults to\n * 'float'.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction fill<R extends Rank>(\n shape: ShapeMap[R], value: number, dtype: DataType = 'float32'): Tensor<R> {\n const values = getTypedArrayFromDType(dtype, sizeFromShape(shape));\n values.fill(value);\n return Tensor.make(shape, {values}, dtype);\n}\n\n/**\n * Creates a `tf.Tensor` with all elements set to 1 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.onesLike(x).print();\n * ```\n * @param x A tensor.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction onesLike_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'onesLike');\n return ones($x.shape, $x.dtype) as T;\n}\n\n/**\n * Creates a `tf.Tensor` with all elements set to 0 with the same shape as the\n * given tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n * tf.zerosLike(x).print();\n * ```\n *\n * @param x The tensor of required shape.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction zerosLike_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'zerosLike');\n return zeros($x.shape, $x.dtype) as T;\n}\n\n/**\n * Return an evenly spaced sequence of numbers over the given interval.\n *\n * ```js\n * tf.linspace(0, 9, 10).print();\n * ```\n * @param start The start value of the sequence.\n * @param stop The end value of the sequence.\n * @param num The number of values to generate.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction linspace(start: number, stop: number, num: number): Tensor1D {\n if (num === 0) {\n throw new Error('Cannot request zero samples');\n }\n\n const step = (stop - start) / (num - 1);\n\n const values = makeZerosTypedArray(num, 'float32');\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n\n return tensor1d(values, 'float32');\n}\n\n/**\n * Creates a new `tf.Tensor1D` filled with the numbers in the range provided.\n *\n * The tensor is a is half-open interval meaning it includes start, but\n * excludes stop. Decrementing ranges and negative step values are also\n * supported.\n *\n * ```js\n * tf.range(0, 9, 2).print();\n * ```\n *\n * @param start An integer start value\n * @param stop An integer stop value\n * @param step An integer increment (will default to 1 or -1)\n * @param dtype The data type of the output tensor. 
Defaults to 'float32'.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction range(\n start: number, stop: number, step = 1,\n dtype: 'float32'|'int32' = 'float32'): Tensor1D {\n if (step === 0) {\n throw new Error('Cannot have a step of zero');\n }\n\n const sameStartStop = start === stop;\n const increasingRangeNegativeStep = start < stop && step < 0;\n const decreasingRangePositiveStep = stop < start && step > 1;\n\n if (sameStartStop || increasingRangeNegativeStep ||\n decreasingRangePositiveStep) {\n return zeros([0], dtype);\n }\n\n const numElements = Math.abs(Math.ceil((stop - start) / step));\n const values = makeZerosTypedArray(numElements, dtype);\n\n if (stop < start && step === 1) {\n // Auto adjust the step's sign if it hasn't been set\n // (or was set to 1)\n step = -1;\n }\n\n values[0] = start;\n for (let i = 1; i < values.length; i++) {\n values[i] = values[i - 1] + step;\n }\n\n return tensor1d(values, dtype);\n}\n\nexport {\n fill,\n linspace,\n ones,\n range,\n scalar,\n tensor,\n tensor1d,\n tensor2d,\n tensor3d,\n tensor4d,\n tensor5d,\n tensor6d,\n zeros\n};\n\nexport const onesLike = op({onesLike_});\nexport const zerosLike = op({zerosLike_});\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport enum DType {\n float32 = 'float32',\n int32 = 'int32',\n bool = 'bool'\n}\n\n/** @docalias number[] */\nexport interface ShapeMap {\n R0: number[];\n R1: [number];\n R2: [number, number];\n R3: [number, number, number];\n R4: [number, number, number, number];\n R5: [number, number, number, number, number];\n R6: [number, number, number, number, number, number];\n}\n\n/** @hidden */\nexport interface DataTypeMap {\n float32: Float32Array;\n int32: Int32Array;\n bool: Uint8Array;\n complex64: Float32Array;\n}\n/** @docalias 'float32'|'int32'|'bool'|'complex64' */\nexport type DataType = keyof DataTypeMap;\nexport type TypedArray = DataTypeMap[DataType];\n\nexport enum Rank {\n R0 = 'R0',\n R1 = 'R1',\n R2 = 'R2',\n R3 = 'R3',\n R4 = 'R4',\n R5 = 'R5',\n R6 = 'R6'\n}\n\nexport type FlatVector = boolean[]|number[]|TypedArray;\nexport type RegularArray<T> =\n T[]|T[][]|T[][][]|T[][][][]|T[][][][][]|T[][][][][][];\nexport type ArrayData<D extends DataType> =\n DataTypeMap[D]|RegularArray<number>|RegularArray<boolean>;\n\n// tslint:disable-next-line:no-any\nexport interface RecursiveArray<T extends any> {\n [index: number]: T|RecursiveArray<T>;\n}\n\n// Looks for upcasting types. 
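The creation ops above already carry doc examples; the one behavior that is only described in code comments is how range() normalizes its step. A small sketch of those cases:

```js
tf.range(0, 9, 2).print();  // [0, 2, 4, 6, 8]
tf.range(9, 0).print();     // [9, 8, 7, 6, 5, 4, 3, 2, 1] -- step auto-adjusts to -1
tf.range(3, 3).print();     // empty tensor: equal start and stop
// A decreasing range with an explicit step > 1 is also treated as empty,
// per the decreasingRangePositiveStep check above.
```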
Used, for example, in operations with mixed dtype\n// inputs.\nenum UpcastInt32AndMap {\n 'float32' = 'float32',\n 'int32' = 'int32',\n 'bool' = 'int32',\n 'complex64' = 'complex64'\n}\n\nenum UpcastBoolAndMap {\n 'float32' = 'float32',\n 'int32' = 'int32',\n 'bool' = 'bool',\n 'complex64' = 'complex64'\n}\n\nenum UpcastFloat32AndMap {\n 'float32' = 'float32',\n 'int32' = 'float32',\n 'bool' = 'float32',\n 'complex64' = 'complex64'\n}\n\nenum UpcastComplex64AndMap {\n 'float32' = 'complex64',\n 'int32' = 'complex64',\n 'bool' = 'complex64',\n 'complex64' = 'complex64'\n}\n\nconst upcastTypeMap = {\n 'float32': UpcastFloat32AndMap,\n 'int32': UpcastInt32AndMap,\n 'bool': UpcastBoolAndMap,\n 'complex64': UpcastComplex64AndMap\n};\n\nexport function upcastType(typeA: DataType, typeB: DataType): DataType {\n return upcastTypeMap[typeA][typeB];\n}\n\n/** Returns the output type after summation. */\nexport function sumOutType(type: DataType): DataType {\n return upcastType(type, 'int32');\n}\n\n/** @docalias TypedArray|Array */\nexport type TensorLike =\n TypedArray|number|boolean|number[]|number[][]|number[][][]|number[][][][]|\n number[][][][][]|number[][][][][][]|boolean[]|boolean[][]|boolean[][][]|\n boolean[][][][]|boolean[][][][][]|boolean[][][][][][];\n/** @docalias TypedArray|Array */\nexport type TensorLike1D = TypedArray|number[]|boolean[];\n/** @docalias TypedArray|Array */\nexport type TensorLike2D = TypedArray|number[]|number[][]|boolean[]|boolean[][];\n/** @docalias TypedArray|Array */\nexport type TensorLike3D =\n TypedArray|number[]|number[][][]|boolean[]|boolean[][][];\n/** @docalias TypedArray|Array */\nexport type TensorLike4D =\n TypedArray|number[]|number[][][][]|boolean[]|boolean[][][][];\n/** @docalias TypedArray|Array */\nexport type TensorLike5D =\n TypedArray|number[]|number[][][][][]|boolean[]|boolean[][][][][];\n/** @docalias TypedArray|Array */\nexport type TensorLike6D =\n TypedArray|number[]|number[][][][][][]|boolean[]|boolean[][][][][][];\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../ops/conv_util';\nimport {DataId, Scalar, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D} from '../tensor';\nimport {DataType, Rank, ShapeMap, TypedArray} from '../types';\n\n// Required information for all backends.\nexport interface BackendTimingInfo {\n kernelMs: number;\n getExtraProfileInfo?(): string; // a field for additional timing information\n // e.g. 
packing / unpacking for WebGL backend\n}\n\nexport interface TensorStorage {\n read(dataId: DataId): Promise<TypedArray>;\n readSync(dataId: DataId): TypedArray;\n disposeData(dataId: DataId): void;\n write(dataId: DataId, values: TypedArray): void;\n fromPixels(\n pixels: ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement,\n numChannels: number): Tensor3D;\n register(dataId: DataId, shape: number[], dtype: DataType): void;\n memory(): {unreliable: boolean;}; // Backend-specific information.\n}\n\n/** Convenient class for storing tensor-related data. */\nexport class DataStorage<T> {\n private data = new WeakMap<DataId, T>();\n\n constructor(private dataMover: DataMover) {}\n\n get(dataId: DataId) {\n if (!this.data.has(dataId)) {\n this.dataMover.moveData(dataId);\n }\n return this.data.get(dataId);\n }\n\n set(dataId: DataId, value: T): void {\n this.data.set(dataId, value);\n }\n\n has(dataId: DataId): boolean {\n return this.data.has(dataId);\n }\n\n delete(dataId: DataId): boolean {\n return this.data.delete(dataId);\n }\n}\n\nexport interface DataMover {\n /**\n * To be called by backends whenever they see a dataId that they don't own.\n * Upon calling this method, the mover will fetch the tensor from another\n * backend and register it with the current active backend.\n */\n moveData(dataId: DataId): void;\n}\n\nexport interface BackendTimer {\n time(f: () => void): Promise<BackendTimingInfo>;\n}\n\n/**\n * The interface that defines the kernels that should be implemented when\n * adding a new backend. New backends don't need to implement every one of the\n * methods, this can be done gradually (throw an error for unimplemented\n * methods).\n */\nexport class KernelBackend implements TensorStorage, BackendTimer {\n time(f: () => void): Promise<BackendTimingInfo> {\n throw new Error('Not yet implemented.');\n }\n read(dataId: object): Promise<Float32Array|Int32Array|Uint8Array> {\n throw new Error('Not yet implemented.');\n }\n readSync(dataId: object): Float32Array|Int32Array|Uint8Array {\n throw new Error('Not yet implemented.');\n }\n disposeData(dataId: object): void {\n throw new Error('Not yet implemented.');\n }\n write(dataId: object, values: Float32Array|Int32Array|Uint8Array): void {\n throw new Error('Not yet implemented.');\n }\n fromPixels(\n pixels: ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement,\n numChannels: number): Tensor<Rank.R3> {\n throw new Error('Not yet implemented.');\n }\n register(\n dataId: object, shape: number[],\n dtype: 'float32'|'int32'|'bool'|'complex64'): void {\n throw new Error('Not yet implemented.');\n }\n memory(): {unreliable: boolean;} {\n throw new Error('Not yet implemented.');\n }\n /** Returns the highest precision for floats in bits (e.g. 
16 or 32) */\n floatPrecision(): number {\n throw new Error('Not yet implemented');\n }\n\n batchMatMul(\n a: Tensor3D, b: Tensor3D, transposeA: boolean,\n transposeB: boolean): Tensor3D {\n throw new Error('Not yet implemented');\n }\n\n slice<T extends Tensor>(x: T, begin: number[], size: number[]): T {\n throw new Error('Not yet implemented');\n }\n stridedSlice<T extends Tensor>(\n x: T, begin: number[], end: number[], strides: number[],\n beginMask: number, endMask: number, ellipsisMask: number,\n newAxisMask: number, shrinkAxisMask: number): T {\n throw new Error('Not yet implemented');\n }\n reverse<T extends Tensor>(a: T, axis: number[]): T {\n throw new Error('Not yet implemented');\n }\n\n concat(tensors: Tensor[], axis: number): Tensor {\n throw new Error('Not yet implemented');\n }\n\n neg<T extends Tensor>(a: T): T {\n throw new Error('Not yet implemented');\n }\n\n add(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n addN<T extends Tensor>(tensors: T[]): T {\n throw new Error('Not yet implemented');\n }\n subtract(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n multiply(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n realDivide(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n floorDiv(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n sum(x: Tensor, axes: number[]): Tensor {\n throw new Error('Not yet implemented');\n }\n prod(x: Tensor, axes: number[]): Tensor {\n throw new Error('Not yet implemented');\n }\n\n unsortedSegmentSum<T extends Tensor>(\n x: T, segmentIds: Tensor1D, numSegments: number): Tensor {\n throw new Error('Not yet implemented');\n }\n\n argMin(x: Tensor, axis: number): Tensor {\n throw new Error('Not yet implemented');\n }\n argMax(x: Tensor, axis: number): Tensor {\n throw new Error('Not yet implemented');\n }\n\n equal(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n notEqual(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n less(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n lessEqual(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n greater(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n greaterEqual(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n logicalNot<T extends Tensor>(a: T): T {\n throw new Error('Not yet implemented');\n }\n logicalAnd(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n logicalOr(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n where(condition: Tensor): Tensor2D {\n throw new Error('Not yet implemented');\n }\n select(condition: Tensor, a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n topk<T extends Tensor>(x: T, k: number, sorted: boolean): [T, T] {\n throw new Error('Not yet implemented');\n }\n\n min(x: Tensor, axes: number[]): Tensor {\n throw new Error('Not yet implemented');\n }\n minimum(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n mod(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n max(x: Tensor, axes: number[]): Tensor {\n throw new Error('Not yet implemented');\n }\n maximum(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n all(x: Tensor, axes: number[]): Tensor {\n throw new 
Error('Not yet implemented');\n }\n any(x: Tensor, axes: number[]): Tensor {\n throw new Error('Not yet implemented');\n }\n\n squaredDifference(a: Tensor, b: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n ceil<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n floor<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n round<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n sign<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n pow<T extends Tensor>(a: T, b: Tensor): T {\n throw new Error('Not yet implemented');\n }\n exp<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n expm1<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n log<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n log1p<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n sqrt<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n rsqrt<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n square<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n reciprocal<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n relu<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n elu<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n eluDer<T extends Tensor>(dy: T, y: T): T {\n throw new Error('Not yet implemented');\n }\n selu<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n int<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n clip<T extends Tensor>(x: T, min: number, max: number): T {\n throw new Error('Not yet implemented');\n }\n\n abs<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n complexAbs<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n sigmoid<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n softplus<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n sin<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n cos<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n tan<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n asin<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n acos<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n atan<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n atan2<T extends Tensor>(a: T, b: T): T {\n throw new Error('Not yet implemented');\n }\n\n sinh<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n cosh<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n tanh<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n asinh<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n acosh<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n atanh<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n erf<T extends Tensor>(x: T): T {\n throw new Error('Not yet implemented');\n }\n\n step<T extends Tensor>(x: T, alpha: number): T {\n throw new Error('Not yet implemented');\n }\n\n conv2d(x: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n throw new Error('Not 
yet implemented');\n }\n conv2dDerInput(dy: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n throw new Error('Not yet implemented');\n }\n conv2dDerFilter(x: Tensor4D, dY: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n depthwiseConv2D(input: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n throw new Error('Not yet implemented');\n }\n depthwiseConv2DDerInput(dy: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n throw new Error('Not yet implemented');\n }\n depthwiseConv2DDerFilter(x: Tensor4D, dY: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n maxPool(x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n throw new Error('Not yet implemented');\n }\n maxPoolBackprop(dy: Tensor4D, x: Tensor4D, y: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n throw new Error('Not yet implemented');\n }\n avgPool(x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n throw new Error('Not yet implemented');\n }\n avgPoolBackprop(dy: Tensor4D, x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n reshape<T extends Tensor, R extends Rank>(x: T, shape: ShapeMap[R]):\n Tensor<R> {\n throw new Error('Not yet implemented');\n }\n cast<T extends Tensor>(x: T, dtype: DataType): T {\n throw new Error('Not yet implemented');\n }\n\n tile<T extends Tensor>(x: T, reps: number[]): T {\n throw new Error('Not yet implemented');\n }\n\n pad<T extends Tensor>(\n x: T, paddings: Array<[number, number]>, constantValue: number): T {\n throw new Error('Not yet implemented');\n }\n\n transpose<T extends Tensor>(x: T, perm: number[]): T {\n throw new Error('Not yet implemented');\n }\n\n gather<T extends Tensor>(x: T, indices: Tensor1D, axis: number): T {\n throw new Error('Not yet implemented');\n }\n\n gatherND(x: Tensor, indices: Tensor): Tensor {\n throw new Error('Not yet implemented');\n }\n\n scatterND<R extends Rank>(\n indices: Tensor, updates: Tensor, shape: ShapeMap[R]): Tensor<R> {\n throw new Error('Not yet implemented');\n }\n\n batchToSpaceND<T extends Tensor>(\n x: T, blockShape: number[], crops: number[][]): T {\n throw new Error('Not yet implemented');\n }\n\n spaceToBatchND<T extends Tensor>(\n x: T, blockShape: number[], paddings: number[][]): T {\n throw new Error('Not yet implemented');\n }\n\n resizeBilinear(\n x: Tensor4D, newHeight: number, newWidth: number,\n alignCorners: boolean): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n resizeBilinearBackprop(dy: Tensor4D, x: Tensor4D, alignCorners: boolean):\n Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n resizeNearestNeighbor(\n x: Tensor4D, newHEight: number, newWidth: number,\n alignCorners: boolean): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n resizeNearestNeighborBackprop(\n dy: Tensor4D, x: Tensor4D, alignCorners: boolean): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n batchNormalization(\n x: Tensor4D, mean: Tensor4D|Tensor1D, variance: Tensor4D|Tensor1D,\n varianceEpsilon: number, scale?: Tensor4D|Tensor1D,\n offset?: Tensor4D|Tensor1D): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n localResponseNormalization4D(\n x: Tensor4D, radius: number, bias: number, alpha: number,\n beta: number): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n LRNGrad(\n dy: Tensor4D, inputImage: Tensor4D, outputImage: Tensor4D, radius: number,\n bias: number, alpha: number, beta: number): Tensor4D {\n throw new 
Error('Not yet implemented');\n }\n\n multinomial(\n logits: Tensor2D, normalized: boolean, numSamples: number,\n seed: number): Tensor2D {\n throw new Error('Not yet implemented');\n }\n\n oneHot(indices: Tensor1D, depth: number, onValue: number, offValue: number):\n Tensor2D {\n throw new Error('Not yet implemented');\n }\n\n cumsum(x: Tensor, axis: number, exclusive: boolean, reverse: boolean):\n Tensor {\n throw new Error('Not yet implemented');\n }\n\n nonMaxSuppression(\n boxes: Tensor2D, scores: Tensor1D, maxOutputSize: number,\n iouThreshold: number, scoreThreshold?: number): Tensor1D {\n throw new Error('Not yet implemented');\n }\n\n fft(x: Tensor2D): Tensor2D {\n throw new Error('Not yet implemented');\n }\n ifft(x: Tensor2D): Tensor2D {\n throw new Error('Not yet implemented');\n }\n complex<T extends Tensor>(real: T, imag: T): T {\n throw new Error('Not yet implemented');\n }\n real<T extends Tensor>(input: T): T {\n throw new Error('Not yet implemented');\n }\n imag<T extends Tensor>(input: T): T {\n throw new Error('Not yet implemented');\n }\n\n cropAndResize(\n image: Tensor4D, boxes: Tensor2D, boxIndex: Tensor1D,\n cropSize: [number, number], method: 'bilinear'|'nearest',\n extrapolationValue: number): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n depthToSpace(x: Tensor4D, blockSize: number, dataFormat: string): Tensor4D {\n throw new Error('Not yet implemented');\n }\n\n // Aligns with the \"SplitV\" kernel in TensorFlow.\n split<T extends Tensor>(value: T, sizeSplits: number[], axis: number): T[] {\n throw new Error('Not yet implemented');\n }\n\n sparseToDense<R extends Rank>(\n sparseIndices: Tensor, sparseValues: Tensor, outputShape: ShapeMap[R],\n defaultValue: Scalar): Tensor<R> {\n throw new Error('Not yet implemented');\n }\n /**\n * Sets the data mover for this backend. Backends should use the mover to\n * move data from other backends to this backend.\n */\n setDataMover(dataMover: DataMover): void {\n throw new Error('Not yet implemented');\n }\n\n dispose(): void {\n throw new Error('Not yet implemented');\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
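The KernelBackend methods above all follow the same pattern: the base class throws until a concrete backend (WebGL, CPU) overrides the kernel it supports. A minimal sketch of that pattern, using hypothetical names (MyBackendBase, CpuAdd) rather than the real tfjs-core API:

// Illustrative only: the "throw until overridden" base-class pattern.
class MyBackendBase {
  add(a: number[], b: number[]): number[] {
    throw new Error('Not yet implemented');
  }
  dispose(): void {
    throw new Error('Not yet implemented');
  }
}

class CpuAdd extends MyBackendBase {
  // A concrete backend overrides only the kernels it implements.
  add(a: number[], b: number[]): number[] {
    return a.map((v, i) => v + b[i]);
  }
  dispose(): void {}
}

const backend: MyBackendBase = new CpuAdd();
console.log(backend.add([1, 2], [3, 4])); // [4, 6]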
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {scalar, zeros} from '../ops/tensor_ops';\nimport {Tensor} from '../tensor';\nimport {Rank} from '../types';\nimport {DataType, ShapeMap} from '../types';\nimport {hasEncodingLoss} from '../util';\nimport {KernelBackend} from './backend';\n\nexport function castTensor<T extends Tensor>(\n x: T, dtype: DataType, backend: KernelBackend): T {\n if (dtype === 'complex64') {\n if (x.dtype === 'complex64') {\n return x.clone();\n }\n const zerosTensor = zeros(x.shape);\n const floatX = x.toFloat();\n const result = backend.complex(floatX, zerosTensor);\n zerosTensor.dispose();\n floatX.dispose();\n return result as T;\n }\n\n if (!hasEncodingLoss(x.dtype, dtype)) {\n // We don't change the underlying data, since we cast to higher\n // precision.\n return Tensor.make(x.shape, {dataId: x.dataId}, dtype) as T;\n }\n if (x.dtype === 'complex64') {\n const real = backend.real(x);\n const result = real.cast(dtype);\n real.dispose();\n return result;\n }\n if (dtype === 'int32') {\n return backend.int(x);\n } else if (dtype === 'bool') {\n const zero = scalar(0, x.dtype);\n const result = backend.notEqual(x, zero) as T;\n zero.dispose();\n return result;\n } else {\n throw new Error(`Error in Cast: unknown dtype argument (${dtype})`);\n }\n}\n\nexport function reshapeTensor<T extends Tensor, R extends Rank>(\n x: T, shape: ShapeMap[R]): Tensor<R> {\n return Tensor.make(shape, {dataId: x.dataId}, x.dtype);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {TypedArray} from '../types';\n/**\n * Merges real and imaginary Float32Arrays into a single complex Float32Array.\n *\n * The memory layout is interleaved as follows:\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n * complex: [r0, i0, r1, i1, r2, i2]\n *\n * This is the inverse of splitRealAndImagArrays.\n *\n * @param real The real values of the complex tensor values.\n * @param imag The imag values of the complex tensor values.\n * @returns A complex tensor as a Float32Array with merged values.\n */\nexport function mergeRealAndImagArrays(\n real: Float32Array, imag: Float32Array): Float32Array {\n if (real.length !== imag.length) {\n throw new Error(\n `Cannot merge real and imag arrays of different lengths. real:` +\n `${real.length}, imag: ${imag.length}.`);\n }\n const result = new Float32Array(real.length * 2);\n for (let i = 0; i < result.length; i += 2) {\n result[i] = real[i / 2];\n result[i + 1] = imag[i / 2];\n }\n return result;\n}\n\n/**\n * Splits a complex Float32Array into real and imag parts.\n *\n * The memory layout is interleaved as follows:\n * complex: [r0, i0, r1, i1, r2, i2]\n * real: [r0, r1, r2]\n * imag: [i0, i1, i2]\n *\n * This is the inverse of mergeRealAndImagArrays.\n *\n * @param complex The complex tensor values.\n * @returns An object with real and imag Float32Array components of the complex\n * tensor.\n */\nexport function splitRealAndImagArrays(complex: Float32Array):\n {real: Float32Array, imag: Float32Array} {\n const real = new Float32Array(complex.length / 2);\n const imag = new Float32Array(complex.length / 2);\n for (let i = 0; i < complex.length; i += 2) {\n real[i / 2] = complex[i];\n imag[i / 2] = complex[i + 1];\n }\n return {real, imag};\n}\n\n/**\n * Extracts even indexed complex values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithEvenIndex(complex: Float32Array):\n {real: Float32Array, imag: Float32Array} {\n const len = Math.ceil(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 0; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return {real, imag};\n}\n\n/**\n * Extracts odd indexed comple values in the given array.\n * @param complex The complex tensor values\n */\nexport function complexWithOddIndex(complex: Float32Array):\n {real: Float32Array, imag: Float32Array} {\n const len = Math.floor(complex.length / 4);\n const real = new Float32Array(len);\n const imag = new Float32Array(len);\n for (let i = 2; i < complex.length; i += 4) {\n real[Math.floor(i / 4)] = complex[i];\n imag[Math.floor(i / 4)] = complex[i + 1];\n }\n return {real, imag};\n}\n\n/**\n * Get the map representing a complex value in the given array.\n * @param complex The complex tensor values.\n * @param index 
An index of the target complex value.\n */\nexport function getComplexWithIndex(\n complex: Float32Array, index: number): {real: number, imag: number} {\n const real = complex[index * 2];\n const imag = complex[index * 2 + 1];\n return {real, imag};\n}\n\n/**\n * Insert a given complex value into the TypedArray.\n * @param data The array in which the complex value is inserted.\n * @param c The complex value to be inserted.\n * @param index An index of the target complex value.\n */\nexport function assignToTypedArray(\n data: TypedArray, real: number, imag: number, index: number) {\n data[index * 2] = real;\n data[index * 2 + 1] = imag;\n}\n\n/**\n * Make the list of exponent terms used by FFT.\n */\nexport function exponents(\n n: number, inverse: boolean): {real: Float32Array, imag: Float32Array} {\n const real = new Float32Array(n / 2);\n const imag = new Float32Array(n / 2);\n for (let i = 0; i < Math.ceil(n / 2); i++) {\n const x = (inverse ? 2 : -2) * Math.PI * (i / n);\n real[i] = Math.cos(x);\n imag[i] = Math.sin(x);\n }\n return {real, imag};\n}\n\n/**\n * Make the exponent term used by FFT.\n */\nexport function exponent(\n k: number, n: number, inverse: boolean): {real: number, imag: number} {\n const x = (inverse ? 2 : -2) * Math.PI * (k / n);\n const real = Math.cos(x);\n const imag = Math.sin(x);\n return {real, imag};\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * Implementation of the NonMaxSuppression kernel shared between webgl and cpu.\n */\n\nimport {tensor1d} from '../ops/tensor_ops';\nimport {Tensor1D} from '../tensor';\nimport {TypedArray} from '../types';\n\nexport function nonMaxSuppressionImpl(\n boxes: TypedArray, scores: TypedArray, maxOutputSize: number,\n iouThreshold: number, scoreThreshold: number): Tensor1D {\n const candidates = Array.from(scores)\n .map((score, boxIndex) => ({score, boxIndex}))\n .filter(c => c.score > scoreThreshold)\n .sort((c1, c2) => c2.score - c1.score);\n\n const selected: number[] = [];\n\n for (let i = 0; i < candidates.length; i++) {\n const {score, boxIndex} = candidates[i];\n if (score < scoreThreshold) {\n break;\n }\n\n let ignoreCandidate = false;\n for (let j = selected.length - 1; j >= 0; --j) {\n const iou = intersectionOverUnion(boxes, boxIndex, selected[j]);\n if (iou >= iouThreshold) {\n ignoreCandidate = true;\n break;\n }\n }\n\n if (!ignoreCandidate) {\n selected.push(boxIndex);\n if (selected.length >= maxOutputSize) {\n break;\n }\n }\n }\n\n return tensor1d(selected, 'int32');\n}\n\nfunction intersectionOverUnion(boxes: TypedArray, i: number, j: number) {\n const iCoord = boxes.subarray(i * 4, i * 4 + 4);\n const jCoord = boxes.subarray(j * 4, j * 4 + 4);\n const yminI = Math.min(iCoord[0], iCoord[2]);\n const xminI = Math.min(iCoord[1], iCoord[3]);\n const ymaxI = Math.max(iCoord[0], iCoord[2]);\n const xmaxI = Math.max(iCoord[1], 
iCoord[3]);\n const yminJ = Math.min(jCoord[0], jCoord[2]);\n const xminJ = Math.min(jCoord[1], jCoord[3]);\n const ymaxJ = Math.max(jCoord[0], jCoord[2]);\n const xmaxJ = Math.max(jCoord[1], jCoord[3]);\n const areaI = (ymaxI - yminI) * (xmaxI - xminI);\n const areaJ = (ymaxJ - yminJ) * (xmaxJ - xminJ);\n if (areaI <= 0 || areaJ <= 0) {\n return 0.0;\n }\n const intersectionYmin = Math.max(yminI, yminJ);\n const intersectionXmin = Math.max(xminI, xminJ);\n const intersectionYmax = Math.min(ymaxI, ymaxJ);\n const intersectionXmax = Math.min(xmaxI, xmaxJ);\n const intersectionArea = Math.max(intersectionYmax - intersectionYmin, 0.0) *\n Math.max(intersectionXmax - intersectionXmin, 0.0);\n return intersectionArea / (areaI + areaJ - intersectionArea);\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor} from '../tensor';\n\n/** Shared implementation of the split kernel across WebGL and CPU. */\nexport function split<T extends Tensor>(\n x: T, sizeSplits: number[], axis: number): T[] {\n const begin = Array(x.rank).fill(0);\n const size = x.shape.slice();\n return sizeSplits.map(s => {\n size[axis] = s;\n const slice = x.slice(begin, size);\n begin[axis] += s;\n return slice;\n });\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/** An implementation of the TopK kernel shared between webgl and cpu. 
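The greedy suppression loop and the intersection-over-union test above can be restated compactly over plain arrays. An illustrative sketch (it assumes boxes are already given as [ymin, xmin, ymax, xmax]; the kernel above normalizes corner order with min/max):

// Standalone sketch of greedy non-max suppression over plain arrays.
function iou(a: number[], b: number[]): number {
  const areaA = Math.max(a[2] - a[0], 0) * Math.max(a[3] - a[1], 0);
  const areaB = Math.max(b[2] - b[0], 0) * Math.max(b[3] - b[1], 0);
  if (areaA <= 0 || areaB <= 0) return 0;
  const h = Math.min(a[2], b[2]) - Math.max(a[0], b[0]);
  const w = Math.min(a[3], b[3]) - Math.max(a[1], b[1]);
  const inter = Math.max(h, 0) * Math.max(w, 0);
  return inter / (areaA + areaB - inter);
}

function nms(boxes: number[][], scores: number[], maxOut: number,
             iouThreshold: number, scoreThreshold: number): number[] {
  // Score-descending candidates above the score threshold.
  const order = scores
      .map((score, i) => ({score, i}))
      .filter(c => c.score > scoreThreshold)
      .sort((a, b) => b.score - a.score);
  const selected: number[] = [];
  for (const {i} of order) {
    // Reject a candidate that overlaps too much with any already-kept box.
    if (selected.some(s => iou(boxes[i], boxes[s]) >= iouThreshold)) continue;
    selected.push(i);
    if (selected.length >= maxOut) break;
  }
  return selected;
}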
*/\n\nimport {tensor} from '../ops/tensor_ops';\nimport {Tensor} from '../tensor';\nimport {DataType, TypedArray} from '../types';\nimport {getTypedArrayFromDType} from '../util';\n\nexport function topkImpl<T extends Tensor>(\n x: TypedArray, xShape: number[], xDtype: DataType, k: number,\n sorted: boolean): [T, T] {\n // Reshape into a 2d tensor [batch, lastDim] and compute topk along lastDim.\n const lastDim = xShape[xShape.length - 1];\n const [batch, size] = [x.length / lastDim, lastDim];\n const allTopKVals = getTypedArrayFromDType(xDtype, batch * k);\n const allTopKIndices = getTypedArrayFromDType('int32', batch * k);\n\n for (let b = 0; b < batch; b++) {\n const offset = b * size;\n const vals = x.subarray(offset, offset + size);\n const valAndInd: Array<{value: number, index: number}> = [];\n for (let i = 0; i < vals.length; i++) {\n valAndInd.push({value: vals[i], index: i});\n }\n valAndInd.sort((a, b) => b.value - a.value);\n\n const outOffset = b * k;\n const topKVals = allTopKVals.subarray(outOffset, outOffset + k);\n const topKIndices = allTopKIndices.subarray(outOffset, outOffset + k);\n for (let i = 0; i < k; i++) {\n topKVals[i] = valAndInd[i].value;\n topKIndices[i] = valAndInd[i].index;\n }\n }\n // Reshape back to the original input shape, except that the last\n // dimension is k.\n const outputShape = xShape.slice();\n outputShape[outputShape.length - 1] = k;\n return [\n tensor(allTopKVals, outputShape, xDtype) as T,\n tensor(allTopKIndices, outputShape, 'int32') as T\n ];\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ReduceInfo} from '../../ops/reduce_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ArgMinMaxProgram implements GPGPUProgram {\n variableNames = ['A'];\n outputShape: number[];\n userCode: string;\n\n constructor(reduceInfo: ReduceInfo, op: 'max'|'min', firstPass: boolean) {\n const windowSize = reduceInfo.windowSize;\n const batchSize = reduceInfo.batchSize;\n const inSize = reduceInfo.inSize;\n const outSize = Math.ceil(inSize / windowSize);\n if (!firstPass) {\n this.variableNames.push('bestIndicesA');\n }\n this.outputShape = [batchSize, outSize];\n const compOp = (op === 'max') ? '>' : '<';\n const indexSnippet = firstPass ?\n 'inOffset + i;' :\n 'round(getBestIndicesA(batch, inOffset + i));';\n\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n int bestIndex = inOffset;\n float bestValue = getA(batch, bestIndex);\n\n for (int i = 0; i < ${windowSize}; i++) {\n int inIdx = ${indexSnippet};\n float candidate = getA(batch, inIdx);\n if (candidate ${compOp} bestValue) {\n bestValue = candidate;\n bestIndex = inIdx;\n }\n }\n setOutput(float(bestIndex));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
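topkImpl above sorts {value, index} pairs per batch row and keeps the first k. A plain-array sketch of that per-row step:

// Top-k over a single row, mirroring the per-batch sort-and-slice above.
function topk(values: number[], k: number): {values: number[], indices: number[]} {
  const ranked = values
      .map((value, index) => ({value, index}))
      .sort((a, b) => b.value - a.value)   // descending by value
      .slice(0, k);
  return {
    values: ranked.map(r => r.value),
    indices: ranked.map(r => r.index),
  };
}

console.log(topk([1, 9, 3, 7], 2)); // { values: [9, 7], indices: [1, 3] }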
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../../ops/conv_util';\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class AvgPool2DBackpropProgram implements GPGPUProgram {\n variableNames = ['dy'];\n outputShape: number[];\n userCode: string;\n\n constructor(convInfo: Conv2DInfo) {\n this.outputShape = convInfo.inShape;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float avgMultiplier = float(${avgMultiplier});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC+= ${dilationWidth}) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n\n dotProd += dyValue * avgMultiplier;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport function getBroadcastDims(\n inShape: number[], outShape: number[]): number[] {\n const inRank = inShape.length;\n const dims: number[] = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n}\n\n/**\n * Returns the axes in the output space that should be reduced to produce\n * the input space.\n */\nexport function getReductionAxes(\n inShape: number[], outShape: number[]): number[] {\n const result: number[] = [];\n for (let i = 0; i < outShape.length; i++) {\n const inDim = inShape[inShape.length - i - 1];\n const outAxis = outShape.length - i - 1;\n const outDim = outShape[outAxis];\n if (inDim == null || (inDim === 1 && outDim > 1)) {\n result.unshift(outAxis);\n }\n }\n return result;\n}\n\n/**\n * Given the output of `getBroadcastDims()`, returns true if the broadcasting\n * is along the outer-most dimensions of the input.\n */\nexport function broadcastDimsAreOuter(dims: number[]): boolean {\n for (let i = 0; i < dims.length; i++) {\n if (dims[i] !== i) {\n return false;\n }\n }\n return true;\n}\n\nexport function assertAndGetBroadcastShape(\n shapeA: number[], shapeB: number[]): number[] {\n const result: number[] = [];\n const l = Math.max(shapeA.length, shapeB.length);\n\n for (let i = 0; i < l; i++) {\n let a = shapeA[shapeA.length - i - 1];\n if (a == null) {\n a = 1;\n }\n let b = shapeB[shapeB.length - i - 1];\n if (b == null) {\n b = 1;\n }\n if (a === 1) {\n result.unshift(b);\n } else if (b === 1) {\n result.unshift(a);\n } else if (a !== b) {\n const errMsg = `Operands could not be broadcast together with shapes ` +\n `${shapeA} and ${shapeB}.`;\n throw Error(errMsg);\n } else {\n result.unshift(a);\n }\n }\n return result;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
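A worked restatement of the right-aligned broadcasting rule implemented by assertAndGetBroadcastShape above, using the example from the getBroadcastDims comment ([4, 1, 3] broadcast against [5, 4, 3, 3]); this is a simplified sketch, not the library function:

// Dimensions are compared from the right; missing dimensions act as 1.
function broadcastShape(a: number[], b: number[]): number[] {
  const out: number[] = [];
  const rank = Math.max(a.length, b.length);
  for (let i = 0; i < rank; i++) {
    const da = a.length - 1 - i >= 0 ? a[a.length - 1 - i] : 1;
    const db = b.length - 1 - i >= 0 ? b[b.length - 1 - i] : 1;
    if (da !== db && da !== 1 && db !== 1) {
      throw new Error(`Shapes ${a} and ${b} are not broadcast-compatible.`);
    }
    out.unshift(Math.max(da, db));
  }
  return out;
}

console.log(broadcastShape([4, 1, 3], [5, 4, 3, 3])); // [5, 4, 3, 3]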
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as broadcast_util from '../../ops/broadcast_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class BatchNormProgram implements GPGPUProgram {\n variableNames: string[];\n outputShape: number[] = [];\n userCode: string;\n supportsBroadcasting = true;\n\n constructor(\n xShape: number[], meanShape: number[], varianceShape: number[],\n offsetShape: number[]|null, scaleShape: number[]|null,\n varianceEpsilon: number) {\n this.variableNames = ['x', 'mean', 'variance'];\n broadcast_util.assertAndGetBroadcastShape(xShape, meanShape);\n broadcast_util.assertAndGetBroadcastShape(xShape, varianceShape);\n\n let offsetSnippet = '0.0';\n if (offsetShape != null) {\n broadcast_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = 'getOffsetAtOutCoords()';\n }\n\n let scaleSnippet = '1.0';\n if (scaleShape != null) {\n broadcast_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = 'getScaleAtOutCoords()';\n }\n\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n float x = getXAtOutCoords();\n float mean = getMeanAtOutCoords();\n float variance = getVarianceAtOutCoords();\n float offset = ${offsetSnippet};\n float scale = ${scaleSnippet};\n float inv = scale * inversesqrt(variance + float(${varianceEpsilon}));\n setOutput(dot(vec3(x, -mean, offset), vec3(inv, inv, 1)));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as broadcast_util from '../../ops/broadcast_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class BatchNormPackedProgram implements GPGPUProgram {\n variableNames: string[];\n outputShape: number[];\n userCode: string;\n supportsBroadcasting = true;\n usesPackedTextures = true;\n\n constructor(\n xShape: number[], meanShape: number[], varianceShape: number[],\n offsetShape: number[]|null, scaleShape: number[]|null,\n varianceEpsilon: number) {\n this.variableNames = ['x', 'mean', 'variance'];\n broadcast_util.assertAndGetBroadcastShape(xShape, meanShape);\n broadcast_util.assertAndGetBroadcastShape(xShape, varianceShape);\n\n const meanSnippet = broadcastSample('mean', meanShape.length);\n const varianceSnippet = broadcastSample('variance', varianceShape.length);\n\n let offsetSnippet = 'vec4 offset = vec4(0.0)';\n if (offsetShape != null) {\n broadcast_util.assertAndGetBroadcastShape(xShape, offsetShape);\n this.variableNames.push('offset');\n offsetSnippet = broadcastSample('offset', offsetShape.length);\n }\n\n let scaleSnippet = 'vec4 scale = vec4(1.0)';\n if (scaleShape != null) {\n broadcast_util.assertAndGetBroadcastShape(xShape, scaleShape);\n this.variableNames.push('scale');\n scaleSnippet = broadcastSample('scale', scaleShape.length);\n }\n\n this.outputShape = xShape;\n this.userCode = `\n void main() {\n ivec4 rc = getOutputCoords();\n\n ${offsetSnippet};\n ${scaleSnippet};\n\n vec4 x = getX(rc.x, rc.y, rc.z, rc.w);\n ${meanSnippet};\n ${varianceSnippet};\n\n vec4 inv = scale * inversesqrt(variance + vec4(${varianceEpsilon}));\n\n setOutput((x - mean) * inv + offset);\n }\n `;\n }\n}\n\nfunction broadcastSample(texName: string, rank: number): string {\n const texSampler = `get${texName.charAt(0).toUpperCase()}${texName.slice(1)}`;\n if (rank === 1) {\n return `\n vec4 ${texName}Sample = ${texSampler}(rc.w);\n vec4 ${texName} = vec4(${texName}Sample.xy, ${texName}Sample.xy);\n `;\n }\n return `vec4 ${texName} = ${texSampler}(rc.x, rc.y, rc.z, rc.w)`;\n}","/**\n * @license\n * Copyright 2018 Google LLC. 
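Both batch-norm shaders above evaluate the same element-wise expression, y = (x - mean) * scale / sqrt(variance + epsilon) + offset. A scalar sketch for reference:

// Element-wise batch normalization of a single value.
function batchNorm(
    x: number, mean: number, variance: number, epsilon: number,
    scale = 1, offset = 0): number {
  const inv = scale / Math.sqrt(variance + epsilon);
  return (x - mean) * inv + offset;
}

console.log(batchNorm(3, 1, 4, 0.001)); // ~1.0, i.e. 2 / sqrt(4.001)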
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as broadcast_util from '../../ops/broadcast_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\n// (Ar + Ai)(Br + Bi) =\n// ArBr + ArBi + AiBr + AiBi = ArBr - AB + ArBi + AiBr\n// Yr = ArBr - AB\n// Yi = ArBi + AiBr\nexport const COMPLEX_MULTIPLY = {\n REAL: 'return areal * breal - aimag * bimag;',\n IMAG: 'return areal * bimag + aimag * breal;'\n};\n\nexport class BinaryOpComplexProgram implements GPGPUProgram {\n variableNames = ['AReal', 'AImag', 'BReal', 'BImag'];\n userCode: string;\n outputShape: number[];\n supportsBroadcasting = true;\n\n constructor(op: string, aShape: number[], bShape: number[]) {\n this.outputShape =\n broadcast_util.assertAndGetBroadcastShape(aShape, bShape);\n\n this.userCode = `\n float binaryOpComplex(\n float areal, float aimag, float breal, float bimag) {\n ${op}\n }\n\n void main() {\n float areal = getARealAtOutCoords();\n float aimag = getAImagAtOutCoords();\n float breal = getBRealAtOutCoords();\n float bimag = getBImagAtOutCoords();\n setOutput(binaryOpComplex(areal, aimag, breal, bimag));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as broadcast_util from '../../ops/broadcast_util';\n\nimport {GPGPUContext} from './gpgpu_context';\nimport {GPGPUProgram} from './gpgpu_math';\n\nconst CHECK_NAN_SNIPPET = `\n if (isNaN(a)) return a;\n if (isNaN(b)) return b;\n`;\n\nexport const ADD = 'return a + b;';\nexport const SUB = 'return a - b;';\nexport const MUL = 'return a * b;';\nexport const DIV = `if (a == b) return 1.0;\n return a / b;`;\n\n// We use native integer division to deal with floating point imprecision. 
Since\n// we implement floor division and glsl implements truncated division, we\n// correct for this by subtracting 1 from result when the result is negative and\n// there is a remainder.\nexport const INT_DIV = `\n float resultSign = sign(a) * sign(b);\n int ia = round(a);\n int ib = round(b);\n int result = ia / ib;\n int amodb = ia - ib * result;\n\n if (resultSign < 0.0 && amodb != 0) {\n result -= 1;\n }\n return float(result);\n`;\n\nexport const POW = `\nif(a < 0.0 && floor(b) < b){\n return NAN;\n}\nreturn (round(mod(b, 2.0)) == 0 || round(mod(b, 2.0)) == 2) ?\n pow(abs(a), b) : sign(a) * pow(abs(a), b);\n`;\nexport const SQUARED_DIFFERENCE = 'return (a - b) * (a - b);';\n\nexport const EQUAL = `return float(a == b);`;\n\nexport const NOT_EQUAL = `return float(a != b);`;\n\nexport const LESS = `return float(a < b);`;\n\nexport const LESS_EQUAL = `return float(a <= b);`;\n\nexport const GREATER = `return float(a > b);`;\n\nexport const GREATER_EQUAL = `return float(a >= b);`;\n\nexport const LOGICAL_AND = `return float(a >= 1.0 && b >= 1.0);`;\n\nexport const LOGICAL_OR = `return float(a >= 1.0 || b >= 1.0);`;\n\nexport const MAX = CHECK_NAN_SNIPPET + `\n return max(a, b);\n`;\nexport const MIN = CHECK_NAN_SNIPPET + `\n return min(a, b);\n`;\nexport const MOD = `if (b == 0.0) return NAN;\n return mod(a, b);`;\n\nexport const ATAN2 = CHECK_NAN_SNIPPET + `\n return atan(a, b);\n`;\n\nexport const ELU_DER = `return (b >= 1.0) ? a : a * (b + 1.0);`;\n\nexport class BinaryOpProgram implements GPGPUProgram {\n variableNames = ['A', 'B'];\n outputShape: number[];\n userCode: string;\n supportsBroadcasting = true;\n\n // Caching uniform location for speed.\n startLoc: WebGLUniformLocation;\n\n constructor(op: string, aShape: number[], bShape: number[]) {\n this.outputShape =\n broadcast_util.assertAndGetBroadcastShape(aShape, bShape);\n this.userCode = `\n uniform float NAN;\n float binaryOperation(float a, float b) {\n ${op}\n }\n\n void main() {\n float a = getAAtOutCoords();\n float b = getBAtOutCoords();\n setOutput(binaryOperation(a, b));\n }\n `;\n }\n\n getCustomSetupFunc() {\n return (gpgpu: GPGPUContext, webGLProgram: WebGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'NAN');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1f(this.startLoc, NaN);\n };\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
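INT_DIV above corrects GLSL's truncated integer division into floor division by subtracting 1 when the operands have opposite signs and a remainder exists. The same logic in plain TypeScript:

// Floor division built from truncated division, as in the INT_DIV snippet.
function floorDiv(a: number, b: number): number {
  let result = Math.trunc(a / b);        // truncated (round-toward-zero) division
  const remainder = a - b * result;
  if (Math.sign(a) * Math.sign(b) < 0 && remainder !== 0) {
    result -= 1;                         // correct toward negative infinity
  }
  return result;
}

console.log(floorDiv(7, 2));   // 3
console.log(floorDiv(-7, 2));  // -4 (truncation alone would give -3)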
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ClipProgram implements GPGPUProgram {\n variableNames = ['A'];\n userCode: string;\n outputShape: number[];\n\n constructor(aShape: number[], min: number, max: number) {\n this.outputShape = aShape;\n this.userCode = `\n void main() {\n float value = getAAtOutCoords();\n if (isNaN(value)) {\n setOutput(value);\n return;\n }\n\n setOutput(clamp(value, float(${min}), float(${max})));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ComplexAbsProgram implements GPGPUProgram {\n variableNames = ['real', 'imag'];\n userCode: string;\n outputShape: number[];\n\n constructor(shape: number[]) {\n this.outputShape = shape;\n this.userCode = `\n void main() {\n float real = getRealAtOutCoords();\n float imag = getImagAtOutCoords();\n vec2 v = vec2(real, imag);\n\n setOutput(sqrt(dot(v, v)));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as concat_util from '../../ops/concat_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ConcatProgram implements GPGPUProgram {\n variableNames = ['A', 'B'];\n outputShape: number[] = [];\n userCode: string;\n\n // Concats 2d tensors along axis=1. 
See comments in MathBackendWebGL.concat().\n constructor(aShape: [number, number], bShape: [number, number]) {\n this.outputShape =\n concat_util.computeOutShape([aShape, bShape], 1 /* axis */);\n\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int yR = coords.x;\n int yC = coords.y;\n\n float value = 0.0;\n if (yC < ${aShape[1]}) {\n value = getA(yR, yC);\n } else {\n yC -= ${aShape[1]};\n value = getB(yR, yC);\n }\n\n setOutput(value);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../../ops/conv_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class Conv2DDerFilterProgram implements GPGPUProgram {\n variableNames = ['x', 'dy'];\n outputShape: number[];\n userCode: string;\n\n constructor(convInfo: Conv2DInfo) {\n this.outputShape = convInfo.filterShape;\n\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int wR = coords.x;\n int wC = coords.y;\n int d1 = coords.z;\n int d2 = coords.w;\n\n // Convolve x(?, ?, d1) with dy(:, :, d2) to get dw(wR, wC, d1, d2).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float dyValue = getDy(b, yR, yC, d2);\n float xValue = getX(b, xR, xC, d1);\n dotProd += (xValue * dyValue);\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n\nexport class Conv2DDerInputProgram implements GPGPUProgram {\n variableNames = ['dy', 'W'];\n outputShape: number[];\n userCode: string;\n\n constructor(convInfo: Conv2DInfo) {\n this.outputShape = convInfo.inShape;\n\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d1 = coords[3];\n\n ivec2 dyCorner = coords.yz - pads;\n int dyRCorner = dyCorner.x;\n int dyCCorner = dyCorner.y;\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n for (int d2 = 0; d2 < ${convInfo.outChannels}; d2++) {\n float xValue = getDy(batch, idyR, idyC, d2);\n float wValue = getW(wRPerm, wCPerm, d1, d2);\n dotProd += xValue * wValue;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../../ops/conv_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class DepthwiseConv2DDerFilterProgram implements GPGPUProgram {\n variableNames = ['x', 'dy'];\n outputShape: number[];\n userCode: string;\n\n constructor(convInfo: Conv2DInfo) {\n this.outputShape = convInfo.filterShape;\n\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int wR = coords.x;\n int wC = coords.y;\n int d1 = coords.z;\n int dm = coords.w;\n int d2 = d1 * ${channelMul} + dm;\n\n float dotProd = 0.0;\n\n // TODO: Vec4 over the batch size\n for (int b = 0; b < ${convInfo.batchSize}; b++) {\n for (int yR = 0; yR < ${convInfo.outHeight}; yR++) {\n int xR = wR + yR * ${strideHeight} - ${padTop};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int yC = 0; yC < ${convInfo.outWidth}; yC++) {\n int xC = wC + yC * ${strideWidth} - ${padLeft};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float dyValue = getDy(b, yR, yC, d2);\n float xValue = getX(b, xR, xC, d1);\n dotProd += (xValue * dyValue);\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n\nexport class DepthwiseConv2DDerInputProgram implements GPGPUProgram {\n variableNames = ['dy', 'W'];\n outputShape: number[];\n userCode: string;\n\n constructor(convInfo: Conv2DInfo) {\n this.outputShape = convInfo.inShape;\n\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n\n const padTop = filterHeight - 1 - convInfo.padInfo.top;\n const padLeft = filterWidth - 1 - convInfo.padInfo.left;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n\n this.userCode = `\n const ivec2 pads 
= ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d1 = coords[3];\n ivec2 dyCorner = coords.yz - pads;\n int dyRCorner = dyCorner.x;\n int dyCCorner = dyCorner.y;\n\n float dotProd = 0.0;\n\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n int wRPerm = ${filterHeight} - 1 - wR;\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n int wCPerm = ${filterWidth} - 1 - wC;\n\n // TODO: Vec4 over the channelMul\n for (int dm = 0; dm < ${channelMul}; dm++) {\n int d2 = d1 * ${channelMul} + dm;\n float xValue = getDy(batch, idyR, idyC, d2);\n float wValue = getW(wRPerm, wCPerm, d1, dm);\n dotProd += xValue * wValue;\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../../ops/conv_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class Conv2DProgram implements GPGPUProgram {\n variableNames = ['x', 'W'];\n outputShape: number[];\n userCode: string;\n\n constructor(convInfo: Conv2DInfo) {\n this.outputShape = convInfo.outShape;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n\n const inputDepthNearestVec4 = Math.floor(convInfo.inChannels / 4) * 4;\n const inputDepthVec4Remainder = convInfo.inChannels % 4;\n\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d2 = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // Convolve x(?, ?, d1) with w(:, :, d1, d2) to get y(yR, yC, d2).\n // ? = to be determined. 
: = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n for (int d1 = 0; d1 < ${inputDepthNearestVec4}; d1 += 4) {\n vec4 xValues = vec4(\n getX(batch, xR, xC, d1),\n getX(batch, xR, xC, d1 + 1),\n getX(batch, xR, xC, d1 + 2),\n getX(batch, xR, xC, d1 + 3)\n );\n vec4 wValues = vec4(\n getW(wR, wC, d1, d2),\n getW(wR, wC, d1 + 1, d2),\n getW(wR, wC, d1 + 2, d2),\n getW(wR, wC, d1 + 3, d2)\n );\n\n dotProd += dot(xValues, wValues);\n }\n\n if (${inputDepthVec4Remainder === 1}) {\n dotProd +=\n getX(batch, xR, xC, ${inputDepthNearestVec4}) *\n getW(wR, wC, ${inputDepthNearestVec4}, d2);\n } else if (${inputDepthVec4Remainder === 2}) {\n vec2 xValues = vec2(\n getX(batch, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 1)\n );\n vec2 wValues = vec2(\n getW(wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 1, d2)\n );\n dotProd += dot(xValues, wValues);\n } else if (${inputDepthVec4Remainder === 3}) {\n vec3 xValues = vec3(\n getX(batch, xR, xC, ${inputDepthNearestVec4}),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 1),\n getX(batch, xR, xC, ${inputDepthNearestVec4} + 2)\n );\n vec3 wValues = vec3(\n getW(wR, wC, ${inputDepthNearestVec4}, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 1, d2),\n getW(wR, wC, ${inputDepthNearestVec4} + 2, d2)\n );\n dotProd += dot(xValues, wValues);\n }\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
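The Conv2D shader above accumulates, for each output position, a dot product over the filter window and the input channels (with the channel loop unrolled into vec4s). A naive single-output-value sketch of the same arithmetic, assuming NHWC input and HWIO filter as nested arrays:

// Computes one output value y(b, yR, yC, d2) of a strided, dilated conv2d.
function conv2dAt(
    x: number[][][][], w: number[][][][], b: number, yR: number, yC: number,
    d2: number, strideH: number, strideW: number, dilationH: number,
    dilationW: number, padTop: number, padLeft: number): number {
  const [inH, inW, inC] = [x[0].length, x[0][0].length, x[0][0][0].length];
  const [fH, fW] = [w.length, w[0].length];
  let dotProd = 0;
  for (let wR = 0; wR < fH; wR++) {
    const xR = yR * strideH - padTop + wR * dilationH;
    if (xR < 0 || xR >= inH) continue;            // implicit zero padding
    for (let wC = 0; wC < fW; wC++) {
      const xC = yC * strideW - padLeft + wC * dilationW;
      if (xC < 0 || xC >= inW) continue;
      for (let d1 = 0; d1 < inC; d1++) {
        dotProd += x[b][xR][xC][d1] * w[wR][wC][d1][d2];
      }
    }
  }
  return dotProd;
}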
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../../ops/conv_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class DepthwiseConv2DProgram implements GPGPUProgram {\n variableNames = ['x', 'W'];\n outputShape: number[];\n userCode: string;\n\n constructor(convInfo: Conv2DInfo) {\n this.outputShape = convInfo.outShape;\n\n const xNumRows = convInfo.inHeight;\n const xNumCols = convInfo.inWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const channelMul = convInfo.outChannels / convInfo.inChannels;\n\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords.x;\n ivec2 xRCCorner = coords.yz * strides - pads;\n int d2 = coords.w;\n int d1 = d2 / ${channelMul};\n int q = d2 - d1 * ${channelMul};\n\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // Convolve x(?, ?, d1) with w(:, :, d1, q) to get y(yR, yC, d2).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n // TODO(dsmilkov): Flatten the two for loops and vec4 the operations.\n for (int wR = 0; wR < ${filterHeight}; wR++) {\n int xR = xRCorner + wR * ${dilationHeight};\n\n if (xR < 0 || xR >= ${xNumRows}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidth}; wC++) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n if (xC < 0 || xC >= ${xNumCols}) {\n continue;\n }\n\n float xVal = getX(batch, xR, xC, d1);\n float wVal = getW(wR, wC, d1, q);\n dotProd += xVal * wVal;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
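The depthwise shader above maps each output channel d2 back to an input channel d1 and a filter multiplier q via the channel multiplier. The index arithmetic in isolation:

// Each input channel produces channelMul outputs; recover (d1, q) from d2.
function depthwiseChannelIndices(d2: number, channelMul: number):
    {d1: number, q: number} {
  const d1 = Math.floor(d2 / channelMul);
  const q = d2 - d1 * channelMul;
  return {d1, q};
}

console.log(depthwiseChannelIndices(5, 2)); // { d1: 2, q: 1 }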
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport { GPGPUProgram } from './gpgpu_math';\n\nexport class CropAndResizeProgram implements GPGPUProgram {\n variableNames = ['Image', 'Boxes', 'BoxInd'];\n outputShape: number[] = [];\n userCode: string;\n\n constructor(\n imageShape: [number, number, number, number], boxShape: [number, number],\n cropSize: [number, number], method: 'bilinear' | 'nearest',\n extrapolationValue: number) {\n const [batch, imageHeight, imageWidth, depth] = imageShape;\n const [numBoxes,] = boxShape;\n const [cropHeight, cropWidth] = cropSize;\n this.outputShape = [numBoxes, cropHeight, cropWidth, depth];\n const methodId = method === 'bilinear' ? 1 : 0;\n\n const [inputHeightFloat, inputWidthFloat] =\n [`${imageHeight - 1}.0`, `${imageWidth - 1}.0`];\n\n const [heightRatio, heightScale, inY] = cropHeight > 1 ?\n [\n `${(imageHeight-1)/(cropHeight-1)}`,\n '(y2-y1) * height_ratio',\n `y1*${inputHeightFloat} + float(y)*(height_scale)`,\n ] :\n [\n '0.0',\n '0.0',\n `0.5 * (y1+y2) * ${inputHeightFloat}`,\n ];\n const [widthRatio, widthScale, inX] = cropWidth > 1 ?\n [\n `${(imageWidth-1)/(cropWidth-1)}`,\n '(x2-x1) * width_ratio',\n `x1*${inputWidthFloat} + float(x)*(width_scale)`,\n ] :\n [\n '0.0',\n '0.0',\n `0.5 * (x1+x2) * ${inputWidthFloat}`,\n ];\n\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op_gpu.cu.cc\n this.userCode = `\n const float height_ratio = float(${heightRatio});\n const float width_ratio = float(${widthRatio});\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int y = coords[1];\n int x = coords[2];\n int d = coords[3];\n\n // get box vals\n float y1 = getBoxes(b,0);\n float x1 = getBoxes(b,1);\n float y2 = getBoxes(b,2);\n float x2 = getBoxes(b,3);\n\n // get image in batch index\n int bInd = round(getBoxInd(b));\n if(bInd < 0 || bInd >= ${batch}) {\n return;\n }\n\n float height_scale = ${heightScale};\n float width_scale = ${widthScale};\n\n float in_y = ${inY};\n if( in_y < 0.0 || in_y > ${inputHeightFloat} ) {\n setOutput(float(${extrapolationValue}));\n return;\n }\n float in_x = ${inX};\n if( in_x < 0.0 || in_x > ${inputWidthFloat} ) {\n setOutput(float(${extrapolationValue}));\n return;\n }\n\n vec2 sourceFracIndexRC = vec2(in_y,in_x);\n if(${methodId} == 1) {\n // Compute the four integer indices.\n ivec2 sourceFloorRC = ivec2(sourceFracIndexRC);\n ivec2 sourceCeilRC = ivec2(ceil(sourceFracIndexRC));\n\n float topLeft = getImage(b, sourceFloorRC.x, sourceFloorRC.y, d);\n float bottomLeft = getImage(b, sourceCeilRC.x, sourceFloorRC.y, d);\n float topRight = getImage(b, sourceFloorRC.x, sourceCeilRC.y, d);\n float bottomRight = getImage(b, sourceCeilRC.x, sourceCeilRC.y, d);\n\n vec2 fracRC = sourceFracIndexRC - vec2(sourceFloorRC);\n\n float top = topLeft + (topRight 
- topLeft) * fracRC.y;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracRC.y;\n float newValue = top + (bottom - top) * fracRC.x;\n setOutput(newValue);\n } else {\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestRC = ivec2(floor(\n sourceFracIndexRC + vec2(0.5,0.5)));\n float newValue = getImage(b, sourceNearestRC.x, sourceNearestRC.y, d);\n setOutput(newValue);\n }\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as util from '../../util';\n\n/**\n * Produces GLSL code that derives logical coordinates from a flat\n * index. The code performs integer division with each stride and decrements\n * the index until the index equals the final dimension coordinate.\n */\nexport function getLogicalCoordinatesFromFlatIndex(\n coords: string[], shape: number[], index = 'index'): string {\n const strides = util.computeStrides(shape);\n return strides\n .map((stride, i) => {\n const line1 = `int ${coords[i]} = ${index} / ${stride}`;\n const line2 = i === strides.length - 1 ?\n `int ${coords[i + 1]} = ${index} - ${coords[i]} * ${stride}` :\n `index -= ${coords[i]} * ${stride}`;\n return `${line1}; ${line2};`;\n })\n .join('');\n}\n\nfunction buildVec(x: string[]): string {\n if (x.length === 1) {\n return `${x[0]}`;\n }\n return `vec${x.length}(${x.join(',')})`;\n}\n\n/**\n * Produces GLSL code that computes the dot product of the input x and y\n * vectors. Handles splitting inputs into increments of vec4s when necessary.\n */\nexport function dotify(x: string[], y: string[]): string {\n if (x.length !== y.length) {\n throw new Error(\n `Vectors to be dotted must be of the same length -` +\n `got ${x.length} and ${y.length}`);\n }\n\n const slices: string[] = [];\n const nearestVec4 = Math.floor(x.length / 4);\n const nearestVec4Remainder = x.length % 4;\n\n for (let i = 0; i < nearestVec4; i++) {\n const xSlice = x.slice(i * 4, i * 4 + 4);\n const ySlice = y.slice(i * 4, i * 4 + 4);\n slices.push(`${buildVec(xSlice)}, ${buildVec(ySlice)}`);\n }\n\n if (nearestVec4Remainder !== 0) {\n let xSlice = x.slice(nearestVec4 * 4);\n let ySlice = y.slice(nearestVec4 * 4);\n if (xSlice.length === 1) {\n xSlice = xSlice.map(d => `float(${d})`);\n ySlice = ySlice.map(d => `float(${d})`);\n }\n slices.push(`${buildVec(xSlice)}, ${buildVec(ySlice)}`);\n }\n\n return slices.map((d, i) => `dot(${d})`).join('+');\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
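getLogicalCoordinatesFromFlatIndex above emits GLSL that peels one coordinate off a flat index per stride. The numeric counterpart, with a simplified computeStrides reimplemented here for illustration (not the util.computeStrides export):

// Row-major strides, excluding the trailing stride of 1.
function computeStrides(shape: number[]): number[] {
  const strides: number[] = [];
  let stride = 1;
  for (let i = shape.length - 1; i > 0; i--) {
    stride *= shape[i];
    strides.unshift(stride);
  }
  return strides;
}

// Decompose a flat index into logical coordinates for the given shape.
function coordsFromFlatIndex(index: number, shape: number[]): number[] {
  const coords: number[] = [];
  for (const stride of computeStrides(shape)) {
    coords.push(Math.floor(index / stride));
    index -= coords[coords.length - 1] * stride;
  }
  coords.push(index); // last coordinate is the remainder
  return coords;
}

console.log(coordsFromFlatIndex(17, [2, 3, 4])); // [1, 1, 1]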
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport * as broadcast_util from '../../ops/broadcast_util';\nimport * as util from '../../util';\nimport * as shader_util from './shader_compiler_util';\n\nexport type ShapeInfo = {\n logicalShape: number[],\n texShape: [number, number],\n isUniform: boolean,\n isPacked: boolean\n};\n\nexport type InputInfo = {\n name: string,\n shapeInfo: ShapeInfo\n};\n\nexport function makeShader(\n inputsInfo: InputInfo[], outputShape: ShapeInfo, userCode: string,\n broadcast: boolean, usesPackedTextures: boolean): string {\n let inputPrefixSnippet: string[]|string = inputsInfo.map(x => {\n const size = util.sizeFromShape(x.shapeInfo.logicalShape);\n if (x.shapeInfo.isUniform) {\n return `uniform float ${x.name}${size > 1 ? `[${size}]` : ''};`;\n }\n return `uniform sampler2D ${x.name};`;\n });\n inputPrefixSnippet = inputPrefixSnippet.join('\\n');\n\n const inputSamplingSnippet =\n inputsInfo.map(x => getInputSamplingSnippet(x, outputShape, broadcast))\n .join('\\n');\n const outTexShape = outputShape.texShape;\n let outputSamplingSnippet: string;\n let floatTextureSetOutputSnippet: string;\n let shaderPrefix = SHADER_PREFIX;\n\n if (outputShape.isPacked) {\n outputSamplingSnippet =\n getPackedOutputSamplingSnippet(outputShape.logicalShape, outTexShape);\n floatTextureSetOutputSnippet = FLOAT_TEXTURE_SET_RGBA_SNIPPET;\n } else {\n outputSamplingSnippet =\n getOutputSamplingSnippet(outputShape.logicalShape, outTexShape);\n floatTextureSetOutputSnippet = FLOAT_TEXTURE_SET_R_SNIPPET;\n }\n\n if (usesPackedTextures) {\n shaderPrefix += SHADER_PACKED_PREFIX;\n }\n\n const source = [\n shaderPrefix, FLOAT_TEXTURE_SAMPLE_SNIPPET, floatTextureSetOutputSnippet,\n inputPrefixSnippet, outputSamplingSnippet, inputSamplingSnippet, userCode\n ].join('\\n');\n return source;\n}\n\nfunction getSamplerFromInInfo(inInfo: InputInfo): string {\n const shape = inInfo.shapeInfo.logicalShape;\n switch (shape.length) {\n case 0:\n return getSamplerScalar(inInfo);\n case 1:\n return getSampler1D(inInfo);\n case 2:\n return getSampler2D(inInfo);\n case 3:\n return getSampler3D(inInfo);\n case 4:\n return getSampler4D(inInfo);\n case 5:\n return getSampler5D(inInfo);\n case 6:\n return getSampler6D(inInfo);\n default:\n throw new Error(\n `${shape.length}-D input sampling` +\n ` is not yet supported`);\n }\n}\n\nfunction getPackedSamplerFromInInfo(inInfo: InputInfo): string {\n const shape = inInfo.shapeInfo.logicalShape;\n switch (shape.length) {\n case 1:\n return getPackedSampler1D(inInfo);\n case 2:\n return getPackedSampler2D(inInfo);\n case 3:\n return getPackedSampler3D(inInfo);\n case 4:\n return getPackedSampler4D(inInfo);\n default:\n throw new Error(\n `Packed ${shape.length}-D input sampling` +\n ` is not yet supported`);\n }\n}\n\nfunction getInputSamplingSnippet(\n inInfo: InputInfo, outShapeInfo: ShapeInfo, broadcast: boolean): string {\n let res = 
getSamplerFlat(inInfo);\n if (inInfo.shapeInfo.isPacked) {\n res += getPackedSamplerFromInInfo(inInfo);\n } else {\n res += getSamplerFromInInfo(inInfo);\n }\n\n // If input and output have matching logical shapes, add\n // getTexNameAtOutCoord() method that samples the input\n // textureSampler using the output coordinates.\n if (broadcast ||\n util.arraysEqual(\n inInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape)) {\n res += getSamplerAtOutputCoords(inInfo, outShapeInfo, broadcast);\n }\n return res;\n}\n\nfunction getPackedOutputSamplingSnippet(\n outShape: number[], outTexShape: [number, number]): string {\n switch (outShape.length) {\n case 0:\n return getOutputScalarCoords();\n case 1:\n return getOutputPacked1DCoords(outShape as [number], outTexShape);\n case 2:\n return getOutputPacked2DCoords(outShape as [number, number], outTexShape);\n case 3:\n return getOutputPacked3DCoords(\n outShape as [number, number, number], outTexShape);\n case 4:\n return getOutputPacked4DCoords(\n outShape as [number, number, number, number], outTexShape);\n default:\n throw new Error(\n `${outShape.length}-D packed output ` +\n `coordinate fetching is not yet supported`);\n }\n}\n\nfunction getOutputSamplingSnippet(\n outShape: number[], outTexShape: [number, number]): string {\n switch (outShape.length) {\n case 0:\n return getOutputScalarCoords();\n case 1:\n return getOutput1DCoords(outShape as [number], outTexShape);\n case 2:\n return getOutput2DCoords(outShape as [number, number], outTexShape);\n case 3:\n return getOutput3DCoords(\n outShape as [number, number, number], outTexShape);\n case 4:\n return getOutput4DCoords(\n outShape as [number, number, number, number], outTexShape);\n case 5:\n return getOutput5DCoords(\n outShape as [number, number, number, number, number], outTexShape);\n case 6:\n return getOutput6DCoords(\n outShape as [number, number, number, number, number, number],\n outTexShape);\n default:\n throw new Error(\n `${outShape.length}-D output sampling is not yet supported`);\n }\n}\n\nconst SAMPLE_1D_SNIPPET = `\nvec2 UVfrom1D(int texNumR, int texNumC, int index) {\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\nvec2 packedUVfrom1D(int texNumR, int texNumC, int index) {\n int texelIndex = index / 2;\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\n\nconst SAMPLE_2D_SNIPPET = `\nvec2 UVfrom2D(int texNumR, int texNumC, int numC, int row, int col) {\n int index = row * numC + col;\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\nvec2 packedUVfrom2D(int texelsInLogicalRow, int texNumR,\n int texNumC, int row, int col) {\n int texelIndex = (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = texelIndex / texNumC;\n int texC = texelIndex - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\n\nconst SAMPLE_3D_SNIPPET = `\nvec2 UVfrom3D(int texNumR, int texNumC, int stride0,\n int stride1, int row, int col, int depth) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * stride0 + col * stride1 + depth;\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\nvec2 packedUVfrom3D(int texNumR, int texNumC,\n int texelsInBatch, int texelsInLogicalRow, int b,\n 
int row, int col) {\n int index = b * texelsInBatch + (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\n\nconst SAMPLE_4D_SNIPPET = `\nvec2 UVfrom4D(int texNumR, int texNumC, int stride0,\n int stride1, int stride2, int row, int col, int depth,\n int depth2) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * stride0 + col * stride1 + depth * stride2 + depth2;\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\nvec2 packedUVfrom4D(int texNumR, int texNumC, int texelsInBatch2,\n int texelsInBatch, int texelsInLogicalRow, int b2, int b,\n int row, int col) {\n int index = b2 * texelsInBatch2 + b * texelsInBatch +\n (row / 2) * texelsInLogicalRow + (col / 2);\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\n\nconst SAMPLE_5D_SNIPPET = `\nvec2 UVfrom5D(int texNumR, int texNumC, int stride0,\n int stride1, int stride2, int stride3, int row, int col, int depth,\n int depth2, int depth3) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * stride0 + col * stride1 +\n depth * stride2 + depth2 * stride3 + depth3;\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\n\nconst SAMPLE_6D_SNIPPET = `\nvec2 UVfrom6D(int texNumR, int texNumC, int stride0,\n int stride1, int stride2, int stride3, int stride4,\n int row, int col, int depth, int depth2, int depth3, int depth4) {\n // Explicitly use integer operations as dot() only works on floats.\n int index = row * stride0 + col * stride1 + depth * stride2 + depth2 *\n stride3 + depth3 * stride4 + depth4;\n int texR = index / texNumC;\n int texC = index - texR * texNumC;\n return (vec2(texC, texR) + halfCR) / vec2(texNumC, texNumR);\n}\n`;\n\nconst FLOAT_TEXTURE_SAMPLE_SNIPPET = `\n float sampleTexture(sampler2D textureSampler, vec2 uv) {\n return texture2D(textureSampler, uv).r;\n }\n`;\n\nconst FLOAT_TEXTURE_SET_R_SNIPPET = `\n void setOutput(float val) {\n gl_FragColor = vec4(val, 0, 0, 0);\n }\n`;\n\nconst FLOAT_TEXTURE_SET_RGBA_SNIPPET = `\n void setOutput(vec4 val) {\n gl_FragColor = val;\n }\n`;\n\n/*\nPrevious NaN check '(val < 0.0 || 0.0 < val || val == 0.0) ? false : true' does\nnot work on iOS 12\n */\nconst SHADER_PREFIX = `\n precision highp float;\n precision highp int;\n varying vec2 resultUV;\n const vec2 halfCR = vec2(0.5, 0.5);\n\n struct ivec5\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n };\n\n struct ivec6\n {\n int x;\n int y;\n int z;\n int w;\n int u;\n int v;\n };\n\n bool isNaN(float val) {\n return (val < 1.0 || 0.0 < val || val == 0.0) ? 
false : true;\n }\n\n bool hasNaN(vec4 values) {\n vec4 v1 = values * values;\n vec4 v2 = values * values;\n return any(notEqual(v1, v2));\n }\n\n float getNaN(vec4 values) {\n return dot(vec4(1), values);\n }\n\n int round(float value) {\n return int(floor(value + 0.5));\n }\n\n int imod(int x, int y) {\n return x - y * (x / y);\n }\n\n //Based on the work of Dave Hoskins\n //https://www.shadertoy.com/view/4djSRW\n #define HASHSCALE1 443.8975\n float random(float seed){\n vec2 p = resultUV * seed;\n vec3 p3 = fract(vec3(p.xyx) * HASHSCALE1);\n p3 += dot(p3, p3.yzx + 19.19);\n return fract((p3.x + p3.y) * p3.z);\n }\n\n ${SAMPLE_1D_SNIPPET}\n ${SAMPLE_2D_SNIPPET}\n ${SAMPLE_3D_SNIPPET}\n ${SAMPLE_4D_SNIPPET}\n ${SAMPLE_5D_SNIPPET}\n ${SAMPLE_6D_SNIPPET}\n`;\n\nconst SHADER_PACKED_PREFIX = `\n float getChannel(vec4 frag, vec2 innerDims) {\n vec2 modCoord = mod(innerDims, 2.);\n return modCoord.x == 0. ?\n (modCoord.y == 0. ? frag.r : frag.g) :\n (modCoord.y == 0. ? frag.b : frag.a);\n }\n float getChannel(vec4 frag, int dim) {\n float modCoord = mod(float(dim), 2.);\n return modCoord == 0. ? frag.r : frag.g;\n }\n`;\n\nfunction getOutputScalarCoords() {\n return `\n int getOutputCoords() {\n return 0;\n }\n `;\n}\n\nfunction getOutputPacked1DCoords(\n shape: [number], texShape: [number, number]): string {\n const packedTexShape =\n [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (texShape[0] === 1) {\n return `\n int getOutputCoords() {\n return 2 * int(resultUV.x * ${packedTexShape[1]}.0);\n }\n `;\n }\n\n if (texShape[1] === 1) {\n return `\n int getOutputCoords() {\n return 2 * int(resultUV.y * ${packedTexShape[0]}.0);\n }\n `;\n }\n\n return `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n return resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n }\n `;\n}\n\nfunction getOutput1DCoords(\n shape: [number], texShape: [number, number]): string {\n if (texShape[0] === 1) {\n return `\n int getOutputCoords() {\n return int(resultUV.x * ${texShape[1]}.0);\n }\n `;\n }\n if (texShape[1] === 1) {\n return `\n int getOutputCoords() {\n return int(resultUV.y * ${texShape[0]}.0);\n }\n `;\n }\n return `\n int getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n return resTexRC.x * ${texShape[1]} + resTexRC.y;\n }\n `;\n}\n\nfunction getOutputPacked3DCoords(\n shape: [number, number, number], texShape: [number, number]): string {\n const packedTexShape =\n [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texelsInLogicalRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[1] / 2);\n\n return `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec3(b, r, c);\n }\n `;\n}\n\nfunction getOutput3DCoords(\n shape: [number, number, number], texShape: [number, number]): string {\n const coordsFromIndexSnippet =\n shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n\n return `\n ivec3 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n 
}\n `;\n}\n\nfunction getOutputPacked4DCoords(\n shape: [number, number, number, number],\n texShape: [number, number]): string {\n const packedTexShape =\n [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n\n const texelsInLogicalRow = Math.ceil(shape[3] / 2);\n const texelsInBatch = texelsInLogicalRow * Math.ceil(shape[2] / 2);\n const texelsInBatch2 = texelsInBatch * shape[1];\n\n return `\n ivec4 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n\n int b2 = index / ${texelsInBatch2};\n index -= b2 * ${texelsInBatch2};\n\n int b = index / ${texelsInBatch};\n index -= b * ${texelsInBatch};\n\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec4(b2, b, r, c);\n }\n `;\n}\n\nfunction getOutput4DCoords(\n shape: [number, number, number, number],\n texShape: [number, number]): string {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(\n ['r', 'c', 'd', 'd2'], shape);\n\n return `\n ivec4 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n ${coordsFromIndexSnippet}\n return ivec4(r, c, d, d2);\n }\n `;\n}\n\nfunction getOutput5DCoords(\n shape: [number, number, number, number, number],\n texShape: [number, number]): string {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(\n ['r', 'c', 'd', 'd2', 'd3'], shape);\n\n return `\n ivec5 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx * vec2(${texShape[0]},\n ${texShape[1]}));\n\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n\n ${coordsFromIndexSnippet}\n\n ivec5 outShape = ivec5(r, c, d, d2, d3);\n return outShape;\n }\n `;\n}\n\nfunction getOutput6DCoords(\n shape: [number, number, number, number, number, number],\n texShape: [number, number]): string {\n const coordsFromIndexSnippet = shader_util.getLogicalCoordinatesFromFlatIndex(\n ['r', 'c', 'd', 'd2', 'd3', 'd4'], shape);\n\n return `\n ivec6 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n\n ${coordsFromIndexSnippet}\n\n ivec6 result = ivec6(r, c, d, d2, d3, d4);\n return result;\n }\n `;\n}\n\nfunction getOutputPacked2DCoords(\n shape: [number, number], texShape: [number, number]): string {\n const packedTexShape =\n [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n if (util.arraysEqual(shape, texShape)) {\n return `\n ivec2 getOutputCoords() {\n return 2 * ivec2(resultUV.yx * vec2(${packedTexShape[0]}, ${\n packedTexShape[1]}));\n }\n `;\n }\n\n // texels needed to accommodate a logical row\n const texelsInLogicalRow = Math.ceil(shape[1] / 2);\n\n /**\n * getOutputCoords\n *\n * resTexRC: The rows and columns of the texels. 
If you move over one\n * texel to the right in the packed texture, you are moving over one column\n * (not two).\n *\n * index: The texel index\n */\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${packedTexShape[0]}, ${packedTexShape[1]}));\n\n int index = resTexRC.x * ${packedTexShape[1]} + resTexRC.y;\n int r = 2 * (index / ${texelsInLogicalRow});\n int c = imod(index, ${texelsInLogicalRow}) * 2;\n\n return ivec2(r, c);\n }\n `;\n}\n\nfunction getOutput2DCoords(\n shape: [number, number], texShape: [number, number]): string {\n if (util.arraysEqual(shape, texShape)) {\n return `\n ivec2 getOutputCoords() {\n return ivec2(resultUV.yx * vec2(${texShape[0]}, ${texShape[1]}));\n }\n `;\n }\n if (shape[1] === 1) {\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n return ivec2(index, 0);\n }\n `;\n }\n if (shape[0] === 1) {\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n return ivec2(0, index);\n }\n `;\n }\n return `\n ivec2 getOutputCoords() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${texShape[0]}, ${texShape[1]}));\n int index = resTexRC.x * ${texShape[1]} + resTexRC.y;\n int r = index / ${shape[1]};\n int c = index - r * ${shape[1]};\n return ivec2(r, c);\n }\n `;\n}\n\nfunction getSamplerScalar(inputInfo: InputInfo): string {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n if (inputInfo.shapeInfo.isUniform) {\n return `float ${funcName}() {return ${texName};}`;\n }\n return `\n float ${funcName}() {\n return sampleTexture(${texName}, halfCR);\n }\n `;\n}\n\nfunction getPackedSampler1D(inputInfo: InputInfo): string {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape =\n [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n\n return `\n vec4 ${funcName}(int index) {\n vec2 uv = packedUVfrom1D(\n ${packedTexShape[0]}, ${packedTexShape[1]}, index);\n return texture2D(${texName}, uv);\n }\n `;\n}\n\nfunction getSampler1D(inputInfo: InputInfo): string {\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n\n return `\n float ${funcName}(int index) {\n return ${funcName}Flat(index);\n }\n `;\n}\n\nfunction getPackedSampler2D(inputInfo: InputInfo): string {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texShape != null && util.arraysEqual(shape, texShape)) {\n return `\n vec4 ${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n\n return texture2D(${texName}, uv);\n }\n `;\n }\n\n const packedTexShape =\n [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const valuesPerRow = Math.ceil(shape[1] / 2);\n\n return `\n vec4 ${funcName}(int row, int col) {\n vec2 uv = packedUVfrom2D(${valuesPerRow}, ${packedTexShape[0]}, ${\n packedTexShape[1]}, row, col);\n return texture2D(${texName}, uv);\n }\n `;\n}\n\nfunction getSampler2D(inputInfo: 
InputInfo): string {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n\n const texShape = inputInfo.shapeInfo.texShape;\n if (texShape != null && util.arraysEqual(shape, texShape)) {\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n return `\n float ${funcName}(int row, int col) {\n vec2 uv = (vec2(col, row) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n\n const {newShape, keptDims} = util.squeezeShape(shape);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['row', 'col'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec2(row, col), vec2(${shape[1]}, 1));\n return ${funcName}Flat(round(index));\n }\n `;\n }\n\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === 1) {\n // index is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec2(row, col), vec2(${shape[1]}, 1));\n vec2 uv = vec2(0.5, (index + 0.5) / ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumR === 1) {\n // index is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col) {\n float index = dot(vec2(row, col), vec2(${shape[1]}, 1));\n vec2 uv = vec2((index + 0.5) / ${texNumC}.0, 0.5);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int row, int col) {\n vec2 uv = UVfrom2D(${texNumR}, ${texNumC}, ${shape[1]}, row, col);\n return sampleTexture(${texName}, uv);\n }\n`;\n}\n\nfunction getPackedSampler3D(inputInfo: InputInfo): string {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape =\n [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n\n if (shape[0] === 1) {\n const squeezedShape = shape.slice(1);\n const keptDims = [1, 2];\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['b', 'row', 'col'];\n return `\n ${getPackedSamplerFromInInfo(newInputInfo)}\n vec4 ${funcName}(int b, int row, int col) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n\n const valuesPerRow = Math.ceil(shape[2] / 2);\n const texelsInBatch = valuesPerRow * Math.ceil(shape[1] / 2);\n\n return `\n vec4 ${funcName}(int b, int row, int col) {\n vec2 uv = packedUVfrom3D(\n ${texNumR}, ${texNumC}, ${texelsInBatch}, ${valuesPerRow}, b, row, col);\n return texture2D(${texName}, uv);\n }\n `;\n}\n\nfunction getSampler3D(inputInfo: InputInfo): string {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride0 = shape[1] * shape[2];\n const stride1 = shape[2];\n\n const {newShape, keptDims} = 
util.squeezeShape(shape);\n const squeezedShape = newShape;\n if (squeezedShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, squeezedShape);\n const params = ['row', 'col', 'depth'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float index = dot(vec3(row, col, depth),\n vec3(${stride0}, ${stride1}, 1));\n return ${funcName}Flat(round(index));\n }\n `;\n }\n\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = float(row);\n float texC = dot(vec2(col, depth), vec2(${stride1}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n\n if (texNumC === stride1) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth) {\n float texR = dot(vec2(row, col), vec2(${shape[1]}, 1));\n float texC = float(depth);\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n\n return `\n float ${funcName}(int row, int col, int depth) {\n vec2 uv = UVfrom3D(\n ${texNumR}, ${texNumC}, ${stride0}, ${stride1}, row, col, depth);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\n\nfunction getPackedSampler4D(inputInfo: InputInfo): string {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const texShape = inputInfo.shapeInfo.texShape;\n const packedTexShape =\n [Math.ceil(texShape[0] / 2), Math.ceil(texShape[1] / 2)];\n const texNumR = packedTexShape[0];\n const texNumC = packedTexShape[1];\n\n const valuesPerRow = Math.ceil(shape[3] / 2);\n const texelsInBatch = valuesPerRow * Math.ceil(shape[2] / 2);\n const texelsInBatch2 = texelsInBatch * shape[1];\n\n return `\n vec4 ${funcName}(int b2, int b, int row, int col) {\n vec2 uv = packedUVfrom4D(\n ${texNumR}, ${texNumC}, ${texelsInBatch2},\n ${texelsInBatch}, ${valuesPerRow}, b2, b, row, col);\n return texture2D(${texName}, uv);\n }\n `;\n}\n\nfunction getSampler4D(inputInfo: InputInfo): string {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride2 = shape[3];\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n\n const {newShape, keptDims} = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth, int depth2) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float index = 
dot(vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, 1));\n return ${funcName}Flat(round(index));\n }\n `;\n }\n\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float texR = float(row);\n float texC =\n dot(vec3(col, depth, depth2), vec3(${stride1}, ${stride2}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride2) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n float texR = dot(vec3(row, col, depth),\n vec3(${shape[1] * shape[2]}, ${shape[2]}, 1));\n float texC = float(depth2);\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int row, int col, int depth, int depth2) {\n vec2 uv = UVfrom4D(${texNumR}, ${texNumC}, ${stride0}, ${stride1},\n ${stride2}, row, col, depth, depth2);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\n\nfunction getSampler5D(inputInfo: InputInfo): string {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride3 = shape[4];\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n\n const {newShape, keptDims} = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n float index = dot(\n vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, ${stride3})) +\n depth3;\n return ${funcName}Flat(index);\n }\n `;\n }\n\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n\n if (texNumC === stride0) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n int texR = row;\n float texC = dot(\n vec4(col, depth, depth2, depth3),\n vec4(${stride1}, ${stride2}, ${stride3}, 1));\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n\n if (texNumC === stride3) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth, int depth2, int depth3) {\n float texR = dot(\n vec4(row, col, depth, depth2),\n vec4(${shape[1] * shape[2] * shape[3]}, ${shape[2] * shape[3]},\n ${shape[3]}, 1));\n int texC = depth3;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n\n return `\n float ${funcName}(int row, int 
col, int depth, int depth2, int depth3) {\n vec2 uv = UVfrom5D(${texNumR}, ${texNumC}, ${stride0}, ${stride1},\n ${stride2}, ${stride3}, row, col, depth, depth2, depth3);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\n\nfunction getSampler6D(inputInfo: InputInfo): string {\n const shape = inputInfo.shapeInfo.logicalShape;\n const texName = inputInfo.name;\n const funcName = 'get' + texName.charAt(0).toUpperCase() + texName.slice(1);\n const stride4 = shape[5];\n const stride3 = shape[4] * stride4;\n const stride2 = shape[3] * stride3;\n const stride1 = shape[2] * stride2;\n const stride0 = shape[1] * stride1;\n const {newShape, keptDims} = util.squeezeShape(shape);\n if (newShape.length < shape.length) {\n const newInputInfo = squeezeInputInfo(inputInfo, newShape);\n const params = ['row', 'col', 'depth', 'depth2', 'depth3', 'depth4'];\n return `\n ${getSamplerFromInInfo(newInputInfo)}\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n return ${funcName}(${getSqueezedParams(params, keptDims)});\n }\n `;\n }\n\n if (inputInfo.shapeInfo.isUniform) {\n // Uniform arrays will be less than 65505 (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n float index = dot(\n vec4(row, col, depth, depth2),\n vec4(${stride0}, ${stride1}, ${stride2}, ${stride3})) +\n dot(\n vec2(depth3, depth4),\n vec2(${stride4}, 1));\n return ${funcName}Flat(index);\n }\n `;\n }\n\n const texShape = inputInfo.shapeInfo.texShape;\n const texNumR = texShape[0];\n const texNumC = texShape[1];\n if (texNumC === stride0) {\n // texC is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n int texR = row;\n float texC = dot(\n vec4(col, depth, depth2, depth3),\n vec4(${stride1}, ${stride2}, ${stride3}, ${stride4})) + depth4;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (texNumC === stride4) {\n // texR is used directly as physical (no risk of float16 overflow).\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n float texR = dot(\n vec4(row, col, depth, depth2),\n vec4(${shape[1] * shape[2] * shape[3] * shape[4]},\n ${shape[2] * shape[3] * shape[4]},\n ${shape[3] * shape[4]},\n ${shape[4]})) + depth3;\n int texC = depth4;\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${texNumC}.0, ${texNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int row, int col, int depth,\n int depth2, int depth3, int depth4) {\n vec2 uv = UVfrom6D(${texNumR}, ${texNumC}, ${stride0}, ${stride1},\n ${stride2}, ${stride3}, ${stride4}\n ,row, col, depth, depth2, depth3, depth4);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\n\nfunction getSamplerFlat(inputInfo: InputInfo): string {\n const texName = inputInfo.name;\n const funcName =\n 'get' + texName.charAt(0).toUpperCase() + texName.slice(1) + 'Flat';\n const inSize = util.sizeFromShape(inputInfo.shapeInfo.logicalShape);\n\n if (inputInfo.shapeInfo.isUniform) {\n if (inSize === 1) {\n return `float ${funcName}(int index) {return ${texName};}`;\n }\n return `\n float ${funcName}(int index) {\n for (int i = 0; i < ${inSize}; i++) {\n if (i == index) {\n return ${texName}[i];\n }\n }\n }\n `;\n }\n\n const texShape = inputInfo.shapeInfo.texShape;\n const tNumR = texShape[0];\n const 
tNumC = texShape[1];\n if (tNumC === 1 && tNumR === 1) {\n return `\n float ${funcName}(int index) {\n return sampleTexture(${texName}, halfCR);\n }\n `;\n }\n if (tNumC === 1) {\n return `\n float ${funcName}(int index) {\n vec2 uv = vec2(0.5, (float(index) + 0.5) / ${tNumR}.0);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n if (tNumR === 1) {\n return `\n float ${funcName}(int index) {\n vec2 uv = vec2((float(index) + 0.5) / ${tNumC}.0, 0.5);\n return sampleTexture(${texName}, uv);\n }\n `;\n }\n return `\n float ${funcName}(int index) {\n vec2 uv = UVfrom1D(${tNumR}, ${tNumC}, index);\n return sampleTexture(${texName}, uv);\n }\n `;\n}\n\nfunction getBroadcastOutputCoordsSampler(\n inputInfo: InputInfo, outShapeInfo: ShapeInfo, texFuncSnippet: string,\n funcName: string): string {\n const inRank = inputInfo.shapeInfo.logicalShape.length;\n const outRank = outShapeInfo.logicalShape.length;\n\n let type = 'int';\n if (outRank === 2) {\n type = 'ivec2';\n } else if (outRank === 3) {\n type = 'ivec3';\n } else if (outRank === 4) {\n type = 'ivec4';\n }\n const broadcastDims = broadcast_util.getBroadcastDims(\n inputInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape);\n const rankDiff = outRank - inRank;\n let coordsSnippet: string;\n if (inRank === 0) {\n coordsSnippet = '';\n } else if (outRank < 2 && broadcastDims.length >= 1) {\n coordsSnippet = 'coords = 0;';\n } else {\n coordsSnippet =\n broadcastDims.map(d => `coords[${d + rankDiff}] = 0;`).join('\\n');\n }\n let unpackedCoordsSnippet = '';\n if (outRank < 2 && inRank > 0) {\n unpackedCoordsSnippet = 'coords';\n } else {\n unpackedCoordsSnippet = inputInfo.shapeInfo.logicalShape\n .map((s, i) => `coords[${i + rankDiff}]`)\n .join(', ');\n }\n return `\n float ${funcName}() {\n ${type} coords = getOutputCoords();\n ${coordsSnippet}\n return get${texFuncSnippet}(${unpackedCoordsSnippet});\n }\n `;\n}\n\nfunction getSamplerAtOutputCoords(\n inputInfo: InputInfo, outShapeInfo: ShapeInfo,\n supportsBroadcasting: boolean) {\n const texName = inputInfo.name;\n const texFuncSnippet = texName.charAt(0).toUpperCase() + texName.slice(1);\n const funcName = 'get' + texFuncSnippet + 'AtOutCoords';\n\n const broadcastDims = broadcast_util.getBroadcastDims(\n inputInfo.shapeInfo.logicalShape, outShapeInfo.logicalShape);\n const inRank = inputInfo.shapeInfo.logicalShape.length;\n const outRank = outShapeInfo.logicalShape.length;\n const doBroadcast =\n supportsBroadcasting && ((outRank > inRank) || broadcastDims.length > 0);\n const broadcastOverOuter =\n broadcast_util.broadcastDimsAreOuter(broadcastDims);\n const isUniform = inputInfo.shapeInfo.isUniform;\n\n if (doBroadcast && !broadcastOverOuter) {\n return getBroadcastOutputCoordsSampler(\n inputInfo, outShapeInfo, texFuncSnippet, funcName);\n }\n\n const inSize = util.sizeFromShape(inputInfo.shapeInfo.logicalShape);\n let broadcastSnippet = '';\n if (doBroadcast && broadcastOverOuter) {\n broadcastSnippet = `\n int mainPart = index / ${inSize};\n index -= mainPart * ${inSize};\n `;\n }\n\n const outTexShape = outShapeInfo.texShape;\n if (isUniform) {\n if (inSize === 1) {\n return `float ${funcName}() {return ${texName};}`;\n }\n return `\n float ${funcName}() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${outTexShape[0]}, ${outTexShape[1]}));\n int index = resTexRC.x * ${outTexShape[1]} + resTexRC.y;\n ${broadcastSnippet}\n return get${texFuncSnippet}Flat(index);\n }\n `;\n }\n\n // At this point, the input is not a uniform.\n const inTexShape = inputInfo.shapeInfo.texShape;\n 
if (util.arraysEqual(inTexShape, outTexShape)) {\n return `\n float ${funcName}() {\n return sampleTexture(${texName}, resultUV);\n }\n `;\n }\n\n return `\n float ${funcName}() {\n ivec2 resTexRC = ivec2(resultUV.yx *\n vec2(${outTexShape[0]}, ${outTexShape[1]}));\n int index = resTexRC.x * ${outTexShape[1]} + resTexRC.y;\n ${broadcastSnippet}\n int texR = index / ${inTexShape[1]};\n int texC = index - texR * ${inTexShape[1]};\n vec2 uv = (vec2(texC, texR) + halfCR) /\n vec2(${inTexShape[1]}.0, ${inTexShape[0]}.0);\n\n return sampleTexture(${texName}, uv);\n }\n `;\n}\n\nexport function getCoordsDataType(rank: number): string {\n if (rank <= 1) {\n return 'int';\n } else if (rank === 2) {\n return 'ivec2';\n } else if (rank === 3) {\n return 'ivec3';\n } else if (rank === 4) {\n return 'ivec4';\n } else if (rank === 5) {\n return 'ivec5';\n } else if (rank === 6) {\n return 'ivec6';\n } else {\n throw Error(`GPU for rank ${rank} is not yet supported`);\n }\n}\n\n/** Returns a new input info (a copy) that has a squeezed logical shape. */\nfunction squeezeInputInfo(\n inInfo: InputInfo, squeezedShape: number[]): InputInfo {\n // Deep copy.\n const newInputInfo: InputInfo = JSON.parse(JSON.stringify(inInfo));\n newInputInfo.shapeInfo.logicalShape = squeezedShape;\n return newInputInfo;\n}\n\nfunction getSqueezedParams(params: string[], keptDims: number[]): string {\n return keptDims.map(d => params[d]).join(', ');\n}","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class CumSumProgram implements GPGPUProgram {\n variableNames = ['x'];\n outputShape: number[];\n userCode: string;\n\n constructor(shape: number[], exclusive: boolean, reverse: boolean) {\n this.outputShape = shape;\n const rank = shape.length;\n const finalDim = shape[shape.length - 1];\n const comparator = reverse ? '<' : '>';\n\n this.userCode = `\n int getIndex(int i) {\n ${reverse ? 
`return ${finalDim} -i - 1;` : 'return i;'}\n }\n\n void main() {\n ${getCoordsDataType(rank)} coords = getOutputCoords();\n int end = ${getFinalCoord(rank, 'coords')};\n float val = 0.0;\n for (int i = ${finalDim} - 1; i >= 0; i -= 1) {\n int idx = getIndex(i);\n if (idx ${comparator} end) {\n continue;\n }\n if (idx == end && ${exclusive}) {\n continue;\n }\n ${getFinalCoord(rank, 'coords')} = idx;\n val += getX(${getCoords(rank, 'coords')});\n }\n setOutput(val);\n }\n `;\n }\n}\n\nfunction getCoords(rank: number, name: string): string {\n if (rank === 1) {\n return `${name}`;\n } else if (rank === 2) {\n return `${name}.x, ${name}.y`;\n } else if (rank === 3) {\n return `${name}.x, ${name}.y, ${name}.z`;\n } else if (rank === 4) {\n return `${name}.x, ${name}.y, ${name}.z, ${name}.w`;\n } else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\n\nfunction getFinalCoord(rank: number, name: string): string {\n if (rank === 1) {\n return `${name}`;\n } else if (rank === 2) {\n return `${name}.y`;\n } else if (rank === 3) {\n return `${name}.z`;\n } else if (rank === 4) {\n return `${name}.w`;\n } else {\n throw Error(`Cumulative sum for rank ${rank} is not yet supported`);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class DepthToSpaceProgram implements GPGPUProgram {\n variableNames = ['x'];\n outputShape: number[] = [];\n userCode: string;\n blockSize: number;\n dataFormat: string;\n\n constructor(\n outputShape: number[], blockSize: number, dataFormat: 'NHWC'|'NCHW') {\n this.outputShape = outputShape;\n this.blockSize = blockSize;\n this.dataFormat = dataFormat;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int h = ${this.getHeightCoordString()};\n int w = ${this.getWidthCoordString()};\n int d = ${this.getDepthCoordString()};\n\n int in_h = h / ${blockSize};\n int offset_h = imod(h, ${blockSize});\n int in_w = w / ${blockSize};\n int offset_w = imod(w, ${blockSize});\n int offset_d = (offset_h * ${blockSize} + offset_w) *\n ${this.getOutputDepthSize()};\n int in_d = d + offset_d;\n\n float result = ${this.getInputSamplingString()};\n setOutput(result);\n }\n `;\n }\n\n private getHeightCoordString(): string {\n if (this.dataFormat === 'NHWC') {\n return `coords[1]`;\n } else {\n return `coords[2]`;\n }\n }\n\n private getWidthCoordString(): string {\n if (this.dataFormat === 'NHWC') {\n return `coords[2]`;\n } else {\n return `coords[3]`;\n }\n }\n\n private getDepthCoordString(): string {\n if (this.dataFormat === 'NHWC') {\n return `coords[3]`;\n } else {\n return `coords[1]`;\n }\n }\n\n private getOutputDepthSize(): number {\n if (this.dataFormat === 'NHWC') {\n return this.outputShape[3];\n } else {\n return this.outputShape[1];\n }\n }\n\n private 
getInputSamplingString(): string {\n if (this.dataFormat === 'NHWC') {\n return `getX(b, in_h, in_w, in_d)`;\n } else {\n return `getX(b, in_d, in_h, in_w)`;\n }\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class EncodeFloatProgram implements GPGPUProgram {\n variableNames = ['A'];\n userCode: string;\n outputShape: number[];\n\n constructor(outputShape: number[]) {\n this.outputShape = outputShape;\n this.userCode = `\n const float FLOAT_MAX = 1.70141184e38;\n const float FLOAT_MIN = 1.17549435e-38;\n\n lowp vec4 encode_float(highp float v) {\n if (isNaN(v)) {\n return vec4(255, 255, 255, 255);\n }\n\n highp float av = abs(v);\n\n if(av < FLOAT_MIN) {\n return vec4(0.0, 0.0, 0.0, 0.0);\n } else if(v > FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 127.0) / 255.0;\n } else if(v < -FLOAT_MAX) {\n return vec4(0.0, 0.0, 128.0, 255.0) / 255.0;\n }\n\n highp vec4 c = vec4(0,0,0,0);\n\n highp float e = floor(log2(av));\n highp float m = exp2(fract(log2(av))) - 1.0;\n\n c[2] = floor(128.0 * m);\n m -= c[2] / 128.0;\n c[1] = floor(32768.0 * m);\n m -= c[1] / 32768.0;\n c[0] = floor(8388608.0 * m);\n\n highp float ebias = e + 127.0;\n c[3] = floor(ebias / 2.0);\n ebias -= c[3] * 2.0;\n c[2] += floor(ebias) * 128.0;\n\n c[3] += 128.0 * step(0.0, -v);\n\n return c / 255.0;\n }\n\n void main() {\n float x = getAAtOutCoords();\n gl_FragColor = encode_float(x);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport const COMPLEX_FFT = {\n REAL: 'return real * expR - imag * expI;',\n IMAG: 'return real * expI + imag * expR;'\n};\n\nexport class FFTProgram implements GPGPUProgram {\n variableNames = ['real', 'imag'];\n outputShape: number[];\n userCode: string;\n\n constructor(op: string, inputShape: [number, number], inverse: boolean) {\n const innerDim = inputShape[1];\n this.outputShape = inputShape;\n\n const exponentMultiplierSnippet =\n inverse ? `2.0 * ${Math.PI}` : `-2.0 * ${Math.PI}`;\n const resultDenominator = inverse ? 
`${innerDim}.0` : '1.0';\n\n this.userCode = `\n const float exponentMultiplier = ${exponentMultiplierSnippet};\n\n float unaryOpComplex(float real, float expR, float imag, float expI) {\n ${op}\n }\n\n float mulMatDFT(int batch, int index) {\n float indexRatio = float(index) / float(${innerDim});\n float exponentMultiplierTimesIndexRatio =\n exponentMultiplier * indexRatio;\n\n float result = 0.0;\n\n for (int i = 0; i < ${innerDim}; i++) {\n // x = (-2|2 * PI / N) * index * i;\n float x = exponentMultiplierTimesIndexRatio * float(i);\n float expR = cos(x);\n float expI = sin(x);\n float real = getReal(batch, i);\n float imag = getImag(batch, i);\n\n result +=\n unaryOpComplex(real, expR, imag, expI) / ${resultDenominator};\n }\n\n return result;\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n setOutput(mulMatDFT(coords[0], coords[1]));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class FromPixelsProgram implements GPGPUProgram {\n variableNames = ['A'];\n userCode: string;\n outputShape: number[];\n\n constructor(outputShape: number[]) {\n const [height, width, ] = outputShape;\n this.outputShape = outputShape;\n this.userCode = `\n void main() {\n ivec3 coords = getOutputCoords();\n int texR = coords[0];\n int texC = coords[1];\n int depth = coords[2];\n vec2 uv = (vec2(texC, texR) + halfCR) / vec2(${width}.0, ${height}.0);\n\n vec4 values = texture2D(A, uv);\n float value;\n if (depth == 0) {\n value = values.r;\n } else if (depth == 1) {\n value = values.g;\n } else if (depth == 2) {\n value = values.b;\n } else if (depth == 3) {\n value = values.a;\n }\n\n setOutput(floor(value * 255.0 + 0.5));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class GatherProgram implements GPGPUProgram {\n variableNames = ['A', 'indices'];\n outputShape: number[];\n userCode: string;\n rank: number;\n\n constructor(aShape: number[], indicesLength: number, axis: number) {\n const outputShape: number[] = aShape.slice();\n outputShape[axis] = indicesLength;\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape, axis);\n\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\n\nfunction getSourceCoords(aShape: number[], axis: number): string {\n const rank = aShape.length;\n if (rank > 4) {\n throw Error(`Gather for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `int(getIndices(resRC))`;\n }\n\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n if (i === axis) {\n sourceCoords.push(`int(getIndices(${currentCoords[i]}))`);\n } else {\n sourceCoords.push(`${currentCoords[i]}`);\n }\n }\n return sourceCoords.join();\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class GatherNDProgram implements GPGPUProgram {\n variableNames = ['x', 'indices'];\n outputShape: number[];\n userCode: string;\n constructor(\n private sliceDim: number, private strides: number[], shape: number[]) {\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n const strideString = this.sliceDim > 1 ? 
'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${this.strides});\n void main() {\n ${dtype} coords = getOutputCoords();\n int flattenIndex = 0;\n for (int j = 0; j < ${this.sliceDim}; j++) {\n int index = round(getIndices(coords[0], j));\n flattenIndex += index * ${strideString};\n }\n setOutput(getX(flattenIndex, coords[1]));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor} from '../../tensor';\nimport {DataType, DataTypeMap} from '../../types';\nimport * as util from '../../util';\n\nexport enum TextureUsage {\n RENDER,\n UPLOAD,\n PIXELS,\n DOWNLOAD\n}\n\nexport enum PhysicalTextureType {\n UNPACKED_FLOAT16,\n UNPACKED_FLOAT32,\n PACKED_4X1_UNSIGNED_BYTE,\n PACKED_2X2_FLOAT32,\n PACKED_2X2_FLOAT16\n}\n\nexport interface TextureData {\n texture: WebGLTexture;\n // For complex numbers, the real and imaginary parts are stored as their own\n // individual tensors, with a parent joining the two with the\n // complexTensors field. When this is defined, texture will be null.\n complexTensors?: {real: Tensor, imag: Tensor};\n\n shape: number[];\n /** [rows, columns] shape of the texture. 
*/\n texShape: [number, number];\n dtype: DataType;\n values: DataTypeMap[DataType];\n usage: TextureUsage;\n isPacked: boolean;\n}\n\nexport function getUnpackedMatrixTextureShapeWidthHeight(\n rows: number, columns: number): [number, number] {\n return [columns, rows];\n}\n\nexport function getUnpackedArraySizeFromMatrixSize(\n matrixSize: number, channelsPerTexture: number): number {\n return matrixSize * channelsPerTexture;\n}\n\nexport function getColorMatrixTextureShapeWidthHeight(\n rows: number, columns: number): [number, number] {\n return [columns * 4, rows];\n}\n\nexport function getMatrixSizeFromUnpackedArraySize(\n unpackedSize: number, channelsPerTexture: number): number {\n if (unpackedSize % channelsPerTexture !== 0) {\n throw new Error(\n `unpackedSize (${unpackedSize}) must be a multiple of ` +\n `${channelsPerTexture}`);\n }\n return unpackedSize / channelsPerTexture;\n}\n\nexport type TypedArray = Float32Array|Uint8Array;\n\nexport function encodeMatrixToUnpackedArray(\n matrix: TypedArray, unpackedArray: TypedArray, channelsPerTexture: number) {\n const requiredSize =\n getUnpackedArraySizeFromMatrixSize(matrix.length, channelsPerTexture);\n if (unpackedArray.length < requiredSize) {\n throw new Error(\n `unpackedArray length (${unpackedArray.length}) must be >= ` +\n `${requiredSize}`);\n }\n let dst = 0;\n for (let src = 0; src < matrix.length; ++src) {\n unpackedArray[dst] = matrix[src];\n dst += channelsPerTexture;\n }\n}\n\nexport function decodeMatrixFromUnpackedArray(\n unpackedArray: Float32Array, matrix: Float32Array,\n channelsPerTexture: number) {\n const requiredSize = getMatrixSizeFromUnpackedArraySize(\n unpackedArray.length, channelsPerTexture);\n if (matrix.length < requiredSize) {\n throw new Error(\n `matrix length (${matrix.length}) must be >= ${requiredSize}`);\n }\n let dst = 0;\n for (let src = 0; src < unpackedArray.length; src += channelsPerTexture) {\n matrix[dst++] = unpackedArray[src];\n }\n}\n\nexport function decodeMatrixFromUnpackedColorRGBAArray(\n unpackedArray: Float32Array, matrix: Float32Array, channels: number) {\n const requiredSize = unpackedArray.length * channels / 4;\n if (matrix.length < requiredSize) {\n throw new Error(\n `matrix length (${matrix.length}) must be >= ${requiredSize}`);\n }\n let dst = 0;\n for (let src = 0; src < unpackedArray.length; src += 4) {\n for (let c = 0; c < channels; c++) {\n matrix[dst++] = unpackedArray[src + c];\n }\n }\n}\n\nexport function getPackedMatrixTextureShapeWidthHeight(\n rows: number, columns: number): [number, number] {\n return [Math.ceil(columns / 2), Math.ceil(rows / 2)];\n}\n\nexport function getPackedRGBAArraySizeFromMatrixShape(\n rows: number, columns: number): number {\n const [w, h] = getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return w * h * 4;\n}\n\n/*\nThis is how encodeMatrixToPackedRGBA encodes a tensor with shape = [2, 3, 5]\n(indices are [batch, row, col]).\n\n000|001 002|003 004|xxx 020|021 022|023 024|xxx\n------- ------- ------- ------- ------- -------\n010|011 012|013 014|xxx xxx|xxx xxx|xxx xxx|xxx\n\n100|101 102|103 104|xxx 120|121 122|123 124|xxx\n------- ------- ------- ------- ------- -------\n110|111 112|113 114|xxx xxx|xxx xxx|xxx xxx|xxx\n\nSingle texels contain only values from the same batch, and from adjacent rows\nand columns.\n\nNote the batch dimension is needed so xxx's are inserted below 020, 021, 022,\n023, and 024.\n */\n\nexport function encodeMatrixToPackedRGBA(\n matrix: Float32Array, batches: number, rows: number, columns: 
number,\n packedRGBA: Float32Array) {\n const requiredSize = getPackedRGBAArraySizeFromMatrixShape(rows, columns);\n if (packedRGBA.length < requiredSize) {\n throw new Error(`packedRGBA length (${packedRGBA.length}) must be >=\n ${requiredSize}`);\n }\n\n const oddWidth = (columns % 2) === 1;\n const oddHeight = (rows % 2) === 1;\n const widthInFullBlocks = Math.floor(columns / 2);\n const heightInFullBlocks = Math.floor(rows / 2);\n\n const texelsPerRow = Math.ceil(columns / 2);\n const texelsPerBatch = texelsPerRow * Math.ceil(rows / 2);\n\n const flattenedMatrixSize =\n util.nearestLargerEven(rows) * util.nearestLargerEven(columns);\n\n for (let batch = 0; batch < batches; batch++) {\n const sourceOffset = batch * rows * columns;\n const batchOffset = batch * flattenedMatrixSize;\n\n // loop over full 2x2 blocks\n {\n const dstStride = (oddWidth ? 4 : 0);\n const oneRow = columns;\n let dst = batchOffset;\n for (let blockY = 0; blockY < heightInFullBlocks; ++blockY) {\n const matrixSrcRow = (blockY * 2 * columns);\n for (let blockX = 0; blockX < widthInFullBlocks; ++blockX) {\n const matrixSrcCol = blockX * 2;\n const src = sourceOffset + matrixSrcRow + matrixSrcCol;\n packedRGBA[dst] = matrix[src];\n packedRGBA[dst + 1] = matrix[src + 1];\n packedRGBA[dst + 2] = matrix[src + oneRow];\n packedRGBA[dst + 3] = matrix[src + oneRow + 1];\n dst += 4;\n }\n dst += dstStride;\n }\n }\n\n // loop down final odd column\n if (oddWidth) {\n let src = sourceOffset + columns - 1;\n let dst = batchOffset + (texelsPerRow - 1) * 4;\n const srcStride = 2 * columns;\n const dstStride = texelsPerRow * 4;\n for (let blockY = 0; blockY < heightInFullBlocks; ++blockY) {\n packedRGBA[dst] = matrix[src];\n packedRGBA[dst + 2] = matrix[src + columns];\n src += srcStride;\n dst += dstStride;\n }\n }\n\n // loop across final row\n if (oddHeight) {\n let src = sourceOffset + (rows - 1) * columns;\n let dst = batchOffset + (texelsPerBatch - texelsPerRow) * 4;\n for (let blockX = 0; blockX < widthInFullBlocks; ++blockX) {\n packedRGBA[dst++] = matrix[src++];\n packedRGBA[dst++] = matrix[src++];\n dst += 2;\n }\n\n // fill in bottom-right texel\n if (oddWidth && oddHeight) {\n packedRGBA[batchOffset + flattenedMatrixSize - 4] = matrix[src];\n }\n }\n }\n\n return packedRGBA;\n}\n\nexport function decodeMatrixFromPackedRGBA(\n packedRGBA: Float32Array, batches: number, rows: number, columns: number,\n matrix: Float32Array): Float32Array {\n const requiredSize = rows * columns;\n if (matrix.length < requiredSize) {\n throw new Error(\n `matrix length (${matrix.length}) must be >= ${requiredSize}`);\n }\n\n const oddWidth = (columns % 2) === 1;\n const oddHeight = (rows % 2) === 1;\n const widthInFullBlocks = Math.floor(columns / 2);\n const heightInFullBlocks = Math.floor(rows / 2);\n\n const texelsPerRow = Math.ceil(columns / 2);\n const texelsPerBatch = texelsPerRow * Math.ceil(rows / 2);\n\n const flattenedMatrixSize =\n util.nearestLargerEven(rows) * util.nearestLargerEven(columns);\n\n for (let batch = 0; batch < batches; batch++) {\n const batchOffset = batch * rows * columns;\n const sourceOffset = batch * flattenedMatrixSize;\n\n // loop over full 2x2 blocks\n {\n const srcStride = oddWidth ? 4 : 0;\n const dstStride = columns + (oddWidth ? 
1 : 0);\n let src = sourceOffset;\n let dstRow1 = batchOffset;\n let dstRow2 = batchOffset + columns;\n for (let blockY = 0; blockY < heightInFullBlocks; ++blockY) {\n for (let blockX = 0; blockX < widthInFullBlocks; ++blockX) {\n matrix[dstRow1++] = packedRGBA[src++];\n matrix[dstRow1++] = packedRGBA[src++];\n matrix[dstRow2++] = packedRGBA[src++];\n matrix[dstRow2++] = packedRGBA[src++];\n }\n src += srcStride;\n dstRow1 += dstStride;\n dstRow2 += dstStride;\n }\n }\n\n // loop down final column\n if (oddWidth) {\n let src = sourceOffset + (texelsPerRow - 1) * 4;\n let dst = batchOffset + columns - 1;\n const srcStride = texelsPerRow * 4;\n const dstStride = 2 * columns;\n for (let blockY = 0; blockY < heightInFullBlocks; ++blockY) {\n matrix[dst] = packedRGBA[src];\n matrix[dst + columns] = packedRGBA[src + 2];\n src += srcStride;\n dst += dstStride;\n }\n }\n\n // loop across final row\n if (oddHeight) {\n let src = sourceOffset + (texelsPerBatch - texelsPerRow) * 4;\n let dst = batchOffset + (rows - 1) * columns;\n for (let blockX = 0; blockX < widthInFullBlocks; ++blockX) {\n matrix[dst++] = packedRGBA[src++];\n matrix[dst++] = packedRGBA[src++];\n src += 2;\n }\n\n // fill in bottom-right cell\n if (oddWidth) {\n matrix[batchOffset + (rows * columns) - 1] = packedRGBA[src];\n }\n }\n }\n\n return matrix;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../../environment';\nimport * as util from '../../util';\n\nexport function callAndCheck<T>(gl: WebGLRenderingContext, func: () => T): T {\n const returnValue = func();\n checkWebGLError(gl);\n return returnValue;\n}\n\nlet webGLDebugErrorCheckingEnabled = false;\n\nexport function enableDebugWebGLErrorChecking(enabled: boolean) {\n webGLDebugErrorCheckingEnabled = enabled;\n}\n\nexport function checkWebGLError(gl: WebGLRenderingContext) {\n if (webGLDebugErrorCheckingEnabled) {\n const error = gl.getError();\n if (error !== gl.NO_ERROR) {\n throw new Error('WebGL Error: ' + getWebGLErrorMessage(gl, error));\n }\n }\n}\n\nexport function getWebGLErrorMessage(\n gl: WebGLRenderingContext, status: number): string {\n switch (status) {\n case gl.NO_ERROR:\n return 'NO_ERROR';\n case gl.INVALID_ENUM:\n return 'INVALID_ENUM';\n case gl.INVALID_VALUE:\n return 'INVALID_VALUE';\n case gl.INVALID_OPERATION:\n return 'INVALID_OPERATION';\n case gl.INVALID_FRAMEBUFFER_OPERATION:\n return 'INVALID_FRAMEBUFFER_OPERATION';\n case gl.OUT_OF_MEMORY:\n return 'OUT_OF_MEMORY';\n case gl.CONTEXT_LOST_WEBGL:\n return 'CONTEXT_LOST_WEBGL';\n default:\n return `Unknown error code ${status}`;\n }\n}\n\nexport function getExtensionOrThrow(\n gl: WebGLRenderingContext, extensionName: string): {} {\n return throwIfNull<{}>(\n gl, () => gl.getExtension(extensionName),\n 'Extension \"' + extensionName + '\" not supported on this browser.');\n}\n\nexport function 
createVertexShader(\n gl: WebGLRenderingContext, vertexShaderSource: string): WebGLShader {\n const vertexShader: WebGLShader = throwIfNull<WebGLShader>(\n gl, () => gl.createShader(gl.VERTEX_SHADER),\n 'Unable to create vertex WebGLShader.');\n callAndCheck(gl, () => gl.shaderSource(vertexShader, vertexShaderSource));\n callAndCheck(gl, () => gl.compileShader(vertexShader));\n if (gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS) === false) {\n console.log(gl.getShaderInfoLog(vertexShader));\n throw new Error('Failed to compile vertex shader.');\n }\n return vertexShader;\n}\n\nexport function createFragmentShader(\n gl: WebGLRenderingContext, fragmentShaderSource: string): WebGLShader {\n const fragmentShader: WebGLShader = throwIfNull<WebGLShader>(\n gl, () => gl.createShader(gl.FRAGMENT_SHADER),\n 'Unable to create fragment WebGLShader.');\n callAndCheck(gl, () => gl.shaderSource(fragmentShader, fragmentShaderSource));\n callAndCheck(gl, () => gl.compileShader(fragmentShader));\n if (gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS) === false) {\n logShaderSourceAndInfoLog(\n fragmentShaderSource, gl.getShaderInfoLog(fragmentShader));\n throw new Error('Failed to compile fragment shader.');\n }\n return fragmentShader;\n}\n\nconst lineNumberRegex = /ERROR: [0-9]+:([0-9]+):/g;\nfunction logShaderSourceAndInfoLog(\n shaderSource: string, shaderInfoLog: string) {\n const lineNumberRegexResult = lineNumberRegex.exec(shaderInfoLog);\n if (lineNumberRegexResult == null) {\n console.log(`Couldn't parse line number in error: ${shaderInfoLog}`);\n console.log(shaderSource);\n return;\n }\n\n const lineNumber = +lineNumberRegexResult[1];\n\n const shaderLines = shaderSource.split('\\n');\n const pad = shaderLines.length.toString().length + 2;\n const linesWithLineNumbers = shaderLines.map(\n (line, lineNumber) =>\n util.rightPad((lineNumber + 1).toString(), pad) + line);\n let maxLineLength = 0;\n for (let i = 0; i < linesWithLineNumbers.length; i++) {\n maxLineLength = Math.max(linesWithLineNumbers[i].length, maxLineLength);\n }\n\n const beforeErrorLines = linesWithLineNumbers.slice(0, lineNumber - 1);\n const errorLine = linesWithLineNumbers.slice(lineNumber - 1, lineNumber);\n const afterErrorLines = linesWithLineNumbers.slice(lineNumber);\n\n console.log(beforeErrorLines.join('\\n'));\n console.log(shaderInfoLog.split('\\n')[0]);\n console.log(\n `%c ${util.rightPad(errorLine[0], maxLineLength)}`,\n 'border:1px solid red; background-color:#e3d2d2; color:#a61717');\n console.log(afterErrorLines.join('\\n'));\n}\n\nexport function createProgram(gl: WebGLRenderingContext): WebGLProgram {\n return throwIfNull<WebGLProgram>(\n gl, () => gl.createProgram(), 'Unable to create WebGLProgram.');\n}\n\nexport function linkProgram(gl: WebGLRenderingContext, program: WebGLProgram) {\n callAndCheck(gl, () => gl.linkProgram(program));\n if (gl.getProgramParameter(program, gl.LINK_STATUS) === false) {\n console.log(gl.getProgramInfoLog(program));\n throw new Error('Failed to link vertex and fragment shaders.');\n }\n}\n\nexport function validateProgram(\n gl: WebGLRenderingContext, program: WebGLProgram) {\n callAndCheck(gl, () => gl.validateProgram(program));\n if (gl.getProgramParameter(program, gl.VALIDATE_STATUS) === false) {\n console.log(gl.getProgramInfoLog(program));\n throw new Error('Shader program validation failed.');\n }\n}\n\nexport function createStaticVertexBuffer(\n gl: WebGLRenderingContext, data: Float32Array): WebGLBuffer {\n const buffer: WebGLBuffer = 
throwIfNull<WebGLBuffer>(\n gl, () => gl.createBuffer(), 'Unable to create WebGLBuffer');\n callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, buffer));\n callAndCheck(gl, () => gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW));\n return buffer;\n}\n\nexport function createStaticIndexBuffer(\n gl: WebGLRenderingContext, data: Uint16Array): WebGLBuffer {\n const buffer: WebGLBuffer = throwIfNull<WebGLBuffer>(\n gl, () => gl.createBuffer(), 'Unable to create WebGLBuffer');\n callAndCheck(gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer));\n callAndCheck(\n gl, () => gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW));\n return buffer;\n}\n\nexport function getNumChannels(): number {\n if (ENV.get('WEBGL_VERSION') === 2) {\n return 1;\n }\n return 4;\n}\n\nexport function createTexture(gl: WebGLRenderingContext): WebGLTexture {\n return throwIfNull<WebGLTexture>(\n gl, () => gl.createTexture(), 'Unable to create WebGLTexture.');\n}\n\nexport function validateTextureSize(width: number, height: number) {\n const maxTextureSize = ENV.get('WEBGL_MAX_TEXTURE_SIZE');\n if ((width <= 0) || (height <= 0)) {\n const requested = `[${width}x${height}]`;\n throw new Error('Requested texture size ' + requested + ' is invalid.');\n }\n if ((width > maxTextureSize) || (height > maxTextureSize)) {\n const requested = `[${width}x${height}]`;\n const max = `[${maxTextureSize}x${maxTextureSize}]`;\n throw new Error(\n 'Requested texture size ' + requested +\n ' greater than WebGL maximum on this browser / GPU ' + max + '.');\n }\n}\n\nexport function createFramebuffer(gl: WebGLRenderingContext): WebGLFramebuffer {\n return throwIfNull<WebGLFramebuffer>(\n gl, () => gl.createFramebuffer(), 'Unable to create WebGLFramebuffer.');\n}\n\nexport function bindVertexBufferToProgramAttribute(\n gl: WebGLRenderingContext, program: WebGLProgram, attribute: string,\n buffer: WebGLBuffer, arrayEntriesPerItem: number, itemStrideInBytes: number,\n itemOffsetInBytes: number): boolean {\n const loc = gl.getAttribLocation(program, attribute);\n if (loc === -1) {\n // The GPU compiler decided to strip out this attribute because it's unused,\n // thus no need to bind.\n return false;\n }\n callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, buffer));\n callAndCheck(\n gl,\n () => gl.vertexAttribPointer(\n loc, arrayEntriesPerItem, gl.FLOAT, false, itemStrideInBytes,\n itemOffsetInBytes));\n callAndCheck(gl, () => gl.enableVertexAttribArray(loc));\n return true;\n}\n\nexport function bindTextureUnit(\n gl: WebGLRenderingContext, texture: WebGLTexture, textureUnit: number) {\n validateTextureUnit(gl, textureUnit);\n callAndCheck(gl, () => gl.activeTexture(gl.TEXTURE0 + textureUnit));\n callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n}\n\nexport function unbindTextureUnit(\n gl: WebGLRenderingContext, textureUnit: number) {\n validateTextureUnit(gl, textureUnit);\n callAndCheck(gl, () => gl.activeTexture(gl.TEXTURE0 + textureUnit));\n callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\n\nexport function getProgramUniformLocationOrThrow(\n gl: WebGLRenderingContext, program: WebGLProgram,\n uniformName: string): WebGLUniformLocation {\n return throwIfNull<WebGLUniformLocation>(\n gl, () => gl.getUniformLocation(program, uniformName),\n 'uniform \"' + uniformName + '\" not present in program.');\n}\n\nexport function getProgramUniformLocation(\n gl: WebGLRenderingContext, program: WebGLProgram,\n uniformName: string): WebGLUniformLocation {\n return gl.getUniformLocation(program, 
uniformName);\n}\n\nexport function bindTextureToProgramUniformSampler(\n gl: WebGLRenderingContext, program: WebGLProgram, texture: WebGLTexture,\n uniformSamplerLocation: WebGLUniformLocation, textureUnit: number) {\n callAndCheck(gl, () => bindTextureUnit(gl, texture, textureUnit));\n callAndCheck(gl, () => gl.uniform1i(uniformSamplerLocation, textureUnit));\n}\n\nexport function bindCanvasToFramebuffer(gl: WebGLRenderingContext) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n callAndCheck(gl, () => gl.viewport(0, 0, gl.canvas.width, gl.canvas.height));\n callAndCheck(gl, () => gl.scissor(0, 0, gl.canvas.width, gl.canvas.height));\n}\n\nexport function bindColorTextureToFramebuffer(\n gl: WebGLRenderingContext, texture: WebGLTexture,\n framebuffer: WebGLFramebuffer) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer));\n callAndCheck(\n gl,\n () => gl.framebufferTexture2D(\n gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0));\n}\n\nexport function unbindColorTextureFromFramebuffer(\n gl: WebGLRenderingContext, framebuffer: WebGLFramebuffer) {\n callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer));\n callAndCheck(\n gl,\n () => gl.framebufferTexture2D(\n gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, null, 0));\n}\n\nexport function validateFramebuffer(gl: WebGLRenderingContext) {\n const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);\n if (status !== gl.FRAMEBUFFER_COMPLETE) {\n throw new Error(\n 'Error binding framebuffer: ' + getFramebufferErrorMessage(gl, status));\n }\n}\n\nexport function getFramebufferErrorMessage(\n gl: WebGLRenderingContext, status: number): string {\n switch (status) {\n case gl.FRAMEBUFFER_INCOMPLETE_ATTACHMENT:\n return 'FRAMEBUFFER_INCOMPLETE_ATTACHMENT';\n case gl.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:\n return 'FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT';\n case gl.FRAMEBUFFER_INCOMPLETE_DIMENSIONS:\n return 'FRAMEBUFFER_INCOMPLETE_DIMENSIONS';\n case gl.FRAMEBUFFER_UNSUPPORTED:\n return 'FRAMEBUFFER_UNSUPPORTED';\n default:\n return `unknown error ${status}`;\n }\n}\n\nfunction throwIfNull<T>(\n gl: WebGLRenderingContext, returnTOrNull: () => T | null,\n failureMessage: string): T {\n const tOrNull: T|null = callAndCheck(gl, () => returnTOrNull());\n if (tOrNull == null) {\n throw new Error(failureMessage);\n }\n return tOrNull as T;\n}\n\nfunction validateTextureUnit(gl: WebGLRenderingContext, textureUnit: number) {\n const maxTextureUnit = gl.MAX_COMBINED_TEXTURE_IMAGE_UNITS - 1;\n const glTextureUnit = textureUnit + gl.TEXTURE0;\n if (glTextureUnit < gl.TEXTURE0 || glTextureUnit > maxTextureUnit) {\n const textureUnitRange = `[gl.TEXTURE0, gl.TEXTURE${maxTextureUnit}]`;\n throw new Error(`textureUnit must be in ${textureUnitRange}.`);\n }\n}\n\nexport function getTextureShapeFromLogicalShape(\n logShape: number[], isPacked = false): [number, number] {\n let maxTexSize = ENV.get('WEBGL_MAX_TEXTURE_SIZE');\n if (isPacked) {\n maxTexSize = maxTexSize * 2;\n\n // This logic ensures we accurately count the number of packed texels needed\n // to accommodate the tensor. We can only pack values in the same texel if\n // they are from adjacent pairs of rows/cols within the same batch. 
So if a\n // tensor has 3 rows, we pretend it has 4 rows in order to account for the\n // fact that the texels containing the third row are half empty.\n logShape = logShape.map(\n (d, i) => i >= logShape.length - 2 ?\n util.nearestLargerEven(logShape[i]) :\n logShape[i]);\n }\n\n // If logical shape is 2, we don't squeeze, since we want to match physical.\n if (logShape.length !== 2) {\n const squeezeResult = util.squeezeShape(logShape);\n logShape = squeezeResult.newShape;\n }\n\n const size = util.sizeFromShape(logShape);\n if (logShape.length <= 1 && size <= maxTexSize) {\n return [size, 1];\n } else if (\n logShape.length === 2 && logShape[0] <= maxTexSize &&\n logShape[1] <= maxTexSize) {\n return logShape as [number, number];\n } else if (\n logShape.length === 3 && logShape[0] * logShape[1] <= maxTexSize &&\n logShape[2] <= maxTexSize) {\n return [logShape[0] * logShape[1], logShape[2]];\n } else if (\n logShape.length === 3 && logShape[0] <= maxTexSize &&\n logShape[1] * logShape[2] <= maxTexSize) {\n return [logShape[0], logShape[1] * logShape[2]];\n } else if (\n logShape.length === 4 &&\n logShape[0] * logShape[1] * logShape[2] <= maxTexSize &&\n logShape[3] <= maxTexSize) {\n return [logShape[0] * logShape[1] * logShape[2], logShape[3]];\n } else if (\n logShape.length === 4 && logShape[0] <= maxTexSize &&\n logShape[1] * logShape[2] * logShape[3] <= maxTexSize) {\n return [logShape[0], logShape[1] * logShape[2] * logShape[3]];\n } else {\n return util.sizeToSquarishShape(size);\n }\n}\n\nfunction isEven(n: number): boolean {\n return n % 2 === 0;\n}\n\n/**\n * This determines whether reshaping a packed texture requires rearranging\n * the data within the texture, assuming 2x2 packing.\n */\nexport function isReshapeFree(shape1: number[], shape2: number[]): boolean {\n shape1 = shape1.slice(-2);\n shape2 = shape2.slice(-2);\n\n if (util.arraysEqual(shape1, shape2)) {\n return true;\n }\n\n if (!shape1.length || !shape2.length) { // One of the shapes is a scalar.\n return true;\n }\n\n if (shape1[0] === 0 || shape1[1] === 0 || shape2[0] === 0 ||\n shape2[1] === 0) {\n return true;\n }\n\n if (shape1.length !== shape2.length) { // One of the shapes is a vector.\n if (util.arraysEqual(\n util.squeezeShape(shape1).newShape,\n util.squeezeShape(shape2).newShape)) {\n return true;\n }\n } else {\n if (isEven(shape1[0]) && isEven(shape2[0])) {\n if (isEven(shape1[1]) && isEven(shape2[1])) {\n return true;\n }\n if (shape1[1] === shape2[1]) {\n return true;\n }\n }\n }\n\n return false;\n}","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../../environment';\nimport * as util from '../../util';\n\nimport * as tex_util from './tex_util';\nimport * as webgl_util from './webgl_util';\n\nexport interface TextureConfig {\n internalFormatFloat: number;\n textureFormatFloat: number;\n internalFormatHalfFloat: number;\n internalFormatPackedFloat: number;\n\n // The format to use during a gl.readPixels call.\n downloadTextureFormat: number;\n // How many channels need to be unpacked after a gl.readPixels call.\n downloadUnpackNumChannels: number;\n\n defaultNumChannels: number;\n textureTypeHalfFloat: number;\n}\n\nexport function createVertexShader(gl: WebGLRenderingContext): WebGLShader {\n const vertexShaderSource = `\n precision highp float;\n attribute vec3 clipSpacePos;\n attribute vec2 uv;\n varying vec2 resultUV;\n\n void main() {\n gl_Position = vec4(clipSpacePos, 1);\n resultUV = uv;\n }`;\n return webgl_util.createVertexShader(gl, vertexShaderSource);\n}\n\nexport function createVertexBuffer(gl: WebGLRenderingContext): WebGLBuffer {\n // [x y z u v] * [upper-left, lower-left, upper-right, lower-right]\n const vertexArray = new Float32Array(\n [-1, 1, 0, 0, 1, -1, -1, 0, 0, 0, 1, 1, 0, 1, 1, 1, -1, 0, 1, 0]);\n return webgl_util.createStaticVertexBuffer(gl, vertexArray);\n}\n\nexport function createIndexBuffer(gl: WebGLRenderingContext): WebGLBuffer {\n // OpenGL (and WebGL) have \"CCW == front\" winding\n const triangleVertexIndices = new Uint16Array([0, 1, 2, 2, 1, 3]);\n return webgl_util.createStaticIndexBuffer(gl, triangleVertexIndices);\n}\n\nexport function getTextureConfig(\n // tslint:disable-next-line:no-any\n gl: WebGLRenderingContext, textureHalfFloatExtension?: any): TextureConfig {\n // tslint:disable-next-line:no-any\n const glany = gl as any;\n\n let internalFormatFloat: number;\n let internalFormatHalfFloat: number;\n let internalFormatPackedFloat: number;\n let textureFormatFloat: number;\n\n let downloadTextureFormat: number;\n let downloadUnpackNumChannels: number;\n\n let defaultNumChannels: number;\n let textureTypeHalfFloat: number;\n\n if (ENV.get('WEBGL_VERSION') === 2) {\n internalFormatFloat = glany.R32F;\n internalFormatHalfFloat = glany.R16F;\n internalFormatPackedFloat = glany.RGBA32F;\n textureFormatFloat = glany.RED;\n downloadUnpackNumChannels = 4;\n defaultNumChannels = 1;\n textureTypeHalfFloat = glany.HALF_FLOAT;\n } else {\n internalFormatFloat = gl.RGBA;\n internalFormatHalfFloat = gl.RGBA;\n internalFormatPackedFloat = glany.RGBA;\n textureFormatFloat = gl.RGBA;\n downloadUnpackNumChannels = 4;\n defaultNumChannels = 4;\n textureTypeHalfFloat = textureHalfFloatExtension != null ?\n textureHalfFloatExtension.HALF_FLOAT_OES :\n null;\n }\n downloadTextureFormat = gl.RGBA;\n\n return {\n internalFormatFloat,\n internalFormatHalfFloat,\n internalFormatPackedFloat,\n textureFormatFloat,\n 
downloadTextureFormat,\n downloadUnpackNumChannels,\n defaultNumChannels,\n textureTypeHalfFloat\n };\n}\n\nfunction createAndConfigureTexture(\n gl: WebGLRenderingContext, width: number, height: number,\n internalFormat: number, textureFormat: number,\n textureType: number): WebGLTexture {\n webgl_util.validateTextureSize(width, height);\n const texture = webgl_util.createTexture(gl);\n\n const tex2d = gl.TEXTURE_2D;\n webgl_util.callAndCheck(gl, () => gl.bindTexture(tex2d, texture));\n webgl_util.callAndCheck(\n gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(\n gl, () => gl.texParameteri(tex2d, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE));\n webgl_util.callAndCheck(\n gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MIN_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(\n gl, () => gl.texParameteri(tex2d, gl.TEXTURE_MAG_FILTER, gl.NEAREST));\n webgl_util.callAndCheck(\n gl,\n () => gl.texImage2D(\n tex2d, 0, internalFormat, width, height, 0, textureFormat,\n textureType, null));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n return texture;\n}\n\nexport function createFloat32MatrixTexture(\n gl: WebGLRenderingContext, rows: number, columns: number,\n textureConfig: TextureConfig): WebGLTexture {\n const [width, height] =\n tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(\n gl, width, height, textureConfig.internalFormatFloat,\n textureConfig.textureFormatFloat, gl.FLOAT);\n}\n\nexport function createFloat16MatrixTexture(\n gl: WebGLRenderingContext, rows: number, columns: number,\n textureConfig: TextureConfig): WebGLTexture {\n const [width, height] =\n tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(\n gl, width, height, textureConfig.internalFormatFloat,\n textureConfig.textureFormatFloat, textureConfig.textureTypeHalfFloat);\n}\n\nexport function createUnsignedBytesMatrixTexture(\n gl: WebGLRenderingContext, rows: number, columns: number,\n textureConfig: TextureConfig): WebGLTexture {\n const [width, height] =\n tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(\n gl, width, height, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE);\n}\n\nexport function createPackedMatrixTexture(\n gl: WebGLRenderingContext, rows: number, columns: number,\n textureConfig: TextureConfig): WebGLTexture {\n const [width, height] =\n tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(\n gl, width, height, textureConfig.internalFormatPackedFloat, gl.RGBA,\n gl.FLOAT);\n}\n\nexport function createFloat16PackedMatrixTexture(\n gl: WebGLRenderingContext, rows: number, columns: number,\n textureConfig: TextureConfig): WebGLTexture {\n const [width, height] =\n tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n return createAndConfigureTexture(\n gl, width, height, textureConfig.internalFormatHalfFloat, gl.RGBA,\n textureConfig.textureTypeHalfFloat);\n}\n\nexport function bindVertexProgramAttributeStreams(\n gl: WebGLRenderingContext, program: WebGLProgram,\n vertexBuffer: WebGLBuffer): boolean {\n const posOffset = 0; // x is the first buffer element\n const uvOffset = 3 * 4; // uv comes after [x y z]\n const stride = (3 * 4) + (2 * 4); // xyz + uv, each entry is 4-byte float.\n webgl_util.callAndCheck(\n gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer));\n const success = webgl_util.bindVertexBufferToProgramAttribute(\n gl, 
program, 'clipSpacePos', vertexBuffer, 3, stride, posOffset);\n return success &&\n webgl_util.bindVertexBufferToProgramAttribute(\n gl, program, 'uv', vertexBuffer, 2, stride, uvOffset);\n}\n\nexport function uploadPixelDataToTexture(\n gl: WebGLRenderingContext, texture: WebGLTexture,\n pixels: ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement) {\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n webgl_util.callAndCheck(\n gl,\n () => gl.texImage2D(\n gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, pixels));\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\n\nfunction uploadDataToTexture(\n gl: WebGLRenderingContext, texture: WebGLTexture, width: number,\n height: number, data: Float32Array, textureFormat: number) {\n webgl_util.validateTextureSize(width, height);\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));\n webgl_util.callAndCheck(\n gl,\n () => gl.texSubImage2D(\n gl.TEXTURE_2D, 0, 0, 0, width, height, textureFormat, gl.FLOAT,\n data));\n\n webgl_util.callAndCheck(gl, () => gl.bindTexture(gl.TEXTURE_2D, null));\n}\n\nexport function uploadMatrixToTexture(\n gl: WebGLRenderingContext, texture: WebGLTexture, rows: number,\n columns: number, matrix: Float32Array, numChannels: number,\n textureConfig: TextureConfig) {\n const [w, h] =\n tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n\n let unpackedArray: Float32Array;\n if (textureConfig.defaultNumChannels === 1) {\n // No need to allocate a temporary array.\n unpackedArray = matrix;\n } else {\n unpackedArray =\n new Float32Array(tex_util.getUnpackedArraySizeFromMatrixSize(\n matrix.length, numChannels));\n tex_util.encodeMatrixToUnpackedArray(matrix, unpackedArray, numChannels);\n }\n\n uploadDataToTexture(\n gl, texture, w, h, unpackedArray, textureConfig.textureFormatFloat);\n}\n\nexport function uploadMatrixToPackedTexture(\n gl: WebGLRenderingContext, texture: WebGLTexture, batch: number,\n rows: number, columns: number, matrix: Float32Array,\n textureConfig: TextureConfig) {\n const [w, h] = tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n const packedRGBA = new Float32Array(\n tex_util.getPackedRGBAArraySizeFromMatrixShape(rows, columns));\n tex_util.encodeMatrixToPackedRGBA(matrix, batch, rows, columns, packedRGBA);\n uploadDataToTexture(gl, texture, w, h, packedRGBA, gl.RGBA);\n}\n\nexport function maybeCreateBufferFromOutputTexture(\n gl: WebGLRenderingContext, texture: WebGLTexture, rows: number,\n columns: number, textureConfig: TextureConfig): WebGLBuffer|WebGLTexture {\n let bufferOrTexture: WebGLBuffer|WebGLTexture = texture;\n\n if (ENV.get('WEBGL_VERSION') === 2) {\n const gl2 = gl as WebGL2RenderingContext;\n\n // Create and bind the buffer.\n const buffer = gl2.createBuffer();\n webgl_util.callAndCheck(\n gl, () => gl.bindBuffer(gl2.PIXEL_PACK_BUFFER, buffer));\n\n // Initialize the buffer to the size of the texture in bytes.\n const bytesPerFloat = 4;\n const bufferSizeBytes = bytesPerFloat *\n tex_util.getUnpackedArraySizeFromMatrixSize(\n rows * columns, textureConfig.downloadUnpackNumChannels);\n\n webgl_util.callAndCheck(\n gl,\n () => gl.bufferData(\n gl2.PIXEL_PACK_BUFFER, bufferSizeBytes, gl.STATIC_DRAW));\n\n // Enqueue a command on the GPU command queue to copy of texture into the\n // buffer.\n webgl_util.callAndCheck(\n gl, () => gl2.readPixels(0, 0, columns, rows, gl.RGBA, gl.FLOAT, 0));\n\n webgl_util.callAndCheck(\n gl, () => gl.bindBuffer(gl2.PIXEL_PACK_BUFFER, 
null));\n\n bufferOrTexture = buffer;\n }\n\n return bufferOrTexture;\n}\n\nexport function downloadFloat32MatrixFromBuffer(\n gl: WebGLRenderingContext, buffer: WebGLBuffer, rows: number,\n columns: number, textureConfig: TextureConfig): Float32Array {\n const gl2 = gl as WebGL2RenderingContext;\n\n const downloadTarget =\n new Float32Array(tex_util.getUnpackedArraySizeFromMatrixSize(\n rows * columns, textureConfig.downloadUnpackNumChannels));\n\n gl2.bindBuffer(gl.ARRAY_BUFFER, buffer);\n gl2.getBufferSubData(gl.ARRAY_BUFFER, 0, downloadTarget);\n gl2.bindBuffer(gl.ARRAY_BUFFER, null);\n\n const matrix = new Float32Array(rows * columns);\n tex_util.decodeMatrixFromUnpackedArray(\n downloadTarget as Float32Array, matrix,\n textureConfig.downloadUnpackNumChannels);\n\n return matrix;\n}\n\nexport function downloadFloat32MatrixFromOutputTexture(\n gl: WebGLRenderingContext, rows: number, columns: number,\n textureConfig: TextureConfig): Float32Array {\n const [w, h] =\n tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n\n const downloadTarget =\n new Float32Array(tex_util.getUnpackedArraySizeFromMatrixSize(\n rows * columns, textureConfig.downloadUnpackNumChannels));\n\n webgl_util.callAndCheck(\n gl,\n () => gl.readPixels(\n 0, 0, w, h, textureConfig.downloadTextureFormat, gl.FLOAT,\n downloadTarget));\n\n const matrix = new Float32Array(rows * columns);\n tex_util.decodeMatrixFromUnpackedArray(\n downloadTarget as Float32Array, matrix,\n textureConfig.downloadUnpackNumChannels);\n return matrix;\n}\n\nexport function downloadByteEncodedFloatMatrixFromOutputTexture(\n gl: WebGLRenderingContext, rows: number, columns: number,\n textureConfig: TextureConfig) {\n const [w, h] =\n tex_util.getUnpackedMatrixTextureShapeWidthHeight(rows, columns);\n\n const numChannels = 4;\n const downloadTarget = new Uint8Array(\n tex_util.getUnpackedArraySizeFromMatrixSize(rows * columns, numChannels));\n\n webgl_util.callAndCheck(\n gl,\n () => gl.readPixels(\n 0, 0, w, h, textureConfig.downloadTextureFormat, gl.UNSIGNED_BYTE,\n downloadTarget));\n\n // By wrapping the buffer in a Float32Array, we use native browser IEEE 754\n // decoding of the 4 bytes that back each 32 bit float.\n return new Float32Array(downloadTarget.buffer);\n}\n\nexport function downloadMatrixFromPackedOutputTexture(\n gl: WebGLRenderingContext, batch: number, rows: number, cols: number,\n physicalRows: number, physicalCols: number,\n textureConfig: TextureConfig): Float32Array {\n const [w, h] = tex_util.getPackedMatrixTextureShapeWidthHeight(\n physicalRows, physicalCols);\n\n const packedRGBA =\n new Float32Array(tex_util.getPackedRGBAArraySizeFromMatrixShape(\n physicalRows, physicalCols));\n webgl_util.callAndCheck(\n gl, () => gl.readPixels(0, 0, w, h, gl.RGBA, gl.FLOAT, packedRGBA));\n const matrix = new Float32Array(util.sizeFromShape([batch, rows, cols]));\n return tex_util.decodeMatrixFromPackedRGBA(\n packedRGBA, batch, rows, cols, matrix);\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {getWebGLContext} from '../../canvas_util';\nimport {ENV} from '../../environment';\nimport * as util from '../../util';\nimport * as gpgpu_util from './gpgpu_util';\nimport {TextureConfig} from './gpgpu_util';\nimport * as tex_util from './tex_util';\nimport {WebGL1DisjointQueryTimerExtension, WebGL2DisjointQueryTimerExtension} from './webgl_types';\nimport * as webgl_util from './webgl_util';\n\nexport interface FenceContext {\n query: WebGLQuery|WebGLSync;\n isFencePassed(): boolean;\n}\n\nexport class GPGPUContext {\n gl: WebGLRenderingContext;\n textureFloatExtension: {};\n textureHalfFloatExtension: {};\n colorBufferFloatExtension: {};\n colorBufferHalfFloatExtension: {};\n getBufferSubDataAsyncExtension: {};\n disjointQueryTimerExtension: WebGL2DisjointQueryTimerExtension|\n WebGL1DisjointQueryTimerExtension;\n vertexBuffer: WebGLBuffer;\n indexBuffer: WebGLBuffer;\n framebuffer: WebGLFramebuffer;\n outputTexture: WebGLTexture|null = null;\n program: WebGLProgram|null = null;\n private disposed = false;\n private autoDebugValidate = false;\n private disjoint: boolean;\n private textureConfig: TextureConfig;\n\n constructor(gl?: WebGLRenderingContext) {\n if (gl != null) {\n this.gl = gl;\n } else {\n this.gl = getWebGLContext(ENV.get('WEBGL_VERSION'));\n }\n // WebGL 2.0 enables texture floats without an extension.\n if (ENV.get('WEBGL_VERSION') === 1) {\n this.textureFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, 'OES_texture_float');\n this.colorBufferFloatExtension =\n this.gl.getExtension('WEBGL_color_buffer_float');\n\n if (!ENV.get('WEBGL_RENDER_FLOAT32_ENABLED')) {\n this.textureHalfFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, 'OES_texture_half_float');\n this.colorBufferHalfFloatExtension =\n this.gl.getExtension('EXT_color_buffer_half_float');\n }\n } else {\n this.colorBufferFloatExtension =\n webgl_util.getExtensionOrThrow(this.gl, 'EXT_color_buffer_float');\n }\n\n this.vertexBuffer = gpgpu_util.createVertexBuffer(this.gl);\n this.indexBuffer = gpgpu_util.createIndexBuffer(this.gl);\n this.framebuffer = webgl_util.createFramebuffer(this.gl);\n\n this.textureConfig =\n gpgpu_util.getTextureConfig(this.gl, this.textureHalfFloatExtension);\n }\n\n public dispose() {\n if (this.disposed) {\n return;\n }\n if (this.program != null) {\n console.warn(\n 'Disposing a GPGPUContext that still has a bound WebGLProgram.' +\n ' This is probably a resource leak, delete the program with ' +\n 'GPGPUContext.deleteProgram before disposing.');\n }\n if (this.outputTexture != null) {\n console.warn(\n 'Disposing a GPGPUContext that still has a bound output matrix ' +\n 'texture. 
This is probably a resource leak, delete the output ' +\n 'matrix texture with GPGPUContext.deleteMatrixTexture before ' +\n 'disposing.');\n }\n const gl = this.gl;\n webgl_util.callAndCheck(gl, () => gl.finish());\n webgl_util.callAndCheck(gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteFramebuffer(this.framebuffer));\n webgl_util.callAndCheck(gl, () => gl.bindBuffer(gl.ARRAY_BUFFER, null));\n webgl_util.callAndCheck(\n gl, () => gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null));\n webgl_util.callAndCheck(gl, () => gl.deleteBuffer(this.indexBuffer));\n this.disposed = true;\n }\n\n public enableAutomaticDebugValidation(enabled: boolean) {\n this.autoDebugValidate = enabled;\n webgl_util.enableDebugWebGLErrorChecking(enabled);\n }\n\n public createFloat32MatrixTexture(rows: number, columns: number):\n WebGLTexture {\n this.throwIfDisposed();\n return gpgpu_util.createFloat32MatrixTexture(\n this.gl, rows, columns, this.textureConfig);\n }\n\n public createFloat16MatrixTexture(rows: number, columns: number):\n WebGLTexture {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16MatrixTexture(\n this.gl, rows, columns, this.textureConfig);\n }\n\n public createUnsignedBytesMatrixTexture(rows: number, columns: number):\n WebGLTexture {\n this.throwIfDisposed();\n return gpgpu_util.createUnsignedBytesMatrixTexture(\n this.gl, rows, columns, this.textureConfig);\n }\n\n public uploadPixelDataToTexture(\n texture: WebGLTexture,\n pixels: ImageData|HTMLImageElement|HTMLCanvasElement) {\n this.throwIfDisposed();\n gpgpu_util.uploadPixelDataToTexture(this.gl, texture, pixels);\n }\n\n public createFloat16PackedMatrixTexture(rows: number, columns: number):\n WebGLTexture {\n this.throwIfDisposed();\n return gpgpu_util.createFloat16PackedMatrixTexture(\n this.gl, rows, columns, this.textureConfig);\n }\n\n public createPackedMatrixTexture(rows: number, columns: number):\n WebGLTexture {\n this.throwIfDisposed();\n return gpgpu_util.createPackedMatrixTexture(\n this.gl, rows, columns, this.textureConfig);\n }\n\n public deleteMatrixTexture(texture: WebGLTexture) {\n this.throwIfDisposed();\n if (this.outputTexture === texture) {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n this.outputTexture = null;\n }\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteTexture(texture));\n }\n\n public uploadMatrixToTexture(\n texture: WebGLTexture, rows: number, columns: number,\n matrix: Float32Array) {\n this.throwIfDisposed();\n const numChannels = webgl_util.getNumChannels();\n return gpgpu_util.uploadMatrixToTexture(\n this.gl, texture, rows, columns, matrix, numChannels,\n this.textureConfig);\n }\n\n public uploadMatrixToPackedTexture(\n texture: WebGLTexture, batch: number, rows: number, columns: number,\n matrix: Float32Array) {\n this.throwIfDisposed();\n return gpgpu_util.uploadMatrixToPackedTexture(\n this.gl, texture, batch, rows, columns, matrix, this.textureConfig);\n }\n\n public downloadFloat32MatrixFromOutputTexture(\n texture: WebGLTexture, rows: number, columns: number): Float32Array {\n return this.downloadMatrixDriver(\n texture,\n () => gpgpu_util.downloadFloat32MatrixFromOutputTexture(\n this.gl, rows, columns, this.textureConfig));\n }\n\n public downloadByteEncodedFloatMatrixFromOutputTexture(\n texture: WebGLTexture, rows: number, columns: number): Float32Array {\n return this.downloadMatrixDriver(\n texture,\n () => gpgpu_util.downloadByteEncodedFloatMatrixFromOutputTexture(\n this.gl, rows, columns, 
this.textureConfig));\n }\n\n public downloadFloat32MatrixFromBuffer(\n buffer: WebGLBuffer, rows: number, columns: number): Float32Array {\n return gpgpu_util.downloadFloat32MatrixFromBuffer(\n this.gl, buffer, rows, columns, this.textureConfig);\n }\n\n public maybeCreateBufferFromTexture(\n texture: WebGLTexture, rows: number, columns: number): WebGLBuffer\n |WebGLTexture {\n this.bindTextureToFrameBuffer(texture);\n const result = gpgpu_util.maybeCreateBufferFromOutputTexture(\n this.gl, texture, rows, columns, this.textureConfig);\n this.unbindTextureToFrameBuffer();\n return result;\n }\n\n public createAndWaitForFence(): Promise<void> {\n const fenceContext = this.createFence(this.gl);\n return this.pollFence(fenceContext);\n }\n\n private createFence(gl: WebGLRenderingContext): FenceContext {\n let query: WebGLQuery|WebGLSync;\n let isFencePassed: () => boolean;\n\n if (ENV.get('WEBGL_FENCE_API_ENABLED')) {\n const gl2 = gl as WebGL2RenderingContext;\n\n const sync = gl2.fenceSync(gl2.SYNC_GPU_COMMANDS_COMPLETE, 0);\n gl.flush();\n\n isFencePassed = () => {\n const status = gl2.clientWaitSync(sync, 0, 0);\n return status === gl2.ALREADY_SIGNALED ||\n status === gl2.CONDITION_SATISFIED;\n };\n\n query = sync;\n } else if (ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0) {\n query = this.beginQuery();\n this.endQuery();\n isFencePassed = () => this.isQueryAvailable(\n query, ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n } else {\n // If we have no way to fence, return true immediately. This will fire in\n // WebGL 1.0 when there is no disjoint query timer. In this case, because\n // the fence passes immediately, we'll immediately ask for a download of\n // the texture, which will cause the UI thread to hang.\n isFencePassed = () => true;\n }\n\n return {query, isFencePassed};\n }\n\n public downloadMatrixFromPackedTexture(\n texture: WebGLTexture, batch: number, rows: number, columns: number,\n physicalRows: number, physicalCols: number): Float32Array {\n return this.downloadMatrixDriver(\n texture,\n () => gpgpu_util.downloadMatrixFromPackedOutputTexture(\n this.gl, batch, rows, columns, physicalRows, physicalCols,\n this.textureConfig));\n }\n\n private vertexAttrsAreBound = false;\n\n public createProgram(fragmentShaderSource: string): WebGLProgram {\n this.throwIfDisposed();\n const gl = this.gl;\n const fragmentShader: WebGLShader =\n webgl_util.createFragmentShader(gl, fragmentShaderSource);\n const vertexShader: WebGLShader = gpgpu_util.createVertexShader(gl);\n const program: WebGLProgram = webgl_util.createProgram(gl);\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, vertexShader));\n webgl_util.callAndCheck(gl, () => gl.attachShader(program, fragmentShader));\n webgl_util.linkProgram(gl, program);\n if (this.autoDebugValidate) {\n webgl_util.validateProgram(gl, program);\n }\n if (!this.vertexAttrsAreBound) {\n this.setProgram(program);\n this.vertexAttrsAreBound = gpgpu_util.bindVertexProgramAttributeStreams(\n gl, this.program, this.vertexBuffer);\n }\n return program;\n }\n\n public deleteProgram(program: WebGLProgram) {\n this.throwIfDisposed();\n if (program === this.program) {\n this.program = null;\n }\n if (program != null) {\n webgl_util.callAndCheck(this.gl, () => this.gl.deleteProgram(program));\n }\n }\n\n public setProgram(program: WebGLProgram|null) {\n this.throwIfDisposed();\n this.program = program;\n if ((this.program != null) && this.autoDebugValidate) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n 
webgl_util.callAndCheck(this.gl, () => this.gl.useProgram(program));\n }\n\n public getUniformLocation(\n program: WebGLProgram, uniformName: string,\n shouldThrow = true): WebGLUniformLocation {\n this.throwIfDisposed();\n if (shouldThrow) {\n return webgl_util.getProgramUniformLocationOrThrow(\n this.gl, program, uniformName);\n } else {\n return webgl_util.getProgramUniformLocation(\n this.gl, program, uniformName);\n }\n }\n\n public getAttributeLocation(program: WebGLProgram, attribute: string):\n number {\n this.throwIfDisposed();\n return webgl_util.callAndCheck(\n this.gl, () => this.gl.getAttribLocation(program, attribute));\n }\n\n public getUniformLocationNoThrow(program: WebGLProgram, uniformName: string):\n WebGLUniformLocation {\n this.throwIfDisposed();\n return this.gl.getUniformLocation(program, uniformName);\n }\n\n public setInputMatrixTexture(\n inputMatrixTexture: WebGLTexture, uniformLocation: WebGLUniformLocation,\n textureUnit: number) {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n webgl_util.bindTextureToProgramUniformSampler(\n this.gl, this.program, inputMatrixTexture, uniformLocation,\n textureUnit);\n }\n\n public setOutputMatrixTexture(\n outputMatrixTexture: WebGLTexture, rows: number, columns: number) {\n this.setOutputMatrixTextureDriver(outputMatrixTexture, columns, rows);\n }\n\n public setOutputPackedMatrixTexture(\n outputPackedMatrixTexture: WebGLTexture, rows: number, columns: number) {\n this.throwIfDisposed();\n const [width, height] =\n tex_util.getPackedMatrixTextureShapeWidthHeight(rows, columns);\n this.setOutputMatrixTextureDriver(outputPackedMatrixTexture, width, height);\n }\n\n public setOutputMatrixWriteRegion(\n startRow: number, numRows: number, startColumn: number,\n numColumns: number) {\n this.setOutputMatrixWriteRegionDriver(\n startColumn, startRow, numColumns, numRows);\n }\n\n public setOutputPackedMatrixWriteRegion(\n startRow: number, numRows: number, startColumn: number,\n numColumns: number) {\n throw new Error('setOutputPackedMatrixWriteRegion not implemented.');\n }\n\n public debugValidate() {\n if (this.program != null) {\n webgl_util.validateProgram(this.gl, this.program);\n }\n webgl_util.validateFramebuffer(this.gl);\n }\n\n public executeProgram() {\n this.throwIfDisposed();\n this.throwIfNoProgram();\n const gl = this.gl;\n if (this.autoDebugValidate) {\n this.debugValidate();\n }\n webgl_util.callAndCheck(\n gl, () => gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0));\n }\n\n public blockUntilAllProgramsCompleted() {\n this.throwIfDisposed();\n webgl_util.callAndCheck(this.gl, () => this.gl.finish());\n }\n\n private getQueryTimerExtension(): WebGL1DisjointQueryTimerExtension\n |WebGL2DisjointQueryTimerExtension {\n if (this.disjointQueryTimerExtension == null) {\n this.disjointQueryTimerExtension =\n webgl_util.getExtensionOrThrow(\n this.gl,\n ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2 ?\n 'EXT_disjoint_timer_query_webgl2' :\n 'EXT_disjoint_timer_query') as\n WebGL1DisjointQueryTimerExtension |\n WebGL2DisjointQueryTimerExtension;\n }\n return this.disjointQueryTimerExtension;\n }\n\n private getQueryTimerExtensionWebGL2(): WebGL2DisjointQueryTimerExtension {\n return this.getQueryTimerExtension();\n }\n\n private getQueryTimerExtensionWebGL1(): WebGL1DisjointQueryTimerExtension {\n return this.getQueryTimerExtension() as WebGL1DisjointQueryTimerExtension;\n }\n\n beginQuery(): WebGLQuery {\n if (ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl 
as WebGL2RenderingContext;\n const ext = this.getQueryTimerExtensionWebGL2();\n\n const query = gl2.createQuery();\n gl2.beginQuery(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n const query = ext.createQueryEXT() as WebGLQuery;\n ext.beginQueryEXT(ext.TIME_ELAPSED_EXT, query);\n return query;\n }\n\n endQuery() {\n if (ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') === 2) {\n const gl2 = this.gl as WebGL2RenderingContext;\n const ext = this.getQueryTimerExtensionWebGL2();\n gl2.endQuery(ext.TIME_ELAPSED_EXT);\n return;\n }\n const ext = this.getQueryTimerExtensionWebGL1();\n ext.endQueryEXT(ext.TIME_ELAPSED_EXT);\n }\n\n public async waitForQueryAndGetTime(query: WebGLQuery): Promise<number> {\n await util.repeatedTry(\n () => this.disposed || // while testing contexts are created / disposed\n // in rapid succession, so without this check we\n // may poll for the query timer indefinitely\n this.isQueryAvailable(\n query,\n ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION')));\n return this.getQueryTime(\n query, ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION'));\n }\n\n private getQueryTime(query: WebGLQuery, queryTimerVersion: number): number {\n if (queryTimerVersion === 0) {\n return null;\n }\n\n if (queryTimerVersion === 2) {\n const gl2 = this.gl as WebGL2RenderingContext;\n\n const timeElapsedNanos = gl2.getQueryParameter(query, gl2.QUERY_RESULT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n } else {\n const ext = this.getQueryTimerExtensionWebGL1();\n\n const timeElapsedNanos =\n ext.getQueryObjectEXT(query, ext.QUERY_RESULT_EXT);\n // Return milliseconds.\n return timeElapsedNanos / 1000000;\n }\n }\n\n private isQueryAvailable(query: WebGLQuery, queryTimerVersion: number):\n boolean {\n if (queryTimerVersion === 0) {\n return true;\n }\n\n if (queryTimerVersion === 2) {\n const gl2 = this.gl as WebGL2RenderingContext;\n const ext = this.getQueryTimerExtensionWebGL2();\n\n const available =\n gl2.getQueryParameter(query, gl2.QUERY_RESULT_AVAILABLE);\n if (this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n\n return available && !this.disjoint;\n } else {\n const ext = this.getQueryTimerExtensionWebGL1();\n\n const available =\n ext.getQueryObjectEXT(query, ext.QUERY_RESULT_AVAILABLE_EXT);\n if (this.disjoint == null) {\n this.disjoint = this.gl.getParameter(ext.GPU_DISJOINT_EXT);\n }\n\n return available && !this.disjoint;\n }\n }\n\n pollFence(fenceContext: FenceContext) {\n return new Promise<void>(resolve => {\n this.addItemToPoll(() => fenceContext.isFencePassed(), () => resolve());\n });\n }\n\n private itemsToPoll: PollItem[] = [];\n\n pollItems(): void {\n // Find the last query that has finished using binary search.\n // All other queries before it are also done.\n const index = binSearchLastTrue(this.itemsToPoll.map(x => x.isDoneFn));\n for (let i = 0; i <= index; ++i) {\n const {resolveFn} = this.itemsToPoll[i];\n resolveFn();\n }\n this.itemsToPoll = this.itemsToPoll.slice(index + 1);\n }\n\n private addItemToPoll(isDoneFn: () => boolean, resolveFn: () => void) {\n this.itemsToPoll.push({isDoneFn, resolveFn});\n if (this.itemsToPoll.length > 1) {\n // We already have a running loop that polls.\n return;\n }\n // Start a new loop that polls.\n util.repeatedTry(() => {\n this.pollItems();\n // End the loop if no more items to poll.\n return this.itemsToPoll.length === 0;\n });\n }\n\n private bindTextureToFrameBuffer(texture: 
WebGLTexture) {\n this.throwIfDisposed();\n webgl_util.bindColorTextureToFramebuffer(\n this.gl, texture, this.framebuffer);\n if (this.autoDebugValidate) {\n webgl_util.validateFramebuffer(this.gl);\n }\n }\n\n private unbindTextureToFrameBuffer() {\n if (this.outputTexture != null) {\n webgl_util.bindColorTextureToFramebuffer(\n this.gl, this.outputTexture, this.framebuffer);\n if (this.autoDebugValidate) {\n webgl_util.validateFramebuffer(this.gl);\n }\n } else {\n webgl_util.unbindColorTextureFromFramebuffer(this.gl, this.framebuffer);\n }\n }\n\n private downloadMatrixDriver(\n texture: WebGLTexture,\n downloadAndDecode: () => Float32Array): Float32Array {\n this.bindTextureToFrameBuffer(texture);\n const result = downloadAndDecode();\n this.unbindTextureToFrameBuffer();\n\n return result;\n }\n\n private setOutputMatrixTextureDriver(\n outputMatrixTextureMaybePacked: WebGLTexture, width: number,\n height: number) {\n this.throwIfDisposed();\n const gl = this.gl;\n webgl_util.bindColorTextureToFramebuffer(\n gl, outputMatrixTextureMaybePacked, this.framebuffer);\n if (this.autoDebugValidate) {\n webgl_util.validateFramebuffer(gl);\n }\n this.outputTexture = outputMatrixTextureMaybePacked;\n webgl_util.callAndCheck(gl, () => gl.viewport(0, 0, width, height));\n webgl_util.callAndCheck(gl, () => gl.scissor(0, 0, width, height));\n }\n\n private setOutputMatrixWriteRegionDriver(\n x: number, y: number, width: number, height: number) {\n this.throwIfDisposed();\n webgl_util.callAndCheck(\n this.gl, () => this.gl.scissor(x, y, width, height));\n }\n\n private throwIfDisposed() {\n if (this.disposed) {\n throw new Error('Attempted to use disposed GPGPUContext.');\n }\n }\n\n private throwIfNoProgram() {\n if (this.program == null) {\n throw new Error('No GPU program is currently set.');\n }\n }\n}\n\ntype PollItem = {\n isDoneFn: () => boolean,\n resolveFn: () => void\n};\n\n/**\n * Finds the index of the last true element using binary search where\n * evaluation of an entry is expensive.\n */\nexport function binSearchLastTrue(arr: Array<() => boolean>): number {\n let start = 0;\n let end = arr.length - 1;\n let best = -1;\n while (start <= end) {\n const mid = (start + end) >> 1;\n const isDone = arr[mid]();\n if (isDone) {\n best = mid;\n start = mid + 1;\n } else {\n end = mid - 1;\n }\n }\n return best;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor} from '../../tensor';\nimport {TypedArray} from '../../types';\nimport * as util from '../../util';\n\nimport {GPGPUContext} from './gpgpu_context';\nimport * as shader_compiler from './shader_compiler';\nimport {InputInfo, ShapeInfo} from './shader_compiler';\nimport {TextureData} from './tex_util';\n\nexport interface GPGPUProgram {\n variableNames: string[];\n outputShape: number[];\n userCode: string;\n usesPackedTextures?: boolean;\n supportsBroadcasting?: boolean;\n}\n\nexport interface GPGPUBinary {\n webGLProgram: WebGLProgram;\n program: GPGPUProgram;\n uniformLocations: {[name: string]: WebGLUniformLocation};\n gpgpu: GPGPUContext;\n source: string;\n inShapeInfos: ShapeInfo[];\n outShapeInfo: ShapeInfo;\n}\n\nexport interface TensorData {\n shape: number[];\n texData: TextureData;\n isUniform: boolean;\n uniformValues?: TypedArray;\n}\n\nexport function compileProgram<T extends Tensor, K extends Tensor>(\n gpgpu: GPGPUContext, program: GPGPUProgram, inputs: TensorData[],\n output: TensorData): GPGPUBinary {\n const userCode = program.userCode;\n const inputInfos: InputInfo[] = inputs.map((input, i) => {\n const shapeInfo = {\n logicalShape: input.shape,\n texShape: input.isUniform ? null : input.texData.texShape,\n isUniform: input.isUniform,\n isPacked: input.isUniform ? false : input.texData.isPacked\n };\n return {name: program.variableNames[i], shapeInfo};\n });\n const inShapeInfos = inputInfos.map(x => x.shapeInfo);\n const outShapeInfo = {\n logicalShape: output.shape,\n texShape: output.texData.texShape,\n isUniform: false,\n isPacked: output.texData.isPacked\n };\n const source = shader_compiler.makeShader(\n inputInfos, outShapeInfo, userCode, program.supportsBroadcasting === true,\n program.usesPackedTextures);\n\n const webGLProgram = gpgpu.createProgram(source);\n\n const uniformLocations: {[name: string]: WebGLUniformLocation} = {};\n for (let i = 0; i < program.variableNames.length; i++) {\n const uniformName = program.variableNames[i];\n const shouldThrow = false;\n uniformLocations[uniformName] =\n gpgpu.getUniformLocation(webGLProgram, uniformName, shouldThrow);\n }\n\n return {\n program,\n source,\n webGLProgram,\n uniformLocations,\n gpgpu,\n inShapeInfos,\n outShapeInfo\n };\n}\n\nfunction validateBinaryAndProgram(\n shapeInfos: ShapeInfo[], inputs: TensorData[]) {\n if (shapeInfos.length !== inputs.length) {\n throw Error(\n `Binary was compiled with ${shapeInfos.length} inputs, but ` +\n `was executed with ${inputs.length} inputs`);\n }\n\n shapeInfos.forEach((s, i) => {\n const shapeA = s.logicalShape;\n const input = inputs[i];\n const shapeB = input.shape;\n\n if (!util.arraysEqual(shapeA, shapeB)) {\n throw Error(\n `Binary was compiled with different shapes than ` +\n `the current args. 
Shapes ${shapeA} and ${shapeB} must match`);\n }\n // The input is uploaded as uniform.\n if (s.isUniform && input.isUniform) {\n return;\n }\n\n const texShapeA = s.texShape;\n const texShapeB = input.isUniform ? null : input.texData.texShape;\n if (!util.arraysEqual(texShapeA, texShapeB)) {\n throw Error(\n `Binary was compiled with different texture shapes than the` +\n ` current args. Shape ${texShapeA} and ${texShapeB} must match`);\n }\n });\n}\n\nexport function runProgram<T extends Tensor, K extends Tensor>(\n binary: GPGPUBinary, inputs: TensorData[], output: TensorData,\n customSetup?: (gpgpu: GPGPUContext, webGLProgram: WebGLProgram) =>\n void): void {\n validateBinaryAndProgram(binary.inShapeInfos, inputs);\n validateBinaryAndProgram([binary.outShapeInfo], [output]);\n\n const outTex = output.texData.texture;\n const outTexShape = output.texData.texShape;\n const gpgpu = binary.gpgpu;\n if (output.texData.isPacked) {\n gpgpu.setOutputPackedMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n } else {\n gpgpu.setOutputMatrixTexture(outTex, outTexShape[0], outTexShape[1]);\n }\n gpgpu.setProgram(binary.webGLProgram);\n inputs.forEach((input, i) => {\n const variableName = binary.program.variableNames[i];\n const variableUniformLocation = binary.uniformLocations[variableName];\n if (variableUniformLocation != null) {\n if (input.isUniform) {\n if (util.sizeFromShape(input.shape) === 1) {\n gpgpu.gl.uniform1f(variableUniformLocation, input.uniformValues[0]);\n } else {\n let vals = input.uniformValues;\n if (!(vals instanceof Float32Array)) {\n vals = new Float32Array(vals);\n }\n gpgpu.gl.uniform1fv(variableUniformLocation, vals);\n }\n return;\n }\n const tex = input.texData.texture;\n gpgpu.setInputMatrixTexture(tex, variableUniformLocation, i);\n }\n });\n\n if (customSetup != null) {\n customSetup(gpgpu, binary.webGLProgram);\n }\n gpgpu.executeProgram();\n}\n\nexport function makeShaderKey(\n program: GPGPUProgram, inputs: TensorData[], output: TensorData): string {\n let keyInputs = '';\n inputs.concat(output).forEach(x => {\n keyInputs += `${x.shape}_${x.isUniform ? 'uniform' : x.texData.texShape}`;\n });\n const keyUserCode = program.userCode;\n const keyBroadcast = (program.supportsBroadcasting === true).toString();\n let key = program.constructor.name;\n // Fast string concat. See https://jsperf.com/string-concatenation/14.\n key += '_' + keyBroadcast + '_' + keyInputs + '_' + keyUserCode;\n return key;\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../../ops/conv_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class Im2ColProgram implements GPGPUProgram {\n variableNames = ['A'];\n outputShape: number[];\n userCode: string;\n\n constructor(\n outputShape: number[], inputShape: number[], convInfo: Conv2DInfo) {\n this.outputShape = outputShape;\n\n const {\n filterWidth,\n inChannels,\n strideWidth,\n strideHeight,\n padInfo,\n outWidth,\n dilationWidth,\n dilationHeight\n } = convInfo;\n const {left, top} = padInfo;\n const itemsPerBlockRow = inChannels * filterWidth;\n\n this.userCode = `\n void main() {\n ivec2 rc = getOutputCoords();\n\n vec4 result = vec4(0);\n\n for(int row=0; row<=1; row++) {\n for(int col=0; col<=1; col++) {\n int blockIndex = rc.y + col;\n int pos = rc.x + row;\n\n if(blockIndex >= ${outputShape[1]} || pos >= ${\n outputShape[0]}) continue;\n\n int offsetY = int(blockIndex / (${outWidth})) * ${strideHeight} - ${\n top};\n int d0 = offsetY + ${dilationHeight} * (pos / ${itemsPerBlockRow});\n\n if(d0 >= ${inputShape[0]} || d0 < 0) continue;\n\n int offsetX = int(mod(float(blockIndex), ${outWidth}.) * ${\n strideWidth}. - ${left}.);\n int d1 = offsetX + ${dilationWidth} * (int(mod(float(pos), ${\n itemsPerBlockRow}.) / ${inChannels}.));\n\n if(d1 >= ${inputShape[1]} || d1 < 0) continue;\n\n result[row * 2 + col] = getA(d0, d1, int(mod(float(pos), ${\n inChannels}.)));\n }\n }\n\n gl_FragColor = result;\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class LRNProgram implements GPGPUProgram {\n variableNames = ['x'];\n outputShape: number[] = [];\n userCode: string;\n\n constructor(\n xShape: number[], radius: number, bias: number, alpha: number,\n beta: number) {\n const rad = radius;\n const maxD = xShape[3] - 1;\n this.outputShape = xShape;\n\n // optimize pow(bias + alpha * sum, -beta)\n // src: https://github.com/tensorflow/tensorflow/..\n // blob/26033a1644a9c4a5fbe3170ab2e864b6a4ccd4ca/..\n // tensorflow/core/kernels/mkl_lrn_op.cc#L320\n let powOperator;\n const basis = `float(${bias}) + float(${alpha}) * sum`;\n if (beta === 0.5) {\n powOperator = `inversesqrt(${basis})`;\n } else if (beta === 1.0) {\n powOperator = `1.0/(${basis})`;\n } else {\n powOperator = `exp(log(${basis}) * float(-${beta}));`;\n }\n\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n int d = coords[3];\n float x = getX(b, r, c, d);\n float sum = 0.0;\n for (int j = -${rad}; j <= ${rad}; j++) {\n int idx = d + j;\n if (idx >= 0 && idx <= ${maxD}) {\n float z = getX(b, r, c, idx);\n sum += z * z;\n }\n }\n float val = x * ${powOperator};\n setOutput(val);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class LRNGradProgram implements GPGPUProgram {\n variableNames = ['inputImage', 'outputImage', 'dy'];\n outputShape: number[] = [];\n userCode: string;\n depthRadius: number;\n bias: number;\n alpha: number;\n beta: number;\n depth: number;\n\n constructor(\n inputShape: number[], depthRadius: number, bias: number, alpha: number,\n beta: number) {\n this.outputShape = inputShape;\n this.depth = inputShape[3];\n this.depthRadius = depthRadius;\n this.bias = bias;\n this.alpha = alpha;\n this.beta = beta;\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int r = coords[1];\n int c = coords[2];\n\n float result = 0.0;\n for (int d = 0; d < ${this.depth}; ++d) {\n int depthBegin = int(max(0.0, float(d - ${depthRadius})));\n int depthEnd = int(min(float(${this.depth}),\n float(d + ${depthRadius} + 1)));\n\n const int MIN_DEPTH_BEGIN = 0;\n const int MAX_DEPTH_END = ${this.depth};\n\n float norm = 0.0;\n for (int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k) {\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd) {\n norm += getInputImage(b, r, c, k) * getInputImage(b, r, c, k);\n }\n else {\n break;\n }\n }\n\n norm = float(${alpha}) * norm + float(${bias});\n\n for(int k = MIN_DEPTH_BEGIN; k < MAX_DEPTH_END; ++k){\n if (k < depthBegin){\n continue;\n }\n else if (k >= depthBegin && k < depthEnd){\n float dyi = -2.0 * float(${alpha})\n * float(${beta})\n * getInputImage(b ,r ,c, k) * getOutputImage(b, r, c, d)\n / norm;\n if (k == d) {\n dyi += pow(norm, -1.0 * ${beta});\n }\n if (k == coords[3]) {\n dyi *= getDy(b, r, c, d);\n result += dyi;\n }\n }\n else {\n break;\n }\n }\n }\n setOutput(result);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../../ops/conv_util';\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class MaxPool2DBackpropProgram implements GPGPUProgram {\n variableNames = ['dy', 'maxPos'];\n outputShape: number[];\n userCode: string;\n\n constructor(convInfo: Conv2DInfo) {\n this.outputShape = convInfo.inShape;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n\n const lastIndex = effectiveFilterHeight * effectiveFilterWidth - 1;\n this.userCode = `\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n\n ivec2 dyRCCorner = coords.yz - pads;\n int dyRCorner = dyRCCorner.x;\n int dyCCorner = dyRCCorner.y;\n\n // Convolve dy(?, ?, d) with pos mask(:, :, d) to get dx(xR, xC, d).\n // ? = to be determined. : = across all values in that axis.\n float dotProd = 0.0;\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n float dyR = float(dyRCorner + wR) / ${strideHeight}.0;\n\n if (dyR < 0.0 || dyR >= ${convInfo.outHeight}.0 || fract(dyR) > 0.0) {\n continue;\n }\n int idyR = int(dyR);\n\n for (int wC = 0; wC < ${effectiveFilterWidth}; wC++) {\n float dyC = float(dyCCorner + wC) / ${strideWidth}.0;\n\n if (dyC < 0.0 || dyC >= ${convInfo.outWidth}.0 ||\n fract(dyC) > 0.0) {\n continue;\n }\n int idyC = int(dyC);\n\n float dyValue = getDy(b, idyR, idyC, d);\n int maxPosValue = ${lastIndex} - int(getMaxPos(b, idyR, idyC, d));\n\n // Get the current value, check it against the value from the\n // position matrix.\n int curPosValue = wR * ${effectiveFilterWidth} + wC;\n float mask = float(maxPosValue == curPosValue ? 1.0 : 0.0);\n\n dotProd += dyValue * mask;\n }\n }\n setOutput(dotProd);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class MatMulProgram implements GPGPUProgram {\n variableNames = ['matrixA', 'matrixB'];\n outputShape: number[];\n userCode: string;\n\n constructor(\n aShape: [number, number, number], bShape: [number, number, number],\n transposeA = false, transposeB = false) {\n const batchSize = aShape[0];\n const outerShapeA = transposeA ? aShape[2] : aShape[1];\n const outerShapeB = transposeB ? bShape[1] : bShape[2];\n const sharedDim = transposeA ? aShape[1] : aShape[2];\n this.outputShape = [batchSize, outerShapeA, outerShapeB];\n\n const aSnippetFromOffset = (vec4Offset: number, indexVar: string|number) =>\n transposeA ? `batch, ${indexVar} + ${vec4Offset}, aRow` :\n `batch, aRow, ${indexVar} + ${vec4Offset}`;\n const bSnippetFromOffset = (vec4Offset: number, indexVar: string|number) =>\n transposeB ? `batch, bCol, ${indexVar} + ${vec4Offset}` :\n `batch, ${indexVar} + ${vec4Offset}, bCol`;\n\n const sharedDimNearestVec4 = Math.floor(sharedDim / 4) * 4;\n const sharedDimVec4Remainder = sharedDim % 4;\n\n this.userCode = ` float dotARowBCol(int batch, int aRow, int bCol) {\n float result = 0.0;\n for (int i = 0; i < ${sharedDimNearestVec4}; i += 4) {\n vec4 a = vec4(\n getMatrixA(${aSnippetFromOffset(0, 'i')}),\n getMatrixA(${aSnippetFromOffset(1, 'i')}),\n getMatrixA(${aSnippetFromOffset(2, 'i')}),\n getMatrixA(${aSnippetFromOffset(3, 'i')})\n );\n vec4 b = vec4(\n getMatrixB(${bSnippetFromOffset(0, 'i')}),\n getMatrixB(${bSnippetFromOffset(1, 'i')}),\n getMatrixB(${bSnippetFromOffset(2, 'i')}),\n getMatrixB(${bSnippetFromOffset(3, 'i')})\n );\n\n result += dot(a, b);\n }\n\n if (${sharedDimVec4Remainder === 1}) {\n result += getMatrixA(${aSnippetFromOffset(0, sharedDimNearestVec4)}) *\n getMatrixB(${bSnippetFromOffset(0, sharedDimNearestVec4)});\n } else if (${sharedDimVec4Remainder === 2}) {\n vec2 a = vec2(\n getMatrixA(${aSnippetFromOffset(0, sharedDimNearestVec4)}),\n getMatrixA(${aSnippetFromOffset(1, sharedDimNearestVec4)})\n );\n vec2 b = vec2(\n getMatrixB(${bSnippetFromOffset(0, sharedDimNearestVec4)}),\n getMatrixB(${bSnippetFromOffset(1, sharedDimNearestVec4)})\n );\n result += dot(a, b);\n } else if (${sharedDimVec4Remainder === 3}) {\n vec3 a = vec3(\n getMatrixA(${aSnippetFromOffset(0, sharedDimNearestVec4)}),\n getMatrixA(${aSnippetFromOffset(1, sharedDimNearestVec4)}),\n getMatrixA(${aSnippetFromOffset(2, sharedDimNearestVec4)})\n );\n vec3 b = vec3(\n getMatrixB(${bSnippetFromOffset(0, sharedDimNearestVec4)}),\n getMatrixB(${bSnippetFromOffset(1, sharedDimNearestVec4)}),\n getMatrixB(${bSnippetFromOffset(2, sharedDimNearestVec4)})\n );\n result += dot(a, b);\n }\n\n return result;\n }\n\n void main() {\n ivec3 resBRC = getOutputCoords();\n setOutput(dotARowBCol(resBRC.x, resBRC.y, resBRC.z));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class MatMulPackedProgram implements GPGPUProgram {\n variableNames = ['matrixA', 'matrixB'];\n usesPackedTextures = true;\n outputShape: number[];\n userCode: string;\n\n constructor(\n aShape: [number, number], bShape: [number, number],\n outputShape: [number, number], transposeA = false, transposeB = false) {\n this.outputShape = outputShape;\n\n const sharedDim = transposeA ? aShape[0] : aShape[1];\n const sharedDimensionPacked = Math.ceil(sharedDim / 2);\n\n const aSample = transposeA ? 'i * 2, rc.x' : 'rc.x, i * 2';\n const bSample = transposeB ? 'rc.y, i * 2' : 'i * 2, rc.y';\n const aSwizzle = transposeA ? ['a.xxyy', 'a.zzww'] : ['a.xxzz', 'a.yyww'];\n const bSwizzle = transposeB ? ['b.xzxz', 'b.ywyw'] : ['b.xyxy', 'b.zwzw'];\n\n this.userCode = `\n const float sharedDimension = ${sharedDimensionPacked}.0;\n\n vec4 dot2x2ARowBCol(ivec2 rc) {\n vec4 result = vec4(0);\n for (int i = 0; i < ${sharedDimensionPacked}; i++) {\n vec4 a = getMatrixA(${aSample});\n vec4 b = getMatrixB(${bSample});\n\n result += (${aSwizzle[0]} * ${bSwizzle[0]}) + (${aSwizzle[1]} * ${\n bSwizzle[1]});\n }\n return result;\n }\n\n void main() {\n ivec2 rc = getOutputCoords();\n setOutput(dot2x2ARowBCol(rc));\n }\n `;\n }\n}","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUContext} from './gpgpu_context';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class MultinomialProgram implements GPGPUProgram {\n variableNames = ['probs'];\n outputShape: number[];\n userCode: string;\n\n // Caching uniform location for speed.\n seedLoc: WebGLUniformLocation;\n\n constructor(batchSize: number, numOutcomes: number, numSamples: number) {\n this.outputShape = [batchSize, numSamples];\n\n this.userCode = `\n uniform float seed;\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n\n float r = random(seed);\n float cdf = 0.0;\n\n for (int i = 0; i < ${numOutcomes - 1}; i++) {\n cdf += getProbs(batch, i);\n\n if (r < cdf) {\n setOutput(float(i));\n return;\n }\n }\n\n // If no other event happened, last event happened.\n setOutput(float(${numOutcomes - 1}));\n }\n `;\n }\n\n getCustomSetupFunc(seed: number) {\n return (gpgpu: GPGPUContext, webGLProgram: WebGLProgram) => {\n if (this.seedLoc == null) {\n this.seedLoc = gpgpu.getUniformLocation(webGLProgram, 'seed');\n }\n gpgpu.gl.uniform1f(this.seedLoc, seed);\n };\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class OneHotProgram implements GPGPUProgram {\n variableNames = ['indices'];\n outputShape: number[];\n userCode: string;\n\n // Caching uniform location for speed.\n seedLoc: WebGLUniformLocation;\n\n constructor(\n numIndices: number, depth: number, onValue: number, offValue: number) {\n this.outputShape = [numIndices, depth];\n\n this.userCode = `\n void main() {\n ivec2 coords = getOutputCoords();\n int index = round(getIndices(coords.x));\n setOutput(mix(float(${offValue}), float(${onValue}),\n float(index == coords.y)));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport function getVecChannels(name: string, rank: number): string[] {\n return ['x', 'y', 'z', 'w', 'u', 'v'].slice(0, rank).map(d => `${name}.${d}`);\n}\n\nexport function getChannels(name: string, rank: number): string[] {\n if (rank === 1) {\n return [name];\n }\n return getVecChannels(name, rank);\n}\n\nexport function getSourceCoords(rank: number, dims: string[]): string {\n if (rank === 1) {\n return 'rc';\n }\n\n let coords = '';\n for (let i = 0; i < rank; i++) {\n coords += dims[i];\n if (i < rank - 1) {\n coords += ',';\n }\n }\n return coords;\n}","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {getChannels} from '../packing_util';\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class PackProgram implements GPGPUProgram {\n variableNames = ['A'];\n outputShape: number[];\n userCode: string;\n\n constructor(\n outputShape:\n number[]) { // TODO(https://github.com/tensorflow/tfjs/issues/893):\n // Only input / output 3D tensors.\n this.outputShape = outputShape;\n const rank = outputShape.length;\n\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const outOfBoundsCondition =\n getOutOfBoundsCondition(rank, outputShape, channels);\n const setup = getSetup(\n rank, outputShape[outputShape.length - 1],\n outputShape[outputShape.length - 2], channels);\n const output = getOutput(outputShape, channels);\n\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n\n if(${outOfBoundsCondition}) {\n gl_FragColor = vec4(0);\n } else {\n ${setup}\n\n setOutput(vec4(${output}));\n }\n }\n `;\n }\n}\n\nfunction getSourceCoordsArr(rank: number, dims: string[]): string[] {\n const coords = [];\n\n for (let row = 0; row <= 1; row++) {\n for (let col = 0; col <= 1; col++) {\n let coord = `${row === 0 ? 'r' : 'rp1'}, ${col === 0 ? 
'c' : 'cp1'}`;\n\n for (let d = 2; d < rank; d++) {\n coord = `${dims[dims.length - 1 - d]},` + coord;\n }\n\n coords.push(coord);\n }\n }\n return coords;\n}\n\nfunction getOutOfBoundsCondition(\n rank: number, shape: number[], dims: string[]): string {\n if (rank === 1) {\n return `rc > ${shape[0]}`;\n }\n\n let cond = '';\n for (let i = rank - 2; i < rank; i++) {\n cond += `${dims[i]} >= ${shape[i]}`;\n if (i < rank - 1) {\n cond += '||';\n }\n }\n\n return cond;\n}\n\nfunction getSetup(\n rank: number, cols: number, rows: number, dims: string[]): string {\n if (rank === 1) {\n return '';\n }\n\n const innerDims = dims.slice(-2);\n\n return `\n int r = ${innerDims[0]};\n int c = ${innerDims[1]};\n int rp1 = r + 1;\n int cp1 = c + 1;\n\n bool cEdge = cp1 >= ${cols};\n bool rEdge = rp1 >= ${rows};\n `;\n}\n\nfunction getOutput(shape: number[], dims: string[]): string {\n const rank = shape.length;\n const sourceCoords = getSourceCoordsArr(rank, dims);\n if (rank === 1) {\n return `getA(rc),\n rc + 1 >= ${shape[0]} ? 0. : getA(rc + 1),\n 0, 0`;\n }\n\n return `getA(${sourceCoords[0]}),\n cEdge ? 0. : getA(${sourceCoords[1]}),\n rEdge ? 0. : getA(${sourceCoords[2]}),\n rEdge || cEdge ? 0. : getA(${sourceCoords[3]})`;\n}","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class PadProgram implements GPGPUProgram {\n variableNames = ['x'];\n outputShape: number[];\n userCode: string;\n\n constructor(\n xShape: number[], paddings: Array<[number, number]>,\n constantValue: number) {\n this.outputShape = paddings.map(\n (p, i) => p[0] /* beforePad */ + xShape[i] + p[1] /* afterPad */);\n const rank = xShape.length;\n const type = getCoordsDataType(rank);\n\n const start = paddings.map(p => p[0]).join(',');\n const end = paddings.map((p, i) => p[0] + xShape[i]).join(',');\n const unpackedCoords =\n ['coords[0]', 'coords[1]', 'coords[2]','coords[3]'].slice(0, rank);\n\n if (rank === 1) {\n this.userCode = `\n int start = ${start};\n int end = ${end};\n\n void main() {\n int outC = getOutputCoords();\n if (outC < start || outC >= end) {\n setOutput(float(${constantValue}));\n } else {\n setOutput(getX(outC - start));\n }\n }\n `;\n return;\n }\n this.userCode = `\n ${type} start = ${type}(${start});\n ${type} end = ${type}(${end});\n\n void main() {\n ${type} outC = getOutputCoords();\n if (any(lessThan(outC, start)) || any(greaterThanEqual(outC, end))) {\n setOutput(float(${constantValue}));\n } else {\n ${type} coords = outC - start;\n setOutput(getX(${unpackedCoords}));\n }\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Conv2DInfo} from '../../ops/conv_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class Pool2DProgram implements GPGPUProgram {\n variableNames = ['x'];\n outputShape: number[];\n userCode: string;\n\n constructor(\n convInfo: Conv2DInfo, poolType: 'max'|'avg', computePositions: boolean) {\n if (poolType === 'avg' && computePositions) {\n throw new Error('Cannot compute positions for average pool.');\n }\n\n const filterWidth = convInfo.filterWidth;\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n this.outputShape = convInfo.outShape;\n\n const isAvgPool = poolType === 'avg';\n\n let initializationValue = '0.0';\n if (!isAvgPool) {\n initializationValue = '-1.0 / 0.0';\n }\n\n if (computePositions) {\n const compareOp = '>=';\n\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? = to be determined\n float minMaxValue = 0.0;\n float minMaxValueFound = 0.0;\n int minMaxPosition = 0;\n float avgValue = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${effectiveFilterWidth};\n wC += ${dilationWidth}) {\n int xC = xCCorner + wC;\n\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n continue;\n }\n\n float value = getX(batch, xR, xC, d);\n\n // If a min / max value has already been found, use it. 
If not,\n // use the current value.\n float currMinMaxValue = mix(\n value, minMaxValue, minMaxValueFound);\n if (value ${compareOp} currMinMaxValue) {\n minMaxValue = value;\n minMaxValueFound = 1.0;\n minMaxPosition = wR * ${effectiveFilterWidth} + wC;\n }\n }\n }\n setOutput(float(minMaxPosition));\n }\n `;\n return;\n }\n\n const compareOp = 'max';\n\n let returnValue = `${poolType}(${poolType}(${poolType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n if (poolType === 'avg') {\n returnValue = `avgValue / count`;\n }\n\n const filterWidthNearestVec4 = Math.floor(filterWidth / 4) * 4;\n const filterWidthVec4Remainder = filterWidth % 4;\n\n const updateSnippet = `\n if (${isAvgPool}) {\n avgValue += dot(values, ones);\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n\n this.userCode = `\n const ivec2 strides = ivec2(${strideHeight}, ${strideWidth});\n const ivec2 pads = ivec2(${padTop}, ${padLeft});\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float count = 0.0;\n\n float getValue(int batch, int xR, int xC, int d) {\n if (xC < 0 || xC >= ${convInfo.inWidth}) {\n return initializationValue;\n }\n count += 1.0;\n return getX(batch, xR, xC, d);\n }\n\n void main() {\n ivec4 coords = getOutputCoords();\n int batch = coords[0];\n int d = coords[3];\n\n ivec2 xRCCorner = coords.yz * strides - pads;\n int xRCorner = xRCCorner.x;\n int xCCorner = xRCCorner.y;\n\n // max/min x(?, ?, d) to get y(yR, yC, d).\n // ? = to be determined\n vec4 minMaxValue = vec4(${initializationValue});\n float avgValue = 0.0;\n count = 0.0;\n\n for (int wR = 0; wR < ${effectiveFilterHeight};\n wR += ${dilationHeight}) {\n int xR = xRCorner + wR;\n\n if (xR < 0 || xR >= ${convInfo.inHeight}) {\n continue;\n }\n\n for (int wC = 0; wC < ${filterWidthNearestVec4}; wC += 4) {\n int xC = xCCorner + wC * ${dilationWidth};\n\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n getValue(batch, xR, xC + 3 * ${dilationWidth}, d)\n );\n\n ${updateSnippet}\n }\n\n int xC = xCCorner + ${filterWidthNearestVec4};\n if (${filterWidthVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${filterWidthVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, xR, xC, d),\n getValue(batch, xR, xC + ${dilationWidth}, d),\n getValue(batch, xR, xC + 2 * ${dilationWidth}, d),\n initializationValue\n );\n\n ${updateSnippet}\n }\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ReduceInfo} from '../../ops/reduce_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ReduceProgram implements GPGPUProgram {\n variableNames = ['x'];\n outputShape: number[];\n userCode: string;\n\n constructor(\n reduceInfo: ReduceInfo,\n reduceType: 'all'|'any'|'max'|'min'|'sum'|'prod') {\n const windowSize = reduceInfo.windowSize;\n const batchSize = reduceInfo.batchSize;\n const inSize = reduceInfo.inSize;\n const outSize = Math.ceil(inSize / windowSize);\n this.outputShape = [batchSize, outSize];\n\n let initializationValue = '0.0';\n let compareOp = ``;\n\n if (reduceType === 'prod') {\n initializationValue = '1.0';\n } else if (reduceType === 'min') {\n initializationValue = '1.0 / 0.0';\n compareOp = `min`;\n } else if (reduceType === 'max') {\n initializationValue = '-1.0 / 0.0';\n compareOp = `max`;\n }\n\n let returnValue = `${reduceType}(${reduceType}(${reduceType}(` +\n 'minMaxValue[0], minMaxValue[1]), minMaxValue[2]), minMaxValue[3])';\n\n if (reduceType === 'sum') {\n returnValue = `sumValue`;\n } else if (reduceType === 'prod') {\n returnValue = `prodValue`;\n } else if (reduceType === 'all') {\n returnValue = `allValue`;\n } else if (reduceType === 'any') {\n returnValue = `anyValue`;\n }\n\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n\n let updateSnippet = `\n if (${reduceType === 'sum'}) {\n sumValue += dot(values, ones);\n } else if (${reduceType === 'prod'}) {\n vec2 tmp = vec2(values[0], values[1]) * vec2(values[2], values[3]);\n prodValue *= tmp[0] * tmp[1];\n } else {\n minMaxValue = ${compareOp}(values, minMaxValue);\n }\n `;\n\n let vecType = `vec4`;\n\n if (reduceType === 'all') {\n initializationValue = '1.0';\n updateSnippet = `\n bool reducedAllValue = all(values);\n float floatedReducedAllValue = float(reducedAllValue);\n allValue = float(allValue >= 1.0 && floatedReducedAllValue >= 1.0);\n `;\n vecType = `bvec4`;\n } else if (reduceType === 'any') {\n initializationValue = '0.0';\n updateSnippet = `\n bool reducedAnyValue = any(values);\n float floatedReducedAnyValue = float(reducedAnyValue);\n anyValue = float(anyValue >= 1.0 || floatedReducedAnyValue >= 1.0);\n `;\n vecType = `bvec4`;\n }\n\n let checkOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n this.userCode = `\n const float initializationValue = ${initializationValue};\n const vec4 ones = vec4(1.0, 1.0, 1.0, 1.0);\n\n float getValue(int batch, int inIdx) {\n ${checkOutOfBounds}\n return getX(batch, inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = outIdx * ${windowSize};\n\n vec4 minMaxValue = vec4(${initializationValue});\n float prodValue = 1.0;\n 
float sumValue = 0.0;\n float allValue = 1.0;\n float anyValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n ${vecType} values = ${vecType}(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as util from '../../util';\nimport {GPGPUProgram} from './gpgpu_math';\nimport * as shader_util from './shader_compiler_util';\n\nexport class ReshapePackedProgram implements GPGPUProgram {\n variableNames = ['A'];\n usesPackedTextures = true;\n outputShape: number[];\n userCode: string;\n\n constructor(outputShape: [number, number, number], inputShape: [\n number, number, number\n ]) {\n this.outputShape = outputShape;\n\n let mainLoop = ``;\n for (let i = 0; i < 4; i++) {\n let thisRC = `thisRC = rc;`;\n if (i % 2 === 1) {\n thisRC += `thisRC.z += 1;`;\n }\n if (i > 1) {\n thisRC += `thisRC.y += 1;`;\n }\n\n mainLoop += `\n ${thisRC}\n ${i > 0 ? `if(thisRC.y < rows && thisRC.z < cols){` : ''}\n int flatIndex = getFlatIndex(thisRC);\n\n ivec3 inputRC = inputCoordsFromReshapedOutCoords(flatIndex);\n vec2 inputRCInnerDims = vec2(float(inputRC.y),float(inputRC.z));\n\n result[${i}] =\n getChannel(getA(inputRC.x, inputRC.y, inputRC.z), inputRCInnerDims);\n ${i > 0 ? 
'}' : ''}\n `;\n }\n\n this.userCode = `\n ${getReshapedInputCoords(inputShape)}\n ${getFlatIndex(outputShape)}\n\n void main() {\n ivec3 rc = getOutputCoords();\n\n vec4 result = vec4(0.);\n\n ivec3 thisRC;\n int rows = ${outputShape[1]};\n int cols = ${outputShape[2]};\n\n ${mainLoop}\n\n setOutput(result);\n }\n `;\n }\n}\n\nfunction getFlatIndex(shape: [number, number, number]): string {\n const dotCoordsWithStrides = shader_util.dotify(\n ['coords.x', 'coords.y', 'coords.z'],\n util.computeStrides(shape).map(d => d.toString()).concat(['1.']));\n\n return `\n int getFlatIndex(ivec3 coords) {\n return round(${dotCoordsWithStrides});\n }\n `;\n}\n\nfunction getReshapedInputCoords(shape: [number, number, number]): string {\n const coordsFromIndexSnippet =\n shader_util.getLogicalCoordinatesFromFlatIndex(['r', 'c', 'd'], shape);\n\n return `\n ivec3 inputCoordsFromReshapedOutCoords(int index) {\n ${coordsFromIndexSnippet}\n return ivec3(r, c, d);\n }\n `;\n}","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor4D} from '../../tensor';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ResizeBilinearBackpropProgram implements GPGPUProgram {\n variableNames = ['dy'];\n outputShape: number[] = [];\n userCode: string;\n\n constructor(dy: Tensor4D, x: Tensor4D, alignCorners: boolean) {\n this.outputShape = x.shape;\n const [, xHeight, xWidth, ] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n\n const effectiveXSize: [number, number] = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n\n const effectiveYSize: [number, number] = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? 
yWidth - 1 : yWidth\n ];\n\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(startRLerp - float(winHeight / 2));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(startCLerp - float(winWidth / 2));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float dxR = float(dyR) * heightScale;\n int topDxRIndex = int(floor(dxR));\n int bottomDxRIndex = int(min(ceil(dxR), ${xHeight - 1}.0));\n float dxRLerp = dxR - float(topDxRIndex);\n float inverseDxRLerp = 1.0 - dxRLerp;\n\n float dxC = float(dyC) * widthScale;\n int leftDxCIndex = int(floor(dxC));\n int rightDxCIndex = int(min(ceil(dxC), ${xWidth - 1}.0));\n float dxCLerp = dxC - float(leftDxCIndex);\n float inverseDxCLerp = 1.0 - dxCLerp;\n\n if (r == topDxRIndex && c == leftDxCIndex) {\n // topLeft\n accumulator +=\n getDy(b, dyR, dyC, d) * inverseDxRLerp * inverseDxCLerp;\n }\n\n if (r == topDxRIndex && c == rightDxCIndex) {\n // topRight\n accumulator += getDy(b, dyR, dyC, d) * inverseDxRLerp * dxCLerp;\n }\n\n if (r == bottomDxRIndex && c == leftDxCIndex) {\n // bottomLeft\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * inverseDxCLerp;\n }\n\n if (r == bottomDxRIndex && c == rightDxCIndex) {\n // bottomRight\n accumulator += getDy(b, dyR, dyC, d) * dxRLerp * dxCLerp;\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ResizeBilinearProgram implements GPGPUProgram {\n variableNames = ['A'];\n outputShape: number[] = [];\n userCode: string;\n\n constructor(\n inputShape: [number, number, number, number], newHeight: number,\n newWidth: number, alignCorners: boolean) {\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n\n const effectiveInSize: [number, number] = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n\n const effectiveOutSize: [number, number] = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the four integer indices.\n ivec2 sourceFloorRC = ivec2(sourceFracIndexRC);\n ivec2 sourceCeilRC = ivec2(\n min(inputShapeRC - 1.0, ceil(sourceFracIndexRC)));\n\n float topLeft = getA(b, sourceFloorRC.x, sourceFloorRC.y, d);\n float bottomLeft = getA(b, sourceCeilRC.x, sourceFloorRC.y, d);\n float topRight = getA(b, sourceFloorRC.x, sourceCeilRC.y, d);\n float bottomRight = getA(b, sourceCeilRC.x, sourceCeilRC.y, d);\n\n vec2 fracRC = sourceFracIndexRC - vec2(sourceFloorRC);\n\n float top = topLeft + (topRight - topLeft) * fracRC.y;\n float bottom = bottomLeft + (bottomRight - bottomLeft) * fracRC.y;\n float newValue = top + (bottom - top) * fracRC.x;\n\n setOutput(newValue);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor4D} from '../../tensor';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ResizeNearestNeigborBackpropProgram implements GPGPUProgram {\n variableNames = ['dy'];\n 
outputShape: number[] = [];\n userCode: string;\n\n constructor(dy: Tensor4D, x: Tensor4D, alignCorners: boolean) {\n this.outputShape = x.shape;\n const [, xHeight, xWidth, ] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n\n // In the backwards pass, we want to find the pixels that were generated for\n // each pixel in the input image the forward pass and add the corresponding\n // coefficient from dy to the gradient (with some interpolation).\n\n const effectiveXSize: [number, number] = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n\n const effectiveYSize: [number, number] = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n\n this.userCode = `\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n int r = coords[1];\n int c = coords[2];\n\n float accumulator = 0.0;\n\n const float heightScale = float(${heightScale});\n const float widthScale = float(${widthScale});\n\n const float invHeightScale = float(${invHeightScale});\n const float invWidthScale = float(${invWidthScale});\n\n const int winHeight = int(${winHeight});\n const int winWidth = int(${winWidth});\n\n // Compute bounds for where in dy we will look\n float startRLerp = floor(float(r) * invHeightScale);\n int startDyR = int(floor(startRLerp - float(winHeight / 2)));\n\n float startCLerp = floor(float(c) * invWidthScale);\n int startDyC = int(floor(startCLerp - float(winWidth / 2)));\n\n // Loop over dy\n for (int dyROffset = 0; dyROffset < winHeight; dyROffset++) {\n int dyR = dyROffset + startDyR;\n\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= ${yHeight}) {\n continue;\n }\n\n for (int dyCOffset = 0; dyCOffset < winWidth; dyCOffset++) {\n int dyC = dyCOffset + startDyC;\n\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= ${yWidth}) {\n continue;\n }\n\n float sourceFracRow =\n float(${effectiveXSize[0]}) *\n (float(dyR) / float(${effectiveYSize[0]}));\n\n float sourceFracCol =\n float(${effectiveXSize[1]}) *\n (float(dyC) / float(${effectiveYSize[1]}));\n\n int sourceNearestRow = int(min(\n float(int(${xHeight}) - 1),\n ${alignCorners} ? float(round(sourceFracRow)) :\n float(floor(sourceFracRow))));\n\n int sourceNearestCol = int(min(\n float(int(${xWidth}) - 1),\n ${alignCorners} ? float(round(sourceFracCol)) :\n float(floor(sourceFracCol))));\n\n if (r == sourceNearestRow && c == sourceNearestCol) {\n accumulator += getDy(b, dyR, dyC, d);\n }\n }\n }\n // End loop over dy\n\n setOutput(accumulator);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class ResizeNearestNeighborProgram implements GPGPUProgram {\n variableNames = ['A'];\n outputShape: number[] = [];\n userCode: string;\n\n constructor(\n inputShape: [number, number, number, number], newHeight: number,\n newWidth: number, alignCorners: boolean) {\n const [batch, oldHeight, oldWidth, depth] = inputShape;\n this.outputShape = [batch, newHeight, newWidth, depth];\n\n const effectiveInSize: [number, number] = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n\n const effectiveOutSize: [number, number] = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n\n // When align corners is false, we rounds the value with floor.\n const roundBase = alignCorners ? '0.5' : '0.0';\n\n this.userCode = `\n const vec2 effectiveInputOverOutputRatioRC = vec2(\n ${effectiveInSize[0] / effectiveOutSize[0]},\n ${effectiveInSize[1] / effectiveOutSize[1]});\n const vec2 inputShapeRC = vec2(${oldHeight}.0, ${oldWidth}.0);\n\n void main() {\n ivec4 coords = getOutputCoords();\n int b = coords[0];\n int d = coords[3];\n ivec2 yRC = coords.yz;\n\n // Fractional source index.\n vec2 sourceFracIndexRC = vec2(yRC) * effectiveInputOverOutputRatioRC;\n\n // Compute the coordinators of nearest neighbor point.\n ivec2 sourceNearestRC = ivec2(\n min(inputShapeRC - 1.0, floor(sourceFracIndexRC + ${roundBase})));\n\n float newValue = getA(b, sourceNearestRC.x, sourceNearestRC.y, d);\n\n setOutput(newValue);\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class ReverseProgram implements GPGPUProgram {\n variableNames = ['x'];\n outputShape: number[];\n userCode: string;\n\n constructor(xShape: number[], axis: number[]) {\n const rank = xShape.length;\n if (rank > 4) {\n throw new Error(\n `WebGL backend: Reverse of rank-${rank} tensor is not yet supported`);\n }\n this.outputShape = xShape;\n\n if (rank === 1) {\n this.userCode = `\n void main() {\n int coord = getOutputCoords();\n setOutput(getX(${xShape[0]} - coord - 1));\n }\n `;\n return;\n }\n const getInCoord = (i: number) => {\n if (axis.indexOf(i) !== -1 && xShape[i] !== 1) {\n return `${xShape[i]} - coords[${i}] - 1`;\n }\n return `coords[${i}]`;\n };\n const inCoords = xShape.map((_, i) => getInCoord(i)).join(',');\n const type = getCoordsDataType(rank);\n\n this.userCode = `\n void main() {\n ${type} coords = getOutputCoords();\n setOutput(getX(${inCoords}));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class ScatterProgram implements GPGPUProgram {\n variableNames = ['updates', 'indices', 'defaultValue'];\n outputShape: number[];\n userCode: string;\n\n constructor(\n updateSize: number, sliceDim: number, indicesRank: number,\n updatesRank: number, strides: number[], shape: number[],\n summingDupeIndex = true) {\n this.outputShape = shape;\n const stridesType = getCoordsDataType(strides.length);\n const dtype = getCoordsDataType(shape.length);\n let indicesString = '';\n if (indicesRank === 1) {\n indicesString = 'i';\n } else if (indicesRank === 2) {\n indicesString = 'i, j';\n }\n const indicesSnippet = `getIndices(${indicesString})`;\n\n let updatesString = '';\n if (updatesRank === 1) {\n updatesString = 'i';\n } else if (updatesRank === 2) {\n updatesString = 'i, coords[1]';\n }\n const updatesSnippet = `getUpdates(${updatesString})`;\n\n const strideString = sliceDim > 1 ? 
'strides[j]' : 'strides';\n this.userCode = `\n ${stridesType} strides = ${stridesType}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n float sum = 0.0;\n bool found = false;\n for (int i = 0; i < ${updateSize}; i++) {\n int flattenedIndex = 0;\n for (int j = 0; j < ${sliceDim}; j++) {\n int index = round(${indicesSnippet});\n flattenedIndex += index * ${strideString};\n }\n if (flattenedIndex == coords[0]) {\n sum += ${updatesSnippet};\n found = true;\n }\n }\n setOutput(mix(getDefaultValue(), sum, float(found)));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {SegOpInfo} from '../../ops/segment_util';\nimport {GPGPUProgram} from './gpgpu_math';\n\nexport class SegmentOpProgram implements GPGPUProgram {\n variableNames = ['x', 'segmentIds'];\n outputShape: number[];\n userCode: string;\n\n constructor(segOpInfo: SegOpInfo, segOpType: 'unsortedSegmentSum') {\n const windowSize = segOpInfo.windowSize;\n const batchSize = segOpInfo.batchSize;\n const inSize = segOpInfo.inSize;\n const numSegments = segOpInfo.numSegments;\n const outSize = numSegments * Math.ceil(inSize / windowSize);\n this.outputShape = [batchSize, outSize];\n\n const initializationValue = '0.0';\n const returnValue = `sumValue`;\n\n const windowSizeNearestVec4 = Math.floor(windowSize / 4) * 4;\n const windowSizeVec4Remainder = windowSize % 4;\n\n const updateSnippet = `\n sumValue += dot(values, filter);\n `;\n\n let checkValueOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkValueOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return initializationValue;\n }\n `;\n }\n\n let checkSegmentIdOutOfBounds = '';\n if (inSize % windowSize > 0) {\n checkSegmentIdOutOfBounds = `\n if (inIdx < 0 || inIdx >= ${inSize}) {\n return -1.0;\n }\n `;\n }\n\n this.userCode = `\n const float initializationValue = ${initializationValue};\n\n float getValue(int batch, int inIdx) {\n ${checkValueOutOfBounds}\n return getX(batch, inIdx);\n }\n\n float getSegmentIdAtIndex(int inIdx) {\n ${checkSegmentIdOutOfBounds}\n return getSegmentIds(inIdx);\n }\n\n void main() {\n ivec2 coords = getOutputCoords();\n int batch = coords[0];\n int outIdx = coords[1];\n int inOffset = int(floor(float(outIdx) / float(\n ${numSegments})) * float(${windowSize}));\n int currentSeg = int(mod(float(outIdx), float(${numSegments})));\n\n float sumValue = 0.0;\n\n for (int i = 0; i < ${windowSizeNearestVec4}; i += 4) {\n int inIdx = inOffset + i;\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n getValue(batch, inIdx + 3)\n );\n\n vec4 filter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 
1 : 0,\n int(getSegmentIdAtIndex(inIdx + 3)) == currentSeg ? 1 : 0\n );\n\n ${updateSnippet}\n }\n\n int inIdx = inOffset + ${windowSizeNearestVec4};\n if (${windowSizeVec4Remainder === 1}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n initializationValue,\n initializationValue,\n initializationValue\n );\n\n int inIdxSeg = int(getSegmentIdAtIndex(inIdx));\n\n vec4 filter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 2}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n initializationValue,\n initializationValue\n );\n\n vec4 filter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n 0,\n 0\n );\n\n ${updateSnippet}\n } else if (${windowSizeVec4Remainder === 3}) {\n vec4 values = vec4(\n getValue(batch, inIdx),\n getValue(batch, inIdx + 1),\n getValue(batch, inIdx + 2),\n initializationValue\n );\n\n vec4 filter = vec4(\n int(getSegmentIdAtIndex(inIdx)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 1)) == currentSeg ? 1 : 0,\n int(getSegmentIdAtIndex(inIdx + 2)) == currentSeg ? 1 : 0,\n 0\n );\n\n ${updateSnippet}\n }\n setOutput(${returnValue});\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class SelectProgram implements GPGPUProgram {\n variableNames = ['c', 'a', 'b'];\n outputShape: number[];\n userCode: string;\n\n constructor(cRank: number, shape: number[], rank: number) {\n this.outputShape = shape;\n\n let cCoords;\n let abCoords;\n if (rank > 4) {\n throw Error(`Where for rank ${rank} is not yet supported`);\n }\n\n if (rank === 1) {\n abCoords = `resRC`;\n cCoords = `resRC`;\n } else {\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w'];\n const cCoordVars = [];\n const abCoordVars = [];\n for (let i = 0; i < shape.length; i++) {\n abCoordVars.push(`${currentCoords[i]}`);\n if (i < cRank) {\n cCoordVars.push(`${currentCoords[i]}`);\n }\n }\n cCoords = cCoordVars.join();\n abCoords = abCoordVars.join();\n }\n\n const dtype = getCoordsDataType(rank);\n\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n float cVal = getC(${cCoords});\n if (cVal >= 1.0) {\n setOutput(getA(${abCoords}));\n } else {\n setOutput(getB(${abCoords}));\n }\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUContext} from './gpgpu_context';\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class SliceProgram implements GPGPUProgram {\n variableNames = ['source'];\n outputShape: number[];\n userCode: string;\n rank: number;\n\n // Caching uniform location for speed.\n startLoc: WebGLUniformLocation;\n\n constructor(destSize: number[]) {\n this.outputShape = destSize;\n this.rank = destSize.length;\n\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getCoords(this.rank);\n\n this.userCode = `\n uniform ${dtype} start;\n\n void main() {\n ${dtype} sourceLoc = start + getOutputCoords();\n setOutput(getSource(${sourceCoords}));\n }\n `;\n }\n\n getCustomSetupFunc(start: number[]) {\n if (start.length !== this.rank) {\n throw Error(\n `The rank (${this.rank}) of the program must match the ` +\n `length of start (${start.length})`);\n }\n return (gpgpu: GPGPUContext, webGLProgram: WebGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'start');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n if (this.rank === 1) {\n gpgpu.gl.uniform1i(this.startLoc, start[0]);\n } else if (this.rank === 2) {\n gpgpu.gl.uniform2i(this.startLoc, start[0], start[1]);\n } else if (this.rank === 3) {\n gpgpu.gl.uniform3i(this.startLoc, start[0], start[1], start[2]);\n } else if (this.rank === 4) {\n gpgpu.gl.uniform4i(\n this.startLoc, start[0], start[1], start[2], start[3]);\n } else {\n throw Error(`Slicing for rank ${this.rank} is not yet supported`);\n }\n };\n }\n}\n\nfunction getCoords(rank: number): string {\n if (rank === 1) {\n return 'sourceLoc';\n } else if (rank === 2) {\n return 'sourceLoc.x, sourceLoc.y';\n } else if (rank === 3) {\n return 'sourceLoc.x, sourceLoc.y, sourceLoc.z';\n } else if (rank === 4) {\n return 'sourceLoc.x, sourceLoc.y, sourceLoc.z, sourceLoc.w';\n } else {\n throw Error(`Slicing for rank ${rank} is not yet supported`);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class StridedSliceProgram implements GPGPUProgram {\n variableNames = ['x'];\n outputShape: number[];\n userCode: string;\n\n constructor(\n begin: number[], strides: number[], size: number[],\n shrinkAxis: number[]) {\n const shape = size.filter((v, index) => shrinkAxis.indexOf(index) === -1);\n this.outputShape = shape;\n const rank = size.length;\n const inputDtype = getCoordsDataType(size.length);\n const dtype = getCoordsDataType(shape.length);\n\n let newCoords = '';\n if (rank === 1) {\n newCoords = 'coords * strides + begin';\n } else {\n let outputAxis = 0;\n newCoords =\n size.map((_, i) => {\n if (shrinkAxis.indexOf(i) === -1) {\n outputAxis++;\n return shape.length === 1 ?\n `coords * strides[${i}] + begin[${i}]` :\n `coords[${outputAxis - 1}] * strides[${i}] + begin[${i}]`;\n } else {\n return `begin[${i}]`;\n }\n })\n .join(',');\n }\n\n this.userCode = `\n ${inputDtype} begin = ${inputDtype}(${begin});\n ${inputDtype} strides = ${inputDtype}(${strides});\n\n void main() {\n ${dtype} coords = getOutputCoords();\n setOutput(getX(${newCoords}));\n }\n `;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../../environment';\n\nimport {GPGPUContext} from './gpgpu_context';\nimport {PhysicalTextureType, TextureUsage} from './tex_util';\n\nexport class TextureManager {\n private numUsedTextures = 0;\n private numFreeTextures = 0;\n private freeTextures: {[shape: string]: WebGLTexture[]} = {};\n private logEnabled = false;\n private usedTextures: {[shape: string]: WebGLTexture[]} = {};\n\n constructor(private gpgpu: GPGPUContext) {}\n\n acquireTexture(\n shapeRC: [number, number], usage: TextureUsage,\n isPacked: boolean): WebGLTexture {\n const physicalTexType = getPhysicalFromLogicalTextureType(usage, isPacked);\n\n const shapeKey = getKeyFromTextureShape(shapeRC, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = [];\n }\n if (!(shapeKey in this.usedTextures)) {\n this.usedTextures[shapeKey] = [];\n }\n\n if (this.freeTextures[shapeKey].length > 0) {\n this.numFreeTextures--;\n this.numUsedTextures++;\n this.log();\n const newTexture = this.freeTextures[shapeKey].shift();\n this.usedTextures[shapeKey].push(newTexture);\n return newTexture;\n }\n this.numUsedTextures++;\n this.log();\n\n let newTexture: WebGLTexture;\n if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT32) {\n newTexture = this.gpgpu.createPackedMatrixTexture(shapeRC[0], shapeRC[1]);\n } else if (physicalTexType === PhysicalTextureType.PACKED_2X2_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16PackedMatrixTexture(shapeRC[0], shapeRC[1]);\n } else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT32) {\n newTexture =\n this.gpgpu.createFloat32MatrixTexture(shapeRC[0], shapeRC[1]);\n } else if (physicalTexType === PhysicalTextureType.UNPACKED_FLOAT16) {\n newTexture =\n this.gpgpu.createFloat16MatrixTexture(shapeRC[0], shapeRC[1]);\n\n } else if (\n physicalTexType === PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE) {\n newTexture =\n this.gpgpu.createUnsignedBytesMatrixTexture(shapeRC[0], shapeRC[1]);\n }\n this.usedTextures[shapeKey].push(newTexture);\n\n return newTexture;\n }\n\n releaseTexture(\n texture: WebGLTexture, shape: [number, number],\n logicalTexType: TextureUsage, isPacked: boolean): void {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n const physicalTexType =\n getPhysicalFromLogicalTextureType(logicalTexType, isPacked);\n const shapeKey = getKeyFromTextureShape(shape, physicalTexType, isPacked);\n if (!(shapeKey in this.freeTextures)) {\n this.freeTextures[shapeKey] = [];\n }\n this.freeTextures[shapeKey].push(texture);\n this.numFreeTextures++;\n this.numUsedTextures--;\n const texList = this.usedTextures[shapeKey];\n const texIndex = texList.indexOf(texture);\n if (texIndex < 0) {\n throw new Error(\n 'Cannot release a texture that was never provided by this ' +\n 'texture manager');\n }\n texList.splice(texIndex, 1);\n 
this.log();\n }\n\n private log() {\n if (!this.logEnabled) {\n return;\n }\n const total = this.numFreeTextures + this.numUsedTextures;\n console.log(\n 'Free/Used', `${this.numFreeTextures} / ${this.numUsedTextures}`,\n `(${total})`);\n }\n\n getNumUsedTextures(): number {\n return this.numUsedTextures;\n }\n\n getNumFreeTextures(): number {\n return this.numFreeTextures;\n }\n\n dispose() {\n if (this.freeTextures == null) {\n // Already disposed.\n return;\n }\n for (const texShape in this.freeTextures) {\n this.freeTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n for (const texShape in this.usedTextures) {\n this.usedTextures[texShape].forEach(tex => {\n this.gpgpu.deleteMatrixTexture(tex);\n });\n }\n this.freeTextures = null;\n this.usedTextures = null;\n this.numUsedTextures = 0;\n this.numFreeTextures = 0;\n }\n}\n\nfunction getPhysicalFromLogicalTextureType(\n logicalTexType: TextureUsage, isPacked: boolean): PhysicalTextureType {\n if (isPacked) {\n return ENV.get('WEBGL_RENDER_FLOAT32_ENABLED') ?\n PhysicalTextureType.PACKED_2X2_FLOAT32 :\n PhysicalTextureType.PACKED_2X2_FLOAT16;\n } else if (\n logicalTexType === TextureUsage.DOWNLOAD ||\n logicalTexType === TextureUsage.PIXELS) {\n return PhysicalTextureType.PACKED_4X1_UNSIGNED_BYTE;\n } else if (logicalTexType === TextureUsage.UPLOAD) {\n return PhysicalTextureType.UNPACKED_FLOAT32;\n } else if (logicalTexType === TextureUsage.RENDER) {\n return ENV.get('WEBGL_RENDER_FLOAT32_ENABLED') ?\n PhysicalTextureType.UNPACKED_FLOAT32 :\n PhysicalTextureType.UNPACKED_FLOAT16;\n }\n throw new Error(`Unknown logical texture type ${logicalTexType}`);\n}\n\nfunction getKeyFromTextureShape(\n shapeRowsCol: [number, number], physicalTexType: PhysicalTextureType,\n isPacked: boolean): string {\n return `${shapeRowsCol[0]}_${shapeRowsCol[1]}_${physicalTexType}_${isPacked}`;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class TileProgram implements GPGPUProgram {\n variableNames = ['A'];\n outputShape: number[];\n userCode: string;\n rank: number;\n\n constructor(aShape: number[], reps: number[]) {\n const outputShape: number[] = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[i] * reps[i];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const sourceCoords = getSourceCoords(aShape);\n\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${sourceCoords}));\n }\n `;\n }\n}\n\nfunction getSourceCoords(aShape: number[]): string {\n const rank = aShape.length;\n if (rank > 5) {\n throw Error(`Tile for rank ${rank} is not yet supported`);\n }\n if (rank === 1) {\n return `imod(resRC, ${aShape[0]})`;\n }\n\n const currentCoords = ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u'];\n\n const sourceCoords = [];\n for (let i = 0; i < aShape.length; i++) {\n sourceCoords.push(`imod(${currentCoords[i]}, ${aShape[i]})`);\n }\n return sourceCoords.join();\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class TransposeProgram implements GPGPUProgram {\n variableNames = ['A'];\n outputShape: number[];\n userCode: string;\n rank: number;\n\n constructor(aShape: number[], newDim: number[]) {\n const outputShape: number[] = new Array(aShape.length);\n for (let i = 0; i < outputShape.length; i++) {\n outputShape[i] = aShape[newDim[i]];\n }\n this.outputShape = outputShape;\n this.rank = outputShape.length;\n const dtype = getCoordsDataType(this.rank);\n const switched = getSwitchedCoords(newDim);\n\n this.userCode = `\n void main() {\n ${dtype} resRC = getOutputCoords();\n setOutput(getA(${switched}));\n }\n `;\n }\n}\n\nfunction getSwitchedCoords(newDim: number[]): string {\n const rank = newDim.length;\n if (rank > 6) {\n throw Error(`Transpose for rank ${rank} is not yet supported`);\n }\n const originalOrder =\n ['resRC.x', 'resRC.y', 'resRC.z', 'resRC.w', 'resRC.u', 'resRC.v'];\n const switchedCoords = new Array(rank);\n for (let i = 0; i < newDim.length; i++) {\n switchedCoords[newDim[i]] = originalOrder[i];\n }\n return switchedCoords.join();\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const ERF_P = 0.3275911;\nexport const ERF_A1 = 0.254829592;\nexport const ERF_A2 = -0.284496736;\nexport const ERF_A3 = 1.421413741;\nexport const ERF_A4 = -1.453152027;\nexport const ERF_A5 = 1.061405429;\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const SELU_SCALEALPHA = 1.7580993408473768599402175208123;\nexport const SELU_SCALE = 1.0507009873554804934193349852946;\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as erf_util from '../../ops/erf_util';\nimport * as selu_util from '../../ops/selu_util';\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {GPGPUContext} from './gpgpu_context';\n\nexport class UnaryOpProgram implements GPGPUProgram {\n variableNames = ['A'];\n userCode: string;\n outputShape: number[];\n\n // Caching uniform location for speed.\n startLoc: WebGLUniformLocation;\n\n constructor(aShape: number[], opSnippet: string) {\n this.outputShape = aShape;\n this.userCode = `\n uniform float NAN;\n float unaryOperation(float x) {\n ${opSnippet}\n }\n\n void main() {\n float x = getAAtOutCoords();\n float y = unaryOperation(x);\n\n setOutput(y);\n }\n `;\n }\n\n getCustomSetupFunc() {\n return (gpgpu: GPGPUContext, webGLProgram: WebGLProgram) => {\n if (this.startLoc == null) {\n this.startLoc = gpgpu.getUniformLocationNoThrow(webGLProgram, 'NAN');\n if (this.startLoc == null) {\n // This means the compiler has optimized and realized it doesn't need\n // the uniform.\n return;\n }\n }\n gpgpu.gl.uniform1f(this.startLoc, NaN);\n };\n }\n}\n\nconst CHECK_NAN_SNIPPET = `if (isNaN(x)) return x;`;\n\nexport const ABS = `return abs(x);`;\n\nexport const RELU = CHECK_NAN_SNIPPET + `\n return (x < 0.0) ? 0.0 : x;\n`;\n\nexport const ELU = `return (x >= 0.0) ? x : (exp(x) - 1.0);`;\n\nexport const SELU = `\n // Stable and Attracting Fixed Point (0, 1) for Normalized Weights.\n // see: https://arxiv.org/abs/1706.02515\n float scaleAlpha = ${selu_util.SELU_SCALEALPHA};\n float scale = ${selu_util.SELU_SCALE};\n return (x >= 0.0) ? scale * x : scaleAlpha * (exp(x) - 1.0);\n`;\n\nexport function STEP(alpha = 0.0) {\n return CHECK_NAN_SNIPPET + `\n return x > 0.0 ? 
1.0 : float(${alpha});\n `;\n}\n\nexport const NEG = `return -x;`;\n\nexport const CEIL = `return ceil(x);`;\n\nexport const FLOOR = `return floor(x);`;\n\nexport const SIGN = `\n if (isNaN(x)) { return 0.0; }\n return sign(x);\n`;\n\nexport const ROUND = `\n // OpenGL ES does not support round function.\n // The algorithm is based on banker's rounding.\n float base = floor(x);\n if ((x - base) < 0.5) {\n return floor(x);\n } else if ((x - base) > 0.5) {\n return ceil(x);\n } else {\n if (mod(base, 2.0) == 0.0) {\n return base;\n } else {\n return base + 1.0;\n }\n }\n`;\n\nexport const EXP = `return exp(x);`;\n\nexport const EXPM1 = `return exp(x) - 1.0;`;\n\nexport const LOG = `if (x < 0.0) return NAN;\n return log(x);`;\n\nexport const LOG1P = `return log(1.0 + x);`;\n\nexport const SQRT = `return sqrt(x);`;\n\nexport const RSQRT = `return inversesqrt(x);`;\n\nexport const SIGMOID = `return 1.0 / (1.0 + exp(-1.0 * x));`;\n\n/**\n * mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n *\n * epsilon is the difference between 1.0 and the next representable\n * float. For a single precision 32 bit float this should be 2^-23, see:\n * https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\n *\n * too_large = (x > -threshold) is value above which exp(x) may overflow\n * but softplus(x) == x is within machine epsilon\n *\n * too_small = (x < threshold) is value below which exp(x) may underflow,\n * but softplus(x) == exp(x) is within machine epsilon.\n */\nexport const SOFTPLUS = `\n float epsilon = 1.1920928955078125e-7;\n float threshold = log(epsilon) + 2.0;\n\n bool too_large = x > -threshold;\n bool too_small = x < threshold;\n\n float result;\n float exp_x = exp(x);\n\n if (too_large){\n result = x;\n }\n else if (too_small){\n result = exp_x;\n }\n else{\n result = log(exp_x + 1.0);\n }\n return result;\n`;\n\nexport const SIN = CHECK_NAN_SNIPPET + `\n return sin(x);\n`;\n\nexport const COS = CHECK_NAN_SNIPPET + `\n return cos(x);\n`;\n\nexport const TAN = `return tan(x);`;\n\nexport const ASIN = `return asin(x);`;\n\nexport const ACOS = `return acos(x);`;\n\nexport const ATAN = CHECK_NAN_SNIPPET + `\n return atan(x);\n`;\n\nexport const SINH = `\n float e2x = exp(x);\n return (e2x - 1.0 / e2x) / 2.0;\n`;\n\nexport const COSH = `\n float e2x = exp(-x);\n return (e2x + 1.0 / e2x) / 2.0;\n`;\n\nexport const TANH = `\n float e2x = exp(-2.0 * abs(x));\n return sign(x) * (1.0 - e2x) / (1.0 + e2x);\n`;\n\nexport const ASINH = `return log(x + sqrt(x * x + 1.0));`;\n\nexport const ACOSH = CHECK_NAN_SNIPPET + `\n if (x < 1.0) return NAN;\n return log(x + sqrt(x * x - 1.0));`;\n\nexport const ATANH = CHECK_NAN_SNIPPET + `\n if ((x < -1.0) || (x > 1.0)) return NAN;\n return (log(1.0 + x) - log(1.0 - x)) / 2.0;`;\n\nexport const ERF = `\n // Error function is calculated approximately with elementary function.\n // See \"Handbook of Mathematical Functions with Formulas,\n // Graphs, and Mathematical Tables\", Abramowitz and Stegun.\n float p = ${erf_util.ERF_P};\n float a1 = ${erf_util.ERF_A1};\n float a2 = ${erf_util.ERF_A2};\n float a3 = ${erf_util.ERF_A3};\n float a4 = ${erf_util.ERF_A4};\n float a5 = ${erf_util.ERF_A5};\n\n float t = 1.0 / (1.0 + p * x);\n return 1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*exp(-x*x);\n`;\n\nexport const SQUARE = `return x * x;`;\n\nexport const RECIPROCAL = `return 1.0 / x;`;\n\nexport const LOGICAL_NOT = `return float(!(x >= 1.0));`;\n\nexport const TO_INT = `return float(int(x));`;\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {getChannels, getSourceCoords} from '../packing_util';\n\nimport {GPGPUProgram} from './gpgpu_math';\nimport {getCoordsDataType} from './shader_compiler';\n\nexport class UnpackProgram implements GPGPUProgram {\n variableNames = ['A'];\n usesPackedTextures = true;\n outputShape: number[];\n userCode: string;\n\n constructor(outputShape: number[]) {\n this.outputShape = outputShape;\n const rank = outputShape.length;\n\n const channels = getChannels('rc', rank);\n const dtype = getCoordsDataType(rank);\n const sourceCoords = getSourceCoords(rank, channels);\n const innerDims = channels.slice(-2);\n const coords = rank === 1 ? 'rc' : `vec2(${innerDims.join(',')})`;\n\n this.userCode = `\n void main() {\n ${dtype} rc = getOutputCoords();\n vec4 packedInput = getA(${sourceCoords});\n\n setOutput(getChannel(packedInput, ${coords}));\n }\n `;\n }\n}","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D} from '../tensor';\nimport {convertToTensor, convertToTensorArray} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {assert, sizeFromShape} from '../util';\nimport {parseAxisParam} from './axis_util';\nimport {assertParamsConsistent, computeOutShape} from './concat_util';\nimport {op} from './operation';\nimport {tensor} from './tensor_ops';\n\n/**\n * Concatenates a list of`tf.Tensor1D`s along an axis. See `concat` for details.\n *\n * For example, if:\n * A: shape(3) = |r1, g1, b1|\n * B: shape(2) = |r2, g2|\n * C = tf.concat1d([A, B]) == |r1, g1, b1, r2, g2|\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @return The concatenated array.\n */\nfunction concat1d_(tensors: Tensor1D[]|TensorLike[]): Tensor1D {\n return concat(tensors, 0 /* axis */);\n}\n\n/**\n * Concatenates a list of`tf.Tensor2D`s along an axis. 
See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat2d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C = shape(2, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @param axis The axis to concatenate along.\n * @return The concatenated array.\n */\nfunction concat2d_(tensors: Tensor2D[]|TensorLike[], axis: number): Tensor2D {\n return concat(tensors, axis);\n}\n\n/**\n * Concatenates a list of`tf.Tensor3D`s along an axis. See `concat` for details.\n *\n * For example, if:\n * A: shape(2, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n *\n * B: shape(2, 1, 3) = | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * C = tf.concat3d([A, B], axis)\n *\n * if axis = 0:\n * C: shape(4, 1, 3) = | r1, g1, b1 |\n * | r2, g2, b2 |\n * | r3, g3, b3 |\n * | r4, g4, b4 |\n *\n * if axis = 1:\n * C: shape(2, 2, 3) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * if axis = 2:\n * C = shape(2, 1, 6) = | r1, g1, b1, r3, g3, b3 |\n * | r2, g2, b2, r4, g4, b4 |\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat3d_(tensors: Tensor3D[]|TensorLike[], axis: number): Tensor3D {\n return concat(tensors, axis);\n}\n\n/**\n * Concatenates a list of`tf.Tensor4D`s along an axis. See `concat` for details.\n *\n * @param tensors A list of`tf.Tensor`s to concatenate.\n * @param axis The axis to concate along.\n * @return The concatenated array.\n */\nfunction concat4d_(tensors: Tensor4D[]|TensorLike[], axis: number): Tensor4D {\n return concat(tensors, axis);\n}\n\n/**\n * Concatenates a list of`tf.Tensor`s along a given axis.\n *\n * The tensors ranks and types must match, and their sizes must match in all\n * dimensions except `axis`.\n *\n * Also available are stricter rank-specific methods that assert that\n * `tensors` are of the given rank:\n * - `tf.concat1d`\n * - `tf.concat2d`\n * - `tf.concat3d`\n * - `tf.concat4d`\n *\n * Except `tf.concat1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * a.concat(b).print(); // or a.concat(b)\n * ```\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.concat([a, b, c]).print();\n * ```\n *\n * ```js\n * const a = tf.tensor2d([[1, 2], [10, 20]]);\n * const b = tf.tensor2d([[3, 4], [30, 40]]);\n * const axis = 1;\n * tf.concat([a, b], axis).print();\n * ```\n * @param tensors A list of tensors to concatenate.\n * @param axis The axis to concate along. 
Defaults to 0 (the first dim).\n */\n/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */\nfunction concat_<T extends Tensor>(tensors: T[]|TensorLike[], axis = 0): T {\n assert(tensors.length >= 1, 'Pass at least one tensor to concat');\n let $tensors = convertToTensorArray(tensors, 'tensors', 'concat');\n axis = parseAxisParam(axis, $tensors[0].shape)[0];\n const outShape = computeOutShape($tensors.map(t => t.shape), axis);\n if (sizeFromShape(outShape) === 0) {\n return tensor([], outShape) as T;\n }\n // Keep only non-empty tensors (ignore tensors with 0 in their shape).\n $tensors = $tensors.filter(t => t.size > 0);\n if ($tensors.length === 1) {\n return $tensors[0];\n }\n\n const shapes = $tensors.map(t => t.shape);\n assertParamsConsistent(shapes, axis);\n const der = (dy: T) => {\n const sizeSplits = shapes.map(s => s[axis]);\n const derTensors = split(dy, sizeSplits, axis);\n return derTensors.map(t => () => t) as {};\n };\n const inputs = $tensors as {};\n return ENV.engine.runKernel(\n backend => backend.concat($tensors, axis) as T, inputs, der);\n}\n\n/**\n * Splits a`tf.Tensor` into sub tensors.\n *\n * If `numOrSizeSplits` is a number, splits `x` along dimension `axis`\n * into `numOrSizeSplits` smaller tensors.\n * Requires that `numOrSizeSplits` evenly divides `x.shape[axis]`.\n *\n * If `numOrSizeSplits` is a number array, splits `x` into\n * `(numOrSizeSplits.length` pieces. The shape of the `i`-th piece has the\n * same size as `x` except along dimension `axis` where the size is\n * `numOrSizeSplits[i]`.\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4, 5, 6, 7, 8], [2, 4]);\n * const [a, b] = tf.split(x, 2, 1);\n * a.print();\n * b.print();\n *\n * const [c, d, e] = tf.split(x, [1, 2, 1], 1);\n * c.print();\n * d.print();\n * e.print();\n * ```\n *\n * @param x The input tensor to split.\n * @param numOrSizeSplits Either an integer indicating the number of\n * splits along the axis or an array of integers containing the sizes of\n * each output tensor along the axis. If a number then it must evenly divide\n * `x.shape[axis]`; otherwise the sum of sizes must match `x.shape[axis]`.\n * @param axis The dimension along which to split. 
Defaults to 0 (the first\n * dim).\n */\n/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */\nfunction split_<T extends Tensor>(\n x: T|TensorLike, numOrSizeSplits: number[]|number, axis = 0): T[] {\n const $x = convertToTensor(x, 'x', 'split');\n\n axis = parseAxisParam(axis, $x.shape)[0];\n let splitSizes: number[];\n if (typeof (numOrSizeSplits) === 'number') {\n assert(\n $x.shape[axis] % numOrSizeSplits === 0,\n 'Number of splits must evenly divide the axis.');\n splitSizes = Array(numOrSizeSplits).fill($x.shape[axis] / numOrSizeSplits);\n } else {\n assert(\n $x.shape[axis] === numOrSizeSplits.reduce((a, b) => a + b),\n 'The sum of sizes must match the size of the axis dimension.');\n splitSizes = numOrSizeSplits;\n }\n const der = (dy: T[]) => ({$x: () => concat(dy, axis)});\n return ENV.engine.runKernel(\n backend => backend.split($x, splitSizes, axis), {$x}, der);\n}\n\nexport const concat = op({concat_});\nexport const concat1d = op({concat1d_});\nexport const concat2d = op({concat2d_});\nexport const concat3d = op({concat3d_});\nexport const concat4d = op({concat4d_});\nexport const split = op({split_});\n","// A port of an algorithm by Johannes Baagøe <baagoe@baagoe.com>, 2010\n// http://baagoe.com/en/RandomMusings/javascript/\n// https://github.com/nquinlan/better-random-numbers-for-javascript-mirror\n// Original work is under MIT license -\n\n// Copyright (C) 2010 by Johannes Baagøe <baagoe@baagoe.org>\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n// \n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n// \n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n\n\n\n(function(global, module, define) {\n\nfunction Alea(seed) {\n var me = this, mash = Mash();\n\n me.next = function() {\n var t = 2091639 * me.s0 + me.c * 2.3283064365386963e-10; // 2^-32\n me.s0 = me.s1;\n me.s1 = me.s2;\n return me.s2 = t - (me.c = t | 0);\n };\n\n // Apply the seeding algorithm from Baagoe.\n me.c = 1;\n me.s0 = mash(' ');\n me.s1 = mash(' ');\n me.s2 = mash(' ');\n me.s0 -= mash(seed);\n if (me.s0 < 0) { me.s0 += 1; }\n me.s1 -= mash(seed);\n if (me.s1 < 0) { me.s1 += 1; }\n me.s2 -= mash(seed);\n if (me.s2 < 0) { me.s2 += 1; }\n mash = null;\n}\n\nfunction copy(f, t) {\n t.c = f.c;\n t.s0 = f.s0;\n t.s1 = f.s1;\n t.s2 = f.s2;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new Alea(seed),\n state = opts && opts.state,\n prng = xg.next;\n prng.int32 = function() { return (xg.next() * 0x100000000) | 0; }\n prng.double = function() {\n return prng() + (prng() * 0x200000 | 0) * 1.1102230246251565e-16; // 2^-53\n };\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nfunction Mash() {\n var n = 0xefc8249d;\n\n var mash = function(data) {\n data = data.toString();\n for (var i = 0; i < data.length; i++) {\n n += data.charCodeAt(i);\n var h = 0.02519603282416938 * n;\n n = h >>> 0;\n h -= n;\n h *= n;\n n = h >>> 0;\n h -= n;\n n += h * 0x100000000; // 2^32\n }\n return (n >>> 0) * 2.3283064365386963e-10; // 2^-32\n };\n\n return mash;\n}\n\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.alea = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n","// A Javascript implementaion of the \"xor128\" prng algorithm by\n// George Marsaglia. 
See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n\n // Set up generator function.\n me.next = function() {\n var t = me.x ^ (me.x << 11);\n me.x = me.y;\n me.y = me.z;\n me.z = me.w;\n return me.w ^= (me.w >>> 19) ^ t ^ (t >>> 8);\n };\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor128 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n","// A Javascript implementaion of the \"xorwow\" prng algorithm by\n// George Marsaglia. See http://www.jstatsoft.org/v08/i14/paper\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var t = (me.x ^ (me.x >>> 2));\n me.x = me.y; me.y = me.z; me.z = me.w; me.w = me.v;\n return (me.d = (me.d + 362437 | 0)) +\n (me.v = (me.v ^ (me.v << 4)) ^ (t ^ (t << 1))) | 0;\n };\n\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n me.v = 0;\n\n if (seed === (seed | 0)) {\n // Integer seed.\n me.x = seed;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n if (k == strseed.length) {\n me.d = me.x << 10 ^ me.x >>> 4;\n }\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.x = f.x;\n t.y = f.y;\n t.z = f.z;\n t.w = f.w;\n t.v = f.v;\n t.d = f.d;\n return t;\n}\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorwow = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n","// A Javascript implementaion of the \"xorshift7\" 
algorithm by\n// François Panneton and Pierre L'ecuyer:\n// \"On the Xorgshift Random Number Generators\"\n// http://saluc.engr.uconn.edu/refs/crypto/rng/panneton05onthexorshift.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n // Update xor generator.\n var X = me.x, i = me.i, t, v, w;\n t = X[i]; t ^= (t >>> 7); v = t ^ (t << 24);\n t = X[(i + 1) & 7]; v ^= t ^ (t >>> 10);\n t = X[(i + 3) & 7]; v ^= t ^ (t >>> 3);\n t = X[(i + 4) & 7]; v ^= t ^ (t << 7);\n t = X[(i + 7) & 7]; t = t ^ (t << 13); v ^= t ^ (t << 9);\n X[i] = v;\n me.i = (i + 1) & 7;\n return v;\n };\n\n function init(me, seed) {\n var j, w, X = [];\n\n if (seed === (seed | 0)) {\n // Seed state array using a 32-bit integer.\n w = X[0] = seed;\n } else {\n // Seed state using a string.\n seed = '' + seed;\n for (j = 0; j < seed.length; ++j) {\n X[j & 7] = (X[j & 7] << 15) ^\n (seed.charCodeAt(j) + X[(j + 1) & 7] << 13);\n }\n }\n // Enforce an array length of 8, not all zeroes.\n while (X.length < 8) X.push(0);\n for (j = 0; j < 8 && X[j] === 0; ++j);\n if (j == 8) w = X[7] = -1; else w = X[j];\n\n me.x = X;\n me.i = 0;\n\n // Discard an initial 256 values.\n for (j = 256; j > 0; --j) {\n me.next();\n }\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.x = f.x.slice();\n t.i = f.i;\n return t;\n}\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.x) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xorshift7 = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n","// A Javascript implementaion of Richard Brent's Xorgens xor4096 algorithm.\n//\n// This fast non-cryptographic random number generator is designed for\n// use in Monte-Carlo algorithms. It combines a long-period xorshift\n// generator with a Weyl generator, and it passes all common batteries\n// of stasticial tests for randomness while consuming only a few nanoseconds\n// for each prng generated. For background on the generator, see Brent's\n// paper: \"Some long-period random number generators using shifts and xors.\"\n// http://arxiv.org/pdf/1004.3115v1.pdf\n//\n// Usage:\n//\n// var xor4096 = require('xor4096');\n// random = xor4096(1); // Seed with int32 or string.\n// assert.equal(random(), 0.1520436450538547); // (0, 1) range, 53 bits.\n// assert.equal(random.int32(), 1806534897); // signed int32, 32 bits.\n//\n// For nonzero numeric keys, this impelementation provides a sequence\n// identical to that by Brent's xorgens 3 implementaion in C. 
This\n// implementation also provides for initalizing the generator with\n// string seeds, or for saving and restoring the state of the generator.\n//\n// On Chrome, this prng benchmarks about 2.1 times slower than\n// Javascript's built-in Math.random().\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this;\n\n // Set up generator function.\n me.next = function() {\n var w = me.w,\n X = me.X, i = me.i, t, v;\n // Update Weyl generator.\n me.w = w = (w + 0x61c88647) | 0;\n // Update xor generator.\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n // Update Xor generator array state.\n v = X[i] = v ^ t;\n me.i = i;\n // Result is the combination.\n return (v + (w ^ (w >>> 16))) | 0;\n };\n\n function init(me, seed) {\n var t, v, i, j, w, X = [], limit = 128;\n if (seed === (seed | 0)) {\n // Numeric seeds initialize v, which is used to generates X.\n v = seed;\n seed = null;\n } else {\n // String seeds are mixed into v and X one character at a time.\n seed = seed + '\\0';\n v = 0;\n limit = Math.max(limit, seed.length);\n }\n // Initialize circular array and weyl value.\n for (i = 0, j = -32; j < limit; ++j) {\n // Put the unicode characters into the array, and shuffle them.\n if (seed) v ^= seed.charCodeAt((j + 32) % seed.length);\n // After 32 shuffles, take v as the starting w value.\n if (j === 0) w = v;\n v ^= v << 10;\n v ^= v >>> 15;\n v ^= v << 4;\n v ^= v >>> 13;\n if (j >= 0) {\n w = (w + 0x61c88647) | 0; // Weyl.\n t = (X[j & 127] ^= (v + w)); // Combine xor and weyl to init array.\n i = (0 == t) ? i + 1 : 0; // Count zeroes.\n }\n }\n // We have detected all zeroes; make the key nonzero.\n if (i >= 128) {\n X[(seed && seed.length || 0) & 127] = -1;\n }\n // Run the generator 512 times to further mix the state before using it.\n // Factoring this as a function slows the main generator, so it is just\n // unrolled here. 
The weyl generator is not advanced while warming up.\n i = 127;\n for (j = 4 * 128; j > 0; --j) {\n v = X[(i + 34) & 127];\n t = X[i = ((i + 1) & 127)];\n v ^= v << 13;\n t ^= t << 17;\n v ^= v >>> 15;\n t ^= t >>> 12;\n X[i] = v ^ t;\n }\n // Storing state as object members is faster than using closure variables.\n me.w = w;\n me.X = X;\n me.i = i;\n }\n\n init(me, seed);\n}\n\nfunction copy(f, t) {\n t.i = f.i;\n t.w = f.w;\n t.X = f.X.slice();\n return t;\n};\n\nfunction impl(seed, opts) {\n if (seed == null) seed = +(new Date);\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.X) copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.xor4096 = impl;\n}\n\n})(\n this, // window object or global\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n","// A Javascript implementaion of the \"Tyche-i\" prng algorithm by\n// Samuel Neves and Filipe Araujo.\n// See https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n\n(function(global, module, define) {\n\nfunction XorGen(seed) {\n var me = this, strseed = '';\n\n // Set up generator function.\n me.next = function() {\n var b = me.b, c = me.c, d = me.d, a = me.a;\n b = (b << 25) ^ (b >>> 7) ^ c;\n c = (c - d) | 0;\n d = (d << 24) ^ (d >>> 8) ^ a;\n a = (a - b) | 0;\n me.b = b = (b << 20) ^ (b >>> 12) ^ c;\n me.c = c = (c - d) | 0;\n me.d = (d << 16) ^ (c >>> 16) ^ a;\n return me.a = (a - b) | 0;\n };\n\n /* The following is non-inverted tyche, which has better internal\n * bit diffusion, but which is about 25% slower than tyche-i in JS.\n me.next = function() {\n var a = me.a, b = me.b, c = me.c, d = me.d;\n a = (me.a + me.b | 0) >>> 0;\n d = me.d ^ a; d = d << 16 ^ d >>> 16;\n c = me.c + d | 0;\n b = me.b ^ c; b = b << 12 ^ d >>> 20;\n me.a = a = a + b | 0;\n d = d ^ a; me.d = d = d << 8 ^ d >>> 24;\n me.c = c = c + d | 0;\n b = b ^ c;\n return me.b = (b << 7 ^ b >>> 25);\n }\n */\n\n me.a = 0;\n me.b = 0;\n me.c = 2654435769 | 0;\n me.d = 1367130551;\n\n if (seed === Math.floor(seed)) {\n // Integer seed.\n me.a = (seed / 0x100000000) | 0;\n me.b = seed | 0;\n } else {\n // String seed.\n strseed += seed;\n }\n\n // Mix in string seed, then discard an initial batch of 64 values.\n for (var k = 0; k < strseed.length + 20; k++) {\n me.b ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n}\n\nfunction copy(f, t) {\n t.a = f.a;\n t.b = f.b;\n t.c = f.c;\n t.d = f.d;\n return t;\n};\n\nfunction impl(seed, opts) {\n var xg = new XorGen(seed),\n state = opts && opts.state,\n prng = function() { return (xg.next() >>> 0) / 0x100000000; };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11,\n bot = (xg.next() >>> 0) / 0x100000000,\n result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof(state) == 'object') copy(state, xg);\n prng.state = function() { return copy(xg, {}); }\n }\n return prng;\n}\n\nif (module && module.exports) {\n 
module.exports = impl;\n} else if (define && define.amd) {\n define(function() { return impl; });\n} else {\n this.tychei = impl;\n}\n\n})(\n this,\n (typeof module) == 'object' && module, // present in node.js\n (typeof define) == 'function' && define // present with an AMD loader\n);\n\n\n","/*\nCopyright 2014 David Bau.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n*/\n\n(function (pool, math) {\n//\n// The following constants are related to IEEE 754 limits.\n//\nvar global = this,\n width = 256, // each RC4 output is 0 <= x < 256\n chunks = 6, // at least six RC4 outputs for each double\n digits = 52, // there are 52 significant digits in a double\n rngname = 'random', // rngname: name for Math.random and Math.seedrandom\n startdenom = math.pow(width, chunks),\n significance = math.pow(2, digits),\n overflow = significance * 2,\n mask = width - 1,\n nodecrypto; // node.js crypto module, initialized at the bottom.\n\n//\n// seedrandom()\n// This is the seedrandom function described above.\n//\nfunction seedrandom(seed, options, callback) {\n var key = [];\n options = (options == true) ? { entropy: true } : (options || {});\n\n // Flatten the seed string or build one from local entropy if needed.\n var shortseed = mixkey(flatten(\n options.entropy ? [seed, tostring(pool)] :\n (seed == null) ? 
autoseed() : seed, 3), key);\n\n // Use the seed to initialize an ARC4 generator.\n var arc4 = new ARC4(key);\n\n // This function returns a random double in [0, 1) that contains\n // randomness in every bit of the mantissa of the IEEE 754 value.\n var prng = function() {\n var n = arc4.g(chunks), // Start with a numerator n < 2 ^ 48\n d = startdenom, // and denominator d = 2 ^ 48.\n x = 0; // and no 'extra last byte'.\n while (n < significance) { // Fill up all significant digits by\n n = (n + x) * width; // shifting numerator and\n d *= width; // denominator and generating a\n x = arc4.g(1); // new least-significant-byte.\n }\n while (n >= overflow) { // To avoid rounding up, before adding\n n /= 2; // last byte, shift everything\n d /= 2; // right using integer math until\n x >>>= 1; // we have exactly the desired bits.\n }\n return (n + x) / d; // Form the number within [0, 1).\n };\n\n prng.int32 = function() { return arc4.g(4) | 0; }\n prng.quick = function() { return arc4.g(4) / 0x100000000; }\n prng.double = prng;\n\n // Mix the randomness into accumulated entropy.\n mixkey(tostring(arc4.S), pool);\n\n // Calling convention: what to return as a function of prng, seed, is_math.\n return (options.pass || callback ||\n function(prng, seed, is_math_call, state) {\n if (state) {\n // Load the arc4 state from the given state if it has an S array.\n if (state.S) { copy(state, arc4); }\n // Only provide the .state method if requested via options.state.\n prng.state = function() { return copy(arc4, {}); }\n }\n\n // If called as a method of Math (Math.seedrandom()), mutate\n // Math.random because that is how seedrandom.js has worked since v1.0.\n if (is_math_call) { math[rngname] = prng; return seed; }\n\n // Otherwise, it is a newer calling convention, so return the\n // prng directly.\n else return prng;\n })(\n prng,\n shortseed,\n 'global' in options ? options.global : (this == math),\n options.state);\n}\nmath['seed' + rngname] = seedrandom;\n\n//\n// ARC4\n//\n// An ARC4 implementation. The constructor takes a key in the form of\n// an array of at most (width) integers that should be 0 <= x < (width).\n//\n// The g(count) method returns a pseudorandom integer that concatenates\n// the next (count) outputs from ARC4. Its return value is a number x\n// that is in the range 0 <= x < (width ^ count).\n//\nfunction ARC4(key) {\n var t, keylen = key.length,\n me = this, i = 0, j = me.i = me.j = 0, s = me.S = [];\n\n // The empty key [] is treated as [0].\n if (!keylen) { key = [keylen++]; }\n\n // Set up S using the standard key scheduling algorithm.\n while (i < width) {\n s[i] = i++;\n }\n for (i = 0; i < width; i++) {\n s[i] = s[j = mask & (j + key[i % keylen] + (t = s[i]))];\n s[j] = t;\n }\n\n // The \"g\" method returns the next (count) outputs as one number.\n (me.g = function(count) {\n // Using instance members instead of closure state nearly doubles speed.\n var t, r = 0,\n i = me.i, j = me.j, s = me.S;\n while (count--) {\n t = s[i = mask & (i + 1)];\n r = r * width + s[mask & ((s[i] = s[j = mask & (j + t)]) + (s[j] = t))];\n }\n me.i = i; me.j = j;\n return r;\n // For robust unpredictability, the function call below automatically\n // discards an initial batch of values. 
This is called RC4-drop[256].\n // See http://google.com/search?q=rsa+fluhrer+response&btnI\n })(width);\n}\n\n//\n// copy()\n// Copies internal state of ARC4 to or from a plain object.\n//\nfunction copy(f, t) {\n t.i = f.i;\n t.j = f.j;\n t.S = f.S.slice();\n return t;\n};\n\n//\n// flatten()\n// Converts an object tree to nested arrays of strings.\n//\nfunction flatten(obj, depth) {\n var result = [], typ = (typeof obj), prop;\n if (depth && typ == 'object') {\n for (prop in obj) {\n try { result.push(flatten(obj[prop], depth - 1)); } catch (e) {}\n }\n }\n return (result.length ? result : typ == 'string' ? obj : obj + '\\0');\n}\n\n//\n// mixkey()\n// Mixes a string seed into a key that is an array of integers, and\n// returns a shortened string seed that is equivalent to the result key.\n//\nfunction mixkey(seed, key) {\n var stringseed = seed + '', smear, j = 0;\n while (j < stringseed.length) {\n key[mask & j] =\n mask & ((smear ^= key[mask & j] * 19) + stringseed.charCodeAt(j++));\n }\n return tostring(key);\n}\n\n//\n// autoseed()\n// Returns an object for autoseeding, using window.crypto and Node crypto\n// module if available.\n//\nfunction autoseed() {\n try {\n var out;\n if (nodecrypto && (out = nodecrypto.randomBytes)) {\n // The use of 'out' to remember randomBytes makes tight minified code.\n out = out(width);\n } else {\n out = new Uint8Array(width);\n (global.crypto || global.msCrypto).getRandomValues(out);\n }\n return tostring(out);\n } catch (e) {\n var browser = global.navigator,\n plugins = browser && browser.plugins;\n return [+new Date, global, plugins, global.screen, tostring(pool)];\n }\n}\n\n//\n// tostring()\n// Converts an array of charcodes to a string\n//\nfunction tostring(a) {\n return String.fromCharCode.apply(0, a);\n}\n\n//\n// When seedrandom.js is loaded, we immediately mix a few bits\n// from the built-in RNG into the entropy pool. Because we do\n// not want to interfere with deterministic PRNG state later,\n// seedrandom will not call math.random on its own again after\n// initialization.\n//\nmixkey(math.random(), pool);\n\n//\n// Nodejs and AMD support: export the implementation as a module using\n// either convention.\n//\nif ((typeof module) == 'object' && module.exports) {\n module.exports = seedrandom;\n // When in node.js, try using crypto package for autoseeding.\n try {\n nodecrypto = require('crypto');\n } catch (ex) {}\n} else if ((typeof define) == 'function' && define.amd) {\n define(function() { return seedrandom; });\n}\n\n// End anonymous scope, and pass initial values.\n})(\n [], // pool: entropy pool starts empty\n Math // math: package containing random, pow, and seedrandom\n);\n","// A library of seedable RNGs implemented in Javascript.\n//\n// Usage:\n//\n// var seedrandom = require('seedrandom');\n// var random = seedrandom(1); // or any seed.\n// var x = random(); // 0 <= x < 1. Every bit is random.\n// var x = random.quick(); // 0 <= x < 1. 
32 bits of randomness.\n\n// alea, a 53-bit multiply-with-carry generator by Johannes Baagøe.\n// Period: ~2^116\n// Reported to pass all BigCrush tests.\nvar alea = require('./lib/alea');\n\n// xor128, a pure xor-shift generator by George Marsaglia.\n// Period: 2^128-1.\n// Reported to fail: MatrixRank and LinearComp.\nvar xor128 = require('./lib/xor128');\n\n// xorwow, George Marsaglia's 160-bit xor-shift combined plus weyl.\n// Period: 2^192-2^32\n// Reported to fail: CollisionOver, SimpPoker, and LinearComp.\nvar xorwow = require('./lib/xorwow');\n\n// xorshift7, by François Panneton and Pierre L'ecuyer, takes\n// a different approach: it adds robustness by allowing more shifts\n// than Marsaglia's original three. It is a 7-shift generator\n// with 256 bits, that passes BigCrush with no systmatic failures.\n// Period 2^256-1.\n// No systematic BigCrush failures reported.\nvar xorshift7 = require('./lib/xorshift7');\n\n// xor4096, by Richard Brent, is a 4096-bit xor-shift with a\n// very long period that also adds a Weyl generator. It also passes\n// BigCrush with no systematic failures. Its long period may\n// be useful if you have many generators and need to avoid\n// collisions.\n// Period: 2^4128-2^32.\n// No systematic BigCrush failures reported.\nvar xor4096 = require('./lib/xor4096');\n\n// Tyche-i, by Samuel Neves and Filipe Araujo, is a bit-shifting random\n// number generator derived from ChaCha, a modern stream cipher.\n// https://eden.dei.uc.pt/~sneves/pubs/2011-snfa2.pdf\n// Period: ~2^127\n// No systematic BigCrush failures reported.\nvar tychei = require('./lib/tychei');\n\n// The original ARC4-based prng included in this library.\n// Period: ~2^1600\nvar sr = require('./seedrandom');\n\nsr.alea = alea;\nsr.xor128 = xor128;\nsr.xorwow = xorwow;\nsr.xorshift7 = xorshift7;\nsr.xor4096 = xor4096;\nsr.tychei = tychei;\n\nmodule.exports = sr;\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as seedrandom from 'seedrandom';\n\nexport interface RandGauss {\n nextValue(): number;\n}\n\nexport interface RandNormalDataTypes {\n float32: Float32Array;\n int32: Int32Array;\n}\n\n// https://en.wikipedia.org/wiki/Marsaglia_polar_method\nexport class MPRandGauss implements RandGauss {\n private mean: number;\n private stdDev: number;\n private nextVal: number;\n private dtype?: keyof RandNormalDataTypes;\n private truncated?: boolean;\n private upper?: number;\n private lower?: number;\n private random: seedrandom.prng;\n\n constructor(\n mean: number, stdDeviation: number, dtype?: keyof RandNormalDataTypes,\n truncated?: boolean, seed?: number) {\n this.mean = mean;\n this.stdDev = stdDeviation;\n this.dtype = dtype;\n this.nextVal = NaN;\n this.truncated = truncated;\n if (this.truncated) {\n this.upper = this.mean + this.stdDev * 2;\n this.lower = this.mean - this.stdDev * 2;\n }\n const seedValue = seed ? 
seed : Math.random();\n this.random = seedrandom.alea(seedValue.toString());\n }\n\n /** Returns next sample from a gaussian distribution. */\n public nextValue(): number {\n if (!isNaN(this.nextVal)) {\n const value = this.nextVal;\n this.nextVal = NaN;\n return value;\n }\n\n let resultX: number, resultY: number;\n let isValid = false;\n while (!isValid) {\n let v1: number, v2: number, s: number;\n do {\n v1 = 2 * this.random() - 1;\n v2 = 2 * this.random() - 1;\n s = v1 * v1 + v2 * v2;\n } while (s >= 1 || s === 0);\n\n const mul = Math.sqrt(-2.0 * Math.log(s) / s);\n resultX = this.mean + this.stdDev * v1 * mul;\n resultY = this.mean + this.stdDev * v2 * mul;\n\n if (!this.truncated || this.isValidTruncated(resultX)) {\n isValid = true;\n }\n }\n\n if (!this.truncated || this.isValidTruncated(resultY)) {\n this.nextVal = this.convertValue(resultY);\n }\n return this.convertValue(resultX);\n }\n\n /** Handles proper rounding for non floating point numbers. */\n private convertValue(value: number): number {\n if (this.dtype == null || this.dtype === 'float32') {\n return value;\n }\n return Math.round(value);\n }\n\n /** Returns true if less than 2-standard-deviations from the mean. */\n private isValidTruncated(value: number): boolean {\n return value <= this.upper && value >= this.lower;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D, TensorBuffer} from '../tensor';\nimport {convertToTensor, convertToTensorArray} from '../tensor_util_env';\nimport {DataType, Rank, ShapeMap, TensorLike, TensorLike1D, TensorLike4D, TypedArray} from '../types';\nimport * as util from '../util';\nimport {getAxesPermutation, getInnerMostAxes} from './axis_util';\nimport {concat} from './concat_split';\nimport {op} from './operation';\nimport {MPRandGauss} from './rand';\nimport {zerosLike} from './tensor_ops';\n\n/**\n * Creates a new tensor with the same values and shape as the specified\n * tensor.\n *\n * ```js\n * const x = tf.tensor([1, 2]);\n *\n * x.clone().print();\n * ```\n *\n * @param x The tensor to clone.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction clone_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'clone');\n const der = (dy: T) => {\n return {$x: () => dy.toFloat()};\n };\n\n return ENV.engine.runKernel(\n backend =>\n Tensor.make($x.shape, {dataId: $x.dataId}, $x.dtype) as T,\n {$x}, der) as T;\n}\n\n/**\n * Create an identity matrix.\n *\n * @param numRows Number of rows.\n * @param numColumns Number of columns. 
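// Illustrative sketch (not from the bundled source) of drawing from the
// MPRandGauss class defined above (Marsaglia polar method); the constructor
// arguments follow the signature shown: (mean, stdDeviation, dtype, truncated, seed).
import {MPRandGauss} from './rand';         // mirrors the import used in array_ops.ts

const gauss = new MPRandGauss(0, 1, 'float32', /* truncated */ false, 42);
const sample = gauss.nextValue();           // one seeded draw from N(mean = 0, stdDev = 1)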
Defaults to `numRows`.\n * @param batchShape If provided, will add the batch shape to the beginning\n * of the shape of the returned `tf.Tensor` by repeating the identity\n * matrix.\n * @param dtype Data type.\n * @returns Identity matrix of the specified size and data type, possibly\n * with batch repetition if `batchShape` is specified.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction eye_(\n numRows: number, numColumns?: number,\n batchShape?:\n [\n number\n ]|[number,\n number]|[number, number, number]|[number, number, number, number],\n dtype: DataType = 'float32'): Tensor2D {\n if (numColumns == null) {\n numColumns = numRows;\n }\n const buff = buffer([numRows, numColumns], dtype);\n const n = numRows <= numColumns ? numRows : numColumns;\n for (let i = 0; i < n; ++i) {\n buff.set(1, i, i);\n }\n const out = buff.toTensor().as2D(numRows, numColumns);\n if (batchShape == null) {\n return out;\n } else {\n if (batchShape.length === 1) {\n return tile(expandDims(out, 0), [batchShape[0], 1, 1]);\n } else if (batchShape.length === 2) {\n return tile(\n expandDims(expandDims(out, 0), 0),\n [batchShape[0], batchShape[1], 1, 1]);\n } else if (batchShape.length === 3) {\n return tile(\n expandDims(expandDims(expandDims(out, 0), 0), 0),\n [batchShape[0], batchShape[1], batchShape[2], 1, 1]);\n } else {\n throw new Error(\n `eye() currently supports only 1D and 2D ` +\n // tslint:disable-next-line:no-any\n `batchShapes, but received ${(batchShape as any).length}D.`);\n }\n }\n}\n\n/**\n * Creates a `tf.Tensor` with values sampled from a normal distribution.\n *\n * ```js\n * tf.randomNormal([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output.\n * @param seed The seed for the random number generator.\n */\n/** @doc {heading: 'Tensors', subheading: 'Random'} */\nfunction randomNormal_<R extends Rank>(\n shape: ShapeMap[R], mean = 0, stdDev = 1, dtype?: 'float32'|'int32',\n seed?: number): Tensor<R> {\n if (dtype != null && (dtype as DataType) === 'bool') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const randGauss =\n new MPRandGauss(mean, stdDev, dtype, false /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\n\n/**\n * Creates a `tf.Tensor` with values sampled from a truncated normal\n * distribution.\n *\n * ```js\n * tf.truncatedNormal([2, 2]).print();\n * ```\n *\n * The generated values follow a normal distribution with specified mean and\n * standard deviation, except that values whose magnitude is more than 2\n * standard deviations from the mean are dropped and re-picked.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param mean The mean of the normal distribution.\n * @param stdDev The standard deviation of the normal distribution.\n * @param dtype The data type of the output tensor.\n * @param seed The seed for the random number generator.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction truncatedNormal_<R extends Rank>(\n shape: ShapeMap[R], mean = 0, stdDev = 1, dtype?: 'float32'|'int32',\n seed?: number): Tensor<R> {\n if (dtype != null && (dtype as DataType) === 'bool') {\n throw new Error(`Unsupported data type ${dtype}`);\n }\n const 
randGauss =\n new MPRandGauss(mean, stdDev, dtype, true /* truncated */, seed);\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = randGauss.nextValue();\n }\n return res.toTensor();\n}\n\n/**\n * Creates a `tf.Tensor` with values sampled from a uniform distribution.\n *\n * The generated values follow a uniform distribution in the range [minval,\n * maxval). The lower bound minval is included in the range, while the upper\n * bound maxval is excluded.\n *\n * ```js\n * tf.randomUniform([2, 2]).print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param minval The lower bound on the range of random values to generate.\n * Defaults to 0.\n * @param maxval The upper bound on the range of random values to generate.\n * Defaults to 1.\n * @param dtype The data type of the output tensor. Defaults to 'float32'.\n */\n/** @doc {heading: 'Tensors', subheading: 'Random'} */\nfunction randomUniform_<R extends Rank>(\n shape: ShapeMap[R], minval = 0, maxval = 1,\n dtype: DataType = 'float32'): Tensor<R> {\n const res = buffer(shape, dtype);\n for (let i = 0; i < res.values.length; i++) {\n res.values[i] = util.randUniform(minval, maxval);\n }\n return res.toTensor();\n}\n\n/**\n * Creates a `tf.Tensor` with values sampled from a random number generator\n * function defined by the user.\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param randFunction A random number generator function which is called\n * for each element in the output tensor.\n * @param dtype The data type of the output tensor. Defaults to 'float32'.\n */\nfunction rand_<R extends Rank>(\n shape: ShapeMap[R], randFunction: () => number,\n dtype?: DataType): Tensor<R> {\n const size = util.sizeFromShape(shape);\n\n let values = null;\n if (dtype == null || dtype === 'float32') {\n values = new Float32Array(size);\n } else if (dtype === 'int32') {\n values = new Int32Array(size);\n } else if (dtype === 'bool') {\n values = new Uint8Array(size);\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n\n for (let i = 0; i < size; i++) {\n values[i] = randFunction();\n }\n return Tensor.make(shape, {values}, dtype);\n}\n\n/**\n * Creates a `tf.Tensor` with values drawn from a multinomial distribution.\n *\n * ```js\n * const probs = tf.tensor([.75, .25]);\n * tf.multinomial(probs, 3).print();\n * ```\n *\n * @param logits 1D array with unnormalized log-probabilities, or\n * 2D array of shape `[batchSize, numOutcomes]`. See the `normalized`\n * parameter.\n * @param numSamples Number of samples to draw for each row slice.\n * @param seed The seed number.\n * @param normalized Whether the provided `logits` are normalized true\n * probabilities (sum to 1). 
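// Illustrative sketch (not from the bundled source) of the random-tensor creation
// ops defined above, assuming the exported tf API and an arbitrary example seed.
import * as tf from '@tensorflow/tfjs';

tf.randomUniform([2, 2], 0, 10, 'int32').print();        // uniform samples cast to int32
tf.rand([2, 2], () => Math.random() * 2 - 1).print();    // user-supplied generator, values in [-1, 1)
tf.randomNormal([2, 2], 0, 1, 'float32', 42).print();    // seeded normal samples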
Defaults to false.\n * @return 1D array of shape `[numSamples]`, or 2D array of shape\n * `[batchSize, numSamples]`, depending on the rank of the input.\n */\n/** @doc {heading: 'Tensors', subheading: 'Random'} */\nfunction multinomial_(\n logits: Tensor1D|Tensor2D|TensorLike, numSamples: number, seed?: number,\n normalized = false): Tensor1D|Tensor2D {\n const $logits = convertToTensor(logits, 'logits', 'multinomial');\n const numOutcomes = $logits.size;\n const origRank = $logits.rank;\n if (numOutcomes < 2) {\n throw new Error(\n `Error in multinomial: you need at least 2 outcomes, but got ` +\n `${numOutcomes}.`);\n }\n if (origRank > 2) {\n throw new Error(`Rank of probabilities must be 1 or 2, but is ${origRank}`);\n }\n seed = seed || Math.random();\n const logits2D = origRank === 1 ? $logits.as2D(1, -1) : $logits as Tensor2D;\n const res = ENV.engine.runKernel(\n backend => backend.multinomial(logits2D, normalized, numSamples, seed),\n {logits2D});\n\n return origRank === 1 ? res.as1D() : res;\n}\n\n/**\n * Creates a one-hot `tf.Tensor`. The locations represented by `indices` take\n * value `onValue` (defaults to 1), while all other locations take value\n * `offValue` (defaults to 0).\n *\n * ```js\n * tf.oneHot(tf.tensor1d([0, 1], 'int32'), 3).print();\n * ```\n *\n * @param indices `tf.Tensor1D` of indices with dtype `int32`.\n * @param depth The depth of the one hot dimension.\n * @param onValue A number used to fill in the output when the index matches\n * the location.\n * @param offValue A number used to fill in the output when the index does\n * not match the location.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction oneHot_(\n indices: Tensor1D|TensorLike1D, depth: number, onValue = 1,\n offValue = 0): Tensor2D {\n const $indices = convertToTensor(indices, 'indices', 'oneHot', 'int32');\n util.assert($indices.dtype === 'int32', 'Indices must be of dtype `int32`');\n\n if (depth < 2) {\n throw new Error(`Error in oneHot: depth must be >=2, but it is ${depth}`);\n }\n const grad = (dy: Tensor2D) => {\n return {$indices: () => zerosLike($indices)};\n };\n return ENV.engine.runKernel(\n backend => backend.oneHot($indices, depth, onValue, offValue), {$indices},\n grad);\n}\n\n/**\n * Creates a `tf.Tensor` from an image.\n *\n * ```js\n * const image = new ImageData(1, 1);\n * image.data[0] = 100;\n * image.data[1] = 150;\n * image.data[2] = 200;\n * image.data[3] = 255;\n *\n * tf.fromPixels(image).print();\n * ```\n *\n * @param pixels The input image to construct the tensor from. The\n * supported image types are all 4-channel.\n * @param numChannels The number of channels of the output tensor. A\n * numChannels value less than 4 allows you to ignore channels. Defaults to\n * 3 (ignores alpha channel of input image).\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction fromPixels_(\n pixels: ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement,\n numChannels = 3): Tensor3D {\n if (numChannels > 4) {\n throw new Error(\n 'Cannot construct Tensor with more than 4 channels from pixels.');\n }\n return ENV.engine.fromPixels(pixels, numChannels);\n}\n\n/**\n * Draws a `tf.Tensor` of pixel values to a byte array or optionally a\n * canvas.\n *\n * When the dtype of the input is 'float32', we assume values in the range\n * [0-1]. Otherwise, when input is 'int32', we assume values in the range\n * [0-255].\n *\n * Returns a promise that resolves when the canvas has been drawn to.\n *\n * @param img A rank-2 or rank-3 tensor. 
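// Illustrative sketch (not from the bundled source) of the sampling and encoding
// ops above, assuming the exported tf API.
import * as tf from '@tensorflow/tfjs';

// oneHot with explicit on/off values (indices must be int32, depth >= 2):
tf.oneHot(tf.tensor1d([0, 2], 'int32'), 3, 1, -1).print();   // [[1, -1, -1], [-1, -1, 1]]
// multinomial with normalized = true, so the inputs are treated as probabilities:
tf.multinomial(tf.tensor1d([0.25, 0.75]), 4, 42, true).print();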
If rank-2, draws grayscale. If\n * rank-3, must have depth of 1, 3 or 4. When depth of 1, draws\n * grayscale. When depth of 3, we draw with the first three components of\n * the depth dimension corresponding to r, g, b and alpha = 1. When depth of\n * 4, all four components of the depth dimension correspond to r, g, b, a.\n * @param canvas The canvas to draw to.\n */\n/** @doc {heading: 'Visualization'} */\nasync function toPixels(\n img: Tensor2D|Tensor3D|TensorLike,\n canvas?: HTMLCanvasElement): Promise<Uint8ClampedArray> {\n const $img = convertToTensor(img, 'img', 'toPixels', 'int32');\n if ($img.rank !== 2 && $img.rank !== 3) {\n throw new Error(\n `toPixels only supports rank 2 or 3 tensors, got rank ${$img.rank}.`);\n }\n const [height, width] = $img.shape.slice(0, 2);\n const depth = $img.rank === 2 ? 1 : $img.shape[2];\n\n if (depth > 4 || depth === 2) {\n throw new Error(\n `toPixels only supports depth of size ` +\n `1, 3 or 4 but got ${depth}`);\n }\n\n const minTensor = $img.min();\n const maxTensor = $img.max();\n const min = (await minTensor.data())[0];\n const max = (await maxTensor.data())[0];\n minTensor.dispose();\n maxTensor.dispose();\n if ($img.dtype === 'float32') {\n if (min < 0 || max > 1) {\n throw new Error(\n `Tensor values for a float32 Tensor must be in the ` +\n `range [0 - 1] but got range [${min} - ${max}].`);\n }\n } else if ($img.dtype === 'int32') {\n if (min < 0 || max > 255) {\n throw new Error(\n `Tensor values for a int32 Tensor must be in the ` +\n `range [0 - 255] but got range [${min} - ${max}].`);\n }\n } else {\n throw new Error(\n `Unsupported type for toPixels: ${$img.dtype}.` +\n ` Please use float32 or int32 tensors.`);\n }\n\n const data = await $img.data();\n const multiplier = $img.dtype === 'float32' ? 255 : 1;\n const bytes = new Uint8ClampedArray(width * height * 4);\n\n for (let i = 0; i < height * width; ++i) {\n let r, g, b, a;\n if (depth === 1) {\n r = data[i] * multiplier;\n g = data[i] * multiplier;\n b = data[i] * multiplier;\n a = 255;\n } else if (depth === 3) {\n r = data[i * 3] * multiplier;\n g = data[i * 3 + 1] * multiplier;\n b = data[i * 3 + 2] * multiplier;\n a = 255;\n } else if (depth === 4) {\n r = data[i * 4] * multiplier;\n g = data[i * 4 + 1] * multiplier;\n b = data[i * 4 + 2] * multiplier;\n a = data[i * 4 + 3] * multiplier;\n }\n\n const j = i * 4;\n bytes[j + 0] = Math.round(r);\n bytes[j + 1] = Math.round(g);\n bytes[j + 2] = Math.round(b);\n bytes[j + 3] = Math.round(a);\n }\n\n if (canvas != null) {\n canvas.width = width;\n canvas.height = height;\n const ctx = canvas.getContext('2d');\n const imageData = new ImageData(bytes, width, height);\n ctx.putImageData(imageData, 0, 0);\n }\n if ($img !== img) {\n $img.dispose();\n }\n return bytes;\n}\n\n/**\n * Reshapes a `tf.Tensor` to a given shape.\n *\n * Given an input tensor, returns a new tensor with the same values as the\n * input tensor with shape `shape`.\n *\n * If one component of shape is the special value -1, the size of that\n * dimension is computed so that the total size remains constant. In\n * particular, a shape of [-1] flattens into 1-D. At most one component of\n * shape can be -1.\n *\n * If shape is 1-D or higher, then the operation returns a tensor with shape\n * shape filled with the values of tensor. 
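// Illustrative sketch (not from the bundled source) of the async toPixels path
// documented above, assuming a browser canvas element is available.
import * as tf from '@tensorflow/tfjs';

async function renderToCanvas(canvas: HTMLCanvasElement): Promise<Uint8ClampedArray> {
  // int32 inputs must lie in [0, 255]; float32 inputs must lie in [0, 1].
  const img = tf.tensor2d([[0, 64], [128, 255]], [2, 2], 'int32');
  const bytes = await tf.toPixels(img, canvas);   // rank-2 input draws grayscale
  img.dispose();
  return bytes;                                   // RGBA bytes, 4 per pixel
}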
In this case, the number of\n * elements implied by shape must be the same as the number of elements in\n * tensor.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * x.reshape([2, 2]).print();\n * ```\n *\n * @param x The input tensor to be reshaped.\n * @param shape An array of integers defining the output tensor shape.\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction reshape_<R2 extends Rank>(\n x: Tensor|TensorLike, shape: ShapeMap[R2]): Tensor<R2> {\n const $x = convertToTensor(x, 'x', 'reshape');\n shape = util.inferFromImplicitShape(shape, $x.size);\n util.assert(\n $x.size === util.sizeFromShape(shape),\n 'new shape and old shape must have the same number of elements.');\n\n const grad = (dy: Tensor<R2>) => {\n return {$x: () => dy.reshape($x.shape)};\n };\n return ENV.engine.runKernel(\n backend => backend.reshape($x, shape), {$x}, grad);\n}\n\n/**\n * Removes dimensions of size 1 from the shape of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4], [1, 1, 4]);\n * x.squeeze().print();\n * ```\n *\n * @param x The input tensor to be squeezed.\n * @param axis An optional list of numbers. If specified, only\n * squeezes the dimensions listed. The dimension index starts at 0. It\n * is an error to squeeze a dimension that is not 1.\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction squeeze_<T extends Tensor>(x: Tensor|TensorLike, axis?: number[]): T {\n const $x = convertToTensor(x, 'x', 'squeeze');\n return reshape($x, util.squeezeShape($x.shape, axis).newShape) as T;\n}\n\n/**\n * Casts a `tf.Tensor` to a new dtype.\n *\n * ```js\n * const x = tf.tensor1d([1.5, 2.5, 3]);\n * tf.cast(x, 'int32').print();\n * ```\n * @param x The input tensor to be casted.\n * @param dtype The dtype to cast the input tensor to.\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction cast_<T extends Tensor>(x: T|TensorLike, dtype: DataType): T {\n const $x = convertToTensor(x, 'x', 'cast');\n\n const grad = (dy: T) => {\n return {$x: () => dy.clone()};\n };\n return ENV.engine.runKernel(backend => backend.cast($x, dtype), {$x}, grad) as\n T;\n}\n\n/**\n * Construct a tensor by repeating it the number of times given by reps.\n *\n * This operation creates a new tensor by replicating `input` `reps`\n * times. The output tensor's i'th dimension has `input.shape[i] *\n * reps[i]` elements, and the values of `input` are replicated\n * `reps[i]` times along the i'th dimension. 
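// Illustrative sketch (not from the bundled source) of the shape and dtype
// transformations above, assuming the exported tf API.
import * as tf from '@tensorflow/tfjs';

tf.tensor1d([1, 2, 3, 4, 5, 6]).reshape([2, -1]).print();  // -1 is inferred as 3
tf.tensor3d([1, 2, 3, 4], [1, 1, 4]).squeeze().print();    // size-1 dims removed -> shape [4]
tf.tensor1d([1.5, 2.5, 3]).cast('int32').print();          // float values converted to int32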
For example, tiling\n * `[a, b, c, d]` by `[2]` produces `[a, b, c, d, a, b, c, d]`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n *\n * a.tile([2]).print(); // or a.tile([2])\n * ```\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * a.tile([1, 2]).print(); // or a.tile([1, 2])\n * ```\n * @param x The tensor to tile.\n * @param reps Determines the number of replications per dimension.\n */\n/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */\nfunction tile_<T extends Tensor>(x: T|TensorLike, reps: number[]): T {\n const $x = convertToTensor(x, 'x', 'tile');\n\n util.assert(\n $x.rank === reps.length,\n `Error in transpose: rank of input ${$x.rank} ` +\n `must match length of reps ${reps}.`);\n const grad = (dy: T) => {\n const derX = () => {\n let xGrad = zerosLike($x);\n // TODO(cais): Maybe reduce memory footprint by avoiding repeated\n // slicing.\n if ($x.rank === 1) {\n for (let i = 0; i < reps[0]; ++i) {\n xGrad = xGrad.add(dy.slice([i * $x.shape[0]], [$x.shape[0]]));\n }\n } else if ($x.rank === 2) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n xGrad = xGrad.add(dy.slice(\n [i * $x.shape[0], j * $x.shape[1]],\n [$x.shape[0], $x.shape[1]]));\n }\n }\n } else if ($x.rank === 3) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n xGrad = xGrad.add(dy.slice(\n [i * $x.shape[0], j * $x.shape[1], k * $x.shape[2]],\n [$x.shape[0], $x.shape[1], $x.shape[2]]));\n }\n }\n }\n } else if ($x.rank === 4) {\n for (let i = 0; i < reps[0]; ++i) {\n for (let j = 0; j < reps[1]; ++j) {\n for (let k = 0; k < reps[2]; ++k) {\n for (let l = 0; l < reps[3]; ++l) {\n xGrad = xGrad.add(dy.slice(\n [\n i * $x.shape[0], j * $x.shape[1], k * $x.shape[2],\n l * $x.shape[3]\n ],\n [$x.shape[0], $x.shape[1], $x.shape[2], $x.shape[3]]));\n }\n }\n }\n }\n } else {\n throw new Error(\n `Gradient for tile operation is not implemented for rank-` +\n `${$x.rank} tensors yet.`);\n }\n return xGrad;\n };\n return {$x: derX};\n };\n return ENV.engine.runKernel(backend => backend.tile($x, reps), {$x}, grad);\n}\n\n/**\n * Pads a `tf.Tensor1D` with a given value and paddings. See `pad` for details.\n */\nfunction pad1d_(\n x: Tensor1D|TensorLike, paddings: [number, number],\n constantValue = 0): Tensor1D {\n util.assert(\n paddings.length === 2,\n 'Invalid number of paddings. Must be length of 2.');\n return pad(x, [paddings], constantValue);\n}\n\n/**\n * Pads a `tf.Tensor2D` with a given value and paddings. See `pad` for details.\n */\nfunction pad2d_(\n x: Tensor2D|TensorLike, paddings: [[number, number], [number, number]],\n constantValue = 0): Tensor2D {\n util.assert(\n paddings.length === 2 && paddings[0].length === 2 &&\n paddings[1].length === 2,\n 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\n\n/**\n * Pads a `tf.Tensor3D` with a given value and paddings. See `pad` for details.\n */\nfunction pad3d_(\n x: Tensor3D|TensorLike,\n paddings: [[number, number], [number, number], [number, number]],\n constantValue = 0): Tensor3D {\n util.assert(\n paddings.length === 3 && paddings[0].length === 2 &&\n paddings[1].length === 2 && paddings[2].length === 2,\n 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\n\n/**\n * Pads a `tf.Tensor4D` with a given value and paddings. 
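// Illustrative sketch (not from the bundled source) of the rank-specific pad
// helpers above, assuming the exported tf API.
import * as tf from '@tensorflow/tfjs';

const m = tf.tensor2d([[1, 2], [3, 4]]);
// One padded row above and below, two padded columns on the right, all filled with 9:
tf.pad2d(m, [[1, 1], [0, 2]], 9).print();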
See `pad` for details.\n */\nfunction pad4d_(\n x: Tensor4D|TensorLike,\n paddings:\n [\n [number, number], [number, number], [number, number], [number, number]\n ],\n constantValue = 0): Tensor4D {\n util.assert(\n paddings.length === 4 && paddings[0].length === 2 &&\n paddings[1].length === 2 && paddings[2].length === 2 &&\n paddings[3].length === 2,\n 'Invalid number of paddings. Must be length of 2 each.');\n return pad(x, paddings, constantValue);\n}\n\n/**\n * Pads a `tf.Tensor` with a given value and paddings.\n *\n * This operation currently only implements the `CONSTANT` mode.\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `paddings` is of given length.\n * - `tf.pad1d`\n * - `tf.pad2d`\n * - `tf.pad3d`\n * - `tf.pad4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * x.pad([[1, 2]]).print();\n * ```\n * @param x The tensor to pad.\n * @param paddings An array of length `R` (the rank of the tensor), where\n * each element is a length-2 tuple of ints `[padBefore, padAfter]`,\n * specifying how much to pad along each dimension of the tensor.\n * @param constantValue The pad value to use. Defaults to 0.\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction pad_<T extends Tensor>(\n x: T|TensorLike, paddings: Array<[number, number]>, constantValue = 0): T {\n const $x = convertToTensor(x, 'x', 'pad');\n\n if ($x.rank === 0) {\n throw new Error('pad(scalar) is not defined. Pass non-scalar to pad');\n }\n // Pad introduces values around the original tensor, so the gradient\n // slices the original shape out of the gradient.\n const begin = paddings.map(p => p[0]);\n const grad = (dy: T) => {\n return {$x: () => dy.slice(begin, $x.shape)};\n };\n return ENV.engine.runKernel(\n backend => backend.pad($x, paddings, constantValue), {$x}, grad) as\n T;\n}\n\n/**\n * Stacks a list of rank-`R` `tf.Tensor`s into one rank-`(R+1)` `tf.Tensor`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n * tf.stack([a, b, c]).print();\n * ```\n *\n * @param tensors A list of tensor objects with the same shape and dtype.\n * @param axis The axis to stack along. Defaults to 0 (the first dim).\n */\n/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */\nfunction stack_<T extends Tensor>(tensors: T[]|TensorLike[], axis = 0): Tensor {\n const $tensors = convertToTensorArray(tensors, 'tensors', 'stack');\n\n util.assert($tensors.length >= 1, 'Pass at least one tensor to tf.stack');\n if ($tensors.length === 1) {\n return $tensors[0].expandDims(axis);\n }\n const rank = $tensors[0].rank;\n const shape = $tensors[0].shape;\n const dtype = $tensors[0].dtype;\n\n util.assert(axis <= rank, 'Axis must be <= rank of the tensor');\n\n $tensors.forEach(t => {\n util.assertShapesMatch(\n shape, t.shape,\n 'All tensors passed to stack must have matching shapes');\n });\n\n $tensors.forEach(t => {\n util.assert(\n dtype === t.dtype,\n 'All tensors passed to stack must have matching dtypes');\n });\n const expandedTensors = $tensors.map(t => t.expandDims(axis));\n return concat(expandedTensors, axis);\n}\n\n/**\n * This operation reshapes the \"batch\" dimension 0 into `M + 1` dimensions of\n * shape `blockShape + [batch]`, interleaves these blocks back into the grid\n * defined by the spatial dimensions `[1, ..., M]`, to obtain a result with\n * the same rank as the input. 
The spatial dimensions of this intermediate\n * result are then optionally cropped according to `crops` to produce the\n * output. This is the reverse of `tf.spaceToBatchND`. See below for a precise\n * description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]);\n * const blockShape = [2, 2];\n * const crops = [[0, 0], [0, 0]];\n *\n * x.batchToSpaceND(blockShape, crops).print();\n * ```\n *\n * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must be one of the following types: `int32`,\n * `int64`. Must have shape `[M]`, all values must be >= 1.\n * @param crops A 2-D array. Must be one of the following types: `int32`,\n * `int64`. Must have shape `[M, 2]`, all values must be >= 0. `crops[i] =\n * [cropStart, cropEnd]` specifies the amount to crop from input dimension `i\n * + 1`, which corresponds to spatial dimension `i`. It is required that\n * `cropStart[i] + cropEnd[i] <= blockShape[i] * inputShape[i + 1]`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Reshape `x` to `reshaped` of shape: `[blockShape[0], ...,\n * blockShape[M-1], batch / prod(blockShape), x.shape[1], ...,\n * x.shape[N-1]]`\n *\n * 2. Permute dimensions of `reshaped`to produce `permuted` of shape `[batch /\n * prod(blockShape),x.shape[1], blockShape[0], ..., x.shape[M],\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 3. Reshape `permuted` to produce `reshapedPermuted` of shape `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0], ..., x.shape[M] *\n * blockShape[M-1],x.shape[M+1], ..., x.shape[N-1]]`\n *\n * 4. Crop the start and end of dimensions `[1, ..., M]` of `reshapedPermuted`\n * according to `crops` to produce the output of shape: `[batch /\n * prod(blockShape),x.shape[1] * blockShape[0] - crops[0,0] - crops[0,1],\n * ..., x.shape[M] * blockShape[M-1] - crops[M-1,0] -\n * crops[M-1,1],x.shape[M+1], ..., x.shape[N-1]]`\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction batchToSpaceND_<T extends Tensor>(\n x: T|TensorLike, blockShape: number[], crops: number[][]): T {\n const $x = convertToTensor(x, 'x', 'batchToSpaceND');\n const prod = blockShape.reduce((a, b) => a * b);\n\n util.assert(\n $x.rank >= 1 + blockShape.length,\n `input rank is ${$x.rank} but should be > than blockShape.length ${\n blockShape.length}`);\n\n util.assert(\n crops.length === blockShape.length,\n `crops.length is ${\n crops.length} but should be equal to blockShape.length ${\n blockShape.length}`);\n\n util.assert(\n $x.shape[0] % prod === 0,\n `input tensor batch is ${\n $x.shape[0]} but is not divisible by the product of ` +\n `the elements of blockShape ${blockShape.join(' * ')} === ${prod}`);\n\n const grad = (dy: T) => {\n return {$x: () => dy.spaceToBatchND(blockShape, crops)};\n };\n\n return ENV.engine.runKernel(\n backend => backend.batchToSpaceND($x, blockShape, crops), {$x}, grad);\n}\n\n/**\n * This operation divides \"spatial\" dimensions [1, ..., M] of the input into\n * a grid of blocks of shape block_shape, and interleaves these blocks with\n * the \"batch\" dimension (0) such that in the output, the spatial\n * dimensions [1, ..., M] correspond to the position within the grid,\n * and the batch dimension combines both the position within a spatial block\n * and the original batch position. 
Prior to division into blocks,\n * the spatial dimensions of the input are optionally zero padded\n * according to paddings. See below for a precise description.\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [4, 1, 1, 1]);\n * const blockShape = [2, 2];\n * const paddings = [[0, 0], [0, 0]];\n *\n * x.spaceToBatchND(blockShape, paddings).print();\n * ```\n *\n * @param x A `tf.Tensor`. N-D with `x.shape` = `[batch] + spatialShape +\n * remainingShape`, where spatialShape has `M` dimensions.\n * @param blockShape A 1-D array. Must be one of the following types: `int32`,\n * `int64`. Must have shape `[M]`, all values must be >= 1.\n * @param paddings A 2-D array. Must be one of the following types: `int32`,\n * `int64`. Must have shape `[M, 2]`, all values must be >= 0. `paddings[i] =\n * [padStart, padEnd]` specifies the amount to zero-pad from input dimension\n * `i + 1`, which corresponds to spatial dimension `i`.\n * It is required that\n * `(inputShape[i + 1] + padStart + padEnd) % blockShape[i] === 0`\n *\n * This operation is equivalent to the following steps:\n *\n * 1. Zero-pad the start and end of dimensions [1, ..., M] of the input\n * according to paddings to produce padded of shape padded_shape.\n *\n * 2. Reshape padded to reshaped_padded of shape:\n * [batch] + [padded_shape[1] / block_shape[0], block_shape[0], ...,\n * padded_shape[M] / block_shape[M-1], block_shape[M-1]] + remaining_shape\n *\n * 3. Permute dimensions of reshaped_padded to produce permuted_\n * reshaped_padded of shape:\n * block_shape + [batch] + [padded_shape[1] / block_shape[0], ...,\n * padded_shape[M] / block_shape[M-1]] + remaining_shape\n *\n * 4. Reshape permuted_reshaped_padded to flatten block_shape into the\n * batch dimension, producing an output tensor of shape:\n * [batch * prod(block_shape)] + [padded_shape[1] / block_shape[0], ...,\n * padded_shape[M] / block_shape[M-1]] + remaining_shape\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction spaceToBatchND_<T extends Tensor>(\n x: T|TensorLike, blockShape: number[], paddings: number[][]): T {\n const $x = convertToTensor(x, 'x', 'spaceToBatchND');\n\n util.assert(\n $x.rank >= 1 + blockShape.length,\n `input rank ${$x.rank} should be > than [blockShape] ${\n blockShape.length}`);\n\n util.assert(\n paddings.length === blockShape.length,\n `paddings.shape[0] ${paddings.length} must be equal to [blockShape] ${\n blockShape.length}`);\n\n util.assert(\n $x.shape.reduce(\n (a, b, i) => {\n if (i > 0 && i <= blockShape.length) {\n return a &&\n ((b + paddings[i - 1][0] + paddings[i - 1][1]) %\n blockShape[i - 1] ===\n 0);\n }\n return a;\n },\n true),\n `input spatial dimensions ${$x.shape.slice(1)} with paddings ${\n paddings.toString()} must be divisible by blockShapes ${\n blockShape.toString()}`);\n\n const grad = (dy: T) => {\n return {$x: () => dy.batchToSpaceND(blockShape, paddings)};\n };\n\n return ENV.engine.runKernel(\n backend => backend.spaceToBatchND($x, blockShape, paddings), {$x}, grad);\n}\n\n/**\n * Unstacks a `tf.Tensor` of rank-`R` into a list of rank-`(R-1)` `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * tf.unstack(a).forEach(tensor => tensor.print());\n * ```\n *\n * @param x A tensor object.\n * @param axis The axis to unstack along. 
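// Illustrative sketch (not from the bundled source): with zero paddings and zero
// crops, spaceToBatchND and batchToSpaceND invert each other, as the step-by-step
// descriptions above imply. Assumes the chained Tensor API shown in the examples.
import * as tf from '@tensorflow/tfjs';

const sb = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);
const blockShape = [2, 2];
const zeros = [[0, 0], [0, 0]];
sb.spaceToBatchND(blockShape, zeros)      // shape [4, 1, 1, 1]
    .batchToSpaceND(blockShape, zeros)    // back to shape [1, 2, 2, 1]
    .print();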
Defaults to 0 (the first dim).\n */\n/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */\nfunction unstack_<T extends Tensor>(x: T|TensorLike, axis = 0): Tensor[] {\n const $x = convertToTensor(x, 'x', 'unstack');\n const num = $x.shape[axis];\n const outputShape: number[] = Array($x.rank - 1).fill(0);\n let outIndex = 0;\n for (let i = 0; i < $x.rank; i++) {\n if (i !== axis) {\n outputShape[outIndex] = $x.shape[i];\n outIndex++;\n }\n }\n\n let splitSizes: number[];\n splitSizes = Array(num).fill(1);\n const begin = Array($x.rank).fill(0);\n const size = $x.shape.slice();\n return splitSizes.map(s => {\n size[axis] = s;\n const slice = $x.slice(begin, size);\n begin[axis] += s;\n return slice.reshape(outputShape);\n });\n}\n\n/**\n * Computes the cumulative sum of a `tf.Tensor` along `axis`.\n *\n * ```js\n * const x = tf.tensor([1, 2, 3, 4]);\n * x.cumsum().print();\n * ```\n * ```js\n * const x = tf.tensor([[1, 2], [3, 4]]);\n * x.cumsum().print();\n * ```\n *\n * @param x The input tensor to be summed.\n * @param axis The axis along which to sum. Optional. Defaults to 0.\n * @param exclusive Whether to perform exclusive cumulative sum. Optional.\n * Defaults to false. If set to true then the sum of each tensor entry\n * does not include its own value, but only the values previous to it\n * along the specified axis.\n * @param reverse Whether to sum in the opposite direction. Optional.\n * Defaults to false.\n */\n/** @doc {heading: 'Operations', subheading: 'Scan'} */\nfunction cumsum_<T extends Tensor>(\n x: Tensor|TensorLike, axis = 0, exclusive = false, reverse = false): T {\n const $x = convertToTensor(x, 'x', 'cumsum');\n\n axis = axis | 0;\n const permutation = getAxesPermutation([axis], $x.rank);\n let permutedX = $x;\n if (permutation != null) {\n permutedX = $x.transpose(permutation);\n }\n const permutedAxis = getInnerMostAxes(1, $x.rank)[0];\n\n const grad = (dy: T) => {\n return {permutedX: () => dy.cumsum(axis, exclusive, !reverse)};\n };\n let value = ENV.engine.runKernel(\n backend => backend.cumsum(\n permutedX, permutedAxis, exclusive, reverse),\n {permutedX}, grad) as T;\n\n if (permutation != null) {\n value = value.transpose(permutation);\n }\n return value;\n}\n\n/**\n * Returns a `tf.Tensor` that has expanded rank, by inserting a dimension\n * into the tensor's shape.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const axis = 1;\n * x.expandDims(axis).print();\n * ```\n *\n * @param x The input tensor whose dimensions to be expanded.\n * @param axis The dimension index at which to insert shape of `1`. Defaults\n * to 0 (the first dimension).\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction expandDims_<R2 extends Rank>(\n x: Tensor|TensorLike, axis = 0): Tensor<R2> {\n const $x = convertToTensor(x, 'x', 'expandDims');\n\n util.assert(axis <= $x.rank, 'Axis must be <= rank of the tensor');\n const newShape = $x.shape.slice();\n if (axis < 0) {\n // Negative value is counted from the tail of rank.\n util.assert(\n -($x.rank + 1) <= axis,\n `Axis must be in the interval [${- ($x.rank + 1)}, ${$x.rank}]`);\n axis = $x.rank + axis + 1;\n }\n newShape.splice(axis, 0, 1);\n return reshape($x, newShape);\n}\n\n/**\n * Rearranges data from depth into blocks of spatial data. 
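// Illustrative sketch (not from the bundled source) of cumsum's exclusive/reverse
// flags and expandDims, assuming the exported tf API.
import * as tf from '@tensorflow/tfjs';

const v = tf.tensor1d([1, 2, 3, 4]);
v.cumsum().print();                 // [1, 3, 6, 10]
v.cumsum(0, true).print();          // exclusive: [0, 1, 3, 6]
v.cumsum(0, false, true).print();   // reverse:   [10, 9, 7, 4]
v.expandDims(1).print();            // shape [4, 1]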
More specifically,\n * this op outputs a copy of the input tensor where values from the `depth`\n * dimension are moved in spatial blocks to the `height` and `width` dimensions.\n * The attr `blockSize` indicates the input block size and how the data is\n * moved.\n *\n * - Chunks of data of size `blockSize * blockSize` from depth are rearranged\n * into non-overlapping blocks of size `blockSize x blockSize`\n *\n * - The width the output tensor is `inputWidth * blockSize`, whereas the\n * height is `inputHeight * blockSize`\n *\n * - The Y, X coordinates within each block of the output image are determined\n * by the high order component of the input channel index\n *\n * - The depth of the input tensor must be divisible by `blockSize *\n * blockSize`\n *\n * The `dataFormat` attr specifies the layout of the input and output tensors\n * with the following options: \"NHWC\": [ `batch, height, width, channels` ]\n * \"NCHW\": [ `batch, channels, height, width` ]\n *\n * ```js\n * const x = tf.tensor4d([1, 2, 3, 4], [1, 1, 1, 4]);\n * const blockSize = 2;\n * const dataFormat = \"NHWC\";\n *\n * tf.depthToSpace(x, blockSize, dataFormat).print();\n * ```\n *\n * @param x The input tensor of rank 4\n * @param blockSIze An `int` that is `>= 2`. The size of the spatial block\n * @param dataFormat An optional string from: \"NHWC\", \"NCHW\". Defaults to \"NHWC\"\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nfunction depthToSpace_(\n x: Tensor4D|TensorLike4D, blockSize: number,\n dataFormat: 'NHWC'|'NCHW' = 'NHWC'): Tensor4D {\n const $x = convertToTensor(x, 'x', 'depthToSpace');\n\n const inputHeight = (dataFormat === 'NHWC') ? $x.shape[1] : $x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? $x.shape[2] : $x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? $x.shape[3] : $x.shape[1];\n\n util.assert(\n inputHeight * blockSize >= 0,\n `Negative dimension size caused by overflow when multiplying\n ${inputHeight} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n\n util.assert(\n inputWidth * blockSize >= 0,\n `Negative dimension size caused by overflow when multiplying\n ${inputWidth} and ${blockSize} for depthToSpace with input shape\n ${$x.shape}`);\n\n util.assert(\n (inputDepth % (blockSize * blockSize) === 0),\n `Dimension size must be evenly divisible by ${\n blockSize * blockSize} but is ${\n inputDepth} for depthToSpace with input shape ${$x.shape}`);\n\n return ENV.engine.runKernel(\n backend => backend.depthToSpace($x, blockSize, dataFormat), {$x});\n}\n\n/**\n * Computes the difference between two lists of numbers.\n *\n * Given a Tensor `x` and a Tensor `y`, this operation returns a Tensor `out`\n * that represents all values that are in `x` but not in `y`. The returned\n * Tensor `out` is sorted in the same order that the numbers appear in `x`\n * (duplicates are preserved). This operation also returns a Tensor indices that\n * represents the position of each out element in `x`. In other words:\n *\n * `out[i] = x[idx[i]] for i in [0, 1, ..., out.length - 1]`\n *\n * ```js\n * const x = [1, 2, 3, 4, 5, 6];\n * const y = [1, 3, 5];\n *\n * const [out, indices] = await tf.setdiff1dAsync(x, y);\n * out.print(); // [2, 4, 6]\n * indices.print(); // [1, 3, 5]\n * ```\n *\n * @param x 1-D Tensor. Values to keep.\n * @param y 1-D Tensor. Must have the same type as x. 
Values to exclude in the\n * output.\n * @returns Promise of Tensor tuple [out, indices].\n * out: Tensor with the same type as x.\n * indices: A Tensor of type int32.\n */\n/** @doc {heading: 'Tensors', subheading: 'Transformations'} */\nasync function setdiff1dAsync_(\n x: Tensor|TensorLike, y: Tensor|TensorLike): Promise<[Tensor, Tensor]> {\n const $x = convertToTensor(x, 'x', 'setdiff1d');\n const $y = convertToTensor(y, 'y', 'setdiff1d');\n\n util.assert(\n $x.dtype === $y.dtype,\n `x and y should have the same dtype, but got x (${$x.dtype}) and y (${\n $y.dtype}).`);\n\n util.assert($x.rank === 1, `x should be 1D tensor, but got x (${$x.shape}).`);\n\n util.assert($y.rank === 1, `y should be 1D tensor, but got y (${$y.shape}).`);\n\n const xVals = await $x.data();\n const yVals = await $y.data();\n const ySet = new Set(yVals);\n\n let outputSize = 0;\n for (let i = 0; i < xVals.length; i++) {\n if (!ySet.has(xVals[i])) {\n outputSize++;\n }\n }\n\n const buffer = new TensorBuffer([outputSize], $x.dtype);\n const indices = new TensorBuffer([outputSize], 'int32');\n for (let i = 0, p = 0; i < xVals.length; i++) {\n if (!ySet.has(xVals[i])) {\n buffer.values[p] = xVals[i];\n indices.values[p] = i;\n p++;\n }\n }\n return [buffer.toTensor(), indices.toTensor()];\n}\n\n/**\n * Creates an empty `tf.TensorBuffer` with the specified `shape` and `dtype`.\n *\n * The values are stored in CPU as `TypedArray`. Fill the buffer using\n * `buffer.set()`, or by modifying directly `buffer.values`.\n *\n * When done, call `buffer.toTensor()` to get an immutable `tf.Tensor` with\n * those values.\n *\n * ```js\n * // Create a buffer and set values at particular indices.\n * const buffer = tf.buffer([2, 2]);\n * buffer.set(3, 0, 0);\n * buffer.set(5, 1, 0);\n *\n * // Convert the buffer back to a tensor.\n * buffer.toTensor().print();\n * ```\n *\n * @param shape An array of integers defining the output tensor shape.\n * @param dtype The dtype of the buffer. Defaults to 'float32'.\n * @param values The values of the buffer as `TypedArray`. 
Defaults to\n * zeros.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction buffer<R extends Rank>(\n shape: ShapeMap[R], dtype: DataType = 'float32',\n values?: TypedArray): TensorBuffer<R> {\n return new TensorBuffer<R>(shape, dtype, values);\n}\n\n/**\n * Prints information about the `tf.Tensor` including its data.\n *\n * ```js\n * const verbose = true;\n * tf.tensor2d([1, 2, 3, 4], [2, 2]).print(verbose);\n * ```\n * @param x The tensor to be printed.\n * @param verbose Whether to print verbose information about the ` Tensor`,\n * including dtype and size.\n */\n/** @doc {heading: 'Tensors', subheading: 'Creation'} */\nfunction print<T extends Tensor>(x: T, verbose = false): void {\n console.log(x.toString(verbose));\n}\n\nexport {\n buffer, // Not wrapped in op() since no tensors.\n toPixels, // Not wrapped in op() since async.\n print // Not wrapped in op() since no need to increase stack trace.\n};\n\nexport const batchToSpaceND = op({batchToSpaceND_});\nexport const cast = op({cast_});\nexport const clone = op({clone_});\nexport const cumsum = op({cumsum_});\nexport const depthToSpace = op({depthToSpace_});\nexport const expandDims = op({expandDims_});\nexport const eye = op({eye_});\nexport const fromPixels = op({fromPixels_});\nexport const multinomial = op({multinomial_});\nexport const oneHot = op({oneHot_});\nexport const pad = op({pad_});\nexport const pad1d = op({pad1d_});\nexport const pad2d = op({pad2d_});\nexport const pad3d = op({pad3d_});\nexport const pad4d = op({pad4d_});\nexport const rand = op({rand_});\nexport const randomNormal = op({randomNormal_});\nexport const randomUniform = op({randomUniform_});\nexport const reshape = op({reshape_});\nexport const spaceToBatchND = op({spaceToBatchND_});\nexport const squeeze = op({squeeze_});\nexport const stack = op({stack_});\nexport const tile = op({tile_});\nexport const truncatedNormal = op({truncatedNormal_});\nexport const unstack = op({unstack_});\nexport const setdiff1dAsync = setdiff1dAsync_;\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/** An implementation of the Where kernel shared between cpu and webgl */\n\nimport {buffer} from '../ops/array_ops';\nimport {Tensor2D} from '../tensor';\nimport {TypedArray} from '../types';\n\nexport function whereImpl(condShape: number[], condVals: TypedArray): Tensor2D {\n const indices = [];\n for (let i = 0; i < condVals.length; i++) {\n if (condVals[i]) {\n indices.push(i);\n }\n }\n\n const inBuffer = buffer(condShape, 'int32');\n\n const out = buffer([indices.length, condShape.length], 'int32');\n for (let i = 0; i < indices.length; i++) {\n const loc = inBuffer.indexToLoc(indices[i]);\n const offset = i * condShape.length;\n out.values.set(loc, offset);\n }\n return out.toTensor() as Tensor2D;\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {getWebGLContext} from '../canvas_util';\nimport {MemoryInfo, TimingInfo} from '../engine';\nimport {ENV} from '../environment';\nimport {tidy} from '../globals';\nimport {warn} from '../log';\nimport * as array_ops_util from '../ops/array_ops_util';\nimport * as axis_util from '../ops/axis_util';\nimport {computeOutShape} from '../ops/concat_util';\nimport {Conv2DInfo} from '../ops/conv_util';\nimport * as gather_nd_util from '../ops/gather_nd_util';\nimport * as reduce_util from '../ops/reduce_util';\nimport * as scatter_nd_util from '../ops/scatter_nd_util';\nimport * as segment_util from '../ops/segment_util';\nimport {getStridedSlicedInfo} from '../ops/slice_util';\nimport {softmax} from '../ops/softmax';\nimport {range, scalar, tensor} from '../ops/tensor_ops';\nimport {DataId, Scalar, setTensorTracker, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D} from '../tensor';\nimport {DataType, DataTypeMap, Rank, RecursiveArray, ShapeMap, sumOutType, TypedArray, upcastType} from '../types';\nimport * as util from '../util';\nimport {getTypedArrayFromDType, sizeFromShape} from '../util';\n\nimport {DataMover, DataStorage, KernelBackend} from './backend';\nimport * as backend_util from './backend_util';\nimport {mergeRealAndImagArrays} from './complex_util';\nimport {nonMaxSuppressionImpl} from './non_max_suppression_impl';\nimport {split} from './split_shared';\nimport {topkImpl} from './topk_impl';\nimport {ArgMinMaxProgram} from './webgl/argminmax_gpu';\nimport {AvgPool2DBackpropProgram} from './webgl/avg_pool_backprop_gpu';\nimport {BatchNormProgram} from './webgl/batchnorm_gpu';\nimport {BatchNormPackedProgram} from './webgl/batchnorm_packed_gpu';\nimport * as binaryop_complex_gpu from './webgl/binaryop_complex_gpu';\nimport {BinaryOpComplexProgram} from './webgl/binaryop_complex_gpu';\nimport * as binaryop_gpu from './webgl/binaryop_gpu';\nimport {BinaryOpProgram} from './webgl/binaryop_gpu';\nimport {ClipProgram} from './webgl/clip_gpu';\nimport {ComplexAbsProgram} from './webgl/complex_abs_gpu';\nimport {ConcatProgram} from './webgl/concat_gpu';\nimport {Conv2DDerFilterProgram, Conv2DDerInputProgram} from './webgl/conv_backprop_gpu';\nimport {DepthwiseConv2DDerFilterProgram, DepthwiseConv2DDerInputProgram} from './webgl/conv_backprop_gpu_depthwise';\nimport {Conv2DProgram} from './webgl/conv_gpu';\nimport {DepthwiseConv2DProgram} from './webgl/conv_gpu_depthwise';\nimport {CropAndResizeProgram} from './webgl/crop_and_resize_gpu';\nimport {CumSumProgram} from './webgl/cumsum_gpu';\nimport {DepthToSpaceProgram} from './webgl/depth_to_space_gpu';\nimport {EncodeFloatProgram} from './webgl/encode_float_gpu';\nimport * as fft_gpu from './webgl/fft_gpu';\nimport {FFTProgram} from './webgl/fft_gpu';\nimport {FromPixelsProgram} from './webgl/from_pixels_gpu';\nimport {GatherProgram} from './webgl/gather_gpu';\nimport 
{GatherNDProgram} from './webgl/gather_nd_gpu';\nimport {GPGPUContext} from './webgl/gpgpu_context';\nimport * as gpgpu_math from './webgl/gpgpu_math';\nimport {GPGPUBinary, GPGPUProgram, TensorData} from './webgl/gpgpu_math';\nimport {Im2ColProgram} from './webgl/im2col_gpu';\nimport {LRNProgram} from './webgl/lrn_gpu';\nimport {LRNGradProgram} from './webgl/lrn_grad_gpu';\nimport {MaxPool2DBackpropProgram} from './webgl/max_pool_backprop_gpu';\nimport {MatMulProgram} from './webgl/mulmat_gpu';\nimport {MatMulPackedProgram} from './webgl/mulmat_packed_gpu';\nimport {MultinomialProgram} from './webgl/multinomial_gpu';\nimport {OneHotProgram} from './webgl/onehot_gpu';\nimport {PackProgram} from './webgl/pack_gpu';\nimport {PadProgram} from './webgl/pad_gpu';\nimport {Pool2DProgram} from './webgl/pool_gpu';\nimport {ReduceProgram} from './webgl/reduce_gpu';\nimport {ReshapePackedProgram} from './webgl/reshape_packed_gpu';\nimport {ResizeBilinearBackpropProgram} from './webgl/resize_bilinear_backprop_gpu';\nimport {ResizeBilinearProgram} from './webgl/resize_bilinear_gpu';\nimport {ResizeNearestNeigborBackpropProgram} from './webgl/resize_nearest_neighbor_backprop_gpu';\nimport {ResizeNearestNeighborProgram} from './webgl/resize_nearest_neighbor_gpu';\nimport {ReverseProgram} from './webgl/reverse_gpu';\nimport {ScatterProgram} from './webgl/scatter_gpu';\nimport {SegmentOpProgram} from './webgl/segment_gpu';\nimport {SelectProgram} from './webgl/select_gpu';\nimport {SliceProgram} from './webgl/slice_gpu';\nimport {StridedSliceProgram} from './webgl/strided_slice_gpu';\nimport {TextureData, TextureUsage} from './webgl/tex_util';\nimport {TextureManager} from './webgl/texture_manager';\nimport {TileProgram} from './webgl/tile_gpu';\nimport {TransposeProgram} from './webgl/transpose_gpu';\nimport * as unary_op from './webgl/unaryop_gpu';\nimport {UnaryOpProgram} from './webgl/unaryop_gpu';\nimport {UnpackProgram} from './webgl/unpack_gpu';\nimport * as webgl_util from './webgl/webgl_util';\nimport {whereImpl} from './where_impl';\n\ntype KernelInfo = {\n name: string; query: Promise<number>;\n};\n\nexport type TimerNode = RecursiveArray<KernelInfo>|KernelInfo;\nexport interface CPUTimerQuery {\n startMs: number;\n endMs?: number;\n}\n\nexport interface WebGLMemoryInfo extends MemoryInfo {\n numBytesInGPU: number;\n unreliable: boolean;\n}\n\nexport interface WebGLTimingInfo extends TimingInfo {\n uploadWaitMs: number;\n downloadWaitMs: number;\n}\n\n// Combines a dataId, a shape, and a dtype without a Tensor object so that\n// programs can be executed without a full Tensor object.\nexport interface TensorHandle {\n dataId: DataId;\n shape: number[];\n dtype: DataType;\n}\n\n// Empirically determined constant used to determine size threshold for handing\n// off execution to the CPU.\nconst CPU_HANDOFF_SIZE_THRESHOLD = 10;\n// Empirically determined constant used to decide the number of bytes on GPU\n// before we start paging. The bytes are this constant * screen area * dpi.\nconst BEFORE_PAGING_CONSTANT = 300;\n// Tensors with size <= than this will be uploaded as uniforms, not textures.\nexport const SIZE_UPLOAD_UNIFORM = 4;\n// Empirically determined minimal shared dimension in matmul before we forward\n// to a.mul(b).sum() in order to take advantage of GPU parallelism. 
See\n// https://github.com/tensorflow/tfjs-core/pull/1379 for benchmarks.\nexport const MATMUL_SHARED_DIM_THRESHOLD = 1000;\n\nexport class MathBackendWebGL implements KernelBackend {\n private texData: DataStorage<TextureData>;\n // Maps data ids that have a pending read operation, to list of subscribers.\n private pendingRead = new WeakMap<DataId, Array<(arr: TypedArray) => void>>();\n // List of data ids that are scheduled for disposal, but are waiting on a\n // pending read operation.\n private pendingDisposal = new WeakSet<DataId>();\n // List of data ids that are currently residing on gpu memory. Sorted with\n // least recently used being first.\n private lruDataGPU: DataId[] = [];\n private numBytesInGPU = 0;\n /**\n * Number of bytes allocated on the GPU before we start moving data to cpu.\n * Moving avoids gpu memory leaks and relies on JS's garbage collector.\n */\n private NUM_BYTES_BEFORE_PAGING: number;\n\n private canvas: HTMLCanvasElement;\n private fromPixels2DContext: CanvasRenderingContext2D;\n\n private programTimersStack: TimerNode[];\n private activeTimers: TimerNode[];\n // Accumulated time spent (including blocking) in uploading data to webgl.\n private uploadWaitMs = 0;\n // Accumulated time spent (including blocking in downloading data from webgl.\n private downloadWaitMs = 0;\n private cpuBackend: KernelBackend;\n\n register(dataId: DataId, shape: number[], dtype: DataType): void {\n if (this.texData.has(dataId)) {\n throw new Error('Data buffer is already registered');\n }\n this.texData.set(dataId, {\n shape,\n dtype,\n values: null,\n texture: null,\n complexTensors: null,\n texShape: null,\n usage: TextureUsage.RENDER,\n isPacked: false\n });\n }\n\n setDataMover(dataMover: DataMover): void {\n this.texData = new DataStorage(dataMover);\n }\n\n fromPixels(\n pixels: ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement,\n numChannels: number): Tensor3D {\n if (pixels == null) {\n throw new Error('pixels passed to tf.fromPixels() can not be null');\n }\n const texShape: [number, number] = [pixels.height, pixels.width];\n const outShape = [pixels.height, pixels.width, numChannels];\n\n if (!(pixels instanceof HTMLVideoElement) &&\n !(pixels instanceof HTMLImageElement) &&\n !(pixels instanceof HTMLCanvasElement) &&\n !(pixels instanceof ImageData)) {\n throw new Error(\n 'pixels passed to tf.fromPixels() must be either an ' +\n `HTMLVideoElement, HTMLImageElement, HTMLCanvasElement or ` +\n `ImageData, but was ${(pixels as {}).constructor.name}`);\n }\n if (pixels instanceof HTMLVideoElement) {\n if (this.fromPixels2DContext == null) {\n if (!ENV.get('IS_BROWSER')) {\n throw new Error(\n 'Can\\'t read pixels from HTMLImageElement outside the browser.');\n }\n if (document.readyState !== 'complete') {\n throw new Error(\n 'The DOM is not ready yet. Please call tf.fromPixels() ' +\n 'once the DOM is ready. 
One way to do that is to add an event ' +\n 'listener for `DOMContentLoaded` on the document object');\n }\n this.fromPixels2DContext =\n document.createElement('canvas').getContext('2d');\n }\n this.fromPixels2DContext.canvas.width = pixels.width;\n this.fromPixels2DContext.canvas.height = pixels.height;\n this.fromPixels2DContext.drawImage(\n pixels, 0, 0, pixels.width, pixels.height);\n pixels = this.fromPixels2DContext.canvas;\n }\n const tempPixelHandle = this.makeTensorHandle(texShape, 'int32');\n // This is a byte texture with pixels.\n this.texData.get(tempPixelHandle.dataId).usage = TextureUsage.PIXELS;\n this.gpgpu.uploadPixelDataToTexture(\n this.getTexture(tempPixelHandle.dataId), pixels);\n const program = new FromPixelsProgram(outShape);\n const res = this.compileAndRun(program, [tempPixelHandle]);\n\n this.disposeData(tempPixelHandle.dataId);\n\n return res as Tensor3D;\n }\n\n private makeTensorHandle(shape: number[], dtype: DataType): TensorHandle {\n const dataId = {};\n this.register(dataId, shape, dtype);\n return {dataId, shape, dtype};\n }\n\n write(dataId: DataId, values: TypedArray): void {\n if (values == null) {\n throw new Error('MathBackendWebGL.write(): values can not be null');\n }\n const texData = this.texData.get(dataId);\n const {texture, texShape, usage, dtype, isPacked} = texData;\n if (dtype === 'complex64') {\n throw new Error(\n `Cannot write to a complex64 dtype. ` +\n `Please use tf.complex(real, imag).`);\n }\n\n if (texture != null) {\n // Release the old texture.\n this.releaseTexture(dataId, texture, texShape, usage, isPacked);\n texData.texture = null;\n texData.texShape = null;\n }\n texData.usage = TextureUsage.UPLOAD;\n texData.values = values;\n\n if (!this.delayedStorage) {\n this.uploadToGPU(dataId);\n }\n }\n readSync(dataId: DataId): TypedArray {\n const texData = this.texData.get(dataId);\n const {values, dtype, complexTensors} = texData;\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start: number;\n if (shouldTimeProgram) {\n start = performance.now();\n }\n\n let result: Float32Array;\n if (dtype === 'complex64') {\n const realValues = complexTensors.real.dataSync() as Float32Array;\n const imagValues = complexTensors.imag.dataSync() as Float32Array;\n result = mergeRealAndImagArrays(realValues, imagValues);\n } else {\n result = this.getValuesFromTexture(dataId);\n }\n\n if (shouldTimeProgram) {\n this.downloadWaitMs += performance.now() - start;\n }\n return this.convertAndCacheOnCPU(dataId, result);\n }\n\n async read(dataId: DataId): Promise<TypedArray> {\n if (this.pendingRead.has(dataId)) {\n const subscribers = this.pendingRead.get(dataId);\n return new Promise<TypedArray>(resolve => subscribers.push(resolve));\n }\n const texData = this.texData.get(dataId);\n const {texture, values, texShape} = texData;\n if (values != null) {\n return this.convertAndCacheOnCPU(dataId);\n }\n\n this.pendingRead.set(dataId, []);\n\n if (!ENV.get('WEBGL_DOWNLOAD_FLOAT_ENABLED') &&\n ENV.get('WEBGL_VERSION') === 2) {\n throw new Error(\n `tensor.data() with WEBGL_DOWNLOAD_FLOAT_ENABLED=false and ` +\n `WEBGL_VERSION=2 not yet supported.`);\n }\n\n // Possibly copy the texture into a buffer before inserting a fence.\n const bufferOrTexture = this.gpgpu.maybeCreateBufferFromTexture(\n texture, texShape[0], texShape[1]);\n\n // Create a fence and wait for it to resolve.\n await this.gpgpu.createAndWaitForFence();\n\n // Download the values from the GPU.\n let 
vals: Float32Array;\n if (bufferOrTexture instanceof WebGLTexture) {\n vals = this.getValuesFromTexture(dataId);\n } else {\n vals = this.gpgpu.downloadFloat32MatrixFromBuffer(\n bufferOrTexture, texShape[0], texShape[1]);\n }\n const dTypeVals = this.convertAndCacheOnCPU(dataId, vals);\n\n const subscribers = this.pendingRead.get(dataId);\n this.pendingRead.delete(dataId);\n\n // Notify all pending reads.\n subscribers.forEach(resolve => resolve(dTypeVals));\n if (this.pendingDisposal.has(dataId)) {\n this.pendingDisposal.delete(dataId);\n this.disposeData(dataId);\n }\n return dTypeVals;\n }\n\n private getValuesFromTexture(dataId: DataId): Float32Array {\n const {shape, dtype, texture, texShape} = this.texData.get(dataId);\n if (ENV.get('WEBGL_DOWNLOAD_FLOAT_ENABLED')) {\n if (this.texData.get(dataId).isPacked) {\n const batch = util.sizeFromShape(shape.slice(0, shape.length - 2));\n const rows = shape.length > 1 ? shape[shape.length - 2] : 1;\n const cols = shape[shape.length - 1];\n return this.gpgpu.downloadMatrixFromPackedTexture(\n texture, batch, rows, cols, texShape[0], texShape[1]);\n } else {\n return this.gpgpu.downloadFloat32MatrixFromOutputTexture(\n texture, texShape[0], texShape[1]);\n }\n }\n\n const tmpTarget = this.makeTensorHandle(shape, 'float32') as TensorHandle &\n {size: number};\n tmpTarget.size = sizeFromShape(shape);\n this.texData.get(tmpTarget.dataId).usage = TextureUsage.DOWNLOAD;\n const program = new EncodeFloatProgram(shape);\n const pageToCpu = false;\n this.compileAndRun(\n program, [{shape, dtype, dataId}], tmpTarget, null, pageToCpu);\n const tmpData = this.texData.get(tmpTarget.dataId);\n const vals = this.gpgpu.downloadByteEncodedFloatMatrixFromOutputTexture(\n tmpData.texture, tmpData.texShape[0], tmpData.texShape[1]);\n this.disposeData(tmpTarget.dataId);\n\n return vals;\n }\n\n async time(f: () => void): Promise<WebGLTimingInfo> {\n const oldActiveTimers = this.activeTimers;\n const newActiveTimers: TimerNode[] = [];\n\n let outerMostTime = false;\n if (this.programTimersStack == null) {\n this.programTimersStack = newActiveTimers;\n outerMostTime = true;\n } else {\n this.activeTimers.push(newActiveTimers);\n }\n this.activeTimers = newActiveTimers;\n\n f();\n\n // needing to split these up because util.flatten only accepts certain types\n const flattenedActiveTimerQueries =\n util.flatten(this.activeTimers.map((d: KernelInfo) => d.query))\n .filter(d => d != null);\n const flattenedActiveTimerNames =\n util.flatten(this.activeTimers.map((d: KernelInfo) => d.name))\n .filter(d => d != null);\n\n this.activeTimers = oldActiveTimers;\n\n if (outerMostTime) {\n this.programTimersStack = null;\n }\n\n const kernelMs = await Promise.all(flattenedActiveTimerQueries);\n\n const res: WebGLTimingInfo = {\n uploadWaitMs: this.uploadWaitMs,\n downloadWaitMs: this.downloadWaitMs,\n kernelMs: util.sum(kernelMs),\n getExtraProfileInfo: () =>\n kernelMs.map((d, i) => ({name: flattenedActiveTimerNames[i], ms: d}))\n .map(d => `${d.name}: ${d.ms}`)\n .join(', '),\n wallMs: null // will be filled by the engine\n };\n this.uploadWaitMs = 0;\n this.downloadWaitMs = 0;\n return res;\n }\n memory(): WebGLMemoryInfo {\n return {unreliable: false, numBytesInGPU: this.numBytesInGPU} as\n WebGLMemoryInfo;\n }\n\n private startTimer(): WebGLQuery|CPUTimerQuery {\n if (ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0) {\n return this.gpgpu.beginQuery();\n }\n return {startMs: performance.now(), endMs: null};\n }\n\n private endTimer(query: 
WebGLQuery|CPUTimerQuery): WebGLQuery|CPUTimerQuery {\n if (ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0) {\n this.gpgpu.endQuery();\n return query;\n }\n (query as CPUTimerQuery).endMs = performance.now();\n return query;\n }\n\n private async getQueryTime(query: WebGLQuery|CPUTimerQuery): Promise<number> {\n if (ENV.get('WEBGL_DISJOINT_QUERY_TIMER_EXTENSION_VERSION') > 0) {\n return this.gpgpu.waitForQueryAndGetTime(query as WebGLQuery);\n }\n const timerQuery = query as CPUTimerQuery;\n return timerQuery.endMs - timerQuery.startMs;\n }\n\n disposeData(dataId: DataId): void {\n if (this.pendingDisposal.has(dataId)) {\n return;\n }\n if (this.pendingRead.has(dataId)) {\n this.pendingDisposal.add(dataId);\n return;\n }\n if (this.texData.has(dataId)) {\n const {texture, texShape, usage, complexTensors, isPacked} =\n this.texData.get(dataId);\n if (texture != null) {\n this.releaseTexture(dataId, texture, texShape, usage, isPacked);\n }\n if (complexTensors != null) {\n complexTensors.real.dispose();\n complexTensors.imag.dispose();\n }\n this.texData.delete(dataId);\n }\n }\n\n getTexture(dataId: DataId): WebGLTexture {\n this.uploadToGPU(dataId);\n return this.texData.get(dataId).texture;\n }\n\n private textureManager: TextureManager;\n private binaryCache: {[key: string]: GPGPUBinary} = {};\n private gpgpuCreatedLocally: boolean;\n\n constructor(private gpgpu?: GPGPUContext, private delayedStorage = true) {\n if (ENV.get('WEBGL_VERSION') < 1) {\n throw new Error('WebGL is not supported on this device');\n }\n\n if (gpgpu == null) {\n const gl = getWebGLContext(ENV.get('WEBGL_VERSION'));\n this.gpgpu = new GPGPUContext(gl);\n this.canvas = gl.canvas;\n this.gpgpuCreatedLocally = true;\n } else {\n this.gpgpuCreatedLocally = false;\n this.canvas = gpgpu.gl.canvas;\n }\n if (ENV.get('WEBGL_PAGING_ENABLED')) {\n // Use the device screen's resolution as a heuristic to decide on the\n // maximum memory allocated on the GPU before starting to page.\n this.NUM_BYTES_BEFORE_PAGING =\n (window.screen.height * window.screen.width *\n window.devicePixelRatio) *\n BEFORE_PAGING_CONSTANT;\n }\n this.textureManager = new TextureManager(this.gpgpu);\n }\n\n private getCPUBackend(): KernelBackend|null {\n if (!ENV.get('WEBGL_CPU_FORWARD')) {\n return null;\n }\n\n if (this.cpuBackend == null) {\n this.cpuBackend = ENV.findBackend('cpu');\n }\n\n return this.cpuBackend;\n }\n\n /*\n Tests whether all the inputs to an op are small and on the CPU. This heuristic\n determines when it would be faster to execute a kernel on the CPU. WebGL\n kernels opt into running this check and forwarding when appropriate.\n TODO(https://github.com/tensorflow/tfjs/issues/872): Develop a more\n sustainable strategy for optimizing backend execution of ops.\n */\n private shouldExecuteOnCPU(\n inputs: Tensor[], sizeThreshold = CPU_HANDOFF_SIZE_THRESHOLD): boolean {\n return this.getCPUBackend() != null &&\n inputs.every(\n input => this.texData.get(input.dataId).texture == null &&\n input.size < sizeThreshold);\n }\n\n getGPGPUContext(): GPGPUContext {\n return this.gpgpu;\n }\n getCanvas(): HTMLCanvasElement {\n return this.canvas;\n }\n\n complex<T extends Tensor>(real: T, imag: T): T {\n const result = this.makeOutputArray(real.shape, 'complex64') as T;\n const resultData = this.texData.get(result.dataId);\n // The backend owns the reference to the underlying real and imaginary\n // clones. 
These will explicitly get disposed when the complex tensor is\n // disposed.\n resultData.complexTensors = {\n real: ENV.engine.keep(real.clone()),\n imag: ENV.engine.keep(imag.clone())\n };\n\n return result;\n }\n real<T extends Tensor>(input: T): T {\n const resultData = this.texData.get(input.dataId);\n return resultData.complexTensors.real.clone() as T;\n }\n imag<T extends Tensor>(input: T): T {\n const resultData = this.texData.get(input.dataId);\n return resultData.complexTensors.imag.clone() as T;\n }\n\n slice<T extends Tensor>(x: T, begin: number[], size: number[]): T {\n if (this.shouldExecuteOnCPU([x])) {\n return this.cpuBackend.slice(x, begin, size);\n }\n\n const program = new SliceProgram(size);\n const customSetup = program.getCustomSetupFunc(begin);\n return this.compileAndRun(program, [x], null, customSetup);\n }\n\n stridedSlice<T extends Tensor>(\n x: T, begin: number[], end: number[], strides: number[],\n beginMask: number, endMask: number, ellipsisMask: number,\n newAxisMask: number, shrinkAxisMask: number): T {\n if (this.shouldExecuteOnCPU([x])) {\n return this.cpuBackend.stridedSlice(\n x, begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask,\n shrinkAxisMask);\n }\n\n const [beginIndex, size, shrinkAxis] = getStridedSlicedInfo(\n x.shape, begin, end, strides, beginMask, endMask, ellipsisMask,\n newAxisMask, shrinkAxisMask);\n\n const shape = size.filter((v, index) => shrinkAxis.indexOf(index) === -1);\n if (shape.some(axis => axis === 0)) {\n return tensor([], shape) as T;\n }\n\n const program =\n new StridedSliceProgram(beginIndex, strides, size, shrinkAxis);\n return this.compileAndRun(program, [x]);\n }\n\n reverse<T extends Tensor>(x: T, axis: number[]): T {\n const program = new ReverseProgram(x.shape, axis);\n return this.compileAndRun(program, [x]);\n }\n\n private concat2Tensors<T extends Tensor>(a: T, b: T, axis: number): T {\n // Any concat of n-dimensional tensors across any axis can be reduced to\n // a concatenation of two-dimensional tensors across the axis 1 by first\n // partitioning the axes of the original tensors into those less than the\n // axis to be concatenated and the rest. Then reshape the tensors\n // into a two-dimensional tensor by collapsing these two sets of axes and\n // concatenate the resulting matrices across the axis 1, finally reshaping\n // the result to have the proper shape.\n const outShape = computeOutShape([a.shape, b.shape], axis);\n const a2D = a.as2D(-1, sizeFromShape(a.shape.slice(axis)));\n const b2D = b.as2D(-1, sizeFromShape(b.shape.slice(axis)));\n const program = new ConcatProgram(a2D.shape, b2D.shape);\n const res = this.compileAndRun(program, [a2D, b2D]) as Tensor;\n return res.reshape(outShape) as T;\n }\n\n concat(tensors: Tensor[], axis: number): Tensor {\n if (this.shouldExecuteOnCPU(tensors)) {\n return this.cpuBackend.concat(tensors, axis);\n }\n\n if (tensors.length === 1) {\n return tensors[0];\n }\n let result = tensors[0];\n for (let i = 1; i < tensors.length; ++i) {\n result = this.concat2Tensors(result, tensors[i], axis);\n }\n return result;\n }\n\n neg<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.NEG);\n return this.compileAndRun(program, [x]) as T;\n }\n\n batchMatMul(\n a: Tensor3D, b: Tensor3D, transposeA: boolean,\n transposeB: boolean): Tensor3D {\n const outerShapeA = transposeA ? a.shape[2] : a.shape[1];\n const outerShapeB = transposeB ? b.shape[1] : b.shape[2];\n const sharedDim = transposeA ? 
a.shape[1] : a.shape[2];\n const [batch, , ] = a.shape;\n\n // Since the matrices are vectors, it is faster to call mul().sum()\n // because sum() is O(sqrt(N)) due to divide-and-conquer.\n if ((outerShapeA === 1 || outerShapeB === 1) &&\n sharedDim > MATMUL_SHARED_DIM_THRESHOLD) {\n if (transposeA) {\n a = a.transpose([0, 2, 1]);\n }\n if (transposeB) {\n b = b.transpose([0, 2, 1]);\n }\n\n const a3D = outerShapeB === 1 ? a : a.as3D(batch, sharedDim, 1);\n const axis = outerShapeB === 1 ? 2 : 1;\n const b3D = outerShapeB === 1 ? b.as3D(batch, 1, sharedDim) : b;\n return this.multiply(a3D, b3D).sum(axis, true /* keepDims */);\n }\n\n // TODO(https://github.com/tensorflow/tfjs/issues/693): Support 3D tensors\n if (batch === 1) {\n const aSqueezed = a.as2D(a.shape[1], a.shape[2]);\n const bSqueezed = b.as2D(b.shape[1], b.shape[2]);\n\n const program = new MatMulPackedProgram(\n aSqueezed.shape, bSqueezed.shape, [outerShapeA, outerShapeB],\n transposeA, transposeB);\n let result = this.compileAndRun(\n program, [aSqueezed, bSqueezed],\n this.makePackedTensor<Tensor2D>(program.outputShape));\n\n if (ENV.get('WEBGL_LAZILY_UNPACK') === false) {\n result = this.unpackTensor(result);\n }\n\n return result.reshape([1, result.shape[0], result.shape[1]]);\n } else {\n return this.compileAndRun(\n new MatMulProgram(a.shape, b.shape, transposeA, transposeB), [a, b]);\n }\n }\n\n multiply(a: Tensor, b: Tensor): Tensor {\n if (a.dtype === 'complex64') {\n const aData = this.texData.get(a.dataId);\n const bData = this.texData.get(b.dataId);\n\n const realProgram = new BinaryOpComplexProgram(\n binaryop_complex_gpu.COMPLEX_MULTIPLY.REAL, a.shape, b.shape);\n const imagProgram = new BinaryOpComplexProgram(\n binaryop_complex_gpu.COMPLEX_MULTIPLY.IMAG, a.shape, b.shape);\n\n const inputs = [\n this.makeComplexComponentTensorHandle(a, aData.complexTensors.real),\n this.makeComplexComponentTensorHandle(a, aData.complexTensors.imag),\n this.makeComplexComponentTensorHandle(b, bData.complexTensors.real),\n this.makeComplexComponentTensorHandle(b, bData.complexTensors.imag)\n ];\n const real = this.compileAndRun<Tensor>(realProgram, inputs);\n const imag = this.compileAndRun<Tensor>(imagProgram, inputs);\n\n const complex = this.complex(real, imag);\n real.dispose();\n imag.dispose();\n return complex;\n }\n\n if (this.shouldExecuteOnCPU([a, b])) {\n return this.cpuBackend.multiply(a, b);\n }\n\n const program = new BinaryOpProgram(binaryop_gpu.MUL, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, a.dtype) as Tensor;\n return this.compileAndRun(program, [a, b], output) as Tensor;\n }\n\n batchNormalization(\n x: Tensor4D, mean: Tensor4D|Tensor1D, variance: Tensor4D|Tensor1D,\n varianceEpsilon: number, scale?: Tensor4D|Tensor1D,\n offset?: Tensor4D|Tensor1D): Tensor4D {\n const inputs = [x, mean, variance];\n\n let offsetShape = null;\n if (offset != null) {\n offsetShape = offset.shape;\n inputs.push(offset);\n }\n\n let scaleShape = null;\n if (scale != null) {\n scaleShape = scale.shape;\n inputs.push(scale);\n }\n\n let output = null;\n let envSpecificBatchNormProgram = BatchNormProgram;\n\n if (ENV.get('WEBGL_PACK_BATCHNORMALIZATION')) {\n output = this.makePackedTensor(x.shape);\n envSpecificBatchNormProgram = BatchNormPackedProgram;\n }\n\n const program = new envSpecificBatchNormProgram(\n x.shape, mean.shape, variance.shape, offsetShape, scaleShape,\n varianceEpsilon);\n return this.compileAndRun(program, inputs, output);\n }\n\n localResponseNormalization4D(\n x: Tensor4D, 
radius: number, bias: number, alpha: number,\n beta: number): Tensor4D {\n const program = new LRNProgram(x.shape, radius, bias, alpha, beta);\n return this.compileAndRun(program, [x]);\n }\n\n LRNGrad(\n dy: Tensor4D, inputImage: Tensor4D, outputImage: Tensor4D,\n depthRadius: number, bias: number, alpha: number,\n beta: number): Tensor4D {\n const program =\n new LRNGradProgram(inputImage.shape, depthRadius, bias, alpha, beta);\n return this.compileAndRun(program, [inputImage, outputImage, dy]);\n }\n\n tile<T extends Tensor>(x: T, reps: number[]): T {\n const program = new TileProgram(x.shape, reps);\n return this.compileAndRun(program, [x]);\n }\n\n pad<T extends Tensor>(\n x: T, paddings: Array<[number, number]>, constantValue: number): T {\n const program = new PadProgram(x.shape, paddings, constantValue);\n return this.compileAndRun(program, [x]);\n }\n\n transpose<T extends Tensor>(x: T, perm: number[]): T {\n const program = new TransposeProgram(x.shape, perm);\n return this.compileAndRun(program, [x]);\n }\n\n gather<T extends Tensor>(x: T, indices: Tensor1D, axis: number): T {\n const program = new GatherProgram(x.shape, indices.size, axis);\n return this.compileAndRun(program, [x, indices]);\n }\n\n batchToSpaceND<T extends Tensor>(\n x: T, blockShape: number[], crops: number[][]): T {\n util.assert(\n x.rank <= 4,\n 'batchToSpaceND for rank > 4 with a WebGL backend not implemented yet');\n const prod = blockShape.reduce((a, b) => a * b);\n\n const reshaped = array_ops_util.getReshaped(x.shape, blockShape, prod);\n const permuted =\n array_ops_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted =\n array_ops_util.getReshapedPermuted(x.shape, blockShape, prod);\n const sliceBeginCoords =\n array_ops_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize =\n array_ops_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n\n return x.reshape(reshaped)\n .transpose(permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize) as T;\n }\n\n spaceToBatchND<T extends Tensor>(\n x: T, blockShape: number[], paddings: Array<[number, number]>): T {\n util.assert(\n x.rank <= 4,\n 'spaceToBatchND for rank > 4 with a WebGL backend not implemented yet');\n\n const prod = blockShape.reduce((a, b) => a * b);\n\n const completePaddings: Array<[number, number]> = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n\n const paddedX = x.pad(completePaddings);\n\n const reshapedPaddedShape =\n array_ops_util.getReshaped(paddedX.shape, blockShape, prod, false);\n\n const permutedReshapedPaddedPermutation = array_ops_util.getPermuted(\n reshapedPaddedShape.length, blockShape.length, false);\n\n const flattenShape = array_ops_util.getReshapedPermuted(\n paddedX.shape, blockShape, prod, false);\n\n return paddedX.reshape(reshapedPaddedShape)\n .transpose(permutedReshapedPaddedPermutation)\n .reshape(flattenShape) as T;\n }\n\n private reduce(\n x: Tensor2D, reduceType: 'all'|'any'|'max'|'min'|'sum'|'prod',\n dtype: DataType): Tensor2D {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize = reduce_util.computeOptimalWindowSize(inSize);\n const reduceInfo = {windowSize, inSize, batchSize};\n const program = new ReduceProgram(reduceInfo, reduceType);\n const [rows, cols] = program.outputShape;\n const output = this.makeOutputArray<Tensor2D>([rows, cols], dtype);\n\n this.compileAndRun(program, [x], output);\n // No 
need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.reduce(output, reduceType, dtype);\n }\n\n private argReduce(\n x: Tensor2D, reduceType: 'max'|'min',\n bestIndicesA: Tensor2D = null): Tensor2D {\n let batchSize = x.shape[0];\n let inSize = x.shape[1];\n if (bestIndicesA != null) {\n batchSize = bestIndicesA.shape[0];\n inSize = bestIndicesA.shape[1];\n }\n const windowSize = reduce_util.computeOptimalWindowSize(inSize);\n const reduceInfo = {windowSize, inSize, batchSize};\n const program =\n new ArgMinMaxProgram(reduceInfo, reduceType, bestIndicesA == null);\n const [rows, cols] = program.outputShape;\n const output = this.makeOutputArray<Tensor2D>([rows, cols], 'int32');\n const inputs = [x];\n if (bestIndicesA != null) {\n inputs.push(bestIndicesA);\n }\n this.compileAndRun(program, inputs, output);\n // No need to run another GPGPU program.\n if (output.shape[1] === 1) {\n return output;\n }\n return this.argReduce(x, reduceType, output);\n }\n\n sum(x: Tensor, axes: number[]): Tensor {\n axis_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = sumOutType(x.dtype);\n return this.reduce(a2D, 'sum', outputDType).reshape(outShape);\n }\n\n prod(x: Tensor, axes: number[]): Tensor {\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n const outputDType = sumOutType(x.dtype);\n return this.reduce(a2D, 'prod', outputDType).reshape(outShape);\n }\n\n unsortedSegmentSum<T extends Tensor>(\n x: T, segmentIds: Tensor1D, numSegments: number): Tensor {\n let axis = 0;\n const permutation = axis_util.getAxesPermutation([axis], x.rank);\n let permutedX = x;\n if (permutation != null) {\n permutedX = x.transpose(permutation);\n axis = axis_util.getInnerMostAxes(1, x.rank)[0];\n }\n\n const outShape =\n segment_util.computeOutShape(permutedX.shape, axis, numSegments);\n const inSize = util.sizeFromShape([permutedX.shape[axis]]);\n const a2D = permutedX.as2D(-1, inSize);\n const outputDType = sumOutType(x.dtype);\n let result =\n this.segOpCompute(\n a2D, 'unsortedSegmentSum', segmentIds, outputDType, numSegments)\n .reshape(outShape);\n if (permutation != null) {\n result = result.transpose(axis_util.getUndoAxesPermutation(permutation));\n }\n return result;\n }\n\n private segOpCompute(\n x: Tensor2D, segOpType: 'unsortedSegmentSum', segmentIds: Tensor1D,\n dtype: DataType, numSegments: number): Tensor2D {\n const batchSize = x.shape[0];\n const inSize = x.shape[1];\n const windowSize =\n segment_util.segOpComputeOptimalWindowSize(inSize, numSegments);\n const segOpInfo = {windowSize, inSize, batchSize, numSegments};\n const program = new SegmentOpProgram(segOpInfo, segOpType);\n const [rows, cols] = program.outputShape;\n const output = this.makeOutputArray<Tensor2D>([rows, cols], dtype);\n this.compileAndRun(program, [x, segmentIds], output);\n // No need to run another GPGPU program.\n if (output.shape[1] === numSegments) {\n return output;\n }\n segmentIds = range(0, numSegments).tile([inSize / windowSize]);\n return this.segOpCompute(output, segOpType, segmentIds, dtype, numSegments);\n }\n\n argMin(x: Tensor, axis: number): Tensor {\n const axes = [axis];\n axis_util.assertAxesAreInnerMostDims('argMin', axes, x.rank);\n const 
[outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.argReduce(a2D, 'min').reshape(outShape);\n }\n\n argMax(x: Tensor, axis: number): Tensor {\n const axes = [axis];\n axis_util.assertAxesAreInnerMostDims('argMax', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.argReduce(a2D, 'max').reshape(outShape);\n }\n\n cumsum(x: Tensor, axis: number, exclusive: boolean, reverse: boolean):\n Tensor {\n if (axis !== x.rank - 1) {\n throw new Error(\n `WebGL cumsum shader expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const program = new CumSumProgram(x.shape, exclusive, reverse);\n return this.compileAndRun(program, [x]);\n }\n\n equal(a: Tensor, b: Tensor): Tensor {\n const program = new BinaryOpProgram(binaryop_gpu.EQUAL, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, 'bool');\n return this.compileAndRun(program, [a, b], output);\n }\n\n notEqual(a: Tensor, b: Tensor): Tensor {\n const program =\n new BinaryOpProgram(binaryop_gpu.NOT_EQUAL, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, 'bool');\n return this.compileAndRun(program, [a, b], output);\n }\n\n less(a: Tensor, b: Tensor): Tensor {\n if (this.shouldExecuteOnCPU([a, b])) {\n return this.cpuBackend.less(a, b);\n }\n\n const program = new BinaryOpProgram(binaryop_gpu.LESS, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, 'bool');\n return this.compileAndRun(program, [a, b], output);\n }\n\n lessEqual(a: Tensor, b: Tensor): Tensor {\n const program =\n new BinaryOpProgram(binaryop_gpu.LESS_EQUAL, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, 'bool');\n return this.compileAndRun(program, [a, b], output);\n }\n\n greater(a: Tensor, b: Tensor): Tensor {\n if (this.shouldExecuteOnCPU([a, b])) {\n return this.cpuBackend.greater(a, b);\n }\n\n const program = new BinaryOpProgram(binaryop_gpu.GREATER, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, 'bool');\n return this.compileAndRun(program, [a, b], output);\n }\n\n greaterEqual(a: Tensor, b: Tensor): Tensor {\n const program =\n new BinaryOpProgram(binaryop_gpu.GREATER_EQUAL, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, 'bool');\n return this.compileAndRun(program, [a, b], output);\n }\n\n logicalNot<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.LOGICAL_NOT);\n return this.compileAndRun(program, [x]) as T;\n }\n\n logicalAnd(a: Tensor, b: Tensor): Tensor {\n const program =\n new BinaryOpProgram(binaryop_gpu.LOGICAL_AND, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, 'bool');\n return this.compileAndRun(program, [a, b], output);\n }\n\n logicalOr(a: Tensor, b: Tensor): Tensor {\n const program =\n new BinaryOpProgram(binaryop_gpu.LOGICAL_OR, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, 'bool');\n return this.compileAndRun(program, [a, b], output);\n }\n\n select(condition: Tensor, a: Tensor, b: Tensor): Tensor {\n const program = new SelectProgram(condition.rank, a.shape, a.rank);\n const output =\n this.makeOutputArray(program.outputShape, upcastType(a.dtype, b.dtype));\n return this.compileAndRun(program, 
[condition, a, b], output);\n }\n\n where(condition: Tensor): Tensor2D {\n warn(\n 'tf.where() in webgl locks the UI thread. ' +\n 'Call tf.whereAsync() instead');\n const condVals = condition.dataSync();\n return whereImpl(condition.shape, condVals);\n }\n\n topk<T extends Tensor>(x: T, k: number, sorted: boolean): [T, T] {\n const xVals = x.dataSync();\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n\n min(x: Tensor, axes: number[]): Tensor {\n axis_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'min', a2D.dtype).reshape(outShape);\n }\n\n minimum(a: Tensor, b: Tensor): Tensor {\n if (this.shouldExecuteOnCPU([a, b])) {\n return this.cpuBackend.minimum(a, b);\n }\n\n const program = new BinaryOpProgram(binaryop_gpu.MIN, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n\n mod(a: Tensor, b: Tensor): Tensor {\n const program = new BinaryOpProgram(binaryop_gpu.MOD, a.shape, b.shape);\n const customSetup = program.getCustomSetupFunc();\n return this.compileAndRun(program, [a, b], null, customSetup);\n }\n\n max(x: Tensor, axes: number[]): Tensor {\n axis_util.assertAxesAreInnerMostDims('max', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'max', a2D.dtype).reshape(outShape);\n }\n\n maximum(a: Tensor, b: Tensor): Tensor {\n if (this.shouldExecuteOnCPU([a, b])) {\n return this.cpuBackend.maximum(a, b);\n }\n\n const program = new BinaryOpProgram(binaryop_gpu.MAX, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n\n all(x: Tensor, axes: number[]): Tensor {\n axis_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'all', a2D.dtype).reshape(outShape);\n }\n\n any(x: Tensor, axes: number[]): Tensor {\n axis_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const inSize = util.sizeFromShape(reduceShape);\n const a2D = x.as2D(-1, inSize);\n return this.reduce(a2D, 'any', a2D.dtype).reshape(outShape);\n }\n\n squaredDifference(a: Tensor, b: Tensor): Tensor {\n const program =\n new BinaryOpProgram(binaryop_gpu.SQUARED_DIFFERENCE, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]);\n }\n\n realDivide(a: Tensor, b: Tensor): Tensor {\n const op = binaryop_gpu.DIV;\n const outputDtype = 'float32';\n const program = new BinaryOpProgram(op, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, outputDtype);\n return this.compileAndRun<Tensor>(program, [a, b], output);\n }\n\n floorDiv(a: Tensor, b: Tensor): Tensor {\n const op = binaryop_gpu.INT_DIV;\n const outputDtype = 'int32';\n const program = new BinaryOpProgram(op, a.shape, b.shape);\n const output = this.makeOutputArray(program.outputShape, outputDtype);\n return this.compileAndRun<Tensor>(program, [a, b], output);\n }\n\n add(a: Tensor, b: Tensor): Tensor {\n if (a.dtype === 'complex64' && b.dtype === 'complex64') {\n return this.complexSeparableBinaryOp(a, b, binaryop_gpu.ADD);\n }\n\n const program = new 
BinaryOpProgram(binaryop_gpu.ADD, a.shape, b.shape);\n const output =\n this.makeOutputArray(\n program.outputShape, upcastType(a.dtype, b.dtype)) as Tensor;\n return this.compileAndRun<Tensor>(program, [a, b], output);\n }\n\n /**\n * Computes a complex binary operation that can be decomposed into a simple\n * binary operation on both the real and imaginary parts.\n */\n private complexSeparableBinaryOp(a: Tensor, b: Tensor, op: string): Tensor {\n const aData = this.texData.get(a.dataId);\n const bData = this.texData.get(b.dataId);\n\n const [real, imag] = [\n [aData.complexTensors.real, bData.complexTensors.real],\n [aData.complexTensors.imag, bData.complexTensors.imag]\n ].map(complexParts => {\n const [aPart, bPart] = complexParts;\n\n const program = new BinaryOpProgram(op, a.shape, b.shape);\n const output = this.makeOutputArray(\n program.outputShape,\n upcastType(aPart.dtype, bPart.dtype)) as Tensor;\n\n const aHandle = this.makeComplexComponentTensorHandle(a, aPart);\n const bHandle = this.makeComplexComponentTensorHandle(b, bPart);\n\n return this.compileAndRun<Tensor>(program, [aHandle, bHandle], output);\n });\n\n const complex = this.complex(real, imag);\n real.dispose();\n imag.dispose();\n return complex;\n }\n\n // Returns a TensorHandle with the complex shape and the dataId of the\n // underlying part. We need to do this because a reshaped complex tensor is\n // not reflected in its parts.\n private makeComplexComponentTensorHandle(\n complexTensor: Tensor, complexPart: Tensor): TensorHandle {\n return {\n dataId: complexPart.dataId,\n dtype: complexPart.dtype,\n shape: complexTensor.shape\n };\n }\n\n addN<T extends Tensor>(tensors: T[]): T {\n let res = tensors[0];\n for (let i = 1; i < tensors.length; i++) {\n res = this.add(res, tensors[i]) as T;\n }\n return res;\n }\n\n subtract(a: Tensor, b: Tensor): Tensor {\n if (a.dtype === 'complex64' && b.dtype === 'complex64') {\n return this.complexSeparableBinaryOp(a, b, binaryop_gpu.SUB);\n }\n\n if (this.shouldExecuteOnCPU([a, b])) {\n return this.cpuBackend.subtract(a, b);\n }\n\n const program = new BinaryOpProgram(binaryop_gpu.SUB, a.shape, b.shape);\n const output =\n this.makeOutputArray(\n program.outputShape, upcastType(a.dtype, b.dtype)) as Tensor;\n return this.compileAndRun<Tensor>(program, [a, b], output);\n }\n\n pow<T extends Tensor>(a: T, b: Tensor): T {\n const program = new BinaryOpProgram(binaryop_gpu.POW, a.shape, b.shape);\n const customSetup = program.getCustomSetupFunc();\n const output = this.makeOutputArray(\n program.outputShape, upcastType(a.dtype, b.dtype)) as T;\n return this.compileAndRun<T>(program, [a, b], output, customSetup);\n }\n\n ceil<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.CEIL);\n return this.compileAndRun(program, [x]) as T;\n }\n\n floor<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.FLOOR);\n return this.compileAndRun(program, [x]) as T;\n }\n\n sign<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGN);\n return this.compileAndRun(program, [x]) as T;\n }\n\n round<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ROUND);\n return this.compileAndRun(program, [x]) as T;\n }\n\n exp<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.EXP);\n return this.compileAndRun(program, [x]) as T;\n }\n\n expm1<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.EXPM1);\n return 
this.compileAndRun(program, [x]) as T;\n }\n\n log<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.LOG);\n const customSetup = program.getCustomSetupFunc();\n return this.compileAndRun(program, [x], null, customSetup) as T;\n }\n\n log1p<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.LOG1P);\n return this.compileAndRun(program, [x]) as T;\n }\n\n sqrt<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.SQRT);\n return this.compileAndRun(program, [x]) as T;\n }\n\n rsqrt<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.RSQRT);\n return this.compileAndRun(program, [x]) as T;\n }\n\n square<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.SQUARE);\n return this.compileAndRun(program, [x]) as T;\n }\n\n reciprocal<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.RECIPROCAL);\n return this.compileAndRun(program, [x]) as T;\n }\n\n relu<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.RELU);\n return this.compileAndRun(program, [x]) as T;\n }\n\n elu<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ELU);\n return this.compileAndRun(program, [x]) as T;\n }\n\n eluDer<T extends Tensor>(dy: T, y: T): T {\n const program =\n new BinaryOpProgram(binaryop_gpu.ELU_DER, dy.shape, y.shape);\n return this.compileAndRun(program, [dy, y]) as T;\n }\n\n selu<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.SELU);\n return this.compileAndRun(program, [x]) as T;\n }\n\n int<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.TO_INT);\n const output = this.makeOutputArray(program.outputShape, 'int32');\n return this.compileAndRun(program, [x], output) as T;\n }\n\n clip<T extends Tensor>(x: T, min: number, max: number): T {\n const program = new ClipProgram(x.shape, min, max);\n return this.compileAndRun(program, [x]) as T;\n }\n\n abs<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ABS);\n return this.compileAndRun(program, [x]) as T;\n }\n\n complexAbs<T extends Tensor>(x: T): T {\n const xData = this.texData.get(x.dataId);\n\n const program = new ComplexAbsProgram(x.shape);\n const inputs = [\n this.makeComplexComponentTensorHandle(x, xData.complexTensors.real),\n this.makeComplexComponentTensorHandle(x, xData.complexTensors.imag),\n ];\n\n return this.compileAndRun<Tensor>(program, inputs) as T;\n }\n\n sigmoid<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.SIGMOID);\n return this.compileAndRun(program, [x]) as T;\n }\n\n softplus<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.SOFTPLUS);\n return this.compileAndRun(program, [x]) as T;\n }\n\n sin<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.SIN);\n return this.compileAndRun(program, [x]) as T;\n }\n\n cos<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.COS);\n return this.compileAndRun(program, [x]) as T;\n }\n\n tan<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.TAN);\n return this.compileAndRun(program, [x]) as T;\n }\n\n asin<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ASIN);\n return this.compileAndRun(program, [x]) as T;\n }\n\n acos<T extends Tensor>(x: 
T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOS);\n return this.compileAndRun(program, [x]) as T;\n }\n\n atan<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ATAN);\n return this.compileAndRun(program, [x]) as T;\n }\n\n atan2<T extends Tensor>(a: T, b: T): T {\n const program = new BinaryOpProgram(binaryop_gpu.ATAN2, a.shape, b.shape);\n return this.compileAndRun(program, [a, b]) as T;\n }\n\n sinh<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.SINH);\n return this.compileAndRun(program, [x]) as T;\n }\n\n cosh<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.COSH);\n return this.compileAndRun(program, [x]) as T;\n }\n\n tanh<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.TANH);\n return this.compileAndRun(program, [x]) as T;\n }\n\n asinh<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ASINH);\n return this.compileAndRun(program, [x]) as T;\n }\n\n acosh<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ACOSH);\n const customSetup = program.getCustomSetupFunc();\n return this.compileAndRun(program, [x], null, customSetup) as T;\n }\n\n atanh<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ATANH);\n const customSetup = program.getCustomSetupFunc();\n return this.compileAndRun(program, [x], null, customSetup) as T;\n }\n\n erf<T extends Tensor>(x: T): T {\n const program = new UnaryOpProgram(x.shape, unary_op.ERF);\n return this.compileAndRun(program, [x]) as T;\n }\n\n step<T extends Tensor>(x: T, alpha: number): T {\n const program = new UnaryOpProgram(x.shape, unary_op.STEP(alpha));\n return this.compileAndRun(program, [x]) as T;\n }\n\n conv2dWithIm2Row(x: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n // Rearranges conv2d input so each block to be convolved over forms the\n // column of a new matrix with shape [filterWidth * filterHeight *\n // inChannels, outHeight * outWidth]. The filter is also rearranged so each\n // output channel forms a row of a new matrix with shape [outChannels,\n // filterWidth * filterHeight * inChannels]. 
The convolution is then\n // computed by multiplying these matrices and reshaping the result.\n const {\n filterWidth,\n filterHeight,\n inChannels,\n outWidth,\n outHeight,\n } = convInfo;\n\n const sharedDim = filterWidth * filterHeight * inChannels;\n const numCols = outHeight * outWidth;\n const x2ColShape = [sharedDim, numCols];\n\n const xSqueezed = x.squeeze([0]);\n const w2Row = filter.reshape([sharedDim, -1]) as Tensor2D;\n\n const im2ColProgram =\n new Im2ColProgram(x2ColShape, xSqueezed.shape, convInfo);\n const im2Col = this.compileAndRun<Tensor2D>(\n im2ColProgram, [xSqueezed],\n this.makePackedTensor<Tensor2D>(x2ColShape));\n\n const matmulProgram = new MatMulPackedProgram(\n im2Col.shape, w2Row.shape, [numCols, convInfo.outChannels], true,\n false);\n let product = this.compileAndRun(\n matmulProgram, [im2Col, w2Row],\n this.makePackedTensor<Tensor2D>(matmulProgram.outputShape));\n\n if (ENV.get('WEBGL_LAZILY_UNPACK') === false) {\n product = this.unpackTensor(product);\n }\n\n return product.reshape([1, outHeight, outWidth, convInfo.outChannels]);\n }\n\n conv2d(x: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n if (ENV.get('WEBGL_CONV_IM2COL') && x.shape[0] === 1) {\n return this.conv2dWithIm2Row(x, filter, convInfo);\n }\n const program = new Conv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n\n conv2dDerInput(dy: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n const program = new Conv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n\n conv2dDerFilter(x: Tensor4D, dy: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n const program = new Conv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n\n depthwiseConv2D(x: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n const program = new DepthwiseConv2DProgram(convInfo);\n return this.compileAndRun(program, [x, filter]);\n }\n\n depthwiseConv2DDerInput(dy: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n const program = new DepthwiseConv2DDerInputProgram(convInfo);\n return this.compileAndRun(program, [dy, filter]);\n }\n\n depthwiseConv2DDerFilter(x: Tensor4D, dy: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n const program = new DepthwiseConv2DDerFilterProgram(convInfo);\n return this.compileAndRun(program, [x, dy]);\n }\n\n maxPool(x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n const program = new Pool2DProgram(convInfo, 'max', false);\n const output =\n this.makeOutputArray(program.outputShape, x.dtype) as Tensor4D;\n return this.compileAndRun(program, [x], output);\n }\n\n avgPool(x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n const program = new Pool2DProgram(convInfo, 'avg', false);\n const output = this.makeOutputArray(program.outputShape, 'float32');\n return this.compileAndRun(program, [x], output) as Tensor4D;\n }\n\n maxPoolBackprop(dy: Tensor4D, x: Tensor4D, y: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n const getPositions = true;\n const maxPoolPositionsProgram =\n new Pool2DProgram(convInfo, 'max', getPositions);\n const maxPoolPositions: Tensor4D =\n this.compileAndRun(maxPoolPositionsProgram, [x]);\n\n const maxPoolBackPropProgram = new MaxPool2DBackpropProgram(convInfo);\n const output =\n this.makeOutputArray(maxPoolBackPropProgram.outputShape, x.dtype);\n const result = this.compileAndRun(\n maxPoolBackPropProgram, [dy, maxPoolPositions], output);\n maxPoolPositions.dispose();\n return result as Tensor4D;\n }\n\n avgPoolBackprop(dy: Tensor4D, 
x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n const avgPoolBackpropProgram = new AvgPool2DBackpropProgram(convInfo);\n const output =\n this.makeOutputArray(avgPoolBackpropProgram.outputShape, x.dtype);\n return this.compileAndRun(avgPoolBackpropProgram, [dy], output) as Tensor4D;\n }\n\n cast<T extends Tensor>(x: T, dtype: DataType): T {\n return backend_util.castTensor(x, dtype, this);\n }\n\n reshape<R extends Rank>(x: Tensor, shape: ShapeMap[R]): Tensor<R> {\n if (this.texData.get(x.dataId).isPacked &&\n !webgl_util.isReshapeFree(x.shape, shape)) {\n return this.packedReshape(x, shape);\n }\n return backend_util.reshapeTensor(x, shape);\n }\n\n resizeBilinear(\n x: Tensor4D, newHeight: number, newWidth: number,\n alignCorners: boolean): Tensor4D {\n const program =\n new ResizeBilinearProgram(x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x]);\n }\n\n resizeBilinearBackprop(dy: Tensor4D, x: Tensor4D, alignCorners: boolean):\n Tensor4D {\n const program = new ResizeBilinearBackpropProgram(dy, x, alignCorners);\n\n return this.compileAndRun(program, [dy]);\n }\n\n resizeNearestNeighbor(\n x: Tensor4D, newHeight: number, newWidth: number,\n alignCorners: boolean): Tensor4D {\n const program = new ResizeNearestNeighborProgram(\n x.shape, newHeight, newWidth, alignCorners);\n return this.compileAndRun(program, [x]);\n }\n\n resizeNearestNeighborBackprop(\n dy: Tensor4D, x: Tensor4D, alignCorners: boolean): Tensor4D {\n const program =\n new ResizeNearestNeigborBackpropProgram(dy, x, alignCorners);\n return this.compileAndRun(program, [dy]);\n }\n\n multinomial(\n logits: Tensor2D, normalized: boolean, numSamples: number,\n seed: number): Tensor2D {\n const probs = normalized ? logits : softmax(logits);\n const batchSize = probs.shape[0];\n const numOutcomes = probs.shape[1];\n const program = new MultinomialProgram(batchSize, numOutcomes, numSamples);\n const output =\n this.makeOutputArray(program.outputShape, 'int32') as Tensor2D;\n const customSetup = program.getCustomSetupFunc(seed);\n return this.compileAndRun(program, [probs], output, customSetup);\n }\n\n oneHot(indices: Tensor1D, depth: number, onValue: number, offValue: number):\n Tensor2D {\n const program = new OneHotProgram(indices.size, depth, onValue, offValue);\n return this.compileAndRun(program, [indices]);\n }\n\n nonMaxSuppression(\n boxes: Tensor2D, scores: Tensor1D, maxOutputSize: number,\n iouThreshold: number, scoreThreshold: number): Tensor1D {\n warn(\n 'tf.nonMaxSuppression() in webgl locks the UI thread. ' +\n 'Call tf.nonMaxSuppressionAsync() instead');\n const boxesVals = boxes.dataSync();\n const scoresVals = scores.dataSync();\n return nonMaxSuppressionImpl(\n boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n }\n\n cropAndResize(\n image: Tensor4D, boxes: Tensor2D, boxIndex: Tensor1D,\n cropSize: [number, number], method: 'bilinear'|'nearest',\n extrapolationValue: number): Tensor4D {\n const program = new CropAndResizeProgram(\n image.shape, boxes.shape, cropSize, method, extrapolationValue);\n return this.compileAndRun(program, [image, boxes, boxIndex]);\n }\n\n depthToSpace(x: Tensor4D, blockSize: number, dataFormat: 'NHWC'|'NCHW'):\n Tensor4D {\n util.assert(\n blockSize > 1,\n `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n\n const batchSize = x.shape[0];\n const inputHeight = (dataFormat === 'NHWC') ? x.shape[1] : x.shape[2];\n const inputWidth = (dataFormat === 'NHWC') ? 
x.shape[2] : x.shape[3];\n const inputDepth = (dataFormat === 'NHWC') ? x.shape[3] : x.shape[1];\n\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n\n const outputShape = (dataFormat === 'NHWC') ?\n [batchSize, outputHeight, outputWidth, outputDepth] :\n [batchSize, outputDepth, outputHeight, outputWidth];\n\n const program = new DepthToSpaceProgram(outputShape, blockSize, dataFormat);\n return this.compileAndRun(program, [x]);\n }\n\n split<T extends Tensor>(x: T, sizeSplits: number[], axis: number): T[] {\n return split(x, sizeSplits, axis);\n }\n\n scatterND<R extends Rank>(\n indices: Tensor, updates: Tensor, shape: ShapeMap[R]): Tensor<R> {\n const {sliceRank, numUpdates, sliceSize, strides, outputSize} =\n scatter_nd_util.calculateShapes(updates, indices, shape);\n\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const flattenIndices = indices.reshape([numUpdates, sliceRank]);\n const flattenX = updates.reshape([numUpdates, sliceSize]);\n\n if (outputSize === 0) {\n return backend_util.reshapeTensor(tensor([]), shape);\n }\n const defaultValue = scalar(0);\n const program = new ScatterProgram(\n numUpdates, sliceRank, flattenIndices.rank, flattenX.rank, strides,\n flattenShape);\n return (this.compileAndRun(\n program, [flattenX, flattenIndices, defaultValue]) as Tensor)\n .reshape(shape);\n }\n\n sparseToDense<R extends Rank>(\n sparseIndices: Tensor, sparseValues: Tensor, outputShape: ShapeMap[R],\n defaultValue: Scalar): Tensor<R> {\n const {sliceRank, numUpdates, strides, outputSize} =\n scatter_nd_util.calculateShapes(\n sparseValues, sparseIndices, outputShape);\n\n const sumDupeIndices = false;\n const program = new ScatterProgram(\n numUpdates, sliceRank, sparseIndices.rank, sparseValues.rank, strides,\n [outputSize, 1], sumDupeIndices);\n return (this.compileAndRun(\n program, [sparseValues, sparseIndices, defaultValue]) as Tensor)\n .reshape(outputShape);\n }\n\n fft(x: Tensor2D): Tensor2D {\n const inverse = false;\n return this.fftImpl(x, inverse);\n }\n\n ifft(x: Tensor2D): Tensor2D {\n const inverse = true;\n return this.fftImpl(x, inverse);\n }\n\n private fftImpl(x: Tensor2D, inverse: boolean): Tensor2D {\n const xData = this.texData.get(x.dataId);\n\n const realProgram =\n new FFTProgram(fft_gpu.COMPLEX_FFT.REAL, x.shape, inverse);\n const imagProgram =\n new FFTProgram(fft_gpu.COMPLEX_FFT.IMAG, x.shape, inverse);\n const inputs = [\n this.makeComplexComponentTensorHandle(x, xData.complexTensors.real),\n this.makeComplexComponentTensorHandle(x, xData.complexTensors.imag),\n ];\n\n const real = this.compileAndRun<Tensor>(realProgram, inputs);\n const imag = this.compileAndRun<Tensor>(imagProgram, inputs);\n const complex = this.complex(real, imag).as2D(x.shape[0], x.shape[1]);\n real.dispose();\n imag.dispose();\n return complex;\n }\n\n gatherND(x: Tensor, indices: Tensor): Tensor<Rank> {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n\n const [resultShape, numSlices, sliceSize, strides] =\n gather_nd_util.prepareAndValidate(x, indices);\n\n const flattenIndices = indices.reshape([numSlices, sliceRank]);\n const flattenX = x.reshape([x.size / sliceSize, sliceSize]);\n const program =\n new GatherNDProgram(sliceRank, strides, [numSlices, sliceSize]);\n return (this.compileAndRun(program, [flattenX, flattenIndices]) as Tensor)\n .reshape(resultShape);\n }\n\n private makeOutputArray<T extends 
Tensor>(shape: number[], dtype: DataType):\n T {\n return Tensor.make(shape, {}, dtype) as T;\n }\n\n private makePackedTensor<T extends Tensor>(shape: number[]): T {\n const packedTensor = Tensor.make(shape, {});\n this.texData.get(packedTensor.dataId).isPacked = true;\n return packedTensor as T;\n }\n\n private unpackTensor<T extends Tensor>(input: T): T {\n const program = new UnpackProgram(input.shape);\n return this.compileAndRun(program, [input]);\n }\n\n private getBatchDim(shape: number[], dimsToSkip = 2): number {\n return util.sizeFromShape(shape.slice(0, shape.length - dimsToSkip));\n }\n\n private getRowsCols(shape: number[]): [number, number] {\n if (shape.length === 0) {\n throw Error('Cannot get rows and columns of an empty shape array.');\n }\n\n return [\n shape.length > 1 ? shape[shape.length - 2] : 1, shape[shape.length - 1]\n ];\n }\n\n private packedReshape<R extends Rank>(input: Tensor, afterShape: ShapeMap[R]):\n Tensor<R> {\n const inputAs3D = input.reshape(\n [this.getBatchDim(input.shape), ...this.getRowsCols(input.shape)]);\n const afterShapeAs3D =\n [this.getBatchDim(afterShape), ...this.getRowsCols(afterShape)];\n const program = new ReshapePackedProgram(\n afterShapeAs3D as [number, number, number],\n inputAs3D.shape as [number, number, number]);\n return this\n .compileAndRun(\n program, [inputAs3D], this.makePackedTensor(afterShapeAs3D))\n .reshape(afterShape);\n }\n\n public compileAndRun<\n K extends {dtype: DataType, size: number, dataId: {}, shape: number[]}>(\n program: GPGPUProgram, inputs: TensorHandle[], output?: K,\n customSetup?: (gpgpu: GPGPUContext, webGLProgram: WebGLProgram) => void,\n pageToCpu = true): K {\n if (output == null) {\n output =\n this.makeOutputArray(program.outputShape, inputs[0].dtype) as {} as K;\n }\n if (output.size === 0) {\n // Short-circuit the computation since the result is empty (has 0 in its\n // shape).\n this.texData.get(output.dataId).values =\n getTypedArrayFromDType(output.dtype, 0);\n return output;\n }\n\n const inputsData: TensorData[] = inputs.map(input => {\n if (input.dtype === 'complex64') {\n throw new Error(\n `GPGPUProgram does not support complex64 input. For complex64 ` +\n `dtypes, please separate the program into real and imaginary ` +\n `parts.`);\n }\n\n let texData = this.texData.get(input.dataId);\n // Upload small tensors that live on the CPU as uniforms, not as\n // textures. 
Do this only when the environment supports 32bit floats due\n // to problems when comparing 16bit floats with 32bit floats.\n // TODO(https://github.com/tensorflow/tfjs/issues/821): Make it possible\n // for packed shaders to sample from uniforms.\n if (texData.texture == null &&\n !(!texData.isPacked && program.usesPackedTextures) &&\n util.sizeFromShape(input.shape) <=\n ENV.get('WEBGL_SIZE_UPLOAD_UNIFORM')) {\n return {\n shape: input.shape,\n texData: null,\n isUniform: true,\n uniformValues: this.readSync(input.dataId)\n };\n }\n\n if (texData.isPacked !== !!program.usesPackedTextures) {\n let preProcessProgram: UnpackProgram|PackProgram;\n let processedInput: Tensor;\n if (texData.isPacked) {\n preProcessProgram = new UnpackProgram(input.shape);\n processedInput = this.compileAndRun(preProcessProgram, [input]);\n } else {\n preProcessProgram = new PackProgram(input.shape);\n processedInput = this.compileAndRun(\n preProcessProgram, [input], this.makePackedTensor(input.shape));\n }\n\n texData = this.texData.get(processedInput.dataId);\n input = processedInput;\n }\n\n this.uploadToGPU(input.dataId);\n return {shape: input.shape, texData, isUniform: false};\n });\n\n this.uploadToGPU(output.dataId);\n const outputData = {\n shape: output.shape,\n texData: this.texData.get(output.dataId),\n isUniform: false\n };\n const key = gpgpu_math.makeShaderKey(program, inputsData, outputData);\n const binary = this.getAndSaveBinary(key, () => {\n return gpgpu_math.compileProgram(\n this.gpgpu, program, inputsData, outputData);\n });\n const shouldTimeProgram = this.activeTimers != null;\n let query: WebGLQuery|CPUTimerQuery;\n if (shouldTimeProgram) {\n query = this.startTimer();\n }\n\n gpgpu_math.runProgram(binary, inputsData, outputData, customSetup);\n\n if (ENV.get('WEBGL_PAGING_ENABLED') && pageToCpu &&\n this.numBytesInGPU > this.NUM_BYTES_BEFORE_PAGING) {\n let numBytesToPage = this.numBytesInGPU - this.NUM_BYTES_BEFORE_PAGING;\n while (numBytesToPage > 0 && this.lruDataGPU.length > 0) {\n const dataId = this.lruDataGPU.shift();\n const {shape, dtype} = this.texData.get(dataId);\n numBytesToPage -= this.computeBytes(shape, dtype);\n this.read(dataId);\n }\n }\n\n if (shouldTimeProgram) {\n query = this.endTimer(query);\n this.activeTimers.push(\n {name: program.constructor.name, query: this.getQueryTime(query)});\n }\n return output;\n }\n\n private getAndSaveBinary(key: string, getBinary: () => GPGPUBinary):\n GPGPUBinary {\n if (!(key in this.binaryCache)) {\n this.binaryCache[key] = getBinary();\n }\n return this.binaryCache[key];\n }\n\n getTextureManager(): TextureManager {\n return this.textureManager;\n }\n\n private disposed = false;\n\n dispose() {\n if (this.disposed) {\n return;\n }\n for (const key in this.binaryCache) {\n this.gpgpu.deleteProgram(this.binaryCache[key].webGLProgram);\n }\n this.textureManager.dispose();\n this.canvas.remove();\n if (this.fromPixels2DContext != null) {\n this.fromPixels2DContext.canvas.remove();\n }\n if (this.gpgpuCreatedLocally) {\n this.gpgpu.dispose();\n }\n this.disposed = true;\n }\n\n floatPrecision(): number {\n return tidy(() => {\n if (this.abs(scalar(1e-8)).get() > 0) {\n return 32;\n }\n return 16;\n });\n }\n\n private uploadToGPU(dataId: DataId): void {\n const texData = this.texData.get(dataId);\n const {shape, values, texture, dtype, usage, isPacked} = texData;\n if (texture != null) {\n // Array is already on GPU. 
No-op.\n // Touching the texture.\n if (ENV.get('WEBGL_PAGING_ENABLED')) {\n const index = this.lruDataGPU.indexOf(dataId);\n if (index >= 0) {\n this.lruDataGPU.splice(this.lruDataGPU.indexOf(dataId), 1);\n this.lruDataGPU.push(dataId);\n }\n }\n return;\n }\n const shouldTimeProgram = this.activeTimers != null;\n let start: number;\n if (shouldTimeProgram) {\n start = performance.now();\n }\n const texShape =\n webgl_util.getTextureShapeFromLogicalShape(shape, isPacked);\n texData.texShape = texShape;\n const newTexture = this.acquireTexture(dataId, texShape, usage, isPacked);\n texData.texture = newTexture;\n if (values != null) {\n // TODO(smilkov): Propagate the original typed array to gpgpu.\n if (isPacked) {\n const batch = util.sizeFromShape(shape.slice(0, shape.length - 2));\n const rows = shape.length > 1 ? shape[shape.length - 2] : 1;\n const cols = shape[shape.length - 1];\n this.gpgpu.uploadMatrixToPackedTexture(\n newTexture, batch, rows, cols, typedArrayToFloat32(values, dtype));\n } else {\n this.gpgpu.uploadMatrixToTexture(\n newTexture, texShape[0], texShape[1],\n typedArrayToFloat32(values, dtype));\n }\n // Once uploaded, don't store the values on cpu.\n texData.values = null;\n if (shouldTimeProgram) {\n this.uploadWaitMs += performance.now() - start;\n }\n }\n }\n\n private convertAndCacheOnCPU(dataId: DataId, float32Values?: Float32Array):\n TypedArray {\n // In delayed storage mode, when the user reads data, we don't keep a\n // copy on the gpu, to minimize likelihood of memory leak. We re-upload\n // to gpu the next time a gpgpu program needs the texture.\n const dontKeepCopyOnGPU = this.delayedStorage;\n const texData = this.texData.get(dataId);\n const {texture, texShape, dtype, usage, isPacked} = texData;\n if (dontKeepCopyOnGPU && texture != null) {\n this.releaseTexture(dataId, texture, texShape, usage, isPacked);\n texData.texture = null;\n texData.texShape = null;\n }\n texData.usage = TextureUsage.UPLOAD;\n if (float32Values != null) {\n texData.values = float32ToTypedArray(float32Values, dtype);\n }\n return texData.values;\n }\n\n private releaseTexture(\n dataId: DataId, texture: WebGLTexture, texShape: [number, number],\n texType: TextureUsage, isPacked: boolean) {\n const {shape, dtype} = this.texData.get(dataId);\n\n if (ENV.get('WEBGL_PAGING_ENABLED')) {\n const idx = this.lruDataGPU.indexOf(dataId);\n if (idx >= 0) {\n this.lruDataGPU.splice(idx, 1);\n }\n }\n this.numBytesInGPU -= this.computeBytes(shape, dtype);\n this.textureManager.releaseTexture(texture, texShape, texType, isPacked);\n }\n\n private acquireTexture(\n dataId: DataId, texShape: [number, number], texType: TextureUsage,\n isPacked: boolean): WebGLTexture {\n const {shape, dtype} = this.texData.get(dataId);\n if (ENV.get('WEBGL_PAGING_ENABLED')) {\n this.lruDataGPU.push(dataId);\n }\n this.numBytesInGPU += this.computeBytes(shape, dtype);\n return this.textureManager.acquireTexture(texShape, texType, isPacked);\n }\n\n private computeBytes(shape: number[], dtype: DataType) {\n return util.sizeFromShape(shape) * util.bytesPerElement(dtype);\n }\n}\n\nif (ENV.get('IS_BROWSER')) {\n ENV.registerBackend(\n 'webgl', () => new MathBackendWebGL(), 2 /* priority */,\n setTensorTracker);\n}\n\nfunction float32ToTypedArray<D extends DataType>(\n a: Float32Array, dtype: D): DataTypeMap[D] {\n if (dtype === 'float32' || dtype === 'complex64') {\n return a;\n } else if (dtype === 'int32' || dtype === 'bool') {\n const result = (dtype === 'int32') ? 
new Int32Array(a.length) :\n new Uint8Array(a.length);\n for (let i = 0; i < result.length; ++i) {\n result[i] = Math.round(a[i]);\n }\n return result;\n } else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\n\nfunction typedArrayToFloat32<D extends DataType>(\n a: DataTypeMap[D], dtype: D): Float32Array {\n return (a instanceof Float32Array) ? a : new Float32Array(a);\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\n\nimport {op} from './operation';\nimport {scalar, zerosLike} from './tensor_ops';\n\n/**\n * Computes `-1 * x` element-wise.\n *\n * ```js\n * const x = tf.tensor2d([1, 2, -2, 0], [2, 2]);\n *\n * x.neg().print(); // or tf.neg(x)\n * ```\n *\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction neg_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'neg');\n\n const grad = (dy: T) => {\n return {$x: () => dy.neg()};\n };\n return ENV.engine.runKernel(backend => backend.neg($x), {$x}, grad);\n}\n\n/**\n * Computes ceiling of input `tf.Tensor` element-wise: `ceil(x)`\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.ceil().print(); // or tf.ceil(x)\n * ```\n * @param x The input Tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction ceil_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'ceil');\n\n // TODO(manrajgrover): Return null for gradients when backprop supports it.\n const grad = (dy: T) => {\n return {$x: () => zerosLike(dy)};\n };\n return ENV.engine.runKernel(backend => backend.ceil($x), {$x}, grad);\n}\n\n/**\n * Computes floor of input `tf.Tensor` element-wise: `floor(x)`.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.floor().print(); // or tf.floor(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction floor_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'floor');\n\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n const grad = (dy: T) => {\n return {$x: () => zerosLike(dy)};\n };\n return ENV.engine.runKernel(backend => backend.floor($x), {$x}, grad);\n}\n\n/**\n * Returns an element-wise indication of the sign of a number.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3, NaN, 0]);\n *\n * x.sign().print(); // or tf.sign(x)\n * ```\n * @param x The input Tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction sign_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'sign');\n\n const grad = (dy: T) => {\n return 
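The two helpers just above (`float32ToTypedArray` / `typedArrayToFloat32`) convert between the GPU-side `Float32Array` representation and the logical dtype: `int32` and `bool` results are produced by rounding each float, while `float32` (and `complex64`) data passes through unchanged. A short standalone sketch of that behaviour, with assumed sample values and a simplified function name:

```js
// Mirrors float32ToTypedArray above, outside the backend class (illustrative only).
function float32ToTyped(a, dtype) {
  if (dtype === 'float32' || dtype === 'complex64') return a;
  const out = dtype === 'int32' ? new Int32Array(a.length) : new Uint8Array(a.length);
  for (let i = 0; i < a.length; ++i) out[i] = Math.round(a[i]);
  return out;
}

const gpuValues = new Float32Array([0.4, 1.6, -2.5]);
float32ToTyped(gpuValues, 'int32');   // Int32Array [0, 2, -2]  (Math.round rounds -2.5 up to -2)
float32ToTyped(gpuValues, 'float32'); // the same Float32Array, returned as-is
```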
{$x: () => zerosLike(dy)};\n };\n return ENV.engine.runKernel(backend => backend.sign($x), {$x}, grad);\n}\n\n/**\n * Computes round of input `tf.Tensor` element-wise: `round(x)`.\n * It implements banker's rounding.\n *\n * ```js\n * const x = tf.tensor1d([.6, 1.1, -3.3]);\n *\n * x.round().print(); // or tf.round(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction round_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'round');\n\n // TODO(nsthorat): Let gradients be null for cases where we want to stop\n // backpropgation.\n const grad = (dy: T) => {\n return {$x: () => zerosLike(dy)};\n };\n return ENV.engine.runKernel(backend => backend.round($x), {$x}, grad);\n}\n\n/**\n * Computes exponential of the input `tf.Tensor` element-wise. `e ^ x`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, -3]);\n *\n * x.exp().print(); // or tf.exp(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction exp_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'exp');\n\n const bck = (dy: T, saved: Tensor[]) => {\n const [y] = saved;\n return {$x: () => dy.mulStrict(y as T)};\n };\n return ENV.engine.runKernel(\n (backend, save) => save(backend.exp($x)), {$x}, bck);\n}\n\n/**\n * Computes exponential of the input `tf.Tensor` minus one element-wise.\n * `e ^ x - 1`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, -3]);\n *\n * x.expm1().print(); // or tf.expm1(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction expm1_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'expm1');\n\n const grad = (dy: T) => {\n return {$x: () => dy.mulStrict($x.exp())};\n };\n return ENV.engine.runKernel(backend => backend.expm1($x), {$x}, grad);\n}\n\n/**\n * Computes natural logarithm of the input `tf.Tensor` element-wise: `ln(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.E]);\n *\n * x.log().print(); // or tf.log(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction log_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'log');\n\n const grad = (dy: T) => {\n return {$x: () => dy.divStrict($x.toFloat())};\n };\n return ENV.engine.runKernel(backend => backend.log($x), {$x}, grad);\n}\n\n/**\n * Computes natural logarithm of the input `tf.Tensor` plus one\n * element-wise: `ln(1 + x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.E - 1]);\n *\n * x.log1p().print(); // or tf.log1p(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction log1p_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'log1p');\n\n const grad = (dy: T) => {\n return {$x: () => dy.divStrict($x.add(scalar(1)))};\n };\n return ENV.engine.runKernel(backend => backend.log1p($x), {$x}, grad);\n}\n\n/**\n * Computes square root of the input `tf.Tensor` element-wise: `y = sqrt(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 4, -1]);\n *\n * x.sqrt().print(); // or tf.sqrt(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction sqrt_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'sqrt');\n\n const grad = (dy: T) => {\n return {$x: () => dy.divStrict($x.toFloat().sqrt().mul(scalar(2)))};\n };\n 
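The `exp_` kernel above saves its forward output `y` so the backward pass can reuse it, since `d/dx exp(x) = exp(x)`, instead of recomputing the exponential. A quick numerical check of that gradient through the public API (input values assumed for illustration):

```js
// d/dx exp(x) = exp(x); tf.grad builds the gradient function.
const dexp = tf.grad(x => x.exp());
dexp(tf.tensor1d([0, 1, 2])).print();   // ~[1, 2.718, 7.389], i.e. exp(x) itself
```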
return ENV.engine.runKernel(backend => backend.sqrt($x), {$x}, grad);\n}\n\n/**\n * Computes reciprocal of square root of the input `tf.Tensor` element-wise:\n * `y = 1 / sqrt(x)`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 4, -1]);\n *\n * x.rsqrt().print(); // or tf.rsqrt(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction rsqrt_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'rsqrt');\n\n const grad = (dy: T) => {\n return {$x: () => dy.divStrict($x.pow(scalar(1.5)).mul(scalar(2))).neg()};\n };\n return ENV.engine.runKernel(backend => backend.rsqrt($x), {$x}, grad);\n}\n\n/**\n * Computes square of `x` element-wise: `x ^ 2`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, Math.sqrt(2), -1]);\n *\n * x.square().print(); // or tf.square(x)\n * ```\n * @param x The input Tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction square_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'square');\n\n const grad = (dy: T) => {\n return {$x: () => dy.mulStrict($x.toFloat().mul(scalar(2)))};\n };\n return ENV.engine.runKernel(backend => backend.square($x), {$x}, grad);\n}\n\n/**\n * Computes reciprocal of x element-wise: `1 / x`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, 2]);\n *\n * x.reciprocal().print(); // or tf.reciprocal(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction reciprocal_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'reciprocal');\n\n const grad = (dy: T) => {\n return {$x: () => dy.divStrict($x.square().neg())};\n };\n return ENV.engine.runKernel(backend => backend.reciprocal($x), {$x}, grad);\n}\n\n/**\n * Computes absolute value element-wise: `abs(x)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.abs().print(); // or tf.abs(x)\n * ```\n * @param x The input `tf.Tensor`.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction abs_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'abs');\n\n if ($x.dtype === 'complex64') {\n return ENV.engine.runKernel(backend => backend.complexAbs($x), {$x});\n }\n\n const grad = (dy: T) => {\n return {$x: () => dy.mulStrict($x.toFloat().step(-1))};\n };\n return ENV.engine.runKernel(backend => backend.abs($x), {$x}, grad);\n}\n\n/**\n * Clips values element-wise. 
`max(min(x, clipValueMax), clipValueMin)`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.clipByValue(-2, 3).print(); // or tf.clipByValue(x, -2, 3)\n * ```\n * @param x The input tensor.\n * @param clipValueMin Lower-bound of range to be clipped to.\n * @param clipValueMax Upper-bound of range to be clipped to.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction clipByValue_<T extends Tensor>(\n x: T|TensorLike, clipValueMin: number, clipValueMax: number): T {\n const $x = convertToTensor(x, 'x', 'clipByValue');\n util.assert(\n (clipValueMin <= clipValueMax),\n `Error in clip: min (${clipValueMin}) must be ` +\n `less than or equal to max (${clipValueMax}).`);\n\n const grad = (dy: T) => {\n return {\n $x: () => dy.where(\n $x.greaterEqual(scalar(clipValueMin))\n .logicalAnd($x.lessEqual(scalar(clipValueMax))),\n zerosLike(dy)) as T,\n };\n };\n return ENV.engine.runKernel(\n backend => backend.clip($x, clipValueMin, clipValueMax), {$x}, grad);\n}\n\n/**\n * Computes sigmoid element-wise, `1 / (1 + exp(-x))`\n *\n * ```js\n * const x = tf.tensor1d([0, -1, 2, -3]);\n *\n * x.sigmoid().print(); // or tf.sigmoid(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction sigmoid_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'sigmoid');\n\n const grad = (dy: T, saved: Tensor[]) => {\n const [y] = saved;\n return {$x: () => dy.mulStrict(y.mul(scalar(1).sub(y)))};\n };\n return ENV.engine.runKernel(\n (backend, save) => save(backend.sigmoid($x)), {$x}, grad);\n}\n\n/**\n * Computes log sigmoid of the input `tf.Tensor` element-wise:\n * `logSigmoid(x)`. For numerical stability, we use `-tf.softplus(-x)`.\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.logSigmoid().print(); // or tf.logSigmoid(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction logSigmoid_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'logSigmoid');\n\n const grad = (dy: T) => {\n return {$x: () => dy.mulStrict($x.neg().sigmoid())};\n };\n return ENV.engine.runKernel(\n backend => backend.softplus($x.neg()).neg(), {$x}, grad);\n}\n\n/**\n * Computes softplus of the input `tf.Tensor` element-wise: `log(exp(x) + 1)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.softplus().print(); // or tf.softplus(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction softplus_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'softplus');\n\n const grad = (dy: T) => {\n return {$x: () => dy.mulStrict($x.sigmoid())};\n };\n return ENV.engine.runKernel(backend => backend.softplus($x), {$x}, grad);\n}\n\n/**\n * Computes sin of the input Tensor element-wise: `sin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.sin().print(); // or tf.sin(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction sin_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'sin');\n\n const grad = (dy: T) => {\n return {$x: () => $x.toFloat().cos().mulStrict(dy)};\n };\n return ENV.engine.runKernel(backend => backend.sin($x), {$x}, grad);\n}\n\n/**\n * Computes cos of the input `tf.Tensor` element-wise: `cos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 
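The gradient defined for `clipByValue_` above passes `dy` through wherever the input lies inside `[clipValueMin, clipValueMax]` and contributes zero outside that range (the `where(greaterEqual AND lessEqual, dy, zeros)` mask). A small check of that behaviour, with assumed example values:

```js
// Gradient of clip: 1 where the input was inside the range, 0 where it was clipped.
const dclip = tf.grad(x => x.clipByValue(-2, 3));
dclip(tf.tensor1d([-5, 0, 2, 4])).print();   // ~[0, 1, 1, 0]
```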
4]);\n *\n * x.cos().print(); // or tf.cos(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction cos_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'cos');\n\n const grad = (dy: T) => {\n return {$x: () => $x.toFloat().sin().neg().mulStrict(dy)};\n };\n return ENV.engine.runKernel(backend => backend.cos($x), {$x}, grad);\n}\n\n/**\n * Computes tan of the input `tf.Tensor` element-wise, `tan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, Math.PI / 2, Math.PI * 3 / 4]);\n *\n * x.tan().print(); // or tf.tan(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction tan_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'tan');\n\n const grad = (dy: T) => {\n return {$x: () => dy.divStrict($x.cos().square())};\n };\n return ENV.engine.runKernel(backend => backend.tan($x), {$x}, grad);\n}\n\n/**\n * Computes asin of the input `tf.Tensor` element-wise: `asin(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asin().print(); // or tf.asin(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction asin_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'asin');\n\n const grad = (dy: T) => {\n return {\n $x: () => dy.divStrict(scalar(1).sub($x.toFloat().square()).sqrt() as T)\n };\n };\n return ENV.engine.runKernel(backend => backend.asin($x), {$x}, grad);\n}\n\n/**\n * Computes acos of the input `tf.Tensor` element-wise: `acos(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.acos().print(); // or tf.acos(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction acos_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'acos');\n\n const grad = (dy: T) => {\n return {\n $x: () =>\n dy.divStrict(scalar(1).sub($x.toFloat().square()).sqrt() as T).neg()\n };\n };\n return ENV.engine.runKernel(backend => backend.acos($x), {$x}, grad);\n}\n\n/**\n * Computes atan of the input `tf.Tensor` element-wise: `atan(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.atan().print(); // or tf.atan(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction atan_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'atan');\n\n const grad = (dy: T) => {\n return {$x: () => dy.divStrict(scalar(1).add($x.toFloat().square()))};\n };\n return ENV.engine.runKernel(backend => backend.atan($x), {$x}, grad);\n}\n\n/**\n * Computes hyperbolic sin of the input `tf.Tensor` element-wise: `sinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.sinh().print(); // or tf.sinh(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction sinh_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'sinh');\n\n const grad = (dy: T) => {\n return {$x: () => $x.toFloat().cosh().mulStrict(dy)};\n };\n return ENV.engine.runKernel(backend => backend.sinh($x), {$x}, grad);\n}\n\n/**\n * Computes hyperbolic cos of the input `tf.Tensor` element-wise: `cosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.cosh().print(); // or tf.cosh(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', 
subheading: 'Basic math'} */\nfunction cosh_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'cosh');\n\n const grad = (dy: T) => {\n return {$x: () => $x.toFloat().sinh().mulStrict(dy)};\n };\n return ENV.engine.runKernel(backend => backend.cosh($x), {$x}, grad);\n}\n\n/**\n * Computes hyperbolic tangent of the input `tf.Tensor` element-wise: `tanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, 70]);\n *\n * x.tanh().print(); // or tf.tanh(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction tanh_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'tanh');\n\n const grad = (dy: T, saved: Tensor[]) => {\n const [y] = saved;\n return {$x: () => scalar(1).sub(y.square()).mulStrict(dy) as T};\n };\n return ENV.engine.runKernel(\n (backend, save) => save(backend.tanh($x)), {$x}, grad);\n}\n\n/**\n * Computes inverse hyperbolic sin of the input `tf.Tensor` element-wise:\n * `asinh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, 1, -1, .7]);\n *\n * x.asinh().print(); // or tf.asinh(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction asinh_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'asinh');\n\n const grad = (dy: T) => {\n return {\n $x: () => dy.divStrict(scalar(1).add($x.toFloat().square()).sqrt() as T)\n };\n };\n return ENV.engine.runKernel(backend => backend.asinh($x), {$x}, grad);\n}\n\n/**\n * Computes the inverse hyperbolic cos of the input `tf.Tensor` element-wise:\n * `acosh(x)`\n *\n * ```js\n * const x = tf.tensor1d([10, 1, 3, 5.7]);\n *\n * x.acosh().print(); // or tf.acosh(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction acosh_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'acosh');\n\n const grad = (dy: T) => {\n return {\n $x: () => dy.divStrict($x.toFloat().square().sub(scalar(1)).sqrt() as T)\n };\n };\n return ENV.engine.runKernel(backend => backend.acosh($x), {$x}, grad);\n}\n\n/**\n * Computes inverse hyperbolic tan of the input `tf.Tensor` element-wise:\n * `atanh(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.atanh().print(); // or tf.atanh(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction atanh_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'atanh');\n\n const grad = (dy: T) => {\n return {$x: () => dy.divStrict(scalar(1).sub($x.toFloat().square()))};\n };\n return ENV.engine.runKernel(backend => backend.atanh($x), {$x}, grad);\n}\n\n/**\n * Computes gause error function of the input `tf.Tensor` element-wise:\n * `erf(x)`\n *\n * ```js\n * const x = tf.tensor1d([0, .1, -.1, .7]);\n *\n * x.erf().print(); // or tf.erf(x);\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction erf_<T extends Tensor>(x: T|TensorLike): T {\n let $x = convertToTensor(x, 'x', 'erf');\n util.assert(\n $x.dtype === 'int32' || $x.dtype === 'float32',\n 'Input dtype must be `int32` or `float32`.');\n\n if ($x.dtype === 'int32') {\n $x = $x.toFloat();\n }\n\n const grad = (dy: T) => {\n return {\n $x: () => dy.mulStrict(\n scalar(2 / Math.sqrt(Math.PI)).mul($x.square().neg().exp()))\n };\n };\n return ENV.engine.runKernel(backend => backend.erf($x), {$x}, grad);\n}\n\n/**\n * Computes 
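The `erf_` gradient above multiplies `dy` by `2/sqrt(pi) * exp(-x^2)`, the derivative of the Gauss error function; at `x = 0` that factor is `2/sqrt(pi) ≈ 1.1284`. A quick numerical confirmation (input value assumed):

```js
// d/dx erf(x) = (2 / sqrt(pi)) * exp(-x^2); at x = 0 this is about 1.1284.
const derf = tf.grad(x => x.erf());
derf(tf.scalar(0)).print();   // ~1.1284
```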
step of the input `tf.Tensor` element-wise: `x > 0 ? 1 : alpha * x`\n *\n * ```js\n * const x = tf.tensor1d([0, 2, -1, -3]);\n *\n * x.step(.5).print(); // or tf.step(x, .5)\n * ```\n * @param x The input tensor.\n * @param alpha The gradient when input is negative.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction step_<T extends Tensor>(x: T|TensorLike, alpha = 0.0): T {\n const $x = convertToTensor(x, 'x', 'step');\n\n // TODO(manrajgrover): Return null for gradients when backprop supports\n // it.\n const grad = (dy: T) => {\n return {$x: () => zerosLike(dy)};\n };\n return ENV.engine.runKernel(backend => backend.step($x, alpha), {$x}, grad);\n}\n\nexport const abs = op({abs_});\nexport const acos = op({acos_});\nexport const acosh = op({acosh_});\nexport const asin = op({asin_});\nexport const asinh = op({asinh_});\nexport const atan = op({atan_});\nexport const atanh = op({atanh_});\nexport const ceil = op({ceil_});\nexport const clipByValue = op({clipByValue_});\nexport const cos = op({cos_});\nexport const cosh = op({cosh_});\nexport const erf = op({erf_});\nexport const exp = op({exp_});\nexport const expm1 = op({expm1_});\nexport const floor = op({floor_});\nexport const log = op({log_});\nexport const log1p = op({log1p_});\nexport const logSigmoid = op({logSigmoid_});\nexport const neg = op({neg_});\nexport const reciprocal = op({reciprocal_});\nexport const round = op({round_});\nexport const rsqrt = op({rsqrt_});\nexport const sigmoid = op({sigmoid_});\nexport const sign = op({sign_});\nexport const sin = op({sin_});\nexport const sinh = op({sinh_});\nexport const softplus = op({softplus_});\nexport const sqrt = op({sqrt_});\nexport const square = op({square_});\nexport const step = op({step_});\nexport const tan = op({tan_});\nexport const tanh = op({tanh_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {Rank, TensorLike} from '../types';\nimport * as util from '../util';\n\nimport {tile} from './array_ops';\nimport {getReductionAxes} from './broadcast_util';\nimport {op} from './operation';\nimport {scalar} from './tensor_ops';\nimport {rsqrt} from './unary_ops';\n\n/**\n * Batch normalization, strictly for 2D. 
For the more relaxed version, see\n * `tf.batchNormalization`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n * @param scale A scale Tensor.\n * @param offset An offset Tensor.\n */\nfunction batchNormalization2d_(\n x: Tensor2D|TensorLike, mean: Tensor2D|Tensor1D|TensorLike,\n variance: Tensor2D|Tensor1D|TensorLike, varianceEpsilon = .001,\n scale?: Tensor2D|Tensor1D|TensorLike,\n offset?: Tensor2D|Tensor1D|TensorLike): Tensor2D {\n const $x = convertToTensor(x, 'x', 'batchNormalization');\n const $mean = convertToTensor(mean, 'mean', 'batchNormalization');\n const $variance = convertToTensor(variance, 'variance', 'batchNormalization');\n let $scale: Tensor2D|Tensor1D;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNormalization');\n }\n let $offset: Tensor2D|Tensor1D;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNormalization');\n }\n util.assert(\n $x.rank === 2,\n `Error in batchNormalization3D: x must be rank 3 but got rank ` +\n `${$x.rank}.`);\n util.assert(\n $mean.rank === 2 || $mean.rank === 1,\n `Error in batchNormalization2D: mean must be rank 2 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert(\n $variance.rank === 2 || $variance.rank === 1,\n `Error in batchNormalization2D: variance must be rank 2 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert(\n $scale.rank === 2 || $scale.rank === 1,\n `Error in batchNormalization2D: scale must be rank 2 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert(\n $offset.rank === 2 || $offset.rank === 1,\n `Error in batchNormalization2D: offset must be rank 2 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n\n return batchNormalization(\n $x, $mean, $variance, varianceEpsilon, $scale, $offset);\n}\n\n/**\n * Batch normalization, strictly for 3D. 
For the more relaxed version, see\n * `tf.batchNormalization`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n * @param scale A scale Tensor.\n * @param offset An offset Tensor.\n */\nfunction batchNormalization3d_(\n x: Tensor3D|TensorLike, mean: Tensor3D|Tensor1D|TensorLike,\n variance: Tensor3D|Tensor1D|TensorLike, varianceEpsilon = .001,\n scale?: Tensor3D|Tensor1D|TensorLike,\n offset?: Tensor3D|Tensor1D|TensorLike): Tensor3D {\n const $x = convertToTensor(x, 'x', 'batchNormalization');\n const $mean = convertToTensor(mean, 'mean', 'batchNormalization');\n const $variance = convertToTensor(variance, 'variance', 'batchNormalization');\n let $scale: Tensor3D|Tensor1D;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNormalization');\n }\n let $offset: Tensor3D|Tensor1D;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNormalization');\n }\n util.assert(\n $x.rank === 3,\n `Error in batchNormalization3D: x must be rank 3 but got rank ` +\n `${$x.rank}.`);\n util.assert(\n $mean.rank === 3 || $mean.rank === 1,\n `Error in batchNormalization3D: mean must be rank 3 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert(\n $variance.rank === 3 || $variance.rank === 1,\n `Error in batchNormalization3D: variance must be rank 3 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert(\n $scale.rank === 3 || $scale.rank === 1,\n `Error in batchNormalization3D: scale must be rank 3 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert(\n $offset.rank === 3 || $offset.rank === 1,\n `Error in batchNormalization3D: offset must be rank 3 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n\n return batchNormalization(\n $x, $mean, $variance, varianceEpsilon, $scale, $offset);\n}\n\n/**\n * Batch normalization, strictly for 4D. 
For the more relaxed version, see\n * `tf.batchNormalization`.\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n * @param scale A scale Tensor.\n * @param offset An offset Tensor.\n */\nfunction batchNormalization4d_(\n x: Tensor4D|TensorLike, mean: Tensor4D|Tensor1D|TensorLike,\n variance: Tensor4D|Tensor1D|TensorLike, varianceEpsilon = .001,\n scale?: Tensor4D|Tensor1D|TensorLike,\n offset?: Tensor4D|Tensor1D|TensorLike): Tensor4D {\n const $x = convertToTensor(x, 'x', 'batchNormalization');\n const $mean = convertToTensor(mean, 'mean', 'batchNormalization');\n const $variance = convertToTensor(variance, 'variance', 'batchNormalization');\n let $scale: Tensor4D|Tensor1D;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNormalization');\n }\n let $offset: Tensor4D|Tensor1D;\n if (offset != null) {\n $offset = convertToTensor(offset, 'offset', 'batchNormalization');\n }\n util.assert(\n $x.rank === 4,\n `Error in batchNormalization4D: x must be rank 4 but got rank ` +\n `${$x.rank}.`);\n util.assert(\n $mean.rank === 4 || $mean.rank === 1,\n `Error in batchNormalization4D: mean must be rank 4 or rank 1 but ` +\n `got rank ${$mean.rank}.`);\n util.assert(\n $variance.rank === 4 || $variance.rank === 1,\n `Error in batchNormalization4D: variance must be rank 4 or rank 1 ` +\n `but got rank ${$variance.rank}.`);\n if ($scale != null) {\n util.assert(\n $scale.rank === 4 || $scale.rank === 1,\n `Error in batchNormalization4D: scale must be rank 4 or rank 1 ` +\n `but got rank ${$scale.rank}.`);\n }\n if ($offset != null) {\n util.assert(\n $offset.rank === 4 || $offset.rank === 1,\n `Error in batchNormalization4D: offset must be rank 4 or rank 1 ` +\n `but got rank ${$offset.rank}.`);\n }\n return batchNormalization(\n $x, $mean, $variance, varianceEpsilon, $scale, $offset);\n}\n\n/**\n * Batch normalization.\n *\n * As described in\n * [http://arxiv.org/abs/1502.03167](http://arxiv.org/abs/1502.03167).\n *\n * Mean, variance, scale, and offset can be of two shapes:\n * - The same shape as the input.\n * - In the common case, the depth dimension is the last dimension of x, so\n * the values would be an `tf.Tensor1D` of shape [depth].\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that parameters passed are of given rank\n * - `tf.batchNormalization2d`\n * - `tf.batchNormalization3d`\n * - `tf.batchNormalization4d`\n *\n * @param x The input Tensor.\n * @param mean A mean Tensor.\n * @param variance A variance Tensor.\n * @param varianceEpsilon A small float number to avoid dividing by 0.\n * @param scale A scale Tensor.\n * @param offset An offset Tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Normalization'} */\nfunction batchNormalization_<R extends Rank>(\n x: Tensor<R>|Tensor1D|TensorLike, mean: Tensor<R>|Tensor1D|TensorLike,\n variance: Tensor<R>|Tensor1D|TensorLike, varianceEpsilon = .001,\n scale?: Tensor<R>|Tensor1D|TensorLike,\n offset?: Tensor<R>|Tensor1D|TensorLike): Tensor<R> {\n const $x = convertToTensor(x, 'x', 'batchNormalization');\n const $mean = convertToTensor(mean, 'mean', 'batchNormalization');\n const $variance = convertToTensor(variance, 'variance', 'batchNormalization');\n let $scale: Tensor<R>|Tensor1D;\n if (scale != null) {\n $scale = convertToTensor(scale, 'scale', 'batchNormalization');\n }\n let $offset: Tensor<R>|Tensor1D;\n if (offset 
!= null) {\n $offset = convertToTensor(offset, 'offset', 'batchNormalization');\n }\n\n util.assert(\n $mean.rank === $variance.rank,\n 'Batch normalization gradient requires mean and variance to have ' +\n 'equal ranks.');\n util.assert(\n $offset == null || $mean.rank === $offset.rank,\n 'Batch normalization gradient requires mean and offset to have ' +\n 'equal ranks.');\n util.assert(\n $scale == null || $mean.rank === $scale.rank,\n 'Batch normalization gradient requires mean and scale to have ' +\n 'equal ranks.');\n\n let x4D: Tensor4D;\n if ($x.rank === 0 || $x.rank === 1) {\n x4D = $x.as4D(1, 1, 1, $x.size);\n } else if ($x.rank === 2) {\n x4D = $x.as4D(1, 1, $x.shape[0], $x.shape[1]);\n } else if ($x.rank === 3) {\n x4D = $x.as4D(1, $x.shape[0], $x.shape[1], $x.shape[2]) as Tensor4D;\n } else {\n x4D = $x as Tensor4D;\n }\n\n const der = (dy: Tensor) => {\n const scaleValue = $scale == null ? scalar(1) : $scale;\n const reductionAxes = getReductionAxes($mean.shape, x4D.shape);\n const tileShape: number[] = [];\n if ($mean.rank === 1) {\n for (let i = 0; i < x4D.shape.length - 1; ++i) {\n tileShape.push(x4D.shape[i]);\n }\n tileShape.push(1);\n }\n\n const xMinusMean = $x.sub($mean);\n const dyTimesScaleValue = dy.mul(scaleValue);\n const oneOverSqrtVariance = rsqrt($variance.add(scalar(varianceEpsilon)));\n const minusHalfRCube = oneOverSqrtVariance.mul(oneOverSqrtVariance)\n .mul(oneOverSqrtVariance)\n .mul(scalar(-0.5));\n const derX = () => {\n if ($mean.rank === 1) {\n return dy\n .mul(tile(\n oneOverSqrtVariance.as4D(1, 1, 1, $mean.shape[0]), tileShape))\n .mul(scaleValue)\n .reshape($x.shape);\n } else {\n return dy.mul(oneOverSqrtVariance).mul(scaleValue).reshape($x.shape);\n }\n };\n const derMean = () => {\n let meanDer = oneOverSqrtVariance.mul(scalar(-1)).mul(dyTimesScaleValue);\n if ($mean.rank === 1) {\n meanDer = meanDer.sum(reductionAxes);\n }\n return meanDer.reshape($mean.shape);\n };\n const derVariance = () => {\n let varianceDer = minusHalfRCube.mul(xMinusMean).mul(dyTimesScaleValue);\n if ($mean.rank === 1) {\n varianceDer = varianceDer.sum(reductionAxes);\n }\n return varianceDer.reshape($mean.shape);\n };\n const derScale = () => {\n const xMinusMean2TimesRsqrt = xMinusMean.mul(oneOverSqrtVariance);\n let scaleDer = dy.mul(xMinusMean2TimesRsqrt);\n if ($mean.rank === 1) {\n scaleDer = scaleDer.sum(reductionAxes);\n }\n return scaleDer.reshape($mean.shape);\n };\n const derOffset = () => {\n let offsetDer = dy;\n if ($mean.rank === 1) {\n offsetDer = offsetDer.sum(reductionAxes);\n }\n return offsetDer.reshape($mean.shape);\n };\n return {\n $x: derX,\n $mean: derMean,\n $variance: derVariance,\n $scale: derScale,\n $offset: derOffset\n };\n };\n\n const res = ENV.engine.runKernel(\n backend => backend.batchNormalization(\n x4D, batchnormReshape4D($mean), batchnormReshape4D($variance),\n varianceEpsilon, batchnormReshape4D($scale),\n batchnormReshape4D($offset)),\n {$x, $mean, $variance, $scale, $offset}, der);\n return res.reshape($x.shape);\n}\n\nfunction batchnormReshape4D(x: Tensor): Tensor4D|Tensor1D {\n if (x == null) {\n return null;\n }\n if (x.rank === 0) {\n return x.as1D();\n } else if (x.rank === 1) {\n return x as Tensor1D;\n } else if (x.rank === 2) {\n return x.as4D(1, 1, x.shape[0], x.shape[1]);\n } else if (x.rank === 3) {\n return x.as4D(1, x.shape[0], x.shape[1], x.shape[2]);\n }\n return x as Tensor4D;\n}\n\nexport const batchNormalization2d = op({batchNormalization2d_});\nexport const batchNormalization3d = 
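`batchNormalization_` above accepts `mean`, `variance`, `scale`, and `offset` either with the same shape as `x` or as a rank-1 tensor of shape `[depth]`; internally it reshapes everything to 4D, runs the kernel, and reshapes the result back to the shape of `x`. A minimal usage sketch of the 1D-parameter case (all tensor values below are assumed for illustration):

```js
// x: [2, 3] (2 rows, depth 3); mean/variance/scale/offset: [3].
const x = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);
const mean = tf.tensor1d([2.5, 3.5, 4.5]);
const variance = tf.tensor1d([2.25, 2.25, 2.25]);
const scale = tf.tensor1d([1, 1, 1]);
const offset = tf.tensor1d([0, 0, 0]);

// (x - mean) / sqrt(variance + epsilon) * scale + offset, broadcast over the depth axis.
tf.batchNormalization(x, mean, variance, 0.001, scale, offset).print();
// ~[[-1, -1, -1], [1, 1, 1]]
```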
op({batchNormalization3d_});\nexport const batchNormalization4d = op({batchNormalization4d_});\nexport const batchNormalization = op({batchNormalization_});\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as util from '../util';\n\nexport type PadInfo = {\n top: number,\n left: number,\n right: number,\n bottom: number,\n type: string\n};\n\n/**\n * Information about the forward pass of a convolution/pooling operation.\n * It includes input and output shape, strides, filter size and padding\n * information.\n */\nexport type Conv2DInfo = {\n batchSize: number,\n inHeight: number,\n inWidth: number,\n inChannels: number,\n outHeight: number,\n outWidth: number,\n outChannels: number,\n dataFormat: 'channelsFirst'|'channelsLast',\n strideHeight: number,\n strideWidth: number,\n dilationHeight: number,\n dilationWidth: number,\n filterHeight: number,\n filterWidth: number,\n effectiveFilterHeight: number,\n effectiveFilterWidth: number,\n padInfo: PadInfo,\n inShape: [number, number, number, number],\n outShape: [number, number, number, number],\n filterShape: [number, number, number, number]\n};\n\nexport function computePool2DInfo(\n inShape: [number, number, number, number],\n filterSize: [number, number]|number, strides: number|[number, number],\n dilations: number|[number, number], pad: 'same'|'valid'|number,\n roundingMode?: 'floor'|'round'|'ceil',\n dataFormat: 'channelsFirst'|'channelsLast' = 'channelsLast'): Conv2DInfo {\n const [filterHeight, filterWidth] = parseTupleParam(filterSize);\n\n let filterShape: [number, number, number, number];\n if (dataFormat === 'channelsLast') {\n filterShape = [filterHeight, filterWidth, inShape[3], inShape[3]];\n } else if (dataFormat === 'channelsFirst') {\n filterShape = [filterHeight, filterWidth, inShape[1], inShape[1]];\n } else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n\n return computeConv2DInfo(\n inShape, filterShape, strides, dilations, pad, roundingMode, false,\n dataFormat);\n}\n\n/**\n * Computes the information for a forward pass of a convolution/pooling\n * operation.\n */\nexport function computeConv2DInfo(\n inShape: [number, number, number, number],\n filterShape: [number, number, number, number],\n strides: number|[number, number], dilations: number|[number, number],\n pad: 'same'|'valid'|number, roundingMode?: 'floor'|'round'|'ceil',\n depthwise = false,\n dataFormat: 'channelsFirst'|'channelsLast' = 'channelsLast'): Conv2DInfo {\n let [batchSize, inHeight, inWidth, inChannels] = [-1, -1, -1, -1];\n if (dataFormat === 'channelsLast') {\n [batchSize, inHeight, inWidth, inChannels] = inShape;\n } else if (dataFormat === 'channelsFirst') {\n [batchSize, inChannels, inHeight, inWidth] = inShape;\n } else {\n throw new Error(`Unknown dataFormat ${dataFormat}`);\n }\n\n const [filterHeight, filterWidth, , filterChannels] = filterShape;\n 
const [strideHeight, strideWidth] = parseTupleParam(strides);\n const [dilationHeight, dilationWidth] = parseTupleParam(dilations);\n\n const effectiveFilterHeight =\n getEffectiveFilterSize(filterHeight, dilationHeight);\n const effectiveFilterWidth =\n getEffectiveFilterSize(filterWidth, dilationWidth);\n const {padInfo, outHeight, outWidth} = getPadAndOutInfo(\n pad, inHeight, inWidth, strideHeight, strideWidth, effectiveFilterHeight,\n effectiveFilterWidth, roundingMode);\n\n const outChannels = depthwise ? filterChannels * inChannels : filterChannels;\n\n let outShape: [number, number, number, number];\n if (dataFormat === 'channelsFirst') {\n outShape = [batchSize, outChannels, outHeight, outWidth];\n } else if (dataFormat === 'channelsLast') {\n outShape = [batchSize, outHeight, outWidth, outChannels];\n }\n\n return {\n batchSize,\n dataFormat,\n inHeight,\n inWidth,\n inChannels,\n outHeight,\n outWidth,\n outChannels,\n padInfo,\n strideHeight,\n strideWidth,\n filterHeight,\n filterWidth,\n effectiveFilterHeight,\n effectiveFilterWidth,\n dilationHeight,\n dilationWidth,\n inShape,\n outShape,\n filterShape\n };\n}\n\nfunction computeOutputShape3D(\n inShape: [number, number, number], fieldSize: number, outDepth: number,\n stride: number, zeroPad?: number,\n roundingMode?: 'floor'|'round'|'ceil'): [number, number, number] {\n if (zeroPad == null) {\n zeroPad = computeDefaultPad(inShape, fieldSize, stride);\n }\n const inputRows = inShape[0];\n const inputCols = inShape[1];\n\n const outputRows = conditionalRound(\n (inputRows - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(\n util.isInt(outputRows),\n `The output # of rows (${outputRows}) must be an integer. Change the ` +\n `stride and/or zero pad parameters`);\n\n const outputCols = conditionalRound(\n (inputCols - fieldSize + 2 * zeroPad) / stride + 1, roundingMode);\n util.assert(\n util.isInt(outputCols),\n `The output # of columns (${outputCols}) must be an integer. Change ` +\n `the stride and/or zero pad parameters`);\n\n return [outputRows, outputCols, outDepth];\n}\n\nexport function computeDefaultPad(\n inputShape: [number, number, number], fieldSize: number, stride: number,\n dilation = 1): number {\n const effectiveFieldSize = getEffectiveFilterSize(fieldSize, dilation);\n return Math.floor(\n (inputShape[0] * (stride - 1) - stride + effectiveFieldSize) / 2);\n}\n\nfunction parseTupleParam(param: number|[number, number]): [number, number] {\n return typeof param === 'number' ? 
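`computeOutputShape3D` above requires `(inSize - fieldSize + 2 * zeroPad) / stride + 1` to come out as an integer unless a `roundingMode` is supplied. A worked example with assumed numbers:

```js
// Standard convolution output-size formula used in computeOutputShape3D above.
const inputRows = 7, fieldSize = 3, zeroPad = 1, stride = 2;
const outputRows = (inputRows - fieldSize + 2 * zeroPad) / stride + 1;
console.log(outputRows);   // 4 -> integer, so the assert passes
// With stride = 3 the same expression gives 3, also valid;
// with inputRows = 8 and stride = 2 it gives 4.5 and the assert would throw.
```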
[param, param] : param;\n}\n\n/* See https://www.tensorflow.org/api_docs/python/tf/nn/atrous_conv2d\n * Atrous convolution is equivalent to standard convolution with upsampled\n * filters with effective_filter_height =\n * filter_height + (filter_height - 1) * (dilation - 1)\n * and effective_filter_width =\n * filter_width + (filter_width - 1) * (dilation - 1),\n * produced by inserting dilation - 1 zeros along consecutive elements across\n * the filters' spatial dimensions.\n * When there is a dilation, this converts a filter dimension to the\n * effective filter dimension, so it can be used in a standard convolution.\n */\nfunction getEffectiveFilterSize(filterSize: number, dilation: number) {\n if (dilation <= 1) {\n return filterSize;\n }\n\n return filterSize + (filterSize - 1) * (dilation - 1);\n}\n\nfunction getPadAndOutInfo(\n pad: 'same'|'valid'|number, inHeight: number, inWidth: number,\n strideHeight: number, strideWidth: number, filterHeight: number,\n filterWidth: number, roundingMode?: 'floor'|'round'|'ceil'):\n {padInfo: PadInfo, outHeight: number, outWidth: number} {\n let padInfo: PadInfo;\n let outHeight: number;\n let outWidth: number;\n\n if (typeof pad === 'number') {\n const padType = (pad === 0) ? 'VALID' : 'NUMBER';\n padInfo = {top: pad, bottom: pad, left: pad, right: pad, type: padType};\n const outShape = computeOutputShape3D(\n [inHeight, inWidth, 1], filterHeight, 1, strideHeight, pad,\n roundingMode);\n outHeight = outShape[0];\n outWidth = outShape[1];\n } else if (pad === 'same') {\n outHeight = Math.ceil(inHeight / strideHeight);\n outWidth = Math.ceil(inWidth / strideWidth);\n const padAlongHeight =\n (outHeight - 1) * strideHeight + filterHeight - inHeight;\n const padAlongWidth = (outWidth - 1) * strideWidth + filterWidth - inWidth;\n const top = Math.floor(padAlongHeight / 2);\n const bottom = padAlongHeight - top;\n const left = Math.floor(padAlongWidth / 2);\n const right = padAlongWidth - left;\n padInfo = {top, bottom, left, right, type: 'SAME'};\n } else if (pad === 'valid') {\n padInfo = {top: 0, bottom: 0, left: 0, right: 0, type: 'VALID'};\n outHeight = Math.ceil((inHeight - filterHeight + 1) / strideHeight);\n outWidth = Math.ceil((inWidth - filterWidth + 1) / strideWidth);\n } else {\n throw Error(`Unknown padding parameter: ${pad}`);\n }\n return {padInfo, outHeight, outWidth};\n}\n\n/**\n * Rounds a value depending on the rounding mode\n * @param value\n * @param roundingMode\n */\nfunction conditionalRound(\n value: number, roundingMode?: 'floor'|'round'|'ceil') {\n if (!roundingMode) {\n return value;\n }\n switch (roundingMode) {\n case 'round':\n // used for Caffe Conv\n return Math.round(value);\n case 'ceil':\n // used for Caffe Pool\n return Math.ceil(value);\n case 'floor':\n return Math.floor(value);\n default:\n throw new Error(`Unknown roundingMode ${roundingMode}`);\n }\n}\n\nexport function tupleValuesAreOne(param: number|[number, number]): boolean {\n const [dimA, dimB] = parseTupleParam(param);\n return dimA === 1 && dimB === 1;\n}\n\nexport function eitherStridesOrDilationsAreOne(\n strides: number|[number, number],\n dilations: number|[number, number]): boolean {\n return tupleValuesAreOne(strides) || tupleValuesAreOne(dilations);\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. 
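For `'same'` padding, `getPadAndOutInfo` above fixes the output size at `ceil(in / stride)` and then distributes `(out - 1) * stride + filter - in` pixels of padding, putting the extra pixel on the bottom/right when the total is odd; dilation first enlarges the filter to its effective size `filter + (filter - 1) * (dilation - 1)`. A worked example with assumed numbers:

```js
// 'same' padding for inHeight = 10, filterHeight = 3, dilation = 2, stride = 2.
const filterHeight = 3, dilation = 2, inHeight = 10, strideHeight = 2;
const effectiveFilterHeight = filterHeight + (filterHeight - 1) * (dilation - 1); // 5
const outHeight = Math.ceil(inHeight / strideHeight);                             // 5
const padAlongHeight = (outHeight - 1) * strideHeight + effectiveFilterHeight - inHeight; // 3
const top = Math.floor(padAlongHeight / 2);   // 1
const bottom = padAlongHeight - top;          // 2  (odd total: extra row goes to the bottom)
```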
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor1D, Tensor2D, Tensor3D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport {op} from './operation';\n\n/**\n * Computes the dot product of two matrices, A * B. These must be matrices.\n *\n * ```js\n * const a = tf.tensor2d([1, 2], [1, 2]);\n * const b = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * a.matMul(b).print(); // or tf.matMul(a, b)\n * ```\n * @param a First matrix in dot product operation.\n * @param b Second matrix in dot product operation.\n * @param transposeA If true, `a` is transposed before multiplication.\n * @param transposeB If true, `b` is transposed before multiplication.\n */\n/** @doc {heading: 'Operations', subheading: 'Matrices'} */\nfunction matMul_<T extends Tensor>(\n a: T|TensorLike, b: T|TensorLike, transposeA = false,\n transposeB = false): T {\n const $a = convertToTensor(a, 'a', 'matMul');\n const $b = convertToTensor(b, 'b', 'matMul');\n\n const innerShapeA =\n transposeA ? $a.shape[$a.rank - 2] : $a.shape[$a.rank - 1];\n const innerShapeB =\n transposeB ? $b.shape[$b.rank - 1] : $b.shape[$b.rank - 2];\n\n const outerShapeA =\n transposeA ? $a.shape[$a.rank - 1] : $a.shape[$a.rank - 2];\n const outerShapeB =\n transposeB ? $b.shape[$b.rank - 2] : $b.shape[$b.rank - 1];\n\n const outerDimsA = $a.shape.slice(0, -2);\n const outerDimsB = $b.shape.slice(0, -2);\n const batchDimA = util.sizeFromShape(outerDimsA);\n const batchDimB = util.sizeFromShape(outerDimsB);\n\n util.assert(\n $a.rank >= 2 && $b.rank >= 2 && $a.rank === $b.rank,\n `Error in matMul: inputs must have the same rank of at least 2, ` +\n `got ranks ${$a.rank} and ${$b.rank}.`);\n\n util.assert(\n util.arraysEqual(outerDimsA, outerDimsB),\n `Error in matMul: outer dimensions (${outerDimsA}) and (` +\n `${outerDimsB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} must match.`);\n\n util.assert(\n innerShapeA === innerShapeB,\n `Error in matMul: inner shapes (${innerShapeA}) and (` +\n `${innerShapeB}) of Tensors with shapes ${$a.shape} and ` +\n `${$b.shape} and transposeA=${transposeA}` +\n ` and transposeB=${transposeB} must match.`);\n\n const outShape = $a.shape.slice(0, -2).concat([outerShapeA, outerShapeB]);\n\n const a3D = transposeA ? $a.as3D(batchDimA, innerShapeA, outerShapeA) :\n $a.as3D(batchDimA, outerShapeA, innerShapeA);\n const b3D = transposeB ? 
$b.as3D(batchDimB, outerShapeB, innerShapeB) :\n $b.as3D(batchDimB, innerShapeB, outerShapeB);\n\n const grad = (dy: Tensor3D) => {\n if (!transposeA && !transposeB) {\n return {\n $a: () => dy.matMul(b3D.toFloat(), false, true),\n $b: () => a3D.toFloat().matMul(dy, true, false)\n };\n } else if (!transposeA && transposeB) {\n return {\n $a: () => dy.matMul(b3D.toFloat(), false, false),\n $b: () => dy.matMul(a3D.toFloat(), true, false)\n };\n } else if (transposeA && !transposeB) {\n return {\n $a: () => b3D.toFloat().matMul(dy, false, true),\n $b: () => a3D.toFloat().matMul(dy, false, false)\n };\n } else {\n return {\n $a: () => b3D.toFloat().matMul(dy, true, true),\n $b: () => dy.matMul(a3D.toFloat(), true, true)\n };\n }\n };\n\n const res = ENV.engine.runKernel(\n backend => backend.batchMatMul(a3D, b3D, transposeA, transposeB),\n {$a: a3D, $b: b3D}, grad);\n return res.reshape(outShape) as T;\n}\n\n/**\n * Computes the outer product of two vectors, `v1` and `v2`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([3, 4, 5]);\n *\n * tf.outerProduct(a, b).print();\n * ```\n * @param v1 The first vector in the outer product operation.\n * @param v2 The second vector in the outer product operation.\n */\n/** @doc {heading: 'Operations', subheading: 'Matrices'} */\nfunction outerProduct_(\n v1: Tensor1D|TensorLike, v2: Tensor1D|TensorLike): Tensor2D {\n const $v1 = convertToTensor(v1, 'v1', 'outerProduct');\n const $v2 = convertToTensor(v2, 'v2', 'outerProduct');\n\n util.assert(\n $v1.rank === 1 && $v2.rank === 1,\n `Error in outerProduct: inputs must be rank 1, but got ranks ` +\n `${$v1.rank} and ${$v2.rank}.`);\n\n return $v1.as2D(-1, 1).matMul($v2.as2D(1, -1));\n}\n\n/**\n * Computes the dot product of two matrices and/or vectors, `t1` and `t2`.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor2d([[1, 2], [3, 4]]);\n * const c = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);\n *\n * a.dot(b).print(); // or tf.dot(a, b)\n * b.dot(a).print();\n * b.dot(c).print();\n * ```\n * @param t1 The first tensor in the dot operation.\n * @param t2 The second tensor in the dot operation.\n */\n/** @doc {heading: 'Operations', subheading: 'Matrices'} */\nfunction dot_(t1: Tensor|TensorLike, t2: Tensor|TensorLike): Tensor {\n const $t1 = convertToTensor(t1, 't1', 'dot');\n const $t2 = convertToTensor(t2, 't2', 'dot');\n util.assert(\n ($t1.rank === 1 || $t1.rank === 2) && ($t2.rank === 1 || $t2.rank === 2),\n `Error in dot: inputs must all be rank 1 or 2, but got ranks ` +\n `${$t1.rank} and ${$t2.rank}.`);\n\n const t1Inner = ($t1.rank === 1 ? $t1.size : $t1.shape[1]);\n const t2Inner = ($t2.rank === 1 ? $t2.size : $t2.shape[0]);\n\n util.assert(\n t1Inner === t2Inner,\n `Error in dot: inner dimensions of inputs must match, but got ` +\n `${t1Inner} and ${t2Inner}.`);\n\n if ($t1.rank === 1 && $t2.rank === 1) {\n return $t1.as2D(1, -1).matMul($t2.as2D(-1, 1)).asScalar();\n } else if ($t1.rank === 1 && $t2.rank === 2) {\n return $t1.as2D(1, -1).matMul($t2.as2D($t2.shape[0], $t2.shape[1])).as1D();\n } else if ($t1.rank === 2 && $t2.rank === 1) {\n return $t1.matMul($t2.as2D(-1, 1)).as1D();\n } else {\n return $t1.matMul($t2.as2D($t2.shape[0], $t2.shape[1]));\n }\n}\n\nexport const matMul = op({matMul_});\nexport const dot = op({dot_});\nexport const outerProduct = op({outerProduct_});\n","/**\n * @license\n * Copyright 2018 Google Inc. 
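The non-transposed branch of the `matMul_` gradient above is the usual identity `dA = dY * B^T`, `dB = A^T * dY`; the other branches only re-route the transposes. A quick shape-level check through the public API (shapes and values assumed):

```js
// A: [2, 3], B: [3, 4]  =>  Y: [2, 4]; dA must be [2, 3] and dB must be [3, 4].
const a = tf.ones([2, 3]);
const b = tf.ones([3, 4]);
const [da, db] = tf.grads((a, b) => tf.matMul(a, b))([a, b]);
console.log(da.shape, db.shape);   // [2, 3] [3, 4]
da.print();                        // each entry ~4 = row-sum of B, since dY defaults to ones
```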
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor2D, Tensor3D, Tensor4D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {Rank, TensorLike} from '../types';\nimport * as util from '../util';\nimport * as conv_util from './conv_util';\nimport {matMul} from './matmul';\nimport {op} from './operation';\n\n/**\n * Computes a 1D convolution over the input x.\n *\n * @param x The input tensor, of rank 3 or rank 2, of shape\n * `[batch, width, inChannels]`. If rank 2, batch of 1 is assumed.\n * @param filter The filter, rank 3, of shape\n * `[filterWidth, inDepth, outDepth]`.\n * @param stride The number of entries by which the filter is moved right at\n * each step.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat An optional string from \"NWC\", \"NCW\". Defaults to \"NWC\",\n * the data is stored in the order of [batch, in_width, in_channels]. Only\n * \"NWC\" is currently supported.\n * @param dilation The dilation rate in which we sample input values in\n * atrous convolution. Defaults to `1`. If it is greater than 1, then\n * stride must be `1`.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n */\n/** @doc {heading: 'Operations', subheading: 'Convolution'} */\nfunction conv1d_<T extends Tensor2D|Tensor3D>(\n x: T|TensorLike, filter: Tensor3D|TensorLike, stride: number,\n pad: 'valid'|'same'|number, dataFormat: 'NWC'|'NCW' = 'NWC', dilation = 1,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n const $x = convertToTensor(x, 'x', 'conv1d');\n const $filter = convertToTensor(filter, 'filter', 'conv1d');\n\n let x3D = $x as Tensor3D;\n let reshapedTo3D = false;\n if ($x.rank === 2) {\n reshapedTo3D = true;\n x3D = $x.as3D(1, $x.shape[0], $x.shape[1]);\n }\n\n util.assert(\n x3D.rank === 3,\n `Error in conv1d: input must be rank 3, but got rank ${x3D.rank}.`);\n util.assert(\n $filter.rank === 3,\n `Error in conv1d: filter must be rank 3, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(\n util.isInt(pad as number),\n `Error in conv1d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n\n util.assert(\n x3D.shape[2] === $filter.shape[1],\n `Error in conv1d: depth of input (${x3D.shape[2]}) must match ` +\n `input depth for filter ${$filter.shape[1]}.`);\n util.assert(\n conv_util.eitherStridesOrDilationsAreOne(stride, dilation),\n 'Error in conv1D: Either stride or dilation must be 1. ' +\n `Got stride ${stride} and dilation '${dilation}'`);\n util.assert(\n dataFormat === 'NWC',\n `Error in conv1d: got dataFormat of ${\n dataFormat} but only NWC is currently supported.`);\n\n const filter4D =\n $filter.as4D(1, $filter.shape[0], $filter.shape[1], $filter.shape[2]);\n const input4D = x3D.as4D(x3D.shape[0], 1, x3D.shape[1], x3D.shape[2]);\n const strides: [number, number] = [1, stride];\n const dilations: [number, number] = [1, dilation];\n\n const conv2dDataFormat = 'NHWC';\n\n const res = conv2d(\n input4D, filter4D, strides, pad, conv2dDataFormat, dilations,\n dimRoundingMode);\n\n if (reshapedTo3D) {\n return res.as2D(res.shape[2], res.shape[3]) as T;\n }\n return res.as3D(res.shape[0], res.shape[2], res.shape[3]) as T;\n}\n\n/**\n * Computes a 2D convolution over the input x.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. 
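`conv1d_` above has no dedicated kernel: the input `[batch, width, channels]` is expanded to `[batch, 1, width, channels]`, the filter to `[1, filterWidth, inDepth, outDepth]`, and the work is delegated to `conv2d` with strides `[1, stride]`. A small usage example (all values assumed):

```js
// x: [batch = 1, width = 4, inChannels = 1]; filter: [filterWidth = 2, inDepth = 1, outDepth = 1].
const x = tf.tensor3d([1, 2, 3, 4], [1, 4, 1]);
const filter = tf.tensor3d([1, -1], [2, 1, 1]);

// 'valid' padding, stride 1: output width = 4 - 2 + 1 = 3.
tf.conv1d(x, filter, 1, 'valid').print();   // ~[[[-1], [-1], [-1]]]
```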
If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\n/** @doc {heading: 'Operations', subheading: 'Convolution'} */\nfunction conv2d_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, filter: Tensor4D|TensorLike,\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dataFormat: 'NHWC'|'NCHW' = 'NHWC',\n dilations: [number, number]|number = [1, 1],\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n const $x = convertToTensor(x, 'x', 'conv2d');\n const $filter = convertToTensor(filter, 'filter', 'conv2d');\n\n let x4D = $x as Tensor4D;\n let reshapedTo4D = false;\n\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = $x.as4D(1, $x.shape[0], $x.shape[1], $x.shape[2]);\n }\n util.assert(\n x4D.rank === 4,\n `Error in conv2d: input must be rank 4, but got rank ${x4D.rank}.`);\n util.assert(\n $filter.rank === 4,\n `Error in conv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(\n util.isInt(pad as number),\n `Error in conv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n\n util.assert(\n x4D.shape[3] === $filter.shape[2],\n `Error in conv2d: depth of input (${x4D.shape[3]}) must match ` +\n `input depth for filter ${$filter.shape[2]}.`);\n util.assert(\n conv_util.eitherStridesOrDilationsAreOne(strides, dilations),\n 'Error in conv2D: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n util.assert(\n dataFormat === 'NHWC',\n `Error in conv2d: got dataFormat of ${\n dataFormat} but only NHWC is currently supported.`);\n\n const convInfo = conv_util.computeConv2DInfo(\n x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode);\n\n let res: Tensor3D|Tensor4D;\n if (convInfo.filterHeight === 1 && convInfo.filterWidth === 1 &&\n convInfo.dilationHeight === 1 && convInfo.dilationWidth === 1 &&\n convInfo.strideHeight === 1 && convInfo.strideWidth === 1 &&\n (convInfo.padInfo.type === 'SAME' || convInfo.padInfo.type === 'VALID')) {\n const x2d = x4D.reshape([-1, convInfo.inChannels]) as Tensor2D;\n const w2d = $filter.reshape([convInfo.inChannels, convInfo.outChannels]) as\n Tensor2D;\n\n res = matMul(x2d, w2d).reshape<Rank.R4>(convInfo.outShape);\n } else {\n const grad = (dy: Tensor4D) => {\n util.assert(\n conv_util.tupleValuesAreOne(dilations),\n 'Error in gradient of conv2D: dilation rates greater than 1 are not' +\n `yet supported in gradients. Got dilations '${dilations}'`);\n\n return {\n x: () => conv2dDerInput_(x4D.shape, dy, $filter, strides, pad),\n $filter: () => conv2dDerFilter_(x4D, dy, $filter.shape, strides, pad)\n };\n };\n\n res = ENV.engine.runKernel(\n backend => backend.conv2d(x4D, $filter, convInfo), {x: x4D, $filter},\n grad);\n }\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n/**\n * Computes the derivative of the input of a 2D convolution.\n *\n * @param xShape The shape of the input: [batch, height, width, inDepth].\n * If length of 3, batch of 1 is assumed.\n * @param dy The derivative of the output, of rank 4 or rank 3 of shape\n * `[batch, outHeight, outWidth, outDepth]`. 
If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, inDepth, outDepth]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`.\n * @param pad The type of padding algorithm used:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction conv2dDerInput_<T extends Tensor3D|Tensor4D>(\n xShape: [number, number, number, number]|[number, number, number], dy: T,\n filter: Tensor4D, strides: [number, number]|number,\n pad: 'valid'|'same'|number, dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n util.assert(\n xShape.length === dy.rank,\n `Length of inShape ` +\n `(${xShape.length}) and rank of dy (${dy.rank}) must match`);\n\n let xShape4D = xShape as [number, number, number, number];\n let dy4D = dy as Tensor4D;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = dy.as4D(1, dy.shape[0], dy.shape[1], dy.shape[2]);\n xShape4D = [1, xShape[0], xShape[1], xShape[2]];\n }\n\n const inDepth = xShape4D[3];\n const outDepth = dy4D.shape[3];\n util.assert(\n xShape4D.length === 4,\n `Error in conv2dDerInput: inShape must be length 4, but got length ` +\n `${xShape4D.length}.`);\n util.assert(\n dy4D.rank === 4,\n `Error in conv2dDerInput: dy must be rank 4, but got ` +\n `rank ${dy4D.rank}`);\n util.assert(\n filter.rank === 4,\n `Error in conv2dDerInput: filter must be rank 4, but got ` +\n `rank ${filter.rank}`);\n util.assert(\n inDepth === filter.shape[2],\n `Error in conv2dDerInput: depth of input (${inDepth}) must ` +\n `match input depth for filter ${filter.shape[2]}.`);\n util.assert(\n outDepth === filter.shape[3],\n `Error in conv2dDerInput: depth of output (${outDepth}) must ` +\n `match output depth for filter ${filter.shape[3]}.`);\n if (dimRoundingMode != null) {\n util.assert(\n util.isInt(pad as number),\n `Error in conv2dDerInput: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n\n const dilations = 1;\n\n const grad = (ddx: Tensor4D) => {\n const dataFormat = 'NHWC';\n return {\n dy4D: () => conv2d(\n ddx, filter, strides, pad, dataFormat, dilations, dimRoundingMode),\n filter: () => conv2dDerFilter(\n ddx, dy4D, filter.shape, strides, pad, dimRoundingMode)\n };\n };\n\n const convInfo = conv_util.computeConv2DInfo(\n xShape4D, filter.shape, strides, dilations, pad, dimRoundingMode);\n const res = ENV.engine.runKernel(\n backend => backend.conv2dDerInput(dy4D, filter, convInfo), {dy4D, filter},\n grad);\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n/**\n * Computes the derivative of the filter of a 2D convolution.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * [batch, height, width, inChannels]. If rank 3, batch of 1 is assumed.\n * @param dy The dy image, of rank 4 or rank 3, of shape\n * [batch, height, width, outDepth]. 
If rank 3, batch of 1 is assumed.\n * @param filterShape The shape of the filter, length 4,\n * [filterHeight, filterWidth, inDepth, outDepth].\n * @param strides The strides of the convolution: [strideHeight,\n * strideWidth].\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction conv2dDerFilter_<T extends Tensor3D|Tensor4D>(\n x: T, dy: T, filterShape: [number, number, number, number],\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): Tensor4D {\n let x4D = x as Tensor4D;\n if (x.rank === 3) {\n x4D = x.as4D(1, x.shape[0], x.shape[1], x.shape[2]);\n }\n let dy4D = dy as Tensor4D;\n if (dy4D.rank === 3) {\n dy4D = dy.as4D(1, dy.shape[0], dy.shape[1], dy.shape[2]);\n }\n util.assert(\n x4D.rank === 4,\n `Error in conv2dDerFilter: input must be rank 4, but got shape ` +\n `${x4D.shape}.`);\n util.assert(\n dy4D.rank === 4,\n `Error in conv2dDerFilter: dy must be rank 4, but got shape ` +\n `${dy4D.shape}.`);\n util.assert(\n filterShape.length === 4,\n `Error in conv2dDerFilter: filterShape must be length 4, but got ` +\n `${filterShape}.`);\n util.assert(\n x4D.shape[3] === filterShape[2],\n `Error in conv2dDerFilter: depth of input ${x4D.shape[3]}) must ` +\n `match input depth in filter (${filterShape[2]}.`);\n util.assert(\n dy4D.shape[3] === filterShape[3],\n `Error in conv2dDerFilter: depth of dy (${dy4D.shape[3]}) must ` +\n `match output depth for filter (${filterShape[3]}).`);\n if (dimRoundingMode != null) {\n util.assert(\n util.isInt(pad as number),\n `Error in conv2dDerFilter: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n\n const dilations = 1;\n\n const convInfo = conv_util.computeConv2DInfo(\n x4D.shape, filterShape, strides, dilations, pad, dimRoundingMode);\n return ENV.engine.runKernel(\n backend => backend.conv2dDerFilter(x4D, dy4D, convInfo), {x4D, dy4D});\n}\n\n/**\n * Computes the transposed 2D convolution of an image, also known as a\n * deconvolution.\n *\n * @param x The input image, of rank 4 or rank 3, of shape\n * `[batch, height, width, inDepth]`. If rank 3, batch of 1 is assumed.\n * @param filter The filter, rank 4, of shape\n * `[filterHeight, filterWidth, outDepth, inDepth]`.\n * `inDepth` must match `inDepth` in `x`.\n * @param outputShape Output shape, of rank 4 or rank 3:\n * `[batch, height, width, outDepth]`. If rank 3, batch of 1 is assumed.\n * @param strides The strides of the original convolution:\n * `[strideHeight, strideWidth]`.\n * @param pad The type of padding algorithm used in the non-transpose version\n * of the op.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n */\n/** @doc {heading: 'Operations', subheading: 'Convolution'} */\nfunction conv2dTranspose_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, filter: Tensor4D|TensorLike,\n outputShape: [number, number, number, number]|[number, number, number],\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n const $x = convertToTensor(x, 'x', 'conv2dTranspose');\n const $filter = convertToTensor(filter, 'filter', 'conv2dTranspose');\n\n return conv2dDerInput_(\n outputShape, $x, $filter, strides, pad, dimRoundingMode);\n}\n\n/**\n * Depthwise 2D convolution.\n *\n * Given a 4D `input` array and a `filter` array of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]` containing\n * `inChannels` convolutional filters of depth 1, this op applies a\n * different filter to each input channel (expanding from 1 channel to\n * `channelMultiplier` channels for each), then concatenates the results\n * together. The output has `inChannels * channelMultiplier` channels.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/depthwise_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param filter The filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. 
If none is provided, it will not round\n * and error if the output is of fractional size.\n */\n/** @doc {heading: 'Operations', subheading: 'Convolution'} */\nfunction depthwiseConv2d_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, filter: Tensor4D|TensorLike,\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dataFormat: 'NHWC'|'NCHW' = 'NHWC',\n dilations: [number, number]|number = [1, 1],\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n const $x = convertToTensor(x, 'x', 'depthwiseConv2d');\n const $filter = convertToTensor(filter, 'filter', 'depthwiseConv2d');\n\n let x4D = $x as Tensor4D;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = $x.as4D(1, $x.shape[0], $x.shape[1], $x.shape[2]);\n }\n util.assert(\n x4D.rank === 4,\n `Error in depthwiseConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert(\n $filter.rank === 4,\n `Error in depthwiseConv2d: filter must be rank 4, but got rank ` +\n `${$filter.rank}.`);\n util.assert(\n x4D.shape[3] === $filter.shape[2],\n `Error in depthwiseConv2d: number of input channels ` +\n `(${x4D.shape[3]}) must match the inChannels dimension in ` +\n `filter ${$filter.shape[2]}.`);\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(\n conv_util.eitherStridesOrDilationsAreOne(strides, dilations),\n 'Error in depthwiseConv2d: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n\n if (dimRoundingMode != null) {\n util.assert(\n util.isInt(pad as number),\n `Error in depthwiseConv2d: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n\n const convInfo = conv_util.computeConv2DInfo(\n x4D.shape, $filter.shape, strides, dilations, pad, dimRoundingMode,\n true /* depthwise */);\n\n const grad = (dy: Tensor4D) => {\n util.assert(\n conv_util.tupleValuesAreOne(dilations),\n 'Error in gradient of depthwiseConv2d: dilation rates greater than ' +\n `1 are not yet supported. Got dilations '${dilations}'`);\n return {\n x: () => depthwiseConv2dDerInput(x4D.shape, dy, $filter, convInfo),\n $filter: () => depthwiseConv2dDerFilter(x4D, dy, $filter.shape, convInfo),\n };\n };\n\n const res = ENV.engine.runKernel(\n backend => backend.depthwiseConv2D(x4D, $filter, convInfo),\n {x: x4D, $filter}, grad);\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n/**\n * 2-D convolution with separable filters.\n *\n * Performs a depthwise convolution that acts separately on channels followed\n * by a pointwise convolution that mixes channels. Note that this is\n * separability between dimensions [1, 2] and 3, not spatial separability\n * between dimensions 1 and 2.\n *\n * See\n * [https://www.tensorflow.org/api_docs/python/tf/nn/separable_conv2d](\n * https://www.tensorflow.org/api_docs/python/tf/nn/separable_conv2d)\n * for more details.\n *\n * @param x The input tensor, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is\n * assumed.\n * @param depthwiseFilter The depthwise filter tensor, rank 4, of shape\n * `[filterHeight, filterWidth, inChannels, channelMultiplier]`. This is\n * the filter used in the first step.\n * @param pointwiseFilter The pointwise filter tensor, rank 4, of shape\n * `[1, 1, inChannels * channelMultiplier, outChannels]`. 
This is\n * the filter used in the second step.\n * @param strides The strides of the convolution: `[strideHeight,\n * strideWidth]`. If strides is a single number, then `strideHeight ==\n * strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in atrous convolution. Defaults to `[1, 1]`. If `rate` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param dataFormat: An optional string from: \"NHWC\", \"NCHW\". Defaults to\n * \"NHWC\". Specify the data format of the input and output data. With the\n * default format \"NHWC\", the data is stored in the order of: [batch,\n * height, width, channels]. Only \"NHWC\" is currently supported.\n */\n/** @doc {heading: 'Operations', subheading: 'Convolution'} */\nfunction separableConv2d_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, depthwiseFilter: Tensor4D|TensorLike,\n pointwiseFilter: Tensor4D|TensorLike, strides: [number, number]|number,\n pad: 'valid'|'same', dilation: [number, number]|number = [1, 1],\n dataFormat: 'NHWC'|'NCHW' = 'NHWC'): T {\n const $x = convertToTensor(x, 'x', 'separableConv2d');\n const $depthwiseFilter =\n convertToTensor(depthwiseFilter, 'depthwiseFilter', 'separableConv2d');\n const $pointwiseFilter =\n convertToTensor(pointwiseFilter, 'pointwiseFilter', 'separableConv2d');\n\n let x4D = $x as Tensor4D;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = $x.as4D(1, $x.shape[0], $x.shape[1], $x.shape[2]);\n }\n\n if (dataFormat === 'NCHW') {\n throw new Error(\n 'separableConv2d currently does not support dataFormat NCHW; only ' +\n 'NHWC is supported');\n }\n\n util.assert(\n x4D.rank === 4,\n `Error in separableConv2d: input must be rank 4, but got ` +\n `rank ${x4D.rank}.`);\n util.assert(\n $depthwiseFilter.rank === 4,\n `Error in separableConv2d: depthwise filter must be rank 4, but got ` +\n `rank ${$depthwiseFilter.rank}.`);\n util.assert(\n $pointwiseFilter.rank === 4,\n `Error in separableConv2d: pointwise filter must be rank 4, but got ` +\n `rank ${$depthwiseFilter.rank}.`);\n util.assert(\n $pointwiseFilter.shape[0] === 1,\n `Error in separableConv2d: the first dimension of pointwise filter ` +\n ` must be 1, but got ${$pointwiseFilter.shape[0]}.`);\n util.assert(\n $pointwiseFilter.shape[1] === 1,\n `Error in separableConv2d: the second dimension of pointwise filter ` +\n ` must be 1, but got ${$pointwiseFilter.shape[1]}.`);\n\n const inChannels = $depthwiseFilter.shape[2];\n const channelMultiplier = $depthwiseFilter.shape[3];\n util.assert(\n $pointwiseFilter.shape[2] === inChannels * channelMultiplier,\n `Error in separableConv2d: the third dimension of pointwise filter ` +\n `must be ${inChannels * channelMultiplier}, ` +\n `but got ${$pointwiseFilter.shape[2]}.`);\n\n const depthwise = depthwiseConv2d(\n x4D, $depthwiseFilter, strides, pad, dataFormat, dilation);\n const pointwiseStride = 1;\n const res =\n conv2d(depthwise, $pointwiseFilter, pointwiseStride, 'valid', dataFormat);\n 
if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\nfunction depthwiseConv2dDerInput<T extends Tensor3D|Tensor4D>(\n xShape: [number, number, number, number]|[number, number, number], dy: T,\n filter: Tensor4D, convInfo: conv_util.Conv2DInfo): T {\n let dy4D = dy as Tensor4D;\n let reshapedTo4D = false;\n if (dy.rank === 3) {\n reshapedTo4D = true;\n dy4D = dy.as4D(1, dy.shape[0], dy.shape[1], dy.shape[2]);\n }\n const res = ENV.engine.runKernel(\n backend => backend.depthwiseConv2DDerInput(dy4D, filter, convInfo),\n {dy4D});\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\nfunction depthwiseConv2dDerFilter<T extends Tensor3D|Tensor4D>(\n x: T, dy: T, filterShape: [number, number, number, number],\n convInfo: conv_util.Conv2DInfo): Tensor4D {\n let x4D = x as Tensor4D;\n if (x.rank === 3) {\n x4D = x.as4D(1, x.shape[0], x.shape[1], x.shape[2]);\n }\n let dy4D = dy as Tensor4D;\n if (dy4D.rank === 3) {\n dy4D = dy.as4D(1, dy.shape[0], dy.shape[1], dy.shape[2]);\n }\n return ENV.engine.runKernel(\n backend => backend.depthwiseConv2DDerFilter(x4D, dy4D, convInfo),\n {x4D, dy4D});\n}\n\nexport const conv1d = op({conv1d_});\nexport const conv2d = op({conv2d_});\nexport const conv2dDerFilter = op({conv2dDerFilter_});\nexport const depthwiseConv2d = op({depthwiseConv2d_});\nexport const separableConv2d = op({separableConv2d_});\nexport const conv2dTranspose = op({conv2dTranspose_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport {parseAxisParam} from './axis_util';\nimport {op} from './operation';\n\n/**\n * Reverses a `tf.Tensor1D`.\n *\n * @param x The input tensor.\n */\nfunction reverse1d_(x: Tensor1D|TensorLike): Tensor1D {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 1, `Error in reverse1D: x must be rank 1 but got\n rank ${$x.rank}.`);\n return reverse($x, 0);\n}\n\n/**\n * Reverses a `tf.Tensor2D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse2d_(x: Tensor2D|TensorLike, axis?: number|number[]): Tensor2D {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 2, `Error in reverse2D: x must be rank 2 but got\n rank ${$x.rank}.`);\n return reverse($x, axis);\n}\n\n/**\n * Reverses a `tf.Tensor3D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). 
Defaults to all axes.\n */\nfunction reverse3d_(x: Tensor3D|TensorLike, axis?: number|number[]): Tensor3D {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 3, `Error in reverse3D: x must be rank 3 but got\n rank ${$x.rank}.`);\n return reverse($x, axis);\n}\n\n/**\n * Reverses a `tf.Tensor4D` along a specified axis.\n *\n * @param x The input tensor.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\nfunction reverse4d_(x: Tensor4D|TensorLike, axis?: number|number[]): Tensor4D {\n const $x = convertToTensor(x, 'x', 'reverse');\n util.assert($x.rank === 4, `Error in reverse4D: x must be rank 4 but got\n rank ${$x.rank}.`);\n return reverse($x, axis);\n}\n\n/**\n * Reverses a `tf.Tensor` along a specified axis.\n *\n * Also available are stricter rank-specific methods that assert that `x` is\n * of the given rank:\n * - `tf.reverse1d`\n * - `tf.reverse2d`\n * - `tf.reverse3d`\n * - `tf.reverse4d`\n *\n * Except `tf.reverse1d` (which does not have axis param), all methods have\n * same signature as this method.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.reverse().print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.reverse(axis).print();\n * ```\n * @param x The input tensor to be reversed.\n * @param axis The set of dimensions to reverse. Must be in the\n * range [-rank(x), rank(x)). Defaults to all axes.\n */\n/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */\nfunction reverse_<T extends Tensor>(\n x: T|TensorLike, axis?: number|number[]): T {\n const $x = convertToTensor(x, 'x', 'reverse');\n\n if ($x.rank === 0) {\n return $x.clone();\n }\n const axes = parseAxisParam(axis, $x.shape);\n const grad = (dy: T) => {\n return {$x: () => dy.reverse(axes)};\n };\n const res =\n ENV.engine.runKernel(backend => backend.reverse($x, axes), {$x}, grad);\n return res.reshapeAs($x);\n}\n\nexport const reverse = op({reverse_});\nexport const reverse1d = op({reverse1d_});\nexport const reverse2d = op({reverse2d_});\nexport const reverse3d = op({reverse3d_});\nexport const reverse4d = op({reverse4d_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor3D, Tensor4D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport {batchToSpaceND, spaceToBatchND} from './array_ops';\nimport * as conv_util from './conv_util';\nimport {op} from './operation';\n\n/**\n * Computes the 2D max pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. 
If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction maxPoolImpl_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, filterSize: [number, number]|number,\n strides: [number, number]|number, dilations: [number, number]|number,\n pad: 'valid'|'same'|number, dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n const $x = convertToTensor(x, 'x', 'maxPool');\n\n let x4D = $x as Tensor4D;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = $x.as4D(1, $x.shape[0], $x.shape[1], $x.shape[2]);\n }\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(\n x4D.rank === 4,\n `Error in maxPool: input must be rank 4 but got rank ${x4D.rank}.`);\n util.assert(\n conv_util.eitherStridesOrDilationsAreOne(strides, dilations),\n 'Error in maxPool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n if (dimRoundingMode != null) {\n util.assert(\n util.isInt(pad as number),\n `Error in maxPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n const convInfo = conv_util.computePool2DInfo(\n x4D.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n\n const grad = (dy: Tensor4D, saved: Tensor[]) => {\n const [y4D] = saved;\n return {\n x: () => maxPoolBackprop(\n dy, x4D, y4D as Tensor4D, filterSize, strides, dilations, pad)\n };\n };\n\n const res = ENV.engine.runKernel(\n (backend, save) => save(backend.maxPool(x4D, convInfo)), {x: x4D}, grad);\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n/**\n * Computes the 2D max pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. 
If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm.\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\n/** @doc {heading: 'Operations', subheading: 'Convolution'} */\nfunction maxPool_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, filterSize: [number, number]|number,\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n return maxPoolImpl_(x, filterSize, strides, 1, pad, dimRoundingMode);\n}\n\n/**\n * Computes the 2D average pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilations` is a single\n * number, then `dilationHeight == dilationWidth`. If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\nfunction avgPoolImpl_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, filterSize: [number, number]|number,\n strides: [number, number]|number, dilations: [number, number]|number,\n pad: 'valid'|'same'|number, dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n const $x = convertToTensor(x, 'x', 'avgPool');\n util.assert(\n $x.dtype === 'float32', 'The input dtype to avgPool must be float32');\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(\n conv_util.eitherStridesOrDilationsAreOne(strides, dilations),\n 'Error in avgPool: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n let x4D = $x as Tensor4D;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = $x.as4D(1, $x.shape[0], $x.shape[1], $x.shape[2]);\n }\n util.assert(\n x4D.rank === 4,\n `Error in avgPool: x must be rank 4 but got rank ${x4D.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(\n util.isInt(pad as number),\n `Error in avgPool: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n\n const convInfo = conv_util.computePool2DInfo(\n x4D.shape, filterSize, strides, dilations, pad);\n\n const grad = (dy: Tensor4D) => {\n return {\n x: () => avgPoolBackprop(dy, x4D, filterSize, strides, dilations, pad)\n };\n };\n let res = ENV.engine.runKernel(\n backend => backend.avgPool(x4D, convInfo), {x: x4D}, grad);\n res = res.cast($x.dtype);\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n/**\n * Computes the 2D average pooling of an image.\n *\n * @param x The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dimRoundingMode The rounding mode used when computing output\n * dimensions if pad is a number. If none is provided, it will not round\n * and error if the output is of fractional size.\n */\n/** @doc {heading: 'Operations', subheading: 'Convolution'} */\nfunction avgPool_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, filterSize: [number, number]|number,\n strides: [number, number]|number, pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): T {\n return avgPoolImpl_(x, filterSize, strides, 1, pad, dimRoundingMode);\n}\n\n/**\n * Performs an N-D pooling operation\n *\n * @param input The input tensor, of rank 4 or rank 3 of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param windowShape The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param poolingType The type of pooling, either 'max' or 'avg'.\n * @param pad The type of padding algorithm:\n * - `same` and stride 1: output will be of same size as input,\n * regardless of filter size.\n * - `valid`: output will be smaller than input if filter is larger\n * than 1x1.\n * - For more info, see this guide:\n * [https://www.tensorflow.org/api_guides/python/nn#Convolution](\n * https://www.tensorflow.org/api_guides/python/nn#Convolution)\n * @param dilations The dilation rates: `[dilationHeight, dilationWidth]`\n * in which we sample input values across the height and width dimensions\n * in dilated pooling. Defaults to `[1, 1]`. If `dilationRate` is a single\n * number, then `dilationHeight == dilationWidth`. 
If it is greater than\n * 1, then all values of `strides` must be 1.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n */\n/** @doc {heading: 'Operations', subheading: 'Convolution'} */\nfunction pool_<T extends Tensor3D|Tensor4D>(\n input: T|TensorLike, windowShape: [number, number]|number,\n poolingType: 'avg'|'max', pad: 'valid'|'same'|number,\n dilations?: [number, number]|number, strides?: [number, number]|number) {\n if (dilations == null) {\n dilations = [1, 1];\n }\n if (strides == null) {\n strides = 1;\n }\n if (pad === 0) {\n pad = 'valid';\n }\n const $x = convertToTensor(input, 'x', 'maxPool');\n let x4D = $x as Tensor4D;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = $x.as4D(1, $x.shape[0], $x.shape[1], $x.shape[2]);\n }\n util.assert(\n conv_util.eitherStridesOrDilationsAreOne(strides, dilations),\n 'Error in pool: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n const convInfo = conv_util.computePool2DInfo(\n x4D.shape, windowShape, strides, dilations, pad);\n const dilation: [number, number] =\n [convInfo.dilationHeight, convInfo.dilationWidth];\n\n // The following implementation does batchToSpace(pool(spaceToBatch(x)))\n // whenever dilation > 1 since the TF kernels do not support dilation > 1.\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/nn_ops.py#L1037\n\n let basePadding: number[][];\n if (pad === 'same') {\n basePadding = withSpaceToBatchBasePaddings(\n [convInfo.filterHeight, convInfo.filterWidth], dilation);\n } else {\n basePadding = [[0, 0], [0, 0]];\n }\n const isDilationOne = dilation[0] === 1 && dilation[1] === 1;\n const [adjustedPadding, adjustedCrops] = requiredSpaceToBatchPaddings(\n [convInfo.inHeight, convInfo.inWidth], dilation, basePadding);\n const convertedPad = isDilationOne ? pad : 'valid';\n const convertedX =\n isDilationOne ? x4D : spaceToBatchND(x4D, dilation, adjustedPadding);\n const forwardOp = poolingType === 'avg' ?\n () => avgPoolImpl_(\n convertedX, windowShape, strides, 1 /* dilation */, convertedPad) :\n () => maxPoolImpl_(\n convertedX, windowShape, strides, 1 /* dilation */, convertedPad);\n const y = forwardOp();\n const res = isDilationOne ? y : batchToSpaceND(y, dilation, adjustedCrops);\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n/**\n * Computes the backprop of a max pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The original input image, of rank 4, of shape\n * [batchSize, height, width, channels].\n * @param output The original output image, of rank 4, of shape\n * [batchSize, outHeight, outWidth, channels].\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n * @param dimRoundingMode A string from: 'ceil', 'round', 'floor'. 
The\n * rounding mode used when computing output dimensions if pad is a\n * number. If none is provided, it will not round and error if the output\n * is of fractional size.\n */\nfunction maxPoolBackprop(\n dy: Tensor4D|TensorLike, input: Tensor4D|TensorLike,\n output: Tensor4D|TensorLike, filterSize: [number, number]|number,\n strides: [number, number]|number, dilations: [number, number]|number,\n pad: 'valid'|'same'|number,\n dimRoundingMode?: 'floor'|'round'|'ceil'): Tensor4D {\n const $dy = convertToTensor(dy, 'dy', 'maxPoolBackprop');\n const $input = convertToTensor(input, 'input', 'maxPoolBackprop');\n const $output = convertToTensor(output, 'output', 'maxPoolBackprop');\n util.assert(\n $input.rank === $dy.rank,\n `Rank of input (${$input.rank}) does not match rank of dy (${$dy.rank})`);\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(\n conv_util.eitherStridesOrDilationsAreOne(strides, dilations),\n 'Error in maxPoolBackProp: Either strides or dilations must be 1. ' +\n `Got strides ${strides} and dilations '${dilations}'`);\n\n util.assert(\n $dy.rank === 4,\n `Error in maxPoolBackprop: dy must be rank 4 but got rank ` +\n `${$dy.rank}.`);\n util.assert(\n $input.rank === 4,\n `Error in maxPoolBackprop: input must be rank 4 but got rank ` +\n `${$input.rank}.`);\n if (dimRoundingMode != null) {\n util.assert(\n util.isInt(pad as number),\n `Error in maxPoolBackprop: pad must be an integer when using, ` +\n `dimRoundingMode ${dimRoundingMode} but got pad ${pad}.`);\n }\n\n const convInfo = conv_util.computePool2DInfo(\n $input.shape, filterSize, strides, dilations, pad, dimRoundingMode);\n const res = ENV.engine.runKernel(\n backend => backend.maxPoolBackprop($dy, $input, $output, convInfo),\n {$dy, $input});\n return res;\n}\n\n/**\n * Computes the backprop of an avg pool.\n *\n * @param dy The dy error, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param input The input image, of rank 4 or rank 3 of shape\n * [batchSize, height, width, channels]. If rank 3, batch of 1 is\n * assumed.\n * @param filterSize The filter size: `[filterHeight, filterWidth]`. If\n * `filterSize` is a single number, then `filterHeight == filterWidth`.\n * @param strides The strides of the pooling: `[strideHeight, strideWidth]`. If\n * `strides` is a single number, then `strideHeight == strideWidth`.\n * @param pad A string from: 'same', 'valid'. The type of padding algorithm\n * used in the forward prop of the op.\n */\nfunction avgPoolBackprop<T extends Tensor3D|Tensor4D>(\n dy: T|TensorLike, input: T|TensorLike, filterSize: [number, number]|number,\n strides: [number, number]|number, dilations: [number, number]|number,\n pad: 'valid'|'same'|number): T {\n const $dy = convertToTensor(dy, 'dy', 'avgPoolBackprop');\n const $input = convertToTensor(input, 'input', 'avgPoolBackprop');\n util.assert(\n $input.rank === $dy.rank,\n `Rank of input (${$input.rank}) does not match rank of dy (${$dy.rank})`);\n if (dilations == null) {\n dilations = [1, 1];\n }\n util.assert(\n conv_util.eitherStridesOrDilationsAreOne(strides, dilations),\n 'Error in avgPoolBackprop: Either strides or dilations must be 1. 
' +\n `Got strides ${strides} and dilations '${dilations}'`);\n\n let input4D = $input as Tensor4D;\n let dy4D = $dy as Tensor4D;\n let reshapedTo4D = false;\n if ($input.rank === 3) {\n reshapedTo4D = true;\n input4D = $input.as4D(1, $input.shape[0], $input.shape[1], $input.shape[2]);\n dy4D = $dy.as4D(1, $dy.shape[0], $dy.shape[1], $dy.shape[2]);\n }\n\n util.assert(\n dy4D.rank === 4,\n `Error in avgPoolBackprop: dy must be rank 4 but got rank ` +\n `${dy4D.rank}.`);\n util.assert(\n input4D.rank === 4,\n `Error in avgPoolBackprop: input must be rank 4 but got rank ` +\n `${input4D.rank}.`);\n\n const convInfo = conv_util.computePool2DInfo(\n input4D.shape, filterSize, strides, dilations, pad);\n const res = ENV.engine.runKernel(\n backend => backend.avgPoolBackprop(dy4D, input4D, convInfo),\n {dy4D, input4D});\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n// Helper function to compute crops and paddings for pool with dilation > 1.\n// tslint:disable-next-line:max-line-length\n// https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/array_ops.py#L2184\nfunction requiredSpaceToBatchPaddings(\n inputShape: [number, number], blockShape: [number, number],\n basePadding: number[][]) {\n const padStart = basePadding.map(b => b[0]);\n const origPadEnd = basePadding.map(b => b[1]);\n const fullInputShape = inputShape.concat(padStart, origPadEnd);\n const padEndExtra = blockShape.map((b, i) => (b - fullInputShape[i] % b) % b);\n const padEnd = origPadEnd.map((s, i) => s + padEndExtra[i]);\n const paddings = blockShape.map((_, i) => [padStart[i], padEnd[i]]);\n const crops = blockShape.map((_, i) => [0, padEndExtra[i]]);\n return [paddings, crops];\n}\n\n// Helper function to compute base paddings for pool with dilation > 1.\n// tslint:disable-next-line:max-line-length\n// https://github.com/tensorflow/tensorflow/blob/50f6bb67dc98c9b74630b6047aae7a4f8a40fd02/tensorflow/python/ops/nn_ops.py#L524\nfunction withSpaceToBatchBasePaddings(\n filterShape: [number, number], dilation: [number, number]) {\n // Spatial dimensions of the filters and the upsampled filters in which we\n // introduce (rate - 1) zeros between consecutive filter values.\n const dilatedFilterShape = filterShape.map((s, i) => {\n return s + (s - 1) * (dilation[i] - 1);\n });\n const padExtraShape = dilatedFilterShape.map(s => s - 1);\n\n // When padding is odd, we pad more at end, following the same\n // convention as conv2d.\n const padExtraStart = padExtraShape.map(s => Math.floor(s / 2));\n const padExtraEnd = padExtraShape.map((s, i) => s - padExtraStart[i]);\n return padExtraShape.map((_, i) => {\n return [padExtraStart[i], padExtraEnd[i]];\n });\n}\n\nexport const maxPool = op({maxPool_});\nexport const avgPool = op({avgPool_});\nexport const pool = op({pool_});\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {Rank, TensorLike} from '../types';\nimport * as util from '../util';\nimport {op} from './operation';\nimport * as slice_util from './slice_util';\n\n/**\n * Extracts a 1D slice from 1D array starting at coordinates `begin` and is\n * of length `size`. See `slice` for details.\n */\nfunction slice1d_(\n x: Tensor1D|TensorLike, begin: number, size: number): Tensor1D {\n const $x = convertToTensor(x, 'x', 'slice1d');\n util.assert(\n $x.rank === 1,\n `slice1d expects a rank-1 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, [begin], [size]);\n}\n\n/**\n * Extracts a 2D slice from a 2D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice2d_(\n x: Tensor2D|TensorLike, begin: [number, number],\n size: [number, number]): Tensor2D {\n const $x = convertToTensor(x, 'x', 'slice2d');\n util.assert(\n $x.rank === 2,\n `slice2d expects a rank-2 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\n\n/**\n * Extracts a 3D slice from a 3D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice3d_(\n x: Tensor3D|TensorLike, begin: [number, number, number],\n size: [number, number, number]): Tensor3D {\n const $x = convertToTensor(x, 'x', 'slice3d');\n util.assert(\n $x.rank === 3,\n `slice3d expects a rank-3 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\n\n/**\n * Extracts a 4D slice from a 4D array starting at coordinates `begin` and\n * is of size `size`. See `slice` for details.\n */\nfunction slice4d_(\n x: Tensor4D|TensorLike, begin: [number, number, number, number],\n size: [number, number, number, number]): Tensor4D {\n const $x = convertToTensor(x, 'x', 'slice4d');\n util.assert(\n $x.rank === 4,\n `slice4d expects a rank-4 tensor, but got a rank-${$x.rank} tensor`);\n return slice($x, begin, size);\n}\n\n/**\n * Extracts a slice from a `tf.Tensor` starting at coordinates `begin`\n * and is of size `size`.\n *\n * Also available are stricter rank-specific methods with the same signature\n * as this method that assert that `x` is of the given rank:\n * - `tf.slice1d`\n * - `tf.slice2d`\n * - `tf.slice3d`\n * - `tf.slice4d`\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.slice([1], [2]).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * x.slice([1, 0], [1, 2]).print();\n * ```\n * @param x The input `tf.Tensor` to slice from.\n * @param begin The coordinates to start the slice from. The length can be\n * less than the rank of x - the rest of the axes will have implicit 0 as\n * start. 
Can also be a single number, in which case it specifies the\n * first axis.\n * @param size The size of the slice. The length can be less than the rank of\n * x - the rest of the axes will have implicit -1. A value of -1 requests\n * the rest of the dimensions in the axis. Can also be a single number,\n * in which case it specifies the size of the first axis.\n */\n/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */\nfunction slice_<R extends Rank, T extends Tensor<R>>(\n x: T|TensorLike, begin: number|number[], size?: number|number[]): T {\n const $x = convertToTensor(x, 'x', 'slice');\n\n if ($x.rank === 0) {\n throw new Error('Slicing scalar is not possible');\n }\n // The following logic allows for more ergonomic calls.\n let begin_: number[];\n if (typeof begin === 'number') {\n begin_ = [begin, ...new Array($x.rank - 1).fill(0)];\n } else if (begin.length < $x.rank) {\n begin_ = begin.concat(new Array($x.rank - begin.length).fill(0));\n } else {\n begin_ = begin.slice();\n }\n let size_: number[];\n if (size == null) {\n size_ = new Array($x.rank).fill(-1);\n } else if (typeof size === 'number') {\n size_ = [size, ...new Array($x.rank - 1).fill(-1)];\n } else if (size.length < $x.rank) {\n size_ = size.concat(new Array($x.rank - size.length).fill(-1));\n } else {\n size_ = size;\n }\n size_ = size_.map((d, i) => {\n if (d >= 0) {\n return d;\n } else {\n util.assert(d === -1, 'Bad value in size');\n return $x.shape[i] - begin_[i];\n }\n });\n slice_util.assertParamsValid($x, begin_, size_);\n const inputShape = $x.shape;\n const grad = (dy: T) => {\n // Create an Nx2 padding where the first column represents how many\n // zeros are prepended (at start) for each dimension, and the second\n // column indicates how many zeros are appended (at end).\n\n // The number of zeros to append is the shape of the input\n // elementwise-subtracted by both the begin vector and sizes vector.\n const paddings: Array<[number, number]> = [];\n for (let i = 0; i < dy.rank; i++) {\n paddings.push([begin_[i], inputShape[i] - begin_[i] - size_[i]]);\n }\n return {$x: () => dy.pad(paddings)};\n };\n return ENV.engine.runKernel(\n backend => backend.slice($x, begin_, size_), {$x}, grad) as T;\n}\n\nexport const slice = op({slice_});\nexport const slice1d = op({slice1d_});\nexport const slice2d = op({slice2d_});\nexport const slice3d = op({slice3d_});\nexport const slice4d = op({slice4d_});\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {customGrad} from '../globals';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport {op} from './operation';\nimport {ones, scalar, zerosLike} from './tensor_ops';\n\n/**\n * Computes the log(sum(exp(elements across the reduction dimensions)).\n *\n * Reduces the input along the dimensions given in `axis`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.logSumExp().print(); // or tf.logSumExp(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.logSumExp(axis).print(); // or tf.logSumExp(a, axis)\n * ```\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. If null (the default),\n * reduces all dimensions.\n * @param keepDims If true, retains reduced dimensions with length\n * of 1. Defaults to false.\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction logSumExp_<T extends Tensor>(\n x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {\n const $x = convertToTensor(x, 'x', 'logSumExp');\n\n const axes = axis_util.parseAxisParam(axis, $x.shape);\n const xMax = $x.max(axes, true /* keepDims */);\n const a = $x.sub(xMax);\n const b = a.exp();\n const c = b.sum(axes);\n const d = c.log();\n const res = xMax.reshape(d.shape).add(d);\n\n if (keepDims) {\n const newShape = axis_util.expandShapeToKeepDim(res.shape, axes);\n return res.reshape(newShape) as T;\n }\n return res as T;\n}\n\n/**\n * Computes the sum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If axes has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.sum().print(); // or tf.sum(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.sum(axis).print(); // or tf.sum(x, axis)\n * ```\n *\n * @param x The input tensor to compute the sum over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction sum_<T extends Tensor>(\n x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {\n let $x = convertToTensor(x, 'x', 'sum');\n\n if ($x.dtype === 'bool') {\n $x = $x.toInt();\n }\n const axes = axis_util.parseAxisParam(axis, $x.shape);\n\n // Use a custom gradient to bypass 2 gradient backprops since sum is used\n // extremely often.\n const customOp = customGrad(x => {\n const permutation = axis_util.getAxesPermutation(axes, x.rank);\n let reductionAxes = axes;\n let permutedX = x;\n if (permutation != null) {\n permutedX = x.transpose(permutation);\n reductionAxes = axis_util.getInnerMostAxes(reductionAxes.length, x.rank);\n }\n let value = ENV.engine.runKernel(\n backend => backend.sum(permutedX, reductionAxes), {permutedX});\n if (keepDims) {\n const newShape = axis_util.expandShapeToKeepDim(value.shape, axes);\n value = value.reshape(newShape);\n }\n\n const gradFunc = (dy: Tensor) => {\n const expandedDyShape = x.shape.slice();\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = dy.reshape(expandedDyShape);\n const derX = expandedDy.mul(ones(x.shape, 'float32'));\n return derX;\n };\n return {value, gradFunc};\n });\n\n return customOp($x) as T;\n}\n\n/**\n * Computes the product of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and a\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.prod().print(); // or tf.prod(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.prod(axis).print(); // or tf.prod(x, axis)\n * ```\n *\n * @param x The input tensor to compute the product over. If the dtype is `bool`\n * it will be converted to `int32` and the output dtype will be `int32`.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction prod_<T extends Tensor>(\n x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {\n let $x = convertToTensor(x, 'x', 'prod');\n\n if ($x.dtype === 'bool') {\n $x = $x.toInt();\n }\n const axes = axis_util.parseAxisParam(axis, $x.shape);\n\n const permutation = axis_util.getAxesPermutation(axes, $x.rank);\n let reductionAxes = axes;\n let permutedX = $x;\n if (permutation != null) {\n permutedX = $x.transpose(permutation);\n reductionAxes = axis_util.getInnerMostAxes(reductionAxes.length, $x.rank);\n }\n let value = ENV.engine.runKernel(\n backend => backend.prod(permutedX, reductionAxes), {permutedX});\n if (keepDims) {\n const newShape = axis_util.expandShapeToKeepDim(value.shape, axes);\n value = value.reshape(newShape);\n }\n\n return value as T;\n}\n/**\n * Computes the mean of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces `x` along the dimensions given in `axis`. 
Unless `keepDims` is\n * true, the rank of the `tf.Tensor` is reduced by 1 for each entry in `axis`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axis` has no entries, all dimensions are reduced, and a `tf.Tensor` with\n * a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.mean().print(); // or tf.mean(a)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.mean(axis).print(); // or tf.mean(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction mean_<T extends Tensor>(\n x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {\n const $x = convertToTensor(x, 'x', 'mean');\n\n const axes = axis_util.parseAxisParam(axis, $x.shape);\n const shapes = axis_util.computeOutAndReduceShapes($x.shape, axes);\n const reduceShape = shapes[1];\n const reduceSize = util.sizeFromShape(reduceShape);\n\n // Use a custom gradient to bypass 2 gradient backprops since mean is used\n // extremely often.\n const customOp = customGrad(x => {\n const reduceSizeScalar = scalar(reduceSize);\n // Cast if needed.\n const xReduce =\n reduceSizeScalar.dtype === x.dtype ? x : x.cast(reduceSizeScalar.dtype);\n const res = xReduce.div(reduceSizeScalar);\n const value = res.sum(axis, keepDims);\n\n const gradFunc = (dy: Tensor) => {\n const expandedDyShape = x.shape.slice();\n axes.forEach(axis => {\n expandedDyShape[axis] = 1;\n });\n const expandedDy = dy.reshape(expandedDyShape);\n const derX =\n expandedDy.mul(ones(x.shape, 'float32')).div(reduceSizeScalar);\n return derX;\n };\n return {value, gradFunc};\n });\n\n return customOp($x) as T;\n}\n\n/**\n * Gradient helper function for the min and max operations.\n */\nfunction gradForMinAndMax<T extends Tensor>(\n dy: T, saved: Tensor[], xOrig: Tensor, origAxes: number[],\n permutedAxes: number[]) {\n let [y] = saved;\n if (y.rank < xOrig.rank) {\n y = y.reshape(axis_util.expandShapeToKeepDim(y.shape, origAxes)) as T;\n }\n if (dy.rank < xOrig.rank) {\n dy = dy.reshape(axis_util.expandShapeToKeepDim(dy.shape, origAxes)) as T;\n }\n return {\n $x: () => {\n const dx = dy.mul(xOrig.equal(y).cast(dy.dtype));\n return permutedAxes == null ? dx : dx.transpose(permutedAxes);\n }\n };\n}\n\n/**\n * Computes the minimum value from the input.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the array is reduced by 1 for each entry in `axes`.\n * If `keepDims` is true, the reduced dimensions are retained with length 1.\n * If `axes` has no entries, all dimensions are reduced, and an array with a\n * single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.min().print(); // or tf.min(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.min(axis).print(); // or tf.min(x, axis)\n * ```\n *\n * @param x The input Tensor.\n * @param axis The dimension(s) to reduce. 
By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction min_<T extends Tensor>(\n x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {\n let $x = convertToTensor(x, 'x', 'min');\n const xOrig = $x;\n\n const origAxes = axis_util.parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = $x.transpose(permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n\n const grad = (dy: T, saved: Tensor[]) =>\n gradForMinAndMax(dy, saved, xOrig, origAxes, permutedAxes);\n let res = ENV.engine.runKernel(\n (backend, save) => save(backend.min($x, axes)), {$x}, grad);\n if (keepDims) {\n const newShape = axis_util.expandShapeToKeepDim(res.shape, origAxes);\n res = res.reshape(newShape) as T;\n }\n return res as T;\n}\n\n/**\n * Computes the maximum of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.max().print(); // or tf.max(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n *\n * const axis = 1;\n * x.max(axis).print(); // or tf.max(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction max_<T extends Tensor>(\n x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {\n let $x = convertToTensor(x, 'x', 'max');\n const xOrig = $x;\n\n const origAxes = axis_util.parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = $x.transpose(permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n\n const grad = (dy: T, saved: Tensor[]) =>\n gradForMinAndMax(dy, saved, xOrig, origAxes, permutedAxes);\n let res = ENV.engine.runKernel(\n (backend, save) => save(backend.max($x, axes)), {$x}, grad);\n if (keepDims) {\n const newShape = axis_util.expandShapeToKeepDim(res.shape, origAxes);\n res = res.reshape(newShape) as T;\n }\n return res as T;\n}\n\n/**\n * Returns the indices of the minimum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMin().print(); // or tf.argMin(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMin(axis).print(); // or tf.argMin(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. 
Defaults to 0 (outer-most dimension).\n *\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction argMin_<T extends Tensor>(x: Tensor|TensorLike, axis = 0): T {\n let $x = convertToTensor(x, 'x', 'argMin');\n\n if (axis == null) {\n axis = 0;\n }\n let axes = axis_util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = $x.transpose(permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n const grad = (dy: T) => {\n return {$x: () => zerosLike($x)};\n };\n return ENV.engine.runKernel(\n backend => backend.argMin($x, axes[0]), {$x}, grad) as T;\n}\n\n/**\n * Returns the indices of the maximum values along an `axis`.\n *\n * The result has the same shape as `input` with the dimension along `axis`\n * removed.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3]);\n *\n * x.argMax().print(); // or tf.argMax(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);\n *\n * const axis = 1;\n * x.argMax(axis).print(); // or tf.argMax(x, axis)\n * ```\n *\n * @param x The input tensor.\n * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction argMax_<T extends Tensor>(x: Tensor|TensorLike, axis = 0): T {\n let $x = convertToTensor(x, 'x', 'argMax');\n\n if (axis == null) {\n axis = 0;\n }\n let axes = axis_util.parseAxisParam(axis, $x.shape);\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = $x.transpose(permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n const grad = (dy: T) => {\n return {$x: () => zerosLike($x)};\n };\n return ENV.engine.runKernel(\n backend => backend.argMax($x, axes[0]), {$x}, grad) as T;\n}\n\n/**\n * Computes the logical and of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1]);\n *\n * x.all().print(); // or tf.all(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.all(axis).print(); // or tf.all(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction all_<T extends Tensor>(\n x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {\n let $x = convertToTensor(x, 'x', 'all', 'bool');\n util.assert(\n $x.dtype === 'bool',\n `Error Tensor must be of type bool. 
Got: ${$x.dtype}`);\n\n const origAxes = axis_util.parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = $x.transpose(permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n const res = ENV.engine.runKernel(backend => backend.all($x, axes), {$x});\n if (keepDims) {\n const newShape = axis_util.expandShapeToKeepDim(res.shape, origAxes);\n return res.reshape(newShape) as T;\n }\n return res as T;\n}\n\n/**\n * Computes the logical or of elements across dimensions of a `tf.Tensor`.\n *\n * Reduces the input along the dimensions given in `axes`. Unless `keepDims`\n * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in\n * `axes`. If `keepDims` is true, the reduced dimensions are retained with\n * length 1. If `axes` has no entries, all dimensions are reduced, and an\n * `tf.Tensor` with a single element is returned.\n *\n * ```js\n * const x = tf.tensor1d([1, 1, 1]);\n *\n * x.any().print(); // or tf.any(x)\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');\n *\n * const axis = 1;\n * x.any(axis).print(); // or tf.any(x, axis)\n * ```\n *\n * @param x The input tensor. Must be of dtype bool.\n * @param axis The dimension(s) to reduce. By default it reduces\n * all dimensions.\n * @param keepDims If true, retains reduced dimensions with size 1.\n */\n/** @doc {heading: 'Operations', subheading: 'Reduction'} */\nfunction any_<T extends Tensor>(\n x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {\n let $x = convertToTensor(x, 'x', 'any', 'bool');\n util.assert(\n $x.dtype === 'bool',\n `Error Tensor must be of type bool. Got: ${$x.dtype}`);\n\n const origAxes = axis_util.parseAxisParam(axis, $x.shape);\n let axes = origAxes;\n const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);\n if (permutedAxes != null) {\n $x = $x.transpose(permutedAxes);\n axes = axis_util.getInnerMostAxes(axes.length, $x.rank);\n }\n const res = ENV.engine.runKernel(backend => backend.any($x, axes), {$x});\n if (keepDims) {\n const newShape = axis_util.expandShapeToKeepDim(res.shape, origAxes);\n return res.reshape(newShape) as T;\n }\n return res as T;\n}\n\n/**\n * Calculates the mean and variance of `x`. The mean and variance are\n * calculated by aggregating the contents of `x` across `axes`. If `x` is\n * 1-D and `axes = [0]` this is just the mean and variance of a vector.\n *\n * @param x The input tensor.\n * @param axis The dimension(s) along with to compute mean and\n * variance. 
By default it reduces all dimensions.\n * @param keepDims If true, the moments have the same dimensionality as the\n * input.\n * @return An object with two keys: `mean` and `variance`.\n */\n/** @doc {heading: 'Operations', subheading: 'Normalization'} */\nfunction moments_(\n x: Tensor|TensorLike, axis: number|number[] = null,\n keepDims = false): {mean: Tensor, variance: Tensor} {\n x = convertToTensor(x, 'x', 'moments');\n const axes = axis_util.parseAxisParam(axis, x.shape);\n const mean = x.mean(axes, keepDims);\n let keepDimsShape = mean.shape;\n if (!keepDims) {\n keepDimsShape = axis_util.expandShapeToKeepDim(mean.shape, axes);\n }\n const devSquared = x.toFloat().sub(mean.reshape(keepDimsShape)).square();\n const variance = devSquared.mean(axes, keepDims);\n return {mean, variance};\n}\n\nexport const all = op({all_});\n// tslint:disable-next-line:variable-name\nexport const any = op({any_});\nexport const argMax = op({argMax_});\nexport const argMin = op({argMin_});\nexport const logSumExp = op({logSumExp_});\nexport const max = op({max_});\nexport const mean = op({mean_});\nexport const min = op({min_});\nexport const moments = op({moments_});\nexport const sum = op({sum_});\nexport const prod = op({prod_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {assertTypesMatch} from '../tensor_util';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {assertShapesMatch} from '../util';\nimport {assertAndGetBroadcastShape} from './broadcast_util';\nimport {op} from './operation';\nimport {zerosLike} from './tensor_ops';\n\n/**\n * Returns the truth value of (a != b) element-wise. Supports broadcasting.\n *\n * We also expose `tf.notEqualStrict` which has the same signature as this op\n * and asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([0, 2, 3]);\n *\n * a.notEqual(b).print();\n * ```\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction notEqual_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'notEqual');\n const $b = convertToTensor(b, 'b', 'notEqual');\n assertTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n return ENV.engine.runKernel(backend => backend.notEqual($a, $b), {$a, $b}) as\n T;\n}\n\n/**\n * Strict version of `tf.notEqual` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same shape and dtype as\n * `a`.\n */\nfunction notEqualStrict_<T extends Tensor>(\n a: T|TensorLike, b: T|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'notEqualStrict');\n const $b = convertToTensor(b, 'b', 'notEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in notEqualStrict: ');\n return $a.notEqual($b);\n}\n\n/**\n * Returns the truth value of (a < b) element-wise. Supports broadcasting.\n *\n * We also expose `tf.lessStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.less(b).print();\n * ```\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction less_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'less');\n const $b = convertToTensor(b, 'b', 'less');\n assertTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n\n return ENV.engine.runKernel(backend => backend.less($a, $b), {$a, $b}) as T;\n}\n\n/**\n * Strict version of `tf.less` that forces `a` and `b` to be of the same\n * shape.\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same shape and dtype as\n * `a`.\n */\nfunction lessStrict_<T extends Tensor>(a: T|TensorLike, b: T|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'lessStrict');\n const $b = convertToTensor(b, 'b', 'lessStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessStrict: ');\n return $a.less($b);\n}\n\n/**\n * Returns the truth value of (a == b) element-wise. Supports broadcasting.\n *\n * We also expose `tf.equalStrict` which has the same signature as this op\n * and asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.equal(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction equal_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'equal');\n const $b = convertToTensor(b, 'b', 'equal');\n assertTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n\n return ENV.engine.runKernel(backend => backend.equal($a, $b), {$a, $b}) as T;\n}\n\nfunction equalStrict_<T extends Tensor>(a: T|TensorLike, b: T|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'equalStrict');\n const $b = convertToTensor(b, 'b', 'equalStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in equalStrict: ');\n return $a.equal($b);\n}\n\n/**\n * Returns the truth value of (a <= b) element-wise. Supports broadcasting.\n *\n * We also expose `tf.lessEqualStrict` which has the same signature as this op\n * and asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.lessEqual(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same dtype as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction lessEqual_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'lessEqual');\n const $b = convertToTensor(b, 'b', 'lessEqual');\n assertTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n\n return ENV.engine.runKernel(backend => backend.lessEqual($a, $b), {$a, $b}) as\n T;\n}\n\nfunction lessEqualStrict_<T extends Tensor>(\n a: T|TensorLike, b: T|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'lessEqualStrict');\n const $b = convertToTensor(b, 'b', 'lessEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in lessEqualStrict: ');\n return $a.lessEqual($b);\n}\n\n/**\n * Returns the truth value of (a > b) element-wise. Supports broadcasting.\n *\n * We also expose `tf.greaterStrict` which has the same signature as this\n * op and asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.greater(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. Must have the same dtype as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction greater_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'greater');\n const $b = convertToTensor(b, 'b', 'greater');\n assertTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n\n return ENV.engine.runKernel(backend => backend.greater($a, $b), {$a, $b}) as\n T;\n}\n\nfunction greaterStrict_<T extends Tensor>(a: T|TensorLike, b: T|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'greaterStrict');\n const $b = convertToTensor(b, 'b', 'greaterStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterStrict: ');\n return $a.greater($b);\n}\n\n/**\n * Returns the truth value of (a >= b) element-wise. Supports broadcasting.\n *\n * We also expose `tf.greaterEqualStrict` which has the same signature as this\n * op and asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3]);\n * const b = tf.tensor1d([2, 2, 2]);\n *\n * a.greaterEqual(b).print();\n * ```\n *\n * @param a The first input tensor.\n * @param b The second input tensor. 
Must have the same dtype as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction greaterEqual_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'greaterEqual');\n const $b = convertToTensor(b, 'b', 'greaterEqual');\n assertTypesMatch($a, $b);\n assertAndGetBroadcastShape($a.shape, $b.shape);\n\n const grad = (dy: T) => {\n return {$a: () => zerosLike($a), $b: () => zerosLike($b)};\n };\n return ENV.engine.runKernel(\n backend => backend.greaterEqual($a, $b), {$a, $b}, grad) as T;\n}\n\nfunction greaterEqualStrict_<T extends Tensor>(\n a: T|TensorLike, b: T|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'greaterEqualStrict');\n const $b = convertToTensor(b, 'b', 'greaterEqualStrict');\n assertShapesMatch($a.shape, $b.shape, 'Error in greaterEqualStrict: ');\n return $a.greaterEqual($b);\n}\n\nexport const equal = op({equal_});\nexport const equalStrict = op({equalStrict_});\nexport const greater = op({greater_});\nexport const greaterEqual = op({greaterEqual_});\nexport const greaterEqualStrict = op({greaterEqualStrict_});\nexport const greaterStrict = op({greaterStrict_});\nexport const less = op({less_});\nexport const lessEqual = op({lessEqual_});\nexport const lessEqualStrict = op({lessEqualStrict_});\nexport const lessStrict = op({lessStrict_});\nexport const notEqual = op({notEqual_});\nexport const notEqualStrict = op({notEqualStrict_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {KernelBackend} from '../kernels/backend';\nimport {Tensor} from '../tensor';\nimport {NamedTensorMap} from '../tensor_types';\nimport {assertTypesMatch} from '../tensor_util';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike, upcastType} from '../types';\nimport * as util from '../util';\nimport * as broadcast_util from './broadcast_util';\nimport {op} from './operation';\nimport {scalar} from './tensor_ops';\nimport {neg} from './unary_ops';\n\n/**\n * Adds two `tf.Tensor`s element-wise, A + B. Supports broadcasting.\n *\n * We also expose `tf.addStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n *\n * ```js\n * // Broadcast add a with b.\n * const a = tf.scalar(5);\n * const b = tf.tensor1d([10, 20, 30, 40]);\n *\n * a.add(b).print(); // or tf.add(a, b)\n * ```\n * @param a The first `tf.Tensor` to add.\n * @param b The second `tf.Tensor` to add. 
Must have the same type as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction add_<T extends Tensor>(a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'add');\n const $b = convertToTensor(b, 'b', 'add');\n assertTypesMatch($a, $b);\n\n const outShape =\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n\n const der = (dy: Tensor) => {\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes($a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes);\n }\n return res.reshape($a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes($b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes);\n }\n return res.reshape($b.shape);\n };\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(backend => backend.add($a, $b), {$a, $b}, der) as\n T;\n}\n\n/**\n * Adds a list of `tf.Tensor`s element-wise, each with the same shape and dtype.\n *\n * ```js\n * const a = tf.tensor1d([1, 2]);\n * const b = tf.tensor1d([3, 4]);\n * const c = tf.tensor1d([5, 6]);\n *\n * tf.addN([a, b, c]).print();\n * ```\n * @param tensors A list of tensors with the same shape and dtype.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction addN_<T extends Tensor>(tensors: Array<T|TensorLike>): T {\n util.assert(\n Array.isArray(tensors),\n () => 'The argument passed to tf.addN() must be a list of tensors');\n util.assert(\n tensors.length >= 1,\n () => `Must pass at least one tensor to tf.addN(), but got ` +\n `${tensors.length}`);\n const $tensors =\n tensors.map((t, i) => convertToTensor(t, `tensors${i}`, 'addN'));\n const firstTensor = $tensors[0];\n $tensors.forEach(t => {\n if (t.dtype !== firstTensor.dtype) {\n throw new Error(\n 'All tensors passed to tf.addN() must have the same dtype');\n }\n });\n $tensors.forEach(t => {\n if (!util.arraysEqual(t.shape, firstTensor.shape)) {\n throw new Error(\n 'All tensors passed to tf.addN() must have the same shape');\n }\n });\n\n const der = (dy: T) => {\n const ders: {[key: string]: () => Tensor} = {};\n $tensors.forEach((t, i) => {\n ders[i] = () => dy.clone();\n });\n return ders;\n };\n const inputs: NamedTensorMap = $tensors as {} as NamedTensorMap;\n return ENV.engine.runKernel(backend => backend.addN($tensors), inputs, der);\n}\n\n/**\n * Adds two `tf.Tensor`s element-wise, A + B.\n *\n * Inputs must be the same shape. For broadcasting support, use add() instead.\n *\n * @param a The first Tensor to add element-wise.\n * @param b The second Tensor to add element-wise.\n */\nfunction addStrict_<T extends Tensor>(a: T, b: T): T {\n util.assertShapesMatch(a.shape, b.shape, 'Error in addStrict: ');\n return a.add(b);\n}\n\n/**\n * Subtracts two `tf.Tensor`s element-wise, A - B. Supports broadcasting.\n *\n * We also expose `tf.subStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n *\n * ```js\n * // Broadcast subtract a with b.\n * const a = tf.tensor1d([10, 20, 30, 40]);\n * const b = tf.scalar(5);\n *\n * a.sub(b).print(); // or tf.sub(a, b)\n * ```\n * @param a The first `tf.Tensor` to subtract from.\n * @param b The second `tf.Tensor` to be subtracted. 
Must have the same dtype as\n * `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction sub_<T extends Tensor>(a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'sub');\n const $b = convertToTensor(b, 'b', 'sub');\n assertTypesMatch($a, $b);\n\n const outShape =\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n\n const der = (dy: Tensor) => {\n const derA = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes($a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes);\n }\n return res.reshape($a.shape);\n };\n const derB = () => {\n let res = dy;\n const reduceAxes = broadcast_util.getReductionAxes($b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes);\n }\n return res.neg().reshape($b.shape);\n };\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(\n backend => backend.subtract($a, $b), {$a, $b}, der) as T;\n}\n\n/**\n * Subtracts two `tf.Tensor`s element-wise, A - B. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.sub` instead.\n *\n * @param a The first Tensor to subtract element-wise.\n * @param b The second Tensor to subtract element-wise.\n */\nfunction subStrict_<T extends Tensor>(a: T, b: T): T {\n util.assertShapesMatch(a.shape, b.shape, 'Error in subStrict: ');\n return a.sub(b);\n}\n\n/**\n * Computes the power of one `tf.Tensor` to another. Supports broadcasting.\n *\n * Given a `tf.Tensor` x and a `tf.Tensor` y, this operation computes x^y for\n * corresponding elements in x and y. The result's dtype will be the upcasted\n * type of the `base` and `exp` dtypes.\n *\n * ```js\n * const a = tf.tensor([[2, 3], [4, 5]])\n * const b = tf.tensor([[1, 2], [3, 0]]).toInt();\n *\n * a.pow(b).print(); // or tf.pow(a, b)\n * ```\n *\n * ```js\n * const a = tf.tensor([[1, 2], [3, 4]])\n * const b = tf.tensor(2).toInt();\n *\n * a.pow(b).print(); // or tf.pow(a, b)\n * ```\n * We also expose `powStrict` which has the same signature as this op and\n * asserts that `base` and `exp` are the same shape (does not broadcast).\n *\n * @param base The base `tf.Tensor` to pow element-wise.\n * @param exp The exponent `tf.Tensor` to pow element-wise.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction pow_<T extends Tensor>(base: T|TensorLike, exp: Tensor|TensorLike): T {\n const $base = convertToTensor(base, 'base', 'pow');\n const $exp = convertToTensor(exp, 'exp', 'pow');\n\n const outShape =\n broadcast_util.assertAndGetBroadcastShape($base.shape, $exp.shape);\n base = $base.cast(upcastType($base.dtype, $exp.dtype));\n exp = $exp.cast(upcastType($base.dtype, $exp.dtype));\n const grad = (dy: Tensor, saved: Tensor[]) => {\n const [y] = saved;\n const derBase = () => {\n const expFloat = $exp.toFloat();\n let res = dy.mul(expFloat.mul($base.pow(expFloat.sub(scalar(1)))));\n const reduceAxes = broadcast_util.getReductionAxes($base.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes);\n }\n return res.reshape($base.shape) as T;\n };\n const derExp = () => {\n let res = dy.mul(y.mul($base.log()).toFloat());\n const reduceAxes = broadcast_util.getReductionAxes($exp.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes);\n }\n return res.reshape($exp.shape);\n };\n return {$base: derBase, $exp: derExp};\n };\n return ENV.engine.runKernel(\n (backend, save) => save(backend.pow($base, $exp)), {$base, $exp},\n grad) as T;\n}\n\n/**\n * 
Computes the power of one `tf.Tensor` to another. Inputs must\n * be the same shape.\n *\n * For broadcasting support, use `tf.pow` instead.\n *\n * @param base The base tensor to pow element-wise.\n * @param exp The exponent tensor to pow element-wise.\n */\nfunction powStrict_<T extends Tensor>(base: T, exp: Tensor): T {\n util.assertShapesMatch(base.shape, exp.shape, 'Error in powStrict: ');\n return base.pow(exp);\n}\n\n/**\n * Multiplies two `tf.Tensor`s element-wise, A * B. Supports broadcasting.\n *\n * We also expose `tf.mulStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.tensor1d([2, 3, 4, 5]);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n *\n * ```js\n * // Broadcast mul a with b.\n * const a = tf.tensor1d([1, 2, 3, 4]);\n * const b = tf.scalar(5);\n *\n * a.mul(b).print(); // or tf.mul(a, b)\n * ```\n * @param a The first tensor to multiply.\n * @param b The second tensor to multiply. Must have the same dtype as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction mul_<T extends Tensor>(a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'mul');\n const $b = convertToTensor(b, 'b', 'mul');\n assertTypesMatch($a, $b);\n\n const outShape =\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n\n const der = (dy: Tensor) => {\n const derA = () => {\n const res = dy.mul($b.toFloat());\n const reduceAxes = broadcast_util.getReductionAxes($a.shape, outShape);\n if (reduceAxes.length > 0) {\n return res.sum(reduceAxes).reshape($a.shape);\n }\n return res;\n };\n const derB = () => {\n const res = dy.mul($a.toFloat());\n const reduceAxes = broadcast_util.getReductionAxes($b.shape, outShape);\n if (reduceAxes.length > 0) {\n return res.sum(reduceAxes).reshape($b.shape);\n }\n return res;\n };\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(\n backend => backend.multiply($a, $b), {$a, $b}, der) as T;\n}\n\n/**\n * Multiplies two `tf.Tensor`s element-wise, A * B.\n *\n * Inputs must be the same shape. For broadcasting support, use `tf.mul`.\n *\n * @param a The first tensor to multiply.\n * @param b The first tensor to multiply. Must have the same\n * dtype as `a`.\n */\nfunction mulStrict_<T extends Tensor>(a: T, b: T): T {\n util.assertShapesMatch(a.shape, b.shape, 'Error in multiplyStrict: ');\n return a.mul(b) as T;\n}\n\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n *\n * We also expose `tf.divStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.div(b).print(); // or tf.div(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. 
Must have the same dtype as\n * `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction div_<T extends Tensor>(a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'div');\n const $b = convertToTensor(b, 'b', 'div');\n assertTypesMatch($a, $b);\n\n let forwardFunc: (backend: KernelBackend) => Tensor;\n if ($a.dtype === 'int32' && $b.dtype === 'int32') {\n return floorDiv($a, $b);\n } else {\n forwardFunc = (backend: KernelBackend) => backend.realDivide($a, $b);\n }\n\n const outShape =\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n const der = (dy: Tensor) => {\n const derA = () => {\n const res = dy.div($b.toFloat());\n const reduceAxes = broadcast_util.getReductionAxes($a.shape, outShape);\n if (reduceAxes.length > 0) {\n return res.sum(reduceAxes).reshape($a.shape);\n }\n return res;\n };\n const derB = () => {\n let res = dy.mul($a.toFloat());\n const reduceAxes = broadcast_util.getReductionAxes($b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes).reshape($b.shape);\n }\n const tmp = $b.square() as Tensor;\n return res.div(tmp.toFloat()).neg() as Tensor;\n };\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(forwardFunc, {$a, $b}, der) as T;\n}\n\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. Supports broadcasting.\n * The result is rounded with floor function.\n *\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 9, 16]);\n * const b = tf.tensor1d([1, 2, 3, 4]);\n *\n * a.floorDiv(b).print(); // or tf.div(a, b)\n * ```\n *\n * ```js\n * // Broadcast div a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(2);\n *\n * a.floorDiv(b).print(); // or tf.floorDiv(a, b)\n * ```\n *\n * @param a The first tensor as the numerator.\n * @param b The second tensor as the denominator. Must have the same dtype as\n * `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction floorDiv_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'floorDiv');\n const $b = convertToTensor(b, 'b', 'floorDiv');\n assertTypesMatch($a, $b);\n\n const forwardFunc = (backend: KernelBackend) => backend.floorDiv($a, $b);\n const outShape =\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n const der = (dy: Tensor) => {\n const derA = () => {\n const res = dy.div($b.toFloat());\n const reduceAxes = broadcast_util.getReductionAxes($a.shape, outShape);\n if (reduceAxes.length > 0) {\n return res.sum(reduceAxes).reshape($a.shape);\n }\n return res;\n };\n const derB = () => {\n let res = dy.mul($a.toFloat());\n const reduceAxes = broadcast_util.getReductionAxes($b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes).reshape($b.shape);\n }\n const tmp = $b.square() as Tensor;\n return res.div(tmp.toFloat()).neg() as Tensor;\n };\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(forwardFunc, {$a, $b}, der) as T;\n}\n\n/**\n * Divides two `tf.Tensor`s element-wise, A / B. 
Inputs must\n * be the same shape.\n *\n * @param a The first tensor as the numerator for element-wise division.\n * @param b The second tensor as the denominator for element-wise division.\n */\nfunction divStrict_<T extends Tensor>(a: T, b: T): T {\n util.assertShapesMatch(a.shape, b.shape, 'Error in divideStrict: ');\n return a.div(b) as T;\n}\n\n/**\n * Returns the mod of a and b element-wise.\n * `floor(x / y) * y + mod(x, y) = x`\n * Supports broadcasting.\n *\n * We also expose `tf.modStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.mod(b).print(); // or tf.mod(a, b)\n * ```\n *\n * ```js\n * // Broadcast a mod b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.mod(b).print(); // or tf.mod(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction mod_<T extends Tensor>(a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'mod');\n const $b = convertToTensor(b, 'b', 'mod');\n assertTypesMatch($a, $b);\n\n const outShape =\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n const der = (dy: Tensor) => {\n const derA = () => {\n const reduceAxes = broadcast_util.getReductionAxes($a.shape, outShape);\n if (reduceAxes.length > 0) {\n return dy.sum(reduceAxes).reshape($a.shape);\n }\n return dy;\n };\n const derB = () => {\n const res = dy.mul($a.div($b).floor().neg());\n const reduceAxes = broadcast_util.getReductionAxes($b.shape, outShape);\n if (reduceAxes.length > 0) {\n return res.sum(reduceAxes).reshape($b.shape);\n }\n return res;\n };\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(backend => backend.mod($a, $b), {$a, $b}, der) as\n T;\n}\n\n/**\n * Returns the mod of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use mod().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction modStrict_<T extends Tensor>(a: T, b: T): T {\n util.assertShapesMatch(a.shape, b.shape, 'Error in modStrict: ');\n return a.mod(b);\n}\n\n/**\n * Returns the min of a and b (`a < b ? a : b`) element-wise.\n * Supports broadcasting.\n *\n * We also expose `minimumStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.minimum(b).print(); // or tf.minimum(a, b)\n * ```\n *\n * ```js\n * // Broadcast minimum a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.minimum(b).print(); // or tf.minimum(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. 
Must have the same type as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction minimum_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n let $a = convertToTensor(a, 'a', 'minimum');\n let $b = convertToTensor(b, 'b', 'minimum');\n assertTypesMatch($a, $b);\n\n if ($a.dtype === 'bool') {\n $a = $a.toInt();\n }\n if ($b.dtype === 'bool') {\n $b = $b.toInt();\n }\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n const der = (dy: Tensor) => {\n const derA = () => dy.mul($a.lessEqual($b).toFloat());\n const derB = () => dy.mul($a.greater($b).toFloat());\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(\n backend => backend.minimum($a, $b), {$a, $b}, der) as T;\n}\n\n/**\n * Returns the min of a and b (`a < b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use minimum().\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same dtype as `a`.\n */\nfunction minimumStrict_<T extends Tensor>(a: T, b: T): T {\n util.assertShapesMatch(a.shape, b.shape, 'Error in minimumStrict: ');\n return a.minimum(b);\n}\n\n/**\n * Returns the max of a and b (`a > b ? a : b`) element-wise.\n * Supports broadcasting.\n *\n * We also expose `tf.maximumStrict` which has the same signature as this op and\n * asserts that `a` and `b` are the same shape (does not broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.maximum(b).print(); // or tf.maximum(a, b)\n * ```\n *\n * ```js\n * // Broadcast maximum a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.maximum(b).print(); // or tf.maximum(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction maximum_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n let $a = convertToTensor(a, 'a', 'maximum');\n let $b = convertToTensor(b, 'b', 'maximum');\n assertTypesMatch($a, $b);\n\n if ($a.dtype === 'bool') {\n $a = $a.toInt();\n }\n if ($b.dtype === 'bool') {\n $b = $b.toInt();\n }\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n const der = (dy: Tensor) => {\n const derA = () => dy.mul($a.greaterEqual($b).toFloat());\n const derB = () => dy.mul($a.less($b).toFloat());\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(\n backend => backend.maximum($a, $b), {$a, $b}, der) as T;\n}\n\n/**\n * Returns the max of a and b (`a > b ? a : b`) element-wise. Inputs must\n * be the same shape. For broadcasting support, use maximum().\n *\n * @param a The first tensor.\n * @param b The second tensor. 
Must have the same dtype as `a`.\n */\nfunction maximumStrict_<T extends Tensor>(a: T, b: T): T {\n util.assertShapesMatch(a.shape, b.shape, 'Error in maximumStrict: ');\n return a.maximum(b);\n}\n\n/**\n * Returns (a - b) * (a - b) element-wise.\n * Supports broadcasting.\n *\n * We also expose `tf.squaredDifferenceStrict` which has the same signature as\n * this op and asserts that `a` and `b` are the same shape (does not\n * broadcast).\n *\n * ```js\n * const a = tf.tensor1d([1, 4, 3, 16]);\n * const b = tf.tensor1d([1, 2, 9, 4]);\n *\n * a.squaredDifference(b).print(); // or tf.squaredDifference(a, b)\n * ```\n *\n * ```js\n * // Broadcast squared difference a with b.\n * const a = tf.tensor1d([2, 4, 6, 8]);\n * const b = tf.scalar(5);\n *\n * a.squaredDifference(b).print(); // or tf.squaredDifference(a, b)\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Arithmetic'} */\nfunction squaredDifference_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'squaredDifference');\n const $b = convertToTensor(b, 'b', 'squaredDifference');\n assertTypesMatch($a, $b);\n\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n const der = (dy: Tensor) => {\n const two = scalar(2);\n const derA = () => dy.mul($a.sub($b).mul(two));\n const derB = () => dy.mul($b.sub($a).mul(two));\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(\n backend => backend.squaredDifference($a, $b), {$a, $b}, der) as T;\n}\n\n/**\n * Returns (a - b) * (a - b) element-wise.\n *\n * Inputs must be the same shape. For broadcasting support, use\n * `tf.squaredDifference` instead.\n *\n * @param a The first tensor.\n * @param b The second tensor. Must have the same type as `a`.\n */\nfunction squaredDifferenceStrict_<T extends Tensor>(a: T, b: T): T {\n util.assertShapesMatch(\n a.shape, b.shape, 'Error in squaredDifferenceStrict: ');\n return a.squaredDifference(b);\n}\n\n/**\n * Computes arctangent of `tf.Tensor`s a / b element-wise: `atan2(a, b)`.\n * Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([1.0, 1.0, -1.0, .7]);\n * const b = tf.tensor1d([2.0, 13.0, 3.5, .21]);\n *\n * tf.atan2(a, b).print()\n * ```\n *\n * @param a The first tensor.\n * @param b The second tensor. 
Must have the same dtype as `a`.\n *\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction atan2_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'atan2');\n const $b = convertToTensor(b, 'b', 'atan2');\n assertTypesMatch($a, $b);\n\n const outShape =\n broadcast_util.assertAndGetBroadcastShape($a.shape, $b.shape);\n\n const der = (dy: Tensor) => {\n const derA = () => {\n const d = add($a.square(), $b.square());\n let res = dy.mul($b.div(d));\n const reduceAxes = broadcast_util.getReductionAxes($a.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes);\n }\n return res.reshape($a.shape);\n };\n const derB = () => {\n const d = add($a.square(), $b.square()) as T;\n let res = neg(dy.mul($a.div(d)));\n const reduceAxes = broadcast_util.getReductionAxes($b.shape, outShape);\n if (reduceAxes.length > 0) {\n res = res.sum(reduceAxes);\n }\n return res.reshape($b.shape);\n };\n return {$a: derA, $b: derB};\n };\n return ENV.engine.runKernel(\n backend => backend.atan2($a, $b), {$a, $b}, der) as T;\n}\n\nexport const add = op({add_});\nexport const addN = op({addN_});\nexport const addStrict = op({addStrict_});\nexport const atan2 = op({atan2_});\nexport const div = op({div_});\nexport const divStrict = op({divStrict_});\nexport const floorDiv = op({floorDiv_});\nexport const maximum = op({maximum_});\nexport const maximumStrict = op({maximumStrict_});\nexport const minimum = op({minimum_});\nexport const minimumStrict = op({minimumStrict_});\nexport const mod = op({mod_});\nexport const modStrict = op({modStrict_});\nexport const mul = op({mul_});\nexport const mulStrict = op({mulStrict_});\nexport const pow = op({pow_});\nexport const powStrict = op({powStrict_});\nexport const squaredDifference = op({squaredDifference_});\nexport const squaredDifferenceStrict = op({squaredDifferenceStrict_});\nexport const sub = op({sub_});\nexport const subStrict = op({subStrict_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {whereImpl} from '../kernels/where_impl';\nimport {Tensor, Tensor2D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {assert, assertShapesMatch} from '../util';\nimport {assertAndGetBroadcastShape} from './broadcast_util';\nimport {op} from './operation';\nimport {zerosLike} from './tensor_ops';\n\n/**\n * Returns the truth value of `NOT x` element-wise.\n *\n * ```js\n * const a = tf.tensor1d([false, true], 'bool');\n *\n * a.logicalNot().print();\n * ```\n *\n * @param x The input tensor. 
Must be of dtype 'bool'.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction logicalNot_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'logicalNot', 'bool');\n assert($x.dtype === 'bool', 'Error Array must be of type bool.');\n\n return ENV.engine.runKernel(backend => backend.logicalNot($x), {$x});\n}\n\n/**\n * Returns the truth value of `a AND b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalAnd(b).print();\n * ```\n *\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction logicalAnd_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'logicalAnd', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalAnd', 'bool');\n assert(\n $a.dtype === 'bool' && $b.dtype === 'bool',\n 'Error Array must be of type bool.');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n\n return ENV.engine.runKernel(\n backend => backend.logicalAnd($a, $b), {$a, $b}) as T;\n}\n\n/**\n * Returns the truth value of `a OR b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalOr(b).print();\n * ```\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction logicalOr_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'logicalOr', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalOr', 'bool');\n assert(\n $a.dtype === 'bool' && $b.dtype === 'bool',\n 'Error Array must be of type bool.');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n\n return ENV.engine.runKernel(backend => backend.logicalOr($a, $b), {$a, $b}) as\n T;\n}\n\n/**\n * Returns the truth value of `a XOR b` element-wise. Supports broadcasting.\n *\n * ```js\n * const a = tf.tensor1d([false, false, true, true], 'bool');\n * const b = tf.tensor1d([false, true, false, true], 'bool');\n *\n * a.logicalXor(b).print();\n * ```\n *\n * @param a The first input tensor. Must be of dtype bool.\n * @param b The second input tensor. Must be of dtype bool.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction logicalXor_<T extends Tensor>(\n a: Tensor|TensorLike, b: Tensor|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'logicalXor', 'bool');\n const $b = convertToTensor(b, 'b', 'logicalXor', 'bool');\n assert(\n $a.dtype === 'bool' && $b.dtype === 'bool',\n 'Error Array must be of type bool.');\n assertAndGetBroadcastShape($a.shape, $b.shape);\n\n // x ^ y = (x | y) & ~(x & y)\n return logicalOr(a, b).logicalAnd(logicalAnd(a, b).logicalNot()) as T;\n}\n\n/**\n * Returns the elements, either `a` or `b` depending on the `condition`.\n *\n * If the condition is true, select from `a`, otherwise select from `b`.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * const a = tf.tensor1d([1 , 2, 3]);\n * const b = tf.tensor1d([-1, -2, -3]);\n *\n * a.where(cond, b).print();\n * ```\n *\n * @param condition The input condition. 
Must be of dtype bool.\n * @param a If `condition` is rank 1, `a` may have a higher rank but\n * its first dimension must match the size of `condition`.\n * @param b A tensor with the same shape and type as `a`.\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nfunction where_<T extends Tensor>(\n condition: Tensor|TensorLike, a: T|TensorLike, b: T|TensorLike): T {\n const $a = convertToTensor(a, 'a', 'where');\n const $b = convertToTensor(b, 'b', 'where');\n const $condition = convertToTensor(condition, 'condition', 'where', 'bool');\n\n assert($condition.dtype === 'bool', 'Error Condition must be of type bool.');\n assertShapesMatch($a.shape, $b.shape, 'Error in where: ');\n\n if ($condition.rank === 1) {\n // If condition rank is 1, then the first dimension must match the size of\n // condition.\n assert(\n $condition.shape[0] === $a.shape[0],\n 'The first dimension of `a` must match the size of `condition`.');\n } else {\n // A must have the same shape as condition.\n assertShapesMatch($condition.shape, $b.shape, 'Error in where: ');\n }\n\n // TODO(julianoks): Return null for condition gradient\n // when backprop supports it.\n const grad = (dy: T) => ({\n $condition: () => zerosLike($condition),\n $a: () => dy.mul($condition.cast($a.dtype)) as T,\n $b: () => dy.mul($condition.logicalNot().cast($b.dtype)) as T\n });\n\n return ENV.engine.runKernel(\n backend => backend.select($condition, $a, $b),\n {$condition, $a, $b}, grad) as T;\n}\n\n/**\n * Returns the coordinates of true elements of condition.\n *\n * The coordinates are returned in a 2-D tensor where the first dimension (rows)\n * represents the number of true elements, and the second dimension (columns)\n * represents the coordinates of the true elements. Keep in mind, the shape of\n * the output tensor can vary depending on how many true values there are in\n * input. Indices are output in row-major order. The resulting tensor has the\n * shape `[numTrueElems, condition.rank]`.\n *\n * This is analogous to calling the python `tf.where(cond)` without an x or y.\n *\n * ```js\n * const cond = tf.tensor1d([false, false, true], 'bool');\n * tf.whereAsync(cond).then(result => result.print());\n * ```\n */\n/** @doc {heading: 'Operations', subheading: 'Logical'} */\nasync function whereAsync_(condition: Tensor|TensorLike): Promise<Tensor2D> {\n const $condition = convertToTensor(condition, 'condition', 'where', 'bool');\n assert($condition.dtype === 'bool', 'Condition must be of type bool.');\n const vals = await $condition.data();\n const res = whereImpl($condition.shape, vals);\n if (condition !== $condition) {\n $condition.dispose();\n }\n return res;\n}\n\nexport const logicalAnd = op({logicalAnd_});\nexport const logicalNot = op({logicalNot_});\nexport const logicalOr = op({logicalOr_});\nexport const logicalXor = op({logicalXor_});\nexport const where = op({where_});\nexport const whereAsync = whereAsync_;\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {maximum, minimum} from './binary_ops';\nimport {where} from './logical_ops';\nimport {op} from './operation';\nimport {SELU_SCALE, SELU_SCALEALPHA} from './selu_util';\nimport {scalar} from './tensor_ops';\n\n/**\n * Computes rectified linear element-wise: `max(x, 0)`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.relu().print(); // or tf.relu(x)\n * ```\n * @param x The input tensor. If the dtype is `bool`, the output dtype will be\n * `int32'.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction relu_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'relu');\n\n if ($x.dtype === 'bool') {\n return $x.toInt();\n }\n const grad = (dy: T) => {\n const stepRes = $x.step();\n return {$x: () => dy.mulStrict(stepRes.toFloat())};\n };\n return ENV.engine.runKernel(backend => backend.relu($x), {$x}, grad);\n}\n\n/**\n * Computes exponential linear element-wise: `x > 0 ? e ^ x - 1 : 0`.\n *\n * ```js\n * const x = tf.tensor1d([-1, 1, -3, 2]);\n *\n * x.elu().print(); // or tf.elu(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction elu_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'elu');\n\n const grad = (dy: T, saved: Tensor[]) => {\n const [y] = saved;\n return {\n $x: () =>\n ENV.engine.runKernel(backend => backend.eluDer(dy, y), {dy, y}) as T\n };\n };\n return ENV.engine.runKernel(\n (backend, save) => save(backend.elu($x)), {$x}, grad);\n}\n\n/**\n * Computes scaled exponential linear element-wise.\n *\n * `x < 0 ? 
scale * alpha * (exp(x) - 1) : x`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.selu().print(); // or tf.selu(x)\n * ```\n * @param x The input tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction selu_<T extends Tensor>(x: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'selu');\n\n const grad = (dy: T) => {\n return {\n $x: () => {\n const mask = $x.greater(scalar(0));\n\n const scaleAlpha = scalar(SELU_SCALEALPHA);\n const scale = scalar(SELU_SCALE);\n\n const greaterThanZeroDer = dy.mul(scale);\n const lessEqualZeroDer = dy.mul(scaleAlpha).mul($x.toFloat().exp());\n\n return where(mask, greaterThanZeroDer, lessEqualZeroDer) as T;\n }\n };\n };\n return ENV.engine.runKernel(backend => backend.selu($x), {$x}, grad);\n}\n\n/**\n * Computes leaky rectified linear element-wise.\n *\n * See\n * [http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf](\n * http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf)\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n *\n * x.leakyRelu(0.1).print(); // or tf.leakyRelu(x, 0.1)\n * ```\n * @param x The input tensor.\n * @param alpha The scaling factor for negative values, defaults to 0.2.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction leakyRelu_<T extends Tensor>(x: T|TensorLike, alpha = 0.2): T {\n const $x = convertToTensor(x, 'x', 'leakyRelu');\n return maximum(scalar(alpha).mul($x), $x);\n}\n\n/**\n * Computes leaky rectified linear element-wise with parametric alphas.\n *\n * `x < 0 ? alpha * x : f(x) = x`\n *\n * ```js\n * const x = tf.tensor1d([-1, 2, -3, 4]);\n * const alpha = tf.scalar(0.1);\n *\n * x.prelu(alpha).print(); // or tf.prelu(x, alpha)\n * ```\n * @param x The input tensor.\n * @param alpha Scaling factor for negative values.\n */\n/** @doc {heading: 'Operations', subheading: 'Basic math'} */\nfunction prelu_<T extends Tensor>(x: T|TensorLike, alpha: T|TensorLike): T {\n const $x = convertToTensor(x, 'x', 'prelu');\n const $alpha = convertToTensor(alpha, 'alpha', 'prelu');\n\n const zero = scalar(0);\n return maximum(zero, $x).add($alpha.mul(minimum(zero, $x)));\n}\n\nexport const elu = op({elu_});\nexport const leakyRelu = op({leakyRelu_});\nexport const prelu = op({prelu_});\nexport const relu = op({relu_});\nexport const selu = op({selu_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport * as axis_util from './axis_util';\nimport {op} from './operation';\n\n/**\n * Transposes the `tf.Tensor`. 
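[Editor's note] The activation ops exported above (relu, elu, selu, leakyRelu, prelu) differ only in how they treat negative inputs. A short side-by-side sketch, assuming `tf` is the global tfjs namespace; printed values are approximate:

```js
const x = tf.tensor1d([-2, -0.5, 0, 1.5]);

tf.relu(x).print();                   // [0, 0, 0, 1.5]
tf.leakyRelu(x, 0.1).print();         // [-0.2, -0.05, 0, 1.5]
tf.elu(x).print();                    // [-0.86, -0.39, 0, 1.5]  (exp(x) - 1 for x < 0)
// prelu is leakyRelu with a learnable (tensor) alpha; a scalar broadcasts.
tf.prelu(x, tf.scalar(0.25)).print(); // [-0.5, -0.125, 0, 1.5]
```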
Permutes the dimensions according to `perm`.\n *\n * The returned `tf.Tensor`'s dimension `i` will correspond to the input\n * dimension `perm[i]`. If `perm` is not given, it is set to `[n-1...0]`,\n * where `n` is the rank of the input `tf.Tensor`. Hence by default, this\n * operation performs a regular matrix transpose on 2-D input `tf.Tensor`s.\n *\n * ```js\n * const a = tf.tensor2d([1, 2, 3, 4, 5, 6], [2, 3]);\n *\n * a.transpose().print(); // or tf.transpose(a)\n * ```\n *\n * @param x The tensor to transpose.\n * @param perm The permutation of the dimensions of a.\n */\n/** @doc {heading: 'Operations', subheading: 'Matrices'} */\nfunction transpose_<T extends Tensor>(x: T|TensorLike, perm?: number[]): T {\n const $x = convertToTensor(x, 'x', 'transpose');\n\n if (perm == null) {\n perm = $x.shape.map((s, i) => i).reverse();\n }\n util.assert(\n $x.rank === perm.length,\n `Error in transpose: rank of input ${$x.rank} ` +\n `must match length of perm ${perm}.`);\n perm.forEach(axis => {\n util.assert(\n axis >= 0 && axis < $x.rank,\n `All entries in 'perm' must be between 0 and ${$x.rank - 1}` +\n ` but got ${perm}`);\n });\n\n if ($x.rank <= 1) {\n return $x.clone();\n }\n\n const der = (dy: T) => {\n const undoPerm = axis_util.getUndoAxesPermutation(perm);\n return {$x: () => dy.transpose(undoPerm)};\n };\n return ENV.engine.runKernel(\n backend => backend.transpose($x, perm), {$x}, der);\n}\n\nexport const transpose = op({transpose_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor3D, Tensor4D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport {op} from './operation';\n\n/**\n * Normalizes the activation of a local neighborhood across or within\n * channels.\n *\n * @param x The input tensor. 
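[Editor's note] To make the `perm` semantics of `transpose_` concrete, a small sketch assuming `tf` is the global tfjs namespace; output dimension `i` takes its size from input dimension `perm[i]`:

```js
const t = tf.tensor3d([1, 2, 3, 4, 5, 6, 7, 8], [2, 2, 2]);
// Swap the first and last axes: element [i, j, k] moves to [k, j, i].
tf.transpose(t, [2, 1, 0]).print();

// With perm omitted, the axes are reversed, which for rank 2 is the
// ordinary matrix transpose.
const m = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);
tf.transpose(m).print(); // [[1, 4], [2, 5], [3, 6]]
```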
The 4-D input tensor is treated as a 3-D array\n * of 1D vectors (along the last dimension), and each vector is\n * normalized independently.\n * @param depthRadius The number of adjacent channels in the 1D normalization\n * window.\n * @param bias A constant bias term for the basis.\n * @param alpha A scale factor, usually positive.\n * @param beta An exponent.\n */\n/** @doc {heading: 'Operations', subheading: 'Normalization'} */\nfunction localResponseNormalization_<T extends Tensor3D|Tensor4D>(\n x: T|TensorLike, depthRadius = 5, bias = 1, alpha = 1, beta = 0.5): T {\n const $x = convertToTensor(x, 'x', 'localResponseNormalization');\n util.assert(\n $x.rank === 4 || $x.rank === 3,\n `Error in localResponseNormalization: x must be rank 3 or 4 but got\n rank ${$x.rank}.`);\n util.assert(\n util.isInt(depthRadius),\n `Error in localResponseNormalization: depthRadius must be an integer\n but got depthRadius ${depthRadius}.`);\n\n let x4D = $x as Tensor4D;\n let reshapedTo4D = false;\n if ($x.rank === 3) {\n reshapedTo4D = true;\n x4D = $x.as4D(1, $x.shape[0], $x.shape[1], $x.shape[2]);\n }\n const backward = (dy: Tensor4D, saved: Tensor[]) => {\n const [outputImage] = saved;\n return {\n x4D: () => ENV.engine.runKernel(\n backend => backend.LRNGrad(\n dy, x4D, outputImage as Tensor4D, depthRadius, bias, alpha, beta),\n {})\n };\n };\n const res = ENV.engine.runKernel(\n (backend, save) => save(backend.localResponseNormalization4D(\n x4D, depthRadius, bias, alpha, beta)),\n {x4D}, backward);\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n } else {\n return res as T;\n }\n}\n\nexport const localResponseNormalization = op({localResponseNormalization_});","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as axis_util from './axis_util';\nimport {op} from './operation';\nimport {scalar} from './tensor_ops';\n\n/**\n * Computes the norm of scalar, vectors, and matrices.\n * This function can compute several different vector norms (the 1-norm, the\n * Euclidean or 2-norm, the inf-norm, and in general the p-norm for p > 0)\n * and matrix norms (Frobenius, 1-norm, and inf-norm).\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n *\n * x.norm().print(); // or tf.norm(x)\n * ```\n *\n * @param x The input array.\n * @param ord Optional. Order of the norm. 
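[Editor's note] The `localResponseNormalization_` doc above has no runnable example. A minimal sketch, assuming `tf` is the global tfjs namespace and an NHWC layout where the last dimension holds the channels being normalized:

```js
// A 1x2x2x4 NHWC tensor: each spatial position has 4 channels.
const x = tf.randomNormal([1, 2, 2, 4]);

// Each channel value is divided by a function of the sum of squares of its
// neighbors within depthRadius channels on either side.
const y = tf.localResponseNormalization(
    x, 2 /* depthRadius */, 1 /* bias */, 1 /* alpha */, 0.75 /* beta */);
y.print();
```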
Supported norm types are\n * following:\n *\n * | ord | norm for matrices | norm for vectors\n * |------------|---------------------------|---------------------\n * |'euclidean' |Frobenius norm |2-norm\n * |'fro' |Frobenius norm\t |\n * |Infinity |max(sum(abs(x), axis=1)) |max(abs(x))\n * |-Infinity |min(sum(abs(x), axis=1)) |min(abs(x))\n * |1 |max(sum(abs(x), axis=0)) |sum(abs(x))\n * |2 | |sum(abs(x)^2)^1/2*\n *\n * @param axis Optional. If axis is null (the default), the input is\n * considered a vector and a single vector norm is computed over the entire\n * set of values in the Tensor, i.e. norm(x, ord) is equivalent\n * to norm(x.reshape([-1]), ord). If axis is a integer, the input\n * is considered a batch of vectors, and axis determines the axis in x\n * over which to compute vector norms. If axis is a 2-tuple of integer it is\n * considered a batch of matrices and axis determines the axes in NDArray\n * over which to compute a matrix norm.\n * @param keepDims Optional. If true, the norm have the same dimensionality\n * as the input.\n */\n/** @doc {heading: 'Operations', subheading: 'Matrices'} */\nfunction norm_(\n x: Tensor|TensorLike, ord: number|'euclidean'|'fro' = 'euclidean',\n axis: number|number[] = null, keepDims = false): Tensor {\n x = convertToTensor(x, 'x', 'norm');\n\n const norm = normImpl(x, ord, axis);\n let keepDimsShape = norm.shape;\n if (keepDims) {\n const axes = axis_util.parseAxisParam(axis, x.shape);\n keepDimsShape = axis_util.expandShapeToKeepDim(norm.shape, axes);\n }\n return norm.reshape(keepDimsShape);\n}\n\nfunction normImpl(\n x: Tensor, p: number|string, axis: number|number[] = null): Tensor {\n if (x.rank === 0) {\n return x.abs();\n }\n\n // consider vector when no axis is specified\n if (x.rank !== 1 && axis === null) {\n return normImpl(x.reshape([-1]), p, axis);\n }\n\n // vector\n if (x.rank === 1 || typeof axis === 'number' ||\n axis instanceof Array && axis.length === 1) {\n if (p === 1) {\n return x.abs().sum(axis);\n }\n if (p === Infinity) {\n return x.abs().max(axis);\n }\n if (p === -Infinity) {\n return x.abs().min(axis);\n }\n if (p === 'euclidean' || p === 2) {\n // norm(x, 2) = sum(abs(xi) ^ 2) ^ 1/2\n return x.abs().pow(scalar(2, 'int32')).sum(axis).sqrt() as Tensor;\n }\n\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n\n // matrix (assumption axis[0] < axis[1])\n if (axis instanceof Array && axis.length === 2) {\n if (p === 1) {\n return x.abs().sum(axis[0]).max(axis[1] - 1);\n }\n if (p === Infinity) {\n return x.abs().sum(axis[1]).max(axis[0]);\n }\n if (p === -Infinity) {\n return x.abs().sum(axis[1]).min(axis[0]);\n }\n if (p === 'fro' || p === 'euclidean') {\n // norm(x) = sqrt(sum(pow(x, 2)))\n return x.square().sum(axis).sqrt();\n }\n\n throw new Error(`Error in norm: invalid ord value: ${p}`);\n }\n\n throw new Error(`Error in norm: invalid axis: ${axis}`);\n}\n\nexport const norm = op({norm_});\n","/**\n * @license\n * Copyright 2018 Google Inc. 
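[Editor's note] A sketch illustrating the `ord`/`axis` table above, assuming `tf` is the global tfjs namespace. Note that, per `normImpl` above, `'fro'` is only handled in the matrix branch, so it needs an explicit two-element `axis`:

```js
const v = tf.tensor1d([3, -4]);
v.norm().print();          // 5   (Euclidean / 2-norm)
v.norm(1).print();         // 7   (sum of absolute values)
v.norm(Infinity).print();  // 4   (max absolute value)

const m = tf.tensor2d([[1, 2], [3, 4]]);
m.norm('fro', [0, 1]).print();  // Frobenius norm, sqrt(30) ≈ 5.48
m.norm(1, [0, 1]).print();      // max column abs-sum = 6
m.norm(2, 1, true).print();     // per-row 2-norms, keepDims -> shape [2, 1]
```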
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor, Tensor1D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {assert, isInt} from '../util';\nimport {expandDims} from './array_ops';\nimport {getUndoAxesPermutation, parseAxisParam} from './axis_util';\nimport {maximum} from './binary_ops';\nimport {greaterEqual} from './compare';\nimport {logicalAnd, where} from './logical_ops';\nimport {op} from './operation';\nimport {ones, scalar, zerosLike} from './tensor_ops';\n\n/**\n * Computes the sum along segments of a `tf.Tensor`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const segmentIds = tf.tensor1d([1, 2, 0, 1], 'int32');\n * const numSegments = 3;\n *\n * x.unsortedSegmentSum(segmentIds, numSegments).print()\n * //or tf.unsortedSegmentSum(x, segmentIds, numSegments)\n * ```\n * @param x The `tf.Tensor` that will be summed along its segments.\n * @param segmentIds A `tf.Tensor1D` whose rank is equal to the rank of `x`'s\n * dimension along the `axis`. Maps each element of `x` to a segment.\n * @param numSegments The number of distinct `segmentIds`.\n */\n/** @doc {heading: 'Operations', subheading: 'Segment'} */\nfunction unsortedSegmentSum_<T extends Tensor>(\n x: T|TensorLike, segmentIds: Tensor1D|TensorLike, numSegments: number): T {\n const $x = convertToTensor(x, 'x', 'unsortedSegmentSum');\n const $segmentIds =\n convertToTensor(segmentIds, 'segmentIds', 'unsortedSegmentSum', 'int32');\n assert($segmentIds.dtype === 'int32', 'segmentIds must be of dtype `int32`');\n assert(isInt(numSegments), 'numSegments must be of dtype int');\n\n const gradFunc = (dy: T) => {\n const derX = () => {\n return gatherDropNegatives(dy, $segmentIds);\n };\n return {$x: derX};\n };\n return ENV.engine.runKernel(\n backend =>\n backend.unsortedSegmentSum($x, $segmentIds, numSegments),\n {$x}, gradFunc) as T;\n}\n\n/**\n * Gather slices from tensor `x`'s axis `axis` according to `indices`.\n *\n * ```js\n * const x = tf.tensor1d([1, 2, 3, 4]);\n * const indices = tf.tensor1d([1, 3, 3], 'int32');\n *\n * x.gather(indices).print();\n * ```\n *\n * ```js\n * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const indices = tf.tensor1d([1, 1, 0], 'int32');\n *\n * x.gather(indices).print();\n * ```\n * @param x The input tensor whose slices to be gathered.\n * @param indices The indices of the values to extract.\n * @param axis The axis over which to select values. 
Defaults to 0.\n */\n/** @doc {heading: 'Tensors', subheading: 'Slicing and Joining'} */\nfunction gather_<T extends Tensor>(\n x: T|TensorLike, indices: Tensor1D|TensorLike, axis = 0): T {\n const $x = convertToTensor(x, 'x', 'gather');\n const $indices = convertToTensor(indices, 'indices', 'gather', 'int32');\n\n assert($indices.dtype === 'int32', 'Indices must be of dtype `int32`');\n axis = parseAxisParam(axis, $x.shape)[0];\n const grad = (dy: T) => {\n const derX = () => {\n if (axis === 0) {\n return unsortedSegmentSum(dy, $indices, $x.shape[axis]);\n }\n const paramsShape = $x.shape;\n const indicesSize = $indices.size;\n\n const outerShape = paramsShape.slice(0, axis);\n const outerDims = outerShape.length;\n const innerShape = paramsShape.slice(axis, paramsShape.length).slice(1);\n const innerDims = innerShape.length;\n\n const outerAxesIndices = arrayRange(0, outerDims);\n const innerAxesIndices =\n arrayRange(outerDims + 1, outerDims + 1 + innerDims);\n\n const valuesShape = arrayConcat([outerShape, [indicesSize], innerShape]);\n\n const values = dy.reshape(valuesShape);\n const reshapedIndices = $indices.reshape([indicesSize]);\n\n const transposeDims =\n arrayConcat([[outerDims], outerAxesIndices, innerAxesIndices]);\n const valuesTranspose = values.transpose(transposeDims);\n\n let paramsGrad = unsortedSegmentSum(\n valuesTranspose, reshapedIndices as Tensor1D, $x.shape[axis]);\n\n const invertTransposeDims = getUndoAxesPermutation(transposeDims);\n paramsGrad = paramsGrad.transpose(invertTransposeDims);\n\n return paramsGrad as T;\n };\n return {$x: derX};\n };\n return ENV.engine.runKernel(\n backend => backend.gather($x, $indices as Tensor1D, axis), {$x},\n grad) as T;\n}\n\nfunction arrayRange(start: number, stop: number): number[] {\n const result = [];\n for (let i = start; i < stop; ++i) {\n result.push(i);\n }\n return result;\n}\n\nfunction arrayConcat(arrays: number[][]): number[] {\n const result = [];\n for (let i = 0; i < arrays.length; ++i) {\n for (let j = 0; j < arrays[i].length; ++j) {\n result.push(arrays[i][j]);\n }\n }\n return result;\n}\n\nfunction gatherDropNegatives<T extends Tensor>(x: T, indices: Tensor1D) {\n // Helper function for unsorted segment ops. Gathers params for\n // positive segment ids and gathers 0 for inputs with negative segment id.\n // Mirrors _GatherDropNegatives from tensorflow/python/ops/math_grad.py\n const zeroClippedIndices = maximum(indices, zerosLike(indices));\n const gathered = gather(x, zeroClippedIndices as Tensor1D);\n let isPositive = greaterEqual(indices, scalar(0, 'int32'));\n const numIters = gathered.rank - isPositive.rank;\n for (let i = 0; i < numIters; ++i) {\n isPositive = expandDims(isPositive, i + 1);\n }\n isPositive = logicalAnd(isPositive, ones(gathered.shape, 'bool'));\n const zeroSlice = zerosLike(gathered);\n return where(isPositive, gathered, zeroSlice);\n}\n\nexport const gather = op({gather_});\nexport const unsortedSegmentSum = op({unsortedSegmentSum_});\n","/**\n * @license\n * Copyright 2018 Google Inc. 
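[Editor's note] The JSDoc examples for `gather_` above only use the default axis. A small sketch of gathering along a non-zero axis, assuming `tf` is the global tfjs namespace:

```js
const x = tf.tensor2d([[1, 2, 3], [4, 5, 6]]);

// Axis 0 (default): reorder / repeat rows.
x.gather(tf.tensor1d([1, 0], 'int32')).print();   // [[4, 5, 6], [1, 2, 3]]

// Axis 1: pick columns 2, 0, 2 from every row.
const cols = tf.tensor1d([2, 0, 2], 'int32');
x.gather(cols, 1).print();                        // [[3, 1, 3], [6, 4, 6]]
```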
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Scalar, Tensor1D, Tensor2D} from '../tensor';\nimport {convertToTensor, convertToTensorArray} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {op} from './operation';\n\n/**\n * @docalias (data: Tensor2D, c: Tensor2D, h: Tensor2D): [Tensor2D, Tensor2D]\n */\nexport type LSTMCellFunc = {\n (data: Tensor2D, c: Tensor2D, h: Tensor2D): [Tensor2D, Tensor2D];\n};\n\n/**\n * Computes the next states and outputs of a stack of LSTMCells.\n *\n * Each cell output is used as input to the next cell.\n *\n * Returns `[cellState, cellOutput]`.\n *\n * Derived from tf.contrib.rn.MultiRNNCell.\n *\n * @param lstmCells Array of LSTMCell functions.\n * @param data The input to the cell.\n * @param c Array of previous cell states.\n * @param h Array of previous cell outputs.\n */\n/** @doc {heading: 'Operations', subheading: 'RNN'} */\nfunction multiRNNCell_(\n lstmCells: LSTMCellFunc[], data: Tensor2D|TensorLike,\n c: Tensor2D[]|TensorLike[],\n h: Tensor2D[]|TensorLike[]): [Tensor2D[], Tensor2D[]] {\n const $data = convertToTensor(data, 'data', 'multiRNNCell');\n const $c = convertToTensorArray(c, 'c', 'multiRNNCell');\n const $h = convertToTensorArray(h, 'h', 'multiRNNCell');\n\n let input = $data;\n const newStates = [];\n for (let i = 0; i < lstmCells.length; i++) {\n const output = lstmCells[i](input, $c[i], $h[i]);\n newStates.push(output[0]);\n newStates.push(output[1]);\n input = output[1];\n }\n const newC: Tensor2D[] = [];\n const newH: Tensor2D[] = [];\n for (let i = 0; i < newStates.length; i += 2) {\n newC.push(newStates[i]);\n newH.push(newStates[i + 1]);\n }\n return [newC, newH];\n}\n\n/**\n * Computes the next state and output of a BasicLSTMCell.\n *\n * Returns `[newC, newH]`.\n *\n * Derived from tf.contrib.rnn.BasicLSTMCell.\n *\n * @param forgetBias Forget bias for the cell.\n * @param lstmKernel The weights for the cell.\n * @param lstmBias The bias for the cell.\n * @param data The input to the cell.\n * @param c Previous cell state.\n * @param h Previous cell output.\n */\n/** @doc {heading: 'Operations', subheading: 'RNN'} */\nfunction basicLSTMCell_(\n forgetBias: Scalar|TensorLike, lstmKernel: Tensor2D|TensorLike,\n lstmBias: Tensor1D|TensorLike, data: Tensor2D|TensorLike,\n c: Tensor2D|TensorLike, h: Tensor2D|TensorLike): [Tensor2D, Tensor2D] {\n const $forgetBias =\n convertToTensor(forgetBias, 'forgetBias', 'basicLSTMCell');\n const $lstmKernel =\n convertToTensor(lstmKernel, 'lstmKernel', 'basicLSTMCell');\n const $lstmBias = convertToTensor(lstmBias, 'lstmBias', 'basicLSTMCell');\n const $data = convertToTensor(data, 'data', 'basicLSTMCell');\n const $c = convertToTensor(c, 'c', 'basicLSTMCell');\n const $h = convertToTensor(h, 'h', 'basicLSTMCell');\n\n const combined = $data.concat($h, 1);\n const weighted = combined.matMul($lstmKernel);\n const res = weighted.add($lstmBias) as 
Tensor2D;\n\n // i = input_gate, j = new_input, f = forget_gate, o = output_gate\n const batchSize = res.shape[0];\n const sliceCols = res.shape[1] / 4;\n const sliceSize: [number, number] = [batchSize, sliceCols];\n const i = res.slice([0, 0], sliceSize);\n const j = res.slice([0, sliceCols], sliceSize);\n const f = res.slice([0, sliceCols * 2], sliceSize);\n const o = res.slice([0, sliceCols * 3], sliceSize);\n\n const newC = i.sigmoid().mulStrict(j.tanh()).addStrict(\n $c.mulStrict($forgetBias.add(f).sigmoid() as Tensor2D));\n const newH = newC.tanh().mulStrict(o.sigmoid());\n return [newC, newH];\n}\n\nexport const basicLSTMCell = op({basicLSTMCell_});\nexport const multiRNNCell = op({multiRNNCell_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Scalar, Tensor} from '../tensor';\nimport {assertTypesMatch} from '../tensor_util';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport {pow} from './binary_ops';\nimport {op} from './operation';\nimport {scalar} from './tensor_ops';\n\n/**\n * Compute the moving average of a variable.\n *\n * Without zeroDebias, the moving average operation is defined by:\n * `v += delta`\n * where\n * `delta = (1 - decay) * (x - v)`\n *\n * With zeroDebias (default), the `delta` term is scaled to debias the\n * effect of the (assumed) zero-initialization of `v`.\n * `delta /= (1 - decay ^ step)`\n *\n * For more details on the zero-debiasing algorithm, see:\n * https://arxiv.org/abs/1412.6980\n *\n * Note that this function is completely stateless and does not keep track of\n * step count. The step count needs to be maintained by the caller and passed\n * in as `step`.\n *\n * @param v The current moving average value.\n * @param x New input value, must have the same shape and dtype as `v`.\n * @param decay The decay factor. 
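[Editor's note] `basicLSTMCell_` above has no usage example, and its shape requirements follow from the slicing into four gates: the kernel maps `[input, prevH]` to `4 * hiddenSize` pre-activations. A minimal sketch with random weights, assuming `tf` is the global tfjs namespace; `batchSize`, `inputSize`, and `hiddenSize` are illustrative names, not library symbols:

```js
const batchSize = 1, inputSize = 3, hiddenSize = 2;

const forgetBias = tf.scalar(1.0);
// Kernel: [inputSize + hiddenSize, 4 * hiddenSize]; bias: [4 * hiddenSize].
const lstmKernel = tf.randomNormal([inputSize + hiddenSize, 4 * hiddenSize]);
const lstmBias = tf.zeros([4 * hiddenSize]);

const data = tf.randomNormal([batchSize, inputSize]);
const c = tf.zeros([batchSize, hiddenSize]);   // previous cell state
const h = tf.zeros([batchSize, hiddenSize]);   // previous cell output

const [newC, newH] = tf.basicLSTMCell(forgetBias, lstmKernel, lstmBias, data, c, h);
newC.print();
newH.print();
```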
Typical values are 0.95 and 0.99.\n * @param step Step count.\n * @param zeroDebias: Whether zeroDebias is to be performed (default: `true`).\n * @returns The new moving average value.\n */\n/** @doc {heading: 'Operations', subheading: 'Moving Average'} */\nfunction movingAverage_<T extends Tensor>(\n v: T|TensorLike, x: T|TensorLike, decay: number|Scalar,\n step?: number|Scalar, zeroDebias = true): T {\n const $v = convertToTensor(v, 'v', 'movingAverage');\n const $x = convertToTensor(x, 'x', 'movingAverage');\n const $decay = convertToTensor(decay, 'decay', 'movingAverage');\n\n assertTypesMatch($v, $x);\n util.assert(\n util.arraysEqual($v.shape, $x.shape), 'Shape mismatch in v and x');\n\n const one = scalar(1);\n const oneMinusDecay = one.sub($decay);\n\n let update = $x.sub($v).mul(oneMinusDecay);\n if (zeroDebias) {\n util.assert(step != null, 'When using zeroDebias: true, step is required.');\n const $step = convertToTensor(step, 'step', 'movingAverage');\n update = update.div(one.sub(pow($decay, $step)));\n }\n return $v.add(update);\n}\n\nexport const movingAverage = op({movingAverage_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {op} from './operation';\n\n/**\n * Extracts a strided slice of a tensor.\n *\n * Roughly speaking, this op extracts a slice of size (end-begin)/stride from\n * the given input_ tensor. Starting at the location specified by begin the\n * slice continues by adding stride to the index until all dimensions are not\n * less than end. Note that a stride can be negative, which causes a reverse\n * slice.\n *\n * ```js\n * t = tf.tensor3d([1, 1, 1 ,2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6],\n * [3, 2, 3]);\n * t.stridedSlice([1, 0, 0], [2, 1, 3], [1, 1, 1]).print() // [[[3, 3, 3]]]\n * t.stridedSlice([1, 0, 0], [2, 2, 3], [1, 1, 1]).print() // [[[3, 3, 3],\n * // [4, 4, 4]]]\n * t.stridedSlice([1, -1, 0], [2, -3, 3], [1, -1, 1]).print() // [[[4, 4, 4],\n * // [3, 3, 3]]]\n * ```\n *\n * @param x The tensor to stride slice.\n * @param begin The coordinates to start the slice from.\n * @param end: The coordinates to end the slice at.\n * @param strides: The size of the slice.\n * @param beginMask: If the ith bit of begin_mask is set, begin[i] is ignored\n * and the fullest possible range in that dimension is used instead.\n * @param endMask: If the ith bit of end_mask is set, end[i] is ignored\n * and the fullest possible range in that dimension is used instead.\n * @param shrinkAxisMask: a bitmask where bit i implies that\n * the ith specification should shrink the dimensionality. 
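[Editor's note] `movingAverage_` above has no example, and the effect of `zeroDebias` is easiest to see on the first step. A sketch assuming `tf` is the global tfjs namespace:

```js
const v = tf.zeros([2]);          // moving average, zero-initialized
const x = tf.tensor1d([1, 2]);    // new observation
const decay = 0.9;

// First update (step = 1). With zeroDebias the update (1 - decay) * (x - v)
// is divided by (1 - decay^step), cancelling the zero initialization.
tf.movingAverage(v, x, decay, 1).print();            // [1, 2]

// Without zero-debiasing the same step only moves 10% of the way.
tf.movingAverage(v, x, decay, null, false).print();  // [0.1, 0.2]
```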
begin and end must\n * imply a slice of size 1 in the dimension.\n */\n/** @doc {heading: 'Operations', subheading: 'Slicing and Joining'} */\nfunction stridedSlice_<T extends Tensor>(\n x: T|TensorLike, begin: number[], end: number[], strides: number[],\n beginMask = 0, endMask = 0, ellipsisMask = 0, newAxisMask = 0,\n shrinkAxisMask = 0): T {\n if (ellipsisMask !== 0) {\n throw new Error('ellipsis mask is not yet supported');\n }\n if (newAxisMask !== 0) {\n throw new Error('new axis mask is not yet supported');\n }\n const $x = convertToTensor(x, 'x', 'stridedSlice');\n return ENV.engine.runKernel(\n backend => backend.stridedSlice(\n $x, begin, end, strides, beginMask, endMask, ellipsisMask,\n newAxisMask, shrinkAxisMask),\n {$x}) as T;\n}\n\nexport const stridedSlice = op({stridedSlice_});\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {op} from './operation';\n\n/**\n * Finds the values and indices of the `k` largest entries along the last\n * dimension.\n *\n * If the input is a vector (rank=1), finds the k largest entries in the vector\n * and outputs their values and indices as vectors. Thus values[j] is the j-th\n * largest entry in input, and its index is indices[j].\n * For higher rank inputs, computes the top k entries along the last dimension.\n *\n * If two elements are equal, the lower-index element appears first.\n *\n * ```js\n * const a = tf.tensor2d([[1, 5], [4, 3]]);\n * const {values, indices} = tf.topk(a);\n * values.print();\n * indices.print();\n * ```\n * @param x 1-D or higher `tf.Tensor` with last dimension being at least `k`.\n * @param k Number of top elements to look for along the last dimension.\n * @param sorted If true, the resulting `k` elements will be sorted by the\n * values in descending order.\n */\n/** @doc {heading: 'Operations', subheading: 'Evaluation'} */\nfunction topk_<T extends Tensor>(\n x: T|TensorLike, k = 1, sorted = true): {values: T, indices: T} {\n const $x = convertToTensor(x, 'x', 'topk');\n if ($x.rank === 0) {\n throw new Error('topk() expects the input to be of rank 1 or higher');\n }\n const lastDim = $x.shape[$x.shape.length - 1];\n if (k > lastDim) {\n throw new Error(\n `'k' passed to topk() must be <= the last dimension (${lastDim}) ` +\n `but got ${k}`);\n }\n\n const [values, indices] =\n ENV.engine.runKernel(b => b.topk($x, k, sorted), {$x});\n return {values, indices};\n}\n\nexport const topk = op({topk_});\n","/**\n * @license\n * Copyright 2018 Google Inc. 
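[Editor's note] The `stridedSlice_` examples above never exercise the bitmasks. A sketch of `beginMask`, assuming `tf` is the global tfjs namespace and the TensorFlow mask semantics described in the doc comment; the expected output follows from those semantics:

```js
const t = tf.tensor3d([1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6, 6],
                      [3, 2, 3]);

// beginMask = 1 sets bit 0, so begin[0] is ignored and dimension 0 starts
// from index 0 instead of 1.
t.stridedSlice([1, 0, 0], [2, 2, 3], [1, 1, 1], 1).print();
// [[[1, 1, 1], [2, 2, 2]],
//  [[3, 3, 3], [4, 4, 4]]]
```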
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {Rank, ShapeMap, TensorLike} from '../types';\n\nimport {op} from './operation';\nimport * as scatter_nd_util from './scatter_nd_util';\n\n/**\n * Creates a new tensor by applying sparse updates to individual\n * values or slices within a zero tensor of the given shape tensor according to\n * indices. This operator is the inverse of the `tf.gatherND` operator which\n * extracts values or slices from a given tensor.\n *\n * ```js\n * const indices = tf.tensor2d([[4], [3], [1], [7]]);\n * const updates = tf.tensor2d([9, 10, 11, 12]);\n * const shape = [8];\n * tf.scatterND(indices, updates, shape]).print() //[0, 11, 0, 10, 9, 0, 0, 12]\n * ```\n *\n * @param indices The tensor contains the indices into the output tensor.\n * @param updates The tensor contains the value for the indices.\n * @param shape: The shape of the output tensor.\n */\n/** @doc {heading: 'Operations', subheading: 'Slicing and Joining'} */\nfunction scatterND_<R extends Rank>(\n indices: Tensor|TensorLike, updates: Tensor|TensorLike,\n shape: ShapeMap[R]): Tensor<R> {\n const $indices = convertToTensor(indices, 'indices', 'scatterND', 'int32');\n const $updates = convertToTensor(updates, 'updates', 'scatterND');\n scatter_nd_util.validateInput($updates, $indices, shape);\n\n return ENV.engine.runKernel(\n backend => backend.scatterND($indices, $updates, shape),\n {$indices, $updates}) as Tensor<R>;\n}\n\nexport const scatterND = op({scatterND_});\n","/**\n * @license\n * Copyright 2018 Google Inc. 
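[Editor's note] The `scatterND_` JSDoc example above has a stray `]` and creates float indices; a corrected sketch consistent with the validation in `scatter_nd_util` (indices must be `int32`, updates shape equal to `indices.shape[:-1]` plus the trailing output dimensions), assuming `tf` is the global tfjs namespace:

```js
const indices = tf.tensor2d([[4], [3], [1], [7]], [4, 1], 'int32');
const updates = tf.tensor1d([9, 10, 11, 12]);
const shape = [8];

// Writes updates[i] at position indices[i] of a zero tensor of `shape`.
tf.scatterND(indices, updates, shape).print();
// [0, 11, 0, 10, 9, 0, 0, 12]
```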
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {complex, imag, real} from '../ops/complex_ops';\nimport {op} from '../ops/operation';\nimport {Tensor} from '../tensor';\nimport {assert} from '../util';\n\n/**\n * Fast Fourier transform.\n *\n * Computes the 1-dimensional discrete Fourier transform over the inner-most\n * dimension of input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([1, 2, 3]);\n * const x = tf.complex(real, imag);\n *\n * x.fft().print(); // tf.spectral.fft(x).print();\n * ```\n * @param input The complex input to compute an fft over.\n */\n/**\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction fft_(input: Tensor): Tensor {\n assert(input.dtype === 'complex64',\n `The dtype for tf.spectral.fft() must be complex64 ` +\n `but got ${input.dtype}.`);\n\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n const input2D = input.as2D(batch, innerDimensionSize);\n\n const ret = ENV.engine.runKernel(backend => backend.fft(input2D), {input});\n\n return ret.reshape(input.shape);\n}\n\n/**\n * Inverse fast Fourier transform.\n *\n * Computes the inverse 1-dimensional discrete Fourier transform over the\n * inner-most dimension of input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n * const imag = tf.tensor1d([1, 2, 3]);\n * const x = tf.complex(real, imag);\n *\n * x.ifft().print(); // tf.spectral.ifft(x).print();\n * ```\n * @param input The complex input to compute an ifft over.\n */\n/**\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction ifft_(input: Tensor): Tensor {\n assert(input.dtype === 'complex64',\n `The dtype for tf.spectral.ifft() must be complex64 ` +\n `but got ${input.dtype}.`);\n\n // Collapse all outer dimensions to a single batch dimension.\n const innerDimensionSize = input.shape[input.shape.length - 1];\n const batch = input.size / innerDimensionSize;\n const input2D = input.as2D(batch, innerDimensionSize);\n\n const ret = ENV.engine.runKernel(backend => backend.ifft(input2D), {input});\n\n return ret.reshape(input.shape);\n}\n\n/**\n * Real value input fast Fourier transform.\n *\n * Computes the 1-dimensional discrete Fourier transform over the\n * inner-most dimension of the real input.\n *\n * ```js\n * const real = tf.tensor1d([1, 2, 3]);\n *\n * x.rfft().print();\n * ```\n * @param input The real value input to compute an rfft over.\n */\n/**\n * @doc {heading: 'Operations', subheading: 'Spectral', namespace: 'spectral'}\n */\nfunction rfft_(input: Tensor): Tensor {\n assert(input.dtype === 'float32', `The dtype for rfft() must be real value but\n got ${input.dtype}`);\n\n const innerDimensionSize = input.shape[input.shape.length - 
1];\n const batch = input.size / innerDimensionSize;\n\n // Complement the input with zero imaginary numbers.\n const zeros = input.zerosLike();\n const complexInput = complex(input, zeros).as2D(batch, innerDimensionSize);\n\n const ret = ENV.engine.runKernel(backend => backend.fft(complexInput),\n {complexInput});\n\n // Exclude complex conjugations. These conjugations are put symmetrically.\n const half = Math.floor(innerDimensionSize / 2) + 1;\n const realValues = real(ret);\n const imagValues = imag(ret);\n const realComplexConjugate = realValues.split(\n [ half, innerDimensionSize - half ], realValues.shape.length - 1);\n const imagComplexConjugate = imagValues.split(\n [ half, innerDimensionSize - half ], imagValues.shape.length - 1);\n\n const outputShape = input.shape.slice();\n outputShape[input.shape.length - 1] = half;\n\n return complex(realComplexConjugate[0], imagComplexConjugate[0])\n .reshape(outputShape);\n}\n\nexport const fft = op({fft_});\nexport const ifft = op({ifft_});\nexport const rfft = op({rfft_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport {Tensor} from '../tensor';\n\n/**\n * Validate sparseToDense inputs.\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape number[]. Shape of the dense output tensor.\n * @param validateIndices boolean. indice validation is not supported, error\n * will be thrown if it is set.\n */\nexport function validateInput(\n sparseIndices: Tensor, sparseValues: Tensor, outputShape: number[],\n defaultValues: Tensor) {\n if (sparseIndices.dtype !== 'int32') {\n throw new Error(\n 'tf.sparseToDense() expects the indices to be int32 type,' +\n ` but the dtype was ${sparseIndices.dtype}.`);\n }\n if (sparseIndices.rank > 2) {\n throw new Error(\n 'sparseIndices should be a scalar, vector, or matrix,' +\n ` but got shape ${sparseIndices.shape}.`);\n }\n\n const numElems = sparseIndices.rank > 0 ? sparseIndices.shape[0] : 1;\n const numDims = sparseIndices.rank > 1 ? 
sparseIndices.shape[1] : 1;\n\n if (outputShape.length !== numDims) {\n throw new Error(\n 'outputShape has incorrect number of elements:,' +\n ` ${outputShape.length}, should be: ${numDims}.`);\n }\n\n const numValues = sparseValues.size;\n if (!(sparseValues.rank === 0 ||\n sparseValues.rank === 1 && numValues === numElems)) {\n throw new Error(\n 'sparseValues has incorrect shape ' +\n `${sparseValues.shape}, should be [] or [${numElems}]`);\n }\n\n if (sparseValues.dtype !== defaultValues.dtype) {\n throw new Error('sparseValues.dtype must match defaultValues.dtype');\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport * as sparse_to_dense from '../ops/sparse_to_dense_util';\nimport {Scalar, Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {Rank, ShapeMap, TensorLike} from '../types';\n\nimport {op} from './operation';\n\n/**\n * Converts a sparse representation into a dense tensor.\n *\n * Builds an array dense with shape outputShape such that:\n *\n * // If sparseIndices is scalar\n * dense[i] = (i == sparseIndices ? sparseValues : defaultValue)\n *\n * // If sparseIndices is a vector, then for each i\n * dense[sparseIndices[i]] = sparseValues[i]\n *\n * // If sparseIndices is an n by d matrix, then for each i in [0, n)\n * dense[sparseIndices[i][0], ..., sparseIndices[i][d-1]] = sparseValues[i]\n * All other values in dense are set to defaultValue. If sparseValues is a\n * scalar, all sparse indices are set to this single value.\n *\n * ```js\n * const indices = tf.tensor1d([4, 5, 6, 1, 2, 3], 'int32');\n * const values = tf.tensor1d([10, 11, 12, 13, 14, 15], 'float32');\n * const shape = [8];\n * tf.sparseToDense(indices, values, shape).print();\n * ```\n *\n * @param sparseIndices A 0-D, 1-D, or 2-D Tensor of type int32.\n * sparseIndices[i] contains the complete index where sparseValues[i] will be\n * placed.\n * @param sparseValues A 0-D or 1-D Tensor. Values\n * corresponding to each row of sparseIndices, or a scalar value to be used for\n * all sparse indices.\n * @param outputShape Shape of the dense output tensor. the type is inferred.\n * @param defaultValue Scalar. Value to set for indices not specified in\n * sparseIndices. 
Defaults to zero.\n */\n/** @doc {heading: 'Operations', subheading: 'Normalization'} */\nfunction sparseToDense_<R extends Rank>(\n sparseIndices: Tensor|TensorLike, sparseValues: Tensor|TensorLike,\n outputShape: ShapeMap[R], defaultValue: Scalar|TensorLike): Tensor<R> {\n const $sparseIndices =\n convertToTensor(sparseIndices, 'sparseIndices', 'sparseToDense', 'int32');\n const $sparseValues =\n convertToTensor(sparseValues, 'sparseValues', 'sparseToDense');\n const $defaultValue = convertToTensor(\n defaultValue, 'defaultValue', 'sparseToDense', $sparseValues.dtype);\n\n sparse_to_dense.validateInput(\n $sparseIndices, $sparseValues, outputShape, $defaultValue);\n\n return ENV.engine.runKernel(\n backend => backend.sparseToDense(\n $sparseIndices, $sparseValues, outputShape, $defaultValue),\n {$sparseIndices, $sparseValues, $defaultValue});\n}\n\nexport const sparseToDense = op({sparseToDense_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport {ENV} from '../environment';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {Rank, TensorLike} from '../types';\nimport {op} from './operation';\n\n/**\n * Gather slices from input tensor into a Tensor with shape specified by\n * `indices`.\n *\n * `indices` is an K-dimensional integer tensor, best thought of as a\n * (K-1)-dimensional tensor of indices into input, where each element defines a\n * slice of input:\n * output[\\\\(i_0, ..., i_{K-2}\\\\)] = input[indices[\\\\(i_0, ..., i_{K-2}\\\\)]]\n *\n * Whereas in `tf.gather`, `indices` defines slices into the first dimension of\n * input, in `tf.gatherND`, `indices` defines slices into the first N dimensions\n * of input, where N = indices.shape[-1].\n *\n * The last dimension of indices can be at most the rank of input:\n * indices.shape[-1] <= input.rank\n *\n * The last dimension of `indices` corresponds to elements\n * (if indices.shape[-1] == input.rank) or slices\n * (if indices.shape[-1] < input.rank) along dimension indices.shape[-1] of\n * input.\n * The output tensor has shape\n * indices.shape[:-1] + input.shape[indices.shape[-1]:]\n *\n * Note that on CPU, if an out of bound index is found, an error is returned. 
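[Editor's note] The `sparseToDense_` signature above takes an explicit `defaultValue`, which the JSDoc example omits. A sketch passing it explicitly, assuming `tf` is the global tfjs namespace:

```js
const sparseIndices = tf.tensor1d([4, 5, 6, 1, 2, 3], 'int32');
const sparseValues = tf.tensor1d([10, 11, 12, 13, 14, 15], 'float32');
const defaultValue = tf.scalar(0);   // value for unspecified positions

tf.sparseToDense(sparseIndices, sparseValues, [8], defaultValue).print();
// [0, 13, 14, 15, 10, 11, 12, 0]
```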
On\n * GPU, if an out of bound index is found, a 0 is stored in the corresponding\n * output value.\n *\n * ```js\n * const indices = tf.tensor2d([0, 1, 1, 0], [2,2], 'int32');\n * const input = tf.tensor2d([9, 10, 11, 12], [2, 2]);\n * tf.gatherND(input, indices).print() //[10, 11]\n * ```\n *\n * @param x The tensor from which to gather values.\n * @param indices Index tensor, must be of type int32.\n */\n/** @doc {heading: 'Operations', subheading: 'Slicing and Joining'} */\nfunction gatherND_(\n x: Tensor|TensorLike, indices: Tensor|TensorLike): Tensor<Rank> {\n const $indices = convertToTensor(indices, 'indices', 'gatherND', 'int32');\n const $x = convertToTensor(x, 'x', 'gatherND');\n return ENV.engine.runKernel(\n backend => backend.gatherND($x, $indices), {$x, $indices}) as\n Tensor<Rank>;\n}\nexport const gatherND = op({gatherND_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {customGrad} from '../globals';\nimport {Tensor} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport {assertShapesMatch, sizeFromShape} from '../util';\n\nimport {expandShapeToKeepDim} from './axis_util';\n\nimport {minimum} from './binary_ops';\nimport {op} from './operation';\nimport {ones, scalar} from './tensor_ops';\n\nexport enum Reduction {\n NONE,\n MEAN,\n SUM,\n SUM_BY_NONZERO_WEIGHTS\n}\n\n/**\n * Computes the weighted loss between two tensors.\n *\n * @param losses Tensor of shape `[batch_size, d1, ... dN]`.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `losses`, and must be broadcastable to `losses` (i.e., all\n * dimensions must be either `1`, or the same as the corresponding\n * `losses` dimension).\n */\n/** @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'} */\nfunction computeWeightedLoss_<T extends Tensor, O extends Tensor>(\n losses: T|TensorLike, weights?: Tensor|TensorLike,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n const $losses = convertToTensor(losses, 'losses', 'computeWeightedLoss');\n let $weights: Tensor = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'computeWeightedLoss');\n }\n\n const weightedLoss = ($weights == null) ? $losses : $losses.mul($weights);\n\n if (reduction === Reduction.NONE) {\n return weightedLoss as O;\n }\n if (reduction === Reduction.SUM) {\n return weightedLoss.sum();\n }\n if (reduction === Reduction.MEAN) {\n if ($weights == null) {\n return weightedLoss.mean();\n } else {\n const broadcastFactor =\n sizeFromShape($losses.shape) / sizeFromShape($weights.shape);\n const result = weightedLoss.sum().div($weights.sum());\n return broadcastFactor > 1 ? 
result.div(scalar(broadcastFactor)) :\n result as O;\n }\n }\n if (reduction === Reduction.SUM_BY_NONZERO_WEIGHTS) {\n if ($weights == null) {\n return weightedLoss.sum().div(scalar($losses.size));\n } else {\n const broadcastedWeights = $weights.mul(ones($losses.shape));\n\n const numNonZeros =\n broadcastedWeights.notEqual(scalar(0)).sum().toFloat();\n return weightedLoss.sum().div(numNonZeros);\n }\n }\n\n throw Error(`Unknown reduction: ${reduction}`);\n}\n\n/**\n * Computes the absolute difference loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n */\n/** @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'} */\nfunction absoluteDifference_<T extends Tensor, O extends Tensor>(\n labels: T|TensorLike, predictions: T|TensorLike,\n weights?: Tensor|TensorLike,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n const $labels = convertToTensor(labels, 'labels', 'absoluteDifference');\n const $predictions =\n convertToTensor(predictions, 'predictions', 'absoluteDifference');\n let $weights: Tensor = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'absoluteDifference');\n }\n assertShapesMatch(\n $labels.shape, $predictions.shape, 'Error in absoluteDifference: ');\n\n const losses = $labels.sub($predictions).abs();\n return computeWeightedLoss(losses, $weights, reduction);\n}\n\n/**\n * Computes the mean squared error between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. 
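[Editor's note] A sketch of how `computeWeightedLoss_` and `absoluteDifference_` above interact with weights and the `Reduction` enum, assuming `tf` is the global tfjs namespace and that the enum is exposed as `tf.Reduction` and the losses under `tf.losses`, as in the `namespace: 'losses'` doc tags:

```js
const labels = tf.tensor1d([1, 2, 3, 4]);
const predictions = tf.tensor1d([1, 3, 5, 4]);
// The zero weight drops the last element from the SUM_BY_NONZERO_WEIGHTS mean.
const weights = tf.tensor1d([1, 1, 1, 0]);

// |1-1| + |2-3| + |3-5| weighted and averaged over 3 nonzero weights -> 1.
tf.losses.absoluteDifference(labels, predictions, weights).print(); // 1

// Raw per-element weighted losses, no reduction applied.
tf.losses.computeWeightedLoss(
    predictions.sub(labels).abs(), weights, tf.Reduction.NONE).print();
// [0, 1, 2, 0]
```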
Should be of type\n * `Reduction`\n */\n/** @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'} */\nfunction meanSquaredError_<T extends Tensor, O extends Tensor>(\n labels: T|TensorLike, predictions: T|TensorLike,\n weights?: Tensor|TensorLike,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n const $labels = convertToTensor(labels, 'labels', 'meanSquaredError');\n const $predictions =\n convertToTensor(predictions, 'predictions', 'meanSquaredError');\n let $weights: Tensor = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'meanSquaredError');\n }\n assertShapesMatch(\n $labels.shape, $predictions.shape, 'Error in meanSquaredError: ');\n\n const losses = $labels.squaredDifference($predictions);\n return computeWeightedLoss(losses, $weights, reduction);\n}\n\n/**\n * Computes the cosine distance loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param axis The dimension along which the cosine distance is computed.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n */\n/** @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'} */\nfunction cosineDistance_<T extends Tensor, O extends Tensor>(\n labels: T|TensorLike, predictions: T|TensorLike, axis: number,\n weights?: Tensor|TensorLike,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n const $labels = convertToTensor(labels, 'labels', 'cosineDistance');\n const $predictions =\n convertToTensor(predictions, 'predictions', 'cosineDistance');\n let $weights: Tensor = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'cosineDistance');\n }\n assertShapesMatch(\n $labels.shape, $predictions.shape, 'Error in cosineDistance: ');\n\n const one = scalar(1);\n const losses = one.sub($labels.mul($predictions).sum(axis, true));\n return computeWeightedLoss(losses, $weights, reduction);\n}\n\n/**\n * Computes the Hinge loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`\n */\n/** @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'} */\nfunction hingeLoss_<T extends Tensor, O extends Tensor>(\n labels: T|TensorLike, predictions: T|TensorLike,\n weights?: Tensor|TensorLike,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n let $labels = convertToTensor(labels, 'labels', 'hingeLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'hingeLoss');\n let $weights: Tensor = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'hingeLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in hingeLoss: ');\n\n const one = scalar(1);\n // Convert binary labels to (-1, 1)\n $labels = scalar(2).mul($labels).sub(one);\n const losses = one.sub($labels.mul($predictions)).relu();\n return computeWeightedLoss(losses, $weights, reduction);\n}\n\n/**\n * Computes the log loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param epsilon A small increment to avoid taking log of zero\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n */\n/** @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'} */\nfunction logLoss_<T extends Tensor, O extends Tensor>(\n labels: T|TensorLike, predictions: T|TensorLike,\n weights?: Tensor|TensorLike, epsilon = 1e-7,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n const $labels = convertToTensor(labels, 'labels', 'logLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'logLoss');\n let $weights: Tensor = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'logLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in logLoss: ');\n\n const one = scalar(1);\n const epsilonScalar = scalar(epsilon);\n const losses = $labels.mul($predictions.add(epsilonScalar).log())\n .neg()\n .sub(one.sub($labels).mul(\n one.sub($predictions).add(epsilonScalar).log()));\n return computeWeightedLoss(losses, $weights, reduction);\n}\n\nfunction sigmoidCrossEntropyWithLogits_<T extends Tensor, O extends Tensor>(\n labels: T|TensorLike, logits: T|TensorLike): O {\n const $labels =\n convertToTensor(labels, 'labels', 'sigmoidCrossEntropyWithLogits');\n const $logits =\n convertToTensor(logits, 'logits', 'sigmoidCrossEntropyWithLogits');\n assertShapesMatch(\n $labels.shape, $logits.shape, 'Error in sigmoidCrossEntropyWithLogits: ');\n\n /**\n * Implementation Details:\n *\n * For brevity, let `x = logits`, `z = labels`. 
The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n *\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n *\n * Hence, to ensure stability and avoid overflow, the implementation uses\n * this equivalent formulation:\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n */\n const maxOutput = $logits.relu();\n const outputXTarget = $logits.mul($labels);\n const sigmoidOutput = $logits.abs().neg().exp().log1p();\n\n return maxOutput.sub(outputXTarget).add(sigmoidOutput);\n}\n\n/**\n * Computes the sigmoid cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newMulticlassLabels = multiclassLabels * (1 - labelSmoothing)\n * + 0.5 * labelSmoothing\n *\n * @param multiClassLabels The ground truth output tensor of shape\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n */\n/** @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' } */\nfunction sigmoidCrossEntropy_<T extends Tensor, O extends Tensor>(\n multiClassLabels: T|TensorLike, logits: T|TensorLike,\n weights?: Tensor|TensorLike, labelSmoothing = 0,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n let $multiClassLabels = convertToTensor(\n multiClassLabels, 'multiClassLabels', 'sigmoidCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'sigmoidCrossEntropy');\n let $weights: Tensor = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'sigmoidCrossEntropy');\n }\n assertShapesMatch(\n $multiClassLabels.shape, $logits.shape, 'Error in sigmoidCrossEntropy: ');\n\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const half = scalar(0.5);\n\n $multiClassLabels = $multiClassLabels.mul(one.sub(labelSmoothingScalar))\n .add(half.mul(labelSmoothingScalar));\n }\n const losses = sigmoidCrossEntropyWithLogits_($multiClassLabels, $logits);\n\n return computeWeightedLoss(losses, $weights, reduction);\n}\n\n/**\n * Computes the huber loss between two tensors.\n *\n * @param labels The ground truth output tensor, same dimensions as\n * 'predictions'.\n * @param predictions The predicted outputs.\n * @param weights Tensor whose rank is either 0, or the same rank as\n * `labels`, and must be broadcastable to `labels` (i.e., all dimensions\n * must be either `1`, or the same as the corresponding `losses`\n * dimension).\n * @param delta Point where huber loss changes from quadratic to linear.\n * @param reduction Type of reduction to apply to loss. 
Should be of type\n * `Reduction`.\n */\n/** @doc {heading: 'Training', subheading: 'Losses', namespace: 'losses'} */\nfunction huberLoss_<T extends Tensor, O extends Tensor>(\n labels: T|TensorLike, predictions: T|TensorLike,\n weights?: Tensor|TensorLike, delta = 1.0,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n const $labels = convertToTensor(labels, 'labels', 'huberLoss');\n const $predictions = convertToTensor(predictions, 'predictions', 'huberLoss');\n let $weights: Tensor = null;\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'huberLoss');\n }\n assertShapesMatch($labels.shape, $predictions.shape, 'Error in huberLoss: ');\n\n const deltaScalar = scalar(delta);\n const error = $predictions.sub($labels).abs();\n const quadratic = minimum(error, deltaScalar);\n const linear = error.sub(quadratic);\n\n const losses =\n scalar(0.5).mul(quadratic.square()).add(deltaScalar.mul(linear));\n return computeWeightedLoss(losses, $weights, reduction);\n}\n\n/**\n * Computes softmax cross entropy between logits and labels.\n *\n * Measures the probability error in discrete classification tasks in which\n * the classes are mutually exclusive (each entry is in exactly one class).\n * For example, each CIFAR-10 image is labeled with one and only one label: an\n * image can be a dog or a truck, but not both.\n *\n * `NOTE`: While the classes are mutually exclusive, their probabilities need\n * not be. All that is required is that each row of labels is a valid\n * probability distribution. If they are not, the computation of the gradient\n * will be incorrect.\n *\n * `WARNING`: This op expects unscaled logits, since it performs a softmax on\n * logits internally for efficiency. Do not call this op with the output of\n * softmax, as it will produce incorrect results.\n *\n * logits and labels must have the same shape, e.g. [batch_size, num_classes]\n * and the same dtype.\n * @param labels The labels array.\n * @param logits The logits array.\n * @param dim The dimension softmax would be performed on. Defaults to `-1`\n * which indicates the last dimension.\n */\nfunction softmaxCrossEntropyWithLogits_<T extends Tensor, O extends Tensor>(\n labels: T, logits: T, dim = -1): O {\n if (dim === -1) {\n dim = logits.rank - 1;\n }\n\n if (dim !== logits.rank - 1) {\n throw Error(\n `Softmax cross entropy along a non-last dimension is not yet ` +\n `supported. Labels / logits was rank ${logits.rank} ` +\n `and dim was ${dim}`);\n }\n // Use a custom gradient for numerical stability.\n const customOp = customGrad((labels, logits) => {\n // Reference:\n // 1. http://cs231n.github.io/linear-classify/#softmax\n // 2. 
https://blog.feedly.com/tricks-of-the-trade-logsumexp/\n const keepDims = true;\n const lse = logits.logSumExp([dim], keepDims);\n\n const logResult = logits.toFloat().sub(lse);\n const costVector = logResult.mul(labels).neg();\n\n const value = costVector.sum([dim]) as O;\n\n const gradFunc = (dy: O) => {\n const dyShape = expandShapeToKeepDim(dy.shape, [dim]);\n return [\n dy.reshape(dyShape).mul(labels.toFloat().sub(logResult.exp())),\n dy.reshape(dyShape).mul(logResult.exp().sub(labels.toFloat())),\n ];\n };\n return {value, gradFunc};\n });\n\n return customOp(labels, logits);\n}\n\n/**\n * Computes the softmax cross entropy loss between two tensors.\n *\n * If labelSmoothing is nonzero, smooth the labels towards 1/2:\n *\n * newOnehotLabels = onehotLabels * (1 - labelSmoothing)\n * + labelSmoothing / numClasses\n *\n * @param onehotLabels One hot encoded labels\n * [batch_size, num_classes], same dimensions as 'predictions'.\n * @param logits The predicted outputs.\n * @param weights Tensor whose rank is either 0, or 1, and must be\n * broadcastable to `loss` of shape [batch_size]\n * @param labelSmoothing If greater than 0, then smooth the labels.\n * @param reduction Type of reduction to apply to loss. Should be of type\n * `Reduction`\n */\n/** @doc { heading: 'Training', subheading: 'Losses', namespace: 'losses' } */\nfunction softmaxCrossEntropy_<T extends Tensor, O extends Tensor>(\n onehotLabels: T|TensorLike, logits: T|TensorLike,\n weights?: Tensor|TensorLike, labelSmoothing = 0,\n reduction = Reduction.SUM_BY_NONZERO_WEIGHTS): O {\n let $onehotLabels =\n convertToTensor(onehotLabels, 'onehotLabels', 'softmaxCrossEntropy');\n const $logits = convertToTensor(logits, 'logits', 'softmaxCrossEntropy');\n let $weights: Tensor = null;\n\n if (weights != null) {\n $weights = convertToTensor(weights, 'weights', 'softmaxCrossEntropy');\n }\n\n assertShapesMatch(\n $onehotLabels.shape, $logits.shape, 'Error in softmaxCrossEntropy: ');\n\n if (labelSmoothing > 0) {\n const labelSmoothingScalar = scalar(labelSmoothing);\n const one = scalar(1);\n const numClasses = scalar($onehotLabels.shape[1]);\n\n $onehotLabels = $onehotLabels.mul(one.sub(labelSmoothingScalar))\n .add(labelSmoothingScalar.div(numClasses));\n }\n\n const losses = softmaxCrossEntropyWithLogits_($onehotLabels, $logits);\n\n return computeWeightedLoss(losses, $weights, reduction);\n}\n\nexport const absoluteDifference = op({absoluteDifference_});\nexport const computeWeightedLoss = op({computeWeightedLoss_});\nexport const cosineDistance = op({cosineDistance_});\nexport const hingeLoss = op({hingeLoss_});\nexport const huberLoss = op({huberLoss_});\nexport const logLoss = op({logLoss_});\nexport const meanSquaredError = op({meanSquaredError_});\nexport const sigmoidCrossEntropy = op({sigmoidCrossEntropy_});\nexport const softmaxCrossEntropy = op({softmaxCrossEntropy_});\n","/**\n * @license\n * Copyright 2018 Google LLC. 
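The loss ops exported just above are surfaced on the public `tf.losses` namespace (per their `@doc` annotations). A minimal usage sketch, assuming the `tf` global provided by this bundle; the tensor values are illustrative only:

```js
// Sketch: calling the exported loss ops through the tf.losses namespace.
const labels = tf.tensor2d([[0, 1], [1, 0]]);
const predictions = tf.tensor2d([[0.1, 0.9], [0.6, 0.4]]);

// Mean squared error with the default SUM_BY_NONZERO_WEIGHTS reduction.
tf.losses.meanSquaredError(labels, predictions).print();

// Softmax cross entropy expects unscaled logits; softmax is applied internally.
const logits = tf.tensor2d([[1.0, 2.0], [3.0, 0.5]]);
tf.losses.softmaxCrossEntropy(labels, logits).print();
```

With no `weights` argument, the default reduction collapses the per-example losses to a scalar in both calls.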
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * Linear algebra ops.\n */\n\nimport {ENV} from '../environment';\nimport {dispose} from '../globals';\nimport {Tensor, Tensor1D, Tensor2D} from '../tensor';\nimport {assert} from '../util';\nimport {eye, squeeze, stack, unstack} from './array_ops';\nimport {split} from './concat_split';\nimport {norm} from './norm';\nimport {op} from './operation';\nimport {sum} from './reduction_ops';\nimport {tensor2d} from './tensor_ops';\n\n/**\n * Gram-Schmidt orthogonalization.\n *\n * ```js\n * const x = tf.tensor2d([[1, 2], [3, 4]]);\n * let y = tf.linalg.gramSchmidt(x);\n * y.print();\n * console.log('Othogonalized:');\n * y.dot(y.transpose()).print(); // should be nearly the identity matrix.\n * console.log('First row direction maintained:');\n * console.log(y.get(0, 1) / y.get(0, 0)); // should be nearly 2.\n * ```\n *\n * @param xs The vectors to be orthogonalized, in one of the two following\n * formats:\n * - An Array of `tf.Tensor1D`.\n * - A `tf.Tensor2D`, i.e., a matrix, in which case the vectors are the rows\n * of `xs`.\n * In each case, all the vectors must have the same length and the length\n * must be greater than or equal to the number of vectors.\n * @returns The orthogonalized and normalized vectors or matrix.\n * Orthogonalization means that the vectors or the rows of the matrix\n * are orthogonal (zero inner products). Normalization means that each\n * vector or each row of the matrix has an L2 norm that equals `1`.\n */\n/**\n * @doc {heading:'Operations',\n * subheading:'Linear Algebra',\n * namespace:'linalg'}\n */\nfunction gramSchmidt_(xs: Tensor1D[]|Tensor2D): Tensor1D[]|Tensor2D {\n let inputIsTensor2D: boolean;\n if (Array.isArray(xs)) {\n inputIsTensor2D = false;\n assert(\n xs != null && xs.length > 0,\n 'Gram-Schmidt process: input must not be null, undefined, or empty');\n const dim = xs[0].shape[0];\n for (let i = 1; i < xs.length; ++i) {\n assert(\n xs[i].shape[0] === dim,\n 'Gram-Schmidt: Non-unique lengths found in the input vectors: ' +\n `(${xs[i].shape[0]} vs. 
${dim})`);\n }\n } else {\n inputIsTensor2D = true;\n xs = split(xs, xs.shape[0], 0).map(x => squeeze(x, [0]));\n }\n\n assert(\n xs.length <= xs[0].shape[0],\n `Gram-Schmidt: Number of vectors (${xs.length}) exceeds ` +\n `number of dimensions (${xs[0].shape[0]}).`);\n\n const ys: Tensor1D[] = [];\n const xs1d = xs as Tensor1D[];\n for (let i = 0; i < xs.length; ++i) {\n ys.push(ENV.engine.tidy(() => {\n let x = xs1d[i];\n if (i > 0) {\n for (let j = 0; j < i; ++j) {\n const proj = sum(ys[j].mulStrict(x)).mul(ys[j]);\n x = x.sub(proj);\n }\n }\n return x.div(norm(x, 'euclidean'));\n }));\n }\n\n if (inputIsTensor2D) {\n return stack(ys, 0) as Tensor2D;\n } else {\n return ys;\n }\n}\n\n/**\n * Compute QR decomposition of m-by-n matrix using Householder transformation.\n *\n * Implementation based on\n * [http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf]\n * (http://www.cs.cornell.edu/~bindel/class/cs6210-f09/lec18.pdf)\n *\n * ```js\n * const a = tf.tensor2d([[1, 2], [3, 4]]);\n * let [q, r] = tf.linalg.qr(a);\n * console.log('Q');\n * q.print();\n * console.log('R');\n * r.print();\n * console.log('Orthogonalized');\n * q.dot(q.transpose()).print() // should be nearly the identity matrix.\n * console.log('Reconstructed');\n * q.dot(r).print(); // should be nearly [[1, 2], [3, 4]];\n * ```\n *\n * @param x The `tf.Tensor` to be QR-decomposed. Must have rank >= 2. Suppose\n * it has the shape `[..., M, N]`.\n * @param fullMatrices An optional boolean parameter. Defaults to `false`.\n * If `true`, compute full-sized `Q`. If `false` (the default),\n * compute only the leading N columns of `Q` and `R`.\n * @returns An `Array` of two `tf.Tensor`s: `[Q, R]`. `Q` is a unitary matrix,\n * i.e., its columns all have unit norm and are mutually orthogonal.\n * If `M >= N`,\n * If `fullMatrices` is `false` (default),\n * - `Q` has a shape of `[..., M, N]`,\n * - `R` has a shape of `[..., N, N]`.\n * If `fullMatrices` is `true` (default),\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * If `M < N`,\n * - `Q` has a shape of `[..., M, M]`,\n * - `R` has a shape of `[..., M, N]`.\n * @throws If the rank of `x` is less than 2.\n */\n/**\n * @doc {heading:'Operations',\n * subheading:'Linear Algebra',\n * namespace:'linalg'}\n */\nfunction qr_(x: Tensor, fullMatrices = false): [Tensor, Tensor] {\n if (x.rank < 2) {\n throw new Error(\n `qr() requires input tensor to have a rank >= 2, but got rank ${\n x.rank}`);\n } else if (x.rank === 2) {\n return qr2d(x as Tensor2D, fullMatrices);\n } else {\n // Rank > 2.\n // TODO(cais): Below we split the input into individual 2D tensors,\n // perform QR decomposition on them and then stack the results back\n // together. 
We should explore whether this can be parallelized.\n const outerDimsProd = x.shape.slice(0, x.shape.length - 2)\n .reduce((value, prev) => value * prev);\n const x2ds = unstack(\n x.reshape([\n outerDimsProd, x.shape[x.shape.length - 2],\n x.shape[x.shape.length - 1]\n ]),\n 0);\n const q2ds: Tensor2D[] = [];\n const r2ds: Tensor2D[] = [];\n x2ds.forEach(x2d => {\n const [q2d, r2d] = qr2d(x2d as Tensor2D, fullMatrices);\n q2ds.push(q2d);\n r2ds.push(r2d);\n });\n const q = stack(q2ds, 0).reshape(x.shape);\n const r = stack(r2ds, 0).reshape(x.shape);\n return [q, r];\n }\n}\n\nfunction qr2d(x: Tensor2D, fullMatrices = false): [Tensor2D, Tensor2D] {\n return ENV.engine.tidy(() => {\n if (x.shape.length !== 2) {\n throw new Error(\n `qr2d() requires a 2D Tensor, but got a ${x.shape.length}D Tensor.`);\n }\n\n const m = x.shape[0];\n const n = x.shape[1];\n\n let q = eye(m) as Tensor2D; // Orthogonal transform so far.\n let r = x.clone(); // Transformed matrix so far.\n\n const one2D = tensor2d([[1]], [1, 1]);\n let w: Tensor2D = one2D.clone();\n\n const iters = m >= n ? n : m;\n for (let j = 0; j < iters; ++j) {\n // This tidy within the for-loop ensures we clean up temporary\n // tensors as soon as they are no longer needed.\n const rTemp = r;\n const wTemp = w;\n const qTemp = q;\n [w, r, q] = ENV.engine.tidy((): [Tensor2D, Tensor2D, Tensor2D] => {\n // Find H = I - tau * w * w', to put zeros below R(j, j).\n const rjEnd1 = r.slice([j, j], [m - j, 1]);\n const normX = rjEnd1.norm();\n const rjj = r.slice([j, j], [1, 1]);\n const s = rjj.sign().neg() as Tensor2D;\n const u1 = rjj.sub(s.mul(normX)) as Tensor2D;\n const wPre = rjEnd1.div(u1);\n if (wPre.shape[0] === 1) {\n w = one2D.clone();\n } else {\n w = one2D.concat(\n wPre.slice([1, 0], [wPre.shape[0] - 1, wPre.shape[1]]), 0) as\n Tensor2D;\n }\n const tau = s.matMul(u1).div(normX).neg() as Tensor2D;\n\n // -- R := HR, Q := QH.\n const rjEndAll = r.slice([j, 0], [m - j, n]);\n const tauTimesW = tau.mul(w) as Tensor2D;\n if (j === 0) {\n r = rjEndAll.sub(tauTimesW.matMul(w.transpose().matMul(rjEndAll)));\n } else {\n r = r.slice([0, 0], [j, n])\n .concat(\n rjEndAll.sub(\n tauTimesW.matMul(w.transpose().matMul(rjEndAll))),\n 0) as Tensor2D;\n }\n const qAllJEnd = q.slice([0, j], [m, q.shape[1] - j]);\n if (j === 0) {\n q = qAllJEnd.sub(qAllJEnd.matMul(w).matMul(tauTimesW.transpose()));\n } else {\n q = q.slice([0, 0], [m, j])\n .concat(\n qAllJEnd.sub(\n qAllJEnd.matMul(w).matMul(tauTimesW.transpose())),\n 1) as Tensor2D;\n }\n return [w, r, q];\n });\n dispose([rTemp, wTemp, qTemp]);\n }\n\n if (!fullMatrices && m > n) {\n q = q.slice([0, 0], [m, n]);\n r = r.slice([0, 0], [n, n]);\n }\n\n return [q, r];\n }) as [Tensor2D, Tensor2D];\n}\n\nexport const gramSchmidt = op({gramSchmidt_});\nexport const qr = op({qr_});\n","/**\n * @license\n * Copyright 2018 Google Inc. 
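The doc comments for `gramSchmidt` and `qr` above already carry 2-D examples; the sketch below (illustrative, not part of the bundled source) exercises the rank > 2 branch of `qr_`, which unstacks the input into 2-D matrices, decomposes each, and stacks the results back:

```js
// Batched QR: rank > 2 inputs are split into 2D matrices and restacked.
const a = tf.randomNormal([4, 3, 3]);  // a batch of four 3x3 matrices
const [q, r] = tf.linalg.qr(a);
console.log(q.shape);  // [4, 3, 3]
console.log(r.shape);  // [4, 3, 3]
```

For square matrices both outputs keep the input shape; per the comment in `qr_`, the per-matrix decompositions run sequentially rather than in parallel.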
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ForwardFunc} from '../engine';\nimport {ENV} from '../environment';\nimport {nonMaxSuppressionImpl} from '../kernels/non_max_suppression_impl';\nimport {Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\nimport {op} from './operation';\n\n/**\n * Bilinear resize a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. Treat similarly the width dimension.\n */\n/** @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'} */\nfunction resizeBilinear_<T extends Tensor3D|Tensor4D>(\n images: T|TensorLike, size: [number, number], alignCorners = false): T {\n const $images = convertToTensor(images, 'images', 'resizeBilinear');\n util.assert(\n $images.rank === 3 || $images.rank === 4,\n `Error in resizeBilinear: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(\n size.length === 2,\n `Error in resizeBilinear: new shape must 2D, but got shape ` +\n `${size}.`);\n\n let batchImages = $images as Tensor4D;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages =\n $images.as4D(1, $images.shape[0], $images.shape[1], $images.shape[2]);\n }\n\n const [newHeight, newWidth] = size;\n const forward: ForwardFunc<Tensor4D> = (backend, save) =>\n backend.resizeBilinear(batchImages, newHeight, newWidth, alignCorners);\n\n const backward = (dy: Tensor4D, saved: Tensor[]) => {\n return {\n batchImages: () => ENV.engine.runKernel(\n backend =>\n backend.resizeBilinearBackprop(dy, batchImages, alignCorners),\n {})\n };\n };\n\n const res = ENV.engine.runKernel(forward, {batchImages}, backward);\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n/**\n * NearestNeighbor resize a batch of 3D images to a new shape.\n *\n * @param images The images, of rank 4 or rank 3, of shape\n * `[batch, height, width, inChannels]`. If rank 3, batch of 1 is assumed.\n * @param size The new shape `[newHeight, newWidth]` to resize the\n * images to. Each channel is resized individually.\n * @param alignCorners Defaults to False. If true, rescale\n * input by `(new_height - 1) / (height - 1)`, which exactly aligns the 4\n * corners of images and resized images. If false, rescale by\n * `new_height / height`. 
Treat similarly the width dimension.\n */\n/** @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'} */\nfunction resizeNearestNeighbor_<T extends Tensor3D|Tensor4D>(\n images: T|TensorLike, size: [number, number], alignCorners = false): T {\n const $images = convertToTensor(images, 'images', 'resizeNearestNeighbor');\n util.assert(\n $images.rank === 3 || $images.rank === 4,\n `Error in resizeNearestNeighbor: x must be rank 3 or 4, but got ` +\n `rank ${$images.rank}.`);\n util.assert(\n size.length === 2,\n `Error in resizeNearestNeighbor: new shape must 2D, but got shape ` +\n `${size}.`);\n util.assert(\n $images.dtype === 'float32' || $images.dtype === 'int32',\n '`images` must have `int32` or `float32` as dtype');\n\n let batchImages = $images as Tensor4D;\n let reshapedTo4D = false;\n if ($images.rank === 3) {\n reshapedTo4D = true;\n batchImages =\n $images.as4D(1, $images.shape[0], $images.shape[1], $images.shape[2]);\n }\n const [newHeight, newWidth] = size;\n\n const forward: ForwardFunc<Tensor4D> = (backend, save) =>\n backend.resizeNearestNeighbor(\n batchImages, newHeight, newWidth, alignCorners);\n\n const backward = (dy: Tensor4D, saved: Tensor[]) => {\n return {\n batchImages: () => ENV.engine.runKernel(\n backend => backend.resizeNearestNeighborBackprop(\n dy, batchImages, alignCorners),\n {})\n };\n };\n\n const res = ENV.engine.runKernel(forward, {batchImages}, backward);\n\n if (reshapedTo4D) {\n return res.as3D(res.shape[1], res.shape[2], res.shape[3]) as T;\n }\n return res as T;\n}\n\n/**\n * Performs non maximum suppression of bounding boxes based on\n * iou (intersection over union)\n *\n * @param boxes a 2d tensor of shape `[numBoxes, 4]`. Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the corners of\n * the bounding box.\n * @param scores a 1d tensor providing the box scores of shape `[numBoxes]`.\n * @param maxOutputSize The maximum number of boxes to be selected.\n * @param iouThreshold A float representing the threshold for deciding whether\n * boxes overlap too much with respect to IOU. Must be between [0, 1].\n * Defaults to 0.5 (50% box overlap).\n * @param scoreThreshold A threshold for deciding when to remove boxes based\n * on score. 
Defaults to -inf, which means any score is accepted.\n * @return A 1D tensor with the selected box indices.\n */\n/** @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'} */\nfunction nonMaxSuppression_(\n boxes: Tensor2D|TensorLike, scores: Tensor1D|TensorLike,\n maxOutputSize: number, iouThreshold = 0.5,\n scoreThreshold = Number.NEGATIVE_INFINITY): Tensor1D {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppression');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppression');\n\n const inputs = nonMaxSuppSanityCheck(\n $boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n\n return ENV.engine.runKernel(\n b => b.nonMaxSuppression(\n $boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold),\n {$boxes});\n}\n\n/** This is the async version of `nonMaxSuppression` */\nasync function nonMaxSuppressionAsync_(\n boxes: Tensor2D|TensorLike, scores: Tensor1D|TensorLike,\n maxOutputSize: number, iouThreshold = 0.5,\n scoreThreshold = Number.NEGATIVE_INFINITY): Promise<Tensor1D> {\n const $boxes = convertToTensor(boxes, 'boxes', 'nonMaxSuppressionAsync');\n const $scores = convertToTensor(scores, 'scores', 'nonMaxSuppressionAsync');\n\n const inputs = nonMaxSuppSanityCheck(\n $boxes, $scores, maxOutputSize, iouThreshold, scoreThreshold);\n maxOutputSize = inputs.maxOutputSize;\n iouThreshold = inputs.iouThreshold;\n scoreThreshold = inputs.scoreThreshold;\n\n const boxesVals = await $boxes.data();\n const scoresVals = await $scores.data();\n const res = nonMaxSuppressionImpl(\n boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n if ($boxes !== boxes) {\n $boxes.dispose();\n }\n if ($scores !== scores) {\n $scores.dispose();\n }\n return res;\n}\n\nfunction nonMaxSuppSanityCheck(\n boxes: Tensor2D, scores: Tensor1D, maxOutputSize: number,\n iouThreshold: number, scoreThreshold: number):\n {maxOutputSize: number, iouThreshold: number, scoreThreshold: number} {\n if (iouThreshold == null) {\n iouThreshold = 0.5;\n }\n if (scoreThreshold == null) {\n scoreThreshold = Number.NEGATIVE_INFINITY;\n }\n const numBoxes = boxes.shape[0];\n maxOutputSize = Math.min(maxOutputSize, numBoxes);\n\n util.assert(\n 0 <= iouThreshold && iouThreshold <= 1,\n `iouThreshold must be in [0, 1], but was '${iouThreshold}'`);\n util.assert(\n boxes.rank === 2,\n `boxes must be a 2D tensor, but was of rank '${boxes.rank}'`);\n util.assert(\n boxes.shape[1] === 4,\n `boxes must have 4 columns, but 2nd dimension was ${boxes.shape[1]}`);\n util.assert(scores.rank === 1, 'scores must be a 1D tensor');\n util.assert(\n scores.shape[0] === numBoxes,\n `scores has incompatible shape with boxes. Expected ${numBoxes}, ` +\n `but was ${scores.shape[0]}`);\n return {maxOutputSize, iouThreshold, scoreThreshold};\n}\n\n/**\n * Extracts crops from the input image tensor and resizes them using bilinear\n * sampling or nearest neighbor sampling (possibly with aspect ratio change)\n * to a common output size specified by crop_size.\n *\n * @param image 4d tensor of shape `[batch,imageHeight,imageWidth, depth]`,\n * where imageHeight and imageWidth must be positive, specifying the\n * batch of images from which to take crops\n * @param boxes 2d float32 tensor of shape `[numBoxes, 4]`. 
Each entry is\n * `[y1, x1, y2, x2]`, where `(y1, x1)` and `(y2, x2)` are the normalized\n * coordinates of the box in the boxInd[i]'th image in the batch\n * @param boxInd 1d int32 tensor of shape `[numBoxes]` with values in range\n * `[0, batch)` that specifies the image that the `i`-th box refers to.\n * @param cropSize 1d int32 tensor of 2 elements `[cropHeigh, cropWidth]`\n * specifying the size to which all crops are resized to.\n * @param method Optional string from `'bilinear' | 'nearest'`,\n * defaults to bilinear, which specifies the sampling method for resizing\n * @param extrapolationValue A threshold for deciding when to remove boxes based\n * on score. Defaults to 0.\n * @return A 4D tensor of the shape `[numBoxes,cropHeight,cropWidth,depth]`\n */\n/** @doc {heading: 'Operations', subheading: 'Images', namespace: 'image'} */\nfunction cropAndResize_(\n image: Tensor4D|TensorLike,\n boxes: Tensor2D|TensorLike,\n boxInd: Tensor1D|TensorLike,\n cropSize: [number, number],\n method?: 'bilinear'|'nearest',\n extrapolationValue?: number,\n ): Tensor4D {\n const $image = convertToTensor(image, 'image', 'cropAndResize', 'float32');\n const $boxes = convertToTensor(boxes, 'boxes', 'cropAndResize', 'float32');\n const $boxInd = convertToTensor(boxInd, 'boxInd', 'cropAndResize', 'int32');\n method = method || 'bilinear';\n extrapolationValue = extrapolationValue || 0;\n\n const numBoxes = $boxes.shape[0];\n\n util.assert(\n $image.rank === 4,\n 'Error in cropAndResize: image must be rank 4,' +\n `but got rank ${$image.rank}.`);\n util.assert(\n $boxes.rank === 2 && $boxes.shape[1] === 4,\n `Error in cropAndResize: boxes must be have size [${numBoxes},4] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert(\n $boxInd.rank === 1 && $boxInd.shape[0] === numBoxes,\n `Error in cropAndResize: boxInd must be have size [${numBoxes}] ` +\n `but had shape ${$boxes.shape}.`);\n util.assert(\n $boxInd.dtype === 'int32',\n `Error in cropAndResize: boxInd must be of dtype int32, but got dtype ` +\n `${$boxInd.dtype}.`);\n util.assert(\n cropSize.length === 2,\n `Error in cropAndResize: cropSize must be of length 2, but got length ` +\n `${cropSize.length}.`);\n util.assert(\n cropSize[0] >= 1 && cropSize[1] >= 1,\n `cropSize must be atleast [1,1], but was ${cropSize}`);\n util.assert(\n method === 'bilinear' || method === 'nearest',\n `method must be bilinear or nearest, but was ${method}`);\n\n const forward: ForwardFunc<Tensor4D> = (backend, save) =>\n backend.cropAndResize(\n $image, $boxes, $boxInd, cropSize, method, extrapolationValue);\n\n const res = ENV.engine.runKernel(forward, {$image, $boxes});\n return res as Tensor4D;\n}\n\nexport const resizeBilinear = op({resizeBilinear_});\nexport const resizeNearestNeighbor = op({resizeNearestNeighbor_});\nexport const nonMaxSuppression = op({nonMaxSuppression_});\nexport const nonMaxSuppressionAsync = nonMaxSuppressionAsync_;\nexport const cropAndResize = cropAndResize_;\n","/**\n * @license\n * Copyright 2017 Google Inc. 
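The image ops exported just above are exposed on `tf.image` (per their `@doc` annotations). A short illustrative sketch with made-up shapes and boxes, assuming the `tf` global from this bundle:

```js
// Resize a batch of images; a rank-3 input would be treated as batch size 1.
const img = tf.zeros([1, 8, 8, 3]);      // [batch, height, width, channels]
const resized = tf.image.resizeBilinear(img, [16, 16]);
console.log(resized.shape);               // [1, 16, 16, 3]

// Non-max suppression: boxes are [y1, x1, y2, x2]; returns kept box indices.
const boxes = tf.tensor2d([[0, 0, 1, 1], [0, 0, 0.9, 0.9]]);
const scores = tf.tensor1d([0.9, 0.8]);
tf.image.nonMaxSuppression(boxes, scores, 1 /* maxOutputSize */, 0.5).print();
```

Here the second box overlaps the first well above the 0.5 IoU threshold, so only the higher-scoring box index is kept.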
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as seedrandom from 'seedrandom';\nimport {ENV} from '../environment';\nimport {warn} from '../log';\nimport * as array_ops_util from '../ops/array_ops_util';\nimport * as axis_util from '../ops/axis_util';\nimport * as broadcast_util from '../ops/broadcast_util';\nimport * as concat_util from '../ops/concat_util';\nimport {Conv2DInfo} from '../ops/conv_util';\nimport * as erf_util from '../ops/erf_util';\nimport * as gather_nd_util from '../ops/gather_nd_util';\nimport * as ops from '../ops/ops';\nimport {buffer, scalar, tensor, tensor3d, tensor4d} from '../ops/ops';\nimport * as scatter_nd_util from '../ops/scatter_nd_util';\nimport * as selu_util from '../ops/selu_util';\nimport {getStridedSlicedInfo} from '../ops/slice_util';\nimport {DataId, Scalar, setTensorTracker, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D, TensorBuffer} from '../tensor';\nimport {DataType, DataTypeMap, Rank, ShapeMap, TypedArray, upcastType} from '../types';\nimport * as util from '../util';\nimport {now} from '../util';\nimport {BackendTimingInfo, DataMover, DataStorage, KernelBackend} from './backend';\nimport * as backend_util from './backend_util';\nimport * as complex_util from './complex_util';\nimport {nonMaxSuppressionImpl} from './non_max_suppression_impl';\nimport {split} from './split_shared';\nimport {topkImpl} from './topk_impl';\nimport {whereImpl} from './where_impl';\n\ninterface TensorData<T extends DataType> {\n values?: DataTypeMap[T];\n dtype: T;\n // For complex numbers, the real and imaginary parts are stored as their own\n // individual tensors, with a parent joining the two with the\n // complexTensors field. When this is defined, texture will be null.\n complexTensors?: {real: Tensor, imag: Tensor};\n}\n\nexport class MathBackendCPU implements KernelBackend {\n public blockSize = 48;\n\n private data: DataStorage<TensorData<DataType>>;\n private fromPixels2DContext: CanvasRenderingContext2D;\n private firstUse = true;\n\n constructor() {\n if (ENV.get('IS_BROWSER')) {\n this.fromPixels2DContext =\n document.createElement('canvas').getContext('2d');\n }\n }\n\n setDataMover(dataMover: DataMover): void {\n this.data = new DataStorage(dataMover);\n }\n\n register(dataId: DataId, shape: number[], dtype: DataType): void {\n if (this.firstUse) {\n this.firstUse = false;\n if (ENV.get('IS_NODE')) {\n warn(\n '\\n============================\\n' +\n 'Hi there 👋. Looks like you are running TensorFlow.js in ' +\n 'Node.js. To speed things up dramatically, install our node ' +\n 'backend, which binds to TensorFlow C++, by running ' +\n 'npm i @tensorflow/tfjs-node, ' +\n 'or npm i @tensorflow/tfjs-node-gpu if you have CUDA. ' +\n 'Then call require(\\'@tensorflow/tfjs-node\\'); (-gpu ' +\n 'suffix for CUDA) at the start of your program. ' +\n 'Visit https://github.com/tensorflow/tfjs-node for more details.' 
+\n '\\n============================\\n');\n }\n }\n if (this.data.has(dataId)) {\n throw new Error(`Data buffer is already registered`);\n }\n this.data.set(dataId, {dtype});\n }\n write(dataId: DataId, values: TypedArray): void {\n if (values == null) {\n throw new Error('MathBackendCPU.write(): values can not be null');\n }\n this.data.get(dataId).values = values;\n }\n fromPixels(\n pixels: ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement,\n numChannels: number): Tensor3D {\n if (pixels == null) {\n throw new Error('pixels passed to tf.fromPixels() can not be null');\n }\n let vals: Uint8ClampedArray;\n // tslint:disable-next-line:no-any\n if (ENV.get('IS_NODE') && (pixels as any).getContext == null) {\n throw new Error(\n 'When running in node, pixels must be an HTMLCanvasElement ' +\n 'like the one returned by the `canvas` npm package');\n }\n // tslint:disable-next-line:no-any\n if ((pixels as any).getContext != null) {\n // tslint:disable-next-line:no-any\n vals = (pixels as any)\n .getContext('2d')\n .getImageData(0, 0, pixels.width, pixels.height)\n .data;\n } else if (pixels instanceof ImageData) {\n vals = pixels.data;\n } else if (\n pixels instanceof HTMLImageElement ||\n pixels instanceof HTMLVideoElement) {\n if (this.fromPixels2DContext == null) {\n throw new Error(\n 'Can\\'t read pixels from HTMLImageElement outside ' +\n 'the browser.');\n }\n this.fromPixels2DContext.canvas.width = pixels.width;\n this.fromPixels2DContext.canvas.height = pixels.height;\n this.fromPixels2DContext.drawImage(\n pixels, 0, 0, pixels.width, pixels.height);\n vals = this.fromPixels2DContext\n .getImageData(0, 0, pixels.width, pixels.height)\n .data;\n } else {\n throw new Error(\n 'pixels passed to tf.fromPixels() must be either an ' +\n `HTMLVideoElement, HTMLImageElement, HTMLCanvasElement or ` +\n `ImageData, but was ${(pixels as {}).constructor.name}`);\n }\n let values: Int32Array;\n if (numChannels === 4) {\n values = new Int32Array(vals);\n } else {\n const numPixels = pixels.width * pixels.height;\n values = new Int32Array(numPixels * numChannels);\n for (let i = 0; i < numPixels; i++) {\n for (let channel = 0; channel < numChannels; ++channel) {\n values[i * numChannels + channel] = vals[i * 4 + channel];\n }\n }\n }\n const outShape: [number, number, number] =\n [pixels.height, pixels.width, numChannels];\n return tensor3d(values, outShape, 'int32');\n }\n async read(dataId: DataId): Promise<TypedArray> {\n return this.readSync(dataId);\n }\n readSync(dataId: DataId): TypedArray {\n const {dtype, complexTensors} = this.data.get(dataId);\n if (dtype === 'complex64') {\n const realValues = complexTensors.real.dataSync() as Float32Array;\n const imagValues = complexTensors.imag.dataSync() as Float32Array;\n return complex_util.mergeRealAndImagArrays(realValues, imagValues);\n }\n return this.data.get(dataId).values;\n }\n\n disposeData(dataId: DataId): void {\n if (this.data.has(dataId)) {\n const {complexTensors} = this.data.get(dataId);\n if (complexTensors != null) {\n complexTensors.real.dispose();\n complexTensors.imag.dispose();\n }\n this.data.delete(dataId);\n }\n }\n\n async time(f: () => void): Promise<BackendTimingInfo> {\n const start = now();\n f();\n const kernelMs = now() - start;\n return {kernelMs};\n }\n\n memory() {\n return {\n // Unreliable due to automatic gc. 
The numbers above are cumulative.\n unreliable: true\n };\n }\n\n complex<T extends Tensor>(real: T, imag: T): T {\n const result = Tensor.make(real.shape, {}, 'complex64') as T;\n\n const resultData = this.data.get(result.dataId);\n // The backend owns the reference to the underlying real and imaginary\n // clones. These will explicitly get disposed when the complex tensor is\n // disposed.\n resultData.complexTensors = {\n real: ENV.engine.keep(real.clone()),\n imag: ENV.engine.keep(imag.clone())\n };\n\n return result;\n }\n real<T extends Tensor>(input: T): T {\n const resultData = this.data.get(input.dataId);\n return resultData.complexTensors.real.clone() as T;\n }\n imag<T extends Tensor>(input: T): T {\n const resultData = this.data.get(input.dataId);\n return resultData.complexTensors.imag.clone() as T;\n }\n\n private assertNotComplex(tensor: Tensor|Tensor[], opName: string) {\n if (!Array.isArray(tensor)) {\n tensor = [tensor];\n }\n tensor.forEach(t => {\n if (t != null) {\n util.assert(\n t.dtype !== 'complex64',\n `${opName} does not support complex64 tensors.`);\n }\n });\n }\n\n slice<T extends Tensor>(x: T, begin: number[], size: number[]): T {\n this.assertNotComplex(x, 'slice');\n\n const buffer = ops.buffer(size, x.dtype);\n\n for (let i = 0; i < buffer.size; ++i) {\n const loc = buffer.indexToLoc(i);\n const xLoc = loc.map((idx, j) => idx + begin[j]);\n buffer.set(x.get(...xLoc), ...loc);\n }\n return buffer.toTensor() as T;\n }\n\n stridedSlice<T extends Tensor>(\n x: T, begin: number[], end: number[], strides: number[],\n beginMask: number, endMask: number, ellipsisMask: number,\n newAxisMask: number, shrinkAxisMask: number): T {\n this.assertNotComplex(x, 'stridedSlice');\n\n const [beginIndex, size, shrinkAxis] = getStridedSlicedInfo(\n x.shape, begin, end, strides, beginMask, endMask, ellipsisMask,\n newAxisMask, shrinkAxisMask);\n\n const shape = size.filter((v, index) => shrinkAxis.indexOf(index) === -1);\n\n if (shape.some(axis => axis === 0)) {\n return ops.tensor([], shape) as T;\n }\n\n const buffer = ops.buffer(size, x.dtype);\n\n for (let i = 0; i < buffer.size; i++) {\n const loc = buffer.indexToLoc(i);\n\n const newLoc: number[] = new Array(loc.length);\n for (let j = 0; j < newLoc.length; j++) {\n newLoc[j] = loc[j] * strides[j] + beginIndex[j];\n }\n buffer.set(x.get(...newLoc), ...loc);\n }\n\n return buffer.toTensor().reshape(shape) as T;\n }\n\n reverse<T extends Tensor>(x: T, axis: number[]): T {\n this.assertNotComplex(x, 'reverse');\n\n const buffer = ops.buffer(x.shape, x.dtype);\n const xBuffer = x.buffer();\n\n for (let i = 0; i < buffer.size; i++) {\n const outLoc = buffer.indexToLoc(i);\n const inLoc = outLoc.slice();\n axis.forEach(ax => inLoc[ax] = x.shape[ax] - 1 - inLoc[ax]);\n buffer.set(xBuffer.get(...inLoc), ...outLoc);\n }\n\n return buffer.toTensor() as T;\n }\n\n concat(tensors: Tensor[], axis: number): Tensor {\n this.assertNotComplex(tensors, 'concat');\n const tensors2D = tensors.map(t => {\n const innerSize = util.sizeFromShape(t.shape.slice(axis));\n return t.as2D(-1, innerSize);\n });\n const outShape =\n concat_util.computeOutShape(tensors2D.map(t => t.shape), 1 /* axis */);\n const values =\n ops.buffer<Rank.R2>(outShape as [number, number], tensors[0].dtype)\n .values;\n if (tensors2D[0].shape[0] === 1) {\n // Use built-in TypedArray.set() method for speed.\n let offset = 0;\n tensors2D.forEach(t => {\n values.set(t.dataSync(), offset);\n offset += t.size;\n });\n } else {\n let colOffset = 0;\n tensors2D.forEach(t => {\n 
const tVals = t.dataSync();\n let tIdx = 0;\n for (let row = 0; row < t.shape[0]; ++row) {\n const resIdx = row * outShape[1] + colOffset;\n for (let col = 0; col < t.shape[1]; ++col) {\n values[resIdx + col] = tVals[tIdx++];\n }\n }\n colOffset += t.shape[1];\n });\n }\n const finalOutShape =\n concat_util.computeOutShape(tensors.map(t => t.shape), axis);\n return tensor(values, finalOutShape, tensors[0].dtype);\n }\n\n neg<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'neg');\n\n return this.multiply(ops.scalar(-1), x) as T;\n }\n\n add(a: Tensor, b: Tensor): Tensor {\n if (a.dtype === 'complex64' || b.dtype === 'complex64') {\n return this.broadcastedBinaryComplexOp(\n a.cast('complex64'), b.cast('complex64'),\n (aReal, aImag, bReal, bImag) => {\n return {real: aReal + bReal, imag: aImag + bImag};\n }) as Tensor;\n }\n\n return this.broadcastedBinaryOp(\n a, b, upcastType(a.dtype, b.dtype),\n (aValue, bValue) => aValue + bValue) as Tensor;\n }\n\n addN<T extends Tensor>(tensors: T[]): T {\n this.assertNotComplex(tensors, 'addN');\n\n const vals = tensors.map(t => t.dataSync());\n const result = ops.buffer(tensors[0].shape, tensors[0].dtype);\n const resultVals = result.values;\n for (let i = 0; i < tensors.length; i++) {\n const currVals = vals[i];\n for (let j = 0; j < resultVals.length; j++) {\n resultVals[j] += currVals[j];\n }\n }\n return result.toTensor() as T;\n }\n\n subtract(a: Tensor, b: Tensor): Tensor {\n if (a.dtype === 'complex64' || b.dtype === 'complex64') {\n return this.broadcastedBinaryComplexOp(\n a.cast('complex64'), b.cast('complex64'),\n (aReal, aImag, bReal, bImag) => {\n return {real: aReal - bReal, imag: aImag - bImag};\n }) as Tensor;\n }\n\n return this.broadcastedBinaryOp(\n a, b, upcastType(a.dtype, b.dtype),\n (aValue, bValue) => aValue - bValue) as Tensor;\n }\n\n pow<T extends Tensor>(a: T, b: Tensor): T {\n this.assertNotComplex([a, b], 'pow');\n\n return this.broadcastedBinaryOp(\n a, b, a.dtype, (aValue, bValue) => Math.pow(aValue, bValue)) as\n T;\n }\n\n batchMatMul(\n a: Tensor3D, b: Tensor3D, transposeA: boolean,\n transposeB: boolean): Tensor3D {\n this.assertNotComplex([a, b], 'matMul');\n\n const sharedDim = transposeA ? a.shape[1] : a.shape[2];\n const leftDim = transposeA ? a.shape[2] : a.shape[1];\n const rightDim = transposeB ? 
b.shape[1] : b.shape[2];\n const batchDim = a.shape[0];\n\n const aValues = a.dataSync();\n const bValues = b.dataSync();\n const [aBatch, aOuterStep, aInnerStep] = transposeA ?\n [a.strides[0], 1, a.strides[1]] :\n [a.strides[0], a.strides[1], 1];\n const [bInnerStep, bOuterStep, bBatch] = transposeB ?\n [1, b.strides[1], b.strides[0]] :\n [b.strides[1], 1, b.strides[0]];\n\n const size = leftDim * rightDim;\n const result = new Float32Array(batchDim * size);\n\n const blockSize = this.blockSize;\n\n for (let b = 0; b < batchDim; b++) {\n for (let i0 = 0; i0 < leftDim; i0 += blockSize) {\n for (let j0 = 0; j0 < rightDim; j0 += blockSize) {\n for (let k0 = 0; k0 < sharedDim; k0 += blockSize) {\n // for when blockSize doesn't evenly divide the input\n const iBlock = Math.min(i0 + blockSize, leftDim);\n const jBlock = Math.min(j0 + blockSize, rightDim);\n const kBlock = Math.min(k0 + blockSize, sharedDim);\n\n for (let i = i0; i < iBlock; i++) {\n for (let j = j0; j < jBlock; j++) {\n let sum = 0.0;\n\n for (let k = k0; k < kBlock; k++) {\n sum += aValues[b * aBatch + i * aOuterStep + k * aInnerStep] *\n bValues[k * bInnerStep + j * bOuterStep + b * bBatch];\n }\n result[b * size + (i * rightDim + j)] += sum;\n }\n }\n }\n }\n }\n }\n\n return ops.tensor3d(result, [batchDim, leftDim, rightDim]);\n }\n\n multiply(a: Tensor, b: Tensor): Tensor {\n if (a.dtype === 'complex64' || b.dtype === 'complex64') {\n return this.broadcastedBinaryComplexOp(\n a.cast('complex64'), b.cast('complex64'),\n (aReal, aImag, bReal, bImag) => {\n return {\n real: aReal * bReal - aImag * bImag,\n imag: aReal * bImag + aImag * bReal\n };\n }) as Tensor;\n }\n\n return this.broadcastedBinaryOp(\n a, b, upcastType(a.dtype, b.dtype),\n (aValue, bValue) => aValue * bValue) as Tensor;\n }\n\n realDivide(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'realDivide');\n\n const op = (a: number, b: number) => a / b;\n const outputDtype = 'float32';\n return this.broadcastedBinaryOp(a, b, outputDtype, op) as Tensor;\n }\n\n floorDiv(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'floorDiv');\n\n const op = (a: number, b: number) => Math.floor(a / b);\n const outputDtype = 'int32';\n return this.broadcastedBinaryOp(a, b, outputDtype, op) as Tensor;\n }\n\n sum(x: Tensor, axes: number[]): Tensor {\n this.assertNotComplex(x, 'sum');\n\n axis_util.assertAxesAreInnerMostDims('sum', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = ops.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let sum = 0;\n for (let j = 0; j < reduceSize; ++j) {\n sum += aVals[offset + j];\n }\n vals[i] = sum;\n }\n return result;\n }\n\n prod(x: Tensor, axes: number[]): Tensor {\n this.assertNotComplex(x, 'sum');\n\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = ops.zeros(outShape, resultDtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let prod = 1;\n for (let j = 0; j < reduceSize; ++j) {\n prod *= aVals[offset + j];\n }\n vals[i] = prod;\n }\n return result;\n }\n\n 
unsortedSegmentSum<T extends Tensor>(\n x: T, segmentIds: Tensor1D, numSegments: number): Tensor {\n this.assertNotComplex(x, 'unsortedSegmentSum');\n\n const res = [];\n\n // Reshape the segment id's so that they can be broadcast with\n // x. The new shape should be [segmentIds.shape, 1, ..., 1]\n const numIters = x.rank - segmentIds.rank;\n for (let i = 0; i < numIters; ++i) {\n segmentIds = segmentIds.expandDims(i + 1);\n }\n\n for (let i = 0; i < numSegments; ++i) {\n const segmentId = ops.scalar(i, 'int32');\n const mask = ops.equal(segmentId, segmentIds).asType('float32');\n const sum = mask.mul(x).sum(0);\n res.push(sum);\n }\n\n return ops.stack(res);\n }\n\n argMin(x: Tensor, axis: number): Tensor {\n this.assertNotComplex(x, 'argMin');\n\n const axes = [axis];\n axis_util.assertAxesAreInnerMostDims('argMin', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const result = ops.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n let minIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n minIndex = j;\n }\n }\n vals[i] = minIndex;\n }\n return result;\n }\n\n argMax(x: Tensor, axis: number): Tensor {\n this.assertNotComplex(x, 'argMax');\n\n const axes = [axis];\n axis_util.assertAxesAreInnerMostDims('argMax', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const result = ops.zeros(outShape, 'int32');\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n let maxIndex = 0;\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n maxIndex = j;\n }\n }\n vals[i] = maxIndex;\n }\n return result;\n }\n\n cumsum(x: Tensor, axis: number, exclusive: boolean, reverse: boolean):\n Tensor {\n this.assertNotComplex(x, 'cumsum');\n\n if (axis !== x.rank - 1) {\n throw new Error(\n `backend.cumsum in CPU expects an inner-most axis=${x.rank - 1} ` +\n `but got axis=${axis}`);\n }\n const resultDtype = upcastType(x.dtype, 'int32');\n const result = ops.zeros(x.shape, resultDtype);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n const finalDim = x.shape[x.rank - 1];\n const indexAdjuster = reverse ?\n (i: number, j: number) => i + finalDim - j - 1 :\n (i: number, j: number) => i + j;\n for (let i = 0; i < aVals.length; i += finalDim) {\n for (let j = 0; j < finalDim; j++) {\n const idx = indexAdjuster(i, j);\n if (j === 0) {\n vals[idx] = exclusive ? 0 : aVals[idx];\n } else {\n const prevIdx = indexAdjuster(i, j - 1);\n vals[idx] = exclusive ? aVals[prevIdx] + vals[prevIdx] :\n aVals[idx] + vals[prevIdx];\n }\n }\n }\n return result;\n }\n\n equal(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'equal');\n\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal === bVal) ? 1 : 0;\n });\n }\n\n notEqual(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'notEqual');\n\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal !== bVal) ? 
1 : 0;\n });\n }\n\n less(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'less');\n\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal < bVal) ? 1 : 0;\n });\n }\n\n lessEqual(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'lessEqual');\n\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal <= bVal) ? 1 : 0;\n });\n }\n\n greater(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'greater');\n\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal > bVal) ? 1 : 0;\n });\n }\n\n greaterEqual(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'greaterEqual');\n\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return (aVal >= bVal) ? 1 : 0;\n });\n }\n\n logicalNot<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'logicalNot');\n\n const values = x.dataSync();\n const newValues = new Int32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = values[i] ? 0 : 1;\n }\n return Tensor.make(x.shape, {values: newValues}, 'bool') as T;\n }\n\n logicalAnd(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'logicalAnd');\n\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal && bVal;\n });\n }\n\n logicalOr(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'logicalOr');\n\n return this.broadcastedBinaryOp(a, b, 'bool', (aVal, bVal) => {\n return aVal || bVal;\n });\n }\n\n select(condition: Tensor, a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([condition, a, b], 'select');\n\n const values = condition.dataSync();\n const aValues = a.dataSync();\n const bValues = b.dataSync();\n const result = ops.zeros(a.shape, upcastType(a.dtype, b.dtype));\n const newValues = result.dataSync();\n let index = 0;\n const offset = condition.rank === 0 || condition.rank > 1 || a.rank === 1 ?\n 1 :\n a.shape[1];\n\n for (let i = 0; i < values.length; i++) {\n for (let j = 0; j < offset; j++) {\n if (values[i] === 1) {\n newValues[index++] = aValues[i];\n } else {\n newValues[index++] = bValues[i];\n }\n }\n }\n return result;\n }\n\n where(condition: Tensor): Tensor2D {\n this.assertNotComplex([condition], 'where');\n\n const condVals = condition.dataSync();\n return whereImpl(condition.shape, condVals);\n }\n\n topk<T extends Tensor>(x: T, k: number, sorted: boolean): [T, T] {\n this.assertNotComplex(x, 'topk');\n\n const xVals = x.dataSync();\n return topkImpl(xVals, x.shape, x.dtype, k, sorted);\n }\n\n min(x: Tensor, axes: number[]): Tensor {\n this.assertNotComplex(x, 'min');\n\n axis_util.assertAxesAreInnerMostDims('min', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const result = ops.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let min = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value < min) {\n min = value;\n }\n }\n vals[i] = min;\n }\n return result;\n }\n\n minimum(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'minimum');\n\n return this.broadcastedBinaryOp(\n a, b, a.dtype, (aVal, bVal) => Math.min(aVal, bVal));\n }\n\n mod(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'mod');\n\n return this.broadcastedBinaryOp(a, b, a.dtype, 
(aVal, bVal) => {\n const rem = aVal % bVal;\n if ((aVal < 0 && bVal < 0) || (aVal >= 0 && bVal >= 0)) {\n return rem;\n } else {\n return (rem + bVal) % bVal;\n }\n });\n }\n\n max(x: Tensor, axes: number[]): Tensor {\n this.assertNotComplex(x, 'max');\n\n axis_util.assertAxesAreInnerMostDims('max', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const result = ops.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let max = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n if (value > max) {\n max = value;\n }\n }\n vals[i] = max;\n }\n return result;\n }\n\n maximum(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'maximum');\n\n return this.broadcastedBinaryOp(\n a, b, a.dtype, (aVal, bVal) => Math.max(aVal, bVal));\n }\n\n all(x: Tensor, axes: number[]): Tensor {\n this.assertNotComplex(x, 'all');\n\n axis_util.assertAxesAreInnerMostDims('all', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const result = ops.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let all = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n all = all && value;\n }\n vals[i] = all;\n }\n return result;\n }\n\n any(x: Tensor, axes: number[]): Tensor {\n this.assertNotComplex(x, 'any');\n\n axis_util.assertAxesAreInnerMostDims('any', axes, x.rank);\n const [outShape, reduceShape] =\n axis_util.computeOutAndReduceShapes(x.shape, axes);\n const result = ops.zeros(outShape, x.dtype);\n const reduceSize = util.sizeFromShape(reduceShape);\n const vals = result.dataSync();\n\n const aVals = x.dataSync();\n for (let i = 0; i < vals.length; ++i) {\n const offset = i * reduceSize;\n let anyVal = aVals[offset];\n for (let j = 0; j < reduceSize; ++j) {\n const value = aVals[offset + j];\n anyVal = anyVal || value;\n }\n vals[i] = anyVal;\n }\n return result;\n }\n\n squaredDifference(a: Tensor, b: Tensor): Tensor {\n this.assertNotComplex([a, b], 'squaredDifference');\n\n return this.broadcastedBinaryOp(a, b, a.dtype, (aVal, bVal) => {\n const diff = aVal - bVal;\n return diff * diff;\n });\n }\n\n ceil<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'ceil');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = Math.ceil(values[i]);\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n floor<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'floor');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = Math.floor(values[i]);\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n sign<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'x');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n if (values[i] < 0) {\n newValues[i] = -1;\n } else if (values[i] > 0) {\n newValues[i] = 1;\n } else {\n newValues[i] = 0;\n }\n }\n return Tensor.make(x.shape, {values: 
newValues}) as T;\n }\n\n round<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'round');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n // The algorithm is based on banker's rounding.\n const base = Math.floor(values[i]);\n if (values[i] - base < 0.5) {\n newValues[i] = Math.floor(values[i]);\n } else if (values[i] - base > 0.5) {\n newValues[i] = Math.ceil(values[i]);\n } else {\n if (base % 2.0 === 0.0) {\n newValues[i] = base;\n } else {\n newValues[i] = base + 1.0;\n }\n }\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n exp<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'exp');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = Math.exp(values[i]);\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n expm1<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'expm1');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = Math.expm1(values[i]);\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n log<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'log');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = Math.log(value);\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n log1p<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'log1p');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = Math.log1p(value);\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n sqrt<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'sqrt');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = Math.sqrt(value);\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n rsqrt<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'rsqrt');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = 1 / Math.sqrt(value);\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n square<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'square');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n newValues[i] = value * value;\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n reciprocal<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'reciprocal');\n\n const values = x.dataSync();\n const newValues = new Float32Array(values.length);\n for (let i = 0; i < values.length; ++i) {\n newValues[i] = 1 / values[i];\n }\n return Tensor.make(x.shape, {values: newValues}) as T;\n }\n\n relu<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'relu');\n\n const res = ops.zeros(x.shape, x.dtype);\n const resVals = res.dataSync();\n const inVals = x.dataSync();\n for (let i = 0; i < inVals.length; ++i) {\n resVals[i] = Math.max(0, inVals[i]);\n }\n return res as T;\n }\n\n elu<T extends Tensor>(x: 
T): T {\n this.assertNotComplex(x, 'elu');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n if (v >= 0) {\n resultValues[i] = v;\n } else {\n resultValues[i] = (Math.exp(v) - 1);\n }\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n eluDer<T extends Tensor>(dy: T, y: T): T {\n this.assertNotComplex([dy, y], 'eluDer');\n\n const resultValues = new Float32Array(y.size);\n const values = y.dataSync();\n const dyValues = dy.dataSync();\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n if (v >= 1) {\n resultValues[i] = dyValues[i];\n } else {\n resultValues[i] = dyValues[i] * (v + 1);\n }\n }\n return Tensor.make(y.shape, {values: resultValues}) as T;\n }\n\n selu<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'selu');\n\n // Stable and Attracting Fixed Point (0, 1) for Normalized Weights.\n // see: https://arxiv.org/abs/1706.02515\n const scaleAlpha = selu_util.SELU_SCALEALPHA;\n const scale = selu_util.SELU_SCALE;\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n if (v >= 0) {\n resultValues[i] = scale * v;\n } else {\n resultValues[i] = scaleAlpha * (Math.exp(v) - 1);\n }\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n clip<T extends Tensor>(x: T, min: number, max: number): T {\n this.assertNotComplex(x, 'clip');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n resultValues[i] = v > max ? max : (v < min ? min : v);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n abs<T extends Tensor>(x: T): T {\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.abs(values[i]);\n }\n\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n complexAbs<T extends Tensor>(x: T): T {\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n\n for (let i = 0; i < x.size; ++i) {\n const real = values[i * 2];\n const imag = values[i * 2 + 1];\n resultValues[i] = Math.sqrt(real * real + imag * imag);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n int<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'int');\n\n const resultValues = new Int32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = values[i];\n }\n return Tensor.make(x.shape, {values: resultValues}, 'int32');\n }\n\n sigmoid<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'sigmoid');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = 1 / (1 + Math.exp(-values[i]));\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n softplus<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'softplus');\n\n // mirrors the implementation of tf.nn.softplus: https://goo.gl/vkcvwX\n\n // epsilon is the difference between 1.0 and the next representable float.\n // For a single precision 32 bit float this should be 2^-23, see:\n // https://math.byu.edu/~schow/work/IEEEFloatingPoint.htm\n const epsilon = 1.1920928955078125e-7;\n const threshold = Math.log(epsilon) + 2.0;\n\n const resultValues = new Float32Array(x.size);\n const values = 
x.dataSync();\n\n for (let i = 0; i < values.length; ++i) {\n // Value above which exp(x) may overflow, but softplus(x) == x\n // is within machine epsilon.\n const tooLarge = values[i] > -threshold;\n\n // Value below which exp(x) may underflow, but softplus(x) == exp(x)\n // is within machine epsilon.\n const tooSmall = values[i] < threshold;\n\n const expX = Math.exp(values[i]);\n let result;\n\n if (tooSmall) {\n result = expX;\n } else if (tooLarge) {\n result = values[i];\n } else {\n result = Math.log(1.0 + expX);\n }\n resultValues[i] = result;\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n sin<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'sin');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.sin(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n cos<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'cos');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.cos(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n tan<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'tan');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.tan(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n asin<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'asin');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.asin(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n acos<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'acos');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.acos(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n atan<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'atan');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.atan(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n atan2<T extends Tensor>(a: T, b: T): T {\n this.assertNotComplex([a, b], 'atan2');\n\n return this.broadcastedBinaryOp(\n a, b, a.dtype, (aValue, bValue) => Math.atan2(aValue, bValue)) as\n T;\n }\n\n sinh<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'sinh');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.sinh(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n cosh<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'cosh');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.cosh(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n tanh<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'tanh');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = util.tanh(values[i]);\n }\n return 
Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n asinh<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'asinh');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.asinh(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n acosh<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'acosh');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.acosh(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n atanh<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'atanh');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n resultValues[i] = Math.atanh(values[i]);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n erf<T extends Tensor>(x: T): T {\n this.assertNotComplex(x, 'erf');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n const p = erf_util.ERF_P;\n const a1 = erf_util.ERF_A1;\n const a2 = erf_util.ERF_A2;\n const a3 = erf_util.ERF_A3;\n const a4 = erf_util.ERF_A4;\n const a5 = erf_util.ERF_A5;\n for (let i = 0; i < values.length; ++i) {\n const v = values[i];\n const t = 1.0 / (1.0 + p * v);\n resultValues[i] = 1.0 -\n (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t *\n Math.exp(-v * v);\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n step<T extends Tensor>(x: T, alpha = 0): T {\n this.assertNotComplex(x, 'step');\n\n const resultValues = new Float32Array(x.size);\n const values = x.dataSync();\n for (let i = 0; i < values.length; ++i) {\n const value = values[i];\n if (isNaN(value)) {\n resultValues[i] = NaN;\n } else {\n resultValues[i] = value > 0 ? 
1 : alpha;\n }\n }\n return Tensor.make(x.shape, {values: resultValues}) as T;\n }\n\n conv2d(x: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n this.assertNotComplex([x, filter], 'conv2d');\n\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const padLeft = convInfo.padInfo.left;\n const padTop = convInfo.padInfo.top;\n const y = ops.buffer<Rank.R4>(convInfo.outShape, x.dtype);\n\n const xVals = x.dataSync();\n const wVals = filter.dataSync();\n const yVals = y.values;\n\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * x.strides[0];\n const yOffset1 = b * y.strides[0];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * y.strides[1];\n const xRCorner = yR * convInfo.strideHeight - padLeft;\n for (let wR = 0; wR < filterHeight; wR++) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filter.strides[0];\n const xOffset2 = xOffset1 + xR * x.strides[1];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * convInfo.outChannels;\n const xCCorner = yC * convInfo.strideWidth - padTop;\n for (let wC = 0; wC < filterWidth; wC++) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filter.strides[1];\n const xOffset3 = xOffset2 + xC * convInfo.inChannels;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1];\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n yVals[yOffset3 + d2] += xVal * wVals[wOffset3 + d2];\n }\n wOffset3 += convInfo.outChannels;\n }\n }\n }\n }\n }\n }\n return y.toTensor();\n }\n\n conv2dDerInput(dy: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n this.assertNotComplex([dy, filter], 'conv2dDerInput');\n\n const dx = ops.buffer<Rank.R4>(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2] = dx.strides;\n const dyValues = dy.dataSync();\n const [dyS0, dyS1, dyS2] = dy.strides;\n const fltValues = filter.dataSync();\n const [fltS0, fltS1, fltS2] = filter.strides;\n const {\n batchSize,\n filterHeight,\n filterWidth,\n inChannels,\n inHeight,\n inWidth,\n outChannels,\n outHeight,\n outWidth,\n strideHeight,\n strideWidth\n } = convInfo;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax =\n Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax =\n Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yR + dyS2 * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n\n for (let d2 = 0; d2 < outChannels; ++d2) {\n 
const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + d2];\n dotProd += pixel * weight;\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xR + dxS2 * xC + d1] = dotProd;\n }\n }\n }\n }\n return dx.toTensor();\n }\n\n conv2dDerFilter(x: Tensor4D, dy: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n this.assertNotComplex([x, dy], 'conv2dDerFilter');\n\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dW = ops.buffer<Rank.R4>(convInfo.filterShape, 'float32');\n\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(\n convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(\n convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n // Need to convolve.\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n dotProd += x.get(b, xR, xC, d1) * dy.get(b, yR, yC, d2);\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, d2);\n }\n }\n }\n }\n return dW.toTensor();\n }\n\n depthwiseConv2D(x: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n this.assertNotComplex([x, filter], 'depthwiseConv2D');\n\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const padLeft = convInfo.padInfo.left;\n const padTop = convInfo.padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n const y = ops.buffer<Rank.R4>(convInfo.outShape, x.dtype);\n const xVals = x.dataSync();\n const wVals = filter.dataSync();\n const yVals = y.values;\n\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const xOffset1 = b * x.strides[0];\n const yOffset1 = b * y.strides[0];\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const yOffset2 = yOffset1 + yR * y.strides[1];\n const xRCorner = yR * convInfo.strideHeight - padLeft;\n for (let wR = 0; wR < filterHeight; ++wR) {\n const xR = xRCorner + wR * dilationHeight;\n if (xR < 0 || xR >= convInfo.inHeight) {\n continue;\n }\n const wOffset1 = wR * filter.strides[0];\n const xOffset2 = xOffset1 + xR * x.strides[1];\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const yOffset3 = yOffset2 + yC * y.strides[2];\n const xCCorner = yC * convInfo.strideWidth - padTop;\n for (let wC = 0; wC < filterWidth; ++wC) {\n const xC = xCCorner + wC * dilationWidth;\n if (xC < 0 || xC >= convInfo.inWidth) {\n continue;\n }\n const wOffset2 = wOffset1 + wC * filter.strides[1];\n const xOffset3 = xOffset2 + xC * convInfo.inChannels;\n let yOffset4 = yOffset3;\n let wOffset3 = wOffset2;\n for (let d1 = 0; d1 < convInfo.inChannels; ++d1) {\n const xVal = xVals[xOffset3 + d1];\n for (let q = 0; q < chMul; ++q) {\n yVals[yOffset4 + q] += xVal * wVals[wOffset3 + q];\n }\n yOffset4 += chMul;\n wOffset3 += chMul;\n }\n }\n }\n }\n }\n }\n\n return y.toTensor();\n }\n\n 
depthwiseConv2DDerInput(dy: Tensor4D, filter: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n this.assertNotComplex([dy, filter], 'depthwiseConv2DDerInput');\n\n const dx = ops.buffer<Rank.R4>(convInfo.inShape, 'float32');\n const dxValues = dx.values;\n const [dxS0, dxS1, dxS2] = dx.strides;\n const dyValues = dy.dataSync();\n const [dyS0, dyS1, dyS2] = dy.strides;\n const fltValues = filter.dataSync();\n const [fltS0, fltS1, fltS2] = filter.strides;\n const {\n batchSize,\n filterHeight,\n filterWidth,\n inChannels,\n inHeight,\n inWidth,\n outChannels,\n outHeight,\n outWidth,\n strideHeight,\n strideWidth\n } = convInfo;\n const topPad = filterHeight - 1 - convInfo.padInfo.top;\n const leftPad = filterWidth - 1 - convInfo.padInfo.left;\n const chMul = outChannels / inChannels;\n\n for (let b = 0; b < batchSize; ++b) {\n for (let d1 = 0; d1 < inChannels; ++d1) {\n for (let xR = 0; xR < inHeight; ++xR) {\n const xRCorner = xR - topPad;\n const xRMin = Math.max(0, Math.ceil(xRCorner / strideHeight));\n const yRMax =\n Math.min(outHeight, (filterHeight + xRCorner) / strideHeight);\n\n for (let xC = 0; xC < inWidth; ++xC) {\n const xCCorner = xC - leftPad;\n const xCMin = Math.max(0, Math.ceil(xCCorner / strideWidth));\n const yCMax =\n Math.min(outWidth, (filterWidth + xCCorner) / strideWidth);\n\n let dotProd = 0;\n for (let yR = xRMin; yR < yRMax; ++yR) {\n const wR = yR * strideHeight - xRCorner;\n\n for (let yC = xCMin; yC < yCMax; ++yC) {\n const wC = yC * strideWidth - xCCorner;\n const dyOffset = dyS0 * b + dyS1 * yR + dyS2 * yC;\n const fltOffset = fltS0 * (filterHeight - 1 - wR) +\n fltS1 * (filterWidth - 1 - wC) + fltS2 * d1;\n\n for (let dm = 0; dm < chMul; ++dm) {\n const d2 = d1 * chMul + dm;\n const pixel = dyValues[dyOffset + d2];\n const weight = fltValues[fltOffset + dm];\n dotProd += pixel * weight;\n }\n }\n }\n dxValues[dxS0 * b + dxS1 * xR + dxS2 * xC + d1] = dotProd;\n }\n }\n }\n }\n return dx.toTensor();\n }\n\n depthwiseConv2DDerFilter(x: Tensor4D, dy: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n this.assertNotComplex([x, dy], 'depthwiseConv2DDerFilter');\n\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dW = ops.buffer<Rank.R4>(convInfo.filterShape, 'float32');\n\n const leftPad = convInfo.padInfo.left;\n const topPad = convInfo.padInfo.top;\n const chMul = convInfo.outChannels / convInfo.inChannels;\n\n for (let wR = 0; wR < filterHeight; ++wR) {\n const yRMin = Math.max(0, Math.ceil((topPad - wR) / strideHeight));\n const yRMax = Math.min(\n convInfo.outHeight, (convInfo.inHeight + topPad - wR) / strideHeight);\n\n for (let wC = 0; wC < filterWidth; ++wC) {\n const yCMin = Math.max(0, Math.ceil((leftPad - wC) / strideWidth));\n const yCMax = Math.min(\n convInfo.outWidth, (convInfo.inWidth + leftPad - wC) / strideWidth);\n\n for (let d2 = 0; d2 < convInfo.outChannels; ++d2) {\n const d1 = Math.trunc(d2 / chMul);\n const dm = d2 % chMul;\n\n let dotProd = 0;\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let yR = yRMin; yR < yRMax; ++yR) {\n const xR = wR + yR * strideHeight - topPad;\n for (let yC = yCMin; yC < yCMax; ++yC) {\n const xC = wC + yC * strideWidth - leftPad;\n dotProd += x.get(b, xR, xC, d1) * dy.get(b, yR, yC, d2);\n }\n }\n }\n dW.set(dotProd, wR, wC, d1, dm);\n }\n }\n }\n return dW.toTensor();\n }\n\n tile<T extends Tensor>(x: T, reps: number[]): T {\n this.assertNotComplex(x, 'tile');\n\n const 
newShape: number[] = new Array(x.rank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[i] * reps[i];\n }\n const result = ops.buffer(newShape, x.dtype);\n const xBuf = x.buffer();\n for (let i = 0; i < result.values.length; ++i) {\n const newLoc = result.indexToLoc(i);\n\n const originalLoc: number[] = new Array(x.rank);\n for (let i = 0; i < originalLoc.length; i++) {\n originalLoc[i] = newLoc[i] % x.shape[i];\n }\n\n const originalIndex = xBuf.locToIndex(originalLoc);\n\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor() as T;\n }\n\n pad<T extends Tensor>(\n x: T, paddings: Array<[number, number]>, constantValue: number): T {\n this.assertNotComplex(x, 'pad');\n\n const outShape = paddings.map(\n (p, i) => p[0] /* beforePad */ + x.shape[i] + p[1] /* afterPad */);\n const start = paddings.map(p => p[0]);\n const xBuffer = x.buffer();\n const buffer = ops.buffer(outShape, x.dtype);\n if (constantValue !== 0) {\n buffer.values.fill(constantValue);\n }\n\n for (let i = 0; i < x.size; i++) {\n const coords = xBuffer.indexToLoc(i);\n const outCoords = coords.map((c, i) => c + start[i]);\n buffer.set(x.get(...coords), ...outCoords);\n }\n return buffer.toTensor() as T;\n }\n\n transpose<T extends Tensor>(x: T, perm: number[]): T {\n this.assertNotComplex(x, 'transpose');\n\n const newShape: number[] = new Array(x.rank);\n for (let i = 0; i < newShape.length; i++) {\n newShape[i] = x.shape[perm[i]];\n }\n const values = x.dataSync();\n const result = buffer(newShape, x.dtype);\n\n const xBuf = x.buffer();\n for (let i = 0; i < x.size; ++i) {\n const loc = xBuf.indexToLoc(i);\n\n // Permute location.\n const newLoc: number[] = new Array(loc.length);\n for (let i = 0; i < newLoc.length; i++) {\n newLoc[i] = loc[perm[i]];\n }\n\n const newIndex = result.locToIndex(newLoc);\n result.values[newIndex] = values[i];\n }\n return result.toTensor() as T;\n }\n\n gather<T extends Tensor>(x: T, indices: Tensor1D, axis: number): T {\n this.assertNotComplex([x, indices], 'gather');\n\n const newShape: number[] = x.shape.slice();\n const indicesValues = indices.dataSync();\n newShape[axis] = indicesValues.length;\n const result = buffer(newShape, x.dtype);\n const xBuf = x.buffer();\n\n for (let i = 0; i < result.size; ++i) {\n const newLoc = result.indexToLoc(i);\n\n const originalLoc: number[] = newLoc.slice();\n originalLoc[axis] = indicesValues[newLoc[axis]];\n\n const originalIndex = xBuf.locToIndex(originalLoc);\n result.values[i] = xBuf.values[originalIndex];\n }\n return result.toTensor() as T;\n }\n\n batchToSpaceND<T extends Tensor>(\n x: T, blockShape: number[], crops: number[][]): T {\n this.assertNotComplex([x], 'batchToSpaceND');\n\n const prod = blockShape.reduce((a, b) => a * b);\n\n const reshaped = array_ops_util.getReshaped(x.shape, blockShape, prod);\n const permuted =\n array_ops_util.getPermuted(reshaped.length, blockShape.length);\n const reshapedPermuted =\n array_ops_util.getReshapedPermuted(x.shape, blockShape, prod);\n const sliceBeginCoords =\n array_ops_util.getSliceBeginCoords(crops, blockShape.length);\n const sliceSize =\n array_ops_util.getSliceSize(reshapedPermuted, crops, blockShape.length);\n\n return x.reshape(reshaped)\n .transpose(permuted)\n .reshape(reshapedPermuted)\n .slice(sliceBeginCoords, sliceSize) as T;\n }\n\n spaceToBatchND<T extends Tensor>(\n x: T, blockShape: number[], paddings: Array<[number, number]>): T {\n this.assertNotComplex([x], 'spaceToBatchND');\n\n const prod = blockShape.reduce((a, b) => a * 
b);\n\n const completePaddings: Array<[number, number]> = [[0, 0]];\n completePaddings.push(...paddings);\n for (let i = 1 + blockShape.length; i < x.shape.length; ++i) {\n completePaddings.push([0, 0]);\n }\n\n const paddedX = x.pad(completePaddings);\n\n const reshapedPaddedShape =\n array_ops_util.getReshaped(paddedX.shape, blockShape, prod, false);\n const permutedReshapedPaddedPermutation = array_ops_util.getPermuted(\n reshapedPaddedShape.length, blockShape.length, false);\n const flattenShape = array_ops_util.getReshapedPermuted(\n paddedX.shape, blockShape, prod, false);\n\n return paddedX.reshape(reshapedPaddedShape)\n .transpose(permutedReshapedPaddedPermutation)\n .reshape(flattenShape) as T;\n }\n\n private pool(x: Tensor4D, convInfo: Conv2DInfo, poolType: 'max'|'avg'):\n Tensor4D {\n this.assertNotComplex(x, 'pool');\n\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n\n const initialValue =\n (poolType === 'max' ? Number.NEGATIVE_INFINITY :\n Number.POSITIVE_INFINITY);\n\n const xValues = x.dataSync();\n const output = ops.buffer<Rank.R4>(convInfo.outShape, x.dtype);\n const outputVals = output.values;\n\n const outputBatchStrides =\n convInfo.outShape[1] * convInfo.outShape[2] * convInfo.outShape[3];\n const outputRowStrides = convInfo.outShape[2] * convInfo.outShape[3];\n const outputColStrides = convInfo.outShape[3];\n\n for (let b = 0; b < convInfo.batchSize; ++b) {\n const outputBatchOffset = b * outputBatchStrides;\n const inputBatchOffset = b * x.strides[0];\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n const xRMin = Math.max(0, xRCorner);\n const xRMax =\n Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n const outputRowOffset = outputBatchOffset + yR * outputRowStrides;\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n const xCMin = Math.max(0, xCCorner);\n const xCMax =\n Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let minMaxValue = initialValue;\n let avgValue = 0;\n let count = 0;\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const xROffset = inputBatchOffset + xR * x.strides[1];\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const xCOffset = xROffset + xC * x.strides[2];\n const pixel = xValues[xCOffset + d];\n if ((poolType === 'max' && pixel > minMaxValue)) {\n minMaxValue = pixel;\n } else if (poolType === 'avg') {\n avgValue += pixel;\n count++;\n }\n }\n if (isNaN(minMaxValue)) {\n break;\n }\n }\n const outputOffset = outputRowOffset + yC * outputColStrides + d;\n outputVals[outputOffset] =\n poolType === 'avg' ? 
avgValue / count : minMaxValue;\n }\n }\n }\n }\n return output.toTensor();\n }\n\n maxPool(x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n return this.pool(x, convInfo, 'max');\n }\n\n private maxPoolPositions(x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n const maxPositions = ops.buffer<Rank.R4>(convInfo.outShape, 'int32');\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padTop = convInfo.padInfo.top;\n const padLeft = convInfo.padInfo.left;\n\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let yR = 0; yR < convInfo.outHeight; ++yR) {\n const xRCorner = yR * strideHeight - padTop;\n let xRMin = xRCorner;\n while (xRMin < 0) {\n xRMin += dilationHeight;\n }\n // const xRMin = Math.max(0, xRCorner);\n const xRMax =\n Math.min(convInfo.inHeight, effectiveFilterHeight + xRCorner);\n for (let yC = 0; yC < convInfo.outWidth; ++yC) {\n const xCCorner = yC * strideWidth - padLeft;\n let xCMin = xCCorner;\n while (xCMin < 0) {\n xCMin += dilationWidth;\n }\n const xCMax =\n Math.min(convInfo.inWidth, effectiveFilterWidth + xCCorner);\n let maxValue = Number.NEGATIVE_INFINITY;\n let maxPosition = -1;\n\n for (let xR = xRMin; xR < xRMax; xR += dilationHeight) {\n const wR = xR - xRCorner;\n for (let xC = xCMin; xC < xCMax; xC += dilationWidth) {\n const wC = xC - xCCorner;\n const pixel = x.get(b, xR, xC, d);\n if (pixel > maxValue) {\n maxValue = pixel;\n maxPosition = wR * effectiveFilterWidth + wC;\n }\n }\n }\n maxPositions.set(maxPosition, b, yR, yC, d);\n }\n }\n }\n }\n return maxPositions.toTensor();\n }\n\n maxPoolBackprop(dy: Tensor4D, x: Tensor4D, y: Tensor4D, convInfo: Conv2DInfo):\n Tensor4D {\n this.assertNotComplex([x, y], 'maxPoolBackprop');\n\n const maxPositions = this.maxPoolPositions(x, convInfo);\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = ops.buffer<Rank.R4>(x.shape, 'float32');\n\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n const maxPos = effectiveFilterHeight * effectiveFilterWidth -\n 1 - maxPositions.get(b, dyR, dyC, d);\n const curPos = wR * effectiveFilterWidth + wC;\n\n const mask = maxPos === curPos ? 
1 : 0;\n if (mask === 0) {\n continue;\n }\n\n const pixel = dy.get(b, dyR, dyC, d);\n dotProd += pixel * mask;\n }\n }\n dx.set(dotProd, b, dxR, dxC, d);\n }\n }\n }\n }\n return dx.toTensor();\n }\n\n avgPoolBackprop(dy: Tensor4D, x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n this.assertNotComplex([dy, x], 'avgPoolBackprop');\n\n const strideHeight = convInfo.strideHeight;\n const strideWidth = convInfo.strideWidth;\n const filterHeight = convInfo.filterHeight;\n const filterWidth = convInfo.filterWidth;\n const dilationHeight = convInfo.dilationHeight;\n const dilationWidth = convInfo.dilationWidth;\n const effectiveFilterHeight = convInfo.effectiveFilterHeight;\n const effectiveFilterWidth = convInfo.effectiveFilterWidth;\n const padLeft = effectiveFilterWidth - 1 - convInfo.padInfo.left;\n const padTop = effectiveFilterHeight - 1 - convInfo.padInfo.top;\n const dx = ops.buffer<Rank.R4>(x.shape, 'float32');\n\n const avgMultiplier = 1 / (filterHeight * filterWidth);\n\n for (let b = 0; b < convInfo.batchSize; ++b) {\n for (let d = 0; d < convInfo.inChannels; ++d) {\n for (let dxR = 0; dxR < convInfo.inHeight; ++dxR) {\n for (let dxC = 0; dxC < convInfo.inWidth; ++dxC) {\n // Shader code begins.\n const dyRCorner = dxR - padTop;\n const dyCCorner = dxC - padLeft;\n let dotProd = 0;\n for (let wR = 0; wR < effectiveFilterHeight; wR += dilationHeight) {\n const dyR = (dyRCorner + wR) / strideHeight;\n if (dyR < 0 || dyR >= convInfo.outHeight ||\n Math.floor(dyR) !== dyR) {\n continue;\n }\n for (let wC = 0; wC < effectiveFilterWidth; wC += dilationWidth) {\n const dyC = (dyCCorner + wC) / strideWidth;\n if (dyC < 0 || dyC >= convInfo.outWidth ||\n Math.floor(dyC) !== dyC) {\n continue;\n }\n\n const pixel = dy.get(b, dyR, dyC, d);\n dotProd += pixel;\n }\n }\n dx.set(dotProd * avgMultiplier, b, dxR, dxC, d);\n }\n }\n }\n }\n return dx.toTensor();\n }\n\n cast<T extends Tensor>(x: T, dtype: DataType): T {\n return backend_util.castTensor(x, dtype, this);\n }\n\n reshape<R extends Rank>(x: Tensor, shape: ShapeMap[R]): Tensor<R> {\n return backend_util.reshapeTensor(x, shape);\n }\n\n avgPool(x: Tensor4D, convInfo: Conv2DInfo): Tensor4D {\n this.assertNotComplex(x, 'avgPool');\n\n return this.pool(x, convInfo, 'avg').toFloat();\n }\n\n resizeBilinear(\n x: Tensor4D, newHeight: number, newWidth: number,\n alignCorners: boolean): Tensor4D {\n this.assertNotComplex(x, 'resizeBilinear');\n\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = x.dataSync();\n const result = new Float32Array(\n util.sizeFromShape([batch, newHeight, newWidth, numChannels]));\n\n const effectiveInputSize: [number, number] = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n\n const effectiveOutputSize: [number, number] = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? 
newWidth - 1 : newWidth\n ];\n let outputIdx = 0;\n const effectiveRowSizeRatio =\n effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio =\n effectiveInputSize[1] / effectiveOutputSize[1];\n for (let b = 0; b < batch; b++) {\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceRowFloor = Math.floor(sourceFracRow);\n const rowFrac = sourceFracRow - sourceRowFloor;\n const sourceRowCeil = Math.min(oldHeight - 1, Math.ceil(sourceFracRow));\n const topRowOffset = b * x.strides[0] + sourceRowFloor * x.strides[1];\n const botRowOffset = b * x.strides[0] + sourceRowCeil * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceColFloor = Math.floor(sourceFracCol);\n const colFrac = sourceFracCol - sourceColFloor;\n const sourceColCeil =\n Math.min(oldWidth - 1, Math.ceil(sourceFracCol));\n const topLeftOffest = topRowOffset + sourceColFloor * x.strides[2];\n const botLeftOffset = botRowOffset + sourceColFloor * x.strides[2];\n const topRightOffset = topRowOffset + +sourceColCeil * x.strides[2];\n const botRightOffest = botRowOffset + sourceColCeil * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n\n // Compute the fractional index of the source.\n const topLeft = xValues[topLeftOffest + d];\n const bottomLeft = xValues[botLeftOffset + d];\n const topRight = xValues[topRightOffset + d];\n const bottomRight = xValues[botRightOffest + d];\n\n const top = topLeft + (topRight - topLeft) * colFrac;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * colFrac;\n const newValue = top + (bottom - top) * rowFrac;\n\n result[outputIdx++] = newValue;\n }\n }\n }\n }\n return ops.tensor(result, [batch, newHeight, newWidth, numChannels]);\n }\n\n resizeBilinearBackprop(dy: Tensor4D, x: Tensor4D, alignCorners: boolean) {\n this.assertNotComplex([dy, x], 'resizeBilinearBackprop');\n\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass and add the\n // corresponding coefficient from dy to the gradient (with some\n // interpolation).\n\n const effectiveXSize: [number, number] = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n\n const effectiveYSize: [number, number] = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? 
yWidth - 1 : yWidth\n ];\n\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/3039375c86a5bbc9610c7725dcaa95d635f87ba2/tensorflow/core/kernels/resize_bilinear_op.cc#L275\n\n const dyValues = dy.dataSync();\n let offset = 0;\n for (let b = 0; b < batch; b++) {\n const bOffset = b * x.strides[0];\n for (let r = 0; r < yHeight; r++) {\n const dxR = r * heightScale;\n const topDxRIndex = Math.floor(dxR);\n const bottomDxRIndex = Math.min(Math.ceil(dxR), xHeight - 1);\n\n const topDxROffset = bOffset + topDxRIndex * x.strides[1];\n const bottomDxROffset = bOffset + bottomDxRIndex * x.strides[1];\n\n const dxRLerp = dxR - topDxRIndex;\n const inverseDxRLerp = 1.0 - dxRLerp;\n for (let c = 0; c < yWidth; c++) {\n const dxC = c * widthScale;\n const leftDxCIndex = Math.floor(dxC);\n const rightDxCIndex = Math.min(Math.ceil(dxC), xWidth - 1);\n const dxCLerp = dxC - leftDxCIndex;\n const inverseDxCLerp = 1.0 - dxCLerp;\n\n const topLeftRCOffset = topDxROffset + leftDxCIndex * x.strides[2];\n const topRightRCOffset = topDxROffset + rightDxCIndex * x.strides[2];\n const bottomLeftRCOffset =\n bottomDxROffset + leftDxCIndex * x.strides[2];\n const bottomRightRCOffset =\n bottomDxROffset + rightDxCIndex * x.strides[2];\n\n const inverseDxRLerpTimesInverseDxCLerp =\n inverseDxRLerp * inverseDxCLerp;\n const inverseDxRLerpTimesDxCLerp = inverseDxRLerp * dxCLerp;\n const dxRLerpTimesInverseDxCLerp = dxRLerp * inverseDxCLerp;\n const dxRLerpTimesDxCLerp = dxRLerp * dxCLerp;\n for (let d = 0; d < depth; d++) {\n const dyVal = dyValues[offset++];\n output[topLeftRCOffset + d] +=\n dyVal * inverseDxRLerpTimesInverseDxCLerp;\n output[topRightRCOffset + d] += dyVal * inverseDxRLerpTimesDxCLerp;\n output[bottomLeftRCOffset + d] +=\n dyVal * dxRLerpTimesInverseDxCLerp;\n output[bottomRightRCOffset + d] += dyVal * dxRLerpTimesDxCLerp;\n }\n }\n }\n }\n return ops.tensor4d(output, [batch, xWidth, xHeight, depth], x.dtype);\n }\n\n resizeNearestNeighbor(\n x: Tensor4D, newHeight: number, newWidth: number,\n alignCorners: boolean): Tensor4D {\n this.assertNotComplex(x, 'resizeNearestNeighbor');\n\n const [batch, oldHeight, oldWidth, numChannels] = x.shape;\n const xValues = x.dataSync();\n const output = new Float32Array(batch * newHeight * newWidth * numChannels);\n\n const effectiveInputSize: [number, number] = [\n (alignCorners && newHeight > 1) ? oldHeight - 1 : oldHeight,\n (alignCorners && newWidth > 1) ? oldWidth - 1 : oldWidth\n ];\n\n const effectiveOutputSize: [number, number] = [\n (alignCorners && newHeight > 1) ? newHeight - 1 : newHeight,\n (alignCorners && newWidth > 1) ? newWidth - 1 : newWidth\n ];\n\n const effectiveRowSizeRatio =\n effectiveInputSize[0] / effectiveOutputSize[0];\n const effectiveColSizeRatio =\n effectiveInputSize[1] / effectiveOutputSize[1];\n\n let outputOffset = 0;\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < newHeight; r++) {\n const sourceFracRow = effectiveRowSizeRatio * r;\n const sourceNearestRow = Math.min(\n oldHeight - 1,\n alignCorners ? Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n const rowOffset = batchOffset + sourceNearestRow * x.strides[1];\n for (let c = 0; c < newWidth; c++) {\n const sourceFracCol = effectiveColSizeRatio * c;\n const sourceNearestCol = Math.min(\n oldWidth - 1,\n alignCorners ? 
Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n const colOffset = rowOffset + sourceNearestCol * x.strides[2];\n for (let d = 0; d < numChannels; d++) {\n // Begin shader.\n // Compute the fractional index of the source.\n const newVal = xValues[colOffset + d];\n output[outputOffset++] = newVal;\n }\n }\n }\n }\n return ops.tensor(\n output, [batch, newHeight, newWidth, numChannels], x.dtype);\n }\n\n resizeNearestNeighborBackprop(\n dy: Tensor4D, x: Tensor4D, alignCorners: boolean) {\n this.assertNotComplex([dy, x], 'resizeNearestNeighborBackprop');\n\n const [batch, xHeight, xWidth, depth] = x.shape;\n const [, yHeight, yWidth] = dy.shape;\n\n const output = new Float32Array(batch * xHeight * xWidth * depth);\n const dyValues = dy.dataSync();\n\n // In the backwards pass, we want to find the pixels that were generated\n // for each pixel in the input image the forward pass\n\n const effectiveXSize: [number, number] = [\n (alignCorners && yHeight > 1) ? xHeight - 1 : xHeight,\n (alignCorners && yWidth > 1) ? xWidth - 1 : xWidth\n ];\n\n const effectiveYSize: [number, number] = [\n (alignCorners && yHeight > 1) ? yHeight - 1 : yHeight,\n (alignCorners && yWidth > 1) ? yWidth - 1 : yWidth\n ];\n\n const heightScale = effectiveXSize[0] / effectiveYSize[0];\n const widthScale = effectiveXSize[1] / effectiveYSize[1];\n\n const invHeightScale = 1 / heightScale;\n const invWidthScale = 1 / widthScale;\n\n // This defines the size of the window of values around a particular\n // index in dy that we want to search for contributions to dx.\n const winHeight = (Math.ceil(invHeightScale) * 2) + 2;\n const winWidth = (Math.ceil(invWidthScale) * 2) + 2;\n\n // Loop over the output space.\n for (let b = 0; b < batch; b++) {\n const batchOffset = b * x.strides[0];\n for (let r = 0; r < xHeight; r++) {\n const rowOffset = batchOffset + r * x.strides[1];\n\n // Compute bounds for where in dy we will look\n const startRLerp = Math.floor(r * invHeightScale);\n const startDyR = Math.floor(startRLerp - (winHeight / 2));\n for (let c = 0; c < xWidth; c++) {\n const colOffset = rowOffset + c * x.strides[2];\n\n // Compute bounds for where in dy we will look\n const startCLerp = Math.floor(c * invWidthScale);\n const startDyC = Math.floor(startCLerp - (winWidth / 2));\n\n for (let d = 0; d < depth; d++) {\n let accum = 0;\n // loop over dy\n\n for (let dyRIndex = 0; dyRIndex < winHeight; dyRIndex++) {\n const dyR = dyRIndex + startDyR;\n // Guard against the window exceeding the bounds of dy\n if (dyR < 0 || dyR >= yHeight) {\n continue;\n }\n\n const dyROffset = batchOffset + dyR * dy.strides[1];\n const sourceFracRow = dyR * heightScale;\n const sourceNearestRow = Math.min(\n xHeight - 1,\n alignCorners ? Math.round(sourceFracRow) :\n Math.floor(sourceFracRow));\n if (r !== sourceNearestRow) {\n continue;\n }\n for (let dyCIndex = 0; dyCIndex < winWidth; dyCIndex++) {\n const dyC = dyCIndex + startDyC;\n // Guard against the window exceeding the bounds of dy\n if (dyC < 0 || dyC >= yWidth) {\n continue;\n }\n\n const dyCOffset = dyROffset + dyC * dy.strides[2];\n const sourceFracCol = dyC * widthScale;\n const sourceNearestCol = Math.min(\n xWidth - 1,\n alignCorners ? 
Math.round(sourceFracCol) :\n Math.floor(sourceFracCol));\n\n if (c === sourceNearestCol) {\n accum += dyValues[dyCOffset + d];\n }\n }\n }\n output[colOffset + d] = accum;\n }\n }\n }\n }\n return ops.tensor4d(output, x.shape, x.dtype);\n }\n\n batchNormalization(\n x: Tensor4D, mean: Tensor4D|Tensor1D, variance: Tensor4D|Tensor1D,\n varianceEpsilon: number, scale?: Tensor4D|Tensor1D,\n offset?: Tensor4D|Tensor1D): Tensor4D {\n this.assertNotComplex(\n [x, mean, variance, scale, offset], 'batchNormalization');\n\n const xVals = x.dataSync();\n const mVals = mean.dataSync();\n const varVals = variance.dataSync();\n const sVals = scale ? scale.dataSync() : new Float32Array([1]);\n const offVals = offset ? offset.dataSync() : new Float32Array([0]);\n const outVals = new Float32Array(xVals.length);\n\n const offValsLength = offVals.length;\n const sValsLength = sVals.length;\n const varValsLength = varVals.length;\n const mValsLength = mVals.length;\n\n let offi = 0;\n let mi = 0;\n let si = 0;\n let vi = 0;\n for (let i = 0; i < xVals.length; ++i) {\n outVals[i] = offVals[offi++] +\n (xVals[i] - mVals[mi++]) * sVals[si++] /\n Math.sqrt(varVals[vi++] + varianceEpsilon);\n if (offi >= offValsLength) {\n offi = 0;\n }\n if (mi >= mValsLength) {\n mi = 0;\n }\n if (si >= sValsLength) {\n si = 0;\n }\n if (vi >= varValsLength) {\n vi = 0;\n }\n }\n return tensor4d(outVals, x.shape);\n }\n\n localResponseNormalization4D(\n x: Tensor4D, depthRadius: number, bias: number, alpha: number,\n beta: number): Tensor4D {\n this.assertNotComplex(x, 'localResponseNormalization4D');\n\n const channels = x.shape[3];\n const maxD = channels - 1;\n const xValues = x.dataSync();\n const size = util.sizeFromShape(x.shape);\n const result = new Float32Array(size);\n\n function sumAcrossChannels(offset: number) {\n const currentChannel = offset % channels;\n let beginSumOffset =\n offset - currentChannel + Math.max(0, currentChannel - depthRadius);\n const endSumOffset = offset - currentChannel +\n Math.min(currentChannel + depthRadius, maxD);\n\n let sum = 0.0;\n for (; beginSumOffset <= endSumOffset; beginSumOffset++) {\n const z = xValues[beginSumOffset];\n sum += z * z;\n }\n return sum;\n }\n\n for (let offset = 0; offset < size; offset++) {\n const sum = sumAcrossChannels(offset);\n const val = xValues[offset] * Math.pow(bias + alpha * sum, -beta);\n result[offset] = val;\n }\n\n return ops.tensor4d(result, x.shape);\n }\n\n LRNGrad(\n dy: Tensor4D, inputImage: Tensor4D, outputImage: Tensor4D,\n depthRadius: number, bias: number, alpha: number,\n beta: number): Tensor4D {\n this.assertNotComplex(dy, 'LRNGrad');\n const channels = dy.shape[3];\n const dyValues = dy.dataSync();\n const inputImageValues = inputImage.dataSync();\n const outputImageValues = outputImage.dataSync();\n const result = new Float32Array(util.sizeFromShape(dy.shape));\n const size = util.sizeFromShape(dy.shape);\n\n for (let offset = 0; offset < size; offset++) {\n const currentChannel = offset % channels;\n const depthBegin =\n (offset - currentChannel) + Math.max(0, currentChannel - depthRadius);\n const depthEnd = (offset - currentChannel) +\n Math.min(channels, currentChannel + depthRadius + 1);\n\n let norm = 0;\n for (let k = depthBegin; k < depthEnd; k++) {\n norm += Math.pow(inputImageValues[k], 2);\n }\n norm = alpha * norm + bias;\n\n for (let k = depthBegin; k < depthEnd; k++) {\n let dyi = -2 * alpha * beta * inputImageValues[k] *\n outputImageValues[offset] / norm;\n if (offset === k) {\n dyi += Math.pow(norm, -beta);\n }\n 
dyi *= dyValues[offset];\n result[k] += dyi;\n }\n }\n return ops.tensor4d(result, dy.shape);\n }\n\n multinomial(\n logits: Tensor2D, normalized: boolean, numSamples: number,\n seed: number): Tensor2D {\n this.assertNotComplex(logits, 'multinomial');\n\n const probabilities = normalized ? logits : ops.softmax(logits);\n const batchSize = probabilities.shape[0];\n const numEvents = probabilities.shape[1];\n const res = ops.zeros<Rank.R2>([batchSize, numSamples], 'int32');\n const resVals = res.dataSync();\n const probVals = probabilities.dataSync();\n\n for (let b = 0; b < batchSize; ++b) {\n const offset = b * numEvents;\n // The cdf won't include the last event. It will be implicit if no other\n // event happened.\n const cdf = new Float32Array(numEvents - 1);\n cdf[0] = probVals[offset];\n for (let event = 1; event < cdf.length; ++event) {\n cdf[event] = cdf[event - 1] + probVals[offset + event];\n }\n\n const random = seedrandom.alea(seed.toString());\n const outOffset = b * numSamples;\n for (let sampleId = 0; sampleId < numSamples; ++sampleId) {\n const r = random();\n\n // Assume last event happened by default.\n resVals[outOffset + sampleId] = cdf.length;\n\n for (let event = 0; event < cdf.length; event++) {\n if (r < cdf[event]) {\n resVals[outOffset + sampleId] = event;\n break;\n }\n }\n }\n }\n return res;\n }\n\n oneHot(indices: Tensor1D, depth: number, onValue: number, offValue: number):\n Tensor2D {\n this.assertNotComplex(indices, 'oneHot');\n\n const res = new Float32Array(indices.size * depth);\n res.fill(offValue);\n\n for (let event = 0; event < indices.size; ++event) {\n if (indices.get(event) >= 0 && indices.get(event) < depth) {\n res[event * depth + indices.get(event)] = onValue;\n }\n }\n return ops.tensor2d(res, [indices.size, depth], 'int32');\n }\n\n nonMaxSuppression(\n boxes: Tensor2D, scores: Tensor1D, maxOutputSize: number,\n iouThreshold: number, scoreThreshold: number): Tensor1D {\n this.assertNotComplex(boxes, 'nonMaxSuppression');\n\n const boxesVals = boxes.dataSync();\n const scoresVals = scores.dataSync();\n return nonMaxSuppressionImpl(\n boxesVals, scoresVals, maxOutputSize, iouThreshold, scoreThreshold);\n }\n\n fft(x: Tensor2D): Tensor2D {\n if (x.shape[0] !== 1) {\n throw new Error(`tf.fft() on CPU only supports vectors.`);\n }\n const inverse = false;\n return this.fftImpl(x, inverse);\n }\n\n ifft(x: Tensor2D): Tensor2D {\n if (x.shape[0] !== 1) {\n throw new Error(`tf.ifft() on CPU only supports vectors.`);\n }\n const inverse = true;\n return this.fftImpl(x, inverse);\n }\n\n private fftImpl(x: Tensor2D, inverse: boolean): Tensor2D {\n const x1D = x.as1D();\n\n const n = x1D.size;\n\n if (this.isExponentOf2(n)) {\n let result = this.fftRadix2(x1D, n, inverse).as2D(x.shape[0], x.shape[1]);\n if (inverse) {\n result = ops.complex(\n ops.real(result).div(scalar(n)),\n ops.imag(result).div(scalar(n))) as Tensor2D;\n }\n return result;\n } else {\n const data = x.dataSync();\n const rawOutput =\n this.fourierTransformByMatmul(data, n, inverse) as Float32Array;\n const output = complex_util.splitRealAndImagArrays(rawOutput);\n return ops.complex(output.real, output.imag).as2D(x.shape[0], x.shape[1]);\n }\n }\n\n private isExponentOf2(size: number): boolean {\n return (size & size - 1) === 0;\n }\n\n // FFT using Cooley-Tukey algorithm on radix 2 dimensional input.\n private fftRadix2(input: Tensor1D, size: number, inverse: boolean): Tensor1D {\n if (size === 1) {\n return input;\n }\n const data = input.dataSync() as Float32Array;\n const half = 
size / 2;\n const evenComplex = complex_util.complexWithEvenIndex(data);\n let evenTensor = ops.complex(evenComplex.real, evenComplex.imag).as1D();\n const oddComplex = complex_util.complexWithOddIndex(data);\n let oddTensor = ops.complex(oddComplex.real, oddComplex.imag).as1D();\n\n // Recursive call for half part of original input.\n evenTensor = this.fftRadix2(evenTensor, half, inverse);\n oddTensor = this.fftRadix2(oddTensor, half, inverse);\n\n const e = complex_util.exponents(size, inverse);\n const exponent = ops.complex(e.real, e.imag).mul(oddTensor);\n\n const addPart = evenTensor.add(exponent);\n const subPart = evenTensor.sub(exponent);\n\n const realTensor = ops.real(addPart).concat(ops.real(subPart));\n const imagTensor = ops.imag(addPart).concat(ops.imag(subPart));\n\n return ops.complex(realTensor, imagTensor).as1D();\n }\n\n // Calculate fourier transform by multplying sinusoid matrix.\n private fourierTransformByMatmul(\n data: TypedArray, size: number, inverse: boolean): TypedArray {\n const ret = new Float32Array(size * 2);\n // TODO: Use matmul instead once it supports complex64 type.\n for (let r = 0; r < size; r++) {\n let real = 0.0;\n let imag = 0.0;\n for (let c = 0; c < size; c++) {\n const e = complex_util.exponent(r * c, size, inverse);\n const term = complex_util.getComplexWithIndex(data as Float32Array, c);\n real += term.real * e.real - term.imag * e.imag;\n imag += term.real * e.imag + term.imag * e.real;\n }\n if (inverse) {\n real /= size;\n imag /= size;\n }\n complex_util.assignToTypedArray(ret, real, imag, r);\n }\n return ret;\n }\n\n depthToSpace(x: Tensor4D, blockSize: number, dataFormat: 'NHWC'|'NCHW'):\n Tensor4D {\n util.assert(\n dataFormat === 'NHWC',\n `Only NHWC dataFormat supported on CPU for depthToSpace. 
Got ${\n dataFormat}`);\n util.assert(\n blockSize > 1,\n `blockSize should be > 1 for depthToSpace, but was: ${blockSize}`);\n\n const batchSize = x.shape[0];\n const inputHeight = x.shape[1];\n const inputWidth = x.shape[2];\n const inputDepth = x.shape[3];\n\n const outputHeight = inputHeight * blockSize;\n const outputWidth = inputWidth * blockSize;\n const outputDepth = inputDepth / (blockSize * blockSize);\n\n const xValues = x.dataSync();\n const result =\n new Float32Array(batchSize * outputHeight * outputWidth * outputDepth);\n\n let outputIdx = 0;\n for (let b = 0; b < batchSize; ++b) {\n for (let h = 0; h < outputHeight; ++h) {\n const inH = Math.floor(h / blockSize);\n const offsetH = (h % blockSize);\n for (let w = 0; w < outputWidth; ++w) {\n const inW = Math.floor(w / blockSize);\n const offsetW = (w % blockSize);\n const offsetD = (offsetH * blockSize + offsetW) * outputDepth;\n for (let d = 0; d < outputDepth; ++d) {\n const inD = d + offsetD;\n const inputIdx =\n inD + inputDepth * (inW + inputWidth * (inH + inputHeight * b));\n result[outputIdx++] = xValues[inputIdx];\n }\n }\n }\n }\n return ops.tensor4d(\n result, [batchSize, outputHeight, outputWidth, outputDepth]);\n }\n\n private broadcastedBinaryOp(\n a: Tensor, b: Tensor, dtype: DataType,\n op: (a: number, b: number) => number): Tensor {\n const newShape =\n broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const result = ops.buffer(newShape, dtype);\n const aVals = a.dataSync();\n const bVals = b.dataSync();\n const aBroadcastDims = broadcast_util.getBroadcastDims(a.shape, newShape);\n const bBroadcastDims = broadcast_util.getBroadcastDims(b.shape, newShape);\n\n const resVals = result.values;\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < resVals.length; ++i) {\n resVals[i] = op(aVals[i % aVals.length], bVals[i % bVals.length]);\n }\n } else {\n const aBuf = a.buffer();\n const bBuf = b.buffer();\n for (let i = 0; i < resVals.length; ++i) {\n const loc = result.indexToLoc(i);\n\n const aLoc = loc.slice(-a.rank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = aBuf.locToIndex(aLoc);\n\n const bLoc = loc.slice(-b.rank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = bBuf.locToIndex(bLoc);\n\n resVals[i] = op(aVals[aIndex], bVals[bIndex]);\n }\n }\n return result.toTensor();\n }\n\n private broadcastedBinaryComplexOp(\n a: Tensor, b: Tensor,\n op:\n (aReal: number, aImag: number, bReal: number,\n bImag: number) => {real: number, imag: number}): Tensor {\n const newShape =\n broadcast_util.assertAndGetBroadcastShape(a.shape, b.shape);\n const realResult = ops.buffer(newShape, 'float32');\n const imagResult = ops.buffer(newShape, 'float32');\n\n const aVals = a.dataSync();\n const bVals = b.dataSync();\n const aBroadcastDims = broadcast_util.getBroadcastDims(a.shape, newShape);\n const bBroadcastDims = broadcast_util.getBroadcastDims(b.shape, newShape);\n\n const realVals = realResult.values;\n const imagVals = imagResult.values;\n\n if (aBroadcastDims.length + bBroadcastDims.length === 0) {\n for (let i = 0; i < realVals.length; i++) {\n const aIdx = i % aVals.length;\n const bIdx = i % bVals.length;\n\n const result =\n op(aVals[aIdx * 2], aVals[aIdx * 2 + 1], bVals[bIdx * 2],\n bVals[bIdx * 2 + 1]);\n\n realVals[i] = result.real;\n imagVals[i] = result.imag;\n }\n } else {\n const aRealBuf = this.data.get(a.dataId).complexTensors.real.buffer();\n const bRealBuf = this.data.get(b.dataId).complexTensors.real.buffer();\n for (let i = 0; i < 
realVals.length; i++) {\n const loc = realResult.indexToLoc(i);\n\n const aLoc = loc.slice(-a.rank);\n aBroadcastDims.forEach(d => aLoc[d] = 0);\n const aIndex = aRealBuf.locToIndex(aLoc);\n\n const bLoc = loc.slice(-b.rank);\n bBroadcastDims.forEach(d => bLoc[d] = 0);\n const bIndex = bRealBuf.locToIndex(bLoc);\n\n const opResult =\n op(aVals[aIndex * 2], aVals[aIndex * 2 + 1], bVals[bIndex * 2],\n bVals[bIndex * 2 + 1]);\n\n realVals[i] = opResult.real;\n imagVals[i] = opResult.imag;\n }\n }\n return this.complex(realResult.toTensor(), imagResult.toTensor());\n }\n\n split<T extends Tensor>(x: T, sizeSplits: number[], axis: number): T[] {\n return split(x, sizeSplits, axis);\n }\n\n dispose() {}\n\n floatPrecision() {\n return 32;\n }\n\n cropAndResize(\n images: Tensor4D,\n boxes: Tensor2D,\n boxIndex: Tensor1D,\n cropSize: [number, number],\n method: string,\n extrapolationValue: number,\n ) {\n const [batch, imageHeight, imageWidth, numChannels] = images.shape;\n const numBoxes = boxes.shape[0];\n\n const [cropHeight, cropWidth] = cropSize;\n const output =\n ops.buffer<Rank.R4>([numBoxes, cropHeight, cropWidth, numChannels]);\n\n const boxVals = boxes.dataSync();\n const boxIndVals = boxIndex.dataSync();\n const imageVals = images.dataSync();\n\n const inStride = images.strides; // to calculate flat indexes into image\n const outStride = output.strides; // to calculate flat indexes into output\n\n // Reference implementation\n // tslint:disable-next-line:max-line-length\n // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/kernels/crop_and_resize_op.cc\n for (let b = 0; b < numBoxes; b++) {\n const startInd = b * 4;\n const y1 = boxVals[startInd];\n const x1 = boxVals[startInd + 1];\n const y2 = boxVals[startInd + 2];\n const x2 = boxVals[startInd + 3];\n\n const bInd: number = boxIndVals[b];\n if (bInd >= batch) {\n continue;\n }\n\n const heightScale = (cropHeight > 1) ?\n (y2 - y1) * (imageHeight - 1) / (cropHeight - 1) :\n 0;\n const widthScale =\n (cropWidth > 1) ? 
(x2 - x1) * (imageWidth - 1) / (cropWidth - 1) : 0;\n\n for (let y = 0; y < cropHeight; y++) {\n const yInd: number = (cropHeight > 1) ?\n y1 * (imageHeight - 1) + y * (heightScale) :\n 0.5 * (y1 + y2) * (imageHeight - 1);\n\n if (yInd < 0 || yInd > imageHeight - 1) {\n for (let x = 0; x < cropWidth; x++) {\n for (let c = 0; c < numChannels; c++) {\n const ind =\n c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n }\n continue;\n }\n\n if (method === 'bilinear') {\n const topInd = Math.floor(yInd);\n const bottomInd = Math.ceil(yInd);\n const yLerp = yInd - topInd;\n\n for (let x = 0; x < cropWidth; x++) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind =\n c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n\n const leftInd = Math.floor(xInd);\n const rightInd = Math.ceil(xInd);\n const xLerp = xInd - leftInd;\n\n for (let c = 0; c < numChannels; c++) {\n let ind = c + leftInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topLeft = imageVals[ind];\n\n ind = c + rightInd * inStride[2] + topInd * inStride[1] +\n bInd * inStride[0];\n const topRight = imageVals[ind];\n\n ind = c + leftInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomLeft = imageVals[ind];\n\n ind = c + rightInd * inStride[2] + bottomInd * inStride[1] +\n bInd * inStride[0];\n const bottomRight = imageVals[ind];\n\n const top = topLeft + (topRight - topLeft) * xLerp;\n const bottom = bottomLeft + (bottomRight - bottomLeft) * xLerp;\n\n ind = c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = top + ((bottom - top) * yLerp);\n }\n }\n } else { // method == \"nearest\"\n for (let x = 0; x < cropWidth; ++x) {\n const xInd = (cropWidth > 1) ?\n x1 * (imageWidth - 1) + x * widthScale :\n 0.5 * (x1 + x2) * (imageWidth - 1);\n\n if (xInd < 0 || xInd > imageWidth - 1) {\n for (let c = 0; c < numChannels; c++) {\n const ind =\n c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[ind] = extrapolationValue;\n }\n continue;\n }\n\n const closestX = Math.round(xInd);\n const closestY = Math.round(yInd);\n for (let c = 0; c < numChannels; c++) {\n const inInd = c + closestX * inStride[2] +\n closestY * inStride[1] + bInd * inStride[0];\n const outInd =\n c + x * outStride[2] + y * outStride[1] + b * outStride[0];\n output.values[outInd] = imageVals[inInd];\n }\n }\n }\n }\n }\n return output.toTensor();\n }\n\n sparseToDense<R extends Rank>(\n sparseIndices: Tensor, sparseValues: Tensor, outputShape: ShapeMap[R],\n defaultValue: Scalar): Tensor<R> {\n const {sliceRank, numUpdates, sliceSize, strides, outputSize} =\n scatter_nd_util.calculateShapes(\n sparseValues, sparseIndices, outputShape);\n const sumDupeIndices = false;\n return this.scatter(\n sparseIndices, sparseValues, outputShape, outputSize, sliceSize,\n numUpdates, sliceRank, strides, defaultValue, sumDupeIndices);\n }\n\n gatherND(x: Tensor, indices: Tensor): Tensor<Rank> {\n const indicesShape = indices.shape;\n const sliceRank = indicesShape[indicesShape.length - 1];\n\n const [resultShape, numSlices, sliceSize, strides] =\n gather_nd_util.prepareAndValidate(x, indices);\n if (numSlices === 0) {\n return tensor([], resultShape, x.dtype);\n }\n\n const buffer = new 
TensorBuffer([numSlices, sliceSize], x.dtype);\n const indicesData = indices.dataSync();\n const xData = x.dataSync();\n\n for (let i = 0; i < numSlices; i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n flattenIndex += dim * strides[j];\n index.push(dim);\n }\n if (flattenIndex < 0 || flattenIndex >= x.size / sliceSize) {\n throw new Error(\n `Invalid indices: ${index} does not index into ${x.shape}`);\n }\n\n for (let k = 0; k < sliceSize; k++) {\n buffer.values[i * sliceSize + k] = xData[flattenIndex * sliceSize + k];\n }\n }\n return buffer.toTensor().reshape(resultShape);\n }\n\n scatterND<R extends Rank>(\n indices: Tensor, updates: Tensor, shape: ShapeMap[R]): Tensor<R> {\n const {sliceRank, numUpdates, sliceSize, strides, outputSize} =\n scatter_nd_util.calculateShapes(updates, indices, shape);\n const defaultValue = scalar(0);\n const sumDupeIndices = true;\n return this.scatter(\n indices, updates, shape, outputSize, sliceSize, numUpdates, sliceRank,\n strides, defaultValue, sumDupeIndices);\n }\n\n private scatter<R extends Rank>(\n indices: Tensor, updates: Tensor, shape: ShapeMap[R], outputSize: number,\n sliceSize: number, numUpdates: number, sliceRank: number,\n strides: number[], defaultValue: Scalar,\n sumDupeIndices: boolean): Tensor<R> {\n const flattenShape = [outputSize / sliceSize, sliceSize];\n const indicesData = indices.dataSync();\n const updatesData = updates.dataSync();\n\n if (outputSize === 0) {\n return tensor([], shape, updates.dtype);\n }\n\n const buffer = new TensorBuffer(flattenShape, updates.dtype);\n buffer.values.fill(defaultValue.dataSync()[0]);\n\n for (let i = 0; i < numUpdates; i++) {\n const index = [];\n let flattenIndex = 0;\n for (let j = 0; j < sliceRank; j++) {\n const dim = indicesData[i * sliceRank + j];\n index.push(dim);\n flattenIndex += dim * strides[j];\n }\n\n if (flattenIndex < 0 || flattenIndex >= outputSize / sliceSize) {\n throw new Error(\n `Invalid indices: ${index} does not index into ${shape}`);\n }\n\n for (let k = 0; k < sliceSize; k++) {\n if (sumDupeIndices) {\n buffer.values[flattenIndex * sliceSize + k] +=\n updatesData[i * sliceSize + k];\n } else {\n buffer.values[flattenIndex * sliceSize + k] = updates.rank === 0 ?\n updatesData[0] :\n updatesData[i * sliceSize + k];\n }\n }\n }\n return buffer.toTensor().reshape(shape);\n }\n}\n\nENV.registerBackend(\n 'cpu', () => new MathBackendCPU(), 1 /* priority */, setTensorTracker);\n","/**\n * @license\n * Copyright 2017 Google Inc. 
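// The backend methods above back the public `tf.gatherND` and `tf.scatterND`
// ops. A hedged sketch of both directions, assuming those ops are exported by
// this bundle; note that `scatterND` sums duplicate indices, matching
// `sumDupeIndices === true` in the kernel:
const mat = tf.tensor2d([[1, 2], [3, 4]]);
const rows = tf.gatherND(mat, tf.tensor2d([[1], [0]], [2, 1], 'int32'));
rows.print();  // [[3, 4], [1, 2]]

const scatterIndices = tf.tensor2d([[0], [2], [0]], [3, 1], 'int32');
const scatterUpdates = tf.tensor1d([10, 20, 30]);
tf.scatterND(scatterIndices, scatterUpdates, [4]).print();  // [40, 0, 20, 0]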
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nconst delayCallback = typeof requestAnimationFrame !== 'undefined' ?\n requestAnimationFrame : // Browsers\n setImmediate; // Node.js\n\n/**\n * Returns a promise that resolve when a requestAnimationFrame has completed.\n *\n * On Node.js this uses setImmediate instead of requestAnimationFrame.\n *\n * This is simply a sugar method so that users can do the following:\n * `await tf.nextFrame();`\n */\n/** @doc {heading: 'Performance', subheading: 'Timing'} */\nfunction nextFrame(): Promise<void> {\n return new Promise<void>(resolve => delayCallback(() => resolve()));\n}\n\nexport {nextFrame};\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/* Type definitions for exporting and importing of models. */\n\n/**\n * A map from Tensor dtype to number of bytes per element of the Tensor.\n */\nexport const DTYPE_VALUE_SIZE_MAP: {[dtype: string]: number} = {\n 'float32': 4,\n 'int32': 4,\n 'uint16': 2,\n 'uint8': 1,\n 'bool': 1,\n};\n\n/**\n * A weight manifest.\n *\n * The weight manifest consists of an ordered list of weight-manifest groups.\n * Each weight-manifest group (\"group\" for short hereafter) consists of a\n * number of weight values stored in a number of paths.\n * See the documentation of `WeightManifestGroupConfig` below for more details.\n */\nexport declare type WeightsManifestConfig = WeightsManifestGroupConfig[];\n\n/**\n * A weight-manifest group.\n *\n * Consists of an ordered list of weight values encoded in binary format,\n * stored in an ordered list of paths.\n */\nexport declare interface WeightsManifestGroupConfig {\n /**\n * An ordered list of paths.\n *\n * Paths are intentionally abstract in order to be general. 
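// `nextFrame()` resolves on the next requestAnimationFrame tick in browsers
// (setImmediate under Node.js), which makes it handy for yielding to the UI
// between chunks of heavy work. A small sketch; the work inside the loop is a
// placeholder:
async function runInChunks(steps: number) {
  for (let i = 0; i < steps; i++) {
    // ... do one slice of synchronous work here ...
    await tf.nextFrame();  // let the browser render a frame before continuing
  }
}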
For example, they\n * can be relative URL paths or relative paths on the file system.\n */\n paths: string[];\n\n /**\n * Specifications of the weights stored in the paths.\n */\n weights: WeightsManifestEntry[];\n}\n\n/**\n * An entry in the weight manifest.\n *\n * The entry contains specification of a weight.\n */\nexport declare interface WeightsManifestEntry {\n /**\n * Name of the weight, e.g., 'Dense_1/bias'\n */\n name: string;\n\n /**\n * Shape of the weight.\n */\n shape: number[];\n\n /**\n * Data type of the weight.\n */\n dtype: 'float32'|'int32'|'bool';\n\n /**\n * Information for dequantization of the weight.\n */\n quantization?: {\n scale: number, // The scaling constant to multiply by.\n min: number, // The (possibly nudged) minimum weight to add.\n dtype: 'uint16'|'uint8' // The dtype of the quantized weights.\n };\n}\n\n/**\n * Options for saving a model.\n */\nexport interface SaveConfig {\n /**\n * Whether to save only the trainable weights of the model, ignoring the\n * untrainable ones.\n */\n trainableOnly?: boolean;\n}\n\n/**\n * Result of a saving operation.\n */\nexport interface SaveResult {\n /**\n * Information about the model artifacts saved.\n */\n modelArtifactsInfo: ModelArtifactsInfo;\n\n /**\n * HTTP responses from the server that handled the model-saving request (if\n * any). This is applicable only to server-based saving routes.\n */\n responses?: Response[];\n\n /**\n * Error messages and related data (if any).\n */\n errors?: Array<{}|string>;\n}\n\nexport declare interface ModelArtifactsInfo {\n /**\n * Timestamp for when the model is saved.\n */\n dateSaved: Date;\n\n /**\n * Type of the model topology\n *\n * Possible values:\n * - JSON: JSON config (human-readable, e.g., Keras JSON).\n * - GraphDef: TensorFlow\n * [GraphDef](https://www.tensorflow.org/extend/tool_developers/#graphdef)\n * protocol buffer (binary).\n */\n modelTopologyType: 'JSON'|'GraphDef';\n\n /**\n * Size of model topology (Keras JSON or GraphDef), in bytes.\n */\n modelTopologyBytes?: number;\n\n /**\n * Size of weight specification or manifest, in bytes.\n */\n weightSpecsBytes?: number;\n\n /**\n * Size of weight value data, in bytes.\n */\n weightDataBytes?: number;\n}\n\n/**\n * The serialized artifacts of a model, including topology and weights.\n *\n * The `modelTopology`, `weightSpecs` and `weightData` fields of this interface\n * are optional, in order to support topology- or weights-only saving and\n * loading.\n */\nexport declare interface ModelArtifacts {\n /**\n * Model topology.\n *\n * For Keras-style `tf.Model`s, this is a JSON object.\n * For TensorFlow-style models (e.g., `FrozenModel`), this is a binary buffer\n * carrying the `GraphDef` protocol buffer.\n */\n modelTopology?: {}|ArrayBuffer;\n\n /**\n * Weight specifications.\n *\n * This corresponds to the weightsData below.\n */\n weightSpecs?: WeightsManifestEntry[];\n\n /**\n * Binary buffer for all weight values concatenated in the order specified\n * by `weightSpecs`.\n */\n weightData?: ArrayBuffer;\n}\n\n/**\n * Type definition for handlers of loading operations.\n */\nexport type LoadHandler = () => Promise<ModelArtifacts>;\n\n/**\n * Type definition for handlers of saving operations.\n */\nexport type SaveHandler = (modelArtifact: ModelArtifacts) =>\n Promise<SaveResult>;\n\n/**\n * Interface for a model import/export handler.\n *\n * The `save` and `load` handlers are both optional, in order to allow handlers\n * that support only saving or loading.\n */\n// 
tslint:disable-next-line:interface-name\nexport interface IOHandler {\n save?: SaveHandler;\n load?: LoadHandler;\n}\n\n/**\n * An interface for the manager of a model store.\n *\n * A model store is defined as a storage medium on which multiple models can\n * be stored. Each stored model has a unique `path` as its identifier.\n * A `ModelStoreManager` for the store allows actions including\n *\n * - Listing the models stored in the store.\n * - Deleting a model from the store.\n */\nexport interface ModelStoreManager {\n /**\n * List all models in the model store.\n *\n * @returns A dictionary mapping paths of existing models to their\n * model artifacts info. Model artifacts info include type of the model's\n * topology, byte sizes of the topology, weights, etc.\n */\n listModels(): Promise<{[path: string]: ModelArtifactsInfo}>;\n\n /**\n * Remove a model specified by `path`.\n *\n * @param path\n * @returns ModelArtifactsInfo of the deleted model (if and only if deletion\n * is successful).\n * @throws Error if deletion fails, e.g., if no model exists at `path`.\n */\n removeModel(path: string): Promise<ModelArtifactsInfo>;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {tensor} from '../ops/tensor_ops';\nimport {Tensor} from '../tensor';\nimport {NamedTensorMap} from '../tensor_types';\nimport {TypedArray} from '../types';\nimport {sizeFromShape} from '../util';\nimport {DTYPE_VALUE_SIZE_MAP, ModelArtifacts, ModelArtifactsInfo, WeightsManifestEntry} from './types';\n\n/**\n * Encode a map from names to weight values as an ArrayBuffer, along with an\n * `Array` of `WeightsManifestEntry` as specification of the encoded weights.\n *\n * This function does not perform sharding.\n *\n * This function is the reverse of `decodeWeights`.\n *\n * @param tensors A map (\"dict\") from names to tensors.\n * @returns A `Promise` of\n * - A flat `ArrayBuffer` with all the binary values of the `Tensor`s\n * concatenated.\n * - An `Array` of `WeightManifestEntry`s, carrying information including\n * tensor names, `dtype`s and shapes.\n * @throws Error: on unsupported tensor `dtype`.\n */\nexport async function encodeWeights(tensors: NamedTensorMap):\n Promise<{data: ArrayBuffer, specs: WeightsManifestEntry[]}> {\n // TODO(adarob, cais): Support quantization.\n const specs: WeightsManifestEntry[] = [];\n const dataPromises: Array<Promise<TypedArray>> = [];\n for (const name in tensors) {\n const t = tensors[name];\n\n if (t.dtype !== 'float32' && t.dtype !== 'int32' && t.dtype !== 'bool') {\n throw new Error(`Unsupported dtype in weight '${name}': ${t.dtype}`);\n }\n specs.push({name, shape: t.shape, dtype: t.dtype});\n dataPromises.push(t.data());\n }\n const tensorValues = await Promise.all(dataPromises);\n return {data: concatenateTypedArrays(tensorValues), specs};\n}\n\n/**\n * Decode flat ArrayBuffer as weights.\n *\n 
* This function does not handle sharding.\n *\n * This function is the reverse of `encodeWeights`.\n *\n * @param buffer A flat ArrayBuffer carrying the binary values of the tensors\n * concatenated in the order specified in `specs`.\n * @param specs Specifications of the names, dtypes and shapes of the tensors\n * whose value are encoded by `buffer`.\n * @return A map from tensor name to tensor value, with the names corresponding\n * to names in `specs`.\n * @throws Error, if any of the tensors has unsupported dtype.\n */\nexport function decodeWeights(\n buffer: ArrayBuffer, specs: WeightsManifestEntry[]): NamedTensorMap {\n // TODO(adarob, cais): Support quantization.\n const out: NamedTensorMap = {};\n let offset = 0;\n for (const spec of specs) {\n const name = spec.name;\n const dtype = spec.dtype;\n const shape = spec.shape;\n const size = sizeFromShape(shape);\n let typedArray: TypedArray;\n\n if ('quantization' in spec) {\n const quantization = spec.quantization;\n if (quantization.dtype !== 'uint8' && quantization.dtype !== 'uint16') {\n throw new Error(\n `Weight ${spec.name} has unknown ` +\n `quantization dtype ${quantization.dtype}. ` +\n `Supported quantization dtypes are: 'uint8' and 'uint16'.`);\n }\n const quantizationSizeFactor = DTYPE_VALUE_SIZE_MAP[quantization.dtype];\n const byteBuffer =\n buffer.slice(offset, offset + size * quantizationSizeFactor);\n const quantizedArray = (quantization.dtype === 'uint8') ?\n new Uint8Array(byteBuffer) :\n new Uint16Array(byteBuffer);\n if (dtype === 'float32') {\n typedArray = Float32Array.from(\n quantizedArray, v => v * quantization.scale + quantization.min);\n } else if (dtype === 'int32') {\n typedArray = Int32Array.from(\n quantizedArray,\n v => Math.round(v * quantization.scale + quantization.min));\n } else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * quantizationSizeFactor;\n } else {\n const dtypeFactor = DTYPE_VALUE_SIZE_MAP[dtype];\n const byteBuffer = buffer.slice(offset, offset + size * dtypeFactor);\n\n if (dtype === 'float32') {\n typedArray = new Float32Array(byteBuffer);\n } else if (dtype === 'int32') {\n typedArray = new Int32Array(byteBuffer);\n } else if (dtype === 'bool') {\n typedArray = new Uint8Array(byteBuffer);\n } else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * dtypeFactor;\n }\n\n let value: Tensor;\n if (dtype === 'float32') {\n value = tensor(typedArray, shape, 'float32');\n } else if (dtype === 'int32') {\n value = tensor(typedArray, shape, 'int32');\n } else if (dtype === 'bool') {\n value = tensor(typedArray, shape, 'bool');\n } else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n out[name] = value;\n }\n return out;\n}\n\n/**\n * Concatenate TypedArrays into an ArrayBuffer.\n */\nexport function concatenateTypedArrays(xs: TypedArray[]): ArrayBuffer {\n // TODO(adarob, cais): Support quantization.\n if (xs === null) {\n throw new Error(`Invalid input value: ${JSON.stringify(xs)}`);\n }\n\n let totalByteLength = 0;\n\n // `normalizedXs` is here for this reason: a `TypedArray`'s `buffer'\n // can have a different byte length from that of the `TypedArray` itself,\n // for example, when the `TypedArray` is created from an offset in an\n // `ArrayBuffer`. `normliazedXs` holds `TypedArray`s whose `buffer`s match\n // the `TypedArray` in byte length. 
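// `encodeWeights` and `decodeWeights` are symmetric: `specs` records each
// tensor's name, shape and dtype (plus optional quantization metadata), while
// the flat ArrayBuffer holds the raw values back to back. A round-trip sketch,
// assuming both functions are re-exported under `tf.io` in this bundle and
// that this code runs inside an async function:
const named = {
  'dense/kernel': tf.tensor2d([[1, 2], [3, 4]]),
  'dense/bias': tf.tensor1d([0, 0], 'int32'),
};
const {data, specs} = await tf.io.encodeWeights(named);
const decoded = tf.io.decodeWeights(data, specs);
decoded['dense/kernel'].print();  // [[1, 2], [3, 4]]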
If an element of `xs` does not show\n // this property, a new `TypedArray` that satisfy this property will be\n // constructed and pushed into `normalizedXs`.\n const normalizedXs: TypedArray[] = [];\n xs.forEach((x: TypedArray) => {\n totalByteLength += x.byteLength;\n // tslint:disable:no-any\n normalizedXs.push(\n x.byteLength === x.buffer.byteLength ? x :\n new (x.constructor as any)(x));\n if (!(x as any instanceof Float32Array || x as any instanceof Int32Array ||\n x as any instanceof Uint8Array)) {\n throw new Error(`Unsupported TypedArray subtype: ${x.constructor.name}`);\n }\n // tslint:enable:no-any\n });\n\n const y = new Uint8Array(totalByteLength);\n let offset = 0;\n normalizedXs.forEach((x: TypedArray) => {\n y.set(new Uint8Array(x.buffer), offset);\n offset += x.byteLength;\n });\n\n return y.buffer;\n}\n\n// Use Buffer on Node.js instead of Blob/atob/btoa\nconst useNodeBuffer = typeof Buffer !== 'undefined' &&\n (typeof Blob === 'undefined' || typeof atob === 'undefined' ||\n typeof btoa === 'undefined');\n\n/**\n * Calculate the byte length of a JavaScript string.\n *\n * Note that a JavaScript string can contain wide characters, therefore the\n * length of the string is not necessarily equal to the byte length.\n *\n * @param str Input string.\n * @returns Byte length.\n */\nexport function stringByteLength(str: string): number {\n if (useNodeBuffer) {\n return Buffer.byteLength(str);\n }\n return new Blob([str]).size;\n}\n\n/**\n * Encode an ArrayBuffer as a base64 encoded string.\n *\n * @param buffer `ArrayBuffer` to be converted.\n * @returns A string that base64-encodes `buffer`.\n */\nexport function arrayBufferToBase64String(buffer: ArrayBuffer): string {\n if (useNodeBuffer) {\n return Buffer.from(buffer).toString('base64');\n }\n return btoa(String.fromCharCode.apply(null, new Uint8Array(buffer)));\n}\n\n/**\n * Decode a base64 string as an ArrayBuffer.\n *\n * @param str Base64 string.\n * @returns Decoded `ArrayBuffer`.\n */\nexport function base64StringToArrayBuffer(str: string): ArrayBuffer {\n if (useNodeBuffer) {\n const buf = Buffer.from(str, 'base64');\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n }\n const s = atob(str);\n const buffer = new Uint8Array(s.length);\n for (let i = 0; i < s.length; ++i) {\n buffer.set([s.charCodeAt(i)], i);\n }\n return buffer.buffer;\n}\n\n/**\n * Concatenate a number of ArrayBuffers into one.\n *\n * @param buffers A number of array buffers to concatenate.\n * @returns Result of concatenating `buffers` in order.\n */\nexport function concatenateArrayBuffers(buffers: ArrayBuffer[]): ArrayBuffer {\n let totalByteLength = 0;\n buffers.forEach((buffer: ArrayBuffer) => {\n totalByteLength += buffer.byteLength;\n });\n\n const temp = new Uint8Array(totalByteLength);\n let offset = 0;\n buffers.forEach((buffer: ArrayBuffer) => {\n temp.set(new Uint8Array(buffer), offset);\n offset += buffer.byteLength;\n });\n return temp.buffer;\n}\n\n/**\n * Get the basename of a path.\n *\n * Behaves in a way analogous to Linux's basename command.\n *\n * @param path\n */\nexport function basename(path: string): string {\n const SEPARATOR = '/';\n path = path.trim();\n while (path.endsWith(SEPARATOR)) {\n path = path.slice(0, path.length - 1);\n }\n const items = path.split(SEPARATOR);\n return items[items.length - 1];\n}\n\n/**\n * Populate ModelArtifactsInfo fields for a model with JSON topology.\n * @param modelArtifacts\n * @returns A ModelArtifactsInfo object.\n */\nexport function 
getModelArtifactsInfoForJSON(modelArtifacts: ModelArtifacts):\n ModelArtifactsInfo {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error('Expected JSON model topology, received ArrayBuffer.');\n }\n\n return {\n dateSaved: new Date(),\n modelTopologyType: 'JSON',\n modelTopologyBytes: modelArtifacts.modelTopology == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.modelTopology)),\n weightSpecsBytes: modelArtifacts.weightSpecs == null ?\n 0 :\n stringByteLength(JSON.stringify(modelArtifacts.weightSpecs)),\n weightDataBytes: modelArtifacts.weightData == null ?\n 0 :\n modelArtifacts.weightData.byteLength,\n };\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {IOHandler} from './types';\n\nexport type IORouter = (url: string|string[]) => IOHandler;\n\nexport class IORouterRegistry {\n // Singleton instance.\n private static instance: IORouterRegistry;\n\n private saveRouters: IORouter[];\n private loadRouters: IORouter[];\n\n private constructor() {\n this.saveRouters = [];\n this.loadRouters = [];\n }\n\n private static getInstance(): IORouterRegistry {\n if (IORouterRegistry.instance == null) {\n IORouterRegistry.instance = new IORouterRegistry();\n }\n return IORouterRegistry.instance;\n }\n\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerSaveRouter(saveRouter: IORouter) {\n IORouterRegistry.getInstance().saveRouters.push(saveRouter);\n }\n\n /**\n * Register a load-handler router.\n *\n * @param loadRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `load` method defined or `null`.\n */\n static registerLoadRouter(loadRouter: IORouter) {\n IORouterRegistry.getInstance().loadRouters.push(loadRouter);\n }\n\n /**\n * Look up IOHandler for saving, given a URL-like string.\n *\n * @param url\n * @returns If only one match is found, an instance of IOHandler with the\n * `save` method defined. If no match is found, `null`.\n * @throws Error, if more than one match is found.\n */\n static getSaveHandlers(url: string|string[]): IOHandler[] {\n return IORouterRegistry.getHandlers(url, 'save');\n }\n\n /**\n * Look up IOHandler for loading, given a URL-like string.\n *\n * @param url\n * @returns All valid handlers for `url`, given the currently registered\n * handler routers.\n */\n static getLoadHandlers(url: string|string[]): IOHandler[] {\n return IORouterRegistry.getHandlers(url, 'load');\n }\n\n private static getHandlers(url: string|string[], handlerType: 'save'|'load'):\n IOHandler[] {\n const validHandlers: IOHandler[] = [];\n const routers = handlerType === 'load' ? 
this.getInstance().loadRouters :\n this.getInstance().saveRouters;\n routers.forEach(router => {\n const handler = router(url);\n if (handler !== null) {\n validHandlers.push(handler);\n }\n });\n return validHandlers;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * Classes and functions for model management across multiple storage mediums.\n *\n * Supported client actions:\n * - Listing models on all registered storage mediums.\n * - Remove model by URL from any registered storage mediums, by using URL\n * string.\n * - Moving or copying model from one path to another in the same medium or from\n * one medium to another, by using URL strings.\n */\n\nimport {assert} from '../util';\n\nimport {IORouterRegistry} from './router_registry';\nimport {ModelArtifactsInfo, ModelStoreManager} from './types';\n\nconst URL_SCHEME_SUFFIX = '://';\n\nexport class ModelStoreManagerRegistry {\n // Singleton instance.\n private static instance: ModelStoreManagerRegistry;\n\n private managers: {[scheme: string]: ModelStoreManager};\n\n private constructor() {\n this.managers = {};\n }\n\n private static getInstance(): ModelStoreManagerRegistry {\n if (ModelStoreManagerRegistry.instance == null) {\n ModelStoreManagerRegistry.instance = new ModelStoreManagerRegistry();\n }\n return ModelStoreManagerRegistry.instance;\n }\n\n /**\n * Register a save-handler router.\n *\n * @param saveRouter A function that maps a URL-like string onto an instance\n * of `IOHandler` with the `save` method defined or `null`.\n */\n static registerManager(scheme: string, manager: ModelStoreManager) {\n assert(scheme != null, 'scheme must not be undefined or null.');\n if (scheme.endsWith(URL_SCHEME_SUFFIX)) {\n scheme = scheme.slice(0, scheme.indexOf(URL_SCHEME_SUFFIX));\n }\n assert(scheme.length > 0, 'scheme must not be an empty string.');\n const registry = ModelStoreManagerRegistry.getInstance();\n assert(\n registry.managers[scheme] == null,\n `A model store manager is already registered for scheme '${scheme}'.`);\n registry.managers[scheme] = manager;\n }\n\n static getManager(scheme: string): ModelStoreManager {\n const manager = this.getInstance().managers[scheme];\n if (manager == null) {\n throw new Error(`Cannot find model manager for scheme '${scheme}'`);\n }\n return manager;\n }\n\n static getSchemes(): string[] {\n return Object.keys(this.getInstance().managers);\n }\n}\n\n/**\n * Helper method for parsing a URL string into a scheme and a path.\n *\n * @param url E.g., 'localstorage://my-model'\n * @returns A dictionary with two fields: scheme and path.\n * Scheme: e.g., 'localstorage' in the example above.\n * Path: e.g., 'my-model' in the example above.\n */\nfunction parseURL(url: string): {scheme: string, path: string} {\n if (url.indexOf(URL_SCHEME_SUFFIX) === -1) {\n throw new Error(\n `The url string provided does not contain 
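// `IOHandler` only needs `save` and/or `load`, and a router simply maps a
// URL-like string to a handler (or null). A sketch of plugging a custom
// in-memory, load-only handler into the registry defined above; the
// 'memory://' scheme and the handler itself are invented for illustration:
let storedArtifacts: ModelArtifacts|null = null;

const memoryHandler: IOHandler = {
  load: async () => {
    if (storedArtifacts == null) {
      throw new Error('No model has been stored in memory yet.');
    }
    return storedArtifacts;
  },
};

IORouterRegistry.registerLoadRouter(
    (url) => !Array.isArray(url) && url.startsWith('memory://') ?
        memoryHandler : null);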
a scheme. ` +\n `Supported schemes are: ` +\n `${ModelStoreManagerRegistry.getSchemes().join(',')}`);\n }\n return {\n scheme: url.split(URL_SCHEME_SUFFIX)[0],\n path: url.split(URL_SCHEME_SUFFIX)[1],\n };\n}\n\nasync function cloneModelInternal(\n sourceURL: string, destURL: string,\n deleteSource = false): Promise<ModelArtifactsInfo> {\n assert(\n sourceURL !== destURL,\n `Old path and new path are the same: '${sourceURL}'`);\n\n const loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL);\n assert(\n loadHandlers.length > 0,\n `Copying failed because no load handler is found for source URL ${\n sourceURL}.`);\n assert(\n loadHandlers.length < 2,\n `Copying failed because more than one (${loadHandlers.length}) ` +\n `load handlers for source URL ${sourceURL}.`);\n const loadHandler = loadHandlers[0];\n\n const saveHandlers = IORouterRegistry.getSaveHandlers(destURL);\n assert(\n saveHandlers.length > 0,\n `Copying failed because no save handler is found for destination URL ` +\n `${destURL}.`);\n assert(\n saveHandlers.length < 2,\n `Copying failed because more than one (${loadHandlers.length}) ` +\n `save handlers for destination URL ${destURL}.`);\n const saveHandler = saveHandlers[0];\n\n const sourceScheme = parseURL(sourceURL).scheme;\n const sourcePath = parseURL(sourceURL).path;\n const sameMedium = sourceScheme === parseURL(sourceURL).scheme;\n\n const modelArtifacts = await loadHandler.load();\n\n // If moving within the same storage medium, remove the old model as soon as\n // the loading is done. Without doing this, it is possible that the combined\n // size of the two models will cause the cloning to fail.\n if (deleteSource && sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n\n const saveResult = await saveHandler.save(modelArtifacts);\n\n // If moving between mediums, the deletion is done after the save succeeds.\n // This guards against the case in which saving to the destination medium\n // fails.\n if (deleteSource && !sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme)\n .removeModel(sourcePath);\n }\n\n return saveResult.modelArtifactsInfo;\n}\n\n/**\n * List all models stored in registered storage mediums.\n *\n * For a web browser environment, the registered mediums are Local Storage and\n * IndexedDB.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @returns A `Promise` of a dictionary mapping URLs of existing models to\n * their model artifacts info. URLs include medium-specific schemes, e.g.,\n * 'indexeddb://my/model/1'. 
Model artifacts info include type of the\n * model's topology, byte sizes of the topology, weights, etc.\n */\n/** @doc {heading: 'Models', subheading: 'Management', namespace: 'io'} */\nasync function listModels(): Promise<{[url: string]: ModelArtifactsInfo}> {\n const schemes = ModelStoreManagerRegistry.getSchemes();\n const out: {[url: string]: ModelArtifactsInfo} = {};\n for (const scheme of schemes) {\n const schemeOut =\n await ModelStoreManagerRegistry.getManager(scheme).listModels();\n for (const path in schemeOut) {\n const url = scheme + URL_SCHEME_SUFFIX + path;\n out[url] = schemeOut[path];\n }\n }\n return out;\n}\n\n/**\n * Remove a model specified by URL from a reigstered storage medium.\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Delete the model.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n * ```\n *\n * @param url A URL to a stored model, with a scheme prefix, e.g.,\n * 'localstorage://my-model-1', 'indexeddb://my/model/2'.\n * @returns ModelArtifactsInfo of the deleted model (if and only if deletion\n * is successful).\n * @throws Error if deletion fails, e.g., if no model exists at `path`.\n */\n/** @doc {heading: 'Models', subheading: 'Management', namespace: 'io'} */\nasync function removeModel(url: string): Promise<ModelArtifactsInfo> {\n const schemeAndPath = parseURL(url);\n const manager = ModelStoreManagerRegistry.getManager(schemeAndPath.scheme);\n return await manager.removeModel(schemeAndPath.path);\n}\n\n/**\n * Copy a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Copying within a storage medium, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. 
Copying between two storage mediums, e.g.,\n * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Copy the model, from Local Storage to IndexedDB.\n * await tf.io.copyModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove both models.\n * await tf.io.removeModel('localstorage://demo/management/model1');\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of copying.\n * @param destURL Destination URL of copying.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n */\n/** @doc {heading: 'Models', subheading: 'Management', namespace: 'io'} */\nasync function copyModel(\n sourceURL: string, destURL: string): Promise<ModelArtifactsInfo> {\n const deleteSource = false;\n return await cloneModelInternal(sourceURL, destURL, deleteSource);\n}\n\n/**\n * Move a model from one URL to another.\n *\n * This function supports:\n *\n * 1. Moving within a storage medium, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')`\n * 2. Moving between two storage mediums, e.g.,\n * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')`\n *\n * ```js\n * // First create and save a model.\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * await model.save('localstorage://demo/management/model1');\n *\n * // Then list existing models.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Move the model, from Local Storage to IndexedDB.\n * await tf.io.moveModel(\n * 'localstorage://demo/management/model1',\n * 'indexeddb://demo/management/model1');\n *\n * // List models again.\n * console.log(JSON.stringify(await tf.io.listModels()));\n *\n * // Remove the moved model.\n * await tf.io.removeModel('indexeddb://demo/management/model1');\n * ```\n *\n * @param sourceURL Source URL of moving.\n * @param destURL Destination URL of moving.\n * @returns ModelArtifactsInfo of the copied model (if and only if copying\n * is successful).\n * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or\n * if `oldPath` and `newPath` are identical.\n */\n/** @doc {heading: 'Models', subheading: 'Management', namespace: 'io'} */\nasync function moveModel(\n sourceURL: string, destURL: string): Promise<ModelArtifactsInfo> {\n const deleteSource = true;\n return await cloneModelInternal(sourceURL, destURL, deleteSource);\n}\n\nexport {moveModel, copyModel, removeModel, listModels};\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {getModelArtifactsInfoForJSON} from './io_utils';\nimport {ModelStoreManagerRegistry} from './model_management';\nimport {IORouter, IORouterRegistry} from './router_registry';\nimport {IOHandler, ModelArtifacts, ModelArtifactsInfo, ModelStoreManager, SaveResult} from './types';\n\nconst DATABASE_NAME = 'tensorflowjs';\nconst DATABASE_VERSION = 1;\n\n// Model data and ModelArtifactsInfo (metadata) are stored in two separate\n// stores for efficient access of the list of stored models and their metadata.\n// 1. The object store for model data: topology, weights and weight manifests.\nconst MODEL_STORE_NAME = 'models_store';\n// 2. The object store for ModelArtifactsInfo, including meta-information such\n// as the type of topology (JSON vs binary), byte size of the topology, byte\n// size of the weights, etc.\nconst INFO_STORE_NAME = 'model_info_store';\n\n/**\n * Delete the entire database for tensorflow.js, including the models store.\n */\nexport async function deleteDatabase(): Promise<void> {\n const idbFactory = getIndexedDBFactory();\n\n return new Promise<void>((resolve, reject) => {\n const deleteRequest = idbFactory.deleteDatabase(DATABASE_NAME);\n deleteRequest.onsuccess = () => resolve();\n deleteRequest.onerror = error => reject(error);\n });\n}\n\nfunction getIndexedDBFactory(): IDBFactory {\n if (!ENV.get('IS_BROWSER')) {\n // TODO(cais): Add more info about what IOHandler subtypes are available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error(\n 'Failed to obtain IndexedDB factory because the current environment' +\n 'is not a web browser.');\n }\n // tslint:disable-next-line:no-any\n const theWindow: any = window;\n const factory = theWindow.indexedDB || theWindow.mozIndexedDB ||\n theWindow.webkitIndexedDB || theWindow.msIndexedDB ||\n theWindow.shimIndexedDB;\n if (factory == null) {\n throw new Error(\n 'The current browser does not appear to support IndexedDB.');\n }\n return factory;\n}\n\nfunction setUpDatabase(openRequest: IDBRequest) {\n const db = openRequest.result as IDBDatabase;\n db.createObjectStore(MODEL_STORE_NAME, {keyPath: 'modelPath'});\n db.createObjectStore(INFO_STORE_NAME, {keyPath: 'modelPath'});\n}\n\n/**\n * IOHandler subclass: Browser IndexedDB.\n *\n * See the doc string of `browserIndexedDB` for more details.\n */\nexport class BrowserIndexedDB implements IOHandler {\n protected readonly indexedDB: IDBFactory;\n protected readonly modelPath: string;\n\n static readonly URL_SCHEME = 'indexeddb://';\n\n constructor(modelPath: string) {\n this.indexedDB = getIndexedDBFactory();\n\n if (modelPath == null || !modelPath) {\n throw new Error(\n 'For IndexedDB, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = 
modelPath;\n }\n\n async save(modelArtifacts: ModelArtifacts): Promise<SaveResult> {\n // TODO(cais): Support saving GraphDef models.\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\n 'BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n\n return this.databaseAction(this.modelPath, modelArtifacts) as\n Promise<SaveResult>;\n }\n\n async load(): Promise<ModelArtifacts> {\n return this.databaseAction(this.modelPath) as Promise<ModelArtifacts>;\n }\n\n /**\n * Perform database action to put model artifacts into or read model artifacts\n * from IndexedDB object store.\n *\n * Whether the action is put or get depends on whether `modelArtifacts` is\n * specified. If it is specified, the action will be put; otherwise the action\n * will be get.\n *\n * @param modelPath A unique string path for the model.\n * @param modelArtifacts If specified, it will be the model artifacts to be\n * stored in IndexedDB.\n * @returns A `Promise` of `SaveResult`, if the action is put, or a `Promise`\n * of `ModelArtifacts`, if the action is get.\n */\n private databaseAction(modelPath: string, modelArtifacts?: ModelArtifacts):\n Promise<ModelArtifacts|SaveResult> {\n return new Promise<ModelArtifacts|SaveResult>((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n\n openRequest.onsuccess = () => {\n const db = openRequest.result as IDBDatabase;\n\n if (modelArtifacts == null) {\n // Read model out from object store.\n const modelTx = db.transaction(MODEL_STORE_NAME, 'readonly');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const getRequest = modelStore.get(this.modelPath);\n getRequest.onsuccess = () => {\n if (getRequest.result == null) {\n db.close();\n return reject(new Error(\n `Cannot find model with path '${this.modelPath}' ` +\n `in IndexedDB.`));\n } else {\n resolve(getRequest.result.modelArtifacts);\n }\n };\n getRequest.onerror = error => {\n db.close();\n return reject(getRequest.error);\n };\n modelTx.oncomplete = () => db.close();\n } else {\n // Put model into object store.\n const modelArtifactsInfo: ModelArtifactsInfo =\n getModelArtifactsInfoForJSON(modelArtifacts);\n // First, put ModelArtifactsInfo into info store.\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n let infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const putInfoRequest =\n infoStore.put({modelPath: this.modelPath, modelArtifactsInfo});\n let modelTx: IDBTransaction;\n putInfoRequest.onsuccess = () => {\n // Second, put model data into model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const putModelRequest = modelStore.put({\n modelPath: this.modelPath,\n modelArtifacts,\n modelArtifactsInfo\n });\n putModelRequest.onsuccess = () => resolve({modelArtifactsInfo});\n putModelRequest.onerror = error => {\n // If the put-model request fails, roll back the info entry as\n // well.\n infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const deleteInfoRequest = infoStore.delete(this.modelPath);\n deleteInfoRequest.onsuccess = () => {\n db.close();\n return reject(putModelRequest.error);\n };\n deleteInfoRequest.onerror = error => {\n db.close();\n return reject(putModelRequest.error);\n };\n };\n };\n putInfoRequest.onerror = error => {\n db.close();\n return reject(putInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n 
if (modelTx == null) {\n db.close();\n } else {\n modelTx.oncomplete = () => db.close();\n }\n };\n }\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\n\nexport const indexedDBRouter: IORouter = (url: string|string[]) => {\n if (!ENV.get('IS_BROWSER')) {\n return null;\n } else {\n if (!Array.isArray(url) && url.startsWith(BrowserIndexedDB.URL_SCHEME)) {\n return browserIndexedDB(url.slice(BrowserIndexedDB.URL_SCHEME.length));\n } else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(indexedDBRouter);\nIORouterRegistry.registerLoadRouter(indexedDBRouter);\n\n/**\n * Creates a browser IndexedDB IOHandler for saving and loading models.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save('indexeddb://MyModel'));\n * console.log(saveResult);\n * ```\n *\n * @param modelPath A unique identifier for the model to be saved. Must be a\n * non-empty string.\n * @returns An instance of `BrowserIndexedDB` (sublcass of `IOHandler`),\n * which can be used with, e.g., `tf.Model.save`.\n */\nexport function browserIndexedDB(modelPath: string): IOHandler {\n return new BrowserIndexedDB(modelPath);\n}\n\nfunction maybeStripScheme(key: string) {\n return key.startsWith(BrowserIndexedDB.URL_SCHEME) ?\n key.slice(BrowserIndexedDB.URL_SCHEME.length) :\n key;\n}\n\nexport class BrowserIndexedDBManager implements ModelStoreManager {\n private indexedDB: IDBFactory;\n\n constructor() {\n this.indexedDB = getIndexedDBFactory();\n }\n\n async listModels(): Promise<{[path: string]: ModelArtifactsInfo}> {\n return new Promise<{[path: string]: ModelArtifactsInfo}>(\n (resolve, reject) => {\n const openRequest =\n this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n\n openRequest.onsuccess = () => {\n const db = openRequest.result as IDBDatabase;\n const tx = db.transaction(INFO_STORE_NAME, 'readonly');\n const store = tx.objectStore(INFO_STORE_NAME);\n // tslint:disable:max-line-length\n // Need to cast `store` as `any` here because TypeScript's DOM\n // library does not have the `getAll()` method even though the\n // method is supported in the latest version of most mainstream\n // browsers:\n // https://developer.mozilla.org/en-US/docs/Web/API/IDBObjectStore/getAll\n // tslint:enable:max-line-length\n // tslint:disable-next-line:no-any\n const getAllInfoRequest = (store as any).getAll() as IDBRequest;\n getAllInfoRequest.onsuccess = () => {\n const out: {[path: string]: ModelArtifactsInfo} = {};\n for (const item of getAllInfoRequest.result) {\n out[item.modelPath] = item.modelArtifactsInfo;\n }\n resolve(out);\n };\n getAllInfoRequest.onerror = error => {\n db.close();\n return reject(getAllInfoRequest.error);\n };\n tx.oncomplete = () => db.close();\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n\n async removeModel(path: string): Promise<ModelArtifactsInfo> {\n path = maybeStripScheme(path);\n return new Promise<ModelArtifactsInfo>((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n\n openRequest.onsuccess = () => {\n const db = openRequest.result as IDBDatabase;\n const infoTx = db.transaction(INFO_STORE_NAME, 'readwrite');\n const infoStore = infoTx.objectStore(INFO_STORE_NAME);\n\n const getInfoRequest = infoStore.get(path);\n 
let modelTx: IDBTransaction;\n getInfoRequest.onsuccess = () => {\n if (getInfoRequest.result == null) {\n db.close();\n return reject(new Error(\n `Cannot find model with path '${path}' ` +\n `in IndexedDB.`));\n } else {\n // First, delete the entry in the info store.\n const deleteInfoRequest = infoStore.delete(path);\n const deleteModelData = () => {\n // Second, delete the entry in the model store.\n modelTx = db.transaction(MODEL_STORE_NAME, 'readwrite');\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const deleteModelRequest = modelStore.delete(path);\n deleteModelRequest.onsuccess = () =>\n resolve(getInfoRequest.result.modelArtifactsInfo);\n deleteModelRequest.onerror = error =>\n reject(getInfoRequest.error);\n };\n // Proceed with deleting model data regardless of whether deletion\n // of info data succeeds or not.\n deleteInfoRequest.onsuccess = deleteModelData;\n deleteInfoRequest.onerror = error => {\n deleteModelData();\n db.close();\n return reject(getInfoRequest.error);\n };\n }\n };\n getInfoRequest.onerror = error => {\n db.close();\n return reject(getInfoRequest.error);\n };\n\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n } else {\n modelTx.oncomplete = () => db.close();\n }\n };\n };\n openRequest.onerror = error => reject(openRequest.error);\n });\n }\n}\n\nif (ENV.get('IS_BROWSER')) {\n // Wrap the construction and registration, to guard against browsers that\n // don't support Local Storage.\n try {\n ModelStoreManagerRegistry.registerManager(\n BrowserIndexedDB.URL_SCHEME, new BrowserIndexedDBManager());\n } catch (err) {\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {assert} from '../util';\nimport {arrayBufferToBase64String, base64StringToArrayBuffer, getModelArtifactsInfoForJSON} from './io_utils';\nimport {ModelStoreManagerRegistry} from './model_management';\nimport {IORouter, IORouterRegistry} from './router_registry';\nimport {IOHandler, ModelArtifacts, ModelArtifactsInfo, ModelStoreManager, SaveResult} from './types';\n\nconst PATH_SEPARATOR = '/';\nconst PATH_PREFIX = 'tensorflowjs_models';\nconst INFO_SUFFIX = 'info';\nconst MODEL_TOPOLOGY_SUFFIX = 'model_topology';\nconst WEIGHT_SPECS_SUFFIX = 'weight_specs';\nconst WEIGHT_DATA_SUFFIX = 'weight_data';\n\n/**\n * Purge all tensorflow.js-saved model artifacts from local storage.\n *\n * @returns Paths of the models purged.\n */\nexport function purgeLocalStorageArtifacts(): string[] {\n if (!ENV.get('IS_BROWSER') || typeof window.localStorage === 'undefined') {\n throw new Error(\n 'purgeLocalStorageModels() cannot proceed because local storage is ' +\n 'unavailable in the current environment.');\n }\n const LS = window.localStorage;\n const purgedModelPaths: string[] = [];\n for (let i = 0; i < LS.length; ++i) {\n const key = LS.key(i);\n const 
prefix = PATH_PREFIX + PATH_SEPARATOR;\n if (key.startsWith(prefix) && key.length > prefix.length) {\n LS.removeItem(key);\n const modelName = getModelPathFromKey(key);\n if (purgedModelPaths.indexOf(modelName) === -1) {\n purgedModelPaths.push(modelName);\n }\n }\n }\n return purgedModelPaths;\n}\n\nfunction getModelKeys(path: string):\n {info: string, topology: string, weightSpecs: string, weightData: string} {\n return {\n info: [PATH_PREFIX, path, INFO_SUFFIX].join(PATH_SEPARATOR),\n topology: [PATH_PREFIX, path, MODEL_TOPOLOGY_SUFFIX].join(PATH_SEPARATOR),\n weightSpecs: [PATH_PREFIX, path, WEIGHT_SPECS_SUFFIX].join(PATH_SEPARATOR),\n weightData: [PATH_PREFIX, path, WEIGHT_DATA_SUFFIX].join(PATH_SEPARATOR)\n };\n}\n\n/**\n * Get model path from a local-storage key.\n *\n * E.g., 'tensorflowjs_models/my/model/1/info' --> 'my/model/1'\n *\n * @param key\n */\nfunction getModelPathFromKey(key: string) {\n const items = key.split(PATH_SEPARATOR);\n if (items.length < 3) {\n throw new Error(`Invalid key format: ${key}`);\n }\n return items.slice(1, items.length - 1).join(PATH_SEPARATOR);\n}\n\nfunction maybeStripScheme(key: string) {\n return key.startsWith(BrowserLocalStorage.URL_SCHEME) ?\n key.slice(BrowserLocalStorage.URL_SCHEME.length) :\n key;\n}\n\n/**\n * IOHandler subclass: Browser Local Storage.\n *\n * See the doc string to `browserLocalStorage` for more details.\n */\nexport class BrowserLocalStorage implements IOHandler {\n protected readonly LS: Storage;\n protected readonly modelPath: string;\n protected readonly keys: {[key: string]: string};\n\n static readonly URL_SCHEME = 'localstorage://';\n\n constructor(modelPath: string) {\n if (!ENV.get('IS_BROWSER') || typeof window.localStorage === 'undefined') {\n // TODO(cais): Add more info about what IOHandler subtypes are\n // available.\n // Maybe point to a doc page on the web and/or automatically determine\n // the available IOHandlers and print them in the error message.\n throw new Error(\n 'The current environment does not support local storage.');\n }\n this.LS = window.localStorage;\n\n if (modelPath == null || !modelPath) {\n throw new Error(\n 'For local storage, modelPath must not be null, undefined or empty.');\n }\n this.modelPath = modelPath;\n this.keys = getModelKeys(this.modelPath);\n }\n\n /**\n * Save model artifacts to browser local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @param modelArtifacts The model artifacts to be stored.\n * @returns An instance of SaveResult.\n */\n async save(modelArtifacts: ModelArtifacts): Promise<SaveResult> {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\n 'BrowserLocalStorage.save() does not support saving model topology ' +\n 'in binary formats yet.');\n } else {\n const topology = JSON.stringify(modelArtifacts.modelTopology);\n const weightSpecs = JSON.stringify(modelArtifacts.weightSpecs);\n\n const modelArtifactsInfo: ModelArtifactsInfo =\n getModelArtifactsInfoForJSON(modelArtifacts);\n\n try {\n this.LS.setItem(this.keys.info, JSON.stringify(modelArtifactsInfo));\n this.LS.setItem(this.keys.topology, topology);\n this.LS.setItem(this.keys.weightSpecs, weightSpecs);\n this.LS.setItem(\n this.keys.weightData,\n arrayBufferToBase64String(modelArtifacts.weightData));\n\n return {modelArtifactsInfo};\n } catch (err) {\n // If saving failed, clean up all items saved so far.\n for (const key in this.keys) {\n this.LS.removeItem(this.keys[key]);\n }\n\n throw new Error(\n 
`Failed to save model '${this.modelPath}' to local storage: ` +\n `size quota being exceeded is a possible cause of this failure: ` +\n `modelTopologyBytes=${modelArtifactsInfo.modelTopologyBytes}, ` +\n `weightSpecsBytes=${modelArtifactsInfo.weightSpecsBytes}, ` +\n `weightDataBytes=${modelArtifactsInfo.weightDataBytes}.`);\n }\n }\n }\n\n /**\n * Load a model from local storage.\n *\n * See the documentation to `browserLocalStorage` for details on the saved\n * artifacts.\n *\n * @returns The loaded model (if loading succeeds).\n */\n async load(): Promise<ModelArtifacts> {\n const info =\n JSON.parse(this.LS.getItem(this.keys.info)) as ModelArtifactsInfo;\n if (info == null) {\n throw new Error(\n `In local storage, there is no model with name '${this.modelPath}'`);\n }\n\n if (info.modelTopologyType !== 'JSON') {\n throw new Error(\n 'BrowserLocalStorage does not support loading non-JSON model ' +\n 'topology yet.');\n }\n\n const out: ModelArtifacts = {};\n\n // Load topology.\n const topology = JSON.parse(this.LS.getItem(this.keys.topology));\n if (topology == null) {\n throw new Error(\n `In local storage, the topology of model '${this.modelPath}' ` +\n `is missing.`);\n }\n out.modelTopology = topology;\n\n // Load weight specs.\n const weightSpecs = JSON.parse(this.LS.getItem(this.keys.weightSpecs));\n if (weightSpecs == null) {\n throw new Error(\n `In local storage, the weight specs of model '${this.modelPath}' ` +\n `are missing.`);\n }\n out.weightSpecs = weightSpecs;\n\n // Load weight data.\n const weightDataBase64 = this.LS.getItem(this.keys.weightData);\n if (weightDataBase64 == null) {\n throw new Error(\n `In local storage, the binary weight values of model ` +\n `'${this.modelPath}' are missing.`);\n }\n out.weightData = base64StringToArrayBuffer(weightDataBase64);\n\n return out;\n }\n}\n\nexport const localStorageRouter: IORouter = (url: string|string[]) => {\n if (!ENV.get('IS_BROWSER')) {\n return null;\n } else {\n if (!Array.isArray(url) &&\n url.startsWith(BrowserLocalStorage.URL_SCHEME)) {\n return browserLocalStorage(\n url.slice(BrowserLocalStorage.URL_SCHEME.length));\n } else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(localStorageRouter);\nIORouterRegistry.registerLoadRouter(localStorageRouter);\n\n/**\n * Factory function for local storage IOHandler.\n *\n * This `IOHandler` supports both `save` and `load`.\n *\n * For each model's saved artifacts, four items are saved to local storage.\n * - `${PATH_SEPARATOR}/${modelPath}/info`: Contains meta-info about the\n * model, such as date saved, type of the topology, size in bytes, etc.\n * - `${PATH_SEPARATOR}/${modelPath}/topology`: Model topology. For Keras-\n * style models, this is a stringized JSON.\n * - `${PATH_SEPARATOR}/${modelPath}/weight_specs`: Weight specs of the\n * model, can be used to decode the saved binary weight values (see\n * item below).\n * - `${PATH_SEPARATOR}/${modelPath}/weight_data`: Concatenated binary\n * weight values, stored as a base64-encoded string.\n *\n * Saving may throw an `Error` if the total size of the artifacts exceed the\n * browser-specific quota.\n *\n * @param modelPath A unique identifier for the model to be saved. 
Must be a\n * non-empty string.\n * @returns An instance of `IOHandler`, which can be used with, e.g.,\n * `tf.Model.save`.\n */\nexport function browserLocalStorage(modelPath: string): IOHandler {\n return new BrowserLocalStorage(modelPath);\n}\n\nexport class BrowserLocalStorageManager implements ModelStoreManager {\n private readonly LS: Storage;\n\n constructor() {\n assert(ENV.get('IS_BROWSER'), 'Current environment is not a web browser');\n assert(\n typeof window.localStorage !== 'undefined',\n 'Current browser does not appear to support localStorage');\n this.LS = window.localStorage;\n }\n\n async listModels(): Promise<{[path: string]: ModelArtifactsInfo}> {\n const out: {[path: string]: ModelArtifactsInfo} = {};\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n const suffix = PATH_SEPARATOR + INFO_SUFFIX;\n for (let i = 0; i < this.LS.length; ++i) {\n const key = this.LS.key(i);\n if (key.startsWith(prefix) && key.endsWith(suffix)) {\n const modelPath = getModelPathFromKey(key);\n out[modelPath] = JSON.parse(this.LS.getItem(key)) as ModelArtifactsInfo;\n }\n }\n return out;\n }\n\n async removeModel(path: string): Promise<ModelArtifactsInfo> {\n path = maybeStripScheme(path);\n const keys = getModelKeys(path);\n if (this.LS.getItem(keys.info) == null) {\n throw new Error(`Cannot find model at path '${path}'`);\n }\n const info = JSON.parse(this.LS.getItem(keys.info)) as ModelArtifactsInfo;\n\n this.LS.removeItem(keys.info);\n this.LS.removeItem(keys.topology);\n this.LS.removeItem(keys.weightSpecs);\n this.LS.removeItem(keys.weightData);\n return info;\n }\n}\n\nif (ENV.get('IS_BROWSER')) {\n // Wrap the construction and registration, to guard against browsers that\n // don't support Local Storage.\n try {\n ModelStoreManagerRegistry.registerManager(\n BrowserLocalStorage.URL_SCHEME, new BrowserLocalStorageManager());\n } catch (err) {\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * IOHandlers related to files, such as browser-triggered file downloads,\n * user-selected files in browser.\n */\n\nimport {ENV} from '../environment';\nimport {basename, concatenateArrayBuffers, getModelArtifactsInfoForJSON} from './io_utils';\nimport {IORouter, IORouterRegistry} from './router_registry';\nimport {IOHandler, ModelArtifacts, SaveResult, WeightsManifestConfig, WeightsManifestEntry} from './types';\n\nconst DEFAULT_FILE_NAME_PREFIX = 'model';\nconst DEFAULT_JSON_EXTENSION_NAME = '.json';\nconst DEFAULT_WEIGHT_DATA_EXTENSION_NAME = '.weights.bin';\n\nexport class BrowserDownloads implements IOHandler {\n private readonly modelTopologyFileName: string;\n private readonly weightDataFileName: string;\n private readonly jsonAnchor: HTMLAnchorElement;\n private readonly weightDataAnchor: HTMLAnchorElement;\n\n static readonly URL_SCHEME = 'downloads://';\n\n constructor(fileNamePrefix?: string) {\n if (!ENV.get('IS_BROWSER')) {\n // TODO(cais): Provide info on what IOHandlers are available under the\n // current environment.\n throw new Error(\n 'triggerDownloads() cannot proceed because the current environment ' +\n 'is not a browser.');\n }\n\n if (fileNamePrefix.startsWith(BrowserDownloads.URL_SCHEME)) {\n fileNamePrefix = fileNamePrefix.slice(BrowserDownloads.URL_SCHEME.length);\n }\n if (fileNamePrefix == null || fileNamePrefix.length === 0) {\n fileNamePrefix = DEFAULT_FILE_NAME_PREFIX;\n }\n\n this.modelTopologyFileName = fileNamePrefix + DEFAULT_JSON_EXTENSION_NAME;\n this.weightDataFileName =\n fileNamePrefix + DEFAULT_WEIGHT_DATA_EXTENSION_NAME;\n }\n\n async save(modelArtifacts: ModelArtifacts): Promise<SaveResult> {\n const weightsURL = window.URL.createObjectURL(new Blob(\n [modelArtifacts.weightData], {type: 'application/octet-stream'}));\n\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\n 'DownloadTrigger.save() does not support saving model topology ' +\n 'in binary formats yet.');\n } else {\n const weightsManifest: WeightsManifestConfig = [{\n paths: ['./' + this.weightDataFileName],\n weights: modelArtifacts.weightSpecs\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n weightsManifest\n };\n const modelTopologyAndWeightManifestURL =\n window.URL.createObjectURL(new Blob(\n [JSON.stringify(modelTopologyAndWeightManifest)],\n {type: 'application/json'}));\n\n // If anchor elements are not provided, create them without attaching them\n // to parents, so that the downloaded file names can be controlled.\n const jsonAnchor = this.jsonAnchor == null ? 
document.createElement('a') :\n this.jsonAnchor;\n jsonAnchor.download = this.modelTopologyFileName;\n jsonAnchor.href = modelTopologyAndWeightManifestURL;\n // Trigger downloads by calling the `click` methods on the download\n // anchors.\n jsonAnchor.click();\n\n if (modelArtifacts.weightData != null) {\n const weightDataAnchor = this.weightDataAnchor == null ?\n document.createElement('a') :\n this.weightDataAnchor;\n weightDataAnchor.download = this.weightDataFileName;\n weightDataAnchor.href = weightsURL;\n weightDataAnchor.click();\n }\n\n return {modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts)};\n }\n }\n}\n\nclass BrowserFiles implements IOHandler {\n private readonly files: File[];\n\n constructor(files: File[]) {\n if (files == null || files.length < 1) {\n throw new Error(\n `When calling browserFiles, at least 1 file is required, ` +\n `but received ${files}`);\n }\n this.files = files;\n }\n\n async load(): Promise<ModelArtifacts> {\n const jsonFile = this.files[0];\n const weightFiles = this.files.slice(1);\n\n return new Promise<ModelArtifacts>((resolve, reject) => {\n const jsonReader = new FileReader();\n jsonReader.onload = (event: Event) => {\n // tslint:disable-next-line:no-any\n const modelJSON = JSON.parse((event.target as any).result);\n const modelTopology = modelJSON.modelTopology as {};\n if (modelTopology == null) {\n reject(new Error(\n `modelTopology field is missing from file ${jsonFile.name}`));\n return;\n }\n\n if (weightFiles.length === 0) {\n resolve({modelTopology});\n }\n\n const weightsManifest =\n modelJSON.weightsManifest as WeightsManifestConfig;\n if (weightsManifest == null) {\n reject(new Error(\n `weightManifest field is missing from file ${jsonFile.name}`));\n return;\n }\n\n let pathToFile: {[path: string]: File};\n try {\n pathToFile =\n this.checkManifestAndWeightFiles(weightsManifest, weightFiles);\n } catch (err) {\n reject(err);\n return;\n }\n\n const weightSpecs: WeightsManifestEntry[] = [];\n const paths: string[] = [];\n const perFileBuffers: ArrayBuffer[] = [];\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n paths.push(path);\n perFileBuffers.push(null);\n });\n weightSpecs.push(...weightsGroup.weights);\n });\n\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n const weightFileReader = new FileReader();\n weightFileReader.onload = (event: Event) => {\n // tslint:disable-next-line:no-any\n const weightData = (event.target as any).result as ArrayBuffer;\n const index = paths.indexOf(path);\n perFileBuffers[index] = weightData;\n if (perFileBuffers.indexOf(null) === -1) {\n resolve({\n modelTopology,\n weightSpecs,\n weightData: concatenateArrayBuffers(perFileBuffers),\n });\n }\n };\n weightFileReader.onerror = (error: FileReaderProgressEvent) => {\n reject(`Failed to weights data from file of path '${path}'.`);\n return;\n };\n weightFileReader.readAsArrayBuffer(pathToFile[path]);\n });\n });\n };\n jsonReader.onerror = (error: FileReaderProgressEvent) => {\n reject(\n `Failed to read model topology and weights manifest JSON ` +\n `from file '${jsonFile.name}'. 
BrowserFiles supports loading ` +\n `Keras-style tf.Model artifacts only.`);\n return;\n };\n jsonReader.readAsText(jsonFile);\n });\n }\n\n /**\n * Check the compatibility between weights manifest and weight files.\n */\n private checkManifestAndWeightFiles(\n manifest: WeightsManifestConfig, files: File[]): {[path: string]: File} {\n const basenames: string[] = [];\n const fileNames = files.map(file => basename(file.name));\n const pathToFile: {[path: string]: File} = {};\n for (const group of manifest) {\n group.paths.forEach(path => {\n const pathBasename = basename(path);\n if (basenames.indexOf(pathBasename) !== -1) {\n throw new Error(\n `Duplicate file basename found in weights manifest: ` +\n `'${pathBasename}'`);\n }\n basenames.push(pathBasename);\n if (fileNames.indexOf(pathBasename) === -1) {\n throw new Error(\n `Weight file with basename '${pathBasename}' is not provided.`);\n } else {\n pathToFile[path] = files[fileNames.indexOf(pathBasename)];\n }\n });\n }\n\n if (basenames.length !== files.length) {\n throw new Error(\n `Mismatch in the number of files in weights manifest ` +\n `(${basenames.length}) and the number of weight files provided ` +\n `(${files.length}).`);\n }\n return pathToFile;\n }\n}\n\nexport const browserDownloadsRouter: IORouter = (url: string|string[]) => {\n if (!ENV.get('IS_BROWSER')) {\n return null;\n } else {\n if (!Array.isArray(url) && url.startsWith(BrowserDownloads.URL_SCHEME)) {\n return browserDownloads(url.slice(BrowserDownloads.URL_SCHEME.length));\n } else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(browserDownloadsRouter);\n\n/**\n * Creates an IOHandler that triggers file downloads from the browser.\n *\n * The returned `IOHandler` instance can be used as model exporting methods such\n * as `tf.Model.save` and supports only saving.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense(\n * {units: 1, inputShape: [10], activation: 'sigmoid'}));\n * const saveResult = await model.save('downloads://mymodel'));\n * // This will trigger downloading of two files:\n * // 'mymodel.json' and 'mymodel.weights.bin'.\n * console.log(saveResult);\n * ```\n *\n * @param fileNamePrefix Prefix name of the files to be downloaded. For use with\n * `tf.Model`, `fileNamePrefix` should follow either of the following two\n * formats:\n * 1. `null` or `undefined`, in which case the default file\n * names will be used:\n * - 'model.json' for the JSON file containing the model topology and\n * weights manifest.\n * - 'model.weights.bin' for the binary file containing the binary weight\n * values.\n * 2. 
A single string or an Array of a single string, as the file name prefix.\n * For example, if `'foo'` is provided, the downloaded JSON\n * file and binary weights file will be named 'foo.json' and\n * 'foo.weights.bin', respectively.\n * @param config Additional configuration for triggering downloads.\n * @returns An instance of `DownloadTrigger` `IOHandler`.\n */\n/** @doc {heading: 'Models', subheading: 'Loading', namespace: 'io'} */\nexport function browserDownloads(fileNamePrefix = 'model'): IOHandler {\n return new BrowserDownloads(fileNamePrefix);\n}\n\n/**\n * Creates an IOHandler that loads model artifacts from user-selected files.\n *\n * This method can be used for loading from files such as user-selected files\n * in the browser.\n * When used in conjunction with `tf.loadModel`, an instance of `tf.Model`\n * (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * // Note: This code snippet won't run properly without the actual file input\n * // elements in the HTML DOM.\n *\n * // Suppose there are two HTML file input (`<input type=\"file\" ...>`)\n * // elements.\n * const uploadJSONInput = document.getElementById('upload-json');\n * const uploadWeightsInput = document.getElementById('upload-weights');\n * const model = await tfl.loadModel(tf.io.browserFiles(\n * [uploadJSONInput.files[0], uploadWeightsInput.files[0]]));\n * ```\n *\n * @param files `File`s to load from. Currently, this function supports only\n * loading from files that contain Keras-style models (i.e., `tf.Model`s), for\n * which an `Array` of `File`s is expected (in that order):\n * - A JSON file containing the model topology and weight manifest.\n * - Optionally, One or more binary files containing the binary weights.\n * These files must have names that match the paths in the `weightsManifest`\n * contained by the aforementioned JSON file, or errors will be thrown\n * during loading. These weights files have the same format as the ones\n * generated by `tensorflowjs_converter` that comes with the `tensorflowjs`\n * Python PIP package. If no weights files are provided, only the model\n * topology will be loaded from the JSON file above.\n * @returns An instance of `Files` `IOHandler`.\n */\n/** @doc {heading: 'Models', subheading: 'Loading', namespace: 'io'} */\nexport function browserFiles(files: File[]): IOHandler {\n return new BrowserFiles(files);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {NamedTensorMap} from '../tensor_types';\nimport * as util from '../util';\n\nimport {decodeWeights} from './io_utils';\nimport {DTYPE_VALUE_SIZE_MAP, WeightsManifestConfig, WeightsManifestEntry} from './types';\n\n/**\n * Reads binary weights data from a number of URLs.\n *\n * @param fetchURLs URLs to send the HTTP requests at, using `fetch` calls.\n * @param requestOptions RequestInit (options) for the HTTP requests.\n * @returns A `Promise` of an Array of `ArrayBuffer`. The Array has the same\n * length as `fetchURLs`.\n */\nexport async function loadWeightsAsArrayBuffer(\n fetchURLs: string[], requestOptions?: RequestInit): Promise<ArrayBuffer[]> {\n // Create the requests for all of the weights in parallel.\n const requests = fetchURLs.map(fetchURL => fetch(fetchURL, requestOptions));\n const responses = await Promise.all(requests);\n const buffers =\n await Promise.all(responses.map(response => response.arrayBuffer()));\n return buffers;\n}\n\n/**\n * Reads a weights manifest JSON configuration, fetches the weights and\n * returns them as `Tensor`s.\n *\n * @param manifest The weights manifest JSON.\n * @param filePathPrefix The path prefix for filenames given in the manifest.\n * Defaults to the empty string.\n * @param weightNames The names of the weights to be fetched.\n */\nexport async function loadWeights(\n manifest: WeightsManifestConfig, filePathPrefix = '',\n weightNames?: string[],\n requestOptions?: RequestInit): Promise<NamedTensorMap> {\n // TODO(nsthorat): Groups are currently fetched atomically. If you need a\n // single weight from a group, the whole group will be fetched. At a future\n // date, we should support fetching only the individual shards within a\n // group that are needed to reconstruct the requested weight.\n // TODO(cais): Use `decodeWeights` for implementation.\n\n // Collect all the groups, weights, and their relative offsets to be\n // fetched.\n const groupIndicesToFetchMap = manifest.map(() => false);\n const groupWeightsToFetch: {\n [group: number]: Array<{\n manifestEntry: WeightsManifestEntry; groupOffset: number;\n sizeBytes: number;\n }>\n } = {};\n const weightsFound = weightNames != null ? 
weightNames.map(() => false) : [];\n const allManifestWeightNames: string[] = [];\n manifest.forEach((manifestGroupConfig, groupIndex) => {\n let groupOffset = 0;\n manifestGroupConfig.weights.forEach(weightsEntry => {\n const rawDtype = ('quantization' in weightsEntry) ?\n weightsEntry.quantization.dtype :\n weightsEntry.dtype;\n\n const weightsBytes = DTYPE_VALUE_SIZE_MAP[rawDtype] *\n util.sizeFromShape(weightsEntry.shape);\n\n const enqueueWeightsForFetchingFn = () => {\n groupIndicesToFetchMap[groupIndex] = true;\n if (groupWeightsToFetch[groupIndex] == null) {\n groupWeightsToFetch[groupIndex] = [];\n }\n\n groupWeightsToFetch[groupIndex].push({\n manifestEntry: weightsEntry,\n groupOffset,\n sizeBytes: weightsBytes\n });\n };\n\n if (weightNames != null) {\n weightNames.forEach((weightName, weightIndex) => {\n if (weightName === weightsEntry.name) {\n enqueueWeightsForFetchingFn();\n weightsFound[weightIndex] = true;\n }\n });\n } else {\n enqueueWeightsForFetchingFn();\n }\n\n allManifestWeightNames.push(weightsEntry.name);\n groupOffset += weightsBytes;\n });\n });\n\n if (!weightsFound.every(found => found)) {\n const weightsNotFound = weightNames.filter((weight, i) => !weightsFound[i]);\n throw new Error(\n `Could not find weights in manifest with names: ` +\n `${weightsNotFound.join(', ')}. \\n` +\n `Manifest JSON has weights with names: ` +\n `${allManifestWeightNames.join(', ')}.`);\n }\n\n // Convert the one-hot boolean groupId => shouldFetch map to a list of group\n // IDs.\n const groupIndicesToFetch =\n groupIndicesToFetchMap.reduce((accumulator, shouldFetch, i) => {\n if (shouldFetch) {\n accumulator.push(i);\n }\n return accumulator;\n }, []);\n\n const fetchUrls: string[] = [];\n groupIndicesToFetch.forEach(i => {\n manifest[i].paths.forEach(filepath => {\n const fetchUrl = filePathPrefix +\n (!filePathPrefix.endsWith('/') ? '/' : '') + filepath;\n fetchUrls.push(fetchUrl);\n });\n });\n const buffers = await loadWeightsAsArrayBuffer(fetchUrls, requestOptions);\n\n const weightsTensorMap: NamedTensorMap = {};\n let bufferIndexOffset = 0;\n groupIndicesToFetch.forEach(i => {\n const numBuffers = manifest[i].paths.length;\n\n let groupBytes = 0;\n for (let i = 0; i < numBuffers; i++) {\n groupBytes += buffers[bufferIndexOffset + i].byteLength;\n }\n\n // Create a buffer for the whole group.\n const groupBuffer = new ArrayBuffer(groupBytes);\n const groupByteBuffer = new Uint8Array(groupBuffer);\n let groupBufferOffset = 0;\n for (let i = 0; i < numBuffers; i++) {\n const buffer = new Uint8Array(buffers[bufferIndexOffset + i]);\n groupByteBuffer.set(buffer, groupBufferOffset);\n groupBufferOffset += buffer.byteLength;\n }\n\n const weightsEntries = groupWeightsToFetch[i];\n weightsEntries.forEach(weightsEntry => {\n const byteBuffer = groupBuffer.slice(\n weightsEntry.groupOffset,\n weightsEntry.groupOffset + weightsEntry.sizeBytes);\n const nameToTensorMap =\n decodeWeights(byteBuffer, [weightsEntry.manifestEntry]);\n for (const name in nameToTensorMap) {\n weightsTensorMap[name] = nameToTensorMap[name];\n }\n });\n\n bufferIndexOffset += numBuffers;\n });\n\n return weightsTensorMap;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * IOHandler implementations based on HTTP requests in the web browser.\n *\n * Uses [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).\n */\n\nimport {assert} from '../util';\nimport {concatenateArrayBuffers, getModelArtifactsInfoForJSON} from './io_utils';\nimport {IORouter, IORouterRegistry} from './router_registry';\nimport {IOHandler, ModelArtifacts, SaveResult, WeightsManifestConfig, WeightsManifestEntry} from './types';\nimport {loadWeightsAsArrayBuffer} from './weights_loader';\n\nexport class BrowserHTTPRequest implements IOHandler {\n protected readonly path: string|string[];\n protected readonly requestInit: RequestInit;\n\n readonly DEFAULT_METHOD = 'POST';\n\n static readonly URL_SCHEME_REGEX = /^https?:\\/\\//;\n\n constructor(\n path: string|string[], requestInit?: RequestInit,\n private readonly weightPathPrefix?: string) {\n if (typeof fetch === 'undefined') {\n throw new Error(\n // tslint:disable-next-line:max-line-length\n 'browserHTTPRequest is not supported outside the web browser without a fetch polyfill.');\n }\n\n assert(\n path != null && path.length > 0,\n 'URL path for browserHTTPRequest must not be null, undefined or ' +\n 'empty.');\n\n if (Array.isArray(path)) {\n assert(\n path.length === 2,\n 'URL paths for browserHTTPRequest must have a length of 2, ' +\n `(actual length is ${path.length}).`);\n }\n this.path = path;\n\n if (requestInit != null && requestInit.body != null) {\n throw new Error(\n 'requestInit is expected to have no pre-existing body, but has one.');\n }\n this.requestInit = requestInit || {};\n }\n\n async save(modelArtifacts: ModelArtifacts): Promise<SaveResult> {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\n 'BrowserHTTPRequest.save() does not support saving model topology ' +\n 'in binary formats yet.');\n }\n\n const init = Object.assign({method: this.DEFAULT_METHOD}, this.requestInit);\n init.body = new FormData();\n\n const weightsManifest: WeightsManifestConfig = [{\n paths: ['./model.weights.bin'],\n weights: modelArtifacts.weightSpecs,\n }];\n const modelTopologyAndWeightManifest = {\n modelTopology: modelArtifacts.modelTopology,\n weightsManifest\n };\n\n init.body.append(\n 'model.json',\n new Blob(\n [JSON.stringify(modelTopologyAndWeightManifest)],\n {type: 'application/json'}),\n 'model.json');\n\n if (modelArtifacts.weightData != null) {\n init.body.append(\n 'model.weights.bin',\n new Blob(\n [modelArtifacts.weightData], {type: 'application/octet-stream'}),\n 'model.weights.bin');\n }\n\n const response = await fetch(this.path as string, init);\n\n if (response.ok) {\n return {\n modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts),\n responses: [response],\n };\n } else {\n throw new Error(\n `BrowserHTTPRequest.save() failed due to HTTP response status ` +\n `${response.status}.`);\n }\n 
}\n\n /**\n * Load model artifacts via HTTP request(s).\n *\n * See the documentation to `browserHTTPRequest` for details on the saved\n * artifacts.\n *\n * @returns The loaded model artifacts (if loading succeeds).\n */\n async load(): Promise<ModelArtifacts> {\n return Array.isArray(this.path) ? this.loadBinaryModel() :\n this.loadJSONModel();\n }\n\n /**\n * Loads the model topology file and build the in memory graph of the model.\n */\n private async loadBinaryTopology(): Promise<ArrayBuffer> {\n try {\n const response = await fetch(this.path[0], this.requestInit);\n if (!response.ok) {\n throw new Error(\n `BrowserHTTPRequest.load() failed due to HTTP response: ${\n response.statusText}`);\n }\n return await response.arrayBuffer();\n } catch (error) {\n throw new Error(`${this.path[0]} not found. ${error}`);\n }\n }\n\n protected async loadBinaryModel(): Promise<ModelArtifacts> {\n const graphPromise = this.loadBinaryTopology();\n const manifestPromise = await fetch(this.path[1], this.requestInit);\n if (!manifestPromise.ok) {\n throw new Error(`BrowserHTTPRequest.load() failed due to HTTP response: ${\n manifestPromise.statusText}`);\n }\n\n const results = await Promise.all([graphPromise, manifestPromise]);\n const [modelTopology, weightsManifestResponse] = results;\n\n const weightsManifest =\n await weightsManifestResponse.json() as WeightsManifestConfig;\n\n let weightSpecs: WeightsManifestEntry[];\n let weightData: ArrayBuffer;\n if (weightsManifest != null) {\n const results = await this.loadWeights(weightsManifest);\n [weightSpecs, weightData] = results;\n }\n\n return {modelTopology, weightSpecs, weightData};\n }\n\n protected async loadJSONModel(): Promise<ModelArtifacts> {\n const modelConfigRequest =\n await fetch(this.path as string, this.requestInit);\n if (!modelConfigRequest.ok) {\n throw new Error(`BrowserHTTPRequest.load() failed due to HTTP response: ${\n modelConfigRequest.statusText}`);\n }\n const modelConfig = await modelConfigRequest.json();\n const modelTopology = modelConfig['modelTopology'];\n const weightsManifest = modelConfig['weightsManifest'];\n\n // We do not allow both modelTopology and weightsManifest to be missing.\n if (modelTopology == null && weightsManifest == null) {\n throw new Error(\n `The JSON from HTTP path ${this.path} contains neither model ` +\n `topology or manifest for weights.`);\n }\n\n let weightSpecs: WeightsManifestEntry[];\n let weightData: ArrayBuffer;\n if (weightsManifest != null) {\n const weightsManifest =\n modelConfig['weightsManifest'] as WeightsManifestConfig;\n const results = await this.loadWeights(weightsManifest);\n [weightSpecs, weightData] = results;\n }\n\n return {modelTopology, weightSpecs, weightData};\n }\n\n private async loadWeights(weightsManifest: WeightsManifestConfig):\n Promise<[WeightsManifestEntry[], ArrayBuffer]> {\n const weightPath = Array.isArray(this.path) ? 
this.path[1] : this.path;\n const [prefix, suffix] = parseUrl(weightPath);\n const pathPrefix = this.weightPathPrefix || prefix;\n\n const weightSpecs = [];\n for (const entry of weightsManifest) {\n weightSpecs.push(...entry.weights);\n }\n\n const fetchURLs: string[] = [];\n weightsManifest.forEach(weightsGroup => {\n weightsGroup.paths.forEach(path => {\n fetchURLs.push(pathPrefix + path + suffix);\n });\n });\n\n return [\n weightSpecs,\n concatenateArrayBuffers(\n await loadWeightsAsArrayBuffer(fetchURLs, this.requestInit))\n ];\n }\n}\n\n/**\n * Extract the prefix and suffix of the url, where the prefix is the path before\n * the last file, and suffix is the search params after the last file.\n * ```\n * const url = 'http://tfhub.dev/model/1/tensorflowjs_model.pb?tfjs-format=file'\n * [prefix, suffix] = parseUrl(url)\n * // prefix = 'http://tfhub.dev/model/1/'\n * // suffix = '?tfjs-format=file'\n * ```\n * @param url the model url to be parsed.\n */\nexport function parseUrl(url: string): [string, string] {\n const lastSlash = url.lastIndexOf('/');\n const lastSearchParam = url.lastIndexOf('?');\n const prefix = url.substring(0, lastSlash);\n const suffix =\n lastSearchParam > lastSlash ? url.substring(lastSearchParam) : '';\n return [prefix + '/', suffix];\n}\n\nfunction isHTTPScheme(url: string): boolean {\n return url.match(BrowserHTTPRequest.URL_SCHEME_REGEX) != null;\n}\n\nexport const httpRequestRouter: IORouter = (url: string|string[]) => {\n if (typeof fetch === 'undefined') {\n // browserHTTPRequest uses `fetch`, if one wants to use it in node.js\n // they have to setup a global fetch polyfill.\n return null;\n } else {\n let isHTTP = true;\n if (Array.isArray(url)) {\n isHTTP = url.every(urlItem => isHTTPScheme(urlItem));\n } else {\n isHTTP = isHTTPScheme(url);\n }\n if (isHTTP) {\n return browserHTTPRequest(url);\n }\n }\n return null;\n};\nIORouterRegistry.registerSaveRouter(httpRequestRouter);\nIORouterRegistry.registerLoadRouter(httpRequestRouter);\n\n/**\n * Creates an IOHandler subtype that sends model artifacts to HTTP server.\n *\n * An HTTP request of the `multipart/form-data` mime type will be sent to the\n * `path` URL. The form data includes artifacts that represent the topology\n * and/or weights of the model. In the case of Keras-style `tf.Model`, two\n * blobs (files) exist in form-data:\n * - A JSON file consisting of `modelTopology` and `weightsManifest`.\n * - A binary weights file consisting of the concatenated weight values.\n * These files are in the same format as the one generated by\n * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html).\n *\n * The following code snippet exemplifies the client-side code that uses this\n * function:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));\n *\n * const saveResult = await model.save(tf.io.browserHTTPRequest(\n * 'http://model-server:5000/upload', {method: 'PUT'}));\n * console.log(saveResult);\n * ```\n *\n * If the default `POST` method is to be used, without any custom parameters\n * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`:\n *\n * ```js\n * const saveResult = await model.save('http://model-server:5000/upload');\n * ```\n *\n * The following Python code snippet based on the\n * [flask](https://github.com/pallets/flask) server framework implements a\n * server that can receive the request. 
Upon receiving the model artifacts\n * via the requst, this particular server reconsistutes instances of\n * [Keras Models](https://keras.io/models/model/) in memory.\n *\n * ```python\n * # pip install -U flask flask-cors tensorflow tensorflowjs\n *\n * from __future__ import absolute_import\n * from __future__ import division\n * from __future__ import print_function\n *\n * import io\n *\n * from flask import Flask, Response, request\n * from flask_cors import CORS, cross_origin\n * import tensorflow as tf\n * import tensorflowjs as tfjs\n * import werkzeug.formparser\n *\n *\n * class ModelReceiver(object):\n *\n * def __init__(self):\n * self._model = None\n * self._model_json_bytes = None\n * self._model_json_writer = None\n * self._weight_bytes = None\n * self._weight_writer = None\n *\n * @property\n * def model(self):\n * self._model_json_writer.flush()\n * self._weight_writer.flush()\n * self._model_json_writer.seek(0)\n * self._weight_writer.seek(0)\n *\n * json_content = self._model_json_bytes.read()\n * weights_content = self._weight_bytes.read()\n * return tfjs.converters.deserialize_keras_model(\n * json_content,\n * weight_data=[weights_content],\n * use_unique_name_scope=True)\n *\n * def stream_factory(self,\n * total_content_length,\n * content_type,\n * filename,\n * content_length=None):\n * # Note: this example code is *not* thread-safe.\n * if filename == 'model.json':\n * self._model_json_bytes = io.BytesIO()\n * self._model_json_writer = io.BufferedWriter(self._model_json_bytes)\n * return self._model_json_writer\n * elif filename == 'model.weights.bin':\n * self._weight_bytes = io.BytesIO()\n * self._weight_writer = io.BufferedWriter(self._weight_bytes)\n * return self._weight_writer\n *\n *\n * def main():\n * app = Flask('model-server')\n * CORS(app)\n * app.config['CORS_HEADER'] = 'Content-Type'\n *\n * model_receiver = ModelReceiver()\n *\n * @app.route('/upload', methods=['POST'])\n * @cross_origin()\n * def upload():\n * print('Handling request...')\n * werkzeug.formparser.parse_form_data(\n * request.environ, stream_factory=model_receiver.stream_factory)\n * print('Received model:')\n * with tf.Graph().as_default(), tf.Session():\n * model = model_receiver.model\n * model.summary()\n * # You can perform `model.predict()`, `model.fit()`,\n * # `model.evaluate()` etc. here.\n * return Response(status=200)\n *\n * app.run('localhost', 5000)\n *\n *\n * if __name__ == '__main__':\n * main()\n * ```\n *\n * @param path A single URL path or an Array of URL paths.\n * Currently, only a single URL path is supported. Array input is reserved\n * for future development.\n * Can be an absolute HTTP path (e.g.,\n * 'http://localhost:8000/model-upload)') or a relative path (e.g.,\n * './model-upload').\n * @param requestInit Request configurations to be used when sending\n * HTTP request to server using `fetch`. It can contain fields such as\n * `method`, `credentials`, `headers`, `mode`, etc. See\n * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request\n * for more information. `requestInit` must not have a body, because the\n * body will be set by TensorFlow.js. File blobs representing the model\n * topology (filename: 'model.json') and the weights of the model (filename:\n * 'model.weights.bin') will be appended to the body. 
If `requestInit` has a\n * `body`, an Error will be thrown.\n * @param weightPathPrefix Optional, this specifies the path prefix for weight\n * files, by default this is calculated from the path param.\n * @returns An instance of `IOHandler`.\n */\nexport function browserHTTPRequest(\n path: string|string[], requestInit?: RequestInit,\n weightPathPrefix?: string): IOHandler {\n return new BrowserHTTPRequest(path, requestInit, weightPathPrefix);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n/**\n * IOHandlers that pass through the in-memory ModelArtifacts format.\n */\n\nimport {IOHandler, ModelArtifacts, SaveResult, WeightsManifestEntry} from './types';\n\nclass PassthroughLoader implements IOHandler {\n constructor(\n private readonly modelTopology?: {}|ArrayBuffer,\n private readonly weightSpecs?: WeightsManifestEntry[],\n private readonly weightData?: ArrayBuffer) {}\n\n async load(): Promise<ModelArtifacts> {\n let result = {};\n if (this.modelTopology != null) {\n result = {modelTopology: this.modelTopology, ...result};\n }\n if (this.weightSpecs != null && this.weightSpecs.length > 0) {\n result = {weightSpecs: this.weightSpecs, ...result};\n }\n if (this.weightData != null && this.weightData.byteLength > 0) {\n result = {weightData: this.weightData, ...result};\n }\n return result;\n }\n}\n\nclass PassthroughSaver implements IOHandler {\n constructor(\n private readonly saveHandler:\n (artifacts: ModelArtifacts) => Promise<SaveResult>) {}\n\n async save(modelArtifacts: ModelArtifacts) {\n return this.saveHandler(modelArtifacts);\n }\n}\n\n/**\n * Creates an IOHandler that loads model artifacts from memory.\n *\n * When used in conjunction with `tf.loadModel`, an instance of `tf.Model`\n * (Keras-style) can be constructed from the loaded artifacts.\n *\n * ```js\n * const model = await tf.loadModel(tf.io.fromMemory(\n * modelTopology, weightSpecs, weightData));\n * ```\n *\n * @param modelTopology a object containing model topology (i.e., parsed from\n * the JSON format).\n * @param weightSpecs An array of `WeightsManifestEntry` objects describing the\n * names, shapes, types, and quantization of the weight data.\n * @param weightData A single `ArrayBuffer` containing the weight data,\n * concatenated in the order described by the weightSpecs.\n *\n * @returns A passthrough `IOHandler` that simply loads the provided data.\n */\nexport function fromMemory(\n modelTopology: {}, weightSpecs?: WeightsManifestEntry[],\n weightData?: ArrayBuffer): IOHandler {\n return new PassthroughLoader(modelTopology, weightSpecs, weightData);\n}\n\n/**\n * Creates an IOHandler that passes saved model artifacts to a callback.\n *\n * ```js\n * function handleSave(artifacts) {\n * // ... 
do something with the artifacts ...\n * return {modelArtifactsInfo: {...}, ...};\n * }\n *\n * const saveResult = model.save(tf.io.withSaveHandler(handleSave));\n * ```\n *\n * @param saveHandler A function that accepts a `ModelArtifacts` and returns a\n * `SaveResult`.\n */\nexport function withSaveHandler(\n saveHandler: (artifacts: ModelArtifacts) =>\n Promise<SaveResult>): IOHandler {\n return new PassthroughSaver(saveHandler);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// Importing local_storage and indexed_db is necessary for the routers to be\n// registered.\nimport './indexed_db';\nimport './local_storage';\n\nimport {browserFiles} from './browser_files';\nimport {browserHTTPRequest} from './browser_http';\nimport {concatenateArrayBuffers, decodeWeights, encodeWeights, getModelArtifactsInfoForJSON} from './io_utils';\nimport {fromMemory, withSaveHandler} from './passthrough';\nimport {IORouterRegistry} from './router_registry';\nimport {IOHandler, LoadHandler, ModelArtifacts, ModelStoreManager, SaveConfig, SaveHandler, SaveResult, WeightsManifestConfig, WeightsManifestEntry} from './types';\nimport {loadWeights} from './weights_loader';\n\nconst registerSaveRouter = IORouterRegistry.registerSaveRouter;\nconst registerLoadRouter = IORouterRegistry.registerLoadRouter;\nconst getSaveHandlers = IORouterRegistry.getSaveHandlers;\nconst getLoadHandlers = IORouterRegistry.getLoadHandlers;\n\nexport {copyModel, listModels, moveModel, removeModel} from './model_management';\n\nexport {\n browserFiles,\n browserHTTPRequest,\n concatenateArrayBuffers,\n decodeWeights,\n encodeWeights,\n fromMemory,\n getLoadHandlers,\n getModelArtifactsInfoForJSON,\n getSaveHandlers,\n IOHandler,\n LoadHandler,\n loadWeights,\n ModelArtifacts,\n ModelStoreManager,\n registerLoadRouter,\n registerSaveRouter,\n SaveConfig,\n SaveHandler,\n SaveResult,\n WeightsManifestConfig,\n WeightsManifestEntry,\n withSaveHandler\n};\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {Tensor1D, Tensor2D} from '../tensor';\nimport {convertToTensor} from '../tensor_util_env';\nimport {TensorLike} from '../types';\nimport * as util from '../util';\n\nimport {oneHot} from './array_ops';\nimport {op} from './operation';\n\n/**\n * Computes the confusion matrix from true labels and predicted labels.\n *\n * ```js\n * const labels = tf.tensor1d([0, 1, 2, 1, 0], 'int32');\n * const predictions = tf.tensor1d([0, 2, 2, 1, 0], 'int32');\n * const numClasses = 3;\n * const out = tf.math.confusionMatrix(labels, predictions, numClasses);\n * out.print();\n * // Expected output matrix:\n * // [[2, 0, 0],\n * // [0, 1, 1],\n * // [0, 0, 1]]\n * ```\n *\n * @param labels The target labels, assumed to be 0-based integers\n * for the classes. The shape is `[numExamples]`, where\n * `numExamples` is the number of examples included.\n * @param predictions The predicted classes, assumed to be\n * 0-based integers for the classes. Must have the same shape as `labels`.\n * @param numClasses Number of all classes, as an integer.\n * Its value must be larger than the largest element in `labels` and\n * `predictions`.\n * @returns The confusion matrix as a int32-type 2D tensor. The value at\n * row `r` and column `c` is the number of times examples of actual class\n * `r` were predicted as class `c`.\n */\n/** @doc {heading: 'Operations', subheading: 'Evaluation'} */\nexport function confusionMatrix_(\n labels: Tensor1D|TensorLike, predictions: Tensor1D|TensorLike,\n numClasses: number): Tensor2D {\n const $labels = convertToTensor(labels, 'label', 'confusionMatrix', 'int32');\n const $predictions =\n convertToTensor(predictions, 'label', 'confusionMatrix', 'int32');\n\n util.assert(\n numClasses == null || numClasses > 0 && Number.isInteger(numClasses),\n `If provided, numClasses must be a positive integer, ` +\n `but got ${numClasses}`);\n util.assert(\n $labels.rank === 1,\n `Expected the rank of labels to be 1, but got ${$labels.rank}`);\n util.assert(\n $predictions.rank === 1,\n `Expected the rank of predictions to be 1, ` +\n `but got ${$predictions.rank}`);\n util.assert(\n $labels.shape[0] === $predictions.shape[0],\n `Mismatch in the number of examples: ` +\n `${$labels.shape[0]} vs. ${$predictions.shape[0]}. 
` +\n `Labels and predictions should have the same number of elements.`);\n util.assert(\n numClasses > 0 && Number.isInteger(numClasses),\n `numClasses is required to be a positive integer, but got ${numClasses}`);\n // TODO(cais): In the future, if oneHot supports tensors inputs for\n // `numClasses`, `confusionMatrix` can make `numClasses` optional.\n\n const oneHotLabels = oneHot($labels.asType('int32'), numClasses);\n const oneHotPredictions = oneHot($predictions.asType('int32'), numClasses);\n return oneHotLabels.transpose().matMul(oneHotPredictions).asType('int32');\n}\n\nexport const confusionMatrix = op({confusionMatrix_});\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {assert} from './util';\n\n/**\n * Types to support JSON-esque data structures internally.\n *\n * Internally ConfigDict's use camelCase keys and values where the\n * values are class names to be instantiated. On the python side, these\n * will be snake_case. Internally we allow Enums into the values for better\n * type safety, but these need to be converted to raw primitives (usually\n * strings) for round-tripping with python.\n *\n * toConfig returns the TS-friendly representation. model.toJSON() returns\n * the pythonic version as that's the portable format. If you need to\n * python-ify a non-model level toConfig output, you'll need to use a\n * convertTsToPythonic from serialization_utils in -Layers.\n *\n */\nexport type ConfigDictValue =\n boolean|number|string|null|ConfigDictArray|ConfigDict;\nexport interface ConfigDict {\n [key: string]: ConfigDictValue;\n}\nexport interface ConfigDictArray extends Array<ConfigDictValue> {}\n\n/**\n * Type to represent the class-type of Serializable objects.\n *\n * Ie the class prototype with access to the constructor and any\n * static members/methods. Instance methods are not listed here.\n *\n * Source for this idea: https://stackoverflow.com/a/43607255\n */\nexport type SerializableConstructor<T extends Serializable> = {\n // tslint:disable-next-line:no-any\n new (...args: any[]): T; className: string; fromConfig: FromConfigMethod<T>;\n};\nexport type FromConfigMethod<T extends Serializable> =\n (cls: SerializableConstructor<T>, config: ConfigDict) => T;\n\n/**\n * Serializable defines the serialization contract.\n *\n * TFJS requires serializable classes to return their className when asked\n * to avoid issues with minification.\n */\nexport abstract class Serializable {\n /**\n * Return the class name for this class to use in serialization contexts.\n *\n * Generally speaking this will be the same thing that constructor.name\n * would have returned. 
However, the class name needs to be robust\n * against minification for serialization/deserialization to work properly.\n *\n * There's also places such as initializers.VarianceScaling, where\n * implementation details between different languages led to different\n * class hierarchies and a non-leaf node is used for serialization purposes.\n */\n getClassName(): string {\n return (this.constructor as SerializableConstructor<Serializable>)\n .className;\n }\n\n /**\n * Return all the non-weight state needed to serialize this object.\n */\n abstract getConfig(): ConfigDict;\n\n /**\n * Creates an instance of T from a ConfigDict.\n *\n * This works for most descendants of serializable. A few need to\n * provide special handling.\n * @param cls A Constructor for the class to instantiate.\n * @param config The Configuration for the object.\n */\n static fromConfig<T extends Serializable>(\n cls: SerializableConstructor<T>, config: ConfigDict): T {\n return new cls(config);\n }\n}\n\n/**\n * Maps string keys to class constructors.\n *\n * Used during (de)serialization from the cross-language JSON format, which\n * requires the class name in the serialization format matches the class\n * names as used in Python, should it exist.\n */\nexport class SerializationMap {\n private static instance: SerializationMap;\n classNameMap: {\n [className: string]:\n [\n SerializableConstructor<Serializable>, FromConfigMethod<Serializable>\n ]\n };\n\n private constructor() {\n this.classNameMap = {};\n }\n\n /**\n * Returns the singleton instance of the map.\n */\n static getMap(): SerializationMap {\n if (SerializationMap.instance == null) {\n SerializationMap.instance = new SerializationMap();\n }\n return SerializationMap.instance;\n }\n\n /**\n * Registers the class as serializable.\n */\n static register<T extends Serializable>(cls: SerializableConstructor<T>) {\n SerializationMap.getMap().classNameMap[cls.className] =\n [cls, cls.fromConfig];\n }\n}\n\n/**\n * Register a class with the serialization map of TensorFlow.js.\n * \n * This is often used for registering custom Layers, so they can be\n * serialized and deserialized.\n * \n * Example:\n * \n * ```js\n * class MyCustomLayer extends tf.layers.Layer {\n * static className = 'MyCustomLayer';\n * \n * constructor(config) {\n * super(config);\n * }\n * }\n * tf.serialization.registerClass(MyCustomLayer);\n * ```\n * \n * @param cls The class to be registered. It must have a public static member\n * called `className` defined and the value must be a non-empty string.\n */\n/** @doc {heading: 'Models', subheading: 'Serialization'} */\nexport function registerClass<T extends Serializable>(\n cls: SerializableConstructor<T>) {\n assert(\n cls.className != null,\n `Class being registered does not have the static className property ` +\n `defined.`);\n assert(\n typeof cls.className === 'string',\n `className is required to be a string, but got type ` +\n typeof cls.className);\n assert(\n cls.className.length > 0,\n `Class being registered has an empty-string as its className, which ` +\n `is disallowed.`);\n\n SerializationMap.register(cls);\n}\n","/**\n * @license\n * Copyright 2017 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from './environment';\nimport {Features} from './environment_util';\nimport {Tensor} from './tensor';\nimport {TypedArray} from './types';\nimport * as util from './util';\n\n// TODO(smilkov): Move these constants to jasmine_util.\nexport const WEBGL_ENVS: Features = {\n 'HAS_WEBGL': true\n};\nexport const NODE_ENVS: Features = {\n 'IS_NODE': true\n};\nexport const CHROME_ENVS: Features = {\n 'IS_CHROME': true\n};\nexport const BROWSER_ENVS: Features = {\n 'IS_BROWSER': true\n};\nexport const CPU_ENVS: Features = {\n 'HAS_WEBGL': false\n};\nexport const BROWSER_CPU_ENVS: Features = {\n 'BACKEND': 'test-cpu'\n};\n\nexport const ALL_ENVS: Features = {};\n\nexport function expectArraysClose(\n actual: Tensor|TypedArray|number[],\n expected: Tensor|TypedArray|number[]|boolean[], epsilon?: number) {\n if (epsilon == null) {\n epsilon = ENV.get('TEST_EPSILON');\n }\n if (!(actual instanceof Tensor) && !(expected instanceof Tensor)) {\n const aType = actual.constructor.name;\n const bType = expected.constructor.name;\n\n if (aType !== bType) {\n throw new Error(\n `Arrays are of different type actual: ${aType} ` +\n `vs expected: ${bType}`);\n }\n } else if (actual instanceof Tensor && expected instanceof Tensor) {\n if (actual.dtype !== expected.dtype) {\n throw new Error(\n `Arrays are of different type actual: ${actual.dtype} ` +\n `vs expected: ${expected.dtype}.`);\n }\n if (!util.arraysEqual(actual.shape, expected.shape)) {\n throw new Error(\n `Arrays are of different shape actual: ${actual.shape} ` +\n `vs expected: ${expected.shape}.`);\n }\n }\n\n let actualValues: TypedArray|number[];\n let expectedValues: TypedArray|number[]|boolean[];\n if (actual instanceof Tensor) {\n actualValues = actual.dataSync();\n } else {\n actualValues = actual;\n }\n if (expected instanceof Tensor) {\n expectedValues = expected.dataSync();\n } else {\n expectedValues = expected;\n }\n\n if (actualValues.length !== expectedValues.length) {\n throw new Error(\n `Arrays have different lengths actual: ${actualValues.length} vs ` +\n `expected: ${expectedValues.length}.\\n` +\n `Actual: ${actualValues}.\\n` +\n `Expected: ${expectedValues}.`);\n }\n for (let i = 0; i < expectedValues.length; ++i) {\n const a = actualValues[i];\n const e = expectedValues[i];\n\n if (!areClose(a, Number(e), epsilon)) {\n throw new Error(\n `Arrays differ: actual[${i}] = ${a}, expected[${i}] = ${e}.\\n` +\n `Actual: ${actualValues}.\\n` +\n `Expected: ${expectedValues}.`);\n }\n }\n}\n\nexport interface DoneFn {\n (): void;\n fail: (message?: Error|string) => void;\n}\n\nexport function expectPromiseToFail(fn: () => Promise<{}>, done: DoneFn): void {\n fn().then(() => done.fail(), () => done());\n}\n\nexport function expectArraysEqual(\n actual: Tensor|TypedArray|number[],\n expected: Tensor|TypedArray|number[]|boolean[]) {\n return 
expectArraysClose(actual, expected, 0);\n}\n\nexport function expectNumbersClose(a: number, e: number, epsilon?: number) {\n if (epsilon == null) {\n epsilon = ENV.get('TEST_EPSILON');\n }\n if (!areClose(a, e, epsilon)) {\n throw new Error(`Numbers differ: actual === ${a}, expected === ${e}`);\n }\n}\n\nfunction areClose(a: number, e: number, epsilon: number): boolean {\n if (isNaN(a) && isNaN(e)) {\n return true;\n }\n if (isNaN(a) || isNaN(e) || Math.abs(a - e) > epsilon) {\n return false;\n }\n return true;\n}\n\nexport function expectValuesInRange(\n actual: Tensor|TypedArray|number[], low: number, high: number) {\n let actualVals: TypedArray|number[];\n if (actual instanceof Tensor) {\n actualVals = actual.dataSync();\n } else {\n actualVals = actual;\n }\n for (let i = 0; i < actualVals.length; i++) {\n if (actualVals[i] < low || actualVals[i] > high) {\n throw new Error(\n `Value out of range:${actualVals[i]} low: ${low}, high: ${high}`);\n }\n }\n}\n\nexport function expectArrayBuffersEqual(\n actual: ArrayBuffer, expected: ArrayBuffer) {\n // Safari & Jasmine don't like comparing ArrayBuffers directly. Wrapping in\n // a Float32Array solves this issue.\n expect(new Float32Array(actual)).toEqual(new Float32Array(expected));\n}\n","/** @license See the LICENSE file. */\n\n// This code is auto-generated, do not modify this file!\nconst version = '0.13.11';\nexport {version};\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {variableGrads} from '../globals';\nimport {Serializable} from '../serialization';\nimport {Scalar, Variable} from '../tensor';\nimport {NamedTensorMap} from '../tensor_types';\n\n/** @doc {heading: 'Training', subheading: 'Classes', namespace: 'train'} */\nexport abstract class Optimizer extends Serializable {\n /**\n * Executes `f()` and minimizes the scalar output of `f()` by computing\n * gradients of y with respect to the list of trainable variables provided by\n * `varList`. If no list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to minimize.\n * @param returnCost Whether to return the scalar cost value produced by\n * executing `f()`.\n * @param varList An optional list of variables to update. If specified, only\n * the trainable variables in varList will be updated by minimize. 
Defaults to\n * all trainable variables.\n */\n /** @doc {heading: 'Training', subheading: 'Optimizers'} */\n minimize(f: () => Scalar, returnCost = false, varList?: Variable[]): Scalar\n |null {\n const {value, grads} = this.computeGradients(f, varList);\n\n this.applyGradients(grads);\n\n // Dispose gradients.\n const varNames = Object.keys(grads);\n varNames.forEach(varName => grads[varName].dispose());\n\n if (returnCost) {\n return value as Scalar;\n } else {\n value.dispose();\n return null;\n }\n }\n\n /**\n * Executes f() and computes the gradient of the scalar output of f() with\n * respect to the list of trainable variables provided by `varList`. If no\n * list is provided, it defaults to all trainable variables.\n *\n * @param f The function to execute and whose output to use for computing\n * gradients with respect to variables.\n * @param varList An optional list of variables to compute gradients with\n * respect to. If specified, only the trainable variables in varList will have\n * gradients computed with respect to. Defaults to all trainable variables.\n */\n computeGradients(f: () => Scalar, varList?: Variable[]):\n {value: Scalar, grads: NamedTensorMap} {\n return variableGrads(f, varList);\n }\n\n /**\n * Updates variables by using the computed gradients.\n *\n * @param variableGradients A mapping of variable name to its gradient value.\n */\n abstract applyGradients(variableGradients: NamedTensorMap): void;\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {keep, tidy} from '../globals';\nimport {scalar, zerosLike} from '../ops/ops';\nimport {ConfigDict, registerClass, Serializable, SerializableConstructor} from '../serialization';\nimport {Scalar} from '../tensor';\nimport {NamedVariableMap} from '../tensor_types';\nimport {Optimizer} from './optimizer';\n\n/** @doclink Optimizer */\nexport class AdadeltaOptimizer extends Optimizer {\n static className = 'AdadeltaOptimizer';\n private c: Scalar;\n private epsilonScalar: Scalar;\n private rhoScalar: Scalar;\n private oneMinusRho: Scalar;\n\n private accumulatedGrads: NamedVariableMap = {};\n private accumulatedUpdates: NamedVariableMap = {};\n\n constructor(\n protected learningRate: number, protected rho: number,\n protected epsilon: number = null) {\n super();\n\n this.c = keep(scalar(-learningRate));\n this.rhoScalar = keep(scalar(rho));\n this.oneMinusRho = keep(scalar(1 - rho));\n\n if (epsilon === null) {\n epsilon = ENV.get('EPSILON');\n }\n\n this.epsilonScalar = keep(scalar(epsilon));\n }\n\n applyGradients(variableGradients: NamedVariableMap) {\n for (const variableName in variableGradients) {\n const value = ENV.engine.registeredVariables[variableName];\n if (this.accumulatedGrads[variableName] == null) {\n const trainable = false;\n tidy(() => {\n this.accumulatedGrads[variableName] =\n 
zerosLike(value).variable(trainable);\n });\n }\n if (this.accumulatedUpdates[variableName] == null) {\n const trainable = false;\n tidy(() => {\n this.accumulatedUpdates[variableName] =\n zerosLike(value).variable(trainable);\n });\n }\n\n const gradient = variableGradients[variableName];\n const accumulatedGrad = this.accumulatedGrads[variableName];\n const accumulatedUpdate = this.accumulatedUpdates[variableName];\n\n tidy(() => {\n const newAccumulatedGrad =\n this.rhoScalar.mul(accumulatedGrad)\n .add(this.oneMinusRho.mul(gradient.square()));\n\n const updates = accumulatedUpdate.add(this.epsilonScalar)\n .sqrt()\n .div(accumulatedGrad.add(this.epsilonScalar).sqrt())\n .mul(gradient);\n\n const newAccumulatedUpdate =\n this.rhoScalar.mul(accumulatedUpdate)\n .add(this.oneMinusRho.mul(updates.square()));\n\n this.accumulatedGrads[variableName].assign(newAccumulatedGrad);\n this.accumulatedUpdates[variableName].assign(newAccumulatedUpdate);\n\n const newValue = this.c.mul(updates).add(value);\n value.assign(newValue);\n });\n }\n }\n\n dispose() {\n this.c.dispose();\n this.epsilonScalar.dispose();\n this.rhoScalar.dispose();\n this.oneMinusRho.dispose();\n if (this.accumulatedUpdates != null) {\n Object.keys(this.accumulatedUpdates)\n .forEach(name => this.accumulatedUpdates[name].dispose());\n Object.keys(this.accumulatedGrads)\n .forEach(name => this.accumulatedGrads[name].dispose());\n }\n }\n getConfig(): ConfigDict {\n return {\n learningRate: this.learningRate,\n rho: this.rho,\n epsilon: this.epsilon\n };\n }\n static fromConfig<T extends Serializable>(\n cls: SerializableConstructor<T>, config: ConfigDict): T {\n return new cls(config.learningRate, config.rho, config.epsilon);\n }\n}\nregisterClass(AdadeltaOptimizer);\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {keep, tidy} from '../globals';\nimport {fill, scalar} from '../ops/ops';\nimport {ConfigDict, registerClass, Serializable, SerializableConstructor} from '../serialization';\nimport {Scalar} from '../tensor';\nimport {NamedVariableMap} from '../tensor_types';\nimport {Optimizer} from './optimizer';\n\n/** @doclink Optimizer */\nexport class AdagradOptimizer extends Optimizer {\n static className = 'AdagradOptimizer';\n private c: Scalar;\n private epsilon: Scalar;\n\n private accumulatedGrads: NamedVariableMap = {};\n\n constructor(\n protected learningRate: number, private initialAccumulatorValue = 0.1) {\n super();\n this.c = keep(scalar(-learningRate));\n\n this.epsilon = keep(scalar(ENV.get('EPSILON')));\n }\n\n applyGradients(variableGradients: NamedVariableMap) {\n for (const variableName in variableGradients) {\n const value = ENV.engine.registeredVariables[variableName];\n if (this.accumulatedGrads[variableName] == null) {\n const trainable = false;\n tidy(() => {\n this.accumulatedGrads[variableName] =\n fill(value.shape, this.initialAccumulatorValue)\n .variable(trainable);\n });\n }\n\n const gradient = variableGradients[variableName];\n const accumulatedGrad = this.accumulatedGrads[variableName];\n\n tidy(() => {\n const newAccumulatedGrad = accumulatedGrad.add(gradient.square());\n this.accumulatedGrads[variableName].assign(newAccumulatedGrad);\n\n const newValue =\n this.c\n .mul(gradient.div(newAccumulatedGrad.add(this.epsilon).sqrt()))\n .add(value);\n value.assign(newValue);\n });\n }\n }\n\n dispose() {\n this.epsilon.dispose();\n this.c.dispose();\n if (this.accumulatedGrads != null) {\n Object.keys(this.accumulatedGrads)\n .forEach(name => this.accumulatedGrads[name].dispose());\n }\n }\n getConfig(): ConfigDict {\n return {\n learningRate: this.learningRate,\n initialAccumulatorValue: this.initialAccumulatorValue,\n };\n }\n static fromConfig<T extends Serializable>(\n cls: SerializableConstructor<T>, config: ConfigDict): T {\n return new cls(config.learningRate, config.initialAccumulatorValue);\n }\n}\nregisterClass(AdagradOptimizer);\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {keep, tidy} from '../globals';\nimport {scalar, zerosLike} from '../ops/ops';\nimport {ConfigDict, registerClass, Serializable, SerializableConstructor} from '../serialization';\nimport {Scalar, Variable} from '../tensor';\nimport {NamedVariableMap} from '../tensor_types';\nimport {Optimizer} from './optimizer';\n\nexport class AdamOptimizer extends Optimizer {\n static className = 'AdamOptimizer';\n private c: Scalar;\n private epsScalar: Scalar;\n private beta1Scalar: Scalar;\n private beta2Scalar: Scalar;\n private accBeta1: Variable;\n private accBeta2: Variable;\n private oneMinusBeta1: Scalar;\n private oneMinusBeta2: Scalar;\n private one: Scalar;\n\n private accumulatedFirstMoment: NamedVariableMap = {};\n private accumulatedSecondMoment: NamedVariableMap = {};\n\n constructor(\n protected learningRate: number, protected beta1: number,\n protected beta2: number, protected epsilon: number = null) {\n super();\n this.c = keep(scalar(-learningRate));\n // b1, b2 keep initial value of beta* hyperparameters.\n this.beta1Scalar = keep(scalar(beta1));\n this.beta2Scalar = keep(scalar(beta2));\n tidy(() => {\n // accB* will be updated by batch.\n this.accBeta1 = scalar(beta1).variable();\n this.accBeta2 = scalar(beta2).variable();\n });\n this.oneMinusBeta1 = keep(scalar(1 - beta1));\n this.oneMinusBeta2 = keep(scalar(1 - beta2));\n this.one = keep(scalar(1));\n\n if (epsilon === null) {\n epsilon = ENV.get('EPSILON');\n }\n\n this.epsScalar = keep(scalar(epsilon));\n }\n\n applyGradients(variableGradients: NamedVariableMap) {\n tidy(() => {\n const oneMinusAccBeta1 = this.one.sub(this.accBeta1);\n const oneMinusAccBeta2 = this.one.sub(this.accBeta2);\n\n for (const variableName in variableGradients) {\n const value = ENV.engine.registeredVariables[variableName];\n if (this.accumulatedFirstMoment[variableName] == null) {\n const trainable = false;\n this.accumulatedFirstMoment[variableName] =\n zerosLike(value).variable(trainable);\n }\n if (this.accumulatedSecondMoment[variableName] == null) {\n const trainable = false;\n this.accumulatedSecondMoment[variableName] =\n zerosLike(value).variable(trainable);\n }\n\n const gradient = variableGradients[variableName];\n const firstMoment = this.accumulatedFirstMoment[variableName];\n const secondMoment = this.accumulatedSecondMoment[variableName];\n\n const newFirstMoment = this.beta1Scalar.mul(firstMoment)\n .add(this.oneMinusBeta1.mul(gradient));\n const newSecondMoment =\n this.beta2Scalar.mul(secondMoment)\n .add(this.oneMinusBeta2.mul(gradient.square()));\n\n const biasCorrectedFirstMoment = newFirstMoment.div(oneMinusAccBeta1);\n const biasCorrectedSecondMoment = newSecondMoment.div(oneMinusAccBeta2);\n\n this.accumulatedFirstMoment[variableName].assign(newFirstMoment);\n 
this.accumulatedSecondMoment[variableName].assign(newSecondMoment);\n\n const newValue =\n this.c\n .mul(biasCorrectedFirstMoment.div(\n this.epsScalar.add(biasCorrectedSecondMoment.sqrt())))\n .add(value);\n value.assign(newValue);\n }\n\n this.accBeta1.assign(this.accBeta1.mul(this.beta1Scalar));\n this.accBeta2.assign(this.accBeta2.mul(this.beta2Scalar));\n });\n }\n\n dispose() {\n this.c.dispose();\n this.epsScalar.dispose();\n this.beta1Scalar.dispose();\n this.beta2Scalar.dispose();\n this.accBeta1.dispose();\n this.accBeta2.dispose();\n this.oneMinusBeta1.dispose();\n this.oneMinusBeta2.dispose();\n this.one.dispose();\n\n if (this.accumulatedFirstMoment != null) {\n Object.keys(this.accumulatedFirstMoment)\n .forEach(name => this.accumulatedFirstMoment[name].dispose());\n }\n\n if (this.accumulatedSecondMoment != null) {\n Object.keys(this.accumulatedSecondMoment)\n .forEach(name => this.accumulatedSecondMoment[name].dispose());\n }\n }\n getConfig(): ConfigDict {\n return {\n learningRate: this.learningRate,\n beta1: this.beta1,\n beta2: this.beta2,\n epsilon: this.epsilon,\n };\n }\n static fromConfig<T extends Serializable>(\n cls: SerializableConstructor<T>, config: ConfigDict): T {\n return new cls(\n config.learningRate, config.beta1, config.beta2, config.epsilon);\n }\n}\nregisterClass(AdamOptimizer);\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {keep, tidy} from '../globals';\nimport {scalar, zerosLike} from '../ops/ops';\nimport {ConfigDict, registerClass, Serializable, SerializableConstructor} from '../serialization';\nimport {Scalar, Variable} from '../tensor';\nimport {NamedVariableMap} from '../tensor_types';\nimport {Optimizer} from './optimizer';\n\nexport class AdamaxOptimizer extends Optimizer {\n static className = 'AdamaxOptimizer';\n private c: Scalar;\n private epsScalar: Scalar;\n private accBeta1: Variable;\n private beta1Scalar: Scalar;\n private beta2Scalar: Scalar;\n private decayScalar: Scalar;\n private oneMinusBeta1: Scalar;\n private one: Scalar;\n private iteration: Variable;\n\n private accumulatedFirstMoment: NamedVariableMap = {};\n private accumulatedWeightedInfNorm: NamedVariableMap = {};\n\n constructor(\n protected learningRate: number, protected beta1: number,\n protected beta2: number, protected epsilon: number = null,\n protected decay = 0.0) {\n super();\n this.c = keep(scalar(-learningRate));\n\n // b1, b2 keep initial value of beta* hyperparameters.\n this.beta1Scalar = keep(scalar(beta1));\n this.beta2Scalar = keep(scalar(beta2));\n\n this.decayScalar = keep(scalar(decay));\n\n tidy(() => {\n this.iteration = scalar(0).variable();\n this.accBeta1 = scalar(beta1).variable();\n });\n\n this.oneMinusBeta1 = keep(scalar(1 - beta1));\n this.one = keep(scalar(1));\n\n if (epsilon === null) {\n epsilon = ENV.get('EPSILON');\n 
}\n\n this.epsScalar = keep(scalar(epsilon));\n }\n\n applyGradients(variableGradients: NamedVariableMap) {\n tidy(() => {\n const oneMinusAccBeta1 = this.one.sub(this.accBeta1);\n const lr = this.c.div(this.one.add(this.decayScalar.mul(this.iteration)));\n\n for (const variableName in variableGradients) {\n const value = ENV.engine.registeredVariables[variableName];\n if (this.accumulatedFirstMoment[variableName] == null) {\n const trainable = false;\n this.accumulatedFirstMoment[variableName] =\n zerosLike(value).variable(trainable);\n }\n if (this.accumulatedWeightedInfNorm[variableName] == null) {\n const trainable = false;\n this.accumulatedWeightedInfNorm[variableName] =\n zerosLike(value).variable(trainable);\n }\n\n const gradient = variableGradients[variableName];\n const firstMoment = this.accumulatedFirstMoment[variableName];\n const weightedInfNorm = this.accumulatedWeightedInfNorm[variableName];\n\n const newFirstMoment = this.beta1Scalar.mul(firstMoment)\n .add(this.oneMinusBeta1.mul(gradient));\n\n const ut0 = this.beta2Scalar.mul(weightedInfNorm);\n const ut1 = gradient.abs();\n\n const newWeightedInfNorm = ut0.maximum(ut1);\n\n this.accumulatedFirstMoment[variableName].assign(newFirstMoment);\n this.accumulatedWeightedInfNorm[variableName].assign(\n newWeightedInfNorm);\n\n const newValue =\n lr.div(oneMinusAccBeta1)\n .mul(newFirstMoment.div(this.epsScalar.add(newWeightedInfNorm)))\n .add(value);\n\n value.assign(newValue);\n }\n\n this.iteration.assign(this.iteration.add(this.one));\n this.accBeta1.assign(this.accBeta1.mul(this.beta1Scalar));\n });\n }\n\n dispose() {\n this.c.dispose();\n this.epsScalar.dispose();\n this.accBeta1.dispose();\n this.beta1Scalar.dispose();\n this.beta2Scalar.dispose();\n this.oneMinusBeta1.dispose();\n\n this.decayScalar.dispose();\n this.iteration.dispose();\n\n this.one.dispose();\n\n if (this.accumulatedFirstMoment != null) {\n Object.keys(this.accumulatedFirstMoment)\n .forEach(name => this.accumulatedFirstMoment[name].dispose());\n }\n\n if (this.accumulatedWeightedInfNorm != null) {\n Object.keys(this.accumulatedWeightedInfNorm)\n .forEach(name => this.accumulatedWeightedInfNorm[name].dispose());\n }\n }\n getConfig(): ConfigDict {\n return {\n learningRate: this.learningRate,\n beta1: this.beta1,\n beta2: this.beta2,\n epsilon: this.epsilon,\n decay: this.decay\n };\n }\n static fromConfig<T extends Serializable>(\n cls: SerializableConstructor<T>, config: ConfigDict): T {\n return new cls(\n config.learningRate, config.beta1, config.beta2, config.epsilon,\n config.decay);\n }\n}\nregisterClass(AdamaxOptimizer);\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {keep, tidy} from '../globals';\nimport {scalar} from '../ops/ops';\nimport {ConfigDict, registerClass, Serializable, SerializableConstructor} from '../serialization';\nimport {Scalar} from '../tensor';\nimport {NamedTensorMap} from '../tensor_types';\nimport {Optimizer} from './optimizer';\n\n/** @doclink Optimizer */\nexport class SGDOptimizer extends Optimizer {\n static className = 'SGDOptimizer';\n protected c: Scalar;\n\n constructor(protected learningRate: number) {\n super();\n this.setLearningRate(learningRate);\n }\n\n applyGradients(variableGradients: NamedTensorMap) {\n const varNames = Object.keys(variableGradients);\n varNames.forEach(varName => {\n const gradient = variableGradients[varName];\n const value = ENV.engine.registeredVariables[varName];\n\n tidy(() => {\n const newValue = this.c.mul(gradient).add(value);\n value.assign(newValue);\n });\n });\n }\n\n /**\n * Sets the learning rate of the optimizer.\n */\n setLearningRate(learningRate: number) {\n this.learningRate = learningRate;\n if (this.c != null) {\n this.c.dispose();\n }\n this.c = keep(scalar(-learningRate));\n }\n\n dispose() {\n this.c.dispose();\n }\n\n getConfig(): ConfigDict {\n return {learningRate: this.learningRate};\n }\n static fromConfig<T extends Serializable>(\n cls: SerializableConstructor<T>, config: ConfigDict): T {\n return new cls(config.learningRate);\n }\n}\nregisterClass(SGDOptimizer);\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {tidy} from '../globals';\nimport {scalar, zerosLike} from '../ops/ops';\nimport {ConfigDict, registerClass, Serializable, SerializableConstructor} from '../serialization';\nimport {Scalar, Tensor} from '../tensor';\nimport {NamedVariableMap} from '../tensor_types';\nimport {SGDOptimizer} from './sgd_optimizer';\n\n/** @doclink Optimizer */\nexport class MomentumOptimizer extends SGDOptimizer {\n static className = 'MomentumOptimizer';\n private m: Scalar;\n private accumulations: NamedVariableMap;\n\n constructor(\n protected learningRate: number, private momentum: number,\n private useNesterov = false) {\n super(learningRate);\n this.m = scalar(this.momentum);\n this.accumulations = {};\n }\n\n applyGradients(variableGradients: NamedVariableMap) {\n for (const variableName in variableGradients) {\n const value = ENV.engine.registeredVariables[variableName];\n if (this.accumulations[variableName] == null) {\n const trainable = false;\n tidy(() => {\n this.accumulations[variableName] =\n zerosLike(value).variable(trainable);\n });\n }\n\n const accumulation = this.accumulations[variableName];\n const gradient = variableGradients[variableName];\n\n tidy(() => {\n let newValue: Tensor;\n const newAccumulation = this.m.mul(accumulation).add(gradient);\n if (this.useNesterov) {\n newValue =\n this.c.mul(gradient.add(newAccumulation.mul(this.m))).add(value);\n } else {\n newValue = this.c.mul(newAccumulation).add(value);\n }\n this.accumulations[variableName].assign(newAccumulation);\n value.assign(newValue);\n });\n }\n }\n\n dispose() {\n super.dispose();\n this.m.dispose();\n if (this.accumulations != null) {\n for (const variableName in this.accumulations) {\n this.accumulations[variableName].dispose();\n }\n }\n }\n\n /**\n * Sets the momentum of the optimizer.\n *\n * @param momentum\n */\n setMomentum(momentum: number) {\n this.momentum = momentum;\n }\n\n getConfig(): ConfigDict {\n return {\n learningRate: this.learningRate,\n momentum: this.momentum,\n useNesterov: this.useNesterov\n };\n }\n static fromConfig<T extends Serializable>(\n cls: SerializableConstructor<T>, config: ConfigDict): T {\n return new cls(config.learningRate, config.momentum, config.useNesterov);\n }\n}\nregisterClass(MomentumOptimizer);\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {ENV} from '../environment';\nimport {keep, tidy} from '../globals';\nimport {scalar, zerosLike} from '../ops/ops';\nimport {ConfigDict, registerClass, Serializable, SerializableConstructor} from '../serialization';\nimport {Scalar} from '../tensor';\nimport {NamedVariableMap} from '../tensor_types';\nimport {Optimizer} from './optimizer';\n\n/** @doclink Optimizer */\nexport class RMSPropOptimizer extends Optimizer {\n static className = 'RMSPropOptimizer';\n private c: Scalar;\n private epsilonScalar: Scalar;\n private decayScalar: Scalar;\n private momentumScalar: Scalar;\n private oneMinusDecay: Scalar;\n private centered: boolean;\n\n private accumulatedMeanSquares: NamedVariableMap = {};\n private accumulatedMeanGrads: NamedVariableMap = {};\n private accumulatedMoments: NamedVariableMap = {};\n\n constructor(\n protected learningRate: number, protected decay = 0.9,\n protected momentum = 0.0, protected epsilon: number = null,\n centered = false) {\n super();\n\n this.c = keep(scalar(learningRate));\n this.decayScalar = keep(scalar(decay));\n this.momentumScalar = keep(scalar(momentum));\n this.oneMinusDecay = keep(scalar(1 - decay));\n this.centered = centered;\n\n if (epsilon === null) {\n epsilon = ENV.get('EPSILON');\n }\n\n this.epsilonScalar = keep(scalar(epsilon));\n }\n\n applyGradients(variableGradients: NamedVariableMap) {\n for (const variableName in variableGradients) {\n const value = ENV.engine.registeredVariables[variableName];\n if (this.accumulatedMeanSquares[variableName] == null) {\n const trainable = false;\n tidy(() => {\n this.accumulatedMeanSquares[variableName] =\n zerosLike(value).variable(trainable);\n });\n }\n if (this.accumulatedMeanGrads[variableName] == null && this.centered) {\n const trainable = false;\n tidy(() => {\n this.accumulatedMeanGrads[variableName] =\n zerosLike(value).variable(trainable);\n });\n }\n if (this.accumulatedMoments[variableName] == null) {\n const trainable = false;\n tidy(() => {\n this.accumulatedMoments[variableName] =\n zerosLike(value).variable(trainable);\n });\n }\n\n const accumulatedMeanSquare = this.accumulatedMeanSquares[variableName];\n const accumulatedMeanGrad = this.accumulatedMeanGrads[variableName];\n const accumulatedMoments = this.accumulatedMoments[variableName];\n const gradient = variableGradients[variableName];\n\n tidy(() => {\n const newAccumulatedMeanSquare =\n this.decayScalar.mul(accumulatedMeanSquare)\n .add(this.oneMinusDecay.mul(gradient.square()));\n\n if (this.centered) {\n // Centered gradient\n const newAccumulatedMeanGrad =\n this.decayScalar.mul(accumulatedMeanGrad)\n .add(this.oneMinusDecay.mul(gradient));\n\n const newAccumulatedMoments =\n this.momentumScalar.mul(accumulatedMoments)\n .add(this.c.mul(gradient).div(\n newAccumulatedMeanSquare\n .sub(newAccumulatedMeanGrad.square().add(\n this.epsilonScalar))\n 
.sqrt()));\n\n this.accumulatedMeanSquares[variableName].assign(\n newAccumulatedMeanSquare);\n this.accumulatedMeanGrads[variableName].assign(\n newAccumulatedMeanGrad);\n this.accumulatedMoments[variableName].assign(newAccumulatedMoments);\n\n const newValue = value.sub(newAccumulatedMoments);\n value.assign(newValue);\n } else {\n // Plain gradient\n const newAccumulatedMeanSquare =\n this.decayScalar.mul(accumulatedMeanSquare)\n .add(this.oneMinusDecay.mul(gradient.square()));\n\n const newAccumulatedMoments =\n this.momentumScalar.mul(accumulatedMoments)\n .add(this.c.mul(gradient).div(\n newAccumulatedMeanSquare.add(this.epsilonScalar).sqrt()));\n\n this.accumulatedMeanSquares[variableName].assign(\n newAccumulatedMeanSquare);\n this.accumulatedMoments[variableName].assign(newAccumulatedMoments);\n\n const newValue = value.sub(newAccumulatedMoments);\n value.assign(newValue);\n }\n });\n }\n }\n\n dispose() {\n this.c.dispose();\n this.epsilonScalar.dispose();\n this.decayScalar.dispose();\n this.momentumScalar.dispose();\n this.oneMinusDecay.dispose();\n if (this.accumulatedMeanSquares != null) {\n Object.keys(this.accumulatedMeanSquares)\n .forEach(name => this.accumulatedMeanSquares[name].dispose());\n }\n if (this.accumulatedMeanGrads != null && this.centered) {\n Object.keys(this.accumulatedMeanGrads)\n .forEach(name => this.accumulatedMeanGrads[name].dispose());\n }\n if (this.accumulatedMoments != null) {\n Object.keys(this.accumulatedMoments)\n .forEach(name => this.accumulatedMoments[name].dispose());\n }\n }\n\n getConfig(): ConfigDict {\n return {\n learningRate: this.learningRate,\n decay: this.decay,\n momentum: this.momentum,\n epsilon: this.epsilon,\n centered: this.centered\n };\n }\n static fromConfig<T extends Serializable>(\n cls: SerializableConstructor<T>, config: ConfigDict): T {\n return new cls(\n config.learningRate, config.decay, config.momentum, config.epsilon,\n config.centered);\n }\n}\nregisterClass(RMSPropOptimizer);\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {AdadeltaOptimizer} from './adadelta_optimizer';\nimport {AdagradOptimizer} from './adagrad_optimizer';\nimport {AdamOptimizer} from './adam_optimizer';\nimport {AdamaxOptimizer} from './adamax_optimizer';\nimport {MomentumOptimizer} from './momentum_optimizer';\nimport {RMSPropOptimizer} from './rmsprop_optimizer';\nimport {SGDOptimizer} from './sgd_optimizer';\n\nexport class OptimizerConstructors {\n /**\n * Constructs a `tf.SGDOptimizer` that uses stochastic gradient descent.\n *\n * ```js\n * // Fit a quadratic function by learning the coefficients a, b, c.\n * const xs = tf.tensor1d([0, 1, 2, 3]);\n * const ys = tf.tensor1d([1.1, 5.9, 16.8, 33.9]);\n *\n * const a = tf.scalar(Math.random()).variable();\n * const b = tf.scalar(Math.random()).variable();\n * const c = tf.scalar(Math.random()).variable();\n *\n * // y = a * x^2 + b * x + c.\n * const f = x => a.mul(x.square()).add(b.mul(x)).add(c);\n * const loss = (pred, label) => pred.sub(label).square().mean();\n *\n * const learningRate = 0.01;\n * const optimizer = tf.train.sgd(learningRate);\n *\n * // Train the model.\n * for (let i = 0; i < 10; i++) {\n * optimizer.minimize(() => loss(f(xs), ys));\n * }\n *\n * // Make predictions.\n * console.log(\n * `a: ${a.dataSync()}, b: ${b.dataSync()}, c: ${c.dataSync()}`);\n * const preds = f(xs).dataSync();\n * preds.forEach((pred, i) => {\n * console.log(`x: ${i}, pred: ${pred}`);\n * });\n * ```\n *\n * @param learningRate The learning rate to use for the SGD algorithm.\n */\n /**\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static sgd(learningRate: number): SGDOptimizer {\n return new SGDOptimizer(learningRate);\n }\n\n /**\n * Constructs a `tf.MomentumOptimizer` that uses momentum gradient\n * descent.\n *\n * See\n * [http://proceedings.mlr.press/v28/sutskever13.pdf](\n * http://proceedings.mlr.press/v28/sutskever13.pdf)\n *\n * @param learningRate The learning rate to use for the Momentum gradient\n * descent algorithm.\n * @param momentum The momentum to use for the momentum gradient descent\n * algorithm.\n */\n /**\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static momentum(learningRate: number, momentum: number, useNesterov = false):\n MomentumOptimizer {\n return new MomentumOptimizer(learningRate, momentum, useNesterov);\n }\n\n /**\n * Constructs a `tf.RMSPropOptimizer` that uses RMSProp gradient\n * descent. 
This implementation uses plain momentum and is not centered\n * version of RMSProp.\n *\n * See\n * [http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf](\n * http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)\n *\n * @param learningRate The learning rate to use for the RMSProp gradient\n * descent algorithm.\n * @param decay The discounting factor for the history/coming gradient.\n * @param momentum The momentum to use for the RMSProp gradient descent\n * algorithm.\n * @param epsilon Small value to avoid zero denominator.\n * @param centered If true, gradients are normalized by the estimated\n * variance of the gradient.\n */\n /**\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static rmsprop(\n learningRate: number, decay = .9, momentum = 0.0, epsilon: number = null,\n centered = false): RMSPropOptimizer {\n return new RMSPropOptimizer(\n learningRate, decay, momentum, epsilon, centered);\n }\n\n /**\n * Constructs a `tf.AdamOptimizer` that uses the Adam algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adam gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n */\n /**\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adam(\n learningRate = 0.001, beta1 = 0.9, beta2 = 0.999,\n epsilon: number = null): AdamOptimizer {\n return new AdamOptimizer(learningRate, beta1, beta2, epsilon);\n }\n\n /**\n * Constructs a `tf.AdadeltaOptimizer` that uses the Adadelta algorithm.\n * See [https://arxiv.org/abs/1212.5701](https://arxiv.org/abs/1212.5701)\n *\n * @param learningRate The learning rate to use for the Adadelta gradient\n * descent algorithm.\n * @param rho The learning rate decay over each update.\n * @param epsilon A constant epsilon used to better condition the grad\n * update.\n */\n /**\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adadelta(learningRate = .001, rho = .95, epsilon: number = null):\n AdadeltaOptimizer {\n return new AdadeltaOptimizer(learningRate, rho, epsilon);\n }\n\n /**\n * Constructs a `tf.AdamaxOptimizer` that uses the Adamax algorithm.\n * See [https://arxiv.org/abs/1412.6980](https://arxiv.org/abs/1412.6980)\n *\n * @param learningRate The learning rate to use for the Adamax gradient\n * descent algorithm.\n * @param beta1 The exponential decay rate for the 1st moment estimates.\n * @param beta2 The exponential decay rate for the 2nd moment estimates.\n * @param epsilon A small constant for numerical stability.\n * @param decay The learning rate decay over each update.\n */\n /**\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adamax(\n learningRate = 0.002, beta1 = 0.9, beta2 = 0.999, epsilon: number = null,\n decay = 0.0): AdamaxOptimizer {\n return new AdamaxOptimizer(learningRate, beta1, beta2, epsilon, decay);\n }\n\n /**\n * Constructs a `tf.AdagradOptimizer` that uses the Adagrad algorithm.\n * See\n * [http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf](\n * http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)\n * or\n * [http://ruder.io/optimizing-gradient-descent/index.html#adagrad](\n * http://ruder.io/optimizing-gradient-descent/index.html#adagrad)\n 
*\n * @param learningRate The learning rate to use for the Adagrad gradient\n * descent algorithm.\n * @param initialAccumulatorValue Starting value for the accumulators, must be\n * positive.\n */\n /**\n * @doc {heading: 'Training', subheading: 'Optimizers', namespace: 'train'}\n */\n static adagrad(learningRate: number, initialAccumulatorValue = 0.1):\n AdagradOptimizer {\n return new AdagradOptimizer(learningRate, initialAccumulatorValue);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// So typings can propagate.\nimport {AdadeltaOptimizer} from './optimizers/adadelta_optimizer';\nimport {AdagradOptimizer} from './optimizers/adagrad_optimizer';\nimport {AdamOptimizer} from './optimizers/adam_optimizer';\nimport {AdamaxOptimizer} from './optimizers/adamax_optimizer';\nimport {MomentumOptimizer} from './optimizers/momentum_optimizer';\nimport {OptimizerConstructors} from './optimizers/optimizer_constructors';\nimport {RMSPropOptimizer} from './optimizers/rmsprop_optimizer';\nimport {SGDOptimizer} from './optimizers/sgd_optimizer';\n\n// tslint:disable-next-line:no-unused-expression\n[MomentumOptimizer, SGDOptimizer, AdadeltaOptimizer, AdagradOptimizer,\n RMSPropOptimizer, AdamaxOptimizer, AdamOptimizer];\n\nexport const train = {\n sgd: OptimizerConstructors.sgd,\n momentum: OptimizerConstructors.momentum,\n adadelta: OptimizerConstructors.adadelta,\n adagrad: OptimizerConstructors.adagrad,\n rmsprop: OptimizerConstructors.rmsprop,\n adamax: OptimizerConstructors.adamax,\n adam: OptimizerConstructors.adam\n};\n","/**\n * @license\n * Copyright 2017 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// backend_cpu.ts and backend_webgl.ts are standalone files and should be\n// explicitly included here. 
Below, there is an export from backend_webgl, but\n// that doesn't count since it's exporting a Typescript interface.\nimport './kernels/backend_webgl';\nimport './kernels/backend_cpu';\n\nimport {nextFrame} from './browser_util';\nimport * as environment from './environment';\nimport {Environment} from './environment';\n\n// Serialization.\nimport * as io from './io/io';\nimport * as math from './math';\nimport * as serialization from './serialization';\nimport {setOpHandler} from './tensor';\nimport * as test_util from './test_util';\nimport * as util from './util';\nimport {version} from './version';\nimport * as webgl from './webgl';\n\nexport {InferenceModel, ModelPredictConfig} from './model_types';\n\n// Optimizers.\nexport {AdadeltaOptimizer} from './optimizers/adadelta_optimizer';\nexport {AdagradOptimizer} from './optimizers/adagrad_optimizer';\nexport {AdamOptimizer} from './optimizers/adam_optimizer';\nexport {AdamaxOptimizer} from './optimizers/adamax_optimizer';\nexport {MomentumOptimizer} from './optimizers/momentum_optimizer';\nexport {Optimizer} from './optimizers/optimizer';\nexport {RMSPropOptimizer} from './optimizers/rmsprop_optimizer';\nexport {SGDOptimizer} from './optimizers/sgd_optimizer';\nexport {Scalar, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D, TensorBuffer, variable, Variable} from './tensor';\nexport {NamedTensorMap} from './tensor_types';\nexport {DataType, Rank, ShapeMap} from './types';\n\nexport * from './ops/ops';\nexport {LSTMCellFunc} from './ops/lstm';\nexport {Reduction} from './ops/loss_ops';\n\nexport * from './train';\nexport * from './globals';\n\nexport {Features} from './environment_util';\nexport {TimingInfo} from './engine';\nexport {ENV, Environment} from './environment';\n\nexport const setBackend = Environment.setBackend;\nexport const getBackend = Environment.getBackend;\nexport const disposeVariables = Environment.disposeVariables;\nexport const memory = Environment.memory;\nexport {version as version_core};\n\nexport {nextFrame};\n\n// Second level exports.\nexport {environment, io, math, serialization, test_util, util, webgl};\n\n// Backend specific.\nexport {KernelBackend, BackendTimingInfo, DataMover, DataStorage} from './kernels/backend';\n\nimport * as ops from './ops/ops';\nsetOpHandler(ops);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport {ENV} from '@tensorflow/tfjs-core';\n\nimport {DataFormat} from '../common';\n\nlet _epsilon = ENV.get('EPSILON');\n\n/**\n * Returns the value of the fuzz factor used in numeric expressions.\n */\nexport function epsilon() {\n return _epsilon;\n}\n\n/**\n * Sets the value of the fuzz factor used in numeric expressions.\n * @param e New value of epsilon.\n */\nexport function setEpsilon(e: number) {\n _epsilon = e;\n}\n\n/**\n * Returns the default image data format convention.\n */\nexport function imageDataFormat(): DataFormat {\n return 'channelsLast';\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Utilities related to persistent state in the backend.\n */\n\nimport {DataType, 
keep, Scalar, scalar} from '@tensorflow/tfjs-core';\n\n/**\n * An ID to track `tf.SymbolicTensor`s and derived classes.\n * Required in different places in engine/topology.ts to identify unique\n * tensors.\n */\nlet _nextUniqueTensorId = 0;\n\nexport function getNextUniqueTensorId(): number {\n return _nextUniqueTensorId++;\n}\n\nconst _uidPrefixes: {[prefix: string]: number} = {};\n\n/**\n * Provides a unique UID given a string prefix.\n *\n * @param prefix\n */\nexport function getUid(prefix = ''): string {\n if (!(prefix in _uidPrefixes)) {\n _uidPrefixes[prefix] = 0;\n }\n _uidPrefixes[prefix] += 1;\n return prefix + _uidPrefixes[prefix].toString();\n}\n\nconst scalarCache: {[typeKey: string]: {[key: number]: Scalar}} = {\n float32: {},\n int32: {}\n};\n\nconst DEFAULT_DTYPE: DataType = 'float32';\n\n/**\n * Get scalar, with caching.\n */\nexport function getScalar(value: number, dtype?: DataType): Scalar {\n if (dtype === undefined) {\n dtype = DEFAULT_DTYPE;\n }\n if (scalarCache[dtype][value] == null) {\n scalarCache[dtype][value] = scalar(value, dtype);\n keep(scalarCache[dtype][value]);\n }\n return scalarCache[dtype][value];\n}\n\nexport function disposeScalarCache() {\n for (const typeKey in scalarCache) {\n for (const key in scalarCache[typeKey]) {\n scalarCache[typeKey][key].dispose();\n delete scalarCache[typeKey][key];\n }\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Explicit error types.\n *\n * See the following link for more information about why the code includes\n * calls to setPrototypeOf:\n *\n * https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work\n */\n// tslint:enable\n\n/**\n * Equivalent of Python's AttributeError.\n */\nexport class AttributeError extends Error {\n constructor(message?: string) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AttributeError.prototype);\n }\n}\n\n/**\n * Equivalent of Python's RuntimeError.\n */\nexport class RuntimeError extends Error {\n constructor(message?: string) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, RuntimeError.prototype);\n }\n}\n\n/**\n * Equivalent of Python's ValueError.\n */\nexport class ValueError extends Error {\n constructor(message?: string) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, ValueError.prototype);\n }\n}\n\n/**\n * Equivalent of Python's NotImplementedError.\n */\nexport class NotImplementedError extends Error {\n constructor(message?: string) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, NotImplementedError.prototype);\n }\n}\n\n/**\n * Equivalent of Python's AssertionError.\n */\nexport class AssertionError extends Error {\n constructor(message?: string) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, AssertionError.prototype);\n }\n}\n\n/**\n * Equivalent of Python's IndexError.\n */\nexport class IndexError extends Error {\n constructor(message?: string) {\n super(message);\n // Set the prototype explicitly.\n Object.setPrototypeOf(this, IndexError.prototype);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is 
governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original source: utils/generic_utils.py */\n\nimport {DataType, serialization} from '@tensorflow/tfjs-core';\n\nimport {AssertionError, ValueError} from '../errors';\n// tslint:enable\n\n/**\n * If `value` is an Array, equivalent to Python's `value * numValues`.\n * If `value` is not an Array, equivalent to Python's `[value] * numValues`\n */\n// tslint:disable-next-line:no-any\nexport function pyListRepeat(value: any, numValues: number): any[] {\n if (Array.isArray(value)) {\n // tslint:disable-next-line:no-any\n let newArray: any[] = [];\n for (let i = 0; i < numValues; i++) {\n newArray = newArray.concat(value);\n }\n return newArray;\n } else {\n const newArray = new Array(numValues);\n newArray.fill(value);\n return newArray;\n }\n}\n\nexport function assert(val: boolean, message?: string): void {\n if (!val) {\n throw new AssertionError(message);\n }\n}\n\n/**\n * Count the number of elements of the `array` that are equal to `reference`.\n */\nexport function count<T>(array: T[], refernce: T) {\n let counter = 0;\n for (const item of array) {\n if (item === refernce) {\n counter++;\n }\n }\n return counter;\n}\n\n/**\n * If an array is of length 1, just return the first element. Otherwise, return\n * the full array.\n * @param tensors\n */\nexport function singletonOrArray<T>(xs: T[]): T|T[] {\n if (xs.length === 1) {\n return xs[0];\n }\n return xs;\n}\n\n/**\n * Normalizes a list/tensor into a list.\n *\n * If a tensor is passed, we return\n * a list of size 1 containing the tensor.\n *\n * @param x target object to be normalized.\n */\n// tslint:disable-next-line:no-any\nexport function toList(x: any): any[] {\n if (Array.isArray(x)) {\n return x;\n }\n return [x];\n}\n\n/**\n * Generate a UID for a list\n */\n// tslint:disable-next-line:no-any\nexport function objectListUid(objs: any|any[]): string {\n const objectList = toList(objs);\n let retVal = '';\n for (const obj of objectList) {\n if (obj.id == null) {\n throw new ValueError(\n `Object ${obj} passed to objectListUid without an id`);\n }\n if (retVal !== '') {\n retVal = retVal + ', ';\n }\n retVal = retVal + Math.abs(obj.id);\n }\n return retVal;\n}\n/**\n * Converts string to snake-case.\n * @param name\n */\nexport function toSnakeCase(name: string): string {\n const intermediate = name.replace(/(.)([A-Z][a-z0-9]+)/g, '$1_$2');\n const insecure =\n intermediate.replace(/([a-z])([A-Z])/g, '$1_$2').toLowerCase();\n /*\n If the class is private the name starts with \"_\" which is not secure\n for creating scopes. 
We prefix the name with \"private\" in this case.\n */\n if (insecure[0] !== '_') {\n return insecure;\n }\n return 'private' + insecure;\n}\n\nexport function toCamelCase(identifier: string): string {\n // quick return for empty string or single character strings\n if (identifier.length <= 1) {\n return identifier;\n }\n // Check for the underscore indicating snake_case\n if (identifier.indexOf('_') === -1) {\n return identifier;\n }\n return identifier.replace(/[_]+(\\w|$)/g, (m, p1) => p1.toUpperCase());\n}\n\n// tslint:disable-next-line:no-any\nlet _GLOBAL_CUSTOM_OBJECTS = {} as {[objName: string]: any};\n\nexport function serializeKerasObject(instance: serialization.Serializable):\n serialization.ConfigDictValue {\n if (instance === null || instance === undefined) {\n return null;\n }\n return {className: instance.getClassName(), config: instance.getConfig()};\n}\n\n/**\n * Deserialize a saved Keras Object\n * @param identifier either a string ID or a saved Keras dictionary\n * @param moduleObjects a list of Python class names to object constructors\n * @param customObjects a list of Python class names to object constructors\n * @param printableModuleName debug text for the object being reconstituted\n * @returns a TensorFlow.js Layers object\n */\n// tslint:disable:no-any\nexport function deserializeKerasObject(\n identifier: string|serialization.ConfigDict,\n moduleObjects = {} as {[objName: string]: any},\n customObjects = {} as {[objName: string]: any},\n printableModuleName = 'object'): any {\n // tslint:enable\n if (typeof identifier === 'string') {\n const functionName = identifier;\n let fn;\n if (functionName in customObjects) {\n fn = customObjects[functionName];\n } else if (functionName in _GLOBAL_CUSTOM_OBJECTS) {\n fn = _GLOBAL_CUSTOM_OBJECTS[functionName];\n } else {\n fn = moduleObjects[functionName];\n if (fn == null) {\n throw new ValueError(\n `Unknown ${printableModuleName}: ${identifier}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n }\n return fn;\n } else {\n // In this case we are dealing with a Keras config dictionary.\n const config = identifier;\n if (config.className == null || config.config == null) {\n throw new ValueError(\n `${printableModuleName}: Improper config format: ` +\n `${JSON.stringify(config)}.\\n` +\n `'className' and 'config' must set.`);\n }\n const className = config.className as string;\n let cls, fromConfig;\n if (className in customObjects) {\n [cls, fromConfig] = customObjects.get(className);\n } else if (className in _GLOBAL_CUSTOM_OBJECTS) {\n [cls, fromConfig] = _GLOBAL_CUSTOM_OBJECTS.className;\n } else if (className in moduleObjects) {\n [cls, fromConfig] = moduleObjects[className];\n }\n if (cls == null) {\n throw new ValueError(\n `Unknown ${printableModuleName}: ${className}. ` +\n `This may be due to one of the following reasons:\\n` +\n `1. The ${printableModuleName} is defined in Python, in which ` +\n `case it needs to be ported to TensorFlow.js or your JavaScript ` +\n `code.\\n` +\n `2. 
The custom ${printableModuleName} is defined in JavaScript, ` +\n `but is not registered properly with ` +\n `tf.serialization.registerClass().`);\n // TODO(cais): Add link to tutorial page on custom layers.\n }\n if (fromConfig != null) {\n // Porting notes: Instead of checking to see whether fromConfig accepts\n // customObjects, we create a customObjects dictionary and tack it on to\n // config.config as config.config.customObjects. Objects can use it, if\n // they want.\n\n // tslint:disable-next-line:no-any\n const customObjectsCombined = {} as {[objName: string]: any};\n for (const key of Object.keys(_GLOBAL_CUSTOM_OBJECTS)) {\n customObjectsCombined[key] = _GLOBAL_CUSTOM_OBJECTS[key];\n }\n for (const key of Object.keys(customObjects)) {\n customObjectsCombined[key] = customObjects[key];\n }\n // Add the customObjects to config\n const nestedConfig = config.config as serialization.ConfigDict;\n nestedConfig.customObjects = customObjectsCombined;\n\n const backupCustomObjects = {..._GLOBAL_CUSTOM_OBJECTS};\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n const returnObj = fromConfig(cls, config.config);\n _GLOBAL_CUSTOM_OBJECTS = {...backupCustomObjects};\n\n return returnObj;\n } else {\n // Then `cls` may be a function returning a class.\n // In this case by convention `config` holds\n // the kwargs of the function.\n const backupCustomObjects = {..._GLOBAL_CUSTOM_OBJECTS};\n for (const key of Object.keys(customObjects)) {\n _GLOBAL_CUSTOM_OBJECTS[key] = customObjects[key];\n }\n // In python this is **config['config'], for tfjs-layers we require\n // classes that use this fall-through construction method to take\n // a config interface that mimics the expansion of named parameters.\n const returnObj = new cls(config.config);\n _GLOBAL_CUSTOM_OBJECTS = {...backupCustomObjects};\n return returnObj;\n }\n }\n}\n\n/**\n * Compares two numbers for sorting.\n * @param a\n * @param b\n */\nexport function numberCompare(a: number, b: number) {\n return (a < b) ? -1 : ((a > b) ? 
1 : 0);\n}\n\n/**\n * Comparison of two numbers for reverse sorting.\n * @param a\n * @param b\n */\nexport function reverseNumberCompare(a: number, b: number) {\n return -1 * numberCompare(a, b);\n}\n\n/**\n * Convert a string into the corresponding DType.\n * @param dtype\n * @returns An instance of DType.\n */\nexport function stringToDType(dtype: string): DataType {\n switch (dtype) {\n case 'float32':\n return 'float32';\n default:\n throw new ValueError(`Invalid dtype: ${dtype}`);\n }\n}\n\n/**\n * Test the element-by-element equality of two Arrays of strings.\n * @param xs First array of strings.\n * @param ys Second array of strings.\n * @returns Wether the two arrays are all equal, element by element.\n */\nexport function stringsEqual(xs: string[], ys: string[]): boolean {\n if (xs == null || ys == null) {\n return xs === ys;\n }\n if (xs.length !== ys.length) {\n return false;\n }\n for (let i = 0; i < xs.length; ++i) {\n if (xs[i] !== ys[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Get the unique elements of an array.\n * @param xs Array.\n * @returns An Array consisting of the unique elements in `xs`.\n */\nexport function unique<T>(xs: T[]): T[] {\n if (xs == null) {\n return xs;\n }\n const out: T[] = [];\n // TODO(cais): Maybe improve performance by sorting.\n for (const x of xs) {\n if (out.indexOf(x) === -1) {\n out.push(x);\n }\n }\n return out;\n}\n\n/**\n * Determine if an Object is empty (i.e., does not have own properties).\n * @param obj Object\n * @returns Whether the Object is empty.\n * @throws ValueError: If object is `null` or `undefined`.\n */\nexport function isObjectEmpty(obj: {}): boolean {\n if (obj == null) {\n throw new ValueError(`Invalid value in obj: ${JSON.stringify(obj)}`);\n }\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Helper function used to build type union/enum run-time checkers.\n * @param values The list of allowed values.\n * @param label A string name for the type\n * @param value The value to test.\n * @throws ValueError: If the value is not in values nor `undefined`/`null`.\n */\nexport function checkStringTypeUnionValue(\n values: string[], label: string, value: string): void {\n if (value == null) {\n return;\n }\n if (values.indexOf(value) < 0) {\n throw new ValueError(`${value} is not a valid ${label}. 
Valid values are ${\n values} or null/undefined.`);\n }\n}\n\n/**\n * Helper function for verifying the types of inputs.\n *\n * Ensures that the elements of `x` are all of type `expectedType`.\n * Also verifies that the length of `x` is within bounds.\n *\n * @param x Object to test.\n * @param expectedType The string expected type of all of the elements in the\n * Array.\n * @param minLength Return false if x.length is less than this.\n * @param maxLength Return false if x.length is greater than this.\n * @returns true if and only if `x` is an `Array<expectedType>` with\n * length >= `minLength` and <= `maxLength`.\n */\n// tslint:disable:no-any\nexport function checkArrayTypeAndLength(\n x: any, expectedType: string, minLength = 0,\n maxLength = Infinity): boolean {\n assert(minLength >= 0);\n assert(maxLength >= minLength);\n return (\n Array.isArray(x) && x.length >= minLength && x.length <= maxLength &&\n x.every(e => typeof e === expectedType));\n}\n// tslint:enable:no-any\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original source: keras/contraints.py */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, tidy} from '@tensorflow/tfjs-core';\n\nimport {epsilon} from './backend/common';\nimport {getScalar} from './backend/state';\nimport {deserializeKerasObject, serializeKerasObject} from './utils/generic_utils';\n\n/**\n * Helper function used by many of the Constraints to find the L2Norms.\n */\nfunction calcL2Norms(w: Tensor, axis: number): Tensor {\n return tidy(() => tfc.sqrt(tfc.sum(tfc.mulStrict(w, w), axis, true)));\n}\n\n/**\n * Base class for functions that impose constraints on weight values\n */\n/**\n * @doc {\n * heading: 'Constraints',\n * subheading: 'Classes',\n * namespace: 'constraints'\n * }\n */\nexport abstract class Constraint extends serialization.Serializable {\n /* Porting note: was __call__, apply chosen to match other similar choices */\n abstract apply(w: Tensor): Tensor;\n getConfig(): serialization.ConfigDict {\n return {};\n }\n}\n\nexport interface MaxNormConfig {\n /**\n * Maximum norm for incoming weights\n */\n maxValue?: number;\n /**\n * Axis along which to calculate norms.\n *\n * For instance, in a `Dense` layer the weight matrix\n * has shape `[inputDim, outputDim]`,\n * set `axis` to `0` to constrain each weight vector\n * of length `[inputDim,]`.\n * In a `Conv2D` layer with `dataFormat=\"channels_last\"`,\n * the weight tensor has shape\n * `[rows, cols, inputDepth, outputDepth]`,\n * set `axis` to `[0, 1, 2]`\n * to constrain the weights of each filter tensor of size\n * `[rows, cols, inputDepth]`.\n */\n axis?: number;\n}\n\n/**\n * MaxNorm weight constraint.\n *\n * Constrains the weights incident to each hidden unit\n * to have a norm less than or equal to a desired value.\n *\n * References\n * - [Dropout: A Simple Way to Prevent Neural Networks from Overfitting\n * Srivastava, Hinton, et al.\n * 2014](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)\n */\nexport class MaxNorm extends Constraint {\n static readonly className = 'MaxNorm';\n private maxValue: number;\n private axis: number;\n private readonly defaultMaxValue = 2;\n private readonly defaultAxis = 0;\n\n constructor(config: MaxNormConfig) {\n super();\n this.maxValue =\n 
config.maxValue != null ? config.maxValue : this.defaultMaxValue;\n this.axis = config.axis != null ? config.axis : this.defaultAxis;\n }\n\n apply(w: Tensor): Tensor {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.clipByValue(norms, 0, this.maxValue);\n return tfc.mul(w, tfc.div(desired, tfc.add(getScalar(epsilon()), norms)));\n });\n }\n\n getConfig(): serialization.ConfigDict {\n return {maxValue: this.maxValue, axis: this.axis};\n }\n}\nserialization.registerClass(MaxNorm);\n\nexport interface UnitNormConfig {\n /**\n * Axis along which to calculate norms.\n *\n * For instance, in a `Dense` layer the weight matrix\n * has shape `[inputDim, outputDim]`,\n * set `axis` to `0` to constrain each weight vector\n * of length `[inputDim,]`.\n * In a `Conv2D` layer with `dataFormat=\"channels_last\"`,\n * the weight tensor has shape\n * [rows, cols, inputDepth, outputDepth]`,\n * set `axis` to `[0, 1, 2]`\n * to constrain the weights of each filter tensor of size\n * `[rows, cols, inputDepth]`.\n */\n axis?: number;\n}\n\n/**\n * Constrains the weights incident to each hidden unit to have unit norm.\n */\nexport class UnitNorm extends Constraint {\n static readonly className = 'UnitNorm';\n private axis: number;\n private readonly defaultAxis = 0;\n constructor(config: UnitNormConfig) {\n super();\n this.axis = config.axis != null ? config.axis : this.defaultAxis;\n }\n\n apply(w: Tensor): Tensor {\n return tidy(\n () => tfc.div(\n w, tfc.add(getScalar(epsilon()), calcL2Norms(w, this.axis))));\n }\n\n getConfig(): serialization.ConfigDict {\n return {axis: this.axis};\n }\n}\nserialization.registerClass(UnitNorm);\n\n/**\n * Constains the weight to be non-negative.\n */\nexport class NonNeg extends Constraint {\n static readonly className = 'NonNeg';\n\n apply(w: Tensor): Tensor {\n return tfc.relu(w);\n }\n}\nserialization.registerClass(NonNeg);\n\nexport interface MinMaxNormConfig {\n /**\n * Minimum norm for incoming weights\n */\n minValue?: number;\n /**\n * Maximum norm for incoming weights\n */\n maxValue?: number;\n /**\n * Axis along which to calculate norms.\n * For instance, in a `Dense` layer the weight matrix\n * has shape `[inputDim, outputDim]`,\n * set `axis` to `0` to constrain each weight vector\n * of length `[inputDim,]`.\n * In a `Conv2D` layer with `dataFormat=\"channels_last\"`,\n * the weight tensor has shape\n * `[rows, cols, inputDepth, outputDepth]`,\n * set `axis` to `[0, 1, 2]`\n * to constrain the weights of each filter tensor of size\n * `[rows, cols, inputDepth]`.\n */\n axis?: number;\n /**\n * Rate for enforcing the constraint: weights will be rescaled to yield:\n * `(1 - rate) * norm + rate * norm.clip(minValue, maxValue)`.\n * Effectively, this means that rate=1.0 stands for strict\n * enforcement of the constraint, while rate<1.0 means that\n * weights will be rescaled at each step to slowly move\n * towards a value inside the desired interval.\n */\n rate?: number;\n}\n\nexport class MinMaxNorm extends Constraint {\n static readonly className = 'MinMaxNorm';\n private minValue: number;\n private maxValue: number;\n private rate: number;\n private axis: number;\n private readonly defaultMinValue = 0.0;\n private readonly defaultMaxValue = 1.0;\n private readonly defaultRate = 1.0;\n private readonly defaultAxis = 0;\n\n constructor(config: MinMaxNormConfig) {\n super();\n this.minValue =\n config.minValue != null ? config.minValue : this.defaultMinValue;\n this.maxValue =\n config.maxValue != null ? 
config.maxValue : this.defaultMaxValue;\n this.rate = config.rate != null ? config.rate : this.defaultRate;\n this.axis = config.axis != null ? config.axis : this.defaultAxis;\n }\n\n apply(w: Tensor): Tensor {\n return tidy(() => {\n const norms = calcL2Norms(w, this.axis);\n const desired = tfc.add(\n tfc.mul(\n getScalar(this.rate),\n tfc.clipByValue(norms, this.minValue, this.maxValue)),\n tfc.mul(getScalar(1.0 - this.rate), norms));\n return tfc.mul(w, tfc.div(desired, tfc.add(getScalar(epsilon()), norms)));\n });\n }\n\n getConfig(): serialization.ConfigDict {\n return {\n minValue: this.minValue,\n maxValue: this.maxValue,\n rate: this.rate,\n axis: this.axis\n };\n }\n}\nserialization.registerClass(MinMaxNorm);\n\n/** @docinline */\nexport type ConstraintIdentifier =\n 'maxNorm'|'minMaxNorm'|'nonNeg'|'unitNorm'|string;\n\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP:\n {[identifier in ConstraintIdentifier]: string} = {\n 'maxNorm': 'MaxNorm',\n 'minMaxNorm': 'MinMaxNorm',\n 'nonNeg': 'NonNeg',\n 'unitNorm': 'UnitNorm'\n };\n\nexport function serializeConstraint(constraint: Constraint):\n serialization.ConfigDictValue {\n return serializeKerasObject(constraint);\n}\n\nexport function deserializeConstraint(\n config: serialization.ConfigDict,\n customObjects: serialization.ConfigDict = {}): Constraint {\n return deserializeKerasObject(\n config, serialization.SerializationMap.getMap().classNameMap,\n customObjects, 'constraint');\n}\n\nexport function getConstraint(identifier: ConstraintIdentifier|\n serialization.ConfigDict|Constraint): Constraint {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = {className, config: {}};\n return deserializeConstraint(config);\n } else if (identifier instanceof Constraint) {\n return identifier;\n } else {\n return deserializeConstraint(identifier);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport {Constraint, MaxNorm, MaxNormConfig, MinMaxNorm, MinMaxNormConfig, NonNeg, UnitNorm, UnitNormConfig} from './constraints';\n\n/**\n * @doc {\n * heading: 'Constraints',\n * namespace: 'constraints',\n * useDocsFrom: 'MaxNorm',\n * configParamIndices: [0]\n * }\n */\nexport function maxNorm(config: MaxNormConfig): Constraint {\n return new MaxNorm(config);\n}\n\n/**\n * @doc {\n * heading: 'Constraints',\n * namespace: 'constraints',\n * useDocsFrom: 'UnitNorm',\n * configParamIndices: [0]\n * }\n */\nexport function unitNorm(config: UnitNormConfig): Constraint {\n return new UnitNorm(config);\n}\n\n/**\n * @doc {\n * heading: 'Constraints',\n * namespace: 'constraints',\n * useDocsFrom: 'NonNeg'\n * }\n */\nexport function nonNeg(): Constraint {\n return new NonNeg();\n}\n\n/**\n * @doc {\n * heading: 'Constraints',\n * namespace: 'constraints',\n * useDocsFrom: 'MinMaxNormConfig',\n * configParamIndices: [0]\n * }\n */\nexport function minMaxNorm(config: MinMaxNormConfig): Constraint {\n return new MinMaxNorm(config);\n}\n","/**\n * @license\n * Copyright 
2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Common functions for TensorFlow.js Layers.\n */\nimport {checkStringTypeUnionValue} from './utils/generic_utils';\n// A map from the requested scoped name of a Tensor to the number of Tensors\n// wanting that name so far. This allows enforcing name uniqueness by appending\n// an incrementing index, e.g. scope/name, scope/name_1, scope/name_2, etc.\nconst nameMap: Map<string, number> = new Map<string, number>();\n\n// TODO(cais): Perhaps move the enums to a more suitable place, e.g.,\n// constants.ts.\n/** @docinline */\nexport type DataFormat = 'channelsFirst'|'channelsLast';\nexport const VALID_DATA_FORMAT_VALUES = ['channelsFirst', 'channelsLast'];\nexport function checkDataFormat(value?: string): void {\n checkStringTypeUnionValue(VALID_DATA_FORMAT_VALUES, 'DataFormat', value);\n}\n\n/** @docinline */\nexport type PaddingMode = 'valid'|'same'|'causal';\nexport const VALID_PADDING_MODE_VALUES = ['valid', 'same', 'causal'];\nexport function checkPaddingMode(value?: string): void {\n checkStringTypeUnionValue(VALID_PADDING_MODE_VALUES, 'PaddingMode', value);\n}\n\n/** @docinline */\nexport type PoolMode = 'max'|'avg';\nexport const VALID_POOL_MODE_VALUES = ['max', 'avg'];\nexport function checkPoolMode(value?: string): void {\n checkStringTypeUnionValue(VALID_POOL_MODE_VALUES, 'PoolMode', value);\n}\n\nconst _nameScopeStack: string[] = [];\nconst _nameScopeDivider = '/';\n\n/**\n * Enter namescope, which can be nested.\n */\nexport function nameScope<T>(name: string, fn: () => T): T {\n _nameScopeStack.push(name);\n try {\n const val: T = fn();\n _nameScopeStack.pop();\n return val;\n } catch (e) {\n _nameScopeStack.pop();\n throw e;\n }\n}\n\n/**\n * Get the current namescope as a flat, concatenated string.\n */\nfunction currentNameScopePrefix(): string {\n if (_nameScopeStack.length === 0) {\n return '';\n } else {\n return _nameScopeStack.join(_nameScopeDivider) + _nameScopeDivider;\n }\n}\n\n/**\n * Get the name a Tensor (or Variable) would have if not uniqueified.\n * @param tensorName\n * @return Scoped name string.\n */\nexport function getScopedTensorName(tensorName: string): string {\n if (!isValidTensorName(tensorName)) {\n throw new Error('Not a valid tensor name: \\'' + tensorName + '\\'');\n }\n return currentNameScopePrefix() + tensorName;\n}\n\n/**\n * Get unique names for Tensors and Variables.\n * @param scopedName The fully-qualified name of the Tensor, i.e. as produced by\n * `getScopedTensorName()`.\n * @return A unique version of the given fully scoped name.\n * If this is the first time that the scoped name is seen in this session,\n * then the given `scopedName` is returned unaltered. 
If the same name is\n * seen again (producing a collision), an incrementing suffix is added to the\n * end of the name, so it takes the form 'scope/name_1', 'scope/name_2', etc.\n */\nexport function getUniqueTensorName(scopedName: string): string {\n if (!isValidTensorName(scopedName)) {\n throw new Error('Not a valid tensor name: \\'' + scopedName + '\\'');\n }\n if (!nameMap.has(scopedName)) {\n nameMap.set(scopedName, 0);\n }\n const index = nameMap.get(scopedName);\n nameMap.set(scopedName, nameMap.get(scopedName) + 1);\n\n if (index > 0) {\n const result = scopedName + '_' + index;\n // Mark the composed name as used in case someone wants\n // to call getUniqueTensorName(\"name_1\").\n nameMap.set(result, 1);\n return result;\n } else {\n return scopedName;\n }\n}\n\nconst tensorNameRegex = new RegExp(/^[A-Za-z][-A-Za-z0-9\\._\\/]*$/);\n\n/**\n * Determine whether a string is a valid tensor name.\n * @param name\n * @returns A Boolean indicating whether `name` is a valid tensor name.\n */\nexport function isValidTensorName(name: string): boolean {\n return name.match(tensorNameRegex) ? true : false;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Math utility functions.\n *\n * This file contains some frequently used math function that operates on\n * number[] or Float32Array and return a number. Many of these functions are\n * not-so-thick wrappers around TF.js Core functions. But they offer the\n * convenience of\n * 1) not having to convert the inputs into Tensors,\n * 2) not having to convert the returned Tensors to numbers.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {scalar, Tensor1D, tensor1d} from '@tensorflow/tfjs-core';\nimport {ValueError} from '../errors';\n\nexport type ArrayTypes = Uint8Array | Int32Array | Float32Array;\n\n/**\n * Determine if a number is an integer.\n */\nexport function isInteger(x: number): boolean {\n return x === parseInt(x.toString(), 10);\n}\n\n/**\n * Calculate the product of an array of numbers.\n * @param array The array to calculate the product over.\n * @param begin Beginning index, inclusive.\n * @param end Ending index, exclusive.\n * @return The product.\n */\nexport function arrayProd(\n array: number[] | ArrayTypes, begin?: number, end?: number): number {\n if (begin == null) {\n begin = 0;\n }\n if (end == null) {\n end = array.length;\n }\n\n let prod = 1;\n for (let i = begin; i < end; ++i) {\n prod *= array[i];\n }\n return prod;\n}\n\n/**\n * A helper function transforms the two input types to an instance of Tensor1D,\n * so the return value can be fed directly into various TF.js Core functions.\n * @param array\n */\nfunction toArray1D(array: number[] | Float32Array): Tensor1D {\n array = Array.isArray(array) ? 
new Float32Array(array) : array;\n return tensor1d(array);\n}\n\n/**\n * Compute minimum value.\n * @param array\n * @return minimum value.\n */\nexport function min(array: number[] | Float32Array): number {\n return tfc.min(toArray1D(array)).dataSync()[0];\n}\n\n/**\n * Compute maximum value.\n * @param array\n * @return maximum value\n */\nexport function max(array: number[] | Float32Array): number {\n return tfc.max(toArray1D(array)).dataSync()[0];\n}\n\n/**\n * Compute sum of array.\n * @param array\n * @return The sum.\n */\nexport function sum(array: number[] | Float32Array): number {\n return tfc.sum(toArray1D(array)).dataSync()[0];\n}\n\n/**\n * Compute mean of array.\n * @param array\n * @return The mean.\n */\nexport function mean(array: number[] | Float32Array): number {\n return sum(array) / array.length;\n}\n\n/**\n * Compute variance of array.\n * @param array\n * @return The variance.\n */\nexport function variance(array: number[] | Float32Array): number {\n const demeaned = tfc.sub(toArray1D(array), scalar(mean(array)));\n const sumSquare = tfc.sum(tfc.mulStrict(demeaned, demeaned)).dataSync()[0];\n return sumSquare / array.length;\n}\n\n/**\n * Compute median of array.\n * @param array\n * @return The median value.\n */\nexport function median(array: number[] | Float32Array): number {\n const arraySorted = array.slice().sort((a, b) => a - b);\n const lowIdx = Math.floor((arraySorted.length - 1) / 2);\n const highIdx = Math.ceil((arraySorted.length - 1) / 2);\n if (lowIdx === highIdx) {\n return arraySorted[lowIdx];\n }\n return (arraySorted[lowIdx] + arraySorted[highIdx]) / 2;\n}\n\n/**\n * Generate an array of integers in [begin, end).\n * @param begin Beginning integer, inclusive.\n * @param end Ending integer, exclusive.\n * @returns Range array.\n * @throws ValueError, iff `end` < `begin`.\n */\nexport function range(begin: number, end: number): number[] {\n if (end < begin) {\n throw new ValueError(`end (${end}) < begin (${begin}) is forbidden.`);\n }\n const out: number[] = [];\n for (let i = begin; i < end; ++i) {\n out.push(i);\n }\n return out;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * deeplearn.js backend.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {onesLike as coreOnesLike, Scalar, scalar, Tensor, Tensor1D, tensor1d, Tensor2D, Tensor3D, Tensor4D, tidy, util, where, zerosLike as coreZerosLike} from '@tensorflow/tfjs-core';\n\nimport {disposeScalarCache, getScalar} from '../backend/state';\nimport {checkDataFormat, DataFormat} from '../common';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {HasShape, Shape} from '../types';\nimport * as math_utils from '../utils/math_utils';\n\nimport {imageDataFormat} from './common';\n\n// tslint:enable\n\n/* Setting and getting backend from deeplearn.js. */\n\n// Default deeplearn.js backend is WebGL (GPU).\nlet backend: 'cpu'|'webgl' = 'webgl';\n\nexport function setBackend(requestedBackend: 'cpu'|'webgl') {\n tfc.setBackend(requestedBackend);\n backend = requestedBackend;\n disposeScalarCache();\n}\n\nexport function getBackend(): 'cpu'|'webgl' {\n return backend;\n}\n\n/**\n * Indicates whether the backend is operating symbolically.\n *\n * This function will be used to determine how to interpret user code. 
If\n * it returns true, calls to the backend construct a symbolic graph; if\n * it returns false, calls to the backend execute immediately.\n */\nexport function isBackendSymbolic(): boolean {\n return false;\n}\n\n/**\n * Get the number of elements in a Tensor.\n * @param x The Tensor.\n * @return Number of elements in `x`.\n */\nexport function countParams(x: HasShape): number {\n const shape = x.shape;\n if (shape.length > 0) {\n return shape.reduce((a: number, b: number) => a * b);\n } else {\n // Scalar.\n return 1;\n }\n}\n\n/**\n * Casts a tensor to a different dtype and returns it.\n * @param x Input tensor.\n * @param dtype String: 'float32'|'int32'|'bool'.\n * @returns Tensor of the specified `dtype`.\n */\nexport function cast(x: Tensor, dtype: tfc.DataType): Tensor {\n return x.asType(dtype);\n}\n\n/**\n * Adds a 1-sized dimension at index \"axis\".\n * @param x Input tensor.\n * @param axis Position where to add the new axis.\n * @returns Result of the dimension expansion.\n */\nexport function expandDims(x: Tensor, axis = -1): Tensor {\n const outShape = x.shape.slice();\n if (axis < 0) {\n axis = outShape.length + axis + 1;\n }\n outShape.splice(axis, 0, 1);\n return x.reshape(outShape);\n}\n\n/**\n * Repeats a 2D tensor.\n *\n * If `x` has shape `[samples, dim]` and `n` is 2, for example, the output\n * will have shape `[samples, 2, dim]`.\n *\n * @param x Input tensor.\n * @param n Integer, number of times to repeat.\n * @returns The result of the repeat operation.\n * @throws ValueError: If input tensor is not 2D.\n */\nexport function repeat(x: Tensor, n: number): Tensor {\n return tidy(() => {\n if (x.shape.length !== 2) {\n throw new ValueError(\n `repeat() expects a rank-2 tensor, but received a ` +\n `rank-${x.shape.length} tensor.`);\n }\n const y = expandDims(x, 1);\n return tile(y, [1, n, 1]);\n });\n}\n\n/**\n * Flatten an Tensor into 1D.\n * @param x Input tensor.\n * @return The result of the flattening `x`.\n */\nexport function flatten(x: Tensor): Tensor {\n const newShape = [math_utils.arrayProd(x.shape)];\n return x.reshape(newShape);\n}\n\n/**\n * Turn a nD tensor into a 2D tensor with same 0th dimension.\n * In other words, it flattens each data samples of a batch.\n *\n * @param x The tensor to flatten. The rank of this tensor is required to be 2\n * or higher.\n * @return The result of the flattening.\n */\nexport function batchFlatten(x: Tensor): Tensor {\n if (x.rank <= 1) {\n throw new ValueError(\n `batchFlatten requires a minimum rank of 2. 
Got rank: ${x.rank}.`);\n }\n const newShape = [x.shape[0], math_utils.arrayProd(x.shape, 1)];\n return x.reshape(newShape);\n}\n\n/**\n * Do slicing along the first axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the first axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongFirstAxis(\n array: Tensor, start: number, size: number): Tensor {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array as Tensor1D, start, size);\n case 2:\n return tfc.slice2d(\n array as Tensor2D, [start, 0], [size, array.shape[1]]);\n case 3:\n return tfc.slice3d(\n array as Tensor3D, [start, 0, 0],\n [size, array.shape[1], array.shape[2]]);\n case 4:\n return tfc.slice4d(\n array as Tensor4D, [start, 0, 0, 0],\n [size, array.shape[1], array.shape[2], array.shape[3]]);\n default:\n throw new ValueError(\n `sliceAlongFirstAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n\n/**\n * Do slicing along the last axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size size of the slice along the last axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongLastAxis(\n array: Tensor, start: number, size: number): Tensor {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array as Tensor1D, start, size);\n case 2:\n return tfc.slice2d(\n array as Tensor2D, [0, start], [array.shape[0], size]);\n case 3:\n return tfc.slice3d(\n array as Tensor3D, [0, 0, start],\n [array.shape[0], array.shape[1], size]);\n case 4:\n return tfc.slice4d(\n array as Tensor4D, [0, 0, 0, start],\n [array.shape[0], array.shape[1], array.shape[2], size]);\n default:\n throw new ValueError(\n `sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n\n/**\n * Do slicing along the sepcified axis.\n * @param array input `tf.Tensor`.\n * @param start starting index, inclusive.\n * @param size of the slice along the chosen axis.\n * @param choose an axis.\n * @returns result of the slicing.\n * @throws ValueError: If `array` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function sliceAlongAxis(\n array: Tensor, start: number, size: number, axis: number): Tensor {\n return tidy(() => {\n switch (array.rank) {\n case 1:\n return tfc.slice1d(array as Tensor1D, start, size);\n case 2:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(\n `The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 3:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice3d(\n array as Tensor3D, [0, start, 0],\n [array.shape[0], size, array.shape[2]]);\n case 3:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(\n `The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n case 4:\n switch (axis) {\n case 1:\n return sliceAlongFirstAxis(array, start, size);\n case 2:\n return tfc.slice4d(\n array as Tensor4D, [0, start, 0, 0],\n [array.shape[0], size, array.shape[2], array.shape[3]]);\n case 3:\n return tfc.slice4d(\n array as Tensor4D, [0, 0, start, 0],\n [array.shape[0], array.shape[1], size, 
array.shape[3]]);\n case 4:\n return sliceAlongLastAxis(array, start, size);\n default:\n throw new ValueError(\n `The axis is not within the rank of the tensor ` +\n `${axis}`);\n }\n default:\n throw new ValueError(\n `sliceAlongLastAxis() received an unsupported tensor rank: ` +\n `${array.rank}`);\n }\n });\n}\n\n/**\n * Concatenates a list of tensors alongside the specified axis.\n * @param tensors `Array` of tensors to concatenate.\n * @param axis Concatenation axis.\n * @returns The result of the concatenation.\n */\nexport function concatenate(tensors: Tensor[], axis = -1): Tensor {\n let rank: number;\n if (axis < 0) {\n rank = tensors[0].rank;\n if (rank !== 0) {\n axis = rank;\n } else {\n axis = 0;\n }\n }\n if (axis === tensors[0].rank) {\n // Porting Note: This is necessary because tfc.concat() requires axis to be\n // in the interval [-rank, rank).\n axis = -1;\n }\n // Porting Note: Sparse concat is not supported yet.\n return tfc.concat(tensors, axis);\n}\n\n/**\n * Concatenate two arrays along the first dimension.\n * @param a The 1st `tf.Tensor` to concatenate.\n * @param b The 2nd `tf.Tensor` to concatenate.\n * @returns Result of the concatenation.\n * @throws ValueError: If `a` is of an unsupported subtype of `tf.Tensor`.\n */\nexport function concatAlongFirstAxis(a: Tensor, b: Tensor): Tensor {\n switch (a.rank) {\n case 1:\n return tfc.concat1d([a as Tensor1D, b as Tensor1D]);\n case 2:\n return tfc.concat2d([a as Tensor2D, b as Tensor2D], 0);\n case 3:\n return tfc.concat3d([a as Tensor3D, b as Tensor3D], 0);\n case 4:\n return tfc.concat4d([a as Tensor4D, b as Tensor4D], 0);\n default:\n throw new ValueError(\n 'concatAlongFirstAxis() received an unsupported tensor rank: ' +\n a.rank);\n }\n}\n\n/**\n * Creates a tensor by tiling `x` by `n`.\n * @param x A tensor.\n * @param n An Array of integers or a single integer. If an Array, the length\n * must be the same as the number of dimensions in `x`. If a single integer,\n * it will be treated as an Array of length 1.\n */\nexport function tile(x: Tensor, n: number|number[]): Tensor {\n if (!Array.isArray(n)) {\n n = [n];\n }\n if (x.rank !== n.length) {\n throw new ValueError(\n `The length of input n (${n.length}) does not match ` +\n `the number of dimensions in input x (${x.rank})`);\n }\n return tfc.tile(x, n);\n}\n\n/* Creation of random tensors. */\n\n\n/**\n * Get a tensor with normal distribution of values.\n *\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @return The normal tensor.\n */\nexport function randomNormal(\n shape: Shape, mean = 0.0, stddev = 1.0, dtype?: 'float32'|'int32',\n seed?: number): Tensor {\n return tfc.randomNormal(shape, mean, stddev, dtype, seed);\n}\n\n/* Linear Algebra */\n\n/**\n * Multiply two tensors and returns the result as a tensor.\n *\n * For 2D tensors, this is equivalent to matrix multiplication (matMul).\n * For tensors of higher ranks, it follows the Theano behavior,\n * (e.g. `(2, 3) * (4, 3, 5) -> (2, 4, 5)`). 
From the Theano documentation:\n *\n * For N dimensions it is a sum product over the last axis of x and the\n * second-to-last of y:\n *\n * @param x A tensor of at least rank 2.\n * @param y A tensor of at least rank 2.\n * @return Result of the dot operation.\n */\nexport function dot(x: Tensor, y: Tensor): Tensor {\n if ((x.rank < 2) || (y.rank < 2)) {\n throw new NotImplementedError(\n `dot requires both inputs to be rank >= 2` +\n ` but got x shape = ${x.shape} and y shape = ${y.shape}`);\n }\n if (y.rank >= 3) {\n const xLastDim = x.shape.slice(-1)[0];\n const ySecondLastDim = y.shape.slice(-2)[0];\n if (xLastDim !== ySecondLastDim) {\n throw new NotImplementedError(\n `If rank y >= 3, then the second last dim` +\n ` of y must equal the last dim of x but got x shape = ${\n x.shape} and ` +\n ` y shape = ${y.shape}`);\n }\n }\n // Handle basic 2D x 2D case.\n if ((x.rank === 2) && (y.rank === 2)) {\n return tfc.matMul(x as Tensor2D, y as Tensor2D);\n } else {\n // Reshape x into the analogous 2D Tensor.\n const xFirstDims = x.shape.slice(); // Holds all but the last dim of x.\n const xLastDim = xFirstDims.pop();\n x = x.reshape([-1, xLastDim]);\n\n // Reshape y into the analogous 2D Tensor, and keep track of the\n // required dimensions to reproduce the output shape.\n const yShape = y.shape.slice();\n const yLastDim = yShape.pop();\n const ySecondLastDim = yShape.pop();\n const yOtherDims = [...yShape, yLastDim];\n // permutation should be like [r-2, 0, 1, 2, ... r-4, r-3, r-1]\n // where r is the rank of y.\n const perm = Array.from({length: y.rank}, (_, i) => {\n if (i === 0) {\n return y.rank - 2;\n } else if (i <= y.rank - 2) {\n return i - 1;\n }\n return i;\n });\n y = y.transpose(perm).reshape([ySecondLastDim, -1]);\n\n // Multiply x and y as 2D Tensors, and then reshape back to original.\n const outputShape = [...xFirstDims, ...yOtherDims];\n return tfc.matMul(x as Tensor2D, y as Tensor2D).reshape(outputShape);\n }\n}\n\n/**\n * Compute the sign Tensor of an input Tensor.\n *\n * Elements of the input `tf.Tensor` that are === 0 are mapped to 0.\n * Elements of the input `tf.Tensor` that are > 0 are mapped to 1.\n * Elements of the input `tf.Tensor` that are < 0 are mapped to -1.\n *\n * @param x Input `tf.Tensor`.\n * @return The sign `tf.Tensor`.\n */\nexport function sign(x: Tensor): Tensor {\n // TODO(cais): Move to the core.\n return tidy(() => {\n const zerosLikeX = coreZerosLike(x);\n const onesLikeX = coreOnesLike(x);\n return where(\n tfc.equal(x, zerosLikeX), zerosLikeX,\n where(\n tfc.greater(x, coreZerosLike(x)), onesLikeX,\n tfc.mul(getScalar(-1), onesLikeX)));\n });\n}\n\n/**\n * Computes the one-hot representation of an integer tensor.\n * @param indices nD integer tensor of shape\n * `(batch_size, dim1, dim2, ... dim(n-1))`\n * @param numClasses Integer, number of classes to consider.\n * @returns (n + 1)D one hot representation of the input\n * with shape `(batch_size, dim1, dim2, ... dim(n-1), num_classes)`\n */\nexport function oneHot(indices: Tensor, numClasses: number): Tensor {\n return tidy(() => {\n if (indices.rank !== 1) {\n throw new Error(\n 'Only 1D one-hot tensors are supported in the ' +\n 'deeplearn backend, at present.');\n }\n indices = indices.toInt();\n return tfc.oneHot(indices as Tensor1D, numClasses).toFloat();\n });\n}\n\n/* Elementary math functions. 
*/\n\n/**\n * Retrieves the elements of indices `indices` in the tensor `reference`.\n * @param reference A tensor.\n * @param indices An integer tensor of indices or an `Array` of integers.\n * @param axis Axis along which to perform the gather operation.\n * @returns The result of the gathering as a tensor.\n */\nexport function gather(\n reference: Tensor, indices: number[]|Tensor1D, axis?: number): Tensor {\n return tidy(() => {\n if (Array.isArray(indices)) {\n indices = tensor1d(indices, 'int32');\n } else {\n indices = indices.toInt();\n }\n return tfc.gather(reference, indices, axis);\n });\n}\n\n/**\n * Element-wise square.\n * @param x Input tensor.\n * @return element-wise x^2\n */\nexport function square(x: Tensor): Tensor {\n return tfc.mulStrict(x, x);\n}\n\n/**\n * Element-wise exponentiation.\n *\n * Porting Note: In PyKeras, `a` (the exponent) is a Python integer, which\n * takes advatnage of the backend's (e.g., TensorFlow's) automatic conversion\n * to tensor. Here we allow `a` to be either a number or a tensor.\n *\n * @param x The base tensor.\n * @param a The exponent, tensor or number. If a number, it is rounded to the\n * nearest integer and converted to a tensor.\n * @returns A tensor of the same shape as `x`.\n */\nexport function pow(x: Tensor, a: Tensor|number): Tensor {\n return tidy(() => {\n if (typeof (a) === 'number') {\n a = scalar(Math.round(a), 'int32');\n }\n if (a.dtype !== 'int32') {\n throw new NotImplementedError(\n `Non-int32 dtype (${a.dtype}) is not supported by pow() yet`);\n }\n return tfc.pow(x, a as Tensor);\n });\n}\n\n/* Neural-network operations. */\n\n/**\n * Add a bias to a tensor.\n *\n * @param x The tensor to add the bias to.\n * @param bias The bias to add to `x`. Must be 1D or the same rank as `x`.\n * @return Result of the bias adding.\n * @throws ValueError: If the rank of `bias` is incorrect.\n */\nexport function biasAdd(\n x: Tensor, bias: Tensor, dataFormat?: DataFormat): Tensor {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n\n if (bias.rank !== 1 && bias.rank !== x.rank) {\n throw new ValueError(\n 'Unexpected bias dimensions: ' + bias.rank +\n '; expected it to be 1 or ' + x.rank);\n }\n const biasShape = bias.shape;\n\n let y: Tensor;\n if (x.rank === 5) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n y = x.add(bias.reshape([1, biasShape[0], 1, 1, 1]));\n } else {\n y = x.add(bias.reshape(\n [1, biasShape[3], biasShape[0], biasShape[1], biasShape[2]]));\n }\n } else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n y = x.add(bias.reshape([1, 1, 1, 1, biasShape[0]]));\n } else {\n y = x.add(bias.reshape([1].concat(biasShape)));\n }\n }\n } else if (x.rank === 4) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n y = x.add(bias.reshape([1, biasShape[0], 1, 1]));\n } else {\n y = x.add(\n bias.reshape([1, biasShape[2], biasShape[0], biasShape[1]]));\n }\n } else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n y = x.add(bias.reshape([1, 1, 1, biasShape[0]]));\n } else {\n y = x.add(bias.reshape([1].concat(biasShape)));\n }\n }\n } else if (x.rank === 3) {\n if (dataFormat === 'channelsFirst') {\n if (biasShape.length === 1) {\n y = x.add(bias.reshape([1, biasShape[0], 1]));\n } else {\n y = x.add(bias.reshape([1, biasShape[1], biasShape[0]]));\n }\n } else if (dataFormat === 'channelsLast') {\n if (biasShape.length === 1) {\n y = x.add(bias.reshape([1, 1, 
biasShape[0]]));\n } else {\n y = x.add(bias.reshape([1].concat(biasShape)));\n }\n }\n } else if (x.rank < 3) {\n y = x.add(bias);\n } else {\n throw new ValueError(`Unsupported input rank by biasAdd: ${x.rank}`);\n }\n return y;\n });\n}\n\n/**\n * Exponential linear unit (ELU).\n * @param x A tensor or variable to compute the activation function for.\n * @param alpha: A scalar, a scaling factor for the negative section.\n * @return Output of the ELU operation.\n */\nexport function elu(x: Tensor, alpha = 1): Tensor {\n // TODO(cais): Add support for alpha values other than 1.\n if (alpha !== 1) {\n throw new NotImplementedError(\n `Support for alpha values other than 1 (${alpha}) is not implemented ` +\n `yet.`);\n }\n return tfc.elu(x);\n}\n\n/**\n * Softsign of a tensor.\n *\n * Defined as x / (abs(x) + 1), element-wise.\n *\n * @param x: Input.\n * @returns Output.\n */\nexport function softsign(x: Tensor): Tensor {\n return tidy(() => tfc.div(x, tfc.add(getScalar(1), tfc.abs(x))));\n}\n\n/**\n * Sets entries in `x` to zero at random, while scaling the entire tensor.\n *\n * @param x input tensor.\n * @param level fraction of the entries in the tensor that will be set to 0.\n * @param noiseShape shape of randomly generated keep/drop flags, must be\n * broadcastable to the shape of `x`.\n * @param seed random seed to ensure determinism.\n * @returns Result of the dropout operation.\n */\nexport function dropout(\n x: Tensor, level: Scalar, noiseShape?: number[], seed?: number): Tensor {\n return tidy(() => {\n // TODO(cais): Switch to deeplearn.js implementation of dropout when it\n // becomes avaialable.\n if (noiseShape != null && !util.arraysEqual(x.shape, noiseShape)) {\n throw new NotImplementedError(\n 'Non-default noise shape is not implemented yet: ' +\n JSON.stringify(noiseShape));\n }\n if (seed != null) {\n throw new NotImplementedError('seed is not implemented for dropout yet.');\n }\n let multiplier = tfc.step(tfc.add(\n tfc.neg(level) as Scalar, tfc.randomUniform(x.shape, 0, 1, 'float32')));\n // Scale the kept elements, so the expected sum is unchanged.\n multiplier = tfc.mul(\n tfc.div(getScalar(1), tfc.sub(getScalar(1), level)) as Scalar,\n multiplier);\n return tfc.mul(x, multiplier);\n });\n}\n\n/**\n * Element-wise, segment-wise linear approximation of sigmoid.\n *\n * Returns `0.` if `x < -2.5`, `1.` if `x > 2.5`.\n * In `-2.5 <= x <= 2.5`, returns `0.2 * x + 0.5`.\n *\n * @param x Input tensor.\n * @returns Output tensor.\n */\nexport function hardSigmoid(x: Tensor): Tensor {\n return tidy(() => {\n const y = tfc.add(getScalar(0.5), tfc.mul(getScalar(0.2), x));\n return tfc.clipByValue(y, 0, 1);\n });\n}\n\n/**\n * Invoke `x` in the training phase, and `alt` otherwise.\n *\n * Porting Note: We do not create placeholder tensors for the `training` boolean\n * flag here, because there is no such thing in the TF.js imperative backend.\n *\n * @param x The function to invoke iff `training` is `true`.\n * @param alt The function to invoke iff `training` is `false`.\n * @param training Boolean flag for whether training phase is active.\n * @returns The return value of `x()` if `training` is `true`, or the return\n * value of `alt()` if `training` is `false`.\n */\nexport function inTrainPhase<T>(x: () => T, alt: () => T, training = false): T {\n return training ? 
x() : alt();\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport {DataType, eye, linalg, mul, ones, randomUniform, scalar, Scalar, serialization, Tensor, Tensor2D, tidy, truncatedNormal, zeros} from '@tensorflow/tfjs-core';\n\nimport {getScalar} from './backend/state';\nimport * as K from './backend/tfjs_backend';\nimport {checkDataFormat, DataFormat} from './common';\nimport {NotImplementedError, ValueError} from './errors';\nimport {Shape} from './types';\nimport {checkStringTypeUnionValue, deserializeKerasObject, serializeKerasObject} from './utils/generic_utils';\nimport {arrayProd} from './utils/math_utils';\n\n\n/** @docinline */\nexport type FanMode = 'fanIn'|'fanOut'|'fanAvg';\nexport const VALID_FAN_MODE_VALUES = ['fanIn', 'fanOut', 'fanAvg'];\nexport function checkFanMode(value?: string): void {\n checkStringTypeUnionValue(VALID_FAN_MODE_VALUES, 'FanMode', value);\n}\n\n/** @docinline */\nexport type Distribution = 'normal'|'uniform';\nexport const VALID_DISTRIBUTION_VALUES = ['normal', 'uniform'];\nexport function checkDistribution(value?: string): void {\n checkStringTypeUnionValue(VALID_DISTRIBUTION_VALUES, 'Distribution', value);\n}\n\n/**\n * Initializer base class.\n *\n * @doc {\n * heading: 'Initializers', subheading: 'Classes', namespace: 'initializers'}\n */\nexport abstract class Initializer extends serialization.Serializable {\n public fromConfigUsesCustomObjects(): boolean {\n return false;\n }\n /**\n * Generate an initial value.\n * @param shape\n * @param dtype\n * @return The init value.\n */\n abstract apply(shape: Shape, dtype?: DataType): Tensor;\n\n getConfig(): serialization.ConfigDict {\n return {};\n }\n}\n\n/**\n * Initializer that generates tensors initialized to 0.\n */\nexport class Zeros extends Initializer {\n static className = 'Zeros';\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n return zeros(shape, dtype);\n }\n}\nserialization.registerClass(Zeros);\n\n/**\n * Initializer that generates tensors initialized to 1.\n */\nexport class Ones extends Initializer {\n static className = 'Ones';\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n return ones(shape, dtype);\n }\n}\nserialization.registerClass(Ones);\n\nexport interface ConstantConfig {\n /** The value for each element in the variable. */\n value: number;\n}\n\n/**\n * Initializer that generates values initialized to some constant.\n */\nexport class Constant extends Initializer {\n static className = 'Constant';\n private value: number;\n constructor(config: ConstantConfig) {\n super();\n if (typeof config !== 'object') {\n throw new ValueError(\n `Expected argument of type ConstantConfig but got ${config}`);\n }\n if (config.value === undefined) {\n throw new ValueError(`config must have value set but got ${config}`);\n }\n this.value = config.value;\n }\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n return tidy(() => mul(scalar(this.value), ones(shape, dtype)));\n }\n\n getConfig(): serialization.ConfigDict {\n return {\n value: this.value,\n };\n }\n}\nserialization.registerClass(Constant);\n\nexport interface RandomUniformConfig {\n /** Lower bound of the range of random values to generate. */\n minval?: number;\n /** Upper bound of the range of random values to generate. 
*/\n maxval?: number;\n /** Used to seed the random generator. */\n seed?: number;\n}\n\n/**\n * Initializer that generates random values initialized to a uniform\n * distribution.\n *\n * Values will be distributed uniformly between the configured minval and\n * maxval.\n */\nexport class RandomUniform extends Initializer {\n static className = 'RandomUniform';\n readonly DEFAULT_MINVAL = -0.05;\n readonly DEFAULT_MAXVAL = 0.05;\n private minval: number;\n private maxval: number;\n private seed: number;\n\n constructor(config: RandomUniformConfig) {\n super();\n this.minval = config.minval || this.DEFAULT_MINVAL;\n this.maxval = config.maxval || this.DEFAULT_MAXVAL;\n this.seed = config.seed;\n }\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n return randomUniform(shape, this.minval, this.maxval, dtype);\n }\n\n getConfig(): serialization.ConfigDict {\n return {minval: this.minval, maxval: this.maxval, seed: this.seed};\n }\n}\nserialization.registerClass(RandomUniform);\n\nexport interface RandomNormalConfig {\n /** Mean of the random values to generate. */\n mean?: number;\n /** Standard deviation of the random values to generate. */\n stddev?: number;\n /** Used to seed the random generator. */\n seed?: number;\n}\n\n/**\n * Initializer that generates random values initialized to a normal\n * distribution.\n */\nexport class RandomNormal extends Initializer {\n static className = 'RandomNormal';\n readonly DEFAULT_MEAN = 0.;\n readonly DEFAULT_STDDEV = 0.05;\n private mean: number;\n private stddev: number;\n private seed: number;\n\n constructor(config: RandomNormalConfig) {\n super();\n this.mean = config.mean || this.DEFAULT_MEAN;\n this.stddev = config.stddev || this.DEFAULT_STDDEV;\n this.seed = config.seed;\n }\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(\n `randomNormal does not support dType ${dtype}.`);\n }\n\n return K.randomNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n\n getConfig(): serialization.ConfigDict {\n return {mean: this.mean, stddev: this.stddev, seed: this.seed};\n }\n}\nserialization.registerClass(RandomNormal);\n\nexport interface TruncatedNormalConfig {\n /** Mean of the random values to generate. */\n mean?: number;\n /** Standard deviation of the random values to generate. */\n stddev?: number;\n /** Used to seed the random generator. 
*/\n seed?: number;\n}\n\n/**\n * Initializer that generates random values initialized to a truncated normal.\n * distribution.\n *\n * These values are similar to values from a `RandomNormal` except that values\n * more than two standard deviations from the mean are discarded and re-drawn.\n * This is the recommended initializer for neural network weights and filters.\n */\nexport class TruncatedNormal extends Initializer {\n static className = 'TruncatedNormal';\n\n readonly DEFAULT_MEAN = 0.;\n readonly DEFAULT_STDDEV = 0.05;\n private mean: number;\n private stddev: number;\n private seed: number;\n\n constructor(config: TruncatedNormalConfig) {\n super();\n this.mean = config.mean || this.DEFAULT_MEAN;\n this.stddev = config.stddev || this.DEFAULT_STDDEV;\n this.seed = config.seed;\n }\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(\n `truncatedNormal does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, this.mean, this.stddev, dtype, this.seed);\n }\n\n getConfig(): serialization.ConfigDict {\n return {mean: this.mean, stddev: this.stddev, seed: this.seed};\n }\n}\nserialization.registerClass(TruncatedNormal);\n\nexport interface IdentityConfig {\n /**\n * Multiplicative factor to apply to the identity matrix.\n */\n gain?: number;\n}\n\n/**\n * Initializer that generates the identity matrix.\n * Only use for square 2D matrices.\n */\nexport class Identity extends Initializer {\n static className = 'Identity';\n private gain: Scalar;\n constructor(config: IdentityConfig) {\n super();\n this.gain = config.gain != null ? scalar(config.gain) : getScalar(1.0);\n }\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n return tidy(() => {\n if (shape.length !== 2 || shape[0] !== shape[1]) {\n throw new ValueError(\n 'Identity matrix initializer can only be used for' +\n ' 2D square matrices.');\n } else {\n return mul(this.gain, eye(shape[0]));\n }\n });\n }\n\n getConfig(): serialization.ConfigDict {\n return {gain: this.gain.get()};\n }\n}\nserialization.registerClass(Identity);\n\n/**\n * Computes the number of input and output units for a weight shape.\n * @param shape Shape of weight.\n * @param dataFormat data format to use for convolution kernels.\n * Note that all kernels in Keras are standardized on the\n * CHANNEL_LAST ordering (even when inputs are set to CHANNEL_FIRST).\n * @return An length-2 array: fanIn, fanOut.\n */\nfunction computeFans(\n shape: Shape, dataFormat: DataFormat = 'channelsLast'): number[] {\n let fanIn: number;\n let fanOut: number;\n checkDataFormat(dataFormat);\n if (shape.length === 2) {\n fanIn = shape[0];\n fanOut = shape[1];\n } else if ([3, 4, 5].indexOf(shape.length) !== -1) {\n if (dataFormat === 'channelsFirst') {\n const receptiveFieldSize = arrayProd(shape, 2);\n fanIn = shape[1] * receptiveFieldSize;\n fanOut = shape[0] * receptiveFieldSize;\n } else if (dataFormat === 'channelsLast') {\n const receptiveFieldSize = arrayProd(shape, 0, shape.length - 2);\n fanIn = shape[shape.length - 2] * receptiveFieldSize;\n fanOut = shape[shape.length - 1] * receptiveFieldSize;\n }\n } else {\n const shapeProd = arrayProd(shape);\n fanIn = Math.sqrt(shapeProd);\n fanOut = Math.sqrt(shapeProd);\n }\n\n return [fanIn, fanOut];\n}\n\nexport interface VarianceScalingConfig {\n /** Scaling factor (positive float). */\n scale: number;\n\n /** Fanning mode for inputs and outputs. 
*/\n mode: FanMode;\n\n /** Probabilistic distribution of the values. */\n distribution: Distribution;\n\n /** Random number generator seed. */\n seed?: number;\n}\n\n\n/**\n * Initializer capable of adapting its scale to the shape of weights.\n * With distribution=NORMAL, samples are drawn from a truncated normal\n * distribution centered on zero, with `stddev = sqrt(scale / n)` where n is:\n * - number of input units in the weight tensor, if mode = FAN_IN.\n * - number of output units, if mode = FAN_OUT.\n * - average of the numbers of input and output units, if mode = FAN_AVG.\n * With distribution=UNIFORM,\n * samples are drawn from a uniform distribution\n * within [-limit, limit], with `limit = sqrt(3 * scale / n)`.\n */\nexport class VarianceScaling extends Initializer {\n static className = 'VarianceScaling';\n private scale: number;\n private mode: FanMode;\n private distribution: Distribution;\n private seed: number;\n\n /**\n * Constructor of VarianceScaling.\n * @throws ValueError for invalid value in scale.\n */\n constructor(config: VarianceScalingConfig) {\n super();\n if (config.scale < 0.0) {\n throw new ValueError(\n `scale must be a positive float. Got: ${config.scale}`);\n }\n this.scale = config.scale == null ? 1.0 : config.scale;\n this.mode = config.mode;\n checkFanMode(this.mode);\n this.distribution = config.distribution;\n checkDistribution(this.distribution);\n this.seed = config.seed;\n }\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n const fans = computeFans(shape);\n const fanIn = fans[0];\n const fanOut = fans[1];\n let scale = this.scale;\n if (this.mode === 'fanIn') {\n scale /= Math.max(1, fanIn);\n } else if (this.mode === 'fanOut') {\n scale /= Math.max(1, fanOut);\n } else {\n scale /= Math.max(1, (fanIn + fanOut) / 2);\n }\n\n if (this.distribution === 'normal') {\n const stddev = Math.sqrt(scale);\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(\n `${this.getClassName()} does not support dType ${dtype}.`);\n }\n return truncatedNormal(shape, 0, stddev, dtype, this.seed);\n } else {\n const limit = Math.sqrt(3 * scale);\n return randomUniform(shape, -limit, limit, dtype);\n }\n }\n\n getConfig(): serialization.ConfigDict {\n return {\n scale: this.scale,\n mode: this.mode,\n distribution: this.distribution,\n seed: this.seed\n };\n }\n}\nserialization.registerClass(VarianceScaling);\n\nexport interface SeedOnlyInitializerConfig {\n /** Random number generator seed. */\n seed?: number;\n}\n\n/**\n * Glorot uniform initializer, also called Xavier uniform initializer.\n * It draws samples from a uniform distribution within [-limit, limit]\n * where `limit` is `sqrt(6 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf.\n */\nexport class GlorotUniform extends VarianceScaling {\n static className = 'GlorotUniform';\n\n /**\n * Constructor of GlorotUniform\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(config?: SeedOnlyInitializerConfig) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'uniform',\n seed: config == null ? null : config.seed\n });\n }\n\n getClassName(): string {\n // In Python Keras, GlorotUniform is not a class, but a helper method\n // that creates a VarianceScaling object. 
Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\nserialization.registerClass(GlorotUniform);\n\n/**\n * Glorot normal initializer, also called Xavier normal initializer.\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / (fan_in + fan_out))`\n * where `fan_in` is the number of input units in the weight tensor\n * and `fan_out` is the number of output units in the weight tensor.\n *\n * Reference:\n * Glorot & Bengio, AISTATS 2010\n * http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf\n */\nexport class GlorotNormal extends VarianceScaling {\n static className = 'GlorotNormal';\n\n /**\n * Constructor of GlorotNormal.\n * @param scale\n * @param mode\n * @param distribution\n * @param seed\n */\n constructor(config?: SeedOnlyInitializerConfig) {\n super({\n scale: 1.0,\n mode: 'fanAvg',\n distribution: 'normal',\n seed: config == null ? null : config.seed\n });\n }\n\n getClassName(): string {\n // In Python Keras, GlorotNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\nserialization.registerClass(GlorotNormal);\n\n/**\n * He normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(2 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * Reference:\n * He et al., http://arxiv.org/abs/1502.01852\n */\nexport class HeNormal extends VarianceScaling {\n static className = 'HeNormal';\n\n constructor(config?: SeedOnlyInitializerConfig) {\n super({\n scale: 2.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: config == null ? null : config.seed\n });\n }\n\n getClassName(): string {\n // In Python Keras, HeNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\nserialization.registerClass(HeNormal);\n\n/**\n * LeCun normal initializer.\n *\n * It draws samples from a truncated normal distribution centered on 0\n * with `stddev = sqrt(1 / fanIn)`\n * where `fanIn` is the number of input units in the weight tensor.\n *\n * References:\n * [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)\n * [Efficient Backprop](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)\n */\nexport class LeCunNormal extends VarianceScaling {\n static className = 'LeCunNormal';\n\n constructor(config?: SeedOnlyInitializerConfig) {\n super({\n scale: 1.0,\n mode: 'fanIn',\n distribution: 'normal',\n seed: config == null ? null : config.seed\n });\n }\n\n getClassName(): string {\n // In Python Keras, LeCunNormal is not a class, but a helper method\n // that creates a VarianceScaling object. Use 'VarianceScaling' as\n // class name to be compatible with that.\n return VarianceScaling.className;\n }\n}\nserialization.registerClass(LeCunNormal);\n\nexport interface OrthogonalConfig extends SeedOnlyInitializerConfig {\n /**\n * Multiplicative factor to apply to the orthogonal matrix. 
Defaults to 1.\n */\n gain?: number;\n}\n\n/**\n * Initializer that generates a random orthogonal matrix.\n *\n * Reference:\n * [Saxe et al., http://arxiv.org/abs/1312.6120](http://arxiv.org/abs/1312.6120)\n */\nexport class Orthogonal extends Initializer {\n static className = 'Orthogonal';\n readonly DEFAULT_GAIN = 1;\n protected readonly gain: number;\n protected readonly seed: number;\n\n constructor(config?: OrthogonalConfig) {\n super();\n this.gain = config.gain == null ? this.DEFAULT_GAIN : config.gain;\n this.seed = config.seed;\n\n if (this.seed != null) {\n throw new NotImplementedError(\n 'Random seed is not implemented for Orthogonal Initializer yet.');\n }\n }\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n return tidy(() => {\n if (shape.length !== 2) {\n throw new NotImplementedError(\n 'The Orthogonal Initializer does not support non-2D shapes yet.');\n }\n if (shape[0] * shape[1] > 2000) {\n console.warn(\n `Orthogonal initializer is being called on a matrix with more ` +\n `than 2000 (${shape[0] * shape[1]}) elements: ` +\n `Slowness may result.`);\n }\n\n // TODO(cais): Add seed support.\n const normalizedShape =\n shape[0] > shape[1] ? [shape[1], shape[0]] : shape;\n const a = K.randomNormal(normalizedShape, 0, 1, 'float32') as Tensor2D;\n let q = linalg.gramSchmidt(a) as Tensor2D;\n if (shape[0] > shape[1]) {\n q = q.transpose();\n }\n return mul(getScalar(this.gain), q);\n });\n }\n\n getConfig(): serialization.ConfigDict {\n return {\n gain: this.gain,\n seed: this.seed,\n };\n }\n}\nserialization.registerClass(Orthogonal);\n\n/** @docinline */\nexport type InitializerIdentifier = 'constant'|'glorotNormal'|'glorotUniform'|\n 'heNormal'|'identity'|'leCunNormal'|'ones'|'orthogonal'|'randomNormal'|\n 'randomUniform'|'truncatedNormal'|'varianceScaling'|'zeros'|string;\n\n// Maps the JavaScript-like identifier keys to the corresponding registry\n// symbols.\nexport const INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP:\n {[identifier in InitializerIdentifier]: string} = {\n 'constant': 'Constant',\n 'glorotNormal': 'GlorotNormal',\n 'glorotUniform': 'GlorotUniform',\n 'heNormal': 'HeNormal',\n 'identity': 'Identity',\n 'leCunNormal': 'LeCunNormal',\n 'ones': 'Ones',\n 'orthogonal': 'Orthogonal',\n 'randomNormal': 'RandomNormal',\n 'randomUniform': 'RandomUniform',\n 'truncatedNormal': 'TruncatedNormal',\n 'varianceScaling': 'VarianceScaling',\n 'zeros': 'Zeros'\n };\n\nfunction deserializeInitializer(\n config: serialization.ConfigDict,\n customObjects: serialization.ConfigDict = {}): Initializer {\n return deserializeKerasObject(\n config, serialization.SerializationMap.getMap().classNameMap,\n customObjects, 'initializer');\n}\n\nexport function serializeInitializer(initializer: Initializer):\n serialization.ConfigDictValue {\n return serializeKerasObject(initializer);\n}\n\nexport function getInitializer(identifier: InitializerIdentifier|Initializer|\n serialization.ConfigDict): Initializer {\n if (typeof identifier === 'string') {\n const className = identifier in INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n /* We have four 'helper' classes for common initializers that\n all get serialized as 'VarianceScaling' and shouldn't go through\n the deserializeInitializer pathway. 
*/\n if (className === 'GlorotUniform') {\n return new GlorotUniform();\n } else if (className === 'GlorotNormal') {\n return new GlorotNormal();\n } else if (className === 'HeNormal') {\n return new HeNormal();\n } else if (className === 'LeCunNormal') {\n return new LeCunNormal();\n } else {\n const config = {className, config: {}};\n return deserializeInitializer(config);\n }\n } else if (identifier instanceof Initializer) {\n return identifier;\n } else {\n return deserializeInitializer(identifier);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport {Constant, ConstantConfig, GlorotNormal, GlorotUniform, HeNormal, Identity, IdentityConfig, Initializer, LeCunNormal, Ones, Orthogonal, OrthogonalConfig, RandomNormal, RandomNormalConfig, RandomUniform, RandomUniformConfig, SeedOnlyInitializerConfig, TruncatedNormal, TruncatedNormalConfig, VarianceScaling, VarianceScalingConfig, Zeros} from './initializers';\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'Zeros'\n * }\n */\nexport function zeros(): Zeros {\n return new Zeros();\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'Ones'\n * }\n */\nexport function ones(): Initializer {\n return new Ones();\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'Constant',\n * configParamIndices: [0]\n * }\n */\nexport function constant(config: ConstantConfig): Initializer {\n return new Constant(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'RandomUniform',\n * configParamIndices: [0]\n * }\n */\nexport function randomUniform(config: RandomUniformConfig): Initializer {\n return new RandomUniform(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'RandomNormal',\n * configParamIndices: [0]\n * }\n */\nexport function randomNormal(config: RandomNormalConfig): Initializer {\n return new RandomNormal(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'TruncatedNormal',\n * configParamIndices: [0]\n * }\n */\nexport function truncatedNormal(config: TruncatedNormalConfig): Initializer {\n return new TruncatedNormal(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'Identity',\n * configParamIndices: [0]\n * }\n */\nexport function identity(config: IdentityConfig): Initializer {\n return new Identity(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'VarianceScaling',\n * configParamIndices: [0]\n * }\n */\nexport function varianceScaling(config: VarianceScalingConfig): Initializer {\n return new VarianceScaling(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'GlorotUniform',\n * configParamIndices: [0]\n * }\n */\nexport function glorotUniform(config: SeedOnlyInitializerConfig): Initializer {\n return new GlorotUniform(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'GlorotNormal',\n * configParamIndices: [0]\n * }\n 
*/\nexport function glorotNormal(config: SeedOnlyInitializerConfig): Initializer {\n return new GlorotNormal(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'HeNormal',\n * configParamIndices: [0]\n * }\n */\nexport function heNormal(config: SeedOnlyInitializerConfig): Initializer {\n return new HeNormal(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'LeCunNormal',\n * configParamIndices: [0]\n * }\n */\nexport function leCunNormal(config: SeedOnlyInitializerConfig): Initializer {\n return new LeCunNormal(config);\n}\n\n/**\n * @doc {\n * heading: 'Initializers',\n * namespace: 'initializers',\n * useDocsFrom: 'Orthogonal',\n * configParamIndices: [0]\n * }\n */\nexport function orthogonal(config: OrthogonalConfig): Initializer {\n return new Orthogonal(config);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original source: utils/generic_utils.py */\n\nimport {Tensor} from '@tensorflow/tfjs-core';\nimport {ValueError} from '../errors';\nimport {Shape} from '../types';\n// tslint:enable\n\n\n/**\n * Determine whether the input is an Array of Shapes.\n */\nexport function isArrayOfShapes(x: Shape|Shape[]): boolean {\n return Array.isArray(x) && Array.isArray(x[0]);\n}\n\n/**\n * Special case of normalizing shapes to lists.\n *\n * @param x A shape or list of shapes to normalize into a list of Shapes.\n * @return A list of Shapes.\n */\nexport function normalizeShapeList(x: Shape|Shape[]): Shape[] {\n if (x.length === 0) {\n return [];\n }\n if (!Array.isArray(x[0])) {\n return [x] as Shape[];\n }\n return x as Shape[];\n}\n\n/**\n * Helper function to obtain exactly one Tensor.\n * @param xs: A single `tf.Tensor` or an `Array` of `tf.Tensor`s.\n * @return A single `tf.Tensor`. If `xs` is an `Array`, return the first one.\n * @throws ValueError: If `xs` is an `Array` and its length is not 1.\n */\nexport function getExactlyOneTensor(xs: Tensor|Tensor[]): Tensor {\n let x: Tensor;\n if (Array.isArray(xs)) {\n if (xs.length !== 1) {\n throw new ValueError(`Expected Tensor length to be 1; got ${xs.length}`);\n }\n x = xs[0];\n } else {\n x = xs as Tensor;\n }\n return x;\n}\n\n/**\n * Helper function to obtain exactly on instance of Shape.\n *\n * @param shapes Input single `Shape` or Array of `Shape`s.\n * @returns If input is a single `Shape`, return it unchanged. 
If the input is\n * an `Array` containing exactly one instance of `Shape`, return the instance.\n * Otherwise, throw a `ValueError`.\n * @throws ValueError: If input is an `Array` of `Shape`s, and its length is not\n * 1.\n */\nexport function getExactlyOneShape(shapes: Shape|Shape[]): Shape {\n if (Array.isArray(shapes) && Array.isArray(shapes[0])) {\n if (shapes.length === 1) {\n shapes = shapes as Shape[];\n return shapes[0];\n } else {\n throw new ValueError(`Expected exactly 1 Shape; got ${shapes.length}`);\n }\n } else {\n return shapes as Shape;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport {LayerVariable} from '../variables';\n\n/**\n * Count the elements in an Array of LayerVariables.\n *\n * @param weights: The LayerVariables of which the constituent numbers are to\n * be counted.\n * @returns A count of the elements in all the LayerVariables\n */\nexport function countParamsInWeights(weights: LayerVariable[]): number {\n let count = 0;\n for (const weight of weights) {\n if (weight.shape.length === 0) {\n count += 1;\n } else {\n count += weight.shape.reduce((a, b) => a * b);\n }\n }\n return count;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {DataType, Tensor, variableGrads} from '@tensorflow/tfjs-core';\n\nimport {getNextUniqueTensorId} from './backend/state';\nimport {getScopedTensorName, getUniqueTensorName} from './common';\nimport {Constraint} from './constraints';\nimport {NotImplementedError} from './errors';\nimport {HasShape, Shape} from './types';\n\nconst DEFAULT_VARIABLE_NAME_PREFIX = 'Variable';\n\n/**\n * A `tf.layers.LayerVariable` is similar to a `tf.Tensor` in that it has a\n * dtype and shape, but its value is mutable. The value is itself represented\n * as a`tf.Tensor`, and can be read with the `read()` method and updated with\n * the `write()` method.\n */\nexport class LayerVariable {\n readonly dtype: DataType;\n readonly shape: Shape;\n\n readonly id: number;\n // The fully scoped name of this Variable, including a unique suffix if needed\n readonly name: string;\n // The originally requested fully scoped name of this Variable, not including\n // any unique suffix. This may be needed when restoring weights because this\n // original name is used as a key.\n readonly originalName: string;\n readonly trainable: boolean;\n\n protected readonly val: tfc.Variable;\n readonly constraint: Constraint;\n /**\n * Construct Variable from a `tf.Tensor`.\n *\n * If not explicitly named, the Variable will be given a name with the\n * prefix 'Variable'. Variable names are unique. In the case of name\n * collision, suffixies '_<num>' will be added to the name.\n *\n * @param val Initial value of the Variable.\n * @param name Name of the variable. 
If `null` or `undefined` is provided, it\n * will default a name with the prefix 'Variable'.\n * @param constraint Optional, projection function to be applied to the\n * variable after optimize updates\n * @throws ValueError if `name` is `null` or `undefined`.\n */\n constructor(\n val: Tensor, dtype: DataType = 'float32',\n name = DEFAULT_VARIABLE_NAME_PREFIX, trainable = true,\n constraint: Constraint = null) {\n this.dtype = dtype == null ? 'float32' : dtype;\n this.shape = val.shape;\n this.id = getNextUniqueTensorId();\n\n name = name == null ? DEFAULT_VARIABLE_NAME_PREFIX : name;\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n\n this.trainable = trainable;\n this.constraint = constraint;\n\n this.val = tfc.variable(val, this.trainable, this.name, this.dtype);\n }\n\n /**\n * Get a snapshot of the Variable's value.\n *\n * The returned value is a snapshot of the Variable's value at the time of\n * the invocation. Future mutations in the value of the tensor will only\n * be reflected by future calls to this method.\n */\n read(): Tensor {\n this.assertNotDisposed();\n return this.val;\n }\n\n /**\n * Update the value of the Variable.\n *\n * @param newVal: The new value to update to. Must be consistent with the\n * dtype and shape of the Variable.\n * @return This Variable.\n */\n write(newVal: Tensor) {\n // TODO(cais): Once TF.js Core supports Tensor.dtype, check dtype match.\n this.assertNotDisposed();\n checkShapesMatch(this.val, newVal);\n // Skip updating if this is the exact same tensor.\n if (this.val.id !== newVal.id) {\n this.val.assign(newVal);\n if (this.constraint != null) {\n this.val.assign(this.constraint.apply(this.val));\n }\n }\n return this;\n }\n\n /**\n * Dispose this LayersVariable instance from memory.\n */\n dispose(): void {\n this.assertNotDisposed();\n this.val.dispose();\n }\n\n protected assertNotDisposed(): void {\n if (this.val.isDisposed) {\n throw new Error(`LayersVariable ${this.name} is already disposed.`);\n }\n }\n}\n\nfunction checkShapesMatch(x: HasShape, y: HasShape): void {\n if (x.shape.toString() !== y.shape.toString()) {\n throw new Error(\n 'Shape mismatch: ' + JSON.stringify(x.shape) + ' vs. 
' +\n JSON.stringify(y.shape));\n }\n}\n\n/**\n * Create a Variable.\n * @param x The initial value of the `Variable`.\n * @param dtype optional, the type of the variable.\n * @param name optional, the name of the variable, default provided by\n * Variable.\n * @param constraint optional, a constraint to be applied after every update.\n * @return The newly instantiated `Variable`.\n */\nexport function variable(\n x: Tensor, dtype?: DataType, name?: string,\n constraint?: Constraint): LayerVariable {\n return new LayerVariable(x, dtype, name, true, constraint);\n}\n\n/**\n * Instantiates an all-zeros Variable and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-zero Variable.\n */\nexport function zerosVariable(\n shape: Shape, dtype?: DataType, name?: string): LayerVariable {\n // TODO(cais): Implement logic for dtype.\n return new LayerVariable(tfc.zeros(shape), dtype, name);\n}\n\n/**\n * Instantiates an all-zeros tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return A newly instantiated Variable.\n */\nexport function zerosLike(\n x: Tensor, dtype?: DataType, name?: string): LayerVariable {\n return new LayerVariable(tfc.zerosLike(x), dtype, name);\n}\n\n/**\n * Instantiates an all-ones tensor and returns it.\n *\n * @param shape Shape of the tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return An all-ones Variable.\n */\nexport function onesVariable(\n shape: Shape, dtype?: DataType, name?: string): LayerVariable {\n // TODO(cais): Implement logic for dtype.\n const allocated = tfc.ones(shape);\n return new LayerVariable(allocated, dtype, name);\n}\n\n/**\n * Instantiates an all-ones tensor of the same shape as another tensor.\n *\n * @param x The other tensor.\n * @param dtype DType of the tensor.\n * @param name Name of the tensor.\n * @return A newly instantiated Variable.\n */\nexport function onesLike(\n x: Tensor, dtype?: DataType, name?: string): LayerVariable {\n const allocated = tfc.onesLike(x);\n return new LayerVariable(allocated, dtype, name);\n}\n\n/**\n * Instantiate an identity matrix and returns it, as a Variable\n *\n * @param size Number of rows/columns.\n * @param dtype Data type of returned Variable.\n * @param name Name of returned Variable.\n * @return A Variable, an identity matrix.\n */\nexport function eyeVariable(\n size: number, dtype?: DataType, name?: string): LayerVariable {\n return new LayerVariable(tfc.eye(size), dtype, name);\n}\n\n/**\n * Get a Variable with uniform distribution of values.\n * @param shape Shape of the tensor.\n * @param minval Lower bound of the uniform distribution.\n * @param maxval Upper bound of the uniform distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The uniform-random Variable.\n */\nexport function randomUniformVariable(\n shape: Shape, minval: number, maxval: number, dtype?: DataType,\n seed?: number, name = 'randomUniform'): LayerVariable {\n return new LayerVariable(\n tfc.randomUniform(shape, minval, maxval, dtype), dtype, name);\n}\n\n/**\n * Get a Variable with truncated-normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * 
@return The truncated-normal-random Variable.\n */\nexport function truncatedNormalVariable(\n shape: Shape, mean = 0.0, stddev = 1.0, dtype?: DataType, seed?: number,\n name = 'truncatedNormal'): LayerVariable {\n // TODO(cais): Implement logic for dtype and seed once they are supported\n // by deeplearn.js.\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(\n `randomNormal does not support dType ${dtype}.`);\n }\n return new LayerVariable(\n tfc.truncatedNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n/**\n * Get a Variable with normal distribution of values.\n * @param shape Shape of the tensor.\n * @param mean mean value of the normal distribution.\n * @param stddev standard deviation of the normal distribution.\n * @param dtype\n * @param seed\n * @param name Optional name.\n * @return The truncated-normal-random Variable.\n */\nexport function randomNormalVariable(\n shape: Shape, mean = 0.0, stddev = 1.0, dtype?: DataType, seed?: number,\n name = 'randomNormal'): LayerVariable {\n dtype = dtype || 'float32';\n if (dtype !== 'float32' && dtype !== 'int32') {\n throw new NotImplementedError(\n `randomNormalVariable does not support dType ${dtype}.`);\n }\n return new LayerVariable(\n tfc.randomNormal(shape, mean, stddev, dtype, seed), dtype, name);\n}\n\n/**\n * Update the value of a Variable.\n * @param x The Variable to be updated.\n * @param xNew The new value to update to.\n * @return The Variable updated.\n */\nexport function update(x: LayerVariable, xNew: Tensor): LayerVariable {\n return x.write(xNew);\n}\n\n/**\n * Update the value of a Variable by adding an increment.\n * @param x The Variable to be updated.\n * @param increment The incrment to add to `x`.\n * @return The Variable updated.\n */\nexport function updateAdd(x: LayerVariable, increment: Tensor): LayerVariable {\n return x.write(tfc.add(x.read(), increment));\n}\n\n/**\n * Update the value of a Variable by subtracting a decrement.\n * @param x The Variable to be updated.\n * @param decrement The decrement to subtract from `x`.\n * @return The Variable updated.\n */\nexport function updateSub(x: LayerVariable, decrement: Tensor): LayerVariable {\n return x.write(tfc.sub(x.read(), decrement));\n}\n\n/**\n * Get the values of an array of Variables.\n *\n * @param tensors An `Array` of `Variable`s to get the values of.\n * @return The values of the inputs, as an `Array` of`tf.Tensor`s.\n */\nexport function batchGetValue(xs: LayerVariable[]): Tensor[] {\n return xs.map(x => x.read());\n}\n\n/**\n * Update the value of multiple Variables at once.\n *\n * @param variablesAndValues An `Array`, each element is of type\n * [Variable, Tensor]. The first item is the\n * `Variable` of which the value is to be updated. The second item\n * carries the new value.\n */\nexport function batchSetValue(\n variablesAndValues: Array<[LayerVariable, Tensor]>): void {\n variablesAndValues.map((variableAndValue) => {\n const variable: LayerVariable = variableAndValue[0];\n variable.write(variableAndValue[1]);\n });\n}\n\n/**\n * Returns the gradients of `variables` w.r.t. 
the return value of `lossFn`.\n * @param lossFn A function which returns a Scalar to be used as the function\n * value (i.e., numerator) for differentiation.\n * @param variables List of variables to be used as the independent variables\n * (i.e., denominator) for differentiation.\n * @returns An Array of gradients tensors.\n */\nexport function gradients(\n lossFn: () => tfc.Scalar, variables: LayerVariable[]): Tensor[] {\n // TODO(cais): The return type signature can be simplified if deeplearn makes\n // the corresponding type public.\n const variableList =\n variables.map(variable => variable.read() as tfc.Variable);\n const valudAndGrads = variableGrads(lossFn, variableList);\n return variables.map(variable => valudAndGrads.grads[variable.name]);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original source: keras/engine/topology.py */\n\nimport {DataType, Scalar, serialization, Tensor, tidy, util} from '@tensorflow/tfjs-core';\n\nimport {getNextUniqueTensorId, getUid} from '../backend/state';\nimport {getScopedTensorName, getUniqueTensorName, nameScope} from '../common';\nimport {Constraint} from '../constraints';\nimport {AttributeError, NotImplementedError, RuntimeError, ValueError} from '../errors';\nimport {Initializer} from '../initializers';\nimport {Regularizer} from '../regularizers';\nimport {Kwargs, RegularizerFn, Shape} from '../types';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as types_utils from '../utils/types_utils';\nimport * as variable_utils from '../utils/variable_utils';\nimport {batchGetValue, batchSetValue, LayerVariable} from '../variables';\n\n// TODO(michaelterry): This is a stub until it's defined.\nexport type Op = (x: LayerVariable) => LayerVariable;\n\n/**\n * Constructor arguments for InputSpec.\n */\nexport interface InputSpecConfig {\n /** Expected datatype of the input. */\n dtype?: DataType;\n /** Expected shape of the input (may include null for unchecked axes). */\n shape?: Shape;\n /** Expected rank of the input. */\n ndim?: number;\n /** Maximum rank of the input. */\n maxNDim?: number;\n /** Minimum rank of the input. */\n minNDim?: number;\n /** Dictionary mapping integer axes to a specific dimension value. */\n axes?: {[axis: number]: number};\n}\n\n/**\n * Specifies the ndim, dtype and shape of every input to a layer.\n *\n * Every layer should expose (if appropriate) an `inputSpec` attribute:\n * a list of instances of InputSpec (one per input tensor).\n *\n * A null entry in a shape is compatible with any dimension,\n * a null shape is compatible with any shape.\n */\nexport class InputSpec {\n /** Expected datatype of the input. */\n dtype?: DataType;\n /** Expected shape of the input (may include null for unchecked axes). */\n shape?: Shape;\n /** Expected rank of the input. */\n ndim?: number;\n /** Maximum rank of the input. */\n maxNDim?: number;\n /** Minimum rank of the input. */\n minNDim?: number;\n /** Dictionary mapping integer axes to a specific dimension value. 
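For example, `axes: {1: 64}` (an illustrative value) requires axis 1 of the input to have size 64. 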
*/\n axes?: {[axis: number]: number};\n\n constructor(config: InputSpecConfig) {\n this.dtype = config.dtype;\n this.shape = config.shape;\n /*\n TODO(michaelterry): Could throw error if ndim and shape are both defined\n (then backport).\n */\n if (config.shape != null) {\n this.ndim = config.shape.length;\n } else {\n this.ndim = config.ndim;\n }\n this.maxNDim = config.maxNDim;\n this.minNDim = config.minNDim;\n this.axes = config.axes || {};\n }\n}\n\n/**\n * `tf.SymbolicTensor` is a placeholder for a Tensor without any concrete value.\n *\n * They are most often encountered when building a graph of `Layer`s for a\n * a `tf.Model` and the input data's shape, but not values are known.\n */\n/** @doc {heading: 'Models', 'subheading': 'Classes'} */\nexport class SymbolicTensor {\n /* A unique ID for the tensor to be able to differentiate tensors. */\n readonly id: number;\n // The fully scoped name of this Variable, including a unique suffix if needed\n readonly name: string;\n // The originally requested fully scoped name of this Variable, not including\n // any unique suffix. This may be needed when restoring weights because this\n // original name is used as a key.\n readonly originalName?: string;\n /**\n * Rank/dimensionality of the tensor.\n */\n readonly rank: number;\n /**\n * Replacement for _keras_history.\n */\n nodeIndex: number;\n /**\n * Replacement for _keras_history.\n */\n tensorIndex: number;\n\n /**\n *\n * @param dtype\n * @param shape\n * @param sourceLayer The Layer that produced this symbolic tensor.\n * @param inputs The inputs passed to sourceLayer's __call__() method.\n * @param nodeIndex\n * @param tensorIndex\n * @param callArgs The keyword arguments passed to the __call__() method.\n * @param name\n * @param outputTensorIndex The index of this tensor in the list of outputs\n * returned by apply().\n */\n constructor(\n readonly dtype: DataType, readonly shape: Shape,\n public sourceLayer: Layer, readonly inputs: SymbolicTensor[],\n readonly callArgs: Kwargs, name?: string,\n readonly outputTensorIndex?: number) {\n this.id = getNextUniqueTensorId();\n if (name != null) {\n this.originalName = getScopedTensorName(name);\n this.name = getUniqueTensorName(this.originalName);\n }\n this.rank = shape.length;\n }\n}\n\n/**\n * Constructor arguments for Node.\n */\nexport interface NodeConfig {\n /**\n * The layer that takes `inputTensors` and turns them into `outputTensors`.\n * (the node gets created when the `call` method of the layer is called).\n */\n outboundLayer: Layer;\n /**\n * A list of layers, the same length as `inputTensors`, the layers from where\n * `inputTensors` originate.\n */\n inboundLayers: Layer[];\n /**\n * A list of integers, the same length as `inboundLayers`. `nodeIndices[i]` is\n * the origin node of `inputTensors[i]` (necessary since each inbound layer\n * might have several nodes, e.g. if the layer is being shared with a\n * different data stream).\n */\n nodeIndices: number[];\n /**\n * A list of integers, the same length as `inboundLayers`. `tensorIndices[i]`\n * is the index of `inputTensors[i]` within the output of the inbound layer\n * (necessary since each inbound layer might have multiple tensor outputs,\n * with each one being independently manipulable).\n */\n tensorIndices: number[];\n /** List of input tensors. */\n inputTensors: SymbolicTensor[];\n /** List of output tensors. */\n outputTensors: SymbolicTensor[];\n /** List of input masks (a mask can be a tensor, or null). 
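Mask propagation is not yet implemented in `Layer.apply()`, so these are currently passed as null. 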
*/\n inputMasks: Tensor[];\n /** List of output masks (a mask can be a tensor, or null). */\n outputMasks: Tensor[];\n /** List of input shape tuples. */\n inputShapes: Shape|Shape[];\n /** List of output shape tuples. */\n outputShapes: Shape|Shape[];\n}\n\n/**\n * The type of the return value of Layer.dispose() and Container.dispose().\n */\nexport interface DisposeResult {\n /**\n * Reference count after the dispose call.\n */\n refCountAfterDispose: number;\n\n /**\n * Number of variables dispose in this dispose call.\n */\n numDisposedVariables: number;\n}\n\nlet _nextNodeID = 0;\n\n/**\n * A `Node` describes the connectivity between two layers.\n *\n * Each time a layer is connected to some new input,\n * a node is added to `layer.inboundNodes`.\n *\n * Each time the output of a layer is used by another layer,\n * a node is added to `layer.outboundNodes`.\n *\n * `nodeIndices` and `tensorIndices` are basically fine-grained coordinates\n * describing the origin of the `inputTensors`, verifying the following:\n *\n * `inputTensors[i] ==\n * inboundLayers[i].inboundNodes[nodeIndices[i]].outputTensors[\n * tensorIndices[i]]`\n *\n * A node from layer A to layer B is added to:\n * A.outboundNodes\n * B.inboundNodes\n */\nexport class Node {\n /**\n * The layer that takes `inputTensors` and turns them into `outputTensors`\n * (the node gets created when the `call` method of the layer is called).\n */\n outboundLayer: Layer;\n /**\n * A list of layers, the same length as `inputTensors`, the layers from where\n * `inputTensors` originate.\n */\n inboundLayers: Layer[];\n /**\n * A list of integers, the same length as `inboundLayers`. `nodeIndices[i]` is\n * the origin node of `inputTensors[i]` (necessary since each inbound layer\n * might have several nodes, e.g. if the layer is being shared with a\n * different data stream).\n */\n nodeIndices: number[];\n /**\n * A list of integers, the same length as `inboundLayers`. `tensorIndices[i]`\n * is the index of `inputTensors[i]` within the output of the inbound layer\n * (necessary since each inbound layer might have multiple tensor outputs,\n * with each one being independently manipulable).\n */\n tensorIndices: number[];\n /** List of input tensors. */\n inputTensors: SymbolicTensor[];\n /** List of output tensors. */\n outputTensors: SymbolicTensor[];\n /** List of input masks (a mask can be a tensor, or null). */\n inputMasks: Tensor[];\n /** List of output masks (a mask can be a tensor, or null). */\n outputMasks: Tensor[];\n /** List of input shape tuples. */\n inputShapes: Shape|Shape[];\n /** List of output shape tuples. 
*/\n outputShapes: Shape|Shape[];\n\n readonly id: number;\n\n constructor(\n config: NodeConfig,\n // TODO(michaelterry): Define actual type for this.\n public callArgs?: Kwargs) {\n this.id = _nextNodeID++;\n /*\n Layer instance (NOT a list).\n this is the layer that takes a list of input tensors\n and turns them into a list of output tensors.\n the current node will be added to\n the inboundNodes of outboundLayer.\n */\n this.outboundLayer = config.outboundLayer;\n\n /*\n The following 3 properties describe where\n the input tensors come from: which layers,\n and for each layer, which node and which\n tensor output of each node.\n */\n\n // List of layer instances.\n this.inboundLayers = config.inboundLayers;\n // List of integers, 1:1 mapping with inboundLayers.\n this.nodeIndices = config.nodeIndices;\n // List of integers, 1:1 mapping with inboundLayers.\n this.tensorIndices = config.tensorIndices;\n\n /*\n Following 2 properties:\n tensor inputs and outputs of outboundLayer.\n */\n\n // List of tensors. 1:1 mapping with inboundLayers.\n this.inputTensors = config.inputTensors;\n // List of tensors, created by outboundLayer.call().\n this.outputTensors = config.outputTensors;\n\n /*\n Following 2 properties: input and output masks.\n List of tensors, 1:1 mapping with inputTensor.\n */\n this.inputMasks = config.inputMasks;\n // List of tensors, created by outboundLayer.computeMask().\n this.outputMasks = config.outputMasks;\n\n // Following 2 properties: input and output shapes.\n\n // List of shape tuples, shapes of inputTensors.\n this.inputShapes = config.inputShapes;\n // List of shape tuples, shapes of outputTensors.\n this.outputShapes = config.outputShapes;\n\n // Add nodes to all layers involved.\n for (const layer of config.inboundLayers) {\n if (layer != null) {\n layer.outboundNodes.push(this);\n }\n }\n config.outboundLayer.inboundNodes.push(this);\n }\n\n getConfig(): serialization.ConfigDict {\n const inboundNames: string[] = [];\n for (const layer of this.inboundLayers) {\n if (layer != null) {\n inboundNames.push(layer.name);\n } else {\n inboundNames.push(null);\n }\n }\n return {\n outboundLayer: this.outboundLayer ? this.outboundLayer.name : null,\n inboundLayers: inboundNames,\n nodeIndices: this.nodeIndices,\n tensorIndices: this.tensorIndices\n };\n }\n}\n\n/** Constructor arguments for Layer. */\nexport interface LayerConfig {\n /**\n * If defined, will be used to create an input layer to insert before this\n * layer. If both `inputShape` and `batchInputShape` are defined,\n * `batchInputShape` will be used. This argument is only applicable to input\n * layers (the first layer of a model).\n */\n inputShape?: Shape;\n /**\n * If defined, will be used to create an input layer to insert before this\n * layer. If both `inputShape` and `batchInputShape` are defined,\n * `batchInputShape` will be used. This argument is only applicable to input\n * layers (the first layer of a model).\n */\n batchInputShape?: Shape;\n /**\n * If `inputShape` is specified and `batchInputShape` is *not* specifiedd,\n * `batchSize` is used to construct the `batchInputShape`: `[batchSize,\n * ...inputShape]`\n */\n batchSize?: number;\n /**\n * The data-type for this layer. Defaults to 'float32'.\n * This argument is only applicable to input layers (the first layer of a\n * model).\n */\n dtype?: DataType;\n /** Name for this layer. */\n name?: string;\n /** Whether this layer is trainable. Defaults to true. 
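When false, the layer's weights are excluded from `trainableWeights` (and hence from gradient updates) and are reported under `nonTrainableWeights` instead. 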
*/\n trainable?: boolean;\n /** Whether the weights of this layer are updatable by `fit`. */\n updatable?: boolean;\n /**\n * Initial weight values of the layer.\n */\n weights?: Tensor[];\n /** Legacy support. Do not use for new code. */\n inputDType?: DataType;\n}\n\n// If necessary, add `output` arguments to the CallHook function.\n// This is currently used for testing only, but may be used for debugger-related\n// purposes in the future.\nexport type CallHook = (inputs: Tensor|Tensor[], kwargs: Kwargs) => void;\n\nlet _nextLayerID = 0;\n\n/**\n * A layer is a grouping of operations and weights that can be composed to\n * create a `tf.Model`.\n *\n * Layers are constructed by using the functions under the\n * [tf.layers](#Layers-Basic) namespace.\n */\n/** @doc {heading: 'Layers', subheading: 'Classes', namespace: 'layers'} */\nexport abstract class Layer extends serialization.Serializable {\n /** Name for this layer. Must be unique within a model. */\n name: string;\n /**\n * List of InputSpec class instances.\n *\n * Each entry describes one required input:\n * - ndim\n * - dtype\n * A layer with `n` input tensors must have an `inputSpec` of length `n`.\n */\n inputSpec: InputSpec[];\n supportsMasking: boolean;\n /** Whether the layer weights will be updated during training. */\n trainable: boolean;\n updatable: boolean;\n batchInputShape: Shape;\n dtype: DataType;\n initialWeights: Tensor[];\n\n inboundNodes: Node[];\n outboundNodes: Node[];\n\n activityRegularizer: Regularizer;\n\n protected _trainableWeights: LayerVariable[];\n private _nonTrainableWeights: LayerVariable[];\n private _losses: RegularizerFn[];\n // TODO(cais): _updates is currently unused.\n private _updates: Tensor[];\n private _built: boolean;\n private _callHook: CallHook = null;\n\n private _addedWeightNames: string[] = [];\n\n readonly id: number;\n\n // Porting Notes: PyKeras does not have this property in this base Layer\n // class. Instead lets Layer subclass set it dynamically and checks the\n // value with `hasattr`. In tfjs-layers, we let this be a member of this\n // base class.\n protected _stateful = false;\n\n protected _refCount: number|null;\n\n constructor(config: LayerConfig) {\n super();\n this.id = _nextLayerID++;\n\n this.activityRegularizer = null;\n\n this.inputSpec = null;\n this.supportsMasking = false;\n\n // These properties will be set upon call of this.build()\n this._trainableWeights = [];\n this._nonTrainableWeights = [];\n this._losses = [];\n this._updates = [];\n this._built = false;\n\n /*\n These lists will be filled via successive calls\n to this.addInboundNode().\n */\n this.inboundNodes = [];\n this.outboundNodes = [];\n\n let name = config.name;\n if (!name) {\n const prefix = this.getClassName();\n name = generic_utils.toSnakeCase(prefix) + '_' + getUid(prefix);\n }\n this.name = name;\n\n this.trainable = config.trainable == null ? true : config.trainable;\n this.updatable = config.updatable == null ? 
true : config.updatable;\n\n if (config.inputShape != null || config.batchInputShape != null) {\n /*\n In this case we will later create an input layer\n to insert before the current layer\n */\n let batchInputShape: Shape;\n if (config.batchInputShape != null) {\n batchInputShape = config.batchInputShape;\n } else if (config.inputShape != null) {\n let batchSize: number = null;\n if (config.batchSize != null) {\n batchSize = config.batchSize;\n }\n batchInputShape = [batchSize].concat(config.inputShape);\n }\n this.batchInputShape = batchInputShape;\n\n // Set dtype.\n let dtype = config.dtype;\n if (dtype == null) {\n dtype = config.inputDType;\n }\n if (dtype == null) {\n dtype = 'float32';\n }\n this.dtype = dtype;\n }\n\n if (config.weights != null) {\n this.initialWeights = config.weights;\n } else {\n this.initialWeights = null;\n }\n\n // The value of `_refCount` is initialized to null. When the layer is used\n // in a symbolic way for the first time, it will be set to 1.\n this._refCount = null;\n }\n\n /**\n * Converts a layer and its index to a unique (immutable type) name.\n * This function is used internally with `this.containerNodes`.\n * @param layer The layer.\n * @param nodeIndex The layer's position (e.g. via enumerate) in a list of\n * nodes.\n *\n * @returns The unique name.\n */\n protected static nodeKey(layer: Layer, nodeIndex: number) {\n return layer.name + '_ib-' + nodeIndex.toString();\n }\n\n /**\n * Returns this.inboundNode at index nodeIndex.\n *\n * Porting note: This is a replacement for _get_node_attribute_at_index()\n * @param nodeIndex\n * @param attrName The name of the attribute related to request for this node.\n */\n private getNodeAtIndex(nodeIndex: number, attrName: string): Node {\n if (this.inboundNodes.length === 0) {\n throw new RuntimeError(\n 'The layer has never been called ' +\n `and thus has no defined ${attrName}.`);\n }\n if (this.inboundNodes.length <= nodeIndex) {\n throw new ValueError(\n `Asked to get ${attrName} at node ${nodeIndex}, ` +\n `but the layer has only ${this.inboundNodes.length} inbound nodes.`);\n }\n return this.inboundNodes[nodeIndex];\n }\n\n /**\n * Retrieves the input tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple inputs).\n */\n getInputAt(nodeIndex: number): SymbolicTensor|SymbolicTensor[] {\n return generic_utils.singletonOrArray(\n this.getNodeAtIndex(nodeIndex, 'input').inputTensors);\n }\n\n /**\n * Retrieves the output tensor(s) of a layer at a given node.\n *\n * @param nodeIndex Integer, index of the node from which to retrieve the\n * attribute. E.g. `nodeIndex=0` will correspond to the first time the layer\n * was called.\n *\n * @return A tensor (or list of tensors if the layer has multiple outputs).\n */\n getOutputAt(nodeIndex: number): SymbolicTensor|SymbolicTensor[] {\n return generic_utils.singletonOrArray(\n this.getNodeAtIndex(nodeIndex, 'output').outputTensors);\n }\n\n // Properties\n\n /**\n * Retrieves the input tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. 
if it is connected to one incoming layer.\n *\n * @return Input tensor or list of input tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get input(): SymbolicTensor|SymbolicTensor[] {\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(\n `Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer input\" ' +\n 'is ill-defined. ' +\n 'Use `getInputAt(nodeIndex)` instead.');\n } else if (this.inboundNodes.length === 0) {\n throw new AttributeError(\n `Layer ${this.name}` +\n ' is not connected, no input to return.');\n }\n return generic_utils.singletonOrArray(\n this.getNodeAtIndex(0, 'input').inputTensors);\n }\n\n /**\n * Retrieves the output tensor(s) of a layer.\n *\n * Only applicable if the layer has exactly one inbound node,\n * i.e. if it is connected to one incoming layer.\n *\n * @return Output tensor or list of output tensors.\n *\n * @exception AttributeError if the layer is connected to more than one\n * incoming layers.\n */\n get output(): SymbolicTensor|SymbolicTensor[] {\n if (this.inboundNodes.length === 0) {\n throw new AttributeError(\n `Layer ${this.name}` +\n ' has no inbound nodes.');\n }\n if (this.inboundNodes.length > 1) {\n throw new AttributeError(\n `Layer ${this.name}` +\n ' has multiple inbound nodes, ' +\n 'hence the notion of \"layer output\" ' +\n 'is ill-defined. ' +\n 'Use `getOutputAt(nodeIndex)` instead.');\n }\n return generic_utils.singletonOrArray(\n this.getNodeAtIndex(0, 'output').outputTensors);\n }\n\n get losses(): RegularizerFn[] {\n return this._losses;\n }\n\n /**\n * Retrieves the Layer's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses(): Scalar[] {\n // Porting Node: This is an augmentation to Layer.loss in PyKeras.\n // In PyKeras, Layer.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. 
This is due to the\n // imperative backend.\n return this.losses.map(lossFn => lossFn());\n }\n\n get updates(): Tensor[] {\n return this._updates;\n }\n\n get built(): boolean {\n return this._built;\n }\n\n set built(built: boolean) {\n this._built = built;\n }\n\n get trainableWeights(): LayerVariable[] {\n if (this.trainable) {\n return this._trainableWeights;\n } else {\n return [];\n }\n }\n\n set trainableWeights(weights: LayerVariable[]) {\n this._trainableWeights = weights;\n }\n\n get nonTrainableWeights(): LayerVariable[] {\n if (!this.trainable) {\n return this._trainableWeights.concat(this._nonTrainableWeights);\n } else {\n return this._nonTrainableWeights;\n }\n }\n\n set nonTrainableWeights(weights: LayerVariable[]) {\n this._nonTrainableWeights = weights;\n }\n\n /**\n * The concatenation of the lists trainableWeights and nonTrainableWeights\n * (in this order).\n */\n get weights(): LayerVariable[] {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n\n get stateful(): boolean {\n return this._stateful;\n }\n\n /**\n * Reset the states of the layer.\n *\n * This method of the base Layer class is essentially a no-op.\n * Subclasses that are stateful (e.g., stateful RNNs) should override this\n * method.\n */\n resetStates(): void {\n if (!this.stateful) {\n throw new Error(\n 'Cannot call the resetStates() method of a non-stateful Layer ' +\n 'object.');\n }\n }\n\n /**\n * Checks compatibility between the layer and provided inputs.\n *\n * This checks that the tensor(s) `input`\n * verify the input assumptions of the layer\n * (if any). If not, exceptions are raised.\n *\n * @param inputs Input tensor or list of input tensors.\n *\n * @exception ValueError in case of mismatch between\n * the provided inputs and the expectations of the layer.\n */\n protected assertInputCompatibility(inputs: Tensor|Tensor[]|SymbolicTensor|\n SymbolicTensor[]): void {\n inputs = generic_utils.toList(inputs);\n if (this.inputSpec == null || this.inputSpec.length === 0) {\n return;\n }\n const inputSpec = generic_utils.toList(this.inputSpec);\n if (inputs.length !== inputSpec.length) {\n throw new ValueError(\n `Layer ${this.name} expects ${inputSpec.length} inputs, ` +\n `but it received ${inputs.length} input tensors. 
` +\n `Input received: ${inputs}`);\n }\n for (let inputIndex = 0; inputIndex < inputs.length; inputIndex++) {\n const x = inputs[inputIndex];\n const spec: InputSpec = inputSpec[inputIndex];\n if (spec == null) {\n continue;\n }\n\n // Check ndim.\n const ndim = x.rank;\n if (spec.ndim != null) {\n if (ndim !== spec.ndim) {\n throw new ValueError(\n `Input ${inputIndex} is incompatible with layer ${this.name}: ` +\n `expected ndim=${spec.ndim}, found ndim=${ndim}`);\n }\n }\n if (spec.maxNDim != null) {\n if (ndim > spec.maxNDim) {\n throw new ValueError(\n `Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected max_ndim=${spec.maxNDim}, found ndim=${ndim}`);\n }\n }\n if (spec.minNDim != null) {\n if (ndim < spec.minNDim) {\n throw new ValueError(\n `Input ${inputIndex} is incompatible with layer ${this.name}` +\n `: expected min_ndim=${spec.minNDim}, found ndim=${ndim}.`);\n }\n }\n\n // Check dtype.\n if (spec.dtype != null) {\n if (x.dtype !== spec.dtype) {\n throw new ValueError(\n `Input ${inputIndex} is incompatible with layer ${this.name} ` +\n `: expected dtype=${spec.dtype}, found dtype=${x.dtype}.`);\n }\n }\n\n // Check specific shape axes.\n if (spec.axes) {\n const xShape = x.shape;\n for (const key in spec.axes) {\n const axis = Number(key);\n const value = spec.axes[key];\n // Perform Python-style slicing in case axis < 0;\n // TODO(cais): Use https://github.com/alvivi/typescript-underscore to\n // ensure type safety through Underscore calls.\n const xShapeAtAxis =\n axis >= 0 ? xShape[axis] : xShape[xShape.length + axis];\n if (value != null && [value, null].indexOf(xShapeAtAxis) === -1) {\n throw new ValueError(\n `Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected axis ${axis} of input shape to ` +\n `have value ${value} but got shape ${xShape}.`);\n }\n }\n }\n\n // Check shape.\n if (spec.shape != null) {\n for (let i = 0; i < spec.shape.length; ++i) {\n const specDim = spec.shape[i];\n const dim = x.shape[i];\n if (specDim != null && dim != null) {\n if (specDim !== dim) {\n throw new ValueError(\n `Input ${inputIndex} is incompatible with layer ` +\n `${this.name}: expected shape=${spec.shape}, ` +\n 'found shape=${xShape}.');\n }\n }\n }\n }\n }\n }\n\n /**\n * This is where the layer's logic lives.\n *\n * @param inputs Input tensor, or list/tuple of input tensors.\n * @param kwargs Additional keyword arguments.\n *\n * @return A tensor or list/tuple of tensors.\n */\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return inputs;\n }\n\n protected invokeCallHook(inputs: Tensor|Tensor[], kwargs: Kwargs) {\n if (this._callHook != null) {\n this._callHook(inputs, kwargs);\n }\n }\n\n /**\n * Set call hook.\n * This is currently used for testing only.\n * @param callHook\n */\n setCallHook(callHook: CallHook) {\n this._callHook = callHook;\n }\n\n /**\n * Clear call hook.\n * This is currently used for testing only.\n */\n clearCallHook() {\n this._callHook = null;\n }\n\n /**\n * Builds or executes a `Layer's logic.\n *\n * When called with `tf.Tensor`(s), execute the `Layer`s computation and\n * return Tensor(s). 
For example:\n *\n * ```js\n * const denseLayer = tf.layers.dense({\n * units: 1,\n * kernelInitializer: 'zeros',\n * useBias: false\n * });\n *\n * // Invoke the layer's apply() method with a `tf.Tensor` (with concrete\n * // numeric values).\n * const input = tf.ones([2, 2]);\n * const output = denseLayer.apply(input);\n *\n * // The output's value is expected to be [[0], [0]], due to the fact that\n * // the dense layer has a kernel initialized to all-zeros and does not have\n * // a bias.\n * output.print();\n * ```\n *\n * When called with `tf.SymbolicTensor`(s), this will prepare the layer for\n * future execution. This entails internal book-keeping on shapes of\n * expected Tensors, wiring layers together, and initializing weights.\n *\n * Calling `apply` with `tf.SymbolicTensor`s are typically used during the\n * building of non-`tf.Sequential` models. For example:\n *\n * ```js\n * const flattenLayer = tf.layers.flatten();\n * const denseLayer = tf.layers.dense({units: 1});\n *\n * // Use tf.layers.input() to obtain a SymbolicTensor as input to apply().\n * const input = tf.input({shape: [2, 2]});\n * const output1 = flattenLayer.apply(input);\n *\n * // output1.shape is [null, 4]. The first dimension is the undetermined\n * // batch size. The second dimension comes from flattening the [2, 2]\n * // shape.\n * console.log(JSON.stringify(output1.shape));\n *\n * // The output SymbolicTensor of the flatten layer can be used to call\n * // the apply() of the dense layer:\n * const output2 = denseLayer.apply(output1);\n *\n * // output2.shape is [null, 1]. The first dimension is the undetermined\n * // batch size. The second dimension matches the number of units of the\n * // dense layer.\n * console.log(JSON.stringify(output2.shape));\n *\n * // The input and output and be used to construct a model that consists\n * // of the flatten and dense layers.\n * const model = tf.model({inputs: input, outputs: output2});\n * ```\n *\n * @param inputs a `tf.Tensor` or `tf.SymbolicTensor` or an Array of them.\n * @param kwargs Additional keyword arguments to be passed to `call()`.\n *\n * @return Output of the layer's `call` method.\n *\n * @exception ValueError error in case the layer is missing shape information\n * for its `build` call.\n */\n // Porting Note: This is a replacement for __call__() in Python.\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n apply(\n inputs: Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[],\n kwargs?: Kwargs): Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[] {\n kwargs = kwargs || {};\n\n this.assertNotDisposed();\n\n // Ensure inputs are all the same type.\n const inputsList = generic_utils.toList(inputs);\n\n let allAreSymbolic = true;\n for (const input of inputsList) {\n if (!(input instanceof SymbolicTensor)) {\n allAreSymbolic = false;\n break;\n }\n }\n let noneAreSymbolic = true;\n for (const input of inputsList) {\n if (input instanceof SymbolicTensor) {\n noneAreSymbolic = false;\n break;\n }\n }\n\n if (allAreSymbolic === noneAreSymbolic) {\n throw new ValueError(\n 'Arguments to apply() must be all ' +\n 'SymbolicTensors or all Tensors');\n }\n\n // TODO(michaelterry): nameScope() may not be necessary.\n return nameScope(this.name, () => {\n // Handle laying building (weight creating, input spec locking).\n if (!this.built) {\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec specified in the layer constructor.\n */\n this.assertInputCompatibility(inputs);\n\n // Collect input shapes to build layer.\n const 
inputShapes: Shape[] = [];\n for (const xElem of generic_utils.toList(inputs)) {\n inputShapes.push(xElem.shape);\n }\n this.build(generic_utils.singletonOrArray(inputShapes));\n this.built = true;\n\n // Load weights that were specified at layer instantiation.\n if (this.initialWeights) {\n this.setWeights(this.initialWeights);\n }\n\n if (this._refCount === null && noneAreSymbolic) {\n // The first use of this layer is a non-symbolic call, set ref count\n // to 1 so the Layer can be properly disposed if its dispose() method\n // is called.\n this._refCount = 1;\n }\n }\n\n /*\n Throw exceptions in case the input is not compatible\n with the inputSpec set at build time.\n */\n this.assertInputCompatibility(inputs);\n\n // Handle mask propagation.\n // TODO(michaelterry): Mask propagation not currently implemented.\n\n // Actually call the layer, collecting output(s), mask(s), and shape(s).\n if (noneAreSymbolic) {\n let output = this.call(inputs as Tensor | Tensor[], kwargs);\n // TODO(michaelterry): Compute the outputMask\n\n // If the layer returns tensors from its inputs, unmodified,\n // we copy them to avoid loss of tensor metadata.\n const outputList: Tensor[] = generic_utils.toList(output);\n const outputListCopy: Tensor[] = [];\n // TODO(michaelterry): This copying may not be necessary given our eager\n // backend.\n for (let x of outputList) {\n if (inputsList.indexOf(x) !== -1) {\n x = x.clone();\n }\n outputListCopy.push(x);\n }\n output = generic_utils.singletonOrArray(outputListCopy);\n\n if (this.activityRegularizer != null) {\n throw new NotImplementedError(\n 'Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n\n // TODO(michaelterry): Call addInboundNode()?\n return output;\n } else {\n const inputShape = collectInputShape(inputs);\n const outputShape = this.computeOutputShape(inputShape);\n let output: SymbolicTensor|SymbolicTensor[];\n const outputDType = guessOutputDType(inputs);\n this.warnOnIncompatibleInputShape(\n Array.isArray(inputs) ? inputShape[0] as Shape :\n inputShape as Shape);\n\n if (outputShape != null && outputShape.length > 0 &&\n Array.isArray(outputShape[0])) {\n // We have multiple output shapes. 
Create multiple output tensors.\n output = (outputShape as Shape[])\n .map(\n (shape, index) => new SymbolicTensor(\n outputDType, shape, this,\n generic_utils.toList(inputs), kwargs, this.name,\n index));\n } else {\n output = new SymbolicTensor(\n outputDType, outputShape as Shape, this,\n generic_utils.toList(inputs), kwargs, this.name);\n }\n\n /*\n Add an inbound node to the layer, so that it keeps track\n of the call and of all new variables created during the call.\n This also updates the layer history of the output tensor(s).\n If the input tensor(s) had no previous history,\n this does nothing.\n */\n this.addInboundNode(\n inputs as SymbolicTensor | SymbolicTensor[], output, null, null,\n inputShape, outputShape, kwargs);\n this._refCount++;\n\n if (this.activityRegularizer != null) {\n throw new NotImplementedError(\n 'Layer invocation in the presence of activity ' +\n 'regularizer(s) is not supported yet.');\n }\n\n return output;\n }\n });\n }\n\n /**\n * Check compatibility between input shape and this layer's batchInputShape.\n *\n * Print warning if any incompatibility is found.\n *\n * @param inputShape Input shape to be checked.\n */\n protected warnOnIncompatibleInputShape(inputShape: Shape) {\n if (this.batchInputShape == null) {\n return;\n } else if (inputShape.length !== this.batchInputShape.length) {\n console.warn(\n `The rank of the input tensor provided (shape: ` +\n `${JSON.stringify(inputShape)}) does not match that of the ` +\n `batchInputShape (${JSON.stringify(this.batchInputShape)}) ` +\n `of the layer ${this.name}`);\n } else {\n let dimMismatch = false;\n this.batchInputShape.forEach((dimension, i) => {\n if (dimension != null && inputShape[i] != null &&\n inputShape[i] !== dimension) {\n dimMismatch = true;\n }\n });\n if (dimMismatch) {\n console.warn(\n `The shape of the input tensor ` +\n `(${JSON.stringify(inputShape)}) does not ` +\n `match the expectation of layer ${this.name}: ` +\n `${JSON.stringify(this.batchInputShape)}`);\n }\n }\n }\n\n /**\n * Retrieves the output shape(s) of a layer.\n *\n * Only applicable if the layer has only one inbound node, or if all inbound\n * nodes have the same output shape.\n *\n * @returns Output shape or shapes.\n * @throws AttributeError: if the layer is connected to more than one incoming\n * nodes.\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n get outputShape(): Shape|Shape[] {\n if (this.inboundNodes == null || this.inboundNodes.length === 0) {\n throw new AttributeError(\n `The layer ${this.name} has never been called and thus has no ` +\n `defined output shape.`);\n }\n const allOutputShapes: string[] = [];\n for (const node of this.inboundNodes) {\n const shapeString = JSON.stringify(node.outputShapes);\n if (allOutputShapes.indexOf(shapeString) === -1) {\n allOutputShapes.push(shapeString);\n }\n }\n if (allOutputShapes.length === 1) {\n const outputShapes = this.inboundNodes[0].outputShapes;\n if (Array.isArray(outputShapes) && Array.isArray(outputShapes[0]) &&\n outputShapes.length === 1) {\n return (outputShapes as Shape[])[0];\n } else {\n return outputShapes;\n }\n\n } else {\n throw new AttributeError(\n `The layer ${this.name} has multiple inbound nodes with different ` +\n `output shapes. 
Hence the notion of \"outut shape\" is ill-defined ` +\n `for the layer.`);\n // TODO(cais): Implement getOutputShapeAt().\n }\n }\n\n /**\n * Counts the total number of numbers (e.g., float32, int32) in the\n * weights.\n *\n * @returns An integer count.\n * @throws RuntimeError: If the layer is not built yet (in which case its\n * weights are not defined yet.)\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n countParams(): number {\n if (!this.built) {\n throw new RuntimeError(\n `You tried to call countParams() on ${this.name}, ` +\n `but the layer is not built yet. Build it first by calling ` +\n `build(batchInputShape).`);\n }\n return variable_utils.countParamsInWeights(this.weights);\n }\n\n /**\n * Creates the layer weights.\n *\n * Must be implemented on all layers that have weights.\n *\n * Called when apply() is called to construct the weights.\n *\n * @param inputShape A `Shape` or array of `Shape` (unused).\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n build(inputShape: Shape|Shape[]) {\n this.built = true;\n }\n\n /**\n * Returns the current values of the weights of the layer.\n *\n * @param trainableOnly Whether to get the values of only trainable weights.\n * @returns Weight values as an `Array` of `tf.Tensor`s.\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n getWeights(trainableOnly = false): Tensor[] {\n return batchGetValue(trainableOnly ? this.trainableWeights : this.weights);\n }\n\n /**\n * Sets the weights of the layer, from Tensors.\n *\n * @param weights a list of Tensors. The number of arrays and their shape\n * must match number of the dimensions of the weights of the layer (i.e.\n * it should match the output of `getWeights`).\n *\n * @exception ValueError If the provided weights list does not match the\n * layer's specifications.\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n setWeights(weights: Tensor[]): void {\n tidy(() => {\n const params = this.weights;\n if (params.length !== weights.length) {\n // TODO(cais): Restore the following and use `providedWeights`, instead\n // of `weights` in the error message, once the deeplearn.js bug is\n // fixed: https://github.com/PAIR-code/deeplearnjs/issues/498 const\n // providedWeights = JSON.stringify(weights).substr(0, 50);\n throw new ValueError(\n `You called setWeights(weights) on layer \"${this.name}\" ` +\n `with a weight list of length ${weights.length}, ` +\n `but the layer was expecting ${params.length} weights. 
` +\n `Provided weights: ${weights}...`);\n }\n if (params.length === 0) {\n return;\n }\n const weightValueTuples: Array<[LayerVariable, Tensor]> = [];\n const paramValues = batchGetValue(params);\n for (let i = 0; i < paramValues.length; ++i) {\n const pv = paramValues[i];\n const p = params[i];\n const w = weights[i];\n if (!util.arraysEqual(pv.shape, w.shape)) {\n throw new ValueError(\n `Layer weight shape ${pv.shape} ` +\n `not compatible with provided weight shape ${w.shape}`);\n }\n weightValueTuples.push([p, w]);\n }\n batchSetValue(weightValueTuples);\n });\n }\n\n /**\n * Adds a weight variable to the layer.\n *\n * @param name Name of the new weight variable.\n * @param shape The shape of the weight.\n * @param dtype The dtype of the weight.\n * @param initializer An initializer instance.\n * @param regularizer A regularizer instance.\n * @param trainable Whether the weight should be trained via backprop or not\n * (assuming that the layer itself is also trainable).\n * @param constraint An optional trainable.\n * @return The created weight variable.\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n protected addWeight(\n name: string, shape: Shape, dtype?: DataType, initializer?: Initializer,\n regularizer?: Regularizer, trainable?: boolean,\n constraint?: Constraint): LayerVariable {\n // Reject duplicate weight names.\n if (this._addedWeightNames.indexOf(name) !== -1) {\n throw new ValueError(\n `Duplicate weight name ${name} for layer ${this.name}`);\n }\n this._addedWeightNames.push(name);\n\n if (dtype == null) {\n dtype = 'float32';\n }\n const weight = new LayerVariable(\n initializer.apply(shape, dtype), dtype, name, trainable, constraint);\n // Request backend not to dispose the weights of the model on scope() exit.\n if (regularizer != null) {\n this.addLoss(() => regularizer.apply(weight.read()));\n }\n if (trainable == null) {\n trainable = true;\n }\n if (trainable) {\n this._trainableWeights.push(weight);\n } else {\n this._nonTrainableWeights.push(weight);\n }\n return weight;\n }\n\n /**\n * Add losses to the layer.\n *\n * The loss may potentionally be conditional on some inputs tensors,\n * for instance activity losses are conditional on the layer's inputs.\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n addLoss(losses: RegularizerFn|RegularizerFn[]): void {\n if (losses == null || Array.isArray(losses) && losses.length === 0) {\n return;\n }\n // Update this.losses\n losses = generic_utils.toList(losses);\n if (this._losses !== undefined && this._losses !== null) {\n this.losses.push(...losses);\n }\n }\n\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). 
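For instance, `[null, 4]` (an illustrative shape) describes flattened length-4 vectors with an undetermined batch size. 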
Shape tuples can include null for\n * free dimensions, instead of an integer.\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n return inputShape;\n }\n\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs: Tensor|Tensor[], mask?: Tensor|Tensor[]): Tensor\n |Tensor[] {\n if (!this.supportsMasking) {\n if (mask != null) {\n if (Array.isArray(mask)) {\n mask.forEach(maskElement => {\n if (maskElement != null) {\n throw new TypeError(\n `Layer ${this.name} does not support masking,` +\n 'but was passed an inputMask.');\n }\n });\n } else {\n throw new TypeError(\n `Layer ${this.name} does not support masking,` +\n 'but was passed an inputMask.');\n }\n }\n // masking not explicitly supported: return null as mask\n return null;\n }\n // if masking is explictly supported, by default\n // carry over the input mask\n return mask;\n }\n\n /**\n * Internal method to create an inbound node for the layer.\n *\n * @param inputTensors List of input tensors.\n * @param outputTensors List of output tensors.\n * @param inputMasks List of input masks (a mask can be a tensor, or null).\n * @param outputMasks List of output masks (a mask can be a tensor, or null).\n * @param inputShapes List of input shape tuples.\n * @param outputShapes List of output shape tuples.\n * @param kwargs Dictionary of keyword arguments that were passed to the\n * `call` method of the layer at the call that created the node.\n */\n private addInboundNode(\n inputTensors: SymbolicTensor|SymbolicTensor[],\n outputTensors: SymbolicTensor|SymbolicTensor[],\n inputMasks: Tensor|Tensor[], outputMasks: Tensor|Tensor[],\n inputShapes: Shape|Shape[], outputShapes: Shape|Shape[],\n kwargs: {} = null): void {\n const inputTensorList: SymbolicTensor[] =\n generic_utils.toList(inputTensors);\n outputTensors = generic_utils.toList(outputTensors);\n inputMasks = generic_utils.toList(inputMasks);\n outputMasks = generic_utils.toList(outputMasks);\n inputShapes = types_utils.normalizeShapeList(inputShapes);\n outputShapes = types_utils.normalizeShapeList(outputShapes);\n\n // Collect input tensor(s) coordinates.\n const inboundLayers: Layer[] = [];\n const nodeIndices: number[] = [];\n const tensorIndices: number[] = [];\n for (const x of inputTensorList) {\n /*\n * TODO(michaelterry): Keras adds this value to tensors; it's not\n * clear whether we'll use this or not.\n */\n inboundLayers.push(x.sourceLayer as Layer);\n nodeIndices.push(x.nodeIndex);\n tensorIndices.push(x.tensorIndex);\n }\n\n // Create node, add it to inbound nodes.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node(\n {\n outboundLayer: this,\n inboundLayers,\n nodeIndices,\n tensorIndices,\n inputTensors: inputTensorList,\n outputTensors,\n inputMasks,\n outputMasks,\n inputShapes,\n outputShapes\n },\n kwargs);\n\n // Update tensor history\n for (let i = 0; i < outputTensors.length; i++) {\n // TODO(michaelterry: _uses_learning_phase not tracked.\n outputTensors[i].sourceLayer = this;\n outputTensors[i].nodeIndex = this.inboundNodes.length - 1;\n outputTensors[i].tensorIndex = i;\n }\n }\n\n /**\n * Returns the config of the layer.\n *\n * A layer config is a TS dictionary (serializable)\n * containing the configuration of a layer.\n * The same layer 
can be reinstantiated later\n * (without its trained weights) from this configuration.\n *\n * The config of a layer does not include connectivity\n * information, nor the layer class name. These are handled\n * by 'Container' (one layer of abstraction above).\n *\n * Porting Note: The TS dictionary follows TS naming standrds for\n * keys, and uses tfjs-layers type-safe Enums. Serialization methods\n * should use a helper function to convert to the pythonic storage\n * standard. (see serialization_utils.convertTsToPythonic)\n *\n * @returns TS dictionary of configuration.\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n getConfig(): serialization.ConfigDict {\n const config:\n serialization.ConfigDict = {name: this.name, trainable: this.trainable};\n if (this.batchInputShape != null) {\n config['batchInputShape'] = this.batchInputShape;\n }\n if (this.dtype != null) {\n config['dtype'] = this.dtype;\n }\n return config;\n }\n\n /**\n * Dispose the weight variables that this Layer instance holds.\n *\n * @returns {number} Number of disposed variables.\n */\n protected disposeWeights(): number {\n this.weights.forEach(weight => weight.dispose());\n return this.weights.length;\n }\n\n protected assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Layer '${this.name}' is already disposed.`);\n }\n }\n\n /**\n * Attempt to dispose layer's weights.\n *\n * This method decrease the reference count of the Layer object by 1.\n *\n * A Layer is reference-counted. Its reference count is incremented by 1\n * the first item its `apply()` method is called and when it becomes a part\n * of a new `Node` (through calling the `apply()`) method on a\n * `tf.SymbolicTensor`).\n *\n * If the reference count of a Layer becomes 0, all the weights will be\n * disposed and the underlying memory (e.g., the textures allocated in WebGL)\n * will be freed.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * weights of the Layer will *not* be disposed.\n *\n * After a Layer is disposed, it cannot be used in calls such as `apply()`,\n * `getWeights()` or `setWeights()` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the Container after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the layer has already\n * been disposed.\n */\n /** @doc {heading: 'Models', 'subheading': 'Classes'} */\n dispose(): DisposeResult {\n if (!this.built) {\n throw new Error(\n `Cannot dispose Layer ${this.name} because it has not been ` +\n `built yet.`);\n }\n\n if (this._refCount === null) {\n throw new Error(\n `Cannot dispose Layer ${this.name} because it has not been used ` +\n `yet.`);\n }\n\n this.assertNotDisposed();\n\n let numDisposedVariables = 0;\n if (--this._refCount === 0) {\n numDisposedVariables = this.disposeWeights();\n }\n\n return {refCountAfterDispose: this._refCount, numDisposedVariables};\n }\n}\n\n/**\n * Collects the input shape(s) of a list of `tf.Tensor`s or\n * `tf.SymbolicTensor`s.\n *\n * TODO(michaelterry): Update PyKeras docs (backport).\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return List of shape tuples (or single tuple), one tuple per input.\n */\nfunction collectInputShape(inputTensors: SymbolicTensor|SymbolicTensor[]|Tensor|\n Tensor[]): Shape|Shape[] {\n inputTensors =\n 
generic_utils.toList(inputTensors) as SymbolicTensor[] | Tensor[];\n const shapes: Shape[] = [];\n for (const x of inputTensors) {\n shapes.push(x.shape);\n }\n return generic_utils.singletonOrArray(shapes);\n}\n\n/**\n * Guesses output dtype based on inputs.\n *\n * At present, just returns 'float32' for any input.\n *\n * @param inputTensors List of input tensors (or single input tensor).\n *\n * @return The guessed DType. At present, always returns 'float32'.\n */\nfunction guessOutputDType(inputTensors: SymbolicTensor|SymbolicTensor[]|Tensor|\n Tensor[]): DataType {\n return 'float32';\n}\n\n/**\n * Returns the list of input tensors necessary to compute `tensor`.\n *\n * Output will always be a list of tensors (potentially with 1 element).\n *\n * @param tensor The tensor to start from.\n * @param layer Origin layer of the tensor.\n * @param nodeIndex Origin node index of the tensor.\n *\n * @return Array of input tensors.\n */\nexport function getSourceInputs(\n tensor: SymbolicTensor, layer?: Layer,\n nodeIndex?: number): SymbolicTensor[] {\n if (layer == null || (nodeIndex != null && nodeIndex > 0)) {\n layer = tensor.sourceLayer;\n nodeIndex = tensor.nodeIndex;\n }\n if (layer.inboundNodes.length === 0) {\n return [tensor];\n } else {\n const node = layer.inboundNodes[nodeIndex];\n if (node.inboundLayers.length === 0) {\n return node.inputTensors;\n } else {\n const sourceTensors: SymbolicTensor[] = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const previousSources = getSourceInputs(x, layer, nodeIndex);\n // Avoid input redundancy.\n for (const x of previousSources) {\n if (sourceTensors.indexOf(x) === -1) {\n sourceTensors.push(x);\n }\n }\n }\n return sourceTensors;\n }\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport {DataType, serialization, Tensor} from '@tensorflow/tfjs-core';\n\nimport {getUid} from '../backend/state';\nimport {ValueError} from '../errors';\nimport {Kwargs, Shape} from '../types';\n\nimport {DisposeResult, Layer, Node, SymbolicTensor} from './topology';\n\n/**\n * Constructor arguments for InputLayer.\n *\n * Note: You should provide only inputShape or batchInputShape (not both).\n * If only inputShape is provided, then the batchInputShape is determined by\n * the batchSize argument and the inputShape: [batchSize].concat(inputShape).\n */\nexport interface InputLayerConfig {\n /** Input shape, not including the batch axis. */\n inputShape?: Shape;\n /** Optional input batch size (integer or null). */\n batchSize?: number;\n /** Batch input shape, including the batch axis. */\n batchInputShape?: Shape;\n /** Datatype of the input. */\n dtype?: DataType;\n /**\n * Whether the placeholder created is meant to be sparse.\n */\n sparse?: boolean; // TODO(michaelterry): Not clear whether we'll need this.\n\n /** Name of the layer. */\n name?: string;\n}\n\n/**\n * An input layer is an entry point into a `tf.Model`.\n *\n * `InputLayer` is generated automatically for `tf.Sequential`` models by\n * specifying the `inputshape` or `batchInputShape` for the first layer. It\n * should not be specified explicitly. 
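A small sketch of the `inputShape`/`batchSize` normalization described in `InputLayerConfig` above (the batch input shape is `[batchSize].concat(inputShape)`, with a missing `batchSize` becoming `null`). The shapes are illustrative; it assumes the `batchInputShape` field is readable on the constructed layer, as in the `getConfig()` code above.

```js
import * as tf from '@tensorflow/tfjs';

const withBatch = tf.layers.inputLayer({inputShape: [28, 28], batchSize: 4});
console.log(withBatch.batchInputShape);  // [4, 28, 28]

const withoutBatch = tf.layers.inputLayer({inputShape: [28, 28]});
console.log(withoutBatch.batchInputShape);  // [null, 28, 28]
```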
However, it can be useful sometimes,\n * e.g., when constructing a sequential model from a subset of another\n * sequential model's layers. Like the code snippet below shows.\n *\n * ```js\n * // Define a model which simply adds two inputs.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.dense({inputShape: [4], units: 3, activation: 'relu'}));\n * model1.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n * model1.summary();\n * model1.preidct(tf.zeros([1, 4])).print();\n *\n * // Construct another model, reusing the second layer of `model1` while\n * // not using the first layer of `model1`. Note that you cannot add the second\n * // layer of `model` directly as the first layer of the new sequential model,\n * // because doing so will lead to an error related to the fact that the layer\n * // is not an input layer. Instead, you need to create an `inputLayer` and add\n * // it to the new sequential model before adding the reused layer.\n * const model2 = tf.sequential();\n * // Use an inputShape that matches the input shape of `model1`'s second\n * // layer.\n * model2.add(tf.layers.inputLayer({inputShape: [3]}));\n * model2.add(model1.layers[1]);\n * model2.summary();\n * model2.preidct(tf.zeros([1, 3])).print();\n * ```\n */\nexport class InputLayer extends Layer {\n static readonly className = 'InputLayer';\n sparse: boolean;\n constructor(config: InputLayerConfig) {\n super({\n dtype: config.dtype,\n name: config.name != null ? config.name : getUid('input').toString()\n });\n // Normalize config.batchSize and config.sparse\n if (config.batchSize == null) {\n config.batchSize = null;\n }\n if (config.sparse == null) {\n config.sparse = false;\n }\n\n this.trainable = false;\n this.built = true;\n this.sparse = config.sparse;\n\n if (config.inputShape != null && config.batchInputShape != null) {\n throw new ValueError(\n 'Only provide the inputShape OR ' +\n 'batchInputShape argument to inputLayer, not both at the same time.');\n }\n let batchInputShape = config.batchInputShape;\n if (batchInputShape == null) {\n if (config.inputShape == null) {\n throw new ValueError(\n 'An InputLayer should be passed either a ' +\n '`batchInputShape` or an `inputShape`.');\n } else {\n batchInputShape = [config.batchSize].concat(config.inputShape);\n }\n } else {\n // TODO(michaelterry): Backport to PyKeras\n if (config.batchSize != null) {\n throw new ValueError(\n 'Cannot specify batchSize if batchInputShape is' +\n 'specified when creating an InputLayer.');\n }\n }\n\n const dtype = config.dtype || 'float32';\n\n this.batchInputShape = batchInputShape;\n this.dtype = dtype;\n // TODO(michaelterry): Backport this to PyKeras?\n this.inputSpec = [{shape: batchInputShape}];\n\n const inputTensor = new SymbolicTensor(\n this.dtype, this.batchInputShape, this, [], {}, this.name);\n inputTensor.nodeIndex = 0;\n inputTensor.tensorIndex = 0;\n\n // Create an input node to add to this.outboundNode.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: [inputTensor],\n outputTensors: [inputTensor],\n inputMasks: [null],\n outputMasks: [null],\n inputShapes: [batchInputShape],\n outputShapes: [batchInputShape]\n });\n }\n\n apply(\n inputs: Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[],\n kwargs?: Kwargs): Tensor|Tensor[]|SymbolicTensor {\n throw new ValueError(\n 'Cannot pass any input to an ' +\n `InputLayer's apply() method. 
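A cleaned-up, runnable restatement of the layer-reuse snippet documented above (the intended calls are `model.predict(...)`); the shapes and unit counts are the illustrative values from that snippet.

```js
import * as tf from '@tensorflow/tfjs';

// Reuse the second dense layer of model1 as the body of model2. An explicit
// inputLayer matching that layer's expected input shape is required, because
// a non-input layer cannot be the first layer of a tf.sequential() model.
const model1 = tf.sequential();
model1.add(tf.layers.dense({inputShape: [4], units: 3, activation: 'relu'}));
model1.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));
model1.predict(tf.zeros([1, 4])).print();

const model2 = tf.sequential();
model2.add(tf.layers.inputLayer({inputShape: [3]}));
model2.add(model1.layers[1]);
model2.predict(tf.zeros([1, 3])).print();
```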
InputLayer name: ${this.name}`);\n }\n\n dispose(): DisposeResult {\n // dispose() for InputLayer is overridden as no-op.\n return {refCountAfterDispose: this._refCount, numDisposedVariables: 0};\n }\n\n getConfig(): serialization.ConfigDict {\n return {\n batchInputShape: this.batchInputShape,\n dtype: this.dtype,\n sparse: this.sparse,\n name: this.name\n };\n }\n}\nserialization.registerClass(InputLayer);\n\n/**\n * Config for the Input function.\n *\n * Note: You should provide only shape or batchShape (not both).\n * If only shape is provided, then the batchShape becomes\n * [null].concat(inputShape).\n */\nexport interface InputConfig {\n /**\n * A shape, not including the batch size. For instance, `shape=[32]`\n * indicates that the expected input will be batches of 32-dimensional\n * vectors.\n */\n shape?: Shape;\n /**\n * A shape tuple (integer), including the batch size. For instance,\n * `batchShape=[10, 32]` indicates that the expected input will be batches of\n * 10 32-dimensional vectors. `batchShape=[null, 32]` indicates batches of an\n * arbitrary number of 32-dimensional vectors.\n */\n batchShape?: Shape;\n /**\n * An optional name string for the layer. Should be unique in a model (do not\n * reuse the same name twice). It will be autogenerated if it isn't provided.\n */\n name?: string;\n dtype?: DataType;\n /**\n * A boolean specifying whether the placeholder to be created is sparse.\n */\n sparse?: boolean;\n}\n\n/**\n * Used to instantiate an input to a model as a `tf.SymbolicTensor`.\n *\n * Users should call the `input` factory function for\n * consistency with other generator functions.\n *\n * Example:\n *\n * ```js\n * // Defines a simple logistic regression model with 32 dimensional input\n * // and 3 dimensional output.\n * const x = tf.input({shape: [32]});\n * const y = tf.layers.dense({units: 3, activation: 'softmax'}).apply(x);\n * const model = tf.model({inputs: x, outputs: y});\n * model.predict(tf.ones([2, 32])).print();\n * ```\n *\n * Note: `input` is only necessary when using `model`. When using\n * `sequential`, specify `inputShape` for the first layer or use `inputLayer`\n * as the first layer.\n */\nexport function Input(config: InputConfig): SymbolicTensor {\n if (config.batchShape == null && config.shape == null) {\n throw new Error(\n 'Please provide to Input either a `shape`' +\n ' or a `batchShape` argument. 
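A short sketch of the `shape` vs. `batchShape` relationship spelled out in `InputConfig` above (only one of the two may be given; `shape: [32]` is equivalent to `batchShape: [null, 32]`). Shapes are illustrative.

```js
import * as tf from '@tensorflow/tfjs';

const a = tf.input({shape: [32]});
const b = tf.input({batchShape: [null, 32]});
console.log(a.shape);  // [null, 32]
console.log(b.shape);  // [null, 32]

// A fixed batch size can only be expressed through batchShape:
const c = tf.input({batchShape: [10, 32]});
console.log(c.shape);  // [10, 32]
```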
Note that ' +\n '`shape` does not include the batch ' +\n 'dimension.');\n }\n if (config.batchShape != null && config.shape != null) {\n // TODO(michaelterry): Backport to PyKeras.\n throw new ValueError(\n 'Please provide either a `shape` or `batchShape` ' +\n 'argument to Input, but not both.');\n }\n let batchShape = config.batchShape;\n if (config.shape != null && batchShape == null) {\n batchShape = [null].concat(config.shape);\n }\n\n let dtype = config.dtype;\n if (dtype == null) {\n dtype = 'float32';\n }\n\n const inputLayer = new InputLayer({\n batchInputShape: batchShape,\n name: config.name,\n dtype,\n sparse: config.sparse\n });\n\n const outputs = inputLayer.inboundNodes[0].outputTensors;\n return outputs[0];\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original source: keras/callbacks.py */\n\nimport {add, div, keep, mul, nextFrame, Scalar, Tensor, tidy, util} from '@tensorflow/tfjs-core';\n\nimport {getScalar} from './backend/state';\nimport {Container} from './engine/container';\nimport {ValueError} from './errors';\nimport {Logs, resolveScalarsInLogs, UnresolvedLogs} from './logs';\nimport * as generic_utils from './utils/generic_utils';\n\n/** Verbosity logging level when fitting a model. */\nexport enum ModelLoggingVerbosity {\n SILENT = 0,\n VERBOSE = 1\n}\n\nexport type Params = {\n [key: string]: number|string|boolean|number[]|string[]|boolean[];\n};\n\nexport type YieldEveryOptions = 'auto'|'batch'|'epoch'|'never';\n\n/**\n * Abstract base class used to build new callbacks.\n *\n * The `logs` dictionary that callback methods take as argument will contain\n * keys for quantities relevant to the current batch or epoch.\n *\n * Currently, the `.fit()` method of the `Sequential` model class\n * will include the following quantities in the `logs` that\n * it passes to its callbacks:\n *\n * onEpochEnd: Logs include `acc` and `loss`, and optionally include `valLoss`\n * (if validation is enabled in `fit`), and `valAcc` (if validation and\n * accuracy monitoring are enabled).\n * onBatchBegin: Logs include `size`, the number of samples in the current\n * batch.\n * onBatchEnd: Logs include `loss`, and optionally `acc` (if accuracy monitoring\n * is enabled).\n */\nexport abstract class BaseCallback {\n // TODO(michaelterry): This type is a best guess.\n validationData: Tensor|Tensor[] = null;\n /**\n * Training parameters (eg. verbosity, batch size, number of epochs...).\n */\n params: Params;\n\n setParams(params: Params): void {\n this.params = params;\n }\n\n async onEpochBegin(epoch: number, logs?: UnresolvedLogs) {}\n\n async onEpochEnd(epoch: number, logs?: UnresolvedLogs) {}\n\n async onBatchBegin(batch: number, logs?: UnresolvedLogs) {}\n\n async onBatchEnd(batch: number, logs?: UnresolvedLogs) {}\n\n async onTrainBegin(logs?: UnresolvedLogs) {}\n\n async onTrainEnd(logs?: UnresolvedLogs) {}\n\n // Model needs to call Callback.setModel(), but cannot actually depend on\n // Callback because that creates a cyclic dependency. Providing this no-op\n // method on BaseCallback breaks the cycle: this way Model can depend on\n // BaseCallback but not on Callback. The argument is typed as `Container`\n // (the superclass of Model) to avoid recapitulating the cycle. 
Callback\n // overrides this method and enforces that the argument is really a Model.\n setModel(model: Container): void {\n // Do nothing. Use Callback instead of BaseCallback to track the model.\n }\n}\n\n/**\n * Container abstracting a list of callbacks.\n */\nexport class CallbackList {\n callbacks: BaseCallback[];\n queueLength: number;\n\n // TODO(cais): When the need arises, uncomment the following lines and\n // implement the queue for time values.\n // private deltaTBatch: number;\n // private deltaTsBatchBegin: Array<number>;\n // private deltaTsBatchEnd: Array<number>;\n\n /**\n * Constructor of CallbackList.\n * @param callbacks Array of `Callback` instances.\n * @param queueLength Queue length for keeping running statistics over\n * callback execution time.\n */\n constructor(callbacks?: BaseCallback[], queueLength = 10) {\n // TODO(cais): Make use of queueLength when implementing the queue for time\n // values.\n if (callbacks == null) {\n callbacks = [];\n }\n this.callbacks = callbacks;\n this.queueLength = queueLength;\n }\n\n append(callback: BaseCallback): void {\n this.callbacks.push(callback);\n }\n\n setParams(params: Params): void {\n for (const callback of this.callbacks) {\n callback.setParams(params);\n }\n }\n\n setModel(model: Container): void {\n for (const callback of this.callbacks) {\n callback.setModel(model);\n }\n }\n\n /**\n * Called at the start of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochBegin(epoch: number, logs?: UnresolvedLogs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochBegin(epoch, logs);\n }\n }\n\n /**\n * Called at the end of an epoch.\n * @param epoch Index of epoch.\n * @param logs Dictionary of logs.\n */\n async onEpochEnd(epoch: number, logs?: UnresolvedLogs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onEpochEnd(epoch, logs);\n }\n }\n\n /**\n * Called right before processing a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchBegin(batch: number, logs?: UnresolvedLogs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onBatchBegin(batch, logs);\n }\n }\n\n /**\n * Called at the end of a batch.\n * @param batch Index of batch within the current epoch.\n * @param logs Dictionary of logs.\n */\n async onBatchEnd(batch: number, logs?: UnresolvedLogs) {\n if (logs == null) {\n logs = {};\n }\n await resolveScalarsInLogs(logs);\n for (const callback of this.callbacks) {\n await callback.onBatchEnd(batch, logs);\n }\n }\n\n /**\n * Called at the beginning of training.\n * @param logs Dictionary of logs.\n */\n async onTrainBegin(logs?: UnresolvedLogs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainBegin(logs);\n }\n }\n\n /**\n * Called at the end of training.\n * @param logs Dictionary of logs.\n */\n async onTrainEnd(logs?: UnresolvedLogs) {\n if (logs == null) {\n logs = {};\n }\n for (const callback of this.callbacks) {\n await callback.onTrainEnd(logs);\n }\n }\n}\n\n/**\n * A class that manages thread yielding during model training.\n *\n * The lifetime of an instance of `ModelTrainingYielder` is that of a\n * `Model.fit()` call. 
In other words, each `Model.fit()` call must create\n * and use a separate `ModelTrainingYielder` object.\n */\nexport class ModelTrainingYielder {\n // How many batches to skip at the beginning of a `Model.fit` call.\n // The first batches usually are longer than the rest, because they may\n // involve warm-up time.\n static readonly SKIP_FIRST_BATCHES = 1;\n\n // How many batches to average over when calculating the average batch\n // duration.\n static readonly DECISION_BATCH_COUNT = 2;\n\n // How many milliseconds to wait before yielding again.\n static readonly THRESHOLD_MILLIS = 16;\n\n private yieldEvery: YieldEveryOptions;\n private batchCount: number;\n private lastYieldBatchCount: number;\n private batchStartMillis: number;\n private batchDurationsMillis: number[];\n private autoYieldEveryBatches: number;\n\n /**\n * Constructor of ModelTrainingYielder\n *\n * @param yieldEvery The configuration for how often the yielding will occur.\n */\n constructor(yieldEvery: YieldEveryOptions) {\n this.yieldEvery = yieldEvery;\n this.batchCount = 0;\n this.batchDurationsMillis = [];\n this.autoYieldEveryBatches = null;\n this.batchStartMillis = util.now();\n }\n\n /**\n * Find the first Scalar tensor in `logs` and await data() on it.\n *\n * This causes a data download (e.g., from GPU) and therefore clears the\n * queued operations (e.g., on the GPU).\n */\n private async resolveOneTensorInLogs(logs: UnresolvedLogs) {\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n await (value as Scalar).data();\n break;\n }\n }\n }\n\n /**\n * The action taken when a batch ends.\n *\n * The action taken depends on the `yieldEvery` configuration.\n *\n * * In the case of `auto`, during the first several batches, this method\n * will estimate the average duration of each batch. It will then decide\n * how often the yielding will occur based on the estimation. The yielding\n * is achieved through\n * - Awaiting `data()` on one of the Tensors in `logs`, causing the queued\n * operations to clear.\n * - Calling `await nextFrame()`.\n * * In the case of `batch` or `epoch`, the yielding will occur on the end of\n * every batch or every epoch, respectively.\n * * In the case of `never`, the yielding will never occur.\n *\n * @param logs The logs from the batch.\n */\n async maybeYieldOnBatch(logs: UnresolvedLogs) {\n if (this.yieldEvery === 'auto') {\n this.batchCount++;\n if (this.autoYieldEveryBatches == null) {\n // autoYieldEveryBatches has not been determined yet. We are still in\n // the measurement phase.\n await this.resolveOneTensorInLogs(logs);\n const t = util.now();\n await nextFrame();\n // We skip the first few batches for timing, because they usually\n // involve some warm-up time.\n if (this.batchCount > ModelTrainingYielder.SKIP_FIRST_BATCHES) {\n this.batchDurationsMillis.push(t - this.batchStartMillis);\n if (this.batchDurationsMillis.length >=\n ModelTrainingYielder.DECISION_BATCH_COUNT) {\n const meanBatchDuration =\n this.batchDurationsMillis.reduce((dur, prev) => dur + prev) /\n this.batchDurationsMillis.length;\n this.autoYieldEveryBatches = Math.round(\n ModelTrainingYielder.THRESHOLD_MILLIS / meanBatchDuration);\n if (this.autoYieldEveryBatches < 1) {\n this.autoYieldEveryBatches = 1;\n }\n }\n }\n this.batchStartMillis = util.now();\n this.lastYieldBatchCount = this.batchCount;\n } else {\n // autoYieldEveryBatch has been determined. 
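A worked example of the `'auto'` estimation described above, plugging in the class constants (`THRESHOLD_MILLIS = 16`, two decision batches after one skipped warm-up batch). The batch durations are made up.

```js
// Two measured batch durations in milliseconds (after the warm-up batch):
const batchDurationsMillis = [4, 6];
const meanBatchDuration =
    batchDurationsMillis.reduce((a, b) => a + b) / batchDurationsMillis.length;  // 5

let autoYieldEveryBatches = Math.round(16 / meanBatchDuration);  // round(3.2) = 3
if (autoYieldEveryBatches < 1) {
  autoYieldEveryBatches = 1;  // never yield less often than every batch floor of 1
}
console.log(autoYieldEveryBatches);  // 3 -> yield to the event loop every 3rd batch
```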
We perform yielding\n // accordingly.\n if (this.batchCount - this.lastYieldBatchCount >=\n this.autoYieldEveryBatches) {\n await nextFrame();\n await this.resolveOneTensorInLogs(logs);\n this.lastYieldBatchCount = this.batchCount;\n }\n }\n } else if (this.yieldEvery === 'batch') {\n await nextFrame();\n }\n }\n\n async maybeYieldOnEpoch() {\n if (this.yieldEvery === 'epoch') {\n await nextFrame();\n }\n }\n}\n\n/**\n * Callback that accumulates epoch averages of metrics.\n *\n * This callback is automatically applied to every Model.\n */\nexport class BaseLogger extends BaseCallback {\n private seen: number;\n private totals: UnresolvedLogs;\n private autoYielder: ModelTrainingYielder;\n private yieldEvery: YieldEveryOptions;\n\n constructor(yieldEvery?: YieldEveryOptions) {\n super();\n\n this.yieldEvery = yieldEvery || 'auto';\n }\n\n async onTrainBegin(logs?: UnresolvedLogs) {\n this.autoYielder = new ModelTrainingYielder(this.yieldEvery);\n }\n\n async onEpochBegin(epoch: number) {\n this.seen = 0;\n this.totals = {};\n }\n\n async onBatchEnd(batch: number, logs?: UnresolvedLogs) {\n await this.autoYielder.maybeYieldOnBatch(logs);\n\n if (logs == null) {\n logs = {};\n }\n const batchSize = logs['size'] == null ? 0 : logs['size'] as number;\n this.seen += batchSize;\n for (const key in logs) {\n const value = logs[key];\n if (typeof value === 'number') {\n if (!this.totals.hasOwnProperty(key)) {\n this.totals[key] = 0;\n }\n this.totals[key] = this.totals[key] as number + value * batchSize;\n } else {\n let oldTotalsToDispose: Scalar;\n if (key in this.totals) {\n oldTotalsToDispose = this.totals[key] as Scalar;\n } else {\n this.totals[key] = getScalar(0);\n }\n\n this.totals[key] = tidy(\n () => add((this.totals[key] as Scalar),\n mul(value, getScalar(batchSize))) as Scalar);\n if (oldTotalsToDispose != null) {\n oldTotalsToDispose.dispose();\n }\n }\n }\n }\n\n async onEpochEnd(epoch: number, logs?: UnresolvedLogs) {\n await this.autoYielder.maybeYieldOnEpoch();\n\n if (logs != null) {\n for (const key of this.params['metrics'] as string[]) {\n if (this.totals[key] == null) {\n continue;\n }\n if (typeof this.totals[key] === 'number') {\n logs[key] = this.totals[key] as number / this.seen;\n } else {\n tidy(() => {\n logs[key] = mul(div(getScalar(1), getScalar(this.seen)) as Scalar,\n this.totals[key] as Scalar) as Scalar;\n (this.totals[key] as Tensor).dispose();\n keep(logs[key] as Scalar);\n });\n }\n }\n }\n }\n}\n\n/**\n * Callback that records events into a `History` object. This callback is\n * automatically applied to every TF.js Layers model. 
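A numeric sketch of how `BaseLogger` turns per-batch values into an epoch average, following the `onBatchEnd`/`onEpochEnd` logic above. The batch sizes and loss values are made up.

```js
// Two batches of different sizes, each reporting a 'loss' value:
const batches = [{size: 32, loss: 0.5}, {size: 16, loss: 0.2}];

let seen = 0;
let lossTotal = 0;
for (const {size, loss} of batches) {
  seen += size;              // onBatchEnd: this.seen += batchSize
  lossTotal += loss * size;  // onBatchEnd: totals[key] += value * batchSize
}
const epochLoss = lossTotal / seen;  // onEpochEnd: totals[key] / this.seen
console.log(epochLoss);              // (0.5*32 + 0.2*16) / 48 = 0.4
```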
The `History` object\n * gets returned by the `fit` method of models.\n */\nexport class History extends BaseCallback {\n epoch: number[];\n history: {[key: string]: Array<number|Tensor>};\n\n async onTrainBegin(logs?: UnresolvedLogs) {\n this.epoch = [];\n this.history = {};\n }\n\n async onEpochEnd(epoch: number, logs?: UnresolvedLogs) {\n if (logs == null) {\n logs = {};\n }\n this.epoch.push(epoch);\n for (const key in logs) {\n if (this.history[key] == null) {\n this.history[key] = [];\n }\n this.history[key].push(logs[key]);\n }\n }\n\n /**\n * Await the values of all losses and metrics.\n */\n async syncData() {\n const promises: Array<Promise<Float32Array|Int32Array|Uint8Array>> = [];\n const keys: string[] = [];\n const indices: number[] = [];\n for (const key in this.history) {\n const valueArray = this.history[key];\n for (let i = 0; i < valueArray.length; ++i) {\n if (typeof valueArray[i] !== 'number') {\n const valueScalar = valueArray[i] as Tensor;\n promises.push(valueScalar.data());\n keys.push(key);\n indices.push(i);\n }\n }\n }\n const values = await Promise.all(promises);\n for (let n = 0; n < values.length; ++n) {\n const tensorToDispose = this.history[keys[n]][indices[n]] as Tensor;\n tensorToDispose.dispose();\n this.history[keys[n]][indices[n]] = values[n][0];\n }\n }\n}\n\nexport interface CustomCallbackConfig {\n onTrainBegin?: (logs?: Logs) => Promise<void>;\n onTrainEnd?: (logs?: Logs) => Promise<void>;\n onEpochBegin?: (epoch: number, logs?: Logs) => Promise<void>;\n onEpochEnd?: (epoch: number, logs?: Logs) => Promise<void>;\n onBatchBegin?: (batch: number, logs?: Logs) => Promise<void>;\n onBatchEnd?: (batch: number, logs?: Logs) => Promise<void>;\n}\n\n/**\n * Custom callback for training.\n */\nexport class CustomCallback extends BaseCallback {\n protected readonly trainBegin: (logs?: Logs) => Promise<void>;\n protected readonly trainEnd: (logs?: Logs) => Promise<void>;\n protected readonly epochBegin: (epoch: number, logs?: Logs) => Promise<void>;\n protected readonly epochEnd: (epoch: number, logs?: Logs) => Promise<void>;\n protected readonly batchBegin: (batch: number, logs?: Logs) => Promise<void>;\n protected readonly batchEnd: (batch: number, logs?: Logs) => Promise<void>;\n\n constructor(config: CustomCallbackConfig) {\n super();\n this.trainBegin = config.onTrainBegin;\n this.trainEnd = config.onTrainEnd;\n this.epochBegin = config.onEpochBegin;\n this.epochEnd = config.onEpochEnd;\n this.batchBegin = config.onBatchBegin;\n this.batchEnd = config.onBatchEnd;\n }\n\n async onEpochBegin(epoch: number, logs?: UnresolvedLogs): Promise<void> {\n if (this.epochBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.epochBegin(epoch, logs as Logs);\n }\n }\n\n async onEpochEnd(epoch: number, logs?: UnresolvedLogs): Promise<void> {\n if (this.epochEnd != null) {\n await resolveScalarsInLogs(logs);\n await this.epochEnd(epoch, logs as Logs);\n }\n }\n\n async onBatchBegin(batch: number, logs?: UnresolvedLogs): Promise<void> {\n if (this.batchBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.batchBegin(batch, logs as Logs);\n }\n }\n\n async onBatchEnd(batch: number, logs?: UnresolvedLogs): Promise<void> {\n if (this.batchEnd != null) {\n await resolveScalarsInLogs(logs);\n await this.batchEnd(batch, logs as Logs);\n }\n }\n\n async onTrainBegin(logs?: UnresolvedLogs): Promise<void> {\n if (this.trainBegin != null) {\n await resolveScalarsInLogs(logs);\n await this.trainBegin(logs as Logs);\n }\n }\n\n async onTrainEnd(logs?: 
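A usage sketch tying `CustomCallbackConfig` and `History` together: a plain config object with the hooks above can be passed as `callbacks` to `fit()`, and the returned `History` records per-epoch metrics. The model, data, and the helper name `run` are illustrative.

```js
import * as tf from '@tensorflow/tfjs';

async function run() {
  const model = tf.sequential();
  model.add(tf.layers.dense({units: 1, inputShape: [2]}));
  model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});

  const xs = tf.ones([8, 2]);
  const ys = tf.ones([8, 1]);

  // The config object is wrapped in a CustomCallback; by the time the hook
  // runs, Scalar log values have been resolved to plain numbers.
  const history = await model.fit(xs, ys, {
    epochs: 3,
    callbacks: {
      onEpochEnd: async (epoch, logs) => {
        console.log(`epoch ${epoch}: loss = ${logs.loss}`);
      }
    }
  });

  // The History callback is attached automatically and returned by fit().
  console.log(history.epoch);            // [0, 1, 2]
  console.log(history.history['loss']);  // one resolved number per epoch
}
run();
```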
UnresolvedLogs): Promise<void> {\n if (this.trainEnd != null) {\n await resolveScalarsInLogs(logs);\n await this.trainEnd(logs as Logs);\n }\n }\n}\n\n/**\n * Standardize callbacks or configurations of them to an Array of callbacks.\n */\nexport function standardizeCallbacks(callbacks: BaseCallback|BaseCallback[]|\n CustomCallbackConfig|\n CustomCallbackConfig[]): BaseCallback[] {\n if (callbacks == null) {\n return null;\n }\n if (callbacks instanceof BaseCallback) {\n return [callbacks as BaseCallback];\n }\n if (Array.isArray(callbacks) && callbacks[0] instanceof BaseCallback) {\n return callbacks as BaseCallback[];\n }\n // Convert custom callback configs to custom callback objects.\n const callbackConfigs =\n generic_utils.toList(callbacks) as CustomCallbackConfig[];\n return callbackConfigs.map(\n callbackConfig => new CustomCallback(callbackConfig));\n}\n\nexport declare type BaseCallbackConstructor = {\n new (): BaseCallback\n};\n\n/**\n * A global registry for callback constructors to be used during Model.fit().\n */\nexport class CallbackConstructorRegistry {\n private static constructors:\n {[verbosityLevel: number]: BaseCallbackConstructor[]} = {};\n\n /**\n * Blocks public access to constructor.\n */\n private constructor() {}\n\n /**\n * Register a tf.Model.fit() callback constructor.\n *\n * The registered callback constructor will be used to instantiate\n * callbacks for every tf.Model.fit() call afterwards.\n *\n * @param verbosityLevel Level of verbosity at which the `callbackConstructor`\n * is to be reigstered.\n * @param callbackConstructor A no-arg constructor for `tf.Callback`.\n * @throws Error, if the same callbackConstructor has been registered before,\n * either at the same or a different `verbosityLevel`.\n */\n static registerCallbackConstructor(\n verbosityLevel: number, callbackConstructor: BaseCallbackConstructor) {\n util.assert(\n verbosityLevel >= 0 && Number.isInteger(verbosityLevel),\n `Verbosity level is expected to be an integer >= 0, ` +\n `but got ${verbosityLevel}`);\n CallbackConstructorRegistry.checkForDuplicate(callbackConstructor);\n if (CallbackConstructorRegistry.constructors[verbosityLevel] == null) {\n CallbackConstructorRegistry.constructors[verbosityLevel] = [];\n }\n CallbackConstructorRegistry.constructors[verbosityLevel].push(\n callbackConstructor);\n }\n\n private static checkForDuplicate(callbackConstructor:\n BaseCallbackConstructor) {\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const constructors = CallbackConstructorRegistry.constructors[+levelName];\n constructors.forEach(ctor => {\n if (ctor === callbackConstructor) {\n throw new ValueError('Duplicate callback constructor.');\n }\n });\n }\n }\n\n /**\n * Clear all registered callback constructors.\n */\n protected static clear() {\n CallbackConstructorRegistry.constructors = {};\n }\n\n /**\n * Create callbacks using the registered callback constructors.\n *\n * Given `verbosityLevel`, all constructors registered at that level or above\n * will be called and the instantiated callbacks will be used.\n *\n * @param verbosityLevel: Level of verbosity.\n */\n static createCallbacks(verbosityLevel: number): BaseCallback[] {\n const constructors: BaseCallbackConstructor[] = [];\n for (const levelName in CallbackConstructorRegistry.constructors) {\n const level = +levelName;\n if (verbosityLevel >= level) {\n constructors.push(...CallbackConstructorRegistry.constructors[level]);\n }\n }\n return constructors.map(ctor => new ctor());\n }\n}\n\nexport 
function configureCallbacks(\n callbacks: BaseCallback[], yieldEvery: YieldEveryOptions,\n verbose: ModelLoggingVerbosity, epochs: number, initialEpoch: number,\n numTrainSamples: number, stepsPerEpoch: number, batchSize: number,\n doValidation: boolean,\n callbackMetrics: string[]): {callbackList: CallbackList, history: History} {\n const history = new History();\n const actualCallbacks: BaseCallback[] = [\n new BaseLogger(yieldEvery),\n ...CallbackConstructorRegistry.createCallbacks(verbose)\n ];\n if (callbacks != null) {\n actualCallbacks.push(...callbacks);\n }\n actualCallbacks.push(history);\n const callbackList = new CallbackList(actualCallbacks);\n\n // TODO(cais): Figure out when this Model instance can have a dynamically\n // set property called 'callback_model' as in PyKeras.\n\n callbackList.setParams({\n epochs,\n initialEpoch,\n samples: numTrainSamples,\n steps: stepsPerEpoch,\n batchSize,\n verbose,\n doValidation,\n metrics: callbackMetrics,\n });\n return {callbackList, history};\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport {dispose, Scalar} from '@tensorflow/tfjs-core';\n\n/**\n * Logs in which values can be either numbers or Tensors (Scalars).\n *\n * Used internally.\n */\nexport type UnresolvedLogs = {\n [key: string]: number|Scalar;\n};\n\n/**\n * Turn any Scalar values in a Logs object into actual number values.\n *\n * @param logs The `Logs` object to be resolved in place.\n */\nexport async function resolveScalarsInLogs(logs: UnresolvedLogs) {\n if (logs == null) {\n return;\n }\n const promises: Array<Promise<Float32Array|Int32Array|Uint8Array>> = [];\n const keys: string[] = [];\n const scalarsToDispose: Scalar[] = [];\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n const valueScalar = value as Scalar;\n promises.push(valueScalar.data());\n keys.push(key);\n scalarsToDispose.push(valueScalar);\n }\n }\n const values = await Promise.all(promises);\n for (let i = 0; i < values.length; ++i) {\n logs[keys[i]] = values[i][0];\n }\n\n // Dispose the original scalar tensors.\n dispose(scalarsToDispose);\n}\n\n/**\n * Dispose all Tensors in an UnresolvedLogs object.\n *\n * @param logs An `UnresolvedLogs` object potentially containing `tf.Tensor`s in\n * places where the values can be `tf.Tensor` or `number`.\n */\nexport function disposeTensorsInLogs(logs: UnresolvedLogs) {\n if (logs == null) {\n return;\n }\n for (const key in logs) {\n const value = logs[key];\n if (typeof value !== 'number') {\n value.dispose();\n }\n }\n}\n\n/**\n * Logs in which values can only be numbers.\n *\n * Used when calling client-provided custom callbacks.\n */\nexport type Logs = {\n [key: string]: number;\n};\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original Source: losses.py */\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {scalar, Tensor, Tensor1D, tidy} from '@tensorflow/tfjs-core';\n\nimport {epsilon} from './backend/common';\nimport {getScalar} from './backend/state';\nimport * as K from 
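A behavioral sketch of what `resolveScalarsInLogs()` (the internal helper above) does to an `UnresolvedLogs` object, spelled out with public tf ops; the real helper batches the `data()` calls and disposes the Scalars afterwards. The function name `resolveLogsSketch` and the log values are illustrative.

```js
import * as tf from '@tensorflow/tfjs';

async function resolveLogsSketch() {
  const logs = {size: 32, loss: tf.scalar(0.25)};
  for (const key of Object.keys(logs)) {
    const value = logs[key];
    if (typeof value !== 'number') {
      logs[key] = (await value.data())[0];  // Scalar -> its single number
      value.dispose();                      // free the original tensor
    }
  }
  console.log(logs);  // {size: 32, loss: 0.25}
}
resolveLogsSketch();
```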
'./backend/tfjs_backend';\nimport {ValueError} from './errors';\nimport {LossOrMetricFn} from './types';\n\n\n/**\n * Normalizes a tensor wrt the L2 norm alongside the specified axis.\n * @param x\n * @param axis Axis along which to perform normalization.\n */\nexport function l2Normalize(x: Tensor, axis?: number): Tensor {\n return tidy(() => {\n const squareSum = tfc.sum(K.square(x), axis, true);\n const epsilonTensor = tfc.mul(scalar(epsilon()), tfc.onesLike(x));\n const norm = tfc.sqrt(tfc.maximum(squareSum, epsilonTensor));\n return tfc.div(x, norm);\n });\n}\n\n/**\n * Loss or metric function: Mean squared error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [3, 4]]);\n * const yPred = tf.tensor2d([[0, 1], [-3, -4]]);\n * const mse = tf.metrics.meanSquaredError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MSE`, `tf.metrics.mse`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean squared error Tensor.\n */\nexport function meanSquaredError(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => tfc.mean(K.square(tfc.sub(yPred, yTrue)), -1));\n}\n\n/**\n * Loss or metric function: Mean absolute error.\n *\n * Mathematically, mean absolute error is defined as:\n * `mean(abs(yPred - yTrue))`,\n * wherein the `mean` is applied over feature dimensions.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [0, 0], [2, 3]]);\n * const yPred = tf.tensor2d([[0, 1], [0, 1], [-2, -3]]);\n * const mse = tf.metrics.meanAbsoluteError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute error Tensor.\n */\nexport function meanAbsoluteError(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => tfc.mean(tfc.abs(tfc.sub(yPred, yTrue)), -1));\n}\n\n/**\n * Loss or metric function: Mean absolute percentage error.\n *\n * ```js\n * const yTrue = tf.tensor2d([[0, 1], [10, 20]]);\n * const yPred = tf.tensor2d([[0, 1], [11, 24]]);\n * const mse = tf.metrics.meanAbsolutePercentageError(yTrue, yPred);\n * mse.print();\n * ```\n *\n * Aliases: `tf.metrics.MAPE`, `tf.metrics.mape`.\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Mean absolute percentage error Tensor.\n */\nexport function meanAbsolutePercentageError(\n yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const diff = tfc.sub(yTrue, yPred);\n const clippedTrue =\n tfc.clipByValue(tfc.abs(yTrue), epsilon(), Number.MAX_VALUE);\n const absResult = tfc.abs(tfc.div(diff, clippedTrue));\n return tfc.mul(getScalar(100.0), tfc.mean(absResult, -1));\n });\n}\n\nexport function meanSquaredLogarithmicError(\n yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const one = getScalar(1.0);\n\n const clippedPred = tfc.clipByValue(yPred, epsilon(), Number.MAX_VALUE);\n const firstLog = tfc.log(tfc.add(one, clippedPred));\n\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), Number.MAX_VALUE);\n const secondLog = tfc.log(tfc.add(one, clippedTrue));\n\n return tfc.mean(K.square(tfc.sub(firstLog, secondLog)), -1);\n });\n}\n\nexport function squaredHinge(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const zeroTensor = getScalar(0.0);\n const one = getScalar(1.0);\n const maxResult =\n tfc.maximum(zeroTensor, tfc.sub(one, tfc.mul(yTrue, yPred)));\n return tfc.mean(K.square(maxResult), -1);\n });\n}\n\nexport function hinge(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const zeroTensor = getScalar(0.0);\n const 
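Working through the `meanSquaredError` doc values above by hand, and adding the corresponding mean-absolute-error result for comparison; both functions are exposed under `tf.metrics` as the examples above show.

```js
import * as tf from '@tensorflow/tfjs';

// row 0: (0-0)^2, (1-1)^2             -> mean = 0
// row 1: (-3-3)^2 = 36, (-4-4)^2 = 64 -> mean = 50
const yTrue = tf.tensor2d([[0, 1], [3, 4]]);
const yPred = tf.tensor2d([[0, 1], [-3, -4]]);
tf.metrics.meanSquaredError(yTrue, yPred).print();   // [0, 50]
tf.metrics.meanAbsoluteError(yTrue, yPred).print();  // [0, 7]
```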
one = getScalar(1.0);\n const maxResult =\n tfc.maximum(zeroTensor, tfc.sub(one, tfc.mul(yTrue, yPred)));\n return tfc.mean(maxResult, -1);\n });\n}\n\nexport function categoricalHinge(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const zeroTensor = getScalar(0.0);\n const one = getScalar(1.0);\n const pos = tfc.sum(tfc.mul(yTrue, yPred), -1);\n const neg = tfc.max(tfc.mul(tfc.sub(one, yTrue), yPred), -1);\n return tfc.maximum(zeroTensor, tfc.add(one, tfc.sub(neg, pos)));\n });\n}\n\n/**\n * Logarithm of the hyperbolic cosine of the prediction error.\n *\n * `log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and\n * to `abs(x) - log(2)` for large `x`. This means that 'logcosh' works mostly\n * like the mean squared error, but will not be so strongly affected by the\n * occasional wildly incorrect prediction.\n */\nexport function logcosh(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const log2 = getScalar(Math.log(2.0));\n const predictionDiff = tfc.sub(yPred, yTrue);\n const logcoshResult = tfc.sub(\n tfc.add(\n predictionDiff,\n tfc.softplus(tfc.mul(getScalar(-2.0), predictionDiff))),\n log2);\n return tfc.mean(logcoshResult, -1);\n });\n}\n\n/**\n * Categorical crossentropy between an output tensor and a target tensor.\n *\n * @param target A tensor of the same shape as `output`.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n */\nexport function categoricalCrossentropy(\n target: Tensor, output: Tensor, fromLogits = false): Tensor {\n return tidy(() => {\n if (fromLogits) {\n output = tfc.softmax(output);\n } else {\n // scale preds so that the class probabilities of each sample sum to 1.\n const outputSum = tfc.sum(output, output.shape.length - 1, true);\n output = tfc.div(output, outputSum);\n }\n output = tfc.clipByValue(output, epsilon(), 1 - epsilon());\n return tfc.neg(tfc.sum(\n tfc.mul(target.toFloat(), tfc.log(output)), output.shape.length - 1));\n });\n}\n\n/**\n * Categorical crossentropy with integer targets.\n *\n * @param target An integer tensor.\n * @param output A tensor resulting from a softmax (unless `fromLogits` is\n * `true`, in which case `output` is expected to be the logits).\n * @param fromLogits Boolean, whether `output` is the result of a softmax, or is\n * a tensor of logits.\n */\nexport function sparseCategoricalCrossentropy(\n target: Tensor, output: Tensor, fromLogits = false): Tensor {\n return tidy(() => {\n const flatTarget = tfc.floor(K.flatten(target)).toInt() as Tensor1D;\n const outputShape = output.shape;\n const oneHotTarget =\n tfc.oneHot(flatTarget, outputShape[outputShape.length - 1])\n .reshape(outputShape);\n return categoricalCrossentropy(oneHotTarget, output, fromLogits);\n });\n}\n\n/**\n * From TensorFlow's implementation in nn_impl.py:\n *\n * For brevity, let `x = logits`, `z = labels`. 
The logistic loss is\n * z * -log(sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x))\n * = z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))\n * = z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))\n * = (1 - z) * x + log(1 + exp(-x))\n * = x - x * z + log(1 + exp(-x))\n * For x < 0, to avoid overflow in exp(-x), we reformulate the above\n * x - x * z + log(1 + exp(-x))\n * = log(exp(x)) - x * z + log(1 + exp(-x))\n * = - x * z + log(1 + exp(x))\n * Hence, to ensure stability and avoid overflow, the implementation uses this\n * equivalent formulation\n * max(x, 0) - x * z + log(1 + exp(-abs(x)))\n *\n * @param target The labels.\n * @param output The logits.\n */\nexport function sigmoidCrossEntropyWithLogits(\n target: Tensor, output: Tensor): Tensor {\n return tidy(() => {\n const maxOutput = tfc.maximum(output, tfc.zerosLike(output));\n const outputXTarget = tfc.mul(output, target);\n const sigmoidOutput =\n tfc.log(tfc.add(getScalar(1), tfc.exp(tfc.neg(tfc.abs(output)))));\n const result = tfc.add(tfc.sub(maxOutput, outputXTarget), sigmoidOutput);\n return result;\n });\n}\n\nexport function binaryCrossentropy(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n let y: Tensor;\n y = tfc.clipByValue(yPred, epsilon(), 1 - epsilon());\n y = tfc.log(tfc.div(y, tfc.sub(tfc.onesLike(y), y)));\n return tfc.mean(sigmoidCrossEntropyWithLogits(yTrue, y), -1);\n });\n}\n\nexport function kullbackLeiblerDivergence(\n yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const clippedTrue = tfc.clipByValue(yTrue, epsilon(), 1);\n const clippedPred = tfc.clipByValue(yPred, epsilon(), 1);\n return tfc.sum(\n tfc.mul(yTrue, tfc.log(tfc.div(clippedTrue, clippedPred))), -1);\n });\n}\n\nexport function poisson(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const logPred = tfc.log(tfc.add(getScalar(epsilon()), yPred));\n return tfc.mean(tfc.sub(yPred, tfc.mul(yTrue, logPred)), -1);\n });\n}\n\n/**\n * Loss or metric function: Cosine proximity.\n *\n * Mathematically, cosine proximity is defined as:\n * `-sum(l2Normalize(yTrue) * l2Normalize(yPred))`,\n * wherein `l2Normalize()` normalizes the L2 norm of the input to 1 and `*`\n * represents element-wise multiplication.\n *\n * ```js\n * const yTrue = tf.tensor2d([[1, 0], [1, 0]]);\n * const yPred = tf.tensor2d([[1 / Math.sqrt(2), 1 / Math.sqrt(2)], [0, 1]]);\n * const proximity = tf.metrics.cosineProximity(yTrue, yPred);\n * proximity.print();\n * ```\n *\n * @param yTrue Truth Tensor.\n * @param yPred Prediction Tensor.\n * @return Cosine proximity Tensor.\n */\nexport function cosineProximity(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const trueNormalized = l2Normalize(yTrue, -1);\n const predNormalized = l2Normalize(yPred, -1);\n const trueXPred = tfc.mul(trueNormalized, predNormalized);\n return tfc.neg(tfc.sum(trueXPred, -1));\n });\n}\n\nexport const mse = meanSquaredError;\nexport const MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const msle = meanSquaredLogarithmicError;\nexport const MSLE = meanSquaredLogarithmicError;\nexport const kld = kullbackLeiblerDivergence;\nexport const KLD = kullbackLeiblerDivergence;\nexport const cosine = cosineProximity;\n\n// TODO(michaelterry): Add deserialize() function.\n\n// Porting note: This 
diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function get(identifierOrFn: string|LossOrMetricFn): LossOrMetricFn {\n const lossesMap: {[functionName: string]: LossOrMetricFn} = {\n meanSquaredError,\n meanAbsoluteError,\n meanAbsolutePercentageError,\n meanSquaredLogarithmicError,\n squaredHinge,\n hinge,\n categoricalHinge,\n logcosh,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n binaryCrossentropy,\n kullbackLeiblerDivergence,\n poisson,\n cosineProximity\n };\n if (typeof identifierOrFn === 'string') {\n if (identifierOrFn in lossesMap) {\n return lossesMap[identifierOrFn];\n }\n let errMsg = `Unknown loss ${identifierOrFn}`;\n if (identifierOrFn.toLowerCase().includes('softmaxcrossentropy')) {\n errMsg = `Unknown loss ${identifierOrFn}. ` +\n 'Use \"categoricalCrossentropy\" as the string name for ' +\n 'tf.losses.softmaxCrossEntropy';\n }\n throw new ValueError(errMsg);\n } else {\n return identifierOrFn;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Built-in metrics.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {Tensor, tidy} from '@tensorflow/tfjs-core';\n\nimport {getScalar} from './backend/state';\nimport * as K from './backend/tfjs_backend';\nimport {NotImplementedError, ValueError} from './errors';\nimport {categoricalCrossentropy as categoricalCrossentropyLoss, cosineProximity, meanAbsoluteError, meanAbsolutePercentageError, meanSquaredError, sparseCategoricalCrossentropy as sparseCategoricalCrossentropyLoss} from './losses';\nimport {binaryCrossentropy as lossBinaryCrossentropy} from './losses';\nimport {LossOrMetricFn} from './types';\n\n/**\n * Binary accuracy metric function.\n *\n * `yTrue` and `yPred` can have 0-1 values. 
Example:\n * ```js\n * const x = tensor2d([[1, 1, 1, 1], [0, 0, 0, 0]], [2, 4]);\n * const y = tensor2d([[1, 0, 1, 0], [0, 0, 0, 1]], [2, 4]);\n * const accuracy = tfl.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * `yTrue` and `yPred` can also have floating-number values between 0 and 1, in\n * which case the values will be thresholded at 0.5 to yield 0-1 values (i.e.,\n * a value >= 0.5 and <= 1.0 is interpreted as 1.\n * )\n * Example:\n * ```js\n * const x = tensor1d([1, 1, 1, 1, 0, 0, 0, 0]);\n * const y = tensor1d([0.2, 0.4, 0.6, 0.8, 0.2, 0.3, 0.4, 0.7]);\n * const accuracy = tf.metrics.binaryAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction.\n * @return Accuracy Tensor.\n */\nexport function binaryAccuracy(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const threshold = tfc.mul(getScalar(0.5), tfc.onesLike(yPred));\n const yPredThresholded = K.cast(tfc.greater(yPred, threshold), yTrue.dtype);\n return tfc.mean(tfc.equal(yTrue, yPredThresholded), -1);\n });\n}\n\n/**\n * Categorical accuracy metric function.\n *\n * Example:\n * ```js\n * const x = tensor2d([[0, 0, 0, 1], [0, 0, 0, 1]]);\n * const y = tensor2d([[0.1, 0.8, 0.05, 0.05], [0.1, 0.05, 0.05, 0.8]]);\n * const accuracy = tf.metrics.categoricalAccuracy(x, y);\n * accuracy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth: one-hot encoding of categories.\n * @param yPred Binary Tensor of prediction: probabilities or logits for the\n * same categories as in `yTrue`.\n * @return Accuracy Tensor.\n */\nexport function categoricalAccuracy(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(\n () => K.cast(\n tfc.equal(tfc.argMax(yTrue, -1), tfc.argMax(yPred, -1)), 'float32'));\n}\n\nfunction truePositives(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const one = getScalar(1);\n return tfc.logicalAnd(yTrue.equal(one), yPred.equal(one))\n .sum()\n .cast('float32');\n });\n}\n\nfunction falseNegatives(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const one = getScalar(1);\n const zero = getScalar(0);\n return tfc.logicalAnd(yTrue.equal(one), yPred.equal(zero))\n .sum()\n .cast('float32');\n });\n}\n\nfunction falsePositives(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const one = getScalar(1);\n const zero = getScalar(0);\n return tfc.logicalAnd(yTrue.equal(zero), yPred.equal(one))\n .sum()\n .cast('float32');\n });\n}\n\n/**\n * Computes the precision of the predictions with respect to the labels.\n *\n * Example:\n * ```js\n * const x = tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1].\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1].\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const precision = tf.metrics.precision(x, y);\n * precision.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. 
Expected to be contain only 0-1 values.\n * @return Precision Tensor.\n */\nexport function precision(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const zero = getScalar(0);\n\n const tp = truePositives(yTrue, yPred);\n const fp = falsePositives(yTrue, yPred);\n\n const denominator = tp.add(fp);\n\n return tfc.where(tfc.greater(denominator, zero), tp.div(denominator), zero)\n .cast('float32');\n });\n}\n\n/**\n * Computes the recall of the predictions with respect to the labels.\n *\n * Example:\n * ```js\n * const x = tensor2d(\n * [\n * [0, 0, 0, 1],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1].\n * [1, 0, 0, 0],\n * [0, 0, 1, 0]\n * ]\n * );\n *\n * const y = tensor2d(\n * [\n * [0, 0, 1, 0],\n * [0, 1, 0, 0],\n * [0, 0, 0, 1].\n * [0, 1, 0, 0],\n * [0, 1, 0, 0]\n * ]\n * );\n *\n * const recall = tf.metrics.recall(x, y);\n * recall.print();\n * ```\n *\n * @param yTrue The ground truth values. Expected to be contain only 0-1 values.\n * @param yPred The predicted values. Expected to be contain only 0-1 values.\n * @return Recall Tensor.\n */\nexport function recall(yTrue: Tensor, yPred: Tensor): Tensor {\n return tidy(() => {\n const zero = getScalar(0);\n\n const tp = truePositives(yTrue, yPred);\n const fn = falseNegatives(yTrue, yPred);\n\n const denominator = tp.add(fn);\n\n return tfc.where(tfc.greater(denominator, zero), tp.div(denominator), zero)\n .cast('float32');\n });\n}\n\n/**\n * Binary crossentropy metric function.\n *\n * Example:\n * ```js\n * const x = tensor2d([[0], [1], [1], [1]]);\n * const y = tensor2d([[0], [0], [0.5], [1]]);\n * const crossentropy = tf.metrics.binaryCrossentropy(x, y);\n * crossentropy.print();\n * ```\n *\n * @param yTrue Binary Tensor of truth.\n * @param yPred Binary Tensor of prediction, probabilities for the `1` case.\n * @return Accuracy Tensor.\n */\nexport function binaryCrossentropy(yTrue: Tensor, yPred: Tensor): Tensor {\n return lossBinaryCrossentropy(yTrue, yPred);\n}\n\nexport function sparseCategoricalAccuracy(\n yTrue: Tensor, yPred: Tensor): Tensor {\n throw new NotImplementedError();\n}\n\nexport function topKCategoricalAccuracy(yTrue: Tensor, yPred: Tensor): Tensor {\n throw new NotImplementedError();\n}\n\nexport function sparseTopKCategoricalAccuracy(\n yTrue: Tensor, yPred: Tensor): Tensor {\n throw new NotImplementedError();\n}\n\n// Aliases.\nexport const mse = meanSquaredError;\nexport const MSE = meanSquaredError;\nexport const mae = meanAbsoluteError;\nexport const MAE = meanAbsoluteError;\nexport const mape = meanAbsolutePercentageError;\nexport const MAPE = meanAbsolutePercentageError;\nexport const categoricalCrossentropy = categoricalCrossentropyLoss;\nexport const cosine = cosineProximity;\nexport const sparseCategoricalCrossentropy = sparseCategoricalCrossentropyLoss;\n\n// TODO(cais, nielsene): Add serialize().\n\nexport function get(identifier: string|LossOrMetricFn): LossOrMetricFn {\n const metricsMap: {[functionName: string]: LossOrMetricFn} = {\n binaryAccuracy,\n categoricalAccuracy,\n precision,\n categoricalCrossentropy,\n sparseCategoricalCrossentropy,\n mse,\n MSE,\n mae,\n MAE,\n mape,\n MAPE,\n cosine,\n };\n if (typeof identifier === 'string' && identifier in metricsMap) {\n return metricsMap[identifier];\n } else if (typeof identifier !== 'string' && identifier != null) {\n return identifier;\n } else {\n throw new ValueError(`Unknown metric ${identifier}`);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that 
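The precision/recall doc examples above, restated with well-formed `tf.tensor2d` literals (the rows in the embedded snippets are separated by stray periods). With these labels there are 2 true positives, 3 false positives, and 3 false negatives, so both metrics should print roughly 0.4.

```js
import * as tf from '@tensorflow/tfjs';

const yTrue = tf.tensor2d([
  [0, 0, 0, 1],
  [0, 1, 0, 0],
  [0, 0, 0, 1],
  [1, 0, 0, 0],
  [0, 0, 1, 0],
]);
const yPred = tf.tensor2d([
  [0, 0, 1, 0],
  [0, 1, 0, 0],
  [0, 0, 0, 1],
  [0, 1, 0, 0],
  [0, 1, 0, 0],
]);
tf.metrics.precision(yTrue, yPred).print();  // TP / (TP + FP) = 2 / 5 = 0.4
tf.metrics.recall(yTrue, yPred).print();     // TP / (TP + FN) = 2 / 5 = 0.4
```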
can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Optimizers.\n */\n\nimport {Optimizer, train} from '@tensorflow/tfjs-core';\n\nimport {epsilon} from './backend/common';\n\nimport {ValueError} from './errors';\n\n// Add (de)serialize()\n\n// Porting note: This diverges from the PyKeras implementation and may need to\n// change based on (de)serialization requirements.\nexport function getOptimizer(identifier: string): Optimizer {\n const optimizerMap: {[optimizerName: string]: () => Optimizer} = {\n 'Adagrad': () => train.adagrad(0.01),\n 'Adadelta': () => train.adadelta(1, 0.95, epsilon()),\n 'Adam': () => train.adam(0.001, 0.9, 0.999, epsilon()),\n 'Adamax': () => train.adamax(0.002, 0.9, 0.999, epsilon(), 0),\n 'RMSProp': () => train.rmsprop(0.001, 0.9, 0, epsilon()),\n 'SGD': () => train.sgd(0.01)\n };\n optimizerMap['adagrad'] = optimizerMap['Adagrad'];\n optimizerMap['adadelta'] = optimizerMap['Adadelta'];\n optimizerMap['adam'] = optimizerMap['Adam'];\n optimizerMap['adamax'] = optimizerMap['Adamax'];\n optimizerMap['rmsprop'] = optimizerMap['RMSProp'];\n optimizerMap['sgd'] = optimizerMap['SGD'];\n\n if (identifier in optimizerMap) {\n return optimizerMap[identifier]();\n }\n throw new ValueError(`Unknown Optimizer ${identifier}`);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport {Container} from '../engine/container';\nimport {Layer, Node} from '../engine/topology';\nimport {countParamsInWeights} from './variable_utils';\n\n/**\n * Print the summary of a Model object.\n *\n * @param model tf.Model instance.\n * @param lineLength Total length of printed lines. Set this to adapt to the\n * display to different terminal or console sizes.\n * @param positions Relative or absolute positions of log elements in each\n * line. Each number corresponds to right-most (i.e., ending) position of a\n * column.\n * If not provided, defaults to `[0.45, 0.85, 1]` for sequential-like\n * models and `[0.33, 0.55, 0.67, 1]` for non-sequential like models.\n * @param printFn Print function to use.\n * It will be called on each line of the summary. You can provide a custom\n * function in order to capture the string summary. Defaults to `console.log`.\n */\nexport function printSummary(\n model: Container, lineLength?: number, positions?: number[],\n // tslint:disable-next-line:no-any\n printFn: (message?: any, ...optionalParams: any[]) => void =\n console.log): void {\n const sequentialLike = isModelSequentialLike(model);\n\n // Header names for different log elements.\n const toDisplay: string[] = ['Layer (type)', 'Output shape', 'Param #'];\n if (sequentialLike) {\n lineLength = lineLength || 65;\n positions = positions || [0.45, 0.85, 1];\n } else {\n lineLength = lineLength || 98;\n positions = positions || [0.33, 0.55, 0.67, 1];\n // Header names for different log elements.\n }\n\n if (positions[positions.length - 1] <= 1) {\n // `positions` is relative. 
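A usage sketch of the optimizer-by-name lookup above, assuming `Model.compile()` resolves string identifiers through `getOptimizer()` (so `'adam'` maps to `train.adam(0.001, 0.9, 0.999, epsilon())`); passing an `Optimizer` instance directly is the way to pick your own hyperparameters. Model shape and learning rate are illustrative.

```js
import * as tf from '@tensorflow/tfjs';

const model = tf.sequential();
model.add(tf.layers.dense({units: 1, inputShape: [3]}));

// By name: uses the default hyperparameters from the optimizer map above.
model.compile({optimizer: 'adam', loss: 'meanSquaredError'});

// Explicit instance: equivalent, but with a caller-chosen learning rate.
model.compile({optimizer: tf.train.adam(0.001), loss: 'meanSquaredError'});
```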
Convert it to absolute positioning.\n positions = positions.map(p => Math.floor(lineLength * p));\n }\n\n let relevantNodes: Node[];\n if (!sequentialLike) {\n toDisplay.push('Receives inputs');\n relevantNodes = [];\n for (const depth in model.nodesByDepth) {\n relevantNodes.push(...model.nodesByDepth[depth]);\n }\n }\n\n printFn('_'.repeat(lineLength));\n printRow(toDisplay, positions, printFn);\n printFn('='.repeat(lineLength));\n\n const layers = model.layers;\n for (let i = 0; i < layers.length; ++i) {\n if (sequentialLike) {\n printLayerSummary(layers[i], positions, printFn);\n } else {\n printLayerSummaryWithConnections(\n layers[i], positions, relevantNodes, printFn);\n }\n printFn((i === layers.length - 1 ? '=' : '_').repeat(lineLength));\n }\n\n // tslint:disable-next-line:no-any\n (model as any).checkTrainableWeightsConsistency();\n\n const trainableCount = countTrainableParams(model);\n const nonTrainableCount = countParamsInWeights(model.nonTrainableWeights);\n\n printFn(`Total params: ${trainableCount + nonTrainableCount}`);\n printFn(`Trainable params: ${trainableCount}`);\n printFn(`Non-trainable params: ${nonTrainableCount}`);\n printFn('_'.repeat(lineLength));\n}\n\nfunction countTrainableParams(model: Container): number {\n let trainableCount: number;\n // tslint:disable:no-any\n if ((model as any).collectedTrainableWeights != null) {\n trainableCount =\n countParamsInWeights((model as any).collectedTrainableWeights);\n } else {\n trainableCount = countParamsInWeights(model.trainableWeights);\n }\n // tslint:enable:no-any\n return trainableCount;\n}\n\nfunction isModelSequentialLike(model: Container): boolean {\n let sequentialLike = true;\n const nodesByDepth: Node[][] = [];\n const nodes: Node[] = [];\n for (const depth in model.nodesByDepth) {\n nodesByDepth.push(model.nodesByDepth[depth]);\n }\n for (const depthNodes of nodesByDepth) {\n if (depthNodes.length > 1 ||\n depthNodes.length === 1 && depthNodes[0].inboundLayers.length > 1) {\n sequentialLike = false;\n break;\n }\n nodes.push(...depthNodes);\n }\n if (sequentialLike) {\n // Search for shared layers.\n for (const layer of model.layers) {\n let flag = false;\n for (const node of layer.inboundNodes) {\n if (nodes.indexOf(node) !== -1) {\n if (flag) {\n sequentialLike = false;\n break;\n } else {\n flag = true;\n }\n }\n }\n if (!sequentialLike) {\n break;\n }\n }\n }\n return sequentialLike;\n}\n\nfunction printRow(\n fields: string[], positions: number[],\n // tslint:disable-next-line:no-any\n printFn: (message?: any, ...optionalParams: any[]) => void = console.log) {\n let line = '';\n for (let i = 0; i < fields.length; ++i) {\n if (i > 0) {\n line = line.slice(0, line.length - 1) + ' ';\n }\n line += fields[i];\n line = line.slice(0, positions[i]);\n line += ' '.repeat(positions[i] - line.length);\n }\n printFn(line);\n}\n\n/**\n * Prints a summary for a single Layer, without connectivity information.\n *\n * @param layer: Layer instance to print.\n */\nfunction printLayerSummary(\n layer: Layer, positions: number[],\n // tslint:disable-next-line:no-any\n printFn: (message?: any, ...optionalParams: any[]) => void) {\n let outputShape: string;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n } catch (err) {\n outputShape = 'multiple';\n }\n\n const name = layer.name;\n const className = layer.getClassName();\n const fields: string[] =\n [`${name} (${className})`, outputShape, layer.countParams().toString()];\n printRow(fields, positions, printFn);\n}\n\n/**\n * Prints a summary for a single 
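A sketch of driving `printSummary()` through the public `model.summary()` method, assuming it forwards the optional `lineLength`, `positions`, and `printFn` arguments as documented above; the custom `printFn` captures the summary instead of logging it. The model and line width are illustrative.

```js
import * as tf from '@tensorflow/tfjs';

const model = tf.sequential();
model.add(tf.layers.dense({units: 8, inputShape: [4], activation: 'relu'}));
model.add(tf.layers.dense({units: 1}));

const lines = [];
// lineLength = 80, default column positions, capture each printed line.
model.summary(80, undefined, (msg) => lines.push(msg));
console.log(lines.join('\n'));
```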
Layer, with connectivity information.\n */\nfunction printLayerSummaryWithConnections(\n layer: Layer, positions: number[], relevantNodes: Node[],\n // tslint:disable-next-line:no-any\n printFn: (message?: any, ...optionalParams: any[]) => void) {\n let outputShape: string;\n try {\n outputShape = JSON.stringify(layer.outputShape);\n } catch (err) {\n outputShape = 'multiple';\n }\n\n const connections: string[] = [];\n for (const node of layer.inboundNodes) {\n if (relevantNodes != null && relevantNodes.length > 0 &&\n relevantNodes.indexOf(node) === -1) {\n continue;\n }\n for (let i = 0; i < node.inboundLayers.length; ++i) {\n const inboundLayer = node.inboundLayers[i].name;\n const inboundLayerIndex = node.nodeIndices[i];\n const inboundTensorIndex = node.tensorIndices[i];\n connections.push(\n `${inboundLayer}[${inboundLayerIndex}][${inboundTensorIndex}]`);\n }\n }\n const name = layer.name;\n const className = layer.getClassName();\n const firstConnection = connections.length === 0 ? '' : connections[0];\n const fields: string[] = [\n `${name} (${className})`, outputShape, layer.countParams().toString(),\n firstConnection\n ];\n\n printRow(fields, positions, printFn);\n for (let i = 1; i < connections.length; ++i) {\n printRow(['', '', '', connections[i]], positions, printFn);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original Source layers/__init__.py */\nimport {serialization} from '@tensorflow/tfjs-core';\n\nimport {deserializeKerasObject} from '../utils/generic_utils';\n\n/**\n * Instantiate a layer from a config dictionary.\n * @param config: dict of the form {class_name: str, config: dict}\n * @param custom_objects: dict mapping class names (or function names)\n * of custom (non-Keras) objects to class/functions\n * @returns Layer instance (may be Model, Sequential, Layer...)\n */\nexport function deserialize(\n config: serialization.ConfigDict,\n customObjects = {} as\n serialization.ConfigDict): serialization.Serializable {\n return deserializeKerasObject(\n config, serialization.SerializationMap.getMap().classNameMap,\n customObjects, 'layer');\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n// Porting note: This file doesn't exist in PyKeras.\n// Its purpose here is to centralize the boundary layer between\n// tfjs-layers's internal Config TS-Centric format and PyKeras's\n// serialized Python Config format.\n\nimport {serialization} from '@tensorflow/tfjs-core';\n\nimport {JsonValue} from '../types';\nimport * as generic_utils from '../utils/generic_utils';\n// tslint:enable\n\n/**\n * Test whether a value in an array is the name of a Model or Layer.\n * @param key The key name that the value is found under. 
Note that the key\n * may not be at the level immediately above the value, if the value is in a\n * nested array.\n * @param index Index of the value in the Array that it is found in.\n * @param value The value object.\n * @returns A boolean indicating whether value is a name.\n */\nfunction isArrayItemInputOrOutputName<T>(\n key: string, index: number, value: T): boolean {\n return (key === 'inboundNodes' || key === 'outputLayers' ||\n key === 'inputLayers') &&\n index === 0 && typeof value === 'string';\n}\n\n/**\n * Convert a Pythonic config object to TypeScript config object.\n * @param pythonicConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertPythonicToTs(\n pythonicConfig: JsonValue, key?: string): serialization.ConfigDictValue {\n if (pythonicConfig === null) {\n return null;\n } else if (typeof pythonicConfig === 'string') {\n return generic_utils.toCamelCase(pythonicConfig);\n } else if (\n (typeof pythonicConfig === 'number') ||\n (typeof pythonicConfig === 'boolean')) {\n return pythonicConfig;\n } else if (pythonicConfig instanceof Array) {\n const tsArray = [];\n const arrayLength = pythonicConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = pythonicConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n tsArray.push(item);\n } else {\n tsArray.push(convertPythonicToTs(item, key));\n }\n }\n return tsArray;\n } else {\n const tsDict: serialization.ConfigDict = {};\n for (const pythonicKey of Object.keys(pythonicConfig)) {\n const pythonicValue = pythonicConfig[pythonicKey];\n if (pythonicKey === 'name' && typeof pythonicValue === 'string') {\n // Special case the 'name' key with a string value. Name values, such as\n // the names of Model and Layer instances, should not undergo the\n // camel-case conversion.\n tsDict[pythonicKey] = pythonicValue;\n } else {\n const tsKey = generic_utils.toCamelCase(pythonicKey);\n tsDict[tsKey] = convertPythonicToTs(pythonicValue, tsKey);\n }\n }\n return tsDict;\n }\n}\n\n/**\n * Convert a TypeScript config object to Python config object.\n * @param tsConfig The config object to convert.\n * @param key Optional key name of the object being converted.\n * @returns Result of the conversion.\n */\nexport function convertTsToPythonic(\n tsConfig: serialization.ConfigDictValue, key?: string): JsonValue {\n if (tsConfig === null || tsConfig === undefined) {\n return null;\n } else if (typeof tsConfig === 'string') {\n return generic_utils.toSnakeCase(tsConfig);\n } else if (\n (typeof tsConfig === 'number') || (typeof tsConfig === 'boolean')) {\n return tsConfig;\n } else if (tsConfig instanceof Array) {\n const pyArray = [];\n const arrayLength = tsConfig.length;\n for (let i = 0; i < arrayLength; ++i) {\n const item = tsConfig[i];\n if (isArrayItemInputOrOutputName(key, i, item)) {\n pyArray.push(item);\n } else {\n pyArray.push(convertTsToPythonic(item, key));\n }\n }\n return pyArray;\n } else {\n const pyDict: serialization.ConfigDict = {};\n for (const tsKey of Object.keys(tsConfig)) {\n const tsValue = tsConfig[tsKey];\n const pyKey = generic_utils.toSnakeCase(tsKey);\n if ((tsKey === 'name' || tsKey === 'className') &&\n typeof tsValue === 'string') {\n // Special case the 'name' key with a string value. 
Name values, such as\n // the names of Model and Layer instances, should not undergo the\n // snake-case conversion.\n pyDict[pyKey] = tsValue;\n } else {\n pyDict[pyKey] = convertTsToPythonic(tsValue, tsKey);\n }\n }\n return pyDict;\n }\n}\n","/** @license See the LICENSE file. */\n\n// This code is auto-generated, do not modify this file!\nconst version = '0.8.5';\nexport {version};\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original source: keras/engine/topology.py */\n\nimport {Scalar, serialization, Tensor, tidy, util} from '@tensorflow/tfjs-core';\n\nimport {getUid} from '../backend/state';\nimport {NotImplementedError, RuntimeError, ValueError} from '../errors';\nimport {deserialize as deserializeLayer} from '../layers/serialization';\nimport {JsonDict, Kwargs, NamedTensorMap, Shape} from '../types';\nimport * as generic_utils from '../utils/generic_utils';\nimport {convertTsToPythonic} from '../utils/serialization_utils';\nimport * as types_utils from '../utils/types_utils';\nimport {batchSetValue, LayerVariable} from '../variables';\nimport {version as layersVersion} from '../version';\n\nimport {InputLayer} from './input_layer';\nimport {DisposeResult, Layer, Node, SymbolicTensor} from './topology';\n\n/**\n * Converts layers weights to a format suitable for TensorFlow.js Layers.\n *\n * Porting Note: The function `preprocess_weights_for_loading()` in PyKeras\n * performs conversion from Keras 1 to Keras 2. But in TypeScript, we\n * require Keras version to be 2. Thus this conversion is not applicable. We\n * simply check the Keras version and pass the weights through.\n *\n * @param layer Layer instance.\n * @param weights Input weights.\n * @param originalKerasVersion Keras version for the weights.\n * @param originalBackend Keras backend the weights were trained with.\n * @returns Output weights as Tensors.\n */\nfunction preprocessWeightsForLoading(\n layer: Layer, weights: LayerVariable[], originalKerasVersion?: string,\n originalBackend?: string): LayerVariable[] {\n if (!originalKerasVersion.startsWith('2.')) {\n throw new ValueError(\n 'Unsupported Keras version in weights being loaded: ' +\n originalKerasVersion);\n }\n return weights;\n}\n\n/**\n * Create an Tensor from info about dtype, shape and values.\n * @param dtype DType string.\n * @param shape Shape.\n * @param value Values of the array, as a scalar or nested Array of proper\n * shape.\n * @returns An Tensor instance.\n */\n// tslint:disable-next-line:no-any\nfunction loadTensor(dtype: string, shape: Shape, value: any): Tensor {\n const dataType = generic_utils.stringToDType(dtype);\n return Tensor.make(\n shape, {values: shape.length === 0 ? value : util.flatten(value)},\n dataType);\n}\n\n// TODO(cais): Maybe remove the following (b/74015805).\n/**\n * Load weights from a weights JSON object to an array of layers.\n *\n * Porting Note: This is ported from the Python function\n * load_weights_from_hdf5_group_by_name()\n *\n * @param weightsJSON. The input JSON object represent the weights from a\n * trained Keras model. 
See scripts/pykeras.py for more details.\n * @param layers An array of target layers.\n * @param skipMismatch Whether to skip loading of layers where there is a\n * mismatch in the number of weights, or a mismatch in the shape of the\n * weights.\n */\nexport function loadWeightsFromJson(\n weightsJSON: JsonDict, layers: Layer[], skipMismatch = false): void {\n const originalKerasVersion = weightsJSON['keras_version'] as string;\n const originalBackend = weightsJSON['backend'] as string;\n const layerNames = layers.map(layer => layer.name);\n\n // Reverse index of layer name to list of layers with name.\n const index: {[layerName: string]: Layer[]} = {};\n for (const layer of layers) {\n if (layer.name != null) {\n if (index[layer.name] == null) {\n index[layer.name] = [];\n }\n index[layer.name].push(layer);\n }\n }\n\n // tslint:disable-next-line:no-any\n const nameToWeights = weightsJSON['weights'] as {[name: string]: any};\n const weightValueTuples: Array<[LayerVariable, Tensor]> = [];\n for (let k = 0; k < layerNames.length; ++k) {\n const name = layerNames[k];\n let layerWeights = nameToWeights[name];\n if (layerWeights == null) {\n layerWeights = [];\n }\n\n let weightValues: LayerVariable[] = [];\n for (let n = 0; n < layerWeights.length; ++n) {\n // tslint:disable:no-any\n const weightEntry =\n layerWeights[n] as {[key: string]: string | Shape | any};\n // tslint:enable\n weightValues.push(new LayerVariable(loadTensor(\n weightEntry['dtype'], weightEntry['shape'] as Shape,\n weightEntry['value'])));\n }\n for (const layer of index[name]) {\n const symbolicWeights = layer.weights;\n weightValues = preprocessWeightsForLoading(\n layer, weightValues, originalKerasVersion, originalBackend);\n if (weightValues.length !== symbolicWeights.length) {\n if (skipMismatch) {\n console.warn(\n `Skipping loading of weights of layer ${layer.name} ` +\n `due to mismatch in number of weights: (${weightValues.length} ` +\n `vs ${symbolicWeights.length}).`);\n } else {\n throw new ValueError(\n `Layer #${k} (named \"${layer.name}\") expects ` +\n `${symbolicWeights.length} weight(s), but the saved weights ` +\n `have ${weightValues.length} element(s).`);\n }\n }\n\n // Set values.\n for (let i = 0; i < weightValues.length; ++i) {\n if (skipMismatch) {\n if (!util.arraysEqual(\n symbolicWeights[i].shape, weightValues[i].shape)) {\n console.warn(\n `Skipping loading of weights for layer ${layer.name} due ` +\n `to mismatch in shape (${symbolicWeights[i].shape} vs ` +\n `${weightValues[i].shape})`);\n continue;\n }\n }\n weightValueTuples.push([symbolicWeights[i], weightValues[i].read()]);\n }\n }\n }\n batchSetValue(weightValueTuples);\n}\n\n/**\n * Load weights from a named tensor map.\n *\n * Porting Note: This is ported from the Python function\n * load_weights_from_hdf5_group_by_name()\n *\n * @param weights The named tensor map mapping names of weights to weight\n * values.\n * @param strict Require that the provided weights exactly match those required\n * by the layers. Default true. 
Passing false means that both extra weights\n * and missing weights will be silently ignored.\n * @param layers An array of target layers.\n */\nexport function loadWeightsFromNamedTensorMap(\n weights: NamedTensorMap, layers: Layer[], strict = true): void {\n // Make a dictionary mapping weight name to weight.\n const nameToWeight: {[name: string]: LayerVariable} = {};\n let totalWeightsCount = 0;\n for (const layer of layers) {\n for (const weight of layer.weights) {\n if (nameToWeight[weight.originalName] != null) {\n throw new ValueError(`Duplicate weight name: ${weight.originalName}`);\n }\n nameToWeight[weight.originalName] = weight;\n totalWeightsCount++;\n }\n }\n\n const weightValueTuples: Array<[LayerVariable, Tensor]> = [];\n for (const name in weights) {\n if (nameToWeight[name] != null) {\n weightValueTuples.push([nameToWeight[name], weights[name]]);\n } else if (strict) {\n throw new ValueError(\n `Provided weight data has no target variable: ${name}`);\n }\n delete nameToWeight[name];\n }\n\n if (strict) {\n // Check that all weights are set.\n const unsetNames: string[] = [];\n for (const name in nameToWeight) {\n unsetNames.push(name);\n }\n if (unsetNames.length > 0) {\n throw new ValueError(\n `${unsetNames.length} of ${totalWeightsCount} weights are not set: ` +\n `${unsetNames}`);\n }\n }\n\n batchSetValue(weightValueTuples);\n}\n\n/** Constructor config for Container. */\nexport interface ContainerConfig {\n inputs: SymbolicTensor|SymbolicTensor[];\n outputs: SymbolicTensor|SymbolicTensor[];\n name?: string;\n}\n\n/**\n * A Container is a directed acyclic graph of layers.\n *\n * It is the topological form of a \"model\". A Model\n * is simply a Container with added training routines.\n *\n */\nexport abstract class Container extends Layer {\n inputs: SymbolicTensor[];\n outputs: SymbolicTensor[];\n\n inputLayers: Layer[];\n inputLayersNodeIndices: number[];\n inputLayersTensorIndices: number[];\n\n outputLayers: Layer[];\n outputLayersNodeIndices: number[];\n outputLayersTensorIndices: number[];\n\n layers: Layer[];\n layersByDepth: {[depth: string]: Layer[]};\n nodesByDepth: {[depth: string]: Node[]};\n\n containerNodes = new Set<string>();\n\n // TODO(michaelterry): Add cache support\n // private outputMaskCache: any;\n // private outputTensorCache: any;\n // private outputShapeCache: any;\n\n inputNames: string[];\n outputNames: string[];\n feedInputShapes: Shape[];\n\n protected internalInputShapes: Shape[];\n protected internalOutputShapes: Shape[];\n // TODO(cais): Maybe 'feed' should not in the names of these variables,\n // due to the fact that our backend is not symbolic.\n protected feedInputNames: string[];\n protected feedOutputNames: string[];\n\n constructor(config: ContainerConfig) {\n // No args passed to super's constructor.\n super({});\n this.name = config.name;\n if (this.name == null) {\n const prefix = this.getClassName().toLowerCase();\n this.name = getUid(prefix);\n }\n\n this.supportsMasking = false;\n this.trainable = true;\n this.updatable = true;\n\n // TODO(michaelterry): Initialize perInputLosses/Updates here.\n\n // Container-specific properties.\n if (Array.isArray(config.inputs)) {\n this.inputs = config.inputs.slice();\n } else {\n this.inputs = [config.inputs];\n }\n if (Array.isArray(config.outputs)) {\n this.outputs = config.outputs.slice();\n } else {\n this.outputs = [config.outputs];\n }\n\n // Check for redundancy in inputs.\n if (generic_utils.unique(this.inputs).length !== this.inputs.length) {\n throw new ValueError(\n 'The 
list of inputs passed to the model is ' +\n 'redundant. All inputs should only appear once. Found: ' +\n this.inputs.map(x => x.name));\n }\n\n // Check for redundancy in outputs.\n if (generic_utils.unique(this.outputs).length !== this.outputs.length) {\n console.warn(\n 'The list of outputs passed to the model is redundant. ' +\n 'All outputs should only appear once. Found: ' +\n this.outputs.map(x => x.name));\n }\n\n /*\n List of initial layers (1 to 1 mapping with this.inputs, hence the same\n layer might appear twice)\n */\n this.inputLayers = [];\n this.inputLayersNodeIndices = [];\n this.inputLayersTensorIndices = [];\n /*\n List of layers (1 to 1 mapping with this.outputs, hence the same layer\n might appear twice)\n */\n this.outputLayers = [];\n this.outputLayersNodeIndices = [];\n this.outputLayersTensorIndices = [];\n /*\n All layers in order of horizontal graph traversal. Entries are unique.\n Includes input and output layers.\n */\n this.layers = [];\n\n // TODO(michaelterry): Determine if caching still needed with eager\n // backend.\n /*\n This is for performance optimization when calling the Container on new\n inputs. Every time the Container is called on a set on input tensors,\n we compute the output tensors, output masks and output shapes in one pass,\n then cache them here. When one of these outputs is queried later,\n we retrieve it from there instead of recomputing it.\n */\n // this.outputTensorCache = {};\n // this.outputShapeCache = {};\n\n // Build this.outputLayers:\n for (const x of this.outputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n this.outputLayers.push(layer as Layer);\n this.outputLayersNodeIndices.push(nodeIndex);\n this.outputLayersTensorIndices.push(tensorIndex);\n }\n\n // TODO(michaelterry): Add output mask cache code.\n\n // Build this.inputLayers:\n for (const x of this.inputs) {\n const layer = x.sourceLayer;\n const nodeIndex = x.nodeIndex;\n const tensorIndex = x.tensorIndex;\n /*\n It's supposed to be an input layer, so only one node\n and one tensor output.\n */\n generic_utils.assert(nodeIndex === 0, 'input layer has >1 nodes');\n generic_utils.assert(tensorIndex === 0, 'input layer has >1 tensors');\n this.inputLayers.push(layer as Layer);\n this.inputLayersNodeIndices.push(nodeIndex);\n this.inputLayersTensorIndices.push(tensorIndex);\n }\n\n // Build this.inputNames and this.outputNames.\n this.inputNames = [];\n this.outputNames = [];\n this.feedInputShapes = [];\n this.feedInputNames = [];\n this.feedOutputNames = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n // Check that layer is an InputLayer.\n if (!(layer instanceof InputLayer)) {\n throw new TypeError(\n 'Input layers to a Model must be InputLayer objects. ' +\n `Received inputs: ${config.inputs}. 
` +\n `Input ${i} (0-based) originates ` +\n `from layer type ${layer.getClassName()}.`);\n }\n this.inputNames.push(layer.name);\n this.feedInputShapes.push(layer.batchInputShape);\n\n this.feedInputNames.push(layer.name);\n }\n for (const layer of this.outputLayers) {\n this.outputNames.push(layer.name);\n }\n\n this.internalInputShapes = this.inputs.map(x => x.shape);\n this.internalOutputShapes = this.outputs.map(x => x.shape);\n\n /*\n Container_nodes: set of nodes included in the graph (not all nodes\n included in the layers are relevant to the current graph).\n */\n // ids of all nodes relevant to the Container:\n const nodesDepths: {[nodeID: string]: number} = {};\n // To recover nodes from their ID.\n const nodeIDToNode: {[nodeID: string]: Node} = {};\n const layersDepths: {[layerID: string]: number} = {};\n // To layers from their ID.\n const layerIDToLayer: {[layerID: string]: Layer} = {};\n const layerIndices: {[layerID: string]: number} = {};\n const nodesInDecreasingDepth: Node[] = [];\n\n /**\n * Builds a map of the graph of layers.\n *\n * This recursively updates the map `layerIndices`,\n * the list `nodesInDecreasingDepth` and the set `containerNodes`.\n *\n * @param tensor Some tensor in a graph.\n * @param finishedNodes Set of nodes whose subgraphs have been traversed\n * completely. Useful to prevent duplicated work.\n * @param nodesInProgress Set of nodes that are currently active on the\n * recursion stack. Useful to detect cycles.\n * @param layer Layer from which `tensor` comes from. If not provided,\n * will be obtained from tensor.sourceLayer.\n * @param nodeIndex Node index from which `tensor` comes from.\n * @param tensorIndex TensorIndex from which `tensor` comes from.\n *\n * @exception RuntimeError if a cycle is detected.\n */\n const buildMapOfGraph =\n (tensor: SymbolicTensor, finishedNodes: Node[], nodesInProgress: Node[],\n layer?: Layer, nodeIndex?: number, tensorIndex?: number) => {\n if (layer == null || nodeIndex == null || tensorIndex == null) {\n layer = tensor.sourceLayer as Layer;\n nodeIndex = tensor.nodeIndex;\n tensorIndex = tensor.tensorIndex;\n }\n const node = layer.inboundNodes[nodeIndex];\n\n // Prevent cycles.\n if (nodesInProgress.indexOf(node) !== -1) {\n throw new RuntimeError(\n `The tensor ${tensor.name} at layer \"${layer.name}\" ` +\n 'is part of a cycle.');\n }\n\n // Don't repeat work for shared subgraphs\n if (finishedNodes.indexOf(node) !== -1) {\n return;\n }\n\n // Update containerNodes.\n this.containerNodes.add(Container.nodeKey(layer, nodeIndex));\n\n // Store the traversal order for layer sorting.\n if (!(layer.id in layerIndices)) {\n layerIndices[layer.id] = Object.keys(layerIndices).length;\n }\n\n if (nodesInProgress.indexOf(node) === -1) {\n nodesInProgress.push(node);\n }\n\n // Propagate to all previous tensors connected to this node.\n const numInboundLayers = node.inboundLayers.length;\n for (let i = 0; i < numInboundLayers; i++) {\n const x = node.inputTensors[i];\n const layer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n buildMapOfGraph(\n x, finishedNodes, nodesInProgress, layer, nodeIndex,\n tensorIndex);\n }\n finishedNodes.push(node);\n while (nodesInProgress.indexOf(node) >= 0) {\n nodesInProgress.splice(nodesInProgress.indexOf(node), 1);\n }\n nodesInDecreasingDepth.push(node);\n };\n\n const finishedNodes: Node[] = [];\n const nodesInProgress: Node[] = [];\n for (const x of this.outputs) {\n buildMapOfGraph(x, finishedNodes, 
nodesInProgress);\n }\n\n const reversedNodesInDecreasingDepth =\n nodesInDecreasingDepth.slice().reverse();\n for (const node of reversedNodesInDecreasingDepth) {\n nodeIDToNode[node.id] = node;\n // If the depth is not set, the node has no outbound nodes (depth 0).\n if (!(node.id in nodesDepths)) {\n nodesDepths[node.id] = 0;\n }\n let depth = nodesDepths[node.id];\n\n // Update the depth of the corresponding layer\n const previousDepth =\n (layersDepths[node.outboundLayer.id] == null ?\n 0 :\n layersDepths[node.outboundLayer.id]);\n\n /*\n If we've seen this layer before at a higher depth, we should use that\n depth instead of the node depth. This is necessary for shared layers\n that have inputs at different depth levels in the graph.\n */\n depth = Math.max(depth, previousDepth);\n layersDepths[node.outboundLayer.id] = depth;\n layerIDToLayer[node.outboundLayer.id] = node.outboundLayer;\n nodesDepths[node.id] = depth;\n\n // Update the depth of inbound nodes.\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const inboundNode = inboundLayer.inboundNodes[nodeIndex];\n const previousDepth =\n (nodesDepths[inboundNode.id] == null ? 0 :\n nodesDepths[inboundNode.id]);\n nodesDepths[inboundNode.id] = Math.max(depth + 1, previousDepth);\n nodeIDToNode[inboundNode.id] = inboundNode;\n }\n }\n\n // Build a dict {depth: list of nodes with this depth}\n const nodesByDepth: {[depth: string]: Node[]} = {};\n for (const nodeID in nodesDepths) {\n const depth = nodesDepths[nodeID];\n if (!(depth in nodesByDepth)) {\n nodesByDepth[depth] = [];\n }\n nodesByDepth[depth].push(nodeIDToNode[nodeID]);\n }\n\n // Build a dict {depth: list of layers with this depth}\n const layersByDepth: {[depth: string]: Layer[]} = {};\n for (const layerID in layersDepths) {\n const depth = layersDepths[layerID];\n if (!(depth in layersByDepth)) {\n layersByDepth[depth] = [];\n }\n layersByDepth[depth].push(layerIDToLayer[layerID]);\n }\n\n // Get sorted list of layer depths.\n let depthKeys = Object.keys(layersByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n\n // Set this.layers and this.layersByDepth.\n this.layers = [];\n for (const depth of depthKeys) {\n const layersForDepth = layersByDepth[depth];\n // Container.layers needs to have a deterministic order:\n // here we order them by traversal order.\n layersForDepth.sort((a, b) => {\n const aIndex = layerIndices[a.id];\n const bIndex = layerIndices[b.id];\n if (aIndex < bIndex) {\n return -1;\n }\n if (aIndex > bIndex) {\n return 1;\n }\n return 0;\n });\n for (const layer of layersForDepth) {\n this.layers.push(layer);\n }\n }\n this.layersByDepth = layersByDepth;\n\n // Get sorted list of node depths;\n depthKeys = Object.keys(nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n\n // Check that all tensors required are computable.\n // computable_tensors: all tensors in the graph\n // that can be computed from the inputs provided.\n const computableTensors = this.inputs.slice();\n\n // To provide a better error msg.\n const layersWithCompleteInput: string[] = [];\n for (const depth of depthKeys) {\n for (const node of nodesByDepth[depth]) {\n const layer = node.outboundLayer;\n if (layer != null) {\n for (const x of node.inputTensors) {\n if (computableTensors.indexOf(x) === -1) {\n throw new RuntimeError(\n `Graph disconnected: cannot obtain value for tensor ${x}` +\n ` at layer \"${layer.name}\". 
` +\n 'The following previous layers were accessed without ' +\n `issue: ${layersWithCompleteInput}`);\n }\n }\n for (const x of node.outputTensors) {\n computableTensors.push(x);\n }\n layersWithCompleteInput.push(layer.name);\n }\n }\n }\n\n // Set this.containerNodes and this.nodesByDepth.\n this.nodesByDepth = nodesByDepth;\n\n // Ensure name unicity, which will be crucial for serialization\n // (since serialized nodes refer to layers by their name).\n const allNames = this.layers.map(x => x.name);\n for (const name of allNames) {\n const numOccurrences = allNames.filter(x => x === name).length;\n if (numOccurrences !== 1) {\n throw new RuntimeError(\n `The name \"${name}\" is used ${numOccurrences} times ` +\n 'in the model. All layer names should be unique. Layer names: ' +\n JSON.stringify(allNames));\n }\n }\n\n // Layer parameters.\n // The new container starts with a single inbound node\n // for its inputs, and no outbound nodes.\n // Will be appended to by future calls to apply().\n this.outboundNodes = [];\n // Will be appended to below, and by future calls to apply().\n this.inboundNodes = [];\n\n // Create the node linking internal inputs to internal outputs.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n inputMasks: this.inputs.map(x => null),\n outputMasks: this.outputs.map(x => null),\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs.map(x => x.shape)\n });\n this.built = true;\n this._refCount = 1; // The ref count of a container always start at 1.\n }\n\n protected assertNotDisposed() {\n if (this._refCount === 0) {\n throw new Error(`Container '${this.name}' is already disposed.`);\n }\n }\n\n /**\n * Attempt to dispose a Model's weights.\n *\n * This method decrease the reference count of the Model object by 1.\n *\n * A Model is reference-counted. 
Its reference count is incremented by 1\n * when it is first constructed and when it is used as a Layer of another\n * Model.\n *\n * If the reference count of a Model becomes 0, the `dispose` method of\n * all its constituent `Layer`s will be called.\n *\n * Note: If the reference count is greater than 0 after the decrement, the\n * `dispose` method of its constituent `Layer`s will *not* be called.\n *\n * After a Model is disposed, it cannot be used in calls such as\n * 'predict`, `evaluate` or `fit` anymore.\n *\n * @returns A DisposeResult Object with the following fields:\n * - refCountAfterDispose: The reference count of the Model after this\n * `dispose()` call.\n * - numDisposedVariables: Number of `tf.Variable`s (i.e., weights) disposed\n * during this `dispose()` call.\n * @throws {Error} If the layer is not built yet, or if the Model has\n * already been disposed.\n */\n dispose(): DisposeResult {\n this.assertNotDisposed();\n const result:\n DisposeResult = {refCountAfterDispose: null, numDisposedVariables: 0};\n if (--this._refCount === 0) {\n for (const layer of this.layers) {\n result.numDisposedVariables += layer.dispose().numDisposedVariables;\n }\n }\n result.refCountAfterDispose = this._refCount;\n return result;\n }\n\n get trainableWeights(): LayerVariable[] {\n // Porting Note: This check below is to prevent errors where the\n // _trainableWeights inherited from the parent class (Layer) gets\n // inadvertently used.\n if (this._trainableWeights.length > 0) {\n throw new ValueError(\n 'Container instance unexpectedly contains _trainableWeights.' +\n 'The trainable weights of a Container are a union of the ' +\n 'trainable weights of its consituent Layers. Its own ' +\n '_trainableWeights must remain an empty Array.');\n }\n\n if (!this.trainable) {\n return [];\n }\n let weights: LayerVariable[] = [];\n for (const layer of this.layers) {\n weights = weights.concat(layer.trainableWeights);\n }\n return weights;\n }\n\n get nonTrainableWeights(): LayerVariable[] {\n const weights: LayerVariable[] = [];\n for (const layer of this.layers) {\n weights.push(...layer.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights: LayerVariable[] = [];\n for (const layer of this.layers) {\n trainableWeights.push(...layer.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n\n get weights(): LayerVariable[] {\n return this.trainableWeights.concat(this.nonTrainableWeights);\n }\n\n /**\n * Loads all layer weights from a JSON object.\n *\n * Porting Note: HDF5 weight files cannot be directly loaded in JavaScript /\n * TypeScript. The utility script at `scripts/pykeras.py` offers means\n * to convert them into JSON strings compatible with this method.\n * Porting Note: TensorFlow.js Layers supports only loading by name currently.\n *\n * @param weightsJSON A JSON mapping weight names to weight values as nested\n * arrays of numbers, or a `NamedTensorMap`, i.e., a JSON mapping weight\n * names to `tf.Tensor` objects.\n * @param skipMismatch Whether to skip loading of layers where there is a\n * mismatch in the number of weights, or a mismatch in the shape of the\n * weight (only valid when `by_name`=True).\n * @param isNamedTensorMap Whether the 1st argument (`weightsJSON`) is a\n * `NamedTensorMap`.\n * @param strict Require that the provided weights exactly match those\n * required by the container. Default true. 
Passing false means that both\n * extra weights and missing weights will be silently ignored.\n */\n loadWeights(\n weightsJSON: JsonDict|NamedTensorMap, skipMismatch = false,\n isNamedTensorMap = false, strict = true) {\n // TODO(cais): Maybe the JsonDict support should be removed after serving\n // weights from XHR is working. If so, the `loadWeightsFromJson` flag\n // should be removed as well. (b/74015805)\n // TODO(cais): See if we can use smarter type resolution to avoid sending\n // the type info as a separate arg (isNamedTensormap).\n if (isNamedTensorMap) {\n loadWeightsFromNamedTensorMap(\n weightsJSON as NamedTensorMap, this.layers, strict);\n } else {\n loadWeightsFromJson(weightsJSON as JsonDict, this.layers, skipMismatch);\n }\n }\n\n /**\n * Util shared between different serialization methods.\n * @returns Model config with Keras version information added.\n */\n private updatedConfig(): serialization.ConfigDict {\n const theConfig = this.getConfig();\n const modelConfig: serialization.ConfigDict = {\n className: this.getClassName(),\n config: theConfig,\n kerasVersion: `tfjs-layers ${layersVersion}`,\n // TODO(nielsene): Replace something like K.backend() once\n // possible.\n backend: 'TensorFlow.js'\n };\n return modelConfig;\n }\n\n /**\n * Returns a JSON string containing the network configuration.\n *\n * To load a network from a JSON save file, use\n * models.modelFromJSON(jsonString);\n * @param extraJsonArgs Unused in tfjs-layers, maintained for PyKeras\n * @param returnString Whether the return value should be stringified\n * (default: `true`).\n * @returns a JSON string if `returnString` (default), or a JSON object if\n * `!returnString`.\n */\n // tslint:disable-next-line:no-any\n toJSON(unused?: any, returnString = true): string|JsonDict {\n const modelConfig = convertTsToPythonic(this.updatedConfig()) as JsonDict;\n return returnString ? JSON.stringify(modelConfig) : modelConfig;\n }\n\n /**\n * Call the model on new inputs.\n *\n * In this case `call` just reapplies all ops in the graph to the new inputs\n * (e.g. build a new computational graph from the provided inputs).\n *\n * @param inputs A tensor or list of tensors.\n * @param mask A mask or list of masks. 
A mask can be either a tensor or null\n * (no mask).\n *\n * @return A tensor if there is a single output, or a list of tensors if there\n * are more than one outputs.\n */\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n let masks: Tensor[];\n\n if ('mask' in kwargs) {\n masks = generic_utils.toList(kwargs['mask']);\n } else {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n // TODO(michaelterry): Add support for caching.\n return this.runInternalGraph(inputs, masks)[0];\n });\n }\n\n /**\n * Computes an output mask tensor.\n *\n * @param inputs Tensor or list of tensors.\n * @param mask Tensor or list of tensors.\n *\n * @return null or a tensor (or list of tensors, one per output tensor of the\n * layer).\n */\n computeMask(inputs: Tensor|Tensor[], mask?: Tensor|Tensor[]): Tensor\n |Tensor[] {\n return tidy(() => {\n inputs = generic_utils.toList(inputs);\n let masks: Tensor[];\n if (mask == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n } else {\n masks = generic_utils.toList(mask);\n }\n // TODO(michaelterry): Add support for mask caching.\n return this.runInternalGraph(inputs, masks)[1];\n });\n }\n\n /**\n * Computes the output shape of the layer.\n *\n * Assumes that the layer will be built to match that input shape provided.\n *\n * @param inputShape A shape (tuple of integers) or a list of shape tuples\n * (one per output tensor of the layer). Shape tuples can include null for\n * free dimensions, instead of an integer.\n */\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n const inputShapes = types_utils.normalizeShapeList(inputShape);\n if (inputShapes.length !== this.inputLayers.length) {\n throw new ValueError(\n `Invalid inputShape argument ${inputShape}: ` +\n `model has ${this.inputLayers.length} tensor inputs.`);\n }\n\n // TODO(michaelterry): Add caching\n const layersToOutputShapes: {[shapeKey: string]: Shape} = {};\n for (let i = 0; i < inputShapes.length; i++) {\n const layer = this.inputLayers[i];\n const inputShape = inputShapes[i];\n // It's an input layer: computeOutputShape is identity,\n // and there is only one node and one tensor output.\n const shapeKey = layer.name + '_0_0';\n layersToOutputShapes[shapeKey] = inputShape;\n }\n\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n // Iterate over nodes, by depth level.\n if (depthKeys.length > 1) {\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n if (this.inputLayers.map(x => x.id).indexOf(layer.id) !== -1) {\n // We've already covered the input layers a few lines above.\n continue;\n }\n // Potentially redundant list, same size of node.inputTensors.\n const inputShapes: Shape[] = [];\n for (let j = 0; j < node.inboundLayers.length; j++) {\n const inboundLayer = node.inboundLayers[j];\n const nodeIndex = node.nodeIndices[j];\n const tensorIndex = node.tensorIndices[j];\n const shapeKey = `${inboundLayer.name}_${nodeIndex}_${tensorIndex}`;\n const inputShape = layersToOutputShapes[shapeKey];\n inputShapes.push(inputShape);\n }\n\n const outputShape = layer.computeOutputShape(\n generic_utils.singletonOrArray(inputShapes));\n\n const outputShapes = types_utils.normalizeShapeList(outputShape);\n const nodeIndex = layer.inboundNodes.indexOf(node);\n for (let j = 
0; j < outputShapes.length; j++) {\n const shapeKey = `${layer.name}_${nodeIndex}_${j}`;\n layersToOutputShapes[shapeKey] = outputShapes[j];\n }\n }\n }\n }\n\n // Read final output shapes from layersToOutputShapes.\n const outputShapes: Shape[] = [];\n const outputShapeKeys: string[] = [];\n for (let i = 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n const tensorIndex = this.outputLayersTensorIndices[i];\n const shapeKey = `${layer.name}_${nodeIndex}_${tensorIndex}`;\n outputShapeKeys.push(shapeKey);\n }\n\n for (let i = 0; i < outputShapeKeys.length; i++) {\n const key = outputShapeKeys[i];\n generic_utils.assert(key in layersToOutputShapes);\n outputShapes.push(layersToOutputShapes[key]);\n }\n\n // TODO(michaelterry): Update cache\n return generic_utils.singletonOrArray(outputShapes);\n }\n\n /**\n * Computes output tensors for new inputs.\n *\n * Note:\n * - Expects `inputs` to be a list (potentially with 1 element).\n *\n * @param inputs List of tensors\n * @param masks List of masks (tensors or null).\n * @return Three lists: outputTensors, outputMasks, outputShapes\n */\n protected runInternalGraph(inputs: Tensor[], masks?: Tensor[]):\n [Tensor[], Tensor[], Shape[]] {\n if (masks == null) {\n masks = generic_utils.pyListRepeat(null, inputs.length);\n }\n\n // Dictionary mapping reference tensors to tuples\n // (computed tensor, compute mask)\n // we assume a 1:1 mapping from tensor to mask\n // TODO: raise exception when a `.computeMask()` call\n // does not return a list the same size as `call`\n const tensorMap: {[tensorID: string]: [Tensor, Tensor]} = {};\n for (let i = 0; i < this.inputs.length; ++i) {\n const x = this.inputs[i];\n const y = inputs[i];\n const mask = masks[i];\n tensorMap[x.id] = [y, mask];\n }\n\n const depthKeys = Object.keys(this.nodesByDepth)\n .map(x => parseInt(x, 10))\n .sort(generic_utils.reverseNumberCompare);\n for (const depth of depthKeys) {\n const nodes = this.nodesByDepth[depth];\n for (const node of nodes) {\n // This is always a single layer, never a list.\n const layer = node.outboundLayer;\n const referenceInputTensors = node.inputTensors;\n const referenceOutputTensors = node.outputTensors;\n\n // If all previous input tensors are available in tensorMap,\n // then call node.inboundLayer on them.\n // List of tuples [input, mask]:\n const computedData = new Array<[Tensor, Tensor]>();\n for (const x of referenceInputTensors) {\n if (x.id in tensorMap) {\n computedData.push(tensorMap[x.id]);\n }\n }\n if (computedData.length === referenceInputTensors.length) {\n // TODO(michaelterry): Add K.name_scope here, if we need it.\n let kwargs: Kwargs = {};\n let computedTensors: Tensor[];\n let computedMasks: Tensor[];\n let outputTensors: Tensor[];\n let outputMasks: Tensor[];\n // call layer\n if (node.callArgs != null) {\n kwargs = node.callArgs;\n }\n if (computedData.length === 1) {\n const [computedTensor, computedMask] = computedData[0];\n if (kwargs.mask == null) {\n kwargs['mask'] = computedMask;\n }\n outputTensors =\n generic_utils.toList(layer.call(computedTensor, kwargs));\n outputMasks = generic_utils.toList(\n layer.computeMask(computedTensor, computedMask));\n computedTensors = [computedTensor];\n computedMasks = [computedMask];\n } else {\n computedTensors = computedData.map(x => x[0]);\n computedMasks = computedData.map(x => x[1]);\n if (kwargs.mask == null) {\n kwargs['mask'] = computedMasks;\n }\n outputTensors =\n 
generic_utils.toList(layer.call(computedTensors, kwargs));\n outputMasks = generic_utils.toList(\n layer.computeMask(computedTensors, computedMasks));\n }\n\n if (layer.activityRegularizer) {\n throw new NotImplementedError(\n 'Model invocation with concrete Tensor value(s) in the ' +\n 'presence of activity regularizer(s) is not supported yet.');\n }\n // TODO(michaelterry): Add model updates and losses\n\n // Update tensor map.\n for (let i = 0; i < referenceOutputTensors.length; ++i) {\n const x = referenceOutputTensors[i];\n const y = outputTensors[i];\n const mask = outputMasks[i];\n tensorMap[x.id] = [y, mask];\n }\n }\n }\n }\n\n const outputTensors: Tensor[] = [];\n const outputMasks: Tensor[] = [];\n const outputShapes: Shape[] = [];\n for (const x of this.outputs) {\n generic_utils.assert(\n x.id in tensorMap, `Could not compute output ${x.name} : ${x.id}`);\n const [tensor, mask] = tensorMap[x.id];\n outputShapes.push(tensor.shape);\n outputTensors.push(tensor);\n outputMasks.push(mask);\n }\n\n // TODO(michaelterry): Add support for caches.\n return [outputTensors, outputMasks, outputShapes];\n }\n\n /**\n * Builds a map of internal node keys to node ordering.\n * Used in serializaion a node orderings may change as unused nodes are\n * dropped. Porting Note: This helper method was pulled out of getConfig to\n * improve readability.\n * @param layers An array of Layers in the model.\n * @returns Map of Node Keys to index order within the layer.\n */\n private buildNodeConversionMap(layers: Layer[]): {[nodeKey: string]: number} {\n const nodeConversionMap: {[nodeKey: string]: number} = {};\n let keptNodes: number;\n for (const layer of this.layers) {\n keptNodes = layer instanceof Container ? 1 : 0;\n for (let originalNodeIndex = 0;\n originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n // i.e. we mark it to be saved\n nodeConversionMap[nodeKey] = keptNodes;\n keptNodes += 1;\n }\n }\n }\n return nodeConversionMap;\n }\n\n /**\n * Retrieves a layer based on either its name (unique) or index.\n *\n * Indices are based on order of horizontal graph traversal (bottom-up).\n *\n * If both `name` and `index` are specified, `index` takes precedence.\n *\n * @param name Name of layer.\n * @param index Index of layer.\n * @returns A Layer instance.\n * @throws ValueError: In case of invalid layer name or index.\n */\n /**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Classes',\n * namespace: 'layers',\n * subclasses: ['Model']\n * }\n */\n getLayer(name?: string, index?: number): Layer {\n if (index != null) {\n if (this.layers.length <= index) {\n throw new ValueError(\n `Was asked to retrieve layer at index ${index}, but model only ` +\n `has ${this.layers.length} layer(s).`);\n } else {\n return this.layers[index];\n }\n } else {\n if (name == null) {\n throw new ValueError('Provide either a layer name or layer index');\n }\n }\n\n for (const layer of this.layers) {\n if (layer.name === name) {\n return layer;\n }\n }\n throw new ValueError(`No such layer: ${name}`);\n }\n\n /**\n * Retrieves the Container's current loss values.\n *\n * Used for regularizers during training.\n */\n calculateLosses(): Scalar[] {\n // Porting Node: This is an augmentation to Container.loss in PyKeras.\n // In PyKeras, Container.loss returns symbolic tensors. Here a concrete\n // Tensor (specifically Scalar) values are returned. 
This is due to the\n // imperative backend.\n return tidy(() => {\n const losses: Scalar[] = [];\n for (const layer of this.layers) {\n for (let nodeIndex = 0; nodeIndex < layer.inboundNodes.length;\n ++nodeIndex) {\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (this.containerNodes.has(nodeKey)) {\n losses.push(...layer.calculateLosses());\n }\n }\n }\n // TODO(cais): Add any unconditional model-level losses?\n return losses;\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {name: this.name};\n\n // Build a map from layer unique name (self._node_key)\n // to the index of the nodes that are saved in the config.\n // Only nodes in container_nodes are saved.\n const nodeConversionMap: {[nodeKey: string]: number} =\n this.buildNodeConversionMap(this.layers);\n\n // Serialize and save the layers in layerConfigs\n const layerConfigs = [];\n for (const layer of this.layers) {\n const layerClassName = layer.getClassName();\n const layerConfig = layer.getConfig();\n const filteredInboundNodes = [];\n for (let originalNodeIndex = 0;\n originalNodeIndex < layer.inboundNodes.length; originalNodeIndex++) {\n const node = layer.inboundNodes[originalNodeIndex];\n const nodeKey = Container.nodeKey(layer, originalNodeIndex);\n let kwargs = {};\n if (this.containerNodes.has(nodeKey)) {\n // The node is relevant to the model:\n // add to filteredInboundNodes.\n if (node.callArgs) {\n try {\n JSON.stringify(node.callArgs);\n kwargs = node.callArgs;\n } catch (err) {\n console.warn(\n `Layer ${layer.name} was passed ` +\n `non-serializable keyword arguments: ` +\n `${node.callArgs}. They will not be included ` +\n `in the serialized model (and thus will be ` +\n `missing at deserialization time).`);\n kwargs = {};\n }\n }\n if (node.inboundLayers.length > 0) {\n const nodeData = [];\n for (let i = 0; i < node.inboundLayers.length; i++) {\n const inboundLayer = node.inboundLayers[i];\n const nodeIndex = node.nodeIndices[i];\n const tensorIndex = node.tensorIndices[i];\n const nodeKey = Container.nodeKey(inboundLayer, nodeIndex);\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex == null) {\n newNodeIndex = 0;\n }\n nodeData.push(\n [inboundLayer.name, newNodeIndex, tensorIndex, kwargs]);\n }\n filteredInboundNodes.push(nodeData);\n }\n }\n }\n layerConfigs.push({\n name: layer.name,\n className: layerClassName,\n config: layerConfig,\n inboundNodes: filteredInboundNodes\n });\n }\n config['layers'] = layerConfigs;\n // Gather info about inputs and outputs\n const modelInputs = [];\n for (let i = 0; i < this.inputLayers.length; i++) {\n const layer = this.inputLayers[i];\n const nodeIndex = this.inputLayersNodeIndices[i];\n\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 0;\n }\n const tensorIndex = this.inputLayersTensorIndices[i];\n modelInputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['inputLayers'] = modelInputs;\n\n const modelOutputs = [];\n for (let i = 0; i < this.outputLayers.length; i++) {\n const layer = this.outputLayers[i];\n const nodeIndex = this.outputLayersNodeIndices[i];\n\n const nodeKey = Container.nodeKey(layer, nodeIndex);\n if (!this.containerNodes.has(nodeKey)) {\n continue;\n }\n let newNodeIndex = nodeConversionMap[nodeKey];\n if (newNodeIndex === null || newNodeIndex === undefined) {\n newNodeIndex = 
0;\n }\n const tensorIndex = this.outputLayersTensorIndices[i];\n modelOutputs.push([layer.name, newNodeIndex, tensorIndex]);\n }\n config['outputLayers'] = modelOutputs;\n return config;\n }\n\n /**\n * Instantiates a Model from its config (output of `get_config()`).\n * @param cls: the class to create\n * @param config: Model config dictionary.\n * @returns A model instance.\n * @throws ValueError: In case of improperly formatted config dict.\n */\n static fromConfig<T extends serialization.Serializable>(\n cls: serialization.SerializableConstructor<T>,\n config: serialization.ConfigDict): T {\n // Layer instances created during\n // the graph reconstruction process\n const createdLayers: {[layerName: string]: Layer} = {};\n\n // Dictionary mapping layer instances to\n // node data that specifies a layer call.\n // It acts as a queue that maintains any unprocessed\n // layer call until it becomes possible to process it\n // (i.e. until the input tensors to the call all exist).\n const unprocessedNodes:\n {[layer: string]: serialization.ConfigDict[][]} = {};\n function addUnprocessedNode(\n layer: Layer, nodeData: serialization.ConfigDict[]) {\n if (!(layer.name in unprocessedNodes)) {\n unprocessedNodes[layer.name] = [nodeData];\n } else {\n unprocessedNodes[layer.name].push(nodeData);\n }\n }\n\n function processNode(layer: Layer, nodeData: serialization.ConfigDict[]) {\n const inputTensors: SymbolicTensor[] = [];\n let kwargs;\n for (const inputData of nodeData) {\n const inboundLayerName = inputData[0] as string;\n const inboundNodeIndex = inputData[1] as number;\n const inboundTensorIndex = inputData[2] as number;\n if (inputData.length === 3) {\n kwargs = {};\n } else if (inputData.length === 4) {\n kwargs = inputData[3] as serialization.ConfigDict;\n } else {\n throw new ValueError(`Improperly formatted model config for layer ${\n JSON.stringify(layer)}: ${JSON.stringify(inputData)}`);\n }\n if (!(inboundLayerName in createdLayers)) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundLayer = createdLayers[inboundLayerName];\n if (inboundLayer.inboundNodes.length <= inboundNodeIndex) {\n addUnprocessedNode(layer, nodeData);\n return;\n }\n const inboundNode = inboundLayer.inboundNodes[inboundNodeIndex];\n inputTensors.push(inboundNode.outputTensors[inboundTensorIndex]);\n }\n // Call layer on its inputs, thus creating the node\n // and building the layer if needed.\n // Note: This has Eager vs Graph Implications.\n if (inputTensors.length > 0) {\n layer.apply(\n generic_utils.singletonOrArray(inputTensors),\n kwargs); // was ** kwargs\n }\n }\n\n /**\n * Deserialize a layer, then call it on appropriate inputs.\n * @param layerData: layer config dict.\n * @throws ValueError: In case of improperly formatted `layer_data`\n * dict.\n */\n function processLayer(layerData: serialization.ConfigDict|null) {\n const layerName = layerData.name as string;\n // Instantiate layer.\n const layer = deserializeLayer(\n layerData,\n config.customObjects != null ?\n config.customObjects as serialization.ConfigDict :\n {}) as Layer;\n createdLayers[layerName] = layer;\n // Gather layer inputs.\n const inboundNodesData =\n layerData.inboundNodes as serialization.ConfigDict[];\n for (const nodeData of inboundNodesData) {\n if (!(nodeData instanceof Array)) {\n throw new ValueError(\n `Corrupted configuration, expected array for nodeData: ${\n nodeData}`);\n }\n // We don't process nodes (i.e. 
make layer calls)\n // on the fly because the inbound node may not yet exist,\n // in case of layer shared at different topological depths\n // (e.g.a model such as A(B(A(B(x)))))\n addUnprocessedNode(layer, nodeData);\n }\n }\n\n // First, we create all layers and enqueue nodes to be processed.\n const name = config.name;\n const layersFromConfig = config.layers as serialization.ConfigDict[];\n for (const layerData of layersFromConfig) {\n processLayer(layerData);\n }\n\n // Then we process nodes in order of layer depth.\n // Nodes that cannot yet be processed(if the inbound node\n // does not yet exist) are re - enqueued, and the process\n // is repeated until all nodes are processed.\n while (!generic_utils.isObjectEmpty(unprocessedNodes)) {\n for (const layerData of layersFromConfig) {\n const layer = createdLayers[layerData.name as string];\n if (layer.name in unprocessedNodes) {\n const currentUnprocessedNodesForLayer = unprocessedNodes[layer.name];\n delete unprocessedNodes[layer.name];\n for (const nodeData of currentUnprocessedNodesForLayer) {\n processNode(layer, nodeData);\n }\n }\n }\n }\n\n const inputTensors: SymbolicTensor[] = [];\n const outputTensors: SymbolicTensor[] = [];\n const inputLayersFromConfig =\n config.inputLayers as serialization.ConfigDict[];\n for (const layerData of inputLayersFromConfig) {\n const layerName = layerData[0] as string;\n const nodeIndex = layerData[1] as number;\n const tensorIndex = layerData[2] as number;\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n inputTensors.push(layerOutputTensors[tensorIndex]);\n }\n const outputLayersFromConfig =\n config.outputLayers as serialization.ConfigDict[];\n for (const layerData of outputLayersFromConfig) {\n const layerName = layerData[0] as string;\n const nodeIndex = layerData[1] as number;\n const tensorIndex = layerData[2] as number;\n generic_utils.assert(layerName in createdLayers);\n const layer = createdLayers[layerName];\n const layerOutputTensors = layer.inboundNodes[nodeIndex].outputTensors;\n outputTensors.push(layerOutputTensors[tensorIndex]);\n }\n return new cls({inputs: inputTensors, outputs: outputTensors, name});\n }\n\n /**\n * Determine whether the container is stateful.\n *\n * Porting Note: this is the equivalent of the stateful @property of\n * the Container class in PyKeras.\n */\n get stateful(): boolean {\n // Porting Note: This check is to prevent inadvertent setting of the\n // _stateful property of the Container instance.\n if (this._stateful) {\n throw new ValueError(\n 'Container instance unexpectedly has _stateful = true. The ' +\n 'statefulness of a Container is determined by the Layers it ' +\n 'contains. 
Its _stateful property must remain the default false.');\n }\n for (const layer of this.layers) {\n if (layer.stateful) {\n return true;\n }\n }\n return false;\n }\n\n /**\n * Reset the state of all stateful constituent layers (if any).\n *\n * Examples of stateful layers include RNN layers whose `stateful` property\n * is set as `true`.\n */\n resetStates() {\n tidy(() => {\n this.layers.forEach(layer => {\n // tslint:disable:no-any\n if (layer.stateful) {\n layer.resetStates();\n }\n // tslint:enable:no-any\n });\n });\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Executor: Evaluates SymbolicTensor based on feeds.\n */\n\nimport {cast, Tensor} from '@tensorflow/tfjs-core';\n\nimport {ValueError} from '../errors';\nimport {Kwargs} from '../types';\n\nimport {InputLayer} from './input_layer';\nimport {SymbolicTensor} from './topology';\n\n/**\n * Helper function to check the dtype and shape compatibility of a feed value.\n */\nfunction assertFeedCompatibility(key: SymbolicTensor, val: Tensor): Tensor {\n // 1. Check shape compatibility. If shapes are not compatible, error.\n if (key.shape != null) {\n if (key.shape.length !== val.shape.length) {\n throw new ValueError(\n `The rank of feed (${val.shape.length}) does not match the rank of ` +\n `the key (${key.shape.length}).`);\n }\n\n for (let i = 0; i < key.shape.length; ++i) {\n if (key.shape[i] != null && key.shape[i] !== val.shape[i]) {\n throw new ValueError(\n `The ${i}-th dimension of the feed (${val.shape[i]}) is ` +\n `incompatible with that of the key (${key.shape[i]}).`);\n }\n }\n }\n // 2. Check dtype compatibility.\n if (key.dtype == null || key.dtype === val.dtype) {\n // 2a. If types match, return val tensor as is.\n return val;\n }\n try {\n // 2b. Attempt to convert to expected type.\n return cast(val, key.dtype);\n } catch (err) {\n // 2c. 
If conversion fails, return helpful error.\n throw new ValueError(\n `The dtype of the feed (${val.dtype}) can not be cast to the dtype ` +\n `of the key '${key.name}' (${key.dtype}).`);\n }\n}\n\n/**\n * A concrete Tensor value for a symbolic tensor as the key.\n */\nexport interface Feed {\n key: SymbolicTensor;\n value: Tensor;\n}\n\n/**\n * FeedDict: A mapping from unique SymbolicTensors to feed values for them.\n * A feed value is a concrete value represented as an `tf.Tensor`.\n */\nexport class FeedDict {\n private id2Value: {[id: number]: Tensor} = {};\n\n /**\n * Constructor, optionally does copy-construction.\n * @param feeds An Array of `Feed`s, or another `FeedDict`, in which case\n * copy-construction will be performed.\n */\n constructor(feeds?: Feed[]|FeedDict) {\n if (feeds instanceof FeedDict) {\n for (const id in feeds.id2Value) {\n this.id2Value[id] = feeds.id2Value[id];\n }\n } else {\n if (feeds == null) {\n return;\n }\n for (const feed of feeds) {\n this.add(feed.key, feed.value);\n }\n }\n }\n\n /**\n * Add a key-value pair to the FeedDict.\n * @param key The key of the feed.\n * @param value The value of the feed.\n * @returns This `FeedDict`.\n * @throws ValueError: If the key `tf.SymbolicTensor` already exists in the\n * `FeedDict`.\n */\n add(key: SymbolicTensor, value: Tensor): FeedDict {\n if (this.id2Value[key.id] == null) {\n this.id2Value[key.id] = assertFeedCompatibility(key, value);\n } else {\n throw new ValueError(`Duplicate key: name=${key.name}, id=${key.id}`);\n }\n return this;\n }\n\n /**\n * Add a Feed to the FeedDict.\n * @param feed The new `Feed` to add.\n * @returns This `FeedDict`.\n */\n addFeed(feed: Feed) {\n this.add(feed.key, feed.value);\n }\n\n /**\n * Probe whether a key already exists in the FeedDict.\n * @param key\n */\n hasKey(key: SymbolicTensor): boolean {\n return this.id2Value[key.id] != null;\n }\n\n /**\n * Get the feed value for given key.\n * @param key\n * @returns If `key` exists, the corresponding feed value.\n * @throws ValueError: If `key` does not exist in this `FeedDict`.\n */\n getValue(key: SymbolicTensor): Tensor {\n if (this.id2Value[key.id] == null) {\n throw new ValueError(`Nonexistent key: ${JSON.stringify(key)}`);\n } else {\n return this.id2Value[key.id];\n }\n }\n}\n\n/**\n * Execute a SymbolicTensor by using concrete feed values.\n *\n * A `tf.SymbolicTensor` object is a node in a computation graph of TF.js\n * Layers. The object is backed by a source layer and input\n * `tf.SymbolicTensor`s to the source layer. This method evaluates\n * the `call()` method of the source layer, using concrete values of the inputs\n * obtained from either\n * * `feedDict`, if the input key exists in `feedDict`, or else,\n * * a recursive call to `execute()` itself.\n *\n * @param x: The `tf.SymbolicTensor` to execute.\n * @param feedDict: The feed values, as base condition of the recursion.\n * execution.\n * @param kwargs: Optional keyword arguments.\n * @returns Result of the execution.\n * @throws ValueError: If any `tf.SymbolicTensor`s from `InputLayer`s\n * encountered during the execution lacks a feed value in `feedDict`.\n */\nexport function execute(\n fetches: SymbolicTensor|SymbolicTensor[], feedDict: FeedDict,\n kwargs?: Kwargs): Tensor|Tensor[]|[Tensor | Tensor[]] {\n const arrayFetches = Array.isArray(fetches);\n const fetchArray: SymbolicTensor[] =\n arrayFetches ? 
fetches as SymbolicTensor[] : [fetches as SymbolicTensor];\n\n const outputs: Tensor[] = [];\n const internalFeedDict = new FeedDict(feedDict);\n\n for (const fetch of fetchArray) {\n outputs.push(executeInternal(fetch, internalFeedDict, kwargs) as Tensor);\n }\n return arrayFetches ? outputs : outputs[0];\n}\n\nfunction executeInternal(\n fetch: SymbolicTensor, internalFeedDict: FeedDict,\n kwargs?: Kwargs): Tensor {\n if (internalFeedDict.hasKey(fetch)) {\n return internalFeedDict.getValue(fetch);\n }\n if (fetch.sourceLayer instanceof InputLayer) {\n throw new ValueError(\n `Missing a feed value for SymbolicTensor from InputLayer ` +\n `'${InputLayer.name}'`);\n }\n\n const inputs = fetch.inputs;\n const inputValues: Tensor[] = [];\n for (const input of inputs) {\n // Recursive call.\n const inputVal = executeInternal(input, internalFeedDict, kwargs) as Tensor;\n inputValues.push(inputVal);\n }\n\n let output =\n fetch.sourceLayer.apply(inputValues, kwargs) as Tensor | Tensor[];\n if (!Array.isArray(output)) {\n output = [output];\n }\n const layerOutputs = getNodeOutputs(fetch);\n const outputSymbolicTensors =\n Array.isArray(layerOutputs) ? layerOutputs : [layerOutputs];\n for (let i = 0; i < outputSymbolicTensors.length; ++i) {\n internalFeedDict.add(outputSymbolicTensors[i], output[i]);\n }\n return output.length === 1 ? output[0] : output[fetch.outputTensorIndex];\n}\n\n/**\n * Get the symbolic output tensors of the node to which a given fetch belongs.\n * @param fetch The fetched symbolic tensor.\n * @returns The Array of symbolic tensors output by the node to which `fetch`\n * belongs.\n */\nfunction getNodeOutputs(fetch: SymbolicTensor): SymbolicTensor|\n SymbolicTensor[] {\n let layerOutputs: SymbolicTensor|SymbolicTensor[];\n if (fetch.sourceLayer.inboundNodes.length === 1) {\n layerOutputs = fetch.sourceLayer.output;\n } else {\n let nodeIndex: number = null;\n for (let i = 0; i < fetch.sourceLayer.inboundNodes.length; ++i) {\n for (const outputTensor of fetch.sourceLayer.inboundNodes[i]\n .outputTensors) {\n if (outputTensor.id === fetch.id) {\n nodeIndex = i;\n break;\n }\n }\n }\n layerOutputs = fetch.sourceLayer.getOutputAt(nodeIndex);\n }\n return layerOutputs;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Interfaces and methods for training models using TensorFlow.js datasets.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {TensorContainer} from '@tensorflow/tfjs-core/dist/tensor_types';\n\nimport {getScalar} from '../backend/state';\nimport {BaseCallback, configureCallbacks, CustomCallbackConfig, History, ModelLoggingVerbosity, standardizeCallbacks, YieldEveryOptions} from '../base_callbacks';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {disposeTensorsInLogs, UnresolvedLogs} from '../logs';\nimport {singletonOrArray, toList} from '../utils/generic_utils';\n\nimport {Dataset, LazyIterator, TensorMap, TensorOrTensorMap} from './dataset_stub';\n\n/**\n * Interface for configuring model training based on a dataset object.\n */\nexport interface ModelFitDatasetConfig<T extends TensorContainer> {\n /**\n * (Optional) Total number of steps (batches of samples) before\n * declaring one epoch finished and starting the next epoch. 
It should\n * typically be equal to the number of samples of your dataset divided by\n * the batch size, so that `fitDataset`() call can utilize the entire dataset.\n * If it is not provided, use `done` return value in `iterator.next()` as\n * signal to finish an epoch.\n */\n batchesPerEpoch?: number;\n\n /**\n * The number of times to iterate over the training dataset.\n *\n * An integer.\n */\n epochs: number;\n\n /**\n * Verbosity level.\n *\n * Expected to be 0, 1, or 2. Default: 1.\n *\n * 0 - No printed message during fit() call.\n * 1 - In Node.js (tfjs-node), prints the progress bar, together with\n * real-time updates of loss and metric values and training speed.\n * In the browser: no action. This is the default.\n * 2 - Not implemented yet.\n */\n verbose?: ModelLoggingVerbosity;\n\n /**\n * List of callbacks to be called during training.\n * Can consist of one or more of the following fields: `onTrainBegin`,\n * `onTrainEnd`, `onEpochBegin`, `onEpochEnd`, `onBatchBegin`, `onBatchEnd`.\n */\n callbacks?: BaseCallback[]|CustomCallbackConfig|CustomCallbackConfig[];\n\n /**\n * Data on which to evaluate the loss and any model\n * metrics at the end of each epoch. The model will not be trained on this\n * data. This could be any of the following:\n *\n * - an Array of `tf.Tensor` objects: [xVal, yVal]\n * - an Array of `tf.Tensor` objects:\n * [xVal, yVal, valSampleWeights] (not implemented yet).\n * - a dataset object.\n *\n * If `validationData` is an Array of Tensor objects, the `tf.Tensor` will be\n * sliced into batches during validation, using the parameter\n * `validationBatchSize` (which defaults to 32). The entirety of the\n * `tf.Tensor` objects will be used in the validation.\n *\n * If `validationData` is a dataset object, and the `validationBatches`\n * parameter is specified, the validation will use `validationBatches` batches\n * drawn from the dataset object. If `validationBatches` parameter is not\n * specified, the validation will stop when the dataset is exhausted.\n *\n * The model will not be trained on this data.\n */\n validationData?:\n [\n tfc.Tensor|tfc.Tensor[]|TensorMap, tfc.Tensor|tfc.Tensor[]|TensorMap\n ]|[tfc.Tensor | tfc.Tensor[] | TensorMap,\n tfc.Tensor|tfc.Tensor[]|TensorMap, tfc.Tensor|tfc.Tensor[]|TensorMap]|\n Dataset<T>;\n\n /**\n * Optional batch size for validation.\n *\n * Used only if `validationData` is an array of `tf.Tensor` objects, i.e., not\n * a dataset object.\n *\n * If not specified, its value defaults to 32.\n */\n validationBatchSize?: number;\n\n /**\n * (Optional) Only relevant if `validationData` is specified and is a dataset\n * object.\n *\n * Total number of batches of samples to draw from `validationData` for\n * validation purpose before stopping at the end of every epoch. If not\n * specified, `evaluateDataset` will use `iterator.next().done` as signal to\n * stop validation.\n */\n validationBatches?: number;\n\n /**\n * Configures the frequency of yielding the main thread to other tasks.\n *\n * In the browser environment, yielding the main thread can improve the\n * responsiveness of the page during training. In the Node.js environment,\n * it can ensure tasks queued in the event loop can be handled in a timely\n * manner.\n *\n * - The value can be one of the following strings:\n * - 'auto': automatically determine how frequently the yielding happens\n * by measuring the duration of each batch of training (default).\n * - 'batch': yield every batch.\n * - 'epoch': yield every epoch.\n * - 'never': never yield. 
(But yielding can still happen through `await\n * nextFrame()` calls in custom callbacks.)\n */\n yieldEvery?: YieldEveryOptions;\n\n /**\n * Epoch at which to start training (useful for resuming a previous training\n * run).\n */\n initialEpoch?: number;\n}\n\n/**\n * Interface for configuring model evaluation based on a dataset object.\n */\nexport interface ModelEvaluateDatasetConfig {\n /**\n * Number of batches to draw from the dataset object before ending the\n * evaluation.\n */\n batches?: number;\n\n /**\n * Verbosity mode.\n */\n verbose?: ModelLoggingVerbosity;\n}\n\n// Default batch size used during tensor-based validation.\nconst DEFAULT_VALIDATION_BATCH_SIZE = 32;\n\n/**\n * Standardize the output of a dataset iterator for use by Model.fitDataset().\n *\n * @param model: A `tf.Model` object.\n * @param iteratorOut The output of a dataset iterator. It is required to be\n * an array of two tensor containers. Each of the two elements of the array\n * must be a single `tf.Tensor` or a map from string names to `tf.Tensor`s.\n * @returns A flat array of `tf.Tensor` objects: the input `tf.Tensor`s followed\n * by the target `tf.Tensor`s.\n */\nfunction standardizeDataIteratorOutput(\n // Type `model` as `any` here to avoid circular dependency w/ training.ts.\n // tslint:disable-next-line:no-any\n model: any, iteratorOut: TensorContainer): tfc.Tensor[] {\n if (model.outputs.length > 1) {\n throw new NotImplementedError(\n `Support for training a model with multiple output tensors with ` +\n `a dataset object is not implemented yet.`);\n }\n\n tfc.util.assert(\n Array.isArray(iteratorOut) && iteratorOut.length === 2,\n 'Dataset iterator for fitDataset() is expected to generate ' +\n 'an Array of length 2: `[xs, ys]`, but instead generates ' +\n iteratorOut);\n // TODO(cais): If there are multiple inputs or outputs, make sure\n // they all have the same batch size.\n iteratorOut = iteratorOut as [TensorOrTensorMap, TensorOrTensorMap];\n const ys = iteratorOut[1] as tfc.Tensor;\n let xs = iteratorOut[0] as TensorOrTensorMap;\n if (xs instanceof tfc.Tensor) {\n tfc.util.assert(\n model.inputs.length === 1,\n `Model has multiple ${model.inputs.length} inputs, hence it ` +\n `expects the input dataset to generate a dictionary of tensors ` +\n ` (with keys ${JSON.stringify(model.inputNames)}, ` +\n `but received a single tensor.`);\n tfc.util.assert(\n xs.shape[0] === ys.shape[0],\n `Mismatch in batch size between x and y tensors (${xs.shape[0]} vs. ` +\n `${ys.shape[0]})`);\n return [xs, ys];\n } else {\n let batchSize: number;\n xs = xs as TensorMap;\n const flattendXs: tfc.Tensor[] = [];\n // Check that all the required keys are available and all the batch sizes\n // are equal.\n for (const inputName of model.inputNames) {\n if (xs[inputName] == null) {\n throw new ValueError(\n `The feature data generated by the dataset lacks the required ` +\n `input key '${inputName}'.`);\n }\n flattendXs.push(xs[inputName]);\n if (batchSize == null) {\n batchSize = xs[inputName].shape[0];\n } else {\n tfc.util.assert(\n xs[inputName].shape[0] === batchSize,\n `Mismatch in batch size between x and y tensors ` +\n `(${xs[inputName].shape[0]} vs. 
${ys.shape[0]})`);\n }\n }\n return flattendXs.concat(ys);\n }\n\n // TODO(cais): Handle case in which ys is a TensorMap.\n}\n\nfunction standardizeTensorValidationData<T extends TensorContainer>(\n data:\n [\n tfc.Tensor|tfc.Tensor[], tfc.Tensor|tfc.Tensor[]\n ]|[tfc.Tensor | tfc.Tensor[], tfc.Tensor | tfc.Tensor[],\n tfc.Tensor | tfc.Tensor[]]):\n {xs: tfc.Tensor|tfc.Tensor[], ys: tfc.Tensor|tfc.Tensor[]} {\n if (data.length === 3) {\n throw new NotImplementedError(\n 'Validation with sample weights is not implemented yet.');\n }\n return {xs: data[0], ys: data[1]};\n}\n\nexport async function fitDataset<T extends TensorContainer>(\n // Type `model` as `any` here to avoid circular dependency w/ training.ts.\n // tslint:disable-next-line:no-any\n model: any, dataset: Dataset<T>,\n config: ModelFitDatasetConfig<T>): Promise<History> {\n const hasBatchesPerEpoch = config.batchesPerEpoch != null;\n tfc.util.assert(\n model.optimizer != null,\n 'You must compile a model before training/testing. Use ' +\n 'Model.compile(modelCompileConfig).');\n\n tfc.util.assert(\n config != null,\n `For fitDataset(), the 2nd argument (config) is required, ` +\n `but it is not provided in this call.`);\n tfc.util.assert(\n config.epochs != null && config.epochs > 0 &&\n Number.isInteger(config.epochs),\n `For fitDataset(), config.epochs is expected to be a positive ` +\n `integer, but got ${config.epochs}`);\n tfc.util.assert(\n !hasBatchesPerEpoch ||\n (config.batchesPerEpoch > 0 &&\n Number.isInteger(config.batchesPerEpoch)),\n `For fitDataset(), config.batchesPerEpoch is expected to be a ` +\n `positive integer if specified, but got ${config.batchesPerEpoch}`);\n tfc.util.assert(\n // tslint:disable-next-line:no-any\n (config as any)['validationSplit'] == null,\n '`validationSplit` is not supported by `fitDataset()`. 
' +\n 'Use validationData instead.');\n\n if (model.isTraining) {\n throw new Error(\n 'Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n\n try {\n const doValidation = config.validationData != null;\n let valXs: tfc.Tensor|tfc.Tensor[];\n let valYs: tfc.Tensor|tfc.Tensor[];\n if (doValidation) {\n if (isDatasetObject(config.validationData)) {\n tfc.util.assert(\n config.validationBatches == null ||\n (config.validationBatches > 0 &&\n Number.isInteger(config.validationBatches)),\n `For fitDataset() with dataset-based validation, ` +\n `config.validationBatches is expected not to be provided, ` +\n `or to be a positive integer, ` +\n `but got ${config.validationBatches}`);\n } else {\n const validationData = standardizeTensorValidationData(\n config.validationData as\n [tfc.Tensor | tfc.Tensor[], tfc.Tensor | tfc.Tensor[]] |\n [\n tfc.Tensor | tfc.Tensor[], tfc.Tensor | tfc.Tensor[],\n tfc.Tensor | tfc.Tensor[]\n ]);\n valXs = validationData.xs;\n valYs = validationData.ys;\n }\n }\n\n const trainFunction = model.makeTrainFunction();\n const outLabels = model.getDedupedMetricsNames() as string[];\n\n let callbackMetrics: string[];\n if (doValidation) {\n callbackMetrics =\n outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n } else {\n callbackMetrics = outLabels.slice();\n }\n\n const callbacks = standardizeCallbacks(config.callbacks);\n const {callbackList, history} = configureCallbacks(\n callbacks, config.yieldEvery, config.verbose, config.epochs, null, null,\n config.batchesPerEpoch,\n null, // Batch size determined by the dataset itself.\n doValidation, callbackMetrics);\n model.history = history;\n\n await callbackList.onTrainBegin();\n let epoch = config.initialEpoch == null ? 0 : config.initialEpoch;\n const epochLogs: UnresolvedLogs = {};\n\n let dataIterator = await dataset.iterator();\n while (epoch < config.epochs) {\n await callbackList.onEpochBegin(epoch);\n let stepsDone = 0;\n let batchIndex = 0;\n if (!hasBatchesPerEpoch) {\n dataIterator = await dataset.iterator();\n }\n while (hasBatchesPerEpoch ? stepsDone < config.batchesPerEpoch : true) {\n const iteratorOut = await dataIterator.next();\n\n // If `batchesPerEpoch` is specified, the dataset should not be\n // exhausted until all epoches are done.\n if (hasBatchesPerEpoch && iteratorOut.done) {\n console.warn(\n 'You provided `batchesPerEpoch` as ' +\n `${config.batchesPerEpoch}, ` +\n 'but your dataset iterator ran out of data after ' +\n `${stepsDone} batches; ` +\n 'interrupting training. Make sure that your ' +\n 'dataset can generate at least `batchesPerEpoch * epochs` ' +\n 'batches (in this case, ' +\n `${config.batchesPerEpoch * config.epochs} batches). 
` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n break;\n }\n\n\n if (iteratorOut.value != null) {\n const xsAndYs =\n standardizeDataIteratorOutput(model, iteratorOut.value);\n const batchLogs: UnresolvedLogs = {};\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = xsAndYs[0].shape[0];\n\n callbackList.onBatchBegin(batchIndex, batchLogs);\n\n // Train on batch.\n // TODO(cais): Take care of models with multiple outputs.\n const outs = trainFunction(xsAndYs);\n tfc.dispose(xsAndYs);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n }\n\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n\n batchIndex++;\n stepsDone++;\n }\n\n if (hasBatchesPerEpoch ? stepsDone >= config.batchesPerEpoch :\n iteratorOut.done) {\n // Epoch finished. Perform validation.\n if (doValidation) {\n let valOuts: tfc.Scalar[];\n if (isDatasetObject(config.validationData)) {\n valOuts = toList(await model.evaluateDataset(\n config.validationData, {batches: config.validationBatches}));\n } else {\n valOuts = toList(model.evaluate(valXs, valYs, {\n batchSize: config.validationBatchSize == null ?\n DEFAULT_VALIDATION_BATCH_SIZE :\n config.validationBatchSize,\n verbose: 0\n }));\n }\n for (let i = 0; i < model.metricsNames.length; ++i) {\n epochLogs[`val_${model.metricsNames[i]}`] = valOuts[i];\n }\n }\n // Call `break` to exit one epoch lopp after validation is done. If\n // config.batchesPerEpoch is specified, an epoch while loop will stop\n // when `stepsDone >= config.batchesPerEpoch`. When\n // config.batchesPerEpoch is not provided, the following `break` is\n // required to exit the while lopp after dataset is exhausted.\n break;\n }\n\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onEpochEnd(epoch, epochLogs);\n epoch++;\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n await model.history.syncData();\n return model.history;\n } finally {\n model.isTraining = false;\n }\n}\n\n// Check if provided object is a Dataset object by checking it's .iterator\n// element.\nfunction isDatasetObject<T extends TensorContainer>(\n dataset:\n [\n tfc.Tensor|tfc.Tensor[]|TensorMap, tfc.Tensor|tfc.Tensor[]|TensorMap\n ]|[tfc.Tensor | tfc.Tensor[] | TensorMap,\n tfc.Tensor | tfc.Tensor[] | TensorMap,\n tfc.Tensor | tfc.Tensor[] | TensorMap]|Dataset<T>): boolean {\n return (typeof (dataset as Dataset<T>).iterator === 'function');\n}\n\n// Check if provided object is a LazyIterator object by checking it's .next\n// element.\nfunction isLazyIteratorObject<T extends TensorContainer>(\n iterator: Dataset<T>|LazyIterator<T>): boolean {\n return (typeof (iterator as LazyIterator<T>).next === 'function');\n}\n\nexport async function evaluateDataset<T extends TensorContainer>(\n // Type `model` as `any` here to avoid circular dependency w/ training.ts.\n // tslint:disable-next-line:no-any\n model: any, dataset: Dataset<T>|LazyIterator<T>,\n config: ModelEvaluateDatasetConfig): Promise<tfc.Scalar|tfc.Scalar[]> {\n const hasBatches = config.batches != null;\n const f = model.testFunction;\n const outs: tfc.Scalar[] = [];\n if (config.verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n\n tfc.util.assert(\n !hasBatches || (config.batches > 0 && Number.isInteger(config.batches)),\n 'Test loop expects `batches` to be a positive integer, but ' +\n `received 
${JSON.stringify(config.batches)}`);\n const dataIterator = isLazyIteratorObject(dataset) ?\n dataset as LazyIterator<T>:\n await (dataset as Dataset<T>).iterator();\n // Keeps track of number of examples used in this evaluation.\n let numExamples = 0;\n let batch = 0;\n while (hasBatches ? batch < config.batches : true) {\n const iteratorOut = await dataIterator.next();\n if (iteratorOut.value) {\n // TODO(cais): Once real dataset is available, use\n // `map(x => standardizeDataIteratorOutput(model, x).map(f)`.\n const xsAndYs = standardizeDataIteratorOutput(model, iteratorOut.value);\n const batchOuts = tfc.tidy(() => f(xsAndYs));\n tfc.dispose(xsAndYs);\n\n if (batch === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(getScalar(0));\n }\n }\n const batchSize = xsAndYs[0].shape[0];\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n const oldScalar = outs[i];\n outs[i] = tfc.tidy(\n () => tfc.add(outs[i], tfc.mul(getScalar(batchSize), batchOut)) as\n tfc.Scalar);\n if (batch > 0) {\n tfc.dispose(oldScalar);\n }\n }\n tfc.dispose(batchOuts);\n numExamples += batchSize;\n\n ++batch;\n }\n if (iteratorOut.done) {\n if (hasBatches) {\n console.warn(\n 'Your dataset iterator ran out of data during evaluateDataset(). ' +\n 'Interrupting evalution. Make sure that your ' +\n 'dataset can generate at least `batches` ' +\n `batches (in this case, ${config.batches} batches). ` +\n 'You may need to use the repeat() function when building ' +\n 'your dataset.');\n }\n break;\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n const oldScalar = outs[i];\n outs[i] =\n tfc.tidy(() => tfc.div(outs[i], getScalar(numExamples)) as tfc.Scalar);\n tfc.dispose(oldScalar);\n }\n\n return singletonOrArray(outs);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Interfaces and methods for training models using tf.Tensor objects.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {Scalar, Tensor, Tensor1D, tensor1d, util} from '@tensorflow/tfjs-core';\n\nimport {expandDims, gather, sliceAlongFirstAxis} from '../backend/tfjs_backend';\nimport {BaseCallback, configureCallbacks, CustomCallbackConfig, History, ModelLoggingVerbosity, standardizeCallbacks, YieldEveryOptions} from '../base_callbacks';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {disposeTensorsInLogs, UnresolvedLogs} from '../logs';\nimport {range} from '../utils/math_utils';\n\n/**\n * Interface configuration model training based on data as `tf.Tensor`s.\n */\nexport interface ModelFitConfig {\n /**\n * Number of samples per gradient update. If unspecified, it\n * will default to 32.\n */\n batchSize?: number;\n\n /** The number of times to iterate over the training data arrays. */\n epochs?: number;\n\n /**\n * Verbosity level.\n *\n * Expected to be 0, 1, or 2. Default: 1.\n *\n * 0 - No printed message during fit() call.\n * 1 - In Node.js (tfjs-node), prints the progress bar, together with\n * real-time updates of loss and metric values and training speed.\n * In the browser: no action. 
This is the default.\n * 2 - Not implemented yet.\n */\n verbose?: ModelLoggingVerbosity;\n\n /**\n * List of callbacks to be called during training.\n * Can consist of one or more of the following fields: `onTrainBegin`,\n * `onTrainEnd`, `onEpochBegin`, `onEpochEnd`, `onBatchBegin`, `onBatchEnd`.\n */\n callbacks?: BaseCallback[]|CustomCallbackConfig|CustomCallbackConfig[];\n\n /**\n * Float between 0 and 1: fraction of the training data\n * to be used as validation data. The model will set apart this fraction of\n * the training data, will not train on it, and will evaluate the loss and\n * any model metrics on this data at the end of each epoch.\n * The validation data is selected from the last samples in the `x` and `y`\n * data provided, before shuffling.\n */\n validationSplit?: number;\n\n /**\n * Data on which to evaluate the loss and any model\n * metrics at the end of each epoch. The model will not be trained on this\n * data. This could be a tuple [xVal, yVal] or a tuple [xVal, yVal,\n * valSampleWeights]. The model will not be trained on this data.\n * `validationData` will override `validationSplit`.\n */\n validationData?: [\n Tensor|Tensor[], Tensor|Tensor[]\n ]|[Tensor | Tensor[], Tensor|Tensor[], Tensor|Tensor[]];\n\n /**\n * Whether to shuffle the training data before each epoch. Has\n * no effect when `stepsPerEpoch` is not `null`.\n */\n shuffle?: boolean;\n\n /**\n * Optional dictionary mapping class indices (integers) to\n * a weight (float) to apply to the model's loss for the samples from this\n * class during training. This can be useful to tell the model to \"pay more\n * attention\" to samples from an under-represented class.\n */\n classWeight?: {[classIndex: string]: number};\n\n /**\n * Optional array of the same length as x, containing\n * weights to apply to the model's loss for each sample. In the case of\n * temporal data, you can pass a 2D array with shape (samples,\n * sequenceLength), to apply a different weight to every timestep of every\n * sample. In this case you should make sure to specify\n * sampleWeightMode=\"temporal\" in compile().\n */\n sampleWeight?: Tensor;\n\n /**\n * Epoch at which to start training (useful for resuming a previous training\n * run).\n */\n initialEpoch?: number;\n\n /**\n * Total number of steps (batches of samples) before\n * declaring one epoch finished and starting the next epoch. When training\n * with Input Tensors such as TensorFlow data tensors, the default `null` is\n * equal to the number of unique samples in your dataset divided by the\n * batch size, or 1 if that cannot be determined.\n */\n stepsPerEpoch?: number;\n\n /**\n * Only relevant if `stepsPerEpoch` is specified. Total number of steps\n * (batches of samples) to validate before stopping.\n */\n validationSteps?: number;\n\n /**\n * Configures the frequency of yielding the main thread to other tasks.\n *\n * In the browser environment, yielding the main thread can improve the\n * responsiveness of the page during training. In the Node.js environment,\n * it can ensure tasks queued in the event loop can be handled in a timely\n * manner.\n *\n * - The value can be one of the following strings:\n * - 'auto': automatically determine how frequently the yielding happens\n * by measuring the duration of each batch of training (default).\n * - 'batch': yield every batch.\n * - 'epoch': yield every epoch.\n * - 'never': never yield. 
(But yielding can still happen through `await\n * nextFrame()` calls in custom callbacks.)\n */\n yieldEvery?: YieldEveryOptions;\n}\n\nexport function checkBatchSize(batchSize: number) {\n tfc.util.assert(\n batchSize > 0 && Number.isInteger(batchSize),\n `batchSize is required to be a positive integer, but got ${batchSize}`);\n}\n\n\n/**\n * Slice an Tensor or an Array of Tensors, by start and stop indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArraysByIndices()` together.\n *\n * @param arrays: the input.\n * @param start: the starting index (inclusive).\n * @param stop: the stopping index (exclusive).\n * @returns The result of the slicing. If `arrays` is an `Array` of\n * `tf.Tensor`s, the slicing will be applied to all elements of the `Array`\n * in the same way.\n */\nexport function sliceArrays(\n arrays: Tensor|Tensor[], start: number, stop: number): Tensor|Tensor[] {\n if (arrays == null) {\n return [null];\n } else if (Array.isArray(arrays)) {\n return arrays.map(array => sliceAlongFirstAxis(array, start, stop - start));\n } else { // Tensor.\n return sliceAlongFirstAxis(arrays, start, stop - start);\n }\n}\n\n/**\n * Slice an Tensor or an Array of Tensors, by random-order indices.\n *\n * Porting Note: The `_slice_arrays` function in PyKeras is covered by this\n * function and `sliceArrays()` together.\n *\n * @param arrays The input `tf.Tensor` or `Array` of `tf.Tensor`s to slice.\n * If an `Array` of `tf.Tensor`s, all `tf.Tensor`s will be sliced in the\n * same fashion.\n * @param indices The indices to use for slicing along the first (batch)\n * dimension.\n * @returns Result(s) of the slicing.\n */\nexport function sliceArraysByIndices(\n arrays: Tensor|Tensor[], indices: Tensor1D): Tensor|Tensor[] {\n return tfc.tidy(() => {\n if (arrays == null) {\n return null;\n } else if (Array.isArray(arrays)) {\n return arrays.map(\n array => (sliceArraysByIndices(array, indices) as Tensor));\n } else {\n // TODO(cais): indices should be a pre-constructed Tensor1D to avoid\n // tensor1d() calls.\n return gather(\n arrays, indices.dtype === 'int32' ? indices : indices.toInt());\n }\n });\n}\n\n/**\n * Returns a list of batch indices (tuples of indices).\n * @param size: Integer, total size of the data to slice into batches.\n * @param batchSize: Integer, batch size.\n * @returns An Array of [batchStart, batchEnd] tuples. batchStart is\n * inclusive; batchEnd is exclusive. I.e., each batch consists of indices x\n * that satisfy batchStart <= x < batchEnd.\n */\nexport function makeBatches(\n size: number, batchSize: number): Array<[number, number]> {\n const output: Array<[number, number]> = [];\n let batchStart = 0;\n let batchEnd: number = null;\n while (batchStart < size) {\n batchEnd = batchStart + batchSize;\n if (batchEnd >= size) {\n batchEnd = size;\n }\n output.push([batchStart, batchEnd]);\n batchStart = batchEnd;\n }\n return output;\n}\n\n/**\n * Abstract fit function for `f(ins)`.\n * @param f A Function returning a list of tensors. For training, this\n * function is expected to perform the updates to the variables.\n * @param ins List of tensors to be fed to `f`.\n * @param outLabels List of strings, display names of the outputs of `f`.\n * @param batchSize Integer batch size or `== null` if unknown.\n * @param epochs Number of times to iterate over the data.\n * @param verbose Verbosity mode: 0, 1, or 2. 
Default: 1.\n * @param callbacks List of callbacks to be called during training.\n * @param valF Function to call for validation.\n * @param valIns List of tensors to be fed to `valF`.\n * @param shuffle Whether to shuffle the data at the beginning of every\n * epoch.\n * @param callbackMetrics List of strings, the display names of the metrics\n * passed to the callbacks. They should be the concatenation of the\n * display names of the outputs of `f` and the list of display names\n * of the outputs of `valF`.\n * @param initialEpoch Epoch at which to start training (useful for\n * resuming a previous training run).\n * @param stepsPerEpoch Total number of steps (batches on samples) before\n * declaring one epoch finished and starting the next epoch. Ignored with\n * the default value of `undefined` or `null`.\n * @param validationSteps Number of steps to run validation for (only if\n * doing validation from data tensors). Not applicable for tfjs-layers.\n * @returns A `History` object.\n */\nasync function fitLoop(\n // Type `model` as `any` here to avoid circular dependency w/ training.ts.\n // tslint:disable-next-line:no-any\n model: any, f: (data: Tensor[]) => Scalar[], ins: Tensor[],\n outLabels?: string[], batchSize?: number, epochs?: number, verbose?: number,\n callbacks?: BaseCallback[], valF?: (data: Tensor[]) => Scalar[],\n valIns?: Tensor[], shuffle?: boolean|string, callbackMetrics?: string[],\n initialEpoch?: number, stepsPerEpoch?: number, validationSteps?: number,\n yieldEvery?: YieldEveryOptions): Promise<History> {\n if (batchSize == null) {\n batchSize = 32;\n }\n if (epochs == null) {\n epochs = 1;\n }\n if (shuffle == null) {\n shuffle = true;\n }\n if (initialEpoch == null) {\n initialEpoch = 0;\n }\n\n // TODO(cais): Change const to let below when implementing validation.\n let doValidation = false;\n if (valF != null && valIns != null) {\n doValidation = true;\n // TODO(cais): verbose message.\n }\n if (validationSteps != null) {\n doValidation = true;\n if (stepsPerEpoch == null) {\n throw new ValueError(\n 'Can only use `validationSteps` when doing step-wise training, ' +\n 'i.e., `stepsPerEpoch` must be set.');\n }\n }\n\n const numTrainSamples =\n model.checkNumSamples(ins, batchSize, stepsPerEpoch, 'steps_per_epoch');\n let indexArray: number[];\n if (numTrainSamples != null) {\n indexArray = range(0, numTrainSamples);\n }\n\n if (verbose == null) {\n verbose = 1;\n }\n\n const {callbackList, history} = configureCallbacks(\n callbacks, yieldEvery, verbose, epochs, initialEpoch, numTrainSamples,\n stepsPerEpoch, batchSize, doValidation, callbackMetrics);\n callbackList.setModel(model);\n model.history = history;\n await callbackList.onTrainBegin();\n model.stopTraining_ = false;\n // TODO(cais): Take care of callbacks.validation_data as in PyKeras.\n // TODO(cais): Pre-convert feeds for performance as in PyKeras.\n\n for (let epoch = initialEpoch; epoch < epochs; ++epoch) {\n await callbackList.onEpochBegin(epoch);\n const epochLogs: UnresolvedLogs = {};\n if (stepsPerEpoch != null) {\n throw new NotImplementedError(\n 'stepsPerEpoch mode is not implemented yet.');\n } else {\n if (shuffle === 'batch') {\n throw new NotImplementedError('batch shuffling is not implemneted yet');\n } else if (shuffle) {\n util.shuffle(indexArray);\n }\n // Convert the potentially shuffled indices to Tensor1D, to avoid the\n // cost of repeated creation of Array1Ds later on.\n const epochIndexArray1D = tensor1d(indexArray);\n\n const batches = makeBatches(numTrainSamples, 
batchSize);\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchLogs: UnresolvedLogs = {};\n await callbackList.onBatchBegin(batchIndex, batchLogs);\n\n tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds = sliceAlongFirstAxis(\n epochIndexArray1D, batchStart,\n batchEnd - batchStart) as Tensor1D;\n batchLogs['batch'] = batchIndex;\n batchLogs['size'] = batchEnd - batchStart;\n\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds) as Tensor[];\n const outs = f(insBatch);\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = outs[i];\n batchLogs[label] = out;\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n }\n\n if (batchIndex === batches.length - 1) { // Last batch.\n if (doValidation) {\n const valOuts = model.testLoop(valF, valIns, batchSize);\n // Porting Notes: In tfjs-layers, valOuts is always an Array.\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n const out = valOuts[i];\n tfc.keep(out);\n // TODO(cais): Use scope() to avoid ownership.\n epochLogs['val_' + label] = out;\n }\n }\n }\n });\n\n await callbackList.onBatchEnd(batchIndex, batchLogs);\n disposeTensorsInLogs(batchLogs);\n\n if (model.stopTraining_) {\n break;\n }\n // TODO(cais): return outs as list of Tensor.\n }\n\n epochIndexArray1D.dispose();\n }\n // TODO(cais): Run validation at the end of the epoch.\n await callbackList.onEpochEnd(epoch, epochLogs);\n if (model.stopTraining_) {\n break;\n }\n }\n await callbackList.onTrainEnd();\n\n await model.history.syncData();\n return model.history;\n}\n\nexport async function fitTensors(\n // Type `model` as `any` here to avoid circular dependency w/ training.ts.\n // tslint:disable-next-line:no-any\n model: any, x: Tensor|Tensor[]|{[inputName: string]: Tensor},\n y: Tensor|Tensor[]|{[inputName: string]: Tensor},\n config: ModelFitConfig = {}): Promise<History> {\n if (model.isTraining) {\n throw new Error(\n 'Cannot start training because another fit() call is ongoing.');\n }\n model.isTraining = true;\n let inputs: Tensor[];\n let targets: Tensor[];\n let inputValX: Tensor|Tensor[];\n let inputValY: Tensor|Tensor[];\n let valX: Tensor|Tensor[];\n let valY: Tensor|Tensor[];\n try {\n const batchSize = config.batchSize == null ? 
32 : config.batchSize;\n checkBatchSize(batchSize);\n\n // Validate user data.\n // TODO(cais): Add sampleWeight and classWeight.\n const standardizedOuts =\n model.standardizeUserData(\n x, y, false, batchSize) as [Tensor[], Tensor[]];\n inputs = standardizedOuts[0];\n targets = standardizedOuts[1];\n // TODO(cais): Make use of sampleWeights in standardizedOuts[2] when\n // available.\n\n // Prepare validation data.\n let doValidation = false;\n let valIns: Tensor[];\n if (config.validationData != null && config.validationData.length > 0) {\n doValidation = true;\n if (config.validationData.length === 2) {\n // config.validationData consists of valX and valY.\n inputValX = config.validationData[0];\n inputValY = config.validationData[1];\n } else if (config.validationData.length === 3) {\n throw new NotImplementedError(\n 'validationData including sample weights is not supported yet.');\n } else {\n throw new ValueError(\n `When passing validation data, it must contain 2 (valX, valY) ` +\n `or 3 (valX, valY, valSampleWeight) items; ` +\n `${config.validationData} is invalid.`);\n }\n\n const valStandardized = model.standardizeUserData(\n inputValX, inputValY, true,\n batchSize) as [Tensor[], Tensor[], Tensor[]];\n valX = valStandardized[0];\n valY = valStandardized[1];\n // TODO(cais): Use validation sample weights in valStandardized[2]\n // once\n // it becomes available.\n valIns = valX.concat(valY);\n // TODO(cais): Add useLearningPhase data properly.\n } else if (\n config.validationSplit != null && config.validationSplit > 0 &&\n config.validationSplit < 1) {\n doValidation = true;\n // Porting Note: In tfjs-layers, inputs[0] is always an Tensor.\n const splitAt =\n Math.floor(inputs[0].shape[0] * (1 - config.validationSplit));\n const originalBatchSize = inputs[0].shape[0];\n valX = sliceArrays(inputs, splitAt, originalBatchSize) as Tensor[];\n inputs = sliceArrays(inputs, 0, splitAt) as Tensor[];\n valY = sliceArrays(targets, splitAt, originalBatchSize) as Tensor[];\n targets = sliceArrays(targets, 0, splitAt) as Tensor[];\n // TODO(cais): Once sampleWeights becomes available, slice it to get\n // valSampleWeights.\n valIns = valX.concat(valY);\n\n // TODO(cais): Add useLearningPhase data properly.\n } else if (config.validationSteps != null) {\n doValidation = true;\n // TODO(cais): Add useLearningPhase.\n }\n\n const ins = inputs.concat(targets);\n\n model.checkTrainableWeightsConsistency();\n\n // TODO(cais): Handle use_learning_phase and learning_phase?\n\n // Porting Note: Here we see a key deviation of tfjs-layers from\n // Keras.\n // Due to the imperative nature of tfjs-layers' backend (tfjs-core),\n // we do not construct symbolic computation graphs to embody the\n // training process. Instead, we define a function that performs the\n // training action. In PyKeras, the data (inputs and targets) are fed\n // through graph placeholders. In tfjs-layers, the data are fed as\n // function arguments. 
Since the functions are defined below in the\n    // scope, we don't have equivalents of PyKeras's\n    // `_make_train_function`.\n    const trainFunction = model.makeTrainFunction();\n    const outLabels = model.getDedupedMetricsNames() as string[];\n\n    let valFunction: (data: Tensor[]) => Scalar[];\n    let callbackMetrics: string[];\n    if (doValidation) {\n      model.makeTestFunction();\n      valFunction = model.testFunction;\n      callbackMetrics =\n          outLabels.slice().concat(outLabels.map(n => 'val_' + n));\n    } else {\n      valFunction = null;\n      valIns = [];\n      callbackMetrics = outLabels.slice();\n    }\n\n    const callbacks = standardizeCallbacks(config.callbacks);\n    const out = await fitLoop(\n        model, trainFunction, ins, outLabels, batchSize, config.epochs,\n        config.verbose, callbacks, valFunction, valIns, config.shuffle,\n        callbackMetrics, config.initialEpoch, null, null, config.yieldEvery);\n    model.isTraining = false;\n    return out;\n  } finally {\n    model.isTraining = false;\n    // Memory clean up.\n    disposeNewTensors(inputs, x);\n    disposeNewTensors(targets, y);\n    disposeNewTensors(valX as Tensor[], inputValX);\n    disposeNewTensors(valY as Tensor[], inputValY);\n  }\n  // TODO(cais): Add value to outLabels.\n}\n\n/**\n * Ensure tensors all have a rank of at least 2.\n *\n * If a tensor has a rank of 1, it is dimension-expanded to rank 2.\n * If any tensor has a rank of 0 (i.e., is a scalar), an error will be thrown.\n */\nexport function ensureTensorsRank2OrHigher(tensors: Tensor|Tensor[]): Tensor[] {\n  const outs: Tensor[] = [];\n  if (tensors instanceof Tensor) {\n    tensors = [tensors];\n  }\n\n  // Make Tensors at least 2D.\n  for (let i = 0; i < tensors.length; ++i) {\n    const tensor = tensors[i];\n    if (tensor.rank === 1) {\n      outs.push(expandDims(tensor, 1));\n    } else if (tensor.rank === 0) {\n      throw new Error(\n          'Expected tensor to be at least 1D, but received a 0D tensor ' +\n          '(scalar).');\n    } else {\n      outs.push(tensor);\n    }\n  }\n  return outs;\n}\n\n/**\n * Compare a set of tensors with a reference (old) set, discard the ones\n * in the new set that are not present in the reference set.\n *\n * This method is used for memory cleanup during calls such as Model.fit().\n *\n * @param tensors New set which may contain Tensors not present in\n *   `refTensors`.\n * @param refTensors Reference Tensor set.\n */\n// TODO(cais, kangyizhang): Deduplicate with tfjs-data.\nexport function disposeNewTensors(\n    tensors: Tensor|Tensor[]|{[inputName: string]: Tensor},\n    refTensors: Tensor|Tensor[]|{[inputName: string]: Tensor}): void {\n  if (tensors == null) {\n    return;\n  }\n  const oldTensorIds: number[] = [];\n  if (refTensors instanceof Tensor) {\n    oldTensorIds.push(refTensors.id);\n  } else if (Array.isArray(refTensors)) {\n    refTensors.forEach(t => oldTensorIds.push(t.id));\n  } else if (refTensors != null) {\n    // `oldTensors` is a map from string name to Tensor.\n    for (const name in refTensors) {\n      const oldTensor = refTensors[name];\n      oldTensorIds.push(oldTensor.id);\n    }\n  }\n\n  const tensorsToDispose: Tensor[] = [];\n  if (tensors instanceof Tensor) {\n    if (oldTensorIds.indexOf(tensors.id) === -1) {\n      tensorsToDispose.push(tensors);\n    }\n  } else if (Array.isArray(tensors)) {\n    tensors.forEach(t => {\n      if (oldTensorIds.indexOf(t.id) === -1) {\n        tensorsToDispose.push(t);\n      }\n    });\n  } else if (tensors != null) {\n    // `oldTensors` is a map from string name to Tensor.\n    for (const name in tensors) {\n      const tensor = tensors[name];\n      if (oldTensorIds.indexOf(tensor.id) === -1) {\n        tensorsToDispose.push(tensor);\n      }\n    }\n  }\n\n  
tensorsToDispose.forEach(t => {\n if (!t.isDisposed) {\n t.dispose();\n }\n });\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original Source: engine/training.py */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {io, ModelPredictConfig, Optimizer, Scalar, serialization, Tensor, Tensor1D, tensor1d, util} from '@tensorflow/tfjs-core';\nimport {TensorContainer} from '@tensorflow/tfjs-core/dist/tensor_types';\n\nimport {getScalar,} from '../backend/state';\nimport * as K from '../backend/tfjs_backend';\nimport {History, ModelLoggingVerbosity} from '../base_callbacks';\nimport {nameScope} from '../common';\nimport {NotImplementedError, RuntimeError, ValueError} from '../errors';\nimport * as losses from '../losses';\nimport * as Metrics from '../metrics';\nimport * as optimizers from '../optimizers';\nimport {LossOrMetricFn, NamedTensorMap, Shape} from '../types';\nimport {count, pyListRepeat, singletonOrArray, unique} from '../utils/generic_utils';\nimport {printSummary} from '../utils/layer_utils';\nimport {range} from '../utils/math_utils';\nimport {LayerVariable} from '../variables';\n\nimport {Container, ContainerConfig} from './container';\nimport {Dataset} from './dataset_stub';\nimport {execute, FeedDict} from './executor';\nimport {SymbolicTensor} from './topology';\nimport {evaluateDataset, fitDataset, ModelEvaluateDatasetConfig, ModelFitDatasetConfig} from './training_dataset';\nimport {checkBatchSize, disposeNewTensors, ensureTensorsRank2OrHigher, fitTensors, makeBatches, ModelFitConfig, sliceArrays, sliceArraysByIndices} from './training_tensors';\n\n/**\n * Helper function for polymorphic input data: 1. singleton Tensor.\n */\nexport function isDataTensor(x: Tensor|Tensor[]|{[inputName: string]: Tensor}|\n {[inputName: string]: Tensor[]}): boolean {\n return x instanceof Tensor;\n}\n\n/**\n * Helper function for polymorphic input data: 2. Array of Tensor.\n */\nexport function isDataArray(x: Tensor|Tensor[]|\n {[inputName: string]: Tensor}): boolean {\n return Array.isArray(x);\n}\n\n/**\n * Helper function for polymorphic input data: 3. 
\"dict\" of Tensor.\n */\nexport function isDataDict(x: Tensor|Tensor[]|\n {[inputName: string]: Tensor}): boolean {\n return !isDataTensor(x) && !isDataArray(x);\n}\n\n/**\n * Normalizes inputs and targets provided by users.\n * @param data User-provided input data (polymorphic).\n * @param names An Array of expected Tensor names.\n * @param shapes Optional Array of expected Tensor shapes.\n * @param checkBatchAxis Whether to check that the batch axis of the arrays\n * match the expected value found in `shapes`.\n * @param exceptionPrefix String prefix used for exception formatting.\n * @returns List of standardized input Tensors (one Tensor per model input).\n * @throws ValueError: in case of improperly formatted user data.\n */\nexport function standardizeInputData(\n data: Tensor|Tensor[]|{[inputName: string]: Tensor}, names: string[],\n shapes?: Shape[], checkBatchAxis = true, exceptionPrefix = ''): Tensor[] {\n if (names == null || names.length === 0) {\n // Check for the case where the model expected no data, but some data got\n // sent.\n if (data != null) {\n let gotUnexpectedData = false;\n if (isDataArray(data) && (data as Tensor[]).length > 0) {\n gotUnexpectedData = true;\n } else if (isDataDict(data)) {\n for (const key in data) {\n if (data.hasOwnProperty(key)) {\n gotUnexpectedData = true;\n break;\n }\n }\n } else {\n // `data` is a singleton Tensor in this case.\n gotUnexpectedData = true;\n }\n if (gotUnexpectedData) {\n throw new ValueError(\n `Error when checking model ${exceptionPrefix} expected no data, ` +\n `but got ${data}`);\n }\n }\n return [];\n }\n if (data == null) {\n return names.map(name => null);\n }\n\n let arrays: Tensor[];\n if (isDataDict(data)) {\n data = data as {[inputName: string]: Tensor};\n arrays = [];\n for (const name of names) {\n if (data[name] == null) {\n throw new ValueError(\n `No data provided for \"${name}\". Need data for each key in: ` +\n `${names}`);\n }\n arrays.push(data[name]);\n }\n } else if (isDataArray(data)) {\n data = data as Tensor[];\n if (data.length !== names.length) {\n throw new ValueError(\n `Error when checking model ${exceptionPrefix}: the Array of ` +\n `Tensors that you are passing to your model is not the size the ` +\n `model expected. Expected to see ${names.length} Tensor(s), but ` +\n `instead got the following list of Tensor(s): ${data}`);\n }\n arrays = data;\n } else {\n data = data as Tensor;\n if (names.length > 1) {\n throw new ValueError(\n `The model ${exceptionPrefix} expects ${names.length} Tensor(s), ` +\n `but only received one Tensor. Found: Tensor with shape ${\n data.shape}`);\n }\n arrays = [data];\n }\n\n arrays = ensureTensorsRank2OrHigher(arrays);\n\n // Check shape compatibility.\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(\n `Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s). 
but got array with ` +\n `shape ${array.shape}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n // Skip the first (batch) axis.\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null && refDim >= 0 && dim !== refDim) {\n throw new ValueError(\n `Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have shape [${shapes[i]}], but got array with shape ` +\n `[${array.shape}].`);\n }\n }\n }\n }\n return arrays;\n}\n\n/**\n * User input validation for Tensors.\n * @param inputs `Array` of `tf.Tensor`s for inputs.\n * @param targets `Array` of `tf.Tensor`s for targets.\n * @param weights Optional `Array` of `tf.Tensor`s for sample weights.\n * @throws ValueError: in case of incorrectly formatted data.\n */\nexport function checkArrayLengths(\n inputs: Tensor[], targets: Tensor[], weights?: Tensor[]) {\n const setX = unique(inputs.map(input => input.shape[0]));\n setX.sort();\n const setY = unique(targets.map(target => target.shape[0]));\n setY.sort();\n // TODO(cais): Check `weights` as well.\n if (setX.length > 1) {\n throw new ValueError(\n `All input Tensors (x) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(inputs.map(input => input.shape))}`);\n }\n if (setY.length > 1) {\n throw new ValueError(\n `All target Tensors (y) should have the same number of samples. ` +\n `Got array shapes: ` +\n `${JSON.stringify(targets.map(target => target.shape))}`);\n }\n if (setX.length > 0 && setY.length > 0 && !util.arraysEqual(setX, setY)) {\n throw new ValueError(\n `Input Tensors should have the same number of samples as target ` +\n `Tensors. Found ${setX[0]} input sample(s) and ${setY[0]} target ` +\n `sample(s).`);\n }\n}\n\n/**\n * Validation on the compatibility of targes and loss functions.\n *\n * This helps prevent users from using loss functions incorrectly.\n *\n * @param targets `Array` of `tf.Tensor`s of targets.\n * @param lossFns `Array` of loss functions.\n * @param outputShapes `Array` of shapes of model outputs.\n */\nfunction checkLossAndTargetCompatibility(\n targets: Tensor[], lossFns: LossOrMetricFn[], outputShapes: Shape[]) {\n // TODO(cais): Dedicated test coverage?\n const keyLosses = [\n losses.meanSquaredError, losses.binaryCrossentropy,\n losses.categoricalCrossentropy\n ];\n for (let i = 0; i < targets.length; ++i) {\n const y = targets[i];\n const loss = lossFns[i];\n const shape = outputShapes[i];\n if (loss == null) {\n continue;\n }\n if (loss === losses.categoricalCrossentropy) {\n if (y.shape[y.shape.length - 1] === 1) {\n throw new ValueError(\n `You are passing a target array of shape ${y.shape} while using ` +\n `a loss 'categorical_crossentropy'. 
'categorical_crossentropy'` +\n            `expects targets to be binary matrices (1s and 0s) of shape ` +\n            `[samples, classes].`);\n        // TODO(cais): Example code in error message.\n      }\n    }\n    if (keyLosses.indexOf(loss) !== -1) {\n      const slicedYShape = y.shape.slice(1);\n      const slicedShape = shape.slice(1);\n      for (let j = 0; j < slicedYShape.length; ++j) {\n        const targetDim = slicedYShape[j];\n        const outDim = slicedShape[j];\n        if (outDim != null && targetDim !== outDim) {\n          throw new ValueError(\n              `A target Tensor with shape ${y.shape} was passed for an ` +\n              `output of shape ${shape}, while using a loss function that ` +\n              `expects targets to have the same shape as the output.`);\n        }\n      }\n    }\n  }\n}\n\n/**\n * Check inputs provided by the user.\n *\n * Porting Note: This corresponds to _standardize_input_data() in Python\n * Keras. Because of the strong typing in TF.js, we do not need to convert\n * the data. Specifically:\n * 1) in PyKeras, `data` can be `DataFrame` instances from pandas, for\n * example. We don't need to worry about that here because there is no\n * widely popular javascript/typescript equivalent of pandas (so far).\n * If one becomes available in the future, we can add support.\n * 2) in PyKeras, inputs can be Python dict. But here we are stipulating\n * that the data is either a single `tf.Tensor` or an Array of `tf.Tensor`s. We\n * may add support for `Object` data inputs in the future when the need\n * arises.\n *\n * Instead, we perform basic checks for number of parameters and shapes.\n *\n * @param data: The input data.\n * @param names: Name for the inputs, from the model.\n * @param shapes: Expected shapes for the input data, from the model.\n * @param checkBatchAxis: Whether the size along the batch axis (i.e., the\n *   first dimension) will be checked for matching.\n * @param exceptionPrefix: Exception prefix message, used in generating error\n *   messages.\n * @throws ValueError: on incorrect number of inputs or mismatches in shapes.\n */\nfunction checkInputData(\n    data: Tensor|Tensor[], names: string[], shapes?: Shape[],\n    checkBatchAxis = true, exceptionPrefix = '') {\n  let arrays: Tensor[];\n  if (Array.isArray(data)) {\n    if (data.length !== names.length) {\n      throw new ValueError(\n          `Error when checking model ${exceptionPrefix}: the Array of ` +\n          `Tensors that you are passing to your model is not the size ` +\n          `the model expected. Expected to see ${names.length} Tensor(s),` +\n          ` but instead got ${data.length} Tensor(s).`);\n    }\n    arrays = data;\n  } else {\n    if (names.length > 1) {\n      throw new ValueError(\n          `The model expects ${names.length} ${exceptionPrefix} Tensors, ` +\n          `but only received one Tensor. 
Found: array with shape ` +\n `${JSON.stringify(data.shape)}.`);\n }\n arrays = [data];\n }\n\n if (shapes != null) {\n for (let i = 0; i < names.length; ++i) {\n if (shapes[i] == null) {\n continue;\n }\n const array = arrays[i];\n if (array.shape.length !== shapes[i].length) {\n throw new ValueError(\n `Error when checking ${exceptionPrefix}: expected ${names[i]} ` +\n `to have ${shapes[i].length} dimension(s), but got array with ` +\n `shape ${JSON.stringify(array.shape)}`);\n }\n for (let j = 0; j < shapes[i].length; ++j) {\n if (j === 0 && !checkBatchAxis) {\n continue;\n }\n const dim = array.shape[j];\n const refDim = shapes[i][j];\n if (refDim != null) {\n if (refDim !== dim) {\n throw new ValueError(\n `Error when checking ${exceptionPrefix}: expected ` +\n `${names[i]} to have shape ${JSON.stringify(shapes[i])} but ` +\n `got array with shape ${JSON.stringify(array.shape)}.`);\n }\n }\n }\n }\n }\n}\n\n/**\n * Maps metric functions to model outputs.\n * @param metrics An `Array` or dict (`Object`) of metric functions.\n * @param outputNames An `Array` of the names of model outputs.\n * @returns An `Array` (one entry per model output) of `Array` of metric\n * functions. For instance, if the model has 2 outputs, and for the first\n * output we want to compute `binaryAccuracy` and `binaryCrossentropy`,\n * and just `binaryAccuracy` for the second output, the `Array` would look\n * like:\n * `[[binaryAccuracy, binaryCrossentropy], [binaryAccuracy]]`\n * @throws TypeError: if `null` or `undefined` value is provided.\n */\nfunction collectMetrics(\n metrics: string[]|{[outputName: string]: string | string[]},\n outputNames: string[]): string[][] {\n if (metrics == null || Array.isArray(metrics) && metrics.length === 0) {\n return outputNames.map(name => []);\n }\n if (Array.isArray(metrics)) {\n // We then apply all metrics to all outputs.\n return outputNames.map(name => metrics);\n } else if (metrics != null) {\n // In this case, metrics is a dict.\n const nestedMetrics: string[][] = [];\n for (const name of outputNames) {\n let outputMetrics: string|string[] =\n metrics.hasOwnProperty(name) ? metrics[name] : [];\n if (!Array.isArray(outputMetrics)) {\n outputMetrics = [outputMetrics];\n }\n nestedMetrics.push(outputMetrics as string[]);\n }\n return nestedMetrics;\n } else {\n throw new TypeError(\n 'Type of metrics argument not understood. Expected an Array or ' +\n 'Object, found: ' + metrics);\n }\n}\n\nexport interface ModelEvaluateConfig {\n /**\n * Batch size (Integer). If unspecified, it will default to 32.\n */\n batchSize?: number;\n\n /**\n * Verbosity mode.\n */\n verbose?: ModelLoggingVerbosity;\n\n /**\n * Tensor of weights to weight the contribution of different samples to the\n * loss and metrics.\n */\n sampleWeight?: Tensor;\n\n /**\n * integer: total number of steps (batches of samples)\n * before declaring the evaluation round finished. 
Ignored with the default\n * value of `undefined`.\n */\n steps?: number;\n}\n\n/**\n * Configuration for calls to `Model.compile()`.\n */\nexport interface ModelCompileConfig {\n /**\n * An instance of `tf.train.Optimizer` or a string name for an Optimizer.\n */\n optimizer: string|Optimizer;\n\n /**\n * Object function(s) or name(s) of object function(s).\n * If the model has multiple outputs, you can use a different loss\n * on each output by passing a dictionary or an Array of losses.\n * The loss value that will be minimized by the model will then be the sum\n * of all individual losses.\n */\n loss: string|string[]|{[outputName: string]: string}|LossOrMetricFn|\n LossOrMetricFn[]|{[outputName: string]: LossOrMetricFn};\n\n /**\n * List of metrics to be evaluated by the model during training and testing.\n * Typically you will use `metrics=['accuracy']`.\n * To specify different metrics for different outputs of a multi-output\n * model, you could also pass a dictionary.\n */\n metrics?: string[]|{[outputName: string]: string};\n\n // TODO(cais): Add lossWeights, sampleWeightMode, weightedMetrics, and\n // targetTensors.\n}\n\n/**\n * A `tf.Model` is a directed, acyclic graph of `tf.Layer`s plus methods for\n * training, evaluation, prediction and saving.\n *\n * `tf.Model` is the basic unit of training, inference and evaluation in\n * TensorFlow.js. To create a `tf.Model`, use `tf.model`.\n *\n * See also:\n * `tf.Sequential`, `tf.loadModel`.\n */\n/** @doc {heading: 'Models', subheading: 'Classes'} */\nexport class Model extends Container implements tfc.InferenceModel {\n static className = 'Model';\n optimizer: Optimizer;\n loss: string|string[]|{[outputName: string]: string}|LossOrMetricFn|\n LossOrMetricFn[]|{[outputName: string]: LossOrMetricFn};\n lossFunctions: LossOrMetricFn[];\n\n // TODO(cais): These private variables should probably not have the string\n // 'feed' in their names, because we are not dealing with a symbolic\n // backend.\n private feedOutputShapes: Shape[];\n private feedLossFns: LossOrMetricFn[];\n private collectedTrainableWeights: LayerVariable[];\n private testFunction: (data: Tensor[]) => Scalar[];\n history: History;\n\n // A public property that can be set by Callbacks to order early stopping\n // during `fit()` calls.\n protected stopTraining_: boolean;\n protected isTraining: boolean;\n\n metrics: string[]|{[outputName: string]: string};\n metricsNames: string[];\n // Porting Note: `metrics_tensors` in PyKeras is a symbolic tensor. But given\n // the imperative nature of tfjs-core, `metricsTensors` is a\n // TypeScript function here.\n // Also note that due to the imperative nature of tfjs-core, `metricsTensor`\n // here needs an output index to keep track of which output of the Model\n // a metric belongs to. 
This is unlike `metrics_tensors` in PyKeras,\n // which is a `list` of symbolic tensors, each of which has implicit\n // \"knowledge\" of the outputs it depends on.\n metricsTensors: Array<[LossOrMetricFn, number]>;\n\n constructor(config: ContainerConfig) {\n super(config);\n this.isTraining = false;\n }\n\n /**\n * Print a text summary of the model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - If the model has non-sequential-like topology, the inputs each layer\n * receives\n * - The total number of trainable and non-trainable parameters of the model.\n *\n * ```js\n * const input1 = tf.input({shape: [10]});\n * const input2 = tf.input({shape: [20]});\n * const dense1 = tf.layers.dense({units: 4}).apply(input1);\n * const dense2 = tf.layers.dense({units: 8}).apply(input2);\n * const concat = tf.layers.concatenate().apply([dense1, dense2]);\n * const output =\n * tf.layers.dense({units: 3, activation: 'softmax'}).apply(concat);\n *\n * const model = tf.model({inputs: [input1, input2], outputs: output});\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. For example, you can use `x => {}` to mute the printed\n * messages in the console.\n */\n /** @doc {heading: 'Models', subheading: 'Classes'} */\n summary(\n lineLength?: number, positions?: number[],\n printFn:\n // tslint:disable-next-line:no-any\n (message?: any, ...optionalParams: any[]) => void = console.log) {\n if (!this.built) {\n throw new ValueError(\n `This model has never been called, thus its weights have not been ` +\n `created yet. So no summary can be displayed. Build the model ` +\n `first (e.g., by calling it on some test data).`);\n }\n printSummary(this, lineLength, positions, printFn);\n }\n\n /**\n * Configures and prepares the model for training and evaluation. Compiling\n * outfits the model with an optimizer, loss, and/or metrics. 
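Beyond muting the output with `x => {}`, the `printFn` parameter of `summary()` described above can be used to capture the summary text. A minimal sketch, assuming `printFn` receives each summary line as its first argument (the same way `console.log` would):

```js
// Capture model.summary() output into an array via a custom printFn.
const model = tf.sequential({
  layers: [
    tf.layers.dense({units: 4, inputShape: [10]}),
    tf.layers.dense({units: 1})
  ]
});

const lines = [];
model.summary(undefined, undefined, line => lines.push(line));
console.log(lines.join('\n'));
```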
Calling `fit`\n * or `evaluate` on an un-compiled model will throw an error.\n *\n * @param config a `ModelCompileConfig` specifying the loss, optimizer, and\n * metrics to be used for fitting and evaluating this model.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [0]}\n */\n compile(config: ModelCompileConfig): void {\n if (config.loss == null) {\n config.loss = [];\n }\n this.loss = config.loss;\n\n if (typeof config.optimizer === 'string') {\n this.optimizer = optimizers.getOptimizer(config.optimizer);\n } else {\n if (!(config.optimizer instanceof Optimizer)) {\n throw new ValueError(\n `User-defined optimizer must be an instance of tf.Optimizer.`);\n }\n this.optimizer = config.optimizer;\n }\n\n // TODO(cais): Add lossWeights.\n // TODO(cais): Add sampleWeightMode.\n\n // Prepare loss functions.\n let lossFunctions: LossOrMetricFn[] = [];\n if (!Array.isArray(config.loss) && typeof config.loss !== 'string' &&\n typeof config.loss !== 'function') {\n config.loss = config.loss as {[outputName: string]: string};\n for (const name in config.loss) {\n if (this.outputNames.indexOf(name) === -1) {\n throw new ValueError(\n `Unknown entry in loss dictionary: \"${name}\". Only expect the ` +\n `following keys: ${this.outputNames}`);\n }\n }\n for (const name in this.outputNames) {\n if (config.loss[name] == null) {\n console.warn(\n `Output \"${name}\" is missing from loss dictionary. We assume ` +\n `this was done on purpose, and we will not be expecting data ` +\n `to be passed to ${name} during training`);\n }\n lossFunctions.push(losses.get(config.loss[name]));\n }\n } else if (Array.isArray(config.loss)) {\n if (config.loss.length !== this.outputs.length) {\n throw new ValueError(\n `When passing an Array as loss, it should have one entry per ` +\n `model output. The model has ${this.outputs.length} output(s), ` +\n `but you passed loss=${config.loss}.`);\n }\n const theLosses = config.loss as Array<string|LossOrMetricFn>;\n lossFunctions = theLosses.map(l => losses.get(l));\n } else {\n const lossFunction = losses.get(config.loss);\n this.outputs.map(layer => {\n lossFunctions.push(lossFunction);\n });\n }\n\n this.lossFunctions = lossFunctions;\n\n this.feedOutputNames = [];\n this.feedOutputShapes = [];\n this.feedLossFns = [];\n for (let i = 0; i < this.outputs.length; ++i) {\n // TODO(cais): Logic for skipping target(s).\n const shape = this.internalOutputShapes[i];\n const name = this.outputNames[i];\n this.feedOutputNames.push(name);\n this.feedOutputShapes.push(shape);\n this.feedLossFns.push(this.lossFunctions[i]);\n }\n\n // TODO(cais): Add logic for weighted losses.\n // TODO(cais): Add logic for output masks.\n // TODO(cais): Add logic for sample weights.\n const skipTargetIndices: number[] = [];\n\n // Prepare metrics.\n this.metrics = config.metrics;\n // TODO(cais): Add weightedMetrics.\n this.metricsNames = ['loss'];\n this.metricsTensors = [];\n\n // Compute total loss.\n // Porting Note: In PyKeras, metrics_tensors are symbolic tensor objects.\n // Here, metricsTensors are TypeScript functions. 
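The loss-preparation logic above accepts a single loss, an `Array` of losses (one per output), or a dictionary keyed by output name, and the metrics dictionary is resolved per output by `collectMetrics`. A sketch of compiling a two-output functional model with per-output losses and a per-output metric, assuming the layer names `out1`/`out2` become the model's output names (which is what the dictionary keys are matched against):

```js
// Two-output functional model with one loss per output and a metric on the
// second output only. Layer names are chosen to serve as output names.
const input = tf.input({shape: [8]});
const out1 = tf.layers.dense({units: 1, name: 'out1'}).apply(input);
const out2 = tf.layers.dense({units: 3, activation: 'softmax', name: 'out2'})
    .apply(input);
const model = tf.model({inputs: input, outputs: [out1, out2]});

model.compile({
  optimizer: 'sgd',
  loss: ['meanSquaredError', 'categoricalCrossentropy'],  // one entry per output
  metrics: {out2: ['accuracy']}                            // keyed by output name
});
console.log(model.metricsNames);
// e.g. ['loss', 'out1_loss', 'out2_loss', 'out2_acc']
```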
This difference is due\n // to the difference in symbolic/imperative property of the backends.\n nameScope('loss', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n // TODO(cais): Add weightedLoss, sampleWeight and mask.\n // The following line should be weightedLoss\n const weightedLoss = this.lossFunctions[i];\n if (this.outputs.length > 1) {\n this.metricsTensors.push([weightedLoss, i]);\n this.metricsNames.push(this.outputNames[i] + '_loss');\n }\n }\n\n // Porting Note: Due to the imperative nature of the backend, we calculate\n // the regularizer penalties in the totalLossFunction, instead of here.\n });\n\n const nestedMetrics = collectMetrics(config.metrics, this.outputNames);\n // TODO(cais): Add nestedWeightedMetrics.\n\n /**\n * Helper function used in loop below.\n */\n const appendMetric =\n (outputIndex: number, metricName: string,\n metricTensor: LossOrMetricFn) => {\n if (this.outputNames.length > 1) {\n metricName = this.outputNames[outputIndex] + '_' + metricName;\n }\n this.metricsNames.push(metricName);\n this.metricsTensors.push([metricTensor, outputIndex]);\n };\n\n nameScope('metric', () => {\n for (let i = 0; i < this.outputs.length; ++i) {\n if (skipTargetIndices.indexOf(i) !== -1) {\n continue;\n }\n const outputMetrics = nestedMetrics[i];\n // TODO(cais): Add weights and outputWeightedMetrics.\n\n // TODO(cais): Add optional arg `weights` to the following function.\n const handleMetrics = (metrics: string[]) => {\n const metricNamePrefix = '';\n let metricName: string;\n let accFn: LossOrMetricFn;\n let weightedMetricFn: LossOrMetricFn;\n // TODO(cais): Use 'weights_' for weighted metrics.\n\n for (const metric of metrics) {\n if (['accuracy', 'acc', 'crossentropy', 'ce'].indexOf(metric) !==\n -1) {\n const outputShape = this.internalOutputShapes[i];\n\n if (outputShape[outputShape.length - 1] === 1 ||\n this.lossFunctions[i] === losses.binaryCrossentropy) {\n // case: binary accuracy/crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryAccuracy;\n } else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.binaryCrossentropy;\n }\n } else if (\n this.lossFunctions[i] ===\n losses.sparseCategoricalCrossentropy) {\n // case: categorical accuracy / crossentropy with sparse\n // targets.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalAccuracy;\n } else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.sparseCategoricalCrossentropy;\n }\n } else {\n // case: categorical accuracy / crossentropy.\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalAccuracy;\n } else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n accFn = Metrics.categoricalCrossentropy;\n }\n }\n let suffix: string;\n if (['accuracy', 'acc'].indexOf(metric) !== -1) {\n suffix = 'acc';\n } else if (['crossentropy', 'ce'].indexOf(metric) !== -1) {\n suffix = 'ce';\n }\n // TODO(cais): Add weighting actually.\n weightedMetricFn = accFn;\n metricName = metricNamePrefix + suffix;\n } else {\n const metricFn = Metrics.get(metric);\n // TODO(cais): Add weighting actually.\n weightedMetricFn = metricFn;\n metricName = metricNamePrefix + metric;\n }\n\n // TODO(cais): Add weighting and masking to metricResult.\n let metricResult: LossOrMetricFn;\n nameScope(metricName, () => {\n metricResult = weightedMetricFn;\n });\n appendMetric(i, metricName, metricResult);\n }\n };\n\n 
handleMetrics(outputMetrics);\n // TODO(cais): Call handleMetrics with weights.\n }\n });\n\n // Porting Notes: Given the imperative backend of tfjs-core,\n // there is no need for constructing the symbolic graph and placeholders.\n this.collectedTrainableWeights = this.trainableWeights;\n }\n\n /**\n * Check trainable weights count consistency.\n *\n * This will raise a warning if `this.trainableWeights` and\n * `this.collectedTrainableWeights` are inconsistent (i.e., have different\n * numbers of parameters).\n * Inconsistency will typically arise when one modifies `model.trainable`\n * without calling `model.compile()` again.\n */\n protected checkTrainableWeightsConsistency(): void {\n if (this.collectedTrainableWeights == null) {\n return;\n }\n if (this.trainableWeights.length !==\n this.collectedTrainableWeights.length) {\n console.warn(\n 'Discrepancy between trainableweights and collected trainable ' +\n 'weights. Did you set `model.trainable` without calling ' +\n '`model.compile()` afterwards?');\n }\n }\n\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(\n * tf.ones([8, 10]), tf.ones([8, 1]), {batchSize: 4});\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param config A `ModelEvaluateConfig`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [2]}\n */\n evaluate(\n x: Tensor|Tensor[], y: Tensor|Tensor[],\n config: ModelEvaluateConfig = {}): Scalar|Scalar[] {\n const batchSize = config.batchSize == null ? 32 : config.batchSize;\n checkBatchSize(batchSize);\n\n // TODO(cais): Standardize `config.sampleWeights` as well.\n // Validate user data.\n const standardizedOuts = this.standardizeUserData(x, y, true, batchSize);\n try {\n // TODO(cais): If uses `useLearningPhase`, set the corresponding element\n // of the input to 0.\n const ins = standardizedOuts[0].concat(standardizedOuts[1]);\n this.makeTestFunction();\n const f = this.testFunction;\n const testOuts =\n this.testLoop(f, ins, batchSize, config.verbose, config.steps);\n return singletonOrArray(testOuts);\n } finally {\n disposeNewTensors(standardizedOuts[0], x);\n disposeNewTensors(standardizedOuts[1], y);\n }\n }\n\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. 
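`checkTrainableWeightsConsistency()` above warns when `model.trainable` is changed after `compile()`, because the optimizer keeps working with the weight list collected at compile time. A minimal sketch of the pattern the warning points at, and of the fix (re-compiling after toggling trainability):

```js
const input = tf.input({shape: [10]});
const output = tf.layers.dense({units: 1}).apply(input);
const model = tf.model({inputs: input, outputs: output});
model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});

// Freezing the model here leaves the collected trainable-weight list stale,
// which is exactly the discrepancy checkTrainableWeightsConsistency() warns about.
model.trainable = false;

// Re-compile so the optimizer picks up the updated trainable-weight list.
model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});
```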
The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param config A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [2]}\n */\n async evaluateDataset<T extends TensorContainer>(\n dataset: Dataset<T>,\n config: ModelEvaluateDatasetConfig): Promise<Scalar|Scalar[]> {\n this.makeTestFunction();\n return evaluateDataset(this, dataset, config);\n }\n\n /**\n * Get number of samples provided for training, evaluation or prediction.\n *\n * @param ins Input `tf.Tensor`.\n * @param batchSize Integer batch size, optional.\n * @param steps Total number of steps (batches of samples) before\n * declaring loop finished. Optional.\n * @param stepsName The public API's parameter name for `steps`.\n * @returns Number of samples provided.\n */\n private checkNumSamples(\n ins: Tensor|Tensor[], batchSize?: number, steps?: number,\n stepsName = 'steps'): number {\n let numSamples: number;\n if (steps != null) {\n numSamples = null;\n if (batchSize != null) {\n throw new ValueError(\n `If ${stepsName} is set, batchSize must be null or undefined.` +\n `Got batchSize = ${batchSize}`);\n }\n } else if (ins != null) {\n if (Array.isArray(ins)) {\n numSamples = ins[0].shape[0];\n } else {\n numSamples = ins.shape[0];\n }\n } else {\n throw new ValueError(\n `Either the input data should have a defined shape, or ` +\n `${stepsName} shoud be specified.`);\n }\n return numSamples;\n }\n\n /**\n * Execute internal tensors of the model with input data feed.\n * @param inputs Input data feed. Must match the inputs of the model.\n * @param outputs Names of the output tensors to be fetched. Must match\n * names of the SymbolicTensors that belong to the graph.\n * @returns Fetched values for `outputs`.\n */\n execute(inputs: Tensor|Tensor[]|NamedTensorMap, outputs: string|string[]):\n Tensor|Tensor[] {\n if (Array.isArray(outputs) && outputs.length === 0) {\n throw new ValueError(\n '`outputs` is an empty Array, which is not allowed.');\n }\n\n const outputsIsArray = Array.isArray(outputs);\n const outputNames = (outputsIsArray ? 
outputs as string[] :\n [outputs as string]) as string[];\n const outputSymbolicTensors = this.retrieveSymbolicTensors(outputNames);\n\n // Format the input into a FeedDict.\n const feedDict = new FeedDict();\n if (inputs instanceof Tensor) {\n inputs = [inputs as Tensor];\n }\n if (Array.isArray(inputs)) {\n if ((inputs as Tensor[]).length !== this.inputs.length) {\n throw new ValueError(\n `The number of inputs provided (${(inputs as Tensor[]).length}) ` +\n `does not match the number of inputs of this model ` +\n `(${this.inputs.length}).`);\n }\n for (let i = 0; i < this.inputs.length; ++i) {\n feedDict.add(this.inputs[i], (inputs as Tensor[])[i]);\n }\n } else {\n for (const input of this.inputs) {\n const tensorValue = (inputs as NamedTensorMap)[input.name];\n if (tensorValue == null) {\n throw new ValueError(\n `No value is provided for the model's input ${input.name}`);\n }\n feedDict.add(input, tensorValue);\n }\n }\n\n // Run execution.\n const executeOutputs = execute(outputSymbolicTensors, feedDict) as Tensor[];\n return outputsIsArray ? executeOutputs : executeOutputs[0];\n }\n\n /**\n * Retrieve the model's internal symbolic tensors from symbolic-tensor names.\n */\n private retrieveSymbolicTensors(symbolicTensorNames: string[]):\n SymbolicTensor[] {\n const outputSymbolicTensors: SymbolicTensor[] =\n pyListRepeat(null, symbolicTensorNames.length);\n let outputsRemaining = symbolicTensorNames.length;\n for (const layer of this.layers) {\n const layerOutputs: SymbolicTensor[] = Array.isArray(layer.output) ?\n layer.output as SymbolicTensor[] :\n [layer.output as SymbolicTensor];\n const layerOutputNames = layerOutputs.map(output => output.name);\n for (let i = 0; i < symbolicTensorNames.length; ++i) {\n const index = layerOutputNames.indexOf(symbolicTensorNames[i]);\n if (index !== -1) {\n outputSymbolicTensors[i] = layerOutputs[index];\n outputsRemaining--;\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n if (outputsRemaining === 0) {\n break;\n }\n }\n\n if (outputsRemaining > 0) {\n const remainingNames: string[] = [];\n outputSymbolicTensors.forEach((tensor, i) => {\n if (tensor == null) {\n remainingNames.push(symbolicTensorNames[i]);\n }\n });\n throw new ValueError(\n `Cannot find SymbolicTensors for output name(s): ` +\n `${JSON.stringify(remainingNames)}`);\n }\n return outputSymbolicTensors;\n }\n\n /**\n * Helper method to loop over some data in batches.\n *\n * Porting Note: Not using the functional approach in the Python equivalent\n * due to the imperative backend.\n * Porting Note: Does not support step mode currently.\n *\n * @param ins: input data\n * @param batchSize: integer batch size.\n * @param verbose: verbosity model\n * @returns: Predictions as `tf.Tensor` (if a single output) or an `Array` of\n * `tf.Tensor` (if multipe outputs).\n */\n private predictLoop(ins: Tensor|Tensor[], batchSize = 32, verbose = false):\n Tensor|Tensor[] {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins);\n if (verbose) {\n throw new NotImplementedError(\n 'Verbose predictLoop() is not implemented yet.');\n }\n\n // Sample-based predictions.\n // Porting Note: Tensor currently does not support sliced assignments as\n // in numpy, e.g., x[1:3] = y. 
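The `execute()` method above runs the graph up to named internal tensors, resolving each requested name against the `SymbolicTensor`s produced by the model's layers. A sketch that fetches a hidden activation; since layer and tensor names are generated by the library, the name is read from the `SymbolicTensor` returned by `apply()` rather than hard-coded:

```js
// Fetch an intermediate activation with Model.execute(), keyed by the name of
// the SymbolicTensor produced by the hidden layer.
const input = tf.input({shape: [8]});
const hidden = tf.layers.dense({units: 4, activation: 'relu'}).apply(input);
const output = tf.layers.dense({units: 1}).apply(hidden);
const model = tf.model({inputs: input, outputs: output});

const hiddenActivation = model.execute(tf.ones([2, 8]), hidden.name);
hiddenActivation.print();  // shape [2, 4]
```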
Therefore we use concatenation while\n // iterating over the batches.\n\n const batches = makeBatches(numSamples, batchSize);\n const outs: Tensor[] = [];\n // TODO(cais): Can the scope() be pushed down inside the for loop?\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchOuts = tfc.tidy(() => {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n // TODO(cais): Take care of the case of the last element is a flag for\n // training/test.\n const insBatch = sliceArrays(ins, batchStart, batchEnd);\n\n // Construct the feeds for execute();\n const feeds = [];\n if (Array.isArray(insBatch)) {\n for (let i = 0; i < insBatch.length; ++i) {\n feeds.push({key: this.inputs[i], value: insBatch[i]});\n }\n } else {\n feeds.push({key: this.inputs[0], value: insBatch});\n }\n const feedDict = new FeedDict(feeds);\n return execute(this.outputs, feedDict) as Tensor[];\n });\n if (batchIndex === 0) {\n // Pre-allocate.\n for (const batchOut of batchOuts) {\n outs.push(batchOut);\n }\n } else {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs[i] = K.concatAlongFirstAxis(outs[i], batchOuts[i]);\n }\n }\n }\n return singletonOrArray(outs);\n });\n }\n\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFlow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([8, 10]), {batchSize: 4}).print();\n * ```\n *\n * @param x The input data, as an Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param config A `ModelPredictConfig` object containing optional fields.\n *\n * @return Prediction results as a `tf.Tensor`(s).\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number of samples that is not a multiple of the batch size.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [1]}\n */\n predict(x: Tensor|Tensor[], config: ModelPredictConfig = {}): Tensor\n |Tensor[] {\n const xsRank2OrHigher = ensureTensorsRank2OrHigher(x);\n checkInputData(\n xsRank2OrHigher, this.inputNames, this.feedInputShapes, false);\n try {\n // TODO(cais): Take care of stateful models.\n // if (this.stateful) ...\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n const batchSize = config.batchSize == null ? 
32 : config.batchSize;\n checkBatchSize(batchSize);\n return this.predictLoop(xsRank2OrHigher, batchSize);\n } finally {\n disposeNewTensors(xsRank2OrHigher, x);\n }\n }\n\n /**\n * Returns predictions for a single batch of samples.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predictOnBatch(tf.ones([8, 10])).print();\n * ```\n * @param x: Input samples, as an Tensor\n * @return Tensor(s) of predictions\n */\n /** @doc {heading: 'Models', subheading: 'Classes'} */\n predictOnBatch(x: Tensor): Tensor|Tensor[] {\n checkInputData(x, this.inputNames, this.feedInputShapes, true);\n // TODO(cais): Take care of the learning_phase boolean flag.\n // if (this.useLearningPhase) ...\n return this.predictLoop(x, x.shape[0]);\n }\n\n protected standardizeUserData(\n x: Tensor|Tensor[]|{[inputName: string]: Tensor},\n y: Tensor|Tensor[]|{[inputName: string]: Tensor}, checkBatchAxis = true,\n batchSize?: number): [Tensor[], Tensor[], Tensor[]] {\n // TODO(cais): Add sampleWeight, classWeight\n if (this.optimizer == null) {\n throw new RuntimeError(\n 'You must compile a model before training/testing. Use ' +\n 'Model.compile(modelCompileConfig).');\n }\n const outputShapes: Shape[] = [];\n for (let i = 0; i < this.feedOutputShapes.length; ++i) {\n const outputShape = this.feedOutputShapes[i];\n const lossFn = this.feedLossFns[i];\n if (lossFn === losses.sparseCategoricalCrossentropy) {\n outputShapes.push(\n outputShape.slice(0, outputShape.length - 1).concat([1]));\n } else {\n // Porting Note: Because of strong typing `lossFn` must be a function.\n outputShapes.push(outputShape);\n }\n }\n x = standardizeInputData(\n x, this.feedInputNames, this.feedInputShapes, false, 'input') as\n Tensor[];\n y = standardizeInputData(\n y, this.feedOutputNames, outputShapes, false, 'target') as Tensor[];\n // TODO(cais): Standardize sampleWeights & classWeights.\n checkArrayLengths(x, y, null);\n // TODO(cais): Check sampleWeights as well.\n checkLossAndTargetCompatibility(y, this.feedLossFns, this.feedOutputShapes);\n if (this.stateful && batchSize != null && batchSize > 0) {\n if (x[0].shape[0] % batchSize !== 0) {\n throw new ValueError(\n `In a stateful network, you should only pass inputs with a ` +\n `number of samples that is divisible by the batch size ` +\n `${batchSize}. Found: ${x[0].shape[0]} sample(s).`);\n }\n }\n // TODO(cais): Deal with the case of model.stateful == true.\n return [x, y, null];\n }\n\n /**\n * Loop over some test data in batches.\n * @param f A Function returning a list of tensors.\n * @param ins Array of tensors to be fed to `f`.\n * @param batchSize Integer batch size or `null` / `undefined`.\n * @param verbose verbosity mode.\n * @param steps Total number of steps (batches of samples) before\n * declaring test finished. 
Ignored with the default value of `null` /\n * `undefined`.\n * @returns Array of Scalars.\n */\n private testLoop(\n f: (data: Tensor[]) => Scalar[], ins: Tensor[], batchSize?: number,\n verbose = 0, steps?: number): Scalar[] {\n return tfc.tidy(() => {\n const numSamples = this.checkNumSamples(ins, batchSize, steps, 'steps');\n const outs: Scalar[] = [];\n if (verbose > 0) {\n throw new NotImplementedError('Verbose mode is not implemented yet.');\n }\n // TODO(cais): Use `indicesForConversionToDense' to prevent slow down.\n if (steps != null) {\n throw new NotImplementedError(\n 'steps mode in testLoop() is not implemented yet');\n } else {\n const batches = makeBatches(numSamples, batchSize);\n const indexArray = tensor1d(range(0, numSamples));\n for (let batchIndex = 0; batchIndex < batches.length; ++batchIndex) {\n const batchStart = batches[batchIndex][0];\n const batchEnd = batches[batchIndex][1];\n const batchIds =\n K.sliceAlongFirstAxis(\n indexArray, batchStart, batchEnd - batchStart) as Tensor1D;\n // TODO(cais): In ins, train flag can be a number, instead of an\n // Tensor? Do we need to handle this in tfjs-layers?\n const insBatch = sliceArraysByIndices(ins, batchIds) as Scalar[];\n const batchOuts = f(insBatch);\n if (batchIndex === 0) {\n for (let i = 0; i < batchOuts.length; ++i) {\n outs.push(getScalar(0));\n }\n }\n for (let i = 0; i < batchOuts.length; ++i) {\n const batchOut = batchOuts[i];\n outs[i] =\n tfc.add(\n outs[i],\n tfc.mul(getScalar(batchEnd - batchStart), batchOut)) as\n Scalar;\n }\n }\n for (let i = 0; i < outs.length; ++i) {\n outs[i] = tfc.div(outs[i], getScalar(numSamples)) as Scalar;\n }\n }\n return outs;\n });\n }\n\n protected getDedupedMetricsNames(): string[] {\n const outLabels = this.metricsNames;\n // Rename duplicated metrics names (can happen with an output layer\n // shared among multiple dataflows).\n const dedupedOutLabels = [];\n for (let i = 0; i < outLabels.length; ++i) {\n const label = outLabels[i];\n let newLabel = label;\n if (count(outLabels, label) > 1) {\n const dupIndex = count(outLabels.slice(0, i), label);\n newLabel += `_${dupIndex}`;\n }\n dedupedOutLabels.push(newLabel);\n }\n return dedupedOutLabels;\n }\n\n /**\n * Creates a function that performs the following actions:\n *\n * 1. computes the losses\n * 2. sums them to get the total loss\n * 3. call the optimizer computes the gradients of the Model's\n * trainable weights w.r.t. the total loss and update the variables\n * 4. calculates the metrics\n * 5. returns the values of the losses and metrics.\n */\n protected makeTrainFunction(): (data: Tensor[]) => Scalar[] {\n return (data: Tensor[]) => {\n const losses: Tensor[] = [];\n const lossValues: Scalar[] = [];\n\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(\n this.inputs.length, this.inputs.length + this.outputs.length);\n\n const metricsValues: Scalar[] = [];\n\n // Create a function that computes the total loss based on the\n // inputs. 
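The test loop above accumulates each scalar output as a size-weighted sum over batches (each batch value is multiplied by its batch size) and divides by the total number of samples at the end, so a smaller final batch contributes proportionally less. The same arithmetic in plain form, with illustrative numbers:

```js
// Size-weighted average of per-batch results, mirroring the accumulation in testLoop().
// batchResults: [{size: number, value: number}, ...] - illustrative values only.
function weightedAverage(batchResults) {
  const totalSamples = batchResults.reduce((sum, b) => sum + b.size, 0);
  const weightedSum = batchResults.reduce((sum, b) => sum + b.size * b.value, 0);
  return weightedSum / totalSamples;
}

// e.g. three batches of sizes 4, 4 and 2 (a partial last batch):
console.log(weightedAverage([
  {size: 4, value: 0.50},
  {size: 4, value: 0.40},
  {size: 2, value: 0.70}
]));  // -> (4*0.5 + 4*0.4 + 2*0.7) / 10 = 0.5
```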
This function is used for obtaining gradients through\n // backprop.\n const totalLossFunction = () => {\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({key: this.inputs[i], value: inputs[i]});\n }\n const feedDict = new FeedDict(feeds);\n const outputs =\n execute(this.outputs, feedDict, {'training': true}) as Tensor[];\n // TODO(cais): Take care of the case of multiple outputs from a\n // single layer?\n\n let totalLoss: Tensor;\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n const loss = lossFunction(targets[i], outputs[i]);\n losses.push(loss);\n // TODO(cais): push Scalar instead.\n const meanLoss = tfc.mean(loss) as Scalar;\n // TODO(cais): Use a scope() instead, to avoid ownership.\n lossValues.push(meanLoss);\n if (i === 0) {\n totalLoss = loss;\n } else {\n totalLoss = tfc.add(totalLoss, loss);\n }\n }\n\n // Compute the metrics.\n // TODO(cais): These should probably be calculated outside\n // totalLossFunction to benefit speed?\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n const metric = this.metricsTensors[i][0];\n const outputIndex = this.metricsTensors[i][1];\n // TODO(cais): Replace K.mean() with a proper weighting\n // function.\n const meanMetric =\n tfc.mean(metric(targets[outputIndex], outputs[outputIndex])) as\n Scalar;\n tfc.keep(meanMetric);\n // TODO(cais): Use a scope() instead, to avoid ownership.\n metricsValues.push(meanMetric);\n }\n\n totalLoss = tfc.mean(totalLoss);\n\n // Add regularizer penalties.\n this.calculateLosses().forEach(regularizerLoss => {\n totalLoss = tfc.add(totalLoss, regularizerLoss);\n });\n\n return totalLoss as Scalar;\n };\n\n const variables = this.collectedTrainableWeights.map(\n param => param.read() as tfc.Variable);\n const returnCost = true;\n const totalLossValue =\n this.optimizer.minimize(totalLossFunction, returnCost, variables);\n\n return [totalLossValue].concat(metricsValues);\n };\n }\n\n /**\n * Create a function which, when invoked with an array of `tf.Tensor`s as a\n * batch of inputs, returns the prespecified loss and metrics of the model\n * under the batch of input data.\n */\n private makeTestFunction() {\n this.testFunction = (data: Tensor[]) => {\n return tfc.tidy(() => {\n const valOutputs: Scalar[] = [];\n let totalLoss: Scalar;\n const inputs = data.slice(0, this.inputs.length);\n const targets = data.slice(\n this.inputs.length, this.inputs.length + this.outputs.length);\n const feeds = [];\n for (let i = 0; i < this.inputs.length; ++i) {\n feeds.push({key: this.inputs[i], value: inputs[i]});\n }\n const feedDict = new FeedDict(feeds);\n const outputs = execute(this.outputs, feedDict) as Tensor[];\n // Compute total loss.\n for (let i = 0; i < this.lossFunctions.length; ++i) {\n const lossFunction = this.lossFunctions[i];\n // TODO(cais): Add sample weighting and replace the simple\n // averaging.\n const loss = tfc.mean(lossFunction(targets[i], outputs[i])) as Scalar;\n if (i === 0) {\n totalLoss = loss;\n } else {\n totalLoss = tfc.add(totalLoss, loss) as Scalar;\n }\n valOutputs.push(totalLoss);\n }\n // Compute the metrics.\n for (let i = 0; i < this.metricsTensors.length; ++i) {\n const metric = this.metricsTensors[i][0]; // TODO(cais): Restore.\n const outputIndex = this.metricsTensors[i][1];\n // TODO(cais): Replace K.mean() with a proper weighting function.\n const meanMetric =\n tfc.mean(metric(targets[outputIndex], outputs[outputIndex]));\n valOutputs.push(meanMetric as Scalar);\n }\n return 
valOutputs;\n });\n };\n }\n\n /**\n * Trains the model for a fixed number of epochs (iterations on a\n * dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * for (let i = 1; i < 5 ; ++i) {\n * const h = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(\"Loss after Epoch \" + i + \" : \" + h.history.loss[0]);\n * }\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you\n * can also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. If all outputs in the model are named,\n * you can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param config A `ModelFitConfig`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input\n * data and what the model expects.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [2]}\n */\n async fit(\n x: Tensor|Tensor[]|{[inputName: string]: Tensor},\n y: Tensor|Tensor[]|{[inputName: string]: Tensor},\n config: ModelFitConfig = {}): Promise<History> {\n return fitTensors(this, x, y, config);\n }\n\n // TODO(cais): Add code snippet below when it's possible to instantiate\n // actual dataset objects.\n /**\n * Trains the model using a dataset object.\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for training. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures.\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs.\n * Of the two items in the array, the first is the input feature(s) and\n * the second is the output target(s).\n * @param config A `ModelFitDatasetConfig`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [2]}\n */\n async fitDataset<T extends TensorContainer>(\n dataset: Dataset<T>, config: ModelFitDatasetConfig<T>): Promise<History> {\n return fitDataset(this, dataset, config);\n }\n\n /**\n * Extract weight values of the model.\n *\n * @param config: An instance of `io.SaveConfig`, which specifies\n * model-saving options such as whether only trainable weights are to be\n * saved.\n * @returns A `NamedTensorMap` mapping original weight names (i.e.,\n * non-uniqueified weight names) to their values.\n */\n protected getNamedWeights(config?: io.SaveConfig): NamedTensorMap {\n const namedWeights: NamedTensorMap = {};\n\n const trainableOnly = config != null && config.trainableOnly;\n const weights = trainableOnly ? 
this.trainableWeights : this.weights;\n const weightValues = this.getWeights(trainableOnly);\n for (let i = 0; i < weights.length; ++i) {\n if (trainableOnly && !weights[i].trainable) {\n // Optionally skip non-trainable weights.\n continue;\n }\n namedWeights[weights[i].originalName] = weightValues[i];\n }\n return namedWeights;\n }\n\n /**\n * Setter used for force stopping of Model.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const input = tf.input({shape: [10]});\n * const output = tf.layers.dense({units: 1}).apply(input);\n * const model = tf.model({inputs: [input], outputs: [output]});\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10\n * values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop: boolean) {\n this.stopTraining_ = stop;\n }\n\n /**\n * Save the configuration and/or weights of the Model.\n *\n * An `IOHandler` is an object that has a `save` method of the proper\n * signature defined. The `save` method manages the storing or\n * transmission of serialized data (\"artifacts\") that represent the\n * model's topology and weights onto or via a specific medium, such as\n * file downloads, local storage, IndexedDB in the web browser and HTTP\n * requests to a server. TensorFlow.js provides `IOHandler`\n * implementations for a number of frequently used saving mediums, such as\n * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. See `tf.io`\n * for more details.\n *\n * This method also allows you to refer to certain types of `IOHandler`s\n * as URL-like string shortcuts, such as 'localstorage://' and\n * 'indexeddb://'.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. 
Saving `model`'s topology and weights as two files\n * (`my-model-1.json` and `my-model-1.weights.bin`) downloaded from\n * browser.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('downloads://my-model-1');\n * ```\n *\n * Example 4. Send `model`'s topology and weights to an HTTP server.\n * See the documentation of `tf.io.browserHTTPRequests` for more details\n * including specifying request parameters and implementation of the\n * server.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * const saveResults = await model.save('http://my-server/model/upload');\n * ```\n *\n * @param handlerOrURL An instance of `IOHandler` or a URL-like,\n * scheme-based string shortcut for `IOHandler`.\n * @param config Options for saving the model.\n * @returns A `Promise` of `SaveResult`, which summarizes the result of\n * the saving, such as byte sizes of the saved artifacts for the model's\n * topology and weight values.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [1]}\n */\n async save(handlerOrURL: io.IOHandler|string, config?: io.SaveConfig):\n Promise<io.SaveResult> {\n if (typeof handlerOrURL === 'string') {\n const handlers = io.getSaveHandlers(handlerOrURL);\n if (handlers.length === 0) {\n throw new ValueError(\n `Cannot find any save handlers for URL '${handlerOrURL}'`);\n } else if (handlers.length > 1) {\n throw new ValueError(\n `Found more than one (${handlers.length}) save handlers for ` +\n `URL '${handlerOrURL}'`);\n }\n handlerOrURL = handlers[0];\n }\n if (handlerOrURL.save == null) {\n throw new ValueError(\n 'Model.save() cannot proceed because the IOHandler provided does ' +\n 'not have the `save` attribute defined.');\n }\n\n const weightDataAndSpecs =\n await io.encodeWeights(this.getNamedWeights(config));\n\n const returnString = false;\n const unusedArg: {} = null;\n const modelConfig = this.toJSON(unusedArg, returnString);\n\n return handlerOrURL.save({\n modelTopology: modelConfig,\n weightData: weightDataAndSpecs.data,\n weightSpecs: weightDataAndSpecs.specs\n });\n }\n}\nserialization.registerClass(Model);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original source keras/models.py */\n\nimport {io, Scalar, serialization, Tensor, util} from '@tensorflow/tfjs-core';\nimport {TensorContainer} from '@tensorflow/tfjs-core/dist/tensor_types';\n\nimport {getUid} from './backend/state';\nimport {History} from './base_callbacks';\nimport {Dataset} from './engine/dataset_stub';\nimport {Input} from './engine/input_layer';\nimport {getSourceInputs, Layer, Node, SymbolicTensor} from './engine/topology';\nimport {Model, ModelCompileConfig, ModelEvaluateConfig} from './engine/training';\nimport {ModelEvaluateDatasetConfig, ModelFitDatasetConfig} from './engine/training_dataset';\nimport {ModelFitConfig} from './engine/training_tensors';\nimport {NotImplementedError, RuntimeError, ValueError} from './errors';\nimport {deserialize} from './layers/serialization';\nimport {Kwargs, NamedTensorMap, Shape} from './types';\nimport {JsonDict} from './types';\nimport * as generic_utils from './utils/generic_utils';\nimport 
{convertPythonicToTs} from './utils/serialization_utils';\nimport {getExactlyOneShape} from './utils/types_utils';\n\n\n/**\n * Parses a JSON model configuration file and returns a model instance.\n *\n * ```js\n * // This example shows how to serialize a model using `toJSON()` and\n * // deserialize it as another model using `tf.models.modelFROMJSON()`.\n * // Note: this example serializes and deserializes only the topology\n * // of the model; the weights of the loaded model will be different\n * // from those of the the original model, due to random weight\n * // initialization.\n * // To load the topology and weights of a model, use `tf.loadModel()`.\n * const model1 = tf.sequential();\n * model1.add(tf.layers.repeatVector({inputShape: [2], n: 4}));\n * // Serialize `model1` as a JSON object.\n * const model1JSON = model1.toJSON(null, false);\n * model1.summary();\n *\n * const model2 = await tf.models.modelFromJSON(model1JSON);\n * model2.summary();\n * ```\n *\n * @param modelAndWeightsConfig JSON object or string encoding a model and\n * weights configuration. It can also be only the topology JSON of the\n * model, in which case the weights will not be loaded.\n * @param custom_objects Optional dictionary mapping names\n * (strings) to custom classes or functions to be\n * considered during deserialization.\n * @returns A TensorFlow.js Layers `tf.Model` instance (uncompiled).\n */\n/**\n * @doc {heading: 'Models',subheading: 'Loading'}\n */\nexport async function modelFromJSON(\n modelAndWeightsConfig: ModelAndWeightsConfig|JsonDict,\n customObjects?: serialization.ConfigDict): Promise<Model> {\n if (!('modelTopology' in modelAndWeightsConfig)) {\n modelAndWeightsConfig = {modelTopology: modelAndWeightsConfig as JsonDict};\n }\n modelAndWeightsConfig = modelAndWeightsConfig as ModelAndWeightsConfig;\n\n let modelTopology = modelAndWeightsConfig.modelTopology as JsonDict;\n if (modelTopology['model_config'] != null) {\n // If the model-topology JSON contains a 'model_config' field, then it is\n // a full model JSON (e.g., from `keras.Model.save()`), which contains\n // not only the model's architecture in its 'model_config' field, but\n // additional information such as the model's optimizer. We use only the\n // 'model_config' field currently.\n modelTopology = modelTopology['model_config'] as JsonDict;\n }\n const tsConfig =\n convertPythonicToTs(modelTopology) as serialization.ConfigDict;\n const model = deserialize(tsConfig, customObjects) as Model;\n\n if (modelAndWeightsConfig.weightsManifest != null) {\n // Load the weight values keyed by the original tensor names in the model\n // file that was loaded. 
These should match the keys of the weight\n // manifest.\n const weightValues =\n await io.loadWeights(\n modelAndWeightsConfig.weightsManifest,\n modelAndWeightsConfig.pathPrefix,\n model.weights.map(weight => weight.originalName)) as NamedTensorMap;\n\n // Map the weights to the unique tensor names generated during model loading\n const uniqueWeightValues: NamedTensorMap = {};\n for (const weight of model.weights) {\n uniqueWeightValues[weight.originalName] =\n weightValues[weight.originalName];\n }\n\n const skipMismatches: boolean = null;\n const isNamedTensorMap = true;\n model.loadWeights(uniqueWeightValues, skipMismatches, isNamedTensorMap);\n }\n return model;\n}\n\n/**\n * Options for loading a saved mode in TensorFlow.js format.\n */\nexport interface ModelAndWeightsConfig {\n /**\n * A JSON object or JSON string containing the model config.\n *\n * This can be either of the following two formats:\n * - A model archiecture-only config, i.e., a format consistent with the\n * return value of`keras.Model.to_json()`.\n * - A full model config, containing not only model architecture, but also\n * training options and state, i.e., a format consistent with the return\n * value of `keras.models.save_model()`.\n */\n modelTopology: JsonDict;\n\n /**\n * A weights manifest in TensorFlow.js format.\n */\n weightsManifest?: io.WeightsManifestConfig;\n\n /**\n * Path to prepend to the paths in `weightManifest` before fetching.\n *\n * The path may optionally end in a slash ('/').\n */\n pathPrefix?: string;\n}\n\n// TODO(nielsene): Remove after: https://github.com/tensorflow/tfjs/issues/400\nexport interface ModelPredictConfig {\n /**\n * Optional. Batch size (Integer). If unspecified, it will default to 32.\n */\n batchSize?: number;\n\n /**\n * Optional. Verbosity mode. Defaults to false.\n */\n verbose?: boolean;\n}\n\n/**\n * Load a model, including its topology and optionally weights. See the\n * Tutorial named \"How to import a Keras Model\" for usage examples.\n *\n * Example 1: Save `model`'s topology and weights to browser [local\n * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('localstorage://my-model-1');\n *\n * const loadedModel = await tf.loadModel('localstorage://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 2. Saving `model`'s topology and weights to browser\n * [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API);\n * then load it back.\n *\n * ```js\n * const model = tf.sequential(\n * {layers: [tf.layers.dense({units: 1, inputShape: [3]})]});\n * console.log('Prediction from original model:');\n * model.predict(tf.ones([1, 3])).print();\n *\n * const saveResults = await model.save('indexeddb://my-model-1');\n *\n * const loadedModel = await tf.loadModel('indexeddb://my-model-1');\n * console.log('Prediction from loaded model:');\n * loadedModel.predict(tf.ones([1, 3])).print();\n * ```\n *\n * Example 3. 
Load a model from user-selected files from HTML\n * [file input\n * elements](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file).\n *\n * ```js\n * // Note: this code snippet will not work without the HTML elements in the\n * // page\n * const jsonUpload = document.getElementById('json-upload');\n * const weightsUpload = document.getElementById('weights-upload');\n *\n * const model = await tf.loadModel(\n * tf.io.browserFiles([jsonUpload.files[0], weightsUpload.files[0]]));\n * ```\n *\n * Example 4. Load a model from an HTTP server.\n *\n * ```js\n * const model = await\n * tf.loadModel('https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json');\n * model.summary();\n * ```\n *\n * @param pathOrIOHandler Can be either of the two formats\n * 1. A string path to the `ModelAndWeightsConfig` JSON describing\n * the model in the canonical TensorFlow.js format. This path will be\n * interpreted as a relative HTTP path, to which `fetch` will be used to\n * request the model topology and weight manifest JSON.\n * The content of the JSON file is assumed to be a JSON object with the\n * following fields and values:\n * - 'modelTopology': A JSON object that can be either of:\n * 1. a model architecture JSON consistent with the format of the return\n * value of `keras.Model.to_json()`\n * 2. a full model JSON in the format of `keras.models.save_model()`.\n * - 'weightsManifest': A TensorFlow.js weights manifest.\n * See the Python converter function `save_model()` for more details.\n * It is also assumed that model weights can be accessed from relative\n * paths described by the `paths` fields in weights manifest.\n * 2. An `tf.io.IOHandler` object that loads model artifacts with its `load`\n * method.\n * @param strict Require that the provided weights exactly match those required\n * by the layers. Default true. 
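As described above, `tf.loadModel` accepts either a URL string (for which an HTTP load handler is used) or an explicit `IOHandler`. A sketch showing both forms for the HTTP case, reusing the model URL from Example 4; this assumes `tf.io.browserHTTPRequest` (which the loader itself falls back to) is available on the `tf.io` namespace:

```js
// Loading over HTTP: a plain URL string, or the equivalent explicit IOHandler.
const url =
    'https://storage.googleapis.com/tfjs-models/tfjs/iris_v1/model.json';

const modelA = await tf.loadModel(url);
const modelB = await tf.loadModel(tf.io.browserHTTPRequest(url));
modelA.summary();
modelB.summary();
```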
Passing false means that both extra weights\n * and missing weights will be silently ignored.\n *\n * @returns A `Promise` of `tf.Model`, with the topology and weights loaded.\n */\nexport async function loadModelInternal(\n pathOrIOHandler: string|io.IOHandler, strict = true): Promise<Model> {\n if (typeof pathOrIOHandler === 'string') {\n const handlers = io.getLoadHandlers(pathOrIOHandler);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n handlers.push(io.browserHTTPRequest(pathOrIOHandler));\n } else if (handlers.length > 1) {\n throw new ValueError(\n `Found more than one (${handlers.length}) load handlers for ` +\n `URL '${pathOrIOHandler}'`);\n }\n pathOrIOHandler = handlers[0];\n }\n return loadModelFromIOHandler(\n pathOrIOHandler as io.IOHandler, undefined, strict);\n}\n\n/**\n * Load a model and optionally its weights, using an IOHandler object.\n */\nexport async function loadModelFromIOHandler(\n handler: io.IOHandler, customObjects?: serialization.ConfigDict,\n strict = true): Promise<Model> {\n if (handler.load == null) {\n throw new ValueError(\n 'Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts = await handler.load();\n let modelTopology = artifacts.modelTopology as JsonDict;\n if (modelTopology['model_config'] != null) {\n modelTopology = modelTopology['model_config'] as JsonDict;\n }\n const model =\n deserialize(\n convertPythonicToTs(modelTopology) as serialization.ConfigDict,\n customObjects) as Model;\n\n // If weightData is present, load the weights into the model.\n if (artifacts.weightData != null) {\n // Loading weights requires weightSpecs.\n if (artifacts.weightSpecs == null) {\n throw new ValueError(\n 'Model artifacts contains weight data, but not weight specs. ' +\n 'Therefore loading of weights cannot proceed.');\n }\n\n const skipMismatch = false;\n const isNamedTensorMap = true;\n model.loadWeights(\n io.decodeWeights(artifacts.weightData, artifacts.weightSpecs),\n skipMismatch, isNamedTensorMap, strict);\n }\n return model;\n}\n\n/**\n * Configuration for a Sequential model.\n */\nexport interface SequentialConfig {\n /** Stack of layers for the model. */\n layers?: Layer[];\n\n /** The name of this model. 
*/\n name?: string;\n}\n\n/**\n * A model with a stack of layers, feeding linearly from one to the next.\n *\n * `tf.sequential` is a factory function that creates an instance of\n * `tf.Sequential`.\n *\n * ```js\n * // Define a model for linear regression.\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [1]}));\n *\n * // Prepare the model for training: Specify the loss and the optimizer.\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n *\n * // Generate some synthetic data for training.\n * const xs = tf.tensor2d([1, 2, 3, 4], [4, 1]);\n * const ys = tf.tensor2d([1, 3, 5, 7], [4, 1]);\n *\n * // Train the model using the data then do inference on a data point the\n * // model hasn't seen:\n * await model.fit(xs, ys);\n * model.predict(tf.tensor2d([5], [1, 1])).print();\n * ```\n */\n/** @doc {heading: 'Models', subheading: 'Classes'} */\nexport class Sequential extends Model {\n static className = 'Sequential';\n private model: Model;\n private _updatable: boolean;\n constructor(config?: SequentialConfig) {\n super({inputs: [], outputs: []});\n config = config || {};\n\n this.trainable = true;\n this._updatable = true;\n this.built = false;\n\n // Set model name.\n this.name = (config.name != null) ? config.name : getUid('sequential_');\n\n // Add to the model any layers passed to the constructor.\n if (config.layers != null) {\n for (const layer of config.layers) {\n this.add(layer);\n }\n }\n }\n\n // Helper function to Sequential.add Throws if the new output shape will be\n // invalid.\n private checkShape(layer: Layer) {\n const shape = layer.inboundNodes[0].outputTensors[0].shape;\n if (shape.some(x => x < 0)) {\n throw new ValueError(\n 'Negative dimension size caused by adding layer ' +\n `${layer.name} with input shape [` +\n `${layer.inboundNodes[0].inputTensors[0].shape}]`);\n }\n }\n\n /**\n * Adds a layer instance on top of the layer stack.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 8, inputShape: [1]}));\n * model.add(tf.layers.dense({units: 4, activation: 'relu6'}));\n * model.add(tf.layers.dense({units: 1, activation: 'relu6'}));\n * // Note that the untrained model is random at this point.\n * model.predict(tf.randomNormal([10, 1])).print();\n * ```\n * @param layer Layer instance.\n *\n * @exception ValueError In case the `layer` argument does not know its\n * input shape.\n * @exception ValueError In case the `layer` argument has multiple output\n * tensors, or is already connected somewhere else (forbidden in\n * `Sequential` models).\n */\n /** @doc {heading: 'Models', subheading: 'Classes'} */\n add(layer: Layer): void {\n const isLayerModelInstance =\n layer instanceof Sequential || layer instanceof Model;\n let modelLayer: Model;\n if (isLayerModelInstance) {\n modelLayer = layer as Model;\n if (modelLayer.outputs.length !== 1) {\n throw new ValueError(\n 'All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n if (modelLayer.inputs.length !== 1) {\n throw new ValueError(\n 'All layers in a Sequential model ' +\n 'should have a single input tensor. 
' +\n 'For multi-input layers, ' +\n 'use the functional API.');\n }\n }\n\n if (this.outputs.length === 0) {\n // first layer in model: check that it is an input layer\n if (layer.inboundNodes.length === 0) {\n // create an input layer\n if (layer.batchInputShape == null) {\n throw new ValueError(\n 'The first layer in a Sequential model must ' +\n 'get an `inputShape` or `batchInputShape` argument.');\n }\n // Instantiate the input layer.\n const x = Input({\n batchShape: layer.batchInputShape,\n dtype: layer.dtype,\n name: layer.name + '_input'\n });\n // This will build the current layer and create the node connecting\n // the current layer to the input layer we just created.\n layer.apply(x);\n }\n\n if (isLayerModelInstance) {\n this.outputs = modelLayer.outputs;\n this.inputs = modelLayer.inputs;\n } else {\n if (layer.inboundNodes.length !== 1) {\n throw new ValueError(\n 'A layer added to a Sequential model must not already be ' +\n `connected somewhere else. Model received layer ${layer.name} ` +\n `which has ${layer.inboundNodes.length} pre-existing inbound ` +\n 'connections.');\n }\n\n if (layer.inboundNodes[0].outputTensors.length !== 1) {\n throw new ValueError(\n 'All layers in a Sequential model ' +\n 'should have a single output tensor. ' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [layer.inboundNodes[0].outputTensors[0]];\n this.inputs = getSourceInputs(this.outputs[0]);\n }\n\n this.inboundNodes = [];\n // We create an input node, which we will keep updated\n // as we add more layers.\n // (This call has side effects.)\n // tslint:disable-next-line:no-unused-expression\n new Node({\n outboundLayer: this,\n inboundLayers: [],\n nodeIndices: [],\n tensorIndices: [],\n inputTensors: this.inputs,\n outputTensors: this.outputs,\n // no model-level masking for now\n inputMasks: generic_utils.pyListRepeat(null, this.inputs.length),\n outputMasks: [null],\n inputShapes: this.inputs.map(x => x.shape),\n outputShapes: this.outputs[0].shape\n });\n } else {\n const outputTensor = layer.apply(this.outputs[0]);\n if (Array.isArray(outputTensor)) {\n throw new TypeError(\n 'All layers in a Sequential model ' +\n 'should have a single output tensor. 
' +\n 'For multi-output layers, ' +\n 'use the functional API.');\n }\n this.checkShape(layer);\n this.outputs = [outputTensor as SymbolicTensor];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n\n this.layers.push(layer);\n this.built = false;\n }\n\n /**\n * Removes the last layer in the model.\n *\n * @exception TypeError if there are no layers in the model.\n */\n pop(): void {\n if (this.layers.length === 0) {\n throw new TypeError('There are no layers in the model.');\n }\n\n this.layers.pop();\n if (this.layers.length === 0) {\n this.outputs = [];\n this.inboundNodes = [];\n this.outboundNodes = [];\n } else {\n const lastLayerIndex = this.layers.length - 1;\n this.layers[lastLayerIndex].outboundNodes = [];\n this.outputs = [this.layers[lastLayerIndex].output as SymbolicTensor];\n // update self.inbound_nodes\n this.inboundNodes[0].outputTensors = this.outputs;\n this.inboundNodes[0].outputShapes = [this.outputs[0].shape];\n }\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n if (this.model == null) {\n this.build();\n }\n return this.model.call(inputs, kwargs);\n }\n\n build(inputShape?: Shape|Shape[]) {\n // Call `getExactlyOneShape` without using its return value,\n // to verify that exactly one input shape is provided.\n getExactlyOneShape(inputShape);\n\n if (this.inputs.length === 0 || this.outputs.length === 0) {\n throw new TypeError(\n 'Sequential model cannot be built: model is empty.' +\n ' Add some layers first.');\n }\n // actually create the model\n this.model = new Model({\n inputs: this.inputs,\n outputs: this.outputs[0],\n name: this.name + '_model'\n });\n this.model.trainable = this.trainable;\n this.model.updatable = this.updatable;\n\n // mirror model attributes\n this.supportsMasking = this.model.supportsMasking;\n // TODO(michaelterry): Add caches\n this.inputLayers = this.model.inputLayers;\n this.inputLayersNodeIndices = this.model.inputLayersNodeIndices;\n this.inputLayersTensorIndices = this.model.inputLayersTensorIndices;\n this.outputLayers = this.model.outputLayers;\n this.outputLayersNodeIndices = this.model.outputLayersNodeIndices;\n this.outputLayersTensorIndices = this.model.outputLayersTensorIndices;\n this.nodesByDepth = this.model.nodesByDepth;\n this.containerNodes = this.model.containerNodes;\n this.outputNames = this.model.outputNames;\n this.inputNames = this.model.inputNames;\n // TODO(michaelterry): Add feedInputNames, feedInputs, if needed.\n // TODO(michaelterry): Add callbackModel if needed.\n this.built = true;\n }\n\n countParams(): number {\n if (!this.built) {\n this.build();\n }\n return super.countParams();\n }\n\n /**\n * Print a text summary of the Sequential model's layers.\n *\n * The summary includes\n * - Name and type of all layers that comprise the model.\n * - Output shape(s) of the layers\n * - Number of weight parameters of each layer\n * - The total number of trainable and non-trainable parameters of the\n * model.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(\n * tf.layers.dense({units: 100, inputShape: [10], activation: 'relu'}));\n * model.add(tf.layers.dense({units: 1, activation: 'sigmoid'}));\n *\n * model.summary();\n * ```\n *\n * @param lineLength Custom line length, in number of characters.\n * @param positions Custom widths of each of the columns, as either\n * fractions of `lineLength` (e.g., `[0.5, 0.75, 1]`) or absolute number\n * of characters (e.g., `[30, 50, 
65]`). Each number corresponds to\n * right-most (i.e., ending) position of a column.\n * @param printFn Custom print function. Can be used to replace the default\n * `console.log`. For example, you can use `x => {}` to mute the printed\n * messages in the console.\n */\n /** @doc {heading: 'Models', subheading: 'Classes'} */\n summary(\n lineLength?: number, positions?: number[],\n printFn:\n // tslint:disable-next-line:no-any\n (message?: any, ...optionalParams: any[]) => void = console.log) {\n if (!this.built) {\n this.build();\n }\n super.summary(lineLength, positions, printFn);\n }\n\n /**\n * Sets the weights of the model.\n *\n * @param weights Should be a list of Tensors with shapes and types matching\n * the output of `model.getWeights()`.\n */\n setWeights(weights: Tensor[]): void {\n if (this.model == null) {\n this.build();\n }\n this.model.setWeights(weights);\n }\n\n get updatable(): boolean {\n return this._updatable;\n }\n\n set updatable(value: boolean) {\n if (this.built) {\n this.model.updatable = value;\n }\n this._updatable = value;\n }\n\n /**\n * Returns the loss value & metrics values for the model in test mode.\n *\n * Loss and metrics are specified during `compile()`, which needs to happen\n * before calls to `evaluate()`.\n *\n * Computation is done in batches.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const result = model.evaluate(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * });\n * result.print();\n * ```\n *\n * @param x `tf.Tensor` of test data, or an `Array` of `tf.Tensor`s if the\n * model has multiple inputs.\n * @param y `tf.Tensor` of target data, or an `Array` of `tf.Tensor`s if the\n * model has multiple outputs.\n * @param config A `ModelEvaluateConfig`, containing optional fields.\n *\n * @return `Scalar` test loss (if the model has a single output and no\n * metrics) or `Array` of `Scalar`s (if the model has multiple outputs\n * and/or metrics). The attribute `model.metricsNames`\n * will give you the display labels for the scalar outputs.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [2]}\n */\n evaluate(\n x: Tensor|Tensor[], y: Tensor|Tensor[],\n config: ModelEvaluateConfig = {}): Scalar|Scalar[] {\n if (!this.built) {\n throw new RuntimeError(\n 'The model needs to be compiled before being used.');\n }\n return this.model.evaluate(x, y, config);\n }\n\n // TODO(cais): Add code snippet below once real dataset objects are\n // available.\n /**\n * Evaluate model using a dataset object.\n *\n * Note: Unlike `evaluate()`, this method is asynchronous (`async`);\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. 
Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param config A configuration object for the dataset-based evaluation.\n * @returns Loss and metric values as an Array of `Scalar` objects.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [2]}\n */\n async evaluateDataset<T extends TensorContainer>(\n dataset: Dataset<T>,\n config: ModelEvaluateDatasetConfig): Promise<Scalar|Scalar[]> {\n if (!this.built) {\n throw new RuntimeError(\n 'The model needs to be compiled before being used.');\n }\n return this.model.evaluateDataset(dataset, config);\n }\n\n /**\n * Generates output predictions for the input samples.\n *\n * Computation is done in batches.\n *\n * Note: the \"step\" mode of predict() is currently not supported.\n * This is because the TensorFow.js core backend is imperative only.\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.predict(tf.ones([2, 10])).print();\n * ```\n *\n * @param x The input data, as an Tensor, or an `Array` of `tf.Tensor`s if\n * the model has multiple inputs.\n * @param conifg A `ModelPredictConfig` object containing optional fields.\n *\n * @return `tf.Tensor`(s) of predictions.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and the model's expectations, or in case a stateful model receives a\n * number of samples that is not a multiple of the batch size.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [1]}\n */\n predict(x: Tensor|Tensor[], config: ModelPredictConfig = {}): Tensor\n |Tensor[] {\n if (this.model == null) {\n this.build();\n }\n return this.model.predict(x, config);\n }\n\n /**\n * Returns predictions for a single batch of samples.\n *\n * @param x: Input samples, as an Tensor, or list of Tensors (if the model\n * has multiple inputs).\n * @return Tensor(s) of predictions\n */\n predictOnBatch(x: Tensor): Tensor|Tensor[] {\n if (this.model == null) {\n this.build();\n }\n return this.model.predictOnBatch(x);\n }\n\n /**\n * See `Model.compile`.\n *\n * @param config\n */\n compile(config: ModelCompileConfig): void {\n this.build();\n this.model.compile(config);\n this.optimizer = this.model.optimizer;\n this.loss = this.model.loss;\n this.metrics = this.model.metrics;\n // TODO(cais): Add this.lossWeights, this.sampleWeightMode,\n // this.weightedMetrics, this.targets.\n this.metricsTensors = this.model.metricsTensors;\n this.metricsNames = this.model.metricsNames;\n // TODO(cais): Add sampleWeights.\n }\n\n /**\n * Trains the model for a fixed number of epochs (iterations on a dataset).\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 1, inputShape: [10]})]\n * });\n * model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});\n * const history = await model.fit(tf.ones([8, 10]), tf.ones([8, 1]), {\n * batchSize: 4,\n * epochs: 3\n * });\n * console.log(history.history.loss[0]);\n * ```\n *\n * @param x `tf.Tensor` of training data, or an array of `tf.Tensor`s if the\n * model has multiple inputs. If all inputs in the model are named, you can\n * also pass a dictionary mapping input names to `tf.Tensor`s.\n * @param y `tf.Tensor` of target (label) data, or an array of `tf.Tensor`s if\n * the model has multiple outputs. 
If all outputs in the model are named, you\n * can also pass a dictionary mapping output names to `tf.Tensor`s.\n * @param config A `ModelFitConfig`, containing optional fields.\n *\n * @return A `History` instance. Its `history` attribute contains all\n * information collected during training.\n *\n * @exception ValueError In case of mismatch between the provided input data\n * and what the model expects.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [2]}\n */\n async fit(\n x: Tensor|Tensor[]|{[inputName: string]: Tensor},\n y: Tensor|Tensor[]|{[inputName: string]: Tensor},\n config: ModelFitConfig = {}): Promise<History> {\n if (!this.built) {\n throw new RuntimeError(\n 'The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fit(x, y, config);\n }\n\n // TODO(cais): Add code snippet below when it's possible to instantiate\n // actual dataset objects.\n /**\n * Trains the model using a dataset object.\n *\n * @param dataset A dataset object. Its `iterator()` method is expected\n * to generate a dataset iterator object, the `next()` method of which\n * is expected to produce data batches for evaluation. The return value\n * of the `next()` call ought to contain a boolean `done` field and a\n * `value` field. The `value` field is expected to be an array of two\n * `tf.Tensor`s or an array of two nested `tf.Tensor` structures. The former\n * case is for models with exactly one input and one output (e.g..\n * a sequential model). The latter case is for models with multiple\n * inputs and/or multiple outputs. Of the two items in the array, the\n * first is the input feature(s) and the second is the output target(s).\n * @param config A `ModelFitDatasetConfig`, containing optional fields.\n *\n * @return A `History` instance. 
Its `history` attribute contains all\n * information collected during training.\n */\n /**\n * @doc {heading: 'Models', subheading: 'Classes', configParamIndices: [2]}\n */\n async fitDataset<T extends TensorContainer>(\n dataset: Dataset<T>, config: ModelFitDatasetConfig<T>): Promise<History> {\n if (!this.built) {\n throw new RuntimeError(\n 'The model needs to be compiled before ' +\n 'being used.');\n }\n return this.model.fitDataset(dataset, config);\n }\n\n /* See parent class for JsDoc */\n static fromConfig<T extends serialization.Serializable>(\n cls: serialization.SerializableConstructor<T>,\n config: serialization.ConfigDict): T {\n let configArray: serialization.ConfigDictArray;\n let extraModelConfig: serialization.ConfigDict = {};\n if (config instanceof Array) {\n if (!(config[0].className != null) ||\n config[0]['className'] === 'Merge') {\n throw new ValueError('Legacy serialization format not supported yet.');\n }\n configArray = config;\n } else {\n util.assert(\n config['layers'] != null,\n `When the config data for a Sequential model is not an Array, ` +\n `it must be an Object that contains the 'layers' field.`);\n configArray = config['layers'] as serialization.ConfigDictArray;\n delete config['layers'];\n extraModelConfig = config;\n }\n\n const model = new cls(extraModelConfig);\n if (!(model instanceof Sequential)) {\n throw new NotImplementedError(\n `Sequential.fromConfig called on non-Sequential input: ${model}`);\n }\n\n for (const conf of configArray) {\n const layer = deserialize(conf as serialization.ConfigDict) as Layer;\n model.add(layer);\n }\n return model;\n }\n\n /**\n * Setter used for force stopping of Model.fit() (i.e., training).\n *\n * Example:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.dense({units: 1, inputShape: [10]}));\n * model.compile({loss: 'meanSquaredError', optimizer: 'sgd'});\n * const xs = tf.ones([8, 10]);\n * const ys = tf.zeros([8, 1]);\n *\n * const history = await model.fit(xs, ys, {\n * epochs: 10,\n * callbacks: {\n * onEpochEnd: async (epoch, logs) => {\n * if (epoch === 2) {\n * model.stopTraining = true;\n * }\n * }\n * }\n * });\n *\n * // There should be only 3 values in the loss array, instead of 10 values,\n * // due to the stopping after 3 epochs.\n * console.log(history.history.loss);\n * ```\n */\n set stopTraining(stop: boolean) {\n // TODO(cais): When refactoring to remove the composition pattern happens,\n // remove this method overriding.\n this.model.stopTraining = stop;\n }\n\n // TODO(cais): Override get trainableWeights() here\n\n // tslint:disable-next-line:no-any\n getConfig(): any {\n // NOTE(cais): We override the return type of getConfig() to `any` here,\n // because the `Sequential` class is a special case among `Container`\n // subtypes in that its getConfig() method returns an Array (not a\n // dict).\n const config: serialization.ConfigDict[] = [];\n for (const layer of this.layers) {\n config.push({\n className: layer.getClassName(),\n config: layer.getConfig(),\n });\n }\n return config;\n }\n}\nserialization.registerClass(Sequential);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Exported functions.\n */\n\nimport {io} from '@tensorflow/tfjs-core';\n\nimport {BaseCallbackConstructor, CallbackConstructorRegistry} 
from './base_callbacks';\nimport {ContainerConfig} from './engine/container';\nimport {Input, InputConfig,} from './engine/input_layer';\nimport {SymbolicTensor} from './engine/topology';\nimport {Model} from './engine/training';\nimport {loadModelInternal, Sequential, SequentialConfig} from './models';\n\n\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include exectuable JavaScript code snippets where applicable\n// (b/74074458).\n\n// Model and related factory methods.\n\n/**\n * A model is a data structure that consists of `Layers` and defines inputs\n * and outputs.\n *\n * The key difference between `tf.model` and `tf.sequential` is that `tf.model`\n * is more generic, supporting an arbitrary graph (without cycles) of layers.\n * `tf.sequential` is less generic and supports only a linear stack of layers.\n *\n * When creating a `tf.Model`, specify its input(s) and output(s). Layers\n * are used to wire input(s) to output(s).\n *\n * For example, the following code snippet defines a model consisting of\n * two `dense` layers, with 10 and 4 units, respectively.\n *\n * ```js\n * // Define input, which has a size of 5 (not including batch dimension).\n * const input = tf.input({shape: [5]});\n *\n * // First dense layer uses relu activation.\n * const denseLayer1 = tf.layers.dense({units: 10, activation: 'relu'});\n * // Second dense layer uses softmax activation.\n * const denseLayer2 = tf.layers.dense({units: 4, activation: 'softmax'});\n *\n * // Obtain the output symbolic tensor by applying the layers on the input.\n * const output = denseLayer2.apply(denseLayer1.apply(input));\n *\n * // Create the model based on the inputs.\n * const model = tf.model({inputs: input, outputs: output});\n *\n * // The model can be used for training, evaluation and prediction.\n * // For example, the following line runs prediction with the model on\n * // some fake data.\n * model.predict(tf.ones([2, 5])).print();\n * ```\n * See also:\n * `tf.sequential`, `tf.loadModel`.\n */\n\n/**\n * @doc {heading: 'Models', subheading: 'Creation', configParamIndices: [0]}\n */\nexport function model(config: ContainerConfig): Model {\n return new Model(config);\n}\n\n/**\n * Creates a `tf.Sequential` model. A sequential model is any model where the\n * outputs of one layer are the inputs to the next layer, i.e. the model\n * topology is a simple 'stack' of layers, with no branching or skipping.\n *\n * This means that the first layer passed to a `tf.Sequential` model should have\n * a defined input shape. What that means is that it should have received an\n * `inputShape` or `batchInputShape` argument, or for some type of layers\n * (recurrent, Dense...) an `inputDim` argument.\n *\n * The key difference between `tf.model` and `tf.sequential` is that\n * `tf.sequential` is less generic, supporting only a linear stack of layers.\n * `tf.model` is more generic and supports an arbitrary graph (without cycles)\n * of layers.\n *\n * Examples:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have an input shape defined.\n * model.add(tf.layers.dense({units: 32, inputShape: [50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output, which equals\n * // `[null, 4]`. 
The 1st dimension is the undetermined batch dimension; the\n * // 2nd is the output size of the model's last layer.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * It is also possible to specify a batch size (with potentially undetermined\n * batch dimension, denoted by \"null\") for the first layer using the\n * `batchInputShape` key. The following example is equivalent to the above:\n *\n * ```js\n * const model = tf.sequential();\n *\n * // First layer must have a defined input shape\n * model.add(tf.layers.dense({units: 32, batchInputShape: [null, 50]}));\n * // Afterwards, TF.js does automatic shape inference.\n * model.add(tf.layers.dense({units: 4}));\n *\n * // Inspect the inferred shape of the model's output.\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n *\n * You can also use an `Array` of already-constructed `Layer`s to create\n * a `tf.Sequential` model:\n *\n * ```js\n * const model = tf.sequential({\n * layers: [tf.layers.dense({units: 32, inputShape: [50]}),\n * tf.layers.dense({units: 4})]\n * });\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n */\n/**\n * @doc {heading: 'Models', subheading: 'Creation', configParamIndices: [0]}\n */\nexport function sequential(config?: SequentialConfig): Sequential {\n return new Sequential(config);\n}\n\n/**\n * @doc {\n * heading: 'Models',\n * subheading: 'Loading',\n * useDocsFrom: 'loadModelInternal'\n * }\n */\nexport function loadModel(\n pathOrIOHandler: string|io.IOHandler, strict = true): Promise<Model> {\n return loadModelInternal(pathOrIOHandler, strict);\n}\n\n/**\n * @doc {\n * heading: 'Models',\n * subheading: 'Inputs',\n * useDocsFrom: 'Input',\n * configParamIndices: [0]\n * }\n */\nexport function input(config: InputConfig): SymbolicTensor {\n return Input(config);\n}\n\nexport function registerCallbackConstructor(\n verbosityLevel: number,\n callbackConstructor: BaseCallbackConstructor): void {\n CallbackConstructorRegistry.registerCallbackConstructor(\n verbosityLevel, callbackConstructor);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n// Layer activation functions\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, tidy} from '@tensorflow/tfjs-core';\n\nimport * as K from './backend/tfjs_backend';\nimport {getScalar} from './backend/state';\nimport {deserializeKerasObject} from './utils/generic_utils';\n\n/**\n * Base class for Activations.\n *\n * Special note: due to cross-language compatibility reasons, the\n * static readonly className field in this family of classes must be set to\n * the initialLowerCamelCase name of the activation.\n */\nexport abstract class Activation extends serialization.Serializable {\n abstract apply(tensor: Tensor, axis?: number): Tensor;\n getConfig(): serialization.ConfigDict {\n return {};\n }\n}\n\n/** @docinline */\nexport type ActivationIdentifier = 'elu'|'hardSigmoid'|'linear'|'relu'|'relu6'|\n 'selu'|'sigmoid'|'softmax'|'softplus'|'softsign'|'tanh'|string;\n\n/**\n * Exponential linear unit (ELU).\n * Reference: https://arxiv.org/abs/1511.07289\n */\nexport class Elu extends Activation {\n static readonly className = 'elu';\n /**\n * Calculate the activation function.\n *\n * @param x: Input.\n * @param alpha: Scaling factor the negative 
section.\n * @return Output of the ELU activation.\n */\n apply(x: Tensor, alpha = 1): Tensor {\n return K.elu(x, alpha);\n }\n}\nserialization.registerClass(Elu);\n\n/**\n * Scaled Exponential Linear Unit. (Klambauer et al., 2017).\n * Reference: Self-Normalizing Neural Networks, https://arxiv.org/abs/1706.02515\n * Notes:\n * - To be used together with the initialization \"lecunNormal\".\n * - To be used together with the dropout variant \"AlphaDropout\".\n */\nexport class Selu extends Activation {\n static readonly className = 'selu';\n apply(x: Tensor): Tensor {\n return tfc.selu(x);\n }\n}\nserialization.registerClass(Selu);\n\n/**\n * Rectified linear unit\n */\nexport class Relu extends Activation {\n static readonly className = 'relu';\n apply(x: Tensor): Tensor {\n return tfc.relu(x);\n }\n}\nserialization.registerClass(Relu);\n\n/**\n * Rectified linear unit activation maxing out at 6.0.\n */\nexport class Relu6 extends Activation {\n static readonly className = 'relu6';\n apply(x: Tensor): Tensor {\n return tidy(() => tfc.minimum(getScalar(6.0), tfc.relu(x)));\n }\n}\nserialization.registerClass(Relu6);\n\n//* Linear activation (no-op) */\nexport class Linear extends Activation {\n static readonly className = 'linear';\n apply(x: Tensor): Tensor {\n return x;\n }\n}\nserialization.registerClass(Linear);\n\n/**\n * Sigmoid activation function.\n */\nexport class Sigmoid extends Activation {\n static readonly className = 'sigmoid';\n apply(x: Tensor): Tensor {\n return tfc.sigmoid(x);\n }\n}\nserialization.registerClass(Sigmoid);\n\n/**\n * Segment-wise linear approximation of sigmoid.\n */\nexport class HardSigmoid extends Activation {\n static readonly className = 'hardSigmoid';\n apply(x: Tensor): Tensor {\n return K.hardSigmoid(x);\n }\n}\nserialization.registerClass(HardSigmoid);\n\n/**\n * Softplus activation function.\n */\nexport class Softplus extends Activation {\n static readonly className = 'softplus';\n apply(x: Tensor): Tensor {\n return tfc.softplus(x);\n }\n}\nserialization.registerClass(Softplus);\n\n/**\n * Softsign activation function.\n */\nexport class Softsign extends Activation {\n static readonly className = 'softsign';\n apply(x: Tensor): Tensor {\n return K.softsign(x);\n }\n}\nserialization.registerClass(Softsign);\n\n/**\n * Hyperbolic tangent function.\n */\nexport class Tanh extends Activation {\n static readonly className = 'tanh';\n apply(x: Tensor): Tensor {\n return tfc.tanh(x);\n }\n}\nserialization.registerClass(Tanh);\n\n/**\n * Softmax activation function\n */\nexport class Softmax extends Activation {\n static readonly className = 'softmax';\n /**\n * Calculate the activation function.\n *\n * @param x Tensor.\n * @param axis Integer, axis along which the softmax normalization is applied.\n * Invalid if < 2, as softmax across 1 (the batch dimension) is assumed to be\n * an error.\n *\n * @returns a Tensor of the same shape as x\n *\n * @throws ValueError: In case `dim(x) < 2`.\n */\n apply(x: Tensor, axis: number = (-1)): Tensor {\n return tfc.softmax(x, axis);\n }\n}\nserialization.registerClass(Softmax);\n\nexport function serializeActivation(activation: Activation): string {\n return activation.getClassName();\n}\n\nexport function deserializeActivation(\n config: serialization.ConfigDict,\n customObjects: serialization.ConfigDict = {}): Activation {\n return deserializeKerasObject(\n config, serialization.SerializationMap.getMap().classNameMap,\n customObjects, 'activation');\n}\n\nexport function getActivation(identifier: 
ActivationIdentifier|\n serialization.ConfigDict|Activation): Activation {\n if (identifier == null) {\n const config = {className: 'linear', config: {}};\n return deserializeActivation(config);\n }\n if (typeof identifier === 'string') {\n const config = {className: identifier, config: {}};\n return deserializeActivation(config);\n } else if (identifier instanceof Activation) {\n return identifier;\n } else {\n return deserializeActivation(identifier);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Advanced activation layers.\n */\n\nimport {elu, leakyRelu, relu, serialization, Tensor, clipByValue} from '@tensorflow/tfjs-core';\n\nimport {Softmax as softmaxActivation} from '../activations';\nimport {cast} from '../backend/tfjs_backend';\nimport {Layer, LayerConfig} from '../engine/topology';\nimport {getScalar} from '../backend/state';\nimport {NotImplementedError} from '../errors';\nimport {Kwargs, Shape} from '../types';\nimport {getExactlyOneTensor} from '../utils/types_utils';\n\nexport interface ReLULayerConfig extends LayerConfig {\n /**\n * Float, the maximum output value.\n */\n maxValue?: number;\n}\n\n/**\n * Rectified Linear Unit activation function.\n *\n * Input shape:\n * Arbitrary. Use the config field `inputShape` (Array of integers, does\n * not include the sample axis) when using this layer as the first layer\n * in a model.\n *\n * Output shape:\n * Same shape as the input.\n */\nexport class ReLU extends Layer {\n static className = 'ReLU';\n maxValue: number;\n\n constructor(config?: ReLULayerConfig) {\n super(config == null ? {} : config);\n this.supportsMasking = true;\n if (config != null) {\n this.maxValue = config.maxValue;\n }\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n inputs = getExactlyOneTensor(inputs);\n let output = relu(inputs);\n if (this.maxValue != null) {\n output = clipByValue(output, 0, this.maxValue);\n }\n return output;\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n return inputShape;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {maxValue: this.maxValue};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(ReLU);\n\nexport interface LeakyReLULayerConfig extends LayerConfig {\n /**\n * Float `>= 0`. Negative slope coefficient. Defaults to `0.3`.\n */\n alpha?: number;\n}\n\n/**\n * Leaky version of a rectified linear unit.\n *\n * It allows a small gradient when the unit is not active:\n * `f(x) = alpha * x for x < 0.`\n * `f(x) = x for x >= 0.`\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n */\nexport class LeakyReLU extends Layer {\n static className = 'LeakyReLU';\n readonly alpha: number;\n\n readonly DEFAULT_ALPHA = 0.3;\n\n constructor(config?: LeakyReLULayerConfig) {\n super(config == null ? {} : config);\n if (config == null) {\n config = {};\n }\n\n this.alpha = config.alpha == null ? 
this.DEFAULT_ALPHA : config.alpha;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n const x = getExactlyOneTensor(inputs);\n return leakyRelu(x, this.alpha);\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n return inputShape;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {alpha: this.alpha};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(LeakyReLU);\n\n// TODO(cais): Implement PReLU\n\nexport interface ELULayerConfig extends LayerConfig {\n /**\n * Float `>= 0`. Negative slope coefficient. Defaults to `1.0`.\n */\n alpha?: number;\n}\n\n/**\n * Exponetial Linear Unit (ELU).\n *\n * It follows:\n * `f(x) = alpha * (exp(x) - 1.) for x < 0`,\n * `f(x) = x for x >= 0`.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Fast and Accurate Deep Network Learning by Exponential Linear Units\n * (ELUs)](https://arxiv.org/abs/1511.07289v1)\n */\nexport class ELU extends Layer {\n static className = 'ELU';\n readonly alpha: number;\n\n readonly DEFAULT_ALPHA = 1.0;\n\n constructor(config?: ELULayerConfig) {\n super(config == null ? {} : config);\n if (config == null) {\n config = {};\n }\n\n if (config.alpha != null && config.alpha !== this.DEFAULT_ALPHA) {\n throw new NotImplementedError(\n `Non-default alpha value (${config.alpha}) is not supported by the ` +\n `ELU layer yet.`);\n }\n\n this.alpha = config.alpha == null ? this.DEFAULT_ALPHA : config.alpha;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n const x = getExactlyOneTensor(inputs);\n return elu(x);\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n return inputShape;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {alpha: this.alpha};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(ELU);\n\nexport interface ThresholdedReLULayerConfig extends LayerConfig {\n /**\n * Float >= 0. Threshold location of activation.\n */\n theta?: number;\n}\n\n/**\n * Thresholded Rectified Linear Unit.\n *\n * It follows:\n * `f(x) = x for x > theta`,\n * `f(x) = 0 otherwise`.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n *\n * References:\n * - [Zero-Bias Autoencoders and the Benefits of Co-Adapting\n * Features](http://arxiv.org/abs/1402.3337)\n */\nexport class ThresholdedReLU extends Layer {\n static className = 'ThresholdedReLU';\n readonly theta: number;\n private readonly thetaTensor: Tensor;\n\n readonly DEFAULT_THETA = 1.0;\n\n constructor(config?: ThresholdedReLULayerConfig) {\n super(config == null ? {} : config);\n if (config == null) {\n config = {};\n }\n\n this.theta = config.theta == null ? 
this.DEFAULT_THETA : config.theta;\n this.thetaTensor = getScalar(this.theta);\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n const x = getExactlyOneTensor(inputs);\n return x.mul(cast(x.greater(this.thetaTensor), 'float32'));\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n return inputShape;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {theta: this.theta};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(ThresholdedReLU);\n\nexport interface SoftmaxLayerConfig extends LayerConfig {\n /**\n * Integer, axis along which the softmax normalization is applied.\n * Defaults to `-1` (i.e., the last axis).\n */\n axis?: number;\n}\n\n/**\n * Softmax activation layer.\n *\n * Input shape:\n * Arbitrary. Use the configuration `inputShape` when using this layer as the\n * first layer in a model.\n *\n * Output shape:\n * Same shape as the input.\n */\nexport class Softmax extends Layer {\n static className = 'Softmax';\n readonly axis: number;\n readonly softmax: (t: Tensor, a?: number) => Tensor;\n readonly DEFAULT_AXIS = 1.0;\n\n constructor(config?: SoftmaxLayerConfig) {\n super(config == null ? {} : config);\n if (config == null) {\n config = {};\n }\n this.softmax = new softmaxActivation().apply;\n this.axis = config.axis == null ? this.DEFAULT_AXIS : config.axis;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n const x = getExactlyOneTensor(inputs);\n return this.softmax(x, this.axis);\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n return inputShape;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {axis: this.axis};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Softmax);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* original source: keras/regularizers.py */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {abs, add, Scalar, serialization, sum, Tensor, tidy, zeros} from '@tensorflow/tfjs-core';\n\nimport {getScalar} from './backend/state';\nimport * as K from './backend/tfjs_backend';\nimport {deserializeKerasObject, serializeKerasObject} from './utils/generic_utils';\n\n/**\n * Regularizer base class.\n */\nexport abstract class Regularizer extends serialization.Serializable {\n abstract apply(x: Tensor): Scalar;\n}\n\nexport interface L1L2Config {\n /** L1 regularization rate. Defaults to 0.01. */\n l1?: number;\n /** L2 regularization rate. Defaults to 0.01. */\n l2?: number;\n}\n\nexport interface L1Config {\n /** L1 regularization rate. Defaults to 0.01. */\n l1: number;\n}\n\nexport interface L2Config {\n /** L2 regularization rate. Defaults to 0.01. 
*/\n l2: number;\n}\n\n/**\n * Regularizer for L1 and L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x)) + sum(l2 * x^2)\n */\n/** @doc {heading: 'Regularizers', namespace: 'regularizers'} */\nexport class L1L2 extends Regularizer {\n static className = 'L1L2';\n\n private readonly l1: Scalar;\n private readonly l2: Scalar;\n private readonly hasL1: boolean;\n private readonly hasL2: boolean;\n constructor(config?: L1L2Config) {\n super();\n\n const l1 = config == null || config.l1 == null ? 0.01 : config.l1;\n const l2 = config == null || config.l2 == null ? 0.01 : config.l2;\n this.hasL1 = l1 !== 0;\n this.hasL2 = l2 !== 0;\n\n this.l1 = getScalar(l1);\n this.l2 = getScalar(l2);\n }\n\n /**\n * Porting note: Renamed from __call__.\n * @param x Variable of which to calculate the regularization score.\n */\n apply(x: Tensor): Scalar {\n return tidy(() => {\n let regularization: Tensor = zeros([1]);\n if (this.hasL1) {\n regularization = add(regularization, sum(tfc.mul(this.l1, abs(x))));\n }\n if (this.hasL2) {\n regularization =\n add(regularization, sum(tfc.mul(this.l2, K.square(x))));\n }\n return regularization.asScalar();\n });\n }\n\n getConfig(): serialization.ConfigDict {\n return {'l1': this.l1.dataSync()[0], 'l2': this.l2.dataSync()[0]};\n }\n\n static fromConfig<T extends serialization.Serializable>(\n cls: serialization.SerializableConstructor<T>,\n config: serialization.ConfigDict): T {\n return new cls({l1: config.l1 as number, l2: config.l2 as number});\n }\n}\nserialization.registerClass(L1L2);\n\n/**\n * Regularizer for L1 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l1 * abs(x))\n * @param config l1 config.\n */\nexport function l1(config?: L1Config) {\n return new L1L2({l1: config != null ? config.l1 : null, l2: 0});\n}\n\n/**\n * Regularizer for L2 regularization.\n *\n * Adds a term to the loss to penalize large weights:\n * loss += sum(l2 * x^2)\n * @param config l2 config.\n */\nexport function l2(config: L2Config) {\n return new L1L2({l2: config != null ? 
config.l2 : null, l1: 0});\n}\n\n/** @docinline */\nexport type RegularizerIdentifier = 'l1l2'|string;\n\n// Maps the JavaScript-like identifier keys to the corresponding keras symbols.\nexport const REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP:\n {[identifier in RegularizerIdentifier]: string} = {\n 'l1l2': 'L1L2'\n };\n\nexport function serializeRegularizer(constraint: Regularizer):\n serialization.ConfigDictValue {\n return serializeKerasObject(constraint);\n}\n\nexport function deserializeRegularizer(\n config: serialization.ConfigDict,\n customObjects: serialization.ConfigDict = {}): Regularizer {\n return deserializeKerasObject(\n config, serialization.SerializationMap.getMap().classNameMap,\n customObjects, 'regularizer');\n}\n\nexport function getRegularizer(identifier: RegularizerIdentifier|\n serialization.ConfigDict|\n Regularizer): Regularizer {\n if (identifier == null) {\n return null;\n }\n if (typeof identifier === 'string') {\n const className = identifier in REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP ?\n REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP[identifier] :\n identifier;\n const config = {className, config: {}};\n return deserializeRegularizer(config);\n } else if (identifier instanceof Regularizer) {\n return identifier;\n } else {\n return deserializeRegularizer(identifier);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport {PaddingMode} from '../common';\nimport {ValueError} from '../errors';\n\nimport {pyListRepeat} from './generic_utils';\nimport {isInteger, max} from './math_utils';\n\n/**\n * Transforms a single number of array of numbers into an array of numbers.\n * @param value\n * @param n: The size of the tuple to be returned.\n * @param name: Name of the parameter, used for generating error messages.\n * @returns An array of numbers.\n */\nexport function normalizeArray(\n value: number|number[], n: number, name: string): number[] {\n if (typeof value === 'number') {\n return pyListRepeat(value, n);\n } else {\n if (value.length !== n) {\n throw new ValueError(\n `The ${name} argument must be a tuple of ${n} integers. Received: ` +\n `${value.length} elements.`);\n }\n for (let i = 0; i < n; ++i) {\n const singleValue = value[i];\n if (!isInteger(singleValue)) {\n throw new ValueError(\n `The ${name} argument must be a tuple of ${\n n} integers. 
Received: ` +\n `${JSON.stringify(value)} including a non-integer number ` +\n `${singleValue}`);\n }\n }\n return value;\n }\n}\n\n/**\n * Determines output length of a convolution given input length.\n * @param inputLength\n * @param filterSize\n * @param padding\n * @param stride\n * @param dilation: dilation rate.\n */\nexport function convOutputLength(\n inputLength: number, filterSize: number, padding: PaddingMode,\n stride: number, dilation = 1): number {\n if (inputLength == null) {\n return inputLength;\n }\n const dilatedFilterSize = filterSize + (filterSize - 1) * (dilation - 1);\n let outputLength: number;\n if (padding === 'same') {\n outputLength = inputLength;\n } else { // VALID\n outputLength = inputLength - dilatedFilterSize + 1;\n }\n return Math.floor((outputLength + stride - 1) / stride);\n}\n\nexport function deconvLength(\n dimSize: number, strideSize: number, kernelSize: number,\n padding: PaddingMode): number {\n if (dimSize == null) {\n return null;\n }\n\n if (padding === 'valid') {\n dimSize = dimSize * strideSize + max([kernelSize - strideSize, 0]);\n } else if (padding === 'same') {\n dimSize = dimSize * strideSize;\n } else {\n throw new ValueError(`Unsupport padding mode: ${padding}.`);\n }\n return dimSize;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * TensorFlow.js Layers: Convolutional Layers\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D, tidy} from '@tensorflow/tfjs-core';\n\nimport {Activation, getActivation, serializeActivation} from '../activations';\nimport {imageDataFormat} from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport {checkDataFormat, checkPaddingMode, DataFormat, PaddingMode} from '../common';\nimport {Constraint, ConstraintIdentifier, getConstraint, serializeConstraint} from '../constraints';\nimport {InputSpec, Layer, LayerConfig} from '../engine/topology';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {getInitializer, Initializer, InitializerIdentifier, serializeInitializer} from '../initializers';\nimport {getRegularizer, Regularizer, RegularizerIdentifier, serializeRegularizer} from '../regularizers';\nimport {Kwargs, Shape} from '../types';\nimport {convOutputLength, deconvLength, normalizeArray} from '../utils/conv_utils';\nimport * as generic_utils from '../utils/generic_utils';\nimport {getExactlyOneShape, getExactlyOneTensor} from '../utils/types_utils';\nimport {LayerVariable} from '../variables';\n\n/**\n * Transpose and cast the input before the conv2d.\n * @param x Input image tensor.\n * @param dataFormat\n */\nexport function preprocessConv2DInput(\n x: Tensor, dataFormat: DataFormat): Tensor {\n // TODO(cais): Cast type to float32 if not.\n return tidy(() => {\n checkDataFormat(dataFormat);\n if (dataFormat === 'channelsFirst') {\n return tfc.transpose(x, [0, 2, 3, 1]); // NCHW -> NHWC.\n } else {\n return x;\n }\n });\n}\n\n/**\n * 1D-convolution with bias added.\n *\n * Porting Note: This function does not exist in the Python Keras backend.\n * It is exactly the same as `conv2d`, except the added `bias`.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape 
`[filterWidth, inDepth, outDepth]`.\n * @param bias Bias, rank-3, of shape `[outDepth]`.\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1dWithBias(\n x: Tensor, kernel: Tensor, bias: Tensor, strides = 1, padding = 'valid',\n dataFormat?: DataFormat, dilationRate = 1): Tensor {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n // Check the ranks of x, kernel and bias.\n if (x.shape.length !== 3) {\n throw new ValueError(\n `The input of a conv1dWithBias operation should be 3, but is ` +\n `${x.shape.length} instead.`);\n }\n if (kernel.shape.length !== 3) {\n throw new ValueError(\n `The kernel for a conv1dWithBias operation should be 3, but is ` +\n `${kernel.shape.length} instead`);\n }\n if (bias != null && bias.shape.length !== 1) {\n throw new ValueError(\n `The bias for a conv1dWithBias operation should be 1, but is ` +\n `${kernel.shape.length} instead`);\n }\n // TODO(cais): Support CAUSAL padding mode.\n if (dataFormat === 'channelsFirst') {\n x = tfc.transpose(x, [0, 2, 1]); // NCW -> NWC.\n }\n if (padding === 'causal') {\n throw new NotImplementedError(\n 'The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n let y: Tensor = tfc.conv1d(\n x as Tensor2D | Tensor3D, kernel as Tensor3D, strides,\n padding === 'same' ? 'same' : 'valid', 'NWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias);\n }\n return y;\n });\n}\n\n/**\n * 1D-convolution.\n *\n * @param x Input tensor, rank-3, of shape `[batchSize, width, inChannels]`.\n * @param kernel Kernel, rank-3, of shape `[filterWidth, inDepth, outDepth]`.s\n * @param strides\n * @param padding Padding mode.\n * @param dataFormat Data format.\n * @param dilationRate\n * @returns The result of the 1D convolution.\n * @throws ValueError, if `x`, `kernel` or `bias` is not of the correct rank.\n */\nexport function conv1d(\n x: Tensor, kernel: Tensor, strides = 1, padding = 'valid',\n dataFormat?: DataFormat, dilationRate = 1): Tensor {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv1dWithBias(\n x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n\n/**\n * 2D Convolution\n * @param x\n * @param kernel kernel of the convolution.\n * @param strides strides array.\n * @param padding padding mode. Default to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param dilationRate dilation rate array.\n * @returns Result of the 2D pooling.\n */\nexport function conv2d(\n x: Tensor, kernel: Tensor, strides = [1, 1], padding = 'valid',\n dataFormat?: DataFormat, dilationRate?: [number, number]): Tensor {\n return tidy(() => {\n checkDataFormat(dataFormat);\n return conv2dWithBias(\n x, kernel, null, strides, padding, dataFormat, dilationRate);\n });\n}\n\n/**\n * 2D Convolution with an added bias.\n * Note: This function does not exist in the Python Keras Backend. 
This function\n * is exactly the same as `conv2d`, except the added `bias`.\n */\nexport function conv2dWithBias(\n x: Tensor, kernel: Tensor, bias: Tensor, strides = [1, 1],\n padding = 'valid', dataFormat?: DataFormat,\n dilationRate?: [number, number]): Tensor {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n if (x.rank !== 3 && x.rank !== 4) {\n throw new ValueError(\n `conv2dWithBias expects input to be of rank 3 or 4, but received ` +\n `${x.rank}.`);\n }\n if (kernel.rank !== 3 && kernel.rank !== 4) {\n throw new ValueError(\n `conv2dWithBias expects kernel to be of rank 3 or 4, but received ` +\n `${x.rank}.`);\n }\n let y = preprocessConv2DInput(x, dataFormat);\n if (padding === 'causal') {\n throw new NotImplementedError(\n 'The support for CAUSAL padding mode in conv1dWithBias is not ' +\n 'implemented yet.');\n }\n y = tfc.conv2d(\n y as Tensor3D | Tensor4D, kernel as Tensor4D,\n strides as [number, number], padding === 'same' ? 'same' : 'valid',\n 'NHWC', dilationRate);\n if (bias != null) {\n y = K.biasAdd(y, bias as Tensor1D);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\n\n\n/**\n * Base LayerConfig for depthwise and non-depthwise convolutional layers.\n */\nexport interface BaseConvLayerConfig extends LayerConfig {\n /**\n * The dimensions of the convolution window. If kernelSize is a number, the\n * convolutional window will be square.\n */\n kernelSize: number|number[];\n\n /**\n * The strides of the convolution in each dimension. If strides is a number,\n * strides in both dimensions are equal.\n *\n * Specifying any stride value != 1 is incompatible with specifying any\n * `dilationRate` value != 1.\n */\n strides?: number|number[];\n\n /**\n * Padding mode.\n */\n padding?: PaddingMode;\n\n /**\n * Format of the data, which determines the ordering of the dimensions in\n * the inputs.\n *\n * `channels_last` corresponds to inputs with shape\n * `(batch, ..., channels)`\n *\n * `channels_first` corresponds to inputs with shape `(batch, channels,\n * ...)`.\n *\n * Defaults to `channels_last`.\n */\n dataFormat?: DataFormat;\n\n /**\n * The dilation rate to use for the dilated convolution in each dimension.\n * Should be an integer or array of two integers.\n *\n * Currently, specifying any `dilationRate` value != 1 is incompatible with\n * specifying any `strides` value != 1.\n */\n dilationRate?: number|[number]|[number, number];\n\n /**\n * Activation function of the layer.\n *\n * If you don't specify the activation, none is applied.\n */\n activation?: string;\n\n /**\n * Whether the layer uses a bias vector. 
Defaults to `true`.\n */\n useBias?: boolean;\n\n /**\n * Initializer for the convolutional kernel weights matrix.\n */\n kernelInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the bias vector.\n */\n biasInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Constraint for the convolutional kernel weights.\n */\n kernelConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Constraint for the bias vector.\n */\n biasConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Regularizer function applied to the kernel weights matrix.\n */\n kernelRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the bias vector.\n */\n biasRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the activation.\n */\n activityRegularizer?: RegularizerIdentifier|Regularizer;\n}\n\n/**\n * LayerConfig for non-depthwise convolutional layers.\n * Applies to non-depthwise convolution of all ranks (e.g, Conv1D, Conv2D).\n */\nexport interface ConvLayerConfig extends BaseConvLayerConfig {\n /**\n * The dimensionality of the output space (i.e. the number of filters in the\n * convolution).\n */\n filters: number;\n}\n\n/**\n * Abstract convolution layer.\n */\nexport abstract class BaseConv extends Layer {\n protected readonly rank: number;\n protected readonly kernelSize: number[];\n protected readonly strides: number[];\n protected readonly padding: PaddingMode;\n protected readonly dataFormat: DataFormat;\n protected readonly activation: Activation;\n protected readonly useBias: boolean;\n protected readonly dilationRate: number[];\n\n // Bias-related members are here because all convolution subclasses use the\n // same configuration parmeters to control bias. Kernel-related members\n // are in subclass `Conv` because some subclasses use different parameters to\n // control kernel properties, for instance, `DepthwiseConv2D` uses\n // `depthwiseInitializer` instead of `kernelInitializer`.\n protected readonly biasInitializer?: Initializer;\n protected readonly biasConstraint?: Constraint;\n protected readonly biasRegularizer?: Regularizer;\n\n protected bias: LayerVariable = null;\n\n readonly DEFAULT_KERNEL_INITIALIZER: InitializerIdentifier = 'glorotNormal';\n readonly DEFAULT_BIAS_INITIALIZER: InitializerIdentifier = 'zeros';\n\n constructor(rank: number, config: BaseConvLayerConfig) {\n super(config as LayerConfig);\n BaseConv.verifyConfig(config);\n this.rank = rank;\n if (this.rank !== 1 && this.rank !== 2) {\n throw new NotImplementedError(\n `Convolution layer for rank other than 1 or 2 (${this.rank}) is ` +\n `not implemented yet.`);\n }\n this.kernelSize = normalizeArray(config.kernelSize, rank, 'kernelSize');\n this.strides = normalizeArray(\n config.strides == null ? 1 : config.strides, rank, 'strides');\n this.padding = config.padding == null ? 'valid' : config.padding;\n checkPaddingMode(this.padding);\n this.dataFormat =\n config.dataFormat == null ? 'channelsLast' : config.dataFormat;\n checkDataFormat(this.dataFormat);\n this.activation = getActivation(config.activation);\n this.useBias = config.useBias == null ? 
true : config.useBias;\n this.biasInitializer =\n getInitializer(config.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.biasConstraint = getConstraint(config.biasConstraint);\n this.biasRegularizer = getRegularizer(config.biasRegularizer);\n this.activityRegularizer = getRegularizer(config.activityRegularizer);\n this.dilationRate = normalizeArray(\n config.dilationRate == null ? 1 : config.dilationRate, rank,\n 'dilationRate');\n if (this.rank === 1 &&\n (Array.isArray(this.dilationRate) &&\n (this.dilationRate as number[]).length !== 1)) {\n throw new ValueError(\n `dilationRate must be a number or an array of a single number ` +\n `for 1D convolution, but received ` +\n `${JSON.stringify(this.dilationRate)}`);\n }\n if (this.rank === 2) {\n if (typeof this.dilationRate === 'number') {\n this.dilationRate = [this.dilationRate, this.dilationRate];\n } else if (this.dilationRate.length !== 2) {\n throw new ValueError(\n `dilationRate must be a number or array of two numbers for 2D ` +\n `convolution, but received ${JSON.stringify(this.dilationRate)}`);\n }\n }\n }\n\n protected static verifyConfig(config: BaseConvLayerConfig) {\n // Check config.kernelSize type and shape.\n generic_utils.assert(\n 'kernelSize' in config, `required key 'kernelSize' not in config`);\n if (typeof config.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(\n config.kernelSize, 'number', 1, 2))\n throw new ValueError(\n `BaseConv expects config.kernelSize to be number or number[] with ` +\n `length 1 or 2, but received ${JSON.stringify(config.kernelSize)}.`);\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n kernelSize: this.kernelSize,\n strides: this.strides,\n padding: this.padding,\n dataFormat: this.dataFormat,\n dilationRate: this.dilationRate,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n biasInitializer: serializeInitializer(this.biasInitializer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n\n/**\n * Abstract nD convolution layer. Ancestor of convolution layers which reduce\n * across channels, i.e., Conv1D and Conv2D, but not DepthwiseConv2D.\n */\nexport abstract class Conv extends BaseConv {\n protected readonly filters: number;\n\n protected kernel: LayerVariable = null;\n\n // Bias-related properties are stored in the superclass `BaseConv` because all\n // convolution subclasses use the same configuration parameters to control\n // bias. 
Kernel-related properties are defined here rather than in the\n // superclass because some convolution subclasses use different names and\n // configuration parameters for their internal kernel state.\n protected readonly kernelInitializer?: Initializer;\n protected readonly kernelConstraint?: Constraint;\n protected readonly kernelRegularizer?: Regularizer;\n\n constructor(rank: number, config: ConvLayerConfig) {\n super(rank, config as BaseConvLayerConfig);\n Conv.verifyConfig(config);\n this.filters = config.filters;\n this.kernelInitializer = getInitializer(\n config.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.kernelConstraint = getConstraint(config.kernelConstraint);\n this.kernelRegularizer = getRegularizer(config.kernelRegularizer);\n }\n\n build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n const channelAxis =\n this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(\n `The channel dimension of the input should be defined. ` +\n `Found ${inputShape[channelAxis]}`);\n }\n const inputDim = inputShape[channelAxis];\n\n const kernelShape = this.kernelSize.concat([inputDim, this.filters]);\n\n this.kernel = this.addWeight(\n 'kernel', kernelShape, null, this.kernelInitializer,\n this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight(\n 'bias', [this.filters], null, this.biasInitializer,\n this.biasRegularizer, true, this.biasConstraint);\n }\n\n this.inputSpec = [{ndim: this.rank + 2, axes: {[channelAxis]: inputDim}}];\n this.built = true;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs: Tensor;\n const biasValue = this.bias == null ? null : this.bias.read();\n\n if (this.rank === 1) {\n outputs = conv1dWithBias(\n inputs, this.kernel.read(), biasValue, this.strides[0],\n this.padding, this.dataFormat, this.dilationRate[0]);\n } else if (this.rank === 2) {\n // TODO(cais): Move up to constructor.\n outputs = conv2dWithBias(\n inputs, this.kernel.read(), biasValue, this.strides, this.padding,\n this.dataFormat, this.dilationRate as [number, number]);\n } else if (this.rank === 3) {\n throw new NotImplementedError('3D convolution is not implemented yet.');\n }\n\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n const newSpace: number[] = [];\n const space = (this.dataFormat === 'channelsLast') ?\n inputShape.slice(1, inputShape.length - 1) :\n inputShape.slice(2);\n for (let i = 0; i < space.length; ++i) {\n const newDim = convOutputLength(\n space[i], this.kernelSize[i], this.padding, this.strides[i],\n typeof this.dilationRate === 'number' ? 
this.dilationRate :\n this.dilationRate[i]);\n newSpace.push(newDim);\n }\n\n let outputShape = [inputShape[0]];\n if (this.dataFormat === 'channelsLast') {\n outputShape = outputShape.concat(newSpace);\n outputShape.push(this.filters);\n } else {\n outputShape.push(this.filters);\n outputShape = outputShape.concat(newSpace);\n }\n return outputShape;\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {\n filters: this.filters,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n\n protected static verifyConfig(config: ConvLayerConfig) {\n // Check config.filters type, shape, and value.\n if (!('filters' in config) || typeof config.filters !== 'number' ||\n config.filters < 1) {\n throw new ValueError(\n `Convolution layer expected config.filters to be a 'number' > 0 ` +\n `but got ${JSON.stringify(config.filters)}`);\n }\n }\n}\n\n\n/**\n * 2D convolution layer (e.g. spatial convolution over images).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input to produce a tensor of outputs.\n *\n * If `useBias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model,\n * provide the keyword argument `inputShape`\n * (Array of integers, does not include the sample axis),\n * e.g. `inputShape=[128, 128, 3]` for 128x128 RGB pictures\n * in `dataFormat='channelsLast'`.\n */\nexport class Conv2D extends Conv {\n static className = 'Conv2D';\n constructor(config: ConvLayerConfig) {\n super(2, config);\n Conv2D.verifyConfig(config);\n }\n\n getConfig(): serialization.ConfigDict {\n const config = super.getConfig();\n delete config['rank'];\n return config;\n }\n\n protected static verifyConfig(config: ConvLayerConfig) {\n // config.kernelSize must be a number or array of numbers.\n if ((typeof config.kernelSize !== 'number') &&\n !generic_utils.checkArrayTypeAndLength(\n config.kernelSize, 'number', 1, 2))\n throw new ValueError(\n `Conv2D expects config.kernelSize to be number or number[] with ` +\n `length 1 or 2, but received ${JSON.stringify(config.kernelSize)}.`);\n }\n}\nserialization.registerClass(Conv2D);\n\n/**\n * Transposed convolutional layer (sometimes called Deconvolution).\n *\n * The need for transposed convolutions generally arises\n * from the desire to use a transformation going in the opposite direction of\n * a normal convolution, i.e., from something that has the shape of the output\n * of some convolution to something that has the shape of its input while\n * maintaining a connectivity pattern that is compatible with said\n * convolution.\n *\n * When using this layer as the first layer in a model, provide the\n * configuration `inputShape` (`Array` of integers, does not include the\n * sample axis), e.g., `inputShape: [128, 128, 3]` for 128x128 RGB pictures in\n * `dataFormat: 'channelsLast'`.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if `dataFormat` is `'channelsFirst'`.\n * or 4D tensor with shape\n * `[batch, rows, cols, channels]` if `dataFormat` is `'channelsLast`.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if `dataFormat` is\n * `'channelsFirst'`. 
or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if `dataFormat` is `'channelsLast'`.\n *\n * References:\n * - [A guide to convolution arithmetic for deep\n * learning](https://arxiv.org/abs/1603.07285v1)\n * - [Deconvolutional\n * Networks](http://www.matthewzeiler.com/pubs/cvpr2010/cvpr2010.pdf)\n */\nexport class Conv2DTranspose extends Conv2D {\n static className = 'Conv2DTranspose';\n inputSpec: InputSpec[];\n\n constructor(config: ConvLayerConfig) {\n super(config);\n this.inputSpec = [new InputSpec({ndim: 4})];\n\n if (this.padding !== 'same' && this.padding !== 'valid') {\n throw new ValueError(\n `Conv2DTranspose currently supports only padding modes 'same' ` +\n `and 'valid', but received padding mode ${this.padding}`);\n }\n }\n\n build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n\n if (inputShape.length !== 4) {\n throw new ValueError(\n 'Input should have rank 4; Received input shape: ' +\n JSON.stringify(inputShape));\n }\n\n const channelAxis =\n this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null) {\n throw new ValueError(\n 'The channel dimension of the inputs should be defined. ' +\n 'Found `None`.');\n }\n const inputDim = inputShape[channelAxis];\n const kernelShape = this.kernelSize.concat([this.filters, inputDim]);\n\n this.kernel = this.addWeight(\n 'kernel', kernelShape, 'float32', this.kernelInitializer,\n this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight(\n 'bias', [this.filters], 'float32', this.biasInitializer,\n this.biasRegularizer, true, this.biasConstraint);\n }\n\n // Set input spec.\n this.inputSpec =\n [new InputSpec({ndim: 4, axes: {[channelAxis]: inputDim}})];\n this.built = true;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs);\n if (input.shape.length !== 4) {\n throw new ValueError(\n `Conv2DTranspose.call() expects input tensor to be rank-4, but ` +\n `received a tensor of rank-${input.shape.length}`);\n }\n\n const inputShape = input.shape;\n const batchSize = inputShape[0];\n\n let hAxis: number;\n let wAxis: number;\n if (this.dataFormat === 'channelsFirst') {\n hAxis = 2;\n wAxis = 3;\n } else {\n hAxis = 1;\n wAxis = 2;\n }\n\n const height = inputShape[hAxis];\n const width = inputShape[wAxis];\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n\n // Infer the dynamic output shape.\n const outHeight = deconvLength(height, strideH, kernelH, this.padding);\n const outWidth = deconvLength(width, strideW, kernelW, this.padding);\n\n // Porting Note: We don't branch based on `this.dataFormat` here,\n // because\n // the tjfs-core function `conv2dTranspose` called below always\n // assumes channelsLast.\n const outputShape: [number, number, number, number] =\n [batchSize, outHeight, outWidth, this.filters];\n\n if (this.dataFormat !== 'channelsLast') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n }\n let outputs = tfc.conv2dTranspose(\n input as Tensor4D, this.kernel.read() as Tensor4D, outputShape,\n this.strides as [number, number], this.padding as 'same' | 'valid');\n if (this.dataFormat !== 'channelsLast') {\n outputs = tfc.transpose(outputs, [0, 3, 1, 2]) as Tensor4D;\n }\n\n if (this.bias != null) {\n outputs =\n K.biasAdd(outputs, this.bias.read(), this.dataFormat) as Tensor4D;\n }\n if 
(this.activation != null) {\n outputs = this.activation.apply(outputs) as Tensor4D;\n }\n return outputs;\n });\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n\n let channelAxis: number;\n let heightAxis: number;\n let widthAxis: number;\n if (this.dataFormat === 'channelsFirst') {\n channelAxis = 1;\n heightAxis = 2;\n widthAxis = 3;\n } else {\n channelAxis = 3;\n heightAxis = 1;\n widthAxis = 2;\n }\n\n const kernelH = this.kernelSize[0];\n const kernelW = this.kernelSize[1];\n const strideH = this.strides[0];\n const strideW = this.strides[1];\n\n outputShape[channelAxis] = this.filters;\n outputShape[heightAxis] =\n deconvLength(outputShape[heightAxis], strideH, kernelH, this.padding);\n outputShape[widthAxis] =\n deconvLength(outputShape[widthAxis], strideW, kernelW, this.padding);\n return outputShape;\n }\n\n getConfig(): serialization.ConfigDict {\n const config = super.getConfig();\n delete config['dilationRate'];\n return config;\n }\n}\nserialization.registerClass(Conv2DTranspose);\n\nexport interface SeparableConvLayerConfig extends ConvLayerConfig {\n /**\n * The number of depthwise convolution output channels for each input\n * channel.\n * The total number of depthwise convolution output channels will be equal\n * to `filtersIn * depthMultiplier`. Default: 1.\n */\n depthMultiplier?: number;\n\n /**\n * Initializer for the depthwise kernel matrix.\n */\n depthwiseInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the pointwise kernel matrix.\n */\n pointwiseInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Regularizer function applied to the depthwise kernel matrix.\n */\n depthwiseRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the pointwise kernel matrix.\n */\n pointwiseRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Constraint function applied to the depthwise kernel matrix.\n */\n depthwiseConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Constraint function applied to the pointwise kernel matrix.\n */\n pointwiseConstraint?: ConstraintIdentifier|Constraint;\n}\n\n\nexport class SeparableConv extends Conv {\n static className = 'SeparableConv';\n\n readonly depthMultiplier: number;\n\n protected readonly depthwiseInitializer?: Initializer;\n protected readonly depthwiseRegularizer?: Regularizer;\n protected readonly depthwiseConstraint?: Constraint;\n protected readonly pointwiseInitializer?: Initializer;\n protected readonly pointwiseRegularizer?: Regularizer;\n protected readonly pointwiseConstraint?: Constraint;\n\n readonly DEFAULT_DEPTHWISE_INITIALIZER: InitializerIdentifier =\n 'glorotUniform';\n readonly DEFAULT_POINTWISE_INITIALIZER: InitializerIdentifier =\n 'glorotUniform';\n\n protected depthwiseKernel: LayerVariable = null;\n protected pointwiseKernel: LayerVariable = null;\n\n constructor(rank: number, config?: SeparableConvLayerConfig) {\n super(rank, config);\n\n if (config.filters == null) {\n throw new ValueError(\n 'The `filters` configuration field is required by SeparableConv, ' +\n 'but is unspecified.');\n }\n if (config.kernelInitializer != null || config.kernelRegularizer != null ||\n config.kernelConstraint != null) {\n throw new ValueError(\n 'Fields kernelInitializer, kernelRegularizer and kernelConstraint ' +\n 'are invalid for SeparableConv2D. 
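A hedged usage sketch of `Conv2DTranspose`, via the assumed `tf.layers.conv2dTranspose()` factory. With `padding: 'same'` and `strides: 2`, `deconvLength` doubles each spatial dimension:

```js
// Illustrative only: upsample an 8x8x4 feature map to 16x16x16.
const model = tf.sequential();
model.add(tf.layers.conv2dTranspose({
  filters: 16,
  kernelSize: 3,
  strides: 2,
  padding: 'same',          // only 'same' and 'valid' are accepted
  inputShape: [8, 8, 4]
}));
// Expected output shape: [null, 16, 16, 16].
console.log(JSON.stringify(model.outputShape));
```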
Use depthwiseInitializer, ' +\n 'depthwiseRegularizer, depthwiseConstraint, pointwiseInitializer, ' +\n 'pointwiseRegularizer and pointwiseConstraint instead.');\n }\n if (config.padding != null && config.padding !== 'same' &&\n config.padding !== 'valid') {\n throw new ValueError(\n `SeparableConv${this.rank}D supports only padding modes: ` +\n `'same' and 'valid', but received ${JSON.stringify(config.padding)}`);\n }\n\n this.depthMultiplier =\n config.depthMultiplier == null ? 1 : config.depthMultiplier;\n this.depthwiseInitializer = getInitializer(\n config.depthwiseInitializer || this.DEFAULT_DEPTHWISE_INITIALIZER);\n this.depthwiseRegularizer = getRegularizer(config.depthwiseRegularizer);\n this.depthwiseConstraint = getConstraint(config.depthwiseConstraint);\n this.pointwiseInitializer = getInitializer(\n config.depthwiseInitializer || this.DEFAULT_POINTWISE_INITIALIZER);\n this.pointwiseRegularizer = getRegularizer(config.pointwiseRegularizer);\n this.pointwiseConstraint = getConstraint(config.pointwiseConstraint);\n }\n\n build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < this.rank + 2) {\n throw new ValueError(\n `Inputs to SeparableConv${this.rank}D should have rank ` +\n `${this.rank + 2}, but received input shape: ` +\n `${JSON.stringify(inputShape)}`);\n }\n const channelAxis =\n this.dataFormat === 'channelsFirst' ? 1 : inputShape.length - 1;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError(\n `The channel dimension of the inputs should be defined, ` +\n `but found ${JSON.stringify(inputShape[channelAxis])}`);\n }\n\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape =\n this.kernelSize.concat([inputDim, this.depthMultiplier]);\n const pointwiseKernelShape = [];\n for (let i = 0; i < this.rank; ++i) {\n pointwiseKernelShape.push(1);\n }\n pointwiseKernelShape.push(inputDim * this.depthMultiplier, this.filters);\n\n const trainable = true;\n this.depthwiseKernel = this.addWeight(\n 'depthwise_kernel', depthwiseKernelShape, 'float32',\n this.depthwiseInitializer, this.depthwiseRegularizer, trainable,\n this.depthwiseConstraint);\n this.pointwiseKernel = this.addWeight(\n 'pointwise_kernel', pointwiseKernelShape, 'float32',\n this.pointwiseInitializer, this.pointwiseRegularizer, trainable,\n this.pointwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight(\n 'bias', [this.filters], 'float32', this.biasInitializer,\n this.biasRegularizer, trainable, this.biasConstraint);\n } else {\n this.bias = null;\n }\n\n this.inputSpec =\n [new InputSpec({ndim: this.rank + 2, axes: {[channelAxis]: inputDim}})];\n this.built = true;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n\n let output: Tensor;\n if (this.rank === 1) {\n throw new NotImplementedError(\n '1D separable convolution is not implemented yet.');\n } else if (this.rank === 2) {\n if (this.dataFormat === 'channelsFirst') {\n inputs = tfc.transpose(inputs, [0, 2, 3, 1]); // NCHW -> NHWC.\n }\n\n output = tfc.separableConv2d(\n inputs as Tensor4D, this.depthwiseKernel.read() as Tensor4D,\n this.pointwiseKernel.read() as Tensor4D,\n this.strides as [number, number], this.padding as 'same' | 'valid',\n this.dilationRate as [number, number], 'NHWC');\n }\n\n if (this.useBias) {\n output = K.biasAdd(output, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n output = 
this.activation.apply(output);\n }\n\n if (this.dataFormat === 'channelsFirst') {\n output = tfc.transpose(output, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return output;\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = super.getConfig();\n delete config['rank'];\n delete config['kernelInitializer'];\n delete config['kernelRegularizer'];\n delete config['kernelConstraint'];\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['pointwiseInitializer'] =\n serializeInitializer(this.pointwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['pointwiseRegularizer'] =\n serializeRegularizer(this.pointwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseConstraint);\n config['pointwiseConstraint'] =\n serializeConstraint(this.pointwiseConstraint);\n return config;\n }\n}\n\n/**\n * Depthwise separable 2D convolution.\n *\n * Separable convolution consists of first performing\n * a depthwise spatial convolution\n * (which acts on each input channel separately)\n * followed by a pointwise convolution which mixes together the resulting\n * output channels. The `depthMultiplier` argument controls how many\n * output channels are generated per input channel in the depthwise step.\n *\n * Intuitively, separable convolutions can be understood as\n * a way to factorize a convolution kernel into two smaller kernels,\n * or as an extreme version of an Inception block.\n *\n * Input shape:\n * 4D tensor with shape:\n * `[batch, channels, rows, cols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, rows, cols, channels]` if data_format='channelsLast'.\n *\n * Output shape:\n * 4D tensor with shape:\n * `[batch, filters, newRows, newCols]` if data_format='channelsFirst'\n * or 4D tensor with shape:\n * `[batch, newRows, newCols, filters]` if data_format='channelsLast'.\n * `rows` and `cols` values might have changed due to padding.\n */\nexport class SeparableConv2D extends SeparableConv {\n static className = 'SeparableConv2D';\n constructor(config?: SeparableConvLayerConfig) {\n super(2, config);\n }\n}\nserialization.registerClass(SeparableConv2D);\n\n/**\n * 1D convolution layer (e.g., temporal convolution).\n *\n * This layer creates a convolution kernel that is convolved\n * with the layer input over a single spatial (or temporal) dimension\n * to produce a tensor of outputs.\n *\n * If `use_bias` is True, a bias vector is created and added to the outputs.\n *\n * If `activation` is not `null`, it is applied to the outputs as well.\n *\n * When using this layer as the first layer in a model, provide an\n * `inputShape` argument `Array` or `null`.\n *\n * For example, `inputShape` would be:\n * - `[10, 128]` for sequences of 10 vectors of 128-dimensional vectors\n * - `[null, 128]` for variable-length sequences of 128-dimensional vectors.\n */\nexport class Conv1D extends Conv {\n static className = 'Conv1D';\n constructor(config: ConvLayerConfig) {\n super(1, config);\n Conv1D.verifyConfig(config);\n this.inputSpec = [{ndim: 3}];\n }\n\n getConfig(): serialization.ConfigDict {\n const config = super.getConfig();\n delete config['rank'];\n delete config['dataFormat'];\n return config;\n }\n\n static verifyConfig(config: ConvLayerConfig) {\n // config.kernelSize must be a number or array of numbers.\n if (typeof config.kernelSize !== 'number' &&\n !generic_utils.checkArrayTypeAndLength(\n config.kernelSize, 'number', 1, 1))\n 
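A usage sketch for `SeparableConv2D` (factory name `tf.layers.separableConv2d()` assumed): a depthwise step with `depthMultiplier` filters per input channel, followed by a 1x1 pointwise convolution that produces `filters` output channels:

```js
// Illustrative only.
const model = tf.sequential();
model.add(tf.layers.separableConv2d({
  filters: 32,
  kernelSize: 3,
  depthMultiplier: 2,
  inputShape: [64, 64, 3]
}));
// Default 'valid' padding: [null, 62, 62, 32].
console.log(JSON.stringify(model.outputShape));
```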
throw new ValueError(\n `Conv1D expects config.kernelSize to be number or number[] with ` +\n `length 1, but received ${JSON.stringify(config.kernelSize)}.`);\n }\n}\nserialization.registerClass(Conv1D);\n\nexport interface Cropping2DLayerConfig extends LayerConfig {\n /**\n * Dimension of the cropping along the width and the height.\n * - If integer: the same symmetric cropping\n * is applied to width and height.\n * - If list of 2 integers:\n * interpreted as two different\n * symmetric cropping values for height and width:\n * `[symmetric_height_crop, symmetric_width_crop]`.\n * - If a list of 2 list of 2 integers:\n * interpreted as\n * `[[top_crop, bottom_crop], [left_crop, right_crop]]`\n */\n cropping: number|[number, number]|[[number, number], [number, number]];\n\n /**\n * Format of the data, which determines the ordering of the dimensions in\n * the inputs.\n *\n * `channels_last` corresponds to inputs with shape\n * `(batch, ..., channels)`\n *\n * `channels_first` corresponds to inputs with shape `(batch, channels,\n * ...)`.\n *\n * Defaults to `channels_last`.\n */\n dataFormat?: DataFormat;\n}\n\n/**\n * Cropping layer for 2D input (e.g., image).\n *\n * This layer can crop an input\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, croppedRows, croppedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, croppedRows, croppedCols]`.\n *\n * Examples\n * ```js\n *\n * const model = tf.sequential();\n * model.add(tf.layers.cropping2D({cropping:[[2, 2], [2, 2]],\n * inputShape: [128, 128, 3]}));\n * //now output shape is [batch, 124, 124, 3]\n * ```\n */\nexport class Cropping2D extends Layer {\n static className = 'Cropping2D';\n protected readonly cropping: [[number, number], [number, number]];\n protected readonly dataFormat: DataFormat;\n\n constructor(config: Cropping2DLayerConfig) {\n super(config);\n if (typeof config.cropping === 'number')\n this.cropping = [\n [config.cropping, config.cropping], [config.cropping, config.cropping]\n ];\n else if (typeof config.cropping[0] === 'number')\n this.cropping = [\n [config.cropping[0] as number, config.cropping[0] as number],\n [config.cropping[1] as number, config.cropping[1] as number]\n ];\n else\n this.cropping = config.cropping as [[number, number], [number, number]];\n this.dataFormat =\n config.dataFormat === undefined ? 
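A usage sketch for `Conv1D` matching the `[10, 128]` input-shape example in the comment above (factory name `tf.layers.conv1d()` assumed):

```js
// Illustrative only: temporal convolution over 10 steps of 128-dim vectors.
const model = tf.sequential();
model.add(tf.layers.conv1d({
  filters: 4,
  kernelSize: 5,
  activation: 'relu',
  inputShape: [10, 128]
}));
// Default 'valid' padding: [null, 6, 4] (10 - 5 + 1 = 6 output steps).
console.log(JSON.stringify(model.outputShape));
```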
'channelsLast' : config.dataFormat;\n this.inputSpec = [{ndim: 4}];\n }\n\n computeOutputShape(inputShape: Shape): Shape {\n if (this.dataFormat === 'channelsFirst')\n return [\n inputShape[0],\n inputShape[1],\n inputShape[2] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[3] - this.cropping[1][0] - this.cropping[1][1]\n ];\n else\n return [\n inputShape[0],\n inputShape[1] - this.cropping[0][0] - this.cropping[0][1],\n inputShape[2] - this.cropping[1][0] - this.cropping[1][1],\n inputShape[3]\n ];\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n\n if (this.dataFormat === 'channelsLast') {\n const hSliced = K.sliceAlongAxis(\n inputs, this.cropping[0][0],\n inputs.shape[1] - this.cropping[0][0] - this.cropping[0][1], 2);\n return K.sliceAlongAxis(\n hSliced, this.cropping[1][0],\n inputs.shape[2] - this.cropping[1][1] - this.cropping[1][0], 3);\n } else {\n const hSliced = K.sliceAlongAxis(\n inputs, this.cropping[0][0],\n inputs.shape[2] - this.cropping[0][0] - this.cropping[0][1], 3);\n return K.sliceAlongAxis(\n hSliced, this.cropping[1][0],\n inputs.shape[3] - this.cropping[1][1] - this.cropping[1][0], 4);\n }\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {cropping: this.cropping, dataFormat: this.dataFormat};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Cropping2D);\n\nexport interface UpSampling2DLayerConfig extends LayerConfig {\n /**\n * The upsampling factors for rows and columns.\n *\n * Defaults to `[2, 2]`.\n */\n size?: number[];\n /**\n * Format of the data, which determines the ordering of the dimensions in\n * the inputs.\n *\n * `\"channelsLast\"` corresponds to inputs with shape\n * `[batch, ..., channels]`\n *\n * `\"channelsFirst\"` corresponds to inputs with shape `[batch, channels,\n * ...]`.\n *\n * Defaults to `\"channelsLast\"`.\n */\n dataFormat?: DataFormat;\n}\n\n/**\n * Upsampling layer for 2D inputs.\n *\n * Repeats the rows and columns of the data\n * by size[0] and size[1] respectively.\n *\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, rows, cols]`\n *\n * Output shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, upsampledRows, upsampledCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, upsampledRows, upsampledCols]`\n *\n */\nexport class UpSampling2D extends Layer {\n static className = 'UpSampling2D';\n protected readonly DEFAULT_SIZE = [2, 2];\n protected readonly size: number[];\n protected readonly dataFormat: DataFormat;\n\n constructor(config: UpSampling2DLayerConfig) {\n super(config);\n this.inputSpec = [{ndim: 4}];\n this.size = config.size == null ? this.DEFAULT_SIZE : config.size;\n this.dataFormat =\n config.dataFormat == null ? 'channelsLast' : config.dataFormat;\n }\n\n computeOutputShape(inputShape: Shape): Shape {\n if (this.dataFormat === 'channelsFirst') {\n const height =\n inputShape[2] == null ? null : this.size[0] * inputShape[2];\n const width = inputShape[3] == null ? null : this.size[1] * inputShape[3];\n return [inputShape[0], inputShape[1], height, width];\n } else {\n const height =\n inputShape[1] == null ? null : this.size[0] * inputShape[1];\n const width = inputShape[2] == null ? 
null : this.size[1] * inputShape[2];\n return [inputShape[0], height, width, inputShape[3]];\n }\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tfc.tidy(() => {\n let input = getExactlyOneTensor(inputs) as Tensor4D;\n const inputShape = input.shape;\n\n if (this.dataFormat === 'channelsFirst') {\n input = tfc.transpose(input, [0, 2, 3, 1]);\n const height = this.size[0] * inputShape[2];\n const width = this.size[1] * inputShape[3];\n const resized = input.resizeNearestNeighbor([height, width]);\n return tfc.transpose(resized, [0, 3, 1, 2]);\n } else {\n const height = this.size[0] * inputShape[1];\n const width = this.size[1] * inputShape[2];\n return input.resizeNearestNeighbor([height, width]);\n }\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {size: this.size, dataFormat: this.dataFormat};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(UpSampling2D);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * TensorFlow.js Layers: Depthwise Convolutional Layers\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, Tensor4D, tidy} from '@tensorflow/tfjs-core';\n\nimport {imageDataFormat} from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport {checkDataFormat, DataFormat} from '../common';\nimport {Constraint, ConstraintIdentifier, getConstraint, serializeConstraint} from '../constraints';\nimport {ValueError} from '../errors';\nimport {getInitializer, Initializer, InitializerIdentifier, serializeInitializer} from '../initializers';\nimport {getRegularizer, Regularizer, RegularizerIdentifier, serializeRegularizer} from '../regularizers';\nimport {Kwargs, Shape} from '../types';\nimport {convOutputLength} from '../utils/conv_utils';\nimport {getExactlyOneShape, getExactlyOneTensor} from '../utils/types_utils';\nimport {LayerVariable} from '../variables';\n\nimport {BaseConv, BaseConvLayerConfig, ConvLayerConfig, preprocessConv2DInput} from './convolutional';\n\n\n/**\n * 2D convolution with separable filters.\n * @param x Input tensor.\n * @param depthwiseKernel Convolution kernel for depthwise convolution.\n * @param strides Strides (Array of two integers).\n * @param padding Padding model.\n * @param dataFormat Data format.\n * @param dilationRate Array of two integers, dilation rates for the separable\n * convolution.\n * @returns Output tensor.\n * @throws ValueError If depthwiseKernel is not a 4D array.\n */\nexport function depthwiseConv2d(\n x: Tensor, depthwiseKernel: Tensor, strides: [number, number] = [1, 1],\n padding = 'valid', dataFormat?: DataFormat,\n dilationRate?: [number, number]): Tensor {\n return tidy(() => {\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n checkDataFormat(dataFormat);\n let y = preprocessConv2DInput(x, dataFormat);\n if (x.rank !== 4) {\n throw new ValueError(\n `Input for depthwiseConv2d is required to be 4-D, but is instead ` +\n `${x.rank}-D`);\n }\n if (depthwiseKernel.rank !== 4) {\n throw new ValueError(\n `depthwiseKernel is required to be 4-D, but is instead ` +\n `${depthwiseKernel.rank}-D`);\n }\n y = tfc.depthwiseConv2d(\n y as Tensor4D, depthwiseKernel as Tensor4D, strides,\n 
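A usage sketch for `UpSampling2D` (factory name `tf.layers.upSampling2d()` assumed). As the `call()` above shows, the layer resizes with nearest-neighbor interpolation, repeating rows and columns by `size[0]` and `size[1]`:

```js
// Illustrative only.
const model = tf.sequential();
model.add(tf.layers.upSampling2d({
  size: [2, 2],             // the default
  inputShape: [4, 4, 3]
}));
// Each spatial dimension is doubled: [null, 8, 8, 3].
console.log(JSON.stringify(model.outputShape));
```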
padding === 'same' ? 'same' : 'valid', 'NHWC', dilationRate);\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]);\n }\n return y;\n });\n}\n\nexport interface DepthwiseConv2DLayerConfig extends BaseConvLayerConfig {\n /**\n * An integer or Array of 2 integers, specifying the width and height of the\n * 2D convolution window. Can be a single integer to specify the same value\n * for all spatial dimensions.\n */\n kernelSize: number|[number, number];\n\n /**\n * The number of depthwise convolution output channels for each input\n * channel.\n * The total number of depthwise convolution output channels will be equal to\n * `filtersIn * depthMultiplier`.\n * Default: 1.\n */\n depthMultiplier?: number;\n\n /**\n * Initializer for the depthwise kernel matrix.\n * Default: GlorotNormal.\n */\n depthwiseInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Constraint for the depthwise kernel matrix.\n */\n depthwiseConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Regulzarizer function for the depthwise kernel matrix.\n */\n depthwiseRegularizer?: RegularizerIdentifier|Regularizer;\n}\n\n/**\n * Depthwise separable 2D convolution.\n *\n * Depthwise Separable convolutions consists in performing just the first step\n * in a depthwise spatial convolution (which acts on each input channel\n * separately). The `depthMultplier` argument controls how many output channels\n * are generated per input channel in the depthwise step.\n */\nexport class DepthwiseConv2D extends BaseConv {\n static className = 'DepthwiseConv2D';\n private readonly depthMultiplier: number;\n private readonly depthwiseInitializer: Initializer;\n private readonly depthwiseConstraint: Constraint;\n private readonly depthwiseRegularizer: Regularizer;\n\n private depthwiseKernel: LayerVariable = null;\n\n constructor(config: DepthwiseConv2DLayerConfig) {\n super(2, config as ConvLayerConfig);\n this.depthMultiplier =\n config.depthMultiplier == null ? 1 : config.depthMultiplier;\n this.depthwiseInitializer = getInitializer(\n config.depthwiseInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.depthwiseConstraint = getConstraint(config.depthwiseConstraint);\n this.depthwiseRegularizer = getRegularizer(config.depthwiseRegularizer);\n }\n\n build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 4) {\n throw new ValueError(\n `Inputs to DepthwiseConv2D should have rank 4. ` +\n `Received input shape: ${JSON.stringify(inputShape)}.`);\n }\n const channelAxis = this.dataFormat === 'channelsFirst' ? 
1 : 3;\n if (inputShape[channelAxis] == null || inputShape[channelAxis] < 0) {\n throw new ValueError(\n 'The channel dimension of the inputs to DepthwiseConv2D should ' +\n `be defined, but is not (${inputShape[channelAxis]}).`);\n }\n const inputDim = inputShape[channelAxis];\n const depthwiseKernelShape: Shape = [\n this.kernelSize[0], this.kernelSize[1], inputDim, this.depthMultiplier\n ];\n\n this.depthwiseKernel = this.addWeight(\n 'depthwise_kernel', depthwiseKernelShape, null,\n this.depthwiseInitializer, this.depthwiseRegularizer, true,\n this.depthwiseConstraint);\n if (this.useBias) {\n this.bias = this.addWeight(\n 'bias', [inputDim * this.depthMultiplier], null, this.biasInitializer,\n this.biasRegularizer, true, this.biasConstraint);\n } else {\n this.bias = null;\n }\n this.built = true;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n let outputs = depthwiseConv2d(\n inputs, this.depthwiseKernel.read(), this.strides as [number, number],\n this.padding, this.dataFormat, null);\n // TODO(cais): Add support for dilation.\n if (this.useBias) {\n outputs = K.biasAdd(outputs, this.bias.read(), this.dataFormat);\n }\n if (this.activation != null) {\n outputs = this.activation.apply(outputs);\n }\n return outputs;\n });\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n const rows =\n this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n const cols =\n this.dataFormat === 'channelsFirst' ? inputShape[3] : inputShape[2];\n const outFilters = this.dataFormat === 'channelsFirst' ?\n inputShape[1] * this.depthMultiplier :\n inputShape[3] * this.depthMultiplier;\n const outRows = convOutputLength(\n rows, this.kernelSize[0], this.padding, this.strides[0]);\n const outCols = convOutputLength(\n cols, this.kernelSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], outFilters, outRows, outCols];\n } else {\n // In this case, assume 'channelsLast'.\n return [inputShape[0], outRows, outCols, outFilters];\n }\n }\n\n getConfig(): serialization.ConfigDict {\n const config = super.getConfig();\n config['depthMultiplier'] = this.depthMultiplier;\n config['depthwiseInitializer'] =\n serializeInitializer(this.depthwiseInitializer);\n config['depthwiseRegularizer'] =\n serializeRegularizer(this.depthwiseRegularizer);\n config['depthwiseConstraint'] =\n serializeConstraint(this.depthwiseRegularizer);\n return config;\n }\n}\nserialization.registerClass(DepthwiseConv2D);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * TensorFlow.js Layers: Basic Layers.\n */\n\nimport {Scalar, serialization, Tensor, tidy, transpose, util} from '@tensorflow/tfjs-core';\n\nimport {Activation as ActivationFn, ActivationIdentifier, getActivation, serializeActivation} from '../activations';\nimport {getScalar} from '../backend/state';\nimport * as K from '../backend/tfjs_backend';\nimport {Constraint, ConstraintIdentifier, getConstraint, serializeConstraint} from '../constraints';\nimport {InputSpec, Layer, LayerConfig} from '../engine/topology';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {getInitializer, 
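A usage sketch for `DepthwiseConv2D` (factory name `tf.layers.depthwiseConv2d()` assumed). Each input channel gets `depthMultiplier` filters of its own, so the channel count is multiplied rather than set directly:

```js
// Illustrative only: 3 input channels x depthMultiplier 2 = 6 output channels.
const model = tf.sequential();
model.add(tf.layers.depthwiseConv2d({
  kernelSize: 3,
  depthMultiplier: 2,
  inputShape: [28, 28, 3]
}));
// Default 'valid' padding: [null, 26, 26, 6].
console.log(JSON.stringify(model.outputShape));
```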
Initializer, InitializerIdentifier, serializeInitializer} from '../initializers';\nimport {getRegularizer, Regularizer, RegularizerIdentifier, serializeRegularizer} from '../regularizers';\nimport {Kwargs, Shape} from '../types';\nimport {arrayProd, range} from '../utils/math_utils';\nimport {getExactlyOneShape, getExactlyOneTensor} from '../utils/types_utils';\nimport {LayerVariable} from '../variables';\n\n\nexport interface DropoutLayerConfig extends LayerConfig {\n /** Float between 0 and 1. Fraction of the input units to drop. */\n rate: number;\n\n /**\n * Integer array representing the shape of the binary dropout mask that will\n * be multiplied with the input.\n *\n * For instance, if your inputs have shape `(batchSize, timesteps, features)`\n * and you want the dropout mask to be the same for all timesteps, you can use\n * `noise_shape=(batch_size, 1, features)`.\n */\n noiseShape?: number[];\n\n /** An integer to use as random seed. */\n seed?: number;\n}\n\n/**\n * Applies\n * [dropout](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf) to\n * the input.\n *\n * Dropout consists in randomly setting a fraction `rate` of input units to 0 at\n * each update during training time, which helps prevent overfitting.\n */\nexport class Dropout extends Layer {\n static className = 'Dropout';\n private readonly rate: number;\n private readonly rateScalar: Scalar;\n private readonly noiseShape: number[];\n private readonly seed: number;\n\n constructor(config: DropoutLayerConfig) {\n super(config);\n this.rate = Math.max(Math.min(config.rate, 1), 0);\n this.rateScalar = getScalar(this.rate);\n // So that the scalar doesn't get tidied up between executions.\n this.noiseShape = config.noiseShape;\n this.seed = config.seed;\n if (this.seed != null) {\n throw new NotImplementedError(\n 'Non-default seed is not implemented in Dropout layer yet: ' +\n this.seed);\n }\n this.supportsMasking = true;\n }\n\n private getNoiseShape(input: Tensor): Shape {\n if (this.noiseShape == null) {\n return this.noiseShape;\n }\n const inputShape = input.shape;\n const noiseShape: Shape = [];\n for (let i = 0; i < this.noiseShape.length; ++i) {\n noiseShape.push(\n this.noiseShape[i] == null ? inputShape[i] : this.noiseShape[i]);\n }\n return noiseShape;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n if (this.noiseShape != null &&\n !util.arraysEqual(input.shape, this.noiseShape)) {\n throw new NotImplementedError(\n 'Non-default noise shape is not implemented in Dropout ' +\n 'layer yet: ' + JSON.stringify(this.noiseShape));\n }\n if (0 < this.rate && this.rate < 1) {\n const training =\n kwargs['training'] == null ? false : kwargs['training'];\n const noiseShape = this.getNoiseShape(input);\n const output =\n K.inTrainPhase(\n () => K.dropout(input, this.rateScalar, noiseShape, this.seed),\n () => input, training) as Tensor;\n return output;\n }\n return inputs;\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {\n rate: this.rate,\n noiseShape: this.noiseShape,\n seed: this.seed,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Dropout);\n\nexport interface DenseLayerConfig extends LayerConfig {\n /** Positive integer, dimensionality of the output space. 
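A usage sketch for `Dropout` (factory name `tf.layers.dropout()` assumed). Per the `call()` above, the mask is only applied when `training` is true (e.g. during `fit()`); at inference time the layer is a pass-through:

```js
// Illustrative only: drop 25% of the hidden units during training.
const model = tf.sequential();
model.add(tf.layers.dense({units: 32, activation: 'relu', inputShape: [20]}));
model.add(tf.layers.dropout({rate: 0.25}));
model.add(tf.layers.dense({units: 1}));
```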
*/\n units: number;\n /**\n * Activation function to use.\n *\n * If unspecified, no activation is applied.\n */\n activation?: ActivationIdentifier;\n /** Whether to apply a bias. */\n useBias?: boolean;\n /**\n * Initializer for the dense kernel weights matrix.\n */\n kernelInitializer?: InitializerIdentifier|Initializer;\n /**\n * Initializer for the bias vector.\n */\n biasInitializer?: InitializerIdentifier|Initializer;\n /**\n * If specified, defines inputShape as `[inputDim]`.\n */\n inputDim?: number;\n\n /**\n * Constraint for the kernel weights.\n */\n kernelConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Constraint for the bias vector.\n */\n biasConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Regularizer function applied to the dense kernel weights matrix.\n */\n kernelRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the bias vector.\n */\n biasRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the activation.\n */\n activityRegularizer?: RegularizerIdentifier|Regularizer;\n}\n\n/**\n * Creates a dense (fully connected) layer.\n *\n * This layer implements the operation:\n * `output = activation(dot(input, kernel) + bias)`\n *\n * `activation` is the element-wise activation function\n * passed as the `activation` argument.\n *\n * `kernel` is a weights matrix created by the layer.\n *\n * `bias` is a bias vector created by the layer (only applicable if `useBias`\n * is `true`).\n *\n * **Input shape:**\n *\n * nD `tf.Tensor` with shape: `(batchSize, ..., inputDim)`.\n *\n * The most common situation would be\n * a 2D input with shape `(batchSize, inputDim)`.\n *\n * **Output shape:**\n *\n * nD tensor with shape: `(batchSize, ..., units)`.\n *\n * For instance, for a 2D input with shape `(batchSize, inputDim)`,\n * the output would have shape `(batchSize, units)`.\n *\n * Note: if the input to the layer has a rank greater than 2, then it is\n * flattened prior to the initial dot product with the kernel.\n */\nexport class Dense extends Layer {\n static className = 'Dense';\n private units: number;\n // Default activation: Linear (none).\n private activation: ActivationFn = null;\n private useBias = true;\n private kernelInitializer: Initializer;\n private biasInitializer: Initializer;\n private kernel: LayerVariable = null;\n private bias: LayerVariable = null;\n\n readonly DEFAULT_KERNEL_INITIALIZER: InitializerIdentifier = 'glorotNormal';\n readonly DEFAULT_BIAS_INITIALIZER: InitializerIdentifier = 'zeros';\n private readonly kernelConstraint?: Constraint;\n private readonly biasConstraint?: Constraint;\n private readonly kernelRegularizer?: Regularizer;\n private readonly biasRegularizer?: Regularizer;\n\n constructor(config: DenseLayerConfig) {\n super(config);\n if (config.batchInputShape == null && config.inputShape == null &&\n config.inputDim != null) {\n // This logic is copied from Layer's constructor, since we can't\n // do exactly what the Python constructor does for Dense().\n let batchSize: number = null;\n if (config.batchSize != null) {\n batchSize = config.batchSize;\n }\n this.batchInputShape = [batchSize, config.inputDim];\n }\n\n this.units = config.units;\n this.activation = getActivation(config.activation);\n if (config.useBias != null) {\n this.useBias = config.useBias;\n }\n this.kernelInitializer = getInitializer(\n config.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.biasInitializer =\n getInitializer(config.biasInitializer || 
this.DEFAULT_BIAS_INITIALIZER);\n this.kernelConstraint = getConstraint(config.kernelConstraint);\n this.biasConstraint = getConstraint(config.biasConstraint);\n this.kernelRegularizer = getRegularizer(config.kernelRegularizer);\n this.biasRegularizer = getRegularizer(config.biasRegularizer);\n this.activityRegularizer = getRegularizer(config.activityRegularizer);\n\n this.inputSpec = [{minNDim: 2}];\n }\n\n public build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n const inputLastDim = inputShape[inputShape.length - 1];\n if (this.kernel == null) {\n this.kernel = this.addWeight(\n 'kernel', [inputLastDim, this.units], null, this.kernelInitializer,\n this.kernelRegularizer, true, this.kernelConstraint);\n if (this.useBias) {\n this.bias = this.addWeight(\n 'bias', [this.units], null, this.biasInitializer,\n this.biasRegularizer, true, this.biasConstraint);\n }\n }\n\n this.inputSpec = [{minNDim: 2, axes: {[-1]: inputLastDim}}];\n this.built = true;\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n outputShape[outputShape.length - 1] = this.units;\n return outputShape;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Dense layer accepts only a single input.\n const input = getExactlyOneTensor(inputs);\n let output = K.dot(input, this.kernel.read());\n if (this.bias != null) {\n output = K.biasAdd(output, this.bias.read());\n }\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n return output;\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Dense);\n\n/**\n * Flattens the input. Does not affect the batch size.\n *\n * A `Flatten` layer flattens each batch in its inputs to 1D (making the output\n * 2D).\n *\n * For example:\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const flattenLayer = tf.layers.flatten();\n * // Inspect the inferred output shape of the flatten layer, which\n * // equals `[null, 12]`. The 2nd dimension is 4 * 3, i.e., the result of the\n * // flattening. (The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(flattenLayer.apply(input).shape));\n * ```\n */\nexport class Flatten extends Layer {\n static className = 'Flatten';\n constructor(config?: LayerConfig) {\n super(config || {});\n this.inputSpec = [{minNDim: 3}];\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n for (const dim of inputShape.slice(1)) {\n if (dim == null) {\n throw new ValueError(\n `The shape of the input to \"Flatten\" is not fully defined ` +\n `(got ${inputShape.slice(1)}). 
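A usage sketch for `Dense` (factory name `tf.layers.dense()` assumed), computing `activation(dot(input, kernel) + bias)`:

```js
// Illustrative only: kernel shape [20, 10], bias shape [10].
const model = tf.sequential();
model.add(tf.layers.dense({
  units: 10,
  activation: 'softmax',
  inputShape: [20]
}));
// A [batchSize, 20] input yields a [batchSize, 10] output.
model.predict(tf.randomNormal([3, 20])).print();
```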
Make sure to pass a complete ` +\n `\"input_shape\" or \"batch_input_shape\" argument to the first ` +\n `layer in your model.`);\n }\n }\n return [inputShape[0], arrayProd(inputShape, 1)];\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return K.batchFlatten(getExactlyOneTensor(inputs));\n });\n }\n}\nserialization.registerClass(Flatten);\n\nexport interface ActivationLayerConfig extends LayerConfig {\n /**\n * Name of the activation function to use.\n */\n activation: ActivationIdentifier;\n}\n\n/**\n * Applies an activation function to an output.\n *\n * This layer applies element-wise activation function. Other layers, notably\n * `dense` can also apply activation functions. Use this isolated activation\n * function to extract the values before and after the\n * activation. For instance:\n *\n * ```js\n * const input = tf.input({shape: [5]});\n * const denseLayer = tf.layers.dense({units: 1});\n * const activationLayer = tf.layers.activation({activation: 'relu6'});\n *\n * // Obtain the output symbolic tensors by applying the layers in order.\n * const denseOutput = denseLayer.apply(input);\n * const activationOutput = activationLayer.apply(denseOutput);\n *\n * // Create the model based on the inputs.\n * const model = tf.model({\n * inputs: input,\n * outputs: [denseOutput, activationOutput]\n * });\n *\n * // Collect both outputs and print separately.\n * const [denseOut, activationOut] = model.predict(tf.randomNormal([6, 5]));\n * denseOut.print();\n * activationOut.print();\n * ```\n *\n */\nexport class Activation extends Layer {\n static className = 'Activation';\n activation: ActivationFn;\n\n constructor(config: ActivationLayerConfig) {\n super(config);\n this.supportsMasking = true;\n this.activation = getActivation(config.activation);\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n return this.activation.apply(input);\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {activation: serializeActivation(this.activation)};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Activation);\n\nexport interface ReshapeLayerConfig extends LayerConfig {\n /** The target shape. Does not include the batch axis. 
*/\n targetShape: Shape;\n}\n\nexport interface RepeatVectorLayerConfig extends LayerConfig {\n /**\n * The integer number of times to repeat the input.\n */\n n: number;\n}\n\n/**\n * Repeats the input n times in a new dimension.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.repeatVector({n: 4, inputShape: [2]}));\n * const x = tf.tensor2d([[10, 20]]);\n * // Use the model to do inference on a data point the model hasn't see\n * model.predict(x).print();\n * // output shape is now [batch, 2, 4]\n * ```\n */\nexport class RepeatVector extends Layer {\n static className = 'RepeatVector';\n readonly n: number;\n\n constructor(config: RepeatVectorLayerConfig) {\n super(config);\n this.n = config.n;\n this.inputSpec = [{ndim: 2}];\n }\n\n computeOutputShape(inputShape: Shape): Shape {\n return [inputShape[0], this.n, inputShape[1]];\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = getExactlyOneTensor(inputs);\n return K.repeat(inputs, this.n);\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {\n n: this.n,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(RepeatVector);\n\n/**\n * Reshapes an input to a certain shape.\n *\n * ```js\n * const input = tf.input({shape: [4, 3]});\n * const reshapeLayer = tf.layers.reshape({targetShape: [2, 6]});\n * // Inspect the inferred output shape of the Reshape layer, which\n * // equals `[null, 2, 6]`. (The 1st dimension is the undermined batch size.)\n * console.log(JSON.stringify(reshapeLayer.apply(input).shape));\n * ```\n *\n * Input shape:\n * Arbitrary: although all dimensions in the input shape must be fixed.\n * Use the ReshapeLayerConfig field `input_shape` when using this layer\n * as the first layer in a model.\n *\n * Output shape:\n * [batchSize, targetShape[0], targetShape[1], ...,\n * targetShape[targetShape.length - 1]].\n */\nexport class Reshape extends Layer {\n static className = 'Reshape';\n private targetShape: Shape;\n\n constructor(config: ReshapeLayerConfig) {\n super(config);\n this.targetShape = config.targetShape;\n\n // Make sure that all unknown dimensions are represented as `null`.\n for (let i = 0; i < this.targetShape.length; ++i) {\n if (this.isUnknown(this.targetShape[i])) {\n this.targetShape[i] = null;\n }\n }\n }\n\n private isUnknown(dim: number): boolean {\n return dim < 0 || dim == null;\n }\n\n /**\n * Finds and replaces a missing dimension in output shape.\n *\n * This is a near direct port of the internal Numpy function\n * `_fix_unknown_dimension` in `numpy/core/src/multiarray/shape.c`.\n *\n * @param inputShape: Original shape of array begin reshape.\n * @param outputShape: Target shape of the array, with at most a single\n * `null` or negative number, which indicates an underdetermined dimension\n * that should be derived from `inputShape` and the known dimensions of\n * `outputShape`.\n * @returns: The output shape with `null` replaced with its computed value.\n * @throws: ValueError: If `inputShape` and `outputShape` do not match.\n */\n private fixUnknownDimension(inputShape: Shape, outputShape: Shape): Shape {\n const errorMsg = 'Total size of new array must be unchanged.';\n const finalShape = outputShape.slice();\n let known = 1;\n let unknown = null;\n for (let i = 0; i < finalShape.length; ++i) {\n const dim = finalShape[i];\n if (this.isUnknown(dim)) {\n if (unknown === null) {\n unknown = i;\n } else {\n throw new 
ValueError('Can only specifiy one unknown dimension.');\n }\n } else {\n known *= dim;\n }\n }\n\n const originalSize = arrayProd(inputShape);\n if (unknown !== null) {\n if (known === 0 || originalSize % known !== 0) {\n throw new ValueError(errorMsg);\n }\n finalShape[unknown] = originalSize / known;\n } else if (originalSize !== known) {\n throw new ValueError(errorMsg);\n }\n\n return finalShape;\n }\n\n computeOutputShape(inputShape: Shape): Shape {\n let anyUnknownDims = false;\n for (let i = 0; i < inputShape.length; ++i) {\n if (this.isUnknown(inputShape[i])) {\n anyUnknownDims = true;\n break;\n }\n }\n\n if (anyUnknownDims) {\n return inputShape.slice(0, 1).concat(this.targetShape);\n } else {\n return inputShape.slice(0, 1).concat(\n this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n }\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const outputShape = inputShape.slice(0, 1).concat(\n this.fixUnknownDimension(inputShape.slice(1), this.targetShape));\n return input.reshape(outputShape);\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {\n targetShape: this.targetShape,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Reshape);\n\nexport interface PermuteLayerConfig extends LayerConfig {\n /**\n * Array of integers. Permutation pattern. Does not include the\n * sample (batch) dimension. Index starts at 1.\n * For instance, `[2, 1]` permutes the first and second dimensions\n * of the input.\n */\n dims: number[];\n}\n\n/**\n * Permutes the dimensions of the input according to a given pattern.\n *\n * Useful for, e.g., connecting RNNs and convnets together.\n *\n * Example:\n *\n * ```js\n * const model = tf.Sequential();\n * model.add(tf.layers.permute({\n * dims: [2, 1],\n * inputShape: [10, 64]\n * }));\n * console.log(model.outputShape);\n * // Now model's output shape is [null, 64, 10], where null is the\n * // unpermuted sample (batch) dimension.\n * ```\n *\n * Input shape:\n * Arbitrary. 
Use the configuration field `inputShape` when using this\n * layer as othe first layer in a model.\n *\n * Output shape:\n * Same rank as the input shape, but with the dimensions re-ordered (i.e.,\n * permuted) according to the `dims` configuration of this layer.\n */\nexport class Permute extends Layer {\n static className = 'Permute';\n readonly dims: number[];\n private readonly dimsIncludingBatch: number[];\n\n constructor(config: PermuteLayerConfig) {\n super(config);\n if (config.dims == null) {\n throw new Error(\n 'Required configuration field `dims` is missing during Permute ' +\n 'constructor call.');\n }\n if (!Array.isArray(config.dims)) {\n throw new Error(\n 'Permute constructor requires `dims` to be an Array, but received ' +\n `${config.dims} instead.`);\n }\n\n // Check the validity of the permutation indices.\n const expectedSortedIndices = range(1, config.dims.length + 1);\n if (!util.arraysEqual(config.dims.slice().sort(), expectedSortedIndices)) {\n throw new Error(\n 'Invalid permutation `dims`: ' + JSON.stringify(config.dims) +\n ' `dims` must contain consecutive integers starting from 1.');\n }\n\n this.dims = config.dims;\n this.dimsIncludingBatch = [0].concat(this.dims);\n this.inputSpec = [new InputSpec({ndim: this.dims.length + 1})];\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n const outputShape = inputShape.slice();\n this.dims.forEach((dim: number, i: number) => {\n outputShape[i + 1] = (inputShape as Shape)[dim];\n });\n return outputShape;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return transpose(getExactlyOneTensor(inputs), this.dimsIncludingBatch);\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {\n dims: this.dims,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Permute);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * TensorFlow.js Layers: Embedding Layer.\n *\n * Original source: keras/constraints.py\n */\nimport {serialization, Tensor, tidy} from '@tensorflow/tfjs-core';\n\nimport * as K from '../backend/tfjs_backend';\nimport {Constraint, ConstraintIdentifier, getConstraint, serializeConstraint} from '../constraints';\nimport {Layer, LayerConfig} from '../engine/topology';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {getInitializer, Initializer, InitializerIdentifier, serializeInitializer} from '../initializers';\nimport {getRegularizer, Regularizer, RegularizerIdentifier, serializeRegularizer} from '../regularizers';\nimport {Kwargs, Shape} from '../types';\nimport * as generic_utils from '../utils/generic_utils';\nimport {getExactlyOneShape, getExactlyOneTensor} from '../utils/types_utils';\nimport {LayerVariable} from '../variables';\n\n\nexport interface EmbeddingLayerConfig extends LayerConfig {\n /**\n * Integer > 0. Size of the vocabulary, i.e. maximum integer index + 1.\n */\n inputDim: number;\n /**\n * Integer >= 0. 
Dimension of the dense embedding.\n */\n outputDim: number;\n /**\n * Initializer for the `embeddings` matrix.\n */\n embeddingsInitializer?: InitializerIdentifier|Initializer;\n /**\n * Regularizer function applied to the `embeddings` matrix.\n */\n embeddingsRegularizer?: RegularizerIdentifier|Regularizer;\n /**\n * Regularizer function applied to the activation.\n */\n activityRegularizer?: RegularizerIdentifier|Regularizer;\n /**\n * Constraint function applied to the `embeddings` matrix.\n */\n embeddingsConstraint?: ConstraintIdentifier|Constraint;\n /**\n * Whether the input value 0 is a special \"padding\" value that should be\n * masked out. This is useful when using recurrent layers which may take\n * variable length input.\n *\n * If this is `True` then all subsequent layers in the model need to support\n * masking or an exception will be raised. If maskZero is set to `True`, as a\n * consequence, index 0 cannot be used in the vocabulary (inputDim should\n * equal size of vocabulary + 1).\n */\n maskZero?: boolean;\n /**\n * Length of input sequences, when it is constant.\n *\n * This argument is required if you are going to connect `flatten` then\n * `dense` layers upstream (without it, the shape of the dense outputs cannot\n * be computed).\n */\n inputLength?: number|number[];\n}\n\n/**\n * Maps positive integers (indices) into dense vectors of fixed size.\n * eg. [[4], [20]] -> [[0.25, 0.1], [0.6, -0.2]]\n *\n * **Input shape:** 2D tensor with shape: `[batchSize, sequenceLength]`.\n *\n * **Output shape:** 3D tensor with shape: `[batchSize, sequenceLength,\n * outputDim]`.\n */\nexport class Embedding extends Layer {\n static className = 'Embedding';\n private inputDim: number;\n private outputDim: number;\n private embeddingsInitializer: Initializer;\n private maskZero: boolean;\n private inputLength: number|number[];\n\n private embeddings: LayerVariable = null;\n\n readonly DEFAULT_EMBEDDINGS_INITIALIZER: InitializerIdentifier =\n 'randomUniform';\n private readonly embeddingsRegularizer?: Regularizer;\n private readonly embeddingsConstraint?: Constraint;\n\n constructor(config: EmbeddingLayerConfig) {\n super(config);\n if (config.batchInputShape == null && config.inputShape == null) {\n // Porting Note: This logic is copied from Layer's constructor, since we\n // can't do exactly what the Python constructor does for Embedding().\n // Specifically, the super constructor can not be called after the\n // mutation of the `config` argument.\n let batchSize: number = null;\n if (config.batchSize != null) {\n batchSize = config.batchSize;\n }\n if (config.inputLength == null) {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (None, )\n this.batchInputShape = [batchSize, null];\n } else {\n // Fix super-constructor to what it would have done if\n // 'config.inputShape' were (config.inputLength, )\n this.batchInputShape =\n [batchSize].concat(generic_utils.toList(config.inputLength));\n }\n }\n this.inputDim = config.inputDim;\n this.outputDim = config.outputDim;\n this.embeddingsInitializer = getInitializer(\n config.embeddingsInitializer || this.DEFAULT_EMBEDDINGS_INITIALIZER);\n this.embeddingsRegularizer = getRegularizer(config.embeddingsRegularizer);\n this.activityRegularizer = getRegularizer(config.activityRegularizer);\n this.embeddingsConstraint = getConstraint(config.embeddingsConstraint);\n this.maskZero = config.maskZero;\n this.inputLength = config.inputLength;\n }\n\n public build(inputShape: Shape|Shape[]): void {\n 
this.embeddings = this.addWeight(\n 'embeddings', [this.inputDim, this.outputDim], this.dtype,\n this.embeddingsInitializer, this.embeddingsRegularizer, true,\n this.embeddingsConstraint);\n this.built = true;\n }\n\n // Override warnOnIncompatibleInputShape because an embedding layer allows\n // the input to have varying ranks.\n protected warnOnIncompatibleInputShape(inputShape: Shape) {}\n\n computeMask(inputs: Tensor|Tensor[], mask?: Tensor|Tensor[]): Tensor {\n throw new NotImplementedError(\n 'computeMask has not been implemented for Embedding yet');\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n if (this.inputLength == null) {\n return [...inputShape, this.outputDim];\n }\n // inputLength can be an array if input is 3D or higher.\n const inLens: number[] = generic_utils.toList(this.inputLength);\n if (inLens.length !== inputShape.length - 1) {\n throw new ValueError(\n `\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n } else {\n let i = 0;\n for (let k = 0; k < inLens.length; ++k) {\n const s1 = inLens[k];\n const s2 = inputShape[k + 1];\n if ((s1 != null) && (s2 != null) && (s1 !== s2)) {\n throw new ValueError(\n `\"inputLength\" is ${this.inputLength}, but received ` +\n `input shape has shape ${inputShape}`);\n } else if (s1 == null) {\n inLens[i] = s2;\n }\n i++;\n }\n }\n return [inputShape[0], ...inLens, this.outputDim];\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Embedding layer accepts only a single input.\n let input = getExactlyOneTensor(inputs);\n if (input.dtype !== 'int32') {\n input = K.cast(input, 'int32');\n }\n const output = K.gather(this.embeddings.read(), input.as1D());\n return output.reshape(\n getExactlyOneShape(this.computeOutputShape(input.shape)));\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {\n inputDim: this.inputDim,\n outputDim: this.outputDim,\n embeddingsInitializer: serializeInitializer(this.embeddingsInitializer),\n embeddingsRegularizer: serializeRegularizer(this.embeddingsRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n embeddingsConstraint: serializeConstraint(this.embeddingsConstraint),\n maskZero: this.maskZero,\n inputLength: this.inputLength\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Embedding);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * TensorFlow.js Layers: Merge Layers.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, tidy, util} from '@tensorflow/tfjs-core';\n\nimport {getScalar} from '../backend/state';\nimport * as K from '../backend/tfjs_backend';\nimport {Layer, LayerConfig, SymbolicTensor} from '../engine/topology';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {l2Normalize} from '../losses';\nimport {Kwargs, Shape} from '../types';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as mathUtils from '../utils/math_utils';\nimport {getExactlyOneShape} from '../utils/types_utils';\n\n/**\n * Generic Merge layer for element-wise 
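A usage sketch for `Embedding` (factory name `tf.layers.embedding()` assumed), mapping integer indices to dense vectors; inputs are cast to `int32` as in the `call()` above:

```js
// Illustrative only: vocabulary of 1000 indices -> 16-dim embeddings.
const model = tf.sequential();
model.add(tf.layers.embedding({
  inputDim: 1000,     // max integer index + 1
  outputDim: 16,
  inputLength: 10     // fixed sequence length
}));
const indices = tf.tensor2d([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]], [1, 10], 'int32');
// [1, 10] indices -> [1, 10, 16] embeddings.
console.log(JSON.stringify(model.predict(indices).shape));
```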
merge functions.\n *\n * Used to implement `Sum`, `Average`, `Concatenate`, etc.\n */\nexport abstract class Merge extends Layer {\n protected reshapeRequired: boolean;\n\n constructor(config?: LayerConfig) {\n super(config || {});\n this.supportsMasking = true;\n }\n\n /**\n * Logic for merging multiple tensors, to be overridden by subclasses.\n * @param inputs\n */\n protected mergeFunction(inputs: Tensor[]): Tensor {\n throw new NotImplementedError();\n }\n\n /**\n * Computes the shape of the result of an elementwise operation.\n *\n * @param shape1: Shape of the first tensor.\n * @param shape2: Shape of the second tensor.\n * @returns Expected output shape when an elementwise operation is carried\n * out on 2 tensors with shapes `shape1` and `shape2`.\n * @throws ValueError: If `shape1` and `shape2` are not compatible for\n * element-wise operations.\n */\n private computeElementwiseOpOutputShape(shape1: Shape, shape2: Shape): Shape {\n if (shape1 == null || shape2 == null) {\n return null;\n } else if (shape1.length < shape2.length) {\n return this.computeElementwiseOpOutputShape(shape2, shape1);\n } else if (shape2.length === 0) {\n return shape1;\n }\n const outputShape: Shape = shape1.slice(0, shape1.length - shape2.length);\n for (let k = 0; k < shape2.length; ++k) {\n const i = shape1[shape1.length - shape2.length + k];\n const j = shape2[k];\n if (i == null || j == null || i < 0 || j < 0) {\n outputShape.push(null);\n } else if (i === 1) {\n outputShape.push(j);\n } else if (j === 1) {\n outputShape.push(i);\n } else {\n if (i !== j) {\n throw new ValueError(\n 'Operands could not be broadcast together with shapes ' +\n JSON.stringify(shape1) + ' ' + JSON.stringify(shape2));\n }\n outputShape.push(i);\n }\n }\n return outputShape;\n }\n\n build(inputShape: Shape|Shape[]): void {\n // Used purely for shape validation.\n if (Array.isArray(inputShape) && !Array.isArray(inputShape[0])) {\n // Make sure that inputShape is an Array of shape.\n inputShape = [getExactlyOneShape(inputShape)];\n }\n inputShape = inputShape as Shape[];\n if (inputShape.length < 2) {\n throw new ValueError(\n 'A merge layer should be called on an Array of at least 2 inputs.' +\n ` Got ${inputShape.length} input(s).`);\n }\n\n // Make sure that there is at most one unique batch size among the input\n // shapes.\n let batchSizes: number[] = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length > 1) {\n throw new ValueError(\n `Can not merge tensors with different batch sizes. ` +\n `Got tensors with shapes: ${JSON.stringify(inputShape)}.`);\n }\n\n let outputShape: Shape =\n inputShape[0] == null ? null : inputShape[0].slice(1);\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? 
null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n // If the inputs have different ranks, we have to reshape them to make them\n // broadcastable.\n const allRanks = inputShape.map(shape => shape.length);\n if (inputShape.indexOf(null) === -1 &&\n generic_utils.unique(allRanks).length === 1) {\n this.reshapeRequired = false;\n } else {\n this.reshapeRequired = true;\n }\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = inputs as Tensor[];\n if (this.reshapeRequired) {\n const reshapedInputs: Tensor[] = [];\n const inputDims = inputs.map(input => input.rank);\n if (inputDims.indexOf(null) === -1) {\n // If ranks of all inputs are available, we simply expand each of them\n // at axis=1 until all of them have the same rank.\n const maxNDim = mathUtils.max(inputDims);\n for (let x of inputs) {\n const xNDim = x.rank;\n for (let k = 0; k < maxNDim - xNDim; ++k) {\n x = K.expandDims(x, 1);\n }\n reshapedInputs.push(x);\n }\n return this.mergeFunction(reshapedInputs);\n } else {\n // Transpose all inputs so that batch size is the last dimension.\n // [batchSize, dim1, dim2, ...] -> [dim1, dim2, ..., batchSize]\n let transposed = false;\n for (const x of inputs) {\n const xNDim = x.rank;\n if (xNDim == null) {\n const xShape = x.shape;\n const batchSize = xShape[0];\n const newShape = xShape.slice(1).concat([batchSize]);\n let xTransposed = x.reshape(\n [batchSize].concat(mathUtils.arrayProd(xShape.slice(1))));\n xTransposed = tfc.transpose(xTransposed, [1, 0]);\n xTransposed = xTransposed.reshape(newShape);\n reshapedInputs.push(xTransposed);\n transposed = true;\n } else if (xNDim > 1) {\n const dims = mathUtils.range(1, xNDim).concat([0]);\n reshapedInputs.push(tfc.transpose(x, dims));\n transposed = true;\n } else {\n // We don't transpose inputs if they are 1D vectors or scalars.\n reshapedInputs.push(x);\n }\n }\n let y = this.mergeFunction(reshapedInputs);\n const yNDim = y.rank;\n if (transposed) {\n // If inputs have been transposed, we have to transpose the output\n // too.\n if (yNDim == null) {\n const yShape = y.shape;\n const yNDim = yShape.length;\n const batchSize = yShape[yNDim - 1];\n const newShape =\n [batchSize].concat(yShape.slice(0, yShape.length - 1));\n y = tfc.transpose(y.reshape([-1, batchSize]), [1, 0])\n .reshape(newShape);\n } else if (yNDim > 1) {\n const dims = [yNDim - 1].concat(mathUtils.range(0, yNDim - 1));\n y = tfc.transpose(y, dims);\n }\n }\n return y;\n }\n } else {\n return this.mergeFunction(inputs);\n }\n });\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = inputShape as Shape[];\n let outputShape: Shape;\n if (inputShape[0] == null) {\n outputShape = null;\n } else {\n outputShape = inputShape[0].slice(1);\n }\n for (let i = 1; i < inputShape.length; ++i) {\n const shape = inputShape[i] == null ? 
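The `computeElementwiseOpOutputShape` logic above follows the usual broadcasting rule: trailing dimensions must be equal, or one of them must be 1 (or unknown, i.e. `null`). A small standalone restatement of that rule, written here purely for illustration (the helper name `broadcastShape` is not part of the library):

```js
// Illustrative re-statement of the element-wise broadcast rule used by Merge.
// Returns the broadcast shape, or throws if the shapes are incompatible.
function broadcastShape(shape1, shape2) {
  if (shape1.length < shape2.length) {
    return broadcastShape(shape2, shape1);
  }
  // Leading dims of the longer shape pass through unchanged.
  const out = shape1.slice(0, shape1.length - shape2.length);
  for (let k = 0; k < shape2.length; ++k) {
    const i = shape1[shape1.length - shape2.length + k];
    const j = shape2[k];
    if (i == null || j == null) {
      out.push(null);            // Unknown dimension stays unknown.
    } else if (i === 1 || j === 1) {
      out.push(Math.max(i, j));  // A size-1 dim broadcasts against the other.
    } else if (i === j) {
      out.push(i);
    } else {
      throw new Error(`Incompatible shapes: ${shape1} vs ${shape2}`);
    }
  }
  return out;
}

console.log(broadcastShape([4, 3], [3]));        // [4, 3]
console.log(broadcastShape([null, 4, 1], [5]));  // [null, 4, 5]
```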
null : inputShape[i].slice(1);\n outputShape = this.computeElementwiseOpOutputShape(outputShape, shape);\n }\n\n let batchSizes: number[] = [];\n for (const shape of inputShape) {\n if (shape != null && shape[0] !== null) {\n batchSizes.push(shape[0]);\n }\n }\n batchSizes = generic_utils.unique(batchSizes);\n if (batchSizes.length === 1) {\n outputShape = batchSizes.concat(outputShape);\n } else {\n outputShape = [null].concat(outputShape);\n }\n return outputShape;\n }\n\n computeMask(inputs: Tensor|Tensor[], mask?: Tensor|Tensor[]): Tensor {\n // TODO(cais): Implement computeMask();\n throw new NotImplementedError(\n 'computeMask has not been implemented for Merge yet');\n }\n}\n\n/**\n * Layer that performs element-wise addition on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). The inputs are specified as an\n * `Array` when the `apply` method of the `Add` layer instance is called. For\n * example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const addLayer = tf.layers.add();\n * const sum = addLayer.apply([input1, input2]);\n * console.log(JSON.stringify(sum.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n */\nexport class Add extends Merge {\n static className = 'Add';\n constructor(config?: LayerConfig) {\n super(config as LayerConfig);\n }\n\n protected mergeFunction(inputs: Tensor[]): Tensor {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return output;\n });\n }\n}\nserialization.registerClass(Add);\n\n/**\n * Calculate the element-wise sum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Add` layer, by using no input argument\n * or a single configuration argument. The resultant `Add` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const addLayer = tf.layers.add();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = addLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.add([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.add([input1, input2]).print();\n * // Gives [[11, 22], [33, 44]].\n *\n */\nexport function add(config?: SymbolicTensor[]|Tensor[]|LayerConfig): Layer|\n SymbolicTensor|Tensor {\n if (Array.isArray(config)) {\n const layer = new Add({});\n return layer.apply(config as SymbolicTensor[] | Tensor[]) as\n SymbolicTensor |\n Tensor;\n } else {\n return new Add(config);\n }\n}\n\n/**\n * Layer that multiplies (element-wise) an `Array` of inputs.\n *\n * It takes as input an Array of tensors, all of the same\n * shape, and returns a single tensor (also of the same shape).\n * For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const input3 = tf.input({shape: [2, 2]});\n * const multiplyLayer = tf.layers.multiply();\n * const product = multiplyLayer.apply([input1, input2, input3]);\n * console.log(product.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n */\nexport class Multiply extends Merge {\n static className = 'Multiply';\n constructor(config?: LayerConfig) {\n super(config);\n }\n\n protected mergeFunction(inputs: Tensor[]): Tensor {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.mul(output, inputs[i]);\n }\n return output;\n });\n }\n}\nserialization.registerClass(Multiply);\n\n/**\n * Calculate the element-wise product of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Multiply` layer, by using no input argument\n * or a single configuration argument. The resultant `Multiply` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const multiplyLayer = tf.layers.multiply();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = multiplyLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.multiply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
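To show how the three invocation modes of `add()` relate, here is a hedged sketch (editorial; the input shape `[2]` and the example values are arbitrary) that wires the symbolic form into a `tf.model` and then feeds it concrete tensors:

```js
// Symbolic form: build a graph in which two inputs are summed element-wise.
const a = tf.input({shape: [2]});
const b = tf.input({shape: [2]});
const summed = tf.layers.add().apply([a, b]);  // SymbolicTensor, shape [null, 2]

// Wrap the graph in a model so it can be run on concrete data.
const model = tf.model({inputs: [a, b], outputs: summed});

// Concrete form: predict() accepts an Array of tensors, one per input.
const out = model.predict([
  tf.tensor2d([[1, 2], [3, 4]]),
  tf.tensor2d([[10, 20], [30, 40]])
]);
out.print();  // [[11, 22], [33, 44]]
```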
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.multiply([input1, input2]).print();\n * // Gives [[10, 40], [90, 160]].\n *\n */\nexport function multiply(config?: SymbolicTensor[]|Tensor[]|LayerConfig): Layer|\n SymbolicTensor|Tensor {\n if (Array.isArray(config)) {\n const layer = new Multiply({});\n return layer.apply(config as SymbolicTensor[] | Tensor[]) as\n SymbolicTensor |\n Tensor;\n } else {\n return new Multiply(config);\n }\n}\n\n/**\n * Layer that performs element-wise averaging on an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape, and returns a\n * single tensor (also of the same shape). For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const averageLayer = tf.layers.average();\n * const average = averageLayer.apply([input1, input2]);\n * console.log(JSON.stringify(average.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n */\nexport class Average extends Merge {\n static className = 'Average';\n constructor(config?: LayerConfig) {\n super(config);\n }\n\n protected mergeFunction(inputs: Tensor[]): Tensor {\n return tidy(() => {\n let output = inputs[0].clone();\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.add(output, inputs[i]);\n }\n return tfc.mul(getScalar(1 / inputs.length), output);\n });\n }\n}\nserialization.registerClass(Average);\n\n/**\n * Calculate the element-wise arithmetic mean of inputs, which all have the same\n * shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Average` layer, by using no input argument\n * or a single configuration argument. The resultant `Average` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const averageLayer = tf.layers.average();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = averageLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.average([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 2, 3, 4], [2, 2]);\n * const input2 = tf.tensor2d([10, 20, 30, 40], [2, 2]);\n * tf.layers.average([input1, input2]).print();\n * // Gives [[5.5, 11], [16.5, 22]].\n *\n */\nexport function average(config?: SymbolicTensor[]|Tensor[]|LayerConfig): Layer|\n SymbolicTensor|Tensor {\n if (Array.isArray(config)) {\n const layer = new Average({});\n return layer.apply(config as SymbolicTensor[] | Tensor[]) as\n SymbolicTensor |\n Tensor;\n } else {\n return new Average(config);\n }\n}\n\n/**\n * Layer that computes the element-wise maximum an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const maxLayer = tf.layers.maximum();\n * const max = maxLayer.apply([input1, input2]);\n * console.log(JSON.stringify(max.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n */\nexport class Maximum extends Merge {\n static className = 'Maximum';\n constructor(config?: LayerConfig) {\n super(config);\n }\n\n protected mergeFunction(inputs: Tensor[]): Tensor {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.maximum(output, inputs[i]);\n }\n return output;\n });\n }\n}\nserialization.registerClass(Maximum);\n\n/**\n * Calculate the element-wise maximum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Maximum` layer, by using no input argument\n * or a single configuration argument. The resultant `Maximum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const maximumLayer = tf.layers.maximum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = maximumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.maximum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.maximum([input1, input2]).print();\n * // Gives [[10, 20], [30, 40]].\n *\n */\nexport function maximum(config?: SymbolicTensor[]|Tensor[]|LayerConfig): Layer|\n SymbolicTensor|Tensor {\n if (Array.isArray(config)) {\n const layer = new Maximum({});\n return layer.apply(config as SymbolicTensor[] | Tensor[]) as\n SymbolicTensor |\n Tensor;\n } else {\n return new Maximum(config);\n }\n}\n\n/**\n * Layer that computes the element-wise minimum of an `Array` of inputs.\n *\n * It takes as input a list of tensors, all of the same shape and returns a\n * single tensor (also of the same shape). For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const minLayer = tf.layers.minimum();\n * const min = minLayer.apply([input1, input2]);\n * console.log(JSON.stringify(min.shape));\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n */\nexport class Minimum extends Merge {\n static className = 'Minimum';\n constructor(config?: LayerConfig) {\n super(config);\n }\n\n protected mergeFunction(inputs: Tensor[]): Tensor {\n return tidy(() => {\n let output = inputs[0];\n for (let i = 1; i < inputs.length; ++i) {\n output = tfc.minimum(output, inputs[i]);\n }\n return output;\n });\n }\n}\nserialization.registerClass(Minimum);\n\n/**\n * Calculate the element-wise minimum of inputs, which all have the same shape.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Minimum` layer, by using no input argument\n * or a single configuration argument. The resultant `Minimum` layer can then\n * be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const minimumLayer = tf.layers.minimum();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = minimumLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 2]});\n * const output = tf.layers.minimum([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([1, 20, 3, 40], [2, 2]);\n * const input2 = tf.tensor2d([10, 2, 30, 4], [2, 2]);\n * tf.layers.minimum([input1, input2]).print();\n * // Gives [[1, 2], [3, 4]].\n *\n */\nexport function minimum(config?: SymbolicTensor[]|Tensor[]|LayerConfig): Layer|\n SymbolicTensor|Tensor {\n if (Array.isArray(config)) {\n const layer = new Minimum({});\n return layer.apply(config as SymbolicTensor[] | Tensor[]) as\n SymbolicTensor |\n Tensor;\n } else {\n return new Minimum(config);\n }\n}\n\nexport interface ConcatenateLayerConfig extends LayerConfig {\n /**\n * Axis along which to concatenate.\n */\n axis?: number;\n}\n\n/**\n * Layer that concatenates an `Array` of inputs.\n *\n * It takes a list of tensors, all of the same shape except for the\n * concatenation axis, and returns a single tensor, the concatenation\n * of all inputs. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 2]});\n * const input2 = tf.input({shape: [2, 3]});\n * const concatLayer = tf.layers.concatenate();\n * const output = concatLayer.apply([input1, input2]);\n * console.log(JSON.stringify(output.shape));\n * // You get [null, 2, 5], with the first dimension as the undetermined batch\n * // dimension. The last dimension (5) is the result of concatenating the\n * // last dimensions of the inputs (2 and 3).\n * ```\n */\nexport class Concatenate extends Merge {\n static className = 'Concatenate';\n readonly DEFAULT_AXIS = -1;\n private readonly axis: number;\n\n constructor(config?: ConcatenateLayerConfig) {\n super(config);\n if (config == null) {\n config = {};\n }\n this.axis = config.axis == null ? this.DEFAULT_AXIS : config.axis;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n\n build(inputShape: Shape|Shape[]): void {\n // Used purely for shape validation.]\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0])) ||\n inputShape.length === 1) {\n throw new ValueError(\n 'A `Concatenate` layer should be called on a list of at least 2 ' +\n 'inputs');\n }\n inputShape = inputShape as Shape[];\n\n let allNoneShape = true;\n for (const shape of inputShape) {\n if (shape != null) {\n allNoneShape = false;\n break;\n }\n }\n if (allNoneShape) {\n return;\n }\n\n const shapeSet: Shape[] = [];\n for (let i = 0; i < inputShape.length; ++i) {\n const shapeWithoutConcatAxis = inputShape[i].slice();\n shapeWithoutConcatAxis.splice(this.axis, 1);\n let exists = false;\n for (const shape of shapeSet) {\n if (util.arraysEqual(shape, shapeWithoutConcatAxis)) {\n exists = true;\n break;\n }\n }\n if (!exists) {\n shapeSet.push(shapeWithoutConcatAxis);\n }\n }\n if (shapeSet.length > 1) {\n throw new ValueError(\n 'A `Concatenate` layer requires inputs with matching shapes ' +\n 'except for the concat axis. Got input shapes: ' +\n JSON.stringify(inputShape));\n }\n }\n\n protected mergeFunction(inputs: Tensor[]): Tensor {\n return tidy(() => {\n return K.concatenate(inputs, this.axis);\n });\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n if (!(Array.isArray(inputShape) && Array.isArray(inputShape[0]))) {\n throw new ValueError(\n 'A `Concatenate` layer should be called on a list of inputs.');\n }\n const inputShapes = inputShape as Shape[];\n const outputShape = inputShapes[0].slice();\n const axis = this.axis < 0 ? 
outputShape.length + this.axis : this.axis;\n // Porting Note: the line above is because TypeScript doesn't support\n // negative indices.\n for (const shape of inputShapes.slice(1)) {\n if (outputShape[axis] == null || shape[axis] == null) {\n outputShape[axis] = null;\n break;\n }\n outputShape[axis] += shape[axis];\n }\n return outputShape;\n }\n\n computeMask(inputs: Tensor|Tensor[], mask?: Tensor|Tensor[]): Tensor {\n // TODO(cais): Implement computeMask();\n throw new NotImplementedError(\n 'computeMask has not been implemented for Concatenate yet');\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n 'axis': this.axis,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Concatenate);\n\n/**\n * Concatenate an `Array` of inputs.\n *\n * This function can be invoked in three ways.\n *\n * 1. Construct an instance of `Concatenate` layer, by using no input argument\n * or a single configuration argument. The resultant `Concatenate` layer can\n * then be used on `tf.SymbolicTensor`s or `tf.Tensor`s. For example:\n *\n * ```js\n * const concatLayer = tf.layers.concatenate();\n *\n * // The layer can be applied to inputs.\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = concatLayer.apply([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 7], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 2. Invoke directly on an `Array` of `tf.SymbolicTensor`s. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.SymbolicTensor`. For example:\n *\n * ```js\n * const input1 = tf.input({shape: [2, 3]});\n * const input2 = tf.input({shape: [2, 4]});\n * const output = tf.layers.concatenate([input1, input2]);\n * console.log(output.shape);\n * // You get [null, 2, 2], with the first dimension as the undetermined batch\n * // dimension and the last dimension as the result of concatenating the\n * // last dimensions of the two inputs.\n * ```\n *\n * 3. Invoke directly on `tf.Tensor`s, i.e., concrete values. This constructs\n * an `Layer` object internally and calls its `apply` method on the inputs,\n * generating a new `tf.Tensor` as the result of the computation. 
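Since `Concatenate` only requires the input shapes to match off the concatenation axis, the sketch below (an editorial example; the shapes are arbitrary) concatenates along `axis: 1` instead of the default last axis:

```js
// Concatenate along axis 1 (the first non-batch axis). All other dimensions
// of the inputs must match; the concat-axis sizes are summed: 2 + 3 = 5.
const x1 = tf.input({shape: [2, 4]});
const x2 = tf.input({shape: [3, 4]});
const concatLayer = tf.layers.concatenate({axis: 1});
const y = concatLayer.apply([x1, x2]);
console.log(JSON.stringify(y.shape));  // [null, 5, 4]
```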
For\n * example:\n *\n * ```js\n * const input1 = tf.tensor2d([[1, 2], [3, 4]], [2, 2]);\n * const input2 = tf.tensor2d([[10, 20], [30, 40]], [2, 2]);\n * tf.layers.concatenate([input1, input2]).print();\n * // Gives [[1, 2, 10, 20], [3, 4, 30, 40]].\n *\n */\nexport function concatenate(config?: SymbolicTensor[]|Tensor[]|\n ConcatenateLayerConfig): Layer|SymbolicTensor|\n Tensor {\n if (Array.isArray(config)) {\n const layer = new Concatenate({});\n return layer.apply(config as SymbolicTensor[] | Tensor[]) as\n SymbolicTensor |\n Tensor;\n } else {\n return new Concatenate(config);\n }\n}\n\nexport interface DotLayerConfig extends LayerConfig {\n /**\n * Axis or axes along which the dot product will be taken.\n *\n * Integer or an Array of integers.\n */\n axes: number|[number, number];\n\n /**\n * Whether to L2-normalize samples along the dot product axis\n * before taking the dot product.\n *\n * If set to `true`, the output of the dot product isthe cosine\n * proximity between the two samples.\n */\n normalize?: boolean;\n}\n\n/**\n * Interpretable potentially negative axis index.\n *\n * For example, given axis = -1, and dim = 3, this function will return 2.\n *\n * @param axis The axis index, may be a positive, zero or negative integer.\n * @param dim Total number of dimensions, a positive integer.\n * @returns A non-negative axis index equivalent to the input `axis`.\n */\nfunction interpretAxis(axis: number, dim: number): number {\n while (axis < 0) {\n axis += dim;\n }\n return axis;\n}\n\nfunction batchDot(x: Tensor, y: Tensor, axes: number|[number, number]): Tensor {\n if (x.shape.length > 3 || y.shape.length > 3) {\n throw new NotImplementedError(\n 'batchDot is not implemented for tensors of 4D or higher rank yet');\n }\n tfc.util.assert(\n x.shape.length >= 2,\n `batchDot requires the rank of x to be >= 2, ` +\n `but got ${x.shape.length}`);\n tfc.util.assert(\n x.shape.length >= 2,\n `batchDot requires the rank of y to be >= 2, ` +\n `but got ${y.shape.length}`);\n\n if (typeof axes === 'number') {\n axes = [axes, axes];\n }\n\n if (x.dtype === 'complex64' || y.dtype === 'complex64') {\n throw new NotImplementedError(\n 'batchDot is not implemented for complex64-type Tensors yet.');\n }\n\n const xNDim = x.shape.length;\n const yNDim = y.shape.length;\n if (axes == null) {\n // Behave like batchMatmul by default.\n axes = [xNDim - 1, yNDim - 2];\n }\n const axesArray = axes as [number, number];\n\n return tfc.tidy(() => {\n let diff: number;\n if (xNDim > yNDim) {\n diff = xNDim - yNDim;\n const diffShape: Shape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n y = y.reshape(y.shape.concat(diffShape));\n } else if (yNDim > xNDim) {\n diff = yNDim - xNDim;\n const diffShape: Shape = [];\n for (let i = 0; i < diff; ++i) {\n diffShape.push(1);\n }\n x = x.reshape(x.shape.concat(diffShape));\n } else {\n diff = 0;\n }\n\n let out: Tensor;\n if (x.shape.length === 2 && y.shape.length === 2) {\n if (axesArray[0] === axesArray[1]) {\n out = x.mulStrict(y).sum(axesArray[0]);\n } else {\n out = x.transpose([1, 0]).mulStrict(y).sum(axesArray[1]);\n }\n } else {\n const adjX = axesArray[0] === x.shape.length - 1 ? null : true;\n const adjY = axesArray[1] === y.shape.length - 1 ? 
true : null;\n out = x.matMul(y, adjX, adjY);\n }\n\n if (diff > 0) {\n let idx: number;\n if (xNDim > yNDim) {\n idx = xNDim + yNDim - 3;\n } else {\n idx = xNDim - 1;\n }\n const squeezeAxes: number[] = [];\n for (let i = idx; i < idx + diff; ++i) {\n squeezeAxes.push(i);\n }\n out = out.squeeze(squeezeAxes);\n }\n if (out.shape.length === 1) {\n out = out.expandDims(1);\n }\n return out;\n });\n}\n\n/**\n * Layer that computes a dot product between samples in two tensors.\n *\n * E.g., if applied to a list of two tensors `a` and `b` both of shape\n * `[batchSize, n]`, the output will be a tensor of shape `[batchSize, 1]`,\n * where each entry at index `[i, 0]` will be the dot product between\n * `a[i, :]` and `b[i, :]`.\n *\n * Example:\n *\n * ```js\n * const dotLayer = tf.layers.dot({axis: -1});\n * const x1 = tf.tensor2d([[10, 20], [30, 40]]);\n * const x2 = tf.tensor2d([[-1, -2], [-3, -4]]);\n *\n * // Invoke the layer's apply() method in eager (imperative) mode.\n * const y = dotLayer.apply([x1, x2]);\n * ```\n */\nexport class Dot extends Merge {\n static className = 'Dot';\n\n private axes: number|[number, number];\n private normalize: boolean;\n\n constructor(config: DotLayerConfig) {\n super(config);\n this.axes = config.axes;\n this.normalize = config.normalize == null ? false : config.normalize;\n this.supportsMasking = true;\n this.reshapeRequired = false;\n }\n\n build(inputShape: Shape|Shape[]): void {\n tfc.util.assert(\n Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]),\n 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = inputShape[0] as Shape;\n const shape2 = inputShape[1] as Shape;\n if (shape1.length > 3 || shape2.length > 3) {\n throw new NotImplementedError(\n 'Dot layer does not support tensors of 4D or higher rank yet.');\n }\n\n const axes = this.interpretAxes(shape1, shape2);\n if (shape1[axes[0]] !== shape2[axes[1]]) {\n throw new ValueError(\n `Dimension incompatibility: ` +\n `${shape1[axes[0]]} !== ${shape2[axes[1]]}`);\n }\n }\n\n protected mergeFunction(inputs: Tensor[]): Tensor {\n if (inputs.length !== 2) {\n throw new ValueError(\n 'A `Dot` layer must be called on exactly 2 inputs, ' +\n `but received ${inputs.length} input(s).`);\n }\n\n let x1 = inputs[0];\n let x2 = inputs[1];\n let axes: [number, number];\n if (!Array.isArray(this.axes)) {\n axes = [\n interpretAxis(this.axes, x1.shape.length),\n interpretAxis(this.axes, x2.shape.length)\n ];\n } else {\n axes = this.axes.map(\n (axis, i) => interpretAxis(\n axis, inputs[i].shape.length)) as [number, number];\n }\n if (this.normalize) {\n x1 = l2Normalize(x1, axes[0]);\n x2 = l2Normalize(x2, axes[1]);\n }\n return batchDot(x1, x2, axes);\n }\n\n private interpretAxes(shape1: Shape, shape2: Shape): number[] {\n let axes: number[];\n if (!Array.isArray(this.axes)) {\n // `this.axes` is a single integer.\n axes = [\n interpretAxis(this.axes, shape1.length),\n interpretAxis(this.axes, shape2.length)\n ];\n } else {\n // `this.axes` is an Array of integers.\n axes = this.axes;\n }\n return axes;\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n tfc.util.assert(\n Array.isArray(inputShape) && inputShape.length === 2 &&\n Array.isArray(inputShape[0]) && Array.isArray(inputShape[1]),\n 'A `Dot` layer should be called on a list of exactly 2 inputs.');\n const shape1 = (inputShape[0] as Shape).slice();\n const shape2 = (inputShape[1] as Shape).slice();\n if (shape1.length > 3 || 
shape2.length > 3) {\n throw new NotImplementedError(\n 'Dot layer does not support tensors of 4D or higher rank yet.');\n }\n\n const axes = this.interpretAxes(shape1, shape2);\n shape1.splice(axes[0], 1);\n shape2.splice(axes[1], 1);\n shape2.splice(0, 1);\n const outputShape = shape1.concat(shape2);\n if (outputShape.length === 1) {\n outputShape.push(1);\n }\n return outputShape;\n }\n\n computeMask(inputs: Tensor|Tensor[], mask?: Tensor|Tensor[]): Tensor {\n // TODO(cais): Implement computeMask();\n throw new NotImplementedError(\n 'computeMask has not been implemented for Dot yet');\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n 'axes': this.axes,\n 'normalize': this.normalize\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(Dot);\n\n// TODO(cais): Add functional interfaces for the merge layers.\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Normalization layers.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, Tensor1D, Tensor2D, Tensor3D, Tensor4D, tidy, util} from '@tensorflow/tfjs-core';\n\nimport {Constraint, ConstraintIdentifier, getConstraint, serializeConstraint} from '../constraints';\nimport {InputSpec, Layer, LayerConfig} from '../engine/topology';\nimport {getScalar} from '../backend/state';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {getInitializer, Initializer, InitializerIdentifier, serializeInitializer} from '../initializers';\nimport {getRegularizer, Regularizer, RegularizerIdentifier, serializeRegularizer} from '../regularizers';\nimport {Kwargs, Shape} from '../types';\nimport * as generic_utils from '../utils/generic_utils';\nimport * as math_utils from '../utils/math_utils';\nimport {getExactlyOneShape, getExactlyOneTensor} from '../utils/types_utils';\nimport {LayerVariable} from '../variables';\n\n\n/**\n * Applies batch normalization on x given mean, var, beta and gamma.\n *\n * I.e. 
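A short sketch of the `Dot` layer described above, applied eagerly (editorial example; values chosen arbitrarily). With `normalize: true`, both operands are L2-normalized along the dot axis first, so the output is the cosine proximity of the paired rows:

```js
// Plain dot product over the last axis: each output row is the dot product of
// the corresponding rows of x1 and x2, giving shape [batchSize, 1].
const x1 = tf.tensor2d([[1, 2], [3, 4]]);
const x2 = tf.tensor2d([[5, 6], [7, 8]]);
tf.layers.dot({axes: -1}).apply([x1, x2]).print();
// [[17], [53]]   (1*5 + 2*6 = 17, 3*7 + 4*8 = 53)

// With normalize: true the result is the cosine similarity of each row pair.
tf.layers.dot({axes: -1, normalize: true}).apply([x1, x2]).print();
```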
returns:\n * `output = (x - mean) / (sqrt(var) + epsilon) * gamma + beta`\n *\n * @param x Input tensor.\n * @param mean Mean of batch.\n * @param variance Variance of batch.\n * @param beta Tensor with which to center the input.\n * @param gamma Tensor by which to scale the input.\n * @param epsilon Fuzz factor.\n * @returns The result of the batch normalization.\n */\nexport function batchNormalization(\n x: Tensor, mean: Tensor, variance: Tensor, beta?: Tensor, gamma?: Tensor,\n epsilon = 1e-3): Tensor {\n let out: Tensor;\n if (x.rank === 2) {\n out = tfc.batchNormalization2d(\n x as Tensor2D, mean as Tensor2D | Tensor1D,\n variance as Tensor2D | Tensor1D, epsilon, gamma as Tensor2D | Tensor1D,\n beta as Tensor2D | Tensor1D);\n } else if (x.rank === 3) {\n // TODO(cais): Check rank; give proper error message.\n out = tfc.batchNormalization3d(\n x as Tensor3D, mean as Tensor3D | Tensor1D,\n variance as Tensor3D | Tensor1D, epsilon, gamma as Tensor3D | Tensor1D,\n beta as Tensor3D | Tensor1D);\n } else if (x.rank === 4) {\n out = tfc.batchNormalization4d(\n x as Tensor4D, mean as Tensor4D | Tensor1D,\n variance as Tensor4D | Tensor1D, epsilon, gamma as Tensor4D | Tensor1D,\n beta as Tensor4D | Tensor1D);\n } else {\n throw new NotImplementedError(\n `batchNormalization is not implemented for array of rank ${x.rank} ` +\n `yet`);\n }\n return out;\n}\n\n/**\n * Non-broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction regularNormalizeBatchInTraining(\n x: Tensor, gamma: Tensor, beta: Tensor, reductionAxes: number[],\n epsilon = 1e-3): [Tensor, Tensor, Tensor] {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const normed =\n batchNormalization(x, mean, variance, beta, gamma, epsilon);\n return [normed, mean, variance];\n }) as [Tensor, Tensor, Tensor];\n}\n\n/**\n * Broadcasting batch normalization for use in training (not inference).\n *\n * The input is normalized to zero mean and unit variance along the\n * `reductionAxes`, followed by scaling with `gamma` and shifted by `beta`.\n * The result of that is returned as the first element\n * of the returned `Array`. 
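The wrapper above dispatches to the rank-specific `tfc.batchNormalization*` kernels; the arithmetic they perform is the standard normalization, sketched here with plain core ops purely for illustration, assuming the conventional `(x - mean) / sqrt(variance + epsilon) * gamma + beta` form (tensor values and the epsilon are arbitrary):

```js
// Illustrative only: normalize a 2-D tensor over the batch axis the way a
// batch-norm layer would at training time.
const x = tf.tensor2d([[1, 2], [3, 4], [5, 6]]);  // [batch, features]
const {mean, variance} = tf.moments(x, [0]);      // per-feature statistics
const epsilon = 1e-3;
const gamma = tf.ones([2]);                       // scale
const beta = tf.zeros([2]);                       // offset

const normed = x.sub(mean)
                 .div(variance.add(epsilon).sqrt())
                 .mul(gamma)
                 .add(beta);
normed.print();  // each column now has ~zero mean and ~unit variance
```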
The other two elements are the mean and variance,\n * respectively.\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nfunction broadcastNormalizeBatchInTraining(\n x: Tensor, gamma: Tensor, beta: Tensor, reductionAxes: number[],\n epsilon = 1e-3): [Tensor, Tensor, Tensor] {\n return tidy(() => {\n const meanAndVariance = tfc.moments(x, reductionAxes);\n const mean = meanAndVariance.mean;\n const variance = meanAndVariance.variance;\n const targetShape: number[] = [];\n for (const axis of math_utils.range(0, x.rank)) {\n if (reductionAxes.indexOf(axis) !== -1) {\n targetShape.push(1);\n } else {\n targetShape.push(x.shape[axis]);\n }\n }\n const broadcastMean = mean.reshape(targetShape);\n const broadcastVariance = variance.reshape(targetShape);\n const broadcastGamma =\n gamma == null ? null : gamma.reshape(targetShape);\n const broadcastBeta =\n beta == null ? null : beta.reshape(targetShape);\n const normed = batchNormalization(\n x, broadcastMean, broadcastVariance, broadcastBeta,\n broadcastGamma, epsilon);\n return [normed, mean, variance];\n }) as [Tensor, Tensor, Tensor];\n}\n\n/**\n * Batch normalization for use in training (not inference).\n *\n * @param x Input tensor to be normalized.\n * @param gamma Tensor by which to scale the input.\n * @param beta Tensor by which to center the input.\n * @param reductionAxes Axes over which to normalize.\n * @param epsilon Fuzz factor.\n * @returns An `Array` of three `Tensors`:\n * [normalized tensor, mean of input, variance of input].\n */\nexport function normalizeBatchInTraining(\n x: Tensor, gamma: Tensor, beta: Tensor, reductionAxes: number[],\n epsilon = 1e-3): [Tensor, Tensor, Tensor] {\n if (util.arraysEqual(\n reductionAxes.slice().sort(), math_utils.range(0, x.rank - 1))) {\n return regularNormalizeBatchInTraining(\n x, gamma, beta, reductionAxes, epsilon);\n } else {\n return broadcastNormalizeBatchInTraining(\n x, gamma, beta, reductionAxes, epsilon);\n }\n}\n\nexport interface BatchNormalizationLayerConfig extends LayerConfig {\n /**\n * The integer axis that should be normalized (typically the features axis).\n * Defaults to -1.\n *\n * For instance, after a `Conv2D` layer with `data_format=\"channels_first\"`,\n * set `axis=1` in `batchNormalization`.\n */\n axis?: number;\n\n /**\n * Momentum of the moving average. Defaults to 0.99.\n */\n momentum?: number;\n\n /**\n * Small float added to the variance to avoid dividing by zero. Defaults to\n * 1e-3.\n */\n epsilon?: number;\n\n /**\n * If `true`, add offset of `beta` to normalized tensor.\n * If `false`, `beta` is ignored.\n * Defaults to `true`.\n */\n center?: boolean;\n\n /**\n * If `true`, multiply by `gamma`.\n * If `false`, `gamma` is not used.\n * When the next layer is linear (also e.g. 
`nn.relu`),\n * this can be disabled since the scaling will be done by the next layer.\n * Defaults to `true`.\n */\n scale?: boolean;\n\n /**\n * Initializer for the beta weight.\n * Defaults to 'zeros'.\n */\n betaInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the gamma weight.\n * Defaults to `ones`.\n */\n gammaInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the moving mean.\n * Defaults to `zeros`\n */\n movingMeanInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the moving variance.\n * Defaults to 'Ones'.\n */\n movingVarianceInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Constraint for the beta weight.\n */\n betaConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Constraint for gamma weight.\n */\n gammaConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Regularizer for the beta weight.\n */\n betaRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer for the gamma weight.\n */\n gammaRegularizer?: RegularizerIdentifier|Regularizer;\n}\n\n\n/**\n * Batch normalization layer (Ioffe and Szegedy, 2014).\n *\n * Normalize the activations of the previous layer at each batch,\n * i.e. applies a transformation that maintains the mean activation\n * close to 0 and the activation standard deviation close to 1.\n *\n * Input shape:\n * Arbitrary. Use the keyword argument `inputShape` (Array of integers, does\n * not include the sample axis) when calling the constructor of this class,\n * if this layer is used as a first layer in a model.\n *\n * Output shape:\n * Same shape as input.\n *\n * References:\n * - [Batch Normalization: Accelerating Deep Network Training by Reducing\n * Internal Covariate Shift](https://arxiv.org/abs/1502.03167)\n */\nexport class BatchNormalization extends Layer {\n static className = 'BatchNormalization';\n private readonly axis: number;\n private readonly momentum: number;\n private readonly epsilon: number;\n private readonly center: boolean;\n private readonly scale: boolean;\n private readonly betaInitializer: Initializer;\n private readonly gammaInitializer: Initializer;\n private readonly movingMeanInitializer: Initializer;\n private readonly movingVarianceInitializer: Initializer;\n private readonly betaConstraint: Constraint;\n private readonly gammaConstraint: Constraint;\n private readonly betaRegularizer: Regularizer;\n private readonly gammaRegularizer: Regularizer;\n private gamma: LayerVariable;\n private beta: LayerVariable;\n private movingMean: LayerVariable;\n private movingVariance: LayerVariable;\n private stepCount: number;\n\n constructor(config?: BatchNormalizationLayerConfig) {\n if (config == null) {\n config = {};\n }\n super(config);\n\n this.supportsMasking = true;\n this.axis = config.axis == null ? -1 : config.axis;\n this.momentum = config.momentum == null ? 0.99 : config.momentum;\n this.epsilon = config.epsilon == null ? 1e-3 : config.epsilon;\n this.center = config.center == null ? true : config.center;\n this.scale = config.scale == null ? 
true : config.scale;\n this.betaInitializer = getInitializer(config.betaInitializer || 'zeros');\n this.gammaInitializer = getInitializer(config.gammaInitializer || 'ones');\n this.movingMeanInitializer =\n getInitializer(config.movingMeanInitializer || 'zeros');\n this.movingVarianceInitializer =\n getInitializer(config.movingVarianceInitializer || 'ones');\n this.betaConstraint = getConstraint(config.betaConstraint);\n this.gammaConstraint = getConstraint(config.gammaConstraint);\n this.betaRegularizer = getRegularizer(config.betaRegularizer);\n this.gammaRegularizer = getRegularizer(config.gammaRegularizer);\n this.stepCount = 0;\n }\n\n public build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n const axis = this.axis >= 0 ? this.axis : (this.axis + inputShape.length);\n const dim = inputShape[axis];\n if (dim == null) {\n throw new ValueError(\n `Axis ${axis} of input tensor should have a defined dimension but ` +\n `the layer received an input with shape ` +\n `${JSON.stringify(inputShape)}.`);\n }\n this.inputSpec =\n [new InputSpec({ndim: inputShape.length, axes: {[axis]: dim}})];\n const shape = [dim];\n if (this.scale) {\n this.gamma = this.addWeight(\n 'gamma', shape, null, this.gammaInitializer, this.gammaRegularizer,\n true, this.gammaConstraint);\n }\n if (this.center) {\n this.beta = this.addWeight(\n 'beta', shape, null, this.betaInitializer, this.betaRegularizer, true,\n this.betaConstraint);\n }\n this.movingMean = this.addWeight(\n 'moving_mean', shape, null, this.movingMeanInitializer, null, false);\n this.movingVariance = this.addWeight(\n 'moving_variance', shape, null, this.movingVarianceInitializer, null,\n false);\n this.built = true;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n const training = kwargs['training'] == null ? false : kwargs['training'];\n const input = getExactlyOneTensor(inputs);\n const inputShape = input.shape;\n const ndim = inputShape.length;\n const reductionAxes = math_utils.range(0, ndim);\n const axis = this.axis >= 0 ? this.axis : (this.axis + ndim);\n reductionAxes.splice(axis, 1);\n const broadcastShape = generic_utils.pyListRepeat(1, ndim);\n broadcastShape[axis] = inputShape[axis];\n\n const sortedReductionAxes = reductionAxes.slice();\n sortedReductionAxes.sort();\n const needsBroadcasting = !util.arraysEqual(\n sortedReductionAxes, math_utils.range(0, ndim).slice(0, ndim - 1));\n\n const normalizeInference: () => Tensor = () => {\n if (needsBroadcasting) {\n const broadcastMovingMean =\n this.movingMean.read().reshape(broadcastShape);\n const broadcastMovingVariance =\n this.movingVariance.read().reshape(broadcastShape);\n const broadcastBeta =\n this.center ? this.beta.read().reshape(broadcastShape) : null;\n const broadcastGamma =\n this.scale ? this.gamma.read().reshape(broadcastShape) : null;\n return batchNormalization(\n input, broadcastMovingMean, broadcastMovingVariance,\n broadcastBeta, broadcastGamma, this.epsilon);\n } else {\n return batchNormalization(\n input, this.movingMean.read(), this.movingVariance.read(),\n this.beta == null ? null : this.beta.read(),\n this.gamma == null ? 
null : this.gamma.read(), this.epsilon);\n }\n };\n\n if (!training) {\n return normalizeInference();\n }\n\n const [normedTraining, mean, variance] = normalizeBatchInTraining(\n input, this.gamma.read(), this.beta.read(), reductionAxes,\n this.epsilon);\n\n // Debias variance.\n const sampleSize =\n math_utils.arrayProd(reductionAxes.map(axis => input.shape[axis]));\n const varianceDebiased = variance.mul(\n getScalar(sampleSize / (sampleSize - (1 + this.epsilon))));\n\n // Perform updates to moving mean and moving variance for training.\n // Porting Note: In PyKeras, these updates to `movingMean` and\n // `movingAverage` are done as a deferred Graph, added to the `Layer`'s\n // `update`s using the `add_update()` method. Here we do it imperatively\n // and encapsulate the updates in a function that is invoked\n // immediately.\n const updateMovingMeanAndVariance = () => {\n this.stepCount++;\n const newMovingMean = tfc.movingAverage(\n this.movingMean.read(), mean, this.momentum, this.stepCount);\n this.movingMean.write(newMovingMean);\n const newMovingVariance = tfc.movingAverage(\n this.movingVariance.read(), varianceDebiased, this.momentum,\n this.stepCount);\n this.movingVariance.write(newMovingVariance);\n };\n updateMovingMeanAndVariance();\n\n return normedTraining;\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n axis: this.axis,\n momentum: this.momentum,\n epsilon: this.epsilon,\n center: this.center,\n scale: this.scale,\n betaInitializer: serializeInitializer(this.betaInitializer),\n gammaInitializer: serializeInitializer(this.gammaInitializer),\n movingMeanInitializer: serializeInitializer(this.movingMeanInitializer),\n movingVarianceInitializer:\n serializeInitializer(this.movingVarianceInitializer),\n betaRegularizer: serializeRegularizer(this.betaRegularizer),\n gammaRegularizer: serializeRegularizer(this.gammaRegularizer),\n betaConstraint: serializeConstraint(this.betaConstraint),\n gammaConstraint: serializeConstraint(this.gammaConstraint)\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(BatchNormalization);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Padding Layers.\n */\n\n// Porting Note: In Python Keras, the padding layers are in convolutional.py,\n// but we decided to put them in a separate file (padding.ts) for clarity.\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, tidy} from '@tensorflow/tfjs-core';\n\nimport {imageDataFormat} from '../backend/common';\nimport {DataFormat} from '../common';\nimport {InputSpec, Layer, LayerConfig} from '../engine/topology';\nimport {ValueError} from '../errors';\nimport {Kwargs, Shape} from '../types';\nimport {getExactlyOneShape, getExactlyOneTensor} from '../utils/types_utils';\n\n\n/**\n * Pads the middle dimension of a 3D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of 2 integers, how many zeros to add at the start and\n * end of the middle dimension (i.e., dimension 1).\n * @return A padded 3D `tf.Tensor`.\n */\nexport function temporalPadding(x: Tensor, padding?: [number, number]): Tensor {\n return tidy(() => {\n if (x.rank !== 3) {\n throw new ValueError(\n 
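For completeness, a hedged sketch of using the `BatchNormalization` layer itself (layer sizes arbitrary): at inference time it normalizes with the stored moving statistics, while training passes update them as shown in `call()` above.

```js
// A tiny model that normalizes the activations of a dense layer.
const model = tf.sequential();
model.add(tf.layers.dense({units: 8, inputShape: [4]}));
model.add(tf.layers.batchNormalization({axis: -1, momentum: 0.99}));
model.add(tf.layers.dense({units: 1}));

// At inference the layer uses its moving mean / variance.
model.predict(tf.randomNormal([2, 4])).print();
```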
`temporalPadding expects input tensor to be 3-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n\n if (padding == null) {\n padding = [1, 1];\n }\n if (padding.length !== 2) {\n throw new ValueError(\n `temporalPadding expects input padding pattern to be a length-2 ` +\n `array, but received a length-${padding.length} array.`);\n }\n\n const pattern: Array<[number, number]> = [[0, 0], padding, [0, 0]];\n return tfc.pad(x, pattern);\n });\n}\n\n/**\n * Pads the 2nd and 3rd dimensions of a 4D tensor.\n *\n * @param x Input `tf.Tensor` to be padded.\n * @param padding `Array` of two `Array`s, each of which is an `Array` of two\n * integers. The amount of padding at the beginning and end of the 2nd and 3rd\n * dimensions, respectively.\n * @param dataFormat 'channelsLast' (default) or 'channelsFirst'.\n * @return Padded 4D `tf.Tensor`.\n */\nexport function spatial2dPadding(\n x: Tensor, padding?: [[number, number], [number, number]],\n dataFormat?: DataFormat): Tensor {\n return tidy(() => {\n if (x.rank !== 4) {\n throw new ValueError(\n `temporalPadding expects input tensor to be 4-D, but received a ` +\n `${x.rank}-D tensor.`);\n }\n\n if (padding == null) {\n padding = [[1, 1], [1, 1]];\n }\n if (padding.length !== 2 || padding[0].length !== 2 ||\n padding[1].length !== 2) {\n throw new ValueError(\n 'spatial2dPadding expects `padding` to be an Array of two Arrays, ' +\n 'each of which is an Array of two integers.');\n }\n\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (dataFormat !== 'channelsLast' && dataFormat !== 'channelsFirst') {\n throw new ValueError(\n `Unknown data format: ${dataFormat}. ` +\n `Supported data formats are 'channelsLast' and 'channelsFirst.`);\n }\n\n let pattern: Array<[number, number]>;\n if (dataFormat === 'channelsFirst') {\n pattern = [[0, 0], [0, 0], padding[0], padding[1]];\n } else {\n pattern = [[0, 0], padding[0], padding[1], [0, 0]];\n }\n\n return tfc.pad(x, pattern);\n });\n}\n\nexport interface ZeroPadding2DLayerConfig extends LayerConfig {\n /**\n * Integer, or `Array` of 2 integers, or `Array` of 2 `Array`s, each of\n * which is an `Array` of 2 integers.\n * - If integer, the same symmetric padding is applied to width and height.\n * - If Array` of 2 integers, interpreted as two different symmetric values\n * for height and width:\n * `[symmetricHeightPad, symmetricWidthPad]`.\n * - If `Array` of 2 `Array`s, interpreted as:\n * `[[topPad, bottomPad], [leftPad, rightPad]]`.\n */\n padding?: number|[number, number]|[[number, number], [number, number]];\n\n /**\n * One of `'channelsLast'` (default) and `'channelsFirst'`.\n *\n * The ordering of the dimensions in the inputs.\n * `channelsLast` corresponds to inputs with shape\n * `[batch, height, width, channels]` while `channelsFirst`\n * corresponds to inputs with shape\n * `[batch, channels, height, width]`.\n */\n dataFormat?: DataFormat;\n}\n\n/**\n * Zero-padding layer for 2D input (e.g., image).\n *\n * This layer can add rows and columns of zeros\n * at the top, bottom, left and right side of an image tensor.\n *\n * Input shape:\n * 4D tensor with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, rows, cols, channels]`\n * - If `data_format` is `\"channels_first\"`:\n * `[batch, channels, rows, cols]`.\n *\n * Output shape:\n * 4D with shape:\n * - If `dataFormat` is `\"channelsLast\"`:\n * `[batch, paddedRows, paddedCols, channels]`\n * - If `dataFormat` is `\"channelsFirst\"`:\n * `[batch, channels, paddedRows, paddedCols]`.\n */\nexport class 
ZeroPadding2D extends Layer {\n static className = 'ZeroPadding2D';\n readonly dataFormat: DataFormat;\n readonly padding: [[number, number], [number, number]];\n\n constructor(config?: ZeroPadding2DLayerConfig) {\n if (config == null) {\n config = {};\n }\n super(config);\n\n this.dataFormat =\n config.dataFormat == null ? imageDataFormat() : config.dataFormat;\n // TODO(cais): Maybe refactor the following logic surrounding `padding`\n // into a helper method.\n if (config.padding == null) {\n this.padding = [[1, 1], [1, 1]];\n } else if (typeof config.padding === 'number') {\n this.padding =\n [[config.padding, config.padding], [config.padding, config.padding]];\n } else {\n config.padding = config.padding as [number, number] |\n [[number, number], [number, number]];\n if (config.padding.length !== 2) {\n throw new ValueError(\n `ZeroPadding2D expects padding to be a length-2 array, but ` +\n `received a length-${config.padding.length} array.`);\n }\n\n let heightPadding: [number, number];\n let widthPadding: [number, number];\n if (typeof config.padding[0] === 'number') {\n heightPadding =\n [config.padding[0] as number, config.padding[0] as number];\n widthPadding =\n [config.padding[1] as number, config.padding[1] as number];\n } else {\n config.padding = config.padding as [[number, number], [number, number]];\n\n if (config.padding[0].length !== 2) {\n throw new ValueError(\n `ZeroPadding2D expects height padding to be a length-2 array, ` +\n `but received a length-${config.padding[0].length} array.`);\n }\n heightPadding = config.padding[0] as [number, number];\n\n if (config.padding[1].length !== 2) {\n throw new ValueError(\n `ZeroPadding2D expects width padding to be a length-2 array, ` +\n `but received a length-${config.padding[1].length} array.`);\n }\n widthPadding = config.padding[1] as [number, number];\n }\n this.padding = [heightPadding, widthPadding];\n }\n this.inputSpec = [new InputSpec({ndim: 4})];\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n\n let rows: number;\n let cols: number;\n if (this.dataFormat === 'channelsFirst') {\n if (inputShape[2] != null && inputShape[2] >= 0) {\n rows = inputShape[2] + this.padding[0][0] + this.padding[0][1];\n } else {\n rows = null;\n }\n if (inputShape[3] != null && inputShape[3] >= 0) {\n cols = inputShape[3] + this.padding[1][0] + this.padding[1][1];\n } else {\n cols = null;\n }\n return [inputShape[0], inputShape[1], rows, cols];\n } else {\n if (inputShape[1] != null && inputShape[1] >= 0) {\n rows = inputShape[1] + this.padding[0][0] + this.padding[0][1];\n } else {\n rows = null;\n }\n if (inputShape[2] != null && inputShape[2] >= 0) {\n cols = inputShape[2] + this.padding[1][0] + this.padding[1][1];\n } else {\n cols = null;\n }\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(\n () => spatial2dPadding(\n getExactlyOneTensor(inputs), this.padding, this.dataFormat));\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n padding: this.padding,\n dataFormat: this.dataFormat,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(ZeroPadding2D);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * 
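A short sketch of the `ZeroPadding2D` shape arithmetic above (editorial example; sizes arbitrary): with `padding: [[1, 2], [3, 4]]` the height grows by 1 + 2 and the width by 3 + 4.

```js
// channelsLast input: [batch, rows, cols, channels] = [null, 4, 4, 3].
const img = tf.input({shape: [4, 4, 3]});
const padded = tf.layers.zeroPadding2d({padding: [[1, 2], [3, 4]]}).apply(img);
console.log(JSON.stringify(padded.shape));
// [null, 7, 11, 3]: rows 4 + 1 + 2, cols 4 + 3 + 4, channels unchanged.
```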
https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * TensorFlow.js Layers: Pooling Layers.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, Tensor3D, Tensor4D, tidy} from '@tensorflow/tfjs-core';\n\nimport {imageDataFormat} from '../backend/common';\nimport * as K from '../backend/tfjs_backend';\nimport {checkDataFormat, checkPaddingMode, checkPoolMode, DataFormat, PaddingMode, PoolMode} from '../common';\nimport {InputSpec} from '../engine/topology';\nimport {Layer, LayerConfig} from '../engine/topology';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {Kwargs, Shape} from '../types';\nimport {convOutputLength} from '../utils/conv_utils';\nimport {getExactlyOneShape, getExactlyOneTensor} from '../utils/types_utils';\n\nimport {preprocessConv2DInput} from './convolutional';\n\n\n/**\n * 2D pooling.\n * @param x\n * @param poolSize\n * @param stridesdes strides. Defaults to [1, 1].\n * @param padding padding. Defaults to 'valid'.\n * @param dataFormat data format. Defaults to 'channelsLast'.\n * @param poolMode Mode of pooling. Defaults to 'max'.\n * @returns Result of the 2D pooling.\n */\nexport function pool2d(\n x: Tensor, poolSize: [number, number], strides?: [number, number],\n padding?: PaddingMode, dataFormat?: DataFormat,\n poolMode?: PoolMode): Tensor {\n return tidy(() => {\n checkDataFormat(dataFormat);\n checkPoolMode(poolMode);\n checkPaddingMode(padding);\n if (strides == null) {\n strides = [1, 1];\n }\n if (padding == null) {\n padding = 'valid';\n }\n if (dataFormat == null) {\n dataFormat = imageDataFormat();\n }\n if (poolMode == null) {\n poolMode = 'max';\n }\n\n // TODO(cais): Remove the preprocessing step once deeplearn.js supports\n // dataFormat as an input argument.\n x = preprocessConv2DInput(x, dataFormat); // x is NHWC after preprocessing.\n let y: Tensor;\n const paddingString = (padding === 'same') ? 'same' : 'valid';\n if (poolMode === 'max') {\n // TODO(cais): Rank check?\n y = tfc.maxPool(x as Tensor4D, poolSize, strides, paddingString);\n } else { // 'avg'\n // TODO(cais): Check the dtype and rank of x and give clear error message\n // if those are incorrect.\n y = tfc.avgPool(\n // TODO(cais): Rank check?\n x as Tensor3D | Tensor4D, poolSize, strides, paddingString);\n }\n if (dataFormat === 'channelsFirst') {\n y = tfc.transpose(y, [0, 3, 1, 2]); // NHWC -> NCHW.\n }\n return y;\n });\n}\n\n\nexport interface Pooling1DLayerConfig extends LayerConfig {\n /**\n * Size of the window to pool over, should be an integer.\n */\n poolSize?: number;\n /**\n * Period at which to sample the pooled values.\n *\n * If `null`, defaults to `poolSize`.\n */\n strides?: number;\n /** How to fill in data that's not an integer multiple of poolSize. 
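// A minimal sketch (assuming `tf` is @tensorflow/tfjs) of what pool2d() above
// boils down to for 'channelsLast' data: a direct tf.maxPool or tf.avgPool call
// on an NHWC tensor, with the PaddingMode mapped to a 'valid'/'same' string.
import * as tf from '@tensorflow/tfjs';

const x = tf.zeros([1, 4, 4, 3]) as tf.Tensor4D;      // NHWC: [batch, rows, cols, channels]
const yMax = tf.maxPool(x, [2, 2], [2, 2], 'valid');  // poolSize [2, 2], strides [2, 2]
const yAvg = tf.avgPool(x, [2, 2], [2, 2], 'valid');
console.log(yMax.shape, yAvg.shape);                  // [1, 2, 2, 3] for both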
*/\n padding?: PaddingMode;\n}\n\n/**\n * Abstract class for different pooling 1D layers.\n */\nexport abstract class Pooling1D extends Layer {\n protected readonly poolSize: [number];\n protected readonly strides: [number];\n protected readonly padding: PaddingMode;\n\n /**\n *\n * @param config Parameters for the Pooling layer.\n *\n * config.poolSize defaults to 2.\n */\n constructor(config: Pooling1DLayerConfig) {\n if (config.poolSize == null) {\n config.poolSize = 2;\n }\n super(config);\n if (typeof config.poolSize === 'number') {\n this.poolSize = [config.poolSize];\n } else if (\n Array.isArray(config.poolSize) &&\n (config.poolSize as number[]).length === 1 &&\n typeof (config.poolSize as number[])[0] === 'number') {\n this.poolSize = config.poolSize;\n } else {\n throw new ValueError(\n `poolSize for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(config.poolSize)}`);\n }\n if (config.strides == null) {\n this.strides = this.poolSize;\n } else {\n if (typeof config.strides === 'number') {\n this.strides = [config.strides];\n } else if (\n Array.isArray(config.strides) &&\n (config.strides as number[]).length === 1 &&\n typeof (config.strides as number[])[0] === 'number') {\n this.strides = config.strides;\n } else {\n throw new ValueError(\n `strides for 1D convolutional layer must be a number or an ` +\n `Array of a single number, but received ` +\n `${JSON.stringify(config.strides)}`);\n }\n }\n\n this.padding = config.padding == null ? 'valid' : config.padding;\n checkPaddingMode(this.padding);\n this.inputSpec = [new InputSpec({ndim: 3})];\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n const length = convOutputLength(\n inputShape[1], this.poolSize[0], this.padding, this.strides[0]);\n return [inputShape[0], length, inputShape[2]];\n }\n\n protected abstract poolingFunction(\n inputs: Tensor, poolSize: [number, number], strides: [number, number],\n padding: PaddingMode, dataFormat: DataFormat): Tensor;\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n // Add dummy last dimension.\n inputs = K.expandDims(getExactlyOneTensor(inputs), 2);\n const output = this.poolingFunction(\n getExactlyOneTensor(inputs), [this.poolSize[0], 1],\n [this.strides[0], 1], this.padding, 'channelsLast');\n // Remove dummy last dimension.\n return tfc.squeeze(output, [2]);\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n\n/**\n * Max pooling operation for temporal data.\n *\n * Input shape: `[batchSize, inLength, channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n */\nexport class MaxPooling1D extends Pooling1D {\n static className = 'MaxPooling1D';\n constructor(config: Pooling1DLayerConfig) {\n super(config);\n }\n\n protected poolingFunction(\n inputs: Tensor, poolSize: [number, number], strides: [number, number],\n padding: PaddingMode, dataFormat: DataFormat): Tensor {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\nserialization.registerClass(MaxPooling1D);\n\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape: `[batchSize, inLength, 
channels]`\n *\n * Output shape: `[batchSize, pooledLength, channels]`\n *\n * `tf.avgPool1d` is an alias.\n */\nexport class AveragePooling1D extends Pooling1D {\n static className = 'AveragePooling1D';\n constructor(config: Pooling1DLayerConfig) {\n super(config);\n }\n\n protected poolingFunction(\n inputs: Tensor, poolSize: [number, number], strides: [number, number],\n padding: PaddingMode, dataFormat: DataFormat): Tensor {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\nserialization.registerClass(AveragePooling1D);\n\nexport interface Pooling2DLayerConfig extends LayerConfig {\n /**\n * Factors by which to downscale in each dimension [vertical, horizontal].\n * Expects an integer or an array of 2 integers.\n *\n * For example, `[2, 2]` will halve the input in both spatial dimension.\n * If only one integer is specified, the same window length\n * will be used for both dimensions.\n */\n poolSize?: number|[number, number];\n\n /**\n * The size of the stride in each dimension of the pooling window. Expects\n * an integer or an array of 2 integers. Integer, tuple of 2 integers, or\n * None.\n *\n * If `null`, defaults to `poolSize`.\n */\n strides?: number|[number, number];\n\n /** The padding type to use for the pooling layer. */\n padding?: PaddingMode;\n /** The data format to use for the pooling layer. */\n dataFormat?: DataFormat;\n}\n\n/**\n * Abstract class for different pooling 2D layers.\n */\nexport abstract class Pooling2D extends Layer {\n protected readonly poolSize: [number, number];\n protected readonly strides: [number, number];\n protected readonly padding: PaddingMode;\n protected readonly dataFormat: DataFormat;\n\n constructor(config: Pooling2DLayerConfig) {\n if (config.poolSize == null) {\n config.poolSize = [2, 2];\n }\n super(config);\n this.poolSize = Array.isArray(config.poolSize) ?\n config.poolSize :\n [config.poolSize, config.poolSize];\n if (config.strides == null) {\n this.strides = this.poolSize;\n } else if (Array.isArray(config.strides)) {\n if (config.strides.length !== 2) {\n throw new ValueError(\n `If the strides property of a 2D pooling layer is an Array, ` +\n `it is expected to have a length of 2, but received length ` +\n `${config.strides.length}.`);\n }\n this.strides = config.strides;\n } else {\n // `config.strides` is a number.\n this.strides = [config.strides, config.strides];\n }\n this.padding = config.padding == null ? 'valid' : config.padding;\n this.dataFormat =\n config.dataFormat == null ? 'channelsLast' : config.dataFormat;\n checkDataFormat(this.dataFormat);\n checkPaddingMode(this.padding);\n\n this.inputSpec = [new InputSpec({ndim: 4})];\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n let rows =\n this.dataFormat === 'channelsFirst' ? inputShape[2] : inputShape[1];\n let cols =\n this.dataFormat === 'channelsFirst' ? 
inputShape[3] : inputShape[2];\n rows =\n convOutputLength(rows, this.poolSize[0], this.padding, this.strides[0]);\n cols =\n convOutputLength(cols, this.poolSize[1], this.padding, this.strides[1]);\n if (this.dataFormat === 'channelsFirst') {\n return [inputShape[0], inputShape[1], rows, cols];\n } else {\n return [inputShape[0], rows, cols, inputShape[3]];\n }\n }\n\n protected abstract poolingFunction(\n inputs: Tensor, poolSize: [number, number], strides: [number, number],\n padding: PaddingMode, dataFormat: DataFormat): Tensor;\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n this.invokeCallHook(inputs, kwargs);\n return this.poolingFunction(\n getExactlyOneTensor(inputs), this.poolSize, this.strides,\n this.padding, this.dataFormat);\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {\n poolSize: this.poolSize,\n padding: this.padding,\n strides: this.strides,\n dataFormat: this.dataFormat\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n\n/**\n * Max pooling operation for spatial data.\n *\n * Input shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat=CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat=CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n */\nexport class MaxPooling2D extends Pooling2D {\n static className = 'MaxPooling2D';\n constructor(config: Pooling2DLayerConfig) {\n super(config);\n }\n\n protected poolingFunction(\n inputs: Tensor, poolSize: [number, number], strides: [number, number],\n padding: PaddingMode, dataFormat: DataFormat): Tensor {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'max');\n }\n}\nserialization.registerClass(MaxPooling2D);\n\n/**\n * Average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, rows, cols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, rows, cols]`\n *\n * Output shape\n * - If `dataFormat === CHANNEL_LAST`:\n * 4D tensor with shape:\n * `[batchSize, pooleRows, pooledCols, channels]`\n * - If `dataFormat === CHANNEL_FIRST`:\n * 4D tensor with shape:\n * `[batchSize, channels, pooleRows, pooledCols]`\n *\n * `tf.avgPool2d` is an alias.\n */\nexport class AveragePooling2D extends Pooling2D {\n static className = 'AveragePooling2D';\n constructor(config: Pooling2DLayerConfig) {\n super(config);\n }\n\n protected poolingFunction(\n inputs: Tensor, poolSize: [number, number], strides: [number, number],\n padding: PaddingMode, dataFormat: DataFormat): Tensor {\n checkDataFormat(dataFormat);\n checkPaddingMode(padding);\n return pool2d(inputs, poolSize, strides, padding, dataFormat, 'avg');\n }\n}\nserialization.registerClass(AveragePooling2D);\n\n/**\n * Abstract class for different global pooling 1D layers.\n */\nexport abstract class GlobalPooling1D extends Layer {\n constructor(config: LayerConfig) {\n super(config);\n this.inputSpec = [new InputSpec({ndim: 3})];\n }\n\n computeOutputShape(inputShape: Shape): Shape {\n return [inputShape[0], inputShape[2]];\n }\n\n call(inputs: 
Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n throw new NotImplementedError();\n }\n}\n\n/**\n * Global average pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n */\nexport class GlobalAveragePooling1D extends GlobalPooling1D {\n static className = 'GlobalAveragePooling1D';\n constructor(config: LayerConfig) {\n super(config);\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.mean(input, 1);\n });\n }\n}\nserialization.registerClass(GlobalAveragePooling1D);\n\n/**\n * Global max pooling operation for temporal data.\n *\n * Input Shape: 3D tensor with shape: `[batchSize, steps, features]`.\n *\n * Output Shape:2D tensor with shape: `[batchSize, features]`.\n */\nexport class GlobalMaxPooling1D extends GlobalPooling1D {\n static className = 'GlobalMaxPooling1D';\n constructor(config: LayerConfig) {\n super(config);\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n return tfc.max(input, 1);\n });\n }\n}\nserialization.registerClass(GlobalMaxPooling1D);\n\nexport interface GlobalPooling2DLayerConfig extends LayerConfig {\n /**\n * One of `CHANNEL_LAST` (default) or `CHANNEL_FIRST`.\n *\n * The ordering of the dimensions in the inputs. `CHANNEL_LAST` corresponds\n * to inputs with shape `[batch, height, width, channels[` while\n * `CHANNEL_FIRST` corresponds to inputs with shape\n * `[batch, channels, height, width]`.\n */\n dataFormat?: DataFormat;\n}\n\n/**\n * Abstract class for different global pooling 2D layers.\n */\nexport abstract class GlobalPooling2D extends Layer {\n protected dataFormat: DataFormat;\n constructor(config: GlobalPooling2DLayerConfig) {\n super(config);\n this.dataFormat =\n config.dataFormat == null ? 
'channelsLast' : config.dataFormat;\n checkDataFormat(this.dataFormat);\n this.inputSpec = [new InputSpec({ndim: 4})];\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = inputShape as Shape;\n if (this.dataFormat === 'channelsLast') {\n return [inputShape[0], inputShape[3]];\n } else {\n return [inputShape[0], inputShape[1]];\n }\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n throw new NotImplementedError();\n }\n\n getConfig(): serialization.ConfigDict {\n const config = {dataFormat: this.dataFormat};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\n\n/**\n * Global average pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n */\nexport class GlobalAveragePooling2D extends GlobalPooling2D {\n static className = 'GlobalAveragePooling2D';\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.mean(input, [1, 2]);\n } else {\n return tfc.mean(input, [2, 3]);\n }\n });\n }\n}\nserialization.registerClass(GlobalAveragePooling2D);\n\n/**\n * Global max pooling operation for spatial data.\n *\n * Input shape:\n * - If `dataFormat` is `CHANNEL_LAST`:\n * 4D tensor with shape: `[batchSize, rows, cols, channels]`.\n * - If `dataFormat` is `CHANNEL_FIRST`:\n * 4D tensor with shape: `[batchSize, channels, rows, cols]`.\n *\n * Output shape:\n * 2D tensor with shape: `[batchSize, channels]`.\n */\nexport class GlobalMaxPooling2D extends GlobalPooling2D {\n static className = 'GlobalMaxPooling2D';\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n const input = getExactlyOneTensor(inputs);\n if (this.dataFormat === 'channelsLast') {\n return tfc.max(input, [1, 2]);\n } else {\n return tfc.max(input, [2, 3]);\n }\n });\n }\n}\nserialization.registerClass(GlobalMaxPooling2D);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * TensorFlow.js Layers: Recurrent Neural Network Layers.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {DataType, serialization, Tensor, tidy, util} from '@tensorflow/tfjs-core';\n\nimport {Activation, ActivationIdentifier, getActivation, serializeActivation} from '../activations';\nimport {getScalar} from '../backend/state';\nimport * as K from '../backend/tfjs_backend';\nimport {Constraint, ConstraintIdentifier, getConstraint, serializeConstraint} from '../constraints';\nimport {InputSpec, SymbolicTensor} from '../engine/topology';\nimport {Layer, LayerConfig} from '../engine/topology';\nimport {AttributeError, NotImplementedError, ValueError} from '../errors';\nimport {getInitializer, Initializer, InitializerIdentifier, Ones, serializeInitializer} from '../initializers';\nimport {getRegularizer, Regularizer, RegularizerIdentifier, serializeRegularizer} from '../regularizers';\nimport {Kwargs, RnnStepFunction, Shape} from '../types';\nimport * 
as math_utils from '../utils/math_utils';\nimport {getExactlyOneShape, getExactlyOneTensor, isArrayOfShapes} from '../utils/types_utils';\nimport {batchGetValue, batchSetValue, LayerVariable} from '../variables';\n\nimport {deserialize} from './serialization';\n\n\n/**\n * Standardize `apply()` args to a single list of tensor inputs.\n *\n * When running a model loaded from file, the input tensors `initialState` and\n * `constants` are passed to `RNN.apply()` as part of `inputs` instead of the\n * dedicated kwargs fields. `inputs` consists of\n * `[inputs, initialState0, initialState1, ..., constant0, constant1]` in this\n * case.\n * This method makes sure that arguments are\n * separated and that `initialState` and `constants` are `Array`s of tensors\n * (or None).\n *\n * @param inputs Tensor or `Array` of tensors.\n * @param initialState Tensor or `Array` of tensors or `null`/`undefined`.\n * @param constants Tensor or `Array` of tensors or `null`/`undefined`.\n * @returns An object consisting of\n * inputs: A tensor.\n * initialState: `Array` of tensors or `null`.\n * constants: `Array` of tensors or `null`.\n * @throws ValueError, if `inputs` is an `Array` but either `initialState` or\n * `constants` is provided.\n */\nexport function standardizeArgs(\n inputs: Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[],\n initialState: Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[],\n constants: Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[],\n numConstants?: number): {\n inputs: Tensor|SymbolicTensor,\n initialState: Tensor[]|SymbolicTensor[],\n constants: Tensor[]|SymbolicTensor[]\n} {\n if (Array.isArray(inputs)) {\n if (initialState != null || constants != null) {\n throw new ValueError(\n 'When inputs is an array, neither initialState or constants ' +\n 'should be provided');\n }\n if (numConstants != null) {\n constants = inputs.slice(inputs.length - numConstants, inputs.length);\n inputs = inputs.slice(0, inputs.length - numConstants);\n }\n if (inputs.length > 1) {\n initialState = inputs.slice(1, inputs.length);\n }\n inputs = inputs[0];\n }\n\n function toListOrNull(x: Tensor|Tensor[]|SymbolicTensor|\n SymbolicTensor[]): Tensor[]|SymbolicTensor[] {\n if (x == null || Array.isArray(x)) {\n return x as Tensor[] | SymbolicTensor[];\n } else {\n return [x] as Tensor[] | SymbolicTensor[];\n }\n }\n\n initialState = toListOrNull(initialState);\n constants = toListOrNull(constants);\n\n return {inputs, initialState, constants};\n}\n\n/**\n * Iterates over the time dimension of a tensor.\n *\n * @param stepFunction RNN step function.\n * Parameters:\n * inputs: tensor with shape `[samples, ...]` (no time dimension),\n * representing input for the batch of samples at a certain time step.\n * states: an Array of tensors.\n * Returns:\n * outputs: tensor with shape `[samples, outputDim]` (no time dimension).\n * newStates: list of tensors, same length and shapes as `states`. 
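// A toy sketch of the step-function contract described above (the recurrence here
// is illustrative, not part of the library): each call maps the input at one time
// step plus the current states to `[output, newStates]`, with the first new state
// being the output itself.
import * as tf from '@tensorflow/tfjs';

const step = (inputs: tf.Tensor, states: tf.Tensor[]): [tf.Tensor, tf.Tensor[]] => {
  const output = tf.add(inputs, states[0]);  // toy recurrence: output = input + previous state
  return [output, [output]];
};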
The first\n * state in the list must be the output tensor at the previous timestep.\n * @param inputs Tensor of temporal data of shape `[samples, time, ...]` (at\n * least 3D).\n * @param initialStates Tensor with shape `[samples, outputDim]` (no time\n * dimension), containing the initial values of the states used in the step\n * function.\n * @param goBackwards If `true`, do the iteration over the time dimension in\n * reverse order and return the reversed sequence.\n * @param mask Binary tensor with shape `[sample, time, 1]`, with a zero for\n * every element that is masked.\n * @param constants An Array of constant values passed at each step.\n * @param unroll Whether to unroll the RNN or to use a symbolic loop. *Not*\n * applicable to this imperative deeplearn.js backend. Its value is ignored.\n * @param needPerStepOutputs Whether the per-step outputs are to be\n * concatenated into a single tensor and returned (as the second return\n * value). Default: `false`. This arg is included so that the relatively\n * expensive concatenation of the stepwise outputs can be omitted unless\n * the stepwise outputs need to be kept (e.g., for an LSTM layer of which\n * `returnSequence` is `true`.)\n * @returns An Array: `[lastOutput, outputs, newStates]`.\n * lastOutput: the lastest output of the RNN, of shape `[samples, ...]`.\n * outputs: tensor with shape `[samples, time, ...]` where each entry\n * `output[s, t]` is the output of the step function at time `t` for sample\n * `s`. This return value is provided if and only if the\n * `needPerStepOutputs` is set as `true`. If it is set as `false`, this\n * return value will be `undefined`.\n * newStates: Array of tensors, latest states returned by the step function,\n * of shape `(samples, ...)`.\n * @throws ValueError If input dimension is less than 3.\n *\n * TODO(nielsene): This needs to be tidy-ed.\n */\nexport function rnn(\n stepFunction: RnnStepFunction, inputs: Tensor, initialStates: Tensor[],\n goBackwards = false, mask?: Tensor, constants?: Tensor[], unroll = false,\n needPerStepOutputs = false): [Tensor, Tensor, Tensor[]] {\n const ndim = inputs.shape.length;\n if (ndim < 3) {\n throw new ValueError(`Input should be at least 3D, but is ${ndim}D.`);\n }\n\n // Transpose to time-major, i.e., from [batch, time, ...] to [time, batch,\n // ...].\n const axes = [1, 0].concat(math_utils.range(2, ndim));\n inputs = tfc.transpose(inputs, axes);\n\n if (mask != null) {\n throw new NotImplementedError(\n 'The rnn() function of the deeplearn.js backend does not support ' +\n 'masking yet.');\n }\n\n if (constants != null) {\n throw new NotImplementedError(\n 'The rnn() functoin of the deeplearn.js backend does not support ' +\n 'constants yet.');\n }\n\n // Porting Note: the unroll option is ignored by the imperative backend.\n if (unroll) {\n console.warn(\n 'Backend rnn(): the unroll = true option is not applicable to the ' +\n 'imperative deeplearn.js backend.');\n }\n\n if (goBackwards) {\n inputs = tfc.reverse(inputs, 0);\n }\n\n // Porting Note: PyKeras with TensorFlow backend uses a symbolic loop\n // (tf.while_loop). But for the imperative deeplearn.js backend, we just\n // use the usual TypeScript control flow to iterate over the time steps in\n // the inputs.\n // Porting Note: PyKeras patches a \"_use_learning_phase\" attribute to\n // outputs.\n // This is not idiomatic in TypeScript. 
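// A concrete sketch (shapes illustrative, `tf` assumed to be @tensorflow/tfjs) of
// the batch-major -> time-major transpose performed above for a rank-3 input, so
// that the loop below can slice out one time step at a time along axis 0.
import * as tf from '@tensorflow/tfjs';

const batchMajor = tf.zeros([32, 10, 100]);             // [samples, timeSteps, inputDim]
const timeMajor = tf.transpose(batchMajor, [1, 0, 2]);  // [timeSteps, samples, inputDim]
console.log(timeMajor.shape);                           // [10, 32, 100]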
The info regarding whether we are\n // in a learning (i.e., training) phase for RNN is passed in a different\n // way.\n // TODO(cais): Determine in what exact way the learning phase information\n // will be passed.\n\n let outputs: Tensor;\n let lastOutput: Tensor;\n let states = initialStates;\n const timeSteps = inputs.shape[0];\n for (let t = 0; t < timeSteps; ++t) {\n let currentInput = K.sliceAlongFirstAxis(inputs, t, 1);\n currentInput = currentInput.reshape(currentInput.shape.slice(1));\n const stepOutputs = tfc.tidy(() => stepFunction(currentInput, states));\n lastOutput = stepOutputs[0];\n if (needPerStepOutputs) {\n if (t === 0) {\n outputs = lastOutput.expandDims(1);\n } else {\n const newOutputs = tfc.concat([outputs, lastOutput.expandDims(1)], 1);\n outputs.dispose();\n outputs = newOutputs;\n }\n }\n // TODO(soergel): Call K.concatenate() to perform only one concatenation\n // at the end, once the backend function is available.\n states = stepOutputs[1];\n }\n return [lastOutput, outputs, states];\n}\n\n\nexport interface BaseRNNLayerConfig extends LayerConfig {\n /**\n * A RNN cell instance. A RNN cell is a class that has:\n * - a `call()` method, which takes `[Tensor, Tensor]` as the\n * first input argument. The first item is the input at time t, and\n * second item is the cell state at time t.\n * The `call()` method returns `[outputAtT, statesAtTPlus1]`.\n * The `call()` method of the cell can also take the argument `constants`,\n * see section \"Note on passing external constants\" below.\n * Porting Node: PyKeras overrides the `call()` signature of RNN cells,\n * which are Layer subtypes, to accept two arguments. tfjs-layers does\n * not do such overriding. Instead we preseve the `call()` signature,\n * which due to its `Tensor|Tensor[]` argument and return value, is\n * flexible enough to handle the inputs and states.\n * - a `stateSize` attribute. This can be a single integer (single state)\n * in which case it is the size of the recurrent state (which should be\n * the same as the size of the cell output). This can also be an Array of\n * integers (one size per state). In this case, the first entry\n * (`stateSize[0]`) should be the same as the size of the cell output.\n * It is also possible for `cell` to be a list of RNN cell instances, in which\n * case the cells get stacked on after the other in the RNN, implementing an\n * efficient stacked RNN.\n */\n cell?: RNNCell|RNNCell[];\n\n /**\n * Whether to return the last output in the output sequence, or the full\n * sequence.\n */\n returnSequences?: boolean;\n\n /**\n * Whether to return the last state in addition to the output.\n */\n returnState?: boolean;\n\n /**\n * If `true`, process the input sequence backwards and return the reversed\n * sequence (default: `false`).\n */\n goBackwards?: boolean;\n\n /**\n * If `true`, the last state for each sample at index i in a batch will be\n * used as initial state of the sample of index i in the following batch\n * (default: `false`).\n *\n * You can set RNN layers to be \"stateful\", which means that the states\n * computed for the samples in one batch will be reused as initial states\n * for the samples in the next batch. 
This assumes a one-to-one mapping\n * between samples in different successive batches.\n *\n * To enable \"statefulness\":\n * - specify `stateful: true` in the layer constructor.\n * - specify a fixed batch size for your model, by passing\n * - if sequential model:\n * `batchInputShape: [...]` to the first layer in your model.\n * - else for functional model with 1 or more Input layers:\n * `batchShape: [...]` to all the first layers in your model.\n * This is the expected shape of your inputs\n * *including the batch size*.\n * It should be a tuple of integers, e.g., `[32, 10, 100]`.\n * - specify `shuffle: false` when calling `Model.fit()`.\n *\n * To reset the state of your model, call `resetStates()` on either the\n * specific layer or on the entire model.\n */\n stateful?: boolean;\n // TODO(cais): Explore whether we can warn users when they fail to set\n // `shuffle: false` when training a model consisting of stateful RNNs\n // and any stateful Layers in general.\n\n /**\n * If `true`, the network will be unrolled, else a symbolic loop will be\n * used. Unrolling can speed-up a RNN, although it tends to be more memory-\n * intensive. Unrolling is only suitable for short sequences (default:\n * `false`).\n * Porting Note: tfjs-layers has an imperative backend. RNNs are executed with\n * normal TypeScript control flow. Hence this property is inapplicable and\n * ignored in tfjs-layers.\n */\n unroll?: boolean;\n\n /**\n * Dimensionality of the input (integer).\n * This option (or alternatively, the option `inputShape`) is required when\n * this layer is used as the first layer in a model.\n */\n inputDim?: number;\n\n /**\n * Length of the input sequences, to be specified when it is constant.\n * This argument is required if you are going to connect `Flatten` then\n * `Dense` layers upstream (without it, the shape of the dense outputs cannot\n * be computed). Note that if the recurrent layer is not the first layer in\n * your model, you would need to specify the input length at the level of the\n * first layer (e.g., via the `inputShape` option).\n */\n inputLength?: number;\n}\n\n/**\n * RNNLayerConfig is identical to BaseRNNLayerConfig, except it makes the\n * `cell` property required. This interface is to be used with constructors\n * of concrete RNN layer sbutypes.\n */\nexport interface RNNLayerConfig extends BaseRNNLayerConfig {\n cell: RNNCell|RNNCell[];\n}\n\n/**\n * Base class for recurrent layers.\n *\n * Input shape:\n * 3D tensor with shape `[batchSize, timeSteps, inputDim]`.\n *\n * Output shape:\n * - if `returnState`, an Array of tensors (i.e., `tf.Tensor`s). The first\n * tensor is the output. The remaining tensors are the states at the\n * last time step, each with shape `[batchSize, units]`.\n * - if `returnSequences`, the output will have shape\n * `[batchSize, timeSteps, units]`.\n * - else, the output will have shape `[batchSize, units]`.\n *\n * Masking:\n * This layer supports masking for input data with a variable number\n * of timesteps. To introduce masks to your data,\n * use an embedding layer with the `mask_zero` parameter\n * set to `True`.\n *\n * Notes on using statefulness in RNNs:\n * You can set RNN layers to be 'stateful', which means that the states\n * computed for the samples in one batch will be reused as initial states\n * for the samples in the next batch. 
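// A hedged setup sketch for the "stateful" recipe described above (layer size,
// sequence length, and the xs/ys variables are illustrative): fix the batch size
// via batchInputShape and keep batches aligned across epochs via shuffle: false.
import * as tf from '@tensorflow/tfjs';

const model = tf.sequential();
model.add(tf.layers.simpleRNN({
  units: 8,
  stateful: true,
  batchInputShape: [32, 10, 100],  // [batchSize, timeSteps, inputDim]
}));
// Later: compile the model, then
//   await model.fit(xs, ys, {shuffle: false});  // keep batch i aligned across epochs
//   model.resetStates();                        // clear the carried-over states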
This assumes a one-to-one mapping\n * between samples in different successive batches.\n *\n * To enable statefulness:\n * - specify `stateful: true` in the layer constructor.\n * - specify a fixed batch size for your model, by passing\n * if sequential model:\n * `batchInputShape=[...]` to the first layer in your model.\n * else for functional model with 1 or more Input layers:\n * `batchShape=[...]` to all the first layers in your model.\n * This is the expected shape of your inputs *including the batch size*.\n * It should be a tuple of integers, e.g. `(32, 10, 100)`.\n * - specify `shuffle=False` when calling fit().\n *\n * To reset the states of your model, call `.resetStates()` on either\n * a specific layer, or on your entire model.\n *\n * Note on specifying the initial state of RNNs\n * You can specify the initial state of RNN layers symbolically by\n * calling them with the option `initialState`. The value of\n * `initialState` should be a tensor or list of tensors representing\n * the initial state of the RNN layer.\n *\n * You can specify the initial state of RNN layers numerically by\n * calling `resetStates` with the keyword argument `states`. The value of\n * `states` should be a numpy array or list of numpy arrays representing\n * the initial state of the RNN layer.\n *\n * Note on passing external constants to RNNs\n * You can pass \"external\" constants to the cell using the `constants`\n * keyword argument of `RNN.call` method. This requires that the `cell.call`\n * method accepts the same keyword argument `constants`. Such constants\n * can be used to conditon the cell transformation on additional static inputs\n * (not changing over time), a.k.a an attention mechanism.\n */\nexport class RNN extends Layer {\n static className = 'RNN';\n public readonly cell: RNNCell;\n public readonly returnSequences: boolean;\n public readonly returnState: boolean;\n public readonly goBackwards: boolean;\n public readonly unroll: boolean;\n\n public stateSpec: InputSpec[];\n public states: Tensor[];\n\n // NOTE(cais): For stateful RNNs, the old states cannot be disposed right\n // away when new states are set, because the old states may need to be used\n // later for backpropagation through time (BPTT) and other purposes. So we\n // keep them here for final disposal when the state is reset completely\n // (i.e., through no-arg call to `resetStates()`).\n private keptStates: Tensor[][];\n\n private numConstants: number;\n\n constructor(config: RNNLayerConfig) {\n super(config);\n let cell: RNNCell;\n if (config.cell == null) {\n throw new ValueError(\n 'cell property is missing for the constructor of RNN.');\n } else if (Array.isArray(config.cell)) {\n cell = new StackedRNNCells({cells: config.cell});\n } else {\n cell = config.cell;\n }\n if ((cell as RNNCell).stateSize == null) {\n throw new ValueError(\n 'The RNN cell should have an attribute `stateSize` (tuple of ' +\n 'integers, one integer per RNN state).');\n }\n this.cell = cell;\n this.returnSequences =\n config.returnSequences == null ? false : config.returnSequences;\n this.returnState = config.returnState == null ? false : config.returnState;\n this.goBackwards = config.goBackwards == null ? false : config.goBackwards;\n this._stateful = config.stateful == null ? false : config.stateful;\n this.unroll = config.unroll == null ? 
false : config.unroll;\n\n this.supportsMasking = true;\n this.inputSpec = [new InputSpec({ndim: 3})];\n this.stateSpec = null;\n this.states = null;\n // TODO(cais): Add constantsSpec and numConstants.\n this.numConstants = null;\n // TODO(cais): Look into the use of initial_state in the kwargs of the\n // constructor.\n\n this.keptStates = [];\n }\n\n // Porting Note: This is the equivalent of `RNN.states` property getter in\n // PyKeras.\n getStates(): Tensor[] {\n if (this.states == null) {\n const numStates =\n Array.isArray(this.cell.stateSize) ? this.cell.stateSize.length : 1;\n return math_utils.range(0, numStates).map(x => null);\n } else {\n return this.states;\n }\n }\n\n // Porting Note: This is the equivalent of the `RNN.states` property setter in\n // PyKeras.\n setStates(states: Tensor[]): void {\n this.states = states;\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n if (isArrayOfShapes(inputShape)) {\n inputShape = (inputShape as Shape[])[0];\n }\n inputShape = inputShape as Shape;\n\n // TODO(cais): Remove the casting once stacked RNN cells become supported.\n let stateSize = this.cell.stateSize;\n if (!Array.isArray(stateSize)) {\n stateSize = [stateSize];\n }\n const outputDim = stateSize[0];\n let outputShape: Shape|Shape[];\n if (this.returnSequences) {\n outputShape = [inputShape[0], inputShape[1], outputDim];\n } else {\n outputShape = [inputShape[0], outputDim];\n }\n\n if (this.returnState) {\n const stateShape: Shape[] = [];\n for (const dim of stateSize) {\n stateShape.push([inputShape[0], dim]);\n }\n return [outputShape].concat(stateShape);\n } else {\n return outputShape;\n }\n }\n\n computeMask(inputs: Tensor|Tensor[], mask?: Tensor|Tensor[]): Tensor {\n throw new NotImplementedError(\n 'computeMask has not been implemented for RNN yet');\n }\n\n public build(inputShape: Shape|Shape[]): void {\n // Note inputShape will be an Array of Shapes of initial states and\n // constants if these are passed in apply().\n const constantShape: Shape[] = null;\n if (this.numConstants != null) {\n throw new NotImplementedError(\n 'Constants support is not implemented in RNN yet.');\n }\n\n if (isArrayOfShapes(inputShape)) {\n inputShape = (inputShape as Shape[])[0];\n }\n inputShape = inputShape as Shape;\n\n const batchSize: number = this.stateful ? inputShape[0] : null;\n const inputDim = inputShape[inputShape.length - 1];\n this.inputSpec[0] = new InputSpec({shape: [batchSize, null, inputDim]});\n\n // Allow cell (if RNNCell Layer) to build before we set or validate\n // stateSpec.\n const stepInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (constantShape != null) {\n throw new NotImplementedError(\n 'Constants support is not implemented in RNN yet.');\n } else {\n this.cell.build(stepInputShape);\n }\n\n // Set or validate stateSpec.\n let stateSize: number[];\n if (Array.isArray((this.cell as RNNCell).stateSize)) {\n stateSize = this.cell.stateSize as number[];\n } else {\n stateSize = [this.cell.stateSize as number];\n }\n\n if (this.stateSpec != null) {\n if (!util.arraysEqual(\n this.stateSpec.map(spec => spec.shape[spec.shape.length - 1]),\n stateSize)) {\n throw new ValueError(\n `An initialState was passed that is not compatible with ` +\n `cell.stateSize. 
Received stateSpec=${this.stateSpec}; ` +\n `However cell.stateSize is ${this.cell.stateSize}`);\n }\n } else {\n this.stateSpec =\n stateSize.map(dim => new InputSpec({shape: [null, dim]}));\n }\n if (this.stateful) {\n this.resetStates();\n }\n }\n\n /**\n * Reset the state tensors of the RNN.\n *\n * If the `states` argument is `undefined` or `null`, will set the\n * state tensor(s) of the RNN to all-zero tensors of the appropriate\n * shape(s).\n *\n * If `states` is provided, will set the state tensors of the RNN to its\n * value.\n *\n * @param states Optional externally-provided initial states.\n * @param training Whether this call is done during training. For stateful\n * RNNs, this affects whether the old states are kept or discarded. In\n * particular, if `training` is `true`, the old states will be kept so\n * that subsequent backpropgataion through time (BPTT) may work properly.\n * Else, the old states will be discarded.\n */\n resetStates(states?: Tensor|Tensor[], training = false): void {\n tidy(() => {\n if (!this.stateful) {\n throw new AttributeError(\n 'Cannot call resetStates() on an RNN Layer that is not stateful.');\n }\n const batchSize = this.inputSpec[0].shape[0];\n if (batchSize == null) {\n throw new ValueError(\n 'If an RNN is stateful, it needs to know its batch size. Specify ' +\n 'the batch size of your input tensors: \\n' +\n '- If using a Sequential model, specify the batch size by ' +\n 'passing a `batchInputShape` option to your first layer.\\n' +\n '- If using the functional API, specify the batch size by ' +\n 'passing a `batchShape` option to your Input layer.');\n }\n // Initialize state if null.\n if (this.states == null) {\n if (Array.isArray(this.cell.stateSize)) {\n this.states =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n } else {\n this.states = [tfc.zeros([batchSize, this.cell.stateSize])];\n }\n } else if (states == null) {\n // Dispose old state tensors.\n tfc.dispose(this.states);\n // For stateful RNNs, fully dispose kept old states.\n if (this.keptStates != null) {\n tfc.dispose(this.keptStates);\n this.keptStates = [];\n }\n\n if (Array.isArray(this.cell.stateSize)) {\n this.states =\n this.cell.stateSize.map(dim => tfc.zeros([batchSize, dim]));\n } else {\n this.states[0] = tfc.zeros([batchSize, this.cell.stateSize]);\n }\n } else {\n if (!Array.isArray(states)) {\n states = [states];\n }\n if (states.length !== this.states.length) {\n throw new ValueError(\n `Layer ${this.name} expects ${this.states.length} state(s), ` +\n `but it received ${states.length} state value(s). Input ` +\n `received: ${states}`);\n }\n\n if (training === true) {\n // Store old state tensors for complete disposal later, i.e., during\n // the next no-arg call to this method. 
We do not dispose the old\n // states immediately because that BPTT (among other things) require\n // them.\n this.keptStates.push(this.states.slice());\n } else {\n tfc.dispose(this.states);\n }\n\n for (let index = 0; index < this.states.length; ++index) {\n const value = states[index];\n const dim = Array.isArray(this.cell.stateSize) ?\n this.cell.stateSize[index] :\n this.cell.stateSize;\n const expectedShape = [batchSize, dim];\n if (!util.arraysEqual(value.shape, expectedShape)) {\n throw new ValueError(\n `State ${index} is incompatible with layer ${this.name}: ` +\n `expected shape=${expectedShape}, received shape=${\n value.shape}`);\n }\n this.states[index] = value;\n }\n }\n this.states.forEach(state => tfc.keep(state));\n });\n }\n\n apply(\n inputs: Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[],\n kwargs?: Kwargs): Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[] {\n // TODO(cais): Figure out whether initialState is in kwargs or inputs.\n let initialState: Tensor[]|SymbolicTensor[] =\n kwargs == null ? null : kwargs['initialState'];\n let constants: Tensor[]|SymbolicTensor[] =\n kwargs == null ? null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n\n const standardized =\n standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs;\n initialState = standardized.initialState;\n constants = standardized.constants;\n\n // If any of `initial_state` or `constants` are specified and are\n // `tf.SymbolicTensor`s, then add them to the inputs and temporarily modify\n // the input_spec to include them.\n\n let additionalInputs: Array<Tensor|SymbolicTensor> = [];\n let additionalSpecs: InputSpec[] = [];\n if (initialState != null) {\n kwargs['initialState'] = initialState;\n additionalInputs = additionalInputs.concat(initialState);\n this.stateSpec = [];\n for (const state of initialState) {\n this.stateSpec.push(new InputSpec({shape: state.shape}));\n }\n // TODO(cais): Use the following instead.\n // this.stateSpec = initialState.map(state => new InputSpec({shape:\n // state.shape}));\n additionalSpecs = additionalSpecs.concat(this.stateSpec);\n }\n if (constants != null) {\n kwargs['constants'] = constants;\n additionalInputs = additionalInputs.concat(constants);\n // TODO(cais): Add this.constantsSpec.\n this.numConstants = constants.length;\n }\n\n const isTensor = additionalInputs[0] instanceof SymbolicTensor;\n if (isTensor) {\n // Compute full input spec, including state and constants.\n const fullInput =\n [inputs].concat(additionalInputs) as Tensor[] | SymbolicTensor[];\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call with temporarily replaced inputSpec.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output = super.apply(fullInput, kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n } else {\n return super.apply(inputs, kwargs);\n }\n }\n\n // tslint:disable-next-line:no-any\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n // Input shape: `[samples, time (padded with zeros), input_dim]`.\n // Note that the .build() method of subclasses **must** define\n // this.inputSpec and this.stateSpec owith complete input shapes.\n return tidy(() => {\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n let initialState: Tensor[] =\n kwargs == null ? 
null : kwargs['initialState'];\n\n inputs = getExactlyOneTensor(inputs);\n if (initialState == null) {\n if (this.stateful) {\n initialState = this.states;\n } else {\n initialState = this.getInitialState(inputs);\n }\n }\n\n if (mask != null) {\n throw new NotImplementedError('Masking is not implemented for RNN yet');\n }\n\n const numStates =\n Array.isArray(this.cell.stateSize) ? this.cell.stateSize.length : 1;\n if (initialState.length !== numStates) {\n throw new ValueError(\n `RNN Layer has ${numStates} state(s) but was passed ` +\n `${initialState.length} initial state(s).`);\n }\n if (this.unroll) {\n console.warn(\n 'Ignoring unroll = true for RNN layer, due to imperative backend.');\n }\n\n const cellCallKwargs: Kwargs = {training};\n\n // TODO(cais): Add support for constants.\n const step = (inputs: Tensor, states: Tensor[]) => {\n // `inputs` and `states` are concatenated to form a single `Array` of\n // `tf.Tensor`s as the input to `cell.call()`.\n const outputs =\n this.cell.call([inputs].concat(states), cellCallKwargs) as Tensor[];\n // Marshall the return value into output and new states.\n return [outputs[0], outputs.slice(1)] as [Tensor, Tensor[]];\n };\n\n // TODO(cais): Add support for constants.\n // TODO(cais): Add support for masks.\n\n const rnnOutputs =\n rnn(step, inputs, initialState, this.goBackwards, null, null,\n this.unroll, this.returnSequences);\n const lastOutput = rnnOutputs[0];\n const outputs = rnnOutputs[1];\n const states = rnnOutputs[2];\n\n if (this.stateful) {\n this.resetStates(states, training);\n }\n\n const output = this.returnSequences ? outputs : lastOutput;\n\n // TODO(cais): Porperty set learning phase flag.\n\n if (this.returnState) {\n return [output].concat(states);\n } else {\n return output;\n }\n });\n }\n\n getInitialState(inputs: Tensor): Tensor[] {\n return tidy(() => {\n // Build an all-zero tensor of shape [samples, outputDim].\n // [Samples, timeSteps, inputDim].\n let initialState = tfc.zeros(inputs.shape);\n // [Samples].\n initialState = tfc.sum(initialState, [1, 2]);\n initialState = K.expandDims(initialState); // [Samples, 1].\n\n if (Array.isArray(this.cell.stateSize)) {\n return this.cell.stateSize.map(\n dim => dim > 1 ? K.tile(initialState, [1, dim]) : initialState);\n } else {\n return this.cell.stateSize > 1 ?\n [K.tile(initialState, [1, this.cell.stateSize])] :\n [initialState];\n }\n });\n }\n\n get trainableWeights(): LayerVariable[] {\n if (!this.trainable) {\n return [];\n }\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n return this.cell.trainableWeights;\n }\n\n get nonTrainableWeights(): LayerVariable[] {\n // Porting Note: In TypeScript, `this` is always an instance of `Layer`.\n if (!this.trainable) {\n return this.cell.weights;\n }\n return this.cell.nonTrainableWeights;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n returnSequences: this.returnSequences,\n returnState: this.returnState,\n goBackwards: this.goBackwards,\n stateful: this.stateful,\n unroll: this.unroll,\n };\n if (this.numConstants != null) {\n config.numConstants = this.numConstants;\n }\n const cellConfig = this.cell.getConfig();\n config.cell = {\n className: this.cell.getClassName(),\n config: cellConfig,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(RNN);\n\n/**\n * An RNNCell layer.\n */\n// Porting Note: This is a common parent class for RNN cells. 
There is no\n// equivalent of this in PyKeras. Having a common parent class forgoes the\n// need for `has_attr(cell, ...)` checks or its TypeScript equivalent.\n/** @doc {heading: 'Layers', subheading: 'Classes'} */\nexport abstract class RNNCell extends Layer {\n /**\n * Size(s) of the states.\n * For RNN cells with only a single state, this is a single integer.\n */\n public stateSize: number|number[];\n public dropoutMask: Tensor|Tensor[];\n public recurrentDropoutMask: Tensor|Tensor[];\n}\n\nexport interface SimpleRNNCellLayerConfig extends LayerConfig {\n /**\n * units: Positive integer, dimensionality of the output space.\n */\n units: number;\n\n /**\n * Activation function to use.\n * Default: hyperbolic tangent ('tanh').\n * If you pass `null`, 'linear' activation will be applied.\n */\n activation?: ActivationIdentifier;\n\n /**\n * Whether the layer uses a bias vector.\n */\n useBias?: boolean;\n\n /**\n * Initializer for the `kernel` weights matrix, used for the linear\n * transformation of the inputs.\n */\n kernelInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the `recurrentKernel` weights matrix, used for\n * linear transformation of the recurrent state.\n */\n recurrentInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the bias vector.\n */\n biasInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Regularizer function applied to the `kernel` weights matrix.\n */\n kernelRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the `recurrent_kernel` weights matrix.\n */\n recurrentRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the bias vector.\n */\n biasRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Constraint function applied to the `kernel` weights matrix.\n */\n kernelConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Constraint function applied to the `recurrentKernel` weights matrix.\n */\n recurrentConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Constraintfunction applied to the bias vector.\n */\n biasConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Float number between 0 and 1. Fraction of the units to drop for the linear\n * transformation of the inputs.\n */\n dropout?: number;\n\n /**\n * Float number between 0 and 1. Fraction of the units to drop for the linear\n * transformation of the recurrent state.\n */\n recurrentDropout?: number;\n}\n\n/**\n * Cell class for `SimpleRNN`.\n *\n * `SimpleRNNCell` is distinct from the `RNN` subclass `SimpleRNN` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `SimpleRNN` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.simpleRNNCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `SimpleRNNCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. 
For example:\n *\n * ```js\n * const cells = [\n * tf.layers.simpleRNNCell({units: 4}),\n * tf.layers.simpleRNNCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `SimpleRNNCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `SimpleRNNCell`, use the\n * `tf.layers.simpleRNN`.\n */\nexport class SimpleRNNCell extends RNNCell {\n static className = 'SimpleRNNCell';\n readonly units: number;\n readonly activation: Activation;\n readonly useBias: boolean;\n\n readonly kernelInitializer: Initializer;\n readonly recurrentInitializer: Initializer;\n readonly biasInitializer: Initializer;\n\n readonly kernelConstraint: Constraint;\n readonly recurrentConstraint: Constraint;\n readonly biasConstraint: Constraint;\n\n readonly kernelRegularizer: Regularizer;\n readonly recurrentRegularizer: Regularizer;\n readonly biasRegularizer: Regularizer;\n\n readonly dropout: number;\n readonly recurrentDropout: number;\n\n readonly stateSize: number;\n\n kernel: LayerVariable;\n recurrentKernel: LayerVariable;\n bias: LayerVariable;\n\n readonly DEFAULT_ACTIVATION = 'tanh';\n readonly DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n readonly DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n readonly DEFAULT_BIAS_INITIALIZER: InitializerIdentifier = 'zeros';\n\n constructor(config: SimpleRNNCellLayerConfig) {\n super(config);\n this.units = config.units;\n this.activation = getActivation(\n config.activation == null ? this.DEFAULT_ACTIVATION :\n config.activation);\n this.useBias = config.useBias == null ? true : config.useBias;\n\n this.kernelInitializer = getInitializer(\n config.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(\n config.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n\n this.biasInitializer =\n getInitializer(config.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n\n this.kernelRegularizer = getRegularizer(config.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(config.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(config.biasRegularizer);\n\n this.kernelConstraint = getConstraint(config.kernelConstraint);\n this.recurrentConstraint = getConstraint(config.recurrentConstraint);\n this.biasConstraint = getConstraint(config.biasConstraint);\n\n this.dropout = math_utils.min(\n [1, math_utils.max([0, config.dropout == null ? 0 : config.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max(\n [0, config.recurrentDropout == null ? 
0 : config.recurrentDropout])\n ]);\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n\n build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n // TODO(cais): Use regularizer.\n this.kernel = this.addWeight(\n 'kernel', [inputShape[inputShape.length - 1], this.units], null,\n this.kernelInitializer, this.kernelRegularizer, true,\n this.kernelConstraint);\n this.recurrentKernel = this.addWeight(\n 'recurrent_kernel', [this.units, this.units], null,\n this.recurrentInitializer, this.recurrentRegularizer, true,\n this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight(\n 'bias', [this.units], null, this.biasInitializer,\n this.biasRegularizer, true, this.biasConstraint);\n } else {\n this.bias = null;\n }\n this.built = true;\n }\n\n // Porting Note: PyKeras' equivalent of this method takes two tensor inputs:\n // `inputs` and `states`. Here, the two tensors are combined into an\n // `Tensor[]` Array as the first input argument.\n // Similarly, PyKeras' equivalent of this method returns two values:\n // `output` and `[output]`. Here the two are combined into one length-2\n // `Tensor[]`, consisting of `output` repeated.\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = inputs as Tensor[];\n if (inputs.length !== 2) {\n throw new ValueError(\n `SimpleRNNCell expects 2 input Tensors, got ${inputs.length}.`);\n }\n let prevOutput = inputs[1];\n inputs = inputs[0];\n const training = kwargs['training'] == null ? false : kwargs['training'];\n\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask(\n () => tfc.onesLike(inputs as Tensor),\n this.dropout, training) as Tensor;\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask =\n generateDropoutMask(\n () => tfc.onesLike(prevOutput), this.recurrentDropout,\n training) as Tensor;\n }\n let h: Tensor;\n const dpMask: Tensor = this.dropoutMask as Tensor;\n const recDpMask: Tensor = this.recurrentDropoutMask as Tensor;\n if (dpMask != null) {\n h = K.dot(tfc.mul(inputs, dpMask), this.kernel.read());\n } else {\n h = K.dot(inputs, this.kernel.read());\n }\n if (this.bias != null) {\n h = K.biasAdd(h, this.bias.read());\n }\n if (recDpMask != null) {\n prevOutput = tfc.mul(prevOutput, recDpMask);\n }\n let output = tfc.add(h, K.dot(prevOutput, this.recurrentKernel.read()));\n if (this.activation != null) {\n output = this.activation.apply(output);\n }\n\n // TODO(cais): Properly set learning phase on output tensor?\n return [output, output];\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: 
serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(SimpleRNNCell);\n\nexport interface SimpleRNNLayerConfig extends BaseRNNLayerConfig {\n /**\n * Positive integer, dimensionality of the output space.\n */\n units: number;\n\n /**\n * Activation function to use.\n *\n * Defaults to hyperbolic tangent (`tanh`)\n *\n * If you pass `null`, no activation will be applied.\n */\n activation?: ActivationIdentifier;\n\n /**\n * Whether the layer uses a bias vector.\n */\n useBias?: boolean;\n\n /**\n * Initializer for the `kernel` weights matrix, used for the linear\n * transformation of the inputs.\n */\n kernelInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the `recurrentKernel` weights matrix, used for\n * linear transformation of the recurrent state.\n */\n recurrentInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Initializer for the bias vector.\n */\n biasInitializer?: InitializerIdentifier|Initializer;\n\n /**\n * Regularizer function applied to the kernel weights matrix.\n */\n kernelRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the recurrentKernel weights matrix.\n */\n recurrentRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Regularizer function applied to the bias vector.\n */\n biasRegularizer?: RegularizerIdentifier|Regularizer;\n\n /**\n * Constraint function applied to the kernel weights matrix.\n */\n kernelConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Constraint function applied to the recurrentKernel weights matrix.\n */\n recurrentConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Constraint function applied to the bias vector.\n */\n biasConstraint?: ConstraintIdentifier|Constraint;\n\n /**\n * Number between 0 and 1. Fraction of the units to drop for the linear\n * transformation of the inputs.\n */\n dropout?: number;\n\n /**\n * Number between 0 and 1. Fraction of the units to drop for the linear\n * transformation of the recurrent state.\n */\n recurrentDropout?: number;\n}\n\n/**\n * Fully-connected RNN where the output is to be fed back to input.\n *\n * This is an `RNN` layer consisting of one `SimpleRNNCell`. However, unlike\n * the underlying `SimpleRNNCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. 
For example:\n *\n * ```js\n * const rnn = tf.layers.simpleRNN({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `SimpleRNNCell`'s number of units.\n * ```\n */\nexport class SimpleRNN extends RNN {\n static className = 'SimpleRNN';\n constructor(config: SimpleRNNLayerConfig) {\n config.cell = new SimpleRNNCell(config);\n super(config as RNNLayerConfig);\n // TODO(cais): Add activityRegularizer.\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState: Tensor[] =\n kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, {mask, training, initialState});\n });\n }\n\n // TODO(cais): Research possibility of refactoring out the tedious all\n // the getters that delegate to `this.cell` below.\n get units(): number {\n return (this.cell as SimpleRNNCell).units;\n }\n\n get activation(): Activation {\n return (this.cell as SimpleRNNCell).activation;\n }\n\n get useBias(): boolean {\n return (this.cell as SimpleRNNCell).useBias;\n }\n\n get kernelInitializer(): Initializer {\n return (this.cell as SimpleRNNCell).kernelInitializer;\n }\n\n get recurrentInitializer(): Initializer {\n return (this.cell as SimpleRNNCell).recurrentInitializer;\n }\n\n get biasInitializer(): Initializer {\n return (this.cell as SimpleRNNCell).biasInitializer;\n }\n\n get kernelRegularizer(): Regularizer {\n return (this.cell as SimpleRNNCell).kernelRegularizer;\n }\n\n get recurrentRegularizer(): Regularizer {\n return (this.cell as SimpleRNNCell).recurrentRegularizer;\n }\n\n get biasRegularizer(): Regularizer {\n return (this.cell as SimpleRNNCell).biasRegularizer;\n }\n\n get kernelConstraint(): Constraint {\n return (this.cell as SimpleRNNCell).kernelConstraint;\n }\n\n get recurrentConstraint(): Constraint {\n return (this.cell as SimpleRNNCell).recurrentConstraint;\n }\n\n get biasConstraint(): Constraint {\n return (this.cell as SimpleRNNCell).biasConstraint;\n }\n\n get dropout(): number {\n return (this.cell as SimpleRNNCell).dropout;\n }\n\n get recurrentDropout(): number {\n return (this.cell as SimpleRNNCell).recurrentDropout;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n units: this.units,\n activation: serializeActivation(this.activation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: 
serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n };\n const baseConfig = super.getConfig();\n delete baseConfig['cell'];\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(SimpleRNN);\n\n// Porting Note: Since this is a superset of SimpleRNNLayerConfig, we extend\n// that interface instead of repeating the fields.\nexport interface GRUCellLayerConfig extends SimpleRNNCellLayerConfig {\n /**\n * Activation function to use for the recurrent step.\n *\n * Defaults to hard sigmoid (`hardSigmoid`).\n *\n * If `null`, no activation is applied.\n */\n recurrentActivation?: string;\n\n /**\n * Implementation mode, either 1 or 2.\n *\n * Mode 1 will structure its operations as a larger number of\n * smaller dot products and additions.\n *\n * Mode 2 will batch them into fewer, larger operations. These modes will\n * have different performance profiles on different hardware and\n * for different applications.\n *\n * Note: For superior performance, TensorFlow.js always uses implementation\n * 2, regardless of the actual value of this configuration field.\n */\n implementation?: number;\n}\n\n/**\n * Cell class for `GRU`.\n *\n * `GRUCell` is distinct from the `RNN` subclass `GRU` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `GRU` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.gruCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `GRUCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. 
For example:\n *\n * ```js\n * const cells = [\n * tf.layers.gruCell({units: 4}),\n * tf.layers.gruCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `gruCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `GRUCell`, use the\n * `tf.layers.gru`.\n */\nexport class GRUCell extends RNNCell {\n static className = 'GRUCell';\n readonly units: number;\n readonly activation: Activation;\n readonly recurrentActivation: Activation;\n readonly useBias: boolean;\n\n readonly kernelInitializer: Initializer;\n readonly recurrentInitializer: Initializer;\n readonly biasInitializer: Initializer;\n\n readonly kernelRegularizer: Regularizer;\n readonly recurrentRegularizer: Regularizer;\n readonly biasRegularizer: Regularizer;\n\n readonly kernelConstraint: Constraint;\n readonly recurrentConstraint: Constraint;\n readonly biasConstraint: Constraint;\n\n readonly dropout: number;\n readonly recurrentDropout: number;\n\n readonly stateSize: number;\n readonly implementation: number;\n\n readonly DEFAULT_ACTIVATION = 'tanh';\n readonly DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n\n readonly DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n readonly DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n readonly DEFAULT_BIAS_INITIALIZER: InitializerIdentifier = 'zeros';\n\n kernel: LayerVariable;\n recurrentKernel: LayerVariable;\n bias: LayerVariable;\n\n constructor(config: GRUCellLayerConfig) {\n super(config);\n\n this.units = config.units;\n this.activation = getActivation(\n config.activation === undefined ? this.DEFAULT_ACTIVATION :\n config.activation);\n this.recurrentActivation = getActivation(\n config.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n config.recurrentActivation);\n this.useBias = config.useBias == null ? true : config.useBias;\n\n this.kernelInitializer = getInitializer(\n config.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(\n config.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n\n this.biasInitializer =\n getInitializer(config.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n\n this.kernelRegularizer = getRegularizer(config.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(config.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(config.biasRegularizer);\n\n this.kernelConstraint = getConstraint(config.kernelConstraint);\n this.recurrentConstraint = getConstraint(config.recurrentConstraint);\n this.biasConstraint = getConstraint(config.biasConstraint);\n\n this.dropout = math_utils.min(\n [1, math_utils.max([0, config.dropout == null ? 0 : config.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max(\n [0, config.recurrentDropout == null ? 
0 : config.recurrentDropout])\n ]);\n this.implementation = config.implementation;\n this.stateSize = this.units;\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n\n public build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight(\n 'kernel', [inputDim, this.units * 3], null, this.kernelInitializer,\n this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight(\n 'recurrent_kernel', [this.units, this.units * 3], null,\n this.recurrentInitializer, this.recurrentRegularizer, true,\n this.recurrentConstraint);\n if (this.useBias) {\n this.bias = this.addWeight(\n 'bias', [this.units * 3], null, this.biasInitializer,\n this.biasRegularizer, true, this.biasConstraint);\n } else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = inputs as Tensor[];\n if (inputs.length !== 2) {\n throw new ValueError(\n `GRUCell expects 2 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n\n const training = kwargs['training'] == null ? false : kwargs['training'];\n let hTMinus1 = inputs[1]; // Previous memory state.\n inputs = inputs[0];\n\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2, regardless of the actual value of\n // config.implementation.\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask(\n () => tfc.onesLike(inputs as Tensor),\n this.dropout, training, 3) as Tensor[];\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask =\n generateDropoutMask(\n () => tfc.onesLike(hTMinus1), this.recurrentDropout, training,\n 3) as Tensor[];\n }\n const dpMask = this.dropoutMask as [Tensor, Tensor, Tensor];\n const recDpMask = this.recurrentDropoutMask as [Tensor, Tensor, Tensor];\n let z: Tensor;\n let r: Tensor;\n let hh: Tensor;\n\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let matrixX = K.dot(inputs, this.kernel.read());\n if (this.useBias) {\n matrixX = K.biasAdd(matrixX, this.bias.read());\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n\n const recurrentKernelValue = this.recurrentKernel.read();\n const [rk1, rk2] = tfc.split(\n recurrentKernelValue, [2 * this.units, this.units],\n recurrentKernelValue.rank - 1);\n const matrixInner = K.dot(hTMinus1, rk1);\n\n const [xZ, xR, xH] = tfc.split(matrixX, 3, matrixX.rank - 1);\n const [recurrentZ, recurrentR] =\n tfc.split(matrixInner, 2, matrixInner.rank - 1);\n z = this.recurrentActivation.apply(tfc.add(xZ, recurrentZ));\n r = this.recurrentActivation.apply(tfc.add(xR, recurrentR));\n\n const recurrentH = K.dot(tfc.mul(r, hTMinus1), rk2);\n hh = this.activation.apply(tfc.add(xH, recurrentH));\n\n const h = tfc.add(\n tfc.mul(z, hTMinus1), tfc.mul(tfc.add(getScalar(1), tfc.neg(z)), hh));\n // TODO(cais): Add use_learning_phase flag properly.\n return [h, h];\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n units: this.units,\n activation: serializeActivation(this.activation),\n 
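// The fused ("implementation 2") GRU step in `GRUCell.call()` above, with
// dropout masks omitted, σ = recurrentActivation (hardSigmoid by default)
// and act = activation (tanh by default), amounts to:
//
//   [xZ, xR, xH] = split( x_t · kernel + bias, 3 )        // kernel: [inputDim, 3*units]
//   [rZ, rR]     = split( h_{t-1} · U[:, :2*units], 2 )   // U = recurrentKernel
//   z   = σ(xZ + rZ)
//   r   = σ(xR + rR)
//   hh  = act( xH + (r ∘ h_{t-1}) · U[:, 2*units:] )
//   h_t = z ∘ h_{t-1} + (1 - z) ∘ hh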
recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(GRUCell);\n\n// Porting Note: Since this is a superset of SimpleRNNLayerConfig, we inherit\n// from that interface instead of repeating the fields here.\nexport interface GRULayerConfig extends SimpleRNNLayerConfig {\n /**\n * Activation function to use for the recurrent step.\n *\n * Defaults to hard sigmoid (`hardSigmoid`).\n *\n * If `null`, no activation is applied.\n */\n recurrentActivation?: string;\n\n /**\n * Implementation mode, either 1 or 2.\n *\n * Mode 1 will structure its operations as a larger number of\n * smaller dot products and additions.\n *\n * Mode 2 will batch them into fewer, larger operations. These modes will\n * have different performance profiles on different hardware and\n * for different applications.\n *\n * Note: For superior performance, TensorFlow.js always uses implementation\n * 2, regardless of the actual value of this configuration field.\n */\n implementation?: number;\n}\n\n/**\n * Gated Recurrent Unit - Cho et al. 2014.\n *\n * This is an `RNN` layer consisting of one `GRUCell`. However, unlike\n * the underlying `GRUCell`, the `apply` method of `SimpleRNN` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. For example:\n *\n * ```js\n * const rnn = tf.layers.gru({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `GRUCell`'s number of units.\n */\nexport class GRU extends RNN {\n static className = 'GRU';\n constructor(config: GRULayerConfig) {\n if (config.implementation === 0) {\n console.warn(\n '`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. 
Please update your layer call.');\n }\n config.cell = new GRUCell(config);\n super(config as RNNLayerConfig);\n // TODO(cais): Add activityRegularizer.\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState: Tensor[] =\n kwargs == null ? null : kwargs['initialState'];\n return super.call(inputs, {mask, training, initialState});\n });\n }\n\n get units(): number {\n return (this.cell as GRUCell).units;\n }\n\n get activation(): Activation {\n return (this.cell as GRUCell).activation;\n }\n\n get recurrentActivation(): Activation {\n return (this.cell as GRUCell).recurrentActivation;\n }\n\n get useBias(): boolean {\n return (this.cell as GRUCell).useBias;\n }\n\n get kernelInitializer(): Initializer {\n return (this.cell as GRUCell).kernelInitializer;\n }\n\n get recurrentInitializer(): Initializer {\n return (this.cell as GRUCell).recurrentInitializer;\n }\n\n get biasInitializer(): Initializer {\n return (this.cell as GRUCell).biasInitializer;\n }\n\n get kernelRegularizer(): Regularizer {\n return (this.cell as GRUCell).kernelRegularizer;\n }\n\n get recurrentRegularizer(): Regularizer {\n return (this.cell as GRUCell).recurrentRegularizer;\n }\n\n get biasRegularizer(): Regularizer {\n return (this.cell as GRUCell).biasRegularizer;\n }\n\n get kernelConstraint(): Constraint {\n return (this.cell as GRUCell).kernelConstraint;\n }\n\n get recurrentConstraint(): Constraint {\n return (this.cell as GRUCell).recurrentConstraint;\n }\n\n get biasConstraint(): Constraint {\n return (this.cell as GRUCell).biasConstraint;\n }\n\n get dropout(): number {\n return (this.cell as GRUCell).dropout;\n }\n\n get recurrentDropout(): number {\n return (this.cell as GRUCell).recurrentDropout;\n }\n\n get implementation(): number {\n return (this.cell as GRUCell).implementation;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n };\n const baseConfig = super.getConfig();\n delete baseConfig['cell'];\n Object.assign(config, baseConfig);\n return config;\n }\n\n static fromConfig<T extends serialization.Serializable>(\n cls: serialization.SerializableConstructor<T>,\n config: serialization.ConfigDict): T {\n if 
(config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\nserialization.registerClass(GRU);\n\n// Porting Note: Since this is a superset of SimpleRNNLayerConfig, we extend\n// that interface instead of repeating the fields.\nexport interface LSTMCellLayerConfig extends SimpleRNNCellLayerConfig {\n /**\n * Activation function to use for the recurrent step.\n *\n * Defaults to hard sigmoid (`hardSigmoid`).\n *\n * If `null`, no activation is applied.\n */\n recurrentActivation?: ActivationIdentifier;\n\n /**\n * If `true`, add 1 to the bias of the forget gate at initialization.\n * Setting it to `true` will also force `biasInitializer = 'zeros'`.\n * This is recommended in\n * [Jozefowicz et\n * al.](http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf).\n */\n unitForgetBias?: boolean;\n\n /**\n * Implementation mode, either 1 or 2.\n *\n * Mode 1 will structure its operations as a larger number of\n * smaller dot products and additions.\n *\n * Mode 2 will batch them into fewer, larger operations. These modes will\n * have different performance profiles on different hardware and\n * for different applications.\n *\n * Note: For superior performance, TensorFlow.js always uses implementation\n * 2, regardless of the actual value of this configuration field.\n */\n implementation?: number;\n}\n\n/**\n * Cell class for `LSTM`.\n *\n * `LSTMCell` is distinct from the `RNN` subclass `LSTM` in that its\n * `apply` method takes the input data of only a single time step and returns\n * the cell's output at the time step, while `LSTM` takes the input data\n * over a number of time steps. For example:\n *\n * ```js\n * const cell = tf.layers.lstmCell({units: 2});\n * const input = tf.input({shape: [10]});\n * const output = cell.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10]: This is the cell's output at a single time step. The 1st\n * // dimension is the unknown batch size.\n * ```\n *\n * Instance(s) of `LSTMCell` can be used to construct `RNN` layers. The\n * most typical use of this workflow is to combine a number of cells into a\n * stacked RNN cell (i.e., `StackedRNNCell` internally) and use it to create an\n * RNN. 
For example:\n *\n * ```js\n * const cells = [\n * tf.layers.lstmCell({units: 4}),\n * tf.layers.lstmCell({units: 8}),\n * ];\n * const rnn = tf.layers.rnn({cell: cells, returnSequences: true});\n *\n * // Create an input with 10 time steps and a length-20 vector at each step.\n * const input = tf.input({shape: [10, 20]});\n * const output = rnn.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the last `lstmCell`'s number of units.\n * ```\n *\n * To create an `RNN` consisting of only *one* `LSTMCell`, use the\n * `tf.layers.lstm`.\n */\nexport class LSTMCell extends RNNCell {\n static className = 'LSTMCell';\n readonly units: number;\n readonly activation: Activation;\n readonly recurrentActivation: Activation;\n readonly useBias: boolean;\n\n readonly kernelInitializer: Initializer;\n readonly recurrentInitializer: Initializer;\n readonly biasInitializer: Initializer;\n readonly unitForgetBias: boolean;\n\n readonly kernelConstraint: Constraint;\n readonly recurrentConstraint: Constraint;\n readonly biasConstraint: Constraint;\n\n readonly kernelRegularizer: Regularizer;\n readonly recurrentRegularizer: Regularizer;\n readonly biasRegularizer: Regularizer;\n\n readonly dropout: number;\n readonly recurrentDropout: number;\n\n readonly stateSize: number[];\n readonly implementation: number;\n\n readonly DEFAULT_ACTIVATION = 'tanh';\n readonly DEFAULT_RECURRENT_ACTIVATION = 'hardSigmoid';\n readonly DEFAULT_KERNEL_INITIALIZER = 'glorotNormal';\n readonly DEFAULT_RECURRENT_INITIALIZER = 'orthogonal';\n\n readonly DEFAULT_BIAS_INITIALIZER = 'zeros';\n\n kernel: LayerVariable;\n recurrentKernel: LayerVariable;\n bias: LayerVariable;\n\n constructor(config: LSTMCellLayerConfig) {\n super(config);\n\n this.units = config.units;\n this.activation = getActivation(\n config.activation === undefined ? this.DEFAULT_ACTIVATION :\n config.activation);\n this.recurrentActivation = getActivation(\n config.recurrentActivation === undefined ?\n this.DEFAULT_RECURRENT_ACTIVATION :\n config.recurrentActivation);\n this.useBias = config.useBias == null ? true : config.useBias;\n\n this.kernelInitializer = getInitializer(\n config.kernelInitializer || this.DEFAULT_KERNEL_INITIALIZER);\n this.recurrentInitializer = getInitializer(\n config.recurrentInitializer || this.DEFAULT_RECURRENT_INITIALIZER);\n\n this.biasInitializer =\n getInitializer(config.biasInitializer || this.DEFAULT_BIAS_INITIALIZER);\n this.unitForgetBias = config.unitForgetBias;\n\n this.kernelRegularizer = getRegularizer(config.kernelRegularizer);\n this.recurrentRegularizer = getRegularizer(config.recurrentRegularizer);\n this.biasRegularizer = getRegularizer(config.biasRegularizer);\n\n this.kernelConstraint = getConstraint(config.kernelConstraint);\n this.recurrentConstraint = getConstraint(config.recurrentConstraint);\n this.biasConstraint = getConstraint(config.biasConstraint);\n\n this.dropout = math_utils.min(\n [1, math_utils.max([0, config.dropout == null ? 0 : config.dropout])]);\n this.recurrentDropout = math_utils.min([\n 1,\n math_utils.max(\n [0, config.recurrentDropout == null ? 
0 : config.recurrentDropout])\n ]);\n this.implementation = config.implementation;\n this.stateSize = [this.units, this.units];\n this.dropoutMask = null;\n this.recurrentDropoutMask = null;\n }\n\n public build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n const inputDim = inputShape[inputShape.length - 1];\n this.kernel = this.addWeight(\n 'kernel', [inputDim, this.units * 4], null, this.kernelInitializer,\n this.kernelRegularizer, true, this.kernelConstraint);\n this.recurrentKernel = this.addWeight(\n 'recurrent_kernel', [this.units, this.units * 4], null,\n this.recurrentInitializer, this.recurrentRegularizer, true,\n this.recurrentConstraint);\n let biasInitializer: Initializer;\n if (this.useBias) {\n if (this.unitForgetBias) {\n const capturedBiasInit = this.biasInitializer;\n const capturedUnits = this.units;\n biasInitializer = new (class CustomInit extends Initializer {\n static className = 'CustomInit';\n\n apply(shape: Shape, dtype?: DataType): Tensor {\n // TODO(cais): More informative variable names?\n const bI = capturedBiasInit.apply([capturedUnits]);\n const bF = (new Ones()).apply([capturedUnits]);\n const bCAndH = capturedBiasInit.apply([capturedUnits * 2]);\n return K.concatAlongFirstAxis(\n K.concatAlongFirstAxis(bI, bF), bCAndH);\n }\n })();\n } else {\n biasInitializer = this.biasInitializer;\n }\n this.bias = this.addWeight(\n 'bias', [this.units * 4], null, biasInitializer, this.biasRegularizer,\n true, this.biasConstraint);\n } else {\n this.bias = null;\n }\n // Porting Notes: Unlike the PyKeras implementation, we perform slicing\n // of the weights and bias in the call() method, at execution time.\n this.built = true;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n const training = kwargs['training'] == null ? 
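// The fused LSTM step carried out in the rest of `call()` below, with
// dropout masks omitted, σ = recurrentActivation (hardSigmoid by default)
// and act = activation (tanh by default), is:
//
//   [z0, z1, z2, z3] = split( x_t · kernel + h_{t-1} · recurrentKernel + bias, 4 )
//   i   = σ(z0);   f = σ(z1);   o = σ(z3)
//   c_t = f ∘ c_{t-1} + i ∘ act(z2)
//   h_t = o ∘ act(c_t)
//
// With `unitForgetBias: true`, the bias built above is initialized as
// [biasInitializer(units), ones(units), biasInitializer(2 * units)], i.e.
// the forget-gate slice starts out at 1.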
false : kwargs['training'];\n inputs = inputs as Tensor[];\n if (inputs.length !== 3) {\n throw new ValueError(\n `LSTMCell expects 3 input Tensors (inputs, h, c), got ` +\n `${inputs.length}.`);\n }\n let hTMinus1 = inputs[1]; // Previous memory state.\n const cTMinus1 = inputs[2]; // Previous carry state.\n inputs = inputs[0];\n if (0 < this.dropout && this.dropout < 1 && this.dropoutMask == null) {\n this.dropoutMask = generateDropoutMask(\n () => tfc.onesLike(inputs as Tensor),\n this.dropout, training, 4) as Tensor[];\n }\n if (0 < this.recurrentDropout && this.recurrentDropout < 1 &&\n this.recurrentDropoutMask == null) {\n this.recurrentDropoutMask =\n generateDropoutMask(\n () => tfc.onesLike(hTMinus1), this.recurrentDropout, training,\n 4) as Tensor[];\n }\n const dpMask = this.dropoutMask as [Tensor, Tensor, Tensor, Tensor];\n const recDpMask =\n this.recurrentDropoutMask as [Tensor, Tensor, Tensor, Tensor];\n\n // Note: For superior performance, TensorFlow.js always uses\n // implementation 2 regardless of the actual value of\n // config.implementation.\n let i: Tensor;\n let f: Tensor;\n let c: Tensor;\n let o: Tensor;\n if (0 < this.dropout && this.dropout < 1) {\n inputs = tfc.mul(inputs, dpMask[0]);\n }\n let z = K.dot(inputs, this.kernel.read());\n if (0 < this.recurrentDropout && this.recurrentDropout < 1) {\n hTMinus1 = tfc.mul(hTMinus1, recDpMask[0]);\n }\n z = tfc.add(z, K.dot(hTMinus1, this.recurrentKernel.read()));\n if (this.useBias) {\n z = K.biasAdd(z, this.bias.read());\n }\n\n const [z0, z1, z2, z3] = tfc.split(z, 4, z.rank - 1);\n\n i = this.recurrentActivation.apply(z0);\n f = this.recurrentActivation.apply(z1);\n c = tfc.add(tfc.mul(f, cTMinus1), tfc.mul(i, this.activation.apply(z2)));\n o = this.recurrentActivation.apply(z3);\n\n const h = tfc.mul(o, this.activation.apply(c));\n // TODO(cais): Add use_learning_phase flag properly.\n return [h, h, c];\n });\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n unitForgetBias: this.unitForgetBias,\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n}\nserialization.registerClass(LSTMCell);\n\n// Porting Note: Since this is a superset of SimpleRNNLayerConfig, we inherit\n// from that interface instead of repeating the fields here.\nexport interface LSTMLayerConfig extends SimpleRNNLayerConfig {\n /**\n * Activation function to use for the recurrent step.\n *\n * Defaults to hard sigmoid (`hardSigmoid`).\n *\n * If `null`, no activation is applied.\n */\n recurrentActivation?: string;\n\n /**\n 
* If `true`, add 1 to the bias of the forget gate at initialization.\n * Setting it to `true` will also force `biasInitializer = 'zeros'`.\n * This is recommended in\n * [Jozefowicz et\n * al.](http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf).\n */\n unitForgetBias?: boolean;\n\n /**\n * Implementation mode, either 1 or 2.\n * Mode 1 will structure its operations as a larger number of\n * smaller dot products and additions, whereas mode 2 will\n * batch them into fewer, larger operations. These modes will\n * have different performance profiles on different hardware and\n * for different applications.\n *\n * Note: For superior performance, TensorFlow.js always uses implementation\n * 2, regardless of the actual value of this config field.\n */\n implementation?: number;\n}\n\n/**\n * Long-Short Term Memory layer - Hochreiter 1997.\n *\n * This is an `RNN` layer consisting of one `LSTMCell`. However, unlike\n * the underlying `LSTMCell`, the `apply` method of `LSTM` operates\n * on a sequence of inputs. The shape of the input (not including the first,\n * batch dimension) needs to be at least 2-D, with the first dimension being\n * time steps. For example:\n *\n * ```js\n * const lstm = tf.layers.lstm({units: 8, returnSequences: true});\n *\n * // Create an input with 10 time steps.\n * const input = tf.input({shape: [10, 20]});\n * const output = lstm.apply(input);\n *\n * console.log(JSON.stringify(output.shape));\n * // [null, 10, 8]: 1st dimension is unknown batch size; 2nd dimension is the\n * // same as the sequence length of `input`, due to `returnSequences`: `true`;\n * // 3rd dimension is the `LSTMCell`'s number of units.\n */\nexport class LSTM extends RNN {\n static className = 'LSTM';\n constructor(config: LSTMLayerConfig) {\n if (config.implementation as number === 0) {\n console.warn(\n '`implementation=0` has been deprecated, and now defaults to ' +\n '`implementation=1`. Please update your layer call.');\n }\n config.cell = new LSTMCell(config);\n super(config as RNNLayerConfig);\n // TODO(cais): Add activityRegularizer.\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n if (this.cell.dropoutMask != null) {\n tfc.dispose(this.cell.dropoutMask);\n this.cell.dropoutMask = null;\n }\n if (this.cell.recurrentDropoutMask != null) {\n tfc.dispose(this.cell.recurrentDropoutMask);\n this.cell.recurrentDropoutMask = null;\n }\n const mask = kwargs == null ? null : kwargs['mask'];\n const training = kwargs == null ? null : kwargs['training'];\n const initialState: Tensor[] =\n kwargs == null ? 
null : kwargs['initialState'];\n return super.call(inputs, {mask, training, initialState});\n });\n }\n\n get units(): number {\n return (this.cell as LSTMCell).units;\n }\n\n get activation(): Activation {\n return (this.cell as LSTMCell).activation;\n }\n\n get recurrentActivation(): Activation {\n return (this.cell as LSTMCell).recurrentActivation;\n }\n\n get useBias(): boolean {\n return (this.cell as LSTMCell).useBias;\n }\n\n get kernelInitializer(): Initializer {\n return (this.cell as LSTMCell).kernelInitializer;\n }\n\n get recurrentInitializer(): Initializer {\n return (this.cell as LSTMCell).recurrentInitializer;\n }\n\n get biasInitializer(): Initializer {\n return (this.cell as LSTMCell).biasInitializer;\n }\n\n get unitForgetBias(): boolean {\n return (this.cell as LSTMCell).unitForgetBias;\n }\n\n get kernelRegularizer(): Regularizer {\n return (this.cell as LSTMCell).kernelRegularizer;\n }\n\n get recurrentRegularizer(): Regularizer {\n return (this.cell as LSTMCell).recurrentRegularizer;\n }\n\n get biasRegularizer(): Regularizer {\n return (this.cell as LSTMCell).biasRegularizer;\n }\n\n get kernelConstraint(): Constraint {\n return (this.cell as LSTMCell).kernelConstraint;\n }\n\n get recurrentConstraint(): Constraint {\n return (this.cell as LSTMCell).recurrentConstraint;\n }\n\n get biasConstraint(): Constraint {\n return (this.cell as LSTMCell).biasConstraint;\n }\n\n get dropout(): number {\n return (this.cell as LSTMCell).dropout;\n }\n\n get recurrentDropout(): number {\n return (this.cell as LSTMCell).recurrentDropout;\n }\n\n get implementation(): number {\n return (this.cell as LSTMCell).implementation;\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n units: this.units,\n activation: serializeActivation(this.activation),\n recurrentActivation: serializeActivation(this.recurrentActivation),\n useBias: this.useBias,\n kernelInitializer: serializeInitializer(this.kernelInitializer),\n recurrentInitializer: serializeInitializer(this.recurrentInitializer),\n biasInitializer: serializeInitializer(this.biasInitializer),\n unitForgetBias: this.unitForgetBias,\n kernelRegularizer: serializeRegularizer(this.kernelRegularizer),\n recurrentRegularizer: serializeRegularizer(this.recurrentRegularizer),\n biasRegularizer: serializeRegularizer(this.biasRegularizer),\n activityRegularizer: serializeRegularizer(this.activityRegularizer),\n kernelConstraint: serializeConstraint(this.kernelConstraint),\n recurrentConstraint: serializeConstraint(this.recurrentConstraint),\n biasConstraint: serializeConstraint(this.biasConstraint),\n dropout: this.dropout,\n recurrentDropout: this.recurrentDropout,\n implementation: this.implementation,\n };\n const baseConfig = super.getConfig();\n delete baseConfig['cell'];\n Object.assign(config, baseConfig);\n return config;\n }\n\n static fromConfig<T extends serialization.Serializable>(\n cls: serialization.SerializableConstructor<T>,\n config: serialization.ConfigDict): T {\n if (config['implmentation'] === 0) {\n config['implementation'] = 1;\n }\n return new cls(config);\n }\n}\nserialization.registerClass(LSTM);\n\nexport interface StackedRNNCellsConfig extends LayerConfig {\n /**\n * A `Array` of `RNNCell` instances.\n */\n cells: RNNCell[];\n}\n\n/**\n * Wrapper allowing a stack of RNN cells to behave as a single cell.\n *\n * Used to implement efficient stacked RNNs.\n */\nexport class StackedRNNCells extends RNNCell {\n static className = 'StackedRNNCells';\n protected cells: RNNCell[];\n\n 
constructor(config: StackedRNNCellsConfig) {\n super(config);\n this.cells = config.cells;\n }\n\n get stateSize(): number[] {\n // States are a flat list in reverse order of the cell stack.\n // This allows perserving the requirement `stack.statesize[0] ===\n // outputDim`. E.g., states of a 2-layer LSTM would be `[h2, c2, h1, c1]`,\n // assuming one LSTM has states `[h, c]`.\n const stateSize: number[] = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n stateSize.push(...cell.stateSize);\n } else {\n stateSize.push(cell.stateSize);\n }\n }\n return stateSize;\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n inputs = inputs as Tensor[];\n let states = inputs.slice(1);\n\n // Recover per-cell states.\n const nestedStates: Tensor[][] = [];\n for (const cell of this.cells.slice().reverse()) {\n if (Array.isArray(cell.stateSize)) {\n nestedStates.push(states.splice(0, cell.stateSize.length));\n } else {\n nestedStates.push(states.splice(0, 1));\n }\n }\n nestedStates.reverse();\n\n // Call the cells in order and store the returned states.\n const newNestedStates: Tensor[][] = [];\n let callInputs: Tensor[];\n for (let i = 0; i < this.cells.length; ++i) {\n const cell = this.cells[i];\n states = nestedStates[i];\n // TODO(cais): Take care of constants.\n if (i === 0) {\n callInputs = [inputs[0]].concat(states);\n } else {\n callInputs = [callInputs[0]].concat(states);\n }\n callInputs = cell.call(callInputs, kwargs) as Tensor[];\n newNestedStates.push(callInputs.slice(1));\n }\n\n // Format the new states as a flat list in reverse cell order.\n states = [];\n for (const cellStates of newNestedStates.slice().reverse()) {\n states.push(...cellStates);\n }\n return [callInputs[0]].concat(states);\n });\n }\n\n public build(inputShape: Shape|Shape[]): void {\n if (isArrayOfShapes(inputShape)) {\n // TODO(cais): Take care of input constants.\n // const constantShape = inputShape.slice(1);\n inputShape = (inputShape as Shape[])[0];\n }\n inputShape = inputShape as Shape;\n let outputDim: number;\n for (const cell of this.cells) {\n // TODO(cais): Take care of input constants.\n cell.build(inputShape);\n if (Array.isArray(cell.stateSize)) {\n outputDim = cell.stateSize[0];\n } else {\n outputDim = cell.stateSize;\n }\n inputShape = [inputShape[0], outputDim];\n }\n this.built = true;\n }\n\n getConfig(): serialization.ConfigDict {\n const cellConfigs: serialization.ConfigDict[] = [];\n for (const cell of this.cells) {\n cellConfigs.push({\n 'className': this.getClassName(),\n 'config': cell.getConfig(),\n });\n }\n const config: serialization.ConfigDict = {'cells': cellConfigs};\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n\n static fromConfig<T extends serialization.Serializable>(\n cls: serialization.SerializableConstructor<T>,\n config: serialization.ConfigDict,\n customObjects = {} as serialization.ConfigDict): T {\n const cells: RNNCell[] = [];\n for (const cellConfig of (config['cells'] as serialization.ConfigDict[])) {\n cells.push(deserialize(cellConfig, customObjects) as RNNCell);\n }\n return new cls({cells});\n }\n\n get trainableWeights(): LayerVariable[] {\n if (!this.trainable) {\n return [];\n }\n const weights: LayerVariable[] = [];\n for (const cell of this.cells) {\n weights.push(...cell.trainableWeights);\n }\n return weights;\n }\n\n get nonTrainableWeights(): LayerVariable[] {\n const weights: LayerVariable[] = [];\n for (const cell 
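/**
 * A minimal usage sketch for `StackedRNNCells`, assuming the `tf.layers.*`
 * factory functions exported by tfjs-layers (`stackedRNNCells`, `lstmCell`,
 * `rnn`):
 *
 * ```js
 * const stacked = tf.layers.stackedRNNCells({
 *   cells: [tf.layers.lstmCell({units: 4}), tf.layers.lstmCell({units: 8})],
 * });
 * const rnn = tf.layers.rnn({cell: stacked, returnSequences: true});
 * const input = tf.input({shape: [10, 20]});
 * const output = rnn.apply(input);
 * console.log(JSON.stringify(output.shape));  // [null, 10, 8]
 * ```
 *
 * Per the `stateSize` getter above, the flattened state of this two-layer
 * LSTM stack is ordered `[h2, c2, h1, c1]`, so `stateSize` is `[8, 8, 4, 4]`.
 */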
of this.cells) {\n weights.push(...cell.nonTrainableWeights);\n }\n if (!this.trainable) {\n const trainableWeights: LayerVariable[] = [];\n for (const cell of this.cells) {\n trainableWeights.push(...cell.trainableWeights);\n }\n return trainableWeights.concat(weights);\n }\n return weights;\n }\n\n /**\n * Retrieve the weights of a the model.\n *\n * @returns A flat `Array` of `tf.Tensor`s.\n */\n getWeights(): Tensor[] {\n const weights: LayerVariable[] = [];\n for (const cell of this.cells) {\n weights.push(...cell.weights);\n }\n return batchGetValue(weights);\n }\n\n /**\n * Set the weights of the model.\n *\n * @param weights An `Array` of `tf.Tensor`s with shapes and types matching\n * the output of `getWeights()`.\n */\n setWeights(weights: Tensor[]): void {\n const tuples: Array<[LayerVariable, Tensor]> = [];\n for (const cell of this.cells) {\n const numParams = cell.weights.length;\n const inputWeights = weights.splice(numParams);\n for (let i = 0; i < cell.weights.length; ++i) {\n tuples.push([cell.weights[i], inputWeights[i]]);\n }\n }\n batchSetValue(tuples);\n }\n\n // TODO(cais): Maybe implemnt `losses` and `getLossesFor`.\n}\nserialization.registerClass(StackedRNNCells);\n\nfunction generateDropoutMask(\n ones: () => Tensor, rate: number, training: boolean = null,\n count = 1): Tensor|Tensor[] {\n function droppedInputs(): Tensor {\n return K.dropout(ones(), getScalar(rate));\n }\n if (count > 1) {\n const mask: Tensor[] = [];\n for (let i = 0; i < count; i++) {\n mask.push(K.inTrainPhase(droppedInputs, ones, training));\n }\n mask.forEach(m => tfc.keep(m));\n return mask;\n } else {\n return tfc.keep(K.inTrainPhase(droppedInputs, ones, training));\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/**\n * Layers that augment the functionality of a base layer.\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {serialization, Tensor, tidy} from '@tensorflow/tfjs-core';\n\nimport {getScalar} from '../backend/state';\nimport * as K from '../backend/tfjs_backend';\nimport {nameScope} from '../common';\nimport {InputSpec, Layer, LayerConfig, SymbolicTensor} from '../engine/topology';\nimport {NotImplementedError, ValueError} from '../errors';\nimport {Kwargs, Shape} from '../types';\nimport {RegularizerFn, RnnStepFunction} from '../types';\nimport * as generic_utils from '../utils/generic_utils';\nimport {getExactlyOneShape, getExactlyOneTensor} from '../utils/types_utils';\nimport {LayerVariable} from '../variables';\n\nimport {rnn, RNN, standardizeArgs} from './recurrent';\nimport {deserialize} from './serialization';\n\n\nexport interface WrapperLayerConfig extends LayerConfig {\n /**\n * The layer to be wrapped.\n */\n layer: Layer;\n}\n\n/**\n * Abstract wrapper base class.\n *\n * Wrappers take another layer and augment it in various ways.\n * Do not use this class as a layer, it is only an abstract base class.\n * Two usable wrappers are the `TimeDistributed` and `Bidirectional` wrappers.\n */\nexport abstract class Wrapper extends Layer {\n readonly layer: Layer;\n\n constructor(config: WrapperLayerConfig) {\n // Porting Note: In PyKeras, `self.layer` is set prior to the calling\n // `super()`. 
But we can't do that here due to TypeScript's restriction.\n // See: https://github.com/Microsoft/TypeScript/issues/8277\n // As a result, we have to add checks in `get trainable()` and\n // `set trainable()` below in order to prevent using `this.layer` when\n // its value is `undefined`. The super constructor does use the getter\n // and the setter of `this.layer`.\n super(config);\n this.layer = config.layer;\n }\n\n build(inputShape: Shape|Shape[]): void {\n this.built = true;\n }\n\n // TODO(cais): Implement activityRegularizer getter.\n\n get trainable(): boolean {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n return this.layer.trainable;\n } else {\n return false;\n }\n }\n\n set trainable(value: boolean) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n if (this.layer != null) {\n this.layer.trainable = value;\n }\n }\n\n get trainableWeights(): LayerVariable[] {\n return this.layer.trainableWeights;\n }\n // TODO(cais): Implement setter for trainableWeights.\n\n get nonTrainableWeights(): LayerVariable[] {\n return this.layer.nonTrainableWeights;\n }\n // TODO(cais): Implement setter for nonTrainableWeights.\n\n get updates(): Tensor[] {\n // tslint:disable-next-line:no-any\n return (this.layer as any)._updates;\n }\n\n // TODO(cais): Implement getUpdatesFor().\n\n get losses(): RegularizerFn[] {\n return this.layer.losses;\n }\n\n // TODO(cais): Implement getLossesFor().\n\n getWeights(): Tensor[] {\n return this.layer.getWeights();\n }\n\n setWeights(weights: Tensor[]): void {\n this.layer.setWeights(weights);\n }\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n 'layer': {\n 'className': this.layer.getClassName(),\n 'config': this.layer.getConfig(),\n }\n };\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n\n static fromConfig<T extends serialization.Serializable>(\n cls: serialization.SerializableConstructor<T>,\n config: serialization.ConfigDict,\n customObjects = {} as serialization.ConfigDict): T {\n const layerConfig = config['layer'] as serialization.ConfigDict;\n const layer = deserialize(layerConfig, customObjects) as Layer;\n delete config['layer'];\n const newConfig = {layer};\n Object.assign(newConfig, config);\n return new cls(newConfig);\n }\n}\n\n/**\n * This wrapper applies a layer to every temporal slice of an input.\n *\n * The input should be at least 3D, and the dimension of the index `1` will be\n * considered to be the temporal dimension.\n *\n * Consider a batch of 32 samples, where each sample is a sequence of 10 vectors\n * of 16 dimensions. 
The batch input shape of the layer is then `[32, 10,\n * 16]`, and the `inputShape`, not including the sample dimension, is\n * `[10, 16]`.\n *\n * You can then use `TimeDistributed` to apply a `Dense` layer to each of the 10\n * timesteps, independently:\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.dense({units: 8}),\n * inputShape: [10, 16],\n * }));\n *\n * // Now model.outputShape = [null, 10, 8].\n * // The output will then have shape `[32, 10, 8]`.\n *\n * // In subsequent layers, there is no need for `inputShape`:\n * model.add(tf.layers.timeDistributed({layer: tf.layers.dense({units: 32})}));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * // Now model.outputShape = [null, 10, 32].\n * ```\n *\n * The output will then have shape `[32, 10, 32]`.\n *\n * `TimeDistributed` can be used with arbitrary layers, not just `Dense`, for\n * instance a `Conv2D` layer.\n *\n * ```js\n * const model = tf.sequential();\n * model.add(tf.layers.timeDistributed({\n * layer: tf.layers.conv2d({filters: 64, kernelSize: [3, 3]}),\n * inputShape: [10, 299, 299, 3],\n * }));\n * console.log(JSON.stringify(model.outputs[0].shape));\n * ```\n */\nexport class TimeDistributed extends Wrapper {\n static className = 'TimeDistributed';\n constructor(config: WrapperLayerConfig) {\n super(config);\n this.supportsMasking = true;\n }\n\n build(inputShape: Shape|Shape[]): void {\n inputShape = getExactlyOneShape(inputShape);\n if (inputShape.length < 3) {\n throw new ValueError(\n `TimeDistributed layer expects an input shape >= 3D, but received ` +\n `input shape ${JSON.stringify(inputShape)}`);\n }\n this.inputSpec = [{shape: inputShape}];\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n if (!this.layer.built) {\n this.layer.build(childInputShape);\n this.layer.built = true;\n }\n super.build(inputShape);\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n inputShape = getExactlyOneShape(inputShape);\n const childInputShape = [inputShape[0]].concat(inputShape.slice(2));\n const childOutputShape =\n this.layer.computeOutputShape(childInputShape) as Shape;\n const timesteps = inputShape[1];\n return [childOutputShape[0], timesteps].concat(childOutputShape.slice(1));\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n // TODO(cais): Add 'training' and 'useLearningPhase' to kwargs.\n inputs = getExactlyOneTensor(inputs);\n // Porting Note: In tfjs-layers, `inputs` are always concrete tensor\n // values. 
Hence the inputs can't have an undetermined first (batch)\n // dimension, which is why we always use the K.rnn approach here.\n const step: RnnStepFunction = (inputs: Tensor, states: Tensor[]) => {\n // TODO(cais): Add useLearningPhase.\n // NOTE(cais): `layer.call` may return a length-1 array of Tensor in\n // some cases (e.g., `layer` is a `Sequential` instance), which is\n // why `getExactlyOneTensor` is used below.\n const output = getExactlyOneTensor(this.layer.call(inputs, kwargs));\n return [output, []];\n };\n const rnnOutputs =\n rnn(step, inputs, [], false /* goBackwards */, null /* mask */,\n null /* constants */, false /* unroll */,\n true /* needPerStepOutputs */);\n const y = rnnOutputs[1];\n // TODO(cais): Add activity regularization.\n // TODO(cais): Add useLearningPhase.\n return y;\n });\n }\n}\nserialization.registerClass(TimeDistributed);\n\nexport type BidirectionalMergeMode = 'sum'|'mul'|'concat'|'ave';\nexport const VALID_BIDIRECTIONAL_MERGE_MODES = ['sum', 'mul', 'concat', 'ave'];\nexport function checkBidirectionalMergeMode(value?: string): void {\n generic_utils.checkStringTypeUnionValue(\n VALID_BIDIRECTIONAL_MERGE_MODES, 'BidirectionalMergeMode', value);\n}\n\nexport interface BidirectionalLayerConfig extends WrapperLayerConfig {\n /**\n * The instance of an `RNN` layer to be wrapped.\n */\n layer: RNN;\n\n /**\n * Mode by which outputs of the forward and backward RNNs are\n * combinied. If `null` or `undefined`, the output will not be\n * combined, they will be returned as an `Array`.\n */\n mergeMode?: BidirectionalMergeMode;\n}\n\nexport class Bidirectional extends Wrapper {\n static className = 'Bidirectional';\n private forwardLayer: RNN;\n private backwardLayer: RNN;\n private mergeMode: BidirectionalMergeMode;\n private returnSequences: boolean;\n private returnState: boolean;\n private numConstants?: number;\n private _trainable: boolean;\n\n constructor(config: BidirectionalLayerConfig) {\n super(config);\n\n // Note: When creating `this.forwardLayer`, the original Layer object\n // (`config.layer`) ought to be cloned. This is why we call\n // `getConfig()` followed by `deserialize()`. Without this cloning,\n // the layer names saved during serialization will incorrectly contain\n // the 'forward_' prefix. In Python Keras, this is done using\n // `copy.copy` (shallow copy), which does not have a simple equivalent\n // in JavaScript. JavaScript's `Object.assign()` does not copy\n // methods.\n const layerConfig = config.layer.getConfig();\n this.forwardLayer =\n deserialize(\n {className: config.layer.getClassName(), config: layerConfig}) as\n RNN;\n layerConfig['goBackwards'] =\n layerConfig['goBackwards'] === true ? 
false : true;\n this.backwardLayer =\n deserialize(\n {className: config.layer.getClassName(), config: layerConfig}) as\n RNN;\n this.forwardLayer.name = 'forward_' + this.forwardLayer.name;\n this.backwardLayer.name = 'backward_' + this.backwardLayer.name;\n checkBidirectionalMergeMode(config.mergeMode);\n this.mergeMode = config.mergeMode;\n if (config.weights) {\n throw new NotImplementedError(\n 'weights support is not implemented for Bidirectional layer yet.');\n }\n this._stateful = config.layer.stateful;\n this.returnSequences = config.layer.returnSequences;\n this.returnState = config.layer.returnState;\n this.supportsMasking = true;\n this._trainable = true;\n this.inputSpec = config.layer.inputSpec;\n this.numConstants = null;\n }\n\n get trainable(): boolean {\n return this._trainable;\n }\n\n set trainable(value: boolean) {\n // Porting Note: the check of `this.layer` here is necessary due to the\n // way the `constructor` of this class is written (see Porting Note\n // above).\n this._trainable = value;\n if (this.forwardLayer != null) {\n this.forwardLayer.trainable = value;\n }\n if (this.backwardLayer != null) {\n this.backwardLayer.trainable = value;\n }\n }\n\n getWeights(): Tensor[] {\n return this.forwardLayer.getWeights().concat(\n this.backwardLayer.getWeights());\n }\n\n setWeights(weights: Tensor[]): void {\n const numWeights = weights.length;\n const numeightsOver2 = Math.floor(numWeights / 2);\n this.forwardLayer.setWeights(weights.slice(0, numeightsOver2));\n this.backwardLayer.setWeights(weights.slice(numeightsOver2));\n }\n\n computeOutputShape(inputShape: Shape|Shape[]): Shape|Shape[] {\n let layerShapes: Shape|Shape[] =\n this.forwardLayer.computeOutputShape(inputShape);\n if (!(Array.isArray(layerShapes) && Array.isArray(layerShapes[0]))) {\n layerShapes = [layerShapes as Shape];\n }\n layerShapes = layerShapes as Shape[];\n\n let outputShape: Shape;\n let outputShapes: Shape[];\n let stateShape: Shape[];\n if (this.returnState) {\n stateShape = layerShapes.slice(1);\n outputShape = layerShapes[0];\n } else {\n outputShape = layerShapes[0];\n }\n outputShape = outputShape as Shape;\n if (this.mergeMode === 'concat') {\n outputShape[outputShape.length - 1] *= 2;\n outputShapes = [outputShape];\n } else if (this.mergeMode == null) {\n outputShapes = [outputShape, outputShape.slice()];\n } else {\n outputShapes = [outputShape];\n }\n\n if (this.returnState) {\n if (this.mergeMode == null) {\n return outputShapes.concat(stateShape).concat(stateShape.slice());\n }\n return [outputShape].concat(stateShape).concat(stateShape.slice());\n }\n return generic_utils.singletonOrArray(outputShapes);\n }\n\n apply(\n inputs: Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[],\n kwargs?: Kwargs): Tensor|Tensor[]|SymbolicTensor|SymbolicTensor[] {\n let initialState: Tensor[]|SymbolicTensor[] =\n kwargs == null ? null : kwargs['initialState'];\n let constants: Tensor[]|SymbolicTensor[] =\n kwargs == null ? 
null : kwargs['constants'];\n if (kwargs == null) {\n kwargs = {};\n }\n const standardized =\n standardizeArgs(inputs, initialState, constants, this.numConstants);\n inputs = standardized.inputs as Tensor | SymbolicTensor;\n initialState = standardized.initialState;\n constants = standardized.constants;\n\n if (Array.isArray(inputs)) {\n initialState = (inputs as Tensor[] | SymbolicTensor[]).slice(1);\n inputs = (inputs as Tensor[] | SymbolicTensor[])[0];\n }\n\n if ((initialState == null || initialState.length === 0) &&\n constants == null) {\n return super.apply(inputs, kwargs);\n }\n const additionalInputs: Array<Tensor|SymbolicTensor> = [];\n const additionalSpecs: InputSpec[] = [];\n if (initialState != null) {\n const numStates = initialState.length;\n if (numStates % 2 > 0) {\n throw new ValueError(\n 'When passing `initialState` to a Bidirectional RNN, ' +\n 'the state should be an Array containing the states of ' +\n 'the underlying RNNs.');\n }\n kwargs['initialState'] = initialState;\n additionalInputs.push(...initialState);\n const stateSpecs = (initialState as Array<Tensor|SymbolicTensor>)\n .map(state => new InputSpec({shape: state.shape}));\n this.forwardLayer.stateSpec = stateSpecs.slice(0, numStates / 2);\n this.backwardLayer.stateSpec = stateSpecs.slice(numStates / 2);\n additionalSpecs.push(...stateSpecs);\n }\n if (constants != null) {\n throw new NotImplementedError(\n 'Support for constants in Bidirectional layers is not ' +\n 'implemented yet.');\n }\n\n const isSymbolicTensor = additionalInputs[0] instanceof SymbolicTensor;\n for (const tensor of additionalInputs) {\n if (tensor instanceof SymbolicTensor !== isSymbolicTensor) {\n throw new ValueError(\n 'The initial state of a Bidirectional layer cannot be ' +\n 'specified as a mix of symbolic and non-symbolic tensors');\n }\n }\n\n if (isSymbolicTensor) {\n // Compute the full input and specs, including the states.\n const fullInput = [inputs].concat(additionalInputs);\n const fullInputSpec = this.inputSpec.concat(additionalSpecs);\n // Perform the call temporarily and replace inputSpec.\n // Note: with initial states symbolic calls and non-symbolic calls to\n // this method differ in how the initial states are passed. For\n // symbolic calls, the initial states are passed in the first arg, as\n // an Array of SymbolicTensors; for non-symbolic calls, they are\n // passed in the second arg as a part of the kwargs. 
Hence the need to\n // temporarily modify inputSpec here.\n // TODO(cais): Make refactoring so that this hacky code below is no\n // longer needed.\n const originalInputSpec = this.inputSpec;\n this.inputSpec = fullInputSpec;\n const output =\n super.apply(fullInput as Tensor[] | SymbolicTensor[], kwargs);\n this.inputSpec = originalInputSpec;\n return output;\n } else {\n return super.apply(inputs, kwargs);\n }\n }\n\n call(inputs: Tensor|Tensor[], kwargs: Kwargs): Tensor|Tensor[] {\n return tidy(() => {\n if (kwargs['mask'] != null) {\n throw new NotImplementedError(\n 'The support for masking is not implemented for ' +\n 'Bidirectional layers yet.');\n }\n const initialState = kwargs['initialState'];\n\n let y: Tensor|Tensor[];\n let yRev: Tensor|Tensor[];\n if (initialState == null) {\n y = this.forwardLayer.call(inputs, kwargs);\n yRev = this.backwardLayer.call(inputs, kwargs);\n } else {\n const forwardState = initialState.slice(0, initialState.length / 2);\n const backwardState = initialState.slice(initialState.length / 2);\n y = this.forwardLayer.call(\n inputs, Object.assign(kwargs, {initialState: forwardState}));\n yRev = this.backwardLayer.call(\n inputs, Object.assign(kwargs, {initialState: backwardState}));\n }\n\n let states: Tensor[];\n if (this.returnState) {\n if (Array.isArray(y)) {\n states = (y as Tensor[]).slice(1).concat((yRev as Tensor[]).slice(1));\n } else {\n }\n y = (y as Tensor[])[0];\n yRev = (yRev as Tensor[])[0];\n }\n\n if (this.returnSequences) {\n yRev = tfc.reverse(yRev as Tensor, 1);\n }\n\n let output: Tensor|Tensor[];\n if (this.mergeMode === 'concat') {\n output = K.concatenate([y as Tensor, yRev as Tensor]);\n } else if (this.mergeMode === 'sum') {\n output = tfc.add(y as Tensor, yRev as Tensor);\n } else if (this.mergeMode === 'ave') {\n output = tfc.mul(getScalar(0.5), tfc.add(y as Tensor, yRev as Tensor));\n } else if (this.mergeMode === 'mul') {\n output = tfc.mul(y as Tensor, yRev as Tensor);\n } else if (this.mergeMode == null) {\n output = [y as Tensor, yRev as Tensor];\n }\n\n // TODO(cais): Properly set learning phase.\n if (this.returnState) {\n if (this.mergeMode == null) {\n return (output as Tensor[]).concat(states);\n }\n return [output as Tensor].concat(states);\n }\n return output;\n });\n }\n\n resetStates(states?: Tensor|Tensor[]): void {\n this.forwardLayer.resetStates();\n this.backwardLayer.resetStates();\n }\n\n build(inputShape: Shape|Shape[]): void {\n nameScope(this.forwardLayer.name, () => {\n this.forwardLayer.build(inputShape);\n });\n nameScope(this.backwardLayer.name, () => {\n this.backwardLayer.build(inputShape);\n });\n this.built = true;\n }\n\n // TODO(cais): Implement computeMask().\n\n get trainableWeights(): LayerVariable[] {\n return this.forwardLayer.trainableWeights.concat(\n this.backwardLayer.trainableWeights);\n }\n\n get nonTrainableWeights(): LayerVariable[] {\n return this.forwardLayer.nonTrainableWeights.concat(\n this.backwardLayer.nonTrainableWeights);\n }\n\n // TODO(cais): Implement constraints().\n\n getConfig(): serialization.ConfigDict {\n const config: serialization.ConfigDict = {\n 'mergeMode': this.mergeMode,\n };\n // TODO(cais): Add logic for `numConstants` once the property is added.\n const baseConfig = super.getConfig();\n Object.assign(config, baseConfig);\n return config;\n }\n\n static fromConfig<T extends serialization.Serializable>(\n cls: serialization.SerializableConstructor<T>,\n config: serialization.ConfigDict): T {\n const rnnLayer =\n deserialize(config['layer'] as 
serialization.ConfigDict) as RNN;\n delete config['layer'];\n // TODO(cais): Add logic for `numConstants` once the property is added.\n if (config['numConstants'] != null) {\n throw new NotImplementedError(\n `Deserialization of a Bidirectional layer with numConstants ` +\n `present is not supported yet.`);\n }\n // tslint:disable-next-line:no-any\n const newConfig: {[key: string]: any} = config;\n newConfig['layer'] = rnnLayer;\n return new cls(newConfig);\n }\n}\nserialization.registerClass(Bidirectional);\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\nimport {InputLayer, InputLayerConfig} from './engine/input_layer';\nimport {Layer, LayerConfig} from './engine/topology';\nimport {input} from './exports';\nimport {ELU, ELULayerConfig, LeakyReLU, LeakyReLULayerConfig, ReLU, ReLULayerConfig, Softmax, SoftmaxLayerConfig, ThresholdedReLU, ThresholdedReLULayerConfig} from './layers/advanced_activations';\nimport {Conv1D, Conv2D, Conv2DTranspose, ConvLayerConfig, Cropping2D, Cropping2DLayerConfig, SeparableConv2D, SeparableConvLayerConfig, UpSampling2D, UpSampling2DLayerConfig} from './layers/convolutional';\nimport {DepthwiseConv2D, DepthwiseConv2DLayerConfig} from './layers/convolutional_depthwise';\nimport {Activation, ActivationLayerConfig, Dense, DenseLayerConfig, Dropout, DropoutLayerConfig, Flatten, Permute, PermuteLayerConfig, RepeatVector, RepeatVectorLayerConfig, Reshape, ReshapeLayerConfig} from './layers/core';\nimport {Embedding, EmbeddingLayerConfig} from './layers/embeddings';\nimport {Add, Average, Concatenate, ConcatenateLayerConfig, Dot, DotLayerConfig, Maximum, Minimum, Multiply} from './layers/merge';\nimport {BatchNormalization, BatchNormalizationLayerConfig} from './layers/normalization';\nimport {ZeroPadding2D, ZeroPadding2DLayerConfig} from './layers/padding';\nimport {AveragePooling1D, AveragePooling2D, GlobalAveragePooling1D, GlobalAveragePooling2D, GlobalMaxPooling1D, GlobalMaxPooling2D, GlobalPooling2DLayerConfig, MaxPooling1D, MaxPooling2D, Pooling1DLayerConfig, Pooling2DLayerConfig} from './layers/pooling';\nimport {GRU, GRUCell, GRUCellLayerConfig, GRULayerConfig, LSTM, LSTMCell, LSTMCellLayerConfig, LSTMLayerConfig, RNN, RNNCell, RNNLayerConfig, SimpleRNN, SimpleRNNCell, SimpleRNNCellLayerConfig, SimpleRNNLayerConfig, StackedRNNCells, StackedRNNCellsConfig} from './layers/recurrent';\nimport {Bidirectional, BidirectionalLayerConfig, TimeDistributed, Wrapper, WrapperLayerConfig} from './layers/wrappers';\n\n\n// TODO(cais): Add doc string to all the public static functions in this\n// class; include executable JavaScript code snippets where applicable\n// (b/74074458).\n\n// Input Layer.\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Inputs',\n * namespace: 'layers',\n * useDocsFrom: 'InputLayer',\n * configParamIndices: [0]\n * }\n */\nexport function inputLayer(config: InputLayerConfig): Layer {\n return new InputLayer(config);\n}\n\n// Advanced Activation Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers',\n * useDocsFrom: 'ELU',\n * configParamIndices: [0]\n * }\n */\nexport function elu(config?: ELULayerConfig): Layer {\n return new ELU(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 
'layers',\n * useDocsFrom: 'ReLU',\n * configParamIndices: [0]\n * }\n */\nexport function reLU(config?: ReLULayerConfig): Layer {\n return new ReLU(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers',\n * useDocsFrom: 'LeakyReLU',\n * configParamIndices: [0]\n * }\n */\nexport function leakyReLU(config?: LeakyReLULayerConfig): Layer {\n return new LeakyReLU(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers',\n * useDocsFrom: 'Softmax',\n * configParamIndices: [0]\n * }\n */\nexport function softmax(config?: SoftmaxLayerConfig): Layer {\n return new Softmax(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Advanced Activation',\n * namespace: 'layers',\n * useDocsFrom: 'ThresholdedReLU',\n * configParamIndices: [0]\n * }\n */\nexport function thresholdedReLU(config?: ThresholdedReLULayerConfig): Layer {\n return new ThresholdedReLU(config);\n}\n\n// Convolutional Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Convolutional',\n * namespace: 'layers',\n * useDocsFrom: 'Conv1D',\n * configParamIndices: [0]\n * }\n */\nexport function conv1d(config: ConvLayerConfig): Layer {\n return new Conv1D(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Convolutional',\n * namespace: 'layers',\n * useDocsFrom: 'Conv2D',\n * configParamIndices: [0]\n * }\n */\nexport function conv2d(config: ConvLayerConfig): Layer {\n return new Conv2D(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Convolutional',\n * namespace: 'layers',\n * useDocsFrom: 'Conv2DTranspose',\n * configParamIndices: [0]\n * }\n */\nexport function conv2dTranspose(config: ConvLayerConfig): Layer {\n return new Conv2DTranspose(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Convolutional',\n * namespace: 'layers',\n * useDocsFrom: 'SeparableConv2D',\n * configParamIndices: [0]\n * }\n */\nexport function separableConv2d(config: SeparableConvLayerConfig): Layer {\n return new SeparableConv2D(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Convolutional',\n * namespace: 'layers',\n * useDocsFrom: 'Cropping2D',\n * configParamIndices: [0]\n * }\n */\nexport function cropping2D(config: Cropping2DLayerConfig): Layer {\n return new Cropping2D(config);\n}\n\n/**\n * @doc{\n * heading: 'Layers',\n * subheading: 'Convolutional',\n * namespace: 'layers',\n * useDocsFrom: 'UpSampling2D',\n * configParamIndices: [0]\n * }\n */\nexport function upSampling2d(config: UpSampling2DLayerConfig): Layer {\n return new UpSampling2D(config);\n}\n\n// Convolutional(depthwise) Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Convolutional',\n * namespace: 'layers',\n * useDocsFrom: 'DepthwiseConv2D',\n * configParamIndices: [0]\n * }\n */\n\nexport function depthwiseConv2d(config: DepthwiseConv2DLayerConfig): Layer {\n return new DepthwiseConv2D(config);\n}\n\n// Basic Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Basic',\n * namespace: 'layers',\n * useDocsFrom: 'Activation',\n * configParamIndices: [0]\n * }\n */\nexport function activation(config: ActivationLayerConfig): Layer {\n return new Activation(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Basic',\n * namespace: 'layers',\n * useDocsFrom: 'Dense',\n * configParamIndices: [0]\n * }\n */\nexport function dense(config: DenseLayerConfig): Layer {\n return new Dense(config);\n}\n\n/**\n * @doc {\n * 
heading: 'Layers',\n * subheading: 'Basic',\n * namespace: 'layers',\n * useDocsFrom: 'Dropout',\n * configParamIndices: [0]\n * }\n */\nexport function dropout(config: DropoutLayerConfig): Layer {\n return new Dropout(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Basic',\n * namespace: 'layers',\n * useDocsFrom: 'Flatten',\n * configParamIndices: [0]\n * }\n */\nexport function flatten(config?: LayerConfig): Layer {\n return new Flatten(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Basic',\n * namespace: 'layers',\n * useDocsFrom: 'RepeatVector',\n * configParamIndices: [0]\n * }\n */\nexport function repeatVector(config: RepeatVectorLayerConfig): Layer {\n return new RepeatVector(config);\n}\n\n/**\n * @doc{\n * heading: 'Layers',\n * subheading: 'Basic',\n * namespace: 'layers',\n * useDocsFrom: 'Reshape',\n * configParamIndices: [0]\n * }\n */\nexport function reshape(config: ReshapeLayerConfig): Layer {\n return new Reshape(config);\n}\n\n/**\n * @doc{\n * heading: 'Layers',\n * subheading: 'Basic',\n * namespace: 'layers',\n * useDocsFrom: 'Permute',\n * configParamIndices: [0]\n * }\n */\nexport function permute(config: PermuteLayerConfig): Layer {\n return new Permute(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Basic',\n * namespace: 'layers',\n * useDocsFrom: 'Embedding',\n * configParamIndices: [0]\n * }\n */\nexport function embedding(config: EmbeddingLayerConfig): Layer {\n return new Embedding(config);\n}\n\n// Merge Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Merge',\n * namespace: 'layers',\n * useDocsFrom: 'Add',\n * configParamIndices: [0]\n * }\n */\nexport function add(config?: LayerConfig): Layer {\n return new Add(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Merge',\n * namespace: 'layers',\n * useDocsFrom: 'Average',\n * configParamIndices: [0]\n * }\n */\nexport function average(config?: LayerConfig): Layer {\n return new Average(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Merge',\n * namespace: 'layers',\n * useDocsFrom: 'Concatenate',\n * configParamIndices: [0]\n * }\n */\nexport function concatenate(config?: ConcatenateLayerConfig): Layer {\n return new Concatenate(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Merge',\n * namespace: 'layers',\n * useDocsFrom: 'Maximum',\n * configParamIndices: [0]\n * }\n */\nexport function maximum(config?: LayerConfig): Layer {\n return new Maximum(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Merge',\n * namespace: 'layers',\n * useDocsFrom: 'Minimum',\n * configParamIndices: [0]\n * }\n */\nexport function minimum(config?: LayerConfig): Layer {\n return new Minimum(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Merge',\n * namespace: 'layers',\n * useDocsFrom: 'Multiply',\n * configParamIndices: [0]\n * }\n */\nexport function multiply(config?: LayerConfig): Layer {\n return new Multiply(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Merge',\n * namespace: 'layers',\n * useDocsFrom: 'Dot',\n * configParamIndices: [0]\n * }\n */\nexport function dot(config: DotLayerConfig): Layer {\n return new Dot(config);\n}\n\n// Normalization Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Normalization',\n * namespace: 'layers',\n * useDocsFrom: 'BatchNormalization',\n * configParamIndices: [0]\n * }\n */\nexport function batchNormalization(config?: BatchNormalizationLayerConfig):\n 
Layer {\n return new BatchNormalization(config);\n}\n\n// Padding Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Padding',\n * namespace: 'layers',\n * useDocsFrom: 'ZeroPadding2D',\n * configParamIndices: [0]\n * }\n */\nexport function zeroPadding2d(config?: ZeroPadding2DLayerConfig): Layer {\n return new ZeroPadding2D(config);\n}\n\n// Pooling Layers.\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Pooling',\n * namespace: 'layers',\n * useDocsFrom: 'AveragePooling1D',\n * configParamIndices: [0]\n * }\n */\nexport function averagePooling1d(config: Pooling1DLayerConfig): Layer {\n return new AveragePooling1D(config);\n}\nexport function avgPool1d(config: Pooling1DLayerConfig): Layer {\n return averagePooling1d(config);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling1d(config: Pooling1DLayerConfig): Layer {\n return averagePooling1d(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Pooling',\n * namespace: 'layers',\n * useDocsFrom: 'AveragePooling2D',\n * configParamIndices: [0]\n * }\n */\nexport function averagePooling2d(config: Pooling2DLayerConfig): Layer {\n return new AveragePooling2D(config);\n}\nexport function avgPool2d(config: Pooling2DLayerConfig): Layer {\n return averagePooling2d(config);\n}\n// For backwards compatibility.\n// See https://github.com/tensorflow/tfjs/issues/152\nexport function avgPooling2d(config: Pooling2DLayerConfig): Layer {\n return averagePooling2d(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Pooling',\n * namespace: 'layers',\n * useDocsFrom: 'GlobalAveragePooling1D',\n * configParamIndices: [0]\n * }\n */\nexport function globalAveragePooling1d(config: LayerConfig): Layer {\n return new GlobalAveragePooling1D(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Pooling',\n * namespace: 'layers',\n * useDocsFrom: 'GlobalAveragePooling2D',\n * configParamIndices: [0]\n * }\n */\nexport function globalAveragePooling2d(config: GlobalPooling2DLayerConfig):\n Layer {\n return new GlobalAveragePooling2D(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Pooling',\n * namespace: 'layers',\n * useDocsFrom: 'GlobalMaxPooling1D',\n * configParamIndices: [0]\n * }\n */\nexport function globalMaxPooling1d(config: LayerConfig): Layer {\n return new GlobalMaxPooling1D(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Pooling',\n * namespace: 'layers',\n * useDocsFrom: 'GlobalMaxPooling2D',\n * configParamIndices: [0]\n * }\n */\nexport function globalMaxPooling2d(config: GlobalPooling2DLayerConfig): Layer {\n return new GlobalMaxPooling2D(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Pooling',\n * namespace: 'layers',\n * useDocsFrom: 'MaxPooling1D',\n * configParamIndices: [0]\n * }\n */\nexport function maxPooling1d(config: Pooling1DLayerConfig): Layer {\n return new MaxPooling1D(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Pooling',\n * namespace: 'layers',\n * useDocsFrom: 'MaxPooling2D',\n * configParamIndices: [0]\n * }\n */\nexport function maxPooling2d(config: Pooling2DLayerConfig): Layer {\n return new MaxPooling2D(config);\n}\n\n// Recurrent Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Recurrent',\n * namespace: 'layers',\n * useDocsFrom: 'GRU',\n * configParamIndices: [0]\n * }\n */\nexport function gru(config: GRULayerConfig): Layer {\n return new GRU(config);\n}\n\n/**\n * @doc {\n * 
heading: 'Layers',\n * subheading: 'Recurrent',\n * namespace: 'layers',\n * useDocsFrom: 'GRUCell',\n * configParamIndices: [0]\n * }\n */\nexport function gruCell(config: GRUCellLayerConfig): RNNCell {\n return new GRUCell(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Recurrent',\n * namespace: 'layers',\n * useDocsFrom: 'LSTM',\n * configParamIndices: [0]\n * }\n */\nexport function lstm(config: LSTMLayerConfig): Layer {\n return new LSTM(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Recurrent',\n * namespace: 'layers',\n * useDocsFrom: 'LSTMCell',\n * configParamIndices: [0]\n * }\n */\nexport function lstmCell(config: LSTMCellLayerConfig): RNNCell {\n return new LSTMCell(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Recurrent',\n * namespace: 'layers',\n * useDocsFrom: 'SimpleRNN',\n * configParamIndices: [0]\n * }\n */\nexport function simpleRNN(config: SimpleRNNLayerConfig): Layer {\n return new SimpleRNN(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Recurrent',\n * namespace: 'layers',\n * useDocsFrom: 'SimpleRNNCell',\n * configParamIndices: [0]\n * }\n */\nexport function simpleRNNCell(config: SimpleRNNCellLayerConfig): RNNCell {\n return new SimpleRNNCell(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Recurrent',\n * namespace: 'layers',\n * useDocsFrom: 'RNN',\n * configParamIndices: [0]\n * }\n */\nexport function rnn(config: RNNLayerConfig): Layer {\n return new RNN(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Recurrent',\n * namespace: 'layers',\n * useDocsFrom: 'RNN',\n * configParamIndices: [0]\n * }\n */\nexport function stackedRNNCells(config: StackedRNNCellsConfig): RNNCell {\n return new StackedRNNCells(config);\n}\n\n// Wrapper Layers.\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Wrapper',\n * namespace: 'layers',\n * useDocsFrom: 'Bidirectional',\n * configParamIndices: [0]\n * }\n */\nexport function bidirectional(config: BidirectionalLayerConfig): Wrapper {\n return new Bidirectional(config);\n}\n\n/**\n * @doc {\n * heading: 'Layers',\n * subheading: 'Wrapper',\n * namespace: 'layers',\n * useDocsFrom: 'TimeDistributed',\n * configParamIndices: [0]\n * }\n */\nexport function timeDistributed(config: WrapperLayerConfig): Layer {\n return new TimeDistributed(config);\n}\n\n// Aliases for pooling.\nexport const globalMaxPool1d = globalMaxPooling1d;\nexport const globalMaxPool2d = globalMaxPooling2d;\nexport const maxPool1d = maxPooling1d;\nexport const maxPool2d = maxPooling2d;\n\nexport {Layer, RNN, RNNCell, input /* alias for tf.input */};\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport {Tensor} from '@tensorflow/tfjs-core';\n\nimport * as losses from './losses';\nimport * as metrics from './metrics';\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'binaryAccuracy'\n * }\n */\nexport function binaryAccuracy(yTrue: Tensor, yPred: Tensor): Tensor {\n return metrics.binaryAccuracy(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'binaryCrossentropy'\n * }\n */\nexport function binaryCrossentropy(yTrue: Tensor, yPred: Tensor): Tensor {\n return 
metrics.binaryCrossentropy(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'categoricalAccuracy'\n * }\n */\nexport function categoricalAccuracy(yTrue: Tensor, yPred: Tensor): Tensor {\n return metrics.categoricalAccuracy(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'categoricalCrossentropy'\n * }\n */\nexport function categoricalCrossentropy(yTrue: Tensor, yPred: Tensor): Tensor {\n return metrics.categoricalCrossentropy(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'precision'\n * }\n */\nexport function precision(yTrue: Tensor, yPred: Tensor): Tensor {\n return metrics.precision(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'recall'\n * }\n */\nexport function recall(yTrue: Tensor, yPred: Tensor): Tensor {\n return metrics.recall(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'cosineProximity'\n * }\n */\nexport function cosineProximity(yTrue: Tensor, yPred: Tensor): Tensor {\n return losses.cosineProximity(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'meanAbsoluteError'\n * }\n */\nexport function meanAbsoluteError(yTrue: Tensor, yPred: Tensor): Tensor {\n return losses.meanAbsoluteError(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'meanAbsolutePercentageError'\n * }\n */\nexport function meanAbsolutePercentageError(\n yTrue: Tensor, yPred: Tensor): Tensor {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\n\nexport function MAPE(yTrue: Tensor, yPred: Tensor): Tensor {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\n\nexport function mape(yTrue: Tensor, yPred: Tensor): Tensor {\n return losses.meanAbsolutePercentageError(yTrue, yPred);\n}\n\n/**\n * @doc {\n * heading: 'Metrics',\n * namespace: 'metrics',\n * useDocsFrom: 'meanSquaredError'\n * }\n */\nexport function meanSquaredError(yTrue: Tensor, yPred: Tensor): Tensor {\n return losses.meanSquaredError(yTrue, yPred);\n}\n\nexport function MSE(yTrue: Tensor, yPred: Tensor): Tensor {\n return losses.meanSquaredError(yTrue, yPred);\n}\n\nexport function mse(yTrue: Tensor, yPred: Tensor): Tensor {\n return losses.meanSquaredError(yTrue, yPred);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\nimport * as regularizers from './regularizers';\n// tslint:disable-next-line:max-line-length\nimport {L1Config, L1L2, L1L2Config, L2Config, Regularizer} from './regularizers';\n\n/**\n * @doc {\n * heading: 'Regularizers',\n * namespace: 'regularizers',\n * useDocsFrom: 'L1L2',\n * configParamIndices: [0]\n * }\n */\nexport function l1l2(config?: L1L2Config): Regularizer {\n return new L1L2(config);\n}\n\n/**\n * @doc {\n * heading: 'Regularizers',\n * namespace: 'regularizers',\n * useDocsFrom: 'L1L2',\n * configParamIndices: [0]\n * }\n */\nexport function l1(config?: L1Config): Regularizer {\n return regularizers.l1(config);\n}\n\n/**\n * @doc {\n * heading: 'Regularizers',\n * namespace: 'regularizers',\n * useDocsFrom: 'L1L2',\n * configParamIndices: [0]\n * }\n */\nexport 
function l2(config?: L2Config): Regularizer {\n return regularizers.l2(config);\n}\n","/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Use of this source code is governed by an MIT-style\n * license that can be found in the LICENSE file or at\n * https://opensource.org/licenses/MIT.\n * =============================================================================\n */\n\n/* Original source: keras/callbacks.py */\n\nimport {BaseCallback} from './base_callbacks';\nimport {Container} from './engine/container';\nimport {Model} from './engine/training';\n\nexport abstract class Callback extends BaseCallback {\n /** Instance of `keras.models.Model`. Reference of the model being trained. */\n model: Model = null;\n\n setModel(model: Container): void {\n if (!(model instanceof Model)) {\n throw new Error('model must be a Model, not some other Container');\n }\n this.model = model as Model;\n }\n}\n","\"use strict\";\r\nmodule.exports = asPromise;\r\n\r\n/**\r\n * Callback as used by {@link util.asPromise}.\r\n * @typedef asPromiseCallback\r\n * @type {function}\r\n * @param {Error|null} error Error, if any\r\n * @param {...*} params Additional arguments\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Returns a promise from a node-style callback function.\r\n * @memberof util\r\n * @param {asPromiseCallback} fn Function to call\r\n * @param {*} ctx Function context\r\n * @param {...*} params Function arguments\r\n * @returns {Promise<*>} Promisified function\r\n */\r\nfunction asPromise(fn, ctx/*, varargs */) {\r\n var params = new Array(arguments.length - 1),\r\n offset = 0,\r\n index = 2,\r\n pending = true;\r\n while (index < arguments.length)\r\n params[offset++] = arguments[index++];\r\n return new Promise(function executor(resolve, reject) {\r\n params[offset] = function callback(err/*, varargs */) {\r\n if (pending) {\r\n pending = false;\r\n if (err)\r\n reject(err);\r\n else {\r\n var params = new Array(arguments.length - 1),\r\n offset = 0;\r\n while (offset < params.length)\r\n params[offset++] = arguments[offset];\r\n resolve.apply(null, params);\r\n }\r\n }\r\n };\r\n try {\r\n fn.apply(ctx || null, params);\r\n } catch (err) {\r\n if (pending) {\r\n pending = false;\r\n reject(err);\r\n }\r\n }\r\n });\r\n}\r\n","\"use strict\";\r\n\r\n/**\r\n * A minimal base64 implementation for number arrays.\r\n * @memberof util\r\n * @namespace\r\n */\r\nvar base64 = exports;\r\n\r\n/**\r\n * Calculates the byte length of a base64 encoded string.\r\n * @param {string} string Base64 encoded string\r\n * @returns {number} Byte length\r\n */\r\nbase64.length = function length(string) {\r\n var p = string.length;\r\n if (!p)\r\n return 0;\r\n var n = 0;\r\n while (--p % 4 > 1 && string.charAt(p) === \"=\")\r\n ++n;\r\n return Math.ceil(string.length * 3) / 4 - n;\r\n};\r\n\r\n// Base64 encoding table\r\nvar b64 = new Array(64);\r\n\r\n// Base64 decoding table\r\nvar s64 = new Array(123);\r\n\r\n// 65..90, 97..122, 48..57, 43, 47\r\nfor (var i = 0; i < 64;)\r\n s64[b64[i] = i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? 
i - 4 : i - 59 | 43] = i++;\r\n\r\n/**\r\n * Encodes a buffer to a base64 encoded string.\r\n * @param {Uint8Array} buffer Source buffer\r\n * @param {number} start Source start\r\n * @param {number} end Source end\r\n * @returns {string} Base64 encoded string\r\n */\r\nbase64.encode = function encode(buffer, start, end) {\r\n var parts = null,\r\n chunk = [];\r\n var i = 0, // output index\r\n j = 0, // goto index\r\n t; // temporary\r\n while (start < end) {\r\n var b = buffer[start++];\r\n switch (j) {\r\n case 0:\r\n chunk[i++] = b64[b >> 2];\r\n t = (b & 3) << 4;\r\n j = 1;\r\n break;\r\n case 1:\r\n chunk[i++] = b64[t | b >> 4];\r\n t = (b & 15) << 2;\r\n j = 2;\r\n break;\r\n case 2:\r\n chunk[i++] = b64[t | b >> 6];\r\n chunk[i++] = b64[b & 63];\r\n j = 0;\r\n break;\r\n }\r\n if (i > 8191) {\r\n (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));\r\n i = 0;\r\n }\r\n }\r\n if (j) {\r\n chunk[i++] = b64[t];\r\n chunk[i++] = 61;\r\n if (j === 1)\r\n chunk[i++] = 61;\r\n }\r\n if (parts) {\r\n if (i)\r\n parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));\r\n return parts.join(\"\");\r\n }\r\n return String.fromCharCode.apply(String, chunk.slice(0, i));\r\n};\r\n\r\nvar invalidEncoding = \"invalid encoding\";\r\n\r\n/**\r\n * Decodes a base64 encoded string to a buffer.\r\n * @param {string} string Source string\r\n * @param {Uint8Array} buffer Destination buffer\r\n * @param {number} offset Destination offset\r\n * @returns {number} Number of bytes written\r\n * @throws {Error} If encoding is invalid\r\n */\r\nbase64.decode = function decode(string, buffer, offset) {\r\n var start = offset;\r\n var j = 0, // goto index\r\n t; // temporary\r\n for (var i = 0; i < string.length;) {\r\n var c = string.charCodeAt(i++);\r\n if (c === 61 && j > 1)\r\n break;\r\n if ((c = s64[c]) === undefined)\r\n throw Error(invalidEncoding);\r\n switch (j) {\r\n case 0:\r\n t = c;\r\n j = 1;\r\n break;\r\n case 1:\r\n buffer[offset++] = t << 2 | (c & 48) >> 4;\r\n t = c;\r\n j = 2;\r\n break;\r\n case 2:\r\n buffer[offset++] = (t & 15) << 4 | (c & 60) >> 2;\r\n t = c;\r\n j = 3;\r\n break;\r\n case 3:\r\n buffer[offset++] = (t & 3) << 6 | c;\r\n j = 0;\r\n break;\r\n }\r\n }\r\n if (j === 1)\r\n throw Error(invalidEncoding);\r\n return offset - start;\r\n};\r\n\r\n/**\r\n * Tests if the specified string appears to be base64 encoded.\r\n * @param {string} string String to test\r\n * @returns {boolean} `true` if probably base64 encoded, otherwise false\r\n */\r\nbase64.test = function test(string) {\r\n return /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(string);\r\n};\r\n","\"use strict\";\r\nmodule.exports = EventEmitter;\r\n\r\n/**\r\n * Constructs a new event emitter instance.\r\n * @classdesc A minimal event emitter.\r\n * @memberof util\r\n * @constructor\r\n */\r\nfunction EventEmitter() {\r\n\r\n /**\r\n * Registered listeners.\r\n * @type {Object.<string,*>}\r\n * @private\r\n */\r\n this._listeners = {};\r\n}\r\n\r\n/**\r\n * Registers an event listener.\r\n * @param {string} evt Event name\r\n * @param {function} fn Listener\r\n * @param {*} [ctx] Listener context\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.on = function on(evt, fn, ctx) {\r\n (this._listeners[evt] || (this._listeners[evt] = [])).push({\r\n fn : fn,\r\n ctx : ctx || this\r\n });\r\n return this;\r\n};\r\n\r\n/**\r\n * Removes an event listener or any matching listeners if arguments are omitted.\r\n * @param {string} [evt] Event name. 
Removes all listeners if omitted.\r\n * @param {function} [fn] Listener to remove. Removes all listeners of `evt` if omitted.\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.off = function off(evt, fn) {\r\n if (evt === undefined)\r\n this._listeners = {};\r\n else {\r\n if (fn === undefined)\r\n this._listeners[evt] = [];\r\n else {\r\n var listeners = this._listeners[evt];\r\n for (var i = 0; i < listeners.length;)\r\n if (listeners[i].fn === fn)\r\n listeners.splice(i, 1);\r\n else\r\n ++i;\r\n }\r\n }\r\n return this;\r\n};\r\n\r\n/**\r\n * Emits an event by calling its listeners with the specified arguments.\r\n * @param {string} evt Event name\r\n * @param {...*} args Arguments\r\n * @returns {util.EventEmitter} `this`\r\n */\r\nEventEmitter.prototype.emit = function emit(evt) {\r\n var listeners = this._listeners[evt];\r\n if (listeners) {\r\n var args = [],\r\n i = 1;\r\n for (; i < arguments.length;)\r\n args.push(arguments[i++]);\r\n for (i = 0; i < listeners.length;)\r\n listeners[i].fn.apply(listeners[i++].ctx, args);\r\n }\r\n return this;\r\n};\r\n","\"use strict\";\r\n\r\nmodule.exports = factory(factory);\r\n\r\n/**\r\n * Reads / writes floats / doubles from / to buffers.\r\n * @name util.float\r\n * @namespace\r\n */\r\n\r\n/**\r\n * Writes a 32 bit float to a buffer using little endian byte order.\r\n * @name util.float.writeFloatLE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Writes a 32 bit float to a buffer using big endian byte order.\r\n * @name util.float.writeFloatBE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Reads a 32 bit float from a buffer using little endian byte order.\r\n * @name util.float.readFloatLE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Reads a 32 bit float from a buffer using big endian byte order.\r\n * @name util.float.readFloatBE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Writes a 64 bit double to a buffer using little endian byte order.\r\n * @name util.float.writeDoubleLE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Writes a 64 bit double to a buffer using big endian byte order.\r\n * @name util.float.writeDoubleBE\r\n * @function\r\n * @param {number} val Value to write\r\n * @param {Uint8Array} buf Target buffer\r\n * @param {number} pos Target buffer offset\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Reads a 64 bit double from a buffer using little endian byte order.\r\n * @name util.float.readDoubleLE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n */\r\n\r\n/**\r\n * Reads a 64 bit double from a buffer using big endian byte order.\r\n * @name util.float.readDoubleBE\r\n * @function\r\n * @param {Uint8Array} buf Source buffer\r\n * @param {number} pos Source buffer offset\r\n * @returns {number} Value read\r\n 
*/\r\n\r\n// Factory function for the purpose of node-based testing in modified global environments\r\nfunction factory(exports) {\r\n\r\n // float: typed array\r\n if (typeof Float32Array !== \"undefined\") (function() {\r\n\r\n var f32 = new Float32Array([ -0 ]),\r\n f8b = new Uint8Array(f32.buffer),\r\n le = f8b[3] === 128;\r\n\r\n function writeFloat_f32_cpy(val, buf, pos) {\r\n f32[0] = val;\r\n buf[pos ] = f8b[0];\r\n buf[pos + 1] = f8b[1];\r\n buf[pos + 2] = f8b[2];\r\n buf[pos + 3] = f8b[3];\r\n }\r\n\r\n function writeFloat_f32_rev(val, buf, pos) {\r\n f32[0] = val;\r\n buf[pos ] = f8b[3];\r\n buf[pos + 1] = f8b[2];\r\n buf[pos + 2] = f8b[1];\r\n buf[pos + 3] = f8b[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.writeFloatLE = le ? writeFloat_f32_cpy : writeFloat_f32_rev;\r\n /* istanbul ignore next */\r\n exports.writeFloatBE = le ? writeFloat_f32_rev : writeFloat_f32_cpy;\r\n\r\n function readFloat_f32_cpy(buf, pos) {\r\n f8b[0] = buf[pos ];\r\n f8b[1] = buf[pos + 1];\r\n f8b[2] = buf[pos + 2];\r\n f8b[3] = buf[pos + 3];\r\n return f32[0];\r\n }\r\n\r\n function readFloat_f32_rev(buf, pos) {\r\n f8b[3] = buf[pos ];\r\n f8b[2] = buf[pos + 1];\r\n f8b[1] = buf[pos + 2];\r\n f8b[0] = buf[pos + 3];\r\n return f32[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.readFloatLE = le ? readFloat_f32_cpy : readFloat_f32_rev;\r\n /* istanbul ignore next */\r\n exports.readFloatBE = le ? readFloat_f32_rev : readFloat_f32_cpy;\r\n\r\n // float: ieee754\r\n })(); else (function() {\r\n\r\n function writeFloat_ieee754(writeUint, val, buf, pos) {\r\n var sign = val < 0 ? 1 : 0;\r\n if (sign)\r\n val = -val;\r\n if (val === 0)\r\n writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos);\r\n else if (isNaN(val))\r\n writeUint(2143289344, buf, pos);\r\n else if (val > 3.4028234663852886e+38) // +-Infinity\r\n writeUint((sign << 31 | 2139095040) >>> 0, buf, pos);\r\n else if (val < 1.1754943508222875e-38) // denormal\r\n writeUint((sign << 31 | Math.round(val / 1.401298464324817e-45)) >>> 0, buf, pos);\r\n else {\r\n var exponent = Math.floor(Math.log(val) / Math.LN2),\r\n mantissa = Math.round(val * Math.pow(2, -exponent) * 8388608) & 8388607;\r\n writeUint((sign << 31 | exponent + 127 << 23 | mantissa) >>> 0, buf, pos);\r\n }\r\n }\r\n\r\n exports.writeFloatLE = writeFloat_ieee754.bind(null, writeUintLE);\r\n exports.writeFloatBE = writeFloat_ieee754.bind(null, writeUintBE);\r\n\r\n function readFloat_ieee754(readUint, buf, pos) {\r\n var uint = readUint(buf, pos),\r\n sign = (uint >> 31) * 2 + 1,\r\n exponent = uint >>> 23 & 255,\r\n mantissa = uint & 8388607;\r\n return exponent === 255\r\n ? mantissa\r\n ? NaN\r\n : sign * Infinity\r\n : exponent === 0 // denormal\r\n ? 
sign * 1.401298464324817e-45 * mantissa\r\n : sign * Math.pow(2, exponent - 150) * (mantissa + 8388608);\r\n }\r\n\r\n exports.readFloatLE = readFloat_ieee754.bind(null, readUintLE);\r\n exports.readFloatBE = readFloat_ieee754.bind(null, readUintBE);\r\n\r\n })();\r\n\r\n // double: typed array\r\n if (typeof Float64Array !== \"undefined\") (function() {\r\n\r\n var f64 = new Float64Array([-0]),\r\n f8b = new Uint8Array(f64.buffer),\r\n le = f8b[7] === 128;\r\n\r\n function writeDouble_f64_cpy(val, buf, pos) {\r\n f64[0] = val;\r\n buf[pos ] = f8b[0];\r\n buf[pos + 1] = f8b[1];\r\n buf[pos + 2] = f8b[2];\r\n buf[pos + 3] = f8b[3];\r\n buf[pos + 4] = f8b[4];\r\n buf[pos + 5] = f8b[5];\r\n buf[pos + 6] = f8b[6];\r\n buf[pos + 7] = f8b[7];\r\n }\r\n\r\n function writeDouble_f64_rev(val, buf, pos) {\r\n f64[0] = val;\r\n buf[pos ] = f8b[7];\r\n buf[pos + 1] = f8b[6];\r\n buf[pos + 2] = f8b[5];\r\n buf[pos + 3] = f8b[4];\r\n buf[pos + 4] = f8b[3];\r\n buf[pos + 5] = f8b[2];\r\n buf[pos + 6] = f8b[1];\r\n buf[pos + 7] = f8b[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.writeDoubleLE = le ? writeDouble_f64_cpy : writeDouble_f64_rev;\r\n /* istanbul ignore next */\r\n exports.writeDoubleBE = le ? writeDouble_f64_rev : writeDouble_f64_cpy;\r\n\r\n function readDouble_f64_cpy(buf, pos) {\r\n f8b[0] = buf[pos ];\r\n f8b[1] = buf[pos + 1];\r\n f8b[2] = buf[pos + 2];\r\n f8b[3] = buf[pos + 3];\r\n f8b[4] = buf[pos + 4];\r\n f8b[5] = buf[pos + 5];\r\n f8b[6] = buf[pos + 6];\r\n f8b[7] = buf[pos + 7];\r\n return f64[0];\r\n }\r\n\r\n function readDouble_f64_rev(buf, pos) {\r\n f8b[7] = buf[pos ];\r\n f8b[6] = buf[pos + 1];\r\n f8b[5] = buf[pos + 2];\r\n f8b[4] = buf[pos + 3];\r\n f8b[3] = buf[pos + 4];\r\n f8b[2] = buf[pos + 5];\r\n f8b[1] = buf[pos + 6];\r\n f8b[0] = buf[pos + 7];\r\n return f64[0];\r\n }\r\n\r\n /* istanbul ignore next */\r\n exports.readDoubleLE = le ? readDouble_f64_cpy : readDouble_f64_rev;\r\n /* istanbul ignore next */\r\n exports.readDoubleBE = le ? readDouble_f64_rev : readDouble_f64_cpy;\r\n\r\n // double: ieee754\r\n })(); else (function() {\r\n\r\n function writeDouble_ieee754(writeUint, off0, off1, val, buf, pos) {\r\n var sign = val < 0 ? 1 : 0;\r\n if (sign)\r\n val = -val;\r\n if (val === 0) {\r\n writeUint(0, buf, pos + off0);\r\n writeUint(1 / val > 0 ? 
/* positive */ 0 : /* negative 0 */ 2147483648, buf, pos + off1);\r\n } else if (isNaN(val)) {\r\n writeUint(0, buf, pos + off0);\r\n writeUint(2146959360, buf, pos + off1);\r\n } else if (val > 1.7976931348623157e+308) { // +-Infinity\r\n writeUint(0, buf, pos + off0);\r\n writeUint((sign << 31 | 2146435072) >>> 0, buf, pos + off1);\r\n } else {\r\n var mantissa;\r\n if (val < 2.2250738585072014e-308) { // denormal\r\n mantissa = val / 5e-324;\r\n writeUint(mantissa >>> 0, buf, pos + off0);\r\n writeUint((sign << 31 | mantissa / 4294967296) >>> 0, buf, pos + off1);\r\n } else {\r\n var exponent = Math.floor(Math.log(val) / Math.LN2);\r\n if (exponent === 1024)\r\n exponent = 1023;\r\n mantissa = val * Math.pow(2, -exponent);\r\n writeUint(mantissa * 4503599627370496 >>> 0, buf, pos + off0);\r\n writeUint((sign << 31 | exponent + 1023 << 20 | mantissa * 1048576 & 1048575) >>> 0, buf, pos + off1);\r\n }\r\n }\r\n }\r\n\r\n exports.writeDoubleLE = writeDouble_ieee754.bind(null, writeUintLE, 0, 4);\r\n exports.writeDoubleBE = writeDouble_ieee754.bind(null, writeUintBE, 4, 0);\r\n\r\n function readDouble_ieee754(readUint, off0, off1, buf, pos) {\r\n var lo = readUint(buf, pos + off0),\r\n hi = readUint(buf, pos + off1);\r\n var sign = (hi >> 31) * 2 + 1,\r\n exponent = hi >>> 20 & 2047,\r\n mantissa = 4294967296 * (hi & 1048575) + lo;\r\n return exponent === 2047\r\n ? mantissa\r\n ? NaN\r\n : sign * Infinity\r\n : exponent === 0 // denormal\r\n ? sign * 5e-324 * mantissa\r\n : sign * Math.pow(2, exponent - 1075) * (mantissa + 4503599627370496);\r\n }\r\n\r\n exports.readDoubleLE = readDouble_ieee754.bind(null, readUintLE, 0, 4);\r\n exports.readDoubleBE = readDouble_ieee754.bind(null, readUintBE, 4, 0);\r\n\r\n })();\r\n\r\n return exports;\r\n}\r\n\r\n// uint helpers\r\n\r\nfunction writeUintLE(val, buf, pos) {\r\n buf[pos ] = val & 255;\r\n buf[pos + 1] = val >>> 8 & 255;\r\n buf[pos + 2] = val >>> 16 & 255;\r\n buf[pos + 3] = val >>> 24;\r\n}\r\n\r\nfunction writeUintBE(val, buf, pos) {\r\n buf[pos ] = val >>> 24;\r\n buf[pos + 1] = val >>> 16 & 255;\r\n buf[pos + 2] = val >>> 8 & 255;\r\n buf[pos + 3] = val & 255;\r\n}\r\n\r\nfunction readUintLE(buf, pos) {\r\n return (buf[pos ]\r\n | buf[pos + 1] << 8\r\n | buf[pos + 2] << 16\r\n | buf[pos + 3] << 24) >>> 0;\r\n}\r\n\r\nfunction readUintBE(buf, pos) {\r\n return (buf[pos ] << 24\r\n | buf[pos + 1] << 16\r\n | buf[pos + 2] << 8\r\n | buf[pos + 3]) >>> 0;\r\n}\r\n","\"use strict\";\r\nmodule.exports = inquire;\r\n\r\n/**\r\n * Requires a module only if available.\r\n * @memberof util\r\n * @param {string} moduleName Module to require\r\n * @returns {?Object} Required module if available and not empty, otherwise `null`\r\n */\r\nfunction inquire(moduleName) {\r\n try {\r\n var mod = eval(\"quire\".replace(/^/,\"re\"))(moduleName); // eslint-disable-line no-eval\r\n if (mod && (mod.length || Object.keys(mod).length))\r\n return mod;\r\n } catch (e) {} // eslint-disable-line no-empty\r\n return null;\r\n}\r\n","\"use strict\";\r\n\r\n/**\r\n * A minimal UTF8 implementation for number arrays.\r\n * @memberof util\r\n * @namespace\r\n */\r\nvar utf8 = exports;\r\n\r\n/**\r\n * Calculates the UTF8 byte length of a string.\r\n * @param {string} string String\r\n * @returns {number} Byte length\r\n */\r\nutf8.length = function utf8_length(string) {\r\n var len = 0,\r\n c = 0;\r\n for (var i = 0; i < string.length; ++i) {\r\n c = string.charCodeAt(i);\r\n if (c < 128)\r\n len += 1;\r\n else if (c < 2048)\r\n len += 2;\r\n else if ((c & 0xFC00) === 
0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) {\r\n ++i;\r\n len += 4;\r\n } else\r\n len += 3;\r\n }\r\n return len;\r\n};\r\n\r\n/**\r\n * Reads UTF8 bytes as a string.\r\n * @param {Uint8Array} buffer Source buffer\r\n * @param {number} start Source start\r\n * @param {number} end Source end\r\n * @returns {string} String read\r\n */\r\nutf8.read = function utf8_read(buffer, start, end) {\r\n var len = end - start;\r\n if (len < 1)\r\n return \"\";\r\n var parts = null,\r\n chunk = [],\r\n i = 0, // char offset\r\n t; // temporary\r\n while (start < end) {\r\n t = buffer[start++];\r\n if (t < 128)\r\n chunk[i++] = t;\r\n else if (t > 191 && t < 224)\r\n chunk[i++] = (t & 31) << 6 | buffer[start++] & 63;\r\n else if (t > 239 && t < 365) {\r\n t = ((t & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000;\r\n chunk[i++] = 0xD800 + (t >> 10);\r\n chunk[i++] = 0xDC00 + (t & 1023);\r\n } else\r\n chunk[i++] = (t & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63;\r\n if (i > 8191) {\r\n (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));\r\n i = 0;\r\n }\r\n }\r\n if (parts) {\r\n if (i)\r\n parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));\r\n return parts.join(\"\");\r\n }\r\n return String.fromCharCode.apply(String, chunk.slice(0, i));\r\n};\r\n\r\n/**\r\n * Writes a string as UTF8 bytes.\r\n * @param {string} string Source string\r\n * @param {Uint8Array} buffer Destination buffer\r\n * @param {number} offset Destination offset\r\n * @returns {number} Bytes written\r\n */\r\nutf8.write = function utf8_write(string, buffer, offset) {\r\n var start = offset,\r\n c1, // character 1\r\n c2; // character 2\r\n for (var i = 0; i < string.length; ++i) {\r\n c1 = string.charCodeAt(i);\r\n if (c1 < 128) {\r\n buffer[offset++] = c1;\r\n } else if (c1 < 2048) {\r\n buffer[offset++] = c1 >> 6 | 192;\r\n buffer[offset++] = c1 & 63 | 128;\r\n } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) {\r\n c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF);\r\n ++i;\r\n buffer[offset++] = c1 >> 18 | 240;\r\n buffer[offset++] = c1 >> 12 & 63 | 128;\r\n buffer[offset++] = c1 >> 6 & 63 | 128;\r\n buffer[offset++] = c1 & 63 | 128;\r\n } else {\r\n buffer[offset++] = c1 >> 12 | 224;\r\n buffer[offset++] = c1 >> 6 & 63 | 128;\r\n buffer[offset++] = c1 & 63 | 128;\r\n }\r\n }\r\n return offset - start;\r\n};\r\n","\"use strict\";\r\nmodule.exports = pool;\r\n\r\n/**\r\n * An allocator as used by {@link util.pool}.\r\n * @typedef PoolAllocator\r\n * @type {function}\r\n * @param {number} size Buffer size\r\n * @returns {Uint8Array} Buffer\r\n */\r\n\r\n/**\r\n * A slicer as used by {@link util.pool}.\r\n * @typedef PoolSlicer\r\n * @type {function}\r\n * @param {number} start Start offset\r\n * @param {number} end End offset\r\n * @returns {Uint8Array} Buffer slice\r\n * @this {Uint8Array}\r\n */\r\n\r\n/**\r\n * A general purpose buffer pool.\r\n * @memberof util\r\n * @function\r\n * @param {PoolAllocator} alloc Allocator\r\n * @param {PoolSlicer} slice Slicer\r\n * @param {number} [size=8192] Slab size\r\n * @returns {PoolAllocator} Pooled allocator\r\n */\r\nfunction pool(alloc, slice, size) {\r\n var SIZE = size || 8192;\r\n var MAX = SIZE >>> 1;\r\n var slab = null;\r\n var offset = SIZE;\r\n return function pool_alloc(size) {\r\n if (size < 1 || size > MAX)\r\n return alloc(size);\r\n if (offset + size > SIZE) {\r\n slab = alloc(SIZE);\r\n offset = 
0;\r\n }\r\n var buf = slice.call(slab, offset, offset += size);\r\n if (offset & 7) // align to 32 bit\r\n offset = (offset | 7) + 1;\r\n return buf;\r\n };\r\n}\r\n","\"use strict\";\r\nmodule.exports = LongBits;\r\n\r\nvar util = require(\"../util/minimal\");\r\n\r\n/**\r\n * Constructs new long bits.\r\n * @classdesc Helper class for working with the low and high bits of a 64 bit value.\r\n * @memberof util\r\n * @constructor\r\n * @param {number} lo Low 32 bits, unsigned\r\n * @param {number} hi High 32 bits, unsigned\r\n */\r\nfunction LongBits(lo, hi) {\r\n\r\n // note that the casts below are theoretically unnecessary as of today, but older statically\r\n // generated converter code might still call the ctor with signed 32bits. kept for compat.\r\n\r\n /**\r\n * Low bits.\r\n * @type {number}\r\n */\r\n this.lo = lo >>> 0;\r\n\r\n /**\r\n * High bits.\r\n * @type {number}\r\n */\r\n this.hi = hi >>> 0;\r\n}\r\n\r\n/**\r\n * Zero bits.\r\n * @memberof util.LongBits\r\n * @type {util.LongBits}\r\n */\r\nvar zero = LongBits.zero = new LongBits(0, 0);\r\n\r\nzero.toNumber = function() { return 0; };\r\nzero.zzEncode = zero.zzDecode = function() { return this; };\r\nzero.length = function() { return 1; };\r\n\r\n/**\r\n * Zero hash.\r\n * @memberof util.LongBits\r\n * @type {string}\r\n */\r\nvar zeroHash = LongBits.zeroHash = \"\\0\\0\\0\\0\\0\\0\\0\\0\";\r\n\r\n/**\r\n * Constructs new long bits from the specified number.\r\n * @param {number} value Value\r\n * @returns {util.LongBits} Instance\r\n */\r\nLongBits.fromNumber = function fromNumber(value) {\r\n if (value === 0)\r\n return zero;\r\n var sign = value < 0;\r\n if (sign)\r\n value = -value;\r\n var lo = value >>> 0,\r\n hi = (value - lo) / 4294967296 >>> 0;\r\n if (sign) {\r\n hi = ~hi >>> 0;\r\n lo = ~lo >>> 0;\r\n if (++lo > 4294967295) {\r\n lo = 0;\r\n if (++hi > 4294967295)\r\n hi = 0;\r\n }\r\n }\r\n return new LongBits(lo, hi);\r\n};\r\n\r\n/**\r\n * Constructs new long bits from a number, long or string.\r\n * @param {Long|number|string} value Value\r\n * @returns {util.LongBits} Instance\r\n */\r\nLongBits.from = function from(value) {\r\n if (typeof value === \"number\")\r\n return LongBits.fromNumber(value);\r\n if (util.isString(value)) {\r\n /* istanbul ignore else */\r\n if (util.Long)\r\n value = util.Long.fromString(value);\r\n else\r\n return LongBits.fromNumber(parseInt(value, 10));\r\n }\r\n return value.low || value.high ? new LongBits(value.low >>> 0, value.high >>> 0) : zero;\r\n};\r\n\r\n/**\r\n * Converts this long bits to a possibly unsafe JavaScript number.\r\n * @param {boolean} [unsigned=false] Whether unsigned or not\r\n * @returns {number} Possibly unsafe number\r\n */\r\nLongBits.prototype.toNumber = function toNumber(unsigned) {\r\n if (!unsigned && this.hi >>> 31) {\r\n var lo = ~this.lo + 1 >>> 0,\r\n hi = ~this.hi >>> 0;\r\n if (!lo)\r\n hi = hi + 1 >>> 0;\r\n return -(lo + hi * 4294967296);\r\n }\r\n return this.lo + this.hi * 4294967296;\r\n};\r\n\r\n/**\r\n * Converts this long bits to a long.\r\n * @param {boolean} [unsigned=false] Whether unsigned or not\r\n * @returns {Long} Long\r\n */\r\nLongBits.prototype.toLong = function toLong(unsigned) {\r\n return util.Long\r\n ? 
new util.Long(this.lo | 0, this.hi | 0, Boolean(unsigned))\r\n /* istanbul ignore next */\r\n : { low: this.lo | 0, high: this.hi | 0, unsigned: Boolean(unsigned) };\r\n};\r\n\r\nvar charCodeAt = String.prototype.charCodeAt;\r\n\r\n/**\r\n * Constructs new long bits from the specified 8 characters long hash.\r\n * @param {string} hash Hash\r\n * @returns {util.LongBits} Bits\r\n */\r\nLongBits.fromHash = function fromHash(hash) {\r\n if (hash === zeroHash)\r\n return zero;\r\n return new LongBits(\r\n ( charCodeAt.call(hash, 0)\r\n | charCodeAt.call(hash, 1) << 8\r\n | charCodeAt.call(hash, 2) << 16\r\n | charCodeAt.call(hash, 3) << 24) >>> 0\r\n ,\r\n ( charCodeAt.call(hash, 4)\r\n | charCodeAt.call(hash, 5) << 8\r\n | charCodeAt.call(hash, 6) << 16\r\n | charCodeAt.call(hash, 7) << 24) >>> 0\r\n );\r\n};\r\n\r\n/**\r\n * Converts this long bits to an 8 characters long hash.\r\n * @returns {string} Hash\r\n */\r\nLongBits.prototype.toHash = function toHash() {\r\n return String.fromCharCode(\r\n this.lo & 255,\r\n this.lo >>> 8 & 255,\r\n this.lo >>> 16 & 255,\r\n this.lo >>> 24 ,\r\n this.hi & 255,\r\n this.hi >>> 8 & 255,\r\n this.hi >>> 16 & 255,\r\n this.hi >>> 24\r\n );\r\n};\r\n\r\n/**\r\n * Zig-zag encodes this long bits.\r\n * @returns {util.LongBits} `this`\r\n */\r\nLongBits.prototype.zzEncode = function zzEncode() {\r\n var mask = this.hi >> 31;\r\n this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;\r\n this.lo = ( this.lo << 1 ^ mask) >>> 0;\r\n return this;\r\n};\r\n\r\n/**\r\n * Zig-zag decodes this long bits.\r\n * @returns {util.LongBits} `this`\r\n */\r\nLongBits.prototype.zzDecode = function zzDecode() {\r\n var mask = -(this.lo & 1);\r\n this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;\r\n this.hi = ( this.hi >>> 1 ^ mask) >>> 0;\r\n return this;\r\n};\r\n\r\n/**\r\n * Calculates the length of this longbits when encoded as a varint.\r\n * @returns {number} Length\r\n */\r\nLongBits.prototype.length = function length() {\r\n var part0 = this.lo,\r\n part1 = (this.lo >>> 28 | this.hi << 4) >>> 0,\r\n part2 = this.hi >>> 24;\r\n return part2 === 0\r\n ? part1 === 0\r\n ? part0 < 16384\r\n ? part0 < 128 ? 1 : 2\r\n : part0 < 2097152 ? 3 : 4\r\n : part1 < 16384\r\n ? part1 < 128 ? 5 : 6\r\n : part1 < 2097152 ? 7 : 8\r\n : part2 < 128 ? 9 : 10;\r\n};\r\n","\"use strict\";\r\nvar util = exports;\r\n\r\n// used to return a Promise where callback is omitted\r\nutil.asPromise = require(\"@protobufjs/aspromise\");\r\n\r\n// converts to / from base64 encoded strings\r\nutil.base64 = require(\"@protobufjs/base64\");\r\n\r\n// base class of rpc.Service\r\nutil.EventEmitter = require(\"@protobufjs/eventemitter\");\r\n\r\n// float handling across browsers\r\nutil.float = require(\"@protobufjs/float\");\r\n\r\n// requires modules optionally and hides the call from bundlers\r\nutil.inquire = require(\"@protobufjs/inquire\");\r\n\r\n// converts to / from utf8 encoded strings\r\nutil.utf8 = require(\"@protobufjs/utf8\");\r\n\r\n// provides a node-like buffer pool in the browser\r\nutil.pool = require(\"@protobufjs/pool\");\r\n\r\n// utility to work with the low and high bits of a 64 bit value\r\nutil.LongBits = require(\"./longbits\");\r\n\r\n/**\r\n * An immutable empty array.\r\n * @memberof util\r\n * @type {Array.<*>}\r\n * @const\r\n */\r\nutil.emptyArray = Object.freeze ? Object.freeze([]) : /* istanbul ignore next */ []; // used on prototypes\r\n\r\n/**\r\n * An immutable empty object.\r\n * @type {Object}\r\n * @const\r\n */\r\nutil.emptyObject = Object.freeze ? 
Object.freeze({}) : /* istanbul ignore next */ {}; // used on prototypes\r\n\r\n/**\r\n * Whether running within node or not.\r\n * @memberof util\r\n * @type {boolean}\r\n * @const\r\n */\r\nutil.isNode = Boolean(global.process && global.process.versions && global.process.versions.node);\r\n\r\n/**\r\n * Tests if the specified value is an integer.\r\n * @function\r\n * @param {*} value Value to test\r\n * @returns {boolean} `true` if the value is an integer\r\n */\r\nutil.isInteger = Number.isInteger || /* istanbul ignore next */ function isInteger(value) {\r\n return typeof value === \"number\" && isFinite(value) && Math.floor(value) === value;\r\n};\r\n\r\n/**\r\n * Tests if the specified value is a string.\r\n * @param {*} value Value to test\r\n * @returns {boolean} `true` if the value is a string\r\n */\r\nutil.isString = function isString(value) {\r\n return typeof value === \"string\" || value instanceof String;\r\n};\r\n\r\n/**\r\n * Tests if the specified value is a non-null object.\r\n * @param {*} value Value to test\r\n * @returns {boolean} `true` if the value is a non-null object\r\n */\r\nutil.isObject = function isObject(value) {\r\n return value && typeof value === \"object\";\r\n};\r\n\r\n/**\r\n * Checks if a property on a message is considered to be present.\r\n * This is an alias of {@link util.isSet}.\r\n * @function\r\n * @param {Object} obj Plain object or message instance\r\n * @param {string} prop Property name\r\n * @returns {boolean} `true` if considered to be present, otherwise `false`\r\n */\r\nutil.isset =\r\n\r\n/**\r\n * Checks if a property on a message is considered to be present.\r\n * @param {Object} obj Plain object or message instance\r\n * @param {string} prop Property name\r\n * @returns {boolean} `true` if considered to be present, otherwise `false`\r\n */\r\nutil.isSet = function isSet(obj, prop) {\r\n var value = obj[prop];\r\n if (value != null && obj.hasOwnProperty(prop)) // eslint-disable-line eqeqeq, no-prototype-builtins\r\n return typeof value !== \"object\" || (Array.isArray(value) ? value.length : Object.keys(value).length) > 0;\r\n return false;\r\n};\r\n\r\n/**\r\n * Any compatible Buffer instance.\r\n * This is a minimal stand-alone definition of a Buffer instance. The actual type is that exported by node's typings.\r\n * @interface Buffer\r\n * @extends Uint8Array\r\n */\r\n\r\n/**\r\n * Node's Buffer class if available.\r\n * @type {Constructor<Buffer>}\r\n */\r\nutil.Buffer = (function() {\r\n try {\r\n var Buffer = util.inquire(\"buffer\").Buffer;\r\n // refuse to use non-node buffers if not explicitly assigned (perf reasons):\r\n return Buffer.prototype.utf8Write ? Buffer : /* istanbul ignore next */ null;\r\n } catch (e) {\r\n /* istanbul ignore next */\r\n return null;\r\n }\r\n})();\r\n\r\n// Internal alias of or polyfill for Buffer.from.\r\nutil._Buffer_from = null;\r\n\r\n// Internal alias of or polyfill for Buffer.allocUnsafe.\r\nutil._Buffer_allocUnsafe = null;\r\n\r\n/**\r\n * Creates a new buffer of whatever type supported by the environment.\r\n * @param {number|number[]} [sizeOrArray=0] Buffer size or number array\r\n * @returns {Uint8Array|Buffer} Buffer\r\n */\r\nutil.newBuffer = function newBuffer(sizeOrArray) {\r\n /* istanbul ignore next */\r\n return typeof sizeOrArray === \"number\"\r\n ? util.Buffer\r\n ? util._Buffer_allocUnsafe(sizeOrArray)\r\n : new util.Array(sizeOrArray)\r\n : util.Buffer\r\n ? util._Buffer_from(sizeOrArray)\r\n : typeof Uint8Array === \"undefined\"\r\n ? 
sizeOrArray\r\n : new Uint8Array(sizeOrArray);\r\n};\r\n\r\n/**\r\n * Array implementation used in the browser. `Uint8Array` if supported, otherwise `Array`.\r\n * @type {Constructor<Uint8Array>}\r\n */\r\nutil.Array = typeof Uint8Array !== \"undefined\" ? Uint8Array /* istanbul ignore next */ : Array;\r\n\r\n/**\r\n * Any compatible Long instance.\r\n * This is a minimal stand-alone definition of a Long instance. The actual type is that exported by long.js.\r\n * @interface Long\r\n * @property {number} low Low bits\r\n * @property {number} high High bits\r\n * @property {boolean} unsigned Whether unsigned or not\r\n */\r\n\r\n/**\r\n * Long.js's Long class if available.\r\n * @type {Constructor<Long>}\r\n */\r\nutil.Long = /* istanbul ignore next */ global.dcodeIO && /* istanbul ignore next */ global.dcodeIO.Long || util.inquire(\"long\");\r\n\r\n/**\r\n * Regular expression used to verify 2 bit (`bool`) map keys.\r\n * @type {RegExp}\r\n * @const\r\n */\r\nutil.key2Re = /^true|false|0|1$/;\r\n\r\n/**\r\n * Regular expression used to verify 32 bit (`int32` etc.) map keys.\r\n * @type {RegExp}\r\n * @const\r\n */\r\nutil.key32Re = /^-?(?:0|[1-9][0-9]*)$/;\r\n\r\n/**\r\n * Regular expression used to verify 64 bit (`int64` etc.) map keys.\r\n * @type {RegExp}\r\n * @const\r\n */\r\nutil.key64Re = /^(?:[\\\\x00-\\\\xff]{8}|-?(?:0|[1-9][0-9]*))$/;\r\n\r\n/**\r\n * Converts a number or long to an 8 characters long hash string.\r\n * @param {Long|number} value Value to convert\r\n * @returns {string} Hash\r\n */\r\nutil.longToHash = function longToHash(value) {\r\n return value\r\n ? util.LongBits.from(value).toHash()\r\n : util.LongBits.zeroHash;\r\n};\r\n\r\n/**\r\n * Converts an 8 characters long hash string to a long or number.\r\n * @param {string} hash Hash\r\n * @param {boolean} [unsigned=false] Whether unsigned or not\r\n * @returns {Long|number} Original value\r\n */\r\nutil.longFromHash = function longFromHash(hash, unsigned) {\r\n var bits = util.LongBits.fromHash(hash);\r\n if (util.Long)\r\n return util.Long.fromBits(bits.lo, bits.hi, unsigned);\r\n return bits.toNumber(Boolean(unsigned));\r\n};\r\n\r\n/**\r\n * Merges the properties of the source object into the destination object.\r\n * @memberof util\r\n * @param {Object.<string,*>} dst Destination object\r\n * @param {Object.<string,*>} src Source object\r\n * @param {boolean} [ifNotSet=false] Merges only if the key is not already set\r\n * @returns {Object.<string,*>} Destination object\r\n */\r\nfunction merge(dst, src, ifNotSet) { // used by converters\r\n for (var keys = Object.keys(src), i = 0; i < keys.length; ++i)\r\n if (dst[keys[i]] === undefined || !ifNotSet)\r\n dst[keys[i]] = src[keys[i]];\r\n return dst;\r\n}\r\n\r\nutil.merge = merge;\r\n\r\n/**\r\n * Converts the first character of a string to lower case.\r\n * @param {string} str String to convert\r\n * @returns {string} Converted string\r\n */\r\nutil.lcFirst = function lcFirst(str) {\r\n return str.charAt(0).toLowerCase() + str.substring(1);\r\n};\r\n\r\n/**\r\n * Creates a custom error constructor.\r\n * @memberof util\r\n * @param {string} name Error name\r\n * @returns {Constructor<Error>} Custom error constructor\r\n */\r\nfunction newError(name) {\r\n\r\n function CustomError(message, properties) {\r\n\r\n if (!(this instanceof CustomError))\r\n return new CustomError(message, properties);\r\n\r\n // Error.call(this, message);\r\n // ^ just returns a new error instance because the ctor can be called as a function\r\n\r\n Object.defineProperty(this, 
\"message\", { get: function() { return message; } });\r\n\r\n /* istanbul ignore next */\r\n if (Error.captureStackTrace) // node\r\n Error.captureStackTrace(this, CustomError);\r\n else\r\n Object.defineProperty(this, \"stack\", { value: (new Error()).stack || \"\" });\r\n\r\n if (properties)\r\n merge(this, properties);\r\n }\r\n\r\n (CustomError.prototype = Object.create(Error.prototype)).constructor = CustomError;\r\n\r\n Object.defineProperty(CustomError.prototype, \"name\", { get: function() { return name; } });\r\n\r\n CustomError.prototype.toString = function toString() {\r\n return this.name + \": \" + this.message;\r\n };\r\n\r\n return CustomError;\r\n}\r\n\r\nutil.newError = newError;\r\n\r\n/**\r\n * Constructs a new protocol error.\r\n * @classdesc Error subclass indicating a protocol specifc error.\r\n * @memberof util\r\n * @extends Error\r\n * @template T extends Message<T>\r\n * @constructor\r\n * @param {string} message Error message\r\n * @param {Object.<string,*>} [properties] Additional properties\r\n * @example\r\n * try {\r\n * MyMessage.decode(someBuffer); // throws if required fields are missing\r\n * } catch (e) {\r\n * if (e instanceof ProtocolError && e.instance)\r\n * console.log(\"decoded so far: \" + JSON.stringify(e.instance));\r\n * }\r\n */\r\nutil.ProtocolError = newError(\"ProtocolError\");\r\n\r\n/**\r\n * So far decoded message instance.\r\n * @name util.ProtocolError#instance\r\n * @type {Message<T>}\r\n */\r\n\r\n/**\r\n * A OneOf getter as returned by {@link util.oneOfGetter}.\r\n * @typedef OneOfGetter\r\n * @type {function}\r\n * @returns {string|undefined} Set field name, if any\r\n */\r\n\r\n/**\r\n * Builds a getter for a oneof's present field name.\r\n * @param {string[]} fieldNames Field names\r\n * @returns {OneOfGetter} Unbound getter\r\n */\r\nutil.oneOfGetter = function getOneOf(fieldNames) {\r\n var fieldMap = {};\r\n for (var i = 0; i < fieldNames.length; ++i)\r\n fieldMap[fieldNames[i]] = 1;\r\n\r\n /**\r\n * @returns {string|undefined} Set field name, if any\r\n * @this Object\r\n * @ignore\r\n */\r\n return function() { // eslint-disable-line consistent-return\r\n for (var keys = Object.keys(this), i = keys.length - 1; i > -1; --i)\r\n if (fieldMap[keys[i]] === 1 && this[keys[i]] !== undefined && this[keys[i]] !== null)\r\n return keys[i];\r\n };\r\n};\r\n\r\n/**\r\n * A OneOf setter as returned by {@link util.oneOfSetter}.\r\n * @typedef OneOfSetter\r\n * @type {function}\r\n * @param {string|undefined} value Field name\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * Builds a setter for a oneof's present field name.\r\n * @param {string[]} fieldNames Field names\r\n * @returns {OneOfSetter} Unbound setter\r\n */\r\nutil.oneOfSetter = function setOneOf(fieldNames) {\r\n\r\n /**\r\n * @param {string} name Field name\r\n * @returns {undefined}\r\n * @this Object\r\n * @ignore\r\n */\r\n return function(name) {\r\n for (var i = 0; i < fieldNames.length; ++i)\r\n if (fieldNames[i] !== name)\r\n delete this[fieldNames[i]];\r\n };\r\n};\r\n\r\n/**\r\n * Default conversion options used for {@link Message#toJSON} implementations.\r\n *\r\n * These options are close to proto3's JSON mapping with the exception that internal types like Any are handled just like messages. 
More precisely:\r\n *\r\n * - Longs become strings\r\n * - Enums become string keys\r\n * - Bytes become base64 encoded strings\r\n * - (Sub-)Messages become plain objects\r\n * - Maps become plain objects with all string keys\r\n * - Repeated fields become arrays\r\n * - NaN and Infinity for float and double fields become strings\r\n *\r\n * @type {IConversionOptions}\r\n * @see https://developers.google.com/protocol-buffers/docs/proto3?hl=en#json\r\n */\r\nutil.toJSONOptions = {\r\n longs: String,\r\n enums: String,\r\n bytes: String,\r\n json: true\r\n};\r\n\r\nutil._configure = function() {\r\n var Buffer = util.Buffer;\r\n /* istanbul ignore if */\r\n if (!Buffer) {\r\n util._Buffer_from = util._Buffer_allocUnsafe = null;\r\n return;\r\n }\r\n // because node 4.x buffers are incompatible & immutable\r\n // see: https://github.com/dcodeIO/protobuf.js/pull/665\r\n util._Buffer_from = Buffer.from !== Uint8Array.from && Buffer.from ||\r\n /* istanbul ignore next */\r\n function Buffer_from(value, encoding) {\r\n return new Buffer(value, encoding);\r\n };\r\n util._Buffer_allocUnsafe = Buffer.allocUnsafe ||\r\n /* istanbul ignore next */\r\n function Buffer_allocUnsafe(size) {\r\n return new Buffer(size);\r\n };\r\n};\r\n","\"use strict\";\r\nmodule.exports = Writer;\r\n\r\nvar util = require(\"./util/minimal\");\r\n\r\nvar BufferWriter; // cyclic\r\n\r\nvar LongBits = util.LongBits,\r\n base64 = util.base64,\r\n utf8 = util.utf8;\r\n\r\n/**\r\n * Constructs a new writer operation instance.\r\n * @classdesc Scheduled writer operation.\r\n * @constructor\r\n * @param {function(*, Uint8Array, number)} fn Function to call\r\n * @param {number} len Value byte length\r\n * @param {*} val Value to write\r\n * @ignore\r\n */\r\nfunction Op(fn, len, val) {\r\n\r\n /**\r\n * Function to call.\r\n * @type {function(Uint8Array, number, *)}\r\n */\r\n this.fn = fn;\r\n\r\n /**\r\n * Value byte length.\r\n * @type {number}\r\n */\r\n this.len = len;\r\n\r\n /**\r\n * Next operation.\r\n * @type {Writer.Op|undefined}\r\n */\r\n this.next = undefined;\r\n\r\n /**\r\n * Value to write.\r\n * @type {*}\r\n */\r\n this.val = val; // type varies\r\n}\r\n\r\n/* istanbul ignore next */\r\nfunction noop() {} // eslint-disable-line no-empty-function\r\n\r\n/**\r\n * Constructs a new writer state instance.\r\n * @classdesc Copied writer state.\r\n * @memberof Writer\r\n * @constructor\r\n * @param {Writer} writer Writer to copy state from\r\n * @ignore\r\n */\r\nfunction State(writer) {\r\n\r\n /**\r\n * Current head.\r\n * @type {Writer.Op}\r\n */\r\n this.head = writer.head;\r\n\r\n /**\r\n * Current tail.\r\n * @type {Writer.Op}\r\n */\r\n this.tail = writer.tail;\r\n\r\n /**\r\n * Current buffer length.\r\n * @type {number}\r\n */\r\n this.len = writer.len;\r\n\r\n /**\r\n * Next state.\r\n * @type {State|null}\r\n */\r\n this.next = writer.states;\r\n}\r\n\r\n/**\r\n * Constructs a new writer instance.\r\n * @classdesc Wire format writer using `Uint8Array` if available, otherwise `Array`.\r\n * @constructor\r\n */\r\nfunction Writer() {\r\n\r\n /**\r\n * Current length.\r\n * @type {number}\r\n */\r\n this.len = 0;\r\n\r\n /**\r\n * Operations head.\r\n * @type {Object}\r\n */\r\n this.head = new Op(noop, 0, 0);\r\n\r\n /**\r\n * Operations tail\r\n * @type {Object}\r\n */\r\n this.tail = this.head;\r\n\r\n /**\r\n * Linked forked states.\r\n * @type {Object|null}\r\n */\r\n this.states = null;\r\n\r\n // When a value is written, the writer calculates its byte length and puts it into a linked\r\n // list of 
operations to perform when finish() is called. This both allows us to allocate\r\n // buffers of the exact required size and reduces the amount of work we have to do compared\r\n // to first calculating over objects and then encoding over objects. In our case, the encoding\r\n // part is just a linked list walk calling operations with already prepared values.\r\n}\r\n\r\n/**\r\n * Creates a new writer.\r\n * @function\r\n * @returns {BufferWriter|Writer} A {@link BufferWriter} when Buffers are supported, otherwise a {@link Writer}\r\n */\r\nWriter.create = util.Buffer\r\n ? function create_buffer_setup() {\r\n return (Writer.create = function create_buffer() {\r\n return new BufferWriter();\r\n })();\r\n }\r\n /* istanbul ignore next */\r\n : function create_array() {\r\n return new Writer();\r\n };\r\n\r\n/**\r\n * Allocates a buffer of the specified size.\r\n * @param {number} size Buffer size\r\n * @returns {Uint8Array} Buffer\r\n */\r\nWriter.alloc = function alloc(size) {\r\n return new util.Array(size);\r\n};\r\n\r\n// Use Uint8Array buffer pool in the browser, just like node does with buffers\r\n/* istanbul ignore else */\r\nif (util.Array !== Array)\r\n Writer.alloc = util.pool(Writer.alloc, util.Array.prototype.subarray);\r\n\r\n/**\r\n * Pushes a new operation to the queue.\r\n * @param {function(Uint8Array, number, *)} fn Function to call\r\n * @param {number} len Value byte length\r\n * @param {number} val Value to write\r\n * @returns {Writer} `this`\r\n * @private\r\n */\r\nWriter.prototype._push = function push(fn, len, val) {\r\n this.tail = this.tail.next = new Op(fn, len, val);\r\n this.len += len;\r\n return this;\r\n};\r\n\r\nfunction writeByte(val, buf, pos) {\r\n buf[pos] = val & 255;\r\n}\r\n\r\nfunction writeVarint32(val, buf, pos) {\r\n while (val > 127) {\r\n buf[pos++] = val & 127 | 128;\r\n val >>>= 7;\r\n }\r\n buf[pos] = val;\r\n}\r\n\r\n/**\r\n * Constructs a new varint writer operation instance.\r\n * @classdesc Scheduled varint writer operation.\r\n * @extends Op\r\n * @constructor\r\n * @param {number} len Value byte length\r\n * @param {number} val Value to write\r\n * @ignore\r\n */\r\nfunction VarintOp(len, val) {\r\n this.len = len;\r\n this.next = undefined;\r\n this.val = val;\r\n}\r\n\r\nVarintOp.prototype = Object.create(Op.prototype);\r\nVarintOp.prototype.fn = writeVarint32;\r\n\r\n/**\r\n * Writes an unsigned 32 bit value as a varint.\r\n * @param {number} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.uint32 = function write_uint32(value) {\r\n // here, the call to this.push has been inlined and a varint specific Op subclass is used.\r\n // uint32 is by far the most frequently used operation and benefits significantly from this.\r\n this.len += (this.tail = this.tail.next = new VarintOp(\r\n (value = value >>> 0)\r\n < 128 ? 1\r\n : value < 16384 ? 2\r\n : value < 2097152 ? 3\r\n : value < 268435456 ? 4\r\n : 5,\r\n value)).len;\r\n return this;\r\n};\r\n\r\n/**\r\n * Writes a signed 32 bit value as a varint.\r\n * @function\r\n * @param {number} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.int32 = function write_int32(value) {\r\n return value < 0\r\n ? 
this._push(writeVarint64, 10, LongBits.fromNumber(value)) // 10 bytes per spec\r\n : this.uint32(value);\r\n};\r\n\r\n/**\r\n * Writes a 32 bit value as a varint, zig-zag encoded.\r\n * @param {number} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.sint32 = function write_sint32(value) {\r\n return this.uint32((value << 1 ^ value >> 31) >>> 0);\r\n};\r\n\r\nfunction writeVarint64(val, buf, pos) {\r\n while (val.hi) {\r\n buf[pos++] = val.lo & 127 | 128;\r\n val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0;\r\n val.hi >>>= 7;\r\n }\r\n while (val.lo > 127) {\r\n buf[pos++] = val.lo & 127 | 128;\r\n val.lo = val.lo >>> 7;\r\n }\r\n buf[pos++] = val.lo;\r\n}\r\n\r\n/**\r\n * Writes an unsigned 64 bit value as a varint.\r\n * @param {Long|number|string} value Value to write\r\n * @returns {Writer} `this`\r\n * @throws {TypeError} If `value` is a string and no long library is present.\r\n */\r\nWriter.prototype.uint64 = function write_uint64(value) {\r\n var bits = LongBits.from(value);\r\n return this._push(writeVarint64, bits.length(), bits);\r\n};\r\n\r\n/**\r\n * Writes a signed 64 bit value as a varint.\r\n * @function\r\n * @param {Long|number|string} value Value to write\r\n * @returns {Writer} `this`\r\n * @throws {TypeError} If `value` is a string and no long library is present.\r\n */\r\nWriter.prototype.int64 = Writer.prototype.uint64;\r\n\r\n/**\r\n * Writes a signed 64 bit value as a varint, zig-zag encoded.\r\n * @param {Long|number|string} value Value to write\r\n * @returns {Writer} `this`\r\n * @throws {TypeError} If `value` is a string and no long library is present.\r\n */\r\nWriter.prototype.sint64 = function write_sint64(value) {\r\n var bits = LongBits.from(value).zzEncode();\r\n return this._push(writeVarint64, bits.length(), bits);\r\n};\r\n\r\n/**\r\n * Writes a boolish value as a varint.\r\n * @param {boolean} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.bool = function write_bool(value) {\r\n return this._push(writeByte, 1, value ? 
1 : 0);\r\n};\r\n\r\nfunction writeFixed32(val, buf, pos) {\r\n buf[pos ] = val & 255;\r\n buf[pos + 1] = val >>> 8 & 255;\r\n buf[pos + 2] = val >>> 16 & 255;\r\n buf[pos + 3] = val >>> 24;\r\n}\r\n\r\n/**\r\n * Writes an unsigned 32 bit value as fixed 32 bits.\r\n * @param {number} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.fixed32 = function write_fixed32(value) {\r\n return this._push(writeFixed32, 4, value >>> 0);\r\n};\r\n\r\n/**\r\n * Writes a signed 32 bit value as fixed 32 bits.\r\n * @function\r\n * @param {number} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.sfixed32 = Writer.prototype.fixed32;\r\n\r\n/**\r\n * Writes an unsigned 64 bit value as fixed 64 bits.\r\n * @param {Long|number|string} value Value to write\r\n * @returns {Writer} `this`\r\n * @throws {TypeError} If `value` is a string and no long library is present.\r\n */\r\nWriter.prototype.fixed64 = function write_fixed64(value) {\r\n var bits = LongBits.from(value);\r\n return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);\r\n};\r\n\r\n/**\r\n * Writes a signed 64 bit value as fixed 64 bits.\r\n * @function\r\n * @param {Long|number|string} value Value to write\r\n * @returns {Writer} `this`\r\n * @throws {TypeError} If `value` is a string and no long library is present.\r\n */\r\nWriter.prototype.sfixed64 = Writer.prototype.fixed64;\r\n\r\n/**\r\n * Writes a float (32 bit).\r\n * @function\r\n * @param {number} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.float = function write_float(value) {\r\n return this._push(util.float.writeFloatLE, 4, value);\r\n};\r\n\r\n/**\r\n * Writes a double (64 bit float).\r\n * @function\r\n * @param {number} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.double = function write_double(value) {\r\n return this._push(util.float.writeDoubleLE, 8, value);\r\n};\r\n\r\nvar writeBytes = util.Array.prototype.set\r\n ? function writeBytes_set(val, buf, pos) {\r\n buf.set(val, pos); // also works for plain array values\r\n }\r\n /* istanbul ignore next */\r\n : function writeBytes_for(val, buf, pos) {\r\n for (var i = 0; i < val.length; ++i)\r\n buf[pos + i] = val[i];\r\n };\r\n\r\n/**\r\n * Writes a sequence of bytes.\r\n * @param {Uint8Array|string} value Buffer or base64 encoded string to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.bytes = function write_bytes(value) {\r\n var len = value.length >>> 0;\r\n if (!len)\r\n return this._push(writeByte, 1, 0);\r\n if (util.isString(value)) {\r\n var buf = Writer.alloc(len = base64.length(value));\r\n base64.decode(value, buf, 0);\r\n value = buf;\r\n }\r\n return this.uint32(len)._push(writeBytes, len, value);\r\n};\r\n\r\n/**\r\n * Writes a string.\r\n * @param {string} value Value to write\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.string = function write_string(value) {\r\n var len = utf8.length(value);\r\n return len\r\n ? 
this.uint32(len)._push(utf8.write, len, value)\r\n : this._push(writeByte, 1, 0);\r\n};\r\n\r\n/**\r\n * Forks this writer's state by pushing it to a stack.\r\n * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.fork = function fork() {\r\n this.states = new State(this);\r\n this.head = this.tail = new Op(noop, 0, 0);\r\n this.len = 0;\r\n return this;\r\n};\r\n\r\n/**\r\n * Resets this instance to the last state.\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.reset = function reset() {\r\n if (this.states) {\r\n this.head = this.states.head;\r\n this.tail = this.states.tail;\r\n this.len = this.states.len;\r\n this.states = this.states.next;\r\n } else {\r\n this.head = this.tail = new Op(noop, 0, 0);\r\n this.len = 0;\r\n }\r\n return this;\r\n};\r\n\r\n/**\r\n * Resets to the last state and appends the fork state's current write length as a varint followed by its operations.\r\n * @returns {Writer} `this`\r\n */\r\nWriter.prototype.ldelim = function ldelim() {\r\n var head = this.head,\r\n tail = this.tail,\r\n len = this.len;\r\n this.reset().uint32(len);\r\n if (len) {\r\n this.tail.next = head.next; // skip noop\r\n this.tail = tail;\r\n this.len += len;\r\n }\r\n return this;\r\n};\r\n\r\n/**\r\n * Finishes the write operation.\r\n * @returns {Uint8Array} Finished buffer\r\n */\r\nWriter.prototype.finish = function finish() {\r\n var head = this.head.next, // skip noop\r\n buf = this.constructor.alloc(this.len),\r\n pos = 0;\r\n while (head) {\r\n head.fn(head.val, buf, pos);\r\n pos += head.len;\r\n head = head.next;\r\n }\r\n // this.head = this.tail = null;\r\n return buf;\r\n};\r\n\r\nWriter._configure = function(BufferWriter_) {\r\n BufferWriter = BufferWriter_;\r\n};\r\n","\"use strict\";\r\nmodule.exports = BufferWriter;\r\n\r\n// extends Writer\r\nvar Writer = require(\"./writer\");\r\n(BufferWriter.prototype = Object.create(Writer.prototype)).constructor = BufferWriter;\r\n\r\nvar util = require(\"./util/minimal\");\r\n\r\nvar Buffer = util.Buffer;\r\n\r\n/**\r\n * Constructs a new buffer writer instance.\r\n * @classdesc Wire format writer using node buffers.\r\n * @extends Writer\r\n * @constructor\r\n */\r\nfunction BufferWriter() {\r\n Writer.call(this);\r\n}\r\n\r\n/**\r\n * Allocates a buffer of the specified size.\r\n * @param {number} size Buffer size\r\n * @returns {Buffer} Buffer\r\n */\r\nBufferWriter.alloc = function alloc_buffer(size) {\r\n return (BufferWriter.alloc = util._Buffer_allocUnsafe)(size);\r\n};\r\n\r\nvar writeBytesBuffer = Buffer && Buffer.prototype instanceof Uint8Array && Buffer.prototype.set.name === \"set\"\r\n ? 
function writeBytesBuffer_set(val, buf, pos) {\r\n buf.set(val, pos); // faster than copy (requires node >= 4 where Buffers extend Uint8Array and set is properly inherited)\r\n // also works for plain array values\r\n }\r\n /* istanbul ignore next */\r\n : function writeBytesBuffer_copy(val, buf, pos) {\r\n if (val.copy) // Buffer values\r\n val.copy(buf, pos, 0, val.length);\r\n else for (var i = 0; i < val.length;) // plain array values\r\n buf[pos++] = val[i++];\r\n };\r\n\r\n/**\r\n * @override\r\n */\r\nBufferWriter.prototype.bytes = function write_bytes_buffer(value) {\r\n if (util.isString(value))\r\n value = util._Buffer_from(value, \"base64\");\r\n var len = value.length >>> 0;\r\n this.uint32(len);\r\n if (len)\r\n this._push(writeBytesBuffer, len, value);\r\n return this;\r\n};\r\n\r\nfunction writeStringBuffer(val, buf, pos) {\r\n if (val.length < 40) // plain js is faster for short strings (probably due to redundant assertions)\r\n util.utf8.write(val, buf, pos);\r\n else\r\n buf.utf8Write(val, pos);\r\n}\r\n\r\n/**\r\n * @override\r\n */\r\nBufferWriter.prototype.string = function write_string_buffer(value) {\r\n var len = Buffer.byteLength(value);\r\n this.uint32(len);\r\n if (len)\r\n this._push(writeStringBuffer, len, value);\r\n return this;\r\n};\r\n\r\n\r\n/**\r\n * Finishes the write operation.\r\n * @name BufferWriter#finish\r\n * @function\r\n * @returns {Buffer} Finished buffer\r\n */\r\n","\"use strict\";\r\nmodule.exports = Reader;\r\n\r\nvar util = require(\"./util/minimal\");\r\n\r\nvar BufferReader; // cyclic\r\n\r\nvar LongBits = util.LongBits,\r\n utf8 = util.utf8;\r\n\r\n/* istanbul ignore next */\r\nfunction indexOutOfRange(reader, writeLength) {\r\n return RangeError(\"index out of range: \" + reader.pos + \" + \" + (writeLength || 1) + \" > \" + reader.len);\r\n}\r\n\r\n/**\r\n * Constructs a new reader instance using the specified buffer.\r\n * @classdesc Wire format reader using `Uint8Array` if available, otherwise `Array`.\r\n * @constructor\r\n * @param {Uint8Array} buffer Buffer to read from\r\n */\r\nfunction Reader(buffer) {\r\n\r\n /**\r\n * Read buffer.\r\n * @type {Uint8Array}\r\n */\r\n this.buf = buffer;\r\n\r\n /**\r\n * Read buffer position.\r\n * @type {number}\r\n */\r\n this.pos = 0;\r\n\r\n /**\r\n * Read buffer length.\r\n * @type {number}\r\n */\r\n this.len = buffer.length;\r\n}\r\n\r\nvar create_array = typeof Uint8Array !== \"undefined\"\r\n ? function create_typed_array(buffer) {\r\n if (buffer instanceof Uint8Array || Array.isArray(buffer))\r\n return new Reader(buffer);\r\n throw Error(\"illegal buffer\");\r\n }\r\n /* istanbul ignore next */\r\n : function create_array(buffer) {\r\n if (Array.isArray(buffer))\r\n return new Reader(buffer);\r\n throw Error(\"illegal buffer\");\r\n };\r\n\r\n/**\r\n * Creates a new reader using the specified buffer.\r\n * @function\r\n * @param {Uint8Array|Buffer} buffer Buffer to read from\r\n * @returns {Reader|BufferReader} A {@link BufferReader} if `buffer` is a Buffer, otherwise a {@link Reader}\r\n * @throws {Error} If `buffer` is not a valid buffer\r\n */\r\nReader.create = util.Buffer\r\n ? function create_buffer_setup(buffer) {\r\n return (Reader.create = function create_buffer(buffer) {\r\n return util.Buffer.isBuffer(buffer)\r\n ? 
new BufferReader(buffer)\r\n /* istanbul ignore next */\r\n : create_array(buffer);\r\n })(buffer);\r\n }\r\n /* istanbul ignore next */\r\n : create_array;\r\n\r\nReader.prototype._slice = util.Array.prototype.subarray || /* istanbul ignore next */ util.Array.prototype.slice;\r\n\r\n/**\r\n * Reads a varint as an unsigned 32 bit value.\r\n * @function\r\n * @returns {number} Value read\r\n */\r\nReader.prototype.uint32 = (function read_uint32_setup() {\r\n var value = 4294967295; // optimizer type-hint, tends to deopt otherwise (?!)\r\n return function read_uint32() {\r\n value = ( this.buf[this.pos] & 127 ) >>> 0; if (this.buf[this.pos++] < 128) return value;\r\n value = (value | (this.buf[this.pos] & 127) << 7) >>> 0; if (this.buf[this.pos++] < 128) return value;\r\n value = (value | (this.buf[this.pos] & 127) << 14) >>> 0; if (this.buf[this.pos++] < 128) return value;\r\n value = (value | (this.buf[this.pos] & 127) << 21) >>> 0; if (this.buf[this.pos++] < 128) return value;\r\n value = (value | (this.buf[this.pos] & 15) << 28) >>> 0; if (this.buf[this.pos++] < 128) return value;\r\n\r\n /* istanbul ignore if */\r\n if ((this.pos += 5) > this.len) {\r\n this.pos = this.len;\r\n throw indexOutOfRange(this, 10);\r\n }\r\n return value;\r\n };\r\n})();\r\n\r\n/**\r\n * Reads a varint as a signed 32 bit value.\r\n * @returns {number} Value read\r\n */\r\nReader.prototype.int32 = function read_int32() {\r\n return this.uint32() | 0;\r\n};\r\n\r\n/**\r\n * Reads a zig-zag encoded varint as a signed 32 bit value.\r\n * @returns {number} Value read\r\n */\r\nReader.prototype.sint32 = function read_sint32() {\r\n var value = this.uint32();\r\n return value >>> 1 ^ -(value & 1) | 0;\r\n};\r\n\r\n/* eslint-disable no-invalid-this */\r\n\r\nfunction readLongVarint() {\r\n // tends to deopt with local vars for octet etc.\r\n var bits = new LongBits(0, 0);\r\n var i = 0;\r\n if (this.len - this.pos > 4) { // fast route (lo)\r\n for (; i < 4; ++i) {\r\n // 1st..4th\r\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;\r\n if (this.buf[this.pos++] < 128)\r\n return bits;\r\n }\r\n // 5th\r\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;\r\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0;\r\n if (this.buf[this.pos++] < 128)\r\n return bits;\r\n i = 0;\r\n } else {\r\n for (; i < 3; ++i) {\r\n /* istanbul ignore if */\r\n if (this.pos >= this.len)\r\n throw indexOutOfRange(this);\r\n // 1st..3th\r\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;\r\n if (this.buf[this.pos++] < 128)\r\n return bits;\r\n }\r\n // 4th\r\n bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;\r\n return bits;\r\n }\r\n if (this.len - this.pos > 4) { // fast route (hi)\r\n for (; i < 5; ++i) {\r\n // 6th..10th\r\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;\r\n if (this.buf[this.pos++] < 128)\r\n return bits;\r\n }\r\n } else {\r\n for (; i < 5; ++i) {\r\n /* istanbul ignore if */\r\n if (this.pos >= this.len)\r\n throw indexOutOfRange(this);\r\n // 6th..10th\r\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;\r\n if (this.buf[this.pos++] < 128)\r\n return bits;\r\n }\r\n }\r\n /* istanbul ignore next */\r\n throw Error(\"invalid varint encoding\");\r\n}\r\n\r\n/* eslint-enable no-invalid-this */\r\n\r\n/**\r\n * Reads a varint as a signed 64 bit value.\r\n * @name Reader#int64\r\n * @function\r\n * @returns {Long} Value read\r\n */\r\n\r\n/**\r\n * Reads a varint as an unsigned 64 bit value.\r\n * @name 
Reader#uint64\r\n * @function\r\n * @returns {Long} Value read\r\n */\r\n\r\n/**\r\n * Reads a zig-zag encoded varint as a signed 64 bit value.\r\n * @name Reader#sint64\r\n * @function\r\n * @returns {Long} Value read\r\n */\r\n\r\n/**\r\n * Reads a varint as a boolean.\r\n * @returns {boolean} Value read\r\n */\r\nReader.prototype.bool = function read_bool() {\r\n return this.uint32() !== 0;\r\n};\r\n\r\nfunction readFixed32_end(buf, end) { // note that this uses `end`, not `pos`\r\n return (buf[end - 4]\r\n | buf[end - 3] << 8\r\n | buf[end - 2] << 16\r\n | buf[end - 1] << 24) >>> 0;\r\n}\r\n\r\n/**\r\n * Reads fixed 32 bits as an unsigned 32 bit integer.\r\n * @returns {number} Value read\r\n */\r\nReader.prototype.fixed32 = function read_fixed32() {\r\n\r\n /* istanbul ignore if */\r\n if (this.pos + 4 > this.len)\r\n throw indexOutOfRange(this, 4);\r\n\r\n return readFixed32_end(this.buf, this.pos += 4);\r\n};\r\n\r\n/**\r\n * Reads fixed 32 bits as a signed 32 bit integer.\r\n * @returns {number} Value read\r\n */\r\nReader.prototype.sfixed32 = function read_sfixed32() {\r\n\r\n /* istanbul ignore if */\r\n if (this.pos + 4 > this.len)\r\n throw indexOutOfRange(this, 4);\r\n\r\n return readFixed32_end(this.buf, this.pos += 4) | 0;\r\n};\r\n\r\n/* eslint-disable no-invalid-this */\r\n\r\nfunction readFixed64(/* this: Reader */) {\r\n\r\n /* istanbul ignore if */\r\n if (this.pos + 8 > this.len)\r\n throw indexOutOfRange(this, 8);\r\n\r\n return new LongBits(readFixed32_end(this.buf, this.pos += 4), readFixed32_end(this.buf, this.pos += 4));\r\n}\r\n\r\n/* eslint-enable no-invalid-this */\r\n\r\n/**\r\n * Reads fixed 64 bits.\r\n * @name Reader#fixed64\r\n * @function\r\n * @returns {Long} Value read\r\n */\r\n\r\n/**\r\n * Reads zig-zag encoded fixed 64 bits.\r\n * @name Reader#sfixed64\r\n * @function\r\n * @returns {Long} Value read\r\n */\r\n\r\n/**\r\n * Reads a float (32 bit) as a number.\r\n * @function\r\n * @returns {number} Value read\r\n */\r\nReader.prototype.float = function read_float() {\r\n\r\n /* istanbul ignore if */\r\n if (this.pos + 4 > this.len)\r\n throw indexOutOfRange(this, 4);\r\n\r\n var value = util.float.readFloatLE(this.buf, this.pos);\r\n this.pos += 4;\r\n return value;\r\n};\r\n\r\n/**\r\n * Reads a double (64 bit float) as a number.\r\n * @function\r\n * @returns {number} Value read\r\n */\r\nReader.prototype.double = function read_double() {\r\n\r\n /* istanbul ignore if */\r\n if (this.pos + 8 > this.len)\r\n throw indexOutOfRange(this, 4);\r\n\r\n var value = util.float.readDoubleLE(this.buf, this.pos);\r\n this.pos += 8;\r\n return value;\r\n};\r\n\r\n/**\r\n * Reads a sequence of bytes preceeded by its length as a varint.\r\n * @returns {Uint8Array} Value read\r\n */\r\nReader.prototype.bytes = function read_bytes() {\r\n var length = this.uint32(),\r\n start = this.pos,\r\n end = this.pos + length;\r\n\r\n /* istanbul ignore if */\r\n if (end > this.len)\r\n throw indexOutOfRange(this, length);\r\n\r\n this.pos += length;\r\n if (Array.isArray(this.buf)) // plain array\r\n return this.buf.slice(start, end);\r\n return start === end // fix for IE 10/Win8 and others' subarray returning array of size 1\r\n ? 
new this.buf.constructor(0)\r\n : this._slice.call(this.buf, start, end);\r\n};\r\n\r\n/**\r\n * Reads a string preceeded by its byte length as a varint.\r\n * @returns {string} Value read\r\n */\r\nReader.prototype.string = function read_string() {\r\n var bytes = this.bytes();\r\n return utf8.read(bytes, 0, bytes.length);\r\n};\r\n\r\n/**\r\n * Skips the specified number of bytes if specified, otherwise skips a varint.\r\n * @param {number} [length] Length if known, otherwise a varint is assumed\r\n * @returns {Reader} `this`\r\n */\r\nReader.prototype.skip = function skip(length) {\r\n if (typeof length === \"number\") {\r\n /* istanbul ignore if */\r\n if (this.pos + length > this.len)\r\n throw indexOutOfRange(this, length);\r\n this.pos += length;\r\n } else {\r\n do {\r\n /* istanbul ignore if */\r\n if (this.pos >= this.len)\r\n throw indexOutOfRange(this);\r\n } while (this.buf[this.pos++] & 128);\r\n }\r\n return this;\r\n};\r\n\r\n/**\r\n * Skips the next element of the specified wire type.\r\n * @param {number} wireType Wire type received\r\n * @returns {Reader} `this`\r\n */\r\nReader.prototype.skipType = function(wireType) {\r\n switch (wireType) {\r\n case 0:\r\n this.skip();\r\n break;\r\n case 1:\r\n this.skip(8);\r\n break;\r\n case 2:\r\n this.skip(this.uint32());\r\n break;\r\n case 3:\r\n do { // eslint-disable-line no-constant-condition\r\n if ((wireType = this.uint32() & 7) === 4)\r\n break;\r\n this.skipType(wireType);\r\n } while (true);\r\n break;\r\n case 5:\r\n this.skip(4);\r\n break;\r\n\r\n /* istanbul ignore next */\r\n default:\r\n throw Error(\"invalid wire type \" + wireType + \" at offset \" + this.pos);\r\n }\r\n return this;\r\n};\r\n\r\nReader._configure = function(BufferReader_) {\r\n BufferReader = BufferReader_;\r\n\r\n var fn = util.Long ? 
\"toLong\" : /* istanbul ignore next */ \"toNumber\";\r\n util.merge(Reader.prototype, {\r\n\r\n int64: function read_int64() {\r\n return readLongVarint.call(this)[fn](false);\r\n },\r\n\r\n uint64: function read_uint64() {\r\n return readLongVarint.call(this)[fn](true);\r\n },\r\n\r\n sint64: function read_sint64() {\r\n return readLongVarint.call(this).zzDecode()[fn](false);\r\n },\r\n\r\n fixed64: function read_fixed64() {\r\n return readFixed64.call(this)[fn](true);\r\n },\r\n\r\n sfixed64: function read_sfixed64() {\r\n return readFixed64.call(this)[fn](false);\r\n }\r\n\r\n });\r\n};\r\n","\"use strict\";\r\nmodule.exports = BufferReader;\r\n\r\n// extends Reader\r\nvar Reader = require(\"./reader\");\r\n(BufferReader.prototype = Object.create(Reader.prototype)).constructor = BufferReader;\r\n\r\nvar util = require(\"./util/minimal\");\r\n\r\n/**\r\n * Constructs a new buffer reader instance.\r\n * @classdesc Wire format reader using node buffers.\r\n * @extends Reader\r\n * @constructor\r\n * @param {Buffer} buffer Buffer to read from\r\n */\r\nfunction BufferReader(buffer) {\r\n Reader.call(this, buffer);\r\n\r\n /**\r\n * Read buffer.\r\n * @name BufferReader#buf\r\n * @type {Buffer}\r\n */\r\n}\r\n\r\n/* istanbul ignore else */\r\nif (util.Buffer)\r\n BufferReader.prototype._slice = util.Buffer.prototype.slice;\r\n\r\n/**\r\n * @override\r\n */\r\nBufferReader.prototype.string = function read_string_buffer() {\r\n var len = this.uint32(); // modifies pos\r\n return this.buf.utf8Slice(this.pos, this.pos = Math.min(this.pos + len, this.len));\r\n};\r\n\r\n/**\r\n * Reads a sequence of bytes preceeded by its length as a varint.\r\n * @name BufferReader#bytes\r\n * @function\r\n * @returns {Buffer} Value read\r\n */\r\n","\"use strict\";\r\nmodule.exports = Service;\r\n\r\nvar util = require(\"../util/minimal\");\r\n\r\n// Extends EventEmitter\r\n(Service.prototype = Object.create(util.EventEmitter.prototype)).constructor = Service;\r\n\r\n/**\r\n * A service method callback as used by {@link rpc.ServiceMethod|ServiceMethod}.\r\n *\r\n * Differs from {@link RPCImplCallback} in that it is an actual callback of a service method which may not return `response = null`.\r\n * @typedef rpc.ServiceMethodCallback\r\n * @template TRes extends Message<TRes>\r\n * @type {function}\r\n * @param {Error|null} error Error, if any\r\n * @param {TRes} [response] Response message\r\n * @returns {undefined}\r\n */\r\n\r\n/**\r\n * A service method part of a {@link rpc.Service} as created by {@link Service.create}.\r\n * @typedef rpc.ServiceMethod\r\n * @template TReq extends Message<TReq>\r\n * @template TRes extends Message<TRes>\r\n * @type {function}\r\n * @param {TReq|Properties<TReq>} request Request message or plain object\r\n * @param {rpc.ServiceMethodCallback<TRes>} [callback] Node-style callback called with the error, if any, and the response message\r\n * @returns {Promise<Message<TRes>>} Promise if `callback` has been omitted, otherwise `undefined`\r\n */\r\n\r\n/**\r\n * Constructs a new RPC service instance.\r\n * @classdesc An RPC service as returned by {@link Service#create}.\r\n * @exports rpc.Service\r\n * @extends util.EventEmitter\r\n * @constructor\r\n * @param {RPCImpl} rpcImpl RPC implementation\r\n * @param {boolean} [requestDelimited=false] Whether requests are length-delimited\r\n * @param {boolean} [responseDelimited=false] Whether responses are length-delimited\r\n */\r\nfunction Service(rpcImpl, requestDelimited, responseDelimited) {\r\n\r\n if (typeof rpcImpl !== 
\"function\")\r\n throw TypeError(\"rpcImpl must be a function\");\r\n\r\n util.EventEmitter.call(this);\r\n\r\n /**\r\n * RPC implementation. Becomes `null` once the service is ended.\r\n * @type {RPCImpl|null}\r\n */\r\n this.rpcImpl = rpcImpl;\r\n\r\n /**\r\n * Whether requests are length-delimited.\r\n * @type {boolean}\r\n */\r\n this.requestDelimited = Boolean(requestDelimited);\r\n\r\n /**\r\n * Whether responses are length-delimited.\r\n * @type {boolean}\r\n */\r\n this.responseDelimited = Boolean(responseDelimited);\r\n}\r\n\r\n/**\r\n * Calls a service method through {@link rpc.Service#rpcImpl|rpcImpl}.\r\n * @param {Method|rpc.ServiceMethod<TReq,TRes>} method Reflected or static method\r\n * @param {Constructor<TReq>} requestCtor Request constructor\r\n * @param {Constructor<TRes>} responseCtor Response constructor\r\n * @param {TReq|Properties<TReq>} request Request message or plain object\r\n * @param {rpc.ServiceMethodCallback<TRes>} callback Service callback\r\n * @returns {undefined}\r\n * @template TReq extends Message<TReq>\r\n * @template TRes extends Message<TRes>\r\n */\r\nService.prototype.rpcCall = function rpcCall(method, requestCtor, responseCtor, request, callback) {\r\n\r\n if (!request)\r\n throw TypeError(\"request must be specified\");\r\n\r\n var self = this;\r\n if (!callback)\r\n return util.asPromise(rpcCall, self, method, requestCtor, responseCtor, request);\r\n\r\n if (!self.rpcImpl) {\r\n setTimeout(function() { callback(Error(\"already ended\")); }, 0);\r\n return undefined;\r\n }\r\n\r\n try {\r\n return self.rpcImpl(\r\n method,\r\n requestCtor[self.requestDelimited ? \"encodeDelimited\" : \"encode\"](request).finish(),\r\n function rpcCallback(err, response) {\r\n\r\n if (err) {\r\n self.emit(\"error\", err, method);\r\n return callback(err);\r\n }\r\n\r\n if (response === null) {\r\n self.end(/* endedByRPC */ true);\r\n return undefined;\r\n }\r\n\r\n if (!(response instanceof responseCtor)) {\r\n try {\r\n response = responseCtor[self.responseDelimited ? \"decodeDelimited\" : \"decode\"](response);\r\n } catch (err) {\r\n self.emit(\"error\", err, method);\r\n return callback(err);\r\n }\r\n }\r\n\r\n self.emit(\"data\", response, method);\r\n return callback(null, response);\r\n }\r\n );\r\n } catch (err) {\r\n self.emit(\"error\", err, method);\r\n setTimeout(function() { callback(err); }, 0);\r\n return undefined;\r\n }\r\n};\r\n\r\n/**\r\n * Ends this service and emits the `end` event.\r\n * @param {boolean} [endedByRPC=false] Whether the service has been ended by the RPC implementation.\r\n * @returns {rpc.Service} `this`\r\n */\r\nService.prototype.end = function end(endedByRPC) {\r\n if (this.rpcImpl) {\r\n if (!endedByRPC) // signal end to rpcImpl\r\n this.rpcImpl(null, null, null);\r\n this.rpcImpl = null;\r\n this.emit(\"end\").off();\r\n }\r\n return this;\r\n};\r\n","\"use strict\";\r\n\r\n/**\r\n * Streaming RPC helpers.\r\n * @namespace\r\n */\r\nvar rpc = exports;\r\n\r\n/**\r\n * RPC implementation passed to {@link Service#create} performing a service request on network level, i.e. 
by utilizing http requests or websockets.\r\n * @typedef RPCImpl\r\n * @type {function}\r\n * @param {Method|rpc.ServiceMethod<Message<{}>,Message<{}>>} method Reflected or static method being called\r\n * @param {Uint8Array} requestData Request data\r\n * @param {RPCImplCallback} callback Callback function\r\n * @returns {undefined}\r\n * @example\r\n * function rpcImpl(method, requestData, callback) {\r\n * if (protobuf.util.lcFirst(method.name) !== \"myMethod\") // compatible with static code\r\n * throw Error(\"no such method\");\r\n * asynchronouslyObtainAResponse(requestData, function(err, responseData) {\r\n * callback(err, responseData);\r\n * });\r\n * }\r\n */\r\n\r\n/**\r\n * Node-style callback as used by {@link RPCImpl}.\r\n * @typedef RPCImplCallback\r\n * @type {function}\r\n * @param {Error|null} error Error, if any, otherwise `null`\r\n * @param {Uint8Array|null} [response] Response data or `null` to signal end of stream, if there hasn't been an error\r\n * @returns {undefined}\r\n */\r\n\r\nrpc.Service = require(\"./rpc/service\");\r\n","\"use strict\";\r\nvar protobuf = exports;\r\n\r\n/**\r\n * Build type, one of `\"full\"`, `\"light\"` or `\"minimal\"`.\r\n * @name build\r\n * @type {string}\r\n * @const\r\n */\r\nprotobuf.build = \"minimal\";\r\n\r\n// Serialization\r\nprotobuf.Writer = require(\"./writer\");\r\nprotobuf.BufferWriter = require(\"./writer_buffer\");\r\nprotobuf.Reader = require(\"./reader\");\r\nprotobuf.BufferReader = require(\"./reader_buffer\");\r\n\r\n// Utility\r\nprotobuf.util = require(\"./util/minimal\");\r\nprotobuf.rpc = require(\"./rpc\");\r\nprotobuf.roots = require(\"./roots\");\r\nprotobuf.configure = configure;\r\n\r\n/* istanbul ignore next */\r\n/**\r\n * Reconfigures the library according to the environment.\r\n * @returns {undefined}\r\n */\r\nfunction configure() {\r\n protobuf.Reader._configure(protobuf.BufferReader);\r\n protobuf.util._configure();\r\n}\r\n\r\n// Configure serialization\r\nprotobuf.Writer._configure(protobuf.BufferWriter);\r\nconfigure();\r\n","// minimal library entry point.\r\n\r\n\"use strict\";\r\nmodule.exports = require(\"./src/index-minimal\");\r\n","/*eslint-disable block-scoped-var, no-redeclare, no-control-regex, no-prototype-builtins*/\n\"use strict\";\n\nvar $protobuf = require(\"protobufjs/minimal\");\n\nvar $Reader = $protobuf.Reader, $util = $protobuf.util;\n\nvar $root = $protobuf.roots[\"default\"] || ($protobuf.roots[\"default\"] = {});\n\n$root.tensorflow = (function() {\n\n var tensorflow = {};\n\n tensorflow.Any = (function() {\n\n function Any(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n Any.prototype.typeUrl = \"\";\n Any.prototype.value = $util.newBuffer([]);\n\n Any.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.Any();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.typeUrl = r.string();\n break;\n case 2:\n m.value = r.bytes();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return Any;\n })();\n\n tensorflow.DataType = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"DT_INVALID\"] = 0;\n values[valuesById[1] = \"DT_FLOAT\"] = 1;\n values[valuesById[2] = \"DT_DOUBLE\"] = 2;\n values[valuesById[3] = \"DT_INT32\"] = 3;\n values[valuesById[4] = \"DT_UINT8\"] = 4;\n values[valuesById[5] = \"DT_INT16\"] = 5;\n values[valuesById[6] = \"DT_INT8\"] = 6;\n values[valuesById[7] = \"DT_STRING\"] = 7;\n values[valuesById[8] = \"DT_COMPLEX64\"] = 8;\n values[valuesById[9] = \"DT_INT64\"] = 9;\n values[valuesById[10] = \"DT_BOOL\"] = 10;\n values[valuesById[11] = \"DT_QINT8\"] = 11;\n values[valuesById[12] = \"DT_QUINT8\"] = 12;\n values[valuesById[13] = \"DT_QINT32\"] = 13;\n values[valuesById[14] = \"DT_BFLOAT16\"] = 14;\n values[valuesById[101] = \"DT_FLOAT_REF\"] = 101;\n values[valuesById[102] = \"DT_DOUBLE_REF\"] = 102;\n values[valuesById[103] = \"DT_INT32_REF\"] = 103;\n values[valuesById[104] = \"DT_UINT8_REF\"] = 104;\n values[valuesById[105] = \"DT_INT16_REF\"] = 105;\n values[valuesById[106] = \"DT_INT8_REF\"] = 106;\n values[valuesById[107] = \"DT_STRING_REF\"] = 107;\n values[valuesById[108] = \"DT_COMPLEX64_REF\"] = 108;\n values[valuesById[109] = \"DT_INT64_REF\"] = 109;\n values[valuesById[110] = \"DT_BOOL_REF\"] = 110;\n values[valuesById[111] = \"DT_QINT8_REF\"] = 111;\n values[valuesById[112] = \"DT_QUINT8_REF\"] = 112;\n values[valuesById[113] = \"DT_QINT32_REF\"] = 113;\n values[valuesById[114] = \"DT_BFLOAT16_REF\"] = 114;\n return values;\n })();\n\n tensorflow.TensorShape = (function() {\n\n function TensorShape(p) {\n this.dim = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n TensorShape.prototype.dim = $util.emptyArray;\n TensorShape.prototype.unknownRank = false;\n\n TensorShape.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.TensorShape();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 2:\n if (!(m.dim && m.dim.length))\n m.dim = [];\n m.dim.push($root.tensorflow.TensorShape.Dim.decode(r, r.uint32()));\n break;\n case 3:\n m.unknownRank = r.bool();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n TensorShape.Dim = (function() {\n\n function Dim(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n Dim.prototype.size = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n Dim.prototype.name = \"\";\n\n Dim.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.TensorShape.Dim();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.size = r.int64();\n break;\n case 2:\n m.name = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return Dim;\n })();\n\n return TensorShape;\n })();\n\n tensorflow.Tensor = (function() {\n\n function Tensor(p) {\n this.floatVal = [];\n this.doubleVal = [];\n this.intVal = [];\n this.stringVal = [];\n this.scomplexVal = [];\n this.int64Val = [];\n this.boolVal = [];\n this.uint32Val = [];\n this.uint64Val = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n Tensor.prototype.dtype = 0;\n Tensor.prototype.tensorShape = null;\n Tensor.prototype.versionNumber = 0;\n Tensor.prototype.tensorContent = $util.newBuffer([]);\n Tensor.prototype.floatVal = $util.emptyArray;\n Tensor.prototype.doubleVal = $util.emptyArray;\n Tensor.prototype.intVal = $util.emptyArray;\n Tensor.prototype.stringVal = $util.emptyArray;\n Tensor.prototype.scomplexVal = $util.emptyArray;\n Tensor.prototype.int64Val = $util.emptyArray;\n Tensor.prototype.boolVal = $util.emptyArray;\n Tensor.prototype.uint32Val = $util.emptyArray;\n Tensor.prototype.uint64Val = $util.emptyArray;\n\n Tensor.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.Tensor();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.dtype = r.int32();\n break;\n case 2:\n m.tensorShape = $root.tensorflow.TensorShape.decode(r, r.uint32());\n break;\n case 3:\n m.versionNumber = r.int32();\n break;\n case 4:\n m.tensorContent = r.bytes();\n break;\n case 5:\n if (!(m.floatVal && m.floatVal.length))\n m.floatVal = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.floatVal.push(r.float());\n } else\n m.floatVal.push(r.float());\n break;\n case 6:\n if (!(m.doubleVal && m.doubleVal.length))\n m.doubleVal = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.doubleVal.push(r.double());\n } else\n m.doubleVal.push(r.double());\n break;\n case 7:\n if (!(m.intVal && m.intVal.length))\n m.intVal = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.intVal.push(r.int32());\n } else\n m.intVal.push(r.int32());\n break;\n case 8:\n if (!(m.stringVal && m.stringVal.length))\n m.stringVal = [];\n m.stringVal.push(r.bytes());\n break;\n case 9:\n if (!(m.scomplexVal && m.scomplexVal.length))\n m.scomplexVal = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.scomplexVal.push(r.float());\n } else\n m.scomplexVal.push(r.float());\n break;\n case 10:\n if (!(m.int64Val && m.int64Val.length))\n m.int64Val = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.int64Val.push(r.int64());\n } else\n m.int64Val.push(r.int64());\n break;\n case 11:\n if (!(m.boolVal && m.boolVal.length))\n m.boolVal = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.boolVal.push(r.bool());\n } else\n m.boolVal.push(r.bool());\n break;\n case 16:\n if (!(m.uint32Val && m.uint32Val.length))\n m.uint32Val = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.uint32Val.push(r.uint32());\n } else\n m.uint32Val.push(r.uint32());\n break;\n case 17:\n if (!(m.uint64Val && m.uint64Val.length))\n 
m.uint64Val = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.uint64Val.push(r.uint64());\n } else\n m.uint64Val.push(r.uint64());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return Tensor;\n })();\n\n tensorflow.AttrValue = (function() {\n\n function AttrValue(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n AttrValue.prototype.list = null;\n AttrValue.prototype.s = $util.newBuffer([]);\n AttrValue.prototype.i = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n AttrValue.prototype.f = 0;\n AttrValue.prototype.b = false;\n AttrValue.prototype.type = 0;\n AttrValue.prototype.shape = null;\n AttrValue.prototype.tensor = null;\n AttrValue.prototype.placeholder = \"\";\n AttrValue.prototype.func = null;\n\n var $oneOfFields;\n\n Object.defineProperty(AttrValue.prototype, \"value\", {\n get: $util.oneOfGetter($oneOfFields = [\"list\", \"s\", \"i\", \"f\", \"b\", \"type\", \"shape\", \"tensor\", \"placeholder\", \"func\"]),\n set: $util.oneOfSetter($oneOfFields)\n });\n\n AttrValue.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.AttrValue();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.list = $root.tensorflow.AttrValue.ListValue.decode(r, r.uint32());\n break;\n case 2:\n m.s = r.bytes();\n break;\n case 3:\n m.i = r.int64();\n break;\n case 4:\n m.f = r.float();\n break;\n case 5:\n m.b = r.bool();\n break;\n case 6:\n m.type = r.int32();\n break;\n case 7:\n m.shape = $root.tensorflow.TensorShape.decode(r, r.uint32());\n break;\n case 8:\n m.tensor = $root.tensorflow.Tensor.decode(r, r.uint32());\n break;\n case 9:\n m.placeholder = r.string();\n break;\n case 10:\n m.func = $root.tensorflow.NameAttrList.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n AttrValue.ListValue = (function() {\n\n function ListValue(p) {\n this.s = [];\n this.i = [];\n this.f = [];\n this.b = [];\n this.type = [];\n this.shape = [];\n this.tensor = [];\n this.func = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n ListValue.prototype.s = $util.emptyArray;\n ListValue.prototype.i = $util.emptyArray;\n ListValue.prototype.f = $util.emptyArray;\n ListValue.prototype.b = $util.emptyArray;\n ListValue.prototype.type = $util.emptyArray;\n ListValue.prototype.shape = $util.emptyArray;\n ListValue.prototype.tensor = $util.emptyArray;\n ListValue.prototype.func = $util.emptyArray;\n\n ListValue.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.AttrValue.ListValue();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 2:\n if (!(m.s && m.s.length))\n m.s = [];\n m.s.push(r.bytes());\n break;\n case 3:\n if (!(m.i && m.i.length))\n m.i = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.i.push(r.int64());\n } else\n m.i.push(r.int64());\n break;\n case 4:\n if (!(m.f && m.f.length))\n m.f = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.f.push(r.float());\n } else\n m.f.push(r.float());\n break;\n case 5:\n if (!(m.b && m.b.length))\n m.b = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.b.push(r.bool());\n } else\n m.b.push(r.bool());\n break;\n case 6:\n if (!(m.type && m.type.length))\n m.type = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.type.push(r.int32());\n } else\n m.type.push(r.int32());\n break;\n case 7:\n if (!(m.shape && m.shape.length))\n m.shape = [];\n m.shape.push($root.tensorflow.TensorShape.decode(r, r.uint32()));\n break;\n case 8:\n if (!(m.tensor && m.tensor.length))\n m.tensor = [];\n m.tensor.push($root.tensorflow.Tensor.decode(r, r.uint32()));\n break;\n case 9:\n if (!(m.func && m.func.length))\n m.func = [];\n m.func.push($root.tensorflow.NameAttrList.decode(r, r.uint32()));\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return ListValue;\n })();\n\n return AttrValue;\n })();\n\n tensorflow.NameAttrList = (function() {\n\n function NameAttrList(p) {\n this.attr = {};\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n NameAttrList.prototype.name = \"\";\n NameAttrList.prototype.attr = $util.emptyObject;\n\n NameAttrList.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.NameAttrList(), k;\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.name = r.string();\n break;\n case 2:\n r.skip().pos++;\n if (m.attr === $util.emptyObject)\n m.attr = {};\n k = r.string();\n r.pos++;\n m.attr[k] = $root.tensorflow.AttrValue.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return NameAttrList;\n })();\n\n tensorflow.NodeDef = (function() {\n\n function NodeDef(p) {\n this.input = [];\n this.attr = {};\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n NodeDef.prototype.name = \"\";\n NodeDef.prototype.op = \"\";\n NodeDef.prototype.input = $util.emptyArray;\n NodeDef.prototype.device = \"\";\n NodeDef.prototype.attr = $util.emptyObject;\n\n NodeDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.NodeDef(), k;\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.name = r.string();\n break;\n case 2:\n m.op = r.string();\n break;\n case 3:\n if (!(m.input && m.input.length))\n m.input = [];\n m.input.push(r.string());\n break;\n case 4:\n m.device = r.string();\n break;\n case 5:\n r.skip().pos++;\n if (m.attr === $util.emptyObject)\n m.attr = {};\n k = r.string();\n r.pos++;\n m.attr[k] = $root.tensorflow.AttrValue.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return NodeDef;\n })();\n\n tensorflow.VersionDef = (function() {\n\n function VersionDef(p) {\n this.badConsumers = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n VersionDef.prototype.producer = 0;\n VersionDef.prototype.minConsumer = 0;\n VersionDef.prototype.badConsumers = $util.emptyArray;\n\n VersionDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.VersionDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.producer = r.int32();\n break;\n case 2:\n m.minConsumer = r.int32();\n break;\n case 3:\n if (!(m.badConsumers && m.badConsumers.length))\n m.badConsumers = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.badConsumers.push(r.int32());\n } else\n m.badConsumers.push(r.int32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return VersionDef;\n })();\n\n tensorflow.GraphDef = (function() {\n\n function GraphDef(p) {\n this.node = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n GraphDef.prototype.node = $util.emptyArray;\n GraphDef.prototype.versions = null;\n GraphDef.prototype.library = null;\n\n GraphDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.GraphDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n if (!(m.node && m.node.length))\n m.node = [];\n m.node.push($root.tensorflow.NodeDef.decode(r, r.uint32()));\n break;\n case 4:\n m.versions = $root.tensorflow.VersionDef.decode(r, r.uint32());\n break;\n case 2:\n m.library = $root.tensorflow.FunctionDefLibrary.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return GraphDef;\n })();\n\n tensorflow.CollectionDef = (function() {\n\n function CollectionDef(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n CollectionDef.prototype.nodeList = null;\n CollectionDef.prototype.bytesList = null;\n CollectionDef.prototype.int64List = null;\n CollectionDef.prototype.floatList = null;\n CollectionDef.prototype.anyList = null;\n\n var $oneOfFields;\n\n Object.defineProperty(CollectionDef.prototype, \"kind\", {\n get: $util.oneOfGetter($oneOfFields = [\"nodeList\", \"bytesList\", \"int64List\", \"floatList\", \"anyList\"]),\n set: $util.oneOfSetter($oneOfFields)\n });\n\n CollectionDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.CollectionDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.nodeList = $root.tensorflow.CollectionDef.NodeList.decode(r, r.uint32());\n break;\n case 2:\n m.bytesList = $root.tensorflow.CollectionDef.BytesList.decode(r, r.uint32());\n break;\n case 3:\n m.int64List = $root.tensorflow.CollectionDef.Int64List.decode(r, r.uint32());\n break;\n case 4:\n m.floatList = $root.tensorflow.CollectionDef.FloatList.decode(r, r.uint32());\n break;\n case 5:\n m.anyList = $root.tensorflow.CollectionDef.AnyList.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n CollectionDef.NodeList = (function() {\n\n function NodeList(p) {\n this.value = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n NodeList.prototype.value = $util.emptyArray;\n\n NodeList.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.CollectionDef.NodeList();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n if (!(m.value && m.value.length))\n m.value = [];\n m.value.push(r.string());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return NodeList;\n })();\n\n CollectionDef.BytesList = (function() {\n\n function BytesList(p) {\n this.value = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n BytesList.prototype.value = $util.emptyArray;\n\n BytesList.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.CollectionDef.BytesList();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n if (!(m.value && m.value.length))\n m.value = [];\n m.value.push(r.bytes());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return BytesList;\n })();\n\n CollectionDef.Int64List = (function() {\n\n function Int64List(p) {\n this.value = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n Int64List.prototype.value = $util.emptyArray;\n\n Int64List.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.CollectionDef.Int64List();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n if (!(m.value && m.value.length))\n m.value = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.value.push(r.int64());\n } else\n m.value.push(r.int64());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return Int64List;\n })();\n\n CollectionDef.FloatList = (function() {\n\n function FloatList(p) {\n this.value = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n FloatList.prototype.value = $util.emptyArray;\n\n FloatList.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.CollectionDef.FloatList();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n if (!(m.value && m.value.length))\n m.value = [];\n if ((t & 7) === 2) {\n var c2 = r.uint32() + r.pos;\n while (r.pos < c2)\n m.value.push(r.float());\n } else\n m.value.push(r.float());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return FloatList;\n })();\n\n CollectionDef.AnyList = (function() {\n\n function AnyList(p) {\n this.value = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n AnyList.prototype.value = $util.emptyArray;\n\n AnyList.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.CollectionDef.AnyList();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n if (!(m.value && m.value.length))\n m.value = [];\n m.value.push($root.tensorflow.Any.decode(r, r.uint32()));\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return AnyList;\n })();\n\n return CollectionDef;\n })();\n\n tensorflow.SaverDef = (function() {\n\n function SaverDef(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n SaverDef.prototype.filenameTensorName = \"\";\n SaverDef.prototype.saveTensorName = \"\";\n SaverDef.prototype.restoreOpName = \"\";\n SaverDef.prototype.maxToKeep = 0;\n SaverDef.prototype.sharded = false;\n SaverDef.prototype.keepCheckpointEveryNHours = 0;\n SaverDef.prototype.version = 0;\n\n SaverDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.SaverDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.filenameTensorName = r.string();\n break;\n case 2:\n m.saveTensorName = r.string();\n break;\n case 3:\n m.restoreOpName = r.string();\n break;\n case 4:\n m.maxToKeep = r.int32();\n break;\n case 5:\n m.sharded = r.bool();\n break;\n case 6:\n m.keepCheckpointEveryNHours = r.float();\n break;\n case 7:\n m.version = r.int32();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n SaverDef.CheckpointFormatVersion = (function() {\n var valuesById = {}, values = Object.create(valuesById);\n values[valuesById[0] = \"LEGACY\"] = 0;\n values[valuesById[1] = \"V1\"] = 1;\n values[valuesById[2] = \"V2\"] = 2;\n return values;\n })();\n\n return SaverDef;\n })();\n\n tensorflow.TensorInfo = (function() {\n\n function TensorInfo(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n TensorInfo.prototype.name = \"\";\n TensorInfo.prototype.cooSparse = null;\n TensorInfo.prototype.dtype = 0;\n TensorInfo.prototype.tensorShape = null;\n\n var $oneOfFields;\n\n Object.defineProperty(TensorInfo.prototype, \"encoding\", {\n get: $util.oneOfGetter($oneOfFields = [\"name\", \"cooSparse\"]),\n set: $util.oneOfSetter($oneOfFields)\n });\n\n TensorInfo.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.TensorInfo();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.name = r.string();\n break;\n case 4:\n m.cooSparse = $root.tensorflow.TensorInfo.CooSparse.decode(r, r.uint32());\n break;\n case 2:\n m.dtype = r.int32();\n break;\n case 3:\n m.tensorShape = $root.tensorflow.TensorShape.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n TensorInfo.CooSparse = (function() {\n\n function CooSparse(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n CooSparse.prototype.valuesTensorName = \"\";\n CooSparse.prototype.indicesTensorName = \"\";\n CooSparse.prototype.denseShapeTensorName = \"\";\n\n CooSparse.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.TensorInfo.CooSparse();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.valuesTensorName = r.string();\n break;\n case 2:\n m.indicesTensorName = r.string();\n break;\n case 3:\n m.denseShapeTensorName = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return CooSparse;\n })();\n\n return TensorInfo;\n })();\n\n tensorflow.SignatureDef = (function() {\n\n function SignatureDef(p) {\n this.inputs = {};\n this.outputs = {};\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n SignatureDef.prototype.inputs = $util.emptyObject;\n SignatureDef.prototype.outputs = $util.emptyObject;\n SignatureDef.prototype.methodName = \"\";\n\n SignatureDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.SignatureDef(), k;\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n r.skip().pos++;\n if (m.inputs === $util.emptyObject)\n m.inputs = {};\n k = r.string();\n r.pos++;\n m.inputs[k] = $root.tensorflow.TensorInfo.decode(r, r.uint32());\n break;\n case 2:\n r.skip().pos++;\n if (m.outputs === $util.emptyObject)\n m.outputs = {};\n k = r.string();\n r.pos++;\n m.outputs[k] = $root.tensorflow.TensorInfo.decode(r, r.uint32());\n break;\n case 3:\n m.methodName = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return SignatureDef;\n })();\n\n tensorflow.AssetFileDef = (function() {\n\n function AssetFileDef(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n AssetFileDef.prototype.tensorInfo = null;\n AssetFileDef.prototype.filename = \"\";\n\n AssetFileDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.AssetFileDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.tensorInfo = $root.tensorflow.TensorInfo.decode(r, r.uint32());\n break;\n case 2:\n m.filename = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return AssetFileDef;\n })();\n\n tensorflow.OpDef = (function() {\n\n function OpDef(p) {\n this.inputArg = [];\n this.outputArg = [];\n this.attr = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n OpDef.prototype.name = \"\";\n OpDef.prototype.inputArg = $util.emptyArray;\n OpDef.prototype.outputArg = $util.emptyArray;\n OpDef.prototype.attr = $util.emptyArray;\n OpDef.prototype.deprecation = null;\n OpDef.prototype.summary = \"\";\n OpDef.prototype.description = \"\";\n OpDef.prototype.isCommutative = false;\n OpDef.prototype.isAggregate = false;\n OpDef.prototype.isStateful = false;\n OpDef.prototype.allowsUninitializedInput = false;\n\n OpDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.OpDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.name = r.string();\n break;\n case 2:\n if (!(m.inputArg && m.inputArg.length))\n m.inputArg = [];\n m.inputArg.push($root.tensorflow.OpDef.ArgDef.decode(r, r.uint32()));\n break;\n case 3:\n if (!(m.outputArg && m.outputArg.length))\n m.outputArg = [];\n m.outputArg.push($root.tensorflow.OpDef.ArgDef.decode(r, r.uint32()));\n break;\n case 4:\n if (!(m.attr && m.attr.length))\n m.attr = [];\n m.attr.push($root.tensorflow.OpDef.AttrDef.decode(r, r.uint32()));\n break;\n case 8:\n m.deprecation = $root.tensorflow.OpDef.OpDeprecation.decode(r, r.uint32());\n break;\n case 5:\n m.summary = r.string();\n break;\n case 6:\n m.description = r.string();\n break;\n case 18:\n m.isCommutative = r.bool();\n break;\n case 16:\n m.isAggregate = r.bool();\n break;\n case 17:\n m.isStateful = r.bool();\n break;\n case 19:\n m.allowsUninitializedInput = r.bool();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n OpDef.ArgDef = (function() {\n\n function ArgDef(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n ArgDef.prototype.name = \"\";\n ArgDef.prototype.description = \"\";\n ArgDef.prototype.type = 0;\n ArgDef.prototype.typeAttr = \"\";\n ArgDef.prototype.numberAttr = \"\";\n ArgDef.prototype.typeListAttr = \"\";\n ArgDef.prototype.isRef = false;\n\n ArgDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.OpDef.ArgDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.name = r.string();\n break;\n case 2:\n m.description = r.string();\n break;\n case 3:\n m.type = r.int32();\n break;\n case 4:\n m.typeAttr = r.string();\n break;\n case 5:\n m.numberAttr = r.string();\n break;\n case 6:\n m.typeListAttr = r.string();\n break;\n case 16:\n m.isRef = r.bool();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return ArgDef;\n })();\n\n OpDef.AttrDef = (function() {\n\n function AttrDef(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n AttrDef.prototype.name = \"\";\n AttrDef.prototype.type = \"\";\n AttrDef.prototype.defaultValue = null;\n AttrDef.prototype.description = \"\";\n AttrDef.prototype.hasMinimum = false;\n AttrDef.prototype.minimum = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n AttrDef.prototype.allowedValues = null;\n\n AttrDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.OpDef.AttrDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.name = r.string();\n break;\n case 2:\n m.type = r.string();\n break;\n case 3:\n m.defaultValue = $root.tensorflow.AttrValue.decode(r, r.uint32());\n break;\n case 4:\n m.description = r.string();\n break;\n case 5:\n m.hasMinimum = r.bool();\n break;\n case 6:\n m.minimum = r.int64();\n break;\n case 7:\n m.allowedValues = $root.tensorflow.AttrValue.decode(r, r.uint32());\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return AttrDef;\n })();\n\n OpDef.OpDeprecation = (function() {\n\n function OpDeprecation(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n OpDeprecation.prototype.version = 0;\n OpDeprecation.prototype.explanation = \"\";\n\n OpDeprecation.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.OpDef.OpDeprecation();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.version = r.int32();\n break;\n case 2:\n m.explanation = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return OpDeprecation;\n })();\n\n return OpDef;\n })();\n\n tensorflow.OpList = (function() {\n\n function OpList(p) {\n this.op = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n OpList.prototype.op = $util.emptyArray;\n\n OpList.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.OpList();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n if (!(m.op && m.op.length))\n m.op = [];\n m.op.push($root.tensorflow.OpDef.decode(r, r.uint32()));\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return OpList;\n })();\n\n tensorflow.MetaGraphDef = (function() {\n\n function MetaGraphDef(p) {\n this.collectionDef = {};\n this.signatureDef = {};\n this.assetFileDef = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n MetaGraphDef.prototype.metaInfoDef = null;\n MetaGraphDef.prototype.graphDef = null;\n MetaGraphDef.prototype.saverDef = null;\n MetaGraphDef.prototype.collectionDef = $util.emptyObject;\n MetaGraphDef.prototype.signatureDef = $util.emptyObject;\n MetaGraphDef.prototype.assetFileDef = $util.emptyArray;\n\n MetaGraphDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.MetaGraphDef(), k;\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.metaInfoDef = $root.tensorflow.MetaGraphDef.MetaInfoDef.decode(r, r.uint32());\n break;\n case 2:\n m.graphDef = $root.tensorflow.GraphDef.decode(r, r.uint32());\n break;\n case 3:\n m.saverDef = $root.tensorflow.SaverDef.decode(r, r.uint32());\n break;\n case 4:\n r.skip().pos++;\n if (m.collectionDef === $util.emptyObject)\n m.collectionDef = {};\n k = r.string();\n r.pos++;\n m.collectionDef[k] = $root.tensorflow.CollectionDef.decode(r, r.uint32());\n break;\n case 5:\n r.skip().pos++;\n if (m.signatureDef === $util.emptyObject)\n m.signatureDef = {};\n k = r.string();\n r.pos++;\n m.signatureDef[k] = $root.tensorflow.SignatureDef.decode(r, r.uint32());\n break;\n case 6:\n if (!(m.assetFileDef && m.assetFileDef.length))\n m.assetFileDef = [];\n m.assetFileDef.push($root.tensorflow.AssetFileDef.decode(r, r.uint32()));\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n MetaGraphDef.MetaInfoDef = (function() {\n\n function MetaInfoDef(p) {\n this.tags = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n MetaInfoDef.prototype.metaGraphVersion = \"\";\n MetaInfoDef.prototype.strippedOpList = null;\n MetaInfoDef.prototype.anyInfo = null;\n MetaInfoDef.prototype.tags = $util.emptyArray;\n MetaInfoDef.prototype.tensorflowVersion = \"\";\n MetaInfoDef.prototype.tensorflowGitVersion = \"\";\n\n MetaInfoDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.MetaGraphDef.MetaInfoDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.metaGraphVersion = r.string();\n break;\n case 2:\n m.strippedOpList = $root.tensorflow.OpList.decode(r, r.uint32());\n break;\n case 3:\n m.anyInfo = $root.tensorflow.Any.decode(r, r.uint32());\n break;\n case 4:\n if (!(m.tags && m.tags.length))\n m.tags = [];\n m.tags.push(r.string());\n break;\n case 5:\n m.tensorflowVersion = r.string();\n break;\n case 6:\n m.tensorflowGitVersion = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return MetaInfoDef;\n })();\n\n return MetaGraphDef;\n })();\n\n tensorflow.SavedModel = (function() {\n\n function SavedModel(p) {\n this.metaGraphs = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n SavedModel.prototype.savedModelSchemaVersion = $util.Long ? $util.Long.fromBits(0,0,false) : 0;\n SavedModel.prototype.metaGraphs = $util.emptyArray;\n\n SavedModel.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.SavedModel();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.savedModelSchemaVersion = r.int64();\n break;\n case 2:\n if (!(m.metaGraphs && m.metaGraphs.length))\n m.metaGraphs = [];\n m.metaGraphs.push($root.tensorflow.MetaGraphDef.decode(r, r.uint32()));\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return SavedModel;\n })();\n\n tensorflow.FunctionDefLibrary = (function() {\n\n function FunctionDefLibrary(p) {\n this[\"function\"] = [];\n this.gradient = [];\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n FunctionDefLibrary.prototype[\"function\"] = $util.emptyArray;\n FunctionDefLibrary.prototype.gradient = $util.emptyArray;\n\n FunctionDefLibrary.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.FunctionDefLibrary();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n if (!(m[\"function\"] && m[\"function\"].length))\n m[\"function\"] = [];\n m[\"function\"].push($root.tensorflow.FunctionDef.decode(r, r.uint32()));\n break;\n case 2:\n if (!(m.gradient && m.gradient.length))\n m.gradient = [];\n m.gradient.push($root.tensorflow.GradientDef.decode(r, r.uint32()));\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return FunctionDefLibrary;\n })();\n\n tensorflow.FunctionDef = (function() {\n\n function FunctionDef(p) {\n this.attr = {};\n this.nodeDef = [];\n this.ret = {};\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n FunctionDef.prototype.signature = null;\n FunctionDef.prototype.attr = $util.emptyObject;\n FunctionDef.prototype.nodeDef = $util.emptyArray;\n FunctionDef.prototype.ret = $util.emptyObject;\n\n FunctionDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? 
r.len : r.pos + l, m = new $root.tensorflow.FunctionDef(), k;\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.signature = $root.tensorflow.OpDef.decode(r, r.uint32());\n break;\n case 5:\n r.skip().pos++;\n if (m.attr === $util.emptyObject)\n m.attr = {};\n k = r.string();\n r.pos++;\n m.attr[k] = $root.tensorflow.AttrValue.decode(r, r.uint32());\n break;\n case 3:\n if (!(m.nodeDef && m.nodeDef.length))\n m.nodeDef = [];\n m.nodeDef.push($root.tensorflow.NodeDef.decode(r, r.uint32()));\n break;\n case 4:\n r.skip().pos++;\n if (m.ret === $util.emptyObject)\n m.ret = {};\n k = r.string();\n r.pos++;\n m.ret[k] = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return FunctionDef;\n })();\n\n tensorflow.GradientDef = (function() {\n\n function GradientDef(p) {\n if (p)\n for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)\n if (p[ks[i]] != null)\n this[ks[i]] = p[ks[i]];\n }\n\n GradientDef.prototype.functionName = \"\";\n GradientDef.prototype.gradientFunc = \"\";\n\n GradientDef.decode = function decode(r, l) {\n if (!(r instanceof $Reader))\n r = $Reader.create(r);\n var c = l === undefined ? r.len : r.pos + l, m = new $root.tensorflow.GradientDef();\n while (r.pos < c) {\n var t = r.uint32();\n switch (t >>> 3) {\n case 1:\n m.functionName = r.string();\n break;\n case 2:\n m.gradientFunc = r.string();\n break;\n default:\n r.skipType(t & 7);\n break;\n }\n }\n return m;\n };\n\n return GradientDef;\n })();\n\n return tensorflow;\n})();\n\nmodule.exports = $root;\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node, ValueType} from '../types';\n\nexport function getParamValue(\n paramName: string, node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext): ValueType {\n const param = node.params[paramName];\n if (param && param.inputIndex !== undefined) {\n if (param.type === 'tensor') {\n return getTensor(node.inputNames[param.inputIndex], tensorMap, context);\n }\n if (param.type === 'tensors') {\n const inputs = param.inputIndex === 0 ?\n (param.inputParamLength === 0 ?\n node.inputNames :\n node.inputNames.slice(\n param.inputIndex, -param.inputParamLength)) :\n node.inputNames.splice(param.inputIndex);\n\n return inputs.map(name => getTensor(name, tensorMap, context));\n }\n const data = Array.prototype.slice.call(\n getTensor(\n node.inputNames.slice(param.inputIndex)[0], tensorMap, context)\n .dataSync());\n return param.type === 'number' ? 
data[0] : data;\n }\n return param && param.value;\n}\n\n/**\n * Retrieve the tensor based on input name by extracting the node name and\n * output index information.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n */\nexport function getTensor(\n name: string, tensorsMap: NamedTensorsMap,\n context: ExecutionContext): tfc.Tensor {\n const [nodeName, index] = parseNodeName(name);\n const contextId = context.currentContextIds.find(contextId => {\n return !!tensorsMap[getNodeNameWithContextId(nodeName, contextId)];\n });\n\n return contextId !== undefined ?\n tensorsMap[getNodeNameWithContextId(nodeName, contextId)][index] :\n undefined;\n}\n\n/**\n * Retrieve the tensors based on input name for current context.\n * @param name Node input name\n * @param tensorsMap Tensors map keyed by the node\n */\nexport function getTensorsForCurrentContenxt(\n name: string, tensorsMap: NamedTensorsMap,\n context: ExecutionContext): tfc.Tensor[] {\n return tensorsMap[getNodeNameWithContextId(name, context.currentContextId)];\n}\n\n/**\n * Returns the node name and index from the Node input name.\n * @param inputName The input name of the node, in format of\n * node_name:output_index, i.e. MatMul:0, if the output_index is not set, it is\n * default to 0.\n */\nexport function getNodeNameAndIndex(\n inputName: string, context?: ExecutionContext): [string, number] {\n const [nodeName, index] = parseNodeName(inputName);\n\n return [\n getNodeNameWithContextId(nodeName, context && context.currentContextId),\n index\n ];\n}\n\nfunction getNodeNameWithContextId(name: string, contextId?: string): string {\n return !!contextId ? `${name}-${contextId}` : name;\n}\n\nexport function parseNodeName(name: string): [string, number] {\n const index = name.lastIndexOf(':');\n if (index === -1) return [name, 0];\n\n const nodeName = name.substring(0, index);\n return [nodeName, Number(name.substring(index + 1))];\n}\n\nexport function split(arr: number[], size: number) {\n const res = [];\n for (let i = 0; i < arr.length; i += size) {\n res.push(arr.slice(i, i + size));\n }\n return res;\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'Add',\n 'dlOpName': 'add',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'AddN',\n 'dlOpName': 'addN',\n 'category': 'arithmetic',\n 'params': [{\n 'tfInputIndex': 0,\n 'tfInputParamLength': 0,\n 'dlParamName': 'tensors',\n 'type': 'tensors'\n }]\n },\n {\n 'tfOpName': 'BiasAdd',\n 'dlOpName': 'add',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Sub',\n 'dlOpName': 'sub',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'RealDiv',\n 'dlOpName': 'div',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Div',\n 'dlOpName': 'div',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FloorDiv',\n 'dlOpName': 'floorDiv',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Mul',\n 'dlOpName': 'mul',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Maximum',\n 'dlOpName': 'maximum',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}\n ]\n },\n {\n 'tfOpName': 'Minimum',\n 'dlOpName': 'minimum',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 
'tensor'}\n ]\n },\n {\n 'tfOpName': 'Pow',\n 'dlOpName': 'pow',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'SquaredDifference',\n 'dlOpName': 'squaredDifference',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Mod',\n 'dlOpName': 'mod',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FloorMod',\n 'dlOpName': 'mod',\n 'category': 'arithmetic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'Abs',\n 'dlOpName': 'abs',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Acos',\n 'dlOpName': 'acos',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Asin',\n 'dlOpName': 'asin',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Atan',\n 'dlOpName': 'atan',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Atan2',\n 'dlOpName': 'atan2',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'y', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Ceil',\n 'dlOpName': 'ceil',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 
'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'ClipByValue',\n 'dlOpName': 'clipByValue',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'clip_value_min',\n 'dlParamName': 'clipValueMin',\n 'type': 'number'\n },\n {\n 'tfParamName': 'clip_value_max',\n 'dlParamName': 'clipValueMax',\n 'type': 'number'\n }\n ]\n },\n {\n 'tfOpName': 'Cos',\n 'dlOpName': 'cos',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Cosh',\n 'dlOpName': 'cosh',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Elu',\n 'dlOpName': 'elu',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Exp',\n 'dlOpName': 'exp',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Floor',\n 'dlOpName': 'floor',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Log',\n 'dlOpName': 'log',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Neg',\n 'dlOpName': 'neg',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Relu',\n 'dlOpName': 'relu',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Relu6',\n 'dlOpName': 'clipByValue',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n {'dlParamName': 'clipValueMin', 'type': 'number', 'defaultValue': 0},\n {'dlParamName': 'clipValueMax', 'type': 'number', 'defaultValue': 6}\n ]\n },\n {\n 'tfOpName': 'Selu',\n 'dlOpName': 'selu',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Sigmoid',\n 'dlOpName': 'sigmoid',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Sin',\n 'dlOpName': 'sin',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 
'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Sinh',\n 'dlOpName': 'sinh',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Sqrt',\n 'dlOpName': 'sqrt',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Rsqrt',\n 'dlOpName': 'rsqrt',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Square',\n 'dlOpName': 'square',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Tan',\n 'dlOpName': 'tan',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Tanh',\n 'dlOpName': 'tanh',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Sign',\n 'dlOpName': 'sign',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Round',\n 'dlOpName': 'round',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Expm1',\n 'dlOpName': 'expm1',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Log1p',\n 'dlOpName': 'log1p',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Reciprocal',\n 'dlOpName': 'reciprocal',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Reciprocal',\n 'dlOpName': 'reciprocal',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Softplus',\n 'dlOpName': 'softplus',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Asinh',\n 'dlOpName': 
'asinh',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Acosh',\n 'dlOpName': 'acosh',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Atanh',\n 'dlOpName': 'atanh',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Erf',\n 'dlOpName': 'erf',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'dlOpName': 'prod',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axes', 'type': 'number[]'}, {\n 'tfParamName': 'keep_dims',\n 'dlParamName': 'keepDims',\n 'type': 'bool',\n 'notSupported': true\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LeakyRelu',\n 'dlOpName': 'leakyRelu',\n 'category': 'basic_math',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'alpha',\n 'dlParamName': 'alpha',\n 'type': 'number',\n 'defaultValue': 0.2\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'LoopCond',\n 'dlOpName': 'loopCond',\n 'category': 'control',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'pred', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'Switch',\n 'dlOpName': 'switch',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'data', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'pred', 'type': 'tensor'}\n ]\n },\n {\n 'tfOpName': 'Merge',\n 'dlOpName': 'merge',\n 'category': 'control',\n 'params': [{\n 'tfInputIndex': 0,\n 'tfInputParamLength': 0,\n 'dlParamName': 'tensors',\n 'type': 'tensors'\n }]\n },\n {\n 'tfOpName': 'Enter',\n 'dlOpName': 'enter',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensor', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n {\n 'tfParamName': 'frame_name',\n 'dlParamName': 'frameName',\n 'type': 'string'\n },\n {\n 'tfParamName': 'is_constant',\n 'dlParamName': 'isConstant',\n 'type': 'bool'\n }\n ]\n },\n {\n 'tfOpName': 'Exit',\n 'dlOpName': 'exit',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensor', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'NextIteration',\n 'dlOpName': 'nextIteration',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensor', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'TensorArrayV3',\n 'dlOpName': 'tensorArray',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'size', 'type': 'number'},\n {'tfParamName': 'dtype', 'dlParamName': 'dtype', 'type': 'dtype'}, {\n 'tfParamName': 'element_shape',\n 'dlParamName': 'elementShape',\n 'type': 'shape'\n },\n {\n 'tfParamName': 'dynamic_size',\n 'dlParamName': 'dynamicSize',\n 'type': 'bool'\n },\n {\n 'tfParamName': 'clear_after_read',\n 'dlParamName': 'clearAfterRead',\n 'type': 'bool'\n },\n {\n 'tfParamName': 'identical_element_shapes',\n 'dlParamName': 'identicalElementShapes',\n 'type': 'bool'\n },\n {\n 'tfParamName': 'tensor_array_name',\n 'dlParamName': 'name',\n 'type': 'string'\n }\n ]\n },\n {\n 'tfOpName': 'TensorArrayWriteV3',\n 'dlOpName': 'tensorArrayWrite',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensorArrayId', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'index', 'type': 'number'},\n {'tfInputIndex': 2, 'dlParamName': 'tensor', 'type': 'tensor'},\n {'tfInputIndex': 3, 'dlParamName': 'flowIn', 'type': 'number'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'TensorArrayReadV3',\n 'dlOpName': 'tensorArrayRead',\n 
'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensorArrayId', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'index', 'type': 'number'},\n {'tfInputIndex': 2, 'dlParamName': 'flowIn', 'type': 'number'}, {\n 'tfParamName': 'dtype',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'TensorArrayGatherV3',\n 'dlOpName': 'tensorArrayGather',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensorArrayId', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'indices', 'type': 'number[]'},\n {'tfInputIndex': 2, 'dlParamName': 'flowIn', 'type': 'number'},\n {'tfParamName': 'dtype', 'dlParamName': 'dtype', 'type': 'dtype'}, {\n 'tfParamName': 'element_shape',\n 'dlParamName': 'elementShape',\n 'type': 'shape'\n }\n ]\n },\n {\n 'tfOpName': 'TensorArrayScatterV3',\n 'dlOpName': 'tensorArrayScatter',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensorArrayId', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'indices', 'type': 'number[]'},\n {'tfInputIndex': 2, 'dlParamName': 'tensor', 'type': 'tensor'},\n {'tfInputIndex': 3, 'dlParamName': 'flowIn', 'type': 'number'},\n {'tfParamName': 'T', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'TensorArrayConcatV3',\n 'dlOpName': 'tensorArrayConcat',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensorArrayId', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'flowIn', 'type': 'number'},\n {'tfParamName': 'dtype', 'dlParamName': 'dtype', 'type': 'dtype'}, {\n 'tfParamName': 'element_shape_except0',\n 'dlParamName': 'elementShapeExcept0',\n 'type': 'shape',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'TensorArraySplitV3',\n 'dlOpName': 'tensorArraySplit',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensorArrayId', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'tensor', 'type': 'tensor'},\n {'tfInputIndex': 2, 'dlParamName': 'lengths', 'type': 'number[]'},\n {'tfInputIndex': 3, 'dlParamName': 'flowIn', 'type': 'number'},\n {'tfParamName': 'T', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'TensorArraySizeV3',\n 'dlOpName': 'tensorArraySize',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensorArrayId', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'flowIn', 'type': 'number'}\n ]\n },\n {\n 'tfOpName': 'TensorArrayCloseV3',\n 'dlOpName': 'tensorArrayClose',\n 'category': 'control',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'tensorArrayId', 'type': 'number'}\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'AvgPool',\n 'dlOpName': 'avgPool',\n 'category': 'convolution',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfParamName': 'strides', 'dlParamName': 'strides', 'type': 'number[]'},\n {'tfParamName': 'padding', 'dlParamName': 'pad', 'type': 'string'}, {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n {'tfParamName': 'ksize', 'dlParamName': 'kernelSize', 'type': 'number[]'},\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'MaxPool',\n 'dlOpName': 'maxPool',\n 'category': 'convolution',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfParamName': 'strides', 'dlParamName': 'strides', 'type': 'number[]'},\n {'tfParamName': 'padding', 'dlParamName': 'pad', 'type': 'string'}, {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n },\n {'tfParamName': 'ksize', 'dlParamName': 'kernelSize', 'type': 'number[]'},\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Conv1D',\n 'dlOpName': 'conv1d',\n 'category': 'convolution',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'filter', 'type': 'tensor'},\n {'tfParamName': 'stride', 'dlParamName': 'stride', 'type': 'number'},\n {'tfParamName': 'padding', 'dlParamName': 'pad', 'type': 'string'}, {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NWC'\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n {\n 'tfParamName': 'dilation',\n 'dlParamName': 'dilation',\n 'type': 'number',\n 'defaultValue': 1\n }\n ]\n },\n {\n 'tfOpName': 'Conv2D',\n 'dlOpName': 'conv2d',\n 'category': 'convolution',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'filter', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n {'tfParamName': 'strides', 'dlParamName': 'strides', 'type': 'number[]'},\n {'tfParamName': 'padding', 'dlParamName': 'pad', 'type': 'string'}, {\n 'tfParamName': 'useCudnnOnGpu',\n 'dlParamName': 'useCudnnOnGpu',\n 'type': 'bool'\n },\n {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfParamName': 'dilations',\n 'dlParamName': 'dilations',\n 'type': 'number[]'\n }\n ]\n },\n {\n 'tfOpName': 'Conv2DBackpropInput',\n 'dlOpName': 'conv2dTranspose',\n 'category': 'convolution',\n 'params': [\n {'tfInputIndex': 2, 'dlParamName': 'x', 'type': 'tensor'},\n 
{'tfInputIndex': 1, 'dlParamName': 'filter', 'type': 'tensor'},\n {'tfInputIndex': 0, 'dlParamName': 'outputShape', 'type': 'number[]'},\n {'tfParamName': 'strides', 'dlParamName': 'strides', 'type': 'number[]'},\n {'tfParamName': 'padding', 'dlParamName': 'pad', 'type': 'string'}, {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'DepthwiseConv2d',\n 'dlOpName': 'depthwiseConv2d',\n 'category': 'convolution',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'input', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'filter', 'type': 'tensor'},\n {'tfParamName': 'strides', 'dlParamName': 'strides', 'type': 'number[]'},\n {'tfParamName': 'padding', 'dlParamName': 'pad', 'type': 'string'}, {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfParamName': 'dilations',\n 'dlParamName': 'dilations',\n 'type': 'number[]'\n }\n ]\n },\n {\n 'tfOpName': 'DepthwiseConv2dNative',\n 'dlOpName': 'depthwiseConv2d',\n 'category': 'convolution',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'input', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'filter', 'type': 'tensor'},\n {'tfParamName': 'strides', 'dlParamName': 'strides', 'type': 'number[]'},\n {'tfParamName': 'padding', 'dlParamName': 'pad', 'type': 'string'}, {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'defaultValue': 'NHWC'\n },\n {\n 'tfParamName': 'dilations',\n 'dlParamName': 'dilations',\n 'type': 'number[]'\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'Fill',\n 'dlOpName': 'fill',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'shape', 'type': 'number[]'},\n {'tfInputIndex': 1, 'dlParamName': 'value', 'type': 'number'},\n {'tfParamName': 'T', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'LinSpace',\n 'dlOpName': 'linspace',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'start', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'stop', 'type': 'number'},\n {'tfInputIndex': 2, 'dlParamName': 'num', 'type': 'number'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'OneHot',\n 'dlOpName': 'oneHot',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'indices', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'depth', 'type': 'number'}, {\n 'tfInputIndex': 2,\n 'dlParamName': 'onValue',\n 'type': 'number',\n 'defaultValue': 1\n },\n {\n 'tfInputIndex': 3,\n 'dlParamName': 'offValue',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'axis',\n 'dlParamName': 'axis',\n 'type': 'number',\n 
'notSupported': true\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Ones',\n 'dlOpName': 'ones',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'shape', 'type': 'number[]'},\n {'tfParamName': 'T', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'OnesLike',\n 'dlOpName': 'onesLike',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfParamName': 'dtype', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'RandomUniform',\n 'dlOpName': 'randomUniform',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'shape', 'type': 'number[]'}, {\n 'tfParamName': 'minval',\n 'dlParamName': 'minval',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'maxval',\n 'dlParamName': 'maxval',\n 'type': 'number',\n 'defaultValue': 1\n },\n {'tfParamName': 'dtype', 'dlParamName': 'dtype', 'type': 'dtype'}, {\n 'tfParamName': 'seed',\n 'dlParamName': 'seed',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'seed2',\n 'dlParamName': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'T',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Range',\n 'dlOpName': 'range',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'start', 'type': 'number'},\n {'tfInputIndex': 1, 'dlParamName': 'stop', 'type': 'number'}, {\n 'tfInputIndex': 2,\n 'dlParamName': 'step',\n 'type': 'number',\n 'defaultValue': 0\n },\n {'tfParamName': 'Tidx', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'truncatedNormal',\n 'dlOpName': 'truncatedNormal',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'shape', 'type': 'number[]'}, {\n 'tfParamName': 'means',\n 'dlParamName': 'mean',\n 'type': 'number',\n 'defaultValue': 0.0\n },\n {\n 'tfParamName': 'stddev',\n 'dlParamName': 'stdDev',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n {'tfParamName': 'seed', 'dlParamName': 'seed', 'type': 'number'}, {\n 'tfParamName': 'seed2',\n 'dlParamName': 'seed2',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n },\n {'tfParamName': 'dtype', 'dlParamName': 'dtype', 'type': 'dtype'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'T',\n 'type': 'number',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Zeros',\n 'dlOpName': 'zeros',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'shape', 'type': 'number[]'},\n {'tfParamName': 'T', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'ZerosLike',\n 'dlOpName': 'zerosLike',\n 'category': 'creation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfParamName': 'T', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'NonMaxSuppressionV2',\n 'dlOpName': 'nonMaxSuppression',\n 'category': 'dynamic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'boxes', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'scores', 'type': 'tensor'},\n {'tfInputIndex': 2, 'dlParamName': 'maxOutputSize', 'type': 'number'},\n {'tfInputIndex': 3, 'dlParamName': 'iouThreshold', 'type': 'number'}\n ]\n },\n {\n 'tfOpName': 'NonMaxSuppressionV3',\n 'dlOpName': 'nonMaxSuppression',\n 'category': 'dynamic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'boxes', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'scores', 'type': 'tensor'},\n {'tfInputIndex': 2, 'dlParamName': 'maxOutputSize', 'type': 'number'},\n {'tfInputIndex': 3, 'dlParamName': 'iouThreshold', 'type': 'number'},\n {'tfInputIndex': 4, 'dlParamName': 'scoreThreshold', 'type': 'number'}\n ]\n },\n {\n 'tfOpName': 'Where',\n 'dlOpName': 'whereAsync',\n 'category': 'dynamic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'condition', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'ListDiff',\n 'dlOpName': 'setdiff1dAsync',\n 'category': 'dynamic',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'y', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [{\n 'tfOpName': 'TopKV2',\n 'dlOpName': 'topK',\n 'category': 'evaluation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'k', 'type': 'number'},\n {'tfParamName': 'sorted', 'dlParamName': 'sorted', 'type': 'bool'}\n ]\n}];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'PlaceholderWithDefault',\n 'dlOpName': 'placeholder',\n 'category': 'graph',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'default', 'type': 'tensor'},\n {'tfParamName': 'shape', 'dlParamName': 'shape', 'type': 'shape'},\n {'tfParamName': 'dtype', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'Placeholder',\n 'dlOpName': 'placeholder',\n 'category': 'graph',\n 'params': [\n {'tfParamName': 'shape', 'dlParamName': 'shape', 'type': 'shape'},\n {'tfParamName': 'dtype', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {'tfOpName': 'Const', 'dlOpName': 'const', 'category': 'graph'}, {\n 'tfOpName': 'Identity',\n 'dlOpName': 'identity',\n 'category': 'graph',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'Snapshot',\n 'dlOpName': 'snapshot',\n 'category': 'graph',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'Rank',\n 'dlOpName': 'rank',\n 'category': 'graph',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'Size',\n 'dlOpName': 'size',\n 'category': 'graph',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'Shape',\n 'dlOpName': 'shape',\n 'category': 'graph',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'ShapeN',\n 'dlOpName': 'shapeN',\n 'category': 'graph',\n 'params': [{\n 'tfInputIndex': 0,\n 'tfInputParamLength': 0,\n 'dlParamName': 'x',\n 'type': 'tensors'\n }]\n },\n {\n 'tfOpName': 'Print',\n 'dlOpName': 'print',\n 'category': 'graph',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfInputIndex': 1,\n 'tfInputParamLength': 1,\n 'dlParamName': 'data',\n 'type': 'tensors'\n },\n {'tfParamName': 'message', 'dlParamName': 'message', 'type': 'string'}, {\n 'tfParamName': 'first_n',\n 'dlParamName': 'firstN',\n 'type': 'number',\n 'notSupprted': true\n },\n {\n 'tfParamName': 'summarize',\n 'dlParamName': 'summarize',\n 'type': 'number',\n 'defaultValue': 3\n }\n ]\n },\n {'tfOpName': 'NoOp', 'dlOpName': 'noop', 'category': 'graph', 'params': []}, {\n 'tfOpName': 'StopGradient',\n 'dlOpName': 'stopGradient',\n 'category': 'graph',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'FakeQuantWithMinMaxVars',\n 'dlOpName': 'fakeQuantWithMinMaxVars',\n 'category': 'graph',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfParamName': 'min', 'dlParamName': 'min', 'type': 'number'},\n {'tfParamName': 'max', 'dlParamName': 'max', 'type': 'number'}\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'ResizeBilinear',\n 'dlOpName': 'resizeBilinear',\n 'category': 'image',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'images', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'size', 'type': 'number[]'}, {\n 'tfParamName': 'align_corners',\n 'dlParamName': 'alignCorners',\n 'type': 'bool'\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'ResizeNearestNeighbor',\n 'dlOpName': 'resizeNearestNeighbor',\n 'category': 'image',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'images', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'size', 'type': 'number[]'}, {\n 'tfParamName': 'align_corners',\n 'dlParamName': 'alignCorners',\n 'type': 'bool'\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'CropAndResize',\n 'dlOpName': 'cropAndResize',\n 'category': 'image',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'image', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'boxes', 'type': 'tensor'},\n {'tfInputIndex': 2, 'dlParamName': 'boxInd', 'type': 'tensor'},\n {'tfInputIndex': 3, 'dlParamName': 'cropSize', 'type': 'number[]'},\n {'tfParamName': 'method', 'dlParamName': 'method', 'type': 'string'}, {\n 'tfParamName': 'extrapolation_value',\n 'dlParamName': 'extrapolationValue',\n 'type': 'number'\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'Equal',\n 'dlOpName': 'equal',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'NotEqual',\n 'dlOpName': 'notEqual',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Greater',\n 'dlOpName': 'greater',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'GreaterEqual',\n 'dlOpName': 'greaterEqual',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Less',\n 'dlOpName': 'less',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LessEqual',\n 'dlOpName': 'lessEqual',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LogicalAnd',\n 'dlOpName': 'logicalAnd',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LogicalNot',\n 'dlOpName': 'logicalNot',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LogicalOr',\n 'dlOpName': 'logicalOr',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 
'tfOpName': 'Select',\n 'dlOpName': 'where',\n 'category': 'logical',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'condition', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 2, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'MatMul',\n 'dlOpName': 'matMul',\n 'category': 'matrices',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'transpose_a',\n 'dlParamName': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfParamName': 'transpose_b',\n 'dlParamName': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'BatchMatMul',\n 'dlOpName': 'matMul',\n 'category': 'matrices',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'a', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'b', 'type': 'tensor'}, {\n 'tfParamName': 'adj_x',\n 'dlParamName': 'transposeA',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfParamName': 'adj_y',\n 'dlParamName': 'transposeB',\n 'type': 'bool',\n 'defaultValue': false\n },\n {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Transpose',\n 'dlOpName': 'transpose',\n 'category': 'matrices',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'perm', 'type': 'number[]'}, {\n 'tfParamName': 'T',\n 'dlParamName': 'dtype',\n 'type': 'dtype',\n 'notSupported': true\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'FusedBatchNorm',\n 'dlOpName': 'batchNormalization',\n 'category': 'normalization',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'scale', 'type': 'tensor'},\n {'tfInputIndex': 2, 'dlParamName': 'offset', 'type': 'tensor'},\n {'tfInputIndex': 3, 'dlParamName': 'mean', 'type': 'tensor'},\n {'tfInputIndex': 4, 'dlParamName': 'variance', 'type': 'tensor'}, {\n 'tfParamName': 'epsilon',\n 'dlParamName': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'FusedBatchNormV2',\n 'dlOpName': 'batchNormalization',\n 'category': 'normalization',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'scale', 'type': 'tensor'},\n {'tfInputIndex': 2, 'dlParamName': 'offset', 'type': 'tensor'},\n {'tfInputIndex': 3, 'dlParamName': 'mean', 'type': 'tensor'},\n {'tfInputIndex': 4, 'dlParamName': 'variance', 'type': 'tensor'}, {\n 'tfParamName': 'epsilon',\n 'dlParamName': 'epsilon',\n 'type': 'number',\n 'defaultValue': 0.001\n },\n {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'LRN',\n 'dlOpName': 'localResponseNormalization',\n 'category': 'normalization',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'depth_radius',\n 'dlParamName': 'radius',\n 'type': 'number',\n 'defaultValue': 5\n },\n {\n 'tfParamName': 'bias',\n 'dlParamName': 'bias',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n {\n 'tfParamName': 'alpha',\n 'dlParamName': 'alpha',\n 'type': 'number',\n 'defaultValue': 1.0\n },\n {\n 'tfParamName': 'beta',\n 'dlParamName': 'beta',\n 'type': 'number',\n 'defaultValue': 0.5\n }\n ]\n },\n {\n 'tfOpName': 'Softmax',\n 'dlOpName': 'softmax',\n 'category': 'normalization',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'LogSoftmax',\n 'dlOpName': 'logSoftmax',\n 'category': 'normalization',\n 'params': [{'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'dlOpName': 'sparseToDense',\n 'category': 'normalization',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'sparseIndices', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'outputShape', 'type': 'number[]'},\n {'tfInputIndex': 2, 'dlParamName': 'sparseValues', 'type': 'tensor'},\n {'tfInputIndex': 3, 'dlParamName': 'defaultValue', 'type': 'tensor'}, {\n 'tfParamName': 'validate_indices',\n 'dlParamName': 'validateIndices',\n 'type': 'bool',\n 'defaultValue': true,\n 'notSupported': true\n }\n ]\n }\n];\n","/**\n * @license\n * 
Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'Max',\n 'dlOpName': 'max',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number[]'},\n {'tfParamName': 'keep_dims', 'dlParamName': 'keepDims', 'type': 'bool'}\n ]\n },\n {\n 'tfOpName': 'Mean',\n 'dlOpName': 'mean',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number[]'},\n {'tfParamName': 'keep_dims', 'dlParamName': 'keepDims', 'type': 'bool'}\n ]\n },\n {\n 'tfOpName': 'Min',\n 'dlOpName': 'min',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number[]'},\n {'tfParamName': 'keep_dims', 'dlParamName': 'keepDims', 'type': 'bool'}\n ]\n },\n {\n 'tfOpName': 'Sum',\n 'dlOpName': 'sum',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number[]'},\n {'tfParamName': 'keep_dims', 'dlParamName': 'keepDims', 'type': 'bool'}\n ]\n },\n {\n 'tfOpName': 'All',\n 'dlOpName': 'all',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number[]'},\n {'tfParamName': 'keep_dims', 'dlParamName': 'keepDims', 'type': 'bool'}\n ]\n },\n {\n 'tfOpName': 'Any',\n 'dlOpName': 'any',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number[]'},\n {'tfParamName': 'keep_dims', 'dlParamName': 'keepDims', 'type': 'bool'}\n ]\n },\n {\n 'tfOpName': 'ArgMax',\n 'dlOpName': 'argMax',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number'}\n ]\n },\n {\n 'tfOpName': 'ArgMin',\n 'dlOpName': 'argMin',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number'}\n ]\n },\n {\n 'tfOpName': 'Prod',\n 'dlOpName': 'prod',\n 'category': 'reduction',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number[]'}, {\n 'tfParamName': 'keep_dims',\n 'dlParamName': 'keepDims',\n 'type': 'bool'\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'ConcatV2',\n 'dlOpName': 'concat',\n 'category': 'slice_join',\n 'params': [\n {\n 'tfInputIndex': 0,\n 'tfInputParamLength': 1,\n 'dlParamName': 'tensors',\n 'type': 'tensors'\n },\n {'tfInputIndex': -1, 'dlParamName': 'axis', 'type': 'number'}\n ]\n },\n {\n 'tfOpName': 'Concat',\n 'dlOpName': 'concat',\n 'category': 'slice_join',\n 'params': [\n {\n 'tfInputIndex': 1,\n 'tfInputParamLength': 1,\n 'dlParamName': 'tensors',\n 'type': 'tensors'\n },\n {'tfInputIndex': 0, 'dlParamName': 'axis', 'type': 'number'}\n ]\n },\n {\n 'tfOpName': 'GatherV2',\n 'dlOpName': 'gather',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'indices', 'type': 'tensor'}, {\n 'tfInputIndex': 2,\n 'dlParamName': 'axis',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Gather',\n 'dlOpName': 'gather',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'indices', 'type': 'tensor'}, {\n 'tfParamName': 'axis',\n 'dlParamName': 'axis',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'validate_indices',\n 'dlParamName': 'validateIndices',\n 'type': 'bool',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Reverse',\n 'dlOpName': 'reverse',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfInputIndex': 1,\n 'dlParamName': 'dims',\n 'type': 'bool',\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'ReverseV2',\n 'dlOpName': 'reverse',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'axis', 'type': 'number[]'}\n ]\n },\n {\n 'tfOpName': 'Slice',\n 'dlOpName': 'slice',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'begin', 'type': 'number[]'},\n {'tfInputIndex': 2, 'dlParamName': 'size', 'type': 'number[]'}\n ]\n },\n {\n 'tfOpName': 'StridedSlice',\n 'dlOpName': 'stridedSlice',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'begin', 'type': 'number[]'},\n {'tfInputIndex': 2, 'dlParamName': 'end', 'type': 'number[]'},\n {'tfInputIndex': 3, 'dlParamName': 'strides', 'type': 'number[]'}, {\n 'tfParamName': 'begin_mask',\n 'dlParamName': 'beginMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'end_mask',\n 'dlParamName': 'endMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'new_axis_mask',\n 'dlParamName': 'newAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'ellipsis_mask',\n 
'dlParamName': 'ellipsisMask',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'shrink_axis_mask',\n 'dlParamName': 'shrinkAxisMask',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Pack',\n 'dlOpName': 'stack',\n 'category': 'slice_join',\n 'params': [\n {\n 'tfInputIndex': 0,\n 'tfInputParamLength': 0,\n 'dlParamName': 'tensors',\n 'type': 'tensors'\n },\n {\n 'tfParamName': 'axis',\n 'dlParamName': 'axis',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Unpack',\n 'dlOpName': 'unstack',\n 'category': 'slice_join',\n 'params': [\n {\n 'tfInputIndex': 0,\n 'tfInputParamLength': 0,\n 'dlParamName': 'tensor',\n 'type': 'tensor'\n },\n {\n 'tfParamName': 'axis',\n 'dlParamName': 'axis',\n 'type': 'number',\n 'defaultValue': 0\n },\n {\n 'tfParamName': 'num',\n 'dlParamName': 'num',\n 'type': 'number',\n 'defaultValue': 0,\n 'notSupported': true\n }\n ]\n },\n {\n 'tfOpName': 'Tile',\n 'dlOpName': 'tile',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'reps', 'type': 'number[]'}\n ]\n },\n {\n 'tfOpName': 'Split',\n 'dlOpName': 'split',\n 'category': 'slice_join',\n 'params': [\n {\n 'tfInputIndex': 0,\n 'dlParamName': 'axis',\n 'type': 'number',\n 'defaultValue': 0\n },\n {'tfInputIndex': 1, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'num_split',\n 'dlParamName': 'numOrSizeSplits',\n 'type': 'number',\n 'defaultValue': 1\n }\n ]\n },\n {\n 'tfOpName': 'SplitV',\n 'dlOpName': 'split',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'numOrSizeSplits', 'type': 'number[]'},\n {\n 'tfInputIndex': 2,\n 'dlParamName': 'axis',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'ScatterNd',\n 'dlOpName': 'scatterNd',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'indices', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'values', 'type': 'tensor'},\n {'tfInputIndex': 2, 'dlParamName': 'shape', 'type': 'number[]'}\n ]\n },\n {\n 'tfOpName': 'GatherNd',\n 'dlOpName': 'gatherNd',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'indices', 'type': 'tensor'}\n ]\n },\n {\n 'tfOpName': 'SparseToDense',\n 'dlOpName': 'sparseToDense',\n 'category': 'slice_join',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'sparseIndices', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'outputShape', 'type': 'number[]'},\n {'tfInputIndex': 2, 'dlParamName': 'sparseValues', 'type': 'tensor'},\n {'tfInputIndex': 3, 'dlParamName': 'defaultValue', 'type': 'tensor'}, {\n 'tfParamName': 'validate_indices',\n 'dlParamName': 'validateIndices',\n 'type': 'bool',\n 'defaultValue': false,\n 'notSupported': true\n }\n ]\n }\n];\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport const json = [\n {\n 'tfOpName': 'Cast',\n 'dlOpName': 'cast',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'SrcT',\n 'dlParamName': 'sdtype',\n 'type': 'dtype',\n 'notSupported': true\n },\n {'tfParamName': 'DstT', 'dlParamName': 'dtype', 'type': 'dtype'}\n ]\n },\n {\n 'tfOpName': 'ExpandDims',\n 'dlOpName': 'expandDims',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfInputIndex': 1,\n 'tfParamNameDeprecated': 'dim',\n 'dlParamName': 'axis',\n 'type': 'number'\n }\n ]\n },\n {\n 'tfOpName': 'Pad',\n 'dlOpName': 'pad',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'padding', 'type': 'number[]'}, {\n 'tfParamName': 'constant_value',\n 'dlParamName': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'PadV2',\n 'dlOpName': 'pad',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'padding', 'type': 'number[]'}, {\n 'tfInputIndex': 2,\n 'dlParamName': 'constantValue',\n 'type': 'number',\n 'defaultValue': 0\n }\n ]\n },\n {\n 'tfOpName': 'Reshape',\n 'dlOpName': 'reshape',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'shape', 'type': 'number[]'}\n ]\n },\n {\n 'tfOpName': 'Squeeze',\n 'dlOpName': 'squeeze',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'axis',\n 'tfParamNameDeprecated': 'squeeze_dims',\n 'dlParamName': 'axis',\n 'type': 'number[]'\n }\n ]\n },\n {\n 'tfOpName': 'SpaceToBatchND',\n 'dlOpName': 'spaceToBatchND',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'blockShape', 'type': 'number[]'},\n {'tfInputIndex': 2, 'dlParamName': 'paddings', 'type': 'number[]'}\n ]\n },\n {\n 'tfOpName': 'BatchToSpaceND',\n 'dlOpName': 'batchToSpaceND',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'},\n {'tfInputIndex': 1, 'dlParamName': 'blockShape', 'type': 'number[]'},\n {'tfInputIndex': 2, 'dlParamName': 'crops', 'type': 'number[]'}\n ]\n },\n {\n 'tfOpName': 'DepthToSpace',\n 'dlOpName': 'depthToSpace',\n 'category': 'transformation',\n 'params': [\n {'tfInputIndex': 0, 'dlParamName': 'x', 'type': 'tensor'}, {\n 'tfParamName': 'block_size',\n 'dlParamName': 'blockSize',\n 'type': 'number'\n },\n {\n 'tfParamName': 'data_format',\n 'dlParamName': 'dataFormat',\n 'type': 'string'\n }\n ]\n }\n];\n","/**\n * @license\n * 
Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport {DataType} from '@tensorflow/tfjs-core';\n\nimport {tensorflow} from '../data/compiled_api';\n\nimport {getNodeNameAndIndex} from './executors/utils';\nimport * as arithmetic from './op_list/arithmetic';\nimport * as basicMath from './op_list/basic_math';\nimport * as control from './op_list/control';\nimport * as convolution from './op_list/convolution';\nimport * as creation from './op_list/creation';\nimport * as dynamic from './op_list/dynamic';\nimport * as evaluation from './op_list/evaluation';\nimport * as graph from './op_list/graph';\nimport * as image from './op_list/image';\nimport * as logical from './op_list/logical';\nimport * as matrices from './op_list/matrices';\nimport * as normalization from './op_list/normalization';\nimport * as reduction from './op_list/reduction';\nimport * as sliceJoin from './op_list/slice_join';\nimport * as transformation from './op_list/transformation';\nimport {Graph, Node, OpMapper, ParamValue} from './types';\n\nconst CONTROL_FLOW_OPS = ['Switch', 'Merge', 'Enter', 'Exit', 'NextIteration'];\nconst DYNAMIC_SHAPE_OPS =\n ['NonMaxSuppressionV2', 'NonMaxSuppressionV3', 'Where'];\n\nexport class OperationMapper {\n private static _instance: OperationMapper;\n\n private opMappers: {[key: string]: OpMapper};\n\n // Singleton instance for the mapper\n public static get Instance() {\n return this._instance || (this._instance = new this());\n }\n\n // Loads the op mapping from the JSON file.\n private constructor() {\n const ops = [\n arithmetic, basicMath, control, convolution, creation, dynamic,\n evaluation, logical, image, graph, matrices, normalization, reduction,\n sliceJoin, transformation\n ];\n const mappersJson: OpMapper[] = [].concat.apply([], ops.map(op => op.json));\n\n this.opMappers = mappersJson.reduce<{[key: string]: OpMapper}>(\n (map, mapper: OpMapper) => {\n map[mapper.tfOpName] = mapper;\n return map;\n },\n {});\n }\n\n private isControlFlow(node: tensorflow.INodeDef) {\n return CONTROL_FLOW_OPS.some(op => op === node.op);\n }\n\n private isDynamicShape(node: tensorflow.INodeDef) {\n return DYNAMIC_SHAPE_OPS.some(op => op === node.op);\n }\n // Converts the model from Tensorflow GraphDef to local representation for\n // deeplearn.js API\n transformGraph(graph: tensorflow.IGraphDef): Graph {\n const tfNodes = graph.node;\n let withControlFlow = false;\n let withDynamicShape = false;\n const placeholders: Node[] = [];\n const weights: Node[] = [];\n const nodes = tfNodes.reduce<{[key: string]: Node}>((map, node) => {\n map[node.name] = this.mapNode(node);\n if (this.isControlFlow(node)) withControlFlow = true;\n if (this.isDynamicShape(node)) withDynamicShape = true;\n if (node.op === 'Placeholder') placeholders.push(map[node.name]);\n if (node.op === 'Const') weights.push(map[node.name]);\n return map;\n }, {});\n\n const inputs: 
Node[] = [];\n const outputs: Node[] = [];\n Object.keys(nodes).forEach(key => {\n const node = nodes[key];\n node.inputNames.forEach(name => {\n const [nodeName, ] = getNodeNameAndIndex(name);\n node.inputs.push(nodes[nodeName]);\n nodes[nodeName].children.push(node);\n });\n if (node.inputs.length === 0) inputs.push(node);\n });\n\n Object.keys(nodes).forEach(key => {\n const node = nodes[key];\n if (node.children.length === 0) outputs.push(node);\n });\n\n return {\n nodes,\n inputs,\n outputs,\n weights,\n placeholders,\n withControlFlow,\n withDynamicShape\n };\n }\n\n private mapNode(node: tensorflow.INodeDef): Node {\n const mapper = this.opMappers[node.op];\n if (mapper === undefined) {\n throw new Error('Tensorflow Op is not supported: ' + node.op);\n }\n const newNode: Node = {\n name: node.name,\n op: mapper.dlOpName,\n category: mapper.category,\n inputNames:\n (node.input ||\n []).map(input => input.startsWith('^') ? input.substr(1) : input),\n inputs: [],\n children: [],\n params: {}\n };\n\n if (!!mapper.params) {\n newNode.params = mapper.params.reduce<{[key: string]:\n ParamValue}>((map, param) => {\n const inputIndex = param.tfInputIndex;\n const inputParamLength = param.tfInputParamLength;\n const type = param.type;\n let value = undefined;\n if (inputIndex === undefined) {\n switch (param.type) {\n case 'string':\n value = this.getStringParam(\n node.attr, param.tfParamName, param.defaultValue as string);\n\n if (value === undefined && !!param.tfParamNameDeprecated) {\n value = this.getStringParam(\n node.attr, param.tfParamNameDeprecated,\n param.defaultValue as string);\n }\n break;\n case 'number':\n value = this.getNumberParam(\n node.attr, param.tfParamName, param.defaultValue as number);\n if (value === undefined && !!param.tfParamNameDeprecated) {\n value = this.getNumberParam(\n node.attr, param.tfParamNameDeprecated,\n param.defaultValue as number);\n }\n break;\n case 'number[]':\n value = this.getNumericArrayParam(\n node.attr, param.tfParamName, param.defaultValue as number[]);\n if (value === undefined && !!param.tfParamNameDeprecated) {\n value = this.getNumericArrayParam(\n node.attr, param.tfParamNameDeprecated,\n param.defaultValue as number[]);\n }\n break;\n case 'bool':\n value = this.getBoolParam(\n node.attr, param.tfParamName, param.defaultValue as boolean);\n if (value === undefined && !!param.tfParamNameDeprecated) {\n value = this.getBoolParam(\n node.attr, param.tfParamNameDeprecated,\n param.defaultValue as boolean);\n }\n break;\n case 'shape':\n value = this.getTensorShapeParam(\n node.attr, param.tfParamName, param.defaultValue as number[]);\n if (value === undefined && !!param.tfParamNameDeprecated) {\n value = this.getTensorShapeParam(\n node.attr, param.tfParamNameDeprecated,\n param.defaultValue as number[]);\n }\n break;\n case 'dtype':\n value = this.getDtypeParam(\n node.attr, param.tfParamName, param.defaultValue as DataType);\n if (value === undefined && !!param.tfParamNameDeprecated) {\n value = this.getDtypeParam(\n node.attr, param.tfParamNameDeprecated,\n param.defaultValue as DataType);\n }\n break;\n case 'tensor':\n case 'tensors':\n break;\n default:\n throw new Error(\n `Unsupported param type: ${param.type} for op: ${node.op}`);\n }\n }\n map[param.dlParamName] = {value, inputIndex, type, inputParamLength};\n return map;\n }, {});\n }\n return newNode;\n }\n\n private getStringParam(\n attrs: {[key: string]: tensorflow.IAttrValue}, name: string, def: string,\n keepCase = false): string {\n const param = attrs[name];\n if 
(param !== undefined) {\n const value = String.fromCharCode.apply(null, param.s);\n return keepCase ? value : value.toLowerCase();\n }\n return def;\n }\n\n private getBoolParam(\n attrs: {[key: string]: tensorflow.IAttrValue}, name: string,\n def: boolean): boolean {\n const param = attrs[name];\n return param ? param.b : def;\n }\n\n private getNumberParam(\n attrs: {[key: string]: tensorflow.IAttrValue}, name: string,\n def: number): number {\n const param = attrs[name] as tensorflow.AttrValue;\n const value = (param ? param[param.value] : def) as number | Long;\n return (typeof value === 'number') ? value : value['toInt']() as number;\n }\n private getDtypeParam(\n attrs: {[key: string]: tensorflow.IAttrValue}, name: string,\n def: DataType): DataType {\n const param = attrs[name];\n if (param && param.type) {\n switch (param.type) {\n case tensorflow.DataType.DT_FLOAT:\n return 'float32';\n case tensorflow.DataType.DT_INT32:\n return 'int32';\n case tensorflow.DataType.DT_BOOL:\n return 'bool';\n default:\n return def;\n }\n }\n return def;\n }\n private getTensorShapeParam(\n attrs: {[key: string]: tensorflow.IAttrValue}, name: string,\n def?: number[]): number[]|undefined {\n const param = attrs[name];\n if (param && param.shape) {\n return param.shape.dim.map(\n dim =>\n (typeof dim.size === 'number') ? dim.size : dim.size['toInt']());\n }\n return def;\n }\n\n private getNumericArrayParam(\n attrs: {[key: string]: tensorflow.IAttrValue}, name: string,\n def: number[]): number[] {\n const param = attrs[name];\n if (param) {\n return ((param.list.f && param.list.f.length ? param.list.f :\n param.list.i))\n .map(v => (typeof v === 'number') ? v : v['toInt']()) as\n number[];\n }\n return def;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'add': {\n return [tfc.add(\n (getParamValue('a', node, tensorMap, context) as tfc.Tensor),\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'addN': {\n return [tfc.addN((\n getParamValue('tensors', node, tensorMap, context) as tfc.Tensor[]))];\n }\n case 'mod':\n return [tfc.mod(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n case 'mul':\n return [tfc.mul(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n case 'div': {\n return [tfc.div(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'floorDiv': {\n return [tfc.floorDiv(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'sub': {\n return [tfc.sub(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'minimum': {\n return [tfc.minimum(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'maximum': {\n return [tfc.maximum(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'pow': {\n return [tfc.pow(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'squaredDifference': {\n return [tfc.squaredDifference(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'arithmetic';\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue, getTensor} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'abs':\n return [tfc.abs(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'acos':\n return [tfc.acos(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'acosh':\n return [tfc.acosh(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'asin':\n return [tfc.asin(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'asinh':\n return [tfc.asinh(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'atan':\n return [tfc.atan(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'atan2':\n return [tfc.atan2(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n getParamValue('y', node, tensorMap, context) as tfc.Tensor)];\n case 'atanh':\n return [tfc.atanh(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'ceil':\n return [tfc.ceil(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'cos':\n return [tfc.cos(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'cosh':\n return [tfc.cosh(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'elu':\n return [tfc.elu(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'erf':\n return [tfc.erf(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'exp':\n return [tfc.exp(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'expm1': {\n return [tfc.expm1(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'floor':\n return [tfc.floor(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'log':\n return [tfc.log(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'log1p': {\n return [tfc.log1p(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'neg':\n return [tfc.neg(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'reciprocal': {\n return [tfc.reciprocal(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'relu':\n return [tfc.relu(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'round': {\n return [tfc.round(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'selu':\n return [tfc.selu(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'sigmoid':\n return [tfc.sigmoid(\n getParamValue('x', 
node, tensorMap, context) as tfc.Tensor)];\n case 'sin':\n return [tfc.sin(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'sign': {\n return [tfc.sign(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'sinh': {\n return [tfc.sinh(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'softplus': {\n return [tfc.softplus(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'sqrt': {\n return [tfc.sqrt(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'square': {\n return [tfc.square(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'tanh': {\n return [tfc.tanh(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'tan':\n return [tfc.tan(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n case 'clipByValue':\n return [tfc.clipByValue(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n getParamValue('clipValueMin', node, tensorMap, context) as number,\n getParamValue('clipValueMax', node, tensorMap, context) as number)];\n case 'rsqrt':\n return [tfc.div(\n tfc.scalar(1.0, 'float32'),\n tfc.sqrt(getTensor(node.inputNames[0], tensorMap, context)))];\n case 'prod':\n return [tfc.prod(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n getParamValue('axes', node, tensorMap, context) as number[])];\n case 'leakyRelu':\n return [tfc.leakyRelu(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n getParamValue('alpha', node, tensorMap, context) as number)];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'basic_math';\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n// tslint:disable-next-line:max-line-length\nimport {concat, DataType, slice, stack, Tensor, tensor, tidy, unstack, util} from '@tensorflow/tfjs-core';\n\nexport interface TensorWithState {\n tensor?: Tensor;\n written?: boolean;\n read?: boolean;\n cleared?: boolean;\n}\n/**\n * The TensorArray object keeps an array of Tensors. 
It\n * allows reading from the array and writing to the array.\n */\nexport class TensorArray {\n private static nextId = 0;\n private tensors: TensorWithState[] = [];\n private closed_ = false;\n readonly id: number;\n constructor(\n public readonly name: string, public readonly dtype: DataType,\n private maxSize: number, private elementShape: number[],\n public readonly identicalElementShapes: boolean,\n public readonly dynamicSize: boolean,\n public readonly clearAfterRead: boolean) {\n this.id = TensorArray.nextId++;\n }\n\n get closed() {\n return this.closed_;\n }\n\n /**\n * Close the current TensorArray.\n */\n clearAndClose() {\n this.tensors.forEach(tensor => tensor.tensor.dispose());\n this.tensors = [];\n this.closed_ = true;\n }\n\n size(): number {\n return this.tensors.length;\n }\n\n /**\n * Read the value at location index in the TensorArray.\n * @param index Number the index to read from.\n */\n read(index: number): Tensor {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n\n if (index < 0 || index >= this.tensors.length) {\n throw new Error(`Tried to read from index ${index}, but array size is: ${\n this.tensors.length}`);\n }\n\n const tensorWithState = this.tensors[index];\n if (tensorWithState.cleared) {\n throw new Error(\n `TensorArray ${this.name}: Could not read index ${\n index} twice because it was cleared after a previous read ` +\n `(perhaps try setting clear_after_read = false?).`);\n }\n\n if (this.clearAfterRead) {\n tensorWithState.cleared = true;\n }\n\n tensorWithState.read = true;\n return tensorWithState.tensor;\n }\n\n /**\n * Helper method to read multiple tensors from the specified indices.\n */\n readMany(indices: number[]): Tensor[] {\n return indices.map(index => this.read(index));\n }\n\n /**\n * Write value into the index of the TensorArray.\n * @param index number the index to write to.\n * @param tensor\n */\n write(index: number, tensor: Tensor) {\n if (this.closed_) {\n throw new Error(`TensorArray ${this.name} has already been closed.`);\n }\n\n if (index < 0 || !this.dynamicSize && index >= this.maxSize) {\n throw new Error(`Tried to write to index ${\n index}, but array is not resizeable and size is: ${this.maxSize}`);\n }\n\n const t = this.tensors[index] || {};\n\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray ${\n this.name}: Could not write to TensorArray index ${index},\n because the value dtype is ${\n tensor.dtype}, but TensorArray dtype is ${this.dtype}.`);\n }\n\n // Set the shape for the first time write to unknow shape tensor array\n if (this.size() === 0 && this.elementShape.length === 0) {\n this.elementShape = tensor.shape;\n }\n\n this.assertShapesMatch(\n this.elementShape, tensor.shape,\n `TensorArray ${this.name}: Could not write to TensorArray index ${\n index}.`);\n\n if (t && t.read) {\n throw new Error(\n `TensorArray ${this.name}: Could not write to TensorArray index ${\n index}, because it has already been read.`);\n }\n\n if (t && t.written) {\n throw new Error(\n `TensorArray ${this.name}: Could not write to TensorArray index ${\n index}, because it has already been written.`);\n }\n\n t.tensor = tensor;\n t.written = true;\n\n this.tensors[index] = t;\n }\n\n /**\n * Helper method to write multiple tensors to the specified indices.\n */\n writeMany(indices: number[], tensors: Tensor[]) {\n if (indices.length !== tensors.length) {\n throw new Error(\n `TensorArray ${this.name}: could not write multiple tensors,` +\n `because the index size: 
${\n indices.length} is not the same as tensors size: ${\n tensors.length}.`);\n }\n\n indices.forEach((i, index) => this.write(i, tensors[index]));\n }\n\n /**\n * Return selected values in the TensorArray as a packed Tensor. All of\n * selected values must have been written and their shapes must all match.\n * @param [indices] number[] Optional. Taking values in [0, max_value). If the\n * TensorArray is not dynamic, max_value=size(). If not specified returns\n * all tensors in the original order.\n * @param [dtype]\n */\n gather(indices?: number[], dtype?: DataType): Tensor {\n if (!!dtype && dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${\n this.dtype} but gather requested dtype ${dtype}`);\n }\n\n if (!indices) {\n indices = [];\n for (let i = 0; i < this.size(); i++) {\n indices.push(i);\n }\n }\n\n if (indices.length === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n\n // Read all the PersistentTensors into a vector to keep track of\n // their memory.\n const tensors = this.readMany(indices);\n\n this.assertShapesMatch(\n this.elementShape, tensors[0].shape, 'TensorArray shape mismatch: ');\n\n return stack(tensors, 0);\n }\n\n /**\n * Return the values in the TensorArray as a concatenated Tensor.\n */\n concat(dtype?: DataType): Tensor {\n if (!!dtype && dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${\n this.dtype} but concat requested dtype ${dtype}`);\n }\n\n if (this.size() === 0) {\n return tensor([], [0].concat(this.elementShape));\n }\n\n const indices = [];\n for (let i = 0; i < this.size(); i++) {\n indices.push(i);\n }\n // Collect all the tensors from the tensors array.\n const tensors = this.readMany(indices);\n\n this.assertShapesMatch(\n this.elementShape, tensors[0].shape,\n `TensorArray shape mismatch: tensor array shape (${\n this.elementShape}) vs first tensor shape (${tensors[0].shape})`);\n\n return concat(tensors, 0);\n }\n\n /**\n * Scatter the values of a Tensor in specific indices of a TensorArray.\n * @param indices nummber[] values in [0, max_value). If the\n * TensorArray is not dynamic, max_value=size().\n * @param tensor Tensor input tensor.\n */\n scatter(indices: number[], tensor: Tensor) {\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${\n this.dtype} but tensor has dtype ${tensor.dtype}`);\n }\n\n if (indices.length !== tensor.shape[0]) {\n throw new Error(`Expected len(indices) == tensor.shape[0], but saw: ${\n indices.length} vs. ${tensor.shape[0]}`);\n }\n\n const maxIndex = Math.max(...indices);\n\n if (!this.dynamicSize && maxIndex >= this.maxSize) {\n throw new Error(\n `Max index must be < array size (${maxIndex} vs. 
${this.maxSize})`);\n }\n\n this.writeMany(indices, unstack(tensor, 0));\n }\n\n /**\n * Split the values of a Tensor into the TensorArray.\n * @param length number[] with the lengths to use when splitting value along\n * its first dimension.\n * @param tensor Tensor, the tensor to split.\n */\n split(length: number[], tensor: Tensor) {\n if (tensor.dtype !== this.dtype) {\n throw new Error(`TensorArray dtype is ${\n this.dtype} but tensor has dtype ${tensor.dtype}`);\n }\n let totalLength = 0;\n const cumulativeLengths = length.map(len => {\n totalLength += len;\n return totalLength;\n });\n\n if (totalLength !== tensor.shape[0]) {\n throw new Error(`Expected sum of lengths to be equal to\n tensor.shape[0], but sum of lengths is\n ${totalLength}, and tensor's shape is: ${tensor.shape}`);\n }\n\n if (!this.dynamicSize && length.length !== this.maxSize) {\n throw new Error(\n `TensorArray's size is not equal to the size of lengths (${\n this.maxSize} vs. ${length.length}), ` +\n 'and the TensorArray is not marked as dynamically resizeable');\n }\n\n const elementPerRow = totalLength === 0 ? 0 : tensor.size / totalLength;\n const tensors: Tensor[] = [];\n tidy(() => {\n tensor = tensor.reshape([1, totalLength, elementPerRow]);\n for (let i = 0; i < length.length; ++i) {\n const previousLength = (i === 0) ? 0 : cumulativeLengths[i - 1];\n const indices = [0, previousLength, 0];\n const sizes = [1, length[i], elementPerRow];\n tensors[i] = slice(tensor, indices, sizes).reshape(this.elementShape);\n }\n return tensors;\n });\n const indices = [];\n for (let i = 0; i < length.length; i++) {\n indices[i] = i;\n }\n this.writeMany(indices, tensors);\n }\n\n private assertShapesMatch(\n shapeA: number[], shapeB: number[], errorMessagePrefix = ''): void {\n util.assert(\n this.arraysEqual(shapeA, shapeB),\n errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n }\n\n private arraysEqual(n1: number[], n2: number[]) {\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i = 0; i < n1.length; i++) {\n if (n1[i] !== -1 && n2[i] !== -1 && n1[i] !== n2[i]) {\n return false;\n }\n }\n return true;\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\nimport {scalar} from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {TensorArray} from '../../executor/tensor_array';\nimport {Node} from '../types';\n\nimport {getParamValue, getTensor} from './utils';\n\nexport async function executeOp(\n node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext): Promise<tfc.Tensor[]> {\n switch (node.op) {\n case 'loopCond':\n return [\n (getParamValue('pred', node, tensorMap, context) as tfc.Tensor).clone()\n ];\n case 'switch': {\n const pred =\n getParamValue('pred', node, tensorMap, context) as tfc.Tensor;\n const data =\n getParamValue('data', node, tensorMap, context) as tfc.Tensor;\n // Outputs nodes :0 => false, :1 => true\n return (await pred.data())[0] ? [undefined, data.clone()] :\n [data.clone(), undefined];\n }\n case 'merge':\n const inputName = node.inputNames.find(\n name => getTensor(name, tensorMap, context) !== undefined);\n return inputName ? [getTensor(inputName, tensorMap, context).clone()] :\n undefined;\n\n case 'enter':\n const frameId =\n getParamValue('frameName', node, tensorMap, context) as string;\n const data =\n getParamValue('tensor', node, tensorMap, context) as tfc.Tensor;\n context.enterFrame(frameId);\n return [data.clone()];\n\n case 'exit':\n const tensor =\n getParamValue('tensor', node, tensorMap, context) as tfc.Tensor;\n context.exitFrame();\n return [tensor.clone()];\n\n case 'nextIteration':\n const input =\n getParamValue('tensor', node, tensorMap, context) as tfc.Tensor;\n context.nextIteration();\n return [input.clone()];\n\n case 'tensorArray':\n const size = getParamValue('size', node, tensorMap, context) as number;\n const dtype =\n getParamValue('dtype', node, tensorMap, context) as tfc.DataType;\n const elementShape =\n getParamValue('elementShape', node, tensorMap, context) as number[];\n const dynamicSize =\n getParamValue('dynamicSize', node, tensorMap, context) as boolean;\n const clearAfterRead =\n getParamValue('clearAfterRead', node, tensorMap, context) as boolean;\n const identicalElementShapes =\n getParamValue('identicalElementShapes', node, tensorMap, context) as\n boolean;\n const name = getParamValue('name', node, tensorMap, context) as string;\n const tensorArray = new TensorArray(\n name, dtype, size, elementShape, identicalElementShapes, dynamicSize,\n clearAfterRead);\n context.addTensorArray(tensorArray);\n return [scalar(tensorArray.id), scalar(1.0)];\n\n case 'tensorArrayWrite':\n const id =\n getParamValue('tensorArrayId', node, tensorMap, context) as number;\n const index = getParamValue('index', node, tensorMap, context) as number;\n const writeTensor =\n getParamValue('tensor', node, tensorMap, context) as tfc.Tensor;\n const writeTensorArray = 
context.getTensorArray(id);\n writeTensorArray.write(index, writeTensor);\n return [scalar(1.0)];\n\n case 'tensorArrayRead':\n const readId =\n getParamValue('tensorArrayId', node, tensorMap, context) as number;\n const readIndex =\n getParamValue('index', node, tensorMap, context) as number;\n const readTensorArray = context.getTensorArray(readId);\n return [readTensorArray.read(readIndex)];\n\n case 'tensorArrayGather':\n const gatherId =\n getParamValue('tensorArrayId', node, tensorMap, context) as number;\n const gatherIndices =\n getParamValue('indices', node, tensorMap, context) as number[];\n const gatherDtype =\n getParamValue('dtype', node, tensorMap, context) as tfc.DataType;\n const gatherTensorArray = context.getTensorArray(gatherId);\n return [gatherTensorArray.gather(gatherIndices, gatherDtype)];\n\n case 'tensorArrayScatter':\n const scatterId =\n getParamValue('tensorArrayId', node, tensorMap, context) as number;\n const scatterIndices =\n getParamValue('indices', node, tensorMap, context) as number[];\n const scatterTensor =\n getParamValue('tensor', node, tensorMap, context) as tfc.Tensor;\n const scatterTensorArray = context.getTensorArray(scatterId);\n scatterTensorArray.scatter(scatterIndices, scatterTensor);\n return [scalar(1.0)];\n\n case 'tensorArrayConcat':\n const concatId =\n getParamValue('tensorArrayId', node, tensorMap, context) as number;\n const concatTensorArray = context.getTensorArray(concatId);\n const concatDtype =\n getParamValue('dtype', node, tensorMap, context) as tfc.DataType;\n return [concatTensorArray.concat(concatDtype)];\n\n case 'tensorArraySplit':\n const splitId =\n getParamValue('tensorArrayId', node, tensorMap, context) as number;\n const splitTensor =\n getParamValue('tensor', node, tensorMap, context) as tfc.Tensor;\n const lengths =\n getParamValue('lengths', node, tensorMap, context) as number[];\n const splitTensorArray = context.getTensorArray(splitId);\n splitTensorArray.split(lengths, splitTensor);\n return [scalar(1.0)];\n\n case 'tensorArraySize':\n const sizeId =\n getParamValue('tensorArrayId', node, tensorMap, context) as number;\n const sizeTensorArray = context.getTensorArray(sizeId);\n return [scalar(sizeTensorArray.size(), 'int32')];\n\n case 'tensorArrayClose':\n const closeId =\n getParamValue('tensorArrayId', node, tensorMap, context) as number;\n const closeTensorArray = context.getTensorArray(closeId);\n closeTensorArray.clearAndClose();\n return [];\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n}\n\nexport const CATEGORY = 'control';\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor =\n (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext): tfc.Tensor[] => {\n switch (node.op) {\n case 'conv1d': {\n const stride =\n getParamValue('stride', node, tensorMap, context) as number;\n const pad = getParamValue('pad', node, tensorMap, context);\n const dataFormat =\n (getParamValue('dataFormat', node, tensorMap, context) as string)\n .toUpperCase();\n const dilation =\n getParamValue('dilation', node, tensorMap, context) as number;\n return [tfc.conv1d(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor3D,\n getParamValue('filter', node, tensorMap, context) as tfc.Tensor3D,\n stride, pad as 'valid' | 'same', dataFormat as 'NWC' | 'NCW',\n dilation)];\n }\n case 'conv2d': {\n const stride =\n getParamValue('strides', node, tensorMap, context) as number[];\n const pad = getParamValue('pad', node, tensorMap, context);\n const dataFormat =\n (getParamValue('dataFormat', node, tensorMap, context) as string)\n .toUpperCase();\n const dilations =\n getParamValue('dilations', node, tensorMap, context) as number[];\n return [tfc.conv2d(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor3D |\n tfc.Tensor4D,\n getParamValue('filter', node, tensorMap, context) as tfc.Tensor4D,\n [stride[1], stride[2]], pad as 'valid' | 'same',\n dataFormat as 'NHWC' | 'NCHW', [dilations[0], dilations[1]])];\n }\n case 'conv2dTranspose': {\n const shape = getParamValue(\n 'outputShape', node, tensorMap,\n context) as [number, number, number] |\n [number, number, number, number];\n const stride =\n getParamValue('strides', node, tensorMap, context) as number[];\n const pad = getParamValue('pad', node, tensorMap, context);\n return [tfc.conv2dTranspose(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor3D |\n tfc.Tensor4D,\n getParamValue('filter', node, tensorMap, context) as tfc.Tensor4D,\n shape, [stride[1], stride[2]], pad as 'valid' | 'same')];\n }\n case 'depthwiseConv2d': {\n const stride =\n getParamValue('strides', node, tensorMap, context) as number[];\n const pad = getParamValue('pad', node, tensorMap, context);\n const dilations =\n getParamValue('dilations', node, tensorMap, context) as number[];\n const dataFormat =\n (getParamValue('dataFormat', node, tensorMap, context) as string)\n .toUpperCase();\n\n return [tfc.depthwiseConv2d(\n getParamValue('input', node, tensorMap, context) as tfc.Tensor3D |\n tfc.Tensor4D,\n getParamValue('filter', node, tensorMap, context) as tfc.Tensor4D,\n [stride[1], stride[2]], pad as 'valid' | 'same',\n dataFormat as 'NHWC' | 'NCHW', 
[dilations[0], dilations[1]])];\n }\n\n case 'avgPool': {\n const stride =\n getParamValue('strides', node, tensorMap, context) as number[];\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize =\n getParamValue('kernelSize', node, tensorMap, context) as number[];\n\n return [tfc.avgPool(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor3D |\n tfc.Tensor4D,\n [kernelSize[1], kernelSize[2]], [stride[1], stride[2]],\n pad as 'valid' | 'same')];\n }\n\n case 'maxPool': {\n const stride =\n getParamValue('strides', node, tensorMap, context) as number[];\n const pad = getParamValue('pad', node, tensorMap, context);\n const kernelSize =\n getParamValue('kernelSize', node, tensorMap, context) as number[];\n\n return [tfc.maxPool(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor3D |\n tfc.Tensor4D,\n [kernelSize[1], kernelSize[2]], [stride[1], stride[2]],\n pad as 'valid' | 'same')];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n };\n\nexport const CATEGORY = 'convolution';\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'fill': {\n const shape =\n getParamValue('shape', node, tensorMap, context) as number[];\n const dtype =\n getParamValue('dtype', node, tensorMap, context) as tfc.DataType;\n const value = getParamValue('value', node, tensorMap, context) as number;\n return [tfc.fill(shape, value, dtype)];\n }\n case 'linspace': {\n const start = getParamValue('start', node, tensorMap, context) as number;\n const stop = getParamValue('stop', node, tensorMap, context) as number;\n const num = getParamValue('num', node, tensorMap, context) as number;\n return [tfc.linspace(start, stop, num)];\n }\n case 'oneHot': {\n const indices =\n getParamValue('indices', node, tensorMap, context) as tfc.Tensor1D;\n const depth = getParamValue('depth', node, tensorMap, context) as number;\n const onValue =\n getParamValue('onValue', node, tensorMap, context) as number;\n const offValue =\n getParamValue('offValue', node, tensorMap, context) as number;\n return [tfc.oneHot(indices, depth, onValue, offValue)];\n }\n case 'ones': {\n return [tfc.ones(\n getParamValue('shape', node, tensorMap, context) as number[],\n getParamValue('dtype', node, tensorMap, context) as tfc.DataType)];\n }\n case 'onesLike': {\n return [tfc.onesLike(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'randomUniform': {\n return 
[tfc.randomUniform(\n // tslint:disable-next-line:no-any\n getParamValue('shape', node, tensorMap, context) as any,\n getParamValue('minval', node, tensorMap, context) as number,\n getParamValue('maxval', node, tensorMap, context) as number,\n getParamValue('dtype', node, tensorMap, context) as tfc.DataType)];\n }\n case 'range': {\n const start = getParamValue('start', node, tensorMap, context) as number;\n const stop = getParamValue('stop', node, tensorMap, context) as number;\n const step = getParamValue('step', node, tensorMap, context) as number;\n return [tfc.range(\n start, stop, step,\n getParamValue('dtype', node, tensorMap, context) as 'float32' |\n 'int32')];\n }\n case 'truncatedNormal': {\n const shape =\n getParamValue('shape', node, tensorMap, context) as number[];\n const mean = getParamValue('mean', node, tensorMap, context) as number;\n const stdDev =\n getParamValue('stdDev', node, tensorMap, context) as number;\n const seed = getParamValue('seed', node, tensorMap, context) as number;\n return [tfc.truncatedNormal(\n shape, mean, stdDev,\n getParamValue('dtype', node, tensorMap, context) as 'float32' |\n 'int32',\n seed)];\n }\n case 'zeros': {\n return [tfc.zeros(\n getParamValue('shape', node, tensorMap, context) as number[],\n getParamValue('dtype', node, tensorMap, context) as tfc.DataType)];\n }\n case 'zerosLike': {\n return [tfc.zerosLike(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'creation';\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\nimport {getParamValue} from './utils';\n\nexport async function executeOp(\n node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext): Promise<tfc.Tensor[]> {\n switch (node.op) {\n case 'nonMaxSuppression': {\n const boxes =\n getParamValue('boxes', node, tensorMap, context) as tfc.Tensor;\n const scores =\n getParamValue('scores', node, tensorMap, context) as tfc.Tensor;\n const maxOutputSize =\n getParamValue('maxOutputSize', node, tensorMap, context) as number;\n const iouThreshold =\n getParamValue('iouThreshold', node, tensorMap, context) as number;\n const scoreThreshold =\n getParamValue('scoreThreshold', node, tensorMap, context) as number;\n return [await tfc.image.nonMaxSuppressionAsync(\n boxes as tfc.Tensor2D, scores as tfc.Tensor1D, maxOutputSize,\n iouThreshold, scoreThreshold)];\n }\n case 'whereAsync': {\n return [await tfc.whereAsync(\n getParamValue('condition', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'setdiff1dAsync': {\n return await tfc.setdiff1dAsync(\n getParamValue('x', node, 
tensorMap, context) as tfc.Tensor,\n getParamValue('y', node, tensorMap, context) as tfc.Tensor);\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n}\n\nexport const CATEGORY = 'dynamic';\n","/**\n * @license\n * Copyright 2018 Google Inc. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor =\n (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext): tfc.Tensor[] => {\n switch (node.op) {\n case 'topK': {\n const x = getParamValue('x', node, tensorMap, context) as tfc.Tensor;\n const k = getParamValue('k', node, tensorMap, context) as number;\n const sorted =\n getParamValue('sorted', node, tensorMap, context) as boolean;\n const result = tfc.topk(x, k, sorted);\n return [result.values, result.indices];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n };\n\nexport const CATEGORY = 'evaluation';\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue, getTensor} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'const': {\n return tensorMap[node.name];\n }\n case 'placeholder':\n const def =\n getParamValue('default', node, tensorMap, context) as tfc.Tensor;\n return [getTensor(node.name, tensorMap, context) || def];\n case 'identity':\n case 'stopGradient':\n case 'fakeQuantWithMinMaxVars': // This op is currently ignored.\n return [\n (getParamValue('x', node, tensorMap, context) as tfc.Tensor).clone()\n ];\n case 'snapshot':\n const snapshot =\n (getParamValue('x', node, tensorMap, context) as tfc.Tensor);\n return [snapshot.clone()];\n case 'shape':\n return [tfc.tensor1d(\n (getParamValue('x', node, tensorMap, context) as tfc.Tensor).shape,\n 'int32')];\n case 'shapeN':\n return (getParamValue('x', node, tensorMap, context) as tfc.Tensor[])\n .map((t: tfc.Tensor) => tfc.tensor1d(t.shape));\n case 'size':\n return [tfc.scalar(\n (getParamValue('x', node, tensorMap, context) as tfc.Tensor).size,\n 'int32')];\n case 'rank':\n return [tfc.scalar(\n (getParamValue('x', node, tensorMap, context) as tfc.Tensor).rank,\n 'int32')];\n case 'noop':\n return [];\n case 'print':\n const input = getParamValue('x', node, tensorMap, context) as tfc.Tensor;\n const data =\n getParamValue('data', node, tensorMap, context) as tfc.Tensor[];\n const message =\n getParamValue('message', node, tensorMap, context) as string;\n const summarize =\n getParamValue('summarize', node, tensorMap, context) as number;\n console.warn(\n 'The graph has a tf.print() operation, ' +\n 'usually used for debugging, which slows down performance.');\n console.log(message);\n for (let i = 0; i < data.length; i++) {\n console.log(\n Array.prototype.slice.call(data[i].dataSync()).slice(0, summarize));\n }\n return [input];\n\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'graph';\n","/**\n * @license\n * Copyright 2018 Google Inc. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'resizeBilinear': {\n const images =\n getParamValue('images', node, tensorMap, context) as tfc.Tensor;\n const size = getParamValue('size', node, tensorMap, context) as number[];\n const alignCorners =\n getParamValue('alignCorners', node, tensorMap, context) as boolean;\n return [tfc.image.resizeBilinear(\n images as tfc.Tensor3D | tfc.Tensor4D, [size[0], size[1]],\n alignCorners)];\n }\n case 'resizeNearestNeighbor': {\n const images =\n getParamValue('images', node, tensorMap, context) as tfc.Tensor;\n const size = getParamValue('size', node, tensorMap, context) as number[];\n const alignCorners =\n getParamValue('alignCorners', node, tensorMap, context) as boolean;\n return [tfc.image.resizeNearestNeighbor(\n images as tfc.Tensor3D | tfc.Tensor4D, [size[0], size[1]],\n alignCorners)];\n }\n case 'cropAndResize': {\n const image =\n getParamValue('image', node, tensorMap, context) as tfc.Tensor;\n const boxes =\n getParamValue('boxes', node, tensorMap, context) as tfc.Tensor;\n const boxInd =\n getParamValue('boxInd', node, tensorMap, context) as tfc.Tensor;\n const cropSize =\n getParamValue('cropSize', node, tensorMap, context) as number[];\n const method =\n getParamValue('method', node, tensorMap, context) as string;\n const extrapolationValue =\n getParamValue('extrapolationValue', node, tensorMap, context) as\n number;\n return [tfc.image.cropAndResize(\n image as tfc.Tensor4D, boxes as tfc.Tensor2D, boxInd as tfc.Tensor1D,\n cropSize as [number, number], method as 'bilinear' | 'nearest',\n extrapolationValue)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'image';\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'equal': {\n return [tfc.equal(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'notEqual': {\n return [tfc.notEqual(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'greater': {\n return [tfc.greater(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'greaterEqual': {\n return [tfc.greaterEqual(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'less': {\n return [tfc.less(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'lessEqual': {\n return [tfc.lessEqual(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'logicalAnd': {\n return [tfc.logicalAnd(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'logicalNot': {\n return [tfc.logicalNot(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'logicalOr': {\n return [tfc.logicalOr(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'where': {\n return [tfc.where(\n getParamValue('condition', node, tensorMap, context) as tfc.Tensor,\n getParamValue('a', node, tensorMap, context) as tfc.Tensor,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'logical';\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'matMul':\n return [tfc.matMul(\n getParamValue('a', node, tensorMap, context) as tfc.Tensor2D,\n getParamValue('b', node, tensorMap, context) as tfc.Tensor2D,\n getParamValue('transposeA', node, tensorMap, context) as boolean,\n getParamValue('transposeB', node, tensorMap, context) as boolean)];\n case 'transpose':\n return [tfc.transpose(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n getParamValue('perm', node, tensorMap, context) as number[])];\n\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'matrices';\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'batchNormalization': {\n return [tfc.batchNormalization(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n getParamValue('mean', node, tensorMap, context) as tfc.Tensor,\n getParamValue('variance', node, tensorMap, context) as tfc.Tensor,\n getParamValue('epsilon', node, tensorMap, context) as number,\n getParamValue('scale', node, tensorMap, context) as tfc.Tensor,\n getParamValue('offset', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'localResponseNormalization': {\n return [tfc.localResponseNormalization(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor3D |\n tfc.Tensor4D,\n getParamValue('radius', node, tensorMap, context) as number,\n 
getParamValue('bias', node, tensorMap, context) as number,\n getParamValue('alpha', node, tensorMap, context) as number,\n getParamValue('beta', node, tensorMap, context) as number)];\n }\n case 'softmax': {\n return [tfc.softmax(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'logSoftmax': {\n return [tfc.logSoftmax(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor)];\n }\n case 'sparseToDense': {\n return [tfc.sparseToDense(\n getParamValue('sparseIndices', node, tensorMap, context) as\n tfc.Tensor,\n getParamValue('outputShape', node, tensorMap, context) as tfc.Tensor,\n getParamValue('sparseValues', node, tensorMap, context) as number[],\n getParamValue('defaultValue', node, tensorMap, context) as\n tfc.Scalar)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'normalization';\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'max': {\n const axis = getParamValue('axis', node, tensorMap, context) as number[];\n const keepDims =\n getParamValue('keepDims', node, tensorMap, context) as boolean;\n return [tfc.max(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis,\n keepDims)];\n }\n case 'mean': {\n const axis = getParamValue('axis', node, tensorMap, context) as number[];\n const keepDims =\n getParamValue('keepDims', node, tensorMap, context) as boolean;\n return [tfc.mean(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis,\n keepDims)];\n }\n case 'min': {\n const axis = getParamValue('axis', node, tensorMap, context) as number[];\n const keepDims =\n getParamValue('keepDims', node, tensorMap, context) as boolean;\n return [tfc.min(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis,\n keepDims)];\n }\n case 'sum': {\n const axis = getParamValue('axis', node, tensorMap, context) as number[];\n const keepDims =\n getParamValue('keepDims', node, tensorMap, context) as boolean;\n return [tfc.sum(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis,\n keepDims)];\n }\n case 'all': {\n const axis = getParamValue('axis', node, tensorMap, context) as number[];\n const keepDims =\n getParamValue('keepDims', node, tensorMap, context) as boolean;\n return [tfc.all(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis,\n keepDims)];\n }\n case 'any': {\n const axis = getParamValue('axis', node, tensorMap, context) as 
number[];\n const keepDims =\n getParamValue('keepDims', node, tensorMap, context) as boolean;\n return [tfc.any(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis,\n keepDims)];\n }\n case 'argMax': {\n const axis = getParamValue('axis', node, tensorMap, context) as number;\n return [tfc.argMax(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis)];\n }\n case 'argMin': {\n const axis = getParamValue('axis', node, tensorMap, context) as number;\n return [tfc.argMin(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis)];\n }\n case 'prod': {\n const axis = getParamValue('axis', node, tensorMap, context) as number[];\n const keepDims =\n getParamValue('keepDims', node, tensorMap, context) as boolean;\n return [tfc.prod(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis,\n keepDims)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'reduction';\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'concat': {\n const axis = getParamValue('axis', node, tensorMap, context) as number;\n const inputs =\n getParamValue('tensors', node, tensorMap, context) as tfc.Tensor[];\n return [tfc.concat(inputs, axis)];\n }\n case 'gather': {\n const axis = getParamValue('axis', node, tensorMap, context) as number;\n const input = getParamValue('x', node, tensorMap, context) as tfc.Tensor;\n const indices =\n getParamValue('indices', node, tensorMap, context) as tfc.Tensor1D;\n return [tfc.gather(input, indices, axis)];\n }\n case 'reverse': {\n const axis = getParamValue('axis', node, tensorMap, context) as number[];\n const input = getParamValue('x', node, tensorMap, context) as tfc.Tensor;\n return [tfc.reverse(input, axis)];\n }\n case 'slice': {\n // tslint:disable-next-line:no-any\n const begin = getParamValue('begin', node, tensorMap, context) as any;\n // tslint:disable-next-line:no-any\n const size = getParamValue('size', node, tensorMap, context) as any;\n return [tfc.slice(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, begin,\n size)];\n }\n case 'stridedSlice': {\n const begin =\n getParamValue('begin', node, tensorMap, context) as number[];\n const end = getParamValue('end', node, tensorMap, context) as number[];\n const strides =\n getParamValue('strides', node, tensorMap, context) as number[];\n const beginMask =\n getParamValue('beginMask', node, tensorMap, 
context) as number;\n const endMask =\n getParamValue('endMask', node, tensorMap, context) as number;\n const ellipsisMask =\n getParamValue('ellipsisMask', node, tensorMap, context) as number;\n const newAxisMask =\n getParamValue('newAxisMask', node, tensorMap, context) as number;\n const shrinkAxisMask =\n getParamValue('shrinkAxisMask', node, tensorMap, context) as number;\n const tensor = getParamValue('x', node, tensorMap, context) as tfc.Tensor;\n if (begin.length === 1 && tensor.shape.length > 1) {\n for (let i = 1; i < tensor.shape.length; i++) {\n begin.push(0);\n end.push(tensor.shape[i]);\n strides.push(strides[0]);\n }\n }\n return [tfc.stridedSlice(\n tensor, begin, end, strides, beginMask, endMask, ellipsisMask,\n newAxisMask, shrinkAxisMask)];\n }\n case 'stack': {\n return tfc.tidy(() => {\n const axis = getParamValue('axis', node, tensorMap, context) as number;\n const tensors =\n getParamValue('tensors', node, tensorMap, context) as tfc.Tensor[];\n // Reshape the tensors to the first tensor's shape if they don't match.\n const shape = tensors[0].shape;\n const squeezedShape = tensors[0].squeeze().shape;\n const mapped = tensors.map(tensor => {\n const sameShape = tfc.util.arraysEqual(tensor.shape, shape);\n if (!sameShape &&\n !tfc.util.arraysEqual(tensor.squeeze().shape, squeezedShape)) {\n throw new Error('the input tensors shape does not match');\n }\n return sameShape ? tensor : tensor.reshape(shape);\n });\n return [tfc.stack(mapped, axis)];\n });\n }\n case 'unstack': {\n return tfc.tidy(() => {\n const axis = getParamValue('axis', node, tensorMap, context) as number;\n const tensor =\n getParamValue('tensor', node, tensorMap, context) as tfc.Tensor;\n return tfc.unstack(tensor, axis);\n });\n }\n case 'tile': {\n const reps = getParamValue('reps', node, tensorMap, context) as number[];\n return [tfc.tile(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, reps)];\n }\n case 'split': {\n const axis = getParamValue('axis', node, tensorMap, context) as number;\n const numOrSizeSplits =\n getParamValue('numOrSizeSplits', node, tensorMap, context) as number |\n number[];\n return tfc.split(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n numOrSizeSplits, axis);\n }\n case 'scatterNd': {\n const indices =\n getParamValue('indices', node, tensorMap, context) as tfc.Tensor;\n const values =\n getParamValue('values', node, tensorMap, context) as tfc.Tensor;\n const shape =\n getParamValue('shape', node, tensorMap, context) as number[];\n return [tfc.scatterND(indices, values, shape)];\n }\n case 'gatherNd': {\n const x = getParamValue('x', node, tensorMap, context) as tfc.Tensor;\n const indices =\n getParamValue('indices', node, tensorMap, context) as tfc.Tensor;\n return [tfc.gatherND(x, indices)];\n }\n case 'sparseToDense': {\n const indices =\n getParamValue('sparseIndices', node, tensorMap, context) as\n tfc.Tensor;\n const shape =\n getParamValue('outputShape', node, tensorMap, context) as number[];\n const sparseValues =\n getParamValue('sparseValues', node, tensorMap, context) as tfc.Tensor;\n const defaultValue =\n getParamValue('defaultValue', node, tensorMap, context) as tfc.Scalar;\n return [tfc.sparseToDense(indices, sparseValues, shape, defaultValue)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'slice_join';\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../../data/types';\nimport {ExecutionContext} from '../../executor/execution_context';\nimport {Node} from '../types';\n\nimport {OpExecutor} from './types';\nimport {getParamValue, split} from './utils';\n\nexport let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext):\n tfc.Tensor[] => {\n switch (node.op) {\n case 'cast': {\n return [tfc.cast(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n getParamValue('dtype', node, tensorMap, context) as 'int32' |\n 'float32' | 'bool')];\n }\n case 'expandDims': {\n const axis = getParamValue('axis', node, tensorMap, context) as number;\n return [tfc.expandDims(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis)];\n }\n case 'squeeze': {\n const axis = getParamValue('axis', node, tensorMap, context) as number[];\n return [tfc.squeeze(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor, axis)];\n }\n\n case 'reshape': {\n return [tfc.reshape(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n getParamValue('shape', node, tensorMap, context) as number[])];\n }\n case 'pad': {\n return [tfc.pad(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n split(\n getParamValue('padding', node, tensorMap, context) as number[],\n 2) as Array<[number, number]>,\n getParamValue('constantValue', node, tensorMap, context) as number)];\n }\n case 'spaceToBatchND': {\n const blockShape =\n getParamValue('blockShape', node, tensorMap, context) as number[];\n const paddings = split(\n getParamValue('paddings', node, tensorMap, context) as number[], 2);\n return [tfc.spaceToBatchND(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n blockShape, paddings)];\n }\n case 'batchToSpaceND': {\n const blockShape =\n getParamValue('blockShape', node, tensorMap, context) as number[];\n const crops = split(\n getParamValue('crops', node, tensorMap, context) as number[], 2);\n return [tfc.batchToSpaceND(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor,\n blockShape, crops)];\n }\n case 'depthToSpace': {\n const blockSize =\n getParamValue('blockSize', node, tensorMap, context) as number;\n const dataFormat =\n getParamValue('dataFormat', node, tensorMap, context) as 'NHWC' |\n 'NCHW';\n return [tfc.depthToSpace(\n getParamValue('x', node, tensorMap, context) as tfc.Tensor4D,\n blockSize, dataFormat)];\n }\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n};\n\nexport const CATEGORY = 'transformation';\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap} from '../data/types';\nimport {ExecutionContext} from '../executor/execution_context';\n\nimport * as arithmetic from './executors/arithmetic_executor';\nimport * as basicMath from './executors/basic_math_executor';\nimport * as control from './executors/control_executor';\nimport * as convolution from './executors/convolution_executor';\nimport * as creation from './executors/creation_executor';\nimport * as dynamic from './executors/dynamic_executor';\nimport * as evaluation from './executors/evaluation_executor';\nimport * as graph from './executors/graph_executor';\nimport * as image from './executors/image_executor';\nimport * as logical from './executors/logical_executor';\nimport * as matrices from './executors/matrices_executor';\nimport * as normalization from './executors/normalization_executor';\nimport * as reduction from './executors/reduction_executor';\nimport * as sliceJoin from './executors/slice_join_executor';\nimport * as transformation from './executors/transformation_executor';\nimport {Node} from './types';\n\n/**\n * Executes the op defined by the node object.\n * @param node\n * @param tensorMap contains tensors for executed nodes and weights\n */\nexport function executeOp(\n node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext): tfc.Tensor[]|Promise<tfc.Tensor[]> {\n switch (node.category) {\n case 'arithmetic':\n return arithmetic.executeOp(node, tensorMap, context);\n case 'basic_math':\n return basicMath.executeOp(node, tensorMap, context);\n case 'control':\n return control.executeOp(node, tensorMap, context);\n case 'convolution':\n return convolution.executeOp(node, tensorMap, context);\n case 'creation':\n return creation.executeOp(node, tensorMap, context);\n case 'dynamic':\n return dynamic.executeOp(node, tensorMap, context);\n case 'evaluation':\n return evaluation.executeOp(node, tensorMap, context);\n case 'image':\n return image.executeOp(node, tensorMap, context);\n case 'graph':\n return graph.executeOp(node, tensorMap, context);\n case 'logical':\n return logical.executeOp(node, tensorMap, context);\n case 'matrices':\n return matrices.executeOp(node, tensorMap, context);\n case 'normalization':\n return normalization.executeOp(node, tensorMap, context);\n case 'reduction':\n return reduction.executeOp(node, tensorMap, context);\n case 'slice_join':\n return sliceJoin.executeOp(node, tensorMap, context);\n case 'transformation':\n return transformation.executeOp(node, tensorMap, context);\n default:\n throw TypeError(`Node type ${node.op} is not implemented`);\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\nimport {Tensor} from '@tensorflow/tfjs-core';\n\nimport {NamedTensorsMap, TensorArrayMap} from '../data/types';\n\nimport {TensorArray} from './tensor_array';\n\nexport interface ExecutionContextInfo {\n id: number; // the unique id of the context info\n frameName: string; // The frame name of the loop, this comes from\n // the TensorFlow NodeDef.\n iterationId: number; // The iteration id of the loop\n}\n\n/**\n * ExecutionContext captures the runtime environment of the node. It keeps\n * track of the current frame and iteration for the control flow ops.\n *\n * For example, a typical Dynamic RNN model may contain loops, for which\n * TensorFlow will generate graphs with Enter/Exit nodes to control the\n * current execution frame, and NextIteration Nodes for iteration id increment.\n * For models with branch logic, TensorFlow will generate Switch/Merge ops.\n */\nexport class ExecutionContext {\n private rootContext = {id: 0, frameName: '', iterationId: 0};\n private contexts: ExecutionContextInfo[] = [this.rootContext];\n private lastId = 0;\n private _currentContextIds: string[];\n\n constructor(\n public readonly weightMap: NamedTensorsMap,\n public readonly tensorArrayMap: TensorArrayMap) {\n this.generateCurrentContextIds();\n }\n\n private newFrame(id: number, frameName: string) {\n return {id, frameName, iterationId: 0};\n }\n\n /**\n * Set the current context\n * @param contexts: ExecutionContextInfo[] the current path of execution\n * frames\n */\n set currentContext(contexts: ExecutionContextInfo[]) {\n if (this.contexts !== contexts) {\n this.contexts = contexts;\n this.generateCurrentContextIds();\n }\n }\n\n get currentContext(): ExecutionContextInfo[] {\n return this.contexts;\n }\n\n /**\n * Returns the current context in string format.\n */\n get currentContextId(): string {\n return this._currentContextIds[0];\n }\n\n /**\n * Returns the current context and all parent contexts in string format.\n * This allows access to the nodes in the current and parent frames.\n */\n get currentContextIds(): string[] {\n return this._currentContextIds;\n }\n\n private generateCurrentContextIds() {\n const names = [];\n for (let i = 0; i < this.contexts.length - 1; i++) {\n const contexts = this.contexts.slice(0, this.contexts.length - i);\n names.push(this.contextIdforContexts(contexts));\n }\n names.push('');\n this._currentContextIds = names;\n }\n\n private contextIdforContexts(contexts: ExecutionContextInfo[]) {\n return contexts ?\n contexts\n .map(\n context => (context.id === 0 && context.iterationId === 0) ?\n '' :\n `${context.frameName}-${context.iterationId}`)\n .join('/') :\n '';\n }\n\n /**\n * Enter a new frame, a new context is pushed on the current context list.\n * @param frameId new frame id\n */\n enterFrame(frameId: string) {\n if (this.contexts) {\n this.lastId++;\n this.contexts = this.contexts.slice();\n 
this.contexts.push(this.newFrame(this.lastId, frameId));\n this._currentContextIds.unshift(this.contextIdforContexts(this.contexts));\n }\n }\n\n /**\n * Exit the current frame, the last context is removed from the current\n * context list.\n */\n exitFrame() {\n if (this.contexts && this.contexts.length > 1) {\n this.contexts = this.contexts.slice();\n this.contexts.splice(-1);\n this.currentContextIds.shift();\n } else {\n throw new Error('Cannot exit frame, the context is empty');\n }\n }\n\n /**\n * Enter the next iteration of a loop, the iteration id of last context is\n * increased.\n */\n nextIteration() {\n if (this.contexts && this.contexts.length > 0) {\n this.contexts = this.contexts.slice();\n this.lastId++;\n const context =\n Object.assign({}, this.contexts[this.contexts.length - 1]) as\n ExecutionContextInfo;\n context.iterationId += 1;\n context.id = this.lastId;\n this.contexts.splice(-1, 1, context);\n this._currentContextIds.splice(\n 0, 1, this.contextIdforContexts(this.contexts));\n } else {\n throw new Error('Cannot increase frame iteration, the context is empty');\n }\n }\n\n getWeight(name: string): Tensor[] {\n return this.weightMap[name];\n }\n\n addTensorArray(tensorArray: TensorArray) {\n this.tensorArrayMap[tensorArray.id] = tensorArray;\n }\n\n getTensorArray(id: number): TensorArray {\n return this.tensorArrayMap[id];\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport {DataType, Tensor, tidy, util} from '@tensorflow/tfjs-core';\n\n// tslint:disable-next-line:max-line-length\nimport {NamedTensorMap, NamedTensorsMap, TensorArrayMap, TensorInfo} from '../data/types';\n// tslint:disable-next-line:max-line-length\nimport {getNodeNameAndIndex, getParamValue, getTensor, getTensorsForCurrentContenxt} from '../operations/executors/utils';\nimport {executeOp} from '../operations/operation_executor';\nimport {Graph, Node} from '../operations/types';\n\nimport {ExecutionContext, ExecutionContextInfo} from './execution_context';\n\ninterface NodeWithContexts {\n contexts: ExecutionContextInfo[];\n node: Node;\n}\n\nexport class GraphExecutor {\n private compiledMap: Map<string, Node[]> = new Map();\n private _weightMap: NamedTensorsMap = {};\n private weightIds: number[];\n private placeholders: Node[];\n private _outputs: Node[];\n private SEPERATOR = ',';\n get weightMap(): NamedTensorsMap {\n return this._weightMap;\n }\n set weightMap(weightMap: NamedTensorsMap) {\n const weightIds = Object.keys(weightMap).map(\n key => weightMap[key].map(tensor => tensor.id));\n this.weightIds = [].concat.apply([], weightIds);\n this._weightMap = weightMap;\n }\n\n get inputs(): TensorInfo[] {\n return this.placeholders.map(node => {\n return {\n name: node.name,\n shape: node.params['shape'] ? node.params['shape'].value as number[] :\n undefined,\n dtype: node.params['dtype'] ? 
node.params['dtype'].value as DataType :\n undefined\n };\n });\n }\n\n get outputs(): TensorInfo[] {\n return this._outputs.map(node => {\n return {\n name: node.name,\n shape: node.params['shape'] ? node.params['shape'].value as number[] :\n undefined,\n dtype: node.params['dtype'] ? node.params['dtype'].value as DataType :\n undefined\n };\n });\n }\n\n get inputNodes(): string[] {\n return this.placeholders.map(node => node.name);\n }\n\n get outputNodes(): string[] {\n return this.outputs.map(node => node.name);\n }\n\n constructor(private graph: Graph) {\n this.placeholders = graph.placeholders;\n this._outputs = graph.outputs;\n this.compile();\n }\n\n get isControlFlowModel(): boolean {\n return this.graph.withControlFlow;\n }\n\n get isDynamicShapeModel(): boolean {\n return this.graph.withDynamicShape;\n }\n\n /**\n * Compiles the inference graph to generate the topological order of op\n * nodes and caches the result for inference execution.\n */\n private compile(startNodes?: Node[]) {\n // Do not compile for graphs with control flow, since the execution order\n // requires runtime evaluation of the output tensors.\n if (this.graph.withControlFlow || this.graph.withDynamicShape) {\n return;\n }\n const compiledOrder = [];\n const inputs = startNodes || this.graph.placeholders;\n const sortedNodeNames = inputs.map(node => node.name).sort();\n const nameKey = sortedNodeNames.join(this.SEPERATOR);\n\n // do nothing if the compiled graph cache contains the input.\n if (this.compiledMap.get(nameKey)) {\n return;\n }\n\n const stack = [...inputs, ...this.graph.weights];\n const visited: {[key: string]: boolean} = {};\n while (stack.length > 0) {\n const node = stack.pop();\n visited[node.name] = true;\n compiledOrder.push(node);\n node.children.forEach((childNode) => {\n if (!visited[childNode.name] && childNode.inputNames.every(name => {\n const [nodeName, ] = getNodeNameAndIndex(name);\n return visited[nodeName];\n })) {\n stack.push(childNode);\n }\n });\n }\n this.compiledMap.set(nameKey, compiledOrder);\n }\n\n /**\n * Executes the inference for the given input tensors.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs output node names from the TensorFlow model. If no outputs\n * are specified, the default outputs of the model are used. 
You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n */\n execute(\n inputs: NamedTensorsMap, strictInputCheck = true,\n outputs?: string|string[]): NamedTensorMap {\n const names = Object.keys(inputs).sort();\n this.checkInput(inputs, strictInputCheck);\n this.checkInputShapeAndType(inputs, strictInputCheck);\n\n this.compile(names.map(name => this.graph.nodes[name]));\n const outputNames = this.calculateOutputs(outputs);\n this.checkOutput(\n this.compiledMap.get(names.join(this.SEPERATOR)), outputNames);\n\n const tensorArrayMap: TensorArrayMap = {};\n const result = tidy(() => {\n const context = new ExecutionContext(this._weightMap, tensorArrayMap);\n const tensorMap = {...this.weightMap, ...inputs};\n const tensorsToKeep = this.getFrozenTensorIds(tensorMap);\n const intermediateTensorConsumerCount: {[key: number]: number} = {};\n\n const compiledNodes = this.compiledMap.get(names.join(this.SEPERATOR));\n for (let i = 0; i < compiledNodes.length; i++) {\n const node = compiledNodes[i];\n if (!tensorMap[node.name]) {\n tensorMap[node.name] =\n executeOp(node, tensorMap, context) as Tensor[];\n this.checkTensorForDisposal(\n node.name, node, tensorMap, context, tensorsToKeep,\n intermediateTensorConsumerCount);\n }\n // stop the execution if all outputs are found.\n if (outputNames.every(name => !!tensorMap[name])) {\n break;\n }\n }\n return this.findOutputs(tensorMap, context, outputNames);\n });\n return result;\n }\n\n private getFrozenTensorIds(tensorMap: NamedTensorsMap): Set<number> {\n const ids = [].concat.apply(\n [],\n Object.keys(tensorMap)\n .map(key => tensorMap[key])\n .map(tensors => tensors.map(tensor => tensor.id)));\n return new Set(ids);\n }\n private checkTensorForDisposal(\n nodeName: string, node: Node, tensorMap: NamedTensorsMap,\n context: ExecutionContext, tensorsToKeep: Set<number>,\n intermediateTensorConsumerCount: {[key: string]: number}) {\n // Skip any control flow nodes, since its dependency is tricky to track\n // correctly.\n if (node.category === 'control') {\n return;\n }\n\n tensorMap[nodeName].forEach(tensor => {\n if (tensor != null) {\n intermediateTensorConsumerCount[tensor.id] =\n (intermediateTensorConsumerCount[tensor.id] || 0) +\n node.children.length;\n }\n });\n node.inputs.forEach(input => {\n // Skip any control flow nodes, since its dependency is tricky to track\n // correctly.\n if (input.category !== 'control') {\n const tensors =\n getTensorsForCurrentContenxt(input.name, tensorMap, context);\n if (tensors != null) {\n tensors.forEach(tensor => {\n if (tensor && !tensorsToKeep.has(tensor.id)) {\n const count = intermediateTensorConsumerCount[tensor.id];\n if (count === 1) {\n tensor.dispose();\n delete intermediateTensorConsumerCount[tensor.id];\n } else if (count != null) {\n // only intermediate nodes has count set, inputs and weights are\n // not.\n intermediateTensorConsumerCount[tensor.id]--;\n }\n }\n });\n }\n }\n });\n }\n /**\n * Executes the inference for given input tensors in Async fashion.\n * @param inputs Tensor map for the model inputs, keyed by the input node\n * names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. 
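(A rough usage sketch of the GraphExecutor execute/executeAsync entry points described in the surrounding comments. This is illustrative only: the `executor` instance, the 'input' and 'output' node names, the tensor shapes, and the `tf` namespace are placeholders/assumptions, and in practice a GraphExecutor is normally driven through FrozenModel rather than called directly.)

```js
// NamedTensorsMap: each key is a node name, each value an array of tensors.
const feeds = {'input': [tf.zeros([1, 224, 224, 3])]};

// Synchronous path for graphs without control flow or dynamic shape ops;
// the optional outputs argument selects output or intermediate nodes.
const syncResult = executor.execute(feeds, true, ['output']);

// Asynchronous path for graphs that do contain control flow ops
// (call this from inside an async function).
const asyncResult = await executor.executeAsync(feeds, ['output']);
```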
You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n */\n async executeAsync(inputs: NamedTensorsMap, outputs?: string|string[]):\n Promise<NamedTensorMap> {\n this.checkInput(inputs, false);\n this.checkInputShapeAndType(inputs, false);\n const tensorArrayMap: TensorArrayMap = {};\n const context = new ExecutionContext(this._weightMap, tensorArrayMap);\n // Graph with control flow op requires runtime evaluation of the execution\n // order, while without control flow the execution order is pre-determined\n // in the compile method.\n const tensors = await this.executeWithControlFlow(inputs, context);\n const results = this.findOutputs(tensors, context, outputs);\n\n // dispose all the intermediate tensors\n const outputIds = Object.keys(results).map(key => results[key].id);\n const inputIdArray =\n Object.keys(inputs).map(key => inputs[key].map(input => input.id));\n const inputIds = [].concat.apply([], inputIdArray);\n Object.keys(tensors).forEach(key => {\n const tensorArray = tensors[key];\n tensorArray.forEach(tensor => {\n if (tensor && outputIds.indexOf(tensor.id) === -1 &&\n inputIds.indexOf(tensor.id) === -1 &&\n this.weightIds.indexOf(tensor.id) === -1) {\n tensor.dispose();\n }\n });\n });\n return results;\n }\n\n /**\n * When there are control flow nodes in the graph, the graph execution use\n * ExecutionContext to keep track of the frames and loop iterators.\n * @param inputs placeholder tensors for the graph.\n * @param context the execution context object for current execution.\n */\n private async executeWithControlFlow(\n inputs: NamedTensorsMap,\n context: ExecutionContext): Promise<NamedTensorsMap> {\n const names = Object.keys(inputs);\n const inputNodes = names.map(name => this.graph.nodes[name]);\n const stack: NodeWithContexts[] =\n [...inputNodes, ...this.graph.weights].map(node => {\n return {node, contexts: context.currentContext};\n });\n const tensorMap = {...this.weightMap, ...inputs};\n const intermediateTensorConsumerCount: {[key: number]: number} = {};\n const tensorsToKeep = this.getFrozenTensorIds(tensorMap);\n const added: {[key: string]: boolean} = {};\n while (stack.length > 0) {\n const promises = this.processStack(\n inputNodes, stack, context, tensorMap, added, tensorsToKeep,\n intermediateTensorConsumerCount);\n await Promise.all(promises);\n }\n\n return tensorMap;\n }\n\n private processStack(\n inputNodes: Node[], stack: NodeWithContexts[], context: ExecutionContext,\n tensorMap: NamedTensorsMap, added: {[key: string]: boolean},\n tensorsToKeep: Set<number>,\n intermediateTensorConsumerCount: {[key: number]: number}) {\n const promises: Array<Promise<Tensor[]>> = [];\n while (stack.length > 0) {\n const item = stack.pop();\n context.currentContext = item.contexts;\n let nodeName = '';\n // The tensor of the Enter op with isConstant set should be set\n // in the parent scope, so it will be available as constant for the\n // whole loop.\n if (item.node.op === 'enter' &&\n getParamValue('isConstant', item.node, tensorMap, context)) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n\n // only process nodes that are not provided as input nodes.\n if (inputNodes.indexOf(item.node) === -1) {\n const tensors = executeOp(item.node, tensorMap, context);\n if (!nodeName) {\n [nodeName] = getNodeNameAndIndex(item.node.name, context);\n }\n\n const currentContext = context.currentContext;\n if (tensors instanceof Promise) {\n promises.push(tensors.then(t => {\n tensorMap[nodeName] = t;\n 
context.currentContext = currentContext;\n this.checkTensorForDisposal(\n nodeName, item.node, tensorMap, context, tensorsToKeep,\n intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added);\n return t;\n }));\n } else {\n tensorMap[nodeName] = tensors;\n this.checkTensorForDisposal(\n nodeName, item.node, tensorMap, context, tensorsToKeep,\n intermediateTensorConsumerCount);\n this.processChildNodes(item.node, stack, context, tensorMap, added);\n }\n } else {\n this.processChildNodes(item.node, stack, context, tensorMap, added);\n }\n }\n return promises;\n }\n\n private processChildNodes(\n node: Node, stack: NodeWithContexts[], context: ExecutionContext,\n tensorMap: NamedTensorsMap, added: {[key: string]: boolean}) {\n node.children.forEach((childNode) => {\n const [nodeName, ] = getNodeNameAndIndex(childNode.name, context);\n if (!added[nodeName]) {\n // Merge op can be pushed if any of its inputs has value.\n if (childNode.op === 'merge') {\n if (childNode.inputNames.some(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({contexts: context.currentContext, node: childNode});\n }\n } else // Otherwise all inputs must to have value.\n if (childNode.inputNames.every(name => {\n return !!getTensor(name, tensorMap, context);\n })) {\n added[nodeName] = true;\n stack.push({contexts: context.currentContext, node: childNode});\n }\n }\n });\n }\n\n private calculateOutputs(outputs?: string|string[]): string[] {\n if (outputs && !(outputs instanceof Array)) {\n outputs = [outputs];\n }\n return (outputs || this.graph.outputs.map(node => node.name)) as string[];\n }\n\n private findOutputs(\n tensorMap: NamedTensorsMap, context: ExecutionContext,\n outputs?: string|string[]): NamedTensorMap {\n const requestedOutputs = this.calculateOutputs(outputs);\n return requestedOutputs.reduce<NamedTensorMap>((map, name) => {\n map[name] = getTensor(name, tensorMap, context);\n return map;\n }, {});\n }\n /**\n * Releases the memory used by the weight tensors.\n */\n dispose() {\n Object.keys(this.weightMap)\n .forEach(\n key => this.weightMap[key].forEach(tensor => tensor.dispose()));\n }\n\n private checkInputShapeAndType(\n inputs: NamedTensorsMap, strictInputCheck = true) {\n this.placeholders.forEach(node => {\n const inputTensors = inputs[node.name];\n // do nothing if not strict input check and input tensors is not for the\n // placeholders.\n if (!strictInputCheck && !inputTensors) {\n return;\n }\n\n const input = inputTensors[0];\n if (node.params['shape'] && node.params['shape'].value) {\n const shape = node.params['shape'].value as number[];\n const match = shape.length === input.shape.length &&\n input.shape.every(\n (dim, index) => shape[index] === -1 || shape[index] === dim);\n util.assert(\n match,\n `The shape of dict['${\n node.name}'] provided in model.execute(dict) must be [${\n shape}], but was [${input.shape}]`);\n }\n if (node.params['dtype'] && node.params['dtype'].value) {\n util.assert(\n input.dtype === node.params['dtype'].value as string,\n `The dtype of dict['${\n node.name}'] provided in model.execute(dict) must be ${\n node.params['dtype'].value}, but was ${input.dtype}`);\n }\n });\n }\n\n private checkInput(inputs: NamedTensorsMap, strictInputCheck = true) {\n const inputKeys = Object.keys(inputs);\n const missing: string[] = [];\n const extra: string[] = [];\n\n this.inputNodes.forEach(name => {\n if (inputKeys.indexOf(name) === -1) missing.push(name);\n });\n\n 
inputKeys.forEach(name => {\n if (this.inputNodes.indexOf(name) === -1) extra.push(name);\n });\n\n const notInGraph = extra.filter(name => !this.graph.nodes[name]);\n\n if (missing.length > 0 && strictInputCheck) {\n throw new Error(\n `The dict provided in model.execute(dict) has the keys ` +\n `[${inputKeys}], but is missing the required keys: [${missing}].`);\n }\n\n if (extra.length > 0 && strictInputCheck) {\n throw new Error(\n `The dict provided in model.execute(dict) has ` +\n `unused keys: [${extra}]. Please provide only the following keys: ` +\n `[${this.inputNodes}].`);\n }\n\n if (notInGraph.length > 0) {\n throw new Error(\n `The dict provided in model.execute(dict) has ` +\n `keys: [${notInGraph}] that are not part of the model graph.`);\n }\n }\n\n private checkOutput(compiledNodes: Node[], outputs: string[]) {\n const compiledNodeNames = compiledNodes.map(node => node.name);\n const extra: string[] = [];\n outputs.forEach(name => {\n if (compiledNodeNames.indexOf(name) === -1) extra.push(name);\n });\n\n if (extra.length > 0) {\n throw new Error(\n `The following outputs are not generated by the execution: ` +\n `[${extra}].`);\n }\n }\n}\n","/**\n * @license\n * Copyright 2018 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nimport * as tfc from '@tensorflow/tfjs-core';\n\nimport {tensorflow} from '../data/compiled_api';\nimport {NamedTensorsMap, TensorInfo} from '../data/types';\nimport {OperationMapper} from '../operations/operation_mapper';\n\nimport {GraphExecutor} from './graph_executor';\n\n/**\n * A `tf.FrozenModel` is a directed, acyclic graph built from a\n * SavedModel GraphDef and allows inference execution.\n */\n\n/** @doc {heading: 'Models', subheading: 'Classes'} */\nexport class FrozenModel implements tfc.InferenceModel {\n private executor: GraphExecutor;\n private version = 'n/a';\n private handler: tfc.io.IOHandler;\n // Returns the version information for the tensorflow model GraphDef.\n get modelVersion(): string {\n return this.version;\n }\n\n get inputNodes(): string[] {\n return this.executor.inputNodes;\n }\n\n get outputNodes(): string[] {\n return this.executor.outputNodes;\n }\n\n get inputs(): TensorInfo[] {\n return this.executor.inputs;\n }\n\n get outputs(): TensorInfo[] {\n return this.executor.outputs;\n }\n\n get weights(): NamedTensorsMap {\n return this.executor.weightMap;\n }\n\n /**\n * @param modelUrl url for the model file generated by scripts/convert.py\n * script.\n * @param weightManifestUrl url for the weight file generated by\n * scripts/convert.py script.\n * @param requestOption options for Request, which allows sending credentials\n * and custom headers.\n */\n constructor(\n private modelUrl: string, private weightManifestUrl: string,\n private requestOption?: RequestInit) {}\n\n private findIOHandler() {\n const path = [this.modelUrl, this.weightManifestUrl];\n if (this.requestOption) {\n this.handler 
= tfc.io.browserHTTPRequest(path, this.requestOption);\n } else {\n const handlers = tfc.io.getLoadHandlers(path);\n if (handlers.length === 0) {\n // For backward compatibility: if no load handler can be found,\n // assume it is a relative http path.\n handlers.push(tfc.io.browserHTTPRequest(path, this.requestOption));\n } else if (handlers.length > 1) {\n throw new Error(\n `Found more than one (${handlers.length}) load handlers for ` +\n `URL '${[path]}'`);\n }\n this.handler = handlers[0];\n }\n }\n\n /**\n * Loads the model and weight files, constructs the in-memory weight map and\n * compiles the inference graph.\n */\n async load(): Promise<boolean> {\n this.findIOHandler();\n if (this.handler.load == null) {\n throw new Error(\n 'Cannot proceed with model loading because the IOHandler provided ' +\n 'does not have the `load` method implemented.');\n }\n const artifacts = await this.handler.load();\n const graph = tensorflow.GraphDef.decode(\n new Uint8Array(artifacts.modelTopology as ArrayBuffer));\n\n this.version = `${graph.versions.producer}.${graph.versions.minConsumer}`;\n const weightMap =\n tfc.io.decodeWeights(artifacts.weightData, artifacts.weightSpecs);\n this.executor =\n new GraphExecutor(OperationMapper.Instance.transformGraph(graph));\n this.executor.weightMap = this.convertTensorMapToTensorsMap(weightMap);\n return true;\n }\n\n /**\n * Executes the inference for the input tensors.\n *\n * @param inputs The input tensors. When there is a single input for the model,\n * the inputs param should be a `tf.Tensor`. For models with multiple inputs,\n * the inputs param should be either a `tf.Tensor`[] if the input order is\n * fixed, or otherwise in NamedTensorMap format.\n *\n * For models with multiple inputs, we recommend using NamedTensorMap as the\n * input type. If you use `tf.Tensor`[], the order of the array needs to\n * follow the\n * order of the inputNodes array. @see {@link FrozenModel.inputNodes}\n *\n * You can also feed any intermediate nodes using the NamedTensorMap as the\n * input type. For example, given the graph\n * InputNode => Intermediate => OutputNode,\n * you can execute the subgraph Intermediate => OutputNode by calling\n * frozenModel.execute({'IntermediateNode': tf.tensor(...)});\n *\n * This is useful for models that use tf.dynamic_rnn, where the intermediate\n * state needs to be fed manually.\n *\n * For batch inference execution, the tensors for each input need to be\n * concatenated together. For example with MobileNet, the required input shape\n * is [1, 224, 224, 3], which represents the [batch, height, width, channel].\n * If we provide a batch of 100 images, the input tensor should be\n * in the shape of [100, 224, 224, 3].\n *\n * @param config Prediction configuration for specifying the batch size and\n * output node names. Currently the batch size option is ignored for frozen\n * model.\n *\n * @returns Inference result tensors. The output would be a single `tf.Tensor`\n * if the model has a single output node, otherwise a Tensor[] or NamedTensorMap\n * will be returned for models with multiple outputs.\n */\n predict(\n inputs: tfc.Tensor|tfc.Tensor[]|tfc.NamedTensorMap,\n config?: tfc.ModelPredictConfig): tfc.Tensor\n |tfc.Tensor[]|tfc.NamedTensorMap {\n return this.execute_(inputs, true, this.outputNodes);\n }\n\n private constructTensorMap(inputs: tfc.Tensor|tfc.Tensor[]) {\n const inputArray = inputs instanceof tfc.Tensor ? 
[inputs] : inputs;\n if (inputArray.length !== this.inputNodes.length) {\n throw new Error(\n 'Input tensor count mismatch: ' +\n `the frozen model has ${this.inputNodes.length} placeholders, ` +\n `while there are ${inputArray.length} input tensors.`);\n }\n return this.inputNodes.reduce((map, inputName, i) => {\n map[inputName] = inputArray[i];\n return map;\n }, {} as tfc.NamedTensorMap);\n }\n /**\n * Executes inference for the model for the given input tensors.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no\n * outputs are specified, the default outputs of the model would be used.\n * You can inspect intermediate nodes of the model by adding them to the\n * outputs array.\n *\n * @returns A single tensor if provided with a single output or no outputs\n * are provided and there is only one default output, otherwise return a\n * tensor array. The order of the tensor array is the same as the outputs\n * if provided, otherwise the order of the outputNodes attribute of the model.\n */\n execute(\n inputs: tfc.Tensor|tfc.Tensor[]|tfc.NamedTensorMap,\n outputs?: string|string[]): tfc.Tensor|tfc.Tensor[] {\n return this.execute_(inputs, false, outputs);\n }\n\n private execute_(\n inputs: tfc.Tensor|tfc.Tensor[]|tfc.NamedTensorMap,\n strictInputCheck = true, outputs?: string|string[]): tfc.Tensor\n |tfc.Tensor[] {\n outputs = outputs || this.outputNodes;\n if (inputs instanceof tfc.Tensor || Array.isArray(inputs)) {\n inputs = this.constructTensorMap(inputs);\n }\n if (this.executor.isControlFlowModel || this.executor.isDynamicShapeModel) {\n throw new Error(\n 'The model contains control flow or dynamic shape ops, ' +\n 'please use the executeAsync method');\n }\n const result = this.executor.execute(\n this.convertTensorMapToTensorsMap(inputs), strictInputCheck, outputs);\n const keys = Object.keys(result);\n return (Array.isArray(outputs) && outputs.length > 1) ?\n outputs.map(node => result[node]) :\n result[keys[0]];\n }\n /**\n * Executes inference for the model for the given input tensors in async\n * fashion; use this method when your model contains control flow ops.\n * @param inputs tensor, tensor array or tensor map of the inputs for the\n * model, keyed by the input node names.\n * @param outputs output node name from the Tensorflow model, if no outputs\n * are specified, the default outputs of the model would be used. 
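(A minimal sketch of the predict/execute/executeAsync calling patterns described in the comments above, assuming a FrozenModel has already been loaded as `model`; the node names 'Intermediate' and 'OutputNode', the shapes, and the `tf` namespace are illustrative placeholders, not part of the library API.)

```js
// Batched prediction: 100 images stacked into a single [100, 224, 224, 3] tensor.
const predictions = model.predict(tf.zeros([100, 224, 224, 3]));

// Feeding an intermediate node through a NamedTensorMap and selecting an output node.
const out = model.execute({'Intermediate': tf.zeros([1, 10])}, 'OutputNode');

// Models with control flow or dynamic shape ops must use executeAsync instead
// (call this from inside an async function).
const outAsync = await model.executeAsync({'Intermediate': tf.zeros([1, 10])}, 'OutputNode');
```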
You can\n * inspect intermediate nodes of the model by adding them to the outputs\n * array.\n *\n * @returns A Promise of single tensor if provided with a single output or\n * no outputs are provided and there is only one default output, otherwise\n * return a tensor map.\n */\n async executeAsync(\n inputs: tfc.Tensor|tfc.Tensor[]|tfc.NamedTensorMap,\n outputs?: string|string[]): Promise<tfc.Tensor|tfc.Tensor[]> {\n if (!(this.executor.isControlFlowModel ||\n this.executor.isDynamicShapeModel)) {\n throw new Error(\n 'The model does not contain control flow or dynamic shape ops, ' +\n 'please use execute method for better performance.');\n }\n outputs = outputs || this.outputNodes;\n if (inputs instanceof tfc.Tensor || Array.isArray(inputs)) {\n inputs = this.constructTensorMap(inputs);\n }\n\n const result = await this.executor.executeAsync(\n this.convertTensorMapToTensorsMap(inputs), outputs);\n const keys = Object.keys(result);\n return Array.isArray(outputs) && outputs.length > 1 ?\n outputs.map(node => result[node]) :\n result[keys[0]];\n }\n\n private convertTensorMapToTensorsMap(map: tfc.NamedTensorMap):\n NamedTensorsMap {\n return Object.keys(map).reduce((newMap: NamedTensorsMap, key) => {\n newMap[key] = [map[key]];\n return newMap;\n }, {});\n }\n /**\n * Releases the memory used by the weight tensors.\n */\n dispose() {\n this.executor.dispose();\n }\n}\n\n/**\n * Load the frozen model through url.\n *\n * Example of loading the MobileNetV2 model and making a prediction with a zero\n * input.\n *\n * ```js\n * const GOOGLE_CLOUD_STORAGE_DIR =\n * 'https://storage.googleapis.com/tfjs-models/savedmodel/';\n * const MODEL_URL = 'mobilenet_v2_1.0_224/tensorflowjs_model.pb';\n * const WEIGHTS_URL =\n * 'mobilenet_v2_1.0_224/weights_manifest.json';\n * const model = await tf.loadFrozenModel(GOOGLE_CLOUD_STORAGE_DIR + MODEL_URL,\n * GOOGLE_CLOUD_STORAGE_DIR + WEIGHTS_URL);\n * const zeros = tf.zeros([1, 224, 224, 3]);\n * model.predict(zeros).print();\n * ```\n *\n * @param modelUrl url for the model file generated by scripts/convert.py\n * script.\n * @param weightManifestUrl url for the weight file generated by\n * scripts/convert.py script.\n * @param requestOption options for Request, which allows to send credentials\n * and custom headers.\n */\n/** @doc {heading: 'Models', subheading: 'Loading'} */\nexport async function loadFrozenModel(\n modelUrl: string, weightsManifestUrl: string,\n requestOption?: RequestInit): Promise<FrozenModel> {\n const model = new FrozenModel(modelUrl, weightsManifestUrl, requestOption);\n await model.load();\n return model;\n}\n","/** @license See the LICENSE file. */\n\n// This code is auto-generated, do not modify this file!\nconst version = '0.6.7';\nexport {version};\n","/** @license See the LICENSE file. */\n\n// This code is auto-generated, do not modify this file!\nconst version = '0.13.5';\nexport {version};\n","/**\n * @license\n * Copyright 2018 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\nexport * from '@tensorflow/tfjs-core';\nexport * from '@tensorflow/tfjs-layers';\nexport * from '@tensorflow/tfjs-converter';\n\n// Import versions of all sub-packages.\nimport {version_core} from '@tensorflow/tfjs-core';\nimport {version_layers} from '@tensorflow/tfjs-layers';\nimport {version_converter} from '@tensorflow/tfjs-converter';\nimport {version as version_union} from './version';\n\nexport const version = {\n 'tfjs-core': version_core,\n 'tfjs-layers': version_layers,\n 'tfjs-converter': version_converter,\n 'tfjs': version_union\n};\n"],"names":["contexts","WEBGL_ATTRIBUTES","alpha","antialias","premultipliedAlpha","preserveDrawingBuffer","depth","stencil","failIfMajorPerformanceCaveat","webGLVersion","document","createElement","addEventListener","ev","preventDefault","getWebGLRenderingContext","gl","isContextLost","getWebGLContext","disable","DEPTH_TEST","STENCIL_TEST","BLEND","DITHER","POLYGON_OFFSET_FILL","SAMPLE_COVERAGE","enable","SCISSOR_TEST","CULL_FACE","cullFace","BACK","Error","canvas","getContext","a","navigator","userAgent","vendor","window","opera","test","substr","array","counter","length","temp","index","Math","random","min","x","max","val","arr","sum","i","b","r","result","diff","Number","expr","msg","shapeA","shapeB","errorMessagePrefix","assert","arraysEqual","ret","Array","isArray","flatten","push","shape","size","n1","n2","tanh","Infinity","e2x","exp","floor","sqrt","n","shuffledIndices","Uint32Array","shuffle","repeat","checkFn","delayFn","maxCounter","Promise","resolve","reject","tryCount","tryFn","nextBackoff","setTimeout","shapeProd","implicitIdx","newShape","slice","axis","keptDims","j","dtype","values","Float32Array","Int32Array","Uint8Array","vals","name","isNaN","oldType","newType","debugMode","checkConversionForNaN","bool","round","f","constructor","call","apply","start","rank","strides","noConversionNeeded","copyTypedArray","makeZerosTypedArray","performance","now","process","time","hrtime","backendTimer","logger","this","Logger","Profiler","timer","forEach","dataSync","util.checkComputationForNaN","then","timing","extraInfo","getExtraProfileInfo","_this","logKernelProfile","kernelMs","timeMs","util.rightPad","paddedName","toString","console","log","FORMAT_LIMIT_NUM_VALS","FORMAT_NUM_FIRST_LAST_VALS","FORMAT_NUM_SIG_DIGITS","verbose","computeStrides","padPerCol","computeMaxSizePerColumn","valsLines","subTensorToString","lines","map","l","join","sizeFromShape","numCols","fill","valuesOrTuples","createComplexTuples","row","offset","valToString","pad","rightPad","parseFloat","toFixed","isLast","storagePerElement","firstValsSize","firstVals","from","subarray","lastVals","subshape","substrides","stride","end","sep","newLineSep","complexTuples","util.sizeFromShape","util.assert","util.getTypedArrayFromDType","TensorBuffer","value","_i","locs","locToIndex","Object","Tensor","make","trackerFn
","opHandler","fn","handler","dataId","id","nextTensorId","rankType","registerTensor","write","data","throwIfDisposed","as1D","reshape","rows","columns","depth2","cast","buffer","read","readSync","isDisposed","disposeTensor","isDisposedInternal","asType","print","expandDims","exclusive","reverse","cumsum","squeeze","clone","tensorToString","reps","tile","indices","gather","transposeA","transposeB","matMul","dot","ord","keepDims","norm","begin","concat","numOrSizeSplits","split","stack","unstack","paddings","constantValue","mean","variance","varianceEpsilon","scale","batchNormalization","all","any","logSumExp","prod","argMin","argMax","add","addStrict","atan2","sub","subStrict","pow","powStrict","mul","mulStrict","div","floorDiv","divStrict","minimum","minimumStrict","maximum","maximumStrict","mod","modStrict","squaredDifference","squaredDifferenceStrict","perm","transpose","notEqual","notEqualStrict","less","lessStrict","equal","equalStrict","lessEqual","lessEqualStrict","greater","greaterStrict","greaterEqual","greaterEqualStrict","logicalAnd","logicalOr","logicalNot","logicalXor","condition","where","neg","ceil","sign","expm1","log1p","rsqrt","square","reciprocal","abs","clipByValue","relu","elu","selu","leakyRelu","prelu","sigmoid","logSigmoid","softplus","zerosLike","onesLike","sin","cos","tan","asin","acos","atan","sinh","cosh","asinh","acosh","atanh","erf","step","dim","softmax","logSoftmax","newShape2D","alignCorners","image","resizeBilinear","resizeNearestNeighbor","filter","dataFormat","dilation","dimRoundingMode","conv1d","dilations","conv2d","outputShape","conv2dTranspose","depthwiseConv2d","depthwiseFilter","pointwiseFilter","separableConv2d","filterSize","avgPool","maxPool","radius","bias","beta","localResponseNormalization","windowShape","poolingType","padding","dilationRate","pool","trainable","Variable","variable","segmentIds","numSegments","unsortedSegmentSum","blockShape","crops","batchToSpaceND","spaceToBatchND","k","sorted","topk","beginMask","endMask","stridedSlice","blockSize","depthToSpace","spectral","fft","ifft","defineProperty","Symbol","hasInstance","instance","initialValue","_super","nextVariableId","registerVariable","ex","tslib_1.__extends","newValue","util.arraysEqual","assign","Function","tape","xs","y","tensorsFromX","nodesFromX","nodeInputs","node","inputs","inputName","input","anyInputFromX","outputs","output","tensorsLeadToY","nodesToY","filteredTape","prunedInputs","nodeInput","prunedNode","tensorAccumulatedGradientMap","dys","o","gradTensor","dy","util.makeZerosTypedArray","gradient","inputGradients","keys","dx","curGradient","dispose","tensor","tensorList","list","walkTensorContainer","Set","container","seen","isIterable","iterable","has","obj","Type","backend","safeMode","WeakMap","activeScope","track","scopeStack","profiler","activeProfile","newBytes","newTensors","peakBytes","kernels","Engine","nameOrFn","gradMode","String","scopedRun","startScope","endScope","error","res","forwardFunc","backwardsFunc","saved","saveFunc","scopeName","startingBytecount","numBytes","startingNumTensors","numTensors","customGradientDepth","profileKernel","shouldRecord","tapeNode","nextTapeNodeId","activeTape","profiling","bytesAdded","totalBytesSnapshot","tensorsAdded","totalTensorsSnapshot","inputShapes","key","item","refCount","tensorInfo","get","numDataBuffers","util.bytesPerElement","set","register","v","registeredVariables","keepTensors","delete","disposeData","varName","info","memory","query","startBytes","startNumTensors","d","gradientsFunc","inputsMap","idx","re
sMap","gradientsMode","gradientScopeCount","scopeInfo","tensorsToKeep","tensorsToTrackInParent","getTensorsInContainer","oldScope","pop","isTensorInList","allowNoGradients","tidy","getFilteredNodesXToY","accumulatedGradientMap","ones","backpropagateGradients","grads","util.isFunction","every","t","_a","gradFunc","addTapeNode","pixels","numChannels","fromPixels","timingInfo","wallMs","makeOnesTypedArray","MAX_TEXTURE_SIZE","URL_PROPERTIES","type","BOOLEAN","NUMBER","STRING","e","getParameter","hasExtension","createFloatTextureAndBindToFramebuffer","fenceSync","TENSORFLOWJS_FLAGS_PREFIX","features","location","search","urlParams","getQueryParams","urlFlags_1","keyValue","urlProperty","warn","extensionName","getExtension","frameBuffer","createFramebuffer","texture","createTexture","bindTexture","TEXTURE_2D","internalFormat","RGBA32F","RGBA","texImage2D","FLOAT","bindFramebuffer","FRAMEBUFFER","framebufferTexture2D","COLOR_ATTACHMENT0","isFrameBufferComplete","checkFramebufferStatus","FRAMEBUFFER_COMPLETE","deleteTexture","deleteFramebuffer","queryString","params","replace","s","decodeParam","decodeURIComponent","EPSILON_FLOAT16","TEST_EPSILON_FLOAT16","EPSILON_FLOAT32","TEST_EPSILON_FLOAT32","Environment","backendName","ENV","registry","engine","findBackend","initEngine","disposeVariables","profile","keep","feature","evaluateFeature","entry","sort","priority","versions","isChrome","getWebGLMaxTextureSize","getBestBackendName","getWebGLDisjointQueryTimerVersion","device_util.isMobile","isWebGLVersionEnabled","isRenderToFloatTextureEnabled","isDownloadFloatTextureEnabled","isWebGLFenceEnabled","floatPrecision","getFeaturesFromURL","globalEngine","factory","setTensorTrackerFn","setDataMover","moveData","err","message","ns","getGlobalNamespace","setTensorTracker","getOrMakeEnvironment","util.assertShapesMatch","checkGrads","args","arg","grad","gradients","varList","originalVarCount","some","g","namedGrads","customGrad","inputShape","batchToSpace","reshaped","spatialLength","reshapedRank","blockShapeRank","permuted","permutedBeforeBatch","permutedAfterBatch","reshapedPermuted","sliceBeginCoords","uncroppedShape","sliceSize","axes","outputLoc","reduceLoc","loc","outIdx","reduceIdx","indexOf","aShape","outShape","combineLocations","ax","util.isInt","axesAreInnerMostDims","numAxes","shapes","firstShape","indicesShape","sliceRank","nResult","resultShape","PARALLELIZE_THRESHOLD","inSize","nearestDivisor","updates","sliceDim","batchDim","shapeError","validateUpdateShape","totalNd","safeSliceDim","numUpdates","outputStrides","outputSize","done","ellipsisMask","newAxisMask","shrinkAxisMask","startIndex","endIndex","shrinkAxis","startForAxis","stopForAxis","count","startIndices","MIN_SAFE_INTEGER","MAX_SAFE_INTEGER","axisSize","util.clamp","stopIndices","stop","firstElem","isTypedArray","deepAssertShapeConsistency","subShape","argName","functionName","inferredShape","inferShape","toTypedArray","convertToTensor","opName","endsWith","substring","f2","configurable","logits","$logits","lse","toFloat","dyTimesY","xMax","shifted","op","softmax_","logSoftmax_","real","imag","$real","$imag","runKernel","complex","$input","complex_","real_","imag_","assertShapesMatch","assertNonNull","zeros","getTypedArrayFromDType","$x","num","tensor1d","DType","Rank","UpcastInt32AndMap","UpcastBoolAndMap","UpcastFloat32AndMap","UpcastComplex64AndMap","onesLike_","zerosLike_","upcastTypeMap","float32","int32","complex64","typeA","typeB","upcastType","dataMover","DataStorage","KernelBackend","tensors","convInfo","dY","newHeight","ne
wWidth","newHEight","inputImage","outputImage","normalized","numSamples","seed","onValue","offValue","boxes","scores","maxOutputSize","iouThreshold","scoreThreshold","boxIndex","cropSize","method","extrapolationValue","sizeSplits","sparseIndices","sparseValues","defaultValue","zerosTensor","floatX","hasEncodingLoss","int","zero","scalar","len","inverse","PI","candidates","score","c","c1","c2","selected","ignoreCandidate","intersectionOverUnion","iCoord","jCoord","yminI","xminI","ymaxI","xmaxI","yminJ","xminJ","ymaxJ","xmaxJ","areaI","areaJ","intersectionYmin","intersectionXmin","intersectionYmax","intersectionXmax","intersectionArea","xShape","xDtype","lastDim","batch","allTopKVals","allTopKIndices","valAndInd","outOffset","topKVals","topKIndices","reduceInfo","firstPass","windowSize","batchSize","outSize","variableNames","compOp","indexSnippet","userCode","inShape","filterHeight","filterWidth","strideHeight","strideWidth","dilationHeight","dilationWidth","effectiveFilterHeight","effectiveFilterWidth","padTop","padInfo","top","padLeft","left","avgMultiplier","outHeight","outWidth","inRank","dims","unshift","inDim","outAxis","outDim","meanShape","varianceShape","offsetShape","scaleShape","broadcast_util.assertAndGetBroadcastShape","offsetSnippet","scaleSnippet","meanSnippet","broadcastSample","varianceSnippet","texName","texSampler","charAt","toUpperCase","COMPLEX_MULTIPLY","REAL","IMAG","bShape","CHECK_NAN_SNIPPET","ADD","SUB","MUL","DIV","INT_DIV","POW","SQUARED_DIFFERENCE","EQUAL","NOT_EQUAL","LESS","LESS_EQUAL","GREATER","GREATER_EQUAL","LOGICAL_AND","LOGICAL_OR","MAX","MIN","MOD","ATAN2","ELU_DER","BinaryOpProgram","gpgpu","webGLProgram","startLoc","getUniformLocationNoThrow","uniform1f","NaN","concat_util.computeOutShape","filterShape","inHeight","inWidth","outChannels","channelMul","inChannels","inputDepthNearestVec4","inputDepthVec4Remainder","xNumRows","xNumCols","imageShape","boxShape","imageHeight","imageWidth","numBoxes","cropHeight","cropWidth","methodId","inputHeightFloat","inputWidthFloat","_b","heightRatio","heightScale","inY","_c","widthRatio","widthScale","inX","coords","util.computeStrides","slices","nearestVec4","nearestVec4Remainder","xSlice","ySlice","buildVec","inputsInfo","broadcast","usesPackedTextures","inputPrefixSnippet","shapeInfo","logicalShape","isUniform","outputSamplingSnippet","floatTextureSetOutputSnippet","inputSamplingSnippet","getInputSamplingSnippet","outTexShape","texShape","shaderPrefix","SHADER_PREFIX","isPacked","getPackedOutputSamplingSnippet","FLOAT_TEXTURE_SET_RGBA_SNIPPET","getOutputSamplingSnippet","FLOAT_TEXTURE_SET_R_SNIPPET","SHADER_PACKED_PREFIX","FLOAT_TEXTURE_SAMPLE_SNIPPET","inInfo","getSamplerScalar","getSampler1D","getSampler2D","getSampler3D","getSampler4D","getSampler5D","getSampler6D","getPackedSampler1D","getPackedSampler2D","getPackedSampler3D","getPackedSampler4D","outShapeInfo","getSamplerFlat","getPackedSamplerFromInInfo","getSamplerFromInInfo","getSamplerAtOutputCoords","getOutputScalarCoords","getOutputPacked1DCoords","getOutputPacked2DCoords","getOutputPacked3DCoords","getOutputPacked4DCoords","getOutput1DCoords","getOutput2DCoords","getOutput3DCoords","getOutput4DCoords","getOutput5DCoords","getOutput6DCoords","SAMPLE_1D_SNIPPET","SAMPLE_2D_SNIPPET","SAMPLE_3D_SNIPPET","SAMPLE_4D_SNIPPET","SAMPLE_5D_SNIPPET","SAMPLE_6D_SNIPPET","packedTexShape","texelsInLogicalRow","texelsInBatch","coordsFromIndexSnippet","shader_util.getLogicalCoordinatesFromFlatIndex","texelsInBatch2","inputInfo","funcName","texNumR","texNumC","texNumR_1"
,"squeezedShape","squeezeInputInfo","getSqueezedParams","valuesPerRow","stride0","stride1","stride2","stride3","stride4","tNumR","tNumC","texFuncSnippet","outRank","broadcastDims","broadcast_util.getBroadcastDims","rankDiff","supportsBroadcasting","doBroadcast","broadcastOverOuter","broadcast_util.broadcastDimsAreOuter","getBroadcastOutputCoordsSampler","broadcastSnippet","inTexShape","newInputInfo","JSON","parse","stringify","finalDim","comparator","getCoordsDataType","getFinalCoord","getCoords","getHeightCoordString","getWidthCoordString","getDepthCoordString","getOutputDepthSize","getInputSamplingString","DepthToSpaceProgram","COMPLEX_FFT","innerDim","exponentMultiplierSnippet","resultDenominator","height","width","indicesLength","sourceCoords","getSourceCoords","currentCoords","TextureUsage","PhysicalTextureType","stridesType","strideString","matrixSize","channelsPerTexture","unpackedSize","matrix","unpackedArray","requiredSize","getUnpackedArraySizeFromMatrixSize","dst","src","getMatrixSizeFromUnpackedArraySize","batches","packedRGBA","getPackedRGBAArraySizeFromMatrixShape","oddWidth","oddHeight","widthInFullBlocks","heightInFullBlocks","texelsPerRow","texelsPerBatch","flattenedMatrixSize","util.nearestLargerEven","sourceOffset","batchOffset","dstStride","oneRow","blockY","matrixSrcRow","blockX","srcStride","dstRow1","dstRow2","func","returnValue","checkWebGLError","webGLDebugErrorCheckingEnabled","enabled","getError","NO_ERROR","getWebGLErrorMessage","status","INVALID_ENUM","INVALID_VALUE","INVALID_OPERATION","INVALID_FRAMEBUFFER_OPERATION","OUT_OF_MEMORY","CONTEXT_LOST_WEBGL","throwIfNull","vertexShaderSource","vertexShader","createShader","VERTEX_SHADER","callAndCheck","shaderSource","compileShader","getShaderParameter","COMPILE_STATUS","getShaderInfoLog","fragmentShaderSource","fragmentShader","FRAGMENT_SHADER","logShaderSourceAndInfoLog","lineNumberRegex","shaderInfoLog","lineNumberRegexResult","exec","lineNumber","shaderLines","linesWithLineNumbers","line","maxLineLength","beforeErrorLines","errorLine","afterErrorLines","createProgram","program","linkProgram","getProgramParameter","LINK_STATUS","getProgramInfoLog","validateProgram","VALIDATE_STATUS","createBuffer","bindBuffer","ARRAY_BUFFER","bufferData","STATIC_DRAW","ELEMENT_ARRAY_BUFFER","maxTextureSize","requested","attribute","arrayEntriesPerItem","itemStrideInBytes","itemOffsetInBytes","getAttribLocation","vertexAttribPointer","enableVertexAttribArray","textureUnit","validateTextureUnit","activeTexture","TEXTURE0","uniformName","getUniformLocation","uniformSamplerLocation","bindTextureUnit","uniform1i","viewport","scissor","framebuffer","getFramebufferErrorMessage","FRAMEBUFFER_INCOMPLETE_ATTACHMENT","FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT","FRAMEBUFFER_INCOMPLETE_DIMENSIONS","FRAMEBUFFER_UNSUPPORTED","returnTOrNull","failureMessage","tOrNull","maxTextureUnit","MAX_COMBINED_TEXTURE_IMAGE_UNITS","glTextureUnit","logShape","maxTexSize","squeezeResult","util.squeezeShape","util.sizeToSquarishShape","shape1","shape2","isEven","webgl_util.createVertexShader","webgl_util.createStaticVertexBuffer","webgl_util.createStaticIndexBuffer","Uint16Array","textureHalfFloatExtension","internalFormatFloat","internalFormatHalfFloat","internalFormatPackedFloat","textureFormatFloat","downloadUnpackNumChannels","defaultNumChannels","textureTypeHalfFloat","glany","R32F","R16F","RED","HALF_FLOAT","HALF_FLOAT_OES","downloadTextureFormat","textureFormat","textureType","webgl_util.validateTextureSize","webgl_util.createTexture","tex2d","webgl_uti
l.callAndCheck","texParameteri","TEXTURE_WRAP_S","CLAMP_TO_EDGE","TEXTURE_WRAP_T","TEXTURE_MIN_FILTER","NEAREST","TEXTURE_MAG_FILTER","textureConfig","createAndConfigureTexture","UNSIGNED_BYTE","vertexBuffer","webgl_util.bindVertexBufferToProgramAttribute","texSubImage2D","w","h","tex_util.encodeMatrixToUnpackedArray","tex_util.getUnpackedArraySizeFromMatrixSize","uploadDataToTexture","tex_util.getPackedRGBAArraySizeFromMatrixShape","tex_util.encodeMatrixToPackedRGBA","bufferOrTexture","gl2_1","buffer_1","PIXEL_PACK_BUFFER","bufferSizeBytes_1","readPixels","gl2","downloadTarget","getBufferSubData","tex_util.decodeMatrixFromUnpackedArray","cols","physicalRows","physicalCols","tex_util.decodeMatrixFromPackedRGBA","textureFloatExtension","webgl_util.getExtensionOrThrow","colorBufferFloatExtension","colorBufferHalfFloatExtension","gpgpu_util.createVertexBuffer","indexBuffer","gpgpu_util.createIndexBuffer","webgl_util.createFramebuffer","gpgpu_util.getTextureConfig","GPGPUContext","disposed","outputTexture","finish","deleteBuffer","autoDebugValidate","webgl_util.enableDebugWebGLErrorChecking","gpgpu_util.createFloat32MatrixTexture","gpgpu_util.createFloat16MatrixTexture","gpgpu_util.createUnsignedBytesMatrixTexture","gpgpu_util.uploadPixelDataToTexture","gpgpu_util.createFloat16PackedMatrixTexture","gpgpu_util.createPackedMatrixTexture","webgl_util.unbindColorTextureFromFramebuffer","webgl_util.getNumChannels","gpgpu_util.uploadMatrixToTexture","gpgpu_util.uploadMatrixToPackedTexture","downloadMatrixDriver","gpgpu_util.downloadFloat32MatrixFromOutputTexture","gpgpu_util.downloadByteEncodedFloatMatrixFromOutputTexture","gpgpu_util.downloadFloat32MatrixFromBuffer","bindTextureToFrameBuffer","gpgpu_util.maybeCreateBufferFromOutputTexture","unbindTextureToFrameBuffer","fenceContext","createFence","pollFence","isFencePassed","sync_1","SYNC_GPU_COMMANDS_COMPLETE","flush","clientWaitSync","ALREADY_SIGNALED","CONDITION_SATISFIED","beginQuery","endQuery","isQueryAvailable","gpgpu_util.downloadMatrixFromPackedOutputTexture","webgl_util.createFragmentShader","gpgpu_util.createVertexShader","webgl_util.createProgram","attachShader","webgl_util.linkProgram","webgl_util.validateProgram","vertexAttrsAreBound","setProgram","gpgpu_util.bindVertexProgramAttributeStreams","deleteProgram","useProgram","shouldThrow","webgl_util.getProgramUniformLocationOrThrow","webgl_util.getProgramUniformLocation","inputMatrixTexture","uniformLocation","throwIfNoProgram","webgl_util.bindTextureToProgramUniformSampler","outputMatrixTexture","setOutputMatrixTextureDriver","outputPackedMatrixTexture","startRow","numRows","startColumn","numColumns","setOutputMatrixWriteRegionDriver","webgl_util.validateFramebuffer","debugValidate","drawElements","TRIANGLES","UNSIGNED_SHORT","disjointQueryTimerExtension","getQueryTimerExtension","ext_1","getQueryTimerExtensionWebGL2","query_1","createQuery","TIME_ELAPSED_EXT","ext","getQueryTimerExtensionWebGL1","createQueryEXT","beginQueryEXT","endQueryEXT","ext_2","util.repeatedTry","getQueryTime","queryTimerVersion","getQueryParameter","QUERY_RESULT","getQueryObjectEXT","QUERY_RESULT_EXT","available","QUERY_RESULT_AVAILABLE","disjoint","GPU_DISJOINT_EXT","QUERY_RESULT_AVAILABLE_EXT","addItemToPoll","binSearchLastTrue","itemsToPoll","isDoneFn","resolveFn","pollItems","webgl_util.bindColorTextureToFramebuffer","downloadAndDecode","outputMatrixTextureMaybePacked","best","mid","inputInfos","texData","inShapeInfos","source","shader_compiler.makeShader","uniformLocations","shapeInfos","texShapeA","texShap
eB","binary","customSetup","validateBinaryAndProgram","outTex","setOutputPackedMatrixTexture","setOutputMatrixTexture","variableName","variableUniformLocation","uniformValues","uniform1fv","tex","setInputMatrixTexture","executeProgram","keyInputs","keyUserCode","keyBroadcast","itemsPerBlockRow","powOperator","rad","maxD","basis","depthRadius","lastIndex","outerShapeA","outerShapeB","sharedDim","aSnippetFromOffset","vec4Offset","indexVar","bSnippetFromOffset","sharedDimNearestVec4","sharedDimVec4Remainder","sharedDimensionPacked","aSample","bSample","aSwizzle","bSwizzle","numOutcomes","MultinomialProgram","seedLoc","numIndices","getVecChannels","channels","getChannels","outOfBoundsCondition","getOutOfBoundsCondition","setup","getSetup","getOutput","col","coord","cond","innerDims","getSourceCoordsArr","p","unpackedCoords","poolType","computePositions","isAvgPool","initializationValue","filterWidthNearestVec4","filterWidthVec4Remainder","updateSnippet","reduceType","compareOp","windowSizeNearestVec4","windowSizeVec4Remainder","vecType","checkOutOfBounds","mainLoop","thisRC","getReshapedInputCoords","getFlatIndex","shader_util.dotify","xHeight","xWidth","yHeight","yWidth","effectiveXSize","effectiveYSize","invHeightScale","invWidthScale","winHeight","winWidth","oldHeight","oldWidth","effectiveInSize","effectiveOutSize","roundBase","inCoords","_","getInCoord","updateSize","indicesRank","updatesRank","summingDupeIndex","indicesString","indicesSnippet","updatesString","updatesSnippet","segOpInfo","segOpType","checkValueOutOfBounds","checkSegmentIdOutOfBounds","cRank","cCoords","abCoords","cCoordVars","abCoordVars","destSize","SliceProgram","uniform2i","uniform3i","uniform4i","inputDtype","newCoords","outputAxis_1","TextureManager","shapeRC","usage","newTexture","physicalTexType","getPhysicalFromLogicalTextureType","shapeKey","getKeyFromTextureShape","freeTextures","usedTextures","numFreeTextures","numUsedTextures","newTexture_1","shift","PACKED_2X2_FLOAT32","createPackedMatrixTexture","PACKED_2X2_FLOAT16","createFloat16PackedMatrixTexture","UNPACKED_FLOAT32","createFloat32MatrixTexture","UNPACKED_FLOAT16","createFloat16MatrixTexture","PACKED_4X1_UNSIGNED_BYTE","createUnsignedBytesMatrixTexture","logicalTexType","texList","texIndex","splice","logEnabled","total","deleteMatrixTexture","DOWNLOAD","PIXELS","UPLOAD","RENDER","shapeRowsCol","newDim","switched","getSwitchedCoords","originalOrder","switchedCoords","ERF_P","ERF_A1","ERF_A2","ERF_A3","ERF_A4","ERF_A5","SELU_SCALEALPHA","SELU_SCALE","opSnippet","UnaryOpProgram","ABS","RELU","ELU","SELU","selu_util.SELU_SCALEALPHA","selu_util.SELU_SCALE","NEG","CEIL","FLOOR","SIGN","ROUND","EXP","EXPM1","LOG","LOG1P","SQRT","RSQRT","SIGMOID","SOFTPLUS","SIN","COS","TAN","ASIN","ACOS","ATAN","SINH","COSH","TANH","ASINH","ACOSH","ATANH","ERF","erf_util.ERF_P","erf_util.ERF_A1","erf_util.ERF_A2","erf_util.ERF_A3","erf_util.ERF_A4","erf_util.ERF_A5","SQUARE","RECIPROCAL","LOGICAL_NOT","TO_INT","$tensors","convertToTensorArray","parseAxisParam","computeOutShape","assertParamsConsistent","splitSizes","reduce","concat_","concat1d","concat1d_","concat2d","concat2d_","concat3d","concat3d_","concat4d","concat4d_","split_","global","module","define","copy","s0","s1","s2","impl","opts","xg","me","mash","charCodeAt","next","state","prng","double","quick","exports","amd","alea","z","strseed","xor128","xorwow","Date","X","init","xorshift7","limit","xor4096","tychei","math","nodecrypto","chunks","rngname","startdenom","significance","overflow","mask","seedrandom","options","
callback","shortseed","mixkey","prop","typ","entropy","tostring","out","randomBytes","crypto","msCrypto","getRandomValues","browser","plugins","screen","autoseed","arc4","keylen","S","pass","is_math_call","smear","stringseed","fromCharCode","require","sr","stdDeviation","truncated","stdDev","nextVal","upper","lower","seedValue","seedrandom.alea","MPRandGauss","resultX","resultY","isValid","v1","v2","isValidTruncated","convertValue","batchShape","buff","toTensor","as2D","randGauss","nextValue","minval","maxval","util.randUniform","randFunction","origRank","logits2D","multinomial","$indices","oneHot","img","$img","minTensor","maxTensor","multiplier","bytes","Uint8ClampedArray","ctx","imageData","ImageData","putImageData","util.inferFromImplicitShape","xGrad","expandedTensors","outIndex","permutation","getAxesPermutation","permutedX","permutedAxis","getInnerMostAxes","inputHeight","inputWidth","inputDepth","$y","xVals","yVals","ySet","batchToSpaceND_","cast_","clone_","cumsum_","depthToSpace_","expandDims_","eye","eye_","fromPixels_","multinomial_","oneHot_","pad_","pad1d","pad1d_","pad2d","pad2d_","pad3d","pad3d_","pad4d","pad4d_","rand","rand_","randomNormal","randomNormal_","randomUniform","randomUniform_","reshape_","spaceToBatchND_","squeeze_","stack_","tile_","truncatedNormal","truncatedNormal_","unstack_","setdiff1dAsync","setdiff1dAsync_","condShape","condVals","inBuffer","indexToLoc","CPU_HANDOFF_SIZE_THRESHOLD","BEFORE_PAGING_CONSTANT","MATMUL_SHARED_DIM_THRESHOLD","delayedStorage","WeakSet","gpgpuCreatedLocally","NUM_BYTES_BEFORE_PAGING","devicePixelRatio","textureManager","MathBackendWebGL","complexTensors","HTMLVideoElement","HTMLImageElement","HTMLCanvasElement","fromPixels2DContext","readyState","drawImage","tempPixelHandle","makeTensorHandle","uploadPixelDataToTexture","getTexture","FromPixelsProgram","compileAndRun","releaseTexture","uploadToGPU","convertAndCacheOnCPU","shouldTimeProgram","activeTimers","mergeRealAndImagArrays","getValuesFromTexture","downloadWaitMs","pendingRead","subscribers_1","maybeCreateBufferFromTexture","createAndWaitForFence","WebGLTexture","downloadFloat32MatrixFromBuffer","dTypeVals","subscribers","pendingDisposal","downloadMatrixFromPackedTexture","downloadFloat32MatrixFromOutputTexture","tmpTarget","EncodeFloatProgram","tmpData","downloadByteEncodedFloatMatrixFromOutputTexture","oldActiveTimers","newActiveTimers","outerMostTime","programTimersStack","flattenedActiveTimerQueries","util.flatten","flattenedActiveTimerNames","uploadWaitMs","util.sum","ms","unreliable","numBytesInGPU","startMs","endMs","waitForQueryAndGetTime","timerQuery","cpuBackend","sizeThreshold","getCPUBackend","makeOutputArray","shouldExecuteOnCPU","getCustomSetupFunc","beginIndex","StridedSliceProgram","ReverseProgram","a2D","b2D","ConcatProgram","concat2Tensors","unary_op.NEG","a3D","as3D","b3D","multiply","aSqueezed","bSqueezed","MatMulPackedProgram","makePackedTensor","unpackTensor","MatMulProgram","aData","bData","realProgram","BinaryOpComplexProgram","binaryop_complex_gpu.COMPLEX_MULTIPLY","imagProgram","makeComplexComponentTensorHandle","binaryop_gpu.MUL","envSpecificBatchNormProgram","BatchNormProgram","BatchNormPackedProgram","LRNProgram","LRNGradProgram","TileProgram","PadProgram","TransposeProgram","GatherProgram","array_ops_util.getReshaped","array_ops_util.getPermuted","array_ops_util.getReshapedPermuted","array_ops_util.getSliceBeginCoords","array_ops_util.getSliceSize","completePaddings","paddedX","reshapedPaddedShape","permutedReshapedPaddedPermutation","flattenSh
ape","reduce_util.computeOptimalWindowSize","ReduceProgram","bestIndicesA","ArgMinMaxProgram","argReduce","axis_util.assertAxesAreInnerMostDims","outputDType","sumOutType","axis_util.getAxesPermutation","axis_util.getInnerMostAxes","segment_util.computeOutShape","segOpCompute","axis_util.getUndoAxesPermutation","segment_util.segOpComputeOptimalWindowSize","SegmentOpProgram","range","CumSumProgram","binaryop_gpu.EQUAL","binaryop_gpu.NOT_EQUAL","binaryop_gpu.LESS","binaryop_gpu.LESS_EQUAL","binaryop_gpu.GREATER","binaryop_gpu.GREATER_EQUAL","unary_op.LOGICAL_NOT","binaryop_gpu.LOGICAL_AND","binaryop_gpu.LOGICAL_OR","SelectProgram","whereImpl","topkImpl","binaryop_gpu.MIN","binaryop_gpu.MOD","binaryop_gpu.MAX","binaryop_gpu.SQUARED_DIFFERENCE","binaryop_gpu.DIV","binaryop_gpu.INT_DIV","complexSeparableBinaryOp","binaryop_gpu.ADD","complexTensor","complexPart","binaryop_gpu.SUB","subtract","binaryop_gpu.POW","unary_op.CEIL","unary_op.FLOOR","unary_op.SIGN","unary_op.ROUND","unary_op.EXP","unary_op.EXPM1","unary_op.LOG","unary_op.LOG1P","unary_op.SQRT","unary_op.RSQRT","unary_op.SQUARE","unary_op.RECIPROCAL","unary_op.RELU","unary_op.ELU","binaryop_gpu.ELU_DER","unary_op.SELU","unary_op.TO_INT","ClipProgram","unary_op.ABS","xData","ComplexAbsProgram","unary_op.SIGMOID","unary_op.SOFTPLUS","unary_op.SIN","unary_op.COS","unary_op.TAN","unary_op.ASIN","unary_op.ACOS","unary_op.ATAN","binaryop_gpu.ATAN2","unary_op.SINH","unary_op.COSH","unary_op.TANH","unary_op.ASINH","unary_op.ACOSH","unary_op.ATANH","unary_op.ERF","unary_op.STEP","x2ColShape","xSqueezed","w2Row","im2ColProgram","Im2ColProgram","im2Col","matmulProgram","product","conv2dWithIm2Row","Conv2DProgram","Conv2DDerInputProgram","Conv2DDerFilterProgram","DepthwiseConv2DProgram","DepthwiseConv2DDerInputProgram","DepthwiseConv2DDerFilterProgram","Pool2DProgram","maxPoolPositionsProgram","maxPoolPositions","maxPoolBackPropProgram","MaxPool2DBackpropProgram","avgPoolBackpropProgram","AvgPool2DBackpropProgram","backend_util.castTensor","webgl_util.isReshapeFree","packedReshape","backend_util.reshapeTensor","ResizeBilinearProgram","ResizeBilinearBackpropProgram","ResizeNearestNeighborProgram","ResizeNearestNeigborBackpropProgram","probs","OneHotProgram","nonMaxSuppressionImpl","CropAndResizeProgram","outputHeight","outputWidth","outputDepth","flattenIndices","flattenX","ScatterProgram","fftImpl","FFTProgram","fft_gpu.COMPLEX_FFT","numSlices","GatherNDProgram","packedTensor","UnpackProgram","dimsToSkip","afterShape","inputAs3D","getBatchDim","getRowsCols","afterShapeAs3D","ReshapePackedProgram","pageToCpu","inputsData","preProcessProgram","processedInput","PackProgram","outputData","gpgpu_math.makeShaderKey","getAndSaveBinary","gpgpu_math.compileProgram","startTimer","gpgpu_math.runProgram","numBytesToPage","lruDataGPU","computeBytes","endTimer","getBinary","binaryCache","remove","webgl_util.getTextureShapeFromLogicalShape","acquireTexture","uploadMatrixToPackedTexture","typedArrayToFloat32","uploadMatrixToTexture","float32Values","dontKeepCopyOnGPU","float32ToTypedArray","texType","save","complexAbs","clipValueMin","clipValueMax","clip","registerBackend","abs_","acos_","acosh_","asin_","asinh_","atan_","atanh_","ceil_","clipByValue_","cos_","cosh_","erf_","exp_","expm1_","floor_","log_","log1p_","logSigmoid_","neg_","reciprocal_","round_","rsqrt_","sigmoid_","sign_","sin_","sinh_","softplus_","sqrt_","square_","step_","tan_","tanh_","$scale","$offset","$mean","$variance","x4D","as4D","batchnormReshape4D","scaleValue","reductionAxes","getReduction
Axes","tileShape","xMinusMean","dyTimesScaleValue","oneOverSqrtVariance","minusHalfRCube","meanDer","varianceDer","xMinusMean2TimesRsqrt","scaleDer","offsetDer","batchNormalization2d","batchNormalization2d_","batchNormalization3d","batchNormalization3d_","batchNormalization4d","batchNormalization4d_","batchNormalization_","roundingMode","computeConv2DInfo","depthwise","filterChannels","getEffectiveFilterSize","_d","fieldSize","outDepth","zeroPad","computeDefaultPad","inputRows","inputCols","outputRows","conditionalRound","outputCols","effectiveFieldSize","param","bottom","right","computeOutputShape3D","padAlongHeight","padAlongWidth","top_1","dimA","dimB","tupleValuesAreOne","$a","$b","innerShapeA","innerShapeB","outerDimsA","outerDimsB","batchDimA","batchDimB","batchMatMul","$v1","$v2","t1","t2","$t1","$t2","t1Inner","t2Inner","asScalar","matMul_","dot_","outerProduct","outerProduct_","$filter","x3D","reshapedTo3D","conv_util.eitherStridesOrDilationsAreOne","filter4D","input4D","reshapedTo4D","conv_util.computeConv2DInfo","conv_util.tupleValuesAreOne","conv2dDerInput_","conv2dDerFilter_","x2d","w2d","xShape4D","dy4D","inDepth","conv2dDerInput","ddx","conv2dDerFilter","depthwiseConv2D","depthwiseConv2dDerInput","depthwiseConv2dDerFilter","$depthwiseFilter","$pointwiseFilter","channelMultiplier","depthwiseConv2DDerInput","depthwiseConv2DDerFilter","conv1d_","conv2d_","depthwiseConv2d_","separableConv2d_","conv2dTranspose_","reshapeAs","reverse_","reverse1d","reverse1d_","reverse2d","reverse2d_","reverse3d","reverse3d_","reverse4d","reverse4d_","conv_util.computePool2DInfo","y4D","maxPoolBackprop","maxPoolImpl_","avgPoolBackprop","avgPoolImpl_","basePadding","withSpaceToBatchBasePaddings","isDilationOne","adjustedPadding","adjustedCrops","convertedPad","convertedX","$dy","$output","padStart","origPadEnd","fullInputShape","padEndExtra","padEnd","padExtraShape","padExtraStart","padExtraEnd","maxPool_","avgPool_","pool_","begin_","size_","slice_util.assertParamsValid","slice_","slice1d","slice1d_","slice2d","slice2d_","slice3d","slice3d_","slice4d","slice4d_","axis_util.parseAxisParam","axis_util.expandShapeToKeepDim","toInt","expandedDyShape","reduceSize","axis_util.computeOutAndReduceShapes","reduceSizeScalar","xOrig","origAxes","permutedAxes","gradForMinAndMax","keepDimsShape","all_","any_","argMax_","argMin_","logSumExp_","max_","mean_","min_","moments","moments_","sum_","prod_","assertTypesMatch","assertAndGetBroadcastShape","equal_","equalStrict_","greater_","greaterEqual_","greaterEqualStrict_","greaterStrict_","less_","lessEqual_","lessEqualStrict_","lessStrict_","notEqual_","notEqualStrict_","reduceAxes","broadcast_util.getReductionAxes","firstTensor","addN","ders","base","$base","$exp","expFloat","realDivide","tmp","two","add_","addN_","addStrict_","atan2_","div_","divStrict_","floorDiv_","maximum_","maximumStrict_","minimum_","minimumStrict_","mod_","modStrict_","mul_","mulStrict_","pow_","powStrict_","squaredDifference_","squaredDifferenceStrict_","sub_","subStrict_","$condition","select","logicalAnd_","logicalNot_","logicalOr_","logicalXor_","where_","whereAsync","whereAsync_","stepRes","eluDer","scaleAlpha","greaterThanZeroDer","lessEqualZeroDer","$alpha","elu_","leakyRelu_","prelu_","relu_","selu_","undoPerm","transpose_","localResponseNormalization4D","LRNGrad","localResponseNormalization_","normImpl","norm_","$segmentIds","isInt","gatherDropNegatives","paramsShape","indicesSize","outerShape","outerDims","innerShape","outerAxesIndices","arrayRange","innerAxesIndices","valuesShape
","arrayConcat","reshapedIndices","transposeDims","valuesTranspose","paramsGrad","invertTransposeDims","getUndoAxesPermutation","arrays","zeroClippedIndices","gathered","isPositive","numIters","zeroSlice","gather_","unsortedSegmentSum_","lstmCells","$data","$c","$h","newStates","newC","newH","forgetBias","lstmKernel","lstmBias","$forgetBias","$lstmKernel","$lstmBias","sliceCols","basicLSTMCell","basicLSTMCell_","multiRNNCell","multiRNNCell_","decay","zeroDebias","$v","$decay","one","oneMinusDecay","update","$step","movingAverage","movingAverage_","stridedSlice_","topk_","$updates","scatter_nd_util.validateInput","scatterND","scatterND_","innerDimensionSize","input2D","complexInput","half","realValues","imagValues","realComplexConjugate","imagComplexConjugate","fft_","ifft_","rfft","rfft_","defaultValues","numElems","numDims","numValues","$sparseIndices","$sparseValues","$defaultValue","sparse_to_dense.validateInput","sparseToDense","sparseToDense_","gatherND","gatherND_","losses","weights","reduction","Reduction","SUM_BY_NONZERO_WEIGHTS","$losses","$weights","weightedLoss","NONE","SUM","MEAN","broadcastFactor","numNonZeros","labels","predictions","$labels","$predictions","computeWeightedLoss","epsilon","epsilonScalar","maxOutput","outputXTarget","sigmoidOutput","multiClassLabels","labelSmoothing","$multiClassLabels","labelSmoothingScalar","sigmoidCrossEntropyWithLogits_","delta","deltaScalar","quadratic","linear","logResult","dyShape","expandShapeToKeepDim","onehotLabels","$onehotLabels","numClasses","softmaxCrossEntropyWithLogits_","absoluteDifference","absoluteDifference_","computeWeightedLoss_","cosineDistance","cosineDistance_","hingeLoss","hingeLoss_","huberLoss","huberLoss_","logLoss","logLoss_","meanSquaredError","meanSquaredError_","sigmoidCrossEntropy","sigmoidCrossEntropy_","softmaxCrossEntropy","softmaxCrossEntropy_","inputIsTensor2D","ys","xs1d","proj","fullMatrices","qr2d","outerDimsProd","prev","q2ds_1","r2ds_1","q2d","r2d","m","q","one2D","tensor2d","iters","rTemp","wTemp","qTemp","gramSchmidt","gramSchmidt_","qr","qr_","images","$images","batchImages","resizeBilinearBackprop","resizeNearestNeighborBackprop","NEGATIVE_INFINITY","$boxes","$scores","nonMaxSuppSanityCheck","nonMaxSuppression","boxesVals","scoresVals","boxInd","$image","$boxInd","cropAndResize","resizeBilinear_","resizeNearestNeighbor_","nonMaxSuppression_","nonMaxSuppressionAsync","nonMaxSuppressionAsync_","cropAndResize_","MathBackendCPU","firstUse","getImageData","numPixels","channel","tensor3d","complex_util.mergeRealAndImagArrays","assertNotComplex","ops.buffer","xLoc","ops.tensor","newLoc","xBuffer","outLoc","inLoc","tensors2D","innerSize","offset_1","colOffset_1","tVals","tIdx","resIdx","finalOutShape","ops.scalar","broadcastedBinaryComplexOp","aReal","aImag","bReal","bImag","broadcastedBinaryOp","aValue","bValue","resultVals","currVals","leftDim","rightDim","aValues","bValues","aBatch","aOuterStep","aInnerStep","bInnerStep","bOuterStep","bBatch","b_1","i0","j0","k0","iBlock","jBlock","kBlock","ops.tensor3d","reduceShape","ops.zeros","aVals","segmentId","ops.equal","ops.stack","minIndex","maxIndex","resultDtype","indexAdjuster","prevIdx","aVal","bVal","newValues","rem","anyVal","resVals","inVals","resultValues","dyValues","threshold","tooLarge","tooSmall","expX","util.tanh","a1","a2","a3","a4","a5","wVals","xOffset1","yOffset1","yR","yOffset2","xRCorner","wR","xR","wOffset1","xOffset2","yC","yOffset3","xCCorner","wC","xC","wOffset2","xOffset3","wOffset3","d1","xVal","d2","dxValues","dxS0","dxS1","dxS2","dy
S0","dyS1","dyS2","fltValues","fltS0","fltS1","fltS2","topPad","leftPad","xRMin","yRMax","xCMin","yCMax","dotProd","dyOffset","fltOffset","dW","yRMin","yCMin","chMul","yOffset4","dm","trunc","xBuf","originalLoc","i_1","originalIndex","outCoords","i_2","newIndex","indicesValues","POSITIVE_INFINITY","xValues","outputVals","outputBatchStrides","outputRowStrides","outputColStrides","outputBatchOffset","inputBatchOffset","xRMax","outputRowOffset","xCMax","minMaxValue","avgValue","xROffset","pixel","maxPositions","maxValue","maxPosition","dxR","dxC","dyRCorner","dyCCorner","dyR","dyC","effectiveInputSize","effectiveOutputSize","outputIdx","effectiveRowSizeRatio","effectiveColSizeRatio","sourceFracRow","sourceRowFloor","rowFrac","sourceRowCeil","topRowOffset","botRowOffset","sourceFracCol","sourceColFloor","colFrac","sourceColCeil","topLeftOffest","botLeftOffset","topRightOffset","botRightOffest","topLeft","bottomLeft","bOffset","topDxRIndex","bottomDxRIndex","topDxROffset","bottomDxROffset","dxRLerp","inverseDxRLerp","leftDxCIndex","rightDxCIndex","dxCLerp","inverseDxCLerp","topLeftRCOffset","topRightRCOffset","bottomLeftRCOffset","bottomRightRCOffset","inverseDxRLerpTimesInverseDxCLerp","inverseDxRLerpTimesDxCLerp","dxRLerpTimesInverseDxCLerp","dxRLerpTimesDxCLerp","dyVal","ops.tensor4d","outputOffset","rowOffset","colOffset","newVal","startRLerp","startDyR","startCLerp","startDyC","accum","dyRIndex","dyROffset","dyCIndex","dyCOffset","mVals","varVals","sVals","offVals","outVals","offValsLength","sValsLength","varValsLength","mValsLength","offi","mi","si","vi","tensor4d","currentChannel","beginSumOffset","endSumOffset","sumAcrossChannels","inputImageValues","outputImageValues","depthBegin","depthEnd","dyi","probabilities","ops.softmax","numEvents","probVals","cdf","event_1","sampleId","event_2","event_3","ops.tensor2d","x1D","isExponentOf2","fftRadix2","ops.complex","ops.real","ops.imag","complex_util.splitRealAndImagArrays","fourierTransformByMatmul","evenComplex","complex_util.complexWithEvenIndex","evenTensor","oddComplex","complex_util.complexWithOddIndex","oddTensor","complex_util.exponents","exponent","addPart","subPart","realTensor","imagTensor","complex_util.exponent","term","complex_util.getComplexWithIndex","complex_util.assignToTypedArray","inH","offsetH","inW","offsetD","inputIdx","bVals","aBroadcastDims","bBroadcastDims","aBuf","bBuf","aLoc","aIndex","bLoc","bIndex","realResult","imagResult","realVals","imagVals","aIdx","bIdx","aRealBuf","bRealBuf","opResult","boxVals","boxIndVals","imageVals","inStride","outStride","startInd","y1","x1","y2","x2","bInd","yInd","ind","topInd","bottomInd","yLerp","xInd","leftInd","rightInd","xLerp","topRight","top_2","closestX","closestY","inInd","outInd","scatter","indicesData","flattenIndex","sumDupeIndices","updatesData","delayCallback","requestAnimationFrame","setImmediate","DTYPE_VALUE_SIZE_MAP","uint16","uint8","name_1","specs","dataPromises","concatenateTypedArrays","spec","name_2","typedArray","quantization_1","quantization","quantizationSizeFactor","byteBuffer","quantizedArray","dtypeFactor","specs_1","totalByteLength","normalizedXs","byteLength","useNodeBuffer","Buffer","Blob","atob","btoa","str","buf","byteOffset","buffers","path","trim","items","modelArtifacts","modelTopology","ArrayBuffer","dateSaved","modelTopologyType","modelTopologyBytes","stringByteLength","weightSpecsBytes","weightSpecs","weightDataBytes","weightData","saveRouters","loadRouters","IORouterRegistry","saveRouter","getInstance","loadRouter","url","getHandlers","handlerTy
pe","validHandlers","router","URL_SCHEME_SUFFIX","managers","ModelStoreManagerRegistry","scheme","manager","getSchemes","sourceURL","destURL","deleteSource","loadHandlers","getLoadHandlers","loadHandler","saveHandlers","getSaveHandlers","saveHandler","sourceScheme","parseURL","sourcePath","sameMedium","load","getManager","removeModel","saveResult","modelArtifactsInfo","schemes","schemes_1","listModels","schemeOut","schemeAndPath","cloneModelInternal","DATABASE_NAME","DATABASE_VERSION","MODEL_STORE_NAME","INFO_STORE_NAME","theWindow","indexedDB","mozIndexedDB","webkitIndexedDB","msIndexedDB","shimIndexedDB","openRequest","db","createObjectStore","keyPath","modelPath","getIndexedDBFactory","BrowserIndexedDB","databaseAction","open","onupgradeneeded","setUpDatabase","onsuccess","modelTx","transaction","getRequest_1","objectStore","close","onerror","oncomplete","modelTx_1","modelArtifactsInfo_1","getModelArtifactsInfoForJSON","infoTx_1","infoStore_1","putInfoRequest_1","put","putModelRequest","deleteInfoRequest","indexedDBRouter","startsWith","URL_SCHEME","browserIndexedDB","registerSaveRouter","registerLoadRouter","BrowserIndexedDBManager","tx","getAllInfoRequest","getAll","maybeStripScheme","infoTx","infoStore","getInfoRequest","deleteModelData_1","deleteModelRequest","registerManager","PATH_SEPARATOR","PATH_PREFIX","INFO_SUFFIX","MODEL_TOPOLOGY_SUFFIX","WEIGHT_SPECS_SUFFIX","WEIGHT_DATA_SUFFIX","topology","BrowserLocalStorage","localStorage","LS","getModelKeys","setItem","arrayBufferToBase64String","removeItem","getItem","weightDataBase64","base64StringToArrayBuffer","localStorageRouter","browserLocalStorage","BrowserLocalStorageManager","prefix","suffix","getModelPathFromKey","DEFAULT_FILE_NAME_PREFIX","DEFAULT_JSON_EXTENSION_NAME","DEFAULT_WEIGHT_DATA_EXTENSION_NAME","fileNamePrefix","BrowserDownloads","modelTopologyFileName","weightDataFileName","weightsURL","URL","createObjectURL","weightsManifest","paths","modelTopologyAndWeightManifest","modelTopologyAndWeightManifestURL","jsonAnchor","download","href","click","weightDataAnchor","files","BrowserFiles","jsonFile","weightFiles","jsonReader","FileReader","onload","event","modelJSON","target","pathToFile","checkManifestAndWeightFiles","perFileBuffers","weightsGroup","weightFileReader","concatenateArrayBuffers","readAsArrayBuffer","readAsText","manifest","basenames","fileNames","file","basename","manifest_1","pathBasename","browserDownloadsRouter","browserDownloads","fetchURLs","requestOptions","requests","fetchURL","fetch","responses","response","arrayBuffer","filePathPrefix","weightNames","groupIndicesToFetchMap","groupWeightsToFetch","weightsFound","allManifestWeightNames","manifestGroupConfig","groupIndex","groupOffset","weightsEntry","rawDtype","weightsBytes","enqueueWeightsForFetchingFn","manifestEntry","sizeBytes","weightName","weightIndex","found","weightsNotFound","weight","groupIndicesToFetch","accumulator","shouldFetch","fetchUrls","filepath","fetchUrl","loadWeightsAsArrayBuffer","weightsTensorMap","bufferIndexOffset","numBuffers","groupBytes","groupBuffer","groupByteBuffer","groupBufferOffset","nameToTensorMap","decodeWeights","requestInit","weightPathPrefix","body","BrowserHTTPRequest","DEFAULT_METHOD","FormData","append","ok","loadBinaryModel","loadJSONModel","statusText","error_1","graphPromise","loadBinaryTopology","manifestPromise","results","json","loadWeights","results_1","modelConfigRequest","modelConfig","weightsManifest_1","weightPath","parseUrl","pathPrefix","weightsManifest_2","lastSlash","lastIndexOf","lastSearchPar
am","match","URL_SCHEME_REGEX","httpRequestRouter","urlItem","isHTTPScheme","browserHTTPRequest","PassthroughLoader","PassthroughSaver","isInteger","oneHotLabels","oneHotPredictions","confusionMatrix","confusionMatrix_","Serializable","className","cls","config","classNameMap","SerializationMap","getMap","fromConfig","WEBGL_ENVS","HAS_WEBGL","NODE_ENVS","IS_NODE","CHROME_ENVS","IS_CHROME","BROWSER_ENVS","IS_BROWSER","CPU_ENVS","BROWSER_CPU_ENVS","BACKEND","ALL_ENVS","actual","expected","aType","bType","actualValues","expectedValues","areClose","fail","expectArraysClose","low","high","actualVals","expect","toEqual","version","Optimizer","returnCost","applyGradients","variableGrads","learningRate","rho","rhoScalar","oneMinusRho","AdadeltaOptimizer","variableGradients","this_1","accumulatedGrads","accumulatedUpdates","accumulatedGrad","accumulatedUpdate","newAccumulatedGrad","newAccumulatedUpdate","initialAccumulatorValue","AdagradOptimizer","beta1","beta2","beta1Scalar","beta2Scalar","accBeta1","accBeta2","oneMinusBeta1","oneMinusBeta2","epsScalar","AdamOptimizer","oneMinusAccBeta1","oneMinusAccBeta2","accumulatedFirstMoment","accumulatedSecondMoment","firstMoment","secondMoment","newFirstMoment","newSecondMoment","biasCorrectedFirstMoment","biasCorrectedSecondMoment","decayScalar","iteration","AdamaxOptimizer","lr","accumulatedWeightedInfNorm","weightedInfNorm","ut0","ut1","newWeightedInfNorm","setLearningRate","SGDOptimizer","momentum","useNesterov","accumulations","MomentumOptimizer","accumulation","newAccumulation","centered","momentumScalar","RMSPropOptimizer","accumulatedMeanSquares","accumulatedMeanGrads","accumulatedMoments","accumulatedMeanSquare","accumulatedMeanGrad","newAccumulatedMeanSquare","newAccumulatedMeanGrad","newAccumulatedMoments","newAccumulatedMeanSquare_1","OptimizerConstructors","train","sgd","adadelta","adagrad","rmsprop","adamax","adam","setBackend","getBackend","setOpHandler","ops","_epsilon","_nextUniqueTensorId","_uidPrefixes","scalarCache","DEFAULT_DTYPE","undefined","setPrototypeOf","AttributeError","prototype","RuntimeError","ValueError","NotImplementedError","AssertionError","IndexError","newArray","refernce","array_1","insecure","toLowerCase","identifier","p1","_GLOBAL_CUSTOM_OBJECTS","getClassName","getConfig","moduleObjects","customObjects","printableModuleName","customObjectsCombined","_f","_e","backupCustomObjects","_h","_g","returnObj","_k","_j","numberCompare","xs_1","hasOwnProperty","label","expectedType","minLength","maxLength","tfc.sqrt","tfc.sum","tfc.mulStrict","Constraint","serialization","defaultMaxValue","defaultAxis","MaxNorm","norms","calcL2Norms","desired","tfc.clipByValue","tfc.mul","tfc.div","tfc.add","getScalar","registerClass","UnitNorm","NonNeg","tfc.relu","minValue","defaultMinValue","rate","defaultRate","MinMaxNorm","CONSTRAINT_IDENTIFIER_REGISTRY_SYMBOL_MAP","maxNorm","minMaxNorm","nonNeg","unitNorm","constraint","serializeKerasObject","deserializeKerasObject","deserializeConstraint","nameMap","Map","VALID_DATA_FORMAT_VALUES","checkStringTypeUnionValue","VALID_PADDING_MODE_VALUES","VALID_POOL_MODE_VALUES","_nameScopeStack","_nameScopeDivider","tensorName","isValidTensorName","currentNameScopePrefix","scopedName","tensorNameRegex","RegExp","parseInt","tfc.min","toArray1D","tfc.max","math_utils.arrayProd","tfc.slice1d","tfc.slice2d","tfc.slice3d","tfc.slice4d","sliceAlongFirstAxis","sliceAlongLastAxis","tfc.concat","tfc.concat1d","tfc.concat2d","tfc.concat3d","tfc.concat4d","tfc.tile","stddev","tfc.randomNormal","xLastDim","ySecondLast
Dim","tfc.matMul","xFirstDims","yShape","yLastDim","yOtherDims","reference","tfc.gather","imageDataFormat","checkDataFormat","biasShape","tfc.elu","tfc.abs","level","noiseShape","util","tfc.step","tfc.neg","tfc.randomUniform","tfc.sub","alt","training","VALID_FAN_MODE_VALUES","VALID_DISTRIBUTION_VALUES","Initializer","Zeros","Ones","Constant","DEFAULT_MINVAL","DEFAULT_MAXVAL","RandomUniform","DEFAULT_MEAN","DEFAULT_STDDEV","RandomNormal","K.randomNormal","TruncatedNormal","gain","Identity","fanIn","fanOut","receptiveFieldSize","arrayProd","mode","checkFanMode","distribution","checkDistribution","VarianceScaling","fans","computeFans","GlorotUniform","GlorotNormal","HeNormal","LeCunNormal","DEFAULT_GAIN","Orthogonal","linalg","INITIALIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP","constant","glorotNormal","glorotUniform","heNormal","identity","leCunNormal","orthogonal","varianceScaling","initializer","deserializeInitializer","weights_1","DEFAULT_VARIABLE_NAME_PREFIX","getNextUniqueTensorId","originalName","getScopedTensorName","getUniqueTensorName","tfc.variable","LayerVariable","assertNotDisposed","checkShapesMatch","variablesAndValues","variableAndValue","ndim","maxNDim","minNDim","sourceLayer","callArgs","outputTensorIndex","_nextNodeID","outboundLayer","inboundLayers","nodeIndices","tensorIndices","inputTensors","outputTensors","inputMasks","outputMasks","outputShapes","layer","outboundNodes","inboundNodes","Node","inboundNames","_nextLayerID","activityRegularizer","inputSpec","supportsMasking","_trainableWeights","_nonTrainableWeights","_losses","_updates","_built","generic_utils.toSnakeCase","getUid","updatable","batchInputShape","inputDType","initialWeights","_refCount","Layer","nodeIndex","attrName","generic_utils.singletonOrArray","getNodeAtIndex","lossFn","built","trainableWeights","nonTrainableWeights","_stateful","stateful","generic_utils.toList","inputIndex","xShapeAtAxis","specDim","kwargs","_callHook","callHook","inputsList","allAreSymbolic","inputsList_1","SymbolicTensor","noneAreSymbolic","inputsList_2","nameScope","assertInputCompatibility","xElem","build","setWeights","outputListCopy","outputList_1","collectInputShape","computeOutputShape","outputDType_1","guessOutputDType","warnOnIncompatibleInputShape","addInboundNode","dimMismatch_1","dimension","allOutputShapes","shapeString","variable_utils.countParamsInWeights","trainableOnly","batchGetValue","weightValueTuples","paramValues","pv","batchSetValue","regularizer","_addedWeightNames","addLoss","TypeError","maskElement","inputTensorList","types_utils.normalizeShapeList","inputTensorList_1","tensorIndex","numDisposedVariables","disposeWeights","refCountAfterDispose","inputTensors_1","sourceTensors","previousSources_1","getSourceInputs","x_1","ModelLoggingVerbosity","sparse","inputTensor","InputLayer","logs","promises","scalarsToDispose","valueScalar","BaseCallback","epoch","model","callbacks","queueLength","CallbackList","setParams","setModel","onEpochBegin","onEpochEnd","onBatchBegin","resolveScalarsInLogs","onBatchEnd","onTrainBegin","onTrainEnd","yieldEvery","batchCount","batchDurationsMillis","autoYieldEveryBatches","batchStartMillis","ModelTrainingYielder","resolveOneTensorInLogs","nextFrame","SKIP_FIRST_BATCHES","DECISION_BATCH_COUNT","meanBatchDuration","dur","THRESHOLD_MILLIS","lastYieldBatchCount","BaseLogger","autoYielder","totals","maybeYieldOnBatch","oldTotalsToDispose","maybeYieldOnEpoch","this_2","metrics","History","history","valueArray","trainBegin","trainEnd","epochBegin","epochEnd","batchBegin","batchEnd","CustomCal
lback","callbackConfig","CallbackConstructorRegistry","verbosityLevel","callbackConstructor","checkForDuplicate","constructors","levelName","ctor","epochs","initialEpoch","numTrainSamples","stepsPerEpoch","doValidation","callbackMetrics","actualCallbacks","createCallbacks","callbackList","samples","steps","squareSum","K.square","epsilonTensor","tfc.onesLike","tfc.maximum","yTrue","yPred","tfc.mean","clippedTrue","MAX_VALUE","absResult","clippedPred","firstLog","tfc.log","secondLog","zeroTensor","maxResult","pos","log2","predictionDiff","logcoshResult","tfc.softplus","fromLogits","tfc.softmax","outputSum","flatTarget","tfc.floor","K.flatten","categoricalCrossentropy","tfc.oneHot","tfc.zerosLike","tfc.exp","sigmoidCrossEntropyWithLogits","logPred","trueNormalized","l2Normalize","predNormalized","trueXPred","identifierOrFn","lossesMap","meanAbsoluteError","meanAbsolutePercentageError","meanSquaredLogarithmicError","squaredHinge","hinge","categoricalHinge","logcosh","sparseCategoricalCrossentropy","binaryCrossentropy","kullbackLeiblerDivergence","poisson","cosineProximity","errMsg","includes","yPredThresholded","K.cast","tfc.greater","tfc.equal","tfc.argMax","tfc.logicalAnd","tp","truePositives","fp","falsePositives","denominator","tfc.where","falseNegatives","lossBinaryCrossentropy","mse","MSE","mae","MAE","mape","MAPE","categoricalCrossentropyLoss","cosine","sparseCategoricalCrossentropyLoss","metricsMap","binaryAccuracy","categoricalAccuracy","precision","optimizerMap","Adagrad","Adadelta","Adam","Adamax","RMSProp","SGD","lineLength","positions","printFn","relevantNodes","sequentialLike","isModelSequentialLike","toDisplay","nodesByDepth","printRow","layers","printLayerSummary","printLayerSummaryWithConnections","checkTrainableWeightsConsistency","trainableCount","countTrainableParams","nonTrainableCount","countParamsInWeights","collectedTrainableWeights","nodes","nodesByDepth_1","depthNodes","flag","fields","countParams","connections","inboundLayer","inboundLayerIndex","inboundTensorIndex","firstConnection","pythonicConfig","generic_utils.toCamelCase","tsArray","arrayLength","isArrayItemInputOrOutputName","convertPythonicToTs","tsDict","pythonicKey","pythonicValue","tsKey","tsConfig","pyArray","convertTsToPythonic","pyDict","tsValue","pyKey","originalKerasVersion","originalBackend","dataType","generic_utils.stringToDType","weightsJSON","skipMismatch","keras_version","layerNames","layers_1","nameToWeights","layerWeights","weightValues","weightEntry","loadTensor","symbolicWeights","preprocessWeightsForLoading","strict","nameToWeight","totalWeightsCount","layers_2","unsetNames","name_3","generic_utils.unique","inputLayers","inputLayersNodeIndices","inputLayersTensorIndices","outputLayers","outputLayersNodeIndices","outputLayersTensorIndices","generic_utils.assert","inputNames","outputNames","feedInputShapes","feedInputNames","feedOutputNames","internalInputShapes","internalOutputShapes","nodesDepths","nodeIDToNode","layersDepths","layerIDToLayer","layerIndices","nodesInDecreasingDepth","buildMapOfGraph","finishedNodes","nodesInProgress","containerNodes","Container","nodeKey","numInboundLayers","layer_1","nodeIndex_1","tensorIndex_1","reversedNodesInDecreasingDepth_1","previousDepth","inboundNode","previousDepth_1","nodeID","layersByDepth","layerID","depthKeys","generic_utils.reverseNumberCompare","depthKeys_1","layersForDepth","layersForDepth_1","computableTensors","layersWithCompleteInput","depthKeys_2","_l","_o","_m","_q","_p","_s","_r","allNames","name_4","numOccurrences","allNames_1","_t","
isNamedTensorMap","loadWeightsFromNamedTensorMap","loadWeightsFromJson","theConfig","kerasVersion","layersVersion","unused","returnString","updatedConfig","masks","generic_utils.pyListRepeat","runInternalGraph","layersToOutputShapes","inputShape_1","depthKeys_3","nodes_1","inputShapes_1","nodeIndex_2","inputShape_2","outputShapes_1","outputShapeKeys","tensorMap","depthKeys_4","nodes_2","referenceInputTensors","referenceOutputTensors","computedData","referenceInputTensors_1","computedTensors","computedMasks","outputTensors_1","outputMasks_1","computedTensor","computedMask","computeMask","keptNodes","nodeConversionMap","originalNodeIndex","calculateLosses","buildNodeConversionMap","layerConfigs","layerClassName","layerConfig","filteredInboundNodes","nodeData","newNodeIndex","modelInputs","modelOutputs","createdLayers","unprocessedNodes","nodeData_1","inputData","inboundLayerName","inboundNodeIndex","addUnprocessedNode","layerData","layerName","deserializeLayer","inboundNodesData_1","layersFromConfig","layersFromConfig_1","processLayer","generic_utils.isObjectEmpty","layersFromConfig_2","currentUnprocessedNodesForLayer","currentUnprocessedNodesForLayer_1","processNode","inputLayersFromConfig_1","layerOutputTensors","outputLayersFromConfig_1","resetStates","feeds","FeedDict","id2Value","feeds_1","feed","assertFeedCompatibility","fetches","feedDict","arrayFetches","fetchArray","internalFeedDict","fetchArray_1","fetch_1","executeInternal","hasKey","getValue","inputValues","inputs_1","inputVal","layerOutputs","getNodeOutputs","outputSymbolicTensors","getOutputAt","DEFAULT_VALIDATION_BATCH_SIZE","iteratorOut","tfc.util","tfc.Tensor","flattendXs","dataset","hasBatchesPerEpoch","batchesPerEpoch","optimizer","validationSplit","isTraining","validationData","valXs","valYs","isDatasetObject","validationBatches","standardizeTensorValidationData","trainFunction","makeTrainFunction","outLabels","getDedupedMetricsNames","standardizeCallbacks","configureCallbacks","history_1","epochLogs","iterator","dataIterator","stepsDone","batchIndex","xsAndYs","standardizeDataIteratorOutput","batchLogs","outs","tfc.dispose","tfc.keep","disposeTensorsInLogs","valOuts","toList","evaluateDataset","evaluate","validationBatchSize","metricsNames","stopTraining_","syncData","hasBatches","testFunction","isLazyIteratorObject","numExamples","xsAndYs_1","batchOuts","tfc.tidy","batchSize_1","batchOut","oldScalar","singletonOrArray","sliceArraysByIndices","batchStart","ins","valF","valIns","validationSteps","checkNumSamples","indexArray","epochIndexArray1D_1","batches_1","makeBatches","batchIds","insBatch","testLoop","checkBatchSize","standardizedOuts","standardizeUserData","targets","inputValX","inputValY","valStandardized","valX","valY","splitAt","originalBatchSize","sliceArrays","valFunction","makeTestFunction","fitLoop","disposeNewTensors","refTensors","oldTensorIds","oldTensor","tensorsToDispose","isDataTensor","isDataArray","names","checkBatchAxis","exceptionPrefix","gotUnexpectedData","isDataDict","names_1","ensureTensorsRank2OrHigher","refDim","setX","unique","setY","lossFns","keyLosses","losses.meanSquaredError","losses.binaryCrossentropy","losses.categoricalCrossentropy","loss","slicedYShape","slicedShape","targetDim","nestedMetrics","outputNames_1","outputMetrics","Model","printSummary","optimizers.getOptimizer","lossFunctions","theLosses","losses.get","lossFunction_1","feedOutputShapes","feedLossFns","name_5","skipTargetIndices","metricsTensors","collectMetrics","metricName","accFn","weightedMetricFn","metric","Metrics.bin
aryAccuracy","Metrics.binaryCrossentropy","losses.sparseCategoricalCrossentropy","Metrics.sparseCategoricalAccuracy","Metrics.sparseCategoricalCrossentropy","Metrics.categoricalAccuracy","Metrics.categoricalCrossentropy","metricFn","Metrics.get","metricResult","outputIndex","metricTensor","appendMetric","metrics_1","handleMetrics","stepsName","outputsIsArray","retrieveSymbolicTensors","tensorValue","executeOutputs","execute","symbolicTensorNames","pyListRepeat","outputsRemaining","layerOutputNames","remainingNames_1","batchOuts_1","K.concatAlongFirstAxis","xsRank2OrHigher","checkInputData","predictLoop","checkArrayLengths","standardizeInputData","checkLossAndTargetCompatibility","K.sliceAlongFirstAxis","dedupedOutLabels","newLabel","metricsValues","variables","minimize","totalLoss","lossFunction","meanMetric","regularizerLoss","valOutputs","fitTensors","fitDataset","namedWeights","getWeights","handlerOrURL","handlers","io","encodeWeights","getNamedWeights","weightDataAndSpecs","unusedArg","toJSON","modelAndWeightsConfig","model_config","deserialize","uniqueWeightValues","skipMismatches","pathOrIOHandler","loadModelFromIOHandler","artifacts","_updatable","Sequential","modelLayer","isLayerModelInstance","Input","checkShape","outputTensor","lastLayerIndex","getExactlyOneShape","summary","predict","predictOnBatch","compile","fit","configArray","extraModelConfig","configArray_1","stopTraining","loadModelInternal","registerCallbackConstructor","Activation","Elu","K.elu","Selu","tfc.selu","Relu","Relu6","tfc.minimum","Linear","Sigmoid","tfc.sigmoid","HardSigmoid","K.hardSigmoid","Softplus","Softsign","K.softsign","Tanh","tfc.tanh","Softmax","activation","deserializeActivation","ReLU","getExactlyOneTensor","baseConfig","DEFAULT_ALPHA","LeakyReLU","theta","DEFAULT_THETA","thetaTensor","ThresholdedReLU","softmaxActivation","DEFAULT_AXIS","l1","l2","hasL1","hasL2","L1L2","regularization","Regularizer","REGULARIZER_IDENTIFIER_REGISTRY_SYMBOL_MAP","l1l2","deserializeRegularizer","singleValue","inputLength","outputLength","dimSize","strideSize","kernelSize","tfc.transpose","kernel","tfc.conv1d","K.biasAdd","preprocessConv2DInput","tfc.conv2d","BaseConv","verifyConfig","normalizeArray","checkPaddingMode","getActivation","useBias","biasInitializer","getInitializer","DEFAULT_BIAS_INITIALIZER","biasConstraint","getConstraint","biasRegularizer","getRegularizer","generic_utils.checkArrayTypeAndLength","serializeActivation","serializeInitializer","serializeRegularizer","serializeConstraint","Conv","filters","kernelInitializer","DEFAULT_KERNEL_INITIALIZER","kernelConstraint","kernelRegularizer","channelAxis","inputDim","kernelShape","addWeight","biasValue","conv1dWithBias","conv2dWithBias","newSpace","space","convOutputLength","Conv2D","InputSpec","Conv2DTranspose","hAxis","wAxis","kernelH","kernelW","strideH","strideW","deconvLength","tfc.conv2dTranspose","heightAxis","widthAxis","depthMultiplier","depthwiseInitializer","DEFAULT_DEPTHWISE_INITIALIZER","depthwiseRegularizer","depthwiseConstraint","pointwiseInitializer","DEFAULT_POINTWISE_INITIALIZER","pointwiseRegularizer","pointwiseConstraint","SeparableConv","depthwiseKernelShape","pointwiseKernelShape","depthwiseKernel","pointwiseKernel","tfc.separableConv2d","SeparableConv2D","Conv1D","cropping","Cropping2D","K.sliceAlongAxis","DEFAULT_SIZE","UpSampling2D","resized","tfc.depthwiseConv2d","DepthwiseConv2D","outFilters","outRows","outCols","rateScalar","Dropout","invokeCallHook","noiseShape_1","getNoiseShape","K.inTrainPhase","K.dropout","units","Dense","input
LastDim","K.dot","Flatten","K.batchFlatten","RepeatVector","K.repeat","targetShape","isUnknown","Reshape","errorMsg","finalShape","known","unknown","originalSize","anyUnknownDims","fixUnknownDimension","expectedSortedIndices","dimsIncludingBatch","Permute","outputDim","embeddingsInitializer","DEFAULT_EMBEDDINGS_INITIALIZER","embeddingsRegularizer","embeddingsConstraint","maskZero","Embedding","embeddings","inLens","K.gather","Merge","computeElementwiseOpOutputShape","batchSizes","allRanks","reshapeRequired","reshapedInputs","inputDims","mathUtils.max","xNDim","K.expandDims","mergeFunction","transposed","inputs_2","xTransposed","mathUtils.arrayProd","mathUtils.range","yNDim","Add","Multiply","Average","Maximum","Minimum","Concatenate","allNoneShape","inputShape_3","shapeSet","shapeWithoutConcatAxis","exists","shapeSet_1","K.concatenate","axesArray","diffShape","adjX","adjY","squeezeAxes","normalize","Dot","interpretAxes","interpretAxis","batchDot","gamma","tfc.batchNormalization2d","tfc.batchNormalization3d","tfc.batchNormalization4d","meanAndVariance","tfc.moments","math_utils.range","broadcastMean","broadcastVariance","broadcastGamma","broadcastBeta","regularNormalizeBatchInTraining","broadcastNormalizeBatchInTraining","center","betaInitializer","gammaInitializer","movingMeanInitializer","movingVarianceInitializer","betaConstraint","gammaConstraint","betaRegularizer","gammaRegularizer","stepCount","BatchNormalization","movingMean","movingVariance","broadcastShape","sortedReductionAxes","needsBroadcasting","broadcastMovingMean","broadcastMovingVariance","normalizeInference","normedTraining","sampleSize","varianceDebiased","newMovingMean","tfc.movingAverage","newMovingVariance","updateMovingMeanAndVariance","pattern","tfc.pad","heightPadding","widthPadding","ZeroPadding2D","spatial2dPadding","poolSize","poolMode","checkPoolMode","paddingString","tfc.maxPool","tfc.avgPool","Pooling1D","poolingFunction","tfc.squeeze","MaxPooling1D","pool2d","AveragePooling1D","Pooling2D","MaxPooling2D","AveragePooling2D","GlobalPooling1D","GlobalAveragePooling1D","GlobalMaxPooling1D","GlobalPooling2D","GlobalAveragePooling2D","GlobalMaxPooling2D","initialState","constants","numConstants","toListOrNull","stepFunction","initialStates","goBackwards","unroll","needPerStepOutputs","lastOutput","tfc.reverse","states","timeSteps","currentInput","stepOutputs","newOutputs","cell","StackedRNNCells","cells","stateSize","returnSequences","returnState","stateSpec","keptStates","RNN","isArrayOfShapes","stateShape","stateSize_1","stepInputShape","tfc.zeros","expectedShape","standardized","standardizeArgs","additionalInputs","additionalSpecs","initialState_1","fullInput","fullInputSpec","originalInputSpec","getInitialState","numStates","cellCallKwargs","rnnOutputs","rnn","K.tile","cellConfig","DEFAULT_ACTIVATION","recurrentInitializer","DEFAULT_RECURRENT_INITIALIZER","recurrentRegularizer","recurrentConstraint","dropout","math_utils.min","math_utils.max","recurrentDropout","dropoutMask","recurrentDropoutMask","SimpleRNNCell","recurrentKernel","prevOutput","generateDropoutMask","dpMask","recDpMask","RNNCell","SimpleRNN","recurrentActivation","DEFAULT_RECURRENT_ACTIVATION","implementation","GRUCell","hTMinus1","hh","matrixX","recurrentKernelValue","rk1","rk2","matrixInner","xZ","xH","recurrentZ","recurrentR","recurrentH","GRU","implmentation","unitForgetBias","LSTMCell","capturedBiasInit_1","capturedUnits_1","CustomInit","bI","bF","bCAndH","cTMinus1","z0","z1","z2","z3","LSTM","nestedStates","callInputs","newNestedStates","cell
States","cellConfigs","tuples","numParams","inputWeights","droppedInputs","Wrapper","newConfig","TimeDistributed","childInputShape","childOutputShape","timesteps","VALID_BIDIRECTIONAL_MERGE_MODES","generic_utils.checkStringTypeUnionValue","forwardLayer","backwardLayer","checkBidirectionalMergeMode","mergeMode","_trainable","Bidirectional","numWeights","numeightsOver2","layerShapes","stateSpecs","isSymbolicTensor","additionalInputs_1","yRev","forwardState","backwardState","rnnLayer","averagePooling1d","averagePooling2d","globalMaxPool1d","globalMaxPooling1d","globalMaxPool2d","globalMaxPooling2d","maxPool1d","maxPooling1d","maxPool2d","maxPooling2d","metrics.binaryAccuracy","metrics.binaryCrossentropy","metrics.categoricalAccuracy","metrics.categoricalCrossentropy","metrics.precision","metrics.recall","losses.cosineProximity","losses.meanAbsoluteError","losses.meanAbsolutePercentageError","regularizers.l1","regularizers.l2","Callback","asPromise","arguments","pending","base64","string","b64","s64","encode","parts","chunk","decode","EventEmitter","_listeners","on","evt","off","listeners","emit","f32","f8b","le","writeFloat_f32_cpy","writeFloat_f32_rev","readFloat_f32_cpy","readFloat_f32_rev","writeFloatLE","writeFloatBE","readFloatLE","readFloatBE","writeFloat_ieee754","writeUint","LN2","readFloat_ieee754","readUint","uint","mantissa","bind","writeUintLE","writeUintBE","readUintLE","readUintBE","Float64Array","f64","writeDouble_f64_cpy","writeDouble_f64_rev","readDouble_f64_cpy","readDouble_f64_rev","writeDoubleLE","writeDoubleBE","readDoubleLE","readDoubleBE","writeDouble_ieee754","off0","off1","readDouble_ieee754","lo","hi","inquire","moduleName","eval","utf8","alloc","SIZE","slab","LongBits","toNumber","zzEncode","zzDecode","zeroHash","fromNumber","isString","Long","fromString","unsigned","toLong","Boolean","fromHash","hash","toHash","part0","part1","part2","merge","ifNotSet","newError","CustomError","properties","captureStackTrace","create","require$$0","require$$1","require$$2","float","require$$3","require$$4","require$$5","require$$6","require$$7","emptyArray","freeze","emptyObject","isNode","isFinite","isObject","isset","isSet","utf8Write","_Buffer_from","_Buffer_allocUnsafe","newBuffer","sizeOrArray","dcodeIO","key2Re","key32Re","key64Re","longToHash","longFromHash","bits","fromBits","lcFirst","ProtocolError","oneOfGetter","fieldNames","fieldMap","oneOfSetter","toJSONOptions","longs","enums","_configure","encoding","allocUnsafe","Writer","BufferWriter","Op","noop","State","writer","head","tail","writeByte","writeVarint32","VarintOp","writeVarint64","writeFixed32","_push","uint32","sint32","uint64","int64","sint64","fixed32","sfixed32","fixed64","sfixed64","writeBytes","fork","reset","ldelim","BufferWriter_","writeBytesBuffer","writeStringBuffer","Reader","BufferReader","indexOutOfRange","reader","writeLength","RangeError","create_array","readLongVarint","readFixed32_end","readFixed64","isBuffer","_slice","skip","skipType","wireType","BufferReader_","utf8Slice","Service","rpcImpl","requestDelimited","responseDelimited","rpcCall","requestCtor","responseCtor","request","self","endedByRPC","protobuf","configure","rpc","roots","$Reader","$protobuf","$util","$root","default","tensorflow","valuesById","Any","ks","typeUrl","DataType","TensorShape","unknownRank","Dim","floatVal","doubleVal","intVal","stringVal","scomplexVal","int64Val","boolVal","uint32Val","uint64Val","tensorShape","versionNumber","tensorContent","AttrValue","$oneOfFields","placeholder","ListValue","NameAttrList","attr","Nod
eDef","device","VersionDef","badConsumers","producer","minConsumer","GraphDef","library","FunctionDefLibrary","CollectionDef","nodeList","bytesList","int64List","floatList","anyList","NodeList","BytesList","Int64List","FloatList","AnyList","SaverDef","filenameTensorName","saveTensorName","restoreOpName","maxToKeep","sharded","keepCheckpointEveryNHours","CheckpointFormatVersion","TensorInfo","cooSparse","CooSparse","valuesTensorName","indicesTensorName","denseShapeTensorName","SignatureDef","methodName","AssetFileDef","filename","OpDef","inputArg","outputArg","deprecation","description","isCommutative","isAggregate","isStateful","allowsUninitializedInput","ArgDef","AttrDef","OpDeprecation","typeAttr","numberAttr","typeListAttr","isRef","hasMinimum","allowedValues","explanation","OpList","MetaGraphDef","collectionDef","signatureDef","assetFileDef","metaInfoDef","graphDef","saverDef","MetaInfoDef","tags","metaGraphVersion","strippedOpList","anyInfo","tensorflowVersion","tensorflowGitVersion","SavedModel","metaGraphs","savedModelSchemaVersion","function","FunctionDef","GradientDef","nodeDef","signature","gradientFunc","paramName","context","getTensor","inputParamLength","tensorsMap","nodeName","contextId","currentContextIds","find","getNodeNameWithContextId","currentContextId","tfOpName","dlOpName","category","tfInputIndex","dlParamName","tfParamName","notSupported","tfInputParamLength","notSupprted","tfParamNameDeprecated","CONTROL_FLOW_OPS","DYNAMIC_SHAPE_OPS","arithmetic","basicMath","control","convolution","creation","dynamic","evaluation","logical","graph","matrices","normalization","sliceJoin","transformation","mappersJson","opMappers","mapper","OperationMapper","_instance","withControlFlow","withDynamicShape","placeholders","mapNode","isControlFlow","isDynamicShape","children","newNode","getStringParam","getNumberParam","getNumericArrayParam","getBoolParam","getTensorShapeParam","getDtypeParam","attrs","def","keepCase","DT_FLOAT","DT_INT32","DT_BOOL","executeOp","getParamValue","tfc.addN","tfc.mod","tfc.floorDiv","tfc.pow","tfc.squaredDifference","tfc.acos","tfc.acosh","tfc.asin","tfc.asinh","tfc.atan","tfc.atan2","tfc.atanh","tfc.ceil","tfc.cos","tfc.cosh","tfc.erf","tfc.expm1","tfc.log1p","tfc.reciprocal","tfc.round","tfc.sin","tfc.sign","tfc.sinh","tfc.square","tfc.tan","tfc.scalar","tfc.prod","tfc.leakyRelu","maxSize","elementShape","identicalElementShapes","dynamicSize","clearAfterRead","TensorArray","nextId","closed_","tensorWithState","cleared","written","readMany","writeMany","totalLength","cumulativeLengths","elementPerRow","indices_1","sizes","pred","data_1","frameId","enterFrame","exitFrame","nextIteration","tensorArray","addTensorArray","writeTensor","getTensorArray","readId","readIndex","gatherId","gatherIndices","gatherDtype","scatterId","scatterIndices","scatterTensor","concatId","concatTensorArray","concatDtype","splitId","splitTensor","lengths","sizeId","sizeTensorArray","closeId","clearAndClose","tfc.fill","stop_1","tfc.linspace","tfc.ones","stop_2","tfc.range","tfc.truncatedNormal","tfc.image","tfc.whereAsync","tfc.setdiff1dAsync","tfc.topk","tfc.tensor1d","summarize","tfc.notEqual","tfc.greaterEqual","tfc.less","tfc.lessEqual","tfc.logicalNot","tfc.logicalOr","tfc.batchNormalization","tfc.localResponseNormalization","tfc.logSoftmax","tfc.sparseToDense","tfc.all","tfc.any","tfc.argMin","tfc.slice","tfc.stridedSlice","mapped","sameShape","tfc.stack","tfc.unstack","tfc.split","tfc.scatterND","tfc.gatherND","tfc.cast","tfc.expandDims","tfc.reshape","tfc.spaceToBatchND","t
fc.batchToSpaceND","tfc.depthToSpace","arithmetic.executeOp","basicMath.executeOp","control.executeOp","convolution.executeOp","creation.executeOp","dynamic.executeOp","evaluation.executeOp","image.executeOp","graph.executeOp","logical.executeOp","matrices.executeOp","normalization.executeOp","reduction.executeOp","sliceJoin.executeOp","transformation.executeOp","weightMap","tensorArrayMap","frameName","iterationId","rootContext","generateCurrentContextIds","ExecutionContext","_currentContextIds","contextIdforContexts","lastId","newFrame","_outputs","GraphExecutor","_weightMap","weightIds","startNodes","compiledOrder","nameKey","SEPERATOR","compiledMap","visited","childNode","strictInputCheck","checkInput","checkInputShapeAndType","calculateOutputs","checkOutput","getFrozenTensorIds","intermediateTensorConsumerCount","compiledNodes","checkTensorForDisposal","findOutputs","ids","getTensorsForCurrentContenxt","executeWithControlFlow","outputIds","inputIdArray","inputIds","inputNodes","currentContext","added","processStack","currentContext_1","processChildNodes","shape_1","inputKeys","missing","extra","notInGraph","compiledNodeNames","modelUrl","weightManifestUrl","requestOption","FrozenModel","executor","outputNodes","tfc.io","findIOHandler","Instance","transformGraph","convertTensorMapToTensorsMap","execute_","inputArray","constructTensorMap","isControlFlowModel","isDynamicShapeModel","executeAsync","newMap","weightsManifestUrl","version_core","version_layers","version_converter","version_union"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAiBA,KAAMA,aAAN;EAAA,IAEMC,qBACJC,QAAO,GACPC,YAAW,GACXC,qBAAoB,GACpBC,wBAAuB,GACvBC,QAAO,GACPC,UAAS,GACTC,+BAA8B,GAThC,CAYA,wBAAA,CAAgCC,CAAhC;EACQA,OAAgBT,QAAhBS,KACWC,SAASC,aAATD,CAAuB,QAAvBA,EACRE,gBADQF,CACS,kBADTA,EAC6B,UAAAG,CAAA;EAC1CA,MAAGC,cAAHD,WACOb,SAASS,CAATT,CADPa;KAFaH,GAIZ,CAJYA,GAKfV,SAASS,CAATT,IAAyBe,yBAAyBN,CAAzBM,CANrBN,EAQN,IAAMO,IAAKhB,SAASS,CAATT,CAAX,CACA,OAAIgB,EAAGC,aAAHD,aACKhB,SAASS,CAATT,GACAkB,gBAAgBT,CAAhBS,CAFLF,KAKJA,EAAGG,OAAHH,CAAWA,EAAGI,UAAdJ,GACAA,EAAGG,OAAHH,CAAWA,EAAGK,YAAdL,CADAA,EAEAA,EAAGG,OAAHH,CAAWA,EAAGM,KAAdN,CAFAA,EAGAA,EAAGG,OAAHH,CAAWA,EAAGO,MAAdP,CAHAA,EAIAA,EAAGG,OAAHH,CAAWA,EAAGQ,mBAAdR,CAJAA,EAKAA,EAAGG,OAAHH,CAAWA,EAAGS,eAAdT,CALAA,EAMAA,EAAGU,MAAHV,CAAUA,EAAGW,YAAbX,CANAA,EAOAA,EAAGU,MAAHV,CAAUA,EAAGY,SAAbZ,CAPAA,EAQAA,EAAGa,QAAHb,CAAYA,EAAGc,IAAfd,CARAA,EAUOhB,SAASS,CAATT,CAfHgB,CAAJ;EAkBF,kCAAA,CAAkCP,CAAlC;EACE,MAAqB,MAAjBA,CAAiB,IAAsB,MAAjBA,CAA1B,EACE,MAAM,IAAIsB,KAAJ,CAAU,wDAAV,CAAN,CAGF,IAAMC,IAAStB,SAASC,aAATD,CAAuB,QAAvBA,CAAf,CACA,OAAqB,MAAjBD,CAAiB,GACXuB,EAAOC,UAAPD,CAAkB,OAAlBA,EAA2B/B,gBAA3B+B,KACAA,EAAOC,UAAPD,CAAkB,oBAAlBA,EAAwC/B,gBAAxC+B,CAFW,GAKdA,EAAOC,UAAPD,CAAkB,QAAlBA,EAA4B/B,gBAA5B+B,CALP;;EC5CA,MAAME,IAAIC,UAAUC,SAAVD,IAAuBA,UAAUE,MAAjCF,IAA4CG,OAAeC,KAArE,CAEA,mUACYC,IADL,CACUN,CADV,KAGH,0kDACKM,IADL,CACUN,EAAEO,MAAFP,CAAS,CAATA,EAAY,CAAZA,CADV;EAHJ;oBCAsBQ;EAMtB,OAJA,IAAIC,IAAUD,EAAME,MAApB,EACIC,IAAO,CADX,EAEIC,IAAQ,CAEZ,EAAOH,IAAU,CAAjB,GAEEG,IAASC,KAAKC,MAALD,KAAgBJ,CAAhBI,GAA2B,CAApCD,EAIAD,IAAOH,IAFPC,CAEOD,CAJPI,EAKAJ,EAAMC,CAAND,IAAiBA,EAAMI,CAANJ,CALjBI,EAMAJ,EAAMI,CAANJ,IAAeG,CANfC;EAWJ,eAAA,CAAsBG,CAAtB,EAAmCC,CAAnC,EAA8CC,CAA9C;EACE,SAAOJ,KAAKI,GAALJ,CAASE,CAATF,EAAcA,KAAKE,GAALF,CAASG,CAATH,EAAYI,CAAZJ,CAAdA,CAAP;EAGF,2BAAA,CAAkCK,CAAlC;EACE,SAAOA,IAAM,CAANA,IAAY,CAAZA,GAAgBA,CAAhBA,GAAsBA,IAAM,CAAnC;EAGF,aAAA,CAAoBC,CAApB;EAEE,OADA,IAAIC,IAAM,CAAV,EACSC,IAAI,CAAb,EAAgBA,IAAIF,EAAIT,MAAxB,EAAgCW,GAAhC,EACE
D,KAAOD,EAAIE,CAAJF,CAAPC,CAEF,OAAOA,CAAP;EAUF,qBAAA,CAA4BpB,CAA5B,EAAuCsB,CAAvC;EACE,MAAMC,IAAIV,KAAKC,MAALD,EAAV,CACA,OAAQS,IAAIC,CAAJD,IAAU,IAAIC,KAAKvB,CAA3B;EAIF,qBAAA,CAA4BA,CAA5B,EAA2CsB,CAA3C;EAEE,OADA,IAAIE,IAAS,CAAb,EACSH,IAAI,CAAb,EAAgBA,IAAIrB,EAAEU,MAAtB,EAA8BW,GAA9B,EAAmC;EACjC,QAAMI,IAAOC,OAAO1B,EAAEqB,CAAFrB,CAAP0B,IAAeA,OAAOJ,EAAED,CAAFC,CAAPI,CAA5B,CACAF,KAAUC,IAAOA,CAAjBD;EAEF,UAAOA,CAAP;EAGF,gBAAA,CAAuBG,CAAvB,EAAsCC,CAAtC;EACE,OAAKD,CAAL,EACE,MAAM,IAAI9B,KAAJ,CAAyB,mBAAR+B,CAAQ,GAAWA,CAAX,GAAiBA,GAA1C,CAAN;EAIJ,2BAAA,CACIC,CADJ,EACsBC,CADtB,EACwCC,CADxC;qBACwCA,SACtCC,OACIC,YAAYJ,CAAZI,EAAoBH,CAApBG,CADJD,EAEID,IAAqB,UAArBA,GAAgCF,CAAhCE,UAAAA,GAA8CD,CAA9CC,gBAFJC;EAKF,uBAAA,CAA8BhC,CAA9B;EACEgC,SACS,QAALhC,CADJgC,EAEI,+DAFJA;EAQF,iBAAA,CACIb,CADJ,EAC8Be,CAD9B;EAEE,uBAD4BA,SACxBC,MAAMC,OAAND,CAAchB,CAAdgB,CAAJ,EACE,KAAK,IAAId,IAAI,CAAb,EAAgBA,IAAIF,EAAIT,MAAxB,IAAkCW,CAAlC,EACEgB,QAAQlB,EAAIE,CAAJF,CAARkB,EAAgBH,CAAhBG,EAFJ,KAKEH,EAAII,IAAJJ,CAASf,CAATe,EAEF,OAAOA,CAAP;EAGF,uBAAA,CAA8BK,CAA9B;EACE,MAAqB,MAAjBA,EAAM7B,MAAV,EAEE,OAAO,CAAP,CAGF,KADA,IAAI8B,IAAOD,EAAM,CAANA,CAAX,EACSlB,IAAI,CAAb,EAAgBA,IAAIkB,EAAM7B,MAA1B,EAAkCW,GAAlC,EACEmB,KAAQD,EAAMlB,CAANkB,CAARC,CAEF,OAAOA,CAAP;EAGF,uBAAA,CAA8BD,CAA9B;EACE,SAAwB,MAAjBA,EAAM7B,MAAb;EAGF,qBAAA,CAA4B+B,CAA5B,EAA4CC,CAA5C;EACE,MAAID,MAAOC,CAAX,EACE,QAAO,CAAP,CAEF,IAAU,QAAND,CAAM,IAAc,QAANC,CAAlB,EACE,QAAO,CAAP,CAGF,IAAID,EAAG/B,MAAH+B,KAAcC,EAAGhC,MAArB,EACE,QAAO,CAAP,CAEF,KAAK,IAAIW,IAAI,CAAb,EAAgBA,IAAIoB,EAAG/B,MAAvB,EAA+BW,GAA/B,EACE,IAAIoB,EAAGpB,CAAHoB,MAAUC,EAAGrB,CAAHqB,CAAd,EACE,QAAO,CAAP,CAGJ,QAAO,CAAP;EAGF,eAAA,CAAsB1C,CAAtB;EACE,SAAOA,IAAI,CAAJA,IAAU,CAAjB;EAGF,cAAA,CAAqBgB,CAArB;EAEE,MAA0B,QAArBH,KAAa8B,IAAlB,EAEE,OAAQ9B,KAAa8B,IAAb9B,CAAkBG,CAAlBH,CAAR,CAEF,IAAIG,MAAM4B,IAAAA,CAAV,EACE,OAAO,CAAP,CACK,IAAI5B,OAAO4B,IAAAA,CAAX,EACL,QAAQ,CAAR,CAEA,IAAMC,IAAMhC,KAAKiC,GAALjC,CAAS,IAAIG,CAAbH,CAAZ,CACA,QAAQgC,IAAM,MAAMA,IAAM,EAA1B;EAIJ,6BAAA,CAAoCL,CAApC;EACE,OAAK,IAAIxC,IAAIa,KAAKkC,KAALlC,CAAWA,KAAKmC,IAALnC,CAAU2B,CAAV3B,CAAXA,CAAb,EAA0Cb,IAAI,CAA9C,IAAmDA,CAAnD,EACE,IAAIwC,IAAOxC,CAAPwC,IAAa,CAAjB,EACE,QAAQxC,GAAGwC,IAAOxC,EAAlB,CAGJ,QAAQ,GAAGwC,EAAX;EAGF,+BAAA,CAAsCS,CAAtC;EAEE,OADA,IAAMC,IAAkB,IAAIC,WAAJ,CAAgBF,CAAhB,CAAxB,EACS5B,IAAI,CAAb,EAAgBA,IAAI4B,CAApB,IAAyB5B,CAAzB,EACE6B,EAAgB7B,CAAhB6B,IAAqB7B,CAArB6B,CAGF,OADAE,QAAQF,CAARE,GACOF,CAAP;EAGF,kBAAA,CAAyBlD,CAAzB,EAAoCwC,CAApC;EACE,SAAIA,KAAQxC,EAAEU,MAAV8B,GACKxC,CADLwC,GAGGxC,IAAI,IAAIqD,MAAJ,CAAWb,IAAOxC,EAAEU,MAApB,CAHX;EAMF,qBAAA,CACI4C,CADJ,EAC4BC,CAD5B,EAEIC,CAFJ;EAGE,0BAF0BD,cAAW9C;EAAoB,WAAA,CAAA;QAElD,IAAIgD,OAAJ,CAAkB,UAACC,CAAD,EAAUC,CAAV;EACvB,QAAIC,IAAW,CAAf;EAAA,QAEMC,IAAQ;EACZ,UAAIP,GAAJ,EACEI,IADF,KAAA;EAOA,YAAMI,IAAcP,IAFpBK,CAEoBL,CAApB,CAEkB,QAAdC,CAAc,IAAQI,KAAYJ,CAApB,GAChBG,GADgB,GAIlBI,WAAWF,CAAXE,EAAkBD,CAAlBC,CAJkB;;OAZpB,CAmBAF;KApBK,CAAP;EAiCF,gCAAA,CACItB,CADJ,EACqBC,CADrB;EAKE,OAHA,IAAIwB,IAAY,CAAhB,EACIC,KAAe,CADnB,EAGS5C,IAAI,CAAb,EAAgBA,IAAIkB,EAAM7B,MAA1B,IAAoCW,CAApC,EACE,IAAIkB,EAAMlB,CAANkB,KAAY,CAAhB,EACEyB,KAAazB,EAAMlB,CAANkB,CAAbyB,CADF,KAEO,KAAkB,MAAdzB,EAAMlB,CAANkB,CAAJ,EAAqB;EAC1B,SAAqB,MAAjB0B,CAAJ,EACE,MAAMpE,MACF,2DACmBoE,CADnB,cAAA,GAC0C5C,CAFxCxB,CAAN,CAIFoE,IAAc5C,CAAd4C;KANK,MAOA,IAAI1B,EAAMlB,CAANkB,IAAW,CAAf,EACL,MAAM1C,MAAM,kCAAgC0C,EAAMlB,CAANkB,CAAhC,aAAA,GAAmDlB,CAAzDxB,CAAN,CAIJ,KAAqB,MAAjBoE,CAAJ,EAAwB;EACtB,QAAIzB,IAAO,CAAPA,IAAYA,MAASwB,CAAzB,EACE,MAAMnE,MAAM,UAAQ2C,CAAR,uCAAA,GAAiDD,CAAvD1C,CAAN,CAEF,OAAO0C,CAAP;EAGF,OAAkB,MAAdyB,CAAJ,EACE,MAAMnE,MACF,uCAAqC0C,CAArC,gCADE1C,CAAN,CAIF,IAAI2C
,IAAOwB,CAAPxB,IAAqB,CAAzB,EACE,MAAM3C,MACF,0DACO2C,CADP,QAAA,GACiBwB,CAFfnE,CAAN,CAKF,IAAMqE,IAAW3B,EAAM4B,KAAN5B,EAAjB,CAEA,OADA2B,EAASD,CAATC,IAAwB1B,IAAOwB,CAA/BE,EACOA,CAAP;EAIF,sBAAA,CAA6B3B,CAA7B,EAA8C6B,CAA9C;EAKE,OAHA,IAAMF,MAAN,EACMG,MADN,EAEIC,IAAI,CAFR,EAGSjD,IAAI,CAAb,EAAgBA,IAAIkB,EAAM7B,MAA1B,IAAoCW,CAApC,EAAuC;EACrC,QAAY,QAAR+C,CAAJ,EAAkB;EAChB,UAAIA,EAAKE,CAALF,MAAY/C,CAAZ+C,IAA8B,MAAb7B,EAAMlB,CAANkB,CAArB,EACE,MAAM,IAAI1C,KAAJ,CACF,wBAAsBwB,CAAtB,qBAAA,GAA0CkB,EAAMlB,CAANkB,CAA1C,eADE,CAAN,EAGc,QAAX6B,EAAKE,CAALF,CAAW,IAAQA,EAAKE,CAALF,IAAU/C,MAAmB,MAAbkB,EAAMlB,CAANkB,MACtC2B,EAAS5B,IAAT4B,CAAc3B,EAAMlB,CAANkB,CAAd2B,GACAG,EAAS/B,IAAT+B,CAAchD,CAAdgD,IAEED,EAAKE,CAALF,KAAW/C,CAAX+C,IACFE;EAGa,WAAb/B,EAAMlB,CAANkB,CAAa,KACf2B,EAAS5B,IAAT4B,CAAc3B,EAAMlB,CAANkB,CAAd2B,GACAG,EAAS/B,IAAT+B,CAAchD,CAAdgD,CAFe;EAKnB,YAAQH,aAAUG,aAAlB;EAGF,gCAAA,CACIE,CADJ,EACc/B,CADd;EAEE,MAAIgC,IAAS,IAAb,CACA,IAAa,QAATD,CAAS,IAAkB,cAAVA,CAArB,EACEC,IAAS,IAAIC,YAAJ,CAAiBjC,CAAjB,CAATgC,CADF,KAEO,IAAc,YAAVD,CAAJ,EACLC,IAAS,IAAIE,UAAJ,CAAelC,CAAf,CAATgC,CADK,KAEA;EAAA,QAAc,WAAVD,CAAJ,EAGL,MAAM,IAAI1E,KAAJ,CAAU,uBAAqB0E,CAA/B,CAAN,CAFAC,IAAS,IAAIG,UAAJ,CAAenC,CAAf,CAATgC;EAIF,UAAOA,CAAP;EAGF,gCAAA,CACII,CADJ,EAC0BL,CAD1B,EACoCM,CADpC;EAEE,MAAc,cAAVN,CAAJ,EAIA,KAAK,IAAIlD,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,EAAiCW,GAAjC,EACE,IAAIyD,MAAMF,EAAKvD,CAALuD,CAANE,CAAJ,EACE,MAAMjF,MAAM,wBAAsBgF,CAAtB,gBAANhF,CAAN;EAKN,+BAAA,CACI+E,CADJ,EACmCL,CADnC;EAEE,MAAc,cAAVA,CAAJ,EAKA,KAAK,IAAIlD,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,EAAiCW,GAAjC,EACE,IAAIyD,MAAMF,EAAKvD,CAALuD,CAANE,CAAJ,EACE,MAAMjF,MAAM,0CAAwC0E,CAAxC,OAAN1E,CAAN;EASN,yBAAA,CAAgCkF,CAAhC,EAAmDC,CAAnD;EACE,SAAgB,gBAAZA,CAAY,KAGA,cAAZA,CAAY,IAAyB,gBAAZD,OAGb,YAAZC,CAAY,IAAuB,cAAZD,CAAX,IAAgD,gBAAZA,OAGpC,WAAZC,CAAY,IAAsB,WAAZD,EAT1B;EAeF,wBAAA,CACIvE,CADJ,EAC8C+D,CAD9C,EAEIU,CAFJ;EAGE,MAAa,QAATV,CAAS,IAAkB,cAAVA,CAAR,IAAyC,gBAAVA,CAA5C,EACE,OAAO,IAAIE,YAAJ,CAAiBjE,CAAjB,CAAP,CACK,IAAc,YAAV+D,CAAJ,EAIL,OAHIU,KACFC,sBAAsB1E,CAAtB0E,EAAyCX,CAAzCW,CADED,EAGG,IAAIP,UAAJ,CAAelE,CAAf,CAAP,CACK,IAAc,WAAV+D,CAAJ,EAAsB;EAE3B,SADA,IAAMY,IAAO,IAAIR,UAAJ,CAAenE,EAAME,MAArB,CAAb,EACSW,IAAI,CAAb,EAAgBA,IAAI8D,EAAKzE,MAAzB,IAAmCW,CAAnC,EACyC,MAAnCR,KAAKuE,KAALvE,CAAWL,EAAMa,CAANb,CAAXK,CAAmC,KACrCsE,EAAK9D,CAAL8D,IAAU,CAD2B,EAIzC,OAAOA,CAAP;EAEA,SAAM,IAAItF,KAAJ,CAAU,uBAAqB0E,CAA/B,CAAN;EAIJ,sBAAA,CAA6BvE,CAA7B;EAEE,SAAOA,aAAayE,YAAbzE,IAA6BA,aAAa0E,UAA1C1E,IACHA,aAAa2E,UADjB;EAIF,yBAAA,CAAgCJ,CAAhC;EACE,MAAc,cAAVA,CAAU,IAAuB,YAAVA,CAA3B,EACE,OAAO,CAAP,CACK,IAAc,gBAAVA,CAAJ,EACL,OAAO,CAAP,CACK,IAAc,WAAVA,CAAJ,EACL,OAAO,CAAP,CAEA,MAAM,IAAI1E,KAAJ,CAAU,mBAAiB0E,CAA3B,CAAN;EAIJ,oBAAA,CAA2Bc,CAA3B;EACE,YAAUA,KAAKA,EAAEC,WAAPD,IAAsBA,EAAEE,IAAxBF,IAAgCA,EAAEG,MAA5C;EAGF,wBAAA,CAA+BhD,CAA/B,EAA6CiD,CAA7C;EACE,OAAK,IAAIpE,IAAIoE,CAAb,EAAoBpE,IAAImB,CAAxB,IAAgCnB,CAAhC,EACE,IAAImB,IAAOnB,CAAPmB,IAAa,CAAjB,EACE,OAAOnB,CAAP,CAGJ,OAAOmB,CAAP;EAGF,wBAAA,CAA+BD,CAA/B;EACE,MAAMmD,IAAOnD,EAAM7B,MAAnB,CACA,IAAIgF,IAAO,CAAX,EACE,SAAA,CAKF,IAAMC,IAAU,IAAIxD,KAAJ,CAAUuD,IAAO,CAAjB,CAAhB,CACAC,EAAQD,IAAO,CAAfC,IAAoBpD,EAAMmD,IAAO,CAAbnD,CAApBoD,CACA,KAAK,IAAItE,IAAIqE,IAAO,CAApB,EAAuBrE,KAAK,CAA5B,IAAiCA,CAAjC,EACEsE,EAAQtE,CAARsE,IAAaA,EAAQtE,IAAI,CAAZsE,IAAiBpD,EAAMlB,IAAI,CAAVkB,CAA9BoD,CAEF,OAAOA,CAAP;EAGF,sBAAA,CACI3F,CADJ,EACqBuE,CADrB,EAC+BU,CAD/B;EAEE,SAAIW,mBAAmB5F,CAAnB4F,EAAsBrB,CAAtBqB,IACK5F,CADL4F,IAGAzD,MAAMC,OAAND,CAAcnC,CAAdmC,MACFnC,IAAIqC,QAAQrC,CAARqC,CADFF,GAGG0D,eAAe7F,CAAf6F,EAAkBtB,CAAlBsB,EAAyBZ,CAAzBY,CANHD,CAAJ;EASF,4BAAA,CACI5F,CADJ,EACqBuE,CADrB;EAEE,SAAQ
vE,aAAayE,YAAbzE,IAAuC,cAAVuE,CAA7BvE,IACHA,aAAa0E,UAAb1E,IAAqC,YAAVuE,CADxBvE,IAEHA,aAAa2E,UAAb3E,IAAqC,WAAVuE,CAFhC;EAKF,4BAAA,CACI/B,CADJ,EACkB+B,CADlB;EAGE,OADA,IAAM/D,IAAQsF,oBAAoBtD,CAApBsD,EAA0BvB,CAA1BuB,CAAd,EACSzE,IAAI,CAAb,EAAgBA,IAAIb,EAAME,MAA1B,EAAkCW,GAAlC,EACEb,EAAMa,CAANb,IAAW,CAAXA,CAEF,OAAOA,CAAP;EAGF,6BAAA,CACIgC,CADJ,EACkB+B,CADlB;EAEE,MAAa,QAATA,CAAS,IAAkB,cAAVA,CAAR,IAAyC,gBAAVA,CAA5C,EACE,OAAO,IAAIE,YAAJ,CAAiBjC,CAAjB,CAAP,CACK,IAAc,YAAV+B,CAAJ,EACL,OAAO,IAAIG,UAAJ,CAAelC,CAAf,CAAP,CACK,IAAc,WAAV+B,CAAJ,EACL,OAAO,IAAII,UAAJ,CAAenC,CAAf,CAAP,CAEA,MAAM,IAAI3C,KAAJ,CAAU,uBAAqB0E,CAA/B,CAAN;EAQJ,aAAA;EACE,MAA2B,sBAAhBwB,WAAX,EACE,OAAOA,YAAYC,GAAZD,EAAP,CACK,IAAuB,sBAAZE,OAAX,EAAoC;EACzC,QAAMC,IAAOD,QAAQE,MAARF,EAAb,CACA,OAAiB,MAAVC,EAAK,CAALA,CAAU,GAAOA,EAAK,CAALA,IAAU,GAAlC;EAEA,SAAM,IAAIrG,KAAJ,CACF,4FADE,CAAN;;;EC3cF,YAAA,CAAoBuG,CAApB,EAAwDC,CAAxD;EAAoBC,qBAAAA,GAAAF,CAAAE,EAAoCA,WAAAA,GAAAD,CAApCC,EACJ,QAAVD,CAAU,KACZC,KAAKD,MAALC,GAAc,IAAIC,MAAJ,EADF,CADID;EAgCtB,UA1BEE,WAAAA,cAAAA,GAAA,UAAyC3B,CAAzC,EAAuDQ,CAAvD;EAAA,QAEM7D,CAFN;EAAA,gBAAA;EAAA,QAMQiF,IAAQH,KAAKF,YAALE,CAAkBJ,IAAlBI,CAHc;EAC1B9E,UAAS6D,GAAT7D;OAEY8E,CANhB,CAwBE,QAfInE,MAAMC,OAAND,CAAcX,CAAdW,IAAwBX,CAAxBW,IAAkCX,IAC9BkF,QAAQ,UAAAnF,CAAA;EACd,UAAMqD,IAAOrD,EAAEoF,QAAFpF,EAAb,CACAqF,uBAA4BhC,CAA5BgC,EAAkCrF,EAAEgD,KAApCqC,EAA2C/B,CAA3C+B,GAEAH,EAAMI,IAANJ,CAAW,UAAAK,CAAA;EACT,YAAIC,IAAY,EAAhB,CACkC,QAA9BD,EAAOE,mBAAuB,KAChCD,IAAYD,EAAOE,mBAAPF,EADoB,GAIlCG,EAAKZ,MAALY,CAAYC,gBAAZD,CAA6BpC,CAA7BoC,EAAmC1F,CAAnC0F,EAAsCrC,CAAtCqC,EAA4CH,EAAOK,QAAnDF,EAA6DF,CAA7DE,CAJkC;SAFpCR,CAFAG;UAYKpF,CAAP;KAxBFgF,GA0BF;;;EAEA,YAAA,YACED,WAAAA,iBAAAA,GAAA,UACI1B,CADJ,EACkBrD,CADlB,EACkCoD,CADlC,EACoDwC,CADpD,EAEIL,CAFJ;EAGE,QAAMb,IAAOmB,SAAiBD,QAAjBC,EAA6B,CAA7BA,CAAb;EAAA,QACMC,IAAaD,SAAcxC,CAAdwC,EAAoB,EAApBA,CADnB;EAAA,QAEM3B,IAAOlE,EAAOkE,IAFpB;EAAA,QAGMlD,IAAOhB,EAAOgB,IAHpB;EAAA,QAIMD,IAAQ8E,SAAc7F,EAAOe,KAAPf,CAAa+F,QAAb/F,EAAd6F,EAAuC,EAAvCA,CAJd,CAMAG,QAAQC,GAARD,CACI,OAAKF,CAAL,SAAA,GAAsBpB,CAAtB,SAAA,GAAiCR,CAAjC,OAAA,GAA0CnD,CAA1C,SAAA,GAAsDC,CAAtD,SAAA,GACIuE,CAFRS,EAGI,kBAHJA,EAGwB,WAHxBA,EAGqC,YAHrCA,EAGmD,eAHnDA,EAII,cAJJA;KATFjB,GAeF;;MCpDMmB,wBAAwB;MAExBC,6BAA6B;MAE7BC,wBAAwB,EAE9B,uBAAA,CACIhD,CADJ,EACsBrC,CADtB,EACuCgC,CADvC,EACwDsD,CADxD;EAEE,MAAMlC,IAAUmC,eAAevF,CAAfuF,CAAhB;EAAA,MACMC,IAAYC,wBAAwBpD,CAAxBoD,EAA8BzF,CAA9ByF,EAAqCzD,CAArCyD,EAA4CrC,CAA5CqC,CADlB;EAAA,MAEMtC,IAAOnD,EAAM7B,MAFnB;EAAA,MAGMuH,IAAYC,kBAAkBtD,CAAlBsD,EAAwB3F,CAAxB2F,EAA+B3D,CAA/B2D,EAAsCvC,CAAtCuC,EAA+CH,CAA/CG,CAHlB;EAAA,MAIMC,KAAS,SAJf,CAYA,OAPIN,MACFM,EAAM7F,IAAN6F,CAAW,cAAY5D,CAAvB4D,GACAA,EAAM7F,IAAN6F,CAAW,aAAWzC,CAAtByC,CADAA,EAEAA,EAAM7F,IAAN6F,CAAW,eAAa5F,CAAb,MAAX4F,CAFAA,EAGAA,EAAM7F,IAAN6F,CAAW,WAAXA,CAJEN,GAMJM,EAAM7F,IAAN6F,CAAWF,EAAUG,GAAVH,CAAc,UAAAI,CAAA;EAAK,WAAA,SAASA,CAAT;KAAnBJ,EAA+BK,IAA/BL,CAAoC,IAApCA,CAAXE,CANIN,EAOGM,EAAMG,IAANH,CAAW,IAAXA,CAAP;EAGF,iCAAA,CACIvD,CADJ,EACsBrC,CADtB,EACuCgC,CADvC,EAEIoB,CAFJ;EAGE,MAAM1C,IAAIsF,cAAchG,CAAdgG,CAAV;EAAA,MACMC,IAAU7C,EAAQA,EAAQjF,MAARiF,GAAiB,CAAzBA,CADhB;EAAA,MAEMoC,IAAY,IAAI5F,KAAJ,CAAUqG,CAAV,EAAmBC,IAAnB,CAAwB,CAAxB,CAFlB;EAAA,MAGM/C,IAAOnD,EAAM7B,MAHnB;EAAA,MAIMgI,IACQ,gBAAVnE,CAAU,GAAcoE,oBAAoB/D,CAApB+D,CAAd,GAA0C/D,CALxD,CAOA,IAAIc,IAAO,CAAX,EACE,KAAK,IAAIkD,IAAM,CAAf,EAAkBA,IAAM3F,IAAIuF,CAA5B,EAAqCI,GAArC,EAEE,KADA,IAAMC,IAASD,IAAMJ,CAArB,EACSlE,IAAI,CAAb,EAAgBA,IAAIkE,CAApB,EAA6BlE,GAA7B,EACEyD,EAAUzD,CAAVyD,IAAelH,KAAKI,GAALJ,CACXkH,EAAUzD,CAAVyD,CADWlH,EACGiI,YAAYJ,EAAeG,IAASvE,CAAxBoE,CAAZI,EAAwC,CAAxCA,EAA2CpI,MAD9CG,CAAfkH,CAKN,OA
AOA,CAAP;EAGF,qBAAA,CAAqB7G,CAArB,EAAmD6H,CAAnD;EASE,SAAOC,SAPH7G,MAAMC,OAAND,CAAcjB,CAAdiB,IACU8G,WAAW/H,EAAI,CAAJA,EAAOgI,OAAPhI,CAAe0G,qBAAf1G,CAAX+H,SAAAA,GACLA,WAAW/H,EAAI,CAAJA,EAAOgI,OAAPhI,CAAe0G,qBAAf1G,CAAX+H,CADKA,MADV9G,GAIO8G,WAAW/H,EAAIgI,OAAJhI,CAAY0G,qBAAZ1G,CAAX+H,EAA+C1B,QAA/C0B,EAGJD,EAAiBD,CAAjBC,CAAP;EAGF,2BAAA,CACIpE,CADJ,EACsBrC,CADtB,EACuCgC,CADvC,EACwDoB,CADxD,EAEIoC,CAFJ,EAEyBoB,CAFzB;qBAEyBA,QACvB,IAAMC,IAA8B,gBAAV7E,CAAU,GAAc,CAAd,GAAkB,CAAtD;EAAA,MAEM/B,IAAOD,EAAM,CAANA,CAFb;EAAA,MAGMmD,IAAOnD,EAAM7B,MAHnB,CAIA,IAAa,MAATgF,CAAJ,EACE,OAAc,gBAAVnB,CAAU,IAEJuE,YADaH,oBAAoB/D,CAApB+D,EACY,CADZA,CACbG,EAA6B,CAA7BA,EAFI,IAINlE,EAAK,CAALA,EAAQ2C,QAAR3C,GAJR,CAOF,IAAa,MAATc,CAAJ,EAAgB;EACd,QAAIlD,IAAOkF,qBAAX,EAAkC;EAChC,UAAM2B,IAAgB1B,6BAA6ByB,CAAnD;EAAA,UAEIE,IACAnH,MAAMoH,IAANpH,CAAWyC,EAAK4E,QAAL5E,CAAc,CAAdA,EAAiByE,CAAjBzE,CAAXzC,CAHJ;EAAA,UAIIsH,IAA2CtH,MAAMoH,IAANpH,CAAWyC,EAAK4E,QAAL5E,CACtDpC,IAAOmF,6BAA6ByB,CADkBxE,EACCpC,CADDoC,CAAXzC,CAJ/C,CAWA,OALc,gBAAVoC,CAAU,KACZ+E,IAAYX,oBAAoBW,CAApBX,CAAZW,EACAG,IAAWd,oBAAoBc,CAApBd,CAFC,IAMZ,MAAMW,EAAUlB,GAAVkB,CAAc,UAACtI,CAAD,EAAIK,CAAJ;EAAU,eAAAyH,YAAY9H,CAAZ8H,EAAef,EAAU1G,CAAV0G,CAAfe,CAAA;SAAxBQ,EAAsDhB,IAAtDgB,CAA2D,IAA3DA,CAAN,GACA,SADA,GAEAG,EACKrB,GADLqB,CAEQ,UAACzI,CAAD,EAAIK,CAAJ;EAAU,eAAAyH,YACN9H,CADM8H,EACHf,EAAUvF,IAAOmF,0BAAPnF,GAAoCnB,CAA9C0G,CADGe,CAAA;SAFlBW,EAIKnB,IAJLmB,CAIU,IAJVA,CAFA,GAOA,IARF;EAcF,aACE,OAHY,gBAAVlF,CAAU,GAAcoE,oBAAoB/D,CAApB+D,CAAd,GAA0CxG,MAAMoH,IAANpH,CAAWyC,CAAXzC,GAGpCiG,IAAI,UAACpH,CAAD,EAAIK,CAAJ;EAAU,aAAAyH,YAAY9H,CAAZ8H,EAAef,EAAU1G,CAAV0G,CAAfe,CAAA;SAA8BR,KAAK,KAAnE,GACA,IAFF;EAOF,OAAMoB,IAAWnH,EAAM4B,KAAN5B,CAAY,CAAZA,CAAjB;EAAA,MACMoH,IAAahE,EAAQxB,KAARwB,CAAc,CAAdA,CADnB;EAAA,MAEMiE,IAASjE,EAAQ,CAARA,IAAayD,CAF5B;EAAA,MAGMjB,MAHN,CAIA,IAAI3F,IAAOkF,qBAAX,EAAkC;EAChC,SAAK,IAAIrG,IAAI,CAAb,EAAgBA,IAAIsG,0BAApB,EAAgDtG,GAAhD,EAAqD;EACnD,UACMwI,KADApE,IAAQpE,IAAIuI,KACEA,CADpB,CAEAzB,EAAM7F,IAAN6F,MAAAA,CAAAA,CAAAA,EAAcD,kBACVtD,EAAK4E,QAAL5E,CAAca,CAAdb,EAAqBiF,CAArBjF,CADUsD,EACiBwB,CADjBxB,EAC2B3D,CAD3B2D,EACkCyB,CADlCzB,EAC8CH,CAD9CG,GAEV,CAFUA,CAAdC;EAIFA,OAAM7F,IAAN6F,CAAW,KAAXA,EACA,KAAS9G,IAAImB,IAAOmF,0BAApB,EAAgDtG,IAAImB,CAApD,EAA0DnB,GAA1D,EAA+D;EAEvDwI,WADApE,IAAQpE,IAAIuI,KACEA,CAAdC,CACN1B,EAAM7F,IAAN6F,MAAAA,CAAAA,CAAAA,EAAcD,kBACVtD,EAAK4E,QAAL5E,CAAca,CAAdb,EAAqBiF,CAArBjF,CADUsD,EACiBwB,CADjBxB,EAC2B3D,CAD3B2D,EACkCyB,CADlCzB,EAC8CH,CAD9CG,EAEV7G,MAAMmB,IAAO,CAFH0F,CAAdC;;KAZJ,MAiBE,KAAS9G,IAAI,CAAb,EAAgBA,IAAImB,CAApB,EAA0BnB,GAA1B,EAA+B;EAC7B,QAAMoE,CAAN,CACMoE,KADApE,IAAQpE,IAAIuI,KACEA,CAAdC,CACN1B,EAAM7F,IAAN6F,MAAAA,CAAAA,CAAAA,EAAcD,kBACVtD,EAAK4E,QAAL5E,CAAca,CAAdb,EAAqBiF,CAArBjF,CADUsD,EACiBwB,CADjBxB,EAC2B3D,CAD3B2D,EACkCyB,CADlCzB,EAC8CH,CAD9CG,EAEV7G,MAAMmB,IAAO,CAFH0F,CAAdC;EAKJ,OAAM2B,IAAe,MAATpE,CAAS,GAAI,GAAJ,GAAU,EAA/B,CACAyC,EAAM,CAANA,IAAW,MAAMA,EAAM,CAANA,CAAN,GAAiB2B,CAA5B3B,CACA,KAAS9G,IAAI,CAAb,EAAgBA,IAAI8G,EAAMzH,MAANyH,GAAe,CAAnC,EAAsC9G,GAAtC,EACE8G,EAAM9G,CAAN8G,IAAW,MAAMA,EAAM9G,CAAN8G,CAAN,GAAiB2B,CAA5B3B,CAEF,IAAI4B,IAAa,KAAjB,CACA,KAAS1I,IAAI,CAAb,EAAgBA,IAAIqE,CAApB,EAA0BrE,GAA1B,EACE0I,KAAc,IAAdA,CAIF,OAFA5B,EAAMA,EAAMzH,MAANyH,GAAe,CAArBA,IACI,MAAMA,EAAMA,EAAMzH,MAANyH,GAAe,CAArBA,CAAN,GAAgC,GAAhC,IAAuCgB,IAAS,EAATA,GAAcY,CAArD,CADJ5B,EAEOA,CAAP;EAGF,6BAAA,CAA6BvD,CAA7B;EAGE,OADA,IAAMoF,MAAN,EACS3I,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,EAAiCW,KAAK,CAAtC,EACE2I,EAAc1H,IAAd0H,EAAoBpF,EAAKvD,CAALuD,GAASA,EAAKvD,IAAI,CAATuD,EAA7BoF,EAEF,OAAOA,CAAP;;ECvIA,YAAA,CAAYzH,CAAZ,EAAuCgC,CAAvC,EAAwDC,CAAxD;EAIE,QAJqC8B,UAAAA,G
AAA/B,CAAA+B,EACrCA,KAAK/D,KAAL+D,GAAa/D,EAAM4B,KAAN5B,EADwB+D,EAErCA,KAAK9D,IAAL8D,GAAY2D,cAAmB1H,CAAnB0H,CAFyB3D,EAIvB,QAAV9B,CAAJ,EAAoB;EAClB,UAAMvB,IAAIuB,EAAO9D,MAAjB,CACAwJ,OACIjH,MAAMqD,KAAK9D,IADf0H,EAEI,uBAAqBjH,CAArB,sDAAA,GAC8BqD,KAAK9D,IADnC,OAFJ0H;EAKF,SAAc,gBAAV3F,CAAJ,EACE,MAAM,IAAI1E,KAAJ,CACF,4JADE,CAAN,CAKFyG,KAAK9B,MAAL8B,GAAc9B,KACV2F,uBAA4B5F,CAA5B4F,EAAmCF,cAAmB3D,KAAK/D,KAAxB0H,CAAnCE,CADJ7D,EAEAA,KAAKX,OAALW,GAAewB,eAAevF,CAAfuF,CAFfxB;EAiFJ,UArEE8D,WAAAA,IAAAA,GAAA,UAAIC,CAAJ;WAAmB,UAAA,SAAAC,sBAAAA,KAAAC,OAAAA,gBAAAA,CACG,MAAhBA,EAAK7J,MAAW,KAClB6J,KAAQ,EADU,GAGpBL,OACIK,EAAK7J,MAAL6J,KAAgBjE,KAAKZ,IADzBwE,EAEI,yCAAuCK,EAAK7J,MAA5C,4BAAA,GACuB4F,KAAKZ,IAD5B,MAFJwE,CAHoB,CAQpB,IAAMtJ,IAAQ0F,KAAKkE,UAALlE,CAAgBiE,CAAhBjE,CAAd,CACAA,KAAK9B,MAAL8B,CAAY1F,CAAZ0F,IAAqB+D,CAArB/D;KAVF8D,EAmBAA,WAAAA,IAAAA,GAAA;WAAI,UAAA,SAAAE,sBAAAA,KAAAC,GAAAA,gBAAAA,CACkB,MAAhBA,EAAK7J,MAAW,KAClB6J,KAAQ,EADU,EAIpB,KADA,IAAI3J,IAAQ2J,EAAKA,EAAK7J,MAAL6J,GAAc,CAAnBA,CAAZ,EACSlJ,IAAI,CAAb,EAAgBA,IAAIkJ,EAAK7J,MAAL6J,GAAc,CAAlC,IAAuClJ,CAAvC,EACET,KAAS0F,KAAKX,OAALW,CAAajF,CAAbiF,IAAkBiE,EAAKlJ,CAALkJ,CAA3B3J,CAEF,OAAO0F,KAAK9B,MAAL8B,CAAY1F,CAAZ0F,CAAP;KA3BF8D,EA8BAA,WAAAA,WAAAA,GAAA,UAAWG,CAAX;EACE,QAAkB,MAAdjE,KAAKZ,IAAT,EACE,OAAO,CAAP,CACK,IAAkB,MAAdY,KAAKZ,IAAT,EACL,OAAO6E,EAAK,CAALA,CAAP,CAGF,KADA,IAAI3J,IAAQ2J,EAAKA,EAAK7J,MAAL6J,GAAc,CAAnBA,CAAZ,EACSlJ,IAAI,CAAb,EAAgBA,IAAIkJ,EAAK7J,MAAL6J,GAAc,CAAlC,IAAuClJ,CAAvC,EACET,KAAS0F,KAAKX,OAALW,CAAajF,CAAbiF,IAAkBiE,EAAKlJ,CAALkJ,CAA3B3J,CAEF,OAAOA,CAAP;KAxCFwJ,EA2CAA,WAAAA,WAAAA,GAAA,UAAWxJ,CAAX;EACE,QAAkB,MAAd0F,KAAKZ,IAAT,EACE,SAAA,CACK,IAAkB,MAAdY,KAAKZ,IAAT,EACL,QAAQ9E,EAAR,CAGF,KADA,IAAM2J,IAAiB,IAAIpI,KAAJ,CAAUmE,KAAK/D,KAAL+D,CAAW5F,MAArB,CAAvB,EACSW,IAAI,CAAb,EAAgBA,IAAIkJ,EAAK7J,MAAL6J,GAAc,CAAlC,IAAuClJ,CAAvC,EACEkJ,EAAKlJ,CAALkJ,IAAU1J,KAAKkC,KAALlC,CAAWD,IAAQ0F,KAAKX,OAALW,CAAajF,CAAbiF,CAAnBzF,CAAV0J,EACA3J,KAAS2J,EAAKlJ,CAALkJ,IAAUjE,KAAKX,OAALW,CAAajF,CAAbiF,CADnBiE,CAIF,OADAA,EAAKA,EAAK7J,MAAL6J,GAAc,CAAnBA,IAAwB3J,CAAxB2J,EACOA,CAAP;KAvDFH,EA0DAK,qBAAAA,CAAIL,WAAJK,QAAAA,SAAA;EACE,aAAOnE,KAAK/D,KAAL+D,CAAW5F,MAAlB;2CADF+J,CA1DAL,EAkEAA,WAAAA,SAAAA,GAAA;EACE,WAAOM,OAAOC,IAAPD,CAAYpE,KAAK/D,KAAjBmI,IAAyBlG,QAAQ8B,KAAK9B,QAAtCkG,EAA+CpE,KAAK/B,KAApDmG,CAAP;KAnEFN,GAqEF;;MA2MIQ,YAAiC;MAEjCC,YAAuB,KAO3B,yBAAA,CAAiCC,CAAjC;EACEF,cAAYE,CAAZF;EAOF,sBAAA,CAA6BG,CAA7B;EACEF,cAAYE,CAAZF;EAoBF;EAwBE,YAAA,CACItI,CADJ,EACwBgC,CADxB,EACyCC,CADzC,EAEIwG,CAFJ;EA6KQ1E,2BAAAA,IAAqB,CAArBA,EA1KNA,KAAK/D,KAAL+D,GAAa/D,EAAM4B,KAAN5B,EA0KP+D,EAzKNA,KAAK/B,KAAL+B,GAAa/B,KAAS,SAyKhB+B,EAxKNA,KAAK9D,IAAL8D,GAAY2D,cAAmB1H,CAAnB0H,CAwKN3D,EAvKQ,QAAV9B,CAAU,IACZ0F,OACI5D,KAAK9D,IAAL8D,KAAc9B,EAAO9D,MADzBwJ,EAEI,mCAAiC3H,CAAjC,kBAAA,GACO+D,KAAK/B,KADZ,8BAAA,GAEO+B,KAAK9D,IAFZ,qBAAA,GAEmCgC,EAAO9D,MAJ9CwJ,CAsKI5D,EA/JNA,KAAKX,OAALW,GAAewB,eAAevF,CAAfuF,CA+JTxB,EA9JNA,KAAK0E,MAAL1E,GAAwB,QAAV0E,CAAU,GAAOA,CAAP,KA8JlB1E,EA7JNA,KAAK2E,EAAL3E,GAAUsE,YAAYM,YAAZN,EA6JJtE,EA5JNA,KAAK6E,QAAL7E,GAAiBA,KAAKZ,IAALY,GAAY,CAAZA,GAAgBA,KAAKZ,IAALY,CAAUiB,QAAVjB,EAAhBA,GAAuC,QA4JlDA,EA3JNsE,YAAYQ,cAAZR,CAA2BtE,IAA3BsE,CA2JMtE,EA1JQ,QAAV9B,CAAU,IACZoG,YAAYS,KAAZT,CAAkBtE,KAAK0E,MAAvBJ,EAA+BpG,CAA/BoG,CAyJItE;EA8rBV,UA/0BSoE,MAAAA,GAAP,UAEInI,CAFJ,EAEwB+I,CAFxB,EAE0C/G,CAF1C;EAGE,WAAO,IAAImG,CAAJ,CAAWnI,CAAX,EAAkBgC,CAAlB,EAAyB+G,EAAK9G,MAA9B,EAAsC8G,EAAKN,MAA3C,CAAP;KAHKN,EAQPA,WAAAA,QAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOA,KAAKkF,IAALlF,EAAP;KAVKoE,EAePA,WAAAA,SAAAA,GAAA;EAGE,WAFApE,KAAKiF,eAALjF,IACA4D,OAA0B,MAAd5D,KAAK9D,IAAjB0H,EAA6B,qCAA7BA,CADA5D,EAEOA,KAAKmF
,OAALnF,GAAAA,CAAP;KAlBKoE,EAuBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOA,KAAKmF,OAALnF,EAAuBA,KAAK9D,KAA5B8D,CAAP;KAzBKoE,EAmCPA,WAAAA,KAAAA,GAAA,UAAKgB,CAAL,EAAmBC,CAAnB;EAEE,WADArF,KAAKiF,eAALjF,IACOA,KAAKmF,OAALnF,EAAuBoF,GAAMC,EAA7BrF,CAAP;KArCKoE,EAgDPA,WAAAA,KAAAA,GAAA,UAAKgB,CAAL,EAAmBC,CAAnB,EAAoCvN,CAApC;EAEE,WADAkI,KAAKiF,eAALjF,IACOA,KAAKmF,OAALnF,EAAuBoF,GAAMC,GAASvN,EAAtCkI,CAAP;KAlDKoE,EA8DPA,WAAAA,KAAAA,GAAA,UAAKgB,CAAL,EAAmBC,CAAnB,EAAoCvN,CAApC,EAAmDwN,CAAnD;EAEE,WADAtF,KAAKiF,eAALjF,IACOA,KAAKmF,OAALnF,EAAuBoF,GAAMC,GAASvN,GAAOwN,EAA7CtF,CAAP;KAhEKoE,EAyEPA,WAAAA,OAAAA,GAAA,UAAgCnG,CAAhC;EAEE,WADA+B,KAAKiF,eAALjF,IACOuE,UAAUgB,IAAVhB,CAAevE,IAAfuE,EAAqBtG,CAArBsG,CAAP;KA3EKH,EA8EPD,qBAAAA,CAAIC,WAAJD,QAAAA,SAAA;EACE,aAAOnE,KAAK/D,KAAL+D,CAAW5F,MAAlB;2CADF+J,CA9EOC,EAyFPA,WAAAA,IAAAA,GAAA;WAAI,UAAA,SAAAJ,sBAAAA,KAAAC,GAAAA,gBAAAA,CACFL,OACIK,EAAK7J,MAAL6J,KAAgBjE,KAAKZ,IADzBwE,EAEI,kEAFJA,GAGAA,OACmB,gBAAf5D,KAAK/B,KADT2F,EAEI,0DAFJA,CAHAA,EAMA5D,KAAKiF,eAALjF,EANA4D,EAOoB,MAAhBK,EAAK7J,MAAW,KAClB6J,KAAQ,EADU,CAPpBL,CAWA,KADA,IAAItJ,IAAQ2J,EAAKA,EAAK7J,MAAL6J,GAAc,CAAnBA,CAAZ,EACSlJ,IAAI,CAAb,EAAgBA,IAAIkJ,EAAK7J,MAAL6J,GAAc,CAAlC,IAAuClJ,CAAvC,EACET,KAAS0F,KAAKX,OAALW,CAAajF,CAAbiF,IAAkBiE,EAAKlJ,CAALkJ,CAA3B3J,CAEF,OAAO0F,KAAKK,QAALL,GAAgB1F,CAAhB0F,CAAP;KAxGKoE,EA6GPA,WAAAA,OAAAA,GAAA;EACE,WAAOG,UAAUiB,MAAVjB,CAAiBvE,KAAK/D,KAAtBsI,EAA6BvE,KAAK/B,KAAlCsG,EAAyCvE,KAAKK,QAALL,EAAzCuE,CAAP;KA9GKH,EAsHDA,WAAAA,KAAAA,GAAN;;;EAEE,eADApE,KAAKiF,eAALjF,QACOsE,YAAYmB,IAAZnB,CAAiBtE,KAAK0E,MAAtBJ,EAAP;;;KAxHKF,EAgIPA,WAAAA,SAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOsE,YAAYoB,QAAZpB,CAAqBtE,KAAK0E,MAA1BJ,CAAP;KAlIKF,EAyIPA,WAAAA,QAAAA,GAAA;EACMpE,SAAK2F,UAAL3F,KAGJsE,YAAYsB,aAAZtB,CAA0BtE,IAA1BsE,GACAtE,KAAK6F,kBAAL7F,IAA0B,CAJtBA;KA1ICoE,EAkJPD,qBAAAA,CAAIC,WAAJD,cAAAA,SAAA;EACE,aAAOnE,KAAK6F,kBAAZ;2CADF1B,CAlJOC,EAsJCA,WAAAA,gBAAAA,GAAR;EACE,QAAIpE,KAAK2F,UAAT,EACE,MAAM,IAAIpM,KAAJ,CAAU,qBAAV,CAAN;KAxJG6K,EA8JPA,WAAAA,QAAAA,GAAA;EACE,WAAOpE,KAAK8F,MAAL9F,CAAY,SAAZA,CAAP;KA/JKoE,EAoKPA,WAAAA,MAAAA,GAAA;EACE,WAAOpE,KAAK8F,MAAL9F,CAAY,OAAZA,CAAP;KArKKoE,EA0KPA,WAAAA,OAAAA,GAAA;EACE,WAAOpE,KAAK8F,MAAL9F,CAAY,MAAZA,CAAP;KA3KKoE,EAqLPA,WAAAA,MAAAA,GAAA,UAAM7C,CAAN;EACE,4BADIA,SACGgD,UAAUwB,KAAVxB,CAAgBvE,IAAhBuE,EAAsBhD,CAAtBgD,CAAP;KAtLKH,EAgMPA,WAAAA,QAAAA,GAAA,UAAyBxG,CAAzB;EAEE,WADAoC,KAAKiF,eAALjF,IACOuE,UAAUY,OAAVZ,CAAkBvE,IAAlBuE,EAAwB3G,CAAxB2G,CAAP;KAlMKH,EA2MPA,WAAAA,UAAAA,GAAA,UAA4B1J,CAA5B;EAEE,WADAsF,KAAKiF,eAALjF,IACOA,KAAKmF,OAALnF,CAAatF,EAAEuB,KAAf+D,CAAP;KA7MKoE,EAwNPA,WAAAA,WAAAA,GAAA,UAA4BtG,CAA5B;EACE,4BAD0BA,QACnByG,UAAUyB,UAAVzB,CAAqBvE,IAArBuE,EAA2BzG,CAA3ByG,CAAP;KAzNKH,EAwOPA,WAAAA,OAAAA,GAAA,UAAyBtG,CAAzB,EAAmCmI,CAAnC,EAAsDC,CAAtD;EACE,4BADuBpI,yBAAUmI,0BAAmBC,SAC7C3B,UAAU4B,MAAV5B,CAAiBvE,IAAjBuE,EAAuBzG,CAAvByG,EAA6B0B,CAA7B1B,EAAwC2B,CAAxC3B,CAAP;KAzOKH,EAqPPA,WAAAA,QAAAA,GAAA,UAA0BtG,CAA1B;EAEE,WADAkC,KAAKiF,eAALjF,IACOuE,UAAU6B,OAAV7B,CAAkBvE,IAAlBuE,EAAwBzG,CAAxByG,CAAP;KAvPKH,EA4PPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU8B,KAAV9B,CAAgBvE,IAAhBuE,CAAP;KA9PKH,EAmQPA,WAAAA,SAAAA,GAAA,UAAS7C,CAAT;EAEE,4BAFOA,SAEA+E,eADMtG,KAAKK,QAALL,EACNsG,EAAqBtG,KAAK/D,KAA1BqK,EAAiCtG,KAAK/B,KAAtCqI,EAA6C/E,CAA7C+E,CAAP;KArQKlC,EA2QPA,WAAAA,KAAAA,GAAA,UAA8BmC,CAA9B;EAEE,WADAvG,KAAKiF,eAALjF,IACOuE,UAAUiC,IAAVjC,CAAevE,IAAfuE,EAAqBgC,CAArBhC,CAAP;KA7QKH,EAgRPA,WAAAA,OAAAA,GAAA,UAAgCqC,CAAhC,EAAmD3I,CAAnD;EAEE,4BAFiDA,QACjDkC,KAAKiF,eAALjF,IACOuE,UAAUmC,MAAVnC,CAAiBvE,IAAjBuE,EAAuBkC,CAAvBlC,EAAgCzG,CAAhCyG,CAAP;KAlRKH,EAqRPA,WAAAA,OAAAA,GAAA,UACa
pJ,CADb,EACmB2L,CADnB,EACuCC,CADvC;EAGE,4BAFiBD,0BAAoBC,SACrC5G,KAAKiF,eAALjF,IACOuE,UAAUsC,MAAVtC,CAAiBvE,IAAjBuE,EAAuBvJ,CAAvBuJ,EAA0BoC,CAA1BpC,EAAsCqC,CAAtCrC,CAAP;KAxRKH,EA0RPA,WAAAA,IAAAA,GAAA,UAAIpJ,CAAJ;EAEE,WADAgF,KAAKiF,eAALjF,IACOuE,UAAUuC,GAAVvC,CAAcvE,IAAduE,EAAoBvJ,CAApBuJ,CAAP;KA5RKH,EA8RPA,WAAAA,KAAAA,GAAA,UACI2C,CADJ,EACiDjJ,CADjD,EAEIkJ,CAFJ;EAIE,4BAHED,mCAA6CjJ,4BAC7CkJ,SACFhH,KAAKiF,eAALjF,IACOuE,UAAU0C,IAAV1C,CAAevE,IAAfuE,EAAqBwC,CAArBxC,EAA0BzG,CAA1ByG,EAAgCyC,CAAhCzC,CAAP;KAlSKH,EAoSPA,WAAAA,MAAAA,GAAA,UACa8C,CADb,EACqChL,CADrC;EAGE,WADA8D,KAAKiF,eAALjF,IACOuE,UAAU1G,KAAV0G,CAAgBvE,IAAhBuE,EAAsB2C,CAAtB3C,EAA6BrI,CAA7BqI,CAAP;KAvSKH,EAySPA,WAAAA,QAAAA,GAAA,UAAmCtG,CAAnC;EAEE,WADAkC,KAAKiF,eAALjF,IACOuE,UAAU2B,OAAV3B,CAAkBvE,IAAlBuE,EAAwBzG,CAAxByG,CAAP;KA3SKH,EA6SPA,WAAAA,OAAAA,GAAA,UAAkC1J,CAAlC,EAAwCoD,CAAxC;EAEE,4BAFsCA,QACtCkC,KAAKiF,eAALjF,IACOuE,UAAU4C,MAAV5C,EAAkBvE,MAAMtF,EAAxB6J,EAA4BzG,CAA5ByG,CAAP;KA/SKH,EAiTPA,WAAAA,MAAAA,GAAA,UAAiCgD,CAAjC,EAAmEtJ,CAAnE;EAGE,4BAHiEA,QAEjEkC,KAAKiF,eAALjF,IACOuE,UAAU8C,KAAV9C,CAAgBvE,IAAhBuE,EAAsB6C,CAAtB7C,EAAuCzG,CAAvCyG,CAAP;KApTKH,EAsTPA,WAAAA,MAAAA,GAAA,UAAM1J,CAAN,EAAiBoD,CAAjB;EACE,4BADeA,QACRyG,UAAU+C,KAAV/C,EAAiBvE,MAAMtF,EAAvB6J,EAA2BzG,CAA3ByG,CAAP;KAvTKH,EAyTPA,WAAAA,QAAAA,GAAA,UAAQ1J,CAAR,EAAmBoD,CAAnB;EACE,4BADiBA,QACVyG,UAAUgD,OAAVhD,CAAkBvE,IAAlBuE,EAAwBzG,CAAxByG,CAAP;KA1TKH,EA4TPA,WAAAA,IAAAA,GAAA,UACaoD,CADb,EACgDC,CADhD;EAEE,4BAD8CA,QACvClD,UAAU9B,GAAV8B,CAAcvE,IAAduE,EAAoBiD,CAApBjD,EAA8BkD,CAA9BlD,CAAP;KA9TKH,EAgUPA,WAAAA,mBAAAA,GAAA,UACIsD,CADJ,EAC8BC,CAD9B,EAEIC,CAFJ,EAE4BC,CAF5B,EAGItF,CAHJ;EAKE,4BAHEqF,WAEF5H,KAAKiF,eAALjF,IACOuE,UAAUuD,kBAAVvD,CACHvE,IADGuE,EACGmD,CADHnD,EACSoD,CADTpD,EACmBqD,CADnBrD,EACoCsD,CADpCtD,EAC2ChC,CAD3CgC,CAAP;KArUKH,EA0UPA,WAAAA,IAAAA,GAAA,UAAsBtG,CAAtB,EAAoDkJ,CAApD;EAEE,4BAFoBlJ,4BAA8BkJ,SAClDhH,KAAKiF,eAALjF,IACOuE,UAAUwD,GAAVxD,CAAcvE,IAAduE,EAAoBzG,CAApByG,EAA0ByC,CAA1BzC,CAAP;KA5UKH,EA8UPA,WAAAA,IAAAA,GAAA,UAAsBtG,CAAtB,EAAoDkJ,CAApD;EAEE,4BAFoBlJ,4BAA8BkJ,SAClDhH,KAAKiF,eAALjF,IACOuE,UAAUyD,GAAVzD,CAAcvE,IAAduE,EAAoBzG,CAApByG,EAA0ByC,CAA1BzC,CAAP;KAhVKH,EAkVPA,WAAAA,UAAAA,GAAA,UAA4BtG,CAA5B,EAA0DkJ,CAA1D;EAGE,4BAH0BlJ,4BAA8BkJ,SAExDhH,KAAKiF,eAALjF,IACOuE,UAAU0D,SAAV1D,CAAoBvE,IAApBuE,EAA0BzG,CAA1ByG,EAAgCyC,CAAhCzC,CAAP;KArVKH,EAuVPA,WAAAA,IAAAA,GAAA,UAAsBtG,CAAtB,EAAoDkJ,CAApD;EAEE,4BAFoBlJ,4BAA8BkJ,SAClDhH,KAAKiF,eAALjF,IACOuE,UAAUzJ,GAAVyJ,CAAcvE,IAAduE,EAAoBzG,CAApByG,EAA0ByC,CAA1BzC,CAAP;KAzVKH,EA2VPA,WAAAA,KAAAA,GAAA,UAAuBtG,CAAvB,EAAqDkJ,CAArD;EAEE,4BAFqBlJ,4BAA8BkJ,SACnDhH,KAAKiF,eAALjF,IACOuE,UAAU2D,IAAV3D,CAAevE,IAAfuE,EAAqBzG,CAArByG,EAA2ByC,CAA3BzC,CAAP;KA7VKH,EA+VPA,WAAAA,KAAAA,GAAA,UAAuBtG,CAAvB,EAAqDkJ,CAArD;EAEE,4BAFqBlJ,4BAA8BkJ,SACnDhH,KAAKiF,eAALjF,IACOuE,UAAUmD,IAAVnD,CAAevE,IAAfuE,EAAqBzG,CAArByG,EAA2ByC,CAA3BzC,CAAP;KAjWKH,EAmWPA,WAAAA,IAAAA,GAAA,UAAsBtG,CAAtB,EAAoDkJ,CAApD;EAEE,4BAFoBlJ,4BAA8BkJ,SAClDhH,KAAKiF,eAALjF,IACOuE,UAAU9J,GAAV8J,CAAcvE,IAAduE,EAAoBzG,CAApByG,EAA0ByC,CAA1BzC,CAAP;KArWKH,EAuWPA,WAAAA,IAAAA,GAAA,UAAsBtG,CAAtB,EAAoDkJ,CAApD;EAEE,4BAFoBlJ,4BAA8BkJ,SAClDhH,KAAKiF,eAALjF,IACOuE,UAAU5J,GAAV4J,CAAcvE,IAAduE,EAAoBzG,CAApByG,EAA0ByC,CAA1BzC,CAAP;KAzWKH,EA2WPA,WAAAA,OAAAA,GAAA,UAAyBtG,CAAzB;EAEE,4BAFuBA,WACvBkC,KAAKiF,eAALjF,IACOuE,UAAU4D,MAAV5D,CAAiBvE,IAAjBuE,EAAuBzG,CAAvByG,CAAP;KA7WKH,EA+WPA,WAAAA,OAAAA,GAAA,UAAyBtG,CAAzB;EAEE,4BAFuBA,WACvBkC,KAAKiF,eAALjF,IACOuE,UAAU6D,MAAV7D,CAAiBvE,IAAjBuE,EAAuBzG,CAAvByG,CAAP;KAjXKH,EAqXPA,WAAAA,KAAAA,GAAA,UAAqBnG,CAArB;EAEE,WADA+B,KAAKiF,eAALjF,IACOuE,UAAUgB,IAAVhB,CAAevE,IAAfuE,E
AA0BtG,CAA1BsG,CAAP;KAvXKH,EA4XPA,WAAAA,IAAAA,GAAA,UAAsB1J,CAAtB;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU8D,GAAV9D,CAAcvE,IAAduE,EAAoB7J,CAApB6J,CAAP;KA9XKH,EAgYPA,WAAAA,UAAAA,GAAA,UAAmC1J,CAAnC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU+D,SAAV/D,CAAoBvE,IAApBuE,EAA0B7J,CAA1B6J,CAAP;KAlYKH,EAoYPA,WAAAA,MAAAA,GAAA,UAA+B1J,CAA/B;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUgE,KAAVhE,CAAgBvE,IAAhBuE,EAAsB7J,CAAtB6J,CAAP;KAtYKH,EAwYPA,WAAAA,IAAAA,GAAA,UAAsB1J,CAAtB;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUiE,GAAVjE,CAAcvE,IAAduE,EAAoB7J,CAApB6J,CAAP;KA1YKH,EA4YPA,WAAAA,UAAAA,GAAA,UAAmC1J,CAAnC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUkE,SAAVlE,CAAoBvE,IAApBuE,EAA0B7J,CAA1B6J,CAAP;KA9YKH,EAgZPA,WAAAA,IAAAA,GAAA,UAA+B5H,CAA/B;EAEE,WADAwD,KAAKiF,eAALjF,IACOuE,UAAUmE,GAAVnE,CAAcvE,IAAduE,EAAoB/H,CAApB+H,CAAP;KAlZKH,EAoZPA,WAAAA,UAAAA,GAAA,UAAU5H,CAAV;EAEE,WADAwD,KAAKiF,eAALjF,IACOuE,UAAUoE,SAAVpE,CAAoBvE,IAApBuE,EAA0B/H,CAA1B+H,CAAP;KAtZKH,EAwZPA,WAAAA,IAAAA,GAAA,UAAsB1J,CAAtB;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUqE,GAAVrE,CAAcvE,IAAduE,EAAoB7J,CAApB6J,CAAP;KA1ZKH,EA4ZPA,WAAAA,UAAAA,GAAA,UAAmC1J,CAAnC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUsE,SAAVtE,CAAoBvE,IAApBuE,EAA0B7J,CAA1B6J,CAAP;KA9ZKH,EAgaPA,WAAAA,IAAAA,GAAA,UAAsB1J,CAAtB;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUuE,GAAVvE,CAAcvE,IAAduE,EAAoB7J,CAApB6J,CAAP;KAlaKH,EAoaPA,WAAAA,SAAAA,GAAA,UAA2B1J,CAA3B;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUwE,QAAVxE,CAAmBvE,IAAnBuE,EAAyB7J,CAAzB6J,CAAP;KAtaKH,EAwaPA,WAAAA,UAAAA,GAAA,UAAmC1J,CAAnC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUyE,SAAVzE,CAAoBvE,IAApBuE,EAA0B7J,CAA1B6J,CAAP;KA1aKH,EA4aPA,WAAAA,QAAAA,GAAA,UAA0B1J,CAA1B;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU0E,OAAV1E,CAAkBvE,IAAlBuE,EAAwB7J,CAAxB6J,CAAP;KA9aKH,EAgbPA,WAAAA,cAAAA,GAAA,UAAuC1J,CAAvC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU2E,aAAV3E,CAAwBvE,IAAxBuE,EAA8B7J,CAA9B6J,CAAP;KAlbKH,EAobPA,WAAAA,QAAAA,GAAA,UAA0B1J,CAA1B;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU4E,OAAV5E,CAAkBvE,IAAlBuE,EAAwB7J,CAAxB6J,CAAP;KAtbKH,EAwbPA,WAAAA,cAAAA,GAAA,UAAuC1J,CAAvC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU6E,aAAV7E,CAAwBvE,IAAxBuE,EAA8B7J,CAA9B6J,CAAP;KA1bKH,EA4bPA,WAAAA,IAAAA,GAAA,UAAsB1J,CAAtB;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU8E,GAAV9E,CAAcvE,IAAduE,EAAoB7J,CAApB6J,CAAP;KA9bKH,EAgcPA,WAAAA,UAAAA,GAAA,UAAmC1J,CAAnC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU+E,SAAV/E,CAAoBvE,IAApBuE,EAA0B7J,CAA1B6J,CAAP;KAlcKH,EAocPA,WAAAA,kBAAAA,GAAA,UAAoC1J,CAApC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUgF,iBAAVhF,CAA4BvE,IAA5BuE,EAAkC7J,CAAlC6J,CAAP;KAtcKH,EAwcPA,WAAAA,wBAAAA,GAAA,UAAiD1J,CAAjD;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUiF,uBAAVjF,CAAkCvE,IAAlCuE,EAAwC7J,CAAxC6J,CAAP;KA1cKH,EA4cPA,WAAAA,UAAAA,GAAA,UAAqCqF,CAArC;EAEE,WADAzJ,KAAKiF,eAALjF,IACOuE,UAAUmF,SAAVnF,CAAoBvE,IAApBuE,EAA0BkF,CAA1BlF,CAAP;KA9cKH,EAmdPA,WAAAA,SAAAA,GAAA,UAA2B1J,CAA3B;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUoF,QAAVpF,CAAmBvE,IAAnBuE,EAAyB7J,CAAzB6J,CAAP;KArdKH,EAudPA,WAAAA,eAAAA,GAAA,UAAwC1J,CAAxC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUqF,cAAVrF,CAAyBvE,IAAzBuE,EAA+B7J,CAA/B6J,CAAP;KAzdKH,EA2dPA,WAAAA,KAAAA,GAAA,UAAuB1J,CAAvB;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUsF,IAAVtF,CAAevE,IAAfuE,EAAqB7J,CAArB6J,CAAP;KA7dKH,EA+dPA,WAAAA,WAAAA,GAAA,UAAoC1J,CAApC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUuF,UAAVvF,CAAqBvE,IAArBuE,EAA2B7J,CAA3B6J,CAAP;KAjeKH,EAmePA,WAAAA,MAAAA,GAAA,UAAwB1J,CAAxB;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUwF,KAAVxF,CAAgBvE,IAAhBuE,EAAsB7J,CAAtB6J,CAAP;KAreKH,EAuePA,WAAAA,YAAAA,GAAA,UAAqC1J,CAArC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUyF,WAAVzF,CAAsBvE,IAAtBuE,EAA4B7J,CAA5B6J,CAAP;KAzeKH,EA2ePA,WAAAA,UAAAA,GAAA,UAA4B1J,CAA5B;EAEE,WADAsF,KAAKiF,
eAALjF,IACOuE,UAAU0F,SAAV1F,CAAoBvE,IAApBuE,EAA0B7J,CAA1B6J,CAAP;KA7eKH,EA+ePA,WAAAA,gBAAAA,GAAA,UAAyC1J,CAAzC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU2F,eAAV3F,CAA0BvE,IAA1BuE,EAAgC7J,CAAhC6J,CAAP;KAjfKH,EAmfPA,WAAAA,QAAAA,GAAA,UAA0B1J,CAA1B;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU4F,OAAV5F,CAAkBvE,IAAlBuE,EAAwB7J,CAAxB6J,CAAP;KArfKH,EAufPA,WAAAA,cAAAA,GAAA,UAAuC1J,CAAvC;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU6F,aAAV7F,CAAwBvE,IAAxBuE,EAA8B7J,CAA9B6J,CAAP;KAzfKH,EA2fPA,WAAAA,aAAAA,GAAA,UAA+B1J,CAA/B;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU8F,YAAV9F,CAAuBvE,IAAvBuE,EAA6B7J,CAA7B6J,CAAP;KA7fKH,EA+fPA,WAAAA,mBAAAA,GAAA,UAA4C1J,CAA5C;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAU+F,kBAAV/F,CAA6BvE,IAA7BuE,EAAmC7J,CAAnC6J,CAAP;KAjgBKH,EAqgBPA,WAAAA,WAAAA,GAAA,UAAW1J,CAAX;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUgG,UAAVhG,CAAqBvE,IAArBuE,EAA2B7J,CAA3B6J,CAAP;KAvgBKH,EAygBPA,WAAAA,UAAAA,GAAA,UAAU1J,CAAV;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUiG,SAAVjG,CAAoBvE,IAApBuE,EAA0B7J,CAA1B6J,CAAP;KA3gBKH,EA6gBPA,WAAAA,WAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUkG,UAAVlG,CAAqBvE,IAArBuE,CAAP;KA/gBKH,EAihBPA,WAAAA,WAAAA,GAAA,UAAW1J,CAAX;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUmG,UAAVnG,CAAqBvE,IAArBuE,EAA2B7J,CAA3B6J,CAAP;KAnhBKH,EAqhBPA,WAAAA,MAAAA,GAAA,UAAMuG,CAAN,EAAyBjQ,CAAzB;EAEE,WADAsF,KAAKiF,eAALjF,IACOuE,UAAUqG,KAAVrG,CAAgBoG,CAAhBpG,EAA2BvE,IAA3BuE,EAAiC7J,CAAjC6J,CAAP;KAvhBKH,EA2hBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUsG,GAAVtG,CAAcvE,IAAduE,CAAP;KA7hBKH,EA+hBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUuG,IAAVvG,CAAevE,IAAfuE,CAAP;KAjiBKH,EAmiBPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU9H,KAAV8H,CAAgBvE,IAAhBuE,CAAP;KAriBKH,EAuiBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUwG,IAAVxG,CAAevE,IAAfuE,CAAP;KAziBKH,EA2iBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU/H,GAAV+H,CAAcvE,IAAduE,CAAP;KA7iBKH,EA+iBPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUyG,KAAVzG,CAAgBvE,IAAhBuE,CAAP;KAjjBKH,EAmjBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUpD,GAAVoD,CAAcvE,IAAduE,CAAP;KArjBKH,EAujBPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU0G,KAAV1G,CAAgBvE,IAAhBuE,CAAP;KAzjBKH,EA2jBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU7H,IAAV6H,CAAevE,IAAfuE,CAAP;KA7jBKH,EA+jBPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU2G,KAAV3G,CAAgBvE,IAAhBuE,CAAP;KAjkBKH,EAmkBPA,WAAAA,OAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU4G,MAAV5G,CAAiBvE,IAAjBuE,CAAP;KArkBKH,EAukBPA,WAAAA,WAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU6G,UAAV7G,CAAqBvE,IAArBuE,CAAP;KAzkBKH,EA2kBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU8G,GAAV9G,CAAcvE,IAAduE,CAAP;KA7kBKH,EA+kBPA,WAAAA,YAAAA,GAAA,UAAY3J,CAAZ,EAAyBE,CAAzB;EAEE,WADAqF,KAAKiF,eAALjF,IACOuE,UAAU+G,WAAV/G,CAAsBvE,IAAtBuE,EAA4B9J,CAA5B8J,EAAiC5J,CAAjC4J,CAAP;KAjlBKH,EAmlBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUgH,IAAVhH,CAAevE,IAAfuE,CAAP;KArlBKH,EAulBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUiH,GAAVjH,CAAcvE,IAAduE,CAAP;KAzlBKH,EA2lBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUkH,IAAVlH,CAAevE,IAAfuE,CAAP;KA7lBKH,EA+lBPA,WAAAA,UAAAA,GAAA,UAAU1M,CAAV;EAEE,4BAFQA,SACRsI,KAAKiF,eAALjF,IACOuE,UAAUmH,SAAVnH,CAAoBvE,IAApBuE,EAA0B7M,CAA1B6M,CAAP;KAjmBKH,EAmmBPA,WAAAA,MAAAA,GAAA,UAAM1M,CAAN;EAEE,WADAsI,KAAKiF,eAALjF,IACOuE,UAAUoH,KAAVpH,CAAgBvE,IAAhBuE,EAAsB7M,CAAtB6M,CAAP;KArmBKH,EAumBPA,WAAAA,QAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUqH,OAAVrH,CAAkBvE,IAAlBuE,CAAP;KAzmBKH,EA2mBPA,WAAAA,WAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUsH,UA
AVtH,CAAqBvE,IAArBuE,CAAP;KA7mBKH,EA+mBPA,WAAAA,SAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUuH,QAAVvH,CAAmBvE,IAAnBuE,CAAP;KAjnBKH,EAmnBPA,WAAAA,UAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUwH,SAAVxH,CAAoBvE,IAApBuE,CAAP;KArnBKH,EAunBPA,WAAAA,SAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUyH,QAAVzH,CAAmBvE,IAAnBuE,CAAP;KAznBKH,EA2nBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU0H,GAAV1H,CAAcvE,IAAduE,CAAP;KA7nBKH,EA+nBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU2H,GAAV3H,CAAcvE,IAAduE,CAAP;KAjoBKH,EAmoBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU4H,GAAV5H,CAAcvE,IAAduE,CAAP;KAroBKH,EAuoBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU6H,IAAV7H,CAAevE,IAAfuE,CAAP;KAzoBKH,EA2oBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU8H,IAAV9H,CAAevE,IAAfuE,CAAP;KA7oBKH,EA+oBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU+H,IAAV/H,CAAevE,IAAfuE,CAAP;KAjpBKH,EAmpBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUgI,IAAVhI,CAAevE,IAAfuE,CAAP;KArpBKH,EAupBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUiI,IAAVjI,CAAevE,IAAfuE,CAAP;KAzpBKH,EA2pBPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUlI,IAAVkI,CAAevE,IAAfuE,CAAP;KA7pBKH,EA+pBPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUkI,KAAVlI,CAAgBvE,IAAhBuE,CAAP;KAjqBKH,EAmqBPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUmI,KAAVnI,CAAgBvE,IAAhBuE,CAAP;KArqBKH,EAuqBPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUoI,KAAVpI,CAAgBvE,IAAhBuE,CAAP;KAzqBKH,EA2qBPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUqI,GAAVrI,CAAcvE,IAAduE,CAAP;KA7qBKH,EA+qBPA,WAAAA,MAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAUzF,KAAVyF,CAAgBvE,IAAhBuE,CAAP;KAjrBKH,EAmrBPA,WAAAA,KAAAA,GAAA,UAAgC1M,CAAhC;EAEE,4BAF8BA,QAC9BsI,KAAKiF,eAALjF,IACOuE,UAAUsI,IAAVtI,CAAevE,IAAfuE,EAAqB7M,CAArB6M,CAAP;KArrBKH,EAurBPA,WAAAA,QAAAA,GAAA,UAAiC0I,CAAjC;EAEE,4BAF+BA,KAAO,IACtC9M,KAAKiF,eAALjF,IACOuE,UAAUwI,OAAVxI,CAAkBvE,IAAlBuE,EAAwBuI,CAAxBvI,CAAP;KAzrBKH,EA2rBPA,WAAAA,WAAAA,GAAA,UAAoCtG,CAApC;EAEE,4BAFkCA,KAAQ,IAC1CkC,KAAKiF,eAALjF,IACOuE,UAAUyI,UAAVzI,CAAqBvE,IAArBuE,EAA2BzG,CAA3ByG,CAAP;KA7rBKH,EAisBPA,WAAAA,eAAAA,GAAA,UACa6I,CADb,EAC2CC,CAD3C;EAGE,4BAFyCA,SACxClN,KAAgBiF,eAAhBjF,IACMuE,UAAU4I,KAAV5I,CAAgB6I,cAAhB7I,CAA+BvE,IAA/BuE,EAAqC0I,CAArC1I,EAAiD2I,CAAjD3I,CAAP;KApsBKH,EAusBPA,WAAAA,sBAAAA,GAAA,UACa6I,CADb,EAC2CC,CAD3C;EAGE,4BAFyCA,SACxClN,KAAgBiF,eAAhBjF,IACMuE,UAAU4I,KAAV5I,CAAgB8I,qBAAhB9I,CACHvE,IADGuE,EACG0I,CADH1I,EACe2I,CADf3I,CAAP;KA1sBKH,EA+sBPA,WAAAA,OAAAA,GAAA,UACakJ,CADb,EAC+BhK,CAD/B,EAC+Cb,CAD/C,EAEI8K,CAFJ,EAEqCC,CAFrC,EAGIC,CAHJ;EAKE,4BAHEF,6BAAiCC,QAElCxN,KAAgBiF,eAAhBjF,IACMuE,UAAUmJ,MAAVnJ,CACHvE,IADGuE,EACG+I,CADH/I,EACWjB,CADXiB,EACmB9B,CADnB8B,EACwBgJ,CADxBhJ,EACoCiJ,CADpCjJ,EAC8CkJ,CAD9ClJ,CAAP;KAptBKH,EAutBPA,WAAAA,OAAAA,GAAA,UACakJ,CADb,EAC+BjO,CAD/B,EAEIoD,CAFJ,EAEgC8K,CAFhC,EAGII,CAHJ,EAIIF,CAJJ;EAME,4BAJ8BF,8BAC5BI,KAAsC,GAAG,KAE1C3N,KAAgBiF,eAAhBjF,IACMuE,UAAUqJ,MAAVrJ,CACHvE,IADGuE,EACG+I,CADH/I,EACWlF,CADXkF,EACoB9B,CADpB8B,EACyBgJ,CADzBhJ,EACqCoJ,CADrCpJ,EACgDkJ,CADhDlJ,CAAP;KA7tBKH,EAguBPA,WAAAA,gBAAAA,GAAA,UACakJ,CADb,EAEIO,CAFJ,EAGIxO,CAHJ,EAGsCoD,CAHtC,EAIIgL,CAJJ;EAME,WADCzN,KAAgBiF,eAAhBjF,IACMuE,UAAUuJ,eAAVvJ,CACHvE,IADGuE,EACG+I,CADH/I,EACWsJ,CADXtJ,EACwBlF,CADxBkF,EACiC9B,CADjC8B,EACsCkJ,CADtClJ,CAAP;KAtuBKH,EAyuBPA,WAAAA,gBAAAA,GAAA,UACakJ,CADb,EAC+BjO,CAD/B,EAEIoD,CAFJ,EAEgC8K,CAFhC,EAGII,CAHJ,EAIIF,CAJJ;EAME,4BAJ8BF,8BAC5BI,KAAsC,GAAG,KAE1C3N,KAAgBiF,eAAhBjF,IACMuE,UAAUwJ,eAAVxJ,CACHvE,IADGuE,EACG+I,CADH/I,EACWlF,
CADXkF,EACoB9B,CADpB8B,EACyBgJ,CADzBhJ,EACqCoJ,CADrCpJ,EACgDkJ,CADhDlJ,CAAP;KA/uBKH,EAmvBPA,WAAAA,gBAAAA,GAAA,UACwB4J,CADxB,EAEIC,CAFJ,EAE0C5O,CAF1C,EAGIoD,CAHJ,EAGyB+K,CAHzB,EAIID,CAJJ;EAME,4BAHuBC,KAAqC,GAAG,sBAC7DD,aACDvN,KAAgBiF,eAAhBjF,IACMuE,UAAU2J,eAAV3J,CACHvE,IADGuE,EACGyJ,CADHzJ,EACoB0J,CADpB1J,EACqClF,CADrCkF,EAC8C9B,CAD9C8B,EACmDiJ,CADnDjJ,EAEHgJ,CAFGhJ,CAAP;KAzvBKH,EA+vBPA,WAAAA,QAAAA,GAAA,UACa+J,CADb,EAEI9O,CAFJ,EAEsCoD,CAFtC,EAGIgL,CAHJ;EAKE,WADCzN,KAAgBiF,eAAhBjF,IACMuE,UAAU6J,OAAV7J,CAAkBvE,IAAlBuE,EAAwB4J,CAAxB5J,EAAoClF,CAApCkF,EAA6C9B,CAA7C8B,EAAkDkJ,CAAlDlJ,CAAP;KApwBKH,EAswBPA,WAAAA,QAAAA,GAAA,UACa+J,CADb,EAEI9O,CAFJ,EAEsCoD,CAFtC,EAGIgL,CAHJ;EAKE,WADCzN,KAAgBiF,eAAhBjF,IACMuE,UAAU8J,OAAV9J,CAAkBvE,IAAlBuE,EAAwB4J,CAAxB5J,EAAoClF,CAApCkF,EAA6C9B,CAA7C8B,EAAkDkJ,CAAlDlJ,CAAP;KA3wBKH,EA6wBPA,WAAAA,2BAAAA,GAAA,UACakK,CADb,EACyBC,CADzB,EACmC7W,CADnC,EAC8C8W,CAD9C;EAEE,4BADWF,yBAAYC,yBAAU7W,yBAAW8W,SACrCjK,UAAUkK,0BAAVlK,CACHvE,IADGuE,EACG+J,CADH/J,EACWgK,CADXhK,EACiB7M,CADjB6M,EACwBiK,CADxBjK,CAAP;KA/wBKH,EAkxBPA,WAAAA,KAAAA,GAAA,UACasK,CADb,EACmDC,CADnD,EAEIC,CAFJ,EAEoCC,CAFpC,EAGIxP,CAHJ;EAKE,WADCW,KAAgBiF,eAAhBjF,IACMuE,UAAUuK,IAAVvK,CACHvE,IADGuE,EACGmK,CADHnK,EACgBoK,CADhBpK,EAC6BqK,CAD7BrK,EACsCsK,CADtCtK,EACoDlF,CADpDkF,CAAP;KAvxBKH,EA2xBPA,WAAAA,SAAAA,GAAA,UAAS2K,CAAT,EAA2BxQ,CAA3B,EAA0CN,CAA1C;EAEE,4BAFO8Q,SACP/O,KAAKiF,eAALjF,IACOgP,SAASC,QAATD,CAAkBhP,IAAlBgP,EAAwBD,CAAxBC,EAAmCzQ,CAAnCyQ,EAAyC/Q,CAAzC+Q,CAAP;KA7xBK5K,EAgyBPA,WAAAA,mBAAAA,GAAA,UACa8K,CADb,EACmCC,CADnC;EAGE,WADAnP,KAAKiF,eAALjF,IACOuE,UAAU6K,kBAAV7K,CAA6BvE,IAA7BuE,EAAmC2K,CAAnC3K,EAA+C4K,CAA/C5K,CAAP;KAnyBKH,EAsyBPA,WAAAA,eAAAA,GAAA,UACaiL,CADb,EACmCC,CADnC;EAGE,WADAtP,KAAKiF,eAALjF,IACOuE,UAAUgL,cAAVhL,CAAyBvE,IAAzBuE,EAA+B8K,CAA/B9K,EAA2C+K,CAA3C/K,CAAP;KAzyBKH,EA4yBPA,WAAAA,eAAAA,GAAA,UACaiL,CADb,EACmC7H,CADnC;EAGE,WADAxH,KAAKiF,eAALjF,IACOuE,UAAUiL,cAAVjL,CAAyBvE,IAAzBuE,EAA+B8K,CAA/B9K,EAA2CiD,CAA3CjD,CAAP;KA/yBKH,EAkzBPA,WAAAA,KAAAA,GAAA,UAAgCqL,CAAhC,EAAuCC,CAAvC;EAGE,4BAH8BD,yBAAOC,SAErC1P,KAAKiF,eAALjF,IACOuE,UAAUoL,IAAVpL,CAAevE,IAAfuE,EAAqBkL,CAArBlL,EAAwBmL,CAAxBnL,CAAP;KArzBKH,EAwzBPA,WAAAA,aAAAA,GAAA,UACa8C,CADb,EAC8B3D,CAD9B,EAC6ClE,CAD7C,EACgEuQ,CADhE,EAEIC,CAFJ;EAIE,4BAH8DD,yBAC5DC,QACF7P,KAAKiF,eAALjF,IACOuE,UAAUuL,YAAVvL,CACHvE,IADGuE,EACG2C,CADH3C,EACUhB,CADVgB,EACelF,CADfkF,EACwBqL,CADxBrL,EACmCsL,CADnCtL,CAAP;KA5zBKH,EAg0BPA,WAAAA,aAAAA,GAAA,UAA6B2L,CAA7B,EAAgDxC,CAAhD;EAGE,WADAvN,KAAKiF,eAALjF,IACOuE,UAAUyL,YAAVzL,CAAuBvE,IAAvBuE,EAA6BwL,CAA7BxL,EAAwCgJ,CAAxChJ,CAAP;KAn0BKH,EAs0BPA,WAAAA,IAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU0L,QAAV1L,CAAmB2L,GAAnB3L,CAAuBvE,IAAvBuE,CAAP;KAx0BKH,EA20BPA,WAAAA,KAAAA,GAAA;EAEE,WADApE,KAAKiF,eAALjF,IACOuE,UAAU0L,QAAV1L,CAAmB4L,IAAnB5L,CAAwBvE,IAAxBuE,CAAP;KA70BKH,GA+0BT;KAn4BA,QAo4BOgM,eAAehM,QAAQiM,OAAOC,eACnCvM,OAAO,UAACwM,CAAD;EACL,aAASA,KAA8B,QAAlBA,EAAStU,SAAmC,QAAlBsU,EAAStS,KAAxD;SAuBJ;EAQE,YAAA,CACIuS,CADJ,EACoCzB,CADpC,EACsDxQ,CADtD;uBACoCwQ,QADpC,QAEE0B,MAAAA,KAAAA,EACID,EAAavU,KADjBwU,EACwBD,EAAavS,KADrCwS,EAC4C,IAD5CA,EAEID,EAAa9L,MAFjB+L,SAFF,CACoC9P,WAAAA,GAAAoO,CAAApO,EAIlCA,EAAKpC,IAALoC,GAAYpC,CAJsBoC,EAKjB,QAAbA,EAAKpC,IAAQ,KACfoC,EAAKpC,IAALoC,GAAY2D,YAAYoM,cAAZpM,GAA6BrD,QAA7BqD,EADG,CALiB3D,CAQlC;EACE2D,kBAAYqM,gBAAZrM,CAA6B3D,CAA7B2D;EACA,KAFF,CAEE,OAAOsM,CAAP;EAEA,YADAtM,YAAYsB,aAAZtB,CAA0B3D,CAA1B2D,GACMsM,CAAN;;EAkDN,UAvEqDC,WAAAA,GAAAA,GAwC5C7B,UAAAA,GAAP,UACIwB,CADJ,EAC6BzB,CAD7B,EAC+CxQ,CAD/C,EAEIN,CAFJ;EAME,4BAL2B8Q,SAEd,QAAT9Q,CAAS,IAAQA,MAAUuS,EAAavS,KAA/B,KACXuS,IAAeA,EAAa1K,MAAb0K,CAAoBvS,CAApBuS,CAD
J,GAGN,IAAIxB,CAAJ,CAAawB,CAAb,EAA2BzB,CAA3B,EAAsCxQ,CAAtC,CAAP;KA9CiDsS,EAwDnD7B,WAAAA,OAAAA,GAAA,UAAO8B,CAAP;EACE,QAAIA,EAAS7S,KAAT6S,KAAmB9Q,KAAK/B,KAA5B,EACE,MAAM,IAAI1E,KAAJ,CACF,6BAA2BuX,EAAS7S,KAApC,2BAAA,GACmB+B,KAAK/B,KADxB,iBADE,CAAN,CAIF,KAAK8S,YAAiBD,EAAS7U,KAA1B8U,EAAiC/Q,KAAK/D,KAAtC8U,CAAL,EACE,MAAM,IAAIxX,KAAJ,CACF,6BAA2BuX,EAAS7U,KAApC,2BAAA,GACmB+D,KAAK/D,KADxB,iBADE,CAAN,CAIFqI,YAAYsB,aAAZtB,CAA0BtE,IAA1BsE,GACAtE,KAAK0E,MAAL1E,GAAc8Q,EAASpM,MADvBJ,EAEAA,YAAYQ,cAAZR,CAA2BtE,IAA3BsE,CAFAA;KAnEiDuM,GAuErD;IAvEqDzM,OAArD,QAwEOgM,eAAepB,UAAUqB,OAAOC,eACrCvM,OAAO,UAACwM,CAAD;EACL,WAAOA,aAAoBnM,MAApBmM,IAAiD,QAAnBA,EAASS,MAAvCT,IACHA,EAASS,MAATT,YAA2BU,QAD/B;SAKJ,IAAMhC,WAAWD,SAASC,QAA1B,+BC7zCIiC,GAAkBC,GAAcC;EAKlC,OAFA,IAAMC,MAAN,EACMC,MADN,EAESvW,IAAI,CAAb,EAAgBA,IAAIoW,EAAG/W,MAAvB,EAA+BW,GAA/B,EACEsW,EAAaF,EAAGpW,CAAHoW,EAAMxM,EAAnB0M,KAAyB,CAAzBA,CAGF,KAAStW,IAAI,CAAb,EAAgBA,IAAImW,EAAK9W,MAAzB,EAAiCW,GAAjC,EAAsC;EACpC,QACMwW,KADAC,IAAON,EAAKnW,CAALmW,GACWO,MADxB,CAEA,KAAK,IAAMC,CAAX,IAAwBH,CAAxB,EAAoC;EAIlC,WAHA,IAAMI,IAAQJ,EAAWG,CAAXH,CAAd,EAEIK,KAAgB,CAFpB,EAGS5T,IAAI,CAAb,EAAgBA,IAAImT,EAAG/W,MAAvB,EAA+B4D,GAA/B,EACE,IAAIqT,EAAaM,EAAMhN,EAAnB0M,CAAJ,EAA4B;EAC1BG,UAAKK,OAALL,CAAapR,OAAboR,CAAqB,UAAAM,CAAA;EAAU,iBAAAT,EAAaS,EAAOnN,EAApB0M,KAA0B,CAA1B;WAA/BG,GACAI,KAAgB,CADhBJ,EAEAF,EAAWE,EAAK7M,EAAhB2M,KAAsB,CAFtBE,CAGA;EAIJ,WAAII,CAAJ,EACE;;EAMN,OAAMG,MAAN,CACAA,EAAeX,EAAEzM,EAAjBoN,KAAuB,CAAvBA,CACA,IAAMC,MAAN,CAEA,KAASjX,IAAImW,EAAK9W,MAAL8W,GAAc,CAA3B,EAA8BnW,KAAK,CAAnC,EAAsCA,GAAtC,EAKE,KAHMwW,KADAC,IAAON,EAAKnW,CAALmW,GACWO,MAAlBF,EAGGvT,IAAI,CAAb,EAAgBA,IAAIwT,EAAKK,OAALL,CAAapX,MAAjC,EAAyC4D,GAAzC,EACE,IAAI+T,EAAeP,EAAKK,OAALL,CAAaxT,CAAbwT,EAAgB7M,EAA/BoN,CAAJ,EAAwC;EACtC,SAAK,IAAML,CAAX,IAAwBH,CAAxB,EACEQ,EAAeR,EAAWG,CAAXH,EAAsB5M,EAArCoN,KAA2C,CAA3CA,EACAC,EAASR,EAAK7M,EAAdqN,KAAoB,CADpBD,CAGF;EAMN,OAAME,MAAN,CACA,KAASlX,IAAI,CAAb,EAAgBA,IAAImW,EAAK9W,MAAzB,EAAiCW,GAAjC,EAAsC;EACpC,QAAMyW,CAAN,CAEA,IAAIF,GAFEE,IAAON,EAAKnW,CAALmW,GAEOvM,EAAhB2M,KAAuBU,EAASR,EAAK7M,EAAdqN,CAA3B,EAA8C;EAE5C,UAAME,MAAN,CACA,KAAK,IAAMR,CAAX,IAAwBF,EAAKC,MAA7B,EAAqC;EACnC,YAAMU,IAAYX,EAAKC,MAALD,CAAYE,CAAZF,CAAlB,CACIH,EAAac,EAAUxN,EAAvB0M,MACFa,EAAaR,CAAbQ,IAA0BC,CADxBd;EAMN,WAAMe,IAAajO,OAAO6M,MAAP7M,GAAAA,EAAkBqN,CAAlBrN,CAAnB,CACAiO,EAAWX,MAAXW,GAAoBF,CAApBE,EACAA,EAAWP,OAAXO,GAAqBZ,EAAKK,OAD1BO,EAGAH,EAAajW,IAAbiW,CAAkBG,CAAlBH,CAHAG;;EAOJ,UAAOH,CAAP;EAUF,gCAAA,CACII,CADJ,EAEIJ,CAFJ;EAIE,yBAASlX;EACP,QAAMyW,IAAOS,EAAalX,CAAbkX,CAAb;EAAA,QAEMK,MAFN,CAiBA,IAdAd,EAAKK,OAALL,CAAapR,OAAboR,CAAqB,UAAAe,CAAA;EACnB,UAAMC,IAAaH,EAA6BE,EAAE5N,EAA/B0N,CAAnB,CACA,IAAkB,QAAdG,CAAJ,EACEF,EAAItW,IAAJsW,CAASE,CAATF,EADF,KAEO;EAGL,YAAMG,IAAKrO,OAAOC,IAAPD,CACPmO,EAAEtW,KADKmI,IACGlG,QAAQwU,oBAAyBH,EAAErW,IAA3BwW,EAAiCH,EAAEtU,KAAnCyU,GADXtO,EAEPmO,EAAEtU,KAFKmG,CAAX,CAGAkO,EAAItW,IAAJsW,CAASG,CAATH;;OAVJd,GAcqB,QAAjBA,EAAKmB,QAAT,EACE,MAAM,IAAIpZ,KAAJ,CACF,8DACOiY,EAAKjT,IADZ,MADE,CAAN,CAMF,IAAMqU,IAGFpB,EAAKmB,QAALnB,CAAsC,MAAxBA,EAAKK,OAALL,CAAapX,MAAW,GAAIkY,EAAI,CAAJA,CAAJ,GAAaA,CAAnDd,CAHJ,CAIA,KAAK,IAAME,CAAX,IAAwBF,EAAKC,MAA7B,EAAqC;EACnC,YAAMC,KAAakB,EAAnB,EACE,MAAM,IAAIrZ,KAAJ,CACF,mCAAiCmY,CAAjC,kCAAA,GAC8BvN,OAAO0O,IAAP1O,CAAYyO,CAAZzO,CAD9B,MADE,CAAN,CAMF,IAAM2O,IAAKF,EAAelB,CAAfkB,GAAX;EAAA,UACMlY,IAAI8W,EAAKC,MAALD,CAAYE,CAAZF,CADV,CAEA,KAAKT,YAAiB+B,EAAG7W,KAApB8U,EAA2BrW,EAAEuB,KAA7B8U,CAAL,EACE,MAAM,IAAIxX,KAAJ,CACF,8BAA4BiY,EAAKjT,IAAjC,8BAAA,GACImT,CADJ,kBAAA,GAC6BoB,EAAG7W,KADhC,qDAAA,GAE2BvB,EAAEuB,KAF7B,MADE,CAAN,CAMF,IAA0C,QAAtCoW,EAA6B3X,EAAEiK,EAA/B0N,CAAJ,EACEA,EAA6B
3X,EAAEiK,EAA/B0N,IAAqCS,CAArCT,CADF,KAEO;EACL,YAAMU,IAAcV,EAA6B3X,EAAEiK,EAA/B0N,CAApB,CACAA,EAA6B3X,EAAEiK,EAA/B0N,IAAqCU,EAAY1K,GAAZ0K,CAAgBD,CAAhBC,CAArCV,EACAU,EAAYC,OAAZD,EADAV;;;OAlDGtX,IAAIkX,EAAa7X,MAAb6X,GAAsB,CAAnC,EAAsClX,KAAK,CAA3C,EAA8CA,GAA9C,IAASA;6BChHsBrB,GAAWsB;EAC1CU,SACIhC,EAAEuE,KAAFvE,KAAYsB,EAAEiD,KADlBvC,EAEI,6BAA2BhC,EAAEuE,KAA7B,kBAAA,GACejD,EAAEiD,KADjB,uBAFJvC;EAMF,wBAAA,CAA+BuX,CAA/B,EAA+CC,CAA/C;EACE,OAAK,IAAInY,IAAI,CAAb,EAAgBA,IAAImY,EAAW9Y,MAA/B,EAAuCW,GAAvC,EACE,IAAImY,EAAWnY,CAAXmY,EAAcvO,EAAduO,KAAqBD,EAAOtO,EAAhC,EACE,QAAO,CAAP,CAGJ,QAAO,CAAP;EAGF,+BAAA,CAuCsCzJ,CAvCtC;EAwCE,MAAMiY,MAAN,CAGA,OADAC,oBAAoBlY,CAApBkY,EAA4BD,CAA5BC,EADa,IAAIC,GAAJ,EACbD,GACOD,CAAP;EAGF,6BAAA,CACIG,CADJ,EACgCH,CADhC,EACgDI,CADhD;EAEE,MAAiB,QAAbD,CAAJ,EAGA,IAAIA,aAAqBlP,MAAzB,EACE+O,EAAKnX,IAALmX,CAAUG,CAAVH,EADF,KAIA,IAAKK,WAAWF,CAAXE,CAAL,EAAA;EAIA,QAAMC,IAAWH,CAAjB,CACA,KAAK,IAAM7D,CAAX,IAAgBgE,CAAhB,EAA0B;EACxB,UAAM7Y,IAAM6Y,EAAShE,CAATgE,CAAZ,CACKF,EAAKG,GAALH,CAAS3Y,CAAT2Y,MACHA,EAAKlL,GAALkL,CAAS3Y,CAAT2Y,GACAH,oBAAoBxY,CAApBwY,EAAyBD,CAAzBC,EAA+BG,CAA/BH,CAFGG;;;EAQT,oBAAA,CAAoBI,CAApB;EACE,SAAO9X,MAAMC,OAAND,CAAc8X,CAAd9X,KAAqC,mBAAR8X,CAApC;EC5BF,KCAYC,IDAZ;EAAA;EA6BE,YAAA,CACWC,CADX,EAC0CC,CAD1C,EAEYnV,CAFZ;EACWqB,gBAAAA,GAAA6T,CAAA7T,EAA+BA,aAAAA,GAAA8T,CAA/B9T,EACCA,cAAAA,GAAArB,CADDqB,EA5BXA,wBAAAA,KA4BWA,EA1BHA,mBAAAA,GAAiB,CA0BdA,EAzBHA,aAAAA,GAAW,CAyBRA,EAxBHA,eAAAA,GAAa,CAwBVA,EAvBHA,mBAAAA,GAAiB,CAuBdA,EArBHA,cAAAA,IAAY,CAqBTA,EAjBHA,uBAAAA,GAAqB,CAiBlBA,EAhBHA,wBAAAA,GAAsB,CAgBnBA,EAXHA,gBAAAA,GAA2B,IAAIqT,GAAJ,EAWxBrT,EARHA,eAAAA,GAAa,IAAI+T,OAAJ,EAQV/T,EAGTA,KAAKgU,WAALhU,KAAoBiU,WAAW1V,MAAM,iBAH5ByB,EAITA,KAAKkU,UAALlU,IAAmBA,KAAKgU,YAJfhU,EAKTA,KAAKmU,QAALnU,GAAgB,IAAIE,QAAJ,CAAa2T,CAAb,CALP7T,EAMTA,KAAKoU,aAALpU,KACKqU,UAAU,GAAGC,YAAY,GAAGC,WAAW,GAAGC,aAAatZ,QAAQ,MAP3D8E;EA4db,UAldEyU,WAAAA,SAAAA,GAAA,UAAS/P,CAAT;EACE1E,SAAK+E,KAAL/E,CAAW0E,CAAX1E,EAAmBA,KAAK0F,QAAL1F,CAAc0E,CAAd1E,CAAnBA;KADFyU,EAIAA,WAAAA,KAAAA,GAAA,UACIC,CADJ,EACiClQ,CADjC,EACkDmQ,CADlD;EAAA,gBAAA,kBACkDA,QAIhD,IAuBIzZ,CAvBJ;EAAA,QAAIqD,IAAe,IAAnB,CACA,IAAU,QAANiG,CAAJ,EAAgB;EAEd,UAAwB,qBAAbkQ,CAAX,EACE,MAAM,IAAInb,KAAJ,CAAU,qCAAV,CAAN,CAEFiL,IAAKkQ,CAALlQ;OALF,MAMO;EAEL,UAAwB,mBAAbkQ,CAAa,MAAcA,aAAoBE,OAA1D,EACE,MAAM,IAAIrb,KAAJ,CACF,gFADE,CAAN,CAIF,IAAkB,qBAAPiL,CAAX,EACE,MAAM,IAAIjL,KAAJ,CACF,gFADE,CAAN,CAIFgF,IAAOmW,CAAPnW;EAKF,YAAOyB,KAAK6U,SAAL7U,CACH;EAAM,aAAAW,EAAKmU,UAALnU,CAAgBpC,CAAhBoC,EAAsBgU,CAAtBhU,CAAA;OADHX,EAEH;EAAM,aAAAW,EAAKoU,QAALpU,CAAczF,CAAdyF,EAAsBgU,CAAtBhU,CAAA;OAFHX,EAEoC;EAKrC,cAJA9E,IAASsJ,gBACarH,WACpB+D,QAAQ8T,KAAR9T,CAAc,yCAAdA,GAEKhG,CAAP;OAPC8E,CAAP;KAjCFyU,EA4CQA,WAAAA,UAAAA,GAAR,UAAqBtV,CAArB,EAAwCoE,CAAxC,EAAyDxE,CAAzD;EACEI,QACA;EACE,UAAM8V,IAAMlW,GAAZ,CAEA,OADAwE,KACO0R,CAAP;EACA,KAJF,CAIE,OAAOrE,CAAP;EAEA,YADArN,KACMqN,CAAN;;KApDJ6D,EAyDAA,WAAAA,aAAAA,GAAA;EACE,WAAOA,EAAO7P,YAAP6P,EAAP;KA1DFA,EA8DAA,WAAAA,eAAAA,GAAA;EACE,WAAOA,EAAO/D,cAAP+D,EAAP;KA/DFA,EAkEAA,WAAAA,UAAAA,GAAA,UACIS,CADJ,EAEIzD,CAFJ,EAGI0D,CAHJ;EAAA,QAKMja,CALN;EAAA,gBAAA;EAAA,QAMQka,MANR;EAAA,QAOQC,IAAW,UAAmB3a,CAAnB;EAEf,aADA0a,EAAMpZ,IAANoZ,CAAW1a,CAAX0a,GACO1a,CAAP;OATJ;EAAA,QAWQ4a,IAAYtV,KAAKgU,WAALhU,CAAiBzB,IAXrC;EAAA,QAYQgX,IAAoBvV,KAAKwV,QAZjC;EAAA,QAaQC,IAAqBzV,KAAK0V,UAblC,CA2BE,IAXA1V,KAAK6U,SAAL7U,CACI;EAAM,aAAAW,EAAKgV,mBAALhV,EAAA;OADVX,EACsC;EAAM,aAAAW,EAAKgV,mBAALhV,EAAA;OAD5CX,EAEI;EAII9E,UAHGyF,EAAKhC,SAALgC,KAGMA,EAAKwT,QAALxT,CAAciV,aAAdjV,CACL2U,CADK3U,EACM;EAAM,eAAAuU,EAAYvU,EAAKkT,OAAjBqB,EAA0BG,CAA1BH,CAAA;SADZvU,CAHNA,GACMuU,EAAYvU,EAAKkT,OAAjBqB,EAA
0BG,CAA1BH,CAETha;OANR8E,GAWIA,KAAK6V,YAAL7V,EAAJ,EAAyB;EACvB,UAAM8V,MACJnR,IAAI3E,KAAK+V,cAAL/V,IACJzB,MAAM+W,GACN7D,WACAI,SAAShW,MAAMC,OAAND,CAAcX,CAAdW,IAAwBX,CAAxBW,IAAkCX,IAJ7C,CAMqB,QAAjBia,CAAiB,KACnBW,EAASnD,QAATmD,aACMrD;EAAU,eAAA0C,EAAc1C,CAAd0C,EAAkBC,CAAlBD,CAAA;SAFG,GAKrBnV,KAAKgW,UAALhW,CAAgBhE,IAAhBgE,CAAqB8V,CAArB9V,CALqB;EAsBvB,YAdIA,KAAKiW,SAALjW,IACFA,KAAKoU,aAALpU,CAAmBwU,OAAnBxU,CAA2BhE,IAA3BgE,GACEzB,MAAM+W,GACNY,YAAYlW,KAAKwV,QAALxV,GAAgBuV,GAC5BY,oBAAoBnW,KAAKwV,UACzBY,cAAcpW,KAAK0V,UAAL1V,GAAkByV,GAChCY,sBAAsBrW,KAAK0V,YAC3BY,aAAanS,OAAO0O,IAAP1O,CAAYsN,CAAZtN,EAAoBrC,GAApBqC,CAAwB,UAAAoS,CAAA;EAAO,eAAA9E,EAAO8E,CAAP9E,EAAYxV,KAAZ;SAA/BkI,GACb0J,aAAahS,MAAMC,OAAND,CAAcX,CAAdW,IACRX,EAAoB4G,GAApB5G,CAAwB,UAAAsb,CAAA;EAAQ,eAACA,EAAgBva,KAAjB;SAAhCf,CADQW,GAERX,EAAkBe,OATzB+D,CADEA,EAcG9E,CAAP;KA1HFuZ,EA+HAA,WAAAA,eAAAA,GAAA,UAAe/a,CAAf;EACE,QAAM+c,IAAWzW,KAAK0W,UAAL1W,CAAgB0T,GAAhB1T,CAAoBtG,EAAEgL,MAAtB1E,IACbA,KAAK0W,UAAL1W,CAAgB2W,GAAhB3W,CAAoBtG,EAAEgL,MAAtB1E,EAA8ByW,QADjBzW,GAEb,CAFJ,CAGAA,KAAK0V,UAAL1V,IACiB,MAAbyW,CAAa,KACfzW,KAAK4W,cAAL5W,IAIgB,gBAAZtG,EAAEuE,KAAU,KACd+B,KAAKwV,QAALxV,IACI2D,cAAmBjK,EAAEuC,KAArB0H,IAA8BkT,gBAAqBnd,EAAEuE,KAAvB4Y,CAFpB,CAJhB7W,EAQAA,KAAK0W,UAAL1W,CAAgB8W,GAAhB9W,CACItG,EAAEgL,MADN1E,IAEK6T,SAAS7T,KAAK6T,SAAS5V,OAAOvE,EAAEuE,OAAOhC,OAAOvC,EAAEuC,OAAOwa,UAAU,GAFtEzW,CARAA,EAWAA,KAAK6T,OAAL7T,CAAa+W,QAAb/W,CAAsBtG,EAAEgL,MAAxB1E,EAAgCtG,EAAEuC,KAAlC+D,EAAyCtG,EAAEuE,KAA3C+B,CAZe,CADjBA,EAeAA,KAAK0W,UAAL1W,CAAgB2W,GAAhB3W,CAAoBtG,EAAEgL,MAAtB1E,EAA8ByW,QAA9BzW,EAfAA,EAgBMtG,aAAasV,QAAbtV,IACJsG,KAAKiU,KAALjU,CAAWtG,CAAXsG,CAjBFA;KAnIFyU,EAwJAA,WAAAA,iBAAAA,GAAA,UAAiBuC,CAAjB;EACE,QAAwC,QAApChX,KAAKiX,mBAALjX,CAAyBgX,EAAEzY,IAA3ByB,CAAJ,EACE,MAAM,IAAIzG,KAAJ,CAAU,wBAAsByd,EAAEzY,IAAxB,4BAAV,CAAN,CAEFyB,KAAKiX,mBAALjX,CAAyBgX,EAAEzY,IAA3ByB,IAAmCgX,CAAnChX;KA5JFyU,EA+JAA,WAAAA,cAAAA,GAAA,UAAc/a,CAAd;EACOsG,SAAK0W,UAAL1W,CAAgB0T,GAAhB1T,CAAoBtG,EAAEgL,MAAtB1E,MAGDA,KAAKkX,WAALlX,CAAiB0T,GAAjB1T,CAAqBtG,EAAEiL,EAAvB3E,KACFA,KAAKkX,WAALlX,CAAiBmX,MAAjBnX,CAAwBtG,EAAEiL,EAA1B3E,CADEA,EAGJA,KAAK0V,UAAL1V,EAHIA,EAIaA,KAAK0W,UAAL1W,CAAgB2W,GAAhB3W,CAAoBtG,EAAEgL,MAAtB1E,EAA8ByW,QAA9BzW,IACD,CADCA,IAEFA,KAAK0W,UAAL1W,CAAgB2W,GAAhB3W,CAAoBtG,EAAEgL,MAAtB1E,EACR6T,OADQ7T,CACAoX,WADApX,CACYtG,EAAEgL,MADd1E,GAEbA,KAAK4W,cAAL5W,EAFaA,EAKG,gBAAZtG,EAAEuE,KAAU,KACd+B,KAAKwV,QAALxV,IACI2D,cAAmBjK,EAAEuC,KAArB0H,IAA8BkT,gBAAqBnd,EAAEuE,KAAvB4Y,CAFpB,CALH7W,EASbA,KAAK0W,UAAL1W,CAAgBmX,MAAhBnX,CAAuBtG,EAAEgL,MAAzB1E,CAXeA,IAafA,KAAK0W,UAAL1W,CAAgB2W,GAAhB3W,CAAoBtG,EAAEgL,MAAtB1E,EAA8ByW,QAA9BzW,EApBGA;KAhKPyU,EA2LAA,WAAAA,iBAAAA,GAAA;EACE,SAAK,IAAM4C,CAAX,IAAsBrX,KAAKiX,mBAA3B,EAAgD;EAC9C,UAAMD,IAAIhX,KAAKiX,mBAALjX,CAAyBqX,CAAzBrX,CAAV,CACAA,KAAK4F,aAAL5F,CAAmBgX,CAAnBhX,UACOA,KAAKiX,mBAALjX,CAAyBqX,CAAzBrX,CADPA;;KA9LJyU,EAmMAA,WAAAA,OAAAA,GAAA;EACE,QAAM6C,IAAOtX,KAAK6T,OAAL7T,CAAauX,MAAbvX,EAAb,CAIA,OAHAsX,EAAK5B,UAAL4B,GAAkBtX,KAAK0V,UAAvB4B,EACAA,EAAKV,cAALU,GAAsBtX,KAAK4W,cAD3BU,EAEAA,EAAK9B,QAAL8B,GAAgBtX,KAAKwV,QAFrB8B,EAGOA,CAAP;KAxMF7C,EA2MMA,WAAAA,QAAAA,GAAN,UAAc+C,CAAd;;;EAeE,eAdAxX,KAAKiW,SAALjW,IAAiB,CAAjBA,EAEMyX,IAAazX,KAAKwV,QAFxBxV,EAGM0X,IAAkB1X,KAAK0V,UAH7B1V,EAKAA,KAAKoU,aAALpU,CAAmBwU,OAAnBxU,KALAA,EAMAA,KAAKoU,aAALpU,CAAmB9E,MAAnB8E,GAA4BwX,GAN5BxX,EAQAA,KAAKiW,SAALjW,IAAiB,CARjBA,EAUAA,KAAKoU,aAALpU,CAAmBuU,SAAnBvU,GACIzF,KAAKI,GAALJ,MAAAA,CAAAA,IAAAA,EAAYyF,KAAKoU,aAALpU,CAAmBwU,OAAnBxU,CAA2B8B,GAA3B9B,CAA+B,UAAA2X,CAAA;EAAK,iBAAAA,EAAExB,kBAAF;WAApCnW,CAAZzF,CAXJyF,EAYAA,KAAKoU,aAALpU,CAAmBqU,QAAnBrU,GAA8BA,KAAKwV,QAALxV,GAA
gByX,CAZ9CzX,EAaAA,KAAKoU,aAALpU,CAAmBsU,UAAnBtU,GAAgCA,KAAK0V,UAAL1V,GAAkB0X,CAblD1X,MAcOA,KAAKoU,cAAZ;;;KA1NFK,EA6NQA,WAAAA,aAAAA,GAAR;EACE,WAA0B,QAAnBzU,KAAKgW,UAAc,IAAqC,MAA7BhW,KAAK2V,mBAAvC;KA9NFlB,EAiOQA,WAAAA,YAAAA,GAAR,UACIhD,CADJ,EACsBvW,CADtB,EAEI0c,CAFJ;EAGE,QAAMC,MAAN,CACApG,EAAOrR,OAAPqR,CAAe,UAACE,CAAD,EAAQmG,CAAR;EACbD,QAAUC,CAAVD,IAAiBlG,CAAjBkG;OADFpG,EAIA,IASMqE,MACJnR,IAAI3E,KAAK+V,cAAL/V,IACJzB,MAAMyB,KAAKgU,WAALhU,CAAiBzB,MACvBkT,QAAQoG,GACRhG,UAAU3W,IACVyX,UAde,UAACF,CAAD;EACf,YACMsF,MADN,CAKA,OALYH,EAAcnF,CAAdmF,EAERxX,OAFQwX,CAEA,UAAC3c,CAAD,EAAI6c,CAAJ;EACVC,YAAOD,CAAPC,IAAc;EAAM,mBAAA9c,CAAA;aAApB8c;WAHUH,GAKLG,CAAP;WANF,CAgBA/X,KAAKgW,UAALhW,CAAgBhE,IAAhBgE,CAAqB8V,CAArB9V;KAzPFyU,EA4PAA,WAAAA,KAAAA,GAAA,UAAuBvZ,CAAvB;EACE,QAA+B,MAA3B8E,KAAKkU,UAALlU,CAAgB5F,MAAW,IAAK4F,KAAK8T,QAAzC,EACE,MAAM,IAAIva,KAAJ,CACF,8GADE,CAAN,CAKF,OADAyG,KAAKkX,WAALlX,CAAiBqI,GAAjBrI,CAAqB9E,EAAOyJ,EAA5B3E,GACO9E,CAAP;KAnQFuZ,EA0QAA,WAAAA,WAAAA,GAAA,UAAWlW,CAAX,EAA0ByZ,CAA1B;uBAA0BA,SACpBA,KAA6C,MAA5BhY,KAAKiY,kBAAtBD,KACFhY,KAAKgW,UAALhW,KADEgY,GAGAA,KACFhY,KAAKiY,kBAALjY,GAGF,IAAMkY,MAAyBjE,WAAW1V,MAAM,iBAAhD,CACIA,MACF2Z,EAAU3Z,IAAV2Z,GAAiB3Z,CADfA,GAGJyB,KAAKkU,UAALlU,CAAgBhE,IAAhBgE,CAAqBkY,CAArBlY,CAHIzB,EAIJyB,KAAKgU,WAALhU,GAAmBkY,CAJf3Z;KAnRNkW,EA8RAA,WAAAA,SAAAA,GAAA,UAASvZ,CAAT,EAAmC8c,CAAnC;EAAA,gBAAA,kBAAmCA,SAC7BA,MACFhY,KAAKiY,kBAALjY,IACgC,MAA5BA,KAAKiY,kBAAuB,KAC9BjY,KAAKgW,UAALhW,GAAkB,IADY,CAF9BgY,EAOJ,IAAMG,IAAgB,IAAI9E,GAAJ,CAAQrT,KAAKkX,WAAb,CAAtB;EAAA,QAEMkB,IAAyBC,sBAAsBnd,CAAtBmd,CAF/B,CAGAD,EAAuBhY,OAAvBgY,CAA+B,UAAAnF,CAAA;EAAU,aAAAkF,EAAc9P,GAAd8P,CAAkBlF,EAAOtO,EAAzBwT,CAAA;OAAzCC,EAGA,KAAK,IAAIrd,IAAI,CAAb,EAAgBA,IAAIiF,KAAKgU,WAALhU,CAAiBiU,KAAjBjU,CAAuB5F,MAA3C,EAAmDW,GAAnD,EAAwD;EACtD,UAAMkY,IAASjT,KAAKgU,WAALhU,CAAiBiU,KAAjBjU,CAAuBjF,CAAvBiF,CAAf,CACImY,EAAczE,GAAdyE,CAAkBlF,EAAOtO,EAAzBwT,MAImB,QAAnBnY,KAAKgW,UAAc,GACrBoC,EAAuBpc,IAAvBoc,CAA4BnF,CAA5BmF,CADqB,GAGrBnF,EAAOD,OAAPC,EAPEkF;EAWN,SAAMG,IAAWtY,KAAKkU,UAALlU,CAAgBuY,GAAhBvY,EAAjB,CACAA,KAAKgU,WAALhU,GAA8C,MAA3BA,KAAKkU,UAALlU,CAAgB5F,MAAW,KACzC6Z,WAAW1V,MAAM,iBADwB,GAE1CyB,KAAKkU,UAALlU,CAAgBA,KAAKkU,UAALlU,CAAgB5F,MAAhB4F,GAAyB,CAAzCA,CAFJA,EAKAoY,EAAuBhY,OAAvBgY,CAA+B,UAAAnF,CAAA;SAGxBtS,EAAKuW,WAALvW,CAAiB+S,GAAjB/S,CAAqBsS,EAAOtO,EAA5BhE,KACD6X,eAAevF,CAAfuF,EAAuBF,EAASrE,KAAhCuE,KACF7X,EAAKsT,KAALtT,CAAWsS,CAAXtS;OALJyX,CALApY;KA1TFyU,EA+UAA,WAAAA,UAAAA,GAAA,UACI1V,CADJ,EACgBoS,CADhB,EAC8BsB,CAD9B,EAEIgG,CAFJ;EAAA,gBAAA,CAKE,wBAHEA,SACF7U,OAAYuN,EAAG/W,MAAH+W,GAAY,CAAxBvN,EAA2B,2CAA3BA,GAEO5D,KAAK0Y,IAAL1Y,CAAU,WAAVA,EAAuB;EAC5B,UAAMoR,IAAIrS,GAAV,CACA6E,OACIwN,aAAahN,MADjBR,EAEI,gDAFJA,EAIA,IAAMqO,IAAe0G,qBAAqBhY,EAAKqV,UAA1B2C,EAAsCxH,CAAtCwH,EAA0CvH,CAA1CuH,CAArB,CACA,KAAKF,KAA4C,MAAxBxG,EAAa7X,UAAgB+W,EAAG/W,MAAH+W,GAAY,CAAlE,EACE,MAAM,IAAI5X,KAAJ,CACF,qIADE,CAAN,CAMF,IAAMqf,MAAN,CAOA,OANAA,EAAuBxH,EAAEzM,EAAzBiU,IAAsC,QAANnG,CAAM,GAAQoG,KAAKzH,EAAEnV,KAAP4c,CAAR,GAAwBpG,CAA9DmG,EAGAE,uBAAuBF,CAAvBE,EAA+C7G,CAA/C6G,CAHAF,IAMQ7U,OAAOqN,GAAG2H,OADJ5H,EAAGrP,GAAHqP,CAAO,UAAAzW,CAAA;EAAK,iBAAAke,EAAuBle,EAAEiK,EAAzBiU,CAAA;WAAZzH,GACd;OArBKnR,GAsBJ,CAtBIA,CAAP;KApVFyU,EA6WAA,WAAAA,WAAAA,GAAA,UAA6B1V,CAA7B;EAAA,gBAAA,CAKE,OAHA6E,OACIoV,WAAgBja,CAAhBia,CADJpV,EAEI,mDAFJA,GAGO;aAAC,IAKFgU,CALE,EAMF1c,CANE,QAAA,SAAA8I,sBAAAA,KAAAyN,GAAAA,gBAAAA,CA0BN,IAzBA7N,OACI6N,EAAOwH,KAAPxH,CAAa,UAAAyH,CAAA;EAAK,eAAAA,aAAa9U,MAAb;SAAlBqN,CADJ7N,EAEI,kEAFJA,GAMAjD,EAAKkU,SAALlU,CACI;EAAM,eAAAA,EAAKgV,mBAALhV,EAAA;SADVA,EACsC;EAAM,eAAAA,EAAKgV,mBAALhV,EAAA;SAD5CA,EAEI;EAEEzF,YAASyF,EAAK+X,IAAL/X,CAAU5B,E
AAER,IAAZoC,EAAkB;EACnB,cAAAwY,sBAAA;EAAA,cAACpV,WAAD;EAAA,cAAQqV,cAAR,CAUN,OATAxV,OACIG,aAAiBK,MADrBR,EAEI,4FAFJA,GAIAA,OACIoV,WAAgBI,CAAhBJ,CADJpV,EAEI,kGAFJA,CAJAA,EAQAgU,IAAgBwB,CARhBxV,EASOG,CAAP;WAXOpD,GADa,CACbA,CAATzF;SAJNyF,CANAiD,EAyBIjD,EAAKkV,YAALlV,EAAJ,EAAyB;EAgBvBA,UAAK0Y,WAAL1Y,CAAiB8Q,CAAjB9Q,EAAyBzF,CAAzByF,EAfiB,UAAC8R,CAAD;EACf,cAAMwC,IAAM2C,EAAcnF,CAAdmF,CAAZ;EAAA,cACMmB,IAAkBld,MAAMC,OAAND,CAAcoZ,CAAdpZ,IAAqBoZ,CAArBpZ,IAA4BoZ,EADpD,CAYA,OAVArR,OACImV,EAAM3e,MAAN2e,KAAiBtH,EAAOrX,MAD5BwJ,EAEI,qKAFJA,GAKAA,OACImV,EAAME,KAANF,CAAY,UAAAG,CAAA;EAAK,mBAAAA,aAAa9U,MAAb;aAAjB2U,CADJnV,EAEI,sIAFJA,CALAA,EAUOmV,CAAP;WAEFpY;EAEF,cAAOzF,CAAP;OA5CF;KAlXFuZ,EAmaAA,WAAAA,MAAAA,GAAA,UAAM/P,CAAN,EAAsBxG,CAAtB;EACE,QAAMoZ,IAAOtX,KAAK0W,UAAL1W,CAAgB2W,GAAhB3W,CAAoB0E,CAApB1E,CAAb,CACIA,KAAK6T,OAAL7T,KAAiBsX,EAAKzD,OAAtB7T,KAEFsX,EAAKzD,OAALyD,CAAaF,WAAbE,CAAyB5S,CAAzB4S,GACAA,EAAKzD,OAALyD,GAAetX,KAAK6T,OADpByD,EAEAtX,KAAK6T,OAAL7T,CAAa+W,QAAb/W,CAAsB0E,CAAtB1E,EAA8BsX,EAAKrb,KAAnC+D,EAA0CsX,EAAKrZ,KAA/C+B,CAJEA,GAMJA,KAAK6T,OAAL7T,CAAa+E,KAAb/E,CAAmB0E,CAAnB1E,EAA2B9B,CAA3B8B,CANIA;KAraNyU,EA6aAA,WAAAA,SAAAA,GAAA,UAAS/P,CAAT;EAGE,WADa1E,KAAK0W,UAAL1W,CAAgB2W,GAAhB3W,CAAoB0E,CAApB1E,EACD6T,OADC7T,CACO0F,QADP1F,CACgB0E,CADhB1E,CACb;KAhbFyU,EAkbAA,WAAAA,KAAAA,GAAA,UAAK/P,CAAL;EAGE,WADa1E,KAAK0W,UAAL1W,CAAgB2W,GAAhB3W,CAAoB0E,CAApB1E,EACD6T,OADC7T,CACOyF,IADPzF,CACY0E,CADZ1E,CACb;KArbFyU,EAubAA,WAAAA,WAAAA,GAAA,UACI6E,CADJ,EAEIC,CAFJ;EAGE,WAAOvZ,KAAK6T,OAAL7T,CAAawZ,UAAbxZ,CAAwBsZ,CAAxBtZ,EAAgCuZ,CAAhCvZ,CAAP;KA1bFyU,EA4bMA,WAAAA,KAAAA,GAAN,UAAW+C,CAAX;;;;EAEqB,mBADbrY,IAAQO,KAARP,MACmBa,KAAK6T,OAAL7T,CAAaJ,IAAbI,CAAkBwX,CAAlBxX,EAAN;EAEnB,oBAFMyZ,IAAaN,MAAAA,IACRO,SAASha,QAAQP,OACrBsa,EAAP;;;KAhcFhF,EAycQA,WAAAA,MAAAA,GAAR,UAAgCvZ,CAAhC;EACE,QAA+B,MAA3B8E,KAAKkU,UAALlU,CAAgB5F,MAAW,IAAK4F,KAAK8T,QAAzC,EACE,MAAM,IAAIva,KAAJ,CACF,oHADE,CAAN,CAKF,OADAyG,KAAKgU,WAALhU,CAAiBiU,KAAjBjU,CAAuBhE,IAAvBgE,CAA4B9E,CAA5B8E,GACO9E,CAAP;KAhdFuZ,EAwDeA,cAAAA,GAAe,CAxD9BA,EA6DeA,gBAAAA,GAAiB,CA7DhCA,GAkdF;KA1fA,eA4fcxY;EACZ,MAAMiC,IAASyb,mBAAmB1X,cAAchG,CAAdgG,CAAnB0X,EAAyC,SAAzCA,CAAf,CACA,OAAOvV,OAAOC,IAAPD,CAAYnI,CAAZmI,IAAoBlG,WAApBkG,CAAP;IC9fF,UAAYwP,CAAZ;EACEA,gBAAAA,YAAAA,EACAA,eAAAA,aADAA,EAEAA,cAAAA,YAFAA;EADF,CAAA,CAAYA,SAAAA,SAAAA,CAAZ,EAMA,IAuCIgG,gBAvCJ;EAAA,IAAaC,oBACVtb,MAAM,SAASub,MAAMlG,KAAKmG,aAC1Bxb,MAAM,cAAcub,MAAMlG,KAAKmG,aAC/Bxb,MAAM,uBAAuBub,MAAMlG,KAAKmG,aACxCxb,MAAM,qBAAqBub,MAAMlG,KAAKmG,aACtCxb,MAAM,iCAAiCub,MAAMlG,KAAKmG,aAClDxb,MAAM,qBAAqBub,MAAMlG,KAAKmG,aACtCxb,MAAM,0BAA0Bub,MAAMlG,KAAKoG,YAC3Czb,MAAM,wBAAwBub,MAAMlG,KAAKmG,aACzCxb,MAAM,gDAAgDub,MAAMlG,KAAKoG,YACjEzb,MAAM,iDAAiDub,MAAMlG,KAAKmG,aAClExb,MAAM,iBAAiBub,MAAMlG,KAAKoG,YAClCzb,MAAM,gCAAgCub,MAAMlG,KAAKmG,aACjDxb,MAAM,gCAAgCub,MAAMlG,KAAKmG,aACjDxb,MAAM,2BAA2Bub,MAAMlG,KAAKmG,aAC5Cxb,MAAM,6BAA6Bub,MAAMlG,KAAKoG,YAC9Czb,MAAM,WAAWub,MAAMlG,KAAKqG,YAC5B1b,MAAM,WAAWub,MAAMlG,KAAKoG,YAC5Bzb,MAAM,QAAQub,MAAMlG,KAAKmG,aACzBxb,MAAM,sCAAsCub,MAAMlG,KAAKmG,UAnB1D,CA2BA,8BAAA,CAAsC9hB,CAAtC;EACE;EAEE,QAAU,QADCS,gBAAgBT,CAAhBS,CACX,EACE,QAAO,CAAP;EAEF,GALF,CAKE,OAAOwhB,CAAP;EACA,YAAO,CAAP;EAEF,WAAO,CAAP;EAOF,gCAAA,CAAuCjiB,CAAvC;EACE,MAAwB,QAApB2hB,gBAAJ,EAA8B;EAC5B,QAAMphB,IAAKE,gBAAgBT,CAAhBS,CAAX,CACAkhB,mBAAmBphB,EAAG2hB,YAAH3hB,CAAgBA,EAAGohB,gBAAnBphB,CAAnBohB;EAEF,UAAOA,gBAAP;EAGF,2CAAA,CAAkD3hB,CAAlD;EAEE,MAAqB,MAAjBA,CAAJ,EACE,OAAO,CAAP,CAGF,IACMO,IAAKE,gBAAgBT,CAAhBS,CADX,CAWA,OARI0hB,aAAa5hB,CAAb4hB,EAAiB,iCAAjBA,KACiB,MAAjBniB,CADAmiB,GAEkB,CAFlBA,GAGOA,aAAa5hB,CAAb4hB,EAAiB,0BAAjBA,IACW,CADXA,GAGW,CAEtB;EAGF,uC
AAA,CAA8CniB,CAA9C;EACE,MAAqB,MAAjBA,CAAJ,EACE,QAAO,CAAP,CAGF,IAAMO,IAAKE,gBAAgBT,CAAhBS,CAAX,CAEA,IAAqB,MAAjBT,CAAJ;EACE,SAAKmiB,aAAa5hB,CAAb4hB,EAAiB,mBAAjBA,CAAL,EACE,QAAO,CAAP;KAFJ,MAKE,KAAKA,aAAa5hB,CAAb4hB,EAAiB,wBAAjBA,CAAL,EACE,QAAO,CAAP,CAMJ,OADIC,uCAAuC7hB,CAAvC6hB,EAA2CpiB,CAA3CoiB,CACJ;EAGF,uCAAA,CAA8CpiB,CAA9C;EACE,MAAqB,MAAjBA,CAAJ,EACE,QAAO,CAAP,CAGF,IAAMO,IAAKE,gBAAgBT,CAAhBS,CAAX,CAEA,IAAqB,MAAjBT,CAAJ,EAAwB;EACtB,SAAKmiB,aAAa5hB,CAAb4hB,EAAiB,mBAAjBA,CAAL,EACE,QAAO,CAAP,CAEF,KAAKA,aAAa5hB,CAAb4hB,EAAiB,0BAAjBA,CAAL,EACE,QAAO,CAAP;KALJ,MAQE,KAAKA,aAAa5hB,CAAb4hB,EAAiB,wBAAjBA,CAAL,EACE,QAAO,CAAP,CAMJ,OADIC,uCAAuC7hB,CAAvC6hB,EAA2CpiB,CAA3CoiB,CACJ;EAGF,6BAAA,CAAoCpiB,CAApC;EACE,SAAqB,MAAjBA,CAAiB,IAMsB,QAHhCS,gBAAgBT,CAAhBS,EAGmB4hB,SAN9B;EAUF,kBAAA;EACE,SAA4B,sBAAd3gB,SAAc,IAA4B,QAAbA,SAAf,IACD,QAAvBA,UAAUC,SADc,IACO,SAASI,IAAT,CAAcL,UAAUC,SAAxB,CADP,IAExB,aAAaI,IAAb,CAAkBL,UAAUE,MAA5B,CAFJ;EAMF,KAAM0gB,4BAA4B,WAAlC,CACA,2BAAA;EACE,MAAMC,MAAN,CAEA,IAAsB,sBAAX1gB,MAAW,SAA0C,MAApBA,OAAO2gB,QAA7B,SACgB,MAA3B3gB,OAAO2gB,QAAP3gB,CAAgB4gB,MAD3B,EAEE,OAAOF,CAAP,CAGF,IAAMG,IAAYC,eAAe9gB,OAAO2gB,QAAP3gB,CAAgB4gB,MAA/BE,CAAlB,CACA,IAAIL,6BAA6BI,CAAjC,EAA4C;EAC1C,QAAME,MAAN,CAEkBF,EAAUJ,yBAAVI,EAAqCtT,KAArCsT,CAA2C,GAA3CA,EACRva,OADQua,CACA,UAAAG,CAAA;EACV,UAAA3B,gBAAA;EAAA,UAAC5C,QAAD;EAAA,UAAMxS,QAAN,CACN8W,EAAStE,CAATsE,IAAgB9W,CAAhB8W;OAHgBF,GAMlBd,eAAezZ,OAAfyZ,CAAuB,UAAAkB,CAAA;EACjBA,QAAYxc,IAAZwc,IAAoBF,CAApBE,KACF7Z,QAAQC,GAARD,CACI,uCAAqC6Z,EAAYxc,IAAjD,OAAA,GACGsc,EAASE,EAAYxc,IAArBsc,CAFP3Z,GAGI6Z,EAAYjB,IAAZiB,KAAqBnH,KAAKoG,MAA1Be,GACFP,EAASO,EAAYxc,IAArBic,KAA8BK,EAASE,EAAYxc,IAArBsc,CAD5BE,GAEOA,EAAYjB,IAAZiB,KAAqBnH,KAAKmG,OAA1BgB,GACTP,EAASO,EAAYxc,IAArBic,IAA4D,WAA/BK,EAASE,EAAYxc,IAArBsc,CADpBE,GAEAA,EAAYjB,IAAZiB,KAAqBnH,KAAKqG,MAA1Bc,GAETP,EAASO,EAAYxc,IAArBic,IAA6BK,EAASE,EAAYxc,IAArBsc,CAFpBE,GAIT7Z,QAAQ8Z,IAAR9Z,CAAa,wBAAsB6Z,EAAYxc,IAAlC,MAAb2C,CAZA6Z;OADNlB,CANkBc;EAyBpB,UAAOH,CAAP;EAGF,sBAAA,CAAsBhiB,CAAtB,EAAiDyiB,CAAjD;EAEE,SAAc,QADFziB,EAAG0iB,YAAH1iB,CAAgByiB,CAAhBziB,CACZ;EAGF,gDAAA,CACIA,CADJ,EAC+BP,CAD/B;EAEE,MAAMkjB,IAAc3iB,EAAG4iB,iBAAH5iB,EAApB;EAAA,MACM6iB,IAAU7iB,EAAG8iB,aAAH9iB,EADhB,CAGAA,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B6iB,CAA9B7iB,EAGA,IAAMijB,IAAkC,MAAjBxjB,CAAiB,GAAKO,EAAWkjB,OAAhB,GAA0BljB,EAAGmjB,IAArE,CACAnjB,EAAGojB,UAAHpjB,CACIA,EAAGgjB,UADPhjB,EACmB,CADnBA,EACsBijB,CADtBjjB,EACsC,CADtCA,EACyC,CADzCA,EAC4C,CAD5CA,EAC+CA,EAAGmjB,IADlDnjB,EACwDA,EAAGqjB,KAD3DrjB,EACkE,IADlEA,GAGAA,EAAGsjB,eAAHtjB,CAAmBA,EAAGujB,WAAtBvjB,EAAmC2iB,CAAnC3iB,CAHAA,EAIAA,EAAGwjB,oBAAHxjB,CACIA,EAAGujB,WADPvjB,EACoBA,EAAGyjB,iBADvBzjB,EAC0CA,EAAGgjB,UAD7ChjB,EACyD6iB,CADzD7iB,EACkE,CADlEA,CAJAA,CAOA,IAAM0jB,IACF1jB,EAAG2jB,sBAAH3jB,CAA0BA,EAAGujB,WAA7BvjB,MAA8CA,EAAG4jB,oBADrD,CAQA,OALA5jB,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B,IAA9BA,GACAA,EAAGsjB,eAAHtjB,CAAmBA,EAAGujB,WAAtBvjB,EAAmC,IAAnCA,CADAA,EAEAA,EAAG6jB,aAAH7jB,CAAiB6iB,CAAjB7iB,CAFAA,EAGAA,EAAG8jB,iBAAH9jB,CAAqB2iB,CAArB3iB,CAHAA,EAKO0jB,CAAP;EAGF,wBAAA,CAA+BK,CAA/B;EACE,MAAMC,MAAN,CAKA,OAJAD,EAAYE,OAAZF,CAAoB,6BAApBA,EAAmD,UAACG,CAAD;WAAI,UAAA,SAAA1Y,sBAAAA,KAAAkV,OAAAA,gBAAAA,CAErD,OADAyD,YAAYH,CAAZG,EAAoBzD,EAAE,CAAFA,CAApByD,EAA0BzD,EAAE,CAAFA,CAA1ByD,GACOzD,EAAElX,IAAFkX,CAAO,GAAPA,CAAP;KAFFqD,GAIOC,CAAP;EAGF,qBAAA,CACIA,CADJ,EACqCje,CADrC,EACmDwF,CADnD;EAEEyY,IAAOI,mBAAmBre,CAAnBqe,CAAPJ,IAAmCI,mBAAmB7Y,KAAS,EAA5B6Y,CAAnCJ;ECzRF,KAAMK,kBAAkB,IAAxB;EAAA,IACMC,uBAAuB,EAD7B;EAAA,IAGMC,kBAAkB,IAHxB;EAAA,IAIMC,uBAAuB,IAJ7B;EAAA;EAaE,YAAA,CAAYxC,CAAZ;EANQxa,iBAAAA,KAAAA,EAEAA,aAAA
A,KAFAA,EAOU,QAAZwa,CAAY,KACdxa,KAAKwa,QAALxa,GAAgBwa,CADF,CAPVxa,EAWFA,KAAK2W,GAAL3W,CAAS,OAATA,KACFkB,QAAQ8Z,IAAR9Z,CACI,6IADJA,CAZIlB;EAgaV,UA7XSid,YAAAA,GAAP,UAAkBC,CAAlB,EAAuCpJ,CAAvC;EACE,yBADqCA,WAC/BoJ,KAAeC,IAAIC,SAAzB,EACE,MAAM,IAAI7jB,KAAJ,CAAU,mBAAiB2jB,CAAjB,4BAAV,CAAN,CAEFC,IAAIE,MAAJF,CAAWtJ,OAAXsJ,GAAqBA,IAAIG,WAAJH,CAAgBD,CAAhBC,CAArBA,EACAA,IAAID,WAAJC,GAAkBD,CADlBC;KAJKF,EAaAA,YAAAA,GAAP;EAEE,WADAE,IAAII,UAAJJ,IACOA,IAAID,WAAX;KAfKD,EAsBAA,kBAAAA,GAAP;EACEE,QAAIE,MAAJF,CAAWK,gBAAXL;KAvBKF,EA2CAA,QAAAA,GAAP;EACE,WAAOE,IAAIE,MAAJF,CAAW5F,MAAX4F,EAAP;KA5CKF,EA2EAA,SAAAA,GAAP,UAAele,CAAf;EACE,WAAOoe,IAAIE,MAAJF,CAAWM,OAAXN,CAAmBpe,CAAnBoe,CAAP;KA5EKF,EAqHAA,MAAAA,GAAP,UACIvI,CADJ,EACiClQ,CADjC,EACkDmQ,CADlD;EAEE,4BADgDA,SACzCwI,IAAIE,MAAJF,CAAWzE,IAAXyE,CAAgBzI,CAAhByI,EAA0B3Y,CAA1B2Y,EAA8BxI,CAA9BwI,CAAP;KAvHKF,EAoIAA,SAAAA,GAAP,UAAe3J,CAAf;EACkB+E,0BAAsB/E,CAAtB+E,EACRjY,OADQiY,CACA,UAAApF,CAAA;EAAU,aAAAA,EAAOD,OAAPC,EAAA;OADVoF;KArIX4E,EAwKAA,MAAAA,GAAP,UAA8B/hB,CAA9B;EACE,WAAOiiB,IAAIE,MAAJF,CAAWO,IAAXP,CAAgBjiB,CAAhBiiB,CAAP;KAzKKF,EAkMAA,MAAAA,GAAP,UAAYle,CAAZ;EACE,WAAOoe,IAAIE,MAAJF,CAAWvd,IAAXud,CAAgBpe,CAAhBoe,CAAP;KAnMKF,EAsMPA,WAAAA,IAAAA,GAAA,UAA8BU,CAA9B;EACE,WAAIA,KAAW3d,KAAKwa,QAAhBmD,GACK3d,KAAKwa,QAALxa,CAAc2d,CAAd3d,CADL2d,IAIJ3d,KAAKwa,QAALxa,CAAc2d,CAAd3d,IAAyBA,KAAK4d,eAAL5d,CAAqB2d,CAArB3d,CAAzBA,EAEOA,KAAKwa,QAALxa,CAAc2d,CAAd3d,CANH2d,CAAJ;KAvMKV,EAgNPA,WAAAA,YAAAA,GAAA;EACE,WAAOjd,KAAKwa,QAAZ;KAjNKyC,EAoNPA,WAAAA,IAAAA,GAAA,UAA8BU,CAA9B,EAA0C5Z,CAA1C;EACE/D,SAAKwa,QAALxa,CAAc2d,CAAd3d,IAAyB+D,CAAzB/D;KArNKid,EAwNCA,WAAAA,mBAAAA,GAAR;EAAA,gBAAA,CACE,IAA0C,MAAtC9Y,OAAO0O,IAAP1O,CAAYnE,KAAKod,QAAjBjZ,EAA2B/J,MAA/B,EACE,MAAM,IAAIb,KAAJ,CAAU,+BAAV,CAAN,CAUF,OARuB4K,OAAO0O,IAAP1O,CAAYnE,KAAKod,QAAjBjZ,EACKrC,GADLqC,CACS,UAAA5F,CAAA;EACH,eAAQA,SAAMsf,OAAOld,EAAKyc,QAALzc,CAAcpC,CAAdoC,GAArB;OAFNwD,EAIK2Z,IAJL3Z,CAIU,UAACzK,CAAD,EAAIsB,CAAJ;EAEJ,aAAOA,EAAE6iB,KAAF7iB,CAAQ+iB,QAAR/iB,GAAmBtB,EAAEmkB,KAAFnkB,CAAQqkB,QAAlC;OANN5Z,EAQD,CARCA,EAQE5F,IAAzB;KApOK0e,EAuOCA,WAAAA,gBAAAA,GAAR,UAAkDU,CAAlD;EACE,QAAgB,YAAZA,CAAJ,EACE,QAAO,CAAP,CACK,IAAgB,iBAAZA,CAAJ,EACL,OAAyB,sBAAX7jB,MAAd,CACK,IAAgB,cAAZ6jB,CAAJ,EACL,OAA2B,sBAAZhe,OAAY,SACW,MAA1BA,QAAQqe,QAARre,CAAiB6R,IAD7B,CAEK,IAAgB,gBAAZmM,CAAJ,EACL,OAAOM,UAAP,CACK,IAAgB,wBAAZN,CAAJ,EACL,QAAO,CAAP,CACK,IAAgB,oCAAZA,CAAJ,EACL,QAAO,CAAP,CACK,IAAgB,0BAAZA,CAAJ,EACL,QAAO,CAAP,CACK,IAAgB,wBAAZA,CAAJ,EACL,QAAO,CAAP,CACK,IAAgB,2BAAZA,CAAJ,EACL,OAAO3d,KAAK2W,GAAL3W,CAAS,YAATA,MAA2BA,KAAK2W,GAAL3W,CAAS,MAATA,CAAlC,CACK,IAAgB,6BAAZ2d,CAAJ,EACL,OAAOO,uBAAuBle,KAAK2W,GAAL3W,CAAS,eAATA,CAAvBke,CAAP,CACK,IAAgB,cAAZP,CAAJ,EACL,QAAO,CAAP,CACK,IAAgB,cAAZA,CAAJ,EACL,OAAO3d,KAAKme,kBAALne,EAAP,CACK,IAAgB,mDAAZ2d,CAAJ,EAAgE;EACrE,UAAM1lB,IAAe+H,KAAK2W,GAAL3W,CAAS,eAATA,CAArB,CAEA,OAAqB,MAAjB/H,CAAiB,GACZ,CADY,GAGdmmB,kCAAkCnmB,CAAlCmmB,CAHP;EAIK,SAAgB,oDAAZT,CAAJ,EACL,OAAO3d,KAAK2W,GAAL3W,CAAS,8CAATA,IAA2D,CAA3DA,KACFqe,UADL,CAEK,IAAgB,gBAAZV,CAAJ,EACL,OAAO3d,KAAK2W,GAAL3W,CAAS,eAATA,IAA4B,CAAnC,CACK,IAAgB,oBAAZ2d,CAAJ,EACL,OAAIW,sBAAsB,CAAtBA,IACK,CADLA,GAEOA,sBAAsB,CAAtBA,IACF,CADEA,GAGJ,CALP,CAMK,IAAgB,mCAAZX,CAAJ,EACL,OAAOY,8BAA8Bve,KAAK2W,GAAL3W,CAAS,eAATA,CAA9Bue,CAAP,CACK,IAAgB,mCAAZZ,CAAJ,EACL,OAAOa,8BAA8Bxe,KAAK2W,GAAL3W,CAAS,eAATA,CAA9Bwe,CAAP,CACK,IAAgB,8BAAZb,CAAJ,EACL,OAAOc,oBAAoBze,KAAK2W,GAAL3W,CAAS,eAATA,CAApBye,CAAP,CACK,IAAgB,gCAAZd,CAAJ,EAKL,OADoB3d,KAAK2W,GAAL3W,CAAS,8BAATA,IACC,CADDA,GACK,CAAzB,CACK,IAAgB,mBAAZ2d,CAAJ,EACL,OAAyC,OAAlC3d,KAAK6T,OAAL7T,CAAa0e,cAAb1e,EAAkC,GAAKgd,oBAAL,GACKF,oBAD9C,CAEK,IAAgB,cAAZa,C
AAJ,EACL,OAAyC,OAAlC3d,KAAK6T,OAAL7T,CAAa0e,cAAb1e,EAAkC,GAAK+c,eAAL,GACKF,eAD9C,CAEK,IAAgB,WAAZc,CAAJ,EACL,QAAO,CAAP,CACK,IAAgB,yCAAZA,CAAJ,EACL,QAAQ3d,KAAK2W,GAAL3W,CAAS,MAATA,CAAR,CAEF,MAAM,IAAIzG,KAAJ,CAAU,qBAAmBokB,CAAnB,MAAV,CAAN;KA3SKV,EA8SPA,WAAAA,YAAAA,GAAA,UAAYzC,CAAZ;EACExa,SAAKwa,QAALxa,GAAgBmE,OAAO6M,MAAP7M,GAAAA,EAAkBqW,CAAlBrW,CAAhBnE;KA/SKid,EAkTPA,WAAAA,MAAAA,GAAA;EACEjd,SAAKwa,QAALxa,GAAgB2e,oBAAhB3e,EACyB,QAArBA,KAAK4e,YAAgB,KACvB5e,KAAK4e,YAAL5e,GAAoB,IADG,CADzBA;KAnTKid,EAyTP9Y,qBAAAA,CAAI8Y,WAAJ9Y,WAAAA,SAAA;EACE,aAAOnE,KAAKqd,MAALrd,CAAY6T,OAAnB;2CADF1P,CAzTO8Y,EA6TPA,WAAAA,YAAAA,GAAA,UAAY1e,CAAZ;EACE,WAAMA,KAAQyB,KAAKod,QAAb7e,GAGCyB,KAAKod,QAALpd,CAAczB,CAAdyB,EAAoB6T,OAHrBtV,GACG,IADT;KA9TK0e,EAgVPA,WAAAA,gBAAAA,GAAA,UACI1e,CADJ,EACkBsgB,CADlB,EACgDd,CADhD,EAEIe,CAFJ;EAAA,gBAAA,CAGE,qBAF8Cf,QAE1Cxf,KAAQyB,KAAKod,QAAjB,EAME,OALAlc,QAAQ8Z,IAAR9Z,CACO3C,+DADP2C,GAE0B,QAAtB4d,CAAsB,IACxBA,EAAmB;EAAM,aAAAne,EAAK0c,MAAL;OAAzByB,CAHF5d,GAKO,CAAP,CAEF;EACE,UAAM2S,IAAUgL,GAAhB,CAIA,OAHAhL,EAAQkL,YAARlL,GACKmL,UAAU,UAACta,CAAD;EAAoB,iBAAA/D,EAAK0c,MAAL1c,CAAYqe,QAAZre,CAAqB+D,CAArB/D,CAAA;aADnCkT,GAEA7T,KAAKod,QAALpd,CAAczB,CAAdyB,MAAuB6T,YAASkK,aAFhClK,GAGO,CAAP;EACA,KANF,CAME,OAAOoL,CAAP;EAGA,aAFA/d,QAAQ8Z,IAAR9Z,CAAa,6BAA2B3C,CAA3B,YAAb2C,GACAA,QAAQ8Z,IAAR9Z,CAAa+d,EAAI3X,KAAJ2X,IAAaA,EAAIC,OAA9Bhe,CADAA,GAEO,CAAP;;KApWG+b,EAwWPA,WAAAA,cAAAA,GAAA,UAAc1e,CAAd;EACE,UAAMA,KAAQyB,KAAKod,SAAnB,EACE,MAAM,IAAI7jB,KAAJ,CAAagF,oCAAb,CAAN,CAEFyB,KAAKod,QAALpd,CAAczB,CAAdyB,EAAoB6T,OAApB7T,CAA4BgT,OAA5BhT,WACOA,KAAKod,QAALpd,CAAczB,CAAdyB,CADPA;KA5WKid,EAgXP9Y,qBAAAA,CAAI8Y,WAAJ9Y,UAAAA,SAAA;EAEE,aADAnE,KAAKud,UAALvd,IACOA,KAAK4e,YAAZ;2CAFFza,CAhXO8Y,EAqXCA,WAAAA,WAAAA,GAAR;EAAA,gBAAA,CACE,IAAyB,QAArBjd,KAAK4e,YAAT,EAA+B;EAC7B5e,WAAKkd,WAALld,GAAmBA,KAAK2W,GAAL3W,CAAS,SAATA,CAAnBA,CACA,IAAM6T,IAAU7T,KAAKsd,WAALtd,CAAiBA,KAAKkd,WAAtBld,CAAhB,CACAA,KAAK4e,YAAL5e,GACI,IAAIyU,MAAJ,CAAWZ,CAAX,GAAoB,CAApB,EAA0C;EAAM,eAAAlT,EAAKgW,GAALhW,CAAS,OAATA,CAAA;SAAhD,CADJX;;KAzXGid,GA6XT;KAvaA;EA2aE,MAAIkC,CAAJ,CACA,IAAwB,4BAAxB,EACEA,IAAKrlB,MAALqlB,CADF,KAEO;EAAA,QAAyB,6BAAzB,EAGL,MAAM,IAAI5lB,KAAJ,CAAU,gCAAV,CAAN,CAFA4lB,IAAKxf,OAALwf;EAIF,UAAOA,CAAP;EAGF,8BAAA;EACE,MAAMA,IAAKC,oBAAX,CAKA,OAJc,QAAVD,EAAGhC,GAAO,KACZgC,EAAGhC,GAAHgC,GAAS,IAAIlC,WAAJ,CAAgB0B,oBAAhB,CAATQ,EACAE,iBAAiB;EAAM,WAAAF,EAAGhC,GAAHgC,CAAO9B,MAAP;KAAvBgC,CAFY,GAIPF,EAAGhC,GAAV;EAGF,KAAWA,MAAMmC,sBAAjB;EAAA,uEAAA,CChZA,aAAA,CAAkDvgB,CAAlD;EAGE,SADA6E,OAAYoV,WAAgBja,CAAhBia,CAAZpV,EAAgC,4CAAhCA,GACO,UAAClJ,CAAD,EAAO+X,CAAP;EAML,WALA7O,OACIlJ,aAAa0J,MADjBR,EACyB,6CADzBA,GAEAA,OACU,QAAN6O,CAAM,IAAQA,aAAcrO,MADhCR,EAEI,kDAFJA,CAFAA,EAKOuZ,IAAIE,MAAJF,CAAWzE,IAAXyE,CAAgB;EACf,UAAAhE;;kBAAA;EAAA,UAACpV,WAAD;EAAA,UAAQgV,WAAR,CAQN,OAPU,QAANtG,CAAM,IACR8M,kBACIxb,EAAM9H,KADVsjB,EACiB9M,EAAGxW,KADpBsjB,EAEI,gFAFJA,CADQ,EAMVC,WAAWzG,CAAXyG,CANU,EAOHzG,EAAM,CAANA,CAAP;OATKoE,CAAP;KANF;EAgDF,eAAA,CAAiCpe,CAAjC;EAIE,SAFA6E,OACIoV,WAAgBja,CAAhBia,CADJpV,EACwB,6CADxBA,GAEO,UAAC6b,CAAD,EAAiBhN,CAAjB;EAOL,WANA7O,OACI/H,MAAMC,OAAND,CAAc4jB,CAAd5jB,KAAuB4jB,EAAKxG,KAALwG,CAAW,UAAAC,CAAA;EAAO,aAAAA,aAAetb,MAAf;OAAlBqb,CAD3B7b,EAEI,+DAFJA,GAGAA,OACU,QAAN6O,CAAM,IAAQA,aAAcrO,MADhCR,EAEI,sDAFJA,CAHAA,EAMOuZ,IAAIE,MAAJF,CAAWzE,IAAXyE,CAAgB;EACf,UAAAhE;;gBAAA;EAAA,UAACpV,WAAD;EAAA,UAAQgV,WAAR,CAQN,OAPU,QAANtG,CAAM,IACR8M,kBACIxb,EAAM9H,KADVsjB,EACiB9M,EAAGxW,KADpBsjB,EAEI,+FAFJA,CADQ,EAMVC,WAAWzG,CAAXyG,CANU,EAOHzG,CAAP;OATKoE,CAAP;KAPF;EA6CF,sBAAA,CAA0Dpe,CAA1D;EAOE,SAFA6E,OACIoV,WAAgBja,CAAhBia,CADJpV,EACwB,oDADxBA,GAEO,UAAClJ,CAAD,EAAO+X,CAAP;EAC
L7O,WACIlJ,aAAa0J,MADjBR,EAEI,qDAFJA,GAGAA,OACU,QAAN6O,CAAM,IAAQA,aAAcrO,MADhCR,EAEI,0DAFJA,CAHAA,CAMM,IAAAuV;;gBAAA;EAAA,QAACJ,WAAD;EAAA,QAAQhV,WAAR,CAEN,OADAyb,WAAWzG,CAAXyG,KACQG,MAAM5G,EAAM,CAANA,GAAehV,OAAOA,GAApC;KATF;EA2CF,uBAAA,CAAyChF,CAAzC;EAQE,SAHA6E,OACIoV,WAAgBja,CAAhBia,CADJpV,EAEI,qDAFJA,GAGO,UAAC6b,CAAD,EAAiBhN,CAAjB;EACL7O,WACI/H,MAAMC,OAAND,CAAc4jB,CAAd5jB,KAAuB4jB,EAAKxG,KAALwG,CAAW,UAAAC,CAAA;EAAO,aAAAA,aAAetb,MAAf;OAAlBqb,CAD3B7b,EAEI,oEAFJA,GAGAA,OACU,QAAN6O,CAAM,IAAQA,aAAcrO,MADhCR,EAEI,8DAFJA,CAHAA,CAMA,IAAMqR,IAAMkI,IAAIE,MAAJF,CAAWyC,SAAXzC,CAAqB;EAAM,aAAApe,OAAAA,OAAAA,EAAK0gB,CAAL1gB,CAAA;OAA3Boe,EAAuCsC,CAAvCtC,EAA6C1K,CAA7C0K,CAAZ,CAQA,OAPU,QAAN1K,CAAM,IACR8M,kBACItK,EAAIlR,KAAJkR,CAAUhZ,KADdsjB,EACqB9M,EAAGxW,KADxBsjB,EAEI,uGAFJA,CADQ,EAMVC,WAAWvK,EAAI8D,KAAfyG,CANU,EAOHvK,CAAP;KAfF;EAyCF,uBAAA,CAAuBlW,CAAvB,EAAwC8gB,CAAxC;EAUE,MARAjc,OACIoV,WAAgBja,CAAhBia,CADJpV,EAEI,qDAFJA,GAGAA,OACe,QAAXic,CAAW,IACPhkB,MAAMC,OAAND,CAAcgkB,CAAdhkB,KAA0BgkB,EAAQ5G,KAAR4G,CAAc,UAAA7I,CAAA;EAAK,WAAAA,aAAahI,QAAb;KAAnB6Q,CAFlCjc,EAGI,+EAHJA,CAHAA,EAQe,QAAXic,CAAJ,EAGE,KAAK,IAAMxI,CAAX,IADAwI,MAAAA,EACsB1C,IAAIE,MAAJF,CAAWlG,mBAAjC,EACE4I,EAAQ7jB,IAAR6jB,CAAa1C,IAAIE,MAAJF,CAAWlG,mBAAXkG,CAA+B9F,CAA/B8F,CAAb0C,EAIJ,IAAMC,IAAmBD,EAAQzlB,MAAjC,CAEAwJ,QADAic,IAAUA,EAAQvS,MAARuS,CAAe,UAAA5Q,CAAA;EAAY,WAAAA,EAASF,SAAT;KAA3B8Q,GAEEzlB,SAAS,CADrBwJ,EAEI,kGACkCkc,CADlC,6BAFJlc,EAMA,IACMuV,sCADmB,EAAzB;EAAA,MACOpV,WADP;EAAA,MACcgV,WADd,CAIAnV,OACImV,EAAMgH,IAANhH,CAAW,UAAAiH,CAAA;EAAK,WAAK,QAALA,CAAA;KAAhBjH,CADJnV,EAEI,8LAFJA,GAKAA,OACmB,MAAfG,EAAM3E,IADVwE,EAEI,mFACuBG,EAAM3E,IAD7B,YAFJwE,CALAA,CAUA,IAAMqc,MAAN,CAMA,OALAJ,EAAQzf,OAARyf,CAAgB,UAAC7I,CAAD,EAAIjc,CAAJ;EACE,YAAZge,EAAMhe,CAANge,CAAY,KACdkH,EAAWjJ,EAAEzY,IAAb0hB,IAAqBlH,EAAMhe,CAANge,CADP;KADlB8G,KAKQ9b,UAAOgV,OAAOkH,GAAtB;EAgCF,oBAAA,CAAsClhB,CAAtC;EAEE,SAAOoe,IAAIE,MAAJF,CAAW+C,UAAX/C,CAAsBpe,CAAtBoe,CAAP;EAGF,oBAAA,CAAoBpE,CAApB;EAEE,MADyBA,EAAMzL,MAANyL,CAAa,UAAAiH,CAAA;EAAK,WAAK,QAALA,CAAA;KAAlBjH,EAA6B3e,MAA7B2e,GACF,CAAvB,EACE,MAAM,IAAIxf,KAAJ,CACF,0IADE,CAAN;OCjVSmf,OAAOuE,YAAYvE;MACnBgF,OAAOT,YAAYS;MACnB1K,UAAUiK,YAAYjK;MACtBpT,OAAOqd,YAAYrd;MACnB6d,UAAUR,YAAYQ;SCLd,UAAA,SAAAzZ,sBAAAA,KAAA1I,GAAAA,gBAAAA,CACd6hB,IAAIxG,GAAJwG,CAAQ,SAARA,KACHjc,QAAQ8Z,IAAR9Z,MAAAA,CAAAA,OAAAA,EAAgB5F,CAAhB4F,CADGic;wBCMHgD,GAAsB9Q,GAAsBnH,GAC5CkY;qBAAAA,QACF,IAAIC,MAAJ,CACA,IAAID,CAAJ,GACEC,IAAWA,EAASlZ,MAATkZ,CAAgBhR,EAAWxR,KAAXwR,CAAiB,CAAjBA,CAAhBgR,GACFrkB,KAAKmkB,EAAW,CAAXA,IAAgBjY,IAC9BmY,IAAWA,EAASlZ,MAATkZ,CAAgBF,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,CAAhBE,EAHb,KAIO;EACLA,QAAWA,EAASlZ,MAATkZ,CAAgBF,EAAW,CAAXA,CAAhBE,CAAXA,CAEA,KADA,IAAMC,IAAgBjR,EAAWjV,MAAjC,EACSW,IAAI,CAAb,EAAgBA,IAAIulB,CAApB,IAAqCvlB,CAArC,EACEslB,IACIA,EAASlZ,MAATkZ,EAAiBF,EAAWplB,IAAI,CAAfolB,IAAoB9Q,EAAWtU,CAAXsU,GAAeA,EAAWtU,CAAXsU,EAApDgR,CADJA,CAGFA,IAAWA,EAASlZ,MAATkZ,CAAgBF,EAAWtiB,KAAXsiB,CAAiBG,IAAgB,CAAjCH,CAAhBE,CAAXA;EAEF,UAAOA,CAAP;EAYF,qBAAA,CACIE,CADJ,EAC0BC,CAD1B,EAEIJ,CAFJ;qBAEIA,QACF,IAAMK,MAAN,CACA,IAAIL,CAAJ,EAAkB;EAChBK,MAASzkB,IAATykB,CAAcD,CAAdC,EACA,KAAK,IAAI1lB,IAAIylB,IAAiB,CAA9B,EAAiCzlB,IAAIwlB,CAArC,IAAqDxlB,CAArD,EACMA,KAAK,IAAIylB,CAATzlB,IACF0lB,EAASzkB,IAATykB,CAAc1lB,CAAd0lB,GACAA,EAASzkB,IAATykB,CAAc1lB,KAAKylB,IAAiB,CAAtBzlB,CAAd0lB,CAFE1lB,IAIF0lB,EAASzkB,IAATykB,CAAc1lB,CAAd0lB,CAJE1lB;KAHR,MAUO;EACL,QAAM2lB,MAAN;EAAA,QACMC,MADN,CAEA,KAAS5lB,IAAI,CAAb,EAAgBA,IAAIwlB,CAApB,IAAoCxlB,CAApC,EACMA,KAAsB,IAAjBylB,CAAiB,GAAI,CAA1BzlB,IAA+BA,IAAI,CAAJA,IAAU,CAAzCA,GACF4lB,EAAmB3kB,IAAnB2kB,CAAwB5lB,CAAxB4lB,CADE5lB,GAGF2lB,EAAoB1kB,
IAApB0kB,CAAyB3lB,CAAzB2lB,CAHE3lB,CAMN0lB,EAASzkB,IAATykB,MAAAA,CAAAA,CAAAA,EAAiBC,CAAjBD,GACAA,EAASzkB,IAATykB,CAAc,CAAdA,CADAA,EAEAA,EAASzkB,IAATykB,MAAAA,CAAAA,CAAAA,EAAiBE,CAAjBF,CAFAA;EAIF,UAAOA,CAAP;EAYF,6BAAA,CACIN,CADJ,EAC0B9Q,CAD1B,EACgDnH,CADhD,EAEIkY,CAFJ;qBAEIA,QACF,IAAMQ,MAAN,CAEIR,IACFQ,EAAiB5kB,IAAjB4kB,CAAsBT,EAAW,CAAXA,IAAgBjY,CAAtC0Y,CADER,GAGFQ,EAAiB5kB,IAAjB4kB,CAAsBT,EAAW,CAAXA,IAAgBjY,CAAtC0Y,CAHER,CAMJ,KAAK,IAAIrlB,IAAI,CAAb,EAAgBA,IAAIolB,EAAW/lB,MAA/B,IAAyCW,CAAzC,EACMA,KAAKsU,EAAWjV,MAAhBW,GACEqlB,IACFQ,EAAiB5kB,IAAjB4kB,CAAsBvR,EAAWtU,IAAI,CAAfsU,IAAoB8Q,EAAWplB,CAAXolB,CAA1CS,CADER,GAGFQ,EAAiB5kB,IAAjB4kB,CAAsBT,EAAWplB,CAAXolB,IAAgB9Q,EAAWtU,IAAI,CAAfsU,CAAtCuR,CAJA7lB,GAOF6lB,EAAiB5kB,IAAjB4kB,CAAsBT,EAAWplB,CAAXolB,CAAtBS,CAPE7lB,CAWN,OAAO6lB,CAAP;EAOF,6BAAA,CACItR,CADJ,EACuBD,CADvB;EAGE,OADA,IAAMwR,KAAoB,EAA1B,EACS9lB,IAAI,CAAb,EAAgBA,IAAIsU,CAApB,IAAkCtU,CAAlC,EACE8lB,EAAiB7kB,IAAjB6kB,CAAsBvR,EAAMvU,CAANuU,EAAS,CAATA,CAAtBuR,EAEF,OAAOA,CAAP;EAcF,sBAAA,CACIC,CADJ,EAC8BxR,CAD9B,EACiDD,CADjD;EAGE,OADA,IAAM0R,IAAYD,EAAejjB,KAAfijB,CAAqB,CAArBA,EAAwB,CAAxBA,CAAlB,EACS/lB,IAAI,CAAb,EAAgBA,IAAIsU,CAApB,IAAkCtU,CAAlC,EACEgmB,EAAU/kB,IAAV+kB,CAAeD,EAAe/lB,IAAI,CAAnB+lB,IAAwBxR,EAAMvU,CAANuU,EAAS,CAATA,CAAxBwR,GAAsCxR,EAAMvU,CAANuU,EAAS,CAATA,CAArDyR,EAGF,OAAOA,CAAP;iCChImCC,GAAgB5hB;EACnD,OAAK,IAAIrE,IAAI,CAAb,EAAgBA,IAAIimB,EAAK5mB,MAAzB,IAAmCW,CAAnC,EACE,IAAIimB,EAAKA,EAAK5mB,MAAL4mB,GAAcjmB,CAAdimB,GAAkB,CAAvBA,MAA8B5hB,IAAO,CAAPA,GAAWrE,CAA7C,EACE,QAAO,CAAP,CAGJ,QAAO,CAAP;EAGF,0BAAA,CACIkmB,CADJ,EACyBC,CADzB,EAC8CF,CAD9C;EAMI,OAJF,IAAM5hB,IAAO6hB,EAAU7mB,MAAV6mB,GAAmBC,EAAU9mB,MAA1C,EACM+mB,MADN,EAEIC,IAAS,CAFb,EAGIC,IAAY,CAHhB,EAIWvU,IAAM,CAAf,EAAkBA,IAAM1N,CAAxB,EAA8B0N,GAA9B,GAC2B,MAAvBkU,EAAKM,OAALN,CAAalU,CAAbkU,IACFG,EAAInlB,IAAJmlB,CAASF,EAAUG,GAAVH,CAATE,IAEAA,EAAInlB,IAAJmlB,CAASD,EAAUG,GAAVH,CAATC,EAGJ,OAAOA,CAAP;EAGF,mCAAA,CACII,CADJ,EACsBP,CADtB;EAIE,OAFA,IAAMQ,MAAN,EACMpiB,IAAOmiB,EAAOnnB,MADpB,EAES0S,IAAM,CAAf,EAAkBA,IAAM1N,CAAxB,EAA8B0N,GAA9B,GAC6B,MAAvBkU,EAAKM,OAALN,CAAalU,CAAbkU,KACFQ,EAASxlB,IAATwlB,CAAcD,EAAOzU,CAAPyU,CAAdC,EAIJ,QAAQA,GADYR,EAAKlf,GAALkf,CAAS,UAAAlU,CAAA;EAAO,WAAAyU,EAAOzU,CAAPyU,CAAA;KAAhBP,EACpB;EAGF,8BAAA,CACI/kB,CADJ,EACqB+kB,CADrB;EAGE,SAAOS,iBAAiBxlB,CAAjBwlB,EADgBT,EAAKlf,GAALkf,CAAS,UAAAtmB,CAAA;EAAK,WAAA,CAAA;KAAdsmB,CAChBS,EAAwCT,CAAxCS,CAAP;EAGF,wBAAA,CACI3jB,CADJ,EAC2B7B,CAD3B;EAEE,MAAMmD,IAAOnD,EAAM7B,MAAnB,CAkBA,OAZAwJ,QAHA9F,IAAe,QAARA,CAAQ,GAAO7B,EAAM6F,GAAN7F,CAAU,UAACygB,CAAD,EAAI3hB,CAAJ;EAAU,WAAAA,CAAA;KAApBkB,CAAP,MAAmCkL,OAAOrJ,IAIhDmb,MAAM,UAAAyI,CAAA;EAAM,WAAAA,MAAOtiB,CAAPsiB,IAAeA,IAAKtiB,CAApB;MADrBwE,EAEI,iDAA+CxE,CAA/C,OAAA,GAAwDA,CAAxD,oBAAA,GACgBtB,CAHpB8F,GAMAA,OACI9F,EAAKmb,KAALnb,CAAW,UAAA4jB,CAAA;EAAM,WAAAC,MAAWD,CAAXC,CAAA;KAAjB7jB,CADJ8F,EAEI,4DACgB9F,CAHpB8F,CANAA,EAYO9F,EAAKgE,GAALhE,CAAS,UAAApE,CAAA;EAAK,WAAAA,IAAI,CAAJA,GAAQ0F,IAAO1F,CAAfA,GAAmBA,CAAnB;KAAdoE,CAAP;EAGF,oCAAA,CACIxC,CADJ,EACiB0lB,CADjB,EACiC5hB,CADjC;EAEEwE,SACIge,qBAAqBZ,CAArBY,EAA2BxiB,CAA3BwiB,CADJhe,EAEOtI,uDAAAA,GACa0lB,CADb1lB,eAAAA,GAC8B8D,CAD9B9D,YAFPsI;EAWF,4BAAA,CAAmCod,CAAnC,EAAmD5hB,CAAnD;EAEE,MAAIwiB,qBAAqBZ,CAArBY,EAA2BxiB,CAA3BwiB,CAAJ,EACE,OAAO,IAAP,CAGF,KADA,IAAM1mB,MAAN,EACSH,IAAI,CAAb,EAAgBA,IAAIqE,CAApB,IAA4BrE,CAA5B,GAC2B,MAArBimB,EAAKM,OAALN,CAAajmB,CAAbimB,KACF9lB,EAAOc,IAAPd,CAAYH,CAAZG,EAIJ,OADA8lB,EAAK5gB,OAAL4gB,CAAa,UAAAljB,CAAA;EAAQ,WAAA5C,EAAOc,IAAPd,CAAY4C,CAAZ5C,CAAA;KAArB8lB,GACO9lB,CAAP;EAIF,gCAAA,CAAuC8lB,CAAvC;EACE,SAAOA,EAAKlf,GAALkf,CAAS,UAACljB,CAAD,EAAO/C,CAAP;EAAa,YAACA,GAAG+C,E
AAJ;KAAtBkjB,EACFlD,IADEkD,CACG,UAACtnB,CAAD,EAAIsB,CAAJ;EAAU,WAAAtB,EAAE,CAAFA,IAAOsB,EAAE,CAAFA,CAAP;KADbgmB,EAEFlf,GAFEkf,CAEE,UAAAtmB,CAAA;EAAK,WAAAA,EAAE,CAAFA,CAAA;KAFPsmB,CAAP;EAKF,0BAAA,CAAiCa,CAAjC,EAAkDziB,CAAlD;EAEE,OADA,IAAM6V,MAAN,EACSla,IAAIqE,IAAOyiB,CAApB,EAA6B9mB,IAAIqE,CAAjC,IAAyCrE,CAAzC,EACEka,EAAIjZ,IAAJiZ,CAASla,CAATka,EAEF,OAAOA,CAAP;mCC/GqC6M,GAAoBhkB;EACzD,MAAMsB,IAAO0iB,EAAO,CAAPA,EAAU1nB,MAAvB,CACA0nB,EAAO1hB,OAAP0hB,CAAe,UAAC7lB,CAAD,EAAQlB,CAAR;EACb6I,WACI3H,EAAM7B,MAAN6B,KAAiBmD,CADrBwE,EAEI,oBAAkBxE,CAAlB,wBAAA,GAA4CrE,CAA5C,iDAAA,GACgCqE,CADhC,MAFJwE;KADFke,GAOAle,OACI9F,KAAQ,CAARA,IAAaA,IAAOsB,CADxBwE,EAEI,oBAAkBxE,CAAlB,mCAAA,IAAuDA,IAAO,CAA9D,OAFJwE,CAPAke,CAWA,IAAMC,IAAaD,EAAO,CAAPA,CAAnB,CACAA,EAAO1hB,OAAP0hB,CAAe,UAAC7lB,CAAD,EAAQlB,CAAR;EACb,SAAK,IAAIE,IAAI,CAAb,EAAgBA,IAAImE,CAApB,EAA0BnE,GAA1B,EACE2I,OACK3I,MAAM6C,CAAN7C,IAAgBgB,EAAMhB,CAANgB,MAAa8lB,EAAW9mB,CAAX8mB,CADlCne,EAEI,oBAAkBxE,CAAlB,yBAAA,GAA6CrE,CAA7C,QAAA,GAAoDkB,CAApD,6CAAA,GAC6C8lB,CAD7C,uCAAA,GAEuChnB,CAFvC,MAFJ6I;KAFJke;EAWF,yBAAA,CAAgCA,CAAhC,EAAoDhkB,CAApD;EAEE,OADA,IAAM+P,IAAciU,EAAO,CAAPA,EAAUjkB,KAAVikB,EAApB,EACS/mB,IAAI,CAAb,EAAgBA,IAAI+mB,EAAO1nB,MAA3B,EAAmCW,GAAnC,EACE8S,EAAY/P,CAAZ+P,KAAqBiU,EAAO/mB,CAAP+mB,EAAUhkB,CAAVgkB,CAArBjU,CAEF,OAAOA,CAAP;+BCrBEoF,GAAgBxM;EAClB,MAAIwM,EAAO7T,IAAP6T,GAAc,CAAlB,EACE,MAAM,IAAI1Z,KAAJ,CACF,8EACqB0Z,EAAO7T,IAD5B,MADE,CAAN,CAIF,IAAIqH,EAAQrH,IAARqH,GAAe,CAAnB,EACE,MAAM,IAAIlN,KAAJ,CACF,gFACqBkN,EAAQrH,IAD7B,MADE,CAAN,CAIF,IAAsB,YAAlBqH,EAAQxI,KAAZ,EACE,MAAM,IAAI1E,KAAJ,CACF,2EACsBkN,EAAQxI,KAD9B,MADE,CAAN,CAIF,IAAIwI,EAAQxK,KAARwK,CAAcA,EAAQrH,IAARqH,GAAe,CAA7BA,IAAkCwM,EAAO7T,IAA7C,EACE,MAAM,IAAI7F,KAAJ,CACF,mEACGkN,EAAQxK,KAARwK,CAAcA,EAAQrH,IAARqH,GAAe,CAA7BA,CADH,UAAA,GAC0CwM,EAAO7T,IAF/C,CAAN,CAKF,IAAoB,MAAhB6T,EAAO/W,IAAX,EACE,MAAM,IAAI3C,KAAJ,CACF,qEACiB0Z,EAAOhX,KADxB,MADE,CAAN,CAWF,KANA,IAAM+lB,IAAevb,EAAQxK,KAA7B,EACMgmB,IAAYD,EAAaA,EAAa5nB,MAAb4nB,GAAsB,CAAnCA,CADlB,EAKIE,IAAU,CALd,EAMSnnB,IAAI,CAAb,EAAgBA,IAAIinB,EAAa5nB,MAAb4nB,GAAsB,CAA1C,IAA+CjnB,CAA/C,EACEmnB,KAAWF,EAAajnB,CAAbinB,CAAXE,CAGF,IAAM/B,IAAalN,EAAOhX,KAA1B;EAAA,MAEMkmB,IAAcH,EAAankB,KAAbmkB,EAFpB,CAGAG,EAAY5J,GAAZ4J,GAEA,IAAIpB,IAAY,CAAhB,CACA,KAAShmB,IAAIknB,CAAb,EAAwBlnB,IAAIkY,EAAO7T,IAAnC,IAA2CrE,CAA3C,EACEgmB,KAAaZ,EAAWplB,CAAXolB,CAAbY,EACAoB,EAAYnmB,IAAZmmB,CAAiBhC,EAAWplB,CAAXolB,CAAjBgC,CADApB,CAIF,IAAM1hB,IACEmC,eAAeyR,EAAOhX,KAAtBuF,EAA6BM,GAA7BN,CAAiC,UAAA8B,CAAA;EAAU,WAAAA,IAASyd,CAAT;KAA3Cvf,QAAAA,EACH,EADGA,EACA3D,KADA2D,CACM,CADNA,EACSygB,CADTzgB,CADR,CAIA,QAAQ2gB,GAAaD,GAASnB,GAAW1hB,EAAzC;EC1DK,KAAM+iB,wBAAwB,EAA9B,CAQP,iCAAA,CAAyCC,CAAzC;EACE,SAAIA,KAAUD,qBAAVC,GACKA,CADLA,GAGGC,eAAeD,CAAfC,EAAuB/nB,KAAKkC,KAALlC,CAAWA,KAAKmC,IAALnC,CAAU8nB,CAAV9nB,CAAXA,CAAvB+nB,CAHP;gCCNErmB,GAAiBwK,GAAiB8b;EACpC,MAAMC,IAAY/b,EAAQrH,IAARqH,GAAe,CAAfA,GAAoBA,EAAQxK,KAARwK,CAAcA,EAAQrH,IAARqH,GAAe,CAA7BA,CAApBA,GAAsD,CAAxE;EAAA,MACMgc,IAAYhc,EAAQrH,IAARqH,GAAe,CAAfA,GAAoBA,EAAQrH,IAARqH,GAAe,CAAnCA,GAAuC,CADzD;EAAA,MAGMic,IAAa,+FACyBH,EAAQtmB,KADjC,GAEf,mBAFe,GAEKwK,EAAQxK,KAFb,cAAA,GAE8BA,CAF9B,GAGf,cAHe,GAGAumB,CAHA,qBAAA,GAG2BC,CAH3B,MAHnB,CAQA,IAAIF,EAAQnjB,IAARmjB,GAAeE,CAAnB,EACE,MAAM,IAAIlpB,KAAJ,CAAUmpB,IAAa,iBAAbA,GAA+BD,CAA/BC,OAAV,CAAN,CAEF,IAAIzmB,EAAM7B,MAAN6B,GAAeumB,KAAYD,EAAQnjB,IAARmjB,GAAeE,CAA3BD,CAAnB,EACE,MAAM,IAAIjpB,KAAJ,CACFmpB,IACA,yBADAA,IAC0BF,KAAYD,EAAQnjB,IAARmjB,GAAeE,CAA3BD,CAD1BE,CADE,CAAN,CAIF,IAAIH,EAAQnjB,IAARmjB,KAAiBE,IAAWxmB,EAAM7B,MAAjBqoB,GAA0BD,CAA/C,EACE,MAAM,IAAIjpB,KAAJ,CACFmpB,IAAa,kBAAbA,IAAgCD,IAAWxmB,EAAM7B,MAAj
BqoB,GAA0BD,CAA1DE,CADE,CAAN,CAGF,KAAK,IAAI/K,IAAI,CAAb,EAAgBA,IAAI8K,CAApB,IAAgC9K,CAAhC,EACE,IAAI4K,EAAQtmB,KAARsmB,CAAc5K,CAAd4K,MAAqB9b,EAAQxK,KAARwK,CAAckR,CAAdlR,CAAzB,EACE,MAAM,IAAIlN,KAAJ,CACFmpB,IACA,iBADAA,GACkB/K,CADlB+K,QAAAA,GACyBH,EAAQtmB,KAARsmB,CAAc5K,CAAd4K,CADzBG,wBAAAA,GAC+D/K,CAD/D+K,QAAAA,GAEIjc,EAAQxK,KAARwK,CAAckR,CAAdlR,CAFJic,OADE,CAAN,CAMJ,KAAS/K,IAAI,CAAb,EAAgBA,IAAI4K,EAAQnjB,IAARmjB,GAAeE,CAAnC,IAA+C9K,CAA/C,EACE,IAAI4K,EAAQtmB,KAARsmB,CAAc5K,IAAI8K,CAAlBF,MAAgCtmB,EAAM0b,IAAI6K,CAAVvmB,CAApC,EACE,MAAM,IAAI1C,KAAJ,CACFmpB,IACA,iBADAA,IACkB/K,IAAI8K,CADtBC,SAAAA,GAEIH,EAAQtmB,KAARsmB,CAAc5K,IAAI8K,CAAlBF,CAFJG,gBAAAA,IAE6C/K,IAAI8K,CAFjDC,SAAAA,GAGIzmB,EAAM0b,IAAI8K,CAAVxmB,CAHJymB,MADE,CAAN;EAuBN,uBAAA,CACIH,CADJ,EACqB9b,CADrB,EACsCxK,CADtC;EAEE,MAAIwK,EAAQrH,IAARqH,GAAe,CAAnB,EACE,MAAM,IAAIlN,KAAJ,CACF,iFACqBkN,EAAQrH,IAD7B,MADE,CAAN,CAIF,IAAImjB,EAAQnjB,IAARmjB,GAAe,CAAnB,EACE,MAAM,IAAIhpB,KAAJ,CACF,iFACqBgpB,EAAQnjB,IAD7B,MADE,CAAN,CAIF,IAAsB,YAAlBqH,EAAQxI,KAAZ,EACE,MAAM,IAAI1E,KAAJ,CAAU,4DACZkN,EAAQxI,KADN,CAAN,CAGF,IAAIhC,EAAM7B,MAAN6B,GAAe,CAAnB,EACE,MAAM,IAAI1C,KAAJ,CACF,+DAA6D0C,CAD3D,CAAN,CAIF,IAAqB,MAAjBA,EAAM7B,MAAV,EAAwB;EACtB,QAAqB,MAAjBqM,EAAQvK,IAAZ,EACE,MAAM,IAAI3C,KAAJ,CAAU,wDACZkN,EAAQxK,KADN,CAAN,CAGF,IAAqB,MAAjBsmB,EAAQrmB,IAAZ,EACE,MAAM,IAAI3C,KAAJ,CAAU,wDACZgpB,EAAQtmB,KADN,CAAN;EAKJ0mB,uBAAoB1mB,CAApB0mB,EAA2Blc,CAA3Bkc,EAAoCJ,CAApCI;EAYF,yBAAA,CACIJ,CADJ,EACqB9b,CADrB,EACsCxK,CADtC;EAWE,OARA,IAAMgmB,IAAaxb,EAAQrH,IAARqH,GAAe,CAAfA,GAAoBA,EAAQxK,KAARwK,CAAcA,EAAQrH,IAARqH,GAAe,CAA7BA,CAApBA,GAAsD,CAAzE,EAKMmc,IAAU3mB,EAAM7B,MALtB,EAOI2mB,IAAY,CAPhB,EAQShmB,IAAIknB,CAAb,EAAwBlnB,IAAI6nB,CAA5B,IAAuC7nB,CAAvC,EACEgmB,KAAa9kB,EAAMlB,CAANkB,CAAb8kB,CAGF,IAAM8B,IAAgBZ,IAAY,CAAZA,GAAiB,CAAjBA,GAAqBA,CAA3C;EAAA,MACMa,IAAarc,EAAQvK,IAARuK,GAAeoc,CADlC;EAAA,MAGME,IAAoBvhB,eAAevF,CAAfuF,QAAAA,EAAuB,EAAvBA,CAH1B,CAOA,SAAQygB,cAAWa,eAAY/B,cAAW1hB,SAH1B0jB,EAAcllB,KAAdklB,CACZA,EAAc3oB,MAAd2oB,GAAuBd,CADXc,EACsBA,EAAc3oB,MADpC2oB,GAGmCC,YADhC/gB,cAAchG,CAAdgG,GACnB;0CCtHEogB,GAAgBlT;EAClB,MACI8F,CADJ;EAAA,MAAIgO,KAAO,CAAX,CAUA,KAPIZ,KAAUD,qBAAVC,IACFpN,IAAMoN,CAANpN,EACAgO,KAAO,CAFLZ,IAIFpN,IAAMqN,eAAeD,CAAfC,EAAuB/nB,KAAKkC,KAALlC,CAAWA,KAAKmC,IAALnC,CAAU8nB,CAAV9nB,CAAXA,CAAvB+nB,CAGR,GAAQW,CAAR,GAAc;EACZ,QAAIhO,IAAM9F,CAAN8F,IAAqBA,MAAQoN,CAAjC,EAAyC;EACvCY,WAAO,CAAPA,CACA;EAEAhO,SAAMqN,eAAeD,CAAfC,EAAuBrN,IAAM,CAA7BqN,CAANrN;EAGJ,UAAOA,CAAP;EAGF,2BAAA,CACIsM,CADJ,EACsBzjB,CADtB,EACoCqR,CADpC;EAIE,OAFA,IAAMqS,MAAN,EACMpiB,IAAOmiB,EAAOnnB,MADpB,EAES0S,IAAM,CAAf,EAAkBA,IAAM1N,CAAxB,EAA8B0N,GAA9B,EACMA,MAAQhP,CAARgP,GACF0U,EAASxlB,IAATwlB,CAAcD,EAAOzU,CAAPyU,CAAdC,CADE1U,GAGF0U,EAASxlB,IAATwlB,CAAcrS,CAAdqS,CAHE1U,CAMN,OAAO0U,CAAP;8BCxCE7P,GAAezK,GAAiBhL;EAClC0H,SACI+N,EAAMvS,IAANuS,KAAezK,EAAM9M,MADzBwJ,EAEI,mBAAiB+N,EAAMvS,IAAvB,wBAAA,GAAiD8H,CAAjD,wCAAA,GACoCyK,EAAMvS,IAD1C,OAFJwE,GAIAA,OACI+N,EAAMvS,IAANuS,KAAezV,EAAK9B,MADxBwJ,EAEI,mBAAiB+N,EAAMvS,IAAvB,uBAAA,GAAgDlD,CAAhD,wCAAA,GACoCyV,EAAMvS,IAD1C,OAFJwE,CAJAA,CASA,KAAK,IAAI7I,IAAI,CAAb,EAAgBA,IAAI4W,EAAMvS,IAA1B,IAAkCrE,CAAlC,EACE6I,OACIsD,EAAMnM,CAANmM,IAAWhL,EAAKnB,CAALmB,CAAXgL,IAAsByK,EAAM1V,KAAN0V,CAAY5W,CAAZ4W,CAD1B/N,EAEI,mBAAiB+N,EAAMvS,IAAvB,cAAA,GAAuCrE,CAAvC,cAAA,GAAoDA,CAApD,QAAA,IACQmM,EAAMnM,CAANmM,IAAWhL,EAAKnB,CAALmB,CADnB,mCAAA,GAC0DnB,CAD1D,QAAA,GAEQ4W,EAAM1V,KAAN0V,CAAY5W,CAAZ4W,CAFR,MAFJ/N;EAYJ,8BAAA,CACI3H,CADJ,EACqBiL,CADrB,EACsC3D,CADtC,EACqDlE,CADrD,EAEIuQ,CAFJ,EAEmBC,CAFnB,EAEgCqT,CAFhC,EAEkDC,CAFlD,EAGIC,CAHJ;EAIE,uBAFExT,yBAAeC,yBAAaqT,yBAAkBC,yBAC9CC,QACmB,MAAjBF,CAA
J,EACE,MAAM,IAAI3pB,KAAJ,CAAU,oCAAV,CAAN,CAEF,IAAoB,MAAhB4pB,CAAJ,EACE,MAAM,IAAI5pB,KAAJ,CAAU,oCAAV,CAAN,CAOF,KAHA,IAAM8pB,MAAN,EACMC,MADN,EAEMC,MAFN,EAGSxoB,IAAI,CAAb,EAAgBA,IAAIkB,EAAM7B,MAA1B,EAAkCW,GAAlC,EACEsoB,EAAWtoB,CAAXsoB,IAAgBG,aAAa5T,CAAb4T,EAAwBtc,CAAxBsc,EAA+BnkB,CAA/BmkB,EAAwCvnB,CAAxCunB,EAA+CzoB,CAA/CyoB,CAAhBH,EACAC,EAASvoB,CAATuoB,IAAcG,YAAY5T,CAAZ4T,EAAqBlgB,CAArBkgB,EAA0BpkB,CAA1BokB,EAAmCxnB,CAAnCwnB,EAA0C1oB,CAA1C0oB,CADdJ,EAIID,IAAiB,KAAKroB,CAAtBqoB,KACFE,EAASvoB,CAATuoB,IAAcD,EAAWtoB,CAAXsoB,IAAgB,CAA9BC,EACAC,EAAWvnB,IAAXunB,CAAgBxoB,CAAhBwoB,CAFEH,CAJJC,CAUF,IAAInnB,IAAO,IAAIL,KAAJ,CAAUI,EAAM7B,MAAhB,EAAwB+H,IAAxB,CAA6B,CAA7B,CAAX,CAWA,OAVAjG,IAAOA,EAAK4F,GAAL5F,CAAS,UAACyb,CAAD,EAAI5c,CAAJ;EAEd,SADA,IAAI2oB,IAAQ,CAAZ,EACSvkB,IAAQkkB,EAAWtoB,CAAXsoB,CAAjB,IACOhkB,EAAQtE,CAARsE,IAAa,CAAbA,GAAiBF,KAASmkB,EAASvoB,CAATuoB,CAA1BjkB,GAAwCF,KAASmkB,EAASvoB,CAATuoB,EADxD,EAEKnkB,KAASE,EAAQtE,CAARsE,CAFd,EAGEqkB,KAAS,CAATA,CAEF,OAAOA,CAAP;KAPKxnB,CAAPA,GAUQmnB,GAAYnnB,GAAMqnB,EAA1B;EAGF,sBAAA,CACI3T,CADJ,EACuB+T,CADvB,EAC+CtkB,CAD/C,EAEI8gB,CAFJ,EAE0BriB,CAF1B;EAIE,MAAIqB,IAAQwkB,EAAa7lB,CAAb6lB,CAAZ,CAGI/T,IAAY,KAAK9R,CAAjB8R,KAKAzQ,IAJEE,EAAQvB,CAARuB,IAAgB,CAAhBA,GAIMjE,OAAOwoB,gBAJbvkB,GAOMjE,OAAOyoB,gBARfjU,EAaJ,IAAMkU,IAAW3D,EAAWriB,CAAXqiB,CAAjB,CAQA,OAPIhhB,IAAQ,CAARA,KACFA,KAAS2kB,CADP3kB,GAKJA,IAAQ4kB,MAAW,CAAXA,EAAc5kB,CAAd4kB,EAAqBD,IAAW,CAAhCC,CAER;EAGF,qBAAA,CACIlU,CADJ,EACqBmU,CADrB,EAC4C3kB,CAD5C,EAEI8gB,CAFJ,EAE0BriB,CAF1B;EAIE,MAAImmB,IAAOD,EAAYlmB,CAAZkmB,CAAX,CAGInU,IAAW,KAAK/R,CAAhB+R,KAIAoU,IAHE5kB,EAAQvB,CAARuB,IAAgB,CAAhBA,GAGKjE,OAAOyoB,gBAHZxkB,GAMKjE,OAAOwoB,gBAPd/T,EAYJ,IAAMiU,IAAW3D,EAAWriB,CAAXqiB,CAAjB,CAgBA,OAfI8D,IAAO,CAAPA,KACFA,KAAQH,CADNG,GASFA,IAFE5kB,EAAQvB,CAARuB,IAAgB,CAAhBA,GAEK0kB,MAAW,CAAXA,EAAcE,CAAdF,EAAoBD,CAApBC,CAFL1kB,GAKK0kB,OAAY,CAAZA,EAAeE,CAAfF,EAAqBD,IAAW,CAAhCC,CAGT;uBChIyBnpB;EAEzB,MAAIspB,IAAwBtpB,CAA5B,CAEA,IAAIupB,aAAavpB,CAAbupB,CAAJ,EACE,QAASvpB,EAAmBR,OAA5B,CAEF,KAAKyB,MAAMC,OAAND,CAAcjB,CAAdiB,CAAL,EACE,SAAA,CAIF,KAFA,IAAMI,MAEN,EAAOioB,aAAqBroB,KAA5B,GACEI,EAAMD,IAANC,CAAWioB,EAAU9pB,MAArB6B,GACAioB,IAAYA,EAAU,CAAVA,CADZjoB,CAOF,OAJIrB,aAAeiB,KAAfjB,IAAwBuiB,IAAIxG,GAAJwG,CAAQ,oCAARA,CAAxBviB,IACFwpB,2BAA2BxpB,CAA3BwpB,EAAgCnoB,CAAhCmoB,IAAAA,CADExpB,EAIGqB,CAAP;EAGF,oCAAA,CACIrB,CADJ,EAEIqB,CAFJ,EAEqBwK,CAFrB;EAIE,MADAA,IAAUA,OAAVA,EACM7L,aAAeiB,KAArB,EAAA;EAOAH,WACIO,EAAM7B,MAAN6B,GAAe,CADnBP,EAEI;EAAM,aAAA,iBAAe+K,EAAQzE,IAARyE,CAAa,IAAbA,CAAf,iDAAA,GACoB7L,EAAIR,MADxB,cAAA;OAFVsB,GAIAA,OACId,EAAIR,MAAJQ,KAAeqB,EAAM,CAANA,CADnBP,EAEI;EAAM,aAAA,iBAAe+K,EAAQzE,IAARyE,CAAa,IAAbA,CAAf,mBAAA,GAAkDxK,EAAM,CAANA,CAAlD,wBAAA,GACmBrB,EAAIR,MADvB,cAAA;OAFVsB,CAJAA,CASA,KADA,IAAM2oB,IAAWpoB,EAAM4B,KAAN5B,CAAY,CAAZA,CAAjB,EACSlB,IAAI,CAAb,EAAgBA,IAAIH,EAAIR,MAAxB,IAAkCW,CAAlC,EACEqpB,2BAA2BxpB,EAAIG,CAAJH,CAA3BwpB,EAAmCC,CAAnCD,EAA6C3d,EAAQU,MAARV,CAAe1L,CAAf0L,CAA7C2d;KAjBF,MACE1oB,OACqB,MAAjBO,EAAM7B,MADVsB,EAEI;EAAM,WAAA,iBAAe+K,EAAQzE,IAARyE,CAAa,IAAbA,CAAf,iDAAA,GAC2BxK,EAAM,CAANA,CAD3B,cAAA;KAFVP;EAoBJ,yBAAA,CACIhB,CADJ,EACqB4pB,CADrB,EACsCC,CADtC,EAEItmB,CAFJ;EAIE,uBAFEA,gBACFA,IAAQA,KAAS,WACbvD,aAAa0J,MAAjB,EACE,OAAO1J,CAAP,CAEF,KAAKypB,aAAazpB,CAAbypB,MAAoBtoB,MAAMC,OAAND,CAAcnB,CAAdmB,KAAiC,mBAANnB,KACnC,oBAANA,CADX,EAEE,MAAM,IAAInB,KAAJ,CACF,eAAa+qB,CAAb,kBAAA,GAAoCC,CAApC,+CAAA,GACiC7pB,EAAEsE,WAAFtE,CAAc6D,IAF7C,CAAN,CAIF,IAAMimB,IAAgBC,WAAW/pB,CAAX+pB,CAAtB,CAIA,OAHKN,aAAazpB,CAAbypB,KAAoBtoB,MAAMC,OAAND,CAAcnB,CAAdmB,CAApBsoB,KACHzpB,KAAKA,EADFypB,GAGE/f,OAAOC,IAAPD,CACHogB,CADGpgB,IAEFlG,QAAQwmB,aAAahq
B,CAAbgqB,EAAuCzmB,CAAvCymB,EAA8CvH,IAAIxG,GAAJwG,CAAQ,OAARA,CAA9CuH,GAFNtgB,EAGHnG,CAHGmG,CAAP;EAMF,8BAAA,CACIsb,CADJ,EAC2B4E,CAD3B,EAC4CC,CAD5C;EAEE,OAAK1oB,MAAMC,OAAND,CAAc6jB,CAAd7jB,CAAL,EACE,MAAM,IAAItC,KAAJ,CACF,cAAY+qB,CAAZ,gBAAA,GAAiCC,CAAjC,4CADE,CAAN,CAKF,OADgB7E,EACD5d,GADC4d,CAEZ,UAACxG,CAAD,EAAIne,CAAJ;EAAU,WAAA4pB,gBAAgBzL,CAAhByL,EAAsBL,OAAAA,GAAWvpB,CAAXupB,MAAtBK,EAAuCJ,CAAvCI,CAAA;KAFEjF,CAChB;eC7EqC3gB;EACrC,MAAM8T,IAAO1O,OAAO0O,IAAP1O,CAAYpF,CAAZoF,CAAb,CACA,IAAoB,MAAhB0O,EAAKzY,MAAT,EACE,MAAM,IAAIb,KAAJ,CACF,2GAEGsZ,EAAKzY,MAFR,WADE,CAAN,CAMF,IAAIwqB,IAAS/R,EAAK,CAALA,CAAb;EAAA,MACMrO,IAAKzF,EAAE6lB,CAAF7lB,CADX,CAII6lB,EAAOC,QAAPD,CAAgB,GAAhBA,MACFA,IAASA,EAAOE,SAAPF,CAAiB,CAAjBA,EAAoBA,EAAOxqB,MAAPwqB,GAAgB,CAApCA,CADPA,EAKJ,IAAMG,IAAK;WAAC,UAAA,SAAA/gB,sBAAAA,KAAAyb,GAAAA,gBAAAA,CACVtC,IAAIE,MAAJF,CAAWrI,UAAXqI,CAAsByH,CAAtBzH,EACA;EACE,UAAMjiB,IAASsJ,OAAAA,OAAAA,EAAMib,CAANjb,CAAf,CAKA,OAJItJ,aAAkBiC,OAAlBjC,IACFgG,QAAQ8T,KAAR9T,CAAc,yCAAdA,CADEhG,EAGJiiB,IAAIE,MAAJF,CAAWpI,QAAXoI,CAAoBjiB,CAApBiiB,CAHIjiB,EAIGA,CAAP;EACA,KAPF,CAOE,OAAO0V,CAAP;EAEA,YADAuM,IAAIE,MAAJF,CAAWpI,QAAXoI,CAAoB,IAApBA,GACMvM,CAAN;;KAXJ,CAiBA,OAHAzM,OAAOiM,cAAPjM,CAAsB4gB,CAAtB5gB,EAA0B,MAA1BA,IAAmCJ,OAAO6gB,GAAQI,eAAc,GAAhE7gB,GAGO4gB,CAAP;EChBF,kBAAA,CAAoCE,CAApC,EAA0DnY,CAA1D;qBAA0DA,KAAO,GAC/D,IAAMoY,IAAUP,gBAAgBM,CAAhBN,EAAwB,QAAxBA,EAAkC,SAAlCA,CAAhB,CAKA,KAHa,MAAT7X,MACFA,IAAMoY,EAAQ9lB,IAAR8lB,GAAe,IAEnBpY,MAAQoY,EAAQ9lB,IAAR8lB,GAAe,CAA3B,EACE,MAAM3rB,MACF,8EACmB2rB,EAAQ9lB,IAD3B,kBAAA,GAC+C0N,CAF7CvT,CAAN,CAsBF,OAjBiB2mB,WAAW,UAAA+E,CAAA;EAG1B,QACME,IAAMF,EAAOhd,SAAPgd,EAAkBnY,EAAlBmY,GADK,CACLA,CADZ;EAAA,QAGM7T,IADY6T,EAAOG,OAAPH,GAAiBzc,GAAjByc,CAAqBE,CAArBF,EACEzoB,GADFyoB,EAFlB,CAWA,SAAQlhB,OAAOqN,GAAGgI,UAND,UAAC3G,CAAD;EACf,YAAM4S,IAAW5S,EAAG7J,GAAH6J,CAAOrB,CAAPqB,CAAjB,CAEA,OAAO4S,EAAS7c,GAAT6c,CAAaA,EAASvqB,GAATuqB,EAAcvY,EAAduY,GADH,CACGA,EAA8Bzc,GAA9Byc,CAAkCjU,CAAlCiU,CAAbA,CAAP;WAGF;KAdenF,EAiBDgF,CAjBChF,CAiBjB;EAuBF,qBAAA,CAAuC+E,CAAvC,EAA6DnnB,CAA7D;qBAA6DA,KAAQ,GACnE,IAAMonB,IAAUP,gBAAgBM,CAAhBN,EAAwB,QAAxBA,EAAkC,YAAlCA,CAAhB,CAKA,KAHc,MAAV7mB,MACFA,IAAOonB,EAAQ9lB,IAAR8lB,GAAe,IAEpBpnB,MAASonB,EAAQ9lB,IAAR8lB,GAAe,CAA5B,EACE,MAAM3rB,MACF,kFACmB2rB,EAAQ9lB,IAD3B,mBAAA,GACgDtB,CAF9CvE,CAAN,CAoBF,OAfiB2mB,WAAW,UAAA+E,CAAA;EAC1B,QACMK,IAAOL,EAAOtqB,GAAPsqB,CAAWnnB,CAAXmnB,GAAiB,CAAjBA,CADb;EAAA,QAEMM,IAAUN,EAAOzc,GAAPyc,CAAWK,CAAXL,CAFhB;EAAA,QAGMlhB,IACFwhB,EAAQH,OAARG,GAAkB/c,GAAlB+c,CAAsBA,EAAQ/oB,GAAR+oB,GAAczqB,GAAdyqB,CAAkBznB,CAAlBynB,GAJT,CAISA,EAAkCpkB,GAAlCokB,EAAtBA,CAJJ,CAWA,SAAQxhB,UAAOqV,UALE,UAAC3G,CAAD;EACf,YAAM1F,IAAUhJ,EAAMvH,GAANuH,EAAhB,CACA,OAAO0O,EAAGjK,GAAHiK,CAAOA,EAAG3X,GAAH2X,CAAO3U,CAAP2U,GARC,CAQDA,EAAuB7J,GAAvB6J,CAA2B1F,CAA3B0F,CAAPA,CAAP;WAGF;KAZeyN,EAeDgF,CAfChF,CAejB;EAGF,KAAanT,UAAUyY,KAAIC,oBAAJD,CAAvB;EAAA,IACaxY,aAAawY,KAAIE,0BAAJF,CAD1B,CCnFA,iBAAA,CAAoCG,CAApC,EAAwDC,CAAxD;EACE,MAAMC,IAAQlB,gBAAgBgB,CAAhBhB,EAAsB,MAAtBA,EAA8B,SAA9BA,CAAd;EAAA,MACMmB,IAAQnB,gBAAgBiB,CAAhBjB,EAAsB,MAAtBA,EAA8B,SAA9BA,CADd,CAOA,OALApF,kBACIsG,EAAM5pB,KADVsjB,EACiBuG,EAAM7pB,KADvBsjB,EAEI,2BAAyBsG,EAAM5pB,KAA/B,UAAA,GAA4C6pB,EAAM7pB,KAAlD,0CAFJsjB,GAKOpC,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQmS,OAARnS,CAAgBgS,CAAhBhS,EAAuBiS,CAAvBjS,CAAA;KADfsJ,IAC+C0I,UAAOC,UADtD3I,CAAP;EAkBF,eAAA,CAAiCxL,CAAjC;EACE,MAAMsU,IAAStB,gBAAgBhT,CAAhBgT,EAAuB,OAAvBA,EAAgC,MAAhCA,CAAf,CAEA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ8R,IAAR9R,CAAaoS,CAAbpS,CAAA;KAAhCsJ,IAAuD8I,WAAvD9I,CAAP;EAgBF,eAAA,CAAiCxL,CAAjC;EACE,MAAMsU,IAAS
tB,gBAAgBhT,CAAhBgT,EAAuB,OAAvBA,EAAgC,MAAhCA,CAAf,CAEA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ+R,IAAR/R,CAAaoS,CAAbpS,CAAA;KAAhCsJ,IAAuD8I,WAAvD9I,CAAP;EAGF,KAAa6I,UAAUR,KAAIU,oBAAJV,CAAvB;EAAA,IACaG,OAAOH,KAAIW,cAAJX,CADpB;EAAA,IAEaI,OAAOJ,KAAIY,cAAJZ,CAFpB,CCxCA,eAAA,CACItnB,CADJ,EACwBjC,CADxB,EAEIgC,CAFJ;EAGE,uBADEA,gBACY,gBAAVA,CAAJ,EACE,MAAM,IAAI1E,KAAJ,CACF,kFADE,CAAN,CAIF,KAAK4qB,aAAajmB,CAAbimB,MAAyBtoB,MAAMC,OAAND,CAAcqC,CAAdrC,KACR,mBAAXqC,KAAyC,oBAAXA,CADzC,EAEE,MAAM,IAAI3E,KAAJ,CACF,0FADE,CAAN,CAIF,IAAMirB,IAAgBC,WAAWvmB,CAAXumB,CAAtB,CAYA,OAXa,QAATxoB,CAAS,IAAiC,MAAzBuoB,EAAcpqB,MAAtB,IACXisB,kBACIpqB,CADJoqB,EACW7B,CADX6B,EAEI,kDACuB7B,CADvB,0CAAA,GAEuBvoB,CAFvB,QAFJoqB,CADW,EAORlC,aAAajmB,CAAbimB,KAAyBtoB,MAAMC,OAAND,CAAcqC,CAAdrC,CAAzBsoB,KACHjmB,KAAUA,EADPimB,CAPQ,EAUbloB,IAAQA,KAASuoB,CAVJ,EAWNpgB,OAAOC,IAAPD,CACHnI,CADGmI,IAEDlG,QACIwmB,aAAaxmB,CAAbwmB,EAA4CzmB,CAA5CymB,EAAmDvH,IAAIxG,GAAJwG,CAAQ,OAARA,CAAnDuH,GAHHtgB,EAKHnG,CALGmG,CAAP;EAsBF,gBAAA,CACIL,CADJ,EAEI9F,CAFJ;EAGE,uBADEA,iBACGkmB,aAAapgB,CAAbogB,KAAuBtoB,MAAMC,OAAND,CAAckI,CAAdlI,MAAmC,gBAAVoC,CAArD,EACE,MAAM,IAAI1E,KAAJ,CACF,yEADE,CAAN,CAIF,OAAO0Z,OAAOlP,CAAPkP,IAAAA,EAAkBhV,CAAlBgV,CAAP;EAkBF,kBAAA,CAAkB/U,CAAlB,EAAwCD,CAAxC;qBAAwCA,gBACtCqoB,cAAcpoB,CAAdooB,EACA,IAAM9B,IAAgBC,WAAWvmB,CAAXumB,CAAtB,CACA,IAA6B,MAAzBD,EAAcpqB,MAAlB,EACE,MAAM,IAAIb,KAAJ,CAAU,oDAAV,CAAN,CAEF,OAAO0Z,OAAO/U,CAAP+U,EAAeuR,CAAfvR,EAA0ChV,CAA1CgV,CAAP;EAyBF,kBAAA,CACI/U,CADJ,EAC0BjC,CAD1B,EAEIgC,CAFJ;EAIE,uBAFEA,gBACFqoB,cAAcpoB,CAAdooB,GACa,QAATrqB,CAAS,IAAyB,MAAjBA,EAAM7B,MAA3B,EACE,MAAM,IAAIb,KAAJ,CAAU,+CAAV,CAAN,CAEF,IAAMirB,IAAgBC,WAAWvmB,CAAXumB,CAAtB,CACA,IAA6B,MAAzBD,EAAcpqB,MAAW,IAA8B,MAAzBoqB,EAAcpqB,MAAhD,EACE,MAAM,IAAIb,KAAJ,CACF,gEADE,CAAN,CAGF,IAA6B,MAAzBirB,EAAcpqB,MAAW,IAAc,QAAT6B,CAAlC,EACE,MAAM,IAAI1C,KAAJ,CACF,8EADE,CAAN,CAKF,OAAO0Z,OAAO/U,CAAP+U,EADPhX,IAAQA,KAASuoB,CACVvR,EAAsBhV,CAAtBgV,CAAP;EAyBF,kBAAA,CACI/U,CADJ,EAC0BjC,CAD1B,EAEIgC,CAFJ;EAIE,uBAFEA,gBACFqoB,cAAcpoB,CAAdooB,GACa,QAATrqB,CAAS,IAAyB,MAAjBA,EAAM7B,MAA3B,EACE,MAAM,IAAIb,KAAJ,CAAU,iDAAV,CAAN,CAEF,IAAMirB,IAAgBC,WAAWvmB,CAAXumB,CAAtB,CACA,IAA6B,MAAzBD,EAAcpqB,MAAW,IAA8B,MAAzBoqB,EAAcpqB,MAAhD,EACE,MAAM,IAAIb,KAAJ,CACF,kEADE,CAAN,CAGF,IAA6B,MAAzBirB,EAAcpqB,MAAW,IAAc,QAAT6B,CAAlC,EACE,MAAM,IAAI1C,KAAJ,CACF,yEADE,CAAN,CAKF,OAAO0Z,OAAO/U,CAAP+U,EADPhX,IAAQA,KAASuoB,CACVvR,EAAsBhV,CAAtBgV,CAAP;EAyBF,kBAAA,CACI/U,CADJ,EAC0BjC,CAD1B,EAEIgC,CAFJ;EAIE,uBAFEA,gBACFqoB,cAAcpoB,CAAdooB,GACa,QAATrqB,CAAS,IAAyB,MAAjBA,EAAM7B,MAA3B,EACE,MAAM,IAAIb,KAAJ,CAAU,gDAAV,CAAN,CAEF,IAAMirB,IAAgBC,WAAWvmB,CAAXumB,CAAtB,CACA,IAA6B,MAAzBD,EAAcpqB,MAAW,IAA8B,MAAzBoqB,EAAcpqB,MAAhD,EACE,MAAM,IAAIb,KAAJ,CACF,oEADE,CAAN,CAGF,IAA6B,MAAzBirB,EAAcpqB,MAAW,IAAc,QAAT6B,CAAlC,EACE,MAAM,IAAI1C,KAAJ,CACF,yEADE,CAAN,CAKF,OAAO0Z,OAAO/U,CAAP+U,EADPhX,IAAQA,KAASuoB,CACVvR,EAAsBhV,CAAtBgV,CAAP;EAyBF,kBAAA,CACI/U,CADJ,EAC0BjC,CAD1B,EAEIgC,CAFJ;EAIE,uBAFEA,gBACFqoB,cAAcpoB,CAAdooB,GACa,QAATrqB,CAAS,IAAyB,MAAjBA,EAAM7B,MAA3B,EACE,MAAM,IAAIb,KAAJ,CAAU,gDAAV,CAAN,CAEF,IAAMirB,IAAgBC,WAAWvmB,CAAXumB,CAAtB,CACA,IAA6B,MAAzBD,EAAcpqB,MAAW,IAA8B,MAAzBoqB,EAAcpqB,MAAhD,EACE,MAAM,IAAIb,KAAJ,CACF,sEADE,CAAN,CAIF,IAA6B,MAAzBirB,EAAcpqB,MAAW,IAAc,QAAT6B,CAAlC,EACE,MAAM,IAAI1C,KAAJ,CACF,yEADE,CAAN,CAKF,OAAO0Z,OAAO/U,CAAP+U,EADPhX,IAAQA,KAASuoB,CACVvR,EAAsBhV,CAAtBgV,CAAP;EAyBF,kBAAA,CACI/U,CADJ,EAEIjC,CAFJ,EAGIgC,CAHJ;EAKE,uBAFEA,gBACFqoB,cAAcpoB,CAAdooB,GACa,QAATrqB,CAAS,IAAyB,MAAjBA,EAAM7B,MAA3B,EACE,MAAM,IAAIb,KAAJ,CAAU,+CAAV,CAAN,CAEF,IAAMirB,IAAgBC,WAAWvmB,CAAXumB,CAAtB,CAC
A,IAA6B,MAAzBD,EAAcpqB,MAAW,IAA8B,MAAzBoqB,EAAcpqB,MAAhD,EACE,MAAM,IAAIb,KAAJ,CACF,oEADE,CAAN,CAGF,IAA6B,MAAzBirB,EAAcpqB,MAAW,IAAc,QAAT6B,CAAlC,EACE,MAAM,IAAI1C,KAAJ,CACF,yEADE,CAAN,CAMF,OAAO0Z,OAAO/U,CAAP+U,EAFPhX,IAAQA,KACJuoB,CACGvR,EAAsBhV,CAAtBgV,CAAP;EAeF,gBAAA,CACIhX,CADJ,EACwBgC,CADxB;EAEE,uBADsBA,gBACR,gBAAVA,CAAJ,EAA2B;EACzB,QAAM0nB,IAAO9M,OAAK5c,CAAL4c,EAAY,SAAZA,CAAb;EAAA,QACM+M,IAAO/M,OAAK5c,CAAL4c,EAAY,SAAZA,CADb,CAEA,OAAOmN,QAAQL,CAARK,EAAcJ,CAAdI,CAAP;EAEF,OAAM9nB,IAASyb,mBAAmB1X,cAAchG,CAAdgG,CAAnB0X,EAAyC1b,CAAzC0b,CAAf,CACA,OAAOvV,OAAOC,IAAPD,CAAYnI,CAAZmI,IAAoBlG,WAApBkG,EAA6BnG,CAA7BmG,CAAP;EAeF,eAAA,CACInI,CADJ,EACwBgC,CADxB;EAEE,uBADsBA,gBACR,gBAAVA,CAAJ,EAA2B;EACzB,QAAM0nB,IAAOY,MAAMtqB,CAANsqB,EAAa,SAAbA,CAAb;EAAA,QACMX,IAAOW,MAAMtqB,CAANsqB,EAAa,SAAbA,CADb,CAEA,OAAOP,QAAQL,CAARK,EAAcJ,CAAdI,CAAP;EAEF,OAAM9nB,IAASsB,oBAAoByC,cAAchG,CAAdgG,CAApBzC,EAA0CvB,CAA1CuB,CAAf,CACA,OAAO4E,OAAOC,IAAPD,CAAYnI,CAAZmI,IAAoBlG,WAApBkG,EAA6BnG,CAA7BmG,CAAP;EAgBF,cAAA,CACInI,CADJ,EACwB8H,CADxB,EACuC9F,CADvC;qBACuCA,eACrC,IAAMC,IAASsoB,uBAAuBvoB,CAAvBuoB,EAA8BvkB,cAAchG,CAAdgG,CAA9BukB,CAAf,CAEA,OADAtoB,EAAOiE,IAAPjE,CAAY6F,CAAZ7F,GACOkG,OAAOC,IAAPD,CAAYnI,CAAZmI,IAAoBlG,WAApBkG,EAA6BnG,CAA7BmG,CAAP;EAcF,mBAAA,CAAqC1J,CAArC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,UAAxBA,CAAX,CACA,OAAO9L,OAAK4N,EAAGxqB,KAAR4c,EAAe4N,EAAGxoB,KAAlB4a,CAAP;EAeF,oBAAA,CAAsCne,CAAtC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,WAAxBA,CAAX,CACA,OAAO4B,MAAME,EAAGxqB,KAATsqB,EAAgBE,EAAGxoB,KAAnBsoB,CAAP;EAcF,kBAAA,CAAkBpnB,CAAlB,EAAiC8kB,CAAjC,EAA+CyC,CAA/C;EACE,MAAY,MAARA,CAAJ,EACE,MAAM,IAAIntB,KAAJ,CAAU,6BAAV,CAAN,CAGF,IAAMsT,KAAQoX,IAAO9kB,MAAUunB,IAAM,EAArC;EAAA,MAEMxoB,IAASsB,oBAAoBknB,CAApBlnB,EAAyB,SAAzBA,CAFf,CAGAtB,EAAO,CAAPA,IAAYiB,CAAZjB,CACA,KAAK,IAAInD,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,EAAmCW,GAAnC,EACEmD,EAAOnD,CAAPmD,IAAYA,EAAOnD,IAAI,CAAXmD,IAAgB2O,CAA5B3O,CAGF,OAAOyoB,SAASzoB,CAATyoB,EAAiB,SAAjBA,CAAP;EAoBF,eAAA,CACIxnB,CADJ,EACmB8kB,CADnB,EACiCpX,CADjC,EAEI5O,CAFJ;EAGE,uBAF+B4O,yBAC7B5O,gBACW,MAAT4O,CAAJ,EACE,MAAM,IAAItT,KAAJ,CAAU,4BAAV,CAAN,CAOF,IAJsB4F,MAAU8kB,CAAV9kB,IACcA,IAAQ8kB,CAAR9kB,IAAgB0N,IAAO,CADrC1N,IAEc8kB,IAAO9kB,CAAP8kB,IAAgBpX,IAAO,CAE3D,EAEE,OAAO0Z,OAAO,EAAPA,EAAWtoB,CAAXsoB,CAAP,CAGF,IACMroB,IAASsB,oBADKjF,KAAK8Q,GAAL9Q,CAASA,KAAKuQ,IAALvQ,EAAW0pB,IAAO9kB,KAAS0N,CAA3BtS,CAATA,CACLiF,EAAiCvB,CAAjCuB,CADf,CAGIykB,IAAO9kB,CAAP8kB,IAAyB,MAATpX,CAAhBoX,KAGFpX,KAAQ,CAHNoX,GAMJ/lB,EAAO,CAAPA,IAAYiB,CANR8kB,CAOJ,KAAK,IAAIlpB,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,EAAmCW,GAAnC,EACEmD,EAAOnD,CAAPmD,IAAYA,EAAOnD,IAAI,CAAXmD,IAAgB2O,CAA5B3O,CAGF,OAAOyoB,SAASzoB,CAATyoB,EAAiB1oB,CAAjB0oB,CAAP;EAGF,KCjgBYC,KDigBZ;EAAA,ICreYC,AAuBPC,iBD8cL;EAAA,ICvcKC,gBDucL;EAAA,IChcKC,mBDgcL;EAAA,ICzbKC,qBDybL;EAAA,IAgBajb,WAAWwZ,KAAI0B,sBAAJ1B,CAhBxB;EAAA,IAiBazZ,YAAYyZ,KAAI2B,wBAAJ3B,CAjBzB,ECjgBA,UAAYoB,CAAZ;EACEA,WAAAA,YAAAA,EACAA,OAAAA,UADAA,EAEAA,MAAAA,SAFAA;EADF,CAAA,CAAYA,UAAAA,UAAAA,CAAZ,GA4BA,UAAYC,CAAZ;EACEA,MAAAA,OAAAA,EACAA,IAAAA,OADAA,EAEAA,IAAAA,OAFAA,EAGAA,IAAAA,OAHAA,EAIAA,IAAAA,OAJAA,EAKAA,IAAAA,OALAA,EAMAA,IAAAA,OANAA;EADF,CAAA,CAAYA,iBAAAA,iBAAAA,CAAZ,GAuBA,UAAKC,CAAL;EACEA,WAAAA,YAAAA,EACAA,OAAAA,UADAA,EAEAA,MAAAA,UAFAA,EAGAA,WAAAA,cAHAA;EADF,CAAA,CAAKA,sBAAAA,sBAAAA,CAAL,GAOA,UAAKC,CAAL;EACEA,WAAAA,YAAAA,EACAA,OAAAA,UADAA,EAEAA,MAAAA,SAFAA,EAGAA,WAAAA,cAHAA;EADF,CAAA,CAAKA,qBAAAA,qBAAAA,CAAL,GAOA,UAAKC,CAAL;EACEA,WAAAA,YAAAA,EACAA,OAAAA,YADAA,EAEAA,MAAAA,YAFAA,EAGAA,WAAAA,cAHAA;EADF,CAAA,CAAKA,wBAAAA,wBAAAA,CAAL,GAOA,UAAKC,CAAL;EACEA,WAAAA,cAAAA,EA
CAA,OAAAA,cADAA,EAEAA,MAAAA,cAFAA,EAGAA,WAAAA,cAHAA;EADF,CAAA,CAAKA,0BAAAA,0BAAAA,CAAL,EAOA,IAAMG,kBACJC,SAAWL,qBACXM,OAASR,mBACTjoB,MAAQkoB,kBACRQ,WAAaN,uBAJf,CAOA,mBAAA,CAA2BO,CAA3B,EAA4CC,CAA5C;EACE,SAAOL,cAAcI,CAAdJ,EAAqBK,CAArBL,CAAP;EAIF,oBAAA,CAA2BtN,CAA3B;EACE,SAAO4N,WAAW5N,CAAX4N,EAAiB,OAAjBA,CAAP;;ECjEA,YAAA,CAAoBC,CAApB;EAAoB3nB,kBAAAA,GAAA2nB,CAAA3nB,EAFZA,SAAAA,GAAO,IAAI+T,OAAJ,EAEK/T;EAoBtB,UAlBE4nB,WAAAA,IAAAA,GAAA,UAAIljB,CAAJ;EAIE,WAHK1E,KAAKgF,IAALhF,CAAU0T,GAAV1T,CAAc0E,CAAd1E,KACHA,KAAK2nB,SAAL3nB,CAAegf,QAAfhf,CAAwB0E,CAAxB1E,CADGA,EAGEA,KAAKgF,IAALhF,CAAU2W,GAAV3W,CAAc0E,CAAd1E,CAAP;KAJF4nB,EAOAA,WAAAA,IAAAA,GAAA,UAAIljB,CAAJ,EAAoBX,CAApB;EACE/D,SAAKgF,IAALhF,CAAU8W,GAAV9W,CAAc0E,CAAd1E,EAAsB+D,CAAtB/D;KARF4nB,EAWAA,WAAAA,IAAAA,GAAA,UAAIljB,CAAJ;EACE,WAAO1E,KAAKgF,IAALhF,CAAU0T,GAAV1T,CAAc0E,CAAd1E,CAAP;KAZF4nB,EAeAA,WAAAA,OAAAA,GAAA,UAAOljB,CAAP;EACE,WAAO1E,KAAKgF,IAALhF,CAAUmX,MAAVnX,CAAiB0E,CAAjB1E,CAAP;KAhBF4nB,GAkBF;;;EAqBA,YAAA,YACEC,WAAAA,KAAAA,GAAA,UAAK9oB,CAAL;EACE,UAAM,IAAIxF,KAAJ,CAAU,sBAAV,CAAN;KADFsuB,EAGAA,WAAAA,KAAAA,GAAA,UAAKnjB,CAAL;EACE,UAAM,IAAInL,KAAJ,CAAU,sBAAV,CAAN;KAJFsuB,EAMAA,WAAAA,SAAAA,GAAA,UAASnjB,CAAT;EACE,UAAM,IAAInL,KAAJ,CAAU,sBAAV,CAAN;KAPFsuB,EASAA,WAAAA,YAAAA,GAAA,UAAYnjB,CAAZ;EACE,UAAM,IAAInL,KAAJ,CAAU,sBAAV,CAAN;KAVFsuB,EAYAA,WAAAA,MAAAA,GAAA,UAAMnjB,CAAN,EAAsBxG,CAAtB;EACE,UAAM,IAAI3E,KAAJ,CAAU,sBAAV,CAAN;KAbFsuB,EAeAA,WAAAA,WAAAA,GAAA,UACIvO,CADJ,EAEIC,CAFJ;EAGE,UAAM,IAAIhgB,KAAJ,CAAU,sBAAV,CAAN;KAlBFsuB,EAoBAA,WAAAA,SAAAA,GAAA,UACInjB,CADJ,EACoBzI,CADpB,EAEIgC,CAFJ;EAGE,UAAM,IAAI1E,KAAJ,CAAU,sBAAV,CAAN;KAvBFsuB,EAyBAA,WAAAA,OAAAA,GAAA;EACE,UAAM,IAAItuB,KAAJ,CAAU,sBAAV,CAAN;KA1BFsuB,EA6BAA,WAAAA,eAAAA,GAAA;EACE,UAAM,IAAItuB,KAAJ,CAAU,qBAAV,CAAN;KA9BFsuB,EAiCAA,WAAAA,YAAAA,GAAA,UACInuB,CADJ,EACiBsB,CADjB,EAC8B2L,CAD9B,EAEIC,CAFJ;EAGE,UAAM,IAAIrN,KAAJ,CAAU,qBAAV,CAAN;KApCFsuB,EAuCAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB,EAA8BwM,CAA9B,EAA+ChL,CAA/C;EACE,UAAM,IAAI3C,KAAJ,CAAU,qBAAV,CAAN;KAxCFsuB,EA0CAA,WAAAA,aAAAA,GAAA,UACIntB,CADJ,EACUwM,CADV,EAC2B3D,CAD3B,EAC0ClE,CAD1C,EAEIuQ,CAFJ,EAEuBC,CAFvB,EAEwCqT,CAFxC,EAGIC,CAHJ,EAGyBC,CAHzB;EAIE,UAAM,IAAI7pB,KAAJ,CAAU,qBAAV,CAAN;KA9CFsuB,EAgDAA,WAAAA,QAAAA,GAAA,UAA0BnuB,CAA1B,EAAgCoE,CAAhC;EACE,UAAM,IAAIvE,KAAJ,CAAU,qBAAV,CAAN;KAjDFsuB,EAoDAA,WAAAA,OAAAA,GAAA,UAAOC,CAAP,EAA0BhqB,CAA1B;EACE,UAAM,IAAIvE,KAAJ,CAAU,qBAAV,CAAN;KArDFsuB,EAwDAA,WAAAA,IAAAA,GAAA,UAAsBnuB,CAAtB;EACE,UAAM,IAAIH,KAAJ,CAAU,qBAAV,CAAN;KAzDFsuB,EA4DAA,WAAAA,IAAAA,GAAA,UAAInuB,CAAJ,EAAesB,CAAf;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KA7DFsuB,EA+DAA,WAAAA,KAAAA,GAAA,UAAuBC,CAAvB;EACE,UAAM,IAAIvuB,KAAJ,CAAU,qBAAV,CAAN;KAhEFsuB,EAkEAA,WAAAA,SAAAA,GAAA,UAASnuB,CAAT,EAAoBsB,CAApB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAnEFsuB,EAqEAA,WAAAA,SAAAA,GAAA,UAASnuB,CAAT,EAAoBsB,CAApB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAtEFsuB,EAwEAA,WAAAA,WAAAA,GAAA,UAAWnuB,CAAX,EAAsBsB,CAAtB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAzEFsuB,EA2EAA,WAAAA,SAAAA,GAAA,UAASnuB,CAAT,EAAoBsB,CAApB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KA5EFsuB,EA+EAA,WAAAA,IAAAA,GAAA,UAAIntB,CAAJ,EAAesmB,CAAf;EACE,UAAM,IAAIznB,KAAJ,CAAU,qBAAV,CAAN;KAhFFsuB,EAkFAA,WAAAA,KAAAA,GAAA,UAAKntB,CAAL,EAAgBsmB,CAAhB;EACE,UAAM,IAAIznB,KAAJ,CAAU,qBAAV,CAAN;KAnFFsuB,EAsFAA,WAAAA,mBAAAA,GAAA,UACIntB,CADJ,EACUwU,CADV,EACgCC,CADhC;EAEE,UAAM,IAAI5V,KAAJ,CAAU,qBAAV,CAAN;KAxFFsuB,EA2FAA,WAAAA,OAAAA,GAAA,UAAOntB,CAAP,EAAkBoD,CAAlB;EACE,UAAM,IAAIvE,KAAJ,CAAU,qBAAV,CAAN;KA5FFsuB,EA8FAA,WAAAA,OAAAA,GAAA,UAAOntB,CAAP,EAAkBoD,CAAlB;EACE,UAAM,IAAIvE,KAAJ,CAAU,qBAAV,CAAN;KA/FFsuB,EAkGAA,WAAAA,MAAAA,GAAA,UAAMn
uB,CAAN,EAAiBsB,CAAjB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAnGFsuB,EAqGAA,WAAAA,SAAAA,GAAA,UAASnuB,CAAT,EAAoBsB,CAApB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAtGFsuB,EAyGAA,WAAAA,KAAAA,GAAA,UAAKnuB,CAAL,EAAgBsB,CAAhB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KA1GFsuB,EA4GAA,WAAAA,UAAAA,GAAA,UAAUnuB,CAAV,EAAqBsB,CAArB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KA7GFsuB,EAgHAA,WAAAA,QAAAA,GAAA,UAAQnuB,CAAR,EAAmBsB,CAAnB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAjHFsuB,EAmHAA,WAAAA,aAAAA,GAAA,UAAanuB,CAAb,EAAwBsB,CAAxB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KApHFsuB,EAuHAA,WAAAA,WAAAA,GAAA,UAA6BnuB,CAA7B;EACE,UAAM,IAAIH,KAAJ,CAAU,qBAAV,CAAN;KAxHFsuB,EA0HAA,WAAAA,WAAAA,GAAA,UAAWnuB,CAAX,EAAsBsB,CAAtB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KA3HFsuB,EA6HAA,WAAAA,UAAAA,GAAA,UAAUnuB,CAAV,EAAqBsB,CAArB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KA9HFsuB,EAiIAA,WAAAA,MAAAA,GAAA,UAAMld,CAAN;EACE,UAAM,IAAIpR,KAAJ,CAAU,qBAAV,CAAN;KAlIFsuB,EAoIAA,WAAAA,OAAAA,GAAA,UAAOld,CAAP,EAA0BjR,CAA1B,EAAqCsB,CAArC;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KArIFsuB,EAwIAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB,EAA6B+U,CAA7B,EAAwCC,CAAxC;EACE,UAAM,IAAInW,KAAJ,CAAU,qBAAV,CAAN;KAzIFsuB,EA4IAA,WAAAA,IAAAA,GAAA,UAAIntB,CAAJ,EAAesmB,CAAf;EACE,UAAM,IAAIznB,KAAJ,CAAU,qBAAV,CAAN;KA7IFsuB,EA+IAA,WAAAA,QAAAA,GAAA,UAAQnuB,CAAR,EAAmBsB,CAAnB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAhJFsuB,EAmJAA,WAAAA,IAAAA,GAAA,UAAInuB,CAAJ,EAAesB,CAAf;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KApJFsuB,EAuJAA,WAAAA,IAAAA,GAAA,UAAIntB,CAAJ,EAAesmB,CAAf;EACE,UAAM,IAAIznB,KAAJ,CAAU,qBAAV,CAAN;KAxJFsuB,EA0JAA,WAAAA,QAAAA,GAAA,UAAQnuB,CAAR,EAAmBsB,CAAnB;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KA3JFsuB,EA8JAA,WAAAA,IAAAA,GAAA,UAAIntB,CAAJ,EAAesmB,CAAf;EACE,UAAM,IAAIznB,KAAJ,CAAU,qBAAV,CAAN;KA/JFsuB,EAiKAA,WAAAA,IAAAA,GAAA,UAAIntB,CAAJ,EAAesmB,CAAf;EACE,UAAM,IAAIznB,KAAJ,CAAU,qBAAV,CAAN;KAlKFsuB,EAqKAA,WAAAA,kBAAAA,GAAA,UAAkBnuB,CAAlB,EAA6BsB,CAA7B;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAtKFsuB,EAyKAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA1KFsuB,EA4KAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA7KFsuB,EA+KAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAhLFsuB,EAmLAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KApLFsuB,EAuLAA,WAAAA,IAAAA,GAAA,UAAsBnuB,CAAtB,EAA4BsB,CAA5B;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KAxLFsuB,EA0LAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA3LFsuB,EA6LAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA9LFsuB,EAgMAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAjMFsuB,EAmMAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KApMFsuB,EAsMAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAvMFsuB,EAyMAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA1MFsuB,EA6MAA,WAAAA,OAAAA,GAAA,UAAyBntB,CAAzB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA9MFsuB,EAgNAA,WAAAA,WAAAA,GAAA,UAA6BntB,CAA7B;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAjNFsuB,EAoNAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KArNFsuB,EAuNAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAxNFsuB,EA0NAA,WAAAA,OAAAA,GAAA,UAAyBpV,CAAzB,EAAgCrB,CAAhC;EACE,UAAM,IAAI7X,KAAJ,CAAU,qBAAV,CAAN;KA3NFsuB,EA6NAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA9NFsuB,EAgOAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAjOFsuB,EAoOAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB,EAA6BD,CA
A7B,EAA0CE,CAA1C;EACE,UAAM,IAAIpB,KAAJ,CAAU,qBAAV,CAAN;KArOFsuB,EAwOAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAzOFsuB,EA2OAA,WAAAA,WAAAA,GAAA,UAA6BntB,CAA7B;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA5OFsuB,EA+OAA,WAAAA,QAAAA,GAAA,UAA0BntB,CAA1B;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAhPFsuB,EAmPAA,WAAAA,SAAAA,GAAA,UAA2BntB,CAA3B;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KApPFsuB,EAuPAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAxPFsuB,EA0PAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA3PFsuB,EA6PAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA9PFsuB,EAiQAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAlQFsuB,EAoQAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KArQFsuB,EAuQAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAxQFsuB,EA0QAA,WAAAA,MAAAA,GAAA,UAAwBnuB,CAAxB,EAA8BsB,CAA9B;EACE,UAAM,IAAIzB,KAAJ,CAAU,qBAAV,CAAN;KA3QFsuB,EA8QAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA/QFsuB,EAiRAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAlRFsuB,EAoRAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KArRFsuB,EAwRAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAzRFsuB,EA2RAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA5RFsuB,EA8RAA,WAAAA,MAAAA,GAAA,UAAwBntB,CAAxB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA/RFsuB,EAkSAA,WAAAA,IAAAA,GAAA,UAAsBntB,CAAtB;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KAnSFsuB,EAsSAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB,EAA6BhD,CAA7B;EACE,UAAM,IAAI6B,KAAJ,CAAU,qBAAV,CAAN;KAvSFsuB,EA0SAA,WAAAA,OAAAA,GAAA,UAAOntB,CAAP,EAAoB4S,CAApB,EAAsCya,CAAtC;EACE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KA3SFsuB,EA6SAA,WAAAA,eAAAA,GAAA,UAAepV,CAAf,EAA6BnF,CAA7B,EAA+Cya,CAA/C;EAEE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KA/SFsuB,EAiTAA,WAAAA,gBAAAA,GAAA,UAAgBntB,CAAhB,EAA6BstB,CAA7B,EAA2CD,CAA3C;EACE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KAlTFsuB,EAqTAA,WAAAA,gBAAAA,GAAA,UAAgBlW,CAAhB,EAAiCrE,CAAjC,EAAmDya,CAAnD;EAEE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KAvTFsuB,EAyTAA,WAAAA,wBAAAA,GAAA,UAAwBpV,CAAxB,EAAsCnF,CAAtC,EAAwDya,CAAxD;EAEE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KA3TFsuB,EA6TAA,WAAAA,yBAAAA,GAAA,UAAyBntB,CAAzB,EAAsCstB,CAAtC,EAAoDD,CAApD;EAEE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KA/TFsuB,EAkUAA,WAAAA,QAAAA,GAAA,UAAQntB,CAAR,EAAqBqtB,CAArB;EACE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KAnUFsuB,EAqUAA,WAAAA,gBAAAA,GAAA,UAAgBpV,CAAhB,EAA8B/X,CAA9B,EAA2C0W,CAA3C,EAAwD2W,CAAxD;EAEE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KAvUFsuB,EAyUAA,WAAAA,QAAAA,GAAA,UAAQntB,CAAR,EAAqBqtB,CAArB;EACE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KA1UFsuB,EA4UAA,WAAAA,gBAAAA,GAAA,UAAgBpV,CAAhB,EAA8B/X,CAA9B,EAA2CqtB,CAA3C;EACE,UAAM,IAAIxuB,KAAJ,CAAU,qBAAV,CAAN;KA7UFsuB,EAgVAA,WAAAA,QAAAA,GAAA,UAA0CntB,CAA1C,EAAgDuB,CAAhD;EAEE,UAAM,IAAI1C,KAAJ,CAAU,qBAAV,CAAN;KAlVFsuB,EAoVAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB,EAA6BuD,CAA7B;EACE,UAAM,IAAI1E,KAAJ,CAAU,qBAAV,CAAN;KArVFsuB,EAwVAA,WAAAA,KAAAA,GAAA,UAAuBntB,CAAvB,EAA6B6L,CAA7B;EACE,UAAM,IAAIhN,KAAJ,CAAU,qBAAV,CAAN;KAzVFsuB,EA4VAA,WAAAA,IAAAA,GAAA,UACIntB,CADJ,EACU8M,CADV,EAC6CC,CAD7C;EAEE,UAAM,IAAIlO,KAAJ,CAAU,qBAAV,CAAN;KA9VFsuB,EAiWAA,WAAAA,UAAAA,GAAA,UAA4BntB,CAA5B,EAAkC+O,CAAlC;EACE,UAAM,IAAIlQ,KAAJ,CAAU,qBAAV,CAAN;KAlWFsuB,EAqWAA,WAAAA,OAAAA,GAAA,UAAyBntB,CAAzB,EAA+B+L,CAA/B,EAAkD3I,CAAlD;EACE,UAAM,IAAIvE,KAAJ,CAAU,qBAAV,CAAN;KAtWFsuB,EAyWAA,WAAAA,SAAAA,GAAA,UAASntB,CAAT,EAAoB+L,CAApB;EACE,UAAM,IAAIlN,KAAJ,C
AAU,qBAAV,CAAN;KA1WFsuB,EA6WAA,WAAAA,UAAAA,GAAA,UACIphB,CADJ,EACqB8b,CADrB,EACsCtmB,CADtC;EAEE,UAAM,IAAI1C,KAAJ,CAAU,qBAAV,CAAN;KA/WFsuB,EAkXAA,WAAAA,eAAAA,GAAA,UACIntB,CADJ,EACU2U,CADV,EACgCC,CADhC;EAEE,UAAM,IAAI/V,KAAJ,CAAU,qBAAV,CAAN;KApXFsuB,EAuXAA,WAAAA,eAAAA,GAAA,UACIntB,CADJ,EACU2U,CADV,EACgC7H,CADhC;EAEE,UAAM,IAAIjO,KAAJ,CAAU,qBAAV,CAAN;KAzXFsuB,EA4XAA,WAAAA,eAAAA,GAAA,UACIntB,CADJ,EACiButB,CADjB,EACoCC,CADpC,EAEIhb,CAFJ;EAGE,UAAM,IAAI3T,KAAJ,CAAU,qBAAV,CAAN;KA/XFsuB,EAkYAA,WAAAA,uBAAAA,GAAA,UAAuBpV,CAAvB,EAAqC/X,CAArC,EAAkDwS,CAAlD;EAEE,UAAM,IAAI3T,KAAJ,CAAU,qBAAV,CAAN;KApYFsuB,EAuYAA,WAAAA,sBAAAA,GAAA,UACIntB,CADJ,EACiBytB,CADjB,EACoCD,CADpC,EAEIhb,CAFJ;EAGE,UAAM,IAAI3T,KAAJ,CAAU,qBAAV,CAAN;KA1YFsuB,EA6YAA,WAAAA,8BAAAA,GAAA,UACIpV,CADJ,EACkB/X,CADlB,EAC+BwS,CAD/B;EAEE,UAAM,IAAI3T,KAAJ,CAAU,qBAAV,CAAN;KA/YFsuB,EAkZAA,WAAAA,mBAAAA,GAAA,UACIntB,CADJ,EACiBgN,CADjB,EAC0CC,CAD1C,EAEIC,CAFJ,EAE6BC,CAF7B,EAGItF,CAHJ;EAIE,UAAM,IAAIhJ,KAAJ,CAAU,qBAAV,CAAN;KAtZFsuB,EAyZAA,WAAAA,6BAAAA,GAAA,UACIntB,CADJ,EACiB4T,CADjB,EACiCC,CADjC,EAC+C7W,CAD/C,EAEI8W,CAFJ;EAGE,UAAM,IAAIjV,KAAJ,CAAU,qBAAV,CAAN;KA5ZFsuB,EA+ZAA,WAAAA,QAAAA,GAAA,UACIpV,CADJ,EACkB2V,CADlB,EACwCC,CADxC,EAC+D/Z,CAD/D,EAEIC,CAFJ,EAEkB7W,CAFlB,EAEiC8W,CAFjC;EAGE,UAAM,IAAIjV,KAAJ,CAAU,qBAAV,CAAN;KAlaFsuB,EAqaAA,WAAAA,YAAAA,GAAA,UACI5C,CADJ,EACsBqD,CADtB,EAC2CC,CAD3C,EAEIC,CAFJ;EAGE,UAAM,IAAIjvB,KAAJ,CAAU,qBAAV,CAAN;KAxaFsuB,EA2aAA,WAAAA,OAAAA,GAAA,UAAOphB,CAAP,EAA0B3O,CAA1B,EAAyC2wB,CAAzC,EAA0DC,CAA1D;EAEE,UAAM,IAAInvB,KAAJ,CAAU,qBAAV,CAAN;KA7aFsuB,EAgbAA,WAAAA,OAAAA,GAAA,UAAOntB,CAAP,EAAkBoD,CAAlB,EAAgCmI,CAAhC,EAAoDC,CAApD;EAEE,UAAM,IAAI3M,KAAJ,CAAU,qBAAV,CAAN;KAlbFsuB,EAqbAA,WAAAA,kBAAAA,GAAA,UACIc,CADJ,EACqBC,CADrB,EACuCC,CADvC,EAEIC,CAFJ,EAE0BC,CAF1B;EAGE,UAAM,IAAIxvB,KAAJ,CAAU,qBAAV,CAAN;KAxbFsuB,EA2bAA,WAAAA,IAAAA,GAAA,UAAIntB,CAAJ;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA5bFsuB,EA8bAA,WAAAA,KAAAA,GAAA,UAAKntB,CAAL;EACE,UAAM,IAAInB,KAAJ,CAAU,qBAAV,CAAN;KA/bFsuB,EAicAA,WAAAA,QAAAA,GAAA,UAA0BlC,CAA1B,EAAmCC,CAAnC;EACE,UAAM,IAAIrsB,KAAJ,CAAU,qBAAV,CAAN;KAlcFsuB,EAocAA,WAAAA,KAAAA,GAAA,UAAuBlW,CAAvB;EACE,UAAM,IAAIpY,KAAJ,CAAU,qBAAV,CAAN;KArcFsuB,EAucAA,WAAAA,KAAAA,GAAA,UAAuBlW,CAAvB;EACE,UAAM,IAAIpY,KAAJ,CAAU,qBAAV,CAAN;KAxcFsuB,EA2cAA,WAAAA,cAAAA,GAAA,UACI1a,CADJ,EACqBwb,CADrB,EACsCK,CADtC,EAEIC,CAFJ,EAEgCC,CAFhC,EAGIC,CAHJ;EAIE,UAAM,IAAI5vB,KAAJ,CAAU,qBAAV,CAAN;KA/cFsuB,EAkdAA,WAAAA,aAAAA,GAAA,UAAantB,CAAb,EAA0BqV,CAA1B,EAA6CxC,CAA7C;EACE,UAAM,IAAIhU,KAAJ,CAAU,qBAAV,CAAN;KAndFsuB,EAudAA,WAAAA,MAAAA,GAAA,UAAwB9jB,CAAxB,EAAkCqlB,CAAlC,EAAwDtrB,CAAxD;EACE,UAAM,IAAIvE,KAAJ,CAAU,qBAAV,CAAN;KAxdFsuB,EA2dAA,WAAAA,cAAAA,GAAA,UACIwB,CADJ,EAC2BC,CAD3B,EACiDzb,CADjD,EAEI0b,CAFJ;EAGE,UAAM,IAAIhwB,KAAJ,CAAU,qBAAV,CAAN;KA9dFsuB,EAoeAA,WAAAA,aAAAA,GAAA,UAAaF,CAAb;EACE,UAAM,IAAIpuB,KAAJ,CAAU,qBAAV,CAAN;KAreFsuB,EAweAA,WAAAA,QAAAA,GAAA;EACE,UAAM,IAAItuB,KAAJ,CAAU,qBAAV,CAAN;KAzeFsuB,GA2eF;0BCxiBIntB,GAAMuD,GAAiB4V;EACzB,MAAc,gBAAV5V,CAAJ,EAA2B;EACzB,QAAgB,gBAAZvD,EAAEuD,KAAN,EACE,OAAOvD,EAAE2L,KAAF3L,EAAP,CAEF,IAAM8uB,IAAcjD,MAAM7rB,EAAEuB,KAARsqB,CAApB;EAAA,QACMkD,IAAS/uB,EAAE0qB,OAAF1qB,EADf;EAAA,QAEMQ,IAAS2Y,EAAQmS,OAARnS,CAAgB4V,CAAhB5V,EAAwB2V,CAAxB3V,CAFf,CAKA,OAFA2V,EAAYxW,OAAZwW,IACAC,EAAOzW,OAAPyW,EADAD,EAEOtuB,CAAP;EAGF,QAAKwuB,gBAAgBhvB,EAAEuD,KAAlByrB,EAAyBzrB,CAAzByrB,CAAL,EAGE,OAAOtlB,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBM,QAAQhK,EAAEgK,QAAhCN,EAAyCnG,CAAzCmG,CAAP,CAEF,IAAgB,gBAAZ1J,EAAEuD,KAAN,EAA6B;EAC3B,QAAM0nB,IAAO9R,EAAQ8R,IAAR9R,CAAanZ,CAAbmZ,CAAb,CACM3Y,IAASyqB,EAAKpgB,IAALogB,CAAU1nB,CAAV0nB,CAATzqB,CAEN,OADAyqB,EAAK3S,OAAL2S
,IACOzqB,CAAP;EAEF,OAAc,YAAV+C,CAAJ,EACE,OAAO4V,EAAQ8V,GAAR9V,CAAYnZ,CAAZmZ,CAAP,CACK,IAAc,WAAV5V,CAAJ,EAAsB;EAC3B,QAAM2rB,IAAOC,OAAO,CAAPA,EAAUnvB,EAAEuD,KAAZ4rB,CAAb,CACM3uB,IAAS2Y,EAAQlK,QAARkK,CAAiBnZ,CAAjBmZ,EAAoB+V,CAApB/V,CAAT3Y,CAEN,OADA0uB,EAAK5W,OAAL4W,IACO1uB,CAAP;EAEA,SAAM,IAAI3B,KAAJ,CAAU,4CAA0C0E,CAA1C,MAAV,CAAN;EAIJ,uBAAA,CACIvD,CADJ,EACUuB,CADV;EAEE,SAAOmI,OAAOC,IAAPD,CAAYnI,CAAZmI,IAAoBM,QAAQhK,EAAEgK,QAA9BN,EAAuC1J,EAAEuD,KAAzCmG,CAAP;mCC9BEuhB,GAAoBC;EACtB,MAAID,EAAKvrB,MAALurB,KAAgBC,EAAKxrB,MAAzB,EACE,MAAM,IAAIb,KAAJ,CACF,kEACGosB,EAAKvrB,MADR,aAAA,GACyBwrB,EAAKxrB,MAD9B,MADE,CAAN,CAKF,KADA,IAAMc,IAAS,IAAIiD,YAAJ,CAA+B,IAAdwnB,EAAKvrB,MAAtB,CAAf,EACSW,IAAI,CAAb,EAAgBA,IAAIG,EAAOd,MAA3B,EAAmCW,KAAK,CAAxC,EACEG,EAAOH,CAAPG,IAAYyqB,EAAK5qB,IAAI,CAAT4qB,CAAZzqB,EACAA,EAAOH,IAAI,CAAXG,IAAgB0qB,EAAK7qB,IAAI,CAAT6qB,CADhB1qB,CAGF,OAAOA,CAAP;EAiBF,gCAAA,CAAuC8qB,CAAvC;EAIE,OAFA,IAAML,IAAO,IAAIxnB,YAAJ,CAAiB6nB,EAAQ5rB,MAAR4rB,GAAiB,CAAlC,CAAb,EACMJ,IAAO,IAAIznB,YAAJ,CAAiB6nB,EAAQ5rB,MAAR4rB,GAAiB,CAAlC,CADb,EAESjrB,IAAI,CAAb,EAAgBA,IAAIirB,EAAQ5rB,MAA5B,EAAoCW,KAAK,CAAzC,EACE4qB,EAAK5qB,IAAI,CAAT4qB,IAAcK,EAAQjrB,CAARirB,CAAdL,EACAC,EAAK7qB,IAAI,CAAT6qB,IAAcI,EAAQjrB,IAAI,CAAZirB,CADdL,CAGF,SAAQA,SAAMC,SAAd;EAOF,8BAAA,CAAqCI,CAArC;EAKE,OAHA,IAAM8D,IAAMvvB,KAAKuQ,IAALvQ,CAAUyrB,EAAQ5rB,MAAR4rB,GAAiB,CAA3BzrB,CAAZ,EACMorB,IAAO,IAAIxnB,YAAJ,CAAiB2rB,CAAjB,CADb,EAEMlE,IAAO,IAAIznB,YAAJ,CAAiB2rB,CAAjB,CAFb,EAGS/uB,IAAI,CAAb,EAAgBA,IAAIirB,EAAQ5rB,MAA5B,EAAoCW,KAAK,CAAzC,EACE4qB,EAAKprB,KAAKkC,KAALlC,CAAWQ,IAAI,CAAfR,CAALorB,IAA0BK,EAAQjrB,CAARirB,CAA1BL,EACAC,EAAKrrB,KAAKkC,KAALlC,CAAWQ,IAAI,CAAfR,CAALqrB,IAA0BI,EAAQjrB,IAAI,CAAZirB,CAD1BL,CAGF,SAAQA,SAAMC,SAAd;EAOF,6BAAA,CAAoCI,CAApC;EAKE,OAHA,IAAM8D,IAAMvvB,KAAKkC,KAALlC,CAAWyrB,EAAQ5rB,MAAR4rB,GAAiB,CAA5BzrB,CAAZ,EACMorB,IAAO,IAAIxnB,YAAJ,CAAiB2rB,CAAjB,CADb,EAEMlE,IAAO,IAAIznB,YAAJ,CAAiB2rB,CAAjB,CAFb,EAGS/uB,IAAI,CAAb,EAAgBA,IAAIirB,EAAQ5rB,MAA5B,EAAoCW,KAAK,CAAzC,EACE4qB,EAAKprB,KAAKkC,KAALlC,CAAWQ,IAAI,CAAfR,CAALorB,IAA0BK,EAAQjrB,CAARirB,CAA1BL,EACAC,EAAKrrB,KAAKkC,KAALlC,CAAWQ,IAAI,CAAfR,CAALqrB,IAA0BI,EAAQjrB,IAAI,CAAZirB,CAD1BL,CAGF,SAAQA,SAAMC,SAAd;EAQF,6BAAA,CACII,CADJ,EAC2B1rB,CAD3B;EAIE,WAAQqrB,MAFKK,EAAgB,IAAR1rB,CAAR0rB,GAECJ,MADDI,EAAgB,IAAR1rB,CAAQ,GAAI,CAApB0rB,GACb;EASF,4BAAA,CACIhhB,CADJ,EACsB2gB,CADtB,EACoCC,CADpC,EACkDtrB,CADlD;EAEE0K,IAAa,IAAR1K,CAAL0K,IAAkB2gB,CAAlB3gB,EACAA,EAAa,IAAR1K,CAAQ,GAAI,CAAjB0K,IAAsB4gB,CADtB5gB;EAOF,mBAAA,CACIrI,CADJ,EACeotB,CADf;EAIE,OAFA,IAAMpE,IAAO,IAAIxnB,YAAJ,CAAiBxB,IAAI,CAArB,CAAb,EACMipB,IAAO,IAAIznB,YAAJ,CAAiBxB,IAAI,CAArB,CADb,EAES5B,IAAI,CAAb,EAAgBA,IAAIR,KAAKuQ,IAALvQ,CAAUoC,IAAI,CAAdpC,CAApB,EAAsCQ,GAAtC,EAA2C;EACzC,QAAML,KAAKqvB,IAAU,CAAVA,IAAe,KAAKxvB,KAAKyvB,MAAMjvB,IAAI4B,EAA9C,CACAgpB,EAAK5qB,CAAL4qB,IAAUprB,KAAK2R,GAAL3R,CAASG,CAATH,CAAVorB,EACAC,EAAK7qB,CAAL6qB,IAAUrrB,KAAK0R,GAAL1R,CAASG,CAATH,CADVorB;EAGF,YAAQA,SAAMC,SAAd;EAMF,kBAAA,CACInW,CADJ,EACe9S,CADf,EAC0BotB,CAD1B;EAEE,MAAMrvB,KAAKqvB,IAAU,CAAVA,IAAe,KAAKxvB,KAAKyvB,MAAMva,IAAI9S,EAA9C,CAGA,SAAQgpB,MAFKprB,KAAK2R,GAAL3R,CAASG,CAATH,GAECqrB,MADDrrB,KAAK0R,GAAL1R,CAASG,CAATH,GACb;kCC7HEouB,GAAmBC,GAAoBC,GACvCC,GAAsBC;EAQxB,OAPA,IAAMkB,IAAapuB,MAAMoH,IAANpH,CAAW+sB,CAAX/sB,EACKiG,GADLjG,CACS,UAACquB,CAAD,EAAQlB,CAAR;EAAqB,aAAEkB,UAAOlB,aAAT;KAD9BntB,EAEKyR,MAFLzR,CAEY,UAAAsuB,CAAA;EAAK,WAAAA,EAAED,KAAFC,GAAUpB,CAAV;KAFjBltB,EAGKiiB,IAHLjiB,CAGU,UAACuuB,CAAD,EAAKC,CAAL;EAAY,WAAAA,EAAGH,KAAHG,GAAWD,EAAGF,KAAd;KAHtBruB,CAAnB,EAKMyuB,MALN,EAOSvvB,IAAI,CAAb,EAAgBA,IAAIkvB,EAAW7vB,MAA/B,EAAuCW,GAAvC,EAA4C;EACp
C,QAAAoe,QAAA;EAAA,QAAC+Q,WAAD;EAAA,QAAQlB,cAAR,CACN,IAAIkB,IAAQnB,CAAZ,EACE,MAIF,KADA,IAAIwB,KAAkB,CAAtB,EACSvsB,IAAIssB,EAASlwB,MAATkwB,GAAkB,CAA/B,EAAkCtsB,KAAK,CAAvC,IAA4CA,CAA5C,EAA+C;EAE7C,UADYwsB,sBAAsB7B,CAAtB6B,EAA6BxB,CAA7BwB,EAAuCF,EAAStsB,CAATssB,CAAvCE,KACD1B,CAAX,EAAyB;EACvByB,aAAkB,CAAlBA,CACA;;EAIJ,UAAKA,MACHD,EAAStuB,IAATsuB,CAActB,CAAdsB,GACIA,EAASlwB,MAATkwB,IAAmBzB,EAFzB,EAGI;EAKN,UAAOlC,SAAS2D,CAAT3D,EAAmB,OAAnBA,CAAP;EAGF,+BAAA,CAA+BgC,CAA/B,EAAkD5tB,CAAlD,EAA6DiD,CAA7D;EACE,MAAMysB,IAAS9B,EAAMzlB,QAANylB,CAAmB,IAAJ5tB,CAAf4tB,EAA0B,IAAJ5tB,CAAI,GAAI,CAA9B4tB,CAAf;EAAA,MACM+B,IAAS/B,EAAMzlB,QAANylB,CAAmB,IAAJ3qB,CAAf2qB,EAA0B,IAAJ3qB,CAAI,GAAI,CAA9B2qB,CADf;EAAA,MAEMgC,IAAQpwB,KAAKE,GAALF,CAASkwB,EAAO,CAAPA,CAATlwB,EAAoBkwB,EAAO,CAAPA,CAApBlwB,CAFd;EAAA,MAGMqwB,IAAQrwB,KAAKE,GAALF,CAASkwB,EAAO,CAAPA,CAATlwB,EAAoBkwB,EAAO,CAAPA,CAApBlwB,CAHd;EAAA,MAIMswB,IAAQtwB,KAAKI,GAALJ,CAASkwB,EAAO,CAAPA,CAATlwB,EAAoBkwB,EAAO,CAAPA,CAApBlwB,CAJd;EAAA,MAKMuwB,IAAQvwB,KAAKI,GAALJ,CAASkwB,EAAO,CAAPA,CAATlwB,EAAoBkwB,EAAO,CAAPA,CAApBlwB,CALd;EAAA,MAMMwwB,IAAQxwB,KAAKE,GAALF,CAASmwB,EAAO,CAAPA,CAATnwB,EAAoBmwB,EAAO,CAAPA,CAApBnwB,CANd;EAAA,MAOMywB,IAAQzwB,KAAKE,GAALF,CAASmwB,EAAO,CAAPA,CAATnwB,EAAoBmwB,EAAO,CAAPA,CAApBnwB,CAPd;EAAA,MAQM0wB,IAAQ1wB,KAAKI,GAALJ,CAASmwB,EAAO,CAAPA,CAATnwB,EAAoBmwB,EAAO,CAAPA,CAApBnwB,CARd;EAAA,MASM2wB,IAAQ3wB,KAAKI,GAALJ,CAASmwB,EAAO,CAAPA,CAATnwB,EAAoBmwB,EAAO,CAAPA,CAApBnwB,CATd;EAAA,MAUM4wB,KAASN,IAAQF,MAAUG,IAAQF,EAVzC;EAAA,MAWMQ,KAASH,IAAQF,MAAUG,IAAQF,EAXzC,CAYA,IAAIG,KAAS,CAATA,IAAcC,KAAS,CAA3B,EACE,OAAO,CAAP,CAEF,IAAMC,IAAmB9wB,KAAKI,GAALJ,CAASowB,CAATpwB,EAAgBwwB,CAAhBxwB,CAAzB;EAAA,MACM+wB,IAAmB/wB,KAAKI,GAALJ,CAASqwB,CAATrwB,EAAgBywB,CAAhBzwB,CADzB;EAAA,MAEMgxB,IAAmBhxB,KAAKE,GAALF,CAASswB,CAATtwB,EAAgB0wB,CAAhB1wB,CAFzB;EAAA,MAGMixB,IAAmBjxB,KAAKE,GAALF,CAASuwB,CAATvwB,EAAgB2wB,CAAhB3wB,CAHzB;EAAA,MAIMkxB,IAAmBlxB,KAAKI,GAALJ,CAASgxB,IAAmBF,CAA5B9wB,EAA8C,CAA9CA,IACrBA,KAAKI,GAALJ,CAASixB,IAAmBF,CAA5B/wB,EAA8C,CAA9CA,CALJ,CAMA,OAAOkxB,KAAoBN,IAAQC,CAARD,GAAgBM,CAApCA,CAAP;kBC9DE/wB,GAAM0uB,GAAsBtrB;EAC9B,MAAMoJ,IAAQrL,MAAMnB,EAAE0E,IAARvD,EAAcsG,IAAdtG,CAAmB,CAAnBA,CAAd;EAAA,MACMK,IAAOxB,EAAEuB,KAAFvB,CAAQmD,KAARnD,EADb,CAEA,OAAO0uB,EAAWtnB,GAAXsnB,CAAe,UAAA1M,CAAA;EACpBxgB,MAAK4B,CAAL5B,IAAawgB,CAAbxgB,CACA,IAAM2B,IAAQnD,EAAEmD,KAAFnD,CAAQwM,CAARxM,EAAewB,CAAfxB,CAAd,CAEA,OADAwM,EAAMpJ,CAANoJ,KAAewV,CAAfxV,EACOrJ,CAAP;KAJKurB,CAAP;qBCCE1uB,GAAegxB,GAAkBC,GAAkBlc,GACnDC;EAOF,OALA,IAAMkc,IAAUF,EAAOA,EAAOtxB,MAAPsxB,GAAgB,CAAvBA,CAAhB,EACMvS,qBADN,EACO0S,QADP,EACc3vB,QADd,EAEM4vB,IAActF,uBAAuBmF,CAAvBnF,EAA+BqF,IAAQpc,CAAvC+W,CAFpB,EAGMuF,IAAiBvF,uBAAuB,OAAvBA,EAAgCqF,IAAQpc,CAAxC+W,CAHvB,EAKSxrB,IAAI,CAAb,EAAgBA,IAAI6wB,CAApB,EAA2B7wB,GAA3B,EAAgC;EAI9B,SAHA,IAAMuH,IAASvH,IAAIkB,CAAnB,EACMoC,IAAO5D,EAAEwI,QAAFxI,CAAW6H,CAAX7H,EAAmB6H,IAASrG,CAA5BxB,CADb,EAEMsxB,MAFN,EAGSjxB,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,EAAiCW,GAAjC,EACEixB,EAAUhwB,IAAVgwB,GAAgBjoB,OAAOzF,EAAKvD,CAALuD,GAAShE,OAAOS,GAAvCixB,EAEFA,EAAUlO,IAAVkO,CAAe,UAACtyB,CAAD,EAAIsB,CAAJ;EAAU,aAAAA,EAAE+I,KAAF/I,GAAUtB,EAAEqK,KAAZ;OAAzBioB,EAEA,IAAMC,IAAYjxB,IAAIyU,CAAtB;EAAA,QACMyc,IAAWJ,EAAY5oB,QAAZ4oB,CAAqBG,CAArBH,EAAgCG,IAAYxc,CAA5Cqc,CADjB;EAAA,QAEMK,IAAcJ,EAAe7oB,QAAf6oB,CAAwBE,CAAxBF,EAAmCE,IAAYxc,CAA/Csc,CAFpB,CAGA,KAAShxB,IAAI,CAAb,EAAgBA,IAAI0U,CAApB,EAAuB1U,GAAvB,EACEmxB,EAASnxB,CAATmxB,IAAcF,EAAUjxB,CAAVixB,EAAajoB,KAA3BmoB,EACAC,EAAYpxB,CAAZoxB,IAAiBH,EAAUjxB,CAAVixB,EAAa1xB,KAD9B4xB;EAMJ,OAAMre,IAAc6d,EAAO7tB,KAAP6tB,EAApB,CAEA,OADA7d,EAAYA,EAAYzT,MAAZyT,GAAqB,CAAjCA,IAAsC
4B,CAAtC5B,GAEEoF,OAAO6Y,CAAP7Y,EAAoBpF,CAApBoF,EAAiC0Y,CAAjC1Y,GACAA,OAAO8Y,CAAP9Y,EAAuBpF,CAAvBoF,EAAoC,OAApCA,EAFF;EClCF;EAyCA,SApCE,UAAYmZ,CAAZ,EAAoC5G,CAApC,EAAqD6G,CAArD;EAJArsB,sBAAAA,IAAiB,IAAjBA,CAKE,IAAMssB,IAAaF,EAAWE,UAA9B;EAAA,QACMC,IAAYH,EAAWG,SAD7B;EAAA,QAEMlK,IAAS+J,EAAW/J,MAF1B;EAAA,QAGMmK,IAAUjyB,KAAKuQ,IAALvQ,CAAU8nB,IAASiK,CAAnB/xB,CAHhB,CAIK8xB,KACHrsB,KAAKysB,aAALzsB,CAAmBhE,IAAnBgE,CAAwB,cAAxBA,CADGqsB,EAGLrsB,KAAK6N,WAAL7N,IAAoBusB,GAAWC,EAH1BH,CAIL,IAAMK,IAAiB,UAAPlH,CAAO,GAAS,GAAT,GAAe,GAAtC;EAAA,QACMmH,IAAeN,IACjB,eADiBA,GAEjB,8CAHJ,CAKArsB,KAAK4sB,QAAL5sB,GAAgB,wKAKcssB,CALd,8HAAA,GAUUA,CAVV,qCAAA,GAWIK,CAXJ,iFAAA,GAaMD,CAbN,mKAAhB1sB;KAsBJ;KAzCA;EAAA;ECkEA,SA5DE,UAAY+nB,CAAZ;EAJA/nB,sBAAAA,IAAiB,KAAjBA,EAKEA,KAAK6N,WAAL7N,GAAmB+nB,EAAS8E,OAL9B7sB,CAME,IAAM8sB,IAAe/E,EAAS+E,YAA9B;EAAA,QACMC,IAAchF,EAASgF,WAD7B;EAAA,QAEMC,IAAejF,EAASiF,YAF9B;EAAA,QAGMC,IAAclF,EAASkF,WAH7B;EAAA,QAIMC,IAAiBnF,EAASmF,cAJhC;EAAA,QAKMC,IAAgBpF,EAASoF,aAL/B;EAAA,QAMMC,IAAwBrF,EAASqF,qBANvC;EAAA,QAOMC,IAAuBtF,EAASsF,oBAPtC;EAAA,QASMC,IAASF,IAAwB,CAAxBA,GAA4BrF,EAASwF,OAATxF,CAAiByF,GAT5D;EAAA,QAUMC,IAAUJ,IAAuB,CAAvBA,GAA2BtF,EAASwF,OAATxF,CAAiB2F,IAV5D;EAAA,QAYMC,IAAgB,KAAKb,IAAeC,CAApB,CAZtB,CAcA/sB,KAAK4sB,QAAL5sB,GAAgB,sCACastB,CADb,OAAA,GACwBG,CADxB,iDAAA,GAEsBE,CAFtB,mdAAA,GAgBYP,CAhBZ,0BAAA,GAiBAF,CAjBA,wDAAA,GAkB4BF,CAlB5B,8CAAA,GAoBgBjF,EAAS6F,SApBzB,sIAAA,GAyBcP,CAzBd,yBAAA,GA0BDF,CA1BC,0DAAA,GA2B8BF,CA3B9B,gDAAA,GA6BkBlF,EAAS8F,QA7B3B,wSAAhB7tB;KA4CJ;KDlEA,2BEOI6sB,GAAmBrL;EAGrB,OAFA,IAAMsM,IAASjB,EAAQzyB,MAAvB,EACM2zB,MADN,EAEShzB,IAAI,CAAb,EAAgBA,IAAI+yB,CAApB,EAA4B/yB,GAA5B,EAAiC;EAC/B,QAAM+R,IAAMghB,IAAS,CAATA,GAAa/yB,CAAzB;EAAA,QACMrB,IAAImzB,EAAQ/f,CAAR+f,KAAgB,CAD1B,EAEUrL,EAASA,EAASpnB,MAATonB,GAAkB,CAAlBA,GAAsBzmB,CAA/BymB,KAAqC,KACvC,KAAW,MAAN9nB,KACXq0B,EAAKC,OAALD,CAAajhB,CAAbihB;EAGJ,UAAOA,CAAP;EAOF,0BAAA,CACIlB,CADJ,EACuBrL,CADvB;EAGE,OADA,IAAMtmB,MAAN,EACSH,IAAI,CAAb,EAAgBA,IAAIymB,EAASpnB,MAA7B,EAAqCW,GAArC,EAA0C;EACxC,QAAMkzB,IAAQpB,EAAQA,EAAQzyB,MAARyyB,GAAiB9xB,CAAjB8xB,GAAqB,CAA7BA,CAAd;EAAA,QACMqB,IAAU1M,EAASpnB,MAATonB,GAAkBzmB,CAAlBymB,GAAsB,CADtC;EAAA,QAEM2M,IAAS3M,EAAS0M,CAAT1M,CAFf,EAGa,QAATyM,CAAS,IAAmB,MAAVA,CAAU,IAAKE,IAAS,MAC5CjzB,EAAO8yB,OAAP9yB,CAAegzB,CAAfhzB;EAGJ,UAAOA,CAAP;EAOF,+BAAA,CAAsC6yB,CAAtC;EACE,OAAK,IAAIhzB,IAAI,CAAb,EAAgBA,IAAIgzB,EAAK3zB,MAAzB,EAAiCW,GAAjC,EACE,IAAIgzB,EAAKhzB,CAALgzB,MAAYhzB,CAAhB,EACE,QAAO,CAAP,CAGJ,QAAO,CAAP;EAGF,oCAAA,CACIQ,CADJ,EACsBC,CADtB;EAKE,OAHA,IAAMN,MAAN,EACM6G,IAAIxH,KAAKI,GAALJ,CAASgB,EAAOnB,MAAhBG,EAAwBiB,EAAOpB,MAA/BG,CADV,EAGSQ,IAAI,CAAb,EAAgBA,IAAIgH,CAApB,EAAuBhH,GAAvB,EAA4B;EAC1B,QAAIrB,IAAI6B,EAAOA,EAAOnB,MAAPmB,GAAgBR,CAAhBQ,GAAoB,CAA3BA,CAAR,CACS,QAAL7B,CAAK,KACPA,IAAI,CADG,EAGT,IAAIsB,IAAIQ,EAAOA,EAAOpB,MAAPoB,GAAgBT,CAAhBS,GAAoB,CAA3BA,CAAR,CAIA,IAHS,QAALR,CAAK,KACPA,IAAI,CADG,GAGC,MAANtB,CAAJ,EACEwB,EAAO8yB,OAAP9yB,CAAeF,CAAfE,EADF,KAEO,IAAU,MAANF,CAAJ,EACLE,EAAO8yB,OAAP9yB,CAAexB,CAAfwB,EADK,KAEA;EAAA,UAAIxB,MAAMsB,CAAV,EAGL,MAAMzB,MAFS,0DACRgC,CADQ,UAAA,GACMC,CADN,MAETjC,CAAN,CAEA2B,EAAO8yB,OAAP9yB,CAAexB,CAAfwB;;EAGJ,UAAOA,CAAP;EC9EF;EAyCA,SAnCE,UACIwwB,CADJ,EACsB0C,CADtB,EAC2CC,CAD3C,EAEIC,CAFJ,EAEgCC,CAFhC,EAGI3mB,CAHJ;EAJA5H,oBAAAA,KAAAA,EAEAA,yBAAAA,IAAuB,CAFvBA,EAQEA,KAAKysB,aAALzsB,IAAsB,KAAK,QAAQ,WARrCA,EASEwuB,2BAA0C9C,CAA1C8C,EAAkDJ,CAAlDI,CATFxuB,EAUEwuB,2BAA0C9C,CAA1C8C,EAAkDH,CAAlDG,CAVFxuB,CAYE,IAAIyuB,IAAgB,KAApB,CACmB,QAAfH,CAAe,KACjBE,2BAA0C9C,CAA1C8C,EAAkDF,CAAlDE,GACAxuB,KAAKysB,aAALzsB,CAAmBhE,IAAnBgE,CAAwB,QAAxBA,CADAwuB,EAEAC,IAAgB,wBAHC,EAMnB,IAAIC,IAAe,KAAnB,CACkB,
QAAdH,CAAc,KAChBC,2BAA0C9C,CAA1C8C,EAAkDD,CAAlDC,GACAxuB,KAAKysB,aAALzsB,CAAmBhE,IAAnBgE,CAAwB,OAAxBA,CADAwuB,EAEAE,IAAe,uBAHC,GAMlB1uB,KAAK6N,WAAL7N,GAAmB0rB,CAND,EAOlB1rB,KAAK4sB,QAAL5sB,GAAgB,yLAKKyuB,CALL,8BAAA,GAMIC,CANJ,iEAAA,GAOuC9mB,CAPvC,2FAPE;KAmBtB;KAzCA;EAAA;ECkDA,SA3CE,UACI8jB,CADJ,EACsB0C,CADtB,EAC2CC,CAD3C,EAEIC,CAFJ,EAEgCC,CAFhC,EAGI3mB,CAHJ;EAHA5H,6BAAAA,IAAuB,CAAvBA,EACAA,uBAAAA,IAAqB,CADrBA,EAOEA,KAAKysB,aAALzsB,IAAsB,KAAK,QAAQ,WAPrCA,EAQEwuB,2BAA0C9C,CAA1C8C,EAAkDJ,CAAlDI,CARFxuB,EASEwuB,2BAA0C9C,CAA1C8C,EAAkDH,CAAlDG,CATFxuB,CAWE,IAAM2uB,IAAcC,gBAAgB,MAAhBA,EAAwBR,EAAUh0B,MAAlCw0B,CAApB;EAAA,QACMC,IAAkBD,gBAAgB,UAAhBA,EAA4BP,EAAcj0B,MAA1Cw0B,CADxB;EAAA,QAGIH,IAAgB,yBAHpB,CAImB,QAAfH,CAAe,KACjBE,2BAA0C9C,CAA1C8C,EAAkDF,CAAlDE,GACAxuB,KAAKysB,aAALzsB,CAAmBhE,IAAnBgE,CAAwB,QAAxBA,CADAwuB,EAEAC,IAAgBG,gBAAgB,QAAhBA,EAA0BN,EAAYl0B,MAAtCw0B,CAHC,EAMnB,IAAIF,IAAe,wBAAnB,CACkB,QAAdH,CAAc,KAChBC,2BAA0C9C,CAA1C8C,EAAkDD,CAAlDC,GACAxuB,KAAKysB,aAALzsB,CAAmBhE,IAAnBgE,CAAwB,OAAxBA,CADAwuB,EAEAE,IAAeE,gBAAgB,OAAhBA,EAAyBL,EAAWn0B,MAApCw0B,CAHC,GAMlB5uB,KAAK6N,WAAL7N,GAAmB0rB,CAND,EAOlB1rB,KAAK4sB,QAAL5sB,GAAgB,6EAIVyuB,CAJU,gBAAA,GAKVC,CALU,kEAAA,GAQVC,CARU,gBAAA,GASVE,CATU,iEAAA,GAWqCjnB,CAXrC,wEAPE;KAwBtB;KDlDA,0BCoDyBknB,GAAiB1vB;EACxC,MAAM2vB,IAAa,QAAMD,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAN,GAAwCA,EAAQjxB,KAARixB,CAAc,CAAdA,CAA3D,CACA,OAAa,MAAT1vB,CAAS,GACJ,kBACE0vB,CADF,cAAA,GACqBC,CADrB,yBAAA,GAEED,CAFF,aAAA,GAEoBA,CAFpB,gBAAA,GAEyCA,CAFzC,sBADI,GAMN,UAAQA,CAAR,QAAA,GAAqBC,CAArB,6BANP;EClDK,KAAMG,qBACXC,MAAM,yCACNC,MAAM,yCAFD;EAAA;EA8BP,SAnBE,UAAY5J,CAAZ,EAAwBjE,CAAxB,EAA0C8N,CAA1C;EALArvB,sBAAAA,IAAiB,SAAS,SAAS,SAAS,QAA5CA,EAGAA,yBAAAA,IAAuB,CAHvBA,EAMEA,KAAK6N,WAAL7N,GACIwuB,2BAA0CjN,CAA1CiN,EAAkDa,CAAlDb,CAPNxuB,EASEA,KAAK4sB,QAAL5sB,GAAgB,8GAGVwlB,CAHU,6SATlBxlB;KAwBF;KA9BO;EAAA,ICFDsvB,oBAAoB,0DDEnB;EAAA,ICGMC,MAAM,eDHZ;EAAA,ICIMC,MAAM,eDJZ;EAAA,ICKMC,MAAM,eDLZ;EAAA,ICMMC,MAAM,0CDNZ;EAAA,ICaMC,UAAU,8ODbhB;EAAA,IC0BMC,MAAM,mKD1BZ;EAAA,ICiCMC,qBAAqB,2BDjC3B;EAAA,ICmCMC,QAAQ,uBDnCd;EAAA,ICqCMC,YAAY,uBDrClB;EAAA,ICuCMC,OAAO,sBDvCb;EAAA,ICyCMC,aAAa,uBDzCnB;EAAA,IC2CMC,UAAU,sBD3ChB;EAAA,IC6CMC,gBAAgB,uBD7CtB;EAAA,IC+CMC,cAAc,qCD/CpB;EAAA,ICiDMC,aAAa,qCDjDnB;EAAA,ICmDMC,MAAMhB,oBAAoB,yBDnDhC;EAAA,ICsDMiB,MAAMjB,oBAAoB,yBDtDhC;EAAA,ICyDMkB,MAAM,gDDzDZ;EAAA,IC4DMC,QAAQnB,oBAAoB,0BD5DlC;EAAA,ICgEMoB,UAAU,wCDhEhB;EAAA;EC2EL,YAAA,CAAYlL,CAAZ,EAAwBjE,CAAxB,EAA0C8N,CAA1C;EARArvB,sBAAAA,IAAiB,KAAK,IAAtBA,EAGAA,yBAAAA,IAAuB,CAHvBA,EASEA,KAAK6N,WAAL7N,GACIwuB,2BAA0CjN,CAA1CiN,EAAkDa,CAAlDb,CAVNxuB,EAWEA,KAAK4sB,QAAL5sB,GAAgB,0FAGVwlB,CAHU,2KAXlBxlB;EAsCF,UAbE2wB,WAAAA,mBAAAA,GAAA;EAAA,gBAAA,CACE,OAAO,UAACC,CAAD,EAAsBC,CAAtB;EACgB,cAAjBlwB,EAAKmwB,QAAY,KACnBnwB,EAAKmwB,QAALnwB,GAAgBiwB,EAAMG,yBAANH,CAAgCC,CAAhCD,EAA8C,KAA9CA,CAAhBjwB,EACqB,QAAjBA,EAAKmwB,QAFU,KAQrBF,EAAMp4B,EAANo4B,CAASI,SAATJ,CAAmBjwB,EAAKmwB,QAAxBF,EAAkCK,GAAlCL,CARqB;OADvB;KADFD,GAaF;KDzGO;EAAA;EEcP,SAdE,UAAYpP,CAAZ,EAA8B9mB,CAA9B,EAA2CE,CAA3C;EAJAqF,sBAAAA,IAAiB,IAAjBA,EAKEA,KAAK6N,WAAL7N,GAAmBuhB,CALrBvhB,EAMEA,KAAK4sB,QAAL5sB,GAAgB,qMAQmBvF,CARnB,cAAA,GAQkCE,CARlC,wBANlBqF;KAkBF;KFdO;EAAA;EGYP,SAZE,UAAY/D,CAAZ;EAJA+D,sBAAAA,IAAiB,QAAQ,OAAzBA,EAKEA,KAAK6N,WAAL7N,GAAmB/D,CALrB+D,EAMEA,KAAK4sB,QAAL5sB,GAAgB,yMANlBA;KAgBF;KHZO;EAAA;EIwBP,SAtBE,UAAYuhB,CAAZ,EAAsC8N,CAAtC;EALArvB,sBAAAA,IAAiB,KAAK,IAAtBA,EACAA,gBAAAA,KADAA,EAMEA,KAAK6N,WAAL7N,GACIkxB,iBAA6B3P,GAAQ8N,EAArC6B,EAA8C,CAA9CA,CAPNlxB,EASEA,KAAK4sB,QAAL5sB,GAAgB,8KAODuhB,EAAO,CAAPA,CAPC,6EAAA,GAUFA,EAAO,CAAPA,CAVE,8FATlBvhB;KA2BF;KJxBO;EAAA;EK8CP,SA7CE,UAAY+n
B,CAAZ;EAJA/nB,sBAAAA,IAAiB,KAAK,KAAtBA,EAKEA,KAAK6N,WAAL7N,GAAmB+nB,EAASoJ,WAL9BnxB,CAOE,IAAMgtB,IAAejF,EAASiF,YAA9B;EAAA,QACMC,IAAclF,EAASkF,WAD7B;EAAA,QAEMK,IAASvF,EAASwF,OAATxF,CAAiByF,GAFhC;EAAA,QAGMC,IAAU1F,EAASwF,OAATxF,CAAiB2F,IAHjC,CAKA1tB,KAAK4sB,QAAL5sB,GAAgB,yYAYU+nB,EAASwE,SAZnB,+CAAA,GAacxE,EAAS6F,SAbvB,+CAAA,GAcaZ,CAdb,QAAA,GAc+BM,CAd/B,0CAAA,GAgBcvF,EAASqJ,QAhBvB,sFAAA,GAoBgBrJ,EAAS8F,QApBzB,iDAAA,GAqBeZ,CArBf,QAAA,GAqBgCQ,CArBhC,4CAAA,GAuBgB1F,EAASsJ,OAvBzB,+RAAhBrxB;KAqCJ;KL9CO;EAAA;EK+GP,SA1DE,UAAY+nB,CAAZ;EAJA/nB,sBAAAA,IAAiB,MAAM,IAAvBA,EAKEA,KAAK6N,WAAL7N,GAAmB+nB,EAAS8E,OAL9B7sB,CAOE,IAAM8sB,IAAe/E,EAAS+E,YAA9B;EAAA,QACMC,IAAchF,EAASgF,WAD7B;EAAA,QAEMC,IAAejF,EAASiF,YAF9B;EAAA,QAGMC,IAAclF,EAASkF,WAH7B;EAAA,QAKMK,IAASR,IAAe,CAAfA,GAAmB/E,EAASwF,OAATxF,CAAiByF,GALnD;EAAA,QAMMC,IAAUV,IAAc,CAAdA,GAAkBhF,EAASwF,OAATxF,CAAiB2F,IANnD,CAQA1tB,KAAK4sB,QAAL5sB,GAAgB,sCACastB,CADb,OAAA,GACwBG,CADxB,sdAAA,GAeYX,CAfZ,8DAAA,GAgB4BE,CAhB5B,8CAAA,GAkBgBjF,EAAS6F,SAlBzB,6HAAA,GAuBKd,CAvBL,mDAAA,GAyBcC,CAzBd,gEAAA,GA0B8BE,CA1B9B,gDAAA,GA4BkBlF,EAAS8F,QA5B3B,sJAAA,GAkCOd,CAlCP,qDAAA,GAoCgBhF,EAASuJ,WApCzB,oQAAhBtxB;KA+CJ;KL/GO;EAAA;EM+CP,SA9CE,UAAY+nB,CAAZ;EAJA/nB,sBAAAA,IAAiB,KAAK,KAAtBA,EAKEA,KAAK6N,WAAL7N,GAAmB+nB,EAASoJ,WAL9BnxB,CAOE,IAAMgtB,IAAejF,EAASiF,YAA9B;EAAA,QACMC,IAAclF,EAASkF,WAD7B;EAAA,QAEMK,IAASvF,EAASwF,OAATxF,CAAiByF,GAFhC;EAAA,QAGMC,IAAU1F,EAASwF,OAATxF,CAAiB2F,IAHjC;EAAA,QAIM6D,IAAaxJ,EAASuJ,WAATvJ,GAAuBA,EAASyJ,UAJnD,CAMAxxB,KAAK4sB,QAAL5sB,GAAgB,6MAOIuxB,CAPJ,sHAAA,GAYUxJ,EAASwE,SAZnB,+CAAA,GAacxE,EAAS6F,SAbvB,+CAAA,GAcaZ,CAdb,QAAA,GAc+BM,CAd/B,0CAAA,GAgBcvF,EAASqJ,QAhBvB,sFAAA,GAoBgBrJ,EAAS8F,QApBzB,iDAAA,GAqBeZ,CArBf,QAAA,GAqBgCQ,CArBhC,4CAAA,GAuBgB1F,EAASsJ,OAvBzB,+RAAhBrxB;KAqCJ;KN/CO;EAAA;EMiHP,SA3DE,UAAY+nB,CAAZ;EAJA/nB,sBAAAA,IAAiB,MAAM,IAAvBA,EAKEA,KAAK6N,WAAL7N,GAAmB+nB,EAAS8E,OAL9B7sB,CAOE,IAAM8sB,IAAe/E,EAAS+E,YAA9B;EAAA,QACMC,IAAchF,EAASgF,WAD7B;EAAA,QAEMC,IAAejF,EAASiF,YAF9B;EAAA,QAGMC,IAAclF,EAASkF,WAH7B;EAAA,QAKMK,IAASR,IAAe,CAAfA,GAAmB/E,EAASwF,OAATxF,CAAiByF,GALnD;EAAA,QAMMC,IAAUV,IAAc,CAAdA,GAAkBhF,EAASwF,OAATxF,CAAiB2F,IANnD;EAAA,QAOM6D,IAAaxJ,EAASuJ,WAATvJ,GAAuBA,EAASyJ,UAPnD,CASAxxB,KAAK4sB,QAAL5sB,GAAgB,sCACastB,CADb,OAAA,GACwBG,CADxB,8TAAA,GAaYX,CAbZ,8DAAA,GAc4BE,CAd5B,8CAAA,GAgBgBjF,EAAS6F,SAhBzB,6HAAA,GAqBKd,CArBL,mDAAA,GAuBcC,CAvBd,gEAAA,GAwB8BE,CAxB9B,gDAAA,GA0BkBlF,EAAS8F,QA1B3B,sJAAA,GAgCOd,CAhCP,oGAAA,GAmCgBwE,CAnChB,4CAAA,GAoCUA,CApCV,iQAAhBvxB;KA+CJ;KNjHO;EAAA;EO+FP,SA9FE,UAAY+nB,CAAZ;EAJA/nB,sBAAAA,IAAiB,KAAK,IAAtBA,EAKEA,KAAK6N,WAAL7N,GAAmB+nB,EAASvG,QAL9BxhB,CAME,IAAMstB,IAASvF,EAASwF,OAATxF,CAAiByF,GAAhC;EAAA,QACMC,IAAU1F,EAASwF,OAATxF,CAAiB2F,IADjC;EAAA,QAEMV,IAAejF,EAASiF,YAF9B;EAAA,QAGMC,IAAclF,EAASkF,WAH7B;EAAA,QAIMC,IAAiBnF,EAASmF,cAJhC;EAAA,QAKMC,IAAgBpF,EAASoF,aAL/B;EAAA,QAMML,IAAe/E,EAAS+E,YAN9B;EAAA,QAOMC,IAAchF,EAASgF,WAP7B;EAAA,QASM0E,IAA8D,IAAtCl3B,KAAKkC,KAALlC,CAAWwtB,EAASyJ,UAATzJ,GAAsB,CAAjCxtB,CAT9B;EAAA,QAUMm3B,IAA0B3J,EAASyJ,UAATzJ,GAAsB,CAVtD,CAYA/nB,KAAK4sB,QAAL5sB,GAAgB,yCACgBgtB,CADhB,OAAA,GACiCC,CADjC,wCAAA,GAEaK,CAFb,OAAA,GAEwBG,CAFxB,2dAAA,GAgBYX,CAhBZ,mDAAA,GAiBiBI,CAjBjB,wCAAA,GAmBYnF,EAASqJ,QAnBrB,gFAAA,GAuBcrE,CAvBd,qDAAA,GAwBmBI,CAxBnB,0CAAA,GA0BcpF,EAASsJ,OA1BvB,sFAAA,GA8BgBI,CA9BhB,6iBAAA,IA+C0B,MAA5BC,CA/CE,yEAAA,GAiDkBD,CAjDlB,uCAAA,GAkDWA,CAlDX,oCAAA,IAmDiC,MAA5BC,CAnDL,mFAAA,GAqDkBD,CArDlB,6CAAA,GAsDkBA,CAtDlB,+FAAA,GAyDWA,CAzDX,0CAAA,GA0DWA,CA1DX,0GAAA,IA6DiC,MAA5BC,CA7DL,mFAAA,GA+DkBD,CA/DlB,6CAAA,GAgEkBA,CAhElB,iDAAA,GAiEkBA,CAjElB,+FAAA,GAoEWA,CApEX,0CAAA,GAqEWA,CArEX,8CAAA,GAsEWA,CAtEX,o
KAAhBzxB;KAgFJ;KP/FO;EAAA;EQ0DP,SAzDE,UAAY+nB,CAAZ;EAJA/nB,sBAAAA,IAAiB,KAAK,IAAtBA,EAKEA,KAAK6N,WAAL7N,GAAmB+nB,EAASvG,QAL9BxhB,CAOE,IAAM2xB,IAAW5J,EAASqJ,QAA1B;EAAA,QACMQ,IAAW7J,EAASsJ,OAD1B;EAAA,QAEM/D,IAASvF,EAASwF,OAATxF,CAAiByF,GAFhC;EAAA,QAGMC,IAAU1F,EAASwF,OAATxF,CAAiB2F,IAHjC;EAAA,QAIMV,IAAejF,EAASiF,YAJ9B;EAAA,QAKMC,IAAclF,EAASkF,WAL7B;EAAA,QAMMC,IAAiBnF,EAASmF,cANhC;EAAA,QAOMC,IAAgBpF,EAASoF,aAP/B;EAAA,QAQML,IAAe/E,EAAS+E,YAR9B;EAAA,QASMC,IAAchF,EAASgF,WAT7B;EAAA,QAUMwE,IAAaxJ,EAASuJ,WAATvJ,GAAuBA,EAASyJ,UAVnD,CAYAxxB,KAAK4sB,QAAL5sB,GAAgB,yCACgBgtB,CADhB,OAAA,GACiCC,CADjC,wCAAA,GAEaK,CAFb,OAAA,GAEwBG,CAFxB,mNAAA,GASI8D,CATJ,kCAAA,GAUQA,CAVR,kXAAA,GAmBYzE,CAnBZ,mDAAA,GAoBiBI,CApBjB,wCAAA,GAsBYyE,CAtBZ,gFAAA,GA0Bc5E,CA1Bd,qDAAA,GA2BmBI,CA3BnB,0CAAA,GA6BcyE,CA7Bd,6PAAhB5xB;KA0CJ;KR1DO;EAAA;ESsGP,SAtGE,UACE6xB,CADF,EACgDC,CADhD,EAEE7I,CAFF,EAE8BC,CAF9B,EAGEC,CAHF;EAJAnpB,sBAAAA,IAAiB,SAAS,SAAS,SAAnCA,EACAA,gBAAAA,KADAA,CAQS,IAAA6rB,QAAA;EAAA,QAAOkG,QAAP;EAAA,QAAoBC,QAApB;EAAA,QAAgCl6B,QAAhC;EAAA,QACAm6B,QADA;EAAA,QAEAC,QAFA;EAAA,QAEYC,QAFZ,CAGPnyB,KAAK6N,WAAL7N,IAAoBiyB,GAAUC,GAAYC,GAAWr6B,EAArDkI,CACA,IAAMoyB,IAAsB,eAAXlJ,CAAW,GAAa,CAAb,GAAiB,CAA7C;EAAA,QAEM/P,gCAFN;EAAA,QAEOkZ,QAFP;EAAA,QAEyBC,QAFzB;EAAA,QAKMC,iJALN;EAAA,QAKOC,QALP;EAAA,QAKoBC,QALpB;EAAA,QAKiCC,QALjC;EAAA,QAgBMC,+IAhBN;EAAA,QAgBOC,QAhBP;EAAA,QAgBmBC,QAhBnB;EAAA,QAgB+BC,QAhB/B,CA+BA9yB,KAAK4sB,QAAL5sB,GAAgB,8CACqBwyB,CADrB,+CAAA,GAEoBI,CAFpB,+cAAA,GAkBa/G,CAlBb,uEAAA,GAsBW4G,CAtBX,oCAAA,GAuBUI,CAvBV,+BAAA,GAyBGH,CAzBH,yCAAA,GA0BeL,CA1Bf,qCAAA,GA2BQlJ,CA3BR,6DAAA,GA8BG2J,CA9BH,yCAAA,GA+BeR,CA/Bf,qCAAA,GAgCQnJ,CAhCR,wGAAA,GAqCPiJ,CArCO,umCAAhBpyB;KA+DJ;KTtGO,6CUCH+yB,GAAkB92B,GAAiB3B;qBAAAA,aACrC,IAAM+E,IAAU2zB,eAAoB/2B,CAApB+2B,CAAhB,CACA,OAAO3zB,EACFyC,GADEzC,CACE,UAACiE,CAAD,EAASvI,CAAT;EAKH,WAJc,SAAOg4B,EAAOh4B,CAAPg4B,CAAP,QAAA,GAAsBz4B,CAAtB,QAAA,GAAiCgJ,CAAjC,OAAA,IACAvI,MAAMsE,EAAQjF,MAARiF,GAAiB,CAAvBtE,GACV,SAAOg4B,EAAOh4B,IAAI,CAAXg4B,CAAP,QAAA,GAA0Bz4B,CAA1B,QAAA,GAAqCy4B,EAAOh4B,CAAPg4B,CAArC,QAAA,GAAoDzvB,CAD1CvI,GAEV,cAAYg4B,EAAOh4B,CAAPg4B,CAAZ,QAAA,GAA2BzvB,CAHjB,OAId;KANCjE,EAQF2C,IARE3C,CAQG,EARHA,CAAP;EAWF,kBAAA,CAAkB3E,CAAlB;EACE,SAAiB,MAAbA,EAAEN,MAAW,GACR,KAAGM,EAAE,CAAFA,CADK,GAGV,QAAMA,EAAEN,MAAR,MAAA,GAAkBM,EAAEsH,IAAFtH,CAAO,GAAPA,CAAlB,MAHP;EAUF,gBAAA,CAAuBA,CAAvB,EAAoC0W,CAApC;EACE,MAAI1W,EAAEN,MAAFM,KAAa0W,EAAEhX,MAAnB,EACE,MAAM,IAAIb,KAAJ,CACF,0DACOmB,EAAEN,MADT,UAAA,GACuBgX,EAAEhX,MAFvB,CAAN,CASF,KAJA,IAAM64B,MAAN,EACMC,IAAc34B,KAAKkC,KAALlC,CAAWG,EAAEN,MAAFM,GAAW,CAAtBH,CADpB,EAEM44B,IAAuBz4B,EAAEN,MAAFM,GAAW,CAFxC,EAISK,IAAI,CAAb,EAAgBA,IAAIm4B,CAApB,EAAiCn4B,GAAjC,EAAsC;EACpC,QAAMq4B,IAAS14B,EAAEmD,KAAFnD,CAAY,IAAJK,CAARL,EAAmB,IAAJK,CAAI,GAAI,CAAvBL,CAAf;EAAA,QACM24B,IAASjiB,EAAEvT,KAAFuT,CAAY,IAAJrW,CAARqW,EAAmB,IAAJrW,CAAI,GAAI,CAAvBqW,CADf,CAEA6hB,EAAOj3B,IAAPi3B,CAAeK,SAASF,CAATE,QAAAA,GAAqBA,SAASD,CAATC,CAApCL;EAGF,OAA6B,MAAzBE,CAAJ,EAAgC;EAC1BC,QAAS14B,EAAEmD,KAAFnD,CAAsB,IAAdw4B,CAARx4B,CAAT04B,EACAC,IAASjiB,EAAEvT,KAAFuT,CAAsB,IAAd8hB,CAAR9hB,CADTgiB,CAEkB,MAAlBA,EAAOh5B,MAAW,KACpBg5B,IAASA,EAAOtxB,GAAPsxB,CAAW,UAAAzb,CAAA;EAAK,aAAA,WAASA,CAAT,MAAA;OAAhByb,CAATA,EACAC,IAASA,EAAOvxB,GAAPuxB,CAAW,UAAA1b,CAAA;EAAK,aAAA,WAASA,CAAT,MAAA;OAAhB0b,CAFW,GAItBJ,EAAOj3B,IAAPi3B,CAAeK,SAASF,CAATE,QAAAA,GAAqBA,SAASD,CAATC,CAApCL,CAJsB;EAOxB,UAAOA,EAAOnxB,GAAPmxB,CAAW,UAACtb,CAAD,EAAI5c,CAAJ;EAAU,WAAA,SAAO4c,CAAP,MAAA;KAArBsb,EAAkCjxB,IAAlCixB,CAAuC,GAAvCA,CAAP;uBC3CEM,GAAyB1lB,GAAwB+e,GACjD4G,GAAoBC;EACtB,MAAIC,IAAsCH,EAAWzxB,GAAXyxB,CAAe,UAAA74B,CAAA;EACvD,QAAMwB,IAAOyH,cAAmBjJ,EAAEi5B
,SAAFj5B,CAAYk5B,YAA/BjwB,CAAb,CACA,OAAIjJ,EAAEi5B,SAAFj5B,CAAYm5B,SAAZn5B,GACK,mBAAiBA,EAAE6D,IAAnB,IAA0BrC,IAAO,CAAPA,GAAW,MAAIA,CAAJ,MAAXA,GAAyB,EAAnD,OADLxB,GAGG,uBAAqBA,EAAE6D,IAAvB,MAHP;KAFwCg1B,CAA1C,CAOAG,IAAqBA,EAAmB1xB,IAAnB0xB,CAAwB,IAAxBA,CAArBA,CAEA,IAIII,CAJJ;EAAA,MAKIC,CALJ;EAAA,MAAMC,IACFT,EAAWzxB,GAAXyxB,CAAe,UAAA74B,CAAA;EAAK,WAAAu5B,wBAAwBv5B,CAAxBu5B,EAA2BpmB,CAA3BomB,EAAwCT,CAAxCS,CAAA;KAApBV,EACKvxB,IADLuxB,CACU,IADVA,CADJ;EAAA,MAGMW,IAAcrmB,EAAYsmB,QAHhC;EAAA,MAMIC,IAAeC,aANnB,CA0BA,OAlBIxmB,EAAYymB,QAAZzmB,IACFimB,IACIS,+BAA+B1mB,EAAY+lB,YAA3CW,EAAyDL,CAAzDK,CADJT,EAEAC,IAA+BS,8BAH7B3mB,KAKFimB,IACIW,yBAAyB5mB,EAAY+lB,YAArCa,EAAmDP,CAAnDO,CADJX,EAEAC,IAA+BW,2BAP7B7mB,GAUA4lB,MACFW,KAAgBO,oBADdlB,CAVA5lB,GAeFumB,GAAcQ,8BAA8Bb,GAC5CL,GAAoBI,GAAuBE,GAAsBpH,GACjE5qB,KAAK,KACP;EAGF,8BAAA,CAA8B6yB,CAA9B;EACE,MAAM54B,IAAQ44B,EAAOlB,SAAPkB,CAAiBjB,YAA/B,CACA,QAAQ33B,EAAM7B,MAAd,GACE,KAAK,CAAL;EACE,aAAO06B,iBAAiBD,CAAjBC,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,aAAaF,CAAbE,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,aAAaH,CAAbG,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,aAAaJ,CAAbI,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,aAAaL,CAAbK,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,aAAaN,CAAbM,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,aAAaP,CAAbO,CAAP,CACF;EACE,YAAM,IAAI77B,KAAJ,CACC0C,EAAM7B,MAAN6B,2CADD,CAAN,CAhBJ;EAsBF,oCAAA,CAAoC44B,CAApC;EACE,MAAM54B,IAAQ44B,EAAOlB,SAAPkB,CAAiBjB,YAA/B,CACA,QAAQ33B,EAAM7B,MAAd,GACE,KAAK,CAAL;EACE,aAAOi7B,mBAAmBR,CAAnBQ,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,mBAAmBT,CAAnBS,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,mBAAmBV,CAAnBU,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,mBAAmBX,CAAnBW,CAAP,CACF;EACE,YAAM,IAAIj8B,KAAJ,CACF,YAAU0C,EAAM7B,MAAhB,2CADE,CAAN,CAVJ;EAgBF,iCAAA,CACIy6B,CADJ,EACuBY,CADvB,EACgDjC,CADhD;EAEE,MAAIve,IAAMygB,eAAeb,CAAfa,CAAV,CAeA,OAdIb,EAAOlB,SAAPkB,CAAiBP,QAAjBO,GACF5f,KAAO0gB,2BAA2Bd,CAA3Bc,CADLd,GAGF5f,KAAO2gB,qBAAqBf,CAArBe,CAHLf,GASArB,KACAziB,YACI8jB,EAAOlB,SAAPkB,CAAiBjB,YADrB7iB,EACmC0kB,EAAa7B,YADhD7iB,OAEFkE,KAAO4gB,yBAAyBhB,CAAzBgB,EAAiCJ,CAAjCI,EAA+CrC,CAA/CqC,EAZLhB,EAcG5f,CAAP;EAGF,wCAAA,CACIuM,CADJ,EACwB0S,CADxB;EAEE,UAAQ1S,EAASpnB,MAAjB,GACE,KAAK,CAAL;EACE,aAAO07B,uBAAP,CACF,KAAK,CAAL;EACE,aAAOC,wBAAwBvU,CAAxBuU,EAA8C7B,CAA9C6B,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,wBAAwBxU,CAAxBwU,EAAsD9B,CAAtD8B,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,wBACHzU,CADGyU,EACmC/B,CADnC+B,CAAP,CAEF,KAAK,CAAL;EACE,aAAOC,wBACH1U,CADG0U,EAC2ChC,CAD3CgC,CAAP,CAEF;EACE,YAAM,IAAI38B,KAAJ,CACCioB,EAASpnB,MAATonB,8DADD,CAAN,CAdJ;EAoBF,kCAAA,CACIA,CADJ,EACwB0S,CADxB;EAEE,UAAQ1S,EAASpnB,MAAjB,GACE,KAAK,CAAL;EACE,aAAO07B,uBAAP,CACF,KAAK,CAAL;EACE,aAAOK,kBAAkB3U,CAAlB2U,EAAwCjC,CAAxCiC,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,kBAAkB5U,CAAlB4U,EAAgDlC,CAAhDkC,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,kBACH7U,CADG6U,EACmCnC,CADnCmC,CAAP,CAEF,KAAK,CAAL;EACE,aAAOC,kBACH9U,CADG8U,EAC2CpC,CAD3CoC,CAAP,CAEF,KAAK,CAAL;EACE,aAAOC,kBACH/U,CADG+U,EACmDrC,CADnDqC,CAAP,CAEF,KAAK,CAAL;EACE,aAAOC,kBACHhV,CADGgV,EAEHtC,CAFGsC,CAAP,CAGF;EACE,YAAM,IAAIj9B,KAAJ,CACCioB,EAASpnB,MAATonB,4CADD,CAAN,CArBJ;EA0BF,KAAMiV,oBAAoB,+aAA1B;EAAA,IAcMC,oBAAoB,oiBAd1B;EAAA,IA8BMC,oBAAoB,ssBA9B1B;EAAA,IAiDMC,oBAAoB,8yBAjD1B;EAAA,IAsEMC,oBAAoB,4dAtE1B;EAAA,IAmFMC,oBAAoB,8fAnF1B;EAAA,IAgGMlC,+BAA+B,kHAhGrC;EAAA,IAsGMF,8BAA8B,gFAtGpC;EAAA,IA4GMF,iCAAiC,gEA5GvC;EAAA,IAsHMH,gBAAgB,2iCAyDlBoC,iBAzDkB,SAAA,GA0DlBC,iBA1DkB,SAAA,GA2DlBC,iBA3DkB,SAAA,GA4DlBC,iBA5DkB,SAAA,GA6DlBC,iBA7DkB,SAAA,GA8DlBC,iBA9DkB,OAtHtB;EAAA,IAuLMnC,uBAAuB,qWAvL7B,CAoMA,8BAAA;EACE,SAAO,2DAAP;EAOF,iCAAA,CACI14B,CADJ,EACqBk4B,CADrB;EAEE,MAAM4C,KACDx8B,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,GAA4BA,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,C
AAxB55B,EADjC,CAEA,OAAoB,MAAhB45B,EAAS,CAATA,CAAgB,GACX,0EAE2B4C,EAAe,CAAfA,CAF3B,wBADW,GAQA,MAAhB5C,EAAS,CAATA,CAAgB,GACX,0EAE2B4C,EAAe,CAAfA,CAF3B,wBADW,GAQb,kHAG2BA,EAAe,CAAfA,CAH3B,OAAA,GAGiDA,EAAe,CAAfA,CAHjD,oCAAA,GAImBA,EAAe,CAAfA,CAJnB,8BAhBP;EAyBF,2BAAA,CACI96B,CADJ,EACqBk4B,CADrB;EAEE,SAAoB,MAAhBA,EAAS,CAATA,CAAgB,GACX,sEAEuBA,EAAS,CAATA,CAFvB,wBADW,GAOA,MAAhBA,EAAS,CAATA,CAAgB,GACX,sEAEuBA,EAAS,CAATA,CAFvB,wBADW,GAOb,kHAG2BA,EAAS,CAATA,CAH3B,OAAA,GAG2CA,EAAS,CAATA,CAH3C,oCAAA,GAImBA,EAAS,CAATA,CAJnB,8BAdP;EAuBF,iCAAA,CACIl4B,CADJ,EACqCk4B,CADrC;EAEE,MAAM4C,KACDx8B,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,GAA4BA,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,EADjC;EAAA,MAEMy8B,IAAqBz8B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAF3B;EAAA,MAGM08B,IAAgBD,IAAqBz8B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAH3C,CAKA,OAAO,oHAG2Bw8B,EAAe,CAAfA,CAH3B,OAAA,GAGiDA,EAAe,CAAfA,CAHjD,yCAAA,GAIwBA,EAAe,CAAfA,CAJxB,6CAAA,GAMeE,CANf,2BAAA,GAOYA,CAPZ,qCAAA,GASoBD,CATpB,mCAAA,GAUmBA,CAVnB,sDAAP;EAiBF,2BAAA,CACI/6B,CADJ,EACqCk4B,CADrC;EAEE,MAAM+C,IACFC,oCAAgD,KAAK,KAAK,IAA1DA,EAAgEl7B,CAAhEk7B,CADJ,CAGA,OAAO,oHAG2BhD,EAAS,CAATA,CAH3B,OAAA,GAG2CA,EAAS,CAATA,CAH3C,yCAAA,GAIwBA,EAAS,CAATA,CAJxB,2BAAA,GAKD+C,CALC,8CAAP;EAWF,iCAAA,CACIj7B,CADJ,EAEIk4B,CAFJ;EAGE,MAAM4C,KACDx8B,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,GAA4BA,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,EADjC;EAAA,MAGMy8B,IAAqBz8B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAH3B;EAAA,MAIM08B,IAAgBD,IAAqBz8B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAJ3C;EAAA,MAKM68B,IAAiBH,IAAgBh7B,EAAM,CAANA,CALvC,CAOA,OAAO,oHAG2B86B,EAAe,CAAfA,CAH3B,OAAA,GAGiDA,EAAe,CAAfA,CAHjD,yCAAA,GAIwBA,EAAe,CAAfA,CAJxB,8CAAA,GAMgBK,CANhB,4BAAA,GAOaA,CAPb,gCAAA,GASeH,CATf,2BAAA,GAUYA,CAVZ,qCAAA,GAYoBD,CAZpB,mCAAA,GAamBA,CAbnB,0DAAP;EAoBF,2BAAA,CACI/6B,CADJ,EAEIk4B,CAFJ;EAGE,MAAM+C,IAAyBC,oCAC1B,KAAK,KAAK,KAAK,KADWA,EACJl7B,CADIk7B,CAA/B,CAGA,OAAO,+FAGMhD,EAAS,CAATA,CAHN,OAAA,GAGsBA,EAAS,CAATA,CAHtB,yCAAA,GAIwBA,EAAS,CAATA,CAJxB,2BAAA,GAKD+C,CALC,kDAAP;EAWF,2BAAA,CACIj7B,CADJ,EAEIk4B,CAFJ;EAGE,MAAM+C,IAAyBC,oCAC1B,KAAK,KAAK,KAAK,MAAM,KADKA,EACEl7B,CADFk7B,CAA/B,CAGA,OAAO,sFAEyChD,EAAS,CAATA,CAFzC,qCAAA,GAGsBA,EAAS,CAATA,CAHtB,2CAAA,GAKwBA,EAAS,CAATA,CALxB,6BAAA,GAOD+C,CAPC,0FAAP;EAeF,2BAAA,CACIj7B,CADJ,EAEIk4B,CAFJ;EAGE,MAAM+C,IAAyBC,oCAC1B,KAAK,KAAK,KAAK,MAAM,MAAM,KADDA,EACQl7B,CADRk7B,CAA/B,CAGA,OAAO,+FAGMhD,EAAS,CAATA,CAHN,OAAA,GAGsBA,EAAS,CAATA,CAHtB,yCAAA,GAIwBA,EAAS,CAATA,CAJxB,6BAAA,GAMD+C,CANC,0FAAP;EAcF,iCAAA,CACIj7B,CADJ,EAC6Bk4B,CAD7B;EAEE,MAAM4C,KACDx8B,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,GAA4BA,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,EADjC,CAEA,IAAIwW,YAAiB9U,CAAjB8U,EAAwBojB,CAAxBpjB,CAAJ,EACE,OAAO,oFAEmCgmB,EAAe,CAAfA,CAFnC,OAAA,GAGHA,EAAe,CAAfA,CAHG,uBAAP,CASF,IAAMC,IAAqBz8B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAA3B,CAWA,OAAO,oHAG2Bw8B,EAAe,CAAfA,CAH3B,OAAA,GAGiDA,EAAe,CAAfA,CAHjD,2CAAA,GAKwBA,EAAe,CAAfA,CALxB,gDAAA,GAMoBC,CANpB,mCAAA,GAOmBA,CAPnB,mDAAP;EAcF,2BAAA,CACI/6B,CADJ,EAC6Bk4B,CAD7B;EAEE,SAAIpjB,YAAiB9U,CAAjB8U,EAAwBojB,CAAxBpjB,IACK,gFAE+BojB,EAAS,CAATA,CAF/B,OAAA,GAE+CA,EAAS,CAATA,CAF/C,uBADLpjB,GAOa,MAAb9U,EAAM,CAANA,CAAa,GACR,0HAG2Bk4B,EAAS,CAATA,CAH3B,OAAA,GAG2CA,EAAS,CAATA,CAH3C,2CAAA,GAIwBA,EAAS,CAATA,CAJxB,mEADQ,GAUA,MAAbl4B,EAAM,CAANA,CAAa,GACR,0HAG2Bk4B,EAAS,CAATA,CAH3B,OAAA,GAG2CA,EAAS,CAATA,CAH3C,2CAAA,GAIwBA,EAAS,CAATA,CAJxB,mEADQ,GAUV,oHAG2BA,EAAS,CAATA,CAH3B,OAAA,GAG2CA,EAAS,CAATA,CAH3C,yCAAA,GAIwBA,EAAS,CAATA,CAJxB,2CAAA,GAKel4B,EAAM,CAANA,CALf,kCAAA,GAMmBA,EAAM,CAANA,CANnB,
4CA3BP;EAuCF,0BAAA,CAA0Bo7B,CAA1B;EACE,MAAMvI,IAAUuI,EAAU94B,IAA1B;EAAA,MACM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAD3D,CAEA,OAAIuI,EAAU1D,SAAV0D,CAAoBxD,SAApBwD,GACK,WAASC,CAAT,gBAAA,GAA+BxI,CAA/B,OADLuI,GAGG,iBACGC,CADH,sCAAA,GAEoBxI,CAFpB,0BAHP;EAUF,4BAAA,CAA4BuI,CAA5B;EACE,MAAMvI,IAAUuI,EAAU94B,IAA1B;EAAA,MACM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAD3D;EAAA,MAEMqF,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAFrC;EAAA,MAGM4C,KACDx8B,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,GAA4BA,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,EAJjC,CAMA,OAAO,gBACE+8B,CADF,6DAAA,GAGCP,EAAe,CAAfA,CAHD,OAAA,GAGuBA,EAAe,CAAfA,CAHvB,uCAAA,GAIgBjI,CAJhB,sBAAP;EASF,sBAAA,CAAsBuI,CAAtB;EACE,MAAMvI,IAAUuI,EAAU94B,IAA1B;EAAA,MACM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAD3D,CAGA,OAAO,iBACGwI,CADH,iCAAA,GAEMA,CAFN,4BAAP;EAOF,4BAAA,CAA4BD,CAA5B;EACE,MAAMp7B,IAAQo7B,EAAU1D,SAAV0D,CAAoBzD,YAAlC;EAAA,MACM9E,IAAUuI,EAAU94B,IAD1B;EAAA,MAEM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAF3D;EAAA,MAGMqF,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAHrC;EAAA,MAKMoD,IAAUpD,EAAS,CAATA,CALhB;EAAA,MAMMqD,IAAUrD,EAAS,CAATA,CANhB,CAOA,IAAgB,QAAZA,CAAY,IAAQpjB,YAAiB9U,CAAjB8U,EAAwBojB,CAAxBpjB,CAAxB,EACE,OAAO,kBACEumB,CADF,8EAAA,GAE0CE,CAF1C,SAAA,GAEwDD,CAFxD,sCAAA,GAIgBzI,CAJhB,0BAAP,CASF,IAAMiI,KACDx8B,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,GAA4BA,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,EADjC,CAIA,OAAO,gBACE+8B,CADF,0DAAA,GAFc/8B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAEd,OAAA,GAEyCw8B,EAAe,CAAfA,CAFzC,OAAA,GAGHA,EAAe,CAAfA,CAHG,0CAAA,GAIgBjI,CAJhB,sBAAP;EASF,sBAAA,CAAsBuI,CAAtB;EACE,MAAMp7B,IAAQo7B,EAAU1D,SAAV0D,CAAoBzD,YAAlC;EAAA,MACM9E,IAAUuI,EAAU94B,IAD1B;EAAA,MAEM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAF3D;EAAA,MAIMqF,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAJrC,CAKA,IAAgB,QAAZA,CAAY,IAAQpjB,YAAiB9U,CAAjB8U,EAAwBojB,CAAxBpjB,CAAxB,EAA2D;EACzD,QAAM0mB,IAAUtD,EAAS,CAATA,CAAhB,CAEA,OAAO,iBACCmD,CADD,4EAAA,GADSnD,EAAS,CAATA,CACT,SAAA,GAEsDsD,CAFtD,sCAAA,GAGkB3I,CAHlB,sBAAP;EAQI,OAAA3V,mBAAA;EAAA,MAACvb,cAAD;EAAA,MAAWG,cAAX;EAAA,MACA25B,IAAgB95B,CADhB,CAEN,IAAI85B,EAAct9B,MAAds9B,GAAuBz7B,EAAM7B,MAAjC,EAAyC;EAGvC,WAAO,aACHw7B,qBAHiB+B,iBAAiBN,CAAjBM,EAA4BD,CAA5BC,CAGjB/B,CADG,mBAAA,GAEG0B,CAFH,0CAAA,GAGMA,CAHN,MAAA,GAGkBM,mBAJT,OAAO,MAIEA,EAA0B75B,CAA1B65B,CAHlB,sBAAP;EAQF,OAAIP,EAAU1D,SAAV0D,CAAoBxD,SAAxB,EAEE,OAAO,mBACGyD,CADH,0EAAA,GAEsCr7B,EAAM,CAANA,CAFtC,4BAAA,GAGMq7B,CAHN,uCAAP,CAQF,IAAMC,IAAUpD,EAAS,CAATA,CAAhB;EAAA,MACMqD,IAAUrD,EAAS,CAATA,CADhB,CAEA,OAAgB,MAAZqD,CAAY,GAEP,iBACCF,CADD,wEAAA,GAEoCr7B,EAAM,CAANA,CAFpC,uDAAA,GAGiCs7B,CAHjC,sCAAA,GAIkBzI,CAJlB,sBAFO,GAUA,MAAZyI,CAAY,GAEP,iBACCD,CADD,wEAAA,GAEoCr7B,EAAM,CAANA,CAFpC,kDAAA,GAG4Bu7B,CAH5B,2CAAA,GAIkB1I,CAJlB,sBAFO,GAUT,eACCwI,CADD,kDAAA,GAEgBC,CAFhB,OAAA,GAE4BC,CAF5B,OAAA,GAEwCv7B,EAAM,CAANA,CAFxC,4CAAA,GAGkB6yB,CAHlB,kBApBP;EA4BF,4BAAA,CAA4BuI,CAA5B;EACE,MAAMp7B,IAAQo7B,EAAU1D,SAAV0D,CAAoBzD,YAAlC;EAAA,MACM9E,IAAUuI,EAAU94B,IAD1B;EAAA,MAEM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAF3D;EAAA,MAGMqF,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAHrC;EAAA,MAIM4C,KACDx8B,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,GAA4BA,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,EALjC,CAOA,IAAiB,MAAb0B,EAAM,CAANA,CAAJ,EAAoB;EAKlB,WAAO,eACD05B,2BAHegC,iBAAiBN,CAAjBM,EAFC17B,EAAM4B,KAAN5B,CAAY,CAA
ZA,CAED07B,CAGfhC,CADC,oBAAA,GAEI2B,CAFJ,mDAAA,GAGQA,CAHR,MAAA,GAGoBM,mBAJX,KAAK,OAAO,MAIDA,GANT,GAAG,EAMMA,CAHpB,0BAAP;EAQF,OAAML,IAAUR,EAAe,CAAfA,CAAhB;EAAA,MACMS,IAAUT,EAAe,CAAfA,CADhB;EAAA,MAGMc,IAAet9B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAHrB,CAMA,OAAO,gBACE+8B,CADF,2EAAA,GAGCC,CAHD,OAAA,GAGaC,CAHb,OAAA,GAFeK,IAAet9B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAE9B,OAAA,GAG2Cs9B,CAH3C,6CAAA,GAIgB/I,CAJhB,sBAAP;EASF,sBAAA,CAAsBuI,CAAtB;EACE,MAAMp7B,IAAQo7B,EAAU1D,SAAV0D,CAAoBzD,YAAlC;EAAA,MACM9E,IAAUuI,EAAU94B,IAD1B;EAAA,MAEM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAF3D;EAAA,MAGMgJ,IAAU77B,EAAM,CAANA,IAAWA,EAAM,CAANA,CAH3B;EAAA,MAIM87B,IAAU97B,EAAM,CAANA,CAJhB;EAAA,MAMMkd,mBANN;EAAA,MAMOvb,cANP;EAAA,MAMiBG,cANjB;EAAA,MAOM25B,IAAgB95B,CAPtB,CAQA,IAAI85B,EAAct9B,MAAds9B,GAAuBz7B,EAAM7B,MAAjC,EAAyC;EAGvC,WAAO,eACDw7B,qBAHe+B,iBAAiBN,CAAjBM,EAA4BD,CAA5BC,CAGf/B,CADC,qBAAA,GAEK0B,CAFL,uDAAA,GAGQA,CAHR,MAAA,GAGoBM,mBAJX,OAAO,OAAO,QAIHA,EAA0B75B,CAA1B65B,CAHpB,0BAAP;EAQF,OAAIP,EAAU1D,SAAV0D,CAAoBxD,SAAxB,EAEE,OAAO,mBACGyD,CADH,uHAAA,GAGsBQ,CAHtB,OAAA,GAGkCC,CAHlC,4BAAA,GAIMT,CAJN,uCAAP,CASF,IAAMnD,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAArC;EAAA,MACMoD,IAAUpD,EAAS,CAATA,CADhB;EAAA,MAEMqD,IAAUrD,EAAS,CAATA,CAFhB,CAGA,OAAIqD,MAAYM,CAAZN,GAEK,qBACKF,CADL,4HAAA,GAGyCS,CAHzC,0FAAA,GAKiBP,CALjB,SAAA,GAK+BD,CAL/B,0CAAA,GAMsBzI,CANtB,8BAFL0I,GAaAA,MAAYO,CAAZP,GAEK,iBACCF,CADD,kFAAA,GAEmCr7B,EAAM,CAANA,CAFnC,kGAAA,GAI0Cu7B,CAJ1C,SAAA,GAIwDD,CAJxD,sCAAA,GAKkBzI,CALlB,sBAFL0I,GAYG,mBACKF,CADL,+EAAA,GAGKC,CAHL,OAAA,GAGiBC,CAHjB,OAAA,GAG6BM,CAH7B,OAAA,GAGyCC,CAHzC,uDAAA,GAIsBjJ,CAJtB,wBAzBP;EAkCF,4BAAA,CAA4BuI,CAA5B;EACE,MAAMp7B,IAAQo7B,EAAU1D,SAAV0D,CAAoBzD,YAAlC;EAAA,MACM9E,IAAUuI,EAAU94B,IAD1B;EAAA,MAEM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAF3D;EAAA,MAGMqF,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAHrC;EAAA,MAIM4C,KACDx8B,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,GAA4BA,KAAKuQ,IAALvQ,CAAU45B,EAAS,CAATA,IAAc,CAAxB55B,EALjC;EAAA,MAMMg9B,IAAUR,EAAe,CAAfA,CANhB;EAAA,MAOMS,IAAUT,EAAe,CAAfA,CAPhB;EAAA,MASMc,IAAet9B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CATrB;EAAA,MAUM08B,IAAgBY,IAAet9B,KAAKuQ,IAALvQ,CAAU0B,EAAM,CAANA,IAAW,CAArB1B,CAVrC,CAaA,OAAO,gBACE+8B,CADF,mFAAA,GAGCC,CAHD,OAAA,GAGaC,CAHb,OAAA,GAFgBP,IAAgBh7B,EAAM,CAANA,CAEhC,gBAAA,GAICg7B,CAJD,OAAA,GAImBY,CAJnB,iDAAA,GAKgB/I,CALhB,sBAAP;EAUF,sBAAA,CAAsBuI,CAAtB;EACE,MAAMp7B,IAAQo7B,EAAU1D,SAAV0D,CAAoBzD,YAAlC;EAAA,MACM9E,IAAUuI,EAAU94B,IAD1B;EAAA,MAEM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAF3D;EAAA,MAGMkJ,IAAU/7B,EAAM,CAANA,CAHhB;EAAA,MAIM87B,IAAU97B,EAAM,CAANA,IAAW+7B,CAJ3B;EAAA,MAKMF,IAAU77B,EAAM,CAANA,IAAW87B,CAL3B;EAAA,MAOM5e,mBAPN;EAAA,MAOOvb,cAPP;EAAA,MAOiBG,cAPjB,CAQA,IAAIH,EAASxD,MAATwD,GAAkB3B,EAAM7B,MAA5B,EAAoC;EAGlC,WAAO,aACHw7B,qBAHiB+B,iBAAiBN,CAAjBM,EAA4B/5B,CAA5B+5B,CAGjB/B,CADG,mBAAA,GAEG0B,CAFH,iEAAA,GAGMA,CAHN,MAAA,GAGkBM,mBAJT,OAAO,OAAO,SAAS,SAIdA,EAA0B75B,CAA1B65B,CAHlB,sBAAP;EAQF,OAAIP,EAAU1D,SAAV0D,CAAoBxD,SAAxB,EAEE,OAAO,mBACGyD,CADH,2IAAA,GAGsBQ,CAHtB,OAAA,GAGkCC,CAHlC,OAAA,GAG8CC,CAH9C,4BAAA,GAIMV,CAJN,uCAAP,CASF,IAAMnD,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAArC;EAAA,MACMoD,IAAUpD,EAAS,CAATA,CADhB;EAAA,MAEMqD,IAAUrD,EAAS,CAATA,CAFhB,CAGA,OAAIqD,MAAYM,CAAZN,GAEK,mBACGF,CADH,yJAAA,GAIsCS,CAJtC,OAAA,GAIkDC,CAJlD,sFAAA,GAMeR,CANf,SAAA,GAM6BD,CAN7B,wCAAA,GAOoBzI,CAPpB,0BAFL0I,GAaAA,MAAYQ,CAAZR,GAEK,mBACGF,CADH,iIAAA,GAGqBr7B,EAAM,CAANA,IAAWA,EAAM,CAANA,CAHhC,OAAA,GAG6CA,EAAM,CAANA,C
AH7C,0HAAA,GAMcu7B,CANd,SAAA,GAM4BD,CAN5B,wCAAA,GAOoBzI,CAPpB,0BAFL0I,GAaG,iBACGF,CADH,2EAAA,GAEkBC,CAFlB,OAAA,GAE8BC,CAF9B,OAAA,GAE0CM,CAF1C,OAAA,GAEsDC,CAFtD,kBAAA,GAGGC,CAHH,6DAAA,GAIoBlJ,CAJpB,sBA1BP;EAmCF,sBAAA,CAAsBuI,CAAtB;EACE,MAAMp7B,IAAQo7B,EAAU1D,SAAV0D,CAAoBzD,YAAlC;EAAA,MACM9E,IAAUuI,EAAU94B,IAD1B;EAAA,MAEM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAF3D;EAAA,MAGMmJ,IAAUh8B,EAAM,CAANA,CAHhB;EAAA,MAIM+7B,IAAU/7B,EAAM,CAANA,IAAWg8B,CAJ3B;EAAA,MAKMF,IAAU97B,EAAM,CAANA,IAAW+7B,CAL3B;EAAA,MAMMF,IAAU77B,EAAM,CAANA,IAAW87B,CAN3B;EAAA,MAQM5e,mBARN;EAAA,MAQOvb,cARP;EAAA,MAQiBG,cARjB,CASA,IAAIH,EAASxD,MAATwD,GAAkB3B,EAAM7B,MAA5B,EAAoC;EAGlC,WAAO,aACHw7B,qBAHiB+B,iBAAiBN,CAAjBM,EAA4B/5B,CAA5B+5B,CAGjB/B,CADG,mBAAA,GAEG0B,CAFH,6EAAA,GAGMA,CAHN,MAAA,GAGkBM,mBAJT,OAAO,OAAO,SAAS,UAAU,SAIxBA,EAA0B75B,CAA1B65B,CAHlB,sBAAP;EAQF,OAAIP,EAAU1D,SAAV0D,CAAoBxD,SAAxB,EAEE,OAAO,mBACGyD,CADH,mJAAA,GAIMQ,CAJN,OAAA,GAIkBC,CAJlB,OAAA,GAI8BC,CAJ9B,OAAA,GAI0CC,CAJ1C,6CAAA,GAMMX,CANN,gCAAP,CAWF,IAAMnD,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAArC;EAAA,MACMoD,IAAUpD,EAAS,CAATA,CADhB;EAAA,MAEMqD,IAAUrD,EAAS,CAATA,CAFhB,CAIA,OAAIqD,MAAYM,CAAZN,GAEK,mBACGF,CADH,8KAAA,GAKMS,CALN,OAAA,GAKkBC,CALlB,OAAA,GAK8BC,CAL9B,sFAAA,GAOeT,CAPf,SAAA,GAO6BD,CAP7B,wCAAA,GAQoBzI,CARpB,0BAFL0I,GAeAA,MAAYS,CAAZT,GAEK,mBACGF,CADH,kJAAA,GAIMr7B,EAAM,CAANA,IAAWA,EAAM,CAANA,CAAXA,GAAsBA,EAAM,CAANA,CAJ5B,OAAA,GAIyCA,EAAM,CAANA,IAAWA,EAAM,CAANA,CAJpD,oBAAA,GAKGA,EAAM,CAANA,CALH,iHAAA,GAQcu7B,CARd,SAAA,GAQ4BD,CAR5B,wCAAA,GASoBzI,CATpB,0BAFL0I,GAgBG,iBACGF,CADH,uFAAA,GAEkBC,CAFlB,OAAA,GAE8BC,CAF9B,OAAA,GAE0CM,CAF1C,OAAA,GAEsDC,CAFtD,kBAAA,GAGGC,CAHH,OAAA,GAGeC,CAHf,qEAAA,GAIoBnJ,CAJpB,sBA/BP;EAwCF,sBAAA,CAAsBuI,CAAtB;EACE,MAAMp7B,IAAQo7B,EAAU1D,SAAV0D,CAAoBzD,YAAlC;EAAA,MACM9E,IAAUuI,EAAU94B,IAD1B;EAAA,MAEM+4B,IAAW,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARixB,CAAc,CAAdA,CAF3D;EAAA,MAGMoJ,IAAUj8B,EAAM,CAANA,CAHhB;EAAA,MAIMg8B,IAAUh8B,EAAM,CAANA,IAAWi8B,CAJ3B;EAAA,MAKMF,IAAU/7B,EAAM,CAANA,IAAWg8B,CAL3B;EAAA,MAMMF,IAAU97B,EAAM,CAANA,IAAW+7B,CAN3B;EAAA,MAOMF,IAAU77B,EAAM,CAANA,IAAW87B,CAP3B;EAAA,MAQM5e,mBARN;EAAA,MAQOvb,cARP;EAAA,MAQiBG,cARjB,CASA,IAAIH,EAASxD,MAATwD,GAAkB3B,EAAM7B,MAA5B,EAAoC;EAGlC,WAAO,aACHw7B,qBAHiB+B,iBAAiBN,CAAjBM,EAA4B/5B,CAA5B+5B,CAGjB/B,CADG,mBAAA,GAEG0B,CAFH,8GAAA,GAIMA,CAJN,MAAA,GAIkBM,mBALT,OAAO,OAAO,SAAS,UAAU,UAAU,SAKlCA,EAA0B75B,CAA1B65B,CAJlB,sBAAP;EASF,OAAIP,EAAU1D,SAAV0D,CAAoBxD,SAAxB,EAEE,OAAO,mBACGyD,CADH,kLAAA,GAKMQ,CALN,OAAA,GAKkBC,CALlB,OAAA,GAK8BC,CAL9B,OAAA,GAK0CC,CAL1C,+EAAA,GAQQC,CARR,4BAAA,GASMZ,CATN,gCAAP,CAcF,IAAMnD,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAArC;EAAA,MACMoD,IAAUpD,EAAS,CAATA,CADhB;EAAA,MAEMqD,IAAUrD,EAAS,CAATA,CAFhB,CAGA,OAAIqD,MAAYM,CAAZN,GAEK,mBACGF,CADH,+MAAA,GAMMS,CANN,OAAA,GAMkBC,CANlB,OAAA,GAM8BC,CAN9B,OAAA,GAM0CC,CAN1C,4FAAA,GAQeV,CARf,SAAA,GAQ6BD,CAR7B,wCAAA,GASoBzI,CATpB,0BAFL0I,GAeAA,MAAYU,CAAZV,GAEK,mBACGF,CADH,mLAAA,GAKMr7B,EAAM,CAANA,IAAWA,EAAM,CAANA,CAAXA,GAAsBA,EAAM,CAANA,CAAtBA,GAAiCA,EAAM,CAANA,CALvC,uBAAA,GAMMA,EAAM,CAANA,IAAWA,EAAM,CAANA,CAAXA,GAAsBA,EAAM,CAANA,CAN5B,uBAAA,GAOMA,EAAM,CAANA,IAAWA,EAAM,CAANA,CAPjB,uBAAA,GAQMA,EAAM,CAANA,CARN,uHAAA,GAWcu7B,CAXd,SAAA,GAW4BD,CAX5B,wCAAA,GAYoBzI,CAZpB,0BAFL0I,GAkBG,iBACGF,CADH,sHAAA,GAGkBC,CAHlB,OAAA,GAG8BC,CAH9B,OAAA,GAG0CM,CAH1C,OAAA,GAGsDC,CAHtD,kBAAA,GAIGC,CAJH,OAAA,GAIeC,CAJf,OAAA,GAI2BC,CAJ3B,wFAAA,GAMoBpJ,CANpB,sBAjCP;EA4CF,wBAAA,CAAwBuI,CAAxB;EACE,MAAMvI,IAAUuI,EAAU94B,IAA1B;EAAA,MACM+4B,IACF,QAAQxI,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,EAAR,GAA0CA,EAAQjxB,KAARi
xB,CAAc,CAAdA,CAA1C,GAA6D,MAFjE;EAAA,MAGMzM,IAAS1e,cAAmB0zB,EAAU1D,SAAV0D,CAAoBzD,YAAvCjwB,CAHf,CAKA,IAAI0zB,EAAU1D,SAAV0D,CAAoBxD,SAAxB,EACE,OAAe,MAAXxR,CAAW,GACN,WAASiV,CAAT,yBAAA,GAAwCxI,CAAxC,OADM,GAGR,mBACGwI,CADH,gDAAA,GAEmBjV,CAFnB,+DAAA,GAIUyM,CAJV,gDAHP,CAcF,IAAMqF,IAAWkD,EAAU1D,SAAV0D,CAAoBlD,QAArC;EAAA,MACMgE,IAAQhE,EAAS,CAATA,CADd;EAAA,MAEMiE,IAAQjE,EAAS,CAATA,CAFd,CAGA,OAAc,MAAViE,CAAU,IAAe,MAAVD,CAAL,GACL,mBACGb,CADH,iDAAA,GAEoBxI,CAFpB,8BADK,GAOA,MAAVsJ,CAAU,GACL,mBACGd,CADH,uEAAA,GAE0Ca,CAF1C,wCAAA,GAGoBrJ,CAHpB,0BADK,GAQA,MAAVqJ,CAAU,GACL,mBACGb,CADH,kEAAA,GAEqCc,CAFrC,6CAAA,GAGoBtJ,CAHpB,0BADK,GAQP,iBACGwI,CADH,6CAAA,GAEkBa,CAFlB,OAAA,GAE4BC,CAF5B,2CAAA,GAGoBtJ,CAHpB,sBAvBP;EA+BF,yCAAA,CACIuI,CADJ,EAC0B5B,CAD1B,EACmD4C,CADnD,EAEIf,CAFJ;EAGE,MAAMxJ,IAASuJ,EAAU1D,SAAV0D,CAAoBzD,YAApByD,CAAiCj9B,MAAhD;EAAA,MACMk+B,IAAU7C,EAAa7B,YAAb6B,CAA0Br7B,MAD1C;EAAA,MAGI0f,IAAO,KAHX,CAIgB,MAAZwe,CAAY,GACdxe,IAAO,OADO,GAEO,MAAZwe,CAAY,GACrBxe,IAAO,OADc,GAEA,MAAZwe,CAAY,KACrBxe,IAAO,OADc,CAJP,CAOhB,IAAMye,IAAgBC,iBAClBnB,EAAU1D,SAAV0D,CAAoBzD,YADF4E,EACgB/C,EAAa7B,YAD7B4E,CAAtB;EAAA,MAEMC,IAAWH,IAAUxK,CAF3B,CAoBA,OAAO,iBACGwJ,CADH,iBAAA,GAEDxd,CAFC,yCAAA,IAhBQ,MAAXgU,CAAW,GACG,EADH,GAEJwK,IAAU,CAAVA,IAAeC,EAAcn+B,MAAdm+B,IAAwB,CAAvCD,GACO,aADPA,GAILC,EAAcz2B,GAAdy2B,CAAkB,UAAA5gB,CAAA;EAAK,WAAA,aAAUA,IAAI8gB,CAAd,YAAA;KAAvBF,EAAuDv2B,IAAvDu2B,CAA4D,IAA5DA,CAUC,wBAAA,GAISF,CAJT,MAAA,IAPHC,IAAU,CAAVA,IAAexK,IAAS,CAAxBwK,GACsB,QADtBA,GAGsBjB,EAAU1D,SAAV0D,CAAoBzD,YAApByD,CACKv1B,GADLu1B,CACS,UAAC3a,CAAD,EAAI3hB,CAAJ;EAAU,WAAA,aAAUA,IAAI09B,CAAd,OAAA;KADnBpB,EAEKr1B,IAFLq1B,CAEU,IAFVA,CAInB,mBAAP;EASF,kCAAA,CACIA,CADJ,EAC0B5B,CAD1B,EAEIiD,CAFJ;EAGE,MAAM5J,IAAUuI,EAAU94B,IAA1B;EAAA,MACM85B,IAAiBvJ,EAAQE,MAARF,CAAe,CAAfA,EAAkBG,WAAlBH,KAAkCA,EAAQjxB,KAARixB,CAAc,CAAdA,CADzD;EAAA,MAEMwI,IAAW,QAAQe,CAAR,GAAyB,aAF1C;EAAA,MAIME,IAAgBC,iBAClBnB,EAAU1D,SAAV0D,CAAoBzD,YADF4E,EACgB/C,EAAa7B,YAD7B4E,CAJtB;EAAA,MAMM1K,IAASuJ,EAAU1D,SAAV0D,CAAoBzD,YAApByD,CAAiCj9B,MANhD;EAAA,MAOMk+B,IAAU7C,EAAa7B,YAAb6B,CAA0Br7B,MAP1C;EAAA,MAQMu+B,IACFD,MAA0BJ,IAAUxK,CAAVwK,IAAqBC,EAAcn+B,MAAdm+B,GAAuB,CAAtEG,CATJ;EAAA,MAUME,IACFC,sBAAqCN,CAArCM,CAXJ;EAAA,MAYMhF,IAAYwD,EAAU1D,SAAV0D,CAAoBxD,SAZtC,CAcA,IAAI8E,MAAgBC,CAApB,EACE,OAAOE,gCACHzB,CADGyB,EACQrD,CADRqD,EACsBT,CADtBS,EACsCxB,CADtCwB,CAAP,CAIF,IAAMzW,IAAS1e,cAAmB0zB,EAAU1D,SAAV0D,CAAoBzD,YAAvCjwB,CAAf;EAAA,MACIo1B,IAAmB,EADvB,CAEIJ,KAAeC,CAAfD,KACFI,IAAmB,sCACU1W,CADV,oCAAA,GAEOA,CAFP,cADjBsW,EAOJ,IAAMzE,IAAcuB,EAAatB,QAAjC,CACA,IAAIN,CAAJ,EACE,OAAe,MAAXxR,CAAW,GACN,WAASiV,CAAT,gBAAA,GAA+BxI,CAA/B,OADM,GAGR,mBACGwI,CADH,4FAAA,GAG0BpD,EAAY,CAAZA,CAH1B,OAAA,GAG6CA,EAAY,CAAZA,CAH7C,2CAAA,GAIwBA,EAAY,CAAZA,CAJxB,6BAAA,GAKD6E,CALC,yBAAA,GAMSV,CANT,gCAHP,CAeF,IAAMW,IAAa3B,EAAU1D,SAAV0D,CAAoBlD,QAAvC,CACA,OAAIpjB,YAAiBioB,CAAjBjoB,EAA6BmjB,CAA7BnjB,IACK,mBACGumB,CADH,wCAAA,GAEoBxI,CAFpB,gCADL/d,GAQG,iBACGumB,CADH,yFAAA,GAG2BpD,EAAY,CAAZA,CAH3B,OAAA,GAG8CA,EAAY,CAAZA,CAH9C,yCAAA,GAIwBA,EAAY,CAAZA,CAJxB,2BAAA,GAKD6E,CALC,gCAAA,GAMkBC,EAAW,CAAXA,CANlB,wCAAA,GAOyBA,EAAW,CAAXA,CAPzB,6EAAA,GASeA,EAAW,CAAXA,CATf,SAAA,GASmCA,EAAW,CAAXA,CATnC,wCAAA,GAWoBlK,CAXpB,sBARP;EAwBF,2BAAA,CAAkC1vB,CAAlC;EACE,MAAIA,KAAQ,CAAZ,EACE,OAAO,KAAP,CACK,IAAa,MAATA,CAAJ,EACL,OAAO,OAAP,CACK,IAAa,MAATA,CAAJ,EACL,OAAO,OAAP,CACK,IAAa,MAATA,CAAJ,EACL,OAAO,OAAP,CACK,IAAa,MAATA,CAAJ,EACL,OAAO,OAAP,CACK,IAAa,MAATA,CAAJ,EACL,OAAO,OAAP,CAEA,MAAM7F,MAAM,kBAAgB6F,CAAhB,0BAAN7F,CAAN;EAKJ,0BAAA,CACIs7B,CADJ,EACuB6C,CADvB;EAGE,MAAMuB,IAA0BC,KAAKC,KAALD,CAAWA,KAAKE,SAALF,CAAerE,CAAfqE,CAAXA,CAAhC,CAEA,OADAD,EAAatF,SAA
bsF,CAAuBrF,YAAvBqF,GAAsCvB,CAAtCuB,EACOA,CAAP;EAGF,2BAAA,CAA2Bzc,CAA3B,EAA6Cze,CAA7C;EACE,SAAOA,EAAS+D,GAAT/D,CAAa,UAAA4Z,CAAA;EAAK,WAAA6E,EAAO7E,CAAP6E,CAAA;KAAlBze,EAA6BiE,IAA7BjE,CAAkC,IAAlCA,CAAP;ECnzCF;EAmCA,SA9BE,UAAY9B,CAAZ,EAA6BgK,CAA7B,EAAiDC,CAAjD;EAJAlG,sBAAAA,IAAiB,IAAjBA,EAKEA,KAAK6N,WAAL7N,GAAmB/D,CALrB+D,CAME,IAAMZ,IAAOnD,EAAM7B,MAAnB;EAAA,QACMi/B,IAAWp9B,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,CADjB;EAAA,QAEMq9B,IAAapzB,IAAU,GAAVA,GAAgB,GAFnC,CAIAlG,KAAK4sB,QAAL5sB,GAAgB,6CAEVkG,IAAU,YAAUmzB,CAAV,aAAVnzB,GAAyC,WAF/B,gDAAA,GAMVqzB,kBAAkBn6B,CAAlBm6B,CANU,qDAAA,GAOAC,cAAcp6B,CAAdo6B,EAAoB,QAApBA,CAPA,uDAAA,GASGH,CATH,kFAAA,GAWAC,CAXA,8EAAA,GAcUrzB,CAdV,wDAAA,GAiBRuzB,cAAcp6B,CAAdo6B,EAAoB,QAApBA,CAjBQ,oCAAA,GAkBIC,UAAUr6B,CAAVq6B,EAAgB,QAAhBA,CAlBJ,0DAAhBz5B;KAwBJ;KAnCA,oBAqCmBZ,GAAcb;EAC/B,MAAa,MAATa,CAAJ,EACE,OAAO,KAAGb,CAAV,CACK,IAAa,MAATa,CAAJ,EACL,OAAUb,UAAAA,GAAWA,CAAXA,OAAV,CACK,IAAa,MAATa,CAAJ,EACL,OAAUb,UAAAA,GAAWA,CAAXA,SAAAA,GAAsBA,CAAtBA,OAAV,CACK,IAAa,MAATa,CAAJ,EACL,OAAUb,UAAAA,GAAWA,CAAXA,SAAAA,GAAsBA,CAAtBA,SAAAA,GAAiCA,CAAjCA,OAAV,CAEA,MAAMhF,MAAM,6BAA2B6F,CAA3B,0BAAN7F,CAAN;EAIJ,uBAAA,CAAuB6F,CAAvB,EAAqCb,CAArC;EACE,MAAa,MAATa,CAAJ,EACE,OAAO,KAAGb,CAAV,CACK,IAAa,MAATa,CAAJ,EACL,OAAUb,QAAV,CACK,IAAa,MAATa,CAAJ,EACL,OAAUb,QAAV,CACK,IAAa,MAATa,CAAJ,EACL,OAAUb,QAAV,CAEA,MAAMhF,MAAM,6BAA2B6F,CAA3B,0BAAN7F,CAAN;EC9DJ;EAOE,YAAA,CACIsU,CADJ,EAC2BkC,CAD3B,EAC8CxC,CAD9C;EANAvN,sBAAAA,IAAiB,IAAjBA,EACAA,gBAAAA,KADAA,EAQEA,KAAK6N,WAAL7N,GAAmB6N,CARrB7N,EASEA,KAAK+P,SAAL/P,GAAiB+P,CATnB/P,EAUEA,KAAKuN,UAALvN,GAAkBuN,CAVpBvN,EAWEA,KAAK4sB,QAAL5sB,GAAgB,2GAIJA,KAAK05B,oBAAL15B,EAJI,sBAAA,GAKJA,KAAK25B,mBAAL35B,EALI,sBAAA,GAMJA,KAAK45B,mBAAL55B,EANI,+BAAA,GAQG+P,CARH,qCAAA,GASWA,CATX,8BAAA,GAUGA,CAVH,qCAAA,GAWWA,CAXX,0CAAA,GAYeA,CAZf,6BAAA,GAaV/P,KAAK65B,kBAAL75B,EAbU,+DAAA,GAgBGA,KAAK85B,sBAAL95B,EAhBH,2CAXlBA;EAwEF,UAvCU+5B,WAAAA,qBAAAA,GAAR;EACE,WAAwB,WAApB/5B,KAAKuN,UAAe,GACf,WADe,GAGf,WAHT;KADMwsB,EAQAA,WAAAA,oBAAAA,GAAR;EACE,WAAwB,WAApB/5B,KAAKuN,UAAe,GACf,WADe,GAGf,WAHT;KATMwsB,EAgBAA,WAAAA,oBAAAA,GAAR;EACE,WAAwB,WAApB/5B,KAAKuN,UAAe,GACf,WADe,GAGf,WAHT;KAjBMwsB,EAwBAA,WAAAA,mBAAAA,GAAR;EACE,WAAwB,WAApB/5B,KAAKuN,UAAe,GACfvN,KAAK6N,WAAL7N,CAAiB,CAAjBA,CADe,GAGfA,KAAK6N,WAAL7N,CAAiB,CAAjBA,CAHT;KAzBM+5B,EAgCAA,WAAAA,uBAAAA,GAAR;EACE,WAAwB,WAApB/5B,KAAKuN,UAAe,GACf,2BADe,GAGf,2BAHT;KAjCMwsB,GAuCV;KAzEA;EAAA;ECqDA,SAhDE,UAAYlsB,CAAZ;EAJA7N,sBAAAA,IAAiB,IAAjBA,EAKEA,KAAK6N,WAAL7N,GAAmB6N,CALrB7N,EAMEA,KAAK4sB,QAAL5sB,GAAgB,gqCANlBA;KAoDF;KDrDA;EAAA,IEAag6B,gBACX7K,MAAM,qCACNC,MAAM,qCFFR;EAAA;EEqDA,SA3CE,UAAY5J,CAAZ,EAAwBrF,CAAxB,EAAsD4J,CAAtD;EAJA/pB,sBAAAA,IAAiB,QAAQ,OAAzBA,CAKE,IAAMi6B,IAAW9Z,EAAW,CAAXA,CAAjB,CACAngB,KAAK6N,WAAL7N,GAAmBmgB,CAAnBngB,CAEA,IAAMk6B,IACFnQ,IAAU,WAASxvB,KAAKyvB,EAAxBD,GAA+B,YAAUxvB,KAAKyvB,EADlD;EAAA,QAEMmQ,IAAoBpQ,IAAakQ,QAAblQ,GAA4B,KAFtD,CAIA/pB,KAAK4sB,QAAL5sB,GAAgB,8CACqBk6B,CADrB,gGAAA,GAIV1U,CAJU,iHAAA,GAQ8ByU,CAR9B,uKAAA,GAcUA,CAdV,qWAAA,GAuBqCE,CAvBrC,0LAAhBn6B;KAmCJ;KFrDA;EAAA;EGgCA,SA3BE,UAAY6N,CAAZ;EAJA7N,sBAAAA,IAAiB,IAAjBA,CAKS,IAAAo6B,QAAA;EAAA,QAAQC,QAAR,CACPr6B,KAAK6N,WAAL7N,GAAmB6N,CAAnB7N,EACAA,KAAK4sB,QAAL5sB,GAAgB,0NAMmCq6B,CANnC,SAAA,GAM+CD,CAN/C,2YADhBp6B;KAyBJ;KHhCA;EAAA;EIsBA,SAfE,UAAYuhB,CAAZ,EAA8B+Y,CAA9B,EAAqDx8B,CAArD;EALAkC,sBAAAA,IAAiB,KAAK,UAAtBA,CAME,IAAM6N,IAAwB0T,EAAO1jB,KAAP0jB,EAA9B,CACA1T,EAAY/P,CAAZ+P,IAAoBysB,CAApBzsB,EACA7N,KAAK6N,WAAL7N,GAAmB6N,CADnBA,EAEA7N,KAAKZ,IAALY,GAAY6N,EAAYzT,MAFxByT,CAGA,IAAM5P,IAAQs7B,kBAAkBv5B,KAAKZ,IAAvBm6B,CAAd;EAAA,QACMgB,IAAeC,gBAAgBjZ,CAAhBiZ,EAAwB18B,CAAxB08B,CADrB,CAGAx6B,K
AAK4sB,QAAL5sB,GAAgB,oCAEV/B,CAFU,yDAAA,GAGKs8B,CAHL,uBAAhBv6B;KAOJ;KJtBA,0BIwByBuhB,GAAkBzjB;EACzC,MAAMsB,IAAOmiB,EAAOnnB,MAApB,CACA,IAAIgF,IAAO,CAAX,EACE,MAAM7F,MAAM,qBAAmB6F,CAAnB,0BAAN7F,CAAN,CAEF,IAAa,MAAT6F,CAAJ,EACE,OAAO,wBAAP,CAMF,KAHA,IAAMq7B,KAAiB,WAAW,WAAW,WAAW,UAAxD,EAEMF,MAFN,EAGSx/B,IAAI,CAAb,EAAgBA,IAAIwmB,EAAOnnB,MAA3B,EAAmCW,GAAnC,EACMA,MAAM+C,CAAN/C,GACFw/B,EAAav+B,IAAbu+B,CAAkB,oBAAkBE,EAAc1/B,CAAd0/B,CAAlB,OAAlBF,CADEx/B,GAGFw/B,EAAav+B,IAAbu+B,CAAkB,KAAGE,EAAc1/B,CAAd0/B,CAArBF,CAHEx/B,CAMN,OAAOw/B,EAAav4B,IAAbu4B,EAAP;EC3CF,KCEYG,YDFZ;EAAA,ICSYC,mBDTZ;EAAA;EAuBA,SAnBE,UACYnY,CADZ,EACsCnjB,CADtC,EACyDpD,CADzD;EACY+D,iBAAAA,GAAAwiB,CAAAxiB,EAA0BA,YAAAA,GAAAX,CAA1BW,EAJZA,kBAAAA,IAAiB,KAAK,UAIVA,EACVA,KAAK6N,WAAL7N,GAAmB/D,CADT+D,CAEV,IAAM46B,IAAcrB,kBAAkBl6B,EAAQjF,MAA1Bm/B,CAApB;EAAA,QACMt7B,IAAQs7B,kBAAkBt9B,EAAM7B,MAAxBm/B,CADd;EAAA,QAEMsB,IAAe76B,KAAKwiB,QAALxiB,GAAgB,CAAhBA,GAAoB,YAApBA,GAAmC,SAFxD,CAGAA,KAAK4sB,QAAL5sB,GAAgB,eACV46B,CADU,gBAAA,GACeA,CADf,MAAA,GAC8B56B,KAAKX,OADnC,2CAAA,GAGRpB,CAHQ,kGAAA,GAKY+B,KAAKwiB,QALjB,6GAAA,GAOkBqY,CAPlB,2FAAhB76B;KAaJ;KAvBA,CCiCA,iDAAA,CACIoF,CADJ,EACkBC,CADlB;EAEE,UAAQA,GAASD,EAAjB;EAGF,4CAAA,CACI01B,CADJ,EACwBC,CADxB;EAEE,SAAOD,IAAaC,CAApB;EAGF,4CAAA,CAMIC,CANJ,EAM0BD,CAN1B;EAOE,MAAIC,IAAeD,CAAfC,IAAsC,CAA1C,EACE,MAAM,IAAIzhC,KAAJ,CACF,mBAAiByhC,CAAjB,6BAAA,GACGD,CAFD,CAAN,CAIF,OAAOC,IAAeD,CAAtB;EAKF,qCAAA,CACIE,CADJ,EACwBC,CADxB,EACmDH,CADnD;EAEE,MAAMI,IACFC,mCAAmCH,EAAO7gC,MAA1CghC,EAAkDL,CAAlDK,CADJ,CAEA,IAAIF,EAAc9gC,MAAd8gC,GAAuBC,CAA3B,EACE,MAAM,IAAI5hC,KAAJ,CACF,2BAAyB2hC,EAAc9gC,MAAvC,kBAAA,GACG+gC,CAFD,CAAN,CAKF,KADA,IAAIE,IAAM,CAAV,EACSC,IAAM,CAAf,EAAkBA,IAAML,EAAO7gC,MAA/B,IAAyCkhC,CAAzC,EACEJ,EAAcG,CAAdH,IAAqBD,EAAOK,CAAPL,CAArBC,EACAG,KAAON,CADPG;EAKJ,uCAAA,CACIA,CADJ,EACiCD,CADjC,EAEIF,CAFJ;EAGE,MAAMI,IAAeI,mCACjBL,EAAc9gC,MADGmhC,EACKR,CADLQ,CAArB,CAEA,IAAIN,EAAO7gC,MAAP6gC,GAAgBE,CAApB,EACE,MAAM,IAAI5hC,KAAJ,CACF,oBAAkB0hC,EAAO7gC,MAAzB,kBAAA,GAA+C+gC,CAD7C,CAAN,CAIF,KADA,IAAIE,IAAM,CAAV,EACSC,IAAM,CAAf,EAAkBA,IAAMJ,EAAc9gC,MAAtC,EAA8CkhC,KAAOP,CAArD,EACEE,EAAOI,GAAPJ,IAAgBC,EAAcI,CAAdJ,CAAhBD;EAIJ,gDAAA,CAgBI71B,CAhBJ,EAgBkBC,CAhBlB;EAiBE,UAAQ9K,KAAKuQ,IAALvQ,CAAU8K,IAAU,CAApB9K,GAAwBA,KAAKuQ,IAALvQ,CAAU6K,IAAO,CAAjB7K,EAAhC;EAGF,+CAAA,CACI6K,CADJ,EACkBC,CADlB;EAEQ,MAAA8T,gDAAA,CACN,qBAAe,CAAf;EAsBF,kCAAA,CACI8hB,CADJ,EAC0BO,CAD1B,EAC2Cp2B,CAD3C,EACyDC,CADzD,EAEIo2B,CAFJ;EAGE,MAAMN,IAAeO,sCAAsCt2B,CAAtCs2B,EAA4Cr2B,CAA5Cq2B,CAArB,CACA,IAAID,EAAWrhC,MAAXqhC,GAAoBN,CAAxB,EACE,MAAM,IAAI5hC,KAAJ,CAAU,wBAAsBkiC,EAAWrhC,MAAjC,2BAAA,GACV+gC,CADA,CAAN,CAeF,KAXA,IAAMQ,IAAYt2B,IAAU,CAAVA,IAAiB,CAAnC,EACMu2B,IAAax2B,IAAO,CAAPA,IAAc,CADjC,EAEMy2B,IAAoBthC,KAAKkC,KAALlC,CAAW8K,IAAU,CAArB9K,CAF1B,EAGMuhC,IAAqBvhC,KAAKkC,KAALlC,CAAW6K,IAAO,CAAlB7K,CAH3B,EAKMwhC,IAAexhC,KAAKuQ,IAALvQ,CAAU8K,IAAU,CAApB9K,CALrB,EAMMyhC,IAAiBD,IAAexhC,KAAKuQ,IAALvQ,CAAU6K,IAAO,CAAjB7K,CANtC,EAQM0hC,IACFC,kBAAuB92B,CAAvB82B,IAA+BA,kBAAuB72B,CAAvB62B,CATnC,EAWSrQ,IAAQ,CAAjB,EAAoBA,IAAQ2P,CAA5B,EAAqC3P,GAArC,EAA8C;EAS1C,SARF,IAAMsQ,IAAetQ,IAAQzmB,CAARymB,GAAexmB,CAApC,EACM+2B,IAAcvQ,IAAQoQ,CAD5B,EAKQI,IAAaV,IAAW,CAAXA,GAAe,CALpC,EAMQW,IAASj3B,CANjB,EAOMg2B,IAAMe,CAPZ,EAQWG,IAAS,CAAlB,EAAqBA,IAAST,CAA9B,IAAoDS,CAApD,EAA4D;EAE1D,WADA,IAAMC,IAAyB,IAATD,CAAS,GAAIl3B,CAAnC,EACSo3B,IAAS,CAAlB,EAAqBA,IAASZ,CAA9B,IAAmDY,CAAnD,EAA2D;EACzD,YACMnB,IAAMa,IAAeK,CAAfL,GADkB,IAATM,CAArB,CAEAhB,EAAWJ,CAAXI,IAAkBR,EAAOK,CAAPL,CAAlBQ,EACAA,EAAWJ,IAAM,CAAjBI,IAAsBR,EAAOK,IAAM,CAAbL,CADtBQ,EAEAA,EAAWJ,IAAM,CAAjBI,IAAsBR,EAAOK,IAAMgB,CAAbrB,CAFtBQ,EAGAA,EAAWJ,IAAM,
CAAjBI,IAAsBR,EAAOK,IAAMgB,CAANhB,GAAe,CAAtBL,CAHtBQ,EAIAJ,KAAO,CAJPI;EAMFJ,YAAOgB,CAAPhB;EAKJ,SAAIM,CAAJ,EACE;EAAIL,UAAMa,IAAe92B,CAAf82B,GAAyB,CAA/Bb,EACAD,IAAMe,IAAmC,KAApBL,IAAe,CAAK,CADzCT,CAAJ,IAEMoB,IAAY,IAAIr3B,CAFtB,CAIA,KADMg3B,IAA2B,IAAfN,CAAZM,EACGE,IAAS,CAAlB,EAAqBA,IAAST,CAA9B,IAAoDS,CAApD,EACEd,EAAWJ,CAAXI,IAAkBR,EAAOK,CAAPL,CAAlBQ,EACAA,EAAWJ,IAAM,CAAjBI,IAAsBR,EAAOK,IAAMj2B,CAAb41B,CADtBQ,EAEAH,KAAOoB,CAFPjB,EAGAJ,KAAOgB,CAHPZ;EAQJ,SAAIG,CAAJ,EAAe;EAGb,WAFIN,IAAMa,KAAgB/2B,IAAO,KAAKC,CAAlCi2B,EACAD,IAAMe,IAAgD,KAAjCJ,IAAiBD,CAAgB,CADtDT,EAEKmB,IAAS,CAAlB,EAAqBA,IAASZ,CAA9B,IAAmDY,CAAnD,EACEhB,EAAWJ,GAAXI,IAAoBR,EAAOK,GAAPL,CAApBQ,EACAA,EAAWJ,GAAXI,IAAoBR,EAAOK,GAAPL,CADpBQ,EAEAJ,KAAO,CAFPI,CAMEE,KAAYC,CAAZD,KACFF,EAAWW,IAAcH,CAAdG,GAAoC,CAA/CX,IAAoDR,EAAOK,CAAPL,CADlDU;;EAMR,UAAOF,CAAP;EAGF,oCAAA,CACIA,CADJ,EAC8BD,CAD9B,EAC+Cp2B,CAD/C,EAC6DC,CAD7D,EAEI41B,CAFJ;EAGE,MAAME,IAAe/1B,IAAOC,CAA5B,CACA,IAAI41B,EAAO7gC,MAAP6gC,GAAgBE,CAApB,EACE,MAAM,IAAI5hC,KAAJ,CACF,oBAAkB0hC,EAAO7gC,MAAzB,kBAAA,GAA+C+gC,CAD7C,CAAN,CAeF,KAXA,IAAMQ,IAAYt2B,IAAU,CAAVA,IAAiB,CAAnC,EACMu2B,IAAax2B,IAAO,CAAPA,IAAc,CADjC,EAEMy2B,IAAoBthC,KAAKkC,KAALlC,CAAW8K,IAAU,CAArB9K,CAF1B,EAGMuhC,IAAqBvhC,KAAKkC,KAALlC,CAAW6K,IAAO,CAAlB7K,CAH3B,EAKMwhC,IAAexhC,KAAKuQ,IAALvQ,CAAU8K,IAAU,CAApB9K,CALrB,EAMMyhC,IAAiBD,IAAexhC,KAAKuQ,IAALvQ,CAAU6K,IAAO,CAAjB7K,CANtC,EAQM0hC,IACFC,kBAAuB92B,CAAvB82B,IAA+BA,kBAAuB72B,CAAvB62B,CATnC,EAWSrQ,IAAQ,CAAjB,EAAoBA,IAAQ2P,CAA5B,EAAqC3P,GAArC,EAA8C;EAW1C,SAVF,IAAMuQ,IAAcvQ,IAAQzmB,CAARymB,GAAexmB,CAAnC,EACM82B,IAAetQ,IAAQoQ,CAD7B,EAKQS,IAAYf,IAAW,CAAXA,GAAe,CALnC,EAMQU,IAAYh3B,KAAWs2B,IAAW,CAAXA,GAAe,CAA1Bt2B,CANpB,EAOMi2B,IAAMa,CAPZ,EAQMQ,IAAUP,CARhB,EASMQ,IAAUR,IAAc/2B,CAT9B,EAUWk3B,IAAS,CAAlB,EAAqBA,IAAST,CAA9B,IAAoDS,CAApD,EAA4D;EAC1D,WAAK,IAAIE,IAAS,CAAlB,EAAqBA,IAASZ,CAA9B,IAAmDY,CAAnD,EACExB,EAAO0B,GAAP1B,IAAoBQ,EAAWH,GAAXG,CAApBR,EACAA,EAAO0B,GAAP1B,IAAoBQ,EAAWH,GAAXG,CADpBR,EAEAA,EAAO2B,GAAP3B,IAAoBQ,EAAWH,GAAXG,CAFpBR,EAGAA,EAAO2B,GAAP3B,IAAoBQ,EAAWH,GAAXG,CAHpBR,CAKFK,KAAOoB,CAAPpB,EACAqB,KAAWN,CADXf,EAEAsB,KAAWP,CAFXf;EAOJ,SAAIK,CAAJ,EACE;EAAIL,UAAMa,IAAoC,KAApBJ,IAAe,CAAK,CAA1CT,CAAJ,IACID,IAAMe,IAAc/2B,CAAd+2B,GAAwB,CADlC,CAIA,KAFMM,IAA2B,IAAfX,CAAZW,EACAL,IAAY,IAAIh3B,CADhBq3B,EAEGH,IAAS,CAAlB,EAAqBA,IAAST,CAA9B,IAAoDS,CAApD,EACEtB,EAAOI,CAAPJ,IAAcQ,EAAWH,CAAXG,CAAdR,EACAA,EAAOI,IAAMh2B,CAAb41B,IAAwBQ,EAAWH,IAAM,CAAjBG,CADxBR,EAEAK,KAAOoB,CAFPzB,EAGAI,KAAOgB,CAHPpB;EAQJ,SAAIW,CAAJ,EAAe;EAGb,WAFIN,IAAMa,IAAiD,KAAjCH,IAAiBD,CAAgB,CAAvDT,EACAD,IAAMe,KAAeh3B,IAAO,KAAKC,CADjCi2B,EAEKmB,IAAS,CAAlB,EAAqBA,IAASZ,CAA9B,IAAmDY,CAAnD,EACExB,EAAOI,GAAPJ,IAAgBQ,EAAWH,GAAXG,CAAhBR,EACAA,EAAOI,GAAPJ,IAAgBQ,EAAWH,GAAXG,CADhBR,EAEAK,KAAO,CAFPL,CAMEU,MACFV,EAAOmB,IAAeh3B,IAAOC,CAAtB+2B,GAAiC,CAAxCnB,IAA6CQ,EAAWH,CAAXG,CAD3CE;;EAMR,UAAOV,CAAP;yBChS8BziC,GAA2BqkC;EACzD,MAAMC,IAAcD,GAApB,CAEA,OADAE,gBAAgBvkC,CAAhBukC,GACOD,CAAP;IDFF,UAAYpC,CAAZ;EACEA,gBAAAA,YAAAA,EACAA,cAAAA,YADAA,EAEAA,cAAAA,YAFAA,EAGAA,gBAAAA,cAHAA;EADF,CAAA,CAAYA,iBAAAA,iBAAAA,CAAZ,GAOA,UAAYC,CAAZ;EACEA,0BAAAA,sBAAAA,EACAA,wBAAAA,sBADAA,EAEAA,gCAAAA,8BAFAA,EAGAA,0BAAAA,wBAHAA,EAIAA,0BAAAA,wBAJAA;EADF,CAAA,CAAYA,wBAAAA,wBAAAA,CAAZ,ECFA,IAAIqC,kCAAiC,CAArC,CAEA,sCAAA,CAA8CC,CAA9C;EACED,mCAAiCC,CAAjCD;EAGF,yBAAA,CAAgCxkC,CAAhC;EACE,MAAIwkC,8BAAJ,EAAoC;EAClC,QAAMhoB,IAAQxc,EAAG0kC,QAAH1kC,EAAd,CACA,IAAIwc,MAAUxc,EAAG2kC,QAAjB,EACE,MAAM,IAAI5jC,KAAJ,CAAU,kBAAkB6jC,qBAAqB5kC,CAArB4kC,EAAyBpoB,CAAzBooB,CAA5B,CAAN;;EAKN,8BAAA,CACI5kC,CADJ,EAC+B6kC,CAD/B;EAEE,UAAQA,CAAR,GACE,KAAK7kC,EAAG2kC,QAAR;EACE,aAAO,UAAP,CACF,KAAK3kC,EAA
G8kC,YAAR;EACE,aAAO,cAAP,CACF,KAAK9kC,EAAG+kC,aAAR;EACE,aAAO,eAAP,CACF,KAAK/kC,EAAGglC,iBAAR;EACE,aAAO,mBAAP,CACF,KAAKhlC,EAAGilC,6BAAR;EACE,aAAO,+BAAP,CACF,KAAKjlC,EAAGklC,aAAR;EACE,aAAO,eAAP,CACF,KAAKllC,EAAGmlC,kBAAR;EACE,aAAO,oBAAP,CACF;EACE,aAAO,wBAAsBN,CAA7B,CAhBJ;EAoBF,6BAAA,CACI7kC,CADJ,EAC+ByiB,CAD/B;EAEE,SAAO2iB,YACHplC,CADGolC,EACC;EAAM,WAAAplC,EAAG0iB,YAAH1iB,CAAgByiB,CAAhBziB,CAAA;KADPolC,EAEH,gBAAgB3iB,CAAhB,GAAgC,kCAF7B2iB,CAAP;EAKF,4BAAA,CACIplC,CADJ,EAC+BqlC,CAD/B;EAEE,MAAMC,IAA4BF,YAC9BplC,CAD8BolC,EAC1B;EAAM,WAAAplC,EAAGulC,YAAHvlC,CAAgBA,EAAGwlC,aAAnBxlC,CAAA;KADoBolC,EAE9B,sCAF8BA,CAAlC,CAKA,IAFAK,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG0lC,YAAH1lC,CAAgBslC,CAAhBtlC,EAA8BqlC,CAA9BrlC,CAAA;KAAvBylC,GACAA,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG2lC,aAAH3lC,CAAiBslC,CAAjBtlC,CAAA;KAAvBylC,CADAA,GAE+D,MAA3DzlC,EAAG4lC,kBAAH5lC,CAAsBslC,CAAtBtlC,EAAoCA,EAAG6lC,cAAvC7lC,CAAJ,EAEE,MADA0I,QAAQC,GAARD,CAAY1I,EAAG8lC,gBAAH9lC,CAAoBslC,CAApBtlC,CAAZ0I,GACM,IAAI3H,KAAJ,CAAU,kCAAV,CAAN,CAEF,OAAOukC,CAAP;EAGF,8BAAA,CACItlC,CADJ,EAC+B+lC,CAD/B;EAEE,MAAMC,IAA8BZ,YAChCplC,CADgColC,EAC5B;EAAM,WAAAplC,EAAGulC,YAAHvlC,CAAgBA,EAAGimC,eAAnBjmC,CAAA;KADsBolC,EAEhC,wCAFgCA,CAApC,CAKA,IAFAK,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG0lC,YAAH1lC,CAAgBgmC,CAAhBhmC,EAAgC+lC,CAAhC/lC,CAAA;KAAvBylC,GACAA,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG2lC,aAAH3lC,CAAiBgmC,CAAjBhmC,CAAA;KAAvBylC,CADAA,GAEiE,MAA7DzlC,EAAG4lC,kBAAH5lC,CAAsBgmC,CAAtBhmC,EAAsCA,EAAG6lC,cAAzC7lC,CAAJ,EAGE,MAFAkmC,0BACIH,CADJG,EAC0BlmC,EAAG8lC,gBAAH9lC,CAAoBgmC,CAApBhmC,CAD1BkmC,GAEM,IAAInlC,KAAJ,CAAU,oCAAV,CAAN,CAEF,OAAOilC,CAAP;EAGF,KAAMG,kBAAkB,0BAAxB,CACA,kCAAA,CACIT,CADJ,EAC0BU,CAD1B;EAEE,MAAMC,IAAwBF,gBAAgBG,IAAhBH,CAAqBC,CAArBD,CAA9B,CACA,IAA6B,QAAzBE,CAAJ,EAGE,OAFA39B,QAAQC,GAARD,CAAY,0CAAwC09B,CAApD19B,QACAA,QAAQC,GAARD,CAAYg9B,CAAZh9B,CACA,CAWF,KARA,IAAM69B,KAAcF,EAAsB,CAAtBA,CAApB,EAEMG,IAAcd,EAAa72B,KAAb62B,CAAmB,IAAnBA,CAFpB,EAGMz7B,IAAMu8B,EAAY5kC,MAAZ4kC,CAAmB/9B,QAAnB+9B,GAA8B5kC,MAA9B4kC,GAAuC,CAHnD,EAIMC,IAAuBD,EAAYl9B,GAAZk9B,CACzB,UAACE,CAAD,EAAOH,CAAP;EACI,WAAAh+B,UAAeg+B,IAAa,GAAG99B,UAA/BF,EAA2C0B,CAA3C1B,IAAkDm+B,CAAlD;KAFqBF,CAJ7B,EAOIG,IAAgB,CAPpB,EAQSpkC,IAAI,CAAb,EAAgBA,IAAIkkC,EAAqB7kC,MAAzC,EAAiDW,GAAjD,EACEokC,IAAgB5kC,KAAKI,GAALJ,CAAS0kC,EAAqBlkC,CAArBkkC,EAAwB7kC,MAAjCG,EAAyC4kC,CAAzC5kC,CAAhB4kC,CAGF,IAAMC,IAAmBH,EAAqBphC,KAArBohC,CAA2B,CAA3BA,EAA8BF,IAAa,CAA3CE,CAAzB;EAAA,MACMI,IAAYJ,EAAqBphC,KAArBohC,CAA2BF,IAAa,CAAxCE,EAA2CF,CAA3CE,CADlB;EAAA,MAEMK,IAAkBL,EAAqBphC,KAArBohC,CAA2BF,CAA3BE,CAFxB,CAIA/9B,QAAQC,GAARD,CAAYk+B,EAAiBp9B,IAAjBo9B,CAAsB,IAAtBA,CAAZl+B,GACAA,QAAQC,GAARD,CAAY09B,EAAcv3B,KAAdu3B,CAAoB,IAApBA,EAA0B,CAA1BA,CAAZ19B,CADAA,EAEAA,QAAQC,GAARD,CACI,QAAMH,SAAcs+B,EAAU,CAAVA,CAAdt+B,EAA4Bo+B,CAA5Bp+B,CADVG,EAEI,+DAFJA,CAFAA,EAKAA,QAAQC,GAARD,CAAYo+B,EAAgBt9B,IAAhBs9B,CAAqB,IAArBA,CAAZp+B,CALAA;EAQF,uBAAA,CAA8B1I,CAA9B;EACE,SAAOolC,YACHplC,CADGolC,EACC;EAAM,WAAAplC,EAAG+mC,aAAH/mC,EAAA;KADPolC,EAC2B,gCAD3BA,CAAP;EAIF,qBAAA,CAA4BplC,CAA5B,EAAuDgnC,CAAvD;EAEE,MADAvB,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGinC,WAAHjnC,CAAegnC,CAAfhnC,CAAA;KAAvBylC,IACwD,MAApDzlC,EAAGknC,mBAAHlnC,CAAuBgnC,CAAvBhnC,EAAgCA,EAAGmnC,WAAnCnnC,CAAJ,EAEE,MADA0I,QAAQC,GAARD,CAAY1I,EAAGonC,iBAAHpnC,CAAqBgnC,CAArBhnC,CAAZ0I,GACM,IAAI3H,KAAJ,CAAU,6CAAV,CAAN;EAIJ,yBAAA,CACIf,CADJ,EAC+BgnC,CAD/B;EAGE,MADAvB,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGqnC,eAAHrnC,CAAmBgnC,CAAnBhnC,CAAA;KAAvBylC,IAC4D,MAAxDzlC,EAAGknC,mBAAHlnC,CAAuBgnC,CAAvBhnC,EAAgCA,EAAGsnC,eAAnCtnC,CAAJ,EAEE,MADA0I,QAAQC,GAARD,CAAY1I,EAAGonC,iBAAHpnC,CAAqBgnC,CAA
rBhnC,CAAZ0I,GACM,IAAI3H,KAAJ,CAAU,mCAAV,CAAN;EAIJ,kCAAA,CACIf,CADJ,EAC+BwM,CAD/B;EAEE,MAAMQ,IAAsBo4B,YACxBplC,CADwBolC,EACpB;EAAM,WAAAplC,EAAGunC,YAAHvnC,EAAA;KADcolC,EACK,8BADLA,CAA5B,CAIA,OAFAK,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGwnC,UAAHxnC,CAAcA,EAAGynC,YAAjBznC,EAA+BgN,CAA/BhN,CAAA;KAAvBylC,GACAA,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG0nC,UAAH1nC,CAAcA,EAAGynC,YAAjBznC,EAA+BwM,CAA/BxM,EAAqCA,EAAG2nC,WAAxC3nC,CAAA;KAAvBylC,CADAA,EAEOz4B,CAAP;EAGF,iCAAA,CACIhN,CADJ,EAC+BwM,CAD/B;EAEE,MAAMQ,IAAsBo4B,YACxBplC,CADwBolC,EACpB;EAAM,WAAAplC,EAAGunC,YAAHvnC,EAAA;KADcolC,EACK,8BADLA,CAA5B,CAKA,OAHAK,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGwnC,UAAHxnC,CAAcA,EAAG4nC,oBAAjB5nC,EAAuCgN,CAAvChN,CAAA;KAAvBylC,GACAA,aACIzlC,CADJylC,EACQ;EAAM,WAAAzlC,EAAG0nC,UAAH1nC,CAAcA,EAAG4nC,oBAAjB5nC,EAAuCwM,CAAvCxM,EAA6CA,EAAG2nC,WAAhD3nC,CAAA;KADdylC,CADAA,EAGOz4B,CAAP;EAGF,wBAAA;EACE,SAAiC,MAA7B2X,IAAIxG,GAAJwG,CAAQ,eAARA,CAA6B,GACxB,CADwB,GAG1B,CAHP;EAMF,uBAAA,CAA8B3kB,CAA9B;EACE,SAAOolC,YACHplC,CADGolC,EACC;EAAM,WAAAplC,EAAG8iB,aAAH9iB,EAAA;KADPolC,EAC2B,gCAD3BA,CAAP;EAIF,6BAAA,CAAoCvD,CAApC,EAAmDD,CAAnD;EACE,MAAMiG,IAAiBljB,IAAIxG,GAAJwG,CAAQ,wBAARA,CAAvB,CACA,IAAKkd,KAAS,CAATA,IAAgBD,KAAU,CAA/B,EAAmC;EACjC,QAAMkG,IAAY,MAAIjG,CAAJ,MAAA,GAAaD,CAAb,MAAlB,CACA,MAAM,IAAI7gC,KAAJ,CAAU,4BAA4B+mC,CAA5B,GAAwC,cAAlD,CAAN;EAEF,OAAKjG,IAAQgG,CAARhG,IAA4BD,IAASiG,CAA1C,EAA2D;EACnDC,QAAY,MAAIjG,CAAJ,MAAA,GAAaD,CAAb,MAAZkG,CAEN,MAAM,IAAI/mC,KAAJ,CACF,4BAA4B+mC,CAA5B,GACA,oDADA,IAFQ,MAAID,CAAJ,MAAA,GAAsBA,CAAtB,MAER,IAC6D,GAF3D,CAAN;;EAMJ,2BAAA,CAAkC7nC,CAAlC;EACE,SAAOolC,YACHplC,CADGolC,EACC;EAAM,WAAAplC,EAAG4iB,iBAAH5iB,EAAA;KADPolC,EAC+B,oCAD/BA,CAAP;EAIF,4CAAA,CACIplC,CADJ,EAC+BgnC,CAD/B,EACsDe,CADtD,EAEI/6B,CAFJ,EAEyBg7B,CAFzB,EAEsDC,CAFtD,EAGIC,CAHJ;EAIE,MAAMvf,IAAM3oB,EAAGmoC,iBAAHnoC,CAAqBgnC,CAArBhnC,EAA8B+nC,CAA9B/nC,CAAZ,CACA,QAAa,MAAT2oB,MAKJ8c,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGwnC,UAAHxnC,CAAcA,EAAGynC,YAAjBznC,EAA+BgN,CAA/BhN,CAAA;KAAvBylC,GACAA,aACIzlC,CADJylC,EAEI;EAAM,WAAAzlC,EAAGooC,mBAAHpoC,CACF2oB,CADE3oB,EACGgoC,CADHhoC,EACwBA,EAAGqjB,KAD3BrjB,GACkC,CADlCA,EACyCioC,CADzCjoC,EAEFkoC,CAFEloC,CAAA;KAFVylC,CADAA,EAMAA,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGqoC,uBAAHroC,CAA2B2oB,CAA3B3oB,CAAA;KAAvBylC,CANAA,GAOO,EAZP;EAeF,yBAAA,CACIzlC,CADJ,EAC+B6iB,CAD/B,EACsDylB,CADtD;EAEEC,sBAAoBvoC,CAApBuoC,EAAwBD,CAAxBC,GACA9C,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGwoC,aAAHxoC,CAAiBA,EAAGyoC,QAAHzoC,GAAcsoC,CAA/BtoC,CAAA;KAAvBylC,CADA8C,EAEA9C,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B6iB,CAA9B7iB,CAAA;KAAvBylC,CAFA8C;EAKF,2BAAA,CACIvoC,CADJ,EAC+BsoC,CAD/B;EAEEC,sBAAoBvoC,CAApBuoC,EAAwBD,CAAxBC,GACA9C,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGwoC,aAAHxoC,CAAiBA,EAAGyoC,QAAHzoC,GAAcsoC,CAA/BtoC,CAAA;KAAvBylC,CADA8C,EAEA9C,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B,IAA9BA,CAAA;KAAvBylC,CAFA8C;EAKF,0CAAA,CACIvoC,CADJ,EAC+BgnC,CAD/B,EAEI0B,CAFJ;EAGE,SAAOtD,YACHplC,CADGolC,EACC;EAAM,WAAAplC,EAAG2oC,kBAAH3oC,CAAsBgnC,CAAtBhnC,EAA+B0oC,CAA/B1oC,CAAA;KADPolC,EAEH,cAAcsD,CAAd,GAA4B,2BAFzBtD,CAAP;EAKF,mCAAA,CACIplC,CADJ,EAC+BgnC,CAD/B,EAEI0B,CAFJ;EAGE,SAAO1oC,EAAG2oC,kBAAH3oC,CAAsBgnC,CAAtBhnC,EAA+B0oC,CAA/B1oC,CAAP;EAGF,4CAAA,CACIA,CADJ,EAC+BgnC,CAD/B,EACsDnkB,CADtD,EAEI+lB,CAFJ,EAEkDN,CAFlD;EAGE7C,eAAazlC,CAAbylC,EAAiB;EAAM,WAAAoD,gBAAgB7oC,CAAhB6oC,EAAoBhmB,CAApBgmB,EAA6BP,CAA7BO,CAAA;KAAvBpD,GACAA,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG8oC,SAAH9oC,CAAa4oC,CAAb5oC,EAAqCsoC,CAArCtoC,CAAA;KAAvBylC,CADAA;EAIF,iCAAA,CAAwCzlC,CAAxC;EACEylC,eAAazlC,CAAby
lC,EAAiB;EAAM,WAAAzlC,EAAGsjB,eAAHtjB,CAAmBA,EAAGujB,WAAtBvjB,EAAmC,IAAnCA,CAAA;KAAvBylC,GACAA,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAG+oC,QAAH/oC,CAAY,CAAZA,EAAe,CAAfA,EAAkBA,EAAGgB,MAAHhB,CAAU6hC,KAA5B7hC,EAAmCA,EAAGgB,MAAHhB,CAAU4hC,MAA7C5hC,CAAA;KAAvBylC,CADAA,EAEAA,aAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGgpC,OAAHhpC,CAAW,CAAXA,EAAc,CAAdA,EAAiBA,EAAGgB,MAAHhB,CAAU6hC,KAA3B7hC,EAAkCA,EAAGgB,MAAHhB,CAAU4hC,MAA5C5hC,CAAA;KAAvBylC,CAFAA;EAKF,uCAAA,CACIzlC,CADJ,EAC+B6iB,CAD/B,EAEIomB,CAFJ;EAGExD,eAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGsjB,eAAHtjB,CAAmBA,EAAGujB,WAAtBvjB,EAAmCipC,CAAnCjpC,CAAA;KAAvBylC,GACAA,aACIzlC,CADJylC,EAEI;EAAM,WAAAzlC,EAAGwjB,oBAAHxjB,CACFA,EAAGujB,WADDvjB,EACcA,EAAGyjB,iBADjBzjB,EACoCA,EAAGgjB,UADvChjB,EACmD6iB,CADnD7iB,EAC4D,CAD5DA,CAAA;KAFVylC,CADAA;EAOF,2CAAA,CACIzlC,CADJ,EAC+BipC,CAD/B;EAEExD,eAAazlC,CAAbylC,EAAiB;EAAM,WAAAzlC,EAAGsjB,eAAHtjB,CAAmBA,EAAGujB,WAAtBvjB,EAAmCipC,CAAnCjpC,CAAA;KAAvBylC,GACAA,aACIzlC,CADJylC,EAEI;EAAM,WAAAzlC,EAAGwjB,oBAAHxjB,CACFA,EAAGujB,WADDvjB,EACcA,EAAGyjB,iBADjBzjB,EACoCA,EAAGgjB,UADvChjB,EACmD,IADnDA,EACyD,CADzDA,CAAA;KAFVylC,CADAA;EAOF,6BAAA,CAAoCzlC,CAApC;EACE,MAAM6kC,IAAS7kC,EAAG2jB,sBAAH3jB,CAA0BA,EAAGujB,WAA7BvjB,CAAf,CACA,IAAI6kC,MAAW7kC,EAAG4jB,oBAAlB,EACE,MAAM,IAAI7iB,KAAJ,CACF,gCAAgCmoC,2BAA2BlpC,CAA3BkpC,EAA+BrE,CAA/BqE,CAD9B,CAAN;EAKJ,oCAAA,CACIlpC,CADJ,EAC+B6kC,CAD/B;EAEE,UAAQA,CAAR,GACE,KAAK7kC,EAAGmpC,iCAAR;EACE,aAAO,mCAAP,CACF,KAAKnpC,EAAGopC,yCAAR;EACE,aAAO,2CAAP,CACF,KAAKppC,EAAGqpC,iCAAR;EACE,aAAO,mCAAP,CACF,KAAKrpC,EAAGspC,uBAAR;EACE,aAAO,yBAAP,CACF;EACE,aAAO,mBAAiBzE,CAAxB,CAVJ;EAcF,qBAAA,CACI7kC,CADJ,EAC+BupC,CAD/B,EAEIC,CAFJ;EAGE,MAAMC,IAAkBhE,aAAazlC,CAAbylC,EAAiB;EAAM,WAAA8D,GAAA;KAAvB9D,CAAxB,CACA,IAAe,QAAXgE,CAAJ,EACE,MAAM,IAAI1oC,KAAJ,CAAUyoC,CAAV,CAAN,CAEF,OAAOC,CAAP;EAGF,6BAAA,CAA6BzpC,CAA7B,EAAwDsoC,CAAxD;EACE,MAAMoB,IAAiB1pC,EAAG2pC,gCAAH3pC,GAAsC,CAA7D;EAAA,MACM4pC,IAAgBtB,IAActoC,EAAGyoC,QADvC,CAEA,IAAImB,IAAgB5pC,EAAGyoC,QAAnBmB,IAA+BA,IAAgBF,CAAnD,EAEE,MAAM,IAAI3oC,KAAJ,CAAU,6BADS,6BAA2B2oC,CAA3B,MACT,OAAV,CAAN;EAIJ,yCAAA,CACIG,CADJ,EACwB/N,CADxB;qBACwBA,QACtB,IAAIgO,IAAanlB,IAAIxG,GAAJwG,CAAQ,wBAARA,CAAjB,CAgBA,IAfImX,MACFgO,KAA0B,CAA1BA,EAOAD,IAAWA,EAASvgC,GAATugC,CACP,UAAC1qB,CAAD,EAAI5c,CAAJ;EAAU,WAAAA,KAAKsnC,EAASjoC,MAATioC,GAAkB,CAAvBtnC,GACNmhC,kBAAuBmG,EAAStnC,CAATsnC,CAAvBnG,CADMnhC,GAENsnC,EAAStnC,CAATsnC,CAFM;KADHA,CART/N,GAeoB,MAApB+N,EAASjoC,MAAb,EAA2B;EACzB,QAAMmoC,IAAgBC,aAAkBH,CAAlBG,CAAtB,CACAH,IAAWE,EAAc3kC,QAAzBykC;EAGF,OAAMnmC,IAAOyH,cAAmB0+B,CAAnB1+B,CAAb,CACA,OAAI0+B,EAASjoC,MAATioC,IAAmB,CAAnBA,IAAwBnmC,KAAQomC,CAAhCD,IACMnmC,GAAM,EADZmmC,GAGoB,MAApBA,EAASjoC,MAAW,IAAKioC,EAAS,CAATA,KAAeC,CAApB,IACpBD,EAAS,CAATA,KAAeC,CADK,GAEfD,CAFe,GAIA,MAApBA,EAASjoC,MAAW,IAAKioC,EAAS,CAATA,IAAcA,EAAS,CAATA,CAAdA,IAA6BC,CAAlC,IACpBD,EAAS,CAATA,KAAeC,CADK,IAEdD,EAAS,CAATA,IAAcA,EAAS,CAATA,GAAaA,EAAS,CAATA,EAFb,GAIA,MAApBA,EAASjoC,MAAW,IAAKioC,EAAS,CAATA,KAAeC,CAApB,IACpBD,EAAS,CAATA,IAAcA,EAAS,CAATA,CAAdA,IAA6BC,CADT,IAEdD,EAAS,CAATA,GAAaA,EAAS,CAATA,IAAcA,EAAS,CAATA,EAFb,GAIA,MAApBA,EAASjoC,MAAW,IACpBioC,EAAS,CAATA,IAAcA,EAAS,CAATA,CAAdA,GAA4BA,EAAS,CAATA,CAA5BA,IAA2CC,CADvB,IAEpBD,EAAS,CAATA,KAAeC,CAFK,IAGdD,EAAS,CAATA,IAAcA,EAAS,CAATA,CAAdA,GAA4BA,EAAS,CAATA,GAAaA,EAAS,CAATA,EAH3B,GAKA,MAApBA,EAASjoC,MAAW,IAAKioC,EAAS,CAATA,KAAeC,CAApB,IACpBD,EAAS,CAATA,IAAcA,EAAS,CAATA,CAAdA,GAA4BA,EAAS,CAATA,CAA5BA,IAA2CC,CADvB,IAEdD,EAAS,CAATA,GAAaA,EAAS,CAATA,IAAcA,EAAS,CAATA,CAAdA,GAA4BA,EAAS,CAATA,EAF3B,GAIfI,oBAAyBvmC,CAAzBumC,CAxBT;EA4BF,gBAAA,CAAgB9lC,CAAhB;EACE,SAAOA,IAAI,CAAJA,IAAU,CAAjB;EAOF
,uBAAA,CAA8B+lC,CAA9B,EAAgDC,CAAhD;EAIE,MAAI5xB,YAHJ2xB,IAASA,EAAO7kC,KAAP6kC,EAAc,CAAdA,CAGL3xB,EAFJ4xB,IAASA,EAAO9kC,KAAP8kC,EAAc,CAAdA,CAEL5xB,CAAJ,EACE,QAAO,CAAP,CAGF,KAAK2xB,EAAOtoC,WAAWuoC,EAAOvoC,MAA9B,EACE,QAAO,CAAP,CAGF,IAAkB,MAAdsoC,EAAO,CAAPA,CAAc,IAAmB,MAAdA,EAAO,CAAPA,CAAL,IAAsC,MAAdC,EAAO,CAAPA,CAAxB,IACA,MAAdA,EAAO,CAAPA,CADJ,EAEE,QAAO,CAAP,CAGF,IAAID,EAAOtoC,MAAPsoC,KAAkBC,EAAOvoC,MAA7B;EACE,QAAI2W,YACIyxB,aAAkBE,CAAlBF,EAA0B5kC,QAD9BmT,EAEIyxB,aAAkBG,CAAlBH,EAA0B5kC,QAF9BmT,CAAJ,EAGE,QAAO,CAAP;KAJJ,MAOE,IAAI6xB,OAAOF,EAAO,CAAPA,CAAPE,KAAqBA,OAAOD,EAAO,CAAPA,CAAPC,CAAzB,EAA4C;EAC1C,QAAIA,OAAOF,EAAO,CAAPA,CAAPE,KAAqBA,OAAOD,EAAO,CAAPA,CAAPC,CAAzB,EACE,QAAO,CAAP,CAEF,IAAIF,EAAO,CAAPA,MAAcC,EAAO,CAAPA,CAAlB,EACE,QAAO,CAAP;EAKN,WAAO,CAAP;k4CCjYiCnqC;EAWjC,SAAOqqC,mBAA8BrqC,CAA9BqqC,EAVoB,kNAUpBA,CAAP;EAGF,4BAAA,CAAmCrqC,CAAnC;EAIE,SAAOsqC,yBAAoCtqC,CAApCsqC,EAFa,IAAI3kC,YAAJ,GACd,GAAG,GAAG,GAAG,GAAG,IAAI,IAAI,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,IAAI,GAAG,GAAG,GAAG,EAD9C,CAEb2kC,CAAP;EAGF,2BAAA,CAAkCtqC,CAAlC;EAGE,SAAOuqC,wBAAmCvqC,CAAnCuqC,EADuB,IAAIC,WAAJ,EAAiB,GAAG,GAAG,GAAG,GAAG,GAAG,EAAhC,CACvBD,CAAP;EAGF,0BAAA,CAEIvqC,CAFJ,EAE+ByqC,CAF/B;EAIE,MAEIC,CAFJ;EAAA,MAGIC,CAHJ;EAAA,MAIIC,CAJJ;EAAA,MAKIC,CALJ;EAAA,MAQIC,CARJ;EAAA,MAUIC,CAVJ;EAAA,MAWIC,CAXJ;EAAA,MAAMC,IAAQjrC,CAAd,CAkCA,OArBiC,MAA7B2kB,IAAIxG,GAAJwG,CAAQ,eAARA,CAA6B,IAC/B+lB,IAAsBO,EAAMC,IAA5BR,EACAC,IAA0BM,EAAME,IADhCT,EAEAE,IAA4BK,EAAM/nB,OAFlCwnB,EAGAG,IAAqBI,EAAMG,GAH3BV,EAIAI,IAA4B,CAJ5BJ,EAKAK,IAAqB,CALrBL,EAMAM,IAAuBC,EAAMI,UAPE,KAS/BX,IAAsB1qC,EAAGmjB,IAAzBunB,EACAC,IAA0B3qC,EAAGmjB,IAD7BunB,EAEAE,IAA4BK,EAAM9nB,IAFlCunB,EAGAG,IAAqB7qC,EAAGmjB,IAHxBunB,EAIAI,IAA4B,CAJ5BJ,EAKAK,IAAqB,CALrBL,EAMAM,IAAoD,QAA7BP,CAA6B,GAChDA,EAA0Ba,cADsB,GAEhD,IAjB2B,KAsB/BZ,wBACAC,4BACAC,8BACAC,uBACAU,uBAPsBvrC,EAAGmjB,MAQzB2nB,8BACAC,uBACAC,yBARF;EAYF,mCAAA,CACIhrC,CADJ,EAC+B6hC,CAD/B,EAC8CD,CAD9C,EAEI3e,CAFJ,EAE4BuoB,CAF5B,EAGIC,CAHJ;EAIEC,sBAA+B7J,CAA/B6J,EAAsC9J,CAAtC8J,EACA,IAAM7oB,IAAU8oB,cAAyB3rC,CAAzB2rC,CAAhB;EAAA,MAEMC,IAAQ5rC,EAAGgjB,UAFjB,CAkBA,OAfA6oB,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,WAAA7rC,EAAG+iB,WAAH/iB,CAAe4rC,CAAf5rC,EAAsB6iB,CAAtB7iB,CAAA;KAAlC6rC,GACAA,aACI7rC,CADJ6rC,EACQ;EAAM,WAAA7rC,EAAG8rC,aAAH9rC,CAAiB4rC,CAAjB5rC,EAAwBA,EAAG+rC,cAA3B/rC,EAA2CA,EAAGgsC,aAA9ChsC,CAAA;KADd6rC,CADAA,EAGAA,aACI7rC,CADJ6rC,EACQ;EAAM,WAAA7rC,EAAG8rC,aAAH9rC,CAAiB4rC,CAAjB5rC,EAAwBA,EAAGisC,cAA3BjsC,EAA2CA,EAAGgsC,aAA9ChsC,CAAA;KADd6rC,CAHAA,EAKAA,aACI7rC,CADJ6rC,EACQ;EAAM,WAAA7rC,EAAG8rC,aAAH9rC,CAAiB4rC,CAAjB5rC,EAAwBA,EAAGksC,kBAA3BlsC,EAA+CA,EAAGmsC,OAAlDnsC,CAAA;KADd6rC,CALAA,EAOAA,aACI7rC,CADJ6rC,EACQ;EAAM,WAAA7rC,EAAG8rC,aAAH9rC,CAAiB4rC,CAAjB5rC,EAAwBA,EAAGosC,kBAA3BpsC,EAA+CA,EAAGmsC,OAAlDnsC,CAAA;KADd6rC,CAPAA,EASAA,aACI7rC,CADJ6rC,EAEI;EAAM,WAAA7rC,EAAGojB,UAAHpjB,CACF4rC,CADE5rC,EACK,CADLA,EACQijB,CADRjjB,EACwB6hC,CADxB7hC,EAC+B4hC,CAD/B5hC,EACuC,CADvCA,EAC0CwrC,CAD1CxrC,EAEFyrC,CAFEzrC,EAEW,IAFXA,CAAA;KAFV6rC,CATAA,EAcAA,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,WAAA7rC,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B,IAA9BA,CAAA;KAAlC6rC,CAdAA,EAeOhpB,CAAP;EAGF,oCAAA,CACI7iB,CADJ,EAC+B4M,CAD/B,EAC6CC,CAD7C,EAEIw/B,CAFJ;EAGQ,MAAA1rB,kDAAA,CAEN,OAAO2rB,0BACHtsC,CADGssC,MAAAA,MAAAA,EACgBD,EAAc3B,mBAD9B4B,EAEHD,EAAcxB,kBAFXyB,EAE+BtsC,EAAGqjB,KAFlCipB,CAAP;EAKF,oCAAA,CACItsC,CADJ,EAC+B4M,CAD/B,EAC6CC,CAD7C,EAEIw/B,CAFJ;EAGQ,MAAA1rB,kDAAA,CAEN,OAAO2rB,0BACHtsC,CADGssC,MAAAA,MAAAA,EACgBD,EAAc3B,mBAD9B4B,EAEHD,EAAcxB,kBAFXyB,EAE+BD,EAAcrB,oBAF7CsB,CAAP;EAKF,0CAAA,CACItsC,CADJ,EAC+B4M,CAD/B,EAC6CC,CAD7C,EAEIw/B,CAFJ;EAGQ,MAAA1rB,kDA
AA,CAEN,OAAO2rB,0BACHtsC,CADGssC,MAAAA,MAAAA,EACgBtsC,EAAGmjB,IADnBmpB,EACyBtsC,EAAGmjB,IAD5BmpB,EACkCtsC,EAAGusC,aADrCD,CAAP;EAIF,mCAAA,CACItsC,CADJ,EAC+B4M,CAD/B,EAC6CC,CAD7C,EAEIw/B,CAFJ;EAGQ,MAAA1rB,gDAAA,CAEN,OAAO2rB,0BACHtsC,CADGssC,MAAAA,MAAAA,EACgBD,EAAczB,yBAD9B0B,EACyDtsC,EAAGmjB,IAD5DmpB,EAEHtsC,EAAGqjB,KAFAipB,CAAP;EAKF,0CAAA,CACItsC,CADJ,EAC+B4M,CAD/B,EAC6CC,CAD7C,EAEIw/B,CAFJ;EAGQ,MAAA1rB,gDAAA,CAEN,OAAO2rB,0BACHtsC,CADGssC,MAAAA,MAAAA,EACgBD,EAAc1B,uBAD9B2B,EACuDtsC,EAAGmjB,IAD1DmpB,EAEHD,EAAcrB,oBAFXsB,CAAP;EAKF,2CAAA,CACItsC,CADJ,EAC+BgnC,CAD/B,EAEIwF,CAFJ;EAUE,SAJAX,aACI7rC,CADJ6rC,EACQ;EAAM,WAAA7rC,EAAGwnC,UAAHxnC,CAAcA,EAAGynC,YAAjBznC,EAA+BwsC,CAA/BxsC,CAAA;KADd6rC,GAEgBY,mCACZzsC,CADYysC,EACRzF,CADQyF,EACC,cADDA,EACiBD,CADjBC,EAC+B,CAD/BA,EAHD,EAGCA,EALE,CAKFA,KAGZA,mCACIzsC,CADJysC,EACQzF,CADRyF,EACiB,IADjBA,EACuBD,CADvBC,EACqC,CADrCA,EANW,EAMXA,EAPa,EAObA,CADJ;EAKF,kCAAA,CACIzsC,CADJ,EAC+B6iB,CAD/B,EAEI/B,CAFJ;EAGE+qB,eAAwB7rC,CAAxB6rC,EAA4B;EAAM,WAAA7rC,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B6iB,CAA9B7iB,CAAA;KAAlC6rC,GACAA,aACI7rC,CADJ6rC,EAEI;EAAM,WAAA7rC,EAAGojB,UAAHpjB,CACFA,EAAGgjB,UADDhjB,EACa,CADbA,EACgBA,EAAGmjB,IADnBnjB,EACyBA,EAAGmjB,IAD5BnjB,EACkCA,EAAGusC,aADrCvsC,EACoD8gB,CADpD9gB,CAAA;KAFV6rC,CADAA,EAKAA,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,WAAA7rC,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B,IAA9BA,CAAA;KAAlC6rC,CALAA;EAQF,6BAAA,CACI7rC,CADJ,EAC+B6iB,CAD/B,EACsDgf,CADtD,EAEID,CAFJ,EAEoBp1B,CAFpB,EAEwCg/B,CAFxC;EAGEE,sBAA+B7J,CAA/B6J,EAAsC9J,CAAtC8J,GACAG,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,WAAA7rC,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B6iB,CAA9B7iB,CAAA;KAAlC6rC,CADAH,EAEAG,aACI7rC,CADJ6rC,EAEI;EAAM,WAAA7rC,EAAG0sC,aAAH1sC,CACFA,EAAGgjB,UADDhjB,EACa,CADbA,EACgB,CADhBA,EACmB,CADnBA,EACsB6hC,CADtB7hC,EAC6B4hC,CAD7B5hC,EACqCwrC,CADrCxrC,EACoDA,EAAGqjB,KADvDrjB,EAEFwM,CAFExM,CAAA;KAFV6rC,CAFAH,EAQAG,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,WAAA7rC,EAAG+iB,WAAH/iB,CAAeA,EAAGgjB,UAAlBhjB,EAA8B,IAA9BA,CAAA;KAAlC6rC,CARAH;EAWF,+BAAA,CACI1rC,CADJ,EAC+B6iB,CAD/B,EACsDjW,CADtD,EAEIC,CAFJ,EAEqB41B,CAFrB,EAE2C1hB,CAF3C,EAGIsrB,CAHJ;EAIQ,MAGF3J,CAHE;EAAA,MAAA/hB,kDAAA;EAAA,MAACgsB,QAAD;EAAA,MAAIC,QAAJ,CAImC,MAArCP,EAActB,kBAAuB,GAEvCrI,IAAgBD,CAFuB,GAOvCoK,4BAAqCpK,CAArCoK,EAHAnK,IACI,IAAI/8B,YAAJ,CAAiBmnC,mCACbrK,EAAO7gC,MADMkrC,EACE/rB,CADF+rB,CAAjB,CAEJD,EAA4D9rB,CAA5D8rB,CAPuC,EAUzCE,oBACI/sC,CADJ+sC,EACQlqB,CADRkqB,EACiBJ,CADjBI,EACoBH,CADpBG,EACuBrK,CADvBqK,EACsCV,EAAcxB,kBADpDkC,CAVyC;EAc3C,qCAAA,CACI/sC,CADJ,EAC+B6iB,CAD/B,EACsDwQ,CADtD,EAEIzmB,CAFJ,EAEkBC,CAFlB,EAEmC41B,CAFnC,EAGI4J,CAHJ;EAIQ,MAAA1rB,gDAAA;EAAA,MAACgsB,QAAD;EAAA,MAAIC,QAAJ;EAAA,MACA3J,IAAa,IAAIt9B,YAAJ,CACfqnC,sCAA+CpgC,CAA/CogC,EAAqDngC,CAArDmgC,CADe,CADb,CAGNC,yBAAkCxK,CAAlCwK,EAA0C5Z,CAA1C4Z,EAAiDrgC,CAAjDqgC,EAAuDpgC,CAAvDogC,EAAgEhK,CAAhEgK,GACAF,oBAAoB/sC,CAApB+sC,EAAwBlqB,CAAxBkqB,EAAiCJ,CAAjCI,EAAoCH,CAApCG,EAAuC9J,CAAvC8J,EAAmD/sC,EAAGmjB,IAAtD4pB,CADAE;EAIF,4CAAA,CACIjtC,CADJ,EAC+B6iB,CAD/B,EACsDjW,CADtD,EAEIC,CAFJ,EAEqBw/B,CAFrB;EAGE,MAAIa,IAA4CrqB,CAAhD,CAEA,IAAiC,MAA7B8B,IAAIxG,GAAJwG,CAAQ,eAARA,CAAJ,EAAoC;EAClC,QAAMwoB,IAAMntC,CAAZ;EAAA,QAGMotC,IAASD,EAAI5F,YAAJ4F,EAHf,CAIAtB,aACI7rC,CADJ6rC,EACQ;EAAM,aAAA7rC,EAAGwnC,UAAHxnC,CAAcmtC,EAAIE,iBAAlBrtC,EAAqCotC,CAArCptC,CAAA;OADd6rC,EAIA,IACMyB,IADgB,IAElBR,mCACIlgC,IAAOC,CADXigC,EACoBT,EAAcvB,yBADlCgC,CAFJ,CAKAjB,aACI7rC,CADJ6rC,EAEI;EAAM,aAAA7rC,EAAG0nC,UAAH1nC,CACFmtC,EAAIE,iBADFrtC,EACqBstC,CADrBttC,EACsCA,EAAG2nC,WADzC3nC,CAAA;OAFV6rC,GAOAA,aACI7rC,CADJ6rC,EACQ;EAAM,aAAAsB,EAAII,UAAJJ,CAAe,CAAfA,EAAkB,CAAlBA,EAAqBtgC,CAArBsgC,EAA8BvgC,CAA9Bu
gC,EAAoCntC,EAAGmjB,IAAvCgqB,EAA6CntC,EAAGqjB,KAAhD8pB,EAAuD,CAAvDA,CAAA;OADdtB,CAPAA,EAUAA,aACI7rC,CADJ6rC,EACQ;EAAM,aAAA7rC,EAAGwnC,UAAHxnC,CAAcmtC,EAAIE,iBAAlBrtC,EAAqC,IAArCA,CAAA;OADd6rC,CAVAA,EAaAqB,IAAkBE,CAblBvB;EAgBF,UAAOqB,CAAP;EAGF,yCAAA,CACIltC,CADJ,EAC+BgN,CAD/B,EACoDJ,CADpD,EAEIC,CAFJ,EAEqBw/B,CAFrB;EAGE,MAAMmB,IAAMxtC,CAAZ;EAAA,MAEMytC,IACF,IAAI9nC,YAAJ,CAAiBmnC,mCACblgC,IAAOC,CADMigC,EACGT,EAAcvB,yBADjBgC,CAAjB,CAHJ,CAMAU,EAAIhG,UAAJgG,CAAextC,EAAGynC,YAAlB+F,EAAgCxgC,CAAhCwgC,GACAA,EAAIE,gBAAJF,CAAqBxtC,EAAGynC,YAAxB+F,EAAsC,CAAtCA,EAAyCC,CAAzCD,CADAA,EAEAA,EAAIhG,UAAJgG,CAAextC,EAAGynC,YAAlB+F,EAAgC,IAAhCA,CAFAA,CAIA,IAAM/K,IAAS,IAAI98B,YAAJ,CAAiBiH,IAAOC,CAAxB,CAAf,CAKA,OAJA8gC,8BACIF,CADJE,EACoClL,CADpCkL,EAEItB,EAAcvB,yBAFlB6C,GAIOlL,CAAP;EAGF,gDAAA,CACIziC,CADJ,EAC+B4M,CAD/B,EAC6CC,CAD7C,EAEIw/B,CAFJ;EAGQ,MAAA1rB,kDAAA;EAAA,MAACgsB,QAAD;EAAA,MAAIC,QAAJ;EAAA,MAGAa,IACF,IAAI9nC,YAAJ,CAAiBmnC,mCACblgC,IAAOC,CADMigC,EACGT,EAAcvB,yBADjBgC,CAAjB,CAJE,CAONjB,aACI7rC,CADJ6rC,EAEI;EAAM,WAAA7rC,EAAGutC,UAAHvtC,CACF,CADEA,EACC,CADDA,EACI2sC,CADJ3sC,EACO4sC,CADP5sC,EACUqsC,EAAcd,qBADxBvrC,EAC+CA,EAAGqjB,KADlDrjB,EAEFytC,CAFEztC,CAAA;KAFV6rC,EAMA,IAAMpJ,IAAS,IAAI98B,YAAJ,CAAiBiH,IAAOC,CAAxB,CAAf,CAIA,OAHA8gC,8BACIF,CADJE,EACoClL,CADpCkL,EAEItB,EAAcvB,yBAFlB6C,GAGOlL,CAAP;EAGF,yDAAA,CACIziC,CADJ,EAC+B4M,CAD/B,EAC6CC,CAD7C,EAEIw/B,CAFJ;EAGQ,MAAA1rB,kDAAA;EAAA,MAACgsB,QAAD;EAAA,MAAIC,QAAJ;EAAA,MAIAa,IAAiB,IAAI5nC,UAAJ,CACnBinC,mCAA4ClgC,IAAOC,CAAnDigC,EAFgB,CAEhBA,CADmB,CAJjB,CAeN,OARAjB,aACI7rC,CADJ6rC,EAEI;EAAM,WAAA7rC,EAAGutC,UAAHvtC,CACF,CADEA,EACC,CADDA,EACI2sC,CADJ3sC,EACO4sC,CADP5sC,EACUqsC,EAAcd,qBADxBvrC,EAC+CA,EAAGusC,aADlDvsC,EAEFytC,CAFEztC,CAAA;KAFV6rC,GAQO,IAAIlmC,YAAJ,CAAiB8nC,EAAezgC,MAAhC,CAAP;EAGF,+CAAA,CACIhN,CADJ,EAC+BqzB,CAD/B,EAC8CzmB,CAD9C,EAC4DghC,CAD5D,EAEIC,CAFJ,EAE0BC,CAF1B,EAGIzB,CAHJ;EAIQ,MAAA1rB,gDAAA;EAAA,MAACgsB,QAAD;EAAA,MAAIC,QAAJ;EAAA,MAGA3J,IACF,IAAIt9B,YAAJ,CAAiBqnC,sCACba,CADab,EACCc,CADDd,CAAjB,CAJE,CAMNnB,aACI7rC,CADJ6rC,EACQ;EAAM,WAAA7rC,EAAGutC,UAAHvtC,CAAc,CAAdA,EAAiB,CAAjBA,EAAoB2sC,CAApB3sC,EAAuB4sC,CAAvB5sC,EAA0BA,EAAGmjB,IAA7BnjB,EAAmCA,EAAGqjB,KAAtCrjB,EAA6CijC,CAA7CjjC,CAAA;KADd6rC,EAEA,IAAMpJ,IAAS,IAAI98B,YAAJ,CAAiBwF,eAAoBkoB,GAAOzmB,GAAMghC,EAAjCziC,CAAjB,CAAf,CACA,OAAO4iC,2BACH9K,CADG8K,EACS1a,CADT0a,EACgBnhC,CADhBmhC,EACsBH,CADtBG,EAC4BtL,CAD5BsL,CAAP;;;ECzUA,YAAA,CAAY/tC,CAAZ;EAPAwH,sBAAAA,GAAmC,IAAnCA,EACAA,YAAAA,GAA6B,IAD7BA,EAEQA,aAAAA,IAAW,CAFnBA,EAGQA,sBAAAA,IAAoB,CAH5BA,EA6NQA,wBAAAA,IAAsB,CA7N9BA,EAkdQA,gBAAAA,KAldRA,EASIA,KAAKxH,EAALwH,GADQ,QAANxH,CAAM,GACEA,CADF,GAGEE,gBAAgBykB,IAAIxG,GAAJwG,CAAQ,eAARA,CAAhBzkB,CAXdsH,EAcmC,MAA7Bmd,IAAIxG,GAAJwG,CAAQ,eAARA,CAA6B,IAC/Bnd,KAAKwmC,qBAALxmC,GACIymC,oBAA+BzmC,KAAKxH,EAApCiuC,EAAwC,mBAAxCA,CADJzmC,EAEAA,KAAK0mC,yBAAL1mC,GACIA,KAAKxH,EAALwH,CAAQkb,YAARlb,CAAqB,0BAArBA,CAHJA,EAKKmd,IAAIxG,GAAJwG,CAAQ,8BAARA,MACHnd,KAAKijC,yBAALjjC,GACIymC,oBAA+BzmC,KAAKxH,EAApCiuC,EAAwC,wBAAxCA,CADJzmC,EAEAA,KAAK2mC,6BAAL3mC,GACIA,KAAKxH,EAALwH,CAAQkb,YAARlb,CAAqB,6BAArBA,CAJDmd,CAN0B,IAa/Bnd,KAAK0mC,yBAAL1mC,GACIymC,oBAA+BzmC,KAAKxH,EAApCiuC,EAAwC,wBAAxCA,CA5BRzmC,EA+BEA,KAAKglC,YAALhlC,GAAoB4mC,mBAA8B5mC,KAAKxH,EAAnCouC,CA/BtB5mC,EAgCEA,KAAK6mC,WAAL7mC,GAAmB8mC,kBAA6B9mC,KAAKxH,EAAlCsuC,CAhCrB9mC,EAiCEA,KAAKyhC,WAALzhC,GAAmB+mC,kBAA6B/mC,KAAKxH,EAAlCuuC,CAjCrB/mC,EAmCEA,KAAK6kC,aAAL7kC,GACIgnC,iBAA4BhnC,KAAKxH,EAAjCwuC,EAAqChnC,KAAKijC,yBAA1C+D,CApCNhnC;EA6iBF,UAtgBSinC,WAAAA,QAAAA,GAAP;EAAA,gBAAA,CACE,KAAIjnC,KAAKknC,QAAT,EAAA;EAGoB,cAAhBlnC,KAAKw/B,OAAW,IAClBt+B,QAAQ8Z,IAAR9Z,CACI,sKADJA,CADk
B,EAMM,QAAtBlB,KAAKmnC,aAAiB,IACxBjmC,QAAQ8Z,IAAR9Z,CACI,oMADJA,CAPkB,CAapB,IAAM1I,IAAKwH,KAAKxH,EAAhB,CACA6rC,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,eAAA7rC,EAAG4uC,MAAH5uC,EAAA;SAAlC6rC,GACAA,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,eAAA7rC,EAAGsjB,eAAHtjB,CAAmBA,EAAGujB,WAAtBvjB,EAAmC,IAAnCA,CAAA;SAAlC6rC,CADAA,EAEAA,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,eAAA7rC,EAAG8jB,iBAAH9jB,CAAqBmI,EAAK8gC,WAA1BjpC,CAAA;SAAlC6rC,CAFAA,EAGAA,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,eAAA7rC,EAAGwnC,UAAHxnC,CAAcA,EAAGynC,YAAjBznC,EAA+B,IAA/BA,CAAA;SAAlC6rC,CAHAA,EAIAA,aACI7rC,CADJ6rC,EACQ;EAAM,eAAA7rC,EAAGwnC,UAAHxnC,CAAcA,EAAG4nC,oBAAjB5nC,EAAuC,IAAvCA,CAAA;SADd6rC,CAJAA,EAMAA,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,eAAA7rC,EAAG6uC,YAAH7uC,CAAgBmI,EAAKkmC,WAArBruC,CAAA;SAAlC6rC,CANAA,EAOArkC,KAAKknC,QAALlnC,IAAgB,CAPhBqkC;;KAlBK4C,EA4BAA,WAAAA,+BAAAA,GAAP,UAAsChK,CAAtC;EACEj9B,SAAKsnC,iBAALtnC,GAAyBi9B,CAAzBj9B,EACAunC,8BAAyCtK,CAAzCsK,CADAvnC;KA7BKinC,EAiCAA,WAAAA,2BAAAA,GAAP,UAAkC7hC,CAAlC,EAAgDC,CAAhD;EAGE,WADArF,KAAKiF,eAALjF,IACOwnC,2BACHxnC,KAAKxH,EADFgvC,EACMpiC,CADNoiC,EACYniC,CADZmiC,EACqBxnC,KAAK6kC,aAD1B2C,CAAP;KApCKP,EAwCAA,WAAAA,2BAAAA,GAAP,UAAkC7hC,CAAlC,EAAgDC,CAAhD;EAGE,WADArF,KAAKiF,eAALjF,IACOynC,2BACHznC,KAAKxH,EADFivC,EACMriC,CADNqiC,EACYpiC,CADZoiC,EACqBznC,KAAK6kC,aAD1B4C,CAAP;KA3CKR,EA+CAA,WAAAA,iCAAAA,GAAP,UAAwC7hC,CAAxC,EAAsDC,CAAtD;EAGE,WADArF,KAAKiF,eAALjF,IACO0nC,iCACH1nC,KAAKxH,EADFkvC,EACMtiC,CADNsiC,EACYriC,CADZqiC,EACqB1nC,KAAK6kC,aAD1B6C,CAAP;KAlDKT,EAsDAA,WAAAA,yBAAAA,GAAP,UACI5rB,CADJ,EAEI/B,CAFJ;EAGEtZ,SAAKiF,eAALjF,IACA2nC,yBAAoC3nC,KAAKxH,EAAzCmvC,EAA6CtsB,CAA7CssB,EAAsDruB,CAAtDquB,CADA3nC;KAzDKinC,EA6DAA,WAAAA,iCAAAA,GAAP,UAAwC7hC,CAAxC,EAAsDC,CAAtD;EAGE,WADArF,KAAKiF,eAALjF,IACO4nC,iCACH5nC,KAAKxH,EADFovC,EACMxiC,CADNwiC,EACYviC,CADZuiC,EACqB5nC,KAAK6kC,aAD1B+C,CAAP;KAhEKX,EAoEAA,WAAAA,0BAAAA,GAAP,UAAiC7hC,CAAjC,EAA+CC,CAA/C;EAGE,WADArF,KAAKiF,eAALjF,IACO6nC,0BACH7nC,KAAKxH,EADFqvC,EACMziC,CADNyiC,EACYxiC,CADZwiC,EACqB7nC,KAAK6kC,aAD1BgD,CAAP;KAvEKZ,EA2EAA,WAAAA,oBAAAA,GAAP,UAA2B5rB,CAA3B;EAAA,gBAAA,CACErb,KAAKiF,eAALjF,IACIA,KAAKmnC,aAALnnC,KAAuBqb,CAAvBrb,KACF8nC,kCAA6C9nC,KAAKxH,EAAlDsvC,EAAsD9nC,KAAKyhC,WAA3DqG,GACA9nC,KAAKmnC,aAALnnC,GAAqB,IAFnBA,CADJA,EAKAqkC,aAAwBrkC,KAAKxH,EAA7B6rC,EAAiC;EAAM,aAAA1jC,EAAKnI,EAALmI,CAAQ0b,aAAR1b,CAAsB0a,CAAtB1a,CAAA;OAAvC0jC,CALArkC;KA5EKinC,EAoFAA,WAAAA,sBAAAA,GAAP,UACI5rB,CADJ,EAC2BjW,CAD3B,EACyCC,CADzC,EAEI41B,CAFJ;EAGEj7B,SAAKiF,eAALjF,GACA,IAAMuZ,IAAcwuB,gBAApB,CACA,OAAOC,sBACHhoC,KAAKxH,EADFwvC,EACM3sB,CADN2sB,EACe5iC,CADf4iC,EACqB3iC,CADrB2iC,EAC8B/M,CAD9B+M,EACsCzuB,CADtCyuB,EAEHhoC,KAAK6kC,aAFFmD,CAAP;KAzFKf,EA8FAA,WAAAA,4BAAAA,GAAP,UACI5rB,CADJ,EAC2BwQ,CAD3B,EAC0CzmB,CAD1C,EACwDC,CADxD,EAEI41B,CAFJ;EAIE,WADAj7B,KAAKiF,eAALjF,IACOioC,4BACHjoC,KAAKxH,EADFyvC,EACM5sB,CADN4sB,EACepc,CADfoc,EACsB7iC,CADtB6iC,EAC4B5iC,CAD5B4iC,EACqChN,CADrCgN,EAC6CjoC,KAAK6kC,aADlDoD,CAAP;KAlGKhB,EAsGAA,WAAAA,uCAAAA,GAAP,UACI5rB,CADJ,EAC2BjW,CAD3B,EACyCC,CADzC;EAAA,gBAAA,CAEE,OAAOrF,KAAKkoC,oBAALloC,CACHqb,CADGrb,EAEH;EAAM,aAAAmoC,uCACFxnC,EAAKnI,EADH2vC,EACO/iC,CADP+iC,EACa9iC,CADb8iC,EACsBxnC,EAAKkkC,aAD3BsD,CAAA;OAFHnoC,CAAP;KAxGKinC,EA8GAA,WAAAA,gDAAAA,GAAP,UACI5rB,CADJ,EAC2BjW,CAD3B,EACyCC,CADzC;EAAA,gBAAA,CAEE,OAAOrF,KAAKkoC,oBAALloC,CACHqb,CADGrb,EAEH;EAAM,aAAAooC,gDACFznC,EAAKnI,EADH4vC,EACOhjC,CADPgjC,EACa/iC,CADb+iC,EACsBznC,EAAKkkC,aAD3BuD,CAAA;OAFHpoC,CAAP;KAhHKinC,EAsHAA,WAAAA,gCAAAA,GAAP,UACIzhC,CADJ,EACyBJ,CADzB,EACuCC,CADvC;EAEE,WAAOgjC,gCACHroC,KAAKxH,EADF6vC,EACM7iC,CADN6iC,EACcjjC,CADdijC,EACoBhjC,CADpBgjC,EAC6BroC,KAAK6kC,aADlCwD,CAAP;KAxHKpB,EA4HAA,WAAAA,6BAAAA,GAAP
,UACI5rB,CADJ,EAC2BjW,CAD3B,EACyCC,CADzC;EAGErF,SAAKsoC,wBAALtoC,CAA8Bqb,CAA9Brb,EACA,IAAM9E,IAASqtC,mCACXvoC,KAAKxH,EADM+vC,EACFltB,CADEktB,EACOnjC,CADPmjC,EACaljC,CADbkjC,EACsBvoC,KAAK6kC,aAD3B0D,CAAf,CAGA,OADAvoC,KAAKwoC,0BAALxoC,IACO9E,CAAP;KAnIK+rC,EAsIAA,WAAAA,sBAAAA,GAAP;EACE,QAAMwB,IAAezoC,KAAK0oC,WAAL1oC,CAAiBA,KAAKxH,EAAtBwH,CAArB,CACA,OAAOA,KAAK2oC,SAAL3oC,CAAeyoC,CAAfzoC,CAAP;KAxIKinC,EA2ICA,WAAAA,YAAAA,GAAR,UAAoBzuC,CAApB;EAAA,QACMgf,CADN;EAAA,QAEMoxB,CAFN;EAAA,gBAAA,CAIE,IAAIzrB,IAAIxG,GAAJwG,CAAQ,yBAARA,CAAJ,EAAwC;EACtC,UAAMwoB,IAAMntC,CAAZ;EAAA,UAEMqwC,IAAOlD,EAAIrrB,SAAJqrB,CAAcA,EAAImD,0BAAlBnD,EAA8C,CAA9CA,CAFb,CAGAntC,EAAGuwC,KAAHvwC,IAEAowC,IAAgB;EACd,YAAMvL,IAASsI,EAAIqD,cAAJrD,CAAmBkD,CAAnBlD,EAAyB,CAAzBA,EAA4B,CAA5BA,CAAf,CACA,OAAOtI,MAAWsI,EAAIsD,gBAAf5L,IACHA,MAAWsI,EAAIuD,mBADnB;SAJF1wC,EAQAgf,IAAQqxB,CARRrwC;OAJF,MAaW2kB,IAAIxG,GAAJwG,CAAQ,8CAARA,IAA0D,CAA1DA,IACT3F,IAAQxX,KAAKmpC,UAALnpC,EAARwX,EACAxX,KAAKopC,QAALppC,EADAwX,EAEAoxB,IAAgB;EAAM,aAAAjoC,EAAK0oC,gBAAL1oC,CAClB6W,CADkB7W,EACXwc,IAAIxG,GAAJwG,CAAQ,8CAARA,CADWxc,CAAA;OAHbwc,IAUTyrB,IAAgB;EAAM,cAAA,CAAA;OAVbzrB,CAaX,SAAQ3F,UAAOoxB,kBAAf;KAzKK3B,EA4KAA,WAAAA,gCAAAA,GAAP,UACI5rB,CADJ,EAC2BwQ,CAD3B,EAC0CzmB,CAD1C,EACwDC,CADxD,EAEIghC,CAFJ,EAE0BC,CAF1B;EAAA,gBAAA,CAGE,OAAOtmC,KAAKkoC,oBAALloC,CACHqb,CADGrb,EAEH;EAAM,aAAAspC,sCACF3oC,EAAKnI,EADH8wC,EACOzd,CADPyd,EACclkC,CADdkkC,EACoBjkC,CADpBikC,EAC6BjD,CAD7BiD,EAC2ChD,CAD3CgD,EAEF3oC,EAAKkkC,aAFHyE,CAAA;OAFHtpC,CAAP;KA/KKinC,EAwLAA,WAAAA,cAAAA,GAAP,UAAqB1I,CAArB;EACEv+B,SAAKiF,eAALjF,GACA,IAAMxH,IAAKwH,KAAKxH,EAAhB;EAAA,QACMgmC,IACF+K,qBAAgC/wC,CAAhC+wC,EAAoChL,CAApCgL,CAFJ;EAAA,QAGMzL,IAA4B0L,qBAA8BhxC,CAA9BgxC,CAHlC;EAAA,QAIMhK,IAAwBiK,cAAyBjxC,CAAzBixC,CAJ9B,CAgBA,OAXApF,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,aAAA7rC,EAAGkxC,YAAHlxC,CAAgBgnC,CAAhBhnC,EAAyBslC,CAAzBtlC,CAAA;OAAlC6rC,GACAA,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,aAAA7rC,EAAGkxC,YAAHlxC,CAAgBgnC,CAAhBhnC,EAAyBgmC,CAAzBhmC,CAAA;OAAlC6rC,CADAA,EAEAsF,YAAuBnxC,CAAvBmxC,EAA2BnK,CAA3BmK,CAFAtF,EAGIrkC,KAAKsnC,iBAALtnC,IACF4pC,gBAA2BpxC,CAA3BoxC,EAA+BpK,CAA/BoK,CAJFvF,EAMKrkC,KAAK6pC,mBAAL7pC,KACHA,KAAK8pC,UAAL9pC,CAAgBw/B,CAAhBx/B,GACAA,KAAK6pC,mBAAL7pC,GAA2B+pC,kCACvBvxC,CADuBuxC,EACnB/pC,KAAKw/B,OADcuK,EACL/pC,KAAKglC,YADA+E,CAFxB/pC,CANLqkC,EAWO7E,CAAP;KA1MKyH,EA6MAA,WAAAA,cAAAA,GAAP,UAAqBzH,CAArB;EAAA,gBAAA,CACEx/B,KAAKiF,eAALjF,IACIw/B,MAAYx/B,KAAKw/B,OAAjBA,KACFx/B,KAAKw/B,OAALx/B,GAAe,IADbw/B,CADJx/B,EAIe,QAAXw/B,CAAW,IACb6E,aAAwBrkC,KAAKxH,EAA7B6rC,EAAiC;EAAM,aAAA1jC,EAAKnI,EAALmI,CAAQqpC,aAARrpC,CAAsB6+B,CAAtB7+B,CAAA;OAAvC0jC,CALFrkC;KA9MKinC,EAuNAA,WAAAA,WAAAA,GAAP,UAAkBzH,CAAlB;EAAA,gBAAA,CACEx/B,KAAKiF,eAALjF,IACAA,KAAKw/B,OAALx/B,GAAew/B,CADfx/B,EAEqB,QAAhBA,KAAKw/B,OAAW,IAASx/B,KAAKsnC,iBAAd,IACnBsC,gBAA2B5pC,KAAKxH,EAAhCoxC,EAAoC5pC,KAAKw/B,OAAzCoK,CAHF5pC,EAKAqkC,aAAwBrkC,KAAKxH,EAA7B6rC,EAAiC;EAAM,aAAA1jC,EAAKnI,EAALmI,CAAQspC,UAARtpC,CAAmB6+B,CAAnB7+B,CAAA;OAAvC0jC,CALArkC;KAxNKinC,EAgOAA,WAAAA,mBAAAA,GAAP,UACIzH,CADJ,EAC2B0B,CAD3B,EAEIgJ,CAFJ;EAIE,4BAFEA,SACFlqC,KAAKiF,eAALjF,IACIkqC,IACKC,iCACHnqC,KAAKxH,EADF2xC,EACM3K,CADN2K,EACejJ,CADfiJ,CADLD,GAIKE,0BACHpqC,KAAKxH,EADF4xC,EACM5K,CADN4K,EACelJ,CADfkJ,CAJT;KApOKnD,EA6OAA,WAAAA,qBAAAA,GAAP,UAA4BzH,CAA5B,EAAmDe,CAAnD;EAAA,gBAAA,CAGE,OADAvgC,KAAKiF,eAALjF,IACOqkC,aACHrkC,KAAKxH,EADF6rC,EACM;EAAM,aAAA1jC,EAAKnI,EAALmI,CAAQggC,iBAARhgC,CAA0B6+B,CAA1B7+B,EAAmC4/B,CAAnC5/B,CAAA;OADZ0jC,CAAP;KAhPK4C,EAoPAA,WAAAA,0BAAAA,GAAP,UAAiCzH,CAAjC,EAAwD0B,CAAxD;EAGE,WADAlhC,KAAKiF,eAALjF,IACOA,KAAKxH,EAALwH,CAAQmhC,kBAARnhC,CAA2Bw/B,CAA3Bx/B,EAAoCkhC,CAApClhC,CA
AP;KAvPKinC,EA0PAA,WAAAA,sBAAAA,GAAP,UACIoD,CADJ,EACsCC,CADtC,EAEIxJ,CAFJ;EAGE9gC,SAAKiF,eAALjF,IACAA,KAAKuqC,gBAALvqC,EADAA,EAEAwqC,mCACIxqC,KAAKxH,EADTgyC,EACaxqC,KAAKw/B,OADlBgL,EAC2BH,CAD3BG,EAC+CF,CAD/CE,EAEI1J,CAFJ0J,CAFAxqC;KA7PKinC,EAoQAA,WAAAA,uBAAAA,GAAP,UACIwD,CADJ,EACuCrlC,CADvC,EACqDC,CADrD;EAEErF,SAAK0qC,4BAAL1qC,CAAkCyqC,CAAlCzqC,EAAuDqF,CAAvDrF,EAAgEoF,CAAhEpF;KAtQKinC,EAyQAA,WAAAA,6BAAAA,GAAP,UACI0D,CADJ,EAC6CvlC,CAD7C,EAC2DC,CAD3D;EAEErF,SAAKiF,eAALjF,GACM,IAAAmZ,gDAAA;EAAA,QAACkhB,QAAD;EAAA,QAAQD,QAAR,CAENp6B,KAAK0qC,4BAAL1qC,CAAkC2qC,CAAlC3qC,EAA6Dq6B,CAA7Dr6B,EAAoEo6B,CAApEp6B;KA9QKinC,EAiRAA,WAAAA,2BAAAA,GAAP,UACI2D,CADJ,EACsBC,CADtB,EACuCC,CADvC,EAEIC,CAFJ;EAGE/qC,SAAKgrC,gCAALhrC,CACI8qC,CADJ9qC,EACiB4qC,CADjB5qC,EAC2B+qC,CAD3B/qC,EACuC6qC,CADvC7qC;KApRKinC,EAwRAA,WAAAA,iCAAAA,GAAP,UACI2D,CADJ,EACsBC,CADtB,EACuCC,CADvC,EAEIC,CAFJ;EAGE,UAAM,IAAIxxC,KAAJ,CAAU,mDAAV,CAAN;KA3RK0tC,EA8RAA,WAAAA,cAAAA,GAAP;EACsB,YAAhBjnC,KAAKw/B,OAAW,IAClBoK,gBAA2B5pC,KAAKxH,EAAhCoxC,EAAoC5pC,KAAKw/B,OAAzCoK,CADkB,EAGpBqB,oBAA+BjrC,KAAKxH,EAApCyyC,CAHoB;KA/RfhE,EAqSAA,WAAAA,eAAAA,GAAP;EACEjnC,SAAKiF,eAALjF,IACAA,KAAKuqC,gBAALvqC,EADAA,CAEA,IAAMxH,IAAKwH,KAAKxH,EAAhB,CACIwH,KAAKsnC,iBAALtnC,IACFA,KAAKkrC,aAALlrC,EADEA,EAGJqkC,aACI7rC,CADJ6rC,EACQ;EAAM,aAAA7rC,EAAG2yC,YAAH3yC,CAAgBA,EAAG4yC,SAAnB5yC,EAA8B,CAA9BA,EAAiCA,EAAG6yC,cAApC7yC,EAAoD,CAApDA,CAAA;OADd6rC,CAHIrkC;KAzSCinC,EAgTAA,WAAAA,+BAAAA,GAAP;EAAA,gBAAA,CACEjnC,KAAKiF,eAALjF,IACAqkC,aAAwBrkC,KAAKxH,EAA7B6rC,EAAiC;EAAM,aAAA1jC,EAAKnI,EAALmI,CAAQymC,MAARzmC,EAAA;OAAvC0jC,CADArkC;KAjTKinC,EAqTCA,WAAAA,uBAAAA,GAAR;EAYE,WAVwC,QAApCjnC,KAAKsrC,2BAA+B,KACtCtrC,KAAKsrC,2BAALtrC,GACIymC,oBACIzmC,KAAKxH,EADTiuC,EAEgE,MAA5DtpB,IAAIxG,GAAJwG,CAAQ,8CAARA,CAA4D,GACxD,iCADwD,GAExD,0BAJRspB,CAFkC,GAUjCzmC,KAAKsrC,2BAAZ;KAjUKrE,EAoUCA,WAAAA,6BAAAA,GAAR;EACE,WAAOjnC,KAAKurC,sBAALvrC,EAAP;KArUKinC,EAwUCA,WAAAA,6BAAAA,GAAR;EACE,WAAOjnC,KAAKurC,sBAALvrC,EAAP;KAzUKinC,EA4UPA,WAAAA,WAAAA,GAAA;EACE,QAAgE,MAA5D9pB,IAAIxG,GAAJwG,CAAQ,8CAARA,CAAJ,EAAmE;EACjE,UAAM6oB,IAAMhmC,KAAKxH,EAAjB;EAAA,UACMgzC,IAAMxrC,KAAKyrC,4BAALzrC,EADZ;EAAA,UAGM0rC,IAAQ1F,EAAI2F,WAAJ3F,EAHd,CAKA,OADAA,EAAImD,UAAJnD,CAAewF,EAAII,gBAAnB5F,EAAqC0F,CAArC1F,GACO0F,CAAP;EAEF,SAAMG,IAAM7rC,KAAK8rC,4BAAL9rC,EAAZ;EAAA,QACMwX,IAAQq0B,EAAIE,cAAJF,EADd,CAGA,OADAA,EAAIG,aAAJH,CAAkBA,EAAID,gBAAtBC,EAAwCr0B,CAAxCq0B,GACOr0B,CAAP;KAxVKyvB,EA2VPA,WAAAA,SAAAA,GAAA;EACE,QAAgE,MAA5D9pB,IAAIxG,GAAJwG,CAAQ,8CAARA,CAAJ,EAAA;EAMA,UAAM0uB,IAAM7rC,KAAK8rC,4BAAL9rC,EAAZ,CACA6rC,EAAII,WAAJJ,CAAgBA,EAAID,gBAApBC;OAPA,MAAA;EACE,UAAM7F,IAAMhmC,KAAKxH,EAAjB;EAAA,UACM0zC,IAAMlsC,KAAKyrC,4BAALzrC,EADZ,CAEAgmC,EAAIoD,QAAJpD,CAAakG,EAAIN,gBAAjB5F;;KA/VGiB,EAsWMA,WAAAA,uBAAAA,GAAb,UAAoCzvB,CAApC;;;;EACE,uBAAM20B,YACF;EAAM,qBAAAxrC,EAAKumC,QAALvmC,IAGFA,EAAK0oC,gBAAL1oC,CACI6W,CADJ7W,EAEIwc,IAAIxG,GAAJwG,CAAQ,8CAARA,CAFJxc,CAHE;eADJwrC,EAAN;EAOA,mBAPAhzB,MAAAA,QAOOnZ,KAAKosC,YAALpsC,CACHwX,CADGxX,EACImd,IAAIxG,GAAJwG,CAAQ,8CAARA,CADJnd,EAAP;;;KA9WKinC,EAkXCA,WAAAA,aAAAA,GAAR,UAAqBzvB,CAArB,EAAwC60B,CAAxC;EACE,QAA0B,MAAtBA,CAAJ,EACE,OAAO,IAAP,CAGF,IAA0B,MAAtBA,CAAJ,EAA6B;EAC3B,UAAMrG,IAAMhmC,KAAKxH,EAAjB,CAIA,OAFyBwtC,EAAIsG,iBAAJtG,CAAsBxuB,CAAtBwuB,EAA6BA,EAAIuG,YAAjCvG,IAEC,GAA1B;EAEA,SAAM6F,IAAM7rC,KAAK8rC,4BAAL9rC,EAAZ,CAKA,OAFI6rC,EAAIW,iBAAJX,CAAsBr0B,CAAtBq0B,EAA6BA,EAAIY,gBAAjCZ,IAEsB,GAA1B;KAnYG5E,EAuYCA,WAAAA,iBAAAA,GAAR,UAAyBzvB,CAAzB,EAA4C60B,CAA5C;EAEE,QAA0B,MAAtBA,CAAJ,EACE,QAAO,CAAP,CAGF,IAA0B,MAAtBA,CAAJ,EAA6B;EAC3B,UAAMrG,IAAMhmC,KAAKxH,EAAjB;EAAA,UACMqzC,IAAM7rC,KAAKyrC,4BAALzrC,EADZ;EAAA,UAGM0sC,IACF
1G,EAAIsG,iBAAJtG,CAAsBxuB,CAAtBwuB,EAA6BA,EAAI2G,sBAAjC3G,CAJJ,CASA,OAJqB,QAAjBhmC,KAAK4sC,QAAY,KACnB5sC,KAAK4sC,QAAL5sC,GAAgBA,KAAKxH,EAALwH,CAAQma,YAARna,CAAqB6rC,EAAIgB,gBAAzB7sC,CADG,GAId0sC,MAAc1sC,KAAK4sC,QAA1B;EAIMF,UAFAb,IAAM7rC,KAAK8rC,4BAAL9rC,IAGJwsC,kBAAkBh1B,GAAOq0B,EAAIiB,2BAD/BJ,CAMN,OAJqB,QAAjB1sC,KAAK4sC,QAAY,KACnB5sC,KAAK4sC,QAAL5sC,GAAgBA,KAAKxH,EAALwH,CAAQma,YAARna,CAAqB6rC,EAAIgB,gBAAzB7sC,CADG,GAId0sC,MAAc1sC,KAAK4sC,QAA1B;KAjaG3F,EAqaPA,WAAAA,UAAAA,GAAA,UAAUwB,CAAV;EAAA,gBAAA,CACE,OAAO,IAAItrC,OAAJ,CAAkB,UAAAC,CAAA;EACvBuD,QAAKosC,aAALpsC,CAAmB;EAAM,eAAA8nC,EAAaG,aAAbH,EAAA;SAAzB9nC,EAAuD;EAAM,eAAAvD,GAAA;SAA7DuD;OADK,CAAP;KAtaKsmC,EA6aPA,WAAAA,UAAAA,GAAA;EAIE,SADA,IAAM3sC,IAAQ0yC,kBAAkBhtC,KAAKitC,WAALjtC,CAAiB8B,GAAjB9B,CAAqB,UAAAtF,CAAA;EAAK,aAAAA,EAAEwyC,QAAF;OAA1BltC,CAAlBgtC,CAAd,EACSjyC,IAAI,CAAb,EAAgBA,KAAKT,CAArB,IAA8BS,CAA9B,EAAiC;SAE/BoyC;EAEFntC,UAAKitC,WAALjtC,GAAmBA,KAAKitC,WAALjtC,CAAiBnC,KAAjBmC,CAAuB1F,IAAQ,CAA/B0F,CAAnBA;KArbKinC,EAwbCA,WAAAA,cAAAA,GAAR,UAAsBiG,CAAtB,EAA+CC,CAA/C;EAAA,gBAAA,CACEntC,KAAKitC,WAALjtC,CAAiBhE,IAAjBgE,GAAuBktC,aAAUC,cAAjCntC,GACIA,KAAKitC,WAALjtC,CAAiB5F,MAAjB4F,GAA0B,CAA1BA,IAKJmsC,YAAiB;EAGf,aAFAxrC,EAAKysC,SAALzsC,IAEmC,MAA5BA,EAAKssC,WAALtsC,CAAiBvG,MAAxB;OAHF+xC,CANAnsC;KAzbKinC,EAscCA,WAAAA,yBAAAA,GAAR,UAAiC5rB,CAAjC;EACErb,SAAKiF,eAALjF,IACAqtC,8BACIrtC,KAAKxH,EADT60C,EACahyB,CADbgyB,EACsBrtC,KAAKyhC,WAD3B4L,CADArtC,EAGIA,KAAKsnC,iBAALtnC,IACFirC,oBAA+BjrC,KAAKxH,EAApCyyC,CAJFjrC;KAvcKinC,EA+cCA,WAAAA,2BAAAA,GAAR;EAC4B,YAAtBjnC,KAAKmnC,aAAiB,IACxBkG,8BACIrtC,KAAKxH,EADT60C,EACartC,KAAKmnC,aADlBkG,EACiCrtC,KAAKyhC,WADtC4L,GAEIrtC,KAAKsnC,iBAALtnC,IACFirC,oBAA+BjrC,KAAKxH,EAApCyyC,CAJsB,IAOxBnD,kCAA6C9nC,KAAKxH,EAAlDsvC,EAAsD9nC,KAAKyhC,WAA3DqG,CAPwB;KAhdrBb,EA2dCA,WAAAA,qBAAAA,GAAR,UACI5rB,CADJ,EAEIiyB,CAFJ;EAGEttC,SAAKsoC,wBAALtoC,CAA8Bqb,CAA9Brb,EACA,IAAM9E,IAASoyC,GAAf,CAGA,OAFAttC,KAAKwoC,0BAALxoC,IAEO9E,CAAP;KAleK+rC,EAqeCA,WAAAA,6BAAAA,GAAR,UACIsG,CADJ,EACkDlT,CADlD,EAEID,CAFJ;EAGEp6B,SAAKiF,eAALjF,GACA,IAAMxH,IAAKwH,KAAKxH,EAAhB,CACA60C,8BACI70C,CADJ60C,EACQE,CADRF,EACwCrtC,KAAKyhC,WAD7C4L,GAEIrtC,KAAKsnC,iBAALtnC,IACFirC,oBAA+BzyC,CAA/ByyC,CAHFoC,EAKArtC,KAAKmnC,aAALnnC,GAAqButC,CALrBF,EAMAhJ,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,aAAA7rC,EAAG+oC,QAAH/oC,CAAY,CAAZA,EAAe,CAAfA,EAAkB6hC,CAAlB7hC,EAAyB4hC,CAAzB5hC,CAAA;OAAlC6rC,CANAgJ,EAOAhJ,aAAwB7rC,CAAxB6rC,EAA4B;EAAM,aAAA7rC,EAAGgpC,OAAHhpC,CAAW,CAAXA,EAAc,CAAdA,EAAiB6hC,CAAjB7hC,EAAwB4hC,CAAxB5hC,CAAA;OAAlC6rC,CAPAgJ;KA1eKpG,EAofCA,WAAAA,iCAAAA,GAAR,UACIvsC,CADJ,EACe0W,CADf,EAC0BipB,CAD1B,EACyCD,CADzC;EAAA,gBAAA,CAEEp6B,KAAKiF,eAALjF,IACAqkC,aACIrkC,KAAKxH,EADT6rC,EACa;EAAM,aAAA1jC,EAAKnI,EAALmI,CAAQ6gC,OAAR7gC,CAAgBjG,CAAhBiG,EAAmByQ,CAAnBzQ,EAAsB05B,CAAtB15B,EAA6By5B,CAA7Bz5B,CAAA;OADnB0jC,CADArkC;KAtfKinC,EA2fCA,WAAAA,gBAAAA,GAAR;EACE,QAAIjnC,KAAKknC,QAAT,EACE,MAAM,IAAI3tC,KAAJ,CAAU,yCAAV,CAAN;KA7fG0tC,EAigBCA,WAAAA,iBAAAA,GAAR;EACE,QAAoB,QAAhBjnC,KAAKw/B,OAAT,EACE,MAAM,IAAIjmC,KAAJ,CAAU,kCAAV,CAAN;KAngBG0tC,GAsgBT;iCAWkCpsC;EAIhC,OAHA,IAAIsE,IAAQ,CAAZ,EACIoE,IAAM1I,EAAIT,MAAJS,GAAa,CADvB,EAEI2yC,KAAQ,CACZ,EAAOruC,KAASoE,CAAhB,GAAqB;EACnB,QAAMkqC,IAAOtuC,IAAQoE,CAARpE,IAAgB,CAA7B,CACetE,EAAI4yC,CAAJ5yC,OAEb2yC,IAAOC,CAAPD,EACAruC,IAAQsuC,IAAM,CAHD5yC,IAKb0I,IAAMkqC,IAAM,CALC5yC;EAQjB,UAAO2yC,CAAP;2BC7jBE5c,GAAqB4O,GAAuB/tB,GAC5CK;EAyBF,OAxBA,IAAM8a,IAAW4S,EAAQ5S,QAAzB,EACM8gB,IAA0Bj8B,EAAO3P,GAAP2P,CAAW,UAACE,CAAD,EAAQ5W,CAAR;EACzC,QAAM44B,MACJC,cAAcjiB,EAAM1V,OACpBk4B,UAAUxiB,EAAMkiB,SAANliB,GAAkB,IAAlBA,GAAyBA,EAAMg8B,OAANh8B,CAAcwiB,UACjDN,WAAWliB,EAAMkiB,WACj
BS,WAAU3iB,EAAMkiB,aAAoBliB,EAAMg8B,OAANh8B,CAAc2iB,UAJpD,CAMA,SAAQ/1B,MAAMihC,EAAQ/S,aAAR+S,CAAsBzkC,CAAtBykC,GAA0B7L,cAAxC;KAP8BliB,CADhC,EAUMm8B,IAAeF,EAAW5rC,GAAX4rC,CAAe,UAAAhzC,CAAA;EAAK,WAAAA,EAAEi5B,SAAF;KAApB+Z,CAVrB,EAWMjY,MACJ7B,cAAc9hB,EAAO7V,OACrBk4B,UAAUriB,EAAO67B,OAAP77B,CAAeqiB,UACzBN,YAAW,GACXS,UAAUxiB,EAAO67B,OAAP77B,CAAewiB,UAf3B,EAiBMuZ,IAASC,WACXJ,CADWI,EACCrY,CADDqY,EACelhB,CADfkhB,GAC0D,MAAjCtO,EAAQ9G,oBADjCoV,EAEXtO,EAAQ/L,kBAFGqa,CAjBf,EAqBMjd,IAAeD,EAAM2O,aAAN3O,CAAoBid,CAApBjd,CArBrB,EAuBMmd,MAvBN,EAwBShzC,IAAI,CAAb,EAAgBA,IAAIykC,EAAQ/S,aAAR+S,CAAsBplC,MAA1C,EAAkDW,GAAlD,EAAuD;EACrD,QAAMmmC,IAAc1B,EAAQ/S,aAAR+S,CAAsBzkC,CAAtBykC,CAApB,CAEAuO,EAAiB7M,CAAjB6M,IACInd,EAAMuQ,kBAANvQ,CAAyBC,CAAzBD,EAAuCsQ,CAAvCtQ,GAFgB,CAEhBA,CADJmd;EAIF,YACEvO,YACAqO,WACAhd,iBACAkd,qBACAnd,UACAgd,iBACAnY,iBAPF;EAWF,kCAAA,CACIuY,CADJ,EAC6Bv8B,CAD7B;EAEE,MAAIu8B,EAAW5zC,MAAX4zC,KAAsBv8B,EAAOrX,MAAjC,EACE,MAAMb,MACF,8BAA4By0C,EAAW5zC,MAAvC,oCAAA,GACqBqX,EAAOrX,MAD5B,YADEb,CAAN,CAKFy0C,EAAW5tC,OAAX4tC,CAAmB,UAACtxB,CAAD,EAAI3hB,CAAJ;EACjB,QAAMQ,IAASmhB,EAAEkX,YAAjB;EAAA,QACMjiB,IAAQF,EAAO1W,CAAP0W,CADd;EAAA,QAEMjW,IAASmW,EAAM1V,KAFrB,CAIA,KAAK8U,YAAiBxV,CAAjBwV,EAAyBvV,CAAzBuV,CAAL,EACE,MAAMxX,MACF,6EAC4BgC,CAD5B,UAAA,GAC0CC,CAD1C,gBADEjC,CAAN,CAKF,KAAImjB,EAAEmX,cAAaliB,EAAMkiB,SAAzB,EAAA;EAIA,UAAMoa,IAAYvxB,EAAEyX,QAApB;EAAA,UACM+Z,IAAYv8B,EAAMkiB,SAANliB,GAAkB,IAAlBA,GAAyBA,EAAMg8B,OAANh8B,CAAcwiB,QADzD,CAEA,KAAKpjB,YAAiBk9B,CAAjBl9B,EAA4Bm9B,CAA5Bn9B,CAAL,EACE,MAAMxX,MACF,oFACwB00C,CADxB,UAAA,GACyCC,CADzC,gBADE30C,CAAN;;KAlBJy0C;EAyBF,oBAAA,CACIG,CADJ,EACyB18B,CADzB,EAC+CK,CAD/C,EAEIs8B,CAFJ;EAIEC,2BAAyBF,EAAOP,YAAhCS,EAA8C58B,CAA9C48B,GACAA,0BAA0BF,EAAO1Y,aAAjC4Y,GAAiDv8B,EAAjDu8B,CADAA,CAGA,IAAMC,IAASx8B,EAAO67B,OAAP77B,CAAeuJ,OAA9B;EAAA,MACM6Y,IAAcpiB,EAAO67B,OAAP77B,CAAeqiB,QADnC;EAAA,MAEMvD,IAAQud,EAAOvd,KAFrB,CAGI9e,EAAO67B,OAAP77B,CAAewiB,QAAfxiB,GACF8e,EAAM2d,4BAAN3d,CAAmC0d,CAAnC1d,EAA2CsD,EAAY,CAAZA,CAA3CtD,EAA2DsD,EAAY,CAAZA,CAA3DtD,CADE9e,GAGF8e,EAAM4d,sBAAN5d,CAA6B0d,CAA7B1d,EAAqCsD,EAAY,CAAZA,CAArCtD,EAAqDsD,EAAY,CAAZA,CAArDtD,CAHE9e,EAKJ8e,EAAMkZ,UAANlZ,CAAiBud,EAAOtd,YAAxBD,CALI9e,EAMJL,EAAOrR,OAAPqR,CAAe,UAACE,CAAD,EAAQ5W,CAAR;EACb,QAAM0zC,IAAeN,EAAO3O,OAAP2O,CAAe1hB,aAAf0hB,CAA6BpzC,CAA7BozC,CAArB;EAAA,QACMO,IAA0BP,EAAOJ,gBAAPI,CAAwBM,CAAxBN,CADhC,CAEA,IAA+B,QAA3BO,CAAJ,EAAqC;EACnC,UAAI/8B,EAAMkiB,SAAV,EAAqB;EACnB,YAAwC,MAApClwB,cAAmBgO,EAAM1V,KAAzB0H,CAAJ,EACEitB,EAAMp4B,EAANo4B,CAASI,SAATJ,CAAmB8d,CAAnB9d,EAA4Cjf,EAAMg9B,aAANh9B,CAAoB,CAApBA,CAA5Cif,EADF,KAEO;EACL,cAAItyB,IAAOqT,EAAMg9B,aAAjB,CACMrwC,aAAgBH,YAAhBG,KACJA,IAAO,IAAIH,YAAJ,CAAiBG,CAAjB,CADHA,GAGNsyB,EAAMp4B,EAANo4B,CAASge,UAAThe,CAAoB8d,CAApB9d,EAA6CtyB,CAA7CsyB,CAHMtyB;EAKR;EAEF,WAAMuwC,IAAMl9B,EAAMg8B,OAANh8B,CAAc0J,OAA1B,CACAuV,EAAMke,qBAANle,CAA4Bie,CAA5Bje,EAAiC8d,CAAjC9d,EAA0D71B,CAA1D61B;;KAjBJnf,CANIK,EA2Be,QAAfs8B,CAAe,IACjBA,EAAYxd,CAAZwd,EAAmBD,EAAOtd,YAA1Bud,CA5BEt8B,EA8BJ8e,EAAMme,cAANne,EA9BI9e;EAiCN,uBAAA,CACI0tB,CADJ,EAC2B/tB,CAD3B,EACiDK,CADjD;EAEE,MAAIk9B,IAAY,EAAhB,CACAv9B,EAAOtK,MAAPsK,CAAcK,CAAdL,EAAsBrR,OAAtBqR,CAA8B,UAAA/W,CAAA;EAC5Bs0C,SAAgBt0C,EAAEuB,KAAFvB,MAAAA,IAAWA,EAAEm5B,SAAFn5B,GAAc,SAAdA,GAA0BA,EAAEizC,OAAFjzC,CAAUy5B,QAA/Cz5B,CAAhBs0C;KADFv9B,EAGA,IAAMw9B,IAAczP,EAAQ5S,QAA5B;EAAA,MACMsiB,MAAiD,MAAjC1P,EAAQ9G,sBAA+Bz3B,UAD7D;EAAA,MAEIsV,IAAMipB,EAAQxgC,WAARwgC,CAAoBjhC,IAF9B,CAKA,OADAgY,KAAO,MAAM24B,CAAN,GAAqB,GAArB,GAA2BF,CAA3B,GAAuC,GAAvC,GAA6CC,CACpD;ECnKF;EA0DA,SArDE,UACIphC,CADJ,EAC2BsS,CAD3B,EACiD4H,CADjD;EAJA/nB,sBAAAA,IAAiB,IAAjBA,EAMEA,KAAK6N,WAAL7N,GAAmB6N,C
ANrB7N,CASI,IAAA+sB,iBAAA;EAAA,QACAyE,gBADA;EAAA,QAEAvE,iBAFA;EAAA,QAGAD,kBAHA;EAAA,QAIAO,aAJA;EAAA,QAKAM,cALA;EAAA,QAMAV,mBANA;EAAA,QAOAD,oBAPA;EAAA,QASKQ,UATL;EAAA,QASWF,SATX;EAAA,QAUI2hB,IAAmB3d,IAAazE,CAVpC,CAYF/sB,KAAK4sB,QAAL5sB,GAAgB,uSAWW6N,EAAY,CAAZA,CAXX,gBAAA,GAYZA,EAAY,CAAZA,CAZY,gEAAA,GAc0BggB,CAd1B,UAAA,GAc0Cb,CAd1C,QAAA,GAeZQ,CAfY,uCAAA,GAgBaN,CAhBb,eAAA,GAgBwCiiB,CAhBxC,gCAAA,GAkBGhvB,EAAW,CAAXA,CAlBH,mFAAA,GAoBmC0N,CApBnC,UAAA,GAqBZZ,CArBY,SAAA,GAqBMS,CArBN,yCAAA,GAsBaP,CAtBb,6BAAA,GAuBZgiB,CAvBY,UAAA,GAuBY3d,CAvBZ,kCAAA,GAyBGrR,EAAW,CAAXA,CAzBH,mGAAA,GA4BZqR,CA5BY,mFAAhBxxB;KAoCJ;KA1DA;EAAA;EC8CA,SA1CE,UACI0rB,CADJ,EACsBpd,CADtB,EACsCC,CADtC,EACoD7W,CADpD,EAEI8W,CAFJ;EAJAxO,sBAAAA,IAAiB,IAAjBA,EACAA,gBAAAA,KADAA,CAOE,IAQIovC,CARJ;EAAA,QAAMC,IAAM/gC,CAAZ;EAAA,QACMghC,IAAO5jB,EAAO,CAAPA,IAAY,CADzB,CAEA1rB,KAAK6N,WAAL7N,GAAmB0rB,CAAnB1rB,CAOA,IAAMuvC,IAAQ,WAAShhC,CAAT,eAAA,GAA0B7W,CAA1B,YAAd,CAEE03C,IADW,OAAT5gC,CAAS,GACG,iBAAe+gC,CAAf,MADH,GAEO,MAAT/gC,CAAS,GACJ,UAAQ+gC,CAAR,MADI,GAGJ,aAAWA,CAAX,gBAAA,GAA8B/gC,CAA9B,QAJd4gC,EAOFpvC,KAAK4sB,QAAL5sB,GAAgB,4QASIqvC,CATJ,YAAA,GASiBA,CATjB,6EAAA,GAWgBC,CAXhB,gIAAA,GAgBMF,CAhBN,8CAPdA;KA4BN;KD9CA;EAAA;EE2EA,SAlEE,UACIjvB,CADJ,EAC0BqvB,CAD1B,EAC+CjhC,CAD/C,EAC6D7W,CAD7D,EAEI8W,CAFJ;EATAxO,sBAAAA,IAAiB,cAAc,eAAe,KAA9CA,EACAA,gBAAAA,KADAA,EAYEA,KAAK6N,WAAL7N,GAAmBmgB,CAZrBngB,EAaEA,KAAKlI,KAALkI,GAAamgB,EAAW,CAAXA,CAbfngB,EAcEA,KAAKwvC,WAALxvC,GAAmBwvC,CAdrBxvC,EAeEA,KAAKuO,IAALvO,GAAYuO,CAfdvO,EAgBEA,KAAKtI,KAALsI,GAAatI,CAhBfsI,EAiBEA,KAAKwO,IAALxO,GAAYwO,CAjBdxO,EAkBEA,KAAK4sB,QAAL5sB,GAAgB,sNAQUA,KAAKlI,KARf,iEAAA,GASgC03C,CAThC,kDAAA,GAUqBxvC,KAAKlI,KAV1B,iCAAA,GAWM03C,CAXN,+FAAA,GAckBxvC,KAAKlI,KAdvB,yaAAA,GA6BKJ,CA7BL,sBAAA,GA6B8B6W,CA7B9B,qPAAA,GAoCqB7W,CApCrB,gCAAA,GAqCM8W,CArCN,gLAAA,GAyCsBA,CAzCtB,gSAlBlBxO;KA2EF;KF3EA;EAAA;EGkEA,SA5DE,UAAY+nB,CAAZ;EAJA/nB,sBAAAA,IAAiB,MAAM,SAAvBA,EAKEA,KAAK6N,WAAL7N,GAAmB+nB,EAAS8E,OAL9B7sB,CAME,IAAMgtB,IAAejF,EAASiF,YAA9B;EAAA,QACMC,IAAclF,EAASkF,WAD7B;EAAA,QAEMC,IAAiBnF,EAASmF,cAFhC;EAAA,QAGME,IAAwBrF,EAASqF,qBAHvC;EAAA,QAIMC,IAAuBtF,EAASsF,oBAJtC;EAAA,QAMMC,IAASF,IAAwB,CAAxBA,GAA4BrF,EAASwF,OAATxF,CAAiByF,GAN5D;EAAA,QAOMC,IAAUJ,IAAuB,CAAvBA,GAA2BtF,EAASwF,OAATxF,CAAiB2F,IAP5D;EAAA,QASM+hB,IAAYriB,IAAwBC,CAAxBD,GAA+C,CATjE,CAUAptB,KAAK4sB,QAAL5sB,GAAgB,sCACastB,CADb,OAAA,GACwBG,CADxB,mdAAA,GAeYL,CAfZ,wBAAA,GAgBFF,CAhBE,wDAAA,GAiB4BF,CAjB5B,8CAAA,GAmBgBjF,EAAS6F,SAnBzB,sIAAA,GAwBcP,CAxBd,gEAAA,GAyB8BJ,CAzB9B,gDAAA,GA2BkBlF,EAAS8F,QA3B3B,iNAAA,GAkCY4hB,CAlCZ,4LAAA,GAsCiBpiB,CAtCjB,kMAAhBrtB;KAgDJ;KHlEA;EAAA;EI8EA,SA1EE,UACIuhB,CADJ,EACsC8N,CADtC,EAEI1oB,CAFJ,EAEwBC,CAFxB;uBAEID,0BAAoBC,SANxB5G,kBAAAA,IAAiB,WAAW,WAO1B,IAAMusB,IAAYhL,EAAO,CAAPA,CAAlB;EAAA,QACMmuB,IAAc/oC,IAAa4a,EAAO,CAAPA,CAAb5a,GAAyB4a,EAAO,CAAPA,CAD7C;EAAA,QAEMouB,IAAc/oC,IAAayoB,EAAO,CAAPA,CAAbzoB,GAAyByoB,EAAO,CAAPA,CAF7C;EAAA,QAGMugB,IAAYjpC,IAAa4a,EAAO,CAAPA,CAAb5a,GAAyB4a,EAAO,CAAPA,CAH3C,CAIAvhB,KAAK6N,WAAL7N,IAAoBusB,GAAWmjB,GAAaC,EAA5C3vC,CAEA,IAAM6vC,IAAqB,UAACC,CAAD,EAAqBC,CAArB;EACvB,aAAAppC,IAAa,YAAUopC,CAAV,QAAA,GAAwBD,CAAxB,WAAbnpC,GACa,kBAAgBopC,CAAhB,QAAA,GAA8BD,CAD3C;OADJ;EAAA,QAGME,IAAqB,UAACF,CAAD,EAAqBC,CAArB;EACvB,aAAAnpC,IAAa,kBAAgBmpC,CAAhB,QAAA,GAA8BD,CAA3ClpC,GACa,YAAUmpC,CAAV,QAAA,GAAwBD,CAAxB,WADb;OAJJ;EAAA,QAOMG,IAAmD,IAA5B11C,KAAKkC,KAALlC,CAAWq1C,IAAY,CAAvBr1C,CAP7B;EAAA,QAQM21C,IAAyBN,IAAY,CAR3C,CAUA5vC,KAAK4sB,QAAL5sB,GAAgB,+GAEQiwC,CAFR,+DAAA,GAIGJ,EAAmB,CAAnBA,EAAsB,GAAtBA,CAJH,8BAAA,GAKGA,EAAmB,CAAnBA,EAAsB,GAAtBA,CALH,8BAAA,GAMGA,EAAmB,CAAnBA,EAAsB,GAAtBA,CANH,8BAAA,GAOGA,EAAmB,CAAnBA,E
AAsB,GAAtBA,CAPH,iEAAA,GAUGG,EAAmB,CAAnBA,EAAsB,GAAtBA,CAVH,8BAAA,GAWGA,EAAmB,CAAnBA,EAAsB,GAAtBA,CAXH,8BAAA,GAYGA,EAAmB,CAAnBA,EAAsB,GAAtBA,CAZH,8BAAA,GAaGA,EAAmB,CAAnBA,EAAsB,GAAtBA,CAbH,yEAAA,IAmBmB,MAA3BE,CAnBQ,wCAAA,GAoBWL,EAAmB,CAAnBA,EAAsBI,CAAtBJ,CApBX,+BAAA,GAqBGG,EAAmB,CAAnBA,EAAsBC,CAAtBD,CArBH,0BAAA,IAsB0B,MAA3BE,CAtBC,wDAAA,GAwBGL,EAAmB,CAAnBA,EAAsBI,CAAtBJ,CAxBH,8BAAA,GAyBGA,EAAmB,CAAnBA,EAAsBI,CAAtBJ,CAzBH,iEAAA,GA4BGG,EAAmB,CAAnBA,EAAsBC,CAAtBD,CA5BH,8BAAA,GA6BGA,EAAmB,CAAnBA,EAAsBC,CAAtBD,CA7BH,mEAAA,IAgC0B,MAA3BE,CAhCC,wDAAA,GAkCGL,EAAmB,CAAnBA,EAAsBI,CAAtBJ,CAlCH,8BAAA,GAmCGA,EAAmB,CAAnBA,EAAsBI,CAAtBJ,CAnCH,8BAAA,GAoCGA,EAAmB,CAAnBA,EAAsBI,CAAtBJ,CApCH,iEAAA,GAuCGG,EAAmB,CAAnBA,EAAsBC,CAAtBD,CAvCH,8BAAA,GAwCGA,EAAmB,CAAnBA,EAAsBC,CAAtBD,CAxCH,8BAAA,GAyCGA,EAAmB,CAAnBA,EAAsBC,CAAtBD,CAzCH,gOAAhBhwC;KAuDJ;KJ9EA;EAAA;EKuCA,SAlCE,UACIuhB,CADJ,EAC8B8N,CAD9B,EAEIxhB,CAFJ,EAEmClH,CAFnC,EAEuDC,CAFvD;uBAEmCD,0BAAoBC,SAPvD5G,kBAAAA,IAAiB,WAAW,YAC5BA,uBAAAA,IAAqB,GAOnBA,KAAK6N,WAAL7N,GAAmB6N,EAEnB,IAAM+hC,IAAYjpC,IAAa4a,EAAO,CAAPA,CAAb5a,GAAyB4a,EAAO,CAAPA,CAA3C;EAAA,QACM4uB,IAAwB51C,KAAKuQ,IAALvQ,CAAUq1C,IAAY,CAAtBr1C,CAD9B;EAAA,QAGM61C,IAAUzpC,IAAa,aAAbA,GAA6B,aAH7C;EAAA,QAIM0pC,IAAUzpC,IAAa,aAAbA,GAA6B,aAJ7C;EAAA,QAKM0pC,IAAW3pC,KAAc,UAAU,SAAxBA,IAAqC,UAAU,SALhE;EAAA,QAMM4pC,IAAW3pC,KAAc,UAAU,SAAxBA,IAAqC,UAAU,SANhE,CAQA5G,KAAK4sB,QAAL5sB,GAAgB,2CACkBmwC,CADlB,+GAAA,GAKUA,CALV,6CAAA,GAMYC,CANZ,uCAAA,GAOYC,CAPZ,gCAAA,GASGC,EAAS,CAATA,CATH,QAAA,GASoBC,EAAS,CAATA,CATpB,UAAA,GASuCD,EAAS,CAATA,CATvC,QAAA,GAUZC,EAAS,CAATA,CAVY,wKAAhBvwC;KAqBJ;KLvCA;EAAA;EMQE,YAAA,CAAYusB,CAAZ,EAA+BikB,CAA/B,EAAoDjoB,CAApD;EAPAvoB,sBAAAA,IAAiB,QAAjBA,EAQEA,KAAK6N,WAAL7N,IAAoBusB,GAAWhE,EARjCvoB,EAUEA,KAAK4sB,QAAL5sB,GAAgB,gOAUUwwC,IAAc,CAVxB,wPAAA,IAoBMA,IAAc,CApBpB,wBAVlBxwC;EA2CF,UAREywC,WAAAA,mBAAAA,GAAA,UAAmBjoB,CAAnB;EAAA,gBAAA,CACE,OAAO,UAACoI,CAAD,EAAsBC,CAAtB;EACe,cAAhBlwB,EAAK+vC,OAAW,KAClB/vC,EAAK+vC,OAAL/vC,GAAeiwB,EAAMuQ,kBAANvQ,CAAyBC,CAAzBD,EAAuC,MAAvCA,CADG,GAGpBA,EAAMp4B,EAANo4B,CAASI,SAATJ,CAAmBjwB,EAAK+vC,OAAxB9f,EAAiCpI,CAAjCoI,CAHoB;OADtB;KADF6f,GAQF;KN5CA;EAAA;EOoBA,SAbE,UACIE,CADJ,EACwB74C,CADxB,EACuC2wB,CADvC,EACwDC,CADxD;EAPA1oB,sBAAAA,IAAiB,UAAjBA,EASEA,KAAK6N,WAAL7N,IAAoB2wC,GAAY74C,EATlCkI,EAWEA,KAAK4sB,QAAL5sB,GAAgB,qJAIU0oB,CAJV,cAAA,GAI8BD,CAJ9B,yEAXlBzoB;KAoBF;KPpBA,yBQH+BzB,GAAca;EAC3C,UAAQ,KAAK,KAAK,KAAK,KAAK,KAAK,KAAKvB,MAAM,GAAGuB,GAAM0C,IAAI,UAAA6V,CAAA;EAAK,WAAGpZ,OAAAA,GAAQoZ,CAAX;MAA9D;EAGF,qBAAA,CAA4BpZ,CAA5B,EAA0Ca,CAA1C;EACE,SAAa,MAATA,CAAS,IACHb,EADG,GAGNqyC,eAAeryC,CAAfqyC,EAAqBxxC,CAArBwxC,CAHP;EAMF,2BAAA,CAAgCxxC,CAAhC,EAA8C2uB,CAA9C;EACE,MAAa,MAAT3uB,CAAJ,EACE,OAAO,IAAP,CAIF,KADA,IAAI2zB,IAAS,EAAb,EACSh4B,IAAI,CAAb,EAAgBA,IAAIqE,CAApB,EAA0BrE,GAA1B,EACEg4B,KAAUhF,EAAKhzB,CAALgzB,CAAVgF,EACIh4B,IAAIqE,IAAO,CAAXrE,KACFg4B,KAAU,GADRh4B,CADJg4B,CAKF,OAAOA,CAAP;EClBF;EAmCA,SA9BE,UACIllB,CADJ;EAJA7N,sBAAAA,IAAiB,IAAjBA,EAQEA,KAAK6N,WAAL7N,GAAmB6N,CARrB7N,CASE,IAAMZ,IAAOyO,EAAYzT,MAAzB;EAAA,QAEMy2C,IAAWC,YAAY,IAAZA,EAAkB1xC,CAAlB0xC,CAFjB;EAAA,QAGM7yC,IAAQs7B,kBAAkBn6B,CAAlBm6B,CAHd;EAAA,QAIMwX,IACFC,wBAAwB5xC,CAAxB4xC,EAA8BnjC,CAA9BmjC,EAA2CH,CAA3CG,CALJ;EAAA,QAMMC,IAAQC,SACV9xC,CADU8xC,EACJrjC,EAAYA,EAAYzT,MAAZyT,GAAqB,CAAjCA,CADIqjC,EAEVrjC,EAAYA,EAAYzT,MAAZyT,GAAqB,CAAjCA,CAFUqjC,EAE2BL,CAF3BK,CANd;EAAA,QASMp/B,IAASq/B,UAAUtjC,CAAVsjC,EAAuBN,CAAvBM,CATf,CAWAnxC,KAAK4sB,QAAL5sB,GAAgB,oCAEV/B,CAFU,4CAAA,GAIP8yC,CAJO,yEAAA,GAORE,CAPQ,kCAAA,GASOn/B,CATP,kCAAhB9R;KAcJ;KAnCA,6BAqC4BZ,GAAc2uB;EAGxC,OAFA,IAAMgF,MAAN,EAESzwB,IAAM,CAAf,EAAkBA,KAAO,CAAzB,EAA4BA,GAA5B,EA
CE,KAAK,IAAI8uC,IAAM,CAAf,EAAkBA,KAAO,CAAzB,EAA4BA,GAA5B,EAAmC;EAGjC,SAFA,IAAIC,KAAmB,MAAR/uC,CAAQ,GAAI,GAAJ,GAAU,iBAAkB,MAAR8uC,CAAQ,GAAI,GAAJ,GAAU,MAA7D,EAESz5B,IAAI,CAAb,EAAgBA,IAAIvY,CAApB,EAA0BuY,GAA1B,EACE05B,IAAWtjB,EAAKA,EAAK3zB,MAAL2zB,GAAc,CAAdA,GAAkBpW,CAAvBoW,OAAAA,GAA+BsjB,CAA1CA,CAGFte,EAAO/2B,IAAP+2B,CAAYse,CAAZte;EAGJ,UAAOA,CAAP;EAGF,iCAAA,CACI3zB,CADJ,EACkBnD,CADlB,EACmC8xB,CADnC;EAEE,MAAa,MAAT3uB,CAAJ,EACE,OAAO,UAAQnD,EAAM,CAANA,CAAf,CAIF,KADA,IAAIq1C,IAAO,EAAX,EACSv2C,IAAIqE,IAAO,CAApB,EAAuBrE,IAAIqE,CAA3B,EAAiCrE,GAAjC,EACEu2C,KAAWvjB,EAAKhzB,CAALgzB,UAAAA,GAAc9xB,EAAMlB,CAANkB,CAAzBq1C,EACIv2C,IAAIqE,IAAO,CAAXrE,KACFu2C,KAAQ,IADNv2C,CADJu2C,CAMF,OAAOA,CAAP;EAGF,kBAAA,CACIlyC,CADJ,EACkBgnC,CADlB,EACgChhC,CADhC,EAC8C2oB,CAD9C;EAEE,MAAa,MAAT3uB,CAAJ,EACE,OAAO,EAAP,CAGF,IAAMmyC,IAAYxjB,EAAKlwB,KAALkwB,EAAY,CAAZA,CAAlB,CAEA,OAAO,mBACKwjB,EAAU,CAAVA,CADL,oBAAA,GAEKA,EAAU,CAAVA,CAFL,8EAAA,GAMiBnL,CANjB,gCAAA,GAOiBhhC,CAPjB,UAAP;EAWF,mBAAA,CAAmBnJ,CAAnB,EAAoC8xB,CAApC;EACE,MAAM3uB,IAAOnD,EAAM7B,MAAnB;EAAA,MACMmgC,IAAeiX,mBAAmBpyC,CAAnBoyC,EAAyBzjB,CAAzByjB,CADrB,CAEA,OAAa,MAATpyC,CAAS,GACJ,sCACanD,EAAM,CAANA,CADb,4CADI,GAMN,UAAQs+B,EAAa,CAAbA,CAAR,qCAAA,GACqBA,EAAa,CAAbA,CADrB,qCAAA,GAEqBA,EAAa,CAAbA,CAFrB,8CAAA,GAG8BA,EAAa,CAAbA,CAH9B,MANP;EC/FF;EAiDA,SA5CE,UACI7O,CADJ,EACsBlkB,CADtB,EAEIC,CAFJ;EAJAzH,sBAAAA,IAAiB,IAAjBA,EAOEA,KAAK6N,WAAL7N,GAAmBwH,EAAS1F,GAAT0F,CACf,UAACiqC,CAAD,EAAI12C,CAAJ;EAAU,aAAA02C,EAAE,CAAFA,IAAuB/lB,EAAO3wB,CAAP2wB,CAAvB+lB,GAAmCA,EAAE,CAAFA,CAAnC;OADKjqC,CAPrBxH,CASE,IAAMZ,IAAOssB,EAAOtxB,MAApB;EAAA,QACM0f,IAAOyf,kBAAkBn6B,CAAlBm6B,CADb;EAAA,QAGMp6B,IAAQqI,EAAS1F,GAAT0F,CAAa,UAAAiqC,CAAA;EAAK,aAAAA,EAAE,CAAFA,CAAA;OAAlBjqC,EAAwBxF,IAAxBwF,CAA6B,GAA7BA,CAHd;EAAA,QAIMjE,IAAMiE,EAAS1F,GAAT0F,CAAa,UAACiqC,CAAD,EAAI12C,CAAJ;EAAU,aAAA02C,EAAE,CAAFA,IAAO/lB,EAAO3wB,CAAP2wB,CAAP;OAAvBlkB,EAAyCxF,IAAzCwF,CAA8C,GAA9CA,CAJZ;EAAA,QAKMkqC,KACA,aAAa,aAAa,aAAY,aAAa7zC,MAAM,GAAGuB,EANlE,CAwBAY,KAAK4sB,QAAL5sB,GAhBa,MAATZ,CAAS,GAgBG,aACZ0a,CADY,cAAA,GACIA,CADJ,MAAA,GACY3a,CADZ,eAAA,GAEZ2a,CAFY,YAAA,GAEEA,CAFF,MAAA,GAEUvW,CAFV,wCAAA,GAKVuW,CALU,0IAAA,GAOQrS,CAPR,sCAAA,GASRqS,CATQ,uDAAA,GAUO43B,CAVP,kCAhBH,GACK,2BACAvyC,CADA,0BAAA,GAEFoE,CAFE,oJAAA,GAOQkE,CAPR,wGAelBzH;KAeJ;KAjDA;EAAA;ECiMA,SA5LE,UACI+nB,CADJ,EAC0B4pB,CAD1B,EACiDC,CADjD;EAEE,QANF5xC,kBAAAA,IAAiB,IAAjBA,EAMmB,UAAb2xC,CAAa,IAASC,CAA1B,EACE,MAAM,IAAIr4C,KAAJ,CAAU,4CAAV,CAAN,CAGF,IAAMwzB,IAAchF,EAASgF,WAA7B;EAAA,QACMC,IAAejF,EAASiF,YAD9B;EAAA,QAEMC,IAAclF,EAASkF,WAF7B;EAAA,QAGMC,IAAiBnF,EAASmF,cAHhC;EAAA,QAIMC,IAAgBpF,EAASoF,aAJ/B;EAAA,QAKMC,IAAwBrF,EAASqF,qBALvC;EAAA,QAMMC,IAAuBtF,EAASsF,oBANtC;EAAA,QAQMC,IAASvF,EAASwF,OAATxF,CAAiByF,GARhC;EAAA,QASMC,IAAU1F,EAASwF,OAATxF,CAAiB2F,IATjC,CAUA1tB,KAAK6N,WAAL7N,GAAmB+nB,EAASvG,QAA5BxhB,CAEA,IAAM6xC,IAAyB,UAAbF,CAAlB;EAAA,QAEIG,IAAsB,KAF1B,CAOA,IAJKD,MACHC,IAAsB,YADnBD,GAIDD,CAAJ,EAGE5xC,KAAK4sB,QAAL5sB,GAAgB,2CACgBgtB,CADhB,OAAA,GACiCC,CADjC,0CAAA,GAEaK,CAFb,OAAA,GAEwBG,CAFxB,siBAAA,GAoBYL,CApBZ,4BAAA,GAqBAF,CArBA,iFAAA,GAwBYnF,EAASqJ,QAxBrB,sFAAA,GA4Bc/D,CA5Bd,8BAAA,GA6BEF,CA7BF,qFAAA,GAgCcpF,EAASsJ,OAhCvB,ifAAA,GA6CkBhE,CA7ClB,wHAAhBrtB,CAHF,KAAA;EA0DA,UAEI88B,IAAiB6U,OAAAA,GAAYA,CAAZA,MAAAA,GAAwBA,CAAxBA,uEAFrB,CAIiB,UAAbA,CAAa,KACf7U,IAAc,kBADC,EAIjB,IAAMiV,IAAuD,IAA9Bx3C,KAAKkC,KAALlC,CAAWwyB,IAAc,CAAzBxyB,CAA/B;EAAA,UACMy3C,IAA2BjlB,IAAc,CAD/C;EAAA,UAGMklB,IAAgB,iBACdJ,CADc,gIAHtB,CAWA7xC,KAAK4sB,QAAL5sB,GAAgB,yCACgBgtB,CADhB,OAAA,GACiCC,CADjC,wCAAA,GAEaK,CAFb,OAAA,GAEwBG,CAFxB,iDAAA,GAGsBqkB,CAHtB,+KAAA,GASU/pB,EAASsJ,OATnB,4fAAA,GA2BcygB,CA3Bd,8FAAA,GA
+BY1kB,CA/BZ,0BAAA,GAgCAF,CAhCA,6EAAA,GAmCYnF,EAASqJ,QAnCrB,gFAAA,GAuCc2gB,CAvCd,wDAAA,GAwCmB5kB,CAxCnB,6HAAA,GA4CqBA,CA5CrB,uDAAA,GA6CyBA,CA7CzB,uDAAA,GA8CyBA,CA9CzB,yCAAA,GAiDN8kB,CAjDM,oDAAA,GAoDYF,CApDZ,sBAAA,IAqDyB,MAA7BC,CArDI,gOAAA,GA6DNC,CA7DM,4BAAA,IA8DgC,MAA7BD,CA9DH,8HAAA,GAiEqB7kB,CAjErB,iHAAA,GAsEN8kB,CAtEM,4BAAA,IAuEgC,MAA7BD,CAvEH,8HAAA,GA0EqB7kB,CA1ErB,uDAAA,GA2EyBA,CA3EzB,6EAAA,GA+EN8kB,CA/EM,iDAAA,GAkFAnV,CAlFA,sBAAhB98B;;KAsFJ;KDjMA;EAAA;EEoJA,SA/IE,UACIosB,CADJ,EAEI8lB,CAFJ;EAJAlyC,sBAAAA,IAAiB,IAAjBA,CAOE,IAAMssB,IAAaF,EAAWE,UAA9B;EAAA,QACMC,IAAYH,EAAWG,SAD7B;EAAA,QAEMlK,IAAS+J,EAAW/J,MAF1B;EAAA,QAGMmK,IAAUjyB,KAAKuQ,IAALvQ,CAAU8nB,IAASiK,CAAnB/xB,CAHhB,CAIAyF,KAAK6N,WAAL7N,IAAoBusB,GAAWC,EAA/BxsB,CAEA,IAAI8xC,IAAsB,KAA1B;EAAA,QACIK,IAAY,EADhB,CAGmB,WAAfD,CAAe,GACjBJ,IAAsB,KADL,GAEO,UAAfI,CAAe,IACxBJ,IAAsB,WAAtBA,EACAK,IAAY,KAFY,IAGA,UAAfD,CAAe,KACxBJ,IAAsB,YAAtBA,EACAK,IAAY,KAFY,CALP,CAUnB,IAAIrV,IAAiBoV,OAAAA,GAAcA,CAAdA,MAAAA,GAA4BA,CAA5BA,uEAArB,CAGmB,UAAfA,CAAe,GACjBpV,IAAc,UADG,GAEO,WAAfoV,CAAe,GACxBpV,IAAc,WADU,GAEA,UAAfoV,CAAe,GACxBpV,IAAc,UADU,GAEA,UAAfoV,CAAe,KACxBpV,IAAc,UADU,CANP,CAUnB,IAAMsV,IAAqD,IAA7B73C,KAAKkC,KAALlC,CAAW+xB,IAAa,CAAxB/xB,CAA9B;EAAA,QACM83C,IAA0B/lB,IAAa,CAD7C;EAAA,QAGI2lB,IAAgB,kBACG,UAAfC,CADY,oEAAA,IAGU,WAAfA,CAHK,qKAAA,GAOAC,CAPA,0CAHpB;EAAA,QAcIG,IAAU,MAdd,CAgBmB,UAAfJ,CAAe,IACjBJ,IAAsB,KAAtBA,EACAG,IAAgB,oMADhBH,EAMAQ,IAAU,OAPO,IAQO,UAAfJ,CAAe,KACxBJ,IAAsB,KAAtBA,EACAG,IAAgB,oMADhBH,EAMAQ,IAAU,OAPc,CARP,CAkBnB,IAAIC,IAAmB,EAAvB,CACIlwB,IAASiK,CAATjK,GAAsB,CAAtBA,KACFkwB,IAAmB,yCACWlwB,CADX,kEADjBA,GAOJriB,KAAK4sB,QAAL5sB,GAAgB,+CACsB8xC,CADtB,mHAAA,GAKVS,CALU,uNAAA,GAacjmB,CAbd,0CAAA,GAecwlB,CAfd,oKAAA,GAqBUM,CArBV,iEAAA,GAuBRE,CAvBQ,eAAA,GAuBYA,CAvBZ,iMAAA,GA8BRL,CA9BQ,mDAAA,GAiCaG,CAjCb,oBAAA,IAkCsB,MAA5BC,CAlCM,qBAAA,GAmCRC,CAnCQ,eAAA,GAmCYA,CAnCZ,4KAAA,GA0CRL,CA1CQ,0BAAA,IA2C6B,MAA5BI,CA3CD,qBAAA,GA4CRC,CA5CQ,eAAA,GA4CYA,CA5CZ,mLAAA,GAmDRL,CAnDQ,0BAAA,IAoD6B,MAA5BI,CApDD,qBAAA,GAqDRC,CArDQ,eAAA,GAqDYA,CArDZ,0LAAA,GA4DRL,CA5DQ,oCAAA,GA8DAnV,CA9DA,sBAPZza;KAyER;KFpJA;EAAA;EGuDA,SAhDE,UAAYxU,CAAZ,EAAmDsS,CAAnD;EALAngB,sBAAAA,IAAiB,IAAjBA,EACAA,uBAAAA,IAAqB,CADrBA,EAQEA,KAAK6N,WAAL7N,GAAmB6N,CARrB7N,CAWE,KADA,IAAIwyC,IAAW,EAAf,EACSz3C,IAAI,CAAb,EAAgBA,IAAI,CAApB,EAAuBA,GAAvB,EAA4B;EAC1B,UAAI03C,IAAS,cAAb,CACI13C,IAAI,CAAJA,IAAU,CAAVA,KACF03C,KAAU,gBADR13C,GAGAA,IAAI,CAAJA,KACF03C,KAAU,gBADR13C,CAHAA,EAOJy3C,KAAY,eACRC,CADQ,eAAA,IAER13C,IAAI,CAAJA,GAAQ,yCAARA,GAAoD,EAF5C,kOAAA,GAQCA,CARD,oGAAA,IAURA,IAAI,CAAJA,GAAQ,GAARA,GAAc,EAVN,cAPRA;EAqBNiF,UAAK4sB,QAAL5sB,GAAgB,aACZ0yC,uBAAuBvyB,CAAvBuyB,CADY,aAAA,GAEZC,aAAa9kC,CAAb8kC,CAFY,oJAAA,GAUC9kC,EAAY,CAAZA,CAVD,2BAAA,GAWCA,EAAY,CAAZA,CAXD,kBAAA,GAaV2kC,CAbU,kDAAhBxyC;KAmBJ;KHvDA,uBGyDsB/D;EAKpB,SAAO,gEAJsB22C,QACxB,YAAY,YAAY,WADAA,EAEzB5f,eAAoB/2B,CAApB+2B,EAA2BlxB,GAA3BkxB,CAA+B,UAAArb,CAAA;EAAK,WAAAA,EAAE1W,QAAF0W,EAAA;KAApCqb,EAAkD7rB,MAAlD6rB,EAA0D,KAA1DA,CAFyB4f,CAItB,kBAAP;EAOF,gCAAA,CAAgC32C,CAAhC;EAIE,SAAO,sEAFHk7B,oCAAgD,KAAK,KAAK,IAA1DA,EAAgEl7B,CAAhEk7B,CAEG,8CAAP;ECzEF;EAsHA,SAjHE,UAAY1kB,CAAZ,EAA0B/X,CAA1B,EAAuCwS,CAAvC;EAJAlN,sBAAAA,IAAiB,KAAjBA,EACAA,gBAAAA,KADAA,EAKEA,KAAK6N,WAAL7N,GAAmBtF,EAAEuB,KALvB+D,CAMQ,IAAAmZ,WAAA;EAAA,QAAG05B,QAAH;EAAA,QAAYC,QAAZ;EAAA,QACAvgB,WADA;EAAA,QACGwgB,QADH;EAAA,QACYC,QADZ;EAAA,QAOAC,KACH/lC,KAAgB6lC,IAAU,CAA1B7lC,GAA+B2lC,IAAU,CAAzC3lC,GAA6C2lC,GAC7C3lC,KAAgB8lC,IAAS,CAAzB9lC,GAA8B4lC,IAAS,CAAvC5lC,GAA2C4lC,EATxC;EAAA,QAYAI,KACHhmC,KAAgB6lC,IAAU,CAA1B7lC,GAA+B6lC,IAAU,CAAzC7lC,GAA6C6lC,GAC7C7lC,KAAgB8lC,IAAS,CAAzB9lC,GAA8B8lC,IAAS,CAAvC
9lC,GAA2C8lC,EAdxC;EAAA,QAiBAvgB,IAAcwgB,EAAe,CAAfA,IAAoBC,EAAe,CAAfA,CAjBlC;EAAA,QAkBArgB,IAAaogB,EAAe,CAAfA,IAAoBC,EAAe,CAAfA,CAlBjC;EAAA,QAoBAC,IAAiB,IAAI1gB,CApBrB;EAAA,QAqBA2gB,IAAgB,IAAIvgB,CArBpB;EAAA,QAyBAwgB,IAAyC,IAA5B94C,KAAKuQ,IAALvQ,CAAU44C,CAAV54C,CAA4B,GAAK,CAzB9C;EAAA,QA0BA+4C,IAAuC,IAA3B/4C,KAAKuQ,IAALvQ,CAAU64C,CAAV74C,CAA2B,GAAK,CA1B5C,CA4BNyF,KAAK4sB,QAAL5sB,GAAgB,qQAUsByyB,CAVtB,gDAAA,GAWqBI,CAXrB,sDAAA,GAayBsgB,CAbzB,mDAAA,GAcwBC,CAdxB,6CAAA,GAgBgBC,CAhBhB,0CAAA,GAiBeC,CAjBf,gjBAAA,GA+BcP,CA/Bd,6QAAA,GAuCgBC,CAvChB,2MAAA,IA6CkCH,IAAU,CA7C5C,+QAAA,IAmDiCC,IAAS,CAnD1C,k9BAAhB9yC;KAmFJ;KAtHA;EAAA;ECuDA,SAnDE,UACImgB,CADJ,EACkD8H,CADlD,EAEIC,CAFJ,EAEsBhb,CAFtB;EAJAlN,sBAAAA,IAAiB,IAAjBA,EACAA,gBAAAA,KADAA,CAOS,IAAA6rB,QAAA;EAAA,QAAO0nB,QAAP;EAAA,QAAkBC,QAAlB;EAAA,QAA4B17C,QAA5B,CACPkI,KAAK6N,WAAL7N,IAAoB6rB,GAAO5D,GAAWC,GAAUpwB,EAAhDkI,CAEA,IAAMyzC,KACHvmC,KAAgB+a,IAAY,CAA5B/a,GAAiCqmC,IAAY,CAA7CrmC,GAAiDqmC,GACjDrmC,KAAgBgb,IAAW,CAA3Bhb,GAAgCsmC,IAAW,CAA3CtmC,GAA+CsmC,EAFlD;EAAA,QAKME,KACHxmC,KAAgB+a,IAAY,CAA5B/a,GAAiC+a,IAAY,CAA7C/a,GAAiD+a,GACjD/a,KAAgBgb,IAAW,CAA3Bhb,GAAgCgb,IAAW,CAA3Chb,GAA+Cgb,EAPlD,CAUAloB,KAAK4sB,QAAL5sB,GAAgB,2EAERyzC,EAAgB,CAAhBA,IAAqBC,EAAiB,CAAjBA,CAFb,kBAAA,GAGRD,EAAgB,CAAhBA,IAAqBC,EAAiB,CAAjBA,CAHb,8CAAA,GAImBH,CAJnB,SAAA,GAImCC,CAJnC,ikCAAhBxzC;KAmCJ;KDvDA;EAAA;EE2GA,SAtGE,UAAYyS,CAAZ,EAA0B/X,CAA1B,EAAuCwS,CAAvC;EAJAlN,sBAAAA,IAAiB,KAAjBA,EACAA,gBAAAA,KADAA,EAKEA,KAAK6N,WAAL7N,GAAmBtF,EAAEuB,KALvB+D,CAMQ,IAAAmZ,WAAA;EAAA,QAAG05B,QAAH;EAAA,QAAYC,QAAZ;EAAA,QACAvgB,WADA;EAAA,QACGwgB,QADH;EAAA,QACYC,QADZ;EAAA,QAOAC,KACH/lC,KAAgB6lC,IAAU,CAA1B7lC,GAA+B2lC,IAAU,CAAzC3lC,GAA6C2lC,GAC7C3lC,KAAgB8lC,IAAS,CAAzB9lC,GAA8B4lC,IAAS,CAAvC5lC,GAA2C4lC,EATxC;EAAA,QAYAI,KACHhmC,KAAgB6lC,IAAU,CAA1B7lC,GAA+B6lC,IAAU,CAAzC7lC,GAA6C6lC,GAC7C7lC,KAAgB8lC,IAAS,CAAzB9lC,GAA8B8lC,IAAS,CAAvC9lC,GAA2C8lC,EAdxC;EAAA,QAiBAvgB,IAAcwgB,EAAe,CAAfA,IAAoBC,EAAe,CAAfA,CAjBlC;EAAA,QAkBArgB,IAAaogB,EAAe,CAAfA,IAAoBC,EAAe,CAAfA,CAlBjC;EAAA,QAoBAC,IAAiB,IAAI1gB,CApBrB;EAAA,QAqBA2gB,IAAgB,IAAIvgB,CArBpB;EAAA,QAyBAwgB,IAAyC,IAA5B94C,KAAKuQ,IAALvQ,CAAU44C,CAAV54C,CAA4B,GAAK,CAzB9C;EAAA,QA0BA+4C,IAAuC,IAA3B/4C,KAAKuQ,IAALvQ,CAAU64C,CAAV74C,CAA2B,GAAK,CA1B5C,CA4BNyF,KAAK4sB,QAAL5sB,GAAgB,qQAUsByyB,CAVtB,gDAAA,GAWqBI,CAXrB,sDAAA,GAayBsgB,CAbzB,mDAAA,GAcwBC,CAdxB,6CAAA,GAgBgBC,CAhBhB,0CAAA,GAiBeC,CAjBf,8jBAAA,GA+BcP,CA/Bd,6QAAA,GAuCgBC,CAvChB,2GAAA,GA4CEC,EAAe,CAAfA,CA5CF,8CAAA,GA6CkBC,EAAe,CAAfA,CA7ClB,qEAAA,GAgDID,EAAe,CAAfA,CAhDJ,gDAAA,GAiDoBC,EAAe,CAAfA,CAjDpB,mFAAA,GAoDQL,CApDR,8BAAA,GAqDF3lC,CArDE,kLAAA,GAyDQ4lC,CAzDR,8BAAA,GA0DF5lC,CA1DE,uVAAhBlN;KAwEJ;KF3GA;EAAA;EGgDA,SA5CE,UACImgB,CADJ,EACkD8H,CADlD,EAEIC,CAFJ,EAEsBhb,CAFtB;EAJAlN,sBAAAA,IAAiB,IAAjBA,EACAA,gBAAAA,KADAA,CAOS,IAAA6rB,QAAA;EAAA,QAAO0nB,QAAP;EAAA,QAAkBC,QAAlB;EAAA,QAA4B17C,QAA5B,CACPkI,KAAK6N,WAAL7N,IAAoB6rB,GAAO5D,GAAWC,GAAUpwB,EAAhDkI,CAEA,IAAMyzC,KACHvmC,KAAgB+a,IAAY,CAA5B/a,GAAiCqmC,IAAY,CAA7CrmC,GAAiDqmC,GACjDrmC,KAAgBgb,IAAW,CAA3Bhb,GAAgCsmC,IAAW,CAA3CtmC,GAA+CsmC,EAFlD;EAAA,QAKME,KACHxmC,KAAgB+a,IAAY,CAA5B/a,GAAiC+a,IAAY,CAA7C/a,GAAiD+a,GACjD/a,KAAgBgb,IAAW,CAA3Bhb,GAAgCgb,IAAW,CAA3Chb,GAA+Cgb,EAPlD;EAAA,QAWMyrB,IAAYzmC,IAAe,KAAfA,GAAuB,KAXzC,CAaAlN,KAAK4sB,QAAL5sB,GAAgB,2EAERyzC,EAAgB,CAAhBA,IAAqBC,EAAiB,CAAjBA,CAFb,kBAAA,GAGRD,EAAgB,CAAhBA,IAAqBC,EAAiB,CAAjBA,CAHb,8CAAA,GAImBH,CAJnB,SAAA,GAImCC,CAJnC,icAAA,GAiB0CG,CAjB1C,sIAAhB3zC;KAyBJ;KHhDA;EAAA;EIsCA,SAjCE,UAAY0rB,CAAZ,EAA8B5tB,CAA9B;EAJAkC,sBAAAA,IAAiB,IAAjBA,CAKE,IAAMZ,IAAOssB,EAAOtxB,MAApB,CACA,IAAIgF,IAAO,CAAX,EACE,MAAM,IAAI7F,KAAJ,CACF,oCAAkC6F,CAA
lC,iCADE,CAAN,CAKF,IAFAY,KAAK6N,WAAL7N,GAAmB0rB,CAAnB1rB,EAEa,MAATZ,CAAJ,EAAA;EASA,UAMMw0C,IAAWloB,EAAO5pB,GAAP4pB,CAAW,UAACmoB,CAAD,EAAI94C,CAAJ;EAAU,eANnB,UAACA,CAAD;EACjB,kBAAyB,MAArB+C,EAAKwjB,OAALxjB,CAAa/C,CAAb+C,KAAwC,MAAd4tB,EAAO3wB,CAAP2wB,IAClBA,EAAO3wB,CAAP2wB,gBAAAA,GAAsB3wB,CAAtB2wB,aAEL,YAAU3wB,CAAV,MAHP;EAKoC+4C,SANnB,CAM8B/4C,CAN9B,CAMmB;SAArB2wB,EAAoC1pB,IAApC0pB,CAAyC,GAAzCA,CANjB;EAAA,UAOM5R,IAAOyf,kBAAkBn6B,CAAlBm6B,CAPb,CASAv5B,KAAK4sB,QAAL5sB,GAAgB,oCAEV8Z,CAFU,0DAAA,GAGK85B,CAHL,uBAAhB5zC;OAlBA,MACEA,KAAK4sB,QAAL5sB,GAAgB,iGAGK0rB,EAAO,CAAPA,CAHL,uCAAhB1rB;KAwBN;KJtCA;EAAA;EKmDA,SA9CE,UACI+zC,CADJ,EACwBvxB,CADxB,EAC0CwxB,CAD1C,EAEIC,CAFJ,EAEyB50C,CAFzB,EAE4CpD,CAF5C,EAGIi4C,CAHJ;uBAGIA,SAPJl0C,kBAAAA,IAAiB,WAAW,WAAW,iBAQrCA,KAAK6N,WAAL7N,GAAmB/D,EACnB,IAAM2+B,IAAcrB,kBAAkBl6B,EAAQjF,MAA1Bm/B,CAApB;EAAA,QACMt7B,IAAQs7B,kBAAkBt9B,EAAM7B,MAAxBm/B,CADd;EAAA,QAEI4a,IAAgB,EAFpB,CAGoB,MAAhBH,CAAgB,GAClBG,IAAgB,GADE,GAEO,MAAhBH,CAAgB,KACzBG,IAAgB,MADS,CAFP,CAKpB,IAAMC,IAAiB,gBAAcD,CAAd,MAAvB;EAAA,QAEIE,IAAgB,EAFpB,CAGoB,MAAhBJ,CAAgB,GAClBI,IAAgB,GADE,GAEO,MAAhBJ,CAAgB,KACzBI,IAAgB,cADS,CAFP,CAKpB,IAAMC,IAAiB,gBAAcD,CAAd,MAAvB;EAAA,QAEMxZ,IAAerY,IAAW,CAAXA,GAAe,YAAfA,GAA8B,SAFnD,CAGAxiB,KAAK4sB,QAAL5sB,GAAgB,eACV46B,CADU,gBAAA,GACeA,CADf,MAAA,GAC8Bv7B,CAD9B,4CAAA,GAIRpB,CAJQ,4HAAA,GAOY81C,CAPZ,oFAAA,GAScvxB,CATd,+CAAA,GAUc4xB,CAVd,iDAAA,GAWsBvZ,CAXtB,4FAAA,GAcGyZ,CAdH,mJAAhBt0C;KAsBJ;KLnDA;EAAA;EM2IA,SAtIE,UAAYu0C,CAAZ,EAAkCC,CAAlC;EAJAx0C,sBAAAA,IAAiB,KAAK,aAAtBA,CAKE,IAAMssB,IAAaioB,EAAUjoB,UAA7B;EAAA,QACMC,IAAYgoB,EAAUhoB,SAD5B;EAAA,QAEMlK,IAASkyB,EAAUlyB,MAFzB;EAAA,QAGMlT,IAAcolC,EAAUplC,WAH9B;EAAA,QAIMqd,IAAUrd,IAAc5U,KAAKuQ,IAALvQ,CAAU8nB,IAASiK,CAAnB/xB,CAJ9B,CAKAyF,KAAK6N,WAAL7N,IAAoBusB,GAAWC,EAA/BxsB,CAEA,IAGMoyC,IAAqD,IAA7B73C,KAAKkC,KAALlC,CAAW+xB,IAAa,CAAxB/xB,CAH9B;EAAA,QAIM83C,IAA0B/lB,IAAa,CAJ7C;EAAA,QAMM2lB,IAAgB,kDANtB;EAAA,QAUIwC,IAAwB,EAV5B,CAWIpyB,IAASiK,CAATjK,GAAsB,CAAtBA,KACFoyB,IAAwB,yCACMpyB,CADN,kEADtBA,EAQJ,IAAIqyB,IAA4B,EAAhC,CACIryB,IAASiK,CAATjK,GAAsB,CAAtBA,KACFqyB,IAA4B,yCACEryB,CADF,mDAD1BA,GAQJriB,KAAK4sB,QAAL5sB,GAAgB,6GAIVy0C,CAJU,4GAAA,GASVC,CATU,4PAAA,GAkBRvlC,CAlBQ,gBAAA,GAkBiBmd,CAlBjB,gEAAA,GAmBoCnd,CAnBpC,0EAAA,GAuBUijC,CAvBV,ylBAAA,GAuCRH,CAvCQ,mDAAA,GA0CaG,CA1Cb,oBAAA,IA2CsB,MAA5BC,CA3CM,6aAAA,GA4DRJ,CA5DQ,0BAAA,IA6D6B,MAA5BI,CA7DD,mbAAA,GA4ERJ,CA5EQ,0BAAA,IA6E6B,MAA5BI,CA7ED,8eAAA,GA4FRJ,CA5FQ,6DARZ5vB;KA0GR;KN3IA;EAAA;EO6CA,SAxCE,UAAYsyB,CAAZ,EAA2B14C,CAA3B,EAA4CmD,CAA5C;EAGE,QAAIw1C,CAAJ,EACIC,CADJ,CAEA,IATF70C,kBAAAA,IAAiB,KAAK,KAAK,IAA3BA,EAKEA,KAAK6N,WAAL7N,GAAmB/D,CALrB+D,EASMZ,IAAO,CAAX,EACE,MAAM7F,MAAM,oBAAkB6F,CAAlB,0BAAN7F,CAAN,CAGF,IAAa,MAAT6F,CAAJ,EACEy1C,IAAW,OAAXA,EACAD,IAAU,OADVC,CADF,KAGO;EAIL,WAHA,IAAMpa,KAAiB,WAAW,WAAW,WAAW,UAAxD,EACMqa,MADN,EAEMC,MAFN,EAGSh6C,IAAI,CAAb,EAAgBA,IAAIkB,EAAM7B,MAA1B,EAAkCW,GAAlC,EACEg6C,EAAY/4C,IAAZ+4C,CAAiB,KAAGta,EAAc1/B,CAAd0/B,CAApBsa,GACIh6C,IAAI45C,CAAJ55C,IACF+5C,EAAW94C,IAAX84C,CAAgB,KAAGra,EAAc1/B,CAAd0/B,CAAnBqa,CAFFC,CAKFH,IAAUE,EAAW9yC,IAAX8yC,EAAVF,EACAC,IAAWE,EAAY/yC,IAAZ+yC,EADXH;EAIF,SAAM32C,IAAQs7B,kBAAkBn6B,CAAlBm6B,CAAd,CAEAv5B,KAAK4sB,QAAL5sB,GAAgB,oCAEV/B,CAFU,4DAAA,GAGQ22C,CAHR,8DAAA,GAKOC,CALP,qDAAA,GAOOA,CAPP,kCAAhB70C;KAYJ;KP7CA;EAAA;EQUE,YAAA,CAAYg1C,CAAZ;EARAh1C,sBAAAA,IAAiB,SAAjBA,EASEA,KAAK6N,WAAL7N,GAAmBg1C,CATrBh1C,EAUEA,KAAKZ,IAALY,GAAYg1C,EAAS56C,MAVvB4F,CAYE,IAAM/B,IAAQs7B,kBAAkBv5B,KAAKZ,IAAvBm6B,CAAd;EAAA,QACMgB,IAAed,YAAUz5B,KAAKZ,IAAfq6B,CADrB,CAGAz5B,KAAK4sB,QAAL5sB,GAAgB,qBACJ/B,CADI,6CAAA,GAIVA,CAJU,0EAAA,GAKUs8B,CALV,uBAAhBv6B;EAuCJ,UA7BEi1C,
WAAAA,mBAAAA,GAAA,UAAmB91C,CAAnB;EAAA,gBAAA,CACE,IAAIA,EAAM/E,MAAN+E,KAAiBa,KAAKZ,IAA1B,EACE,MAAM7F,MACF,eAAayG,KAAKZ,IAAlB,sDAAA,GACoBD,EAAM/E,MAD1B,MADEb,CAAN,CAIF,OAAO,UAACq3B,CAAD,EAAsBC,CAAtB;EACL,UAAqB,QAAjBlwB,EAAKmwB,QAAY,KACnBnwB,EAAKmwB,QAALnwB,GAAgBiwB,EAAMG,yBAANH,CAAgCC,CAAhCD,EAA8C,OAA9CA,CAAhBjwB,EACqB,QAAjBA,EAAKmwB,QAFU,CAArB,EAQA,IAAkB,MAAdnwB,EAAKvB,IAAT,EACEwxB,EAAMp4B,EAANo4B,CAAS0Q,SAAT1Q,CAAmBjwB,EAAKmwB,QAAxBF,EAAkCzxB,EAAM,CAANA,CAAlCyxB,EADF,KAEO,IAAkB,MAAdjwB,EAAKvB,IAAT,EACLwxB,EAAMp4B,EAANo4B,CAASskB,SAATtkB,CAAmBjwB,EAAKmwB,QAAxBF,EAAkCzxB,EAAM,CAANA,CAAlCyxB,EAA4CzxB,EAAM,CAANA,CAA5CyxB,EADK,KAEA,IAAkB,MAAdjwB,EAAKvB,IAAT,EACLwxB,EAAMp4B,EAANo4B,CAASukB,SAATvkB,CAAmBjwB,EAAKmwB,QAAxBF,EAAkCzxB,EAAM,CAANA,CAAlCyxB,EAA4CzxB,EAAM,CAANA,CAA5CyxB,EAAsDzxB,EAAM,CAANA,CAAtDyxB,EADK,KAEA;EAAA,YAAkB,MAAdjwB,EAAKvB,IAAT,EAIL,MAAM7F,MAAM,sBAAoBoH,EAAKvB,IAAzB,0BAAN7F,CAAN,CAHAq3B,EAAMp4B,EAANo4B,CAASwkB,SAATxkB,CACIjwB,EAAKmwB,QADTF,EACmBzxB,EAAM,CAANA,CADnByxB,EAC6BzxB,EAAM,CAANA,CAD7ByxB,EACuCzxB,EAAM,CAANA,CADvCyxB,EACiDzxB,EAAM,CAANA,CADjDyxB;;OAhBJ;KANFqkB,GA6BF;KRxDA,sBQ0DmB71C;EACjB,MAAa,MAATA,CAAJ,EACE,OAAO,WAAP,CACK,IAAa,MAATA,CAAJ,EACL,OAAO,0BAAP,CACK,IAAa,MAATA,CAAJ,EACL,OAAO,uCAAP,CACK,IAAa,MAATA,CAAJ,EACL,OAAO,oDAAP,CAEA,MAAM7F,MAAM,sBAAoB6F,CAApB,0BAAN7F,CAAN;ECpEJ;EA2CA,SAtCE,UACI2N,CADJ,EACqB7H,CADrB,EACwCnD,CADxC,EAEIqnB,CAFJ;EAJAvjB,sBAAAA,IAAiB,IAAjBA,CAOE,IAAM/D,IAAQC,EAAKoR,MAALpR,CAAY,UAAC8a,CAAD,EAAI1c,CAAJ;EAAc,cAA+B,MAA/BipB,EAAWjC,OAAXiC,CAAmBjpB,CAAnBipB,CAAA;OAA1BrnB,CAAd,CACA8D,KAAK6N,WAAL7N,GAAmB/D,CAAnB+D,CACA,IAAMZ,IAAOlD,EAAK9B,MAAlB;EAAA,QACMi7C,IAAa9b,kBAAkBr9B,EAAK9B,MAAvBm/B,CADnB;EAAA,QAEMt7B,IAAQs7B,kBAAkBt9B,EAAM7B,MAAxBm/B,CAFd;EAAA,QAII+b,IAAY,EAJhB,CAKA,IAAa,MAATl2C,CAAJ,EACEk2C,IAAY,0BAAZA,CADF,KAEO;EACL,UAAIC,IAAa,CAAjB,CACAD,IACIp5C,EAAK4F,GAAL5F,CAAS,UAAC23C,CAAD,EAAI94C,CAAJ;EACH,gBAA+B,MAA3BwoB,EAAWjC,OAAXiC,CAAmBxoB,CAAnBwoB,KACFgyB,KACwB,MAAjBt5C,EAAM7B,MAAW,GACpB,sBAAoBW,CAApB,eAAA,GAAkCA,CAAlC,MADoB,GAEpB,aAAUw6C,IAAa,CAAvB,kBAAA,GAAuCx6C,CAAvC,eAAA,GAAqDA,CAArD,UAEG,WAASA,CAAT,MANT;SADNmB,EAUK8F,IAVL9F,CAUU,GAVVA,CADJo5C;EAcFt1C,UAAK4sB,QAAL5sB,GAAgB,aACZq1C,CADY,cAAA,GACUA,CADV,MAAA,GACwBnuC,CADxB,eAAA,GAEZmuC,CAFY,gBAAA,GAEYA,CAFZ,MAAA,GAE0Bh2C,CAF1B,wCAAA,GAKVpB,CALU,0DAAA,GAMKq3C,CANL,uBAAhBt1C;KAUJ;KA3CA;EAAA;ECSE,YAAA,CAAoB4wB,CAApB;EAAoB5wB,cAAAA,GAAA4wB,CAAA5wB,EANZA,oBAAAA,GAAkB,CAMNA,EALZA,oBAAAA,GAAkB,CAKNA,EAJZA,iBAAAA,KAIYA,EAHZA,eAAAA,IAAa,CAGDA,EAFZA,iBAAAA,KAEYA;EAkHtB,UAhHEw1C,WAAAA,eAAAA,GAAA,UACIC,CADJ,EAC+BC,CAD/B,EAEIphB,CAFJ;EAGE,QAqBIqhB,CArBJ;EAAA,QAAMC,IAAkBC,kCAAkCH,CAAlCG,EAAyCvhB,CAAzCuhB,CAAxB;EAAA,QAEMC,IAAWC,uBAAuBN,CAAvBM,EAAgCH,CAAhCG,EAAiDzhB,CAAjDyhB,CAFjB,CAUA,IAPMD,KAAY91C,KAAKg2C,YAAjBF,KACJ91C,KAAKg2C,YAALh2C,CAAkB81C,CAAlB91C,MADI81C,GAGAA,KAAY91C,KAAKi2C,YAAjBH,KACJ91C,KAAKi2C,YAALj2C,CAAkB81C,CAAlB91C,MADI81C,CAHAA,EAOF91C,KAAKg2C,YAALh2C,CAAkB81C,CAAlB91C,EAA4B5F,MAA5B4F,GAAqC,CAAzC,EAA4C;EAC1CA,WAAKk2C,eAALl2C,IACAA,KAAKm2C,eAALn2C,EADAA,EAEAA,KAAKmB,GAALnB,EAFAA,CAGA,IAAMo2C,IAAap2C,KAAKg2C,YAALh2C,CAAkB81C,CAAlB91C,EAA4Bq2C,KAA5Br2C,EAAnB,CAEA,OADAA,KAAKi2C,YAALj2C,CAAkB81C,CAAlB91C,EAA4BhE,IAA5BgE,CAAiCo2C,CAAjCp2C,GACOo2C,CAAP;EAyBF,YAvBAp2C,KAAKm2C,eAALn2C,IACAA,KAAKmB,GAALnB,EADAA,EAII41C,MAAoBjb,oBAAoB2b,kBAAxCV,GACFD,IAAa31C,KAAK4wB,KAAL5wB,CAAWu2C,yBAAXv2C,CAAqCy1C,EAAQ,CAARA,CAArCz1C,EAAiDy1C,EAAQ,CAARA,CAAjDz1C,CADX41C,GAEOA,MAAoBjb,oBAAoB6b,kBAAxCZ,GACTD,IACI31C,KAAK4wB,KAAL5wB,CAAWy2C,gCAAXz2C,CAA4Cy1C,EAAQ,CAARA,CAA5Cz1C,EAAwDy1C,EAAQ,CAARA,CAAxDz1C,CAFK41C,GAG
AA,MAAoBjb,oBAAoB+b,gBAAxCd,GACTD,IACI31C,KAAK4wB,KAAL5wB,CAAW22C,0BAAX32C,CAAsCy1C,EAAQ,CAARA,CAAtCz1C,EAAkDy1C,EAAQ,CAARA,CAAlDz1C,CAFK41C,GAGAA,MAAoBjb,oBAAoBic,gBAAxChB,GACTD,IACI31C,KAAK4wB,KAAL5wB,CAAW62C,0BAAX72C,CAAsCy1C,EAAQ,CAARA,CAAtCz1C,EAAkDy1C,EAAQ,CAARA,CAAlDz1C,CAFK41C,GAKPA,MAAoBjb,oBAAoBmc,wBAAxClB,KACFD,IACI31C,KAAK4wB,KAAL5wB,CAAW+2C,gCAAX/2C,CAA4Cy1C,EAAQ,CAARA,CAA5Cz1C,EAAwDy1C,EAAQ,CAARA,CAAxDz1C,CAFF41C,CAjBJ51C,EAqBAA,KAAKi2C,YAALj2C,CAAkB81C,CAAlB91C,EAA4BhE,IAA5BgE,CAAiC21C,CAAjC31C,CArBAA,EAuBO21C,CAAP;KA5CFH,EA+CAA,WAAAA,eAAAA,GAAA,UACIn6B,CADJ,EAC2Bpf,CAD3B,EAEI+6C,CAFJ,EAEkC1iB,CAFlC;EAGE,QAAyB,QAArBt0B,KAAKg2C,YAAT,EAAA;EAIA,UAEMF,IAAWC,uBAAuB95C,CAAvB85C,EADbF,kCAAkCmB,CAAlCnB,EAAkDvhB,CAAlDuhB,CACaE,EAA+CzhB,CAA/CyhB,CAFjB,CAGMD,KAAY91C,KAAKg2C,YAAjBF,KACJ91C,KAAKg2C,YAALh2C,CAAkB81C,CAAlB91C,MADI81C,GAGN91C,KAAKg2C,YAALh2C,CAAkB81C,CAAlB91C,EAA4BhE,IAA5BgE,CAAiCqb,CAAjCrb,CAHM81C,EAIN91C,KAAKk2C,eAALl2C,EAJM81C,EAKN91C,KAAKm2C,eAALn2C,EALM81C,CAMN,IAAMmB,IAAUj3C,KAAKi2C,YAALj2C,CAAkB81C,CAAlB91C,CAAhB;EAAA,UACMk3C,IAAWD,EAAQ31B,OAAR21B,CAAgB57B,CAAhB47B,CADjB,CAEA,IAAIC,IAAW,CAAf,EACE,MAAM,IAAI39C,KAAJ,CACF,0EADE,CAAN,CAIF09C,EAAQE,MAARF,CAAeC,CAAfD,EAAyB,CAAzBA,GACAj3C,KAAKmB,GAALnB,EADAi3C;;KAtEFzB,EA0EQA,WAAAA,IAAAA,GAAR;EACE,QAAKx1C,KAAKo3C,UAAV,EAAA;EAGA,UAAMC,IAAQr3C,KAAKk2C,eAALl2C,GAAuBA,KAAKm2C,eAA1C,CACAj1C,QAAQC,GAARD,CACI,WADJA,EACoBlB,KAAKk2C,eAALl2C,QAAAA,GAA0BA,KAAKm2C,eADnDj1C,EAEI,MAAIm2C,CAAJ,MAFJn2C;;KA/EFs0C,EAoFAA,WAAAA,mBAAAA,GAAA;EACE,WAAOx1C,KAAKm2C,eAAZ;KArFFX,EAwFAA,WAAAA,mBAAAA,GAAA;EACE,WAAOx1C,KAAKk2C,eAAZ;KAzFFV,EA4FAA,WAAAA,QAAAA,GAAA;EAAA,gBAAA,CACE,IAAyB,QAArBx1C,KAAKg2C,YAAT,EAAA;EAIA,WAAK,IAAM7hB,CAAX,IAAuBn0B,KAAKg2C,YAA5B,EACEh2C,KAAKg2C,YAALh2C,CAAkBm0B,CAAlBn0B,EAA4BI,OAA5BJ,CAAoC,UAAA6uC,CAAA;EAClCluC,UAAKiwB,KAALjwB,CAAW22C,mBAAX32C,CAA+BkuC,CAA/BluC;SADFX,EAIF,KAAK,IAAMm0B,CAAX,IAAuBn0B,KAAKi2C,YAA5B,EACEj2C,KAAKi2C,YAALj2C,CAAkBm0B,CAAlBn0B,EAA4BI,OAA5BJ,CAAoC,UAAA6uC,CAAA;EAClCluC,UAAKiwB,KAALjwB,CAAW22C,mBAAX32C,CAA+BkuC,CAA/BluC;SADFX,EAIFA,KAAKg2C,YAALh2C,GAAoB,IAApBA,EACAA,KAAKi2C,YAALj2C,GAAoB,IADpBA,EAEAA,KAAKm2C,eAALn2C,GAAuB,CAFvBA,EAGAA,KAAKk2C,eAALl2C,GAAuB,CAHvBA;;KA3GFw1C,GAgHF;KD3HA,4CC8HIwB,GAA8B1iB;EAChC,MAAIA,CAAJ,EACE,OAAOnX,IAAIxG,GAAJwG,CAAQ,8BAARA,IACHwd,oBAAoB2b,kBADjBn5B,GAEHwd,oBAAoB6b,kBAFxB,CAGK,IACHQ,MAAmBtc,aAAa6c,QAAhCP,IACAA,MAAmBtc,aAAa8c,MAF7B,EAGL,OAAO7c,oBAAoBmc,wBAA3B,CACK,IAAIE,MAAmBtc,aAAa+c,MAApC,EACL,OAAO9c,oBAAoB+b,gBAA3B,CACK,IAAIM,MAAmBtc,aAAagd,MAApC,EACL,OAAOv6B,IAAIxG,GAAJwG,CAAQ,8BAARA,IACHwd,oBAAoB+b,gBADjBv5B,GAEHwd,oBAAoBic,gBAFxB,CAIF,MAAM,IAAIr9C,KAAJ,CAAU,kCAAgCy9C,CAA1C,CAAN;EAGF,gCAAA,CACIW,CADJ,EACoC/B,CADpC,EAEIthB,CAFJ;EAGE,SAAUqjB,EAAa,CAAbA,OAAAA,GAAmBA,EAAa,CAAbA,CAAnBA,MAAAA,GAAsC/B,CAAtC+B,MAAAA,GAAyDrjB,CAAnE;ECpJF;EAuBA,SAjBE,UAAY/S,CAAZ,EAA8Bhb,CAA9B;EALAvG,sBAAAA,IAAiB,IAAjBA,CAOE,KADA,IAAM6N,IAAwB,IAAIhS,KAAJ,CAAU0lB,EAAOnnB,MAAjB,CAA9B,EACSW,IAAI,CAAb,EAAgBA,IAAI8S,EAAYzT,MAAhC,EAAwCW,GAAxC,EACE8S,EAAY9S,CAAZ8S,IAAiB0T,EAAOxmB,CAAPwmB,IAAYhb,EAAKxL,CAALwL,CAA7BsH,CAEF7N,KAAK6N,WAAL7N,GAAmB6N,CAAnB7N,EACAA,KAAKZ,IAALY,GAAY6N,EAAYzT,MADxB4F,CAEA,IAAM/B,IAAQs7B,kBAAkBv5B,KAAKZ,IAAvBm6B,CAAd;EAAA,QACMgB,IAAeC,kBAAgBjZ,CAAhBiZ,CADrB,CAGAx6B,KAAK4sB,QAAL5sB,GAAgB,oCAEV/B,CAFU,yDAAA,GAGKs8B,CAHL,uBAAhBv6B;KAOJ;KAvBA,4BAyByBuhB;EACvB,MAAMniB,IAAOmiB,EAAOnnB,MAApB,CACA,IAAIgF,IAAO,CAAX,EACE,MAAM7F,MAAM,mBAAiB6F,CAAjB,0BAAN7F,CAAN,CAEF,IAAa,MAAT6F,CAAJ,EACE,OAAO,iBAAemiB,EAAO,CAAPA,CAAf,MAAP,CAMF,KAHA,IAAMkZ,KAAiB,WAAW,WAAW,WAAW,WAAW,UAAnE,EAEMF,MAFN,EAGSx/
B,IAAI,CAAb,EAAgBA,IAAIwmB,EAAOnnB,MAA3B,EAAmCW,GAAnC,EACEw/B,EAAav+B,IAAbu+B,CAAkB,UAAQE,EAAc1/B,CAAd0/B,CAAR,OAAA,GAA6BlZ,EAAOxmB,CAAPwmB,CAA7B,MAAlBgZ,EAEF,OAAOA,EAAav4B,IAAbu4B,EAAP;ECxCF;EAuBA,SAjBE,UAAYhZ,CAAZ,EAA8Bq2B,CAA9B;EALA53C,sBAAAA,IAAiB,IAAjBA,CAOE,KADA,IAAM6N,IAAwB,IAAIhS,KAAJ,CAAU0lB,EAAOnnB,MAAjB,CAA9B,EACSW,IAAI,CAAb,EAAgBA,IAAI8S,EAAYzT,MAAhC,EAAwCW,GAAxC,EACE8S,EAAY9S,CAAZ8S,IAAiB0T,EAAOq2B,EAAO78C,CAAP68C,CAAPr2B,CAAjB1T,CAEF7N,KAAK6N,WAAL7N,GAAmB6N,CAAnB7N,EACAA,KAAKZ,IAALY,GAAY6N,EAAYzT,MADxB4F,CAEA,IAAM/B,IAAQs7B,kBAAkBv5B,KAAKZ,IAAvBm6B,CAAd;EAAA,QACMse,IAAWC,kBAAkBF,CAAlBE,CADjB,CAGA93C,KAAK4sB,QAAL5sB,GAAgB,gCAEZ/B,CAFY,uDAAA,GAGG45C,CAHH,qBAAhB73C;KAOJ;KAvBA,4BAyB2B43C;EACzB,MAAMx4C,IAAOw4C,EAAOx9C,MAApB,CACA,IAAIgF,IAAO,CAAX,EACE,MAAM7F,MAAM,wBAAsB6F,CAAtB,0BAAN7F,CAAN,CAKF,KAHA,IAAMw+C,KACD,WAAW,WAAW,WAAW,WAAW,WAAW,UAD5D,EAEMC,IAAiB,IAAIn8C,KAAJ,CAAUuD,CAAV,CAFvB,EAGSrE,IAAI,CAAb,EAAgBA,IAAI68C,EAAOx9C,MAA3B,EAAmCW,GAAnC,EACEi9C,EAAeJ,EAAO78C,CAAP68C,CAAfI,IAA4BD,EAAch9C,CAAdg9C,CAA5BC,CAEF,OAAOA,EAAeh2C,IAAfg2C,EAAP;ECvCK,KAAMC,QAAQ,QAAd;EAAA,IACMC,SAAS,UADf;EAAA,IAEMC,UAAU,UAFhB;EAAA,IAGMC,SAAS,WAHf;EAAA,IAIMC,UAAU,WAJhB;EAAA,IAKMC,SAAS,WALf;EAAA,ICAMC,kBAAkB,kBDAxB;EAAA,ICCMC,aAAa,kBDDnB;EAAA;EEcL,YAAA,CAAYj3B,CAAZ,EAA8Bk3B,CAA9B;EAPAz4C,sBAAAA,IAAiB,IAAjBA,EAQEA,KAAK6N,WAAL7N,GAAmBuhB,CARrBvhB,EASEA,KAAK4sB,QAAL5sB,GAAgB,gFAGVy4C,CAHU,yJATlBz4C;EAqCF,UAbE04C,WAAAA,mBAAAA,GAAA;EAAA,gBAAA,CACE,OAAO,UAAC9nB,CAAD,EAAsBC,CAAtB;EACgB,cAAjBlwB,EAAKmwB,QAAY,KACnBnwB,EAAKmwB,QAALnwB,GAAgBiwB,EAAMG,yBAANH,CAAgCC,CAAhCD,EAA8C,KAA9CA,CAAhBjwB,EACqB,QAAjBA,EAAKmwB,QAFU,KAQrBF,EAAMp4B,EAANo4B,CAASI,SAATJ,CAAmBjwB,EAAKmwB,QAAxBF,EAAkCK,GAAlCL,CARqB;OADvB;KADF8nB,GAaF;KF5CO;EAAA,IE8CDppB,sBAAoB,yBF9CnB;EAAA,IEgDMqpB,MAAM,gBFhDZ;EAAA,IEkDMC,OAAOtpB,sBAAoB,mCFlDjC;EAAA,IEsDMupB,MAAM,yCFtDZ;EAAA,IEwDMC,OAAO,+IAGGC,eAHH,wBAAA,GAIFC,UAJE,wEFxDb,CEgEP,aAAA,CAAqBthD,CAArB;EACE,0BADmBA,QACZ43B,sBAAoB,qCAApBA,GAC0B53B,CAD1B43B,WAAP;EAKF,KAAa2pB,MAAM,YAAnB;EAAA,IAEaC,OAAO,iBAFpB;EAAA,IAIaC,QAAQ,kBAJrB;EAAA,IAMaC,OAAO,wDANpB;EAAA,IAWaC,QAAQ,gWAXrB;EAAA,IA4BaC,MAAM,gBA5BnB;EAAA,IA8BaC,QAAQ,sBA9BrB;EAAA,IAgCaC,MAAM,4CAhCnB;EAAA,IAmCaC,QAAQ,sBAnCrB;EAAA,IAqCaC,OAAO,iBArCpB;EAAA,IAuCaC,QAAQ,wBAvCrB;EAAA,IAyCaC,UAAU,qCAzCvB;EAAA,IAwDaC,WAAW,yWAxDxB;EAAA,IA8EaC,MAAMxqB,sBAAoB,sBA9EvC;EAAA,IAkFayqB,MAAMzqB,sBAAoB,sBAlFvC;EAAA,IAsFa0qB,MAAM,gBAtFnB;EAAA,IAwFaC,OAAO,iBAxFpB;EAAA,IA0FaC,OAAO,iBA1FpB;EAAA,IA4FaC,OAAO7qB,sBAAoB,uBA5FxC;EAAA,IAgGa8qB,OAAO,8DAhGpB;EAAA,IAqGaC,OAAO,+DArGpB;EAAA,IA0GaC,OAAO,sFA1GpB;EAAA,IA+GaC,QAAQ,oCA/GrB;EAAA,IAiHaC,QAAQlrB,sBAAoB,oEAjHzC;EAAA,IAqHamrB,QAAQnrB,sBAAoB,6FArHzC;EAAA,IAyHaorB,MAAM,0NAILC,KAJK,qBAAA,GAKJC,MALI,qBAAA,GAMJC,MANI,qBAAA,GAOJC,MAPI,qBAAA,GAQJC,MARI,qBAAA,GASJC,MATI,kHAzHnB;EAAA,IAwIaC,SAAS,eAxItB;EAAA,IA0IaC,aAAa,iBA1I1B;EAAA,IA4IaC,cAAc,4BA5I3B;EAAA,IA8IaC,SAAS,uBA9ItB;EAAA;ECxCA,SAnBE,UAAYvtC,CAAZ;EALA7N,sBAAAA,IAAiB,IAAjBA,EACAA,uBAAAA,IAAqB,CADrBA,EAMEA,KAAK6N,WAAL7N,GAAmB6N,CANrB7N,CAOE,IAAMZ,IAAOyO,EAAYzT,MAAzB;EAAA,QAEMy2C,IAAWC,YAAY,IAAZA,EAAkB1xC,CAAlB0xC,CAFjB;EAAA,QAGM7yC,IAAQs7B,kBAAkBn6B,CAAlBm6B,CAHd;EAAA,QAIMgB,IAAeC,kBAAgBp7B,CAAhBo7B,EAAsBqW,CAAtBrW,CAJrB;EAAA,QAKM+W,IAAYV,EAAShzC,KAATgzC,EAAgB,CAAhBA,CALlB;EAAA,QAMM9d,IAAkB,MAAT3zB,CAAS,GAAI,IAAJ,GAAW,UAAQmyC,EAAUvvC,IAAVuvC,CAAe,GAAfA,CAAR,MANnC,CAQAvxC,KAAK4sB,QAAL5sB,GAAgB,oCAEV/B,CAFU,+DAAA,GAGcs8B,CAHd,qDAAA,GAKwBxH,CALxB,uBAAhB/yB;KASJ;KDwCA,CEjDA,kBAAA,CAAmB8nB,CAAnB;EACE,SAAO3gB,OAAO2gB,CAAP3gB,EAAgB,CAAhBA,CAAP;EA8BF,mBAAA,CAAmB2gB,CAAnB,EAAqDhqB,CAArD;EACE,SAAOq
J,OAAO2gB,CAAP3gB,EAAgBrJ,CAAhBqJ,CAAP;EAiCF,mBAAA,CAAmB2gB,CAAnB,EAAqDhqB,CAArD;EACE,SAAOqJ,OAAO2gB,CAAP3gB,EAAgBrJ,CAAhBqJ,CAAP;EAUF,mBAAA,CAAmB2gB,CAAnB,EAAqDhqB,CAArD;EACE,SAAOqJ,OAAO2gB,CAAP3gB,EAAgBrJ,CAAhBqJ,CAAP;EA0CF,iBAAA,CAAmC2gB,CAAnC,EAA8DhqB,CAA9D;qBAA8DA,QAC5DpC,OAAOosB,EAAQ1tB,MAAR0tB,IAAkB,CAAzBpsB,EAA4B,oCAA5BA,EACA,IAAI2/C,IAAWC,qBAAqBxzB,CAArBwzB,EAA8B,SAA9BA,EAAyC,QAAzCA,CAAf,CACAx9C,IAAOy9C,eAAez9C,CAAfy9C,EAAqBF,EAAS,CAATA,EAAYp/C,KAAjCs/C,EAAwC,CAAxCA,CAAPz9C,CACA,IAAM0jB,IAAWg6B,gBAAgBH,EAASv5C,GAATu5C,CAAa,UAAAniC,CAAA;EAAK,WAAAA,EAAEjd,KAAF;KAAlBo/C,CAAhBG,EAA4C19C,CAA5C09C,CAAjB,CACA,IAAgC,MAA5Bv5C,cAAcuf,CAAdvf,CAAJ,EACE,OAAOgR,SAAAA,EAAWuO,CAAXvO,CAAP,CAIF,IAAwB,OADxBooC,IAAWA,EAAS/tC,MAAT+tC,CAAgB,UAAAniC,CAAA;EAAK,WAAAA,EAAEhd,IAAFgd,GAAS,CAAT;KAArBmiC,GACEjhD,MAAb,EACE,OAAOihD,EAAS,CAATA,CAAP,CAGF,IAAMv5B,IAASu5B,EAASv5C,GAATu5C,CAAa,UAAAniC,CAAA;EAAK,WAAAA,EAAEjd,KAAF;KAAlBo/C,CAAf,CACAI,uBAAuB35B,CAAvB25B,EAA+B39C,CAA/B29C,EACA,IAKMhqC,IAAS4pC,CALf,CAMA,OAAOl+B,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1M,MAAR0M,CAAewnC,CAAfxnC,EAAyB/V,CAAzB+V,CAAA;KADRsJ,EAC6C1L,CAD7C0L,EANK,UAAC1K,CAAD;EACV,QAAM2W,IAAatH,EAAOhgB,GAAPggB,CAAW,UAAApF,CAAA;EAAK,aAAAA,EAAE5e,CAAF4e,CAAA;OAAhBoF,CAAnB,CAEA,OADmBza,QAAMoL,CAANpL,EAAU+hB,CAAV/hB,EAAsBvJ,CAAtBuJ,EACDvF,GADCuF,CACG,UAAA6R,CAAA;EAAK,aAAA;EAAM,eAAAA,CAAA;SAAN;OADR7R,CACnB;KAGK8V,CAAP;EAqCF,gBAAA,CACIziB,CADJ,EACqB0M,CADrB,EACuDtJ,CADvD;qBACuDA,OACrD,IAGI49C,CAHJ;EAAA,MAAMj1B,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAEA7mB,IAAOy9C,eAAez9C,CAAfy9C,EAAqB90B,EAAGxqB,KAAxBs/C,EAA+B,CAA/BA,CAAPz9C,EAEiC,oBAAA,IAC/BpC,OACI+qB,EAAGxqB,KAAHwqB,CAAS3oB,CAAT2oB,IAAiBrf,CAAjBqf,IAAqC,CADzC/qB,EAEI,+CAFJA,GAGAggD,IAAa7/C,MAAMuL,CAANvL,EAAuBsG,IAAvBtG,CAA4B4qB,EAAGxqB,KAAHwqB,CAAS3oB,CAAT2oB,IAAiBrf,CAA7CvL,CAJkB,KAM/BH,OACI+qB,EAAGxqB,KAAHwqB,CAAS3oB,CAAT2oB,MAAmBrf,EAAgBu0C,MAAhBv0C,CAAuB,UAAC1N,CAAD,EAAIsB,CAAJ;EAAU,WAAAtB,IAAIsB,CAAJ;KAAjCoM,CADvB1L,EAEI,6DAFJA,GAGAggD,IAAat0C,CATkB,CAFjCtJ,CAcA,OAAOqf,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQxM,KAARwM,CAAc4S,CAAd5S,EAAkB6nC,CAAlB7nC,EAA8B/V,CAA9B+V,CAAA;KADRsJ,IAC8CsJ,OAD9CtJ,EADK,UAAC1K,CAAD;EAAa,aAAEgU,IAAI;EAAM,eAAAtf,OAAOsL,CAAPtL,EAAWrJ,CAAXqJ,CAAA;WAAZ;KAClBgW,CAAP;EAIF,KAAahW,SAASqe,KAAIo2B,kBAAJp2B,CAAtB;EAAA,IACaq2B,WAAWr2B,KAAIs2B,sBAAJt2B,CADxB;EAAA,IAEau2B,WAAWv2B,KAAIw2B,sBAAJx2B,CAFxB;EAAA,IAGay2B,WAAWz2B,KAAI02B,sBAAJ12B,CAHxB;EAAA,IAIa22B,WAAW32B,KAAI42B,sBAAJ52B,CAJxB;EAAA,IAKane,UAAQme,KAAI62B,gBAAJ72B,CALrB;AAAA;;;KCnNA,UAAU82B,CAAV,EAAkBC,CAAlB,EAA0BC,CAA1B;EA0BA,aAASC,CAAT,CAAc19C,CAAd,EAAiBma,CAAjB;EAKE,aAJAA,EAAEiR,CAAFjR,GAAMna,EAAEorB,CAARjR,EACAA,EAAEwjC,EAAFxjC,GAAOna,EAAE29C,EADTxjC,EAEAA,EAAEyjC,EAAFzjC,GAAOna,EAAE49C,EAFTzjC,EAGAA,EAAE0jC,EAAF1jC,GAAOna,EAAE69C,EAHT1jC,EAIOA,CAAP;EAGF,cAAS2jC,CAAT,CAAcr0B,CAAd,EAAoBs0B,CAApB;EACE,UAAIC,IAAK,IAjCX,UAAcv0B,CAAd;EACE,YAgDI7rB,CAhDJ;EAAA,YAAIqgD,IAAKh9C,IAAT;EAAA,YAAei9C,KAgDXtgD,IAAI,UAAJA,EAEO,UAASqI,CAAT;EACTA,cAAOA,EAAK/D,QAAL+D,EAAPA,CACA,KAAK,IAAIjK,IAAI,CAAb,EAAgBA,IAAIiK,EAAK5K,MAAzB,EAAiCW,GAAjC,EAAsC;EAEpC,gBAAIqqC,IAAI,sBADRzoC,KAAKqI,EAAKk4C,UAALl4C,CAAgBjK,CAAhBiK,CACG,CAAR,CAEAogC,KADAzoC,IAAIyoC,MAAM,CACVA,EAEAzoC,KADAyoC,KAAKzoC,OACK,CAFVyoC,EAIAzoC,KAAS,cADTyoC,KAAKzoC,CACI,CAJTyoC;EAMF,kBAAmB,0BAAXzoC,MAAM,CAAK,CAAnB;WA9DasgD,CAAf,CAEAD,EAAGG,IAAHH,GAAU;EACR,cAAI9jC,IAAI,UAAU8jC,EAAGN,EAAb,GAAyB,yBAAPM,EAAG7yB,CAA7B,CAGA,OAFA6yB,EAAGN,EAAHM,GAAQA,EAAGL,EAAXK,EACAA,EAAGL,EAAHK,GAAQA,EAAGJ,EADXI,EAEOA,EAAGJ,EAAHI,GAAQ9jC,KAAK8jC,EAAG
7yB,CAAH6yB,GAAW,IAAJ9jC,CAAZA,CAAf;WAJF8jC,EAQAA,EAAG7yB,CAAH6yB,GAAO,CARPA,EASAA,EAAGN,EAAHM,GAAQC,EAAK,GAALA,CATRD,EAUAA,EAAGL,EAAHK,GAAQC,EAAK,GAALA,CAVRD,EAWAA,EAAGJ,EAAHI,GAAQC,EAAK,GAALA,CAXRD,EAYAA,EAAGN,EAAHM,IAASC,EAAKz0B,CAALy0B,CAZTD,EAaIA,EAAGN,EAAHM,GAAQ,CAARA,KAAaA,EAAGN,EAAHM,IAAS,CAAtBA,CAbJA,EAcAA,EAAGL,EAAHK,IAASC,EAAKz0B,CAALy0B,CAdTD,EAeIA,EAAGL,EAAHK,GAAQ,CAARA,KAAaA,EAAGL,EAAHK,IAAS,CAAtBA,CAfJA,EAgBAA,EAAGJ,EAAHI,IAASC,EAAKz0B,CAALy0B,CAhBTD,EAiBIA,EAAGJ,EAAHI,GAAQ,CAARA,KAAaA,EAAGJ,EAAHI,IAAS,CAAtBA,CAjBJA,EAkBAC,IAAO,IAlBPD;EA8BS,OAAA,CAASx0B,CAAT,CAAT;EAAA,UACI40B,IAAQN,KAAQA,EAAKM,KADzB;EAAA,UAEIC,IAAON,EAAGI,IAFd,CAYA,OATAE,EAAK/1B,KAAL+1B,GAAa;EAAa,eAAoB,aAAZN,EAAGI,IAAHJ,EAAY,GAAe,CAAnC;SAA1BM,EACAA,EAAKC,MAALD,GAAc;EACZ,eAAOA,MAAmC,0BAAhB,UAATA,GAAS,GAAW,CAAK,CAA1C;SAFFA,EAIAA,EAAKE,KAALF,GAAaA,CAJbA,EAKID,MACmB,oBAAA,IAAUX,EAAKW,CAALX,EAAYM,CAAZN,CAAV,EACrBY,EAAKD,KAALC,GAAa;EAAa,eAAOZ,EAAKM,CAALN,IAAAA,CAAP;SAFxBW,CALJC,EASOA,CAAP;EAyBEd,UAAUA,EAAOiB,OAAjBjB,GACFA,EAAOiB,OAAPjB,GAAiBM,CADfN,GAEOC,KAAUA,EAAOiB,GAAjBjB,GACTA,EAAO;EAAa,aAAOK,CAAP;OAApBL,CADSA,GAGTx8C,KAAK09C,IAAL19C,GAAY68C,CALVN;EAxEJ,GAAA,CAiFEv8C,CAjFF,EAkFEu8C,CAlFF,GAmFE,CAnFF;;;KCxBA,UAAUD,CAAV,EAAkBC,CAAlB,EAA0BC,CAA1B;EAkCA,aAASC,CAAT,CAAc19C,CAAd,EAAiBma,CAAjB;EAKE,aAJAA,EAAExe,CAAFwe,GAAMna,EAAErE,CAARwe,EACAA,EAAE9H,CAAF8H,GAAMna,EAAEqS,CADR8H,EAEAA,EAAEykC,CAAFzkC,GAAMna,EAAE4+C,CAFRzkC,EAGAA,EAAEisB,CAAFjsB,GAAMna,EAAEomC,CAHRjsB,EAIOA,CAAP;EAGF,cAAS2jC,CAAT,CAAcr0B,CAAd,EAAoBs0B,CAApB;EACE,UAAIC,IAAK,IAzCX,UAAgBv0B,CAAhB;EACE,YAAIw0B,IAAKh9C,IAAT;EAAA,YAAe49C,IAAU,EAAzB,CAEAZ,EAAGtiD,CAAHsiD,GAAO,CAAPA,EACAA,EAAG5rC,CAAH4rC,GAAO,CADPA,EAEAA,EAAGW,CAAHX,GAAO,CAFPA,EAGAA,EAAG7X,CAAH6X,GAAO,CAHPA,EAMAA,EAAGG,IAAHH,GAAU;EACR,cAAI9jC,IAAI8jC,EAAGtiD,CAAHsiD,GAAQA,EAAGtiD,CAAHsiD,IAAQ,EAAxB,CAIA,OAHAA,EAAGtiD,CAAHsiD,GAAOA,EAAG5rC,CAAV4rC,EACAA,EAAG5rC,CAAH4rC,GAAOA,EAAGW,CADVX,EAEAA,EAAGW,CAAHX,GAAOA,EAAG7X,CAFV6X,EAGOA,EAAG7X,CAAH6X,IAASA,EAAG7X,CAAH6X,KAAS,EAATA,GAAe9jC,CAAf8jC,GAAoB9jC,MAAM,CAA1C;WAXF8jC,EAcIx0B,OAAiB,IAAPA,CAAVA,IAEFw0B,EAAGtiD,CAAHsiD,GAAOx0B,CAFLA,GAKFo1B,KAAWp1B,CAnBbw0B,CAuBA,KAAK,IAAIvtC,IAAI,CAAb,EAAgBA,IAAImuC,EAAQxjD,MAARwjD,GAAiB,EAArC,EAAyCnuC,GAAzC,EACEutC,EAAGtiD,CAAHsiD,IAAgC,IAAxBY,EAAQV,UAARU,CAAmBnuC,CAAnBmuC,CAARZ,EACAA,EAAGG,IAAHH,EADAA;EAcO,OAAA,CAAWx0B,CAAX,CAAT;EAAA,UACI40B,IAAQN,KAAQA,EAAKM,KADzB;EAAA,UAEIC,IAAO;EAAa,gBAAQN,EAAGI,IAAHJ,OAAc,KAAK,UAA3B;SAFxB,CAiBA,OAdAM,EAAKC,MAALD,GAAc;EACZ;EACE,cAEIniD,MAFM6hD,EAAGI,IAAHJ,OAAc,OACbA,EAAGI,IAAHJ,OAAc,KAAK,eACF,KAAK,GAFjC;WADF,QAIoB,MAAX7hD,CAJT,EAKA,OAAOA,CAAP;SANFmiD,EAQAA,EAAK/1B,KAAL+1B,GAAaN,EAAGI,IARhBE,EASAA,EAAKE,KAALF,GAAaA,CATbA,EAUID,MACmB,oBAAA,IAAUX,EAAKW,CAALX,EAAYM,CAAZN,CAAV,EACrBY,EAAKD,KAALC,GAAa;EAAa,eAAOZ,EAAKM,CAALN,IAAAA,CAAP;SAFxBW,CAVJC,EAcOA,CAAP;EAGEd,UAAUA,EAAOiB,OAAjBjB,GACFA,EAAOiB,OAAPjB,GAAiBM,CADfN,GAEOC,KAAUA,EAAOiB,GAAjBjB,GACTA,EAAO;EAAa,aAAOK,CAAP;OAApBL,CADSA,GAGTx8C,KAAK69C,MAAL79C,GAAc68C,CALZN;EA/DJ,GAAA,CAwEEv8C,CAxEF,EAyEEu8C,CAzEF,GA0EE,CA1EF;;;KCAA,UAAUD,CAAV,EAAkBC,CAAlB,EAA0BC,CAA1B;EAqCA,aAASC,CAAT,CAAc19C,CAAd,EAAiBma,CAAjB;EAOE,aANAA,EAAExe,CAAFwe,GAAMna,EAAErE,CAARwe,EACAA,EAAE9H,CAAF8H,GAAMna,EAAEqS,CADR8H,EAEAA,EAAEykC,CAAFzkC,GAAMna,EAAE4+C,CAFRzkC,EAGAA,EAAEisB,CAAFjsB,GAAMna,EAAEomC,CAHRjsB,EAIAA,EAAElC,CAAFkC,GAAMna,EAAEiY,CAJRkC,EAKAA,EAAEvB,CAAFuB,GAAMna,EAAE4Y,CALRuB,EAMOA,CAAP;EAGF,cAAS2jC,CAAT,CAAcr0B,CAAd,EAAoBs0B,CAApB;EACE,UAAIC,IAAK,IA9CX,UAAgBv0B,CAAhB;EACE,YAAIw0B,IAAKh9C,IAAT;EAAA,YAAe49C,IAAU,EAAzB,CAGAZ,EAAGG,IAAHH,GAAU;EACR
,cAAI9jC,IAAK8jC,EAAGtiD,CAAHsiD,GAAQA,EAAGtiD,CAAHsiD,KAAS,CAA1B,CAEA,OADAA,EAAGtiD,CAAHsiD,GAAOA,EAAG5rC,CAAV4rC,EAAaA,EAAG5rC,CAAH4rC,GAAOA,EAAGW,CAAvBX,EAA0BA,EAAGW,CAAHX,GAAOA,EAAG7X,CAApC6X,EAAuCA,EAAG7X,CAAH6X,GAAOA,EAAGhmC,CAAjDgmC,GACQA,EAAGrlC,CAAHqlC,GAAQA,EAAGrlC,CAAHqlC,GAAO,MAAPA,GAAgB,MAC5BA,EAAGhmC,CAAHgmC,GAAQA,EAAGhmC,CAAHgmC,GAAQA,EAAGhmC,CAAHgmC,IAAQ,CAAhBA,GAAuB9jC,CAAvB8jC,GAA4B9jC,KAAK,KAAO,CADpD;WAHF8jC,EAOAA,EAAGtiD,CAAHsiD,GAAO,CAPPA,EAQAA,EAAG5rC,CAAH4rC,GAAO,CARPA,EASAA,EAAGW,CAAHX,GAAO,CATPA,EAUAA,EAAG7X,CAAH6X,GAAO,CAVPA,EAWAA,EAAGhmC,CAAHgmC,GAAO,CAXPA,EAaIx0B,OAAiB,IAAPA,CAAVA,IAEFw0B,EAAGtiD,CAAHsiD,GAAOx0B,CAFLA,GAKFo1B,KAAWp1B,CAlBbw0B,CAsBA,KAAK,IAAIvtC,IAAI,CAAb,EAAgBA,IAAImuC,EAAQxjD,MAARwjD,GAAiB,EAArC,EAAyCnuC,GAAzC,EACEutC,EAAGtiD,CAAHsiD,IAAgC,IAAxBY,EAAQV,UAARU,CAAmBnuC,CAAnBmuC,CAARZ,EACIvtC,KAAKmuC,EAAQxjD,MAAbqV,KACFutC,EAAGrlC,CAAHqlC,GAAOA,EAAGtiD,CAAHsiD,IAAQ,EAARA,GAAaA,EAAGtiD,CAAHsiD,KAAS,CAD3BvtC,CADJutC,EAIAA,EAAGG,IAAHH,EAJAA;EAmBO,OAAA,CAAWx0B,CAAX,CAAT;EAAA,UACI40B,IAAQN,KAAQA,EAAKM,KADzB;EAAA,UAEIC,IAAO;EAAa,gBAAQN,EAAGI,IAAHJ,OAAc,KAAK,UAA3B;SAFxB,CAiBA,OAdAM,EAAKC,MAALD,GAAc;EACZ;EACE,cAEIniD,MAFM6hD,EAAGI,IAAHJ,OAAc,OACbA,EAAGI,IAAHJ,OAAc,KAAK,eACF,KAAK,GAFjC;WADF,QAIoB,MAAX7hD,CAJT,EAKA,OAAOA,CAAP;SANFmiD,EAQAA,EAAK/1B,KAAL+1B,GAAaN,EAAGI,IARhBE,EASAA,EAAKE,KAALF,GAAaA,CATbA,EAUID,MACmB,oBAAA,IAAUX,EAAKW,CAALX,EAAYM,CAAZN,CAAV,EACrBY,EAAKD,KAALC,GAAa;EAAa,eAAOZ,EAAKM,CAALN,IAAAA,CAAP;SAFxBW,CAVJC,EAcOA,CAAP;EAGEd,UAAUA,EAAOiB,OAAjBjB,GACFA,EAAOiB,OAAPjB,GAAiBM,CADfN,GAEOC,KAAUA,EAAOiB,GAAjBjB,GACTA,EAAO;EAAa,aAAOK,CAAP;OAApBL,CADSA,GAGTx8C,KAAK89C,MAAL99C,GAAc68C,CALZN;EApEJ,GAAA,CA6EEv8C,CA7EF,EA8EEu8C,CA9EF,GA+EE,CA/EF;;;KCEA,UAAUD,CAAV,EAAkBC,CAAlB,EAA0BC,CAA1B;EAkDA,aAASC,CAAT,CAAc19C,CAAd,EAAiBma,CAAjB;EAGE,aAFAA,EAAExe,CAAFwe,GAAMna,EAAErE,CAAFqE,CAAIlB,KAAJkB,EAANma,EACAA,EAAEne,CAAFme,GAAMna,EAAEhE,CADRme,EAEOA,CAAP;EAGF,cAAS2jC,CAAT,CAAcr0B,CAAd,EAAoBs0B,CAApB;EACc,cAARt0B,CAAQ,KAAMA,SAAau1B,MAAnB,EACZ,IAAIhB,IAAK,IAxDX,UAAgBv0B,CAAhB;EACE,YAAIw0B,IAAKh9C,IAAT,CAGAg9C,EAAGG,IAAHH,GAAU;EAER,cAAwB9jC,CAAxB;EAAA,cAA2BlC,CAA3B;EAAA,cAAIgnC,IAAIhB,EAAGtiD,CAAX;EAAA,cAAcK,IAAIiiD,EAAGjiD,CAArB,CAQA,OAPAme,IAAI8kC,EAAEjjD,CAAFijD,CAAJ9kC,EAA0BlC,KAAhBkC,KAAMA,MAAM,KAAaA,KAAK,EAAxCA,EACoBlC,MAApBkC,IAAI8kC,EAAGjjD,IAAI,CAAJA,GAAS,CAAZijD,KAA0B9kC,MAAM,EADpCA,EAEoBlC,MAApBkC,IAAI8kC,EAAGjjD,IAAI,CAAJA,GAAS,CAAZijD,KAA0B9kC,MAAM,CAFpCA,EAGoBlC,MAApBkC,IAAI8kC,EAAGjjD,IAAI,CAAJA,GAAS,CAAZijD,KAA0B9kC,KAAK,CAHnCA,EAIAA,IAAI8kC,EAAGjjD,IAAI,CAAJA,GAAS,CAAZijD,CAJJ9kC,EAIuClC,MAAnBkC,KAASA,KAAK,MAAeA,KAAK,CAJtDA,EAKA8kC,EAAEjjD,CAAFijD,IAAOhnC,CALPkC,EAMA8jC,EAAGjiD,CAAHiiD,GAAQjiD,IAAI,CAAJA,GAAS,CANjBme,EAOOlC,CAAP;WAVFgmC,EAaA,UAAcA,CAAd,EAAkBx0B,CAAlB;EACE,cAAIxqB,CAAJ;EAAA,cAAUggD,MAAV,CAEA,IAAIx1B,OAAiB,IAAPA,CAAVA,CAAJ,EAEMw1B,EAAE,CAAFA,IAAOx1B,CAAPw1B,CAFN,KAME,KADAx1B,IAAO,KAAKA,CAAZA,EACKxqB,IAAI,CAAT,EAAYA,IAAIwqB,EAAKpuB,MAArB,IAA+B4D,CAA/B,EACEggD,EAAM,IAAJhgD,CAAFggD,IAAYA,EAAM,IAAJhgD,CAAFggD,KAAY,EAAZA,GACPx1B,EAAK00B,UAAL10B,CAAgBxqB,CAAhBwqB,IAAqBw1B,EAAGhgD,IAAI,CAAJA,GAAS,CAAZggD,CAArBx1B,IAAuC,EAD5Cw1B,CAKJ,OAAOA,EAAE5jD,MAAF4jD,GAAW,CAAlB,GAAqBA,EAAEhiD,IAAFgiD,CAAO,CAAPA,EACrB,KAAKhgD,IAAI,CAAT,EAAYA,IAAI,CAAJA,IAAkB,MAATggD,EAAEhgD,CAAFggD,CAArB,IAAmChgD,CAAnC,EAOA,KANS,KAALA,CAAK,GAAOggD,EAAE,CAAFA,KAAQ,CAAf,GAA2BA,EAAEhgD,CAAFggD,CAA3B,EAEThB,EAAGtiD,CAAHsiD,GAAOgB,CAFE,EAGThB,EAAGjiD,CAAHiiD,GAAO,CAHE,EAMJh/C,IAAI,GAAT,EAAcA,IAAI,CAAlB,IAAuBA,CAAvB,EACEg/C,EAAGG,IAAHH;EAIJiB,SA5BA,CA4BKjB,CA5BL,EA4BSx0B,CA5
BT,CAbAw0B;EAoDS,OAAA,CAAWx0B,CAAX,CAAT;EAAA,UACI40B,IAAQN,KAAQA,EAAKM,KADzB;EAAA,UAEIC,IAAO;EAAa,gBAAQN,EAAGI,IAAHJ,OAAc,KAAK,UAA3B;SAFxB,CAiBA,OAdAM,EAAKC,MAALD,GAAc;EACZ;EACE,cAEIniD,MAFM6hD,EAAGI,IAAHJ,OAAc,OACbA,EAAGI,IAAHJ,OAAc,KAAK,eACF,KAAK,GAFjC;WADF,QAIoB,MAAX7hD,CAJT,EAKA,OAAOA,CAAP;SANFmiD,EAQAA,EAAK/1B,KAAL+1B,GAAaN,EAAGI,IARhBE,EASAA,EAAKE,KAALF,GAAaA,CATbA,EAUID,MACEA,EAAM1iD,CAAN0iD,IAASX,EAAKW,CAALX,EAAYM,CAAZN,CAATW,EACJC,EAAKD,KAALC,GAAa;EAAa,eAAOZ,EAAKM,CAALN,IAAAA,CAAP;SAFxBW,CAVJC,EAcOA,CAAP;EAGEd,UAAUA,EAAOiB,OAAjBjB,GACFA,EAAOiB,OAAPjB,GAAiBM,CADfN,GAEOC,KAAUA,EAAOiB,GAAjBjB,GACTA,EAAO;EAAa,aAAOK,CAAP;OAApBL,CADSA,GAGTx8C,KAAKk+C,SAALl+C,GAAiB68C,CALfN;EA9EJ,GAAA,CAuFEv8C,CAvFF,EAwFEu8C,CAxFF,GAyFE,CAzFF;;;KCoBA,UAAUD,CAAV,EAAkBC,CAAlB,EAA0BC,CAA1B;EA+EA,aAASC,CAAT,CAAc19C,CAAd,EAAiBma,CAAjB;EAIE,aAHAA,EAAEne,CAAFme,GAAMna,EAAEhE,CAARme,EACAA,EAAEisB,CAAFjsB,GAAMna,EAAEomC,CADRjsB,EAEAA,EAAE8kC,CAAF9kC,GAAMna,EAAEi/C,CAAFj/C,CAAIlB,KAAJkB,EAFNma,EAGOA,CAAP;EAGF,cAAS2jC,CAAT,CAAcr0B,CAAd,EAAoBs0B,CAApB;EACc,cAARt0B,CAAQ,KAAMA,SAAau1B,MAAnB,EACZ,IAAIhB,IAAK,IAtFX,UAAgBv0B,CAAhB;EACE,YAAIw0B,IAAKh9C,IAAT,CAGAg9C,EAAGG,IAAHH,GAAU;EACR,cACwB9jC,CADxB;EAAA,cAC2BlC,CAD3B;EAAA,cAAImuB,IAAI6X,EAAG7X,CAAX;EAAA,cACI6Y,IAAIhB,EAAGgB,CADX;EAAA,cACcjjD,IAAIiiD,EAAGjiD,CADrB,CAeA,OAZAiiD,EAAG7X,CAAH6X,GAAO7X,IAAKA,IAAI,UAAJA,GAAkB,CAA9B6X,EAEAhmC,IAAIgnC,EAAGjjD,IAAI,EAAJA,GAAU,GAAbijD,CAFJhB,EAGA9jC,IAAI8kC,EAAEjjD,IAAMA,IAAI,CAAJA,GAAS,GAAjBijD,CAHJhB,EAIAhmC,KAAKA,KAAK,EAJVgmC,EAKA9jC,KAAKA,KAAK,EALV8jC,EAMAhmC,KAAKA,MAAM,EANXgmC,EAOA9jC,KAAKA,MAAM,EAPX8jC,EASAhmC,IAAIgnC,EAAEjjD,CAAFijD,IAAOhnC,IAAIkC,CATf8jC,EAUAA,EAAGjiD,CAAHiiD,GAAOjiD,CAVPiiD,EAYQhmC,KAAKmuB,IAAKA,MAAM,EAAhBnuB,IAAwB,CAAhC;WAhBFgmC,EAmBA,UAAcA,CAAd,EAAkBx0B,CAAlB;EACE,cAAItP,CAAJ;EAAA,cAAOlC,CAAP;EAAA,cAAUjc,CAAV;EAAA,cAAaiD,CAAb;EAAA,cAAgBmnC,CAAhB;EAAA,cAAmB6Y,MAAnB;EAAA,cAA2BG,IAAQ,GAAnC,CAYA,KAXI31B,OAAiB,IAAPA,CAAVA,KAEFxR,IAAIwR,CAAJxR,EACAwR,IAAO,IAHLA,KAMFA,KAAc,IAAdA,EACAxR,IAAI,CADJwR,EAEA21B,IAAQ5jD,KAAKI,GAALJ,CAAS4jD,CAAT5jD,EAAgBiuB,EAAKpuB,MAArBG,CARNiuB,GAWCztB,IAAI,CAXLytB,EAWQxqB,KAAK,EAAjB,EAAqBA,IAAImgD,CAAzB,IAAkCngD,CAAlC,EAEMwqB,MAAMxR,KAAKwR,EAAK00B,UAAL10B,EAAiBxqB,IAAI,MAAMwqB,EAAKpuB,MAAhCouB,CAAXA,GAEM,MAANxqB,CAAM,KAAGmnC,IAAInuB,CAAP,CAFNwR,EAGJxR,KAAKA,KAAK,EAHNwR,EAIJxR,KAAKA,MAAM,EAJPwR,EAKJxR,KAAKA,KAAK,CALNwR,EAMJxR,KAAKA,MAAM,EANPwR,EAOAxqB,KAAK,CAALA,KACFmnC,IAAKA,IAAI,UAAJA,GAAkB,CAAvBA,EAEApqC,IAAK,MADLme,IAAK8kC,EAAM,MAAJhgD,CAAFggD,KAAehnC,IAAImuB,CACnB,IAAUpqC,IAAI,CAAd,GAAkB,CAHrBiD,CAPAwqB,CAqBN,KAPIztB,KAAK,GAALA,KACFijD,EAA+B,OAA5Bx1B,KAAQA,EAAKpuB,MAAbouB,IAAuB,CAAK,CAA/Bw1B,KAAuC,CADrCjjD,GAMJA,IAAI,GANAA,EAOCiD,IAAI,GAAT,EAAkBA,IAAI,CAAtB,IAA2BA,CAA3B,EACEgZ,IAAIgnC,EAAGjjD,IAAI,EAAJA,GAAU,GAAbijD,CAAJhnC,EACAkC,IAAI8kC,EAAEjjD,IAAMA,IAAI,CAAJA,GAAS,GAAjBijD,CADJhnC,EAEAA,KAAKA,KAAK,EAFVA,EAGAkC,KAAKA,KAAK,EAHVlC,EAIAA,KAAKA,MAAM,EAJXA,EAKAkC,KAAKA,MAAM,EALXlC,EAMAgnC,EAAEjjD,CAAFijD,IAAOhnC,IAAIkC,CANXlC,CASFgmC,EAAG7X,CAAH6X,GAAO7X,CAAP6X,EACAA,EAAGgB,CAAHhB,GAAOgB,CADPhB,EAEAA,EAAGjiD,CAAHiiD,GAAOjiD,CAFPiiD;EAKFiB,SAnDA,CAmDKjB,CAnDL,EAmDSx0B,CAnDT,CAnBAw0B;EAkFS,OAAA,CAAWx0B,CAAX,CAAT;EAAA,UACI40B,IAAQN,KAAQA,EAAKM,KADzB;EAAA,UAEIC,IAAO;EAAa,gBAAQN,EAAGI,IAAHJ,OAAc,KAAK,UAA3B;SAFxB,CAiBA,OAdAM,EAAKC,MAALD,GAAc;EACZ;EACE,cAEIniD,MAFM6hD,EAAGI,IAAHJ,OAAc,OACbA,EAAGI,IAAHJ,OAAc,KAAK,eACF,KAAK,GAFjC;WADF,QAIoB,MAAX7hD,CAJT,EAKA,OAAOA,CAAP;SANFmiD,EAQAA,EAAK/1B,KAAL+1B,GAAaN,EAAGI,IARhBE,EASAA,EAAKE,KAALF,GAAaA,CATbA,EAUID,MACEA,EAAMY,CAANZ,IAASX,EAAKW,CAALX,
EAAYM,CAAZN,CAATW,EACJC,EAAKD,KAALC,GAAa;EAAa,eAAOZ,EAAKM,CAALN,IAAAA,CAAP;SAFxBW,CAVJC,EAcOA,CAAP;EAGEd,UAAUA,EAAOiB,OAAjBjB,GACFA,EAAOiB,OAAPjB,GAAiBM,CADfN,GAEOC,KAAUA,EAAOiB,GAAjBjB,GACTA,EAAO;EAAa,aAAOK,CAAP;OAApBL,CADSA,GAGTx8C,KAAKo+C,OAALp+C,GAAe68C,CALbN;EA5GJ,GAAA,CAqHEv8C,CArHF,EAsHEu8C,CAtHF,GAuHE,CAvHF;;;KCrBA,UAAUD,CAAV,EAAkBC,CAAlB,EAA0BC,CAA1B;EAuDA,aAASC,CAAT,CAAc19C,CAAd,EAAiBma,CAAjB;EAKE,aAJAA,EAAExf,CAAFwf,GAAMna,EAAErF,CAARwf,EACAA,EAAEle,CAAFke,GAAMna,EAAE/D,CADRke,EAEAA,EAAEiR,CAAFjR,GAAMna,EAAEorB,CAFRjR,EAGAA,EAAEvB,CAAFuB,GAAMna,EAAE4Y,CAHRuB,EAIOA,CAAP;EAGF,cAAS2jC,CAAT,CAAcr0B,CAAd,EAAoBs0B,CAApB;EACE,UAAIC,IAAK,IA9DX,UAAgBv0B,CAAhB;EACE,YAAIw0B,IAAKh9C,IAAT;EAAA,YAAe49C,IAAU,EAAzB,CAGAZ,EAAGG,IAAHH,GAAU;EACR,cAAIhiD,IAAIgiD,EAAGhiD,CAAX;EAAA,cAAcmvB,IAAI6yB,EAAG7yB,CAArB;EAAA,cAAwBxS,IAAIqlC,EAAGrlC,CAA/B;EAAA,cAAkCje,IAAIsjD,EAAGtjD,CAAzC,CAQA,OAPAsB,IAAKA,KAAK,EAALA,GAAYA,MAAM,CAAlBA,GAAuBmvB,CAA5BnvB,EACAmvB,IAAKA,IAAIxS,CAAJwS,GAAS,CADdnvB,EAEA2c,IAAKA,KAAK,EAALA,GAAYA,MAAM,CAAlBA,GAAuBje,CAF5BsB,EAGAtB,IAAKA,IAAIsB,CAAJtB,GAAS,CAHdsB,EAIAgiD,EAAGhiD,CAAHgiD,GAAOhiD,IAAKA,KAAK,EAALA,GAAYA,MAAM,EAAlBA,GAAwBmvB,CAJpCnvB,EAKAgiD,EAAG7yB,CAAH6yB,GAAO7yB,IAAKA,IAAIxS,CAAJwS,GAAS,CALrBnvB,EAMAgiD,EAAGrlC,CAAHqlC,GAAQrlC,KAAK,EAALA,GAAYwS,MAAM,EAAlBxS,GAAwBje,CANhCsB,EAOOgiD,EAAGtjD,CAAHsjD,GAAQtjD,IAAIsB,CAAJtB,GAAS,CAAxB;WATFsjD,EA4BAA,EAAGtjD,CAAHsjD,GAAO,CA5BPA,EA6BAA,EAAGhiD,CAAHgiD,GAAO,CA7BPA,EA8BAA,EAAG7yB,CAAH6yB,IAAO,UA9BPA,EA+BAA,EAAGrlC,CAAHqlC,GAAO,UA/BPA,EAiCIx0B,MAASjuB,KAAKkC,KAALlC,CAAWiuB,CAAXjuB,CAATiuB,IAEFw0B,EAAGtjD,CAAHsjD,GAAQx0B,IAAO,UAAPA,GAAsB,CAA9Bw0B,EACAA,EAAGhiD,CAAHgiD,GAAc,IAAPx0B,CAHLA,IAMFo1B,KAAWp1B,CAvCbw0B,CA2CA,KAAK,IAAIvtC,IAAI,CAAb,EAAgBA,IAAImuC,EAAQxjD,MAARwjD,GAAiB,EAArC,EAAyCnuC,GAAzC,EACEutC,EAAGhiD,CAAHgiD,IAAgC,IAAxBY,EAAQV,UAARU,CAAmBnuC,CAAnBmuC,CAARZ,EACAA,EAAGG,IAAHH,EADAA;EAcO,OAAA,CAAWx0B,CAAX,CAAT;EAAA,UACI40B,IAAQN,KAAQA,EAAKM,KADzB;EAAA,UAEIC,IAAO;EAAa,gBAAQN,EAAGI,IAAHJ,OAAc,KAAK,UAA3B;SAFxB,CAiBA,OAdAM,EAAKC,MAALD,GAAc;EACZ;EACE,cAEIniD,MAFM6hD,EAAGI,IAAHJ,OAAc,OACbA,EAAGI,IAAHJ,OAAc,KAAK,eACF,KAAK,GAFjC;WADF,QAIoB,MAAX7hD,CAJT,EAKA,OAAOA,CAAP;SANFmiD,EAQAA,EAAK/1B,KAAL+1B,GAAaN,EAAGI,IARhBE,EASAA,EAAKE,KAALF,GAAaA,CATbA,EAUID,MACmB,oBAAA,IAAUX,EAAKW,CAALX,EAAYM,CAAZN,CAAV,EACrBY,EAAKD,KAALC,GAAa;EAAa,eAAOZ,EAAKM,CAALN,IAAAA,CAAP;SAFxBW,CAVJC,EAcOA,CAAP;EAGEd,UAAUA,EAAOiB,OAAjBjB,GACFA,EAAOiB,OAAPjB,GAAiBM,CADfN,GAEOC,KAAUA,EAAOiB,GAAjBjB,GACTA,EAAO;EAAa,aAAOK,CAAP;OAApBL,CADSA,GAGTx8C,KAAKq+C,MAALr+C,GAAc68C,CALZN;EApFJ,GAAA,CA6FEv8C,CA7FF,EA8FEu8C,CA9FF,GA+FE,CA/FF;;;KCoBA,UAAWztC,CAAX,EAAiBwvC,CAAjB;EAIA,QASIC,CATJ;EAAA,QAAIjC,IAASt8C,IAAb;EAAA,QACIq6B,IAAQ,GADZ;EAAA,QAEImkB,IAAS,CAFb;EAAA,QAIIC,IAAU,QAJd;EAAA,QAKIC,IAAaJ,EAAK51C,GAAL41C,CAASjkB,CAATikB,EAAgBE,CAAhBF,CALjB;EAAA,QAMIK,IAAeL,EAAK51C,GAAL41C,CAAS,CAATA,EAHN,EAGMA,CANnB;EAAA,QAOIM,IAA0B,IAAfD,CAPf;EAAA,QAQIE,IAAOxkB,IAAQ,CARnB,CAeA,SAASykB,CAAT,CAAoBt2B,CAApB,EAA0Bu2B,CAA1B,EAAmCC,CAAnC;EACE,UAAIzoC,MAAJ;EAAA,UAII0oC,IAAYC,EAoHlB,SAASnjD,CAAT,CAAiB4X,CAAjB,EAAsB7b,CAAtB;EACE,YAAqCqnD,CAArC;EAAA,YAAIjkD,MAAJ;EAAA,YAAiBkkD,WAAczrC,CAA/B,CACA,IAAI7b,KAAgB,YAAPsnD,CAAb,EACE,KAAKD,CAAL,IAAaxrC,CAAb,EACE;EAAMzY,YAAOc,IAAPd,CAAYa,EAAQ4X,EAAIwrC,CAAJxrC,CAAR5X,EAAmBjE,IAAQ,CAA3BiE,CAAZb;EAA8C,SAApD,CAAoD,OAAOgf,CAAP,WAGhDhf,EAAOd,MAAPc,GAAgBA,CAAhBA,GAAgC,YAAPkkD,CAAO,GAAWzrC,CAAX,GAAiBA,IAAM,IAA/D;EA3HuB5X,OAoHzB,EAvHEgjD,IAAsB,KAAXA,CAAW,KAAUM,UAAS,GAAnB,GAA6BN,SAIzCM,WAAW72B,GAAM82B,EAASxwC,CAATwwC,KAChB,QAAR92B,CAAQ,GA+Ib;EACE;EACE,cAAI+2B,CAAJ
,CAQA,OAPIhB,MAAegB,IAAMhB,EAAWiB,WAAhCjB,IAEFgB,IAAMA,EAAIllB,CAAJklB,CAFJhB,IAIFgB,IAAM,IAAIlhD,UAAJ,CAAeg8B,CAAf,CAANklB,GACCjD,EAAOmD,MAAPnD,IAAiBA,EAAOoD,UAAUC,gBAAgBJ,EALjDhB,GAOGe,EAASC,CAATD,CAAP;EACA,SAVF,CAUE,OAAOplC,CAAP;EACA,cAAI0lC,IAAUtD,EAAO3iD,SAArB;EAAA,cACIkmD,IAAUD,KAAWA,EAAQC,OADjC,CAEA,SAAS,IAAI9B,IAAJ,IAAUzB,GAAQuD,GAASvD,EAAOwD,QAAQR,EAASxwC,CAATwwC,EAAnD;;EA7JiBS,OA+IrB,EA/Ia,GAAqBv3B,CAkHlC,EAlHwC,CAkHxC,CApHkB02B,EAE0B3oC,CAF1B2oC,CAJhB;EAAA,UASIc,IAAO,IA+Db,UAAczpC,CAAd;EACE,YAAI2C,CAAJ;EAAA,YAAO+mC,IAAS1pC,EAAInc,MAApB;EAAA,YACI4iD,IAAKh9C,IADT;EAAA,YACejF,IAAI,CADnB;EAAA,YACsBiD,IAAIg/C,EAAGjiD,CAAHiiD,GAAOA,EAAGh/C,CAAHg/C,GAAO,CADxC;EAAA,YAC2CtgC,IAAIsgC,EAAGkD,CAAHlD,KAD/C,CAIKiD,MAAU1pC,KAAO0pC,IAAjBA,EAGL,OAAOllD,IAAIs/B,CAAX,GACE3d,EAAE3hB,CAAF2hB,IAAO3hB,GAAP2hB,CAEF,KAAK3hB,IAAI,CAAT,EAAYA,IAAIs/B,CAAhB,EAAuBt/B,GAAvB,EACE2hB,EAAE3hB,CAAF2hB,IAAOA,EAAE1e,IAAI6gD,IAAQ7gD,IAAIuY,EAAIxb,IAAIklD,CAAR1pC,CAAJvY,IAAuBkb,IAAIwD,EAAE3hB,CAAF2hB,CAA3B1e,CAAd0e,CAAPA,EACAA,EAAE1e,CAAF0e,IAAOxD,CADPwD,EAKDsgC,EAAGh9B,CAAHg9B,GAAO,UAASt5B,CAAT;EAIN,eAFA,IAAIxK,CAAJ,EAAOje,IAAI,CAAX,EACIF,IAAIiiD,EAAGjiD,CADX,EACciD,IAAIg/C,EAAGh/C,CADrB,EACwB0e,IAAIsgC,EAAGkD,CAC/B,EAAOx8B,GAAP,GACExK,IAAIwD,EAAE3hB,IAAI8jD,IAAQ9jD,IAAI,CAAlB2hB,CAAJxD,EACAje,IAAIA,IAAIo/B,CAAJp/B,GAAYyhB,EAAEmiC,KAASniC,EAAE3hB,CAAF2hB,IAAOA,EAAE1e,IAAI6gD,IAAQ7gD,IAAIkb,CAAlBwD,MAA0BA,EAAE1e,CAAF0e,IAAOxD,EAAnDwD,CADhBxD,CAIF,OADA8jC,EAAGjiD,CAAHiiD,GAAOjiD,CAAPiiD,EAAUA,EAAGh/C,CAAHg/C,GAAOh/C,CAAjBg/C,EACO/hD,CAAP;aAICo/B;EA7FQ,OAAA,CAAS9jB,CAAT,CATX;EAAA,UAaI8mC,IAAO;EAIT,aAHA,IAAI1gD,IAAIqjD,EAAKhgC,CAALggC,CAAOxB,CAAPwB,CAAR,EACIroC,IAAI+mC,CADR,EAEIhkD,IAAI,CACR,EAAOiC,IAAIgiD,CAAX,GACEhiD,KAAKA,IAAIjC,KAAK2/B,CAAd19B,EACAgb,KAAK0iB,CADL19B,EAEAjC,IAAIslD,EAAKhgC,CAALggC,CAAO,CAAPA,CAFJrjD,CAIF,OAAOA,KAAKiiD,CAAZ,GACEjiD,KAAK,CAALA,EACAgb,KAAK,CADLhb,EAEAjC,OAAO,CAFPiC,CAIF,QAAQA,IAAIjC,KAAKid,CAAjB;SA3BF,CAsCA,OARA0lC,EAAK/1B,KAAL+1B,GAAa;EAAa,eAAmB,IAAZ2C,EAAKhgC,CAALggC,CAAO,CAAPA,CAAP;SAA1B3C,EACAA,EAAKE,KAALF,GAAa;EAAa,eAAO2C,EAAKhgC,CAALggC,CAAO,CAAPA,IAAY,UAAnB;SAD1B3C,EAEAA,EAAKC,MAALD,GAAcA,CAFdA,EAKA6B,EAAOI,EAASU,EAAKE,CAAdZ,CAAPJ,EAAyBpwC,CAAzBowC,CALA7B,GAQQ0B,EAAQoB,IAARpB,IAAgBC,CAAhBD,IACJ,UAAS1B,CAAT,EAAe70B,CAAf,EAAqB43B,CAArB,EAAmChD,CAAnC;EAUE,eATIA,MAEEA,EAAM8C,CAAN9C,IAAWX,EAAKW,CAALX,EAAYuD,CAAZvD,CAAXW,EAEJC,EAAKD,KAALC,GAAa;EAAa,iBAAOZ,EAAKuD,CAALvD,IAAAA,CAAP;WAJxBW,GASAgD,KAAgB9B,EAAKG,CAALH,IAAgBjB,CAAhBiB,EAA6B91B,CAA7C43B,IAIQ/C,CAJZ;WAMNA,GACA4B,GACA,YAAYF,CAAZ,GAAsBA,EAAQzC,MAA9B,GAAwCt8C,QAAQs+C,GAChDS,EAAQ3B,MApBR;EAuEF,cAASX,CAAT,CAAc19C,CAAd,EAAiBma,CAAjB;EAIE,aAHAA,EAAEne,CAAFme,GAAMna,EAAEhE,CAARme,EACAA,EAAElb,CAAFkb,GAAMna,EAAEf,CADRkb,EAEAA,EAAEgnC,CAAFhnC,GAAMna,EAAEmhD,CAAFnhD,CAAIlB,KAAJkB,EAFNma,EAGOA,CAAP;EAsBF,cAASgmC,CAAT,CAAgB12B,CAAhB,EAAsBjS,CAAtB;EAEE,WADA,IAA4B8pC,CAA5B,EAAIC,IAAa93B,IAAO,EAAxB,EAAmCxqB,IAAI,CACvC,EAAOA,IAAIsiD,EAAWlmD,MAAtB,GACEmc,EAAIsoC,IAAO7gD,CAAXuY,IACEsoC,KAASwB,KAAyB,KAAhB9pC,EAAIsoC,IAAO7gD,CAAXuY,KAAsB+pC,EAAWpD,UAAXoD,CAAsBtiD,GAAtBsiD,CAD1C/pC,CAGF,OAAO+oC,EAAS/oC,CAAT+oC,CAAP;EA8BF,cAASA,CAAT,CAAkB5lD,CAAlB;EACE,aAAOkb,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0B,CAA1BA,EAA6Blb,CAA7Bkb,CAAP;EAgBF,SAhIA0pC,EAAK,SAASG,CAAdH,IAAyBQ,CAAzBR,EA0HAY,EAAOZ,EAAK9jD,MAAL8jD,EAAPY,EAAsBpwC,CAAtBowC,CA1HAZ,EAgII/B,EAAsCiB,OAA1C,EAAmD;EACjDjB,eAAAA,GAAiBuC,CAAjBvC,CAEA;EACEgC,YAAaiC,QAAQ,QAARA,CAAbjC;EACA,OAFF,CAEE,OAAO3tC,CAAP;OALJ;EAhNA,GAAA,GAAA,EA6NErW,IA7NF;KC4BAkmD,WAAG/C,IAAH+C,GAAU/C,IAAV+C,EACAA,WAAG5C,MAAH4C,GAAY5C,MADZ
4C,EAEAA,WAAG3C,MAAH2C,GAAY3C,MAFZ2C,EAGAA,WAAGvC,SAAHuC,GAAevC,SAHfuC,EAIAA,WAAGrC,OAAHqC,GAAarC,OAJbqC,EAKAA,WAAGpC,MAAHoC,GAAYpC,MALZoC,CAOA,mBAAiBA,UAAjB;EAAA,oCAAA;EAAA;ECpBE,YAAA,CACI/4C,CADJ,EACkBg5C,CADlB,EACwCziD,CADxC,EAEI0iD,CAFJ,EAEyBn4B,CAFzB;EAGExoB,SAAK0H,IAAL1H,GAAY0H,CAAZ1H,EACAA,KAAK4gD,MAAL5gD,GAAc0gD,CADd1gD,EAEAA,KAAK/B,KAAL+B,GAAa/B,CAFb+B,EAGAA,KAAK6gD,OAAL7gD,GAAeixB,GAHfjxB,EAIAA,KAAK2gD,SAAL3gD,GAAiB2gD,CAJjB3gD,EAKIA,KAAK2gD,SAAL3gD,KACFA,KAAK8gD,KAAL9gD,GAAaA,KAAK0H,IAAL1H,GAA0B,IAAdA,KAAK4gD,MAA9B5gD,EACAA,KAAK+gD,KAAL/gD,GAAaA,KAAK0H,IAAL1H,GAA0B,IAAdA,KAAK4gD,MAF5B5gD,CALJA,CASA,IAAMghD,IAAYx4B,KAAcjuB,KAAKC,MAALD,EAAhC,CACAyF,KAAKxF,MAALwF,GAAcihD,aAAgBD,EAAU//C,QAAV+/C,EAAhBC,CAAdjhD;EAgDJ,UA5CSkhD,WAAAA,UAAAA,GAAP;EACE,SAAK1iD,MAAMwB,KAAK6gD,OAAXriD,CAAL,EAA0B;EACxB,UAAMuF,IAAQ/D,KAAK6gD,OAAnB,CAEA,OADA7gD,KAAK6gD,OAAL7gD,GAAeixB,GAAfjxB,EACO+D,CAAP;EAKF,UAFA,IAAIo9C,CAAJ,EAAqBC,CAArB,EACIC,KAAU,CACd,GAAQA,CAAR,GAAiB;EACf,UAAIC,UAAJ;EAAA,UAAgBC,UAAhB;EAAA,UAA4B7kC,UAA5B,CACA;EAGEA,aAFA4kC,IAAK,IAAIthD,KAAKxF,MAALwF,EAAJ,GAAoB,KAEhBshD,KADTC,IAAK,IAAIvhD,KAAKxF,MAALwF,EAAJ,GAAoB,KACNuhD,CAAnB7kC;SAHF,QAISA,KAAK,CAALA,IAAgB,MAANA,CAJnB,EAMA,IAAM9T,IAAMrO,KAAKmC,IAALnC,EAAW,IAAMA,KAAK4G,GAAL5G,CAASmiB,CAATniB,IAAcmiB,CAA/BniB,CAAZ,CACA4mD,IAAUnhD,KAAK0H,IAAL1H,GAAYA,KAAK4gD,MAAL5gD,GAAcshD,CAAdthD,GAAmB4I,CAAzCu4C,EACAC,IAAUphD,KAAK0H,IAAL1H,GAAYA,KAAK4gD,MAAL5gD,GAAcuhD,CAAdvhD,GAAmB4I,CADzCu4C,EAGKnhD,KAAK2gD,SAAL3gD,KAAkBA,KAAKwhD,gBAALxhD,CAAsBmhD,CAAtBnhD,CAAlBA,KACHqhD,KAAU,CADPrhD,CAHLmhD;EAWF,YAHKnhD,KAAK2gD,SAAL3gD,KAAkBA,KAAKwhD,gBAALxhD,CAAsBohD,CAAtBphD,CAAlBA,KACHA,KAAK6gD,OAAL7gD,GAAeA,KAAKyhD,YAALzhD,CAAkBohD,CAAlBphD,CADZA,GAGEA,KAAKyhD,YAALzhD,CAAkBmhD,CAAlBnhD,CAAP;KA7BKkhD,EAiCCA,WAAAA,aAAAA,GAAR,UAAqBn9C,CAArB;EACE,WAAkB,QAAd/D,KAAK/B,KAAS,IAAuB,cAAf+B,KAAK/B,KAAb,GACT8F,CADS,GAGXxJ,KAAKuE,KAALvE,CAAWwJ,CAAXxJ,CAHP;KAlCK2mD,EAyCCA,WAAAA,iBAAAA,GAAR,UAAyBn9C,CAAzB;EACE,WAAOA,KAAS/D,KAAK8gD,KAAd/8C,IAAuBA,KAAS/D,KAAK+gD,KAA5C;KA1CKG,GA4CT;KDzCA,CElBA,eAAA,CAAkCxmD,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EACI,WAAAzP,OAAOC,IAAPD,CAAYqiB,EAAGxqB,KAAfmI,IAAuBM,QAAQ+hB,EAAG/hB,QAAlCN,EAA2CqiB,EAAGxoB,KAA9CmG,CAAA;KAFR+Y,IAGKsJ,OAHLtJ,EAJK,UAAC1K,CAAD;EACV,aAAQgU,IAAI;EAAM,eAAAhU,EAAG2S,OAAH3S,EAAA;WAAlB;KAGK0K,CAAP;EAmBF,cAAA,CACI0tB,CADJ,EACqBE,CADrB,EAEI2W,CAFJ,EAOIzjD,CAPJ;qBAOIA,gBACgB,QAAd8sC,CAAc,KAChBA,IAAaF,CADG,EAKlB,KAFA,IAAM8W,IAAOn8C,QAAQqlC,GAASE,EAAjBvlC,EAA8BvH,CAA9BuH,CAAb,EACM7I,IAAIkuC,KAAWE,CAAXF,GAAwBA,CAAxBA,GAAkCE,CAD5C,EAEShwC,IAAI,CAAb,EAAgBA,IAAI4B,CAApB,IAAyB5B,CAAzB,EACE4mD,EAAK7qC,GAAL6qC,CAAS,CAATA,EAAY5mD,CAAZ4mD,EAAe5mD,CAAf4mD,EAEF,IAAMpC,IAAMoC,EAAKC,QAALD,GAAgBE,IAAhBF,CAAqB9W,CAArB8W,EAA8B5W,CAA9B4W,CAAZ,CACA,IAAkB,QAAdD,CAAJ,EACE,OAAOnC,CAAP,CAEA,IAA0B,MAAtBmC,EAAWtnD,MAAf,EACE,OAAOoM,KAAKR,WAAWu5C,CAAXv5C,EAAgB,CAAhBA,CAALQ,GAA0Bk7C,EAAW,CAAXA,GAAe,GAAG,EAA5Cl7C,CAAP,CACK,IAA0B,MAAtBk7C,EAAWtnD,MAAf,EACL,OAAOoM,KACHR,WAAWA,WAAWu5C,CAAXv5C,EAAgB,CAAhBA,CAAXA,EAA+B,CAA/BA,CADGQ,GAEFk7C,EAAW,CAAXA,GAAeA,EAAW,CAAXA,GAAe,GAAG,EAF/Bl7C,CAAP,CAGK,IAA0B,MAAtBk7C,EAAWtnD,MAAf,EACL,OAAOoM,KACHR,WAAWA,WAAWA,WAAWu5C,CAAXv5C,EAAgB,CAAhBA,CAAXA,EAA+B,CAA/BA,CAAXA,EAA8C,CAA9CA,CADGQ,GAEFk7C,EAAW,CAAXA,GAAeA,EAAW,CAAXA,GAAeA,EAAW,CAAXA,GAAe,GAAG,EAF9Cl7C,CAAP,CAIA,MAAM,IAAIjN,KAAJ,CACF,uEAE8BmoD,EAAmBtnD,MAFjD,OADE,CAAN;EAsBN,uBAAA,CACI6B,CADJ,EACwByL,CADxB,EACkCk5C,CADlC,EAC8C3iD,CAD9C,EAEIuqB,CAFJ;EAGE,uBAFsB9gB,yBAAUk5C,QAEnB,QAAT3iD,
CAAS,IAAgC,WAAvBA,CAAtB,EACE,MAAM,IAAI1E,KAAJ,CAAU,2BAAyB0E,CAAnC,CAAN,CAKF,KAHA,IAAM6jD,IACF,IAAIZ,WAAJ,CAAgBx5C,CAAhB,EAAsBk5C,CAAtB,EAA8B3iD,CAA9B,GAAqC,CAArC,EAA4DuqB,CAA5D,CADJ,EAEMvT,IAAMzP,OAAOvJ,CAAPuJ,EAAcvH,CAAduH,CAFZ,EAGSzK,IAAI,CAAb,EAAgBA,IAAIka,EAAI/W,MAAJ+W,CAAW7a,MAA/B,EAAuCW,GAAvC,EACEka,EAAI/W,MAAJ+W,CAAWla,CAAXka,IAAgB6sC,EAAUC,SAAVD,EAAhB7sC,CAEF,OAAOA,EAAI2sC,QAAJ3sC,EAAP;EAsBF,0BAAA,CACIhZ,CADJ,EACwByL,CADxB,EACkCk5C,CADlC,EAC8C3iD,CAD9C,EAEIuqB,CAFJ;EAGE,uBAFsB9gB,yBAAUk5C,QAEnB,QAAT3iD,CAAS,IAAgC,WAAvBA,CAAtB,EACE,MAAM,IAAI1E,KAAJ,CAAU,2BAAyB0E,CAAnC,CAAN,CAKF,KAHA,IAAM6jD,IACF,IAAIZ,WAAJ,CAAgBx5C,CAAhB,EAAsBk5C,CAAtB,EAA8B3iD,CAA9B,GAAqC,CAArC,EAA2DuqB,CAA3D,CADJ,EAEMvT,IAAMzP,OAAOvJ,CAAPuJ,EAAcvH,CAAduH,CAFZ,EAGSzK,IAAI,CAAb,EAAgBA,IAAIka,EAAI/W,MAAJ+W,CAAW7a,MAA/B,EAAuCW,GAAvC,EACEka,EAAI/W,MAAJ+W,CAAWla,CAAXka,IAAgB6sC,EAAUC,SAAVD,EAAhB7sC,CAEF,OAAOA,EAAI2sC,QAAJ3sC,EAAP;EAsBF,wBAAA,CACIhZ,CADJ,EACwB+lD,CADxB,EACoCC,CADpC,EAEIhkD,CAFJ;qBACwB+jD,yBAAYC,yBAChChkD,eAEF,KADA,IAAMgX,IAAMzP,OAAOvJ,CAAPuJ,EAAcvH,CAAduH,CAAZ,EACSzK,IAAI,CAAb,EAAgBA,IAAIka,EAAI/W,MAAJ+W,CAAW7a,MAA/B,EAAuCW,GAAvC,EACEka,EAAI/W,MAAJ+W,CAAWla,CAAXka,IAAgBitC,YAAiBF,CAAjBE,EAAyBD,CAAzBC,CAAhBjtC,CAEF,OAAOA,EAAI2sC,QAAJ3sC,EAAP;EAYF,eAAA,CACIhZ,CADJ,EACwBkmD,CADxB,EAEIlkD,CAFJ;EAGE,MAAM/B,IAAOyH,cAAmB1H,CAAnB0H,CAAb;EAAA,MAEIzF,IAAS,IAFb,CAGA,IAAa,QAATD,CAAS,IAAkB,cAAVA,CAArB,EACEC,IAAS,IAAIC,YAAJ,CAAiBjC,CAAjB,CAATgC,CADF,KAEO,IAAc,YAAVD,CAAJ,EACLC,IAAS,IAAIE,UAAJ,CAAelC,CAAf,CAATgC,CADK,KAEA;EAAA,QAAc,WAAVD,CAAJ,EAGL,MAAM,IAAI1E,KAAJ,CAAU,uBAAqB0E,CAA/B,CAAN,CAFAC,IAAS,IAAIG,UAAJ,CAAenC,CAAf,CAATgC;EAKF,QAAK,IAAInD,IAAI,CAAb,EAAgBA,IAAImB,CAApB,EAA0BnB,GAA1B,EACEmD,EAAOnD,CAAPmD,IAAYikD,GAAZjkD,CAEF,OAAOkG,OAAOC,IAAPD,CAAYnI,CAAZmI,IAAoBlG,WAApBkG,EAA6BnG,CAA7BmG,CAAP;EAsBF,sBAAA,CACI6gB,CADJ,EAC0CsD,CAD1C,EAC8DC,CAD9D,EAEIF,CAFJ;qBAEIA,QACF,IAAMpD,IAAUP,gBAAgBM,CAAhBN,EAAwB,QAAxBA,EAAkC,aAAlCA,CAAhB;EAAA,MACM6rB,IAActrB,EAAQhpB,IAD5B;EAAA,MAEMkmD,IAAWl9B,EAAQ9lB,IAFzB,CAGA,IAAIoxC,IAAc,CAAlB,EACE,MAAM,IAAIj3C,KAAJ,CACF,iEACGi3C,CADH,MADE,CAAN,CAIF,IAAI4R,IAAW,CAAf,EACE,MAAM,IAAI7oD,KAAJ,CAAU,kDAAgD6oD,CAA1D,CAAN,CAEF55B,IAAOA,KAAQjuB,KAAKC,MAALD,EAAfiuB,CACA,IAAM65B,IAAwB,MAAbD,CAAa,GAAIl9B,EAAQ28B,IAAR38B,CAAa,CAAbA,GAAiB,CAAjBA,CAAJ,GAA0BA,CAAxD;EAAA,MACMjQ,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQyuC,WAARzuC,CAAoBwuC,CAApBxuC,EAA8ByU,CAA9BzU,EAA0C0U,CAA1C1U,EAAsD2U,CAAtD3U,CAAA;KADHsJ,IAEPklC,aAFOllC,CADZ,CAKA,OAAoB,MAAbilC,CAAa,GAAIntC,EAAI/P,IAAJ+P,EAAJ,GAAiBA,CAArC;EAoBF,iBAAA,CACIxO,CADJ,EACoC3O,CADpC,EACmD2wB,CADnD,EAEIC,CAFJ;qBACmDD,yBAC/CC,OACF,IAAM65B,IAAW59B,gBAAgBle,CAAhBke,EAAyB,SAAzBA,EAAoC,QAApCA,EAA8C,OAA9CA,CAAjB,CAGA,IAFA/gB,OAA+B,YAAnB2+C,EAAStkD,KAArB2F,EAAwC,kCAAxCA,GAEI9L,IAAQ,CAAZ,EACE,MAAM,IAAIyB,KAAJ,CAAU,mDAAiDzB,CAA3D,CAAN,CAKF,OAAOqlB,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ2uC,MAAR3uC,CAAe0uC,CAAf1uC,EAAyB/b,CAAzB+b,EAAgC4U,CAAhC5U,EAAyC6U,CAAzC7U,CAAA;KADRsJ,IAC6DolC,aAD7DplC,EAHM,UAAC1K,CAAD;EACX,aAAQ8vC,UAAU;EAAM,eAAAx2C,UAAUw2C,CAAVx2C,CAAA;WAAxB;KAEKoR,CAAP;EAyBF,qBAAA,CACI7D,CADJ,EAEIC,CAFJ;EAGE,uBADEA,QACEA,IAAc,CAAlB,EACE,MAAM,IAAIhgB,KAAJ,CACF,gEADE,CAAN,CAGF,OAAO4jB,IAAIE,MAAJF,CAAW3D,UAAX2D,CAAsB7D,CAAtB6D,EAA8B5D,CAA9B4D,CAAP;EAqBF,kBAAA,CACIslC,CADJ,EAEIjpD,CAFJ;;;;EAIE,cAAkB,OADZkpD,IAAO/9B,gBAAgB89B,CAAhB99B,EAAqB,KAArBA,EAA4B,UAA5BA,EAAwC,OAAxCA,GACJvlB,IAAS,IAAmB,MAAdsjD,EAAKtjD,IAA5B,EACE,MAAM,IAAI7F,KAAJ,CACF,0DAAwDmpD,EAAKtjD,IAA7D,MADE,CAAN,CAMF,IAHM+Z,IAAkBupC,EAAKzmD,KAALymD,CAAW7kD,KAAX6kD,CAAiB
,CAAjBA,EAAoB,CAApBA,CAAlBvpC,EAACihB,QAADjhB,EAASkhB,QAATlhB,GACArhB,IAAsB,MAAd4qD,EAAKtjD,IAAS,GAAI,CAAJ,GAAQsjD,EAAKzmD,KAALymD,CAAW,CAAXA,KAExB,KAAe,MAAV5qD,CAAjB,EACE,MAAM,IAAIyB,KAAJ,CACF,4DACqBzB,CAFnB,CAAN,CAOW,OAFP6qD,IAAYD,EAAKjoD,GAALioD,EAAZC,EACAC,IAAYF,EAAK/nD,GAAL+nD,EADZC,MAEaA,EAAU39C,IAAV29C,GAAN;EACA,iBADPloD,IAAO83B,MAAAA,GAAwB,CAAxBA,CAAP93B,MACamoD,EAAU59C,IAAV49C,GAAN;EAGb,cAHMjoD,IAAO43B,MAAAA,GAAwB,CAAxBA,CAAP53B,EACNgoD,EAAU3vC,OAAV2vC,EADMhoD,EAENioD,EAAU5vC,OAAV4vC,EAFMjoD,EAGa,cAAf+nD,EAAKzkD,KAAT;EACE,gBAAIxD,IAAM,CAANA,IAAWE,IAAM,CAArB,EACE,MAAM,IAAIpB,KAAJ,CACF,oFACgCkB,CADhC,QAAA,GACyCE,CADzC,OADE,CAAN;aAFJ,MAMO;EAAA,gBAAmB,YAAf+nD,EAAKzkD,KAAT,EAOL,MAAM,IAAI1E,KAAJ,CACF,oCAAkCmpD,EAAKzkD,KAAvC,2CADE,CAAN,CANA,IAAIxD,IAAM,CAANA,IAAWE,IAAM,GAArB,EACE,MAAM,IAAIpB,KAAJ,CACF,oFACkCkB,CADlC,QAAA,GAC2CE,CAD3C,OADE,CAAN;EAUS,sBAAM+nD,EAAK19C,IAAL09C,GAAN;EAIb,eAJM19C,IAAOutB,MAAAA,EAAPvtB,EACA69C,IAA4B,cAAfH,EAAKzkD,KAAU,GAAY,GAAZ,GAAkB,CAD9C+G,EAEA89C,IAAQ,IAAIC,iBAAJ,CAAsB1oB,IAAQD,CAARC,GAAiB,CAAvC,CAFRr1B,EAIGjK,IAAI,CAAb,EAAgBA,IAAIq/B,IAASC,CAA7B,IAAsCt/B,CAAtC,EACME,UAAAA,EAAG+kB,UAAH/kB,EAAMD,UAANC,EAASvB,UAATuB,EACU,MAAVnD,CAAU,IACZmD,IAAI+J,EAAKjK,CAALiK,IAAU69C,CAAd5nD,EACA+kB,IAAIhb,EAAKjK,CAALiK,IAAU69C,CADd5nD,EAEAD,IAAIgK,EAAKjK,CAALiK,IAAU69C,CAFd5nD,EAGAvB,IAAI,GAJQ,IAKO,MAAV5B,CAAU,IACnBmD,IAAI+J,EAAS,IAAJjK,CAALiK,IAAc69C,CAAlB5nD,EACA+kB,IAAIhb,EAAS,IAAJjK,CAAI,GAAI,CAAbiK,IAAkB69C,CADtB5nD,EAEAD,IAAIgK,EAAS,IAAJjK,CAAI,GAAI,CAAbiK,IAAkB69C,CAFtB5nD,EAGAvB,IAAI,GAJe,IAKA,MAAV5B,CAAU,KACnBmD,IAAI+J,EAAS,IAAJjK,CAALiK,IAAc69C,CAAlB5nD,EACA+kB,IAAIhb,EAAS,IAAJjK,CAAI,GAAI,CAAbiK,IAAkB69C,CADtB5nD,EAEAD,IAAIgK,EAAS,IAAJjK,CAAI,GAAI,CAAbiK,IAAkB69C,CAFtB5nD,EAGAvB,IAAIsL,EAAS,IAAJjK,CAAI,GAAI,CAAbiK,IAAkB69C,CAJH,CAXjB5nD,EAmBJ6nD,GADM9kD,IAAQ,IAAJjD,KACA,CAAV+nD,IAAevoD,KAAKuE,KAALvE,CAAWU,CAAXV,CAnBXU,EAoBJ6nD,EAAM9kD,IAAI,CAAV8kD,IAAevoD,KAAKuE,KAALvE,CAAWylB,CAAXzlB,CApBXU,EAqBJ6nD,EAAM9kD,IAAI,CAAV8kD,IAAevoD,KAAKuE,KAALvE,CAAWS,CAAXT,CArBXU,EAsBJ6nD,EAAM9kD,IAAI,CAAV8kD,IAAevoD,KAAKuE,KAALvE,CAAWb,CAAXa,CAtBXU,CAmCN,OAVc,QAAVzB,CAAU,KACZA,EAAO6gC,KAAP7gC,GAAe6gC,CAAf7gC,EACAA,EAAO4gC,MAAP5gC,GAAgB4gC,CADhB5gC,EAEMwpD,IAAMxpD,EAAOC,UAAPD,CAAkB,IAAlBA,CAFZA,EAGMypD,IAAY,IAAIC,SAAJ,CAAcJ,CAAd,EAAqBzoB,CAArB,EAA4BD,CAA5B,CAHlB5gC,EAIAwpD,EAAIG,YAAJH,CAAiBC,CAAjBD,EAA4B,CAA5BA,EAA+B,CAA/BA,CALY,GAOVN,MAASD,CAATC,IACFA,EAAK1vC,OAAL0vC,EARY,MAUPI,EAAP;;;EA4BF,kBAAA,CACIpoD,CADJ,EAC0BuB,CAD1B;EAEE,MAAMwqB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CACA1oB,IAAQmnD,uBAA4BnnD,CAA5BmnD,EAAmC38B,EAAGvqB,IAAtCknD,CAARnnD,EACA2H,OACI6iB,EAAGvqB,IAAHuqB,KAAY9iB,cAAmB1H,CAAnB0H,CADhBC,EAEI,gEAFJA,CADA3H,CAQA,OAAOkhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1O,OAAR0O,CAAgB4S,CAAhB5S,EAAoB5X,CAApB4X,CAAA;KADRsJ,IACqCsJ,OADrCtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGtN,OAAHsN,CAAWgU,EAAGxqB,KAAdwW,CAAA;WAAlB;KAEK0K,CAAP;EAkBF,kBAAA,CAAoCziB,CAApC,EAA0DoD,CAA1D;EACE,MAAM2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CACA,OAAOxf,QAAQshB,CAARthB,EAAYq9B,aAAkB/b,EAAGxqB,KAArBumC,EAA4B1kC,CAA5B0kC,EAAkC5kC,QAA9CuH,CAAP;EAcF,eAAA,CAAiCzK,CAAjC,EAAkDuD,CAAlD;EACE,MAAMwoB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQtO,IAARsO,CAAa4S,CAAb5S,EAAiB5V,CAAjB4V,CAAA;KAAhCsJ,IAA0DsJ,OAA1DtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGpM,KAAHoM,EAAA;WAAlB;KAEK0K,CAAP;EA4BF,eAAA,CAAiCziB,CAAjC,EAAkD6L,CAAlD;EACE,MAAMkgB,IAAK9B,gBAAgBjqB,C
AAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAEA/gB,OACI6iB,EAAGrnB,IAAHqnB,KAAYlgB,EAAKnM,MADrBwJ,EAEI,uCAAqC6iB,EAAGrnB,IAAxC,gCAAA,GACiCmH,CADjC,MAFJ3C,EAuDA,OAAOuZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQrN,IAARqN,CAAa4S,CAAb5S,EAAiBtN,CAAjBsN,CAAA;KAAhCsJ,IAAyDsJ,OAAzDtJ,EAnDM,UAAC1K,CAAD;EAiDX,aAAQgU,IAhDK;EACX,YAAI48B,IAAQt3C,UAAU0a,CAAV1a,CAAZ,CAGA,IAAgB,MAAZ0a,EAAGrnB,IAAP,EACE,KAAK,IAAIrE,IAAI,CAAb,EAAgBA,IAAIwL,EAAK,CAALA,CAApB,IAA+BxL,CAA/B,EACEsoD,IAAQA,EAAMh7C,GAANg7C,CAAU5wC,EAAG5U,KAAH4U,EAAU1X,IAAI0rB,EAAGxqB,KAAHwqB,CAAS,CAATA,EAAdhU,GAA6BgU,EAAGxqB,KAAHwqB,CAAS,CAATA,EAA7BhU,CAAV4wC,CAARA,CAFJ,KAIO,IAAgB,MAAZ58B,EAAGrnB,IAAP,EACL,KAASrE,IAAI,CAAb,EAAgBA,IAAIwL,EAAK,CAALA,CAApB,IAA+BxL,CAA/B,EACE,KAAK,IAAIiD,IAAI,CAAb,EAAgBA,IAAIuI,EAAK,CAALA,CAApB,IAA+BvI,CAA/B,EACEqlD,IAAQA,EAAMh7C,GAANg7C,CAAU5wC,EAAG5U,KAAH4U,EACb1X,IAAI0rB,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAazoB,IAAIyoB,EAAGxqB,KAAHwqB,CAAS,CAATA,EADRhU,GAEbgU,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAaA,EAAGxqB,KAAHwqB,CAAS,CAATA,EAFAhU,CAAV4wC,CAARA,CAHC,KAQA,IAAgB,MAAZ58B,EAAGrnB,IAAP,EACL,KAASrE,IAAI,CAAb,EAAgBA,IAAIwL,EAAK,CAALA,CAApB,IAA+BxL,CAA/B,EACE,KAASiD,IAAI,CAAb,EAAgBA,IAAIuI,EAAK,CAALA,CAApB,IAA+BvI,CAA/B,EACE,KAAK,IAAIyR,IAAI,CAAb,EAAgBA,IAAIlJ,EAAK,CAALA,CAApB,IAA+BkJ,CAA/B,EACE4zC,IAAQA,EAAMh7C,GAANg7C,CAAU5wC,EAAG5U,KAAH4U,EACb1X,IAAI0rB,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAazoB,IAAIyoB,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAahX,IAAIgX,EAAGxqB,KAAHwqB,CAAS,CAATA,EADzBhU,GAEbgU,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAaA,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAaA,EAAGxqB,KAAHwqB,CAAS,CAATA,EAFbhU,CAAV4wC,CAARA,CAJD,KAUA;EAAA,cAAgB,MAAZ58B,EAAGrnB,IAAP,EAgBL,MAAM,IAAI7F,KAAJ,CACF,6DACGktB,EAAGrnB,IADN,kBADE,CAAN,CAfA,KAASrE,IAAI,CAAb,EAAgBA,IAAIwL,EAAK,CAALA,CAApB,IAA+BxL,CAA/B,EACE,KAASiD,IAAI,CAAb,EAAgBA,IAAIuI,EAAK,CAALA,CAApB,IAA+BvI,CAA/B,EACE,KAASyR,IAAI,CAAb,EAAgBA,IAAIlJ,EAAK,CAALA,CAApB,IAA+BkJ,CAA/B,EACE,KAAK,IAAI1N,IAAI,CAAb,EAAgBA,IAAIwE,EAAK,CAALA,CAApB,IAA+BxE,CAA/B,EACEshD,IAAQA,EAAMh7C,GAANg7C,CAAU5wC,EAAG5U,KAAH4U,EAEZ1X,IAAI0rB,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAazoB,IAAIyoB,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAahX,IAAIgX,EAAGxqB,KAAHwqB,CAAS,CAATA,GACtC1kB,IAAI0kB,EAAGxqB,KAAHwqB,CAAS,CAATA,EAHQhU,GAKbgU,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAaA,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAaA,EAAGxqB,KAAHwqB,CAAS,CAATA,GAAaA,EAAGxqB,KAAHwqB,CAAS,CAATA,EAL1BhU,CAAV4wC,CAARA;EAeV,gBAAOA,CAAP;WAEF;KAEKlmC,CAAP;EAMF,gBAAA,CACIziB,CADJ,EAC4B8M,CAD5B,EAEIC,CAFJ;EAME,0BAJEA,QACF7D,OACwB,MAApB4D,EAASpN,MADbwJ,EAEI,kDAFJA,GAGOnB,IAAI/H,CAAJ+H,GAAQ+E,EAAR/E,EAAmBgF,CAAnBhF,CAAP;EAMF,gBAAA,CACI/H,CADJ,EAC4B8M,CAD5B,EAEIC,CAFJ;EAOE,0BALEA,QACF7D,OACwB,MAApB4D,EAASpN,MAAW,IAA4B,MAAvBoN,EAAS,CAATA,EAAYpN,MAAjB,IACO,MAAvBoN,EAAS,CAATA,EAAYpN,MAFpBwJ,EAGI,uDAHJA,GAIOnB,IAAI/H,CAAJ+H,EAAO+E,CAAP/E,EAAiBgF,CAAjBhF,CAAP;EAMF,gBAAA,CACI/H,CADJ,EAEI8M,CAFJ,EAGIC,CAHJ;EAQE,0BALEA,QACF7D,OACwB,MAApB4D,EAASpN,MAAW,IAA4B,MAAvBoN,EAAS,CAATA,EAAYpN,MAAjB,IACO,MAAvBoN,EAAS,CAATA,EAAYpN,MADI,IACmC,MAAvBoN,EAAS,CAATA,EAAYpN,MAFhDwJ,EAGI,uDAHJA,GAIOnB,IAAI/H,CAAJ+H,EAAO+E,CAAP/E,EAAiBgF,CAAjBhF,CAAP;EAMF,gBAAA,CACI/H,CADJ,EAEI8M,CAFJ,EAMIC,CANJ;EAYE,0BANEA,QACF7D,OACwB,MAApB4D,EAASpN,MAAW,IAA4B,MAAvBoN,EAAS,CAATA,EAAYpN,MAAjB,IACO,MAAvBoN,EAAS,CAATA,EAAYpN,MADI,IACmC,MAAvBoN,EAAS,CAATA,EAAYpN,MADxB,IAEO,MAAvBoN,EAAS,CAATA,EAAYpN,MAHpBwJ,EAII,uDAJJA,GAKOnB,IAAI/H,CAAJ+H,EAAO+E,CAAP/E,EAAiBgF,CAAjBhF,CAAP;EA0BF,cAAA,CACI/H,CADJ,EACqB8M,CADrB,EACwDC,CADxD;qBACwDA,OACtD,IAAMgf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CAEA,IAAgB,MAAZ8B,EAAGrnB,IAAP,EACE,MAAM,IAAI7F,KAAJ,CAAU,oD
AAV,CAAN,CAIF,IAAM2N,IAAQM,EAAS1F,GAAT0F,CAAa,UAAAiqC,CAAA;EAAK,WAAAA,EAAE,CAAFA,CAAA;KAAlBjqC,CAAd,CAIA,OAAO2V,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQpR,GAARoR,CAAY4S,CAAZ5S,EAAgBrM,CAAhBqM,EAA0BpM,CAA1BoM,CAAA;KADfsJ,IAC0DsJ,OAD1DtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAG5U,KAAH4U,CAASvL,CAATuL,EAAgBgU,EAAGxqB,KAAnBwW,CAAA;WAAlB;KAEK0K,CAAP;EAmBF,gBAAA,CAAkC2K,CAAlC,EAA6DhqB,CAA7D;qBAA6DA,OAC3D,IAAMu9C,IAAWC,qBAAqBxzB,CAArBwzB,EAA8B,SAA9BA,EAAyC,OAAzCA,CAAjB,CAGA,IADA13C,OAAYy3C,EAASjhD,MAATihD,IAAmB,CAA/Bz3C,EAAkC,sCAAlCA,GACwB,MAApBy3C,EAASjhD,MAAb,EACE,OAAOihD,EAAS,CAATA,EAAYr1C,UAAZq1C,CAAuBv9C,CAAvBu9C,CAAP,CAEF,IAAMj8C,IAAOi8C,EAAS,CAATA,EAAYj8C,IAAzB;EAAA,MACMnD,IAAQo/C,EAAS,CAATA,EAAYp/C,KAD1B;EAAA,MAEMgC,IAAQo9C,EAAS,CAATA,EAAYp9C,KAF1B,CAIA2F,OAAY9F,KAAQsB,CAApBwE,EAA0B,oCAA1BA,GAEAy3C,EAASj7C,OAATi7C,CAAiB,UAAAniC,CAAA;EACfqG,sBACItjB,CADJsjB,EACWrG,EAAEjd,KADbsjB,EAEI,uDAFJA;KADF87B,CAFAz3C,EAQAy3C,EAASj7C,OAATi7C,CAAiB,UAAAniC,CAAA;EACftV,WACI3F,MAAUib,EAAEjb,KADhB2F,EAEI,uDAFJA;KADFy3C,CARAz3C,CAaA,IAAM0/C,IAAkBjI,EAASv5C,GAATu5C,CAAa,UAAAniC,CAAA;EAAK,WAAAA,EAAElT,UAAFkT,CAAapb,CAAbob,CAAA;KAAlBmiC,CAAxB,CACA,OAAOl0C,OAAOm8C,CAAPn8C,EAAwBrJ,CAAxBqJ,CAAP;EAmDF,yBAAA,CACIzM,CADJ,EACqB2U,CADrB,EAC2CC,CAD3C;EAEE,MAAMmX,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,gBAAxBA,CAAX;EAAA,MACMzc,IAAOmH,EAAWssC,MAAXtsC,CAAkB,UAAC3V,CAAD,EAAIsB,CAAJ;EAAU,WAAAtB,IAAIsB,CAAJ;KAA5BqU,CADb,CAGAzL,OACI6iB,EAAGrnB,IAAHqnB,IAAW,IAAIpX,EAAWjV,MAD9BwJ,EAEI,mBAAiB6iB,EAAGrnB,IAApB,6CAAA,GACIiQ,EAAWjV,MAHnBwJ,GAKAA,OACI0L,EAAMlV,MAANkV,KAAiBD,EAAWjV,MADhCwJ,EAEI,qBACI0L,EAAMlV,MADV,gDAAA,GAEIiV,EAAWjV,MAJnBwJ,CALAA,EAWAA,OACI6iB,EAAGxqB,KAAHwqB,CAAS,CAATA,IAAcve,CAAdue,IAAuB,CAD3B7iB,EAEI,2BACI6iB,EAAGxqB,KAAHwqB,CAAS,CAATA,CADJ,wEAAA,GAEkCpX,EAAWrN,IAAXqN,CAAgB,KAAhBA,CAFlC,UAAA,GAEgEnH,CAJpEtE,CAXAA,CAqBA,OAAOuZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQtE,cAARsE,CAAuB4S,CAAvB5S,EAA2BxE,CAA3BwE,EAAuCvE,CAAvCuE,CAAA;KADRsJ,IACwDsJ,OADxDtJ,EAJM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGjD,cAAHiD,CAAkBpD,CAAlBoD,EAA8BnD,CAA9BmD,CAAA;WAAlB;KAGK0K,CAAP;EAqDF,yBAAA,CACIziB,CADJ,EACqB2U,CADrB,EAC2C7H,CAD3C;EAEE,MAAMif,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,gBAAxBA,CAAX,CAEA/gB,OACI6iB,EAAGrnB,IAAHqnB,IAAW,IAAIpX,EAAWjV,MAD9BwJ,EAEI,gBAAc6iB,EAAGrnB,IAAjB,oCAAA,GACIiQ,EAAWjV,MAHnBwJ,GAKAA,OACI4D,EAASpN,MAAToN,KAAoB6H,EAAWjV,MADnCwJ,EAEI,uBAAqB4D,EAASpN,MAA9B,oCAAA,GACIiV,EAAWjV,MAHnBwJ,CALAA,EAUAA,OACI6iB,EAAGxqB,KAAHwqB,CAASk1B,MAATl1B,CACI,UAAC/sB,CAAD,EAAIsB,CAAJ,EAAOD,CAAP;EACE,WAAIA,IAAI,CAAJA,IAASA,KAAKsU,EAAWjV,MAAzBW,GACKrB,MACDsB,IAAIwM,EAASzM,IAAI,CAAbyM,EAAgB,CAAhBA,CAAJxM,GAAyBwM,EAASzM,IAAI,CAAbyM,EAAgB,CAAhBA,KACtB6H,EAAWtU,IAAI,CAAfsU,KACJ,CAJHtU,GAMGrB,CANP;KAFN+sB,GAUI,CAVJA,CADJ7iB,EAYI,8BAA4B6iB,EAAGxqB,KAAHwqB,CAAS5oB,KAAT4oB,CAAe,CAAfA,CAA5B,oBAAA,GACIjf,EAASvG,QAATuG,EADJ,uCAAA,GAEI6H,EAAWpO,QAAXoO,EAdRzL,CAVAA,CA8BA,OAAOuZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQrE,cAARqE,CAAuB4S,CAAvB5S,EAA2BxE,CAA3BwE,EAAuCrM,CAAvCqM,CAAA;KADRsJ,IAC2DsJ,OAD3DtJ,EAJM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGlD,cAAHkD,CAAkBpD,CAAlBoD,EAA8BjL,CAA9BiL,CAAA;WAAlB;KAGK0K,CAAP;EAiBF,kBAAA,CAAoCziB,CAApC,EAAqDoD,CAArD;qBAAqDA,OAKnD,KAJA,IAWI49C,CAXJ,EAAMj1B,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,EACM+B,IAAMD,EAAGxqB,KAAHwqB,CAAS3oB,CAAT2oB,CADZ,EAEM5Y,IAAwBhS,MAAM4qB,EAAGrnB,IAAHqnB,GAAU,CAAhB5qB,EAAmBsG,IAAnBtG,CAAwB,CAAxBA,CAF9B,EAGI0nD,IAAW,CAHf,EAISxoD,IAAI,CAAb,EAAgBA,IAAI0rB,EAAGrnB,IAAvB,EAA6BrE,GAA7B,EA
CMA,MAAM+C,CAAN/C,KACF8S,EAAY01C,CAAZ11C,IAAwB4Y,EAAGxqB,KAAHwqB,CAAS1rB,CAAT0rB,CAAxB5Y,EACA01C,GAFExoD,EAON2gD,IAAa7/C,MAAM6qB,CAAN7qB,EAAWsG,IAAXtG,CAAgB,CAAhBA,CAAb6/C,CACA,IAAMx0C,IAAQrL,MAAM4qB,EAAGrnB,IAATvD,EAAesG,IAAftG,CAAoB,CAApBA,CAAd;EAAA,MACMK,IAAOuqB,EAAGxqB,KAAHwqB,CAAS5oB,KAAT4oB,EADb,CAEA,OAAOi1B,EAAW55C,GAAX45C,CAAe,UAAAh/B,CAAA;EACpBxgB,MAAK4B,CAAL5B,IAAawgB,CAAbxgB,CACA,IAAM2B,IAAQ4oB,EAAG5oB,KAAH4oB,CAASvf,CAATuf,EAAgBvqB,CAAhBuqB,CAAd,CAEA,OADAvf,EAAMpJ,CAANoJ,KAAewV,CAAfxV,EACOrJ,EAAMsH,OAANtH,CAAcgQ,CAAdhQ,CAAP;KAJK69C,CAAP;EA8BF,iBAAA,CACIhhD,CADJ,EAC0BoD,CAD1B,EACoCmI,CADpC,EACuDC,CADvD;qBAC0BpI,yBAAUmI,0BAAmBC,QACrD,IAAMugB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,QAAxBA,CAAX;EAAA,MAGM6+B,IAAcC,oBADpB3lD,KAAc,EACM2lD,EAA2Bh9B,EAAGrnB,IAA9BqkD,CAHpB;EAAA,MAIIC,IAAYj9B,CAJhB,CAKmB,QAAf+8B,CAAe,KACjBE,IAAYj9B,EAAG/c,SAAH+c,CAAa+8B,CAAb/8B,CADK,EAGnB,IAAMk9B,IAAeC,iBAAiB,CAAjBA,EAAoBn9B,EAAGrnB,IAAvBwkD,EAA6B,CAA7BA,CAArB;EAAA,MAKI7/C,IAAQoZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1N,MAAR0N,CACP6vC,CADO7vC,EACI8vC,CADJ9vC,EACkB5N,CADlB4N,EAC6B3N,CAD7B2N,CAAA;KADfsJ,IAGKumC,cAHLvmC,EAHC,UAAC1K,CAAD;EACX,aAAQixC,WAAW;EAAM,eAAAjxC,EAAGtM,MAAHsM,CAAU3U,CAAV2U,EAAgBxM,CAAhBwM,GAA4BvM,CAA5BuM,CAAA;WAAzB;KAEU0K,CALZ,CAaA,OAHmB,QAAfqmC,CAAe,KACjBz/C,IAAQA,EAAM2F,SAAN3F,CAAgBy/C,CAAhBz/C,CADS,GAGZA,CAAP;EAkBF,qBAAA,CACIrJ,CADJ,EAC0BoD,CAD1B;qBAC0BA,OACxB,IAAM2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,YAAxBA,CAAX,CAEA/gB,OAAY9F,KAAQ2oB,EAAGrnB,IAAvBwE,EAA6B,oCAA7BA,EACA,IAAMhG,IAAW6oB,EAAGxqB,KAAHwqB,CAAS5oB,KAAT4oB,EAAjB,CASA,OARI3oB,IAAO,CAAPA,KAEF8F,SACM6iB,EAAGrnB,IAAHqnB,GAAU,MAAM3oB,CADtB8F,EAEI,qCAAoC6iB,EAAGrnB,IAAHqnB,GAAU,EAA9C,OAAA,GAAqDA,EAAGrnB,IAAxD,MAFJwE,GAGA9F,IAAO2oB,EAAGrnB,IAAHqnB,GAAU3oB,CAAV2oB,GAAiB,CALtB3oB,GAOJF,EAASu5C,MAATv5C,CAAgBE,CAAhBF,EAAsB,CAAtBA,EAAyB,CAAzBA,CAPIE,EAQGqH,QAAQshB,CAARthB,EAAYvH,CAAZuH,CAAP;EAuCF,uBAAA,CACIzK,CADJ,EAC8BqV,CAD9B,EAEIxC,CAFJ;qBAEIA,YACF,IAAMkZ,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,cAAxBA,CAAX;EAAA,MAEMk/B,IAA8B,WAAft2C,CAAe,GAAUkZ,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAV,GAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAF5D;EAAA,MAGMq9B,IAA6B,WAAfv2C,CAAe,GAAUkZ,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAV,GAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAH3D;EAAA,MAIMs9B,IAA6B,WAAfx2C,CAAe,GAAUkZ,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAV,GAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAJ3D,CAwBA,OAlBA7iB,OACIigD,IAAc9zC,CAAd8zC,IAA2B,CAD/BjgD,EAEI,wEACEigD,CADF,UAAA,GACqB9zC,CADrB,gDAAA,GAEE0W,EAAGxqB,KAJT2H,GAMAA,OACIkgD,IAAa/zC,CAAb+zC,IAA0B,CAD9BlgD,EAEI,wEACEkgD,CADF,UAAA,GACoB/zC,CADpB,mDAAA,GAEM0W,EAAGxqB,KAJb2H,CANAA,EAYAA,OACKmgD,KAAch0C,IAAYA,CAA1Bg0C,KAAyC,CAD9CngD,EAEI,gDACImM,IAAYA,CADhB,aAAA,GAEIg0C,CAFJ,wCAAA,GAEoDt9B,EAAGxqB,KAJ3D2H,CAZAA,EAkBOuZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ7D,YAAR6D,CAAqB4S,CAArB5S,EAAyB9D,CAAzB8D,EAAoCtG,CAApCsG,CAAA;KADRsJ,IAC0DsJ,OAD1DtJ,CAAP;EAgCF,yBAAA,CACIziB,CADJ,EAC0B0W,CAD1B;;;;EAcgB,iBAZRqV,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,WAAxBA,CAAL8B,EACAu9B,IAAKr/B,gBAAgBvT,CAAhBuT,EAAmB,GAAnBA,EAAwB,WAAxBA,CADL8B,EAGN7iB,OACI6iB,EAAGxoB,KAAHwoB,KAAau9B,EAAG/lD,KADpB2F,EAEI,oDAAkD6iB,EAAGxoB,KAArD,cAAA,GACI+lD,EAAG/lD,KADP,OAFJ2F,CAHM6iB,EAQN7iB,OAAwB,MAAZ6iB,EAAGrnB,IAAfwE,EAA2B,uCAAqC6iB,EAAGxqB,KAAxC,OAA3B2H,CARM6iB,EAUN7iB,OAAwB,MAAZogD,EAAG5kD,IAAfwE,EAA2B,uCAAqCogD,EAAG/nD,KAAxC,OAA3B2H,CAVM6iB,MAYcA,EAAGzhB,IAAHyhB,GAAN;EACA,iBADRw9B,IAAQ9qC,MAAAA,EAAR8qC,MACcD,EAAGh/C,IAAHg/C,GAAN;EAId,eAJME,IAAQ/qC,MAAAA,EAAR+qC,EACAC,IAAO,IAAI9wC,GAAJ,CAAQ6wC,CAAR,CADPA,EAGFlhC,IAAa,CAH
XkhC,EAIGnpD,IAAI,CAAb,EAAgBA,IAAIkpD,EAAM7pD,MAA1B,EAAkCW,GAAlC,EACOopD,EAAKzwC,GAALywC,CAASF,EAAMlpD,CAANkpD,CAATE,KACHnhC,GADGmhC,CAOP,KAFM3+C,IAAS,IAAI1B,YAAJ,EAAkBkf,EAAlB,EAA+ByD,EAAGxoB,KAAlC,CAATuH,EACAiB,IAAU,IAAI3C,YAAJ,EAAkBkf,EAAlB,EAA+B,OAA/B,CADVxd,EAEGzK,IAAI,CAFPyK,EAEUisC,IAAI,CAApB,EAAuB12C,IAAIkpD,EAAM7pD,MAAjC,EAAyCW,GAAzC,EACOopD,EAAKzwC,GAALywC,CAASF,EAAMlpD,CAANkpD,CAATE,MACH3+C,EAAOtH,MAAPsH,CAAcisC,CAAdjsC,IAAmBy+C,EAAMlpD,CAANkpD,CAAnBz+C,EACAiB,EAAQvI,MAARuI,CAAegrC,CAAfhrC,IAAoB1L,CADpByK,EAEAisC,GAHG0S,EAMP,YAAQ3+C,EAAOo8C,QAAPp8C,IAAmBiB,EAAQm7C,QAARn7C,IAA3B;;;EA4BF,gBAAA,CACIxK,CADJ,EACwBgC,CADxB,EAEIC,CAFJ;EAGE,0BAFsBD,gBAEf,IAAI6F,YAAJ,CAAoB7H,CAApB,EAA2BgC,CAA3B,EAAkCC,CAAlC,CAAP;EAeF,eAAA,CAAiCxD,CAAjC,EAAuC6G,CAAvC;qBAAuCA,SACrCL,QAAQC,GAARD,CAAYxG,EAAEuG,QAAFvG,CAAW6G,CAAX7G,CAAZwG;EAGF,KAMaqO,iBAAiBiW,KAAI4+B,kCAAJ5+B,CAN9B;EAAA,IAOajgB,OAAOigB,KAAI6+B,cAAJ7+B,CAPpB;EAAA,IAQanf,QAAQmf,KAAI8+B,gBAAJ9+B,CARrB;EAAA,IASarf,SAASqf,KAAI++B,kBAAJ/+B,CATtB;EAAA,IAUaxV,eAAewV,KAAIg/B,8BAAJh/B,CAV5B;EAAA,IAWaxf,aAAawf,KAAIi/B,0BAAJj/B,CAX1B;EAAA,IAYak/B,MAAMl/B,KAAIm/B,YAAJn/B,CAZnB;EAAA,IAaahM,aAAagM,KAAIo/B,0BAAJp/B,CAb1B;EAAA,IAca88B,cAAc98B,KAAIq/B,4BAAJr/B,CAd3B;EAAA,IAeag9B,SAASh9B,KAAIs/B,kBAAJt/B,CAftB;EAAA,IAgBa/iB,MAAM+iB,KAAIu/B,YAAJv/B,CAhBnB;EAAA,IAiBaw/B,QAAQx/B,KAAIy/B,gBAAJz/B,CAjBrB;EAAA,IAkBa0/B,QAAQ1/B,KAAI2/B,gBAAJ3/B,CAlBrB;EAAA,IAmBa4/B,QAAQ5/B,KAAI6/B,gBAAJ7/B,CAnBrB;EAAA,IAoBa8/B,QAAQ9/B,KAAI+/B,gBAAJ//B,CApBrB;EAAA,IAqBaggC,OAAOhgC,KAAIigC,cAAJjgC,CArBpB;EAAA,IAsBakgC,eAAelgC,KAAImgC,8BAAJngC,CAtB5B;EAAA,IAuBaogC,gBAAgBpgC,KAAIqgC,gCAAJrgC,CAvB7B;EAAA,IAwBargB,UAAUqgB,KAAIsgC,oBAAJtgC,CAxBvB;EAAA,IAyBahW,iBAAiBgW,KAAIugC,kCAAJvgC,CAzB9B;EAAA,IA0Bapf,UAAUof,KAAIwgC,oBAAJxgC,CA1BvB;EAAA,IA2Bale,QAAQke,KAAIygC,gBAAJzgC,CA3BrB;EAAA,IA4Bahf,OAAOgf,KAAI0gC,cAAJ1gC,CA5BpB;EAAA,IA6Ba2gC,kBAAkB3gC,KAAI4gC,oCAAJ5gC,CA7B/B;EAAA,IA8Baje,UAAUie,KAAI6gC,oBAAJ7gC,CA9BvB;EAAA,IA+Ba8gC,iBAAiBC,eA/B9B,oBC3pC0BC,GAAqBC;EAE7C,OADA,IAAMhgD,MAAN,EACS1L,IAAI,CAAb,EAAgBA,IAAI0rD,EAASrsD,MAA7B,EAAqCW,GAArC,EACM0rD,EAAS1rD,CAAT0rD,KACFhgD,EAAQzK,IAARyK,CAAa1L,CAAb0L,CADEggD,CAKN,IAAMC,IAAWlhD,OAAOghD,CAAPhhD,EAAkB,OAAlBA,CAAjB;EAAA,MAEM+5C,IAAM/5C,QAAQiB,EAAQrM,QAAQosD,EAAUpsD,OAAlCoL,EAA2C,OAA3CA,CAFZ,CAGA,KAASzK,IAAI,CAAb,EAAgBA,IAAI0L,EAAQrM,MAA5B,EAAoCW,GAApC,EAAyC;EACvC,QAAMomB,IAAMulC,EAASC,UAATD,CAAoBjgD,EAAQ1L,CAAR0L,CAApBigD,CAAZ;EAAA,QACMnkD,IAASxH,IAAIyrD,EAAUpsD,MAD7B,CAEAmlD,EAAIrhD,MAAJqhD,CAAWzoC,GAAXyoC,CAAep+B,CAAfo+B,EAAoBh9C,CAApBg9C;EAEF,UAAOA,EAAIqC,QAAJrC,EAAP;EC+FF,KAAMqH,6BAA6B,EAAnC;EAAA,IAGMC,yBAAyB,GAH/B;EAAA,IASaC,8BAA8B,GAT3C;EAAA;EAkWE,YAAA,CAAoBl2B,CAApB,EAAkDm2B,CAAlD;EACE,yBADgDA,SAA9B/mD,UAAAA,GAAA4wB,GAA8B5wB,mBAAAA,GAAA+mD,GApV1C/mD,gBAAAA,GAAc,IAAI+T,OAAJ,IAGd/T,oBAAAA,GAAkB,IAAIgnD,OAAJ,IAGlBhnD,eAAAA,OACAA,kBAAAA,GAAgB,GAahBA,iBAAAA,GAAe,GAEfA,mBAAAA,GAAiB,GA2TjBA,gBAAAA,OA84CAA,aAAAA,IAAW,GA14Cbmd,IAAIxG,GAAJwG,CAAQ,eAARA,IAA2B,CAA/B,EACE,MAAM,IAAI5jB,KAAJ,CAAU,uCAAV,CAAN,CAGF,IAAa,QAATq3B,CAAJ,EAAmB;EACjB,UAAMp4B,IAAKE,gBAAgBykB,IAAIxG,GAAJwG,CAAQ,eAARA,CAAhBzkB,CAAX,CACAsH,KAAK4wB,KAAL5wB,GAAa,IAAIinC,YAAJ,CAAiBzuC,CAAjB,CAAbwH,EACAA,KAAKxG,MAALwG,GAAcxH,EAAGgB,MADjBwG,EAEAA,KAAKinD,mBAALjnD,IAA2B,CAF3BA;OAFF,MAMEA,KAAKinD,mBAALjnD,IAA2B,CAA3BA,EACAA,KAAKxG,MAALwG,GAAc4wB,EAAMp4B,EAANo4B,CAASp3B,MADvBwG,CAGEmd,IAAIxG,GAAJwG,CAAQ,sBAARA,MAGFnd,KAAKknD,uBAALlnD,GACKlG,OAAOgmD,MAAPhmD,CAAcsgC,MAAdtgC,GAAuBA,OAAOgmD,MAAPhmD,CAAcugC,KAArCvgC,GACAA,OAAOqtD,gBADPrtD,GAED+sD,sBANF1pC,GAQJnd,KAAKonD,cAALpnD,GAAsB,IAAIw1C,cAAJ,CAAm
Bx1C,KAAK4wB,KAAxB,CARlBzT;EAy/CR,UAl0DEkqC,WAAAA,SAAAA,GAAA,UAAS3iD,CAAT,EAAyBzI,CAAzB,EAA0CgC,CAA1C;EACE,QAAI+B,KAAK2tC,OAAL3tC,CAAa0T,GAAb1T,CAAiB0E,CAAjB1E,CAAJ,EACE,MAAM,IAAIzG,KAAJ,CAAU,mCAAV,CAAN,CAEFyG,KAAK2tC,OAAL3tC,CAAa8W,GAAb9W,CAAiB0E,CAAjB1E,IACE/D,UACAgC,UACAC,QAAQ,MACRmd,SAAS,MACTisC,gBAAgB,MAChBnzB,UAAU,MACVuhB,OAAOhb,aAAagd,QACpBpjB,WAAU,GARZt0B;KAJFqnD,EAgBAA,WAAAA,aAAAA,GAAA,UAAa1/B,CAAb;EACE3nB,SAAK2tC,OAAL3tC,GAAe,IAAI4nB,WAAJ,CAAgBD,CAAhB,CAAf3nB;KAjBFqnD,EAoBAA,WAAAA,WAAAA,GAAA,UACI/tC,CADJ,EAEIC,CAFJ;EAGE,QAAc,QAAVD,CAAJ,EACE,MAAM,IAAI/f,KAAJ,CAAU,kDAAV,CAAN,CAEF,IAAM46B,KAA8B7a,EAAO8gB,QAAQ9gB,EAAO+gB,MAA1D;EAAA,QACM7Y,KAAYlI,EAAO8gB,QAAQ9gB,EAAO+gB,OAAO9gB,EAD/C,CAGA,MAAMD,aAAkBiuC,gBAAlBjuC,IACAA,aAAkBkuC,gBADlBluC,IAEAA,aAAkBmuC,iBAFlBnuC,IAGAA,aAAkB4pC,UAHxB,EAIE,MAAM,IAAI3pD,KAAJ,CACF,oIAEuB+f,EAActa,WAAdsa,CAA0B/a,IAH/C,CAAN,CAKF,IAAI+a,aAAkBiuC,gBAAtB,EAAwC;EACtC,UAAgC,QAA5BvnD,KAAK0nD,mBAAT,EAAsC;EACpC,aAAKvqC,IAAIxG,GAAJwG,CAAQ,YAARA,CAAL,EACE,MAAM,IAAI5jB,KAAJ,CACF,8DADE,CAAN,CAGF,IAA4B,eAAxBrB,SAASyvD,UAAb,EACE,MAAM,IAAIpuD,KAAJ,CACF,2KADE,CAAN,CAKFyG,KAAK0nD,mBAAL1nD,GACI9H,SAASC,aAATD,CAAuB,QAAvBA,EAAiCuB,UAAjCvB,CAA4C,IAA5CA,CADJ8H;EAGFA,YAAK0nD,mBAAL1nD,CAAyBxG,MAAzBwG,CAAgCq6B,KAAhCr6B,GAAwCsZ,EAAO+gB,KAA/Cr6B,EACAA,KAAK0nD,mBAAL1nD,CAAyBxG,MAAzBwG,CAAgCo6B,MAAhCp6B,GAAyCsZ,EAAO8gB,MADhDp6B,EAEAA,KAAK0nD,mBAAL1nD,CAAyB4nD,SAAzB5nD,CACIsZ,CADJtZ,EACY,CADZA,EACe,CADfA,EACkBsZ,EAAO+gB,KADzBr6B,EACgCsZ,EAAO8gB,MADvCp6B,CAFAA,EAIAsZ,IAAStZ,KAAK0nD,mBAAL1nD,CAAyBxG,MAJlCwG;EAMF,SAAM6nD,IAAkB7nD,KAAK8nD,gBAAL9nD,CAAsBm0B,CAAtBn0B,EAAgC,OAAhCA,CAAxB,CAEAA,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB6nD,EAAgBnjD,MAAjC1E,EAAyC01C,KAAzC11C,GAAiD06B,aAAa8c,MAA9Dx3C,EACAA,KAAK4wB,KAAL5wB,CAAW+nD,wBAAX/nD,CACIA,KAAKgoD,UAALhoD,CAAgB6nD,EAAgBnjD,MAAhC1E,CADJA,EAC6CsZ,CAD7CtZ,CADAA,CAGA,IAAMw/B,IAAU,IAAIyoB,iBAAJ,CAAsBzmC,CAAtB,CAAhB;EAAA,QACMvM,IAAMjV,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6B6nD,EAA7B7nD,CADZ,CAKA,OAFAA,KAAKoX,WAALpX,CAAiB6nD,EAAgBnjD,MAAjC1E,GAEOiV,CAAP;KArEFoyC,EAwEQA,WAAAA,iBAAAA,GAAR,UAAyBprD,CAAzB,EAA0CgC,CAA1C;EACE,QAAMyG,MAAN,CAEA,OADA1E,KAAK+W,QAAL/W,CAAc0E,CAAd1E,EAAsB/D,CAAtB+D,EAA6B/B,CAA7B+B,KACQ0E,WAAQzI,UAAOgC,UAAvB;KA3EFopD,EA8EAA,WAAAA,MAAAA,GAAA,UAAM3iD,CAAN,EAAsBxG,CAAtB;EACE,QAAc,QAAVA,CAAJ,EACE,MAAM,IAAI3E,KAAJ,CAAU,kDAAV,CAAN,CAEF,IAAMo0C,IAAU3tC,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB0E,CAAjB1E,CAAhB;EAAA,QACOqb,aADP;EAAA,QACgB8Y,cADhB;EAAA,QAC0BuhB,WAD1B;EAAA,QACiCz3C,WADjC;EAAA,QACwCq2B,cADxC,CAEA,IAAc,gBAAVr2B,CAAJ,EACE,MAAM,IAAI1E,KAAJ,CACF,uEADE,CAAN,CAKa,QAAX8hB,CAAW,KAEbrb,KAAKmoD,cAALnoD,CAAoB0E,CAApB1E,EAA4Bqb,CAA5Brb,EAAqCm0B,CAArCn0B,EAA+C01C,CAA/C11C,EAAsDs0B,CAAtDt0B,GACA2tC,EAAQtyB,OAARsyB,GAAkB,IADlB3tC,EAEA2tC,EAAQxZ,QAARwZ,GAAmB,IAJN,GAMfA,EAAQ+H,KAAR/H,GAAgBjT,aAAa+c,MANd,EAOf9J,EAAQzvC,MAARyvC,GAAiBzvC,CAPF,EASV8B,KAAK+mD,cAAL/mD,IACHA,KAAKooD,WAALpoD,CAAiB0E,CAAjB1E,CAVa;KA1FjBqnD,EAuGAA,WAAAA,SAAAA,GAAA,UAAS3iD,CAAT;EACE,QAAMipC,IAAU3tC,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB0E,CAAjB1E,CAAhB;EAAA,QACO9B,YADP;EAAA,QACeD,WADf;EAAA,QACsBqpD,oBADtB,CAEA,IAAc,QAAVppD,CAAJ,EACE,OAAO8B,KAAKqoD,oBAALroD,CAA0B0E,CAA1B1E,CAAP,CAEF,IACIb,CADJ;EAAA,QAMIjE,CANJ;EAAA,QAAMotD,IAAyC,QAArBtoD,KAAKuoD,YAA/B,EAEID,MACFnpD,IAAQM,YAAYC,GAAZD,EADN6oD,GAKU,gBAAVrqD,KAGF/C,IAASstD,uBAFUlB,EAAe3hC,IAAf2hC,CAAoBjnD,QAApBinD,EAEVkB,EADUlB,EAAe1hC,IAAf0hC,CAAoBjnD,QAApBinD,EACVkB,IAETttD,IAAS8E,KAAKyoD,oBAALzoD,CAA0B0E,CAA1B1E,EAMX,OAHIsoD,MACFtoD,KAAK0oD,cAAL1oD,IAAuBP,YAAYC,GAAZD,KAAoBN,CADzCmpD,GAGGtoD,KAAKqoD,oBAALroD,CAA0B0E,CAA1B1E,EAAkC9E,CAAlC
8E,CAAP;KA/HFqnD,EAkIMA,WAAAA,KAAAA,GAAN,UAAW3iD,CAAX;;;;EACE,gBAAI1E,KAAK2oD,WAAL3oD,CAAiB0T,GAAjB1T,CAAqB0E,CAArB1E,CAAJ,EAEE,OADM4oD,IAAc5oD,KAAK2oD,WAAL3oD,CAAiB2W,GAAjB3W,CAAqB0E,CAArB1E,CAAd4oD,MACC,IAAIzrD,OAAJ,CAAwB,UAAAC,CAAA;EAAW,qBAAAwrD,EAAY5sD,IAAZ4sD,CAAiBxrD,CAAjBwrD,CAAA;eAAnC,EAAP,CAIF,IAFMjb,IAAU3tC,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB0E,CAAjB1E,CAAV2tC,EACCtyB,IAA6BsyB,SAD9BA,EACUzvC,IAAoByvC,QAD9BA,EACkBxZ,IAAYwZ,UAD9BA,EAEQ,QAAVzvC,CAAJ,EACE,WAAO8B,KAAKqoD,oBAALroD,CAA0B0E,CAA1B1E,EAAP,CAKF,IAFAA,KAAK2oD,WAAL3oD,CAAiB8W,GAAjB9W,CAAqB0E,CAArB1E,IAAAA,IAEKmd,IAAIxG,GAAJwG,CAAQ,8BAARA,KAC4B,MAA7BA,IAAIxG,GAAJwG,CAAQ,eAARA,CADJ,EAEE,MAAM,IAAI5jB,KAAJ,CACF,8FADE,CAAN,CAUF,OAJMmsC,IAAkB1lC,KAAK4wB,KAAL5wB,CAAW6oD,4BAAX7oD,CACpBqb,CADoBrb,EACXm0B,EAAS,CAATA,CADWn0B,EACEm0B,EAAS,CAATA,CADFn0B,CAAlB0lC,MAIA1lC,KAAK4wB,KAAL5wB,CAAW8oD,qBAAX9oD,GAAN;EAqBA,mBArBAmZ,MAAAA,IAKE7a,IADEonC,aAA2BqjB,YAA3BrjB,GACK1lC,KAAKyoD,oBAALzoD,CAA0B0E,CAA1B1E,CADL0lC,GAGK1lC,KAAK4wB,KAAL5wB,CAAWgpD,+BAAXhpD,CACH0lC,CADG1lC,EACcm0B,EAAS,CAATA,CADdn0B,EAC2Bm0B,EAAS,CAATA,CAD3Bn0B,CAPTmZ,EAUM8vC,IAAYjpD,KAAKqoD,oBAALroD,CAA0B0E,CAA1B1E,EAAkC1B,CAAlC0B,CAVlBmZ,EAYM+vC,IAAclpD,KAAK2oD,WAAL3oD,CAAiB2W,GAAjB3W,CAAqB0E,CAArB1E,CAZpBmZ,EAaAnZ,KAAK2oD,WAAL3oD,CAAiBmX,MAAjBnX,CAAwB0E,CAAxB1E,CAbAmZ,EAgBA+vC,EAAY9oD,OAAZ8oD,CAAoB,UAAA9rD,CAAA;EAAW,qBAAAA,EAAQ6rD,CAAR7rD,CAAA;eAA/B8rD,CAhBA/vC,EAiBInZ,KAAKmpD,eAALnpD,CAAqB0T,GAArB1T,CAAyB0E,CAAzB1E,MACFA,KAAKmpD,eAALnpD,CAAqBmX,MAArBnX,CAA4B0E,CAA5B1E,GACAA,KAAKoX,WAALpX,CAAiB0E,CAAjB1E,CAFEA,CAjBJmZ,MAqBO8vC,EAAP;;;KAhLF5B,EAmLQA,WAAAA,qBAAAA,GAAR,UAA6B3iD,CAA7B;EACQ,QAAAyU,uBAAA;EAAA,QAACld,WAAD;EAAA,QAAQgC,WAAR;EAAA,QAAeod,aAAf;EAAA,QAAwB8Y,cAAxB,CACN,IAAIhX,IAAIxG,GAAJwG,CAAQ,8BAARA,CAAJ,EAA6C;EAC3C,UAAInd,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB0E,CAAjB1E,EAAyBs0B,QAA7B,EAAuC;EACrC,YAAMzI,IAAQloB,cAAmB1H,EAAM4B,KAAN5B,CAAY,CAAZA,EAAeA,EAAM7B,MAAN6B,GAAe,CAA9BA,CAAnB0H,CAAd;EAAA,YACMyB,IAAOnJ,EAAM7B,MAAN6B,GAAe,CAAfA,GAAmBA,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,CAAnBA,GAA6C,CAD1D;EAAA,YAEMmqC,IAAOnqC,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,CAFb,CAGA,OAAO+D,KAAK4wB,KAAL5wB,CAAWopD,+BAAXppD,CACHqb,CADGrb,EACM6rB,CADN7rB,EACaoF,CADbpF,EACmBomC,CADnBpmC,EACyBm0B,EAAS,CAATA,CADzBn0B,EACsCm0B,EAAS,CAATA,CADtCn0B,CAAP;EAGA,cAAOA,KAAK4wB,KAAL5wB,CAAWqpD,sCAAXrpD,CACHqb,CADGrb,EACMm0B,EAAS,CAATA,CADNn0B,EACmBm0B,EAAS,CAATA,CADnBn0B,CAAP;EAKJ,SAAMspD,IAAYtpD,KAAK8nD,gBAAL9nD,CAAsB/D,CAAtB+D,EAA6B,SAA7BA,CAAlB,CAEAspD,EAAUptD,IAAVotD,GAAiBrnD,cAAchG,CAAdgG,CAAjBqnD,EACAtpD,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBspD,EAAU5kD,MAA3B1E,EAAmC01C,KAAnC11C,GAA2C06B,aAAa6c,QADxD+R,CAEA,IAAM9pB,IAAU,IAAI+pB,kBAAJ,CAAuBttD,CAAvB,CAAhB,CAEA+D,KAAKkoD,aAALloD,CACIw/B,CADJx/B,KACe/D,UAAOgC,UAAOyG,YAD7B1E,EACuCspD,CADvCtpD,EACkD,IADlDA,GADkB,CAClBA,EAEA,IAAMwpD,IAAUxpD,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBspD,EAAU5kD,MAA3B1E,CAAhB;EAAA,QACM1B,IAAO0B,KAAK4wB,KAAL5wB,CAAWypD,+CAAXzpD,CACTwpD,EAAQnuC,OADCrb,EACQwpD,EAAQr1B,QAARq1B,CAAiB,CAAjBA,CADRxpD,EAC6BwpD,EAAQr1B,QAARq1B,CAAiB,CAAjBA,CAD7BxpD,CADb,CAKA,OAFAA,KAAKoX,WAALpX,CAAiBspD,EAAU5kD,MAA3B1E,GAEO1B,CAAP;KA/MF+oD,EAkNMA,WAAAA,KAAAA,GAAN,UAAWtoD,CAAX;;;;EA6BmB,mBA5BX2qD,IAAkB1pD,KAAKuoD,YAAvBmB,EACAC,MADAD,EAGFE,KAAgB,CAHdF,EAIyB,QAA3B1pD,KAAK6pD,kBAAsB,IAC7B7pD,KAAK6pD,kBAAL7pD,GAA0B2pD,CAA1B3pD,EACA4pD,KAAgB,CAFa,IAI7B5pD,KAAKuoD,YAALvoD,CAAkBhE,IAAlBgE,CAAuB2pD,CAAvB3pD,CARI0pD,EAUN1pD,KAAKuoD,YAALvoD,GAAoB2pD,CAVdD,EAYN3qD,GAZM2qD,EAeAI,IACFC,QAAa/pD,KAAKuoD,YAALvoD,CAAkB8B,GAAlB9B,CAAsB,UAAC2X,CAAD;EAAmB,qBAAAA,EAAEH,KAAF;eAAzCxX,CAAb+pD,EACKz8C,M
ADLy8C,CACY,UAAApyC,CAAA;EAAK,qBAAK,QAALA,CAAA;eADjBoyC,CAhBEL,EAkBAM,IACFD,QAAa/pD,KAAKuoD,YAALvoD,CAAkB8B,GAAlB9B,CAAsB,UAAC2X,CAAD;EAAmB,qBAAAA,EAAEpZ,IAAF;eAAzCyB,CAAb+pD,EACKz8C,MADLy8C,CACY,UAAApyC,CAAA;EAAK,qBAAK,QAALA,CAAA;eADjBoyC,CAnBEL,EAsBN1pD,KAAKuoD,YAALvoD,GAAoB0pD,CAtBdA,EAwBFE,MACF5pD,KAAK6pD,kBAAL7pD,GAA0B,IADxB4pD,CAxBEF,MA4BiBvsD,QAAQ4K,GAAR5K,CAAY2sD,CAAZ3sD,EAAN;EAcjB,mBAdM0D,IAAWsY,MAAAA,EAAXtY,EAEAoU,MACJg1C,cAAcjqD,KAAKiqD,cACnBvB,gBAAgB1oD,KAAK0oD,gBACrB7nD,UAAUqpD,IAASrpD,CAATqpD,GACVxpD,qBAAqB;EACjB,uBAAAG,EAASiB,GAATjB,CAAa,UAAC8W,CAAD,EAAI5c,CAAJ;EAAU,2BAAEwD,MAAMyrD,EAA0BjvD,CAA1BivD,GAA8BG,IAAIxyC,GAA1C;mBAAvB9W,EACKiB,GADLjB,CACS,UAAA8W,CAAA;EAAK,yBAAGA,EAAEpZ,IAAFoZ,OAAAA,GAAWA,EAAEwyC,EAAhB;mBADdtpD,EAEKmB,IAFLnB,CAEU,IAFVA,CAAA;mBAGJ6Y,QAAQ,MAVJ7Y,EAYNb,KAAKiqD,YAALjqD,GAAoB,CAZda,EAaNb,KAAK0oD,cAAL1oD,GAAsB,CAbhBa,MAcCoU,EAAP;;;KA7PFoyC,EA+PAA,WAAAA,OAAAA,GAAA;EACE,aAAQ+C,aAAY,GAAOC,eAAerqD,KAAKqqD,eAA/C;KAhQFhD,EAoQQA,WAAAA,WAAAA,GAAR;EACE,WAAIlqC,IAAIxG,GAAJwG,CAAQ,8CAARA,IAA0D,CAA1DA,GACKnd,KAAK4wB,KAAL5wB,CAAWmpC,UAAXnpC,EADLmd,KAGImtC,SAAS7qD,YAAYC,GAAZD,IAAmB8qD,OAAO,MAH3C;KArQFlD,EA2QQA,WAAAA,SAAAA,GAAR,UAAiB7vC,CAAjB;EACE,WAAI2F,IAAIxG,GAAJwG,CAAQ,8CAARA,IAA0D,CAA1DA,IACFnd,KAAK4wB,KAAL5wB,CAAWopC,QAAXppC,IACOwX,CAFL2F,KAIH3F,EAAwB+yC,KAAxB/yC,GAAgC/X,YAAYC,GAAZD,EAAhC+X,EACMA,CALH2F,CAAJ;KA5QFkqC,EAoRcA,WAAAA,aAAAA,GAAd,UAA2B7vC,CAA3B;;;EACE,eAAI2F,IAAIxG,GAAJwG,CAAQ,8CAARA,IAA0D,CAA1DA,OACKnd,KAAK4wB,KAAL5wB,CAAWwqD,sBAAXxqD,CAAkCwX,CAAlCxX,EADLmd,QAGEstC,IAAajzC,GACD+yC,QAAQE,EAAWH,QAJrC;;;KArRFjD,EA4RAA,WAAAA,YAAAA,GAAA,UAAY3iD,CAAZ;EACE,SAAI1E,KAAKmpD,eAALnpD,CAAqB0T,GAArB1T,CAAyB0E,CAAzB1E,CAAJ,EAGA,IAAIA,KAAK2oD,WAAL3oD,CAAiB0T,GAAjB1T,CAAqB0E,CAArB1E,CAAJ,EACEA,KAAKmpD,eAALnpD,CAAqBqI,GAArBrI,CAAyB0E,CAAzB1E,EADF,KAIA,IAAIA,KAAK2tC,OAAL3tC,CAAa0T,GAAb1T,CAAiB0E,CAAjB1E,CAAJ,EAA8B;EACtB,UAAAmZ,uBAAA;EAAA,UAACkC,aAAD;EAAA,UAAU8Y,cAAV;EAAA,UAAoBuhB,WAApB;EAAA,UAA2B4R,oBAA3B;EAAA,UAA2ChzB,cAA3C,CAES,QAAXjZ,CAAW,IACbrb,KAAKmoD,cAALnoD,CAAoB0E,CAApB1E,EAA4Bqb,CAA5Brb,EAAqCm0B,CAArCn0B,EAA+C01C,CAA/C11C,EAAsDs0B,CAAtDt0B,CADa,EAGO,QAAlBsnD,CAAkB,KACpBA,EAAe3hC,IAAf2hC,CAAoBt0C,OAApBs0C,IACAA,EAAe1hC,IAAf0hC,CAAoBt0C,OAApBs0C,EAFoB,CAHP,EAOftnD,KAAK2tC,OAAL3tC,CAAamX,MAAbnX,CAAoB0E,CAApB1E,CAPe;;KAvSnBqnD,EAkTAA,WAAAA,WAAAA,GAAA,UAAW3iD,CAAX;EAEE,WADA1E,KAAKooD,WAALpoD,CAAiB0E,CAAjB1E,GACOA,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB0E,CAAjB1E,EAAyBqb,OAAhC;KApTFgsC,EAoVQA,WAAAA,cAAAA,GAAR;EACE,WAAKlqC,IAAIxG,GAAJwG,CAAQ,mBAARA,KAIkB,QAAnBnd,KAAK0qD,UAAc,KACrB1qD,KAAK0qD,UAAL1qD,GAAkBmd,IAAIG,WAAJH,CAAgB,KAAhBA,CADG,GAIhBnd,KAAK0qD,UARPvtC,IACI,IADT;KArVFkqC,EAuWQA,WAAAA,mBAAAA,GAAR,UACI51C,CADJ,EACsBk5C,CADtB;EAAA,gBAAA,CAEE,wBADoBA,iCACW,QAAxB3qD,KAAK4qD,aAAL5qD,EAAwB,IAC3ByR,EAAOwH,KAAPxH,CACI,UAAAE,CAAA;EAAS,aAA0C,QAA1ChR,EAAKgtC,OAALhtC,CAAagW,GAAbhW,CAAiBgR,EAAMjN,MAAvB/D,EAA+B0a,OAAW,IAC/C1J,EAAMzV,IAANyV,GAAag5C,CADR;OADbl5C,CADJ;KAzWF41C,EA+WAA,WAAAA,gBAAAA,GAAA;EACE,WAAOrnD,KAAK4wB,KAAZ;KAhXFy2B,EAkXAA,WAAAA,UAAAA,GAAA;EACE,WAAOrnD,KAAKxG,MAAZ;KAnXF6tD,EAsXAA,WAAAA,QAAAA,GAAA,UAA0B1hC,CAA1B,EAAmCC,CAAnC;EACE,QAAM1qB,IAAS8E,KAAK6qD,eAAL7qD,CAAqB2lB,EAAK1pB,KAA1B+D,EAAiC,WAAjCA,CAAf,CAUA,OATmBA,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB9E,EAAOwJ,MAAxB1E,EAIRsnD,cAJQtnD,KAKjB2lB,MAAMxI,IAAIE,MAAJF,CAAWO,IAAXP,CAAgBwI,EAAKtf,KAALsf,EAAhBxI,GACNyI,MAAMzI,IAAIE,MAAJF,CAAWO,IAAXP,CAAgByI,EAAKvf,KAALuf,EAAhBzI,GANWnd,EASZ9E,CAAP;KAjYFmsD,EAmYAA,WAAAA,KAAAA,GAAA,UAAuB11C,CAAvB;EAEE,WADmB3R,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB2R,EAAMjN,MAAvB1E,EACDsnD,cADCtnD,CA
Cc2lB,IADd3lB,CACmBqG,KADnBrG,EACnB;KArYFqnD,EAuYAA,WAAAA,KAAAA,GAAA,UAAuB11C,CAAvB;EAEE,WADmB3R,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB2R,EAAMjN,MAAvB1E,EACDsnD,cADCtnD,CACc4lB,IADd5lB,CACmBqG,KADnBrG,EACnB;KAzYFqnD,EA4YAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB,EAA8BwM,CAA9B,EAA+ChL,CAA/C;EACE,QAAI8D,KAAK8qD,kBAAL9qD,EAAyBtF,EAAzBsF,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgBnC,KAAhBmC,CAAsBtF,CAAtBsF,EAAyBkH,CAAzBlH,EAAgC9D,CAAhC8D,CAAP,CAGF,IAAMw/B,IAAU,IAAIyV,YAAJ,CAAiB/4C,CAAjB,CAAhB;EAAA,QACMkyC,IAAc5O,EAAQurB,kBAARvrB,CAA2Bt4B,CAA3Bs4B,CADpB,CAEA,OAAOx/B,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,EAAiC,IAAjCA,EAAuCouC,CAAvCpuC,CAAP;KAnZFqnD,EAsZAA,WAAAA,aAAAA,GAAA,UACI3sD,CADJ,EACUwM,CADV,EAC2B3D,CAD3B,EAC0ClE,CAD1C,EAEIuQ,CAFJ,EAEuBC,CAFvB,EAEwCqT,CAFxC,EAGIC,CAHJ,EAGyBC,CAHzB;EAIE,QAAIpjB,KAAK8qD,kBAAL9qD,EAAyBtF,EAAzBsF,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgB8P,YAAhB9P,CACHtF,CADGsF,EACAkH,CADAlH,EACOuD,CADPvD,EACYX,CADZW,EACqB4P,CADrB5P,EACgC6P,CADhC7P,EACyCkjB,CADzCljB,EACuDmjB,CADvDnjB,EAEHojB,CAFGpjB,CAAP,CAKI,IAAAmZ,yDAAA;EAAA,QAAC6xC,QAAD;EAAA,QAAa9uD,QAAb;EAAA,QAAmBqnB,QAAnB;EAAA,QAIAtnB,IAAQC,EAAKoR,MAALpR,CAAY,UAAC8a,CAAD,EAAI1c,CAAJ;EAAc,cAA+B,MAA/BipB,EAAWjC,OAAXiC,CAAmBjpB,CAAnBipB,CAAA;OAA1BrnB,CAJR,CAKN,IAAID,EAAM8jB,IAAN9jB,CAAW,UAAA6B,CAAA;EAAQ,aAAS,MAATA,CAAA;OAAnB7B,CAAJ,EACE,OAAOgX,SAAAA,EAAWhX,CAAXgX,CAAP,CAGF,IAAMusB,IACF,IAAIyrB,mBAAJ,CAAwBD,CAAxB,EAAoC3rD,CAApC,EAA6CnD,CAA7C,EAAmDqnB,CAAnD,CADJ,CAEA,OAAOvjB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA3aFqnD,EA8aAA,WAAAA,QAAAA,GAAA,UAA0B3sD,CAA1B,EAAgCoD,CAAhC;EACE,QAAM0hC,IAAU,IAAI0rB,cAAJ,CAAmBxwD,EAAEuB,KAArB,EAA4B6B,CAA5B,CAAhB,CACA,OAAOkC,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAhbFqnD,EAmbQA,WAAAA,eAAAA,GAAR,UAAyC3tD,CAAzC,EAA+CsB,CAA/C,EAAqD8C,CAArD;EAQE,QAAM0jB,IAAWg6B,iBAAiB9hD,EAAEuC,OAAOjB,EAAEiB,MAA5Bu/C,EAAoC19C,CAApC09C,CAAjB;EAAA,QACM2P,IAAMzxD,EAAEmoD,IAAFnoD,EAAQ,CAARA,EAAWuI,cAAcvI,EAAEuC,KAAFvC,CAAQmE,KAARnE,CAAcoE,CAAdpE,CAAduI,CAAXvI,CADZ;EAAA,QAEM0xD,IAAMpwD,EAAE6mD,IAAF7mD,EAAQ,CAARA,EAAWiH,cAAcjH,EAAEiB,KAAFjB,CAAQ6C,KAAR7C,CAAc8C,CAAd9C,CAAdiH,CAAXjH,CAFZ;EAAA,QAGMwkC,IAAU,IAAI6rB,aAAJ,CAAkBF,EAAIlvD,KAAtB,EAA6BmvD,EAAInvD,KAAjC,CAHhB,CAKA,OADY+D,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BmrD,GAAKC,EAAlCprD,EACDmF,OADCnF,CACOwhB,CADPxhB,CACZ;KAhcFqnD,EAmcAA,WAAAA,OAAAA,GAAA,UAAOv/B,CAAP,EAA0BhqB,CAA1B;EACE,QAAIkC,KAAK8qD,kBAAL9qD,CAAwB8nB,CAAxB9nB,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgBmH,MAAhBnH,CAAuB8nB,CAAvB9nB,EAAgClC,CAAhCkC,CAAP,CAGF,IAAuB,MAAnB8nB,EAAQ1tB,MAAZ,EACE,OAAO0tB,EAAQ,CAARA,CAAP,CAGF,KADA,IAAI5sB,IAAS4sB,EAAQ,CAARA,CAAb,EACS/sB,IAAI,CAAb,EAAgBA,IAAI+sB,EAAQ1tB,MAA5B,IAAsCW,CAAtC,EACEG,IAAS8E,KAAKsrD,cAALtrD,CAAoB9E,CAApB8E,EAA4B8nB,EAAQ/sB,CAAR+sB,CAA5B9nB,EAAwClC,CAAxCkC,CAAT9E,CAEF,OAAOA,CAAP;KA/cFmsD,EAkdAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4BsvD,GAA5B,CAAhB,CACA,OAAOvrD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KApdFqnD,EAudAA,WAAAA,YAAAA,GAAA,UACI3tD,CADJ,EACiBsB,CADjB,EAC8B2L,CAD9B,EAEIC,CAFJ;EAGE,QAAM8oC,IAAc/oC,IAAajN,EAAEuC,KAAFvC,CAAQ,CAARA,CAAbiN,GAA0BjN,EAAEuC,KAAFvC,CAAQ,CAARA,CAA9C;EAAA,QACMi2C,IAAc/oC,IAAa5L,EAAEiB,KAAFjB,CAAQ,CAARA,CAAb4L,GAA0B5L,EAAEiB,KAAFjB,CAAQ,CAARA,CAD9C;EAAA,QAEM40C,IAAYjpC,IAAajN,EAAEuC,KAAFvC,CAAQ,CAARA,CAAbiN,GAA0BjN,EAAEuC,KAAFvC,CAAQ,CAARA,CAF5C;EAAA,QAGOmyB,cAHP,CAOA,KAAqB,MAAhB6jB,CAAgB,IAAqB,MAAhBC,MACtBC,IAAYkX,2BADhB,EAC6C;EACvCngD,YACFjN,IAAIA,EAAEgQ,SAAFhQ,EAAa,GAAG,GAAG,EAAnBA,CADFiN,GAGAC,MACF5L,IAAIA,EAAE0O,SAAF1O,EAAa,GAAG,GAAG,EAAnBA,CA
DF4L,CAHAD,CAOJ,IAAM6kD,IAAsB,MAAhB7b,CAAgB,GAAIj2C,CAAJ,GAAQA,EAAE+xD,IAAF/xD,CAAOmyB,CAAPnyB,EAAck2C,CAAdl2C,EAAyB,CAAzBA,CAApC;EAAA,UACMoE,IAAuB,MAAhB6xC,CAAgB,GAAI,CAAJ,GAAQ,CADrC;EAAA,UAEM+b,IAAsB,MAAhB/b,CAAgB,GAAI30C,EAAEywD,IAAFzwD,CAAO6wB,CAAP7wB,EAAc,CAAdA,EAAiB40C,CAAjB50C,CAAJ,GAAkCA,CAF9D,CAGA,OAAOgF,KAAK2rD,QAAL3rD,CAAcwrD,CAAdxrD,EAAmB0rD,CAAnB1rD,EAAwBlF,GAAxBkF,CAA4BlC,CAA5BkC,GAAkC,CAAlCA,CAAP;EAIF,SAAc,MAAV6rB,CAAJ,EAAiB;EACf,UAAM+/B,IAAYlyD,EAAEmoD,IAAFnoD,CAAOA,EAAEuC,KAAFvC,CAAQ,CAARA,CAAPA,EAAmBA,EAAEuC,KAAFvC,CAAQ,CAARA,CAAnBA,CAAlB;EAAA,UACMmyD,IAAY7wD,EAAE6mD,IAAF7mD,CAAOA,EAAEiB,KAAFjB,CAAQ,CAARA,CAAPA,EAAmBA,EAAEiB,KAAFjB,CAAQ,CAARA,CAAnBA,CADlB;EAAA,UAGMwkC,IAAU,IAAIssB,mBAAJ,CACZF,EAAU3vD,KADE,EACK4vD,EAAU5vD,KADf,GACuByzC,GAAaC,EADpC,EAEZhpC,CAFY,EAEAC,CAFA,CAHhB;EAAA,UAMI1L,IAAS8E,KAAKkoD,aAALloD,CACTw/B,CADSx/B,GACC4rD,GAAWC,EADZ7rD,EAETA,KAAK+rD,gBAAL/rD,CAAgCw/B,EAAQ3xB,WAAxC7N,CAFSA,CANb,CAcA,QAJuC,MAAnCmd,IAAIxG,GAAJwG,CAAQ,qBAARA,MACFjiB,IAAS8E,KAAKgsD,YAALhsD,CAAkB9E,CAAlB8E,IAGJ9E,EAAOiK,OAAPjK,EAAgB,GAAGA,EAAOe,KAAPf,CAAa,CAAbA,GAAiBA,EAAOe,KAAPf,CAAa,CAAbA,EAApCA,CAAP;EAEA,YAAO8E,KAAKkoD,aAALloD,CACH,IAAIisD,aAAJ,CAAkBvyD,EAAEuC,KAApB,EAA2BjB,EAAEiB,KAA7B,EAAoC0K,CAApC,EAAgDC,CAAhD,CADG5G,GAC2DtG,GAAGsB,EAD9DgF,CAAP;KAlgBJqnD,EAugBAA,WAAAA,SAAAA,GAAA,UAAS3tD,CAAT,EAAoBsB,CAApB;EACE,QAAgB,gBAAZtB,EAAEuE,KAAN,EAA6B;EAC3B,UAAMiuD,IAAQlsD,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBtG,EAAEgL,MAAnB1E,CAAd;EAAA,UACMmsD,IAAQnsD,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBhF,EAAE0J,MAAnB1E,CADd;EAAA,UAGMosD,IAAc,IAAIC,sBAAJ,CAChBC,iBAAsCn9B,IADtB,EAC4Bz1B,EAAEuC,KAD9B,EACqCjB,EAAEiB,KADvC,CAHpB;EAAA,UAKMswD,IAAc,IAAIF,sBAAJ,CAChBC,iBAAsCl9B,IADtB,EAC4B11B,EAAEuC,KAD9B,EACqCjB,EAAEiB,KADvC,CALpB;EAAA,UAQMwV,KACJzR,KAAKwsD,gCAALxsD,CAAsCtG,CAAtCsG,EAAyCksD,EAAM5E,cAAN4E,CAAqBvmC,IAA9D3lB,GACAA,KAAKwsD,gCAALxsD,CAAsCtG,CAAtCsG,EAAyCksD,EAAM5E,cAAN4E,CAAqBtmC,IAA9D5lB,GACAA,KAAKwsD,gCAALxsD,CAAsChF,CAAtCgF,EAAyCmsD,EAAM7E,cAAN6E,CAAqBxmC,IAA9D3lB,GACAA,KAAKwsD,gCAALxsD,CAAsChF,CAAtCgF,EAAyCmsD,EAAM7E,cAAN6E,CAAqBvmC,IAA9D5lB,EAZF;EAAA,UAcM2lB,IAAO3lB,KAAKkoD,aAALloD,CAA2BosD,CAA3BpsD,EAAwCyR,CAAxCzR,CAdb;EAAA,UAeM4lB,IAAO5lB,KAAKkoD,aAALloD,CAA2BusD,CAA3BvsD,EAAwCyR,CAAxCzR,CAfb;EAAA,UAiBMgmB,IAAUhmB,KAAKgmB,OAALhmB,CAAa2lB,CAAb3lB,EAAmB4lB,CAAnB5lB,CAjBhB,CAoBA,OAFA2lB,EAAK3S,OAAL2S,IACAC,EAAK5S,OAAL4S,EADAD,EAEOK,CAAP;EAGF,SAAIhmB,KAAK8qD,kBAAL9qD,EAAyBtG,GAAGsB,EAA5BgF,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgB2rD,QAAhB3rD,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,CAAP,CAGF,IAAMw/B,IAAU,IAAI7O,eAAJ,CAAoB87B,GAApB,EAAsC/yD,EAAEuC,KAAxC,EAA+CjB,EAAEiB,KAAjD,CAAhB;EAAA,QACM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0CtG,EAAEuE,KAA5C+B,CADf,CAEA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KAtiBFqnD,EAyiBAA,WAAAA,mBAAAA,GAAA,UACI3sD,CADJ,EACiBgN,CADjB,EAC0CC,CAD1C,EAEIC,CAFJ,EAE6BC,CAF7B,EAGItF,CAHJ;EAIE,QAAMkP,KAAU/W,GAAGgN,GAAMC,EAAzB;EAAA,QAEI2mB,IAAc,IAFlB,CAGc,QAAV/rB,CAAU,KACZ+rB,IAAc/rB,EAAOtG,KAArBqyB,EACA7c,EAAOzV,IAAPyV,CAAYlP,CAAZkP,CAFY,EAKd,IAAI8c,IAAa,IAAjB,CACa,QAAT1mB,CAAS,KACX0mB,IAAa1mB,EAAM5L,KAAnBsyB,EACA9c,EAAOzV,IAAPyV,CAAY5J,CAAZ4J,CAFW,EAKb,IAAIK,IAAS,IAAb;EAAA,QACI46C,IAA8BC,gBADlC,CAGIxvC,IAAIxG,GAAJwG,CAAQ,+BAARA,MACFrL,IAAS9R,KAAK+rD,gBAAL/rD,CAAsBtF,EAAEuB,KAAxB+D,CAAT8R,EACA46C,IAA8BE,sBAF5BzvC,EAKJ,IAAMqiB,IAAU,IAAIktB,CAAJ,CACZhyD,EAAEuB,KADU,EACHyL,EAAKzL,KADF,EACS0L,EAAS1L,KADlB,EACyBqyB,CADzB,EACsCC,CADtC,EAEZ3mB,CAFY,CAAhB,CAGA,OAAO5H,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,EAA4ByR,CAA5BzR,EAAoC8R,CAApC9R,CAAP;KAtkB
FqnD,EAykBAA,WAAAA,6BAAAA,GAAA,UACI3sD,CADJ,EACiB4T,CADjB,EACiCC,CADjC,EAC+C7W,CAD/C,EAEI8W,CAFJ;EAGE,QAAMgxB,IAAU,IAAIqtB,UAAJ,CAAenyD,EAAEuB,KAAjB,EAAwBqS,CAAxB,EAAgCC,CAAhC,EAAsC7W,CAAtC,EAA6C8W,CAA7C,CAAhB,CACA,OAAOxO,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA7kBFqnD,EAglBAA,WAAAA,QAAAA,GAAA,UACI50C,CADJ,EACkB2V,CADlB,EACwCC,CADxC,EAEImnB,CAFJ,EAEyBjhC,CAFzB,EAEuC7W,CAFvC,EAGI8W,CAHJ;EAIE,QAAMgxB,IACF,IAAIstB,cAAJ,CAAmB1kC,EAAWnsB,KAA9B,EAAqCuzC,CAArC,EAAkDjhC,CAAlD,EAAwD7W,CAAxD,EAA+D8W,CAA/D,CADJ,CAEA,OAAOxO,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BooB,GAAYC,GAAa5V,EAAtDzS,CAAP;KAtlBFqnD,EAylBAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB,EAA6B6L,CAA7B;EACE,QAAMi5B,IAAU,IAAIutB,WAAJ,CAAgBryD,EAAEuB,KAAlB,EAAyBsK,CAAzB,CAAhB,CACA,OAAOvG,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA3lBFqnD,EA8lBAA,WAAAA,IAAAA,GAAA,UACI3sD,CADJ,EACU8M,CADV,EAC6CC,CAD7C;EAEE,QAAM+3B,IAAU,IAAIwtB,UAAJ,CAAetyD,EAAEuB,KAAjB,EAAwBuL,CAAxB,EAAkCC,CAAlC,CAAhB,CACA,OAAOzH,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAjmBFqnD,EAomBAA,WAAAA,UAAAA,GAAA,UAA4B3sD,CAA5B,EAAkC+O,CAAlC;EACE,QAAM+1B,IAAU,IAAIytB,gBAAJ,CAAqBvyD,EAAEuB,KAAvB,EAA8BwN,CAA9B,CAAhB,CACA,OAAOzJ,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAtmBFqnD,EAymBAA,WAAAA,OAAAA,GAAA,UAAyB3sD,CAAzB,EAA+B+L,CAA/B,EAAkD3I,CAAlD;EACE,QAAM0hC,IAAU,IAAI0tB,aAAJ,CAAkBxyD,EAAEuB,KAApB,EAA2BwK,EAAQvK,IAAnC,EAAyC4B,CAAzC,CAAhB,CACA,OAAOkC,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,GAAG+L,EAAhCzG,CAAP;KA3mBFqnD,EA8mBAA,WAAAA,eAAAA,GAAA,UACI3sD,CADJ,EACU2U,CADV,EACgCC,CADhC;EAEE1L,WACIlJ,EAAE0E,IAAF1E,IAAU,CADdkJ,EAEI,sEAFJA,EAGA,IAAMsE,IAAOmH,EAAWssC,MAAXtsC,CAAkB,UAAC3V,CAAD,EAAIsB,CAAJ;EAAU,aAAAtB,IAAIsB,CAAJ;OAA5BqU,CAAb;EAAA,QAEMgR,IAAW8sC,YAA2BzyD,EAAEuB,KAA7BkxD,EAAoC99C,CAApC89C,EAAgDjlD,CAAhDilD,CAFjB;EAAA,QAGM1sC,IACF2sC,YAA2B/sC,EAASjmB,MAApCgzD,EAA4C/9C,EAAWjV,MAAvDgzD,CAJJ;EAAA,QAKMxsC,IACFysC,oBAAmC3yD,EAAEuB,KAArCoxD,EAA4Ch+C,CAA5Cg+C,EAAwDnlD,CAAxDmlD,CANJ;EAAA,QAOMxsC,IACFysC,oBAAmCh+C,CAAnCg+C,EAA0Cj+C,EAAWjV,MAArDkzD,CARJ;EAAA,QASMvsC,IACFwsC,aAA4B3sC,CAA5B2sC,EAA8Cj+C,CAA9Ci+C,EAAqDl+C,EAAWjV,MAAhEmzD,CAVJ,CAYA,OAAO7yD,EAAEyK,OAAFzK,CAAU2lB,CAAV3lB,EACKgP,SADLhP,CACe+lB,CADf/lB,EAEKyK,OAFLzK,CAEakmB,CAFblmB,EAGKmD,KAHLnD,CAGWmmB,CAHXnmB,EAG6BqmB,CAH7BrmB,CAAP;KA/nBF2sD,EAqoBAA,WAAAA,eAAAA,GAAA,UACI3sD,CADJ,EACU2U,CADV,EACgC7H,CADhC;EAEE5D,WACIlJ,EAAE0E,IAAF1E,IAAU,CADdkJ,EAEI,sEAFJA,EAIA,IAAMsE,IAAOmH,EAAWssC,MAAXtsC,CAAkB,UAAC3V,CAAD,EAAIsB,CAAJ;EAAU,aAAAtB,IAAIsB,CAAJ;OAA5BqU,CAAb;EAAA,QAEMm+C,MAA8C,GAAG,GAFvD,CAGAA,EAAiBxxD,IAAjBwxD,MAAAA,CAAAA,CAAAA,EAAyBhmD,CAAzBgmD,EACA,KAAK,IAAIzyD,IAAI,IAAIsU,EAAWjV,MAA5B,EAAoCW,IAAIL,EAAEuB,KAAFvB,CAAQN,MAAhD,IAA0DW,CAA1D,EACEyyD,EAAiBxxD,IAAjBwxD,EAAuB,GAAG,EAA1BA,EAGF,IAAMC,IAAU/yD,EAAE+H,GAAF/H,CAAM8yD,CAAN9yD,CAAhB;EAAA,QAEMgzD,IACFP,YAA2BM,EAAQxxD,KAAnCkxD,EAA0C99C,CAA1C89C,EAAsDjlD,CAAtDilD,GAA4D,CAA5DA,CAHJ;EAAA,QAKMQ,IAAoCP,YACtCM,EAAoBtzD,MADkBgzD,EACV/9C,EAAWjV,MADDgzD,GACS,CADTA,CAL1C;EAAA,QAQMQ,IAAeP,oBACjBI,EAAQxxD,KADSoxD,EACFh+C,CADEg+C,EACUnlD,CADVmlD,GACgB,CADhBA,CARrB,CAWA,OAAOI,EAAQtoD,OAARsoD,CAAgBC,CAAhBD,EACK/jD,SADL+jD,CACeE,CADfF,EAEKtoD,OAFLsoD,CAEaG,CAFbH,CAAP;KA9pBFpG,EAmqBQA,WAAAA,OAAAA,GAAR,UACI3sD,CADJ,EACiBw3C,CADjB,EAEIj0C,CAFJ;EAGE,QAAMsuB,IAAY7xB,EAAEuB,KAAFvB,CAAQ,CAARA,CAAlB;EAAA,QACM2nB,IAAS3nB,EAAEuB,KAAFvB,CAAQ,CAARA,CADf;EAAA,QAEM4xB,IAAauhC,yBAAqCxrC,CAArCwrC,CAFnB;EAAA,QAIMruB,IAAU,IAAIsuB,aAAJ,GADIxhC,eAAYjK,WAAQkK,cACxB,EAA8B2lB,CAA9B,CAJhB;EAAA,QAKM/4B,iBALN;EAAA,QAKO/T,QALP;EAAA,QAKaghC,QALb;EAAA,QAMMt0B,IAAS9R,KAAK6qD,eAAL7qD
,EAAgCoF,GAAMghC,EAAtCpmC,EAA6C/B,CAA7C+B,CANf,CAUA,OAFAA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,EAAiC8R,CAAjC9R,GAEwB,MAApB8R,EAAO7V,KAAP6V,CAAa,CAAbA,CAAoB,GACfA,CADe,GAGjB9R,KAAK27C,MAAL37C,CAAY8R,CAAZ9R,EAAoBkyC,CAApBlyC,EAAgC/B,CAAhC+B,CAHP;KAhrBFqnD,EAsrBQA,WAAAA,UAAAA,GAAR,UACI3sD,CADJ,EACiBw3C,CADjB,EAEI6b,CAFJ;uBAEIA,UACF,IAAIxhC,IAAY7xB,EAAEuB,KAAFvB,CAAQ,CAARA,CAAhB;EAAA,QACI2nB,IAAS3nB,EAAEuB,KAAFvB,CAAQ,CAARA,CADb,CAEoB,QAAhBqzD,CAAgB,KAClBxhC,IAAYwhC,EAAa9xD,KAAb8xD,CAAmB,CAAnBA,CAAZxhC,EACAlK,IAAS0rC,EAAa9xD,KAAb8xD,CAAmB,CAAnBA,CAFS,EAIpB,IAAMzhC,IAAauhC,yBAAqCxrC,CAArCwrC,CAAnB;EAAA,QAEMruB,IACF,IAAIwuB,gBAAJ,GAFgB1hC,eAAYjK,WAAQkK,cAEpC,EAAiC2lB,CAAjC,EAA6D,QAAhB6b,CAA7C,CAHJ;EAAA,QAIM50C,iBAJN;EAAA,QAIO/T,QAJP;EAAA,QAIaghC,QAJb;EAAA,QAKMt0B,IAAS9R,KAAK6qD,eAAL7qD,EAAgCoF,GAAMghC,EAAtCpmC,EAA6C,OAA7CA,CALf;EAAA,QAMMyR,KAAU/W,EANhB,CAYA,OALoB,QAAhBqzD,CAAgB,IAClBt8C,EAAOzV,IAAPyV,CAAYs8C,CAAZt8C,CADkB,EAGpBzR,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,EAA4ByR,CAA5BzR,EAAoC8R,CAApC9R,CAHoB,EAKI,MAApB8R,EAAO7V,KAAP6V,CAAa,CAAbA,CAAoB,GACfA,CADe,GAGjB9R,KAAKiuD,SAALjuD,CAAetF,CAAfsF,EAAkBkyC,CAAlBlyC,EAA8B8R,CAA9B9R,CAHP;KA3sBFqnD,EAitBAA,WAAAA,IAAAA,GAAA,UAAI3sD,CAAJ,EAAesmB,CAAf;EACEktC,+BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,EACM,IAAA/0C,yCAAA;EAAA,QAACqI,QAAD;EAAA,QAEAa,IAAS1e,kBAAAA,CAFT;EAAA,QAGAwnD,IAAMzwD,EAAEmnD,IAAFnnD,EAAQ,CAARA,EAAW2nB,CAAX3nB,CAHN;EAAA,QAIAyzD,IAAcC,WAAW1zD,EAAEuD,KAAbmwD,CAJd,CAKN,OAAOpuD,KAAK27C,MAAL37C,CAAYmrD,CAAZnrD,EAAiB,KAAjBA,EAAwBmuD,CAAxBnuD,EAAqCmF,OAArCnF,CAA6CwhB,CAA7CxhB,CAAP;KAxtBFqnD,EA2tBAA,WAAAA,KAAAA,GAAA,UAAK3sD,CAAL,EAAgBsmB,CAAhB;EACQ,QAAA7H,yCAAA;EAAA,QAACqI,QAAD;EAAA,QAEAa,IAAS1e,kBAAAA,CAFT;EAAA,QAGAwnD,IAAMzwD,EAAEmnD,IAAFnnD,EAAQ,CAARA,EAAW2nB,CAAX3nB,CAHN;EAAA,QAIAyzD,IAAcC,WAAW1zD,EAAEuD,KAAbmwD,CAJd,CAKN,OAAOpuD,KAAK27C,MAAL37C,CAAYmrD,CAAZnrD,EAAiB,MAAjBA,EAAyBmuD,CAAzBnuD,EAAsCmF,OAAtCnF,CAA8CwhB,CAA9CxhB,CAAP;KAjuBFqnD,EAouBAA,WAAAA,mBAAAA,GAAA,UACI3sD,CADJ,EACUwU,CADV,EACgCC,CADhC;EAEE,QAAIrR,IAAO,CAAX;EAAA,QACM0lD,IAAc6K,oBAA8BvwD,EAA9BuwD,EAAqC3zD,EAAE0E,IAAvCivD,CADpB;EAAA,QAEI3K,IAAYhpD,CAFhB,CAGmB,QAAf8oD,CAAe,KACjBE,IAAYhpD,EAAEgP,SAAFhP,CAAY8oD,CAAZ9oD,CAAZgpD,EACA5lD,IAAOwwD,iBAA2B,CAA3BA,EAA8B5zD,EAAE0E,IAAhCkvD,EAAsC,CAAtCA,CAFU,EAKnB,IAAM9sC,IACF+sC,kBAA6B7K,EAAUznD,KAAvCsyD,EAA8CzwD,CAA9CywD,EAAoDp/C,CAApDo/C,CADJ;EAAA,QAEMlsC,IAAS1e,eAAoB+/C,EAAUznD,KAAVynD,CAAgB5lD,CAAhB4lD,EAApB//C,CAFf;EAAA,QAGMwnD,IAAMzH,EAAU7B,IAAV6B,EAAgB,CAAhBA,EAAmBrhC,CAAnBqhC,CAHZ;EAAA,QAIMyK,IAAcC,WAAW1zD,EAAEuD,KAAbmwD,CAJpB;EAAA,QAKIlzD,IACA8E,KAAKwuD,YAALxuD,CACQmrD,CADRnrD,EACa,oBADbA,EACmCkP,CADnClP,EAC+CmuD,CAD/CnuD,EAC4DmP,CAD5DnP,EAEKmF,OAFLnF,CAEawhB,CAFbxhB,CANJ,CAYA,OAHmB,QAAfwjD,CAAe,KACjBtoD,IAASA,EAAOwO,SAAPxO,CAAiBuzD,uBAAiCjL,CAAjCiL,CAAjBvzD,CADQ,GAGZA,CAAP;KA1vBFmsD,EA6vBQA,WAAAA,aAAAA,GAAR,UACI3sD,CADJ,EACiB85C,CADjB,EACkDtlC,CADlD,EAEIjR,CAFJ,EAEqBkR,CAFrB;EAGE,QAAMod,IAAY7xB,EAAEuB,KAAFvB,CAAQ,CAARA,CAAlB;EAAA,QACM2nB,IAAS3nB,EAAEuB,KAAFvB,CAAQ,CAARA,CADf;EAAA,QAEM4xB,IACFoiC,8BAA2CrsC,CAA3CqsC,EAAmDv/C,CAAnDu/C,CAHJ;EAAA,QAKMlvB,IAAU,IAAImvB,gBAAJ,GADGriC,eAAYjK,WAAQkK,cAAWpd,gBAClC,EAAgCqlC,CAAhC,CALhB;EAAA,QAMMr7B,iBANN;EAAA,QAMO/T,QANP;EAAA,QAMaghC,QANb;EAAA,QAOMt0B,IAAS9R,KAAK6qD,eAAL7qD,EAAgCoF,GAAMghC,EAAtCpmC,EAA6C/B,CAA7C+B,CAPf,CAUA,OAFAA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,GAAGwU,EAAhClP,EAA6C8R,CAA7C9R,GAEI8R,EAAO7V,KAAP6V,CAAa,CAAbA,MAAoB3C,CAApB2C,GACKA,CADLA,IAGJ5C,IAAa0/C,MAAM,CAANA,EAASz/C,CAATy/C,EAAsBpoD,IAAtBooD,EAA4BvsC,IAASiK,EAArCsiC,CAAb1/C,EACOlP,KAAKwuD
,YAALxuD,CAAkB8R,CAAlB9R,EAA0Bw0C,CAA1Bx0C,EAAqCkP,CAArClP,EAAiD/B,CAAjD+B,EAAwDmP,CAAxDnP,CAJH8R,CAAJ;KA1wBFu1C,EAixBAA,WAAAA,OAAAA,GAAA,UAAO3sD,CAAP,EAAkBoD,CAAlB;EACE,QAAMkjB,KAAQljB,EAAd,CACAowD,2BAAqC,QAArCA,EAA+CltC,CAA/CktC,EAAqDxzD,EAAE0E,IAAvD8uD,EACM,IAAA/0C,yCAAA;EAAA,QAACqI,QAAD;EAAA,QAEAa,IAAS1e,kBAAAA,CAFT;EAAA,QAGAwnD,IAAMzwD,EAAEmnD,IAAFnnD,EAAQ,CAARA,EAAW2nB,CAAX3nB,CAHN,CAIN,OAAOsF,KAAKiuD,SAALjuD,CAAemrD,CAAfnrD,EAAoB,KAApBA,EAA2BmF,OAA3BnF,CAAmCwhB,CAAnCxhB,CAAP;KAxxBFqnD,EA2xBAA,WAAAA,OAAAA,GAAA,UAAO3sD,CAAP,EAAkBoD,CAAlB;EACE,QAAMkjB,KAAQljB,EAAd,CACAowD,2BAAqC,QAArCA,EAA+CltC,CAA/CktC,EAAqDxzD,EAAE0E,IAAvD8uD,EACM,IAAA/0C,yCAAA;EAAA,QAACqI,QAAD;EAAA,QAEAa,IAAS1e,kBAAAA,CAFT;EAAA,QAGAwnD,IAAMzwD,EAAEmnD,IAAFnnD,EAAQ,CAARA,EAAW2nB,CAAX3nB,CAHN,CAIN,OAAOsF,KAAKiuD,SAALjuD,CAAemrD,CAAfnrD,EAAoB,KAApBA,EAA2BmF,OAA3BnF,CAAmCwhB,CAAnCxhB,CAAP;KAlyBFqnD,EAqyBAA,WAAAA,OAAAA,GAAA,UAAO3sD,CAAP,EAAkBoD,CAAlB,EAAgCmI,CAAhC,EAAoDC,CAApD;EAEE,QAAIpI,MAASpD,EAAE0E,IAAF1E,GAAS,CAAtB,EACE,MAAM,IAAInB,KAAJ,CACF,qDAAkDmB,EAAE0E,IAAF1E,GAAS,CAA3D,oBAAA,GACgBoD,CAFd,CAAN,CAIF,IAAM0hC,IAAU,IAAIqvB,aAAJ,CAAkBn0D,EAAEuB,KAApB,EAA2BgK,CAA3B,EAAsCC,CAAtC,CAAhB,CACA,OAAOlG,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA7yBFqnD,EAgzBAA,WAAAA,MAAAA,GAAA,UAAM3tD,CAAN,EAAiBsB,CAAjB;EACE,QAAMwkC,IAAU,IAAI7O,eAAJ,CAAoBm+B,KAApB,EAAwCp1D,EAAEuC,KAA1C,EAAiDjB,EAAEiB,KAAnD,CAAhB;EAAA,QACM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,MAA1CA,CADf,CAEA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KAnzBFqnD,EAszBAA,WAAAA,SAAAA,GAAA,UAAS3tD,CAAT,EAAoBsB,CAApB;EACE,QAAMwkC,IACF,IAAI7O,eAAJ,CAAoBo+B,SAApB,EAA4Cr1D,EAAEuC,KAA9C,EAAqDjB,EAAEiB,KAAvD,CADJ;EAAA,QAEM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,MAA1CA,CAFf,CAGA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KA1zBFqnD,EA6zBAA,WAAAA,KAAAA,GAAA,UAAK3tD,CAAL,EAAgBsB,CAAhB;EACE,QAAIgF,KAAK8qD,kBAAL9qD,EAAyBtG,GAAGsB,EAA5BgF,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgB6J,IAAhB7J,CAAqBtG,CAArBsG,EAAwBhF,CAAxBgF,CAAP,CAGF,IAAMw/B,IAAU,IAAI7O,eAAJ,CAAoBq+B,IAApB,EAAuCt1D,EAAEuC,KAAzC,EAAgDjB,EAAEiB,KAAlD,CAAhB;EAAA,QACM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,MAA1CA,CADf,CAEA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KAp0BFqnD,EAu0BAA,WAAAA,UAAAA,GAAA,UAAU3tD,CAAV,EAAqBsB,CAArB;EACE,QAAMwkC,IACF,IAAI7O,eAAJ,CAAoBs+B,UAApB,EAA6Cv1D,EAAEuC,KAA/C,EAAsDjB,EAAEiB,KAAxD,CADJ;EAAA,QAEM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,MAA1CA,CAFf,CAGA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KA30BFqnD,EA80BAA,WAAAA,QAAAA,GAAA,UAAQ3tD,CAAR,EAAmBsB,CAAnB;EACE,QAAIgF,KAAK8qD,kBAAL9qD,EAAyBtG,GAAGsB,EAA5BgF,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgBmK,OAAhBnK,CAAwBtG,CAAxBsG,EAA2BhF,CAA3BgF,CAAP,CAGF,IAAMw/B,IAAU,IAAI7O,eAAJ,CAAoBu+B,OAApB,EAA0Cx1D,EAAEuC,KAA5C,EAAmDjB,EAAEiB,KAArD,CAAhB;EAAA,QACM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,MAA1CA,CADf,CAEA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KAr1BFqnD,EAw1BAA,WAAAA,aAAAA,GAAA,UAAa3tD,CAAb,EAAwBsB,CAAxB;EACE,QAAMwkC,IACF,IAAI7O,eAAJ,CAAoBw+B,aAApB,EAAgDz1D,EAAEuC,KAAlD,EAAyDjB,EAAEiB,KAA3D,CADJ;EAAA,QAEM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,MAA1CA,CAFf,CAGA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KA51BFqnD,EA+1BAA,WAAAA,WAAAA,GAAA,UAA6B3sD,CAA7B;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EA
A4BmzD,WAA5B,CAAhB,CACA,OAAOpvD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAj2BFqnD,EAo2BAA,WAAAA,WAAAA,GAAA,UAAW3tD,CAAX,EAAsBsB,CAAtB;EACE,QAAMwkC,IACF,IAAI7O,eAAJ,CAAoB0+B,WAApB,EAA8C31D,EAAEuC,KAAhD,EAAuDjB,EAAEiB,KAAzD,CADJ;EAAA,QAEM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,MAA1CA,CAFf,CAGA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KAx2BFqnD,EA22BAA,WAAAA,UAAAA,GAAA,UAAU3tD,CAAV,EAAqBsB,CAArB;EACE,QAAMwkC,IACF,IAAI7O,eAAJ,CAAoB2+B,UAApB,EAA6C51D,EAAEuC,KAA/C,EAAsDjB,EAAEiB,KAAxD,CADJ;EAAA,QAEM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,MAA1CA,CAFf,CAGA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC8R,CAApC9R,CAAP;KA/2BFqnD,EAk3BAA,WAAAA,OAAAA,GAAA,UAAO18C,CAAP,EAA0BjR,CAA1B,EAAqCsB,CAArC;EACE,QAAMwkC,IAAU,IAAI+vB,aAAJ,CAAkB5kD,EAAUvL,IAA5B,EAAkC1F,EAAEuC,KAApC,EAA2CvC,EAAE0F,IAA7C,CAAhB;EAAA,QACM0S,IACF9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C0nB,WAAWhuB,EAAEuE,KAAbypB,EAAoB1sB,EAAEiD,KAAtBypB,CAA1C1nB,CAFJ,CAGA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6B2K,GAAWjR,GAAGsB,EAA3CgF,EAA+C8R,CAA/C9R,CAAP;KAt3BFqnD,EAy3BAA,WAAAA,MAAAA,GAAA,UAAM18C,CAAN;EACEqQ,SACI,uEADJA,EAGA,IAAMyrC,IAAW97C,EAAUtK,QAAVsK,EAAjB,CACA,OAAO6kD,UAAU7kD,EAAU1O,KAApBuzD,EAA2B/I,CAA3B+I,CAAP;KA93BFnI,EAi4BAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB,EAA6B+U,CAA7B,EAAwCC,CAAxC;EAEE,WAAO+/C,SADO/0D,EAAE2F,QAAF3F,EACP+0D,EAAgB/0D,EAAEuB,KAAlBwzD,EAAyB/0D,EAAEuD,KAA3BwxD,EAAkChgD,CAAlCggD,EAAqC//C,CAArC+/C,CAAP;KAn4BFpI,EAs4BAA,WAAAA,IAAAA,GAAA,UAAI3sD,CAAJ,EAAesmB,CAAf;EACEktC,+BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,EACM,IAAA/0C,yCAAA;EAAA,QAACqI,QAAD;EAAA,QAEAa,IAAS1e,kBAAAA,CAFT;EAAA,QAGAwnD,IAAMzwD,EAAEmnD,IAAFnnD,EAAQ,CAARA,EAAW2nB,CAAX3nB,CAHN,CAIN,OAAOsF,KAAK27C,MAAL37C,CAAYmrD,CAAZnrD,EAAiB,KAAjBA,EAAwBmrD,EAAIltD,KAA5B+B,EAAmCmF,OAAnCnF,CAA2CwhB,CAA3CxhB,CAAP;KA54BFqnD,EA+4BAA,WAAAA,QAAAA,GAAA,UAAQ3tD,CAAR,EAAmBsB,CAAnB;EACE,QAAIgF,KAAK8qD,kBAAL9qD,EAAyBtG,GAAGsB,EAA5BgF,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgBiJ,OAAhBjJ,CAAwBtG,CAAxBsG,EAA2BhF,CAA3BgF,CAAP,CAGF,IAAMw/B,IAAU,IAAI7O,eAAJ,CAAoB++B,GAApB,EAAsCh2D,EAAEuC,KAAxC,EAA+CjB,EAAEiB,KAAjD,CAAhB,CACA,OAAO+D,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,CAAP;KAr5BFqnD,EAw5BAA,WAAAA,IAAAA,GAAA,UAAI3tD,CAAJ,EAAesB,CAAf;EACE,QAAMwkC,IAAU,IAAI7O,eAAJ,CAAoBg/B,GAApB,EAAsCj2D,EAAEuC,KAAxC,EAA+CjB,EAAEiB,KAAjD,CAAhB;EAAA,QACMmyC,IAAc5O,EAAQurB,kBAARvrB,EADpB,CAEA,OAAOx/B,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,EAAoC,IAApCA,EAA0CouC,CAA1CpuC,CAAP;KA35BFqnD,EA85BAA,WAAAA,IAAAA,GAAA,UAAI3sD,CAAJ,EAAesmB,CAAf;EACEktC,+BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,EACM,IAAA/0C,yCAAA;EAAA,QAACqI,QAAD;EAAA,QAEAa,IAAS1e,kBAAAA,CAFT;EAAA,QAGAwnD,IAAMzwD,EAAEmnD,IAAFnnD,EAAQ,CAARA,EAAW2nB,CAAX3nB,CAHN,CAIN,OAAOsF,KAAK27C,MAAL37C,CAAYmrD,CAAZnrD,EAAiB,KAAjBA,EAAwBmrD,EAAIltD,KAA5B+B,EAAmCmF,OAAnCnF,CAA2CwhB,CAA3CxhB,CAAP;KAp6BFqnD,EAu6BAA,WAAAA,QAAAA,GAAA,UAAQ3tD,CAAR,EAAmBsB,CAAnB;EACE,QAAIgF,KAAK8qD,kBAAL9qD,EAAyBtG,GAAGsB,EAA5BgF,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgBmJ,OAAhBnJ,CAAwBtG,CAAxBsG,EAA2BhF,CAA3BgF,CAAP,CAGF,IAAMw/B,IAAU,IAAI7O,eAAJ,CAAoBi/B,GAApB,EAAsCl2D,EAAEuC,KAAxC,EAA+CjB,EAAEiB,KAAjD,CAAhB,CACA,OAAO+D,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,CAAP;KA76BFqnD,EAg7BAA,WAAAA,IAAAA,GAAA,UAAI3sD,CAAJ,EAAesmB,CAAf;EACEktC,+BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,EACM,IAAA/0C,yCAAA;EAAA,QAACqI,QAAD;EAAA,QAEAa,IAAS1e,kBAAAA,CAFT;EAAA,QAGAwnD,IAAMzwD,EAAEmnD,IAAFnnD,EAAQ,CAARA,EAAW
2nB,CAAX3nB,CAHN,CAIN,OAAOsF,KAAK27C,MAAL37C,CAAYmrD,CAAZnrD,EAAiB,KAAjBA,EAAwBmrD,EAAIltD,KAA5B+B,EAAmCmF,OAAnCnF,CAA2CwhB,CAA3CxhB,CAAP;KAt7BFqnD,EAy7BAA,WAAAA,IAAAA,GAAA,UAAI3sD,CAAJ,EAAesmB,CAAf;EACEktC,+BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,EACM,IAAA/0C,yCAAA;EAAA,QAACqI,QAAD;EAAA,QAEAa,IAAS1e,kBAAAA,CAFT;EAAA,QAGAwnD,IAAMzwD,EAAEmnD,IAAFnnD,EAAQ,CAARA,EAAW2nB,CAAX3nB,CAHN,CAIN,OAAOsF,KAAK27C,MAAL37C,CAAYmrD,CAAZnrD,EAAiB,KAAjBA,EAAwBmrD,EAAIltD,KAA5B+B,EAAmCmF,OAAnCnF,CAA2CwhB,CAA3CxhB,CAAP;KA/7BFqnD,EAk8BAA,WAAAA,kBAAAA,GAAA,UAAkB3tD,CAAlB,EAA6BsB,CAA7B;EACE,QAAMwkC,IACF,IAAI7O,eAAJ,CAAoBk/B,kBAApB,EAAqDn2D,EAAEuC,KAAvD,EAA8DjB,EAAEiB,KAAhE,CADJ,CAEA,OAAO+D,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,CAAP;KAr8BFqnD,EAw8BAA,WAAAA,WAAAA,GAAA,UAAW3tD,CAAX,EAAsBsB,CAAtB;EACE,QAEMwkC,IAAU,IAAI7O,eAAJ,CAFLm/B,GAEK,EAAwBp2D,EAAEuC,KAA1B,EAAiCjB,EAAEiB,KAAnC,CAFhB;EAAA,QAGM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAFK,SAELA,CAHf,CAIA,OAAOA,KAAKkoD,aAALloD,CAA2Bw/B,CAA3Bx/B,GAAqCtG,GAAGsB,EAAxCgF,EAA4C8R,CAA5C9R,CAAP;KA78BFqnD,EAg9BAA,WAAAA,SAAAA,GAAA,UAAS3tD,CAAT,EAAoBsB,CAApB;EACE,QAEMwkC,IAAU,IAAI7O,eAAJ,CAFLo/B,OAEK,EAAwBr2D,EAAEuC,KAA1B,EAAiCjB,EAAEiB,KAAnC,CAFhB;EAAA,QAGM6V,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAFK,OAELA,CAHf,CAIA,OAAOA,KAAKkoD,aAALloD,CAA2Bw/B,CAA3Bx/B,GAAqCtG,GAAGsB,EAAxCgF,EAA4C8R,CAA5C9R,CAAP;KAr9BFqnD,EAw9BAA,WAAAA,IAAAA,GAAA,UAAI3tD,CAAJ,EAAesB,CAAf;EACE,QAAgB,gBAAZtB,EAAEuE,KAAU,IAA2B,gBAAZjD,EAAEiD,KAAjC,EACE,OAAO+B,KAAKgwD,wBAALhwD,CAA8BtG,CAA9BsG,EAAiChF,CAAjCgF,EAAoCiwD,GAApCjwD,CAAP,CAGF,IAAMw/B,IAAU,IAAI7O,eAAJ,CAAoBs/B,GAApB,EAAsCv2D,EAAEuC,KAAxC,EAA+CjB,EAAEiB,KAAjD,CAAhB;EAAA,QACM6V,IACF9R,KAAK6qD,eAAL7qD,CACIw/B,EAAQ3xB,WADZ7N,EACyB0nB,WAAWhuB,EAAEuE,KAAbypB,EAAoB1sB,EAAEiD,KAAtBypB,CADzB1nB,CAFJ,CAIA,OAAOA,KAAKkoD,aAALloD,CAA2Bw/B,CAA3Bx/B,GAAqCtG,GAAGsB,EAAxCgF,EAA4C8R,CAA5C9R,CAAP;KAj+BFqnD,EAw+BQA,WAAAA,yBAAAA,GAAR,UAAiC3tD,CAAjC,EAA4CsB,CAA5C,EAAuDwqB,CAAvD;EAAA,gBAAA;EAAA,QACQ0mC,IAAQlsD,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBtG,EAAEgL,MAAnB1E,CADhB;EAAA,QAEQmsD,IAAQnsD,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBhF,EAAE0J,MAAnB1E,CAFhB;EAAA,QAIQmZ;;;;;;;QAJR;EAAA,QAISwM,QAJT;EAAA,QAIeC,QAJf;EAAA,QAqBQI,IAAUhmB,KAAKgmB,OAALhmB,CAAa2lB,CAAb3lB,EAAmB4lB,CAAnB5lB,CArBlB,CAwBE,OAFA2lB,EAAK3S,OAAL2S,IACAC,EAAK5S,OAAL4S,EADAD,EAEOK,CAAP;KAhgCFqhC,EAsgCQA,WAAAA,iCAAAA,GAAR,UACI6I,CADJ,EAC2BC,CAD3B;EAEE,aACEzrD,QAAQyrD,EAAYzrD,QACpBzG,OAAOkyD,EAAYlyD,OACnBhC,OAAOi0D,EAAcj0D,OAHvB;KAxgCForD,EA+gCAA,WAAAA,KAAAA,GAAA,UAAuBv/B,CAAvB;EAEE,SADA,IAAI7S,IAAM6S,EAAQ,CAARA,CAAV,EACS/sB,IAAI,CAAb,EAAgBA,IAAI+sB,EAAQ1tB,MAA5B,EAAoCW,GAApC,EACEka,IAAMjV,KAAKqI,GAALrI,CAASiV,CAATjV,EAAc8nB,EAAQ/sB,CAAR+sB,CAAd9nB,CAANiV,CAEF,OAAOA,CAAP;KAphCFoyC,EAuhCAA,WAAAA,SAAAA,GAAA,UAAS3tD,CAAT,EAAoBsB,CAApB;EACE,QAAgB,gBAAZtB,EAAEuE,KAAU,IAA2B,gBAAZjD,EAAEiD,KAAjC,EACE,OAAO+B,KAAKgwD,wBAALhwD,CAA8BtG,CAA9BsG,EAAiChF,CAAjCgF,EAAoCowD,GAApCpwD,CAAP,CAGF,IAAIA,KAAK8qD,kBAAL9qD,EAAyBtG,GAAGsB,EAA5BgF,CAAJ,EACE,OAAOA,KAAK0qD,UAAL1qD,CAAgBqwD,QAAhBrwD,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,CAAP,CAGF,IAAMw/B,IAAU,IAAI7O,eAAJ,CAAoBy/B,GAApB,EAAsC12D,EAAEuC,KAAxC,EAA+CjB,EAAEiB,KAAjD,CAAhB;EAAA,QACM6V,IACF9R,KAAK6qD,eAAL7qD,CACIw/B,EAAQ3xB,WADZ7N,EACyB0nB,WAAWhuB,EAAEuE,KAAbypB,EAAoB1sB,EAAEiD,KAAtBypB,CADzB1nB,CAFJ,CAIA,OAAOA,KAAKkoD,aAALloD,CAA2Bw/B,CAA3Bx/B,GAAqCtG,GAAGsB,EAAxCgF,EAA4C8R,CAA5C9R,CAAP;KApiCFqnD,EAuiCAA,WAAAA,IAAAA,GAAA,UAAsB3tD,CAAtB,EAA4BsB,CAA5B;EACE,QAAMwkC,IAAU,IAAI7O,eAAJ,CAAoB2/B,GAApB,EAAsC52D,EAAEuC,KAAxC,EAA+CjB,EAAEiB,KAAjD,CAAh
B;EAAA,QACMmyC,IAAc5O,EAAQurB,kBAARvrB,EADpB;EAAA,QAEM1tB,IAAS9R,KAAK6qD,eAAL7qD,CACIw/B,EAAQ3xB,WADZ7N,EACyB0nB,WAAWhuB,EAAEuE,KAAbypB,EAAoB1sB,EAAEiD,KAAtBypB,CADzB1nB,CAFf,CAIA,OAAOA,KAAKkoD,aAALloD,CAAsBw/B,CAAtBx/B,GAAgCtG,GAAGsB,EAAnCgF,EAAuC8R,CAAvC9R,EAA+CouC,CAA/CpuC,CAAP;KA5iCFqnD,EA+iCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bs0D,IAA5B,CAAhB,CACA,OAAOvwD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAjjCFqnD,EAojCAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bu0D,KAA5B,CAAhB,CACA,OAAOxwD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAtjCFqnD,EAyjCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bw0D,IAA5B,CAAhB,CACA,OAAOzwD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA3jCFqnD,EA8jCAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4By0D,KAA5B,CAAhB,CACA,OAAO1wD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAhkCFqnD,EAmkCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B00D,GAA5B,CAAhB,CACA,OAAO3wD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KArkCFqnD,EAwkCAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B20D,KAA5B,CAAhB,CACA,OAAO5wD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA1kCFqnD,EA6kCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B40D,GAA5B,CAAhB;EAAA,QACMziB,IAAc5O,EAAQurB,kBAARvrB,EADpB,CAEA,OAAOx/B,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,EAAiC,IAAjCA,EAAuCouC,CAAvCpuC,CAAP;KAhlCFqnD,EAmlCAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B60D,KAA5B,CAAhB,CACA,OAAO9wD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KArlCFqnD,EAwlCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B80D,IAA5B,CAAhB,CACA,OAAO/wD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA1lCFqnD,EA6lCAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B+0D,KAA5B,CAAhB,CACA,OAAOhxD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA/lCFqnD,EAkmCAA,WAAAA,OAAAA,GAAA,UAAyB3sD,CAAzB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bg1D,MAA5B,CAAhB,CACA,OAAOjxD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KApmCFqnD,EAumCAA,WAAAA,WAAAA,GAAA,UAA6B3sD,CAA7B;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bi1D,UAA5B,CAAhB,CACA,OAAOlxD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAzmCFqnD,EA4mCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bk1D,IAA5B,CAAhB,CACA,OAAOnxD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA9mCFqnD,EAinCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bm1D,GAA5B,CAAhB,CACA,OAAOpxD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAnnCFqnD,EAsnCAA,WAAAA,OAAAA,GAAA,UAAyB50C,CAAzB,EAAgCrB,CAAhC;EACE,QAAMouB,IACF,IAAI7O,eAAJ,CAAoB0gC,OAApB,EAA0C5+C,EAAGxW,KAA7C,EAAoDmV,EAAEnV,KAAtD,CADJ,CAEA,OAAO+D,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6ByS,GAAIrB,EAAjCpR,CAAP;KAznCFqnD,EA4nCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bq1D,IAA5B,CAAhB,CACA,OAAOtxD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA9nCFqnD,EAioCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,E
AA4Bs1D,MAA5B,CAAhB;EAAA,QACMz/C,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,OAA1CA,CADf,CAEA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,EAAiC8R,CAAjC9R,CAAP;KApoCFqnD,EAuoCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB,EAA6BD,CAA7B,EAA0CE,CAA1C;EACE,QAAM6kC,IAAU,IAAIgyB,WAAJ,CAAgB92D,EAAEuB,KAAlB,EAAyBxB,CAAzB,EAA8BE,CAA9B,CAAhB,CACA,OAAOqF,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAzoCFqnD,EA4oCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bw1D,GAA5B,CAAhB,CACA,OAAOzxD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA9oCFqnD,EAipCAA,WAAAA,WAAAA,GAAA,UAA6B3sD,CAA7B;EACE,QAAMg3D,IAAQ1xD,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBtF,EAAEgK,MAAnB1E,CAAd;EAAA,QAEMw/B,IAAU,IAAImyB,iBAAJ,CAAsBj3D,EAAEuB,KAAxB,CAFhB;EAAA,QAGMwV,KACJzR,KAAKwsD,gCAALxsD,CAAsCtF,CAAtCsF,EAAyC0xD,EAAMpK,cAANoK,CAAqB/rC,IAA9D3lB,GACAA,KAAKwsD,gCAALxsD,CAAsCtF,CAAtCsF,EAAyC0xD,EAAMpK,cAANoK,CAAqB9rC,IAA9D5lB,EALF,CAQA,OAAOA,KAAKkoD,aAALloD,CAA2Bw/B,CAA3Bx/B,EAAoCyR,CAApCzR,CAAP;KA1pCFqnD,EA6pCAA,WAAAA,QAAAA,GAAA,UAA0B3sD,CAA1B;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B21D,OAA5B,CAAhB,CACA,OAAO5xD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA/pCFqnD,EAkqCAA,WAAAA,SAAAA,GAAA,UAA2B3sD,CAA3B;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B41D,QAA5B,CAAhB,CACA,OAAO7xD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KApqCFqnD,EAuqCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B61D,GAA5B,CAAhB,CACA,OAAO9xD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAzqCFqnD,EA4qCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B81D,GAA5B,CAAhB,CACA,OAAO/xD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA9qCFqnD,EAirCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B+1D,GAA5B,CAAhB,CACA,OAAOhyD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAnrCFqnD,EAsrCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bg2D,IAA5B,CAAhB,CACA,OAAOjyD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAxrCFqnD,EA2rCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bi2D,IAA5B,CAAhB,CACA,OAAOlyD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA7rCFqnD,EAgsCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bk2D,IAA5B,CAAhB,CACA,OAAOnyD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAlsCFqnD,EAqsCAA,WAAAA,MAAAA,GAAA,UAAwB3tD,CAAxB,EAA8BsB,CAA9B;EACE,QAAMwkC,IAAU,IAAI7O,eAAJ,CAAoByhC,KAApB,EAAwC14D,EAAEuC,KAA1C,EAAiDjB,EAAEiB,KAAnD,CAAhB,CACA,OAAO+D,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtG,GAAGsB,EAAhCgF,CAAP;KAvsCFqnD,EA0sCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bo2D,IAA5B,CAAhB,CACA,OAAOryD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA5sCFqnD,EA+sCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bq2D,IAA5B,CAAhB,CACA,OAAOtyD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAjtCFqnD,EAotCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bs2D,IAA5B,CAAhB,CACA,OAAOvyD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAttCFqnD,EAytCAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bu2D,KAA5B,CAAhB,CACA,OAAOxyD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CA
AP;KA3tCFqnD,EA8tCAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4Bw2D,KAA5B,CAAhB;EAAA,QACMrkB,IAAc5O,EAAQurB,kBAARvrB,EADpB,CAEA,OAAOx/B,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,EAAiC,IAAjCA,EAAuCouC,CAAvCpuC,CAAP;KAjuCFqnD,EAouCAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4By2D,KAA5B,CAAhB;EAAA,QACMtkB,IAAc5O,EAAQurB,kBAARvrB,EADpB,CAEA,OAAOx/B,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,EAAiC,IAAjCA,EAAuCouC,CAAvCpuC,CAAP;KAvuCFqnD,EA0uCAA,WAAAA,IAAAA,GAAA,UAAsB3sD,CAAtB;EACE,QAAM8kC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B02D,GAA5B,CAAhB,CACA,OAAO3yD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA5uCFqnD,EA+uCAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB,EAA6BhD,CAA7B;EACE,QAAM8nC,IAAU,IAAIkZ,cAAJ,CAAmBh+C,EAAEuB,KAArB,EAA4B22D,KAAcl7D,CAAdk7D,CAA5B,CAAhB,CACA,OAAO5yD,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAjvCFqnD,EAovCAA,WAAAA,iBAAAA,GAAA,UAAiB3sD,CAAjB,EAA8B4S,CAA9B,EAAgDya,CAAhD;EASI,QAAAgF,iBAAA;EAAA,QACAD,kBADA;EAAA,QAEA0E,gBAFA;EAAA,QAGA3D,cAHA;EAAA,QAIAD,eAJA;EAAA,QAOIgiB,IAAY7iB,IAAcD,CAAdC,GAA6ByE,CAP7C;EAAA,QAQItvB,IAAU0rB,IAAYC,CAR1B;EAAA,QASIglC,KAAcjjB,GAAW1tC,EAT7B;EAAA,QAWI4wD,IAAYp4D,EAAE0L,OAAF1L,EAAW,EAAXA,CAXhB;EAAA,QAYIq4D,IAAQzlD,EAAOnI,OAAPmI,EAAgBsiC,IAAY,EAA5BtiC,CAZZ;EAAA,QAcI0lD,IACF,IAAIC,aAAJ,CAAkBJ,CAAlB,EAA8BC,EAAU72D,KAAxC,EAA+C8rB,CAA/C,CAfF;EAAA,QAgBImrC,IAASlzD,KAAKkoD,aAALloD,CACXgzD,CADWhzD,GACK8yD,EADL9yD,EAEXA,KAAK+rD,gBAAL/rD,CAAgC6yD,CAAhC7yD,CAFWA,CAhBb;EAAA,QAoBImzD,IAAgB,IAAIrH,mBAAJ,CAClBoH,EAAOj3D,KADW,EACJ82D,EAAM92D,KADF,GACUiG,GAAS6lB,EAASuJ,YAD5B,GAC0C,CAD1C,GAElB,CAFkB,CApBpB;EAAA,QAuBE8hC,IAAUpzD,KAAKkoD,aAALloD,CACVmzD,CADUnzD,GACMkzD,GAAQH,EADd/yD,EAEVA,KAAK+rD,gBAAL/rD,CAAgCmzD,EAActlD,WAA9C7N,CAFUA,CAvBZ,CA+BF,QAJuC,MAAnCmd,IAAIxG,GAAJwG,CAAQ,qBAARA,MACFi2C,IAAUpzD,KAAKgsD,YAALhsD,CAAkBozD,CAAlBpzD,IAGLozD,EAAQjuD,OAARiuD,EAAiB,GAAGxlC,GAAWC,GAAU9F,EAASuJ,YAAlD8hC,CAAP;KA5xCF/L,EA+xCAA,WAAAA,OAAAA,GAAA,UAAO3sD,CAAP,EAAoB4S,CAApB,EAAsCya,CAAtC;EACE,QAAI5K,IAAIxG,GAAJwG,CAAQ,mBAARA,KAA+C,MAAfziB,EAAEuB,KAAFvB,CAAQ,CAARA,CAApC,EACE,OAAOsF,KAAKqzD,gBAALrzD,CAAsBtF,CAAtBsF,EAAyBsN,CAAzBtN,EAAiC+nB,CAAjC/nB,CAAP,CAEF,IAAMw/B,IAAU,IAAI8zB,aAAJ,CAAkBvrC,CAAlB,CAAhB,CACA,OAAO/nB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,GAAG4S,EAAhCtN,CAAP;KApyCFqnD,EAuyCAA,WAAAA,eAAAA,GAAA,UAAe50C,CAAf,EAA6BnF,CAA7B,EAA+Cya,CAA/C;EAEE,QAAMyX,IAAU,IAAI+zB,qBAAJ,CAA0BxrC,CAA1B,CAAhB,CACA,OAAO/nB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6ByS,GAAInF,EAAjCtN,CAAP;KA1yCFqnD,EA6yCAA,WAAAA,gBAAAA,GAAA,UAAgB3sD,CAAhB,EAA6B+X,CAA7B,EAA2CsV,CAA3C;EACE,QAAMyX,IAAU,IAAIg0B,sBAAJ,CAA2BzrC,CAA3B,CAAhB,CACA,OAAO/nB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,GAAG+X,EAAhCzS,CAAP;KA/yCFqnD,EAkzCAA,WAAAA,gBAAAA,GAAA,UAAgB3sD,CAAhB,EAA6B4S,CAA7B,EAA+Cya,CAA/C;EAEE,QAAMyX,IAAU,IAAIi0B,sBAAJ,CAA2B1rC,CAA3B,CAAhB,CACA,OAAO/nB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,GAAG4S,EAAhCtN,CAAP;KArzCFqnD,EAwzCAA,WAAAA,wBAAAA,GAAA,UAAwB50C,CAAxB,EAAsCnF,CAAtC,EAAwDya,CAAxD;EAEE,QAAMyX,IAAU,IAAIk0B,8BAAJ,CAAmC3rC,CAAnC,CAAhB,CACA,OAAO/nB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6ByS,GAAInF,EAAjCtN,CAAP;KA3zCFqnD,EA8zCAA,WAAAA,yBAAAA,GAAA,UAAyB3sD,CAAzB,EAAsC+X,CAAtC,EAAoDsV,CAApD;EAEE,QAAMyX,IAAU,IAAIm0B,+BAAJ,CAAoC5rC,CAApC,CAAhB,CACA,OAAO/nB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,GAAG+X,EAAhCzS,CAAP;KAj0CFqnD,EAo0CAA,WAAAA,QAAAA,GAAA,UAAQ3sD,CAAR,EAAqBqtB,CAArB;EACE,QAAMyX,IAAU,IAAIo0B,aAAJ,CAAkB7rC,CAAlB,EAA4B,KAA5B,GAAmC,CAAnC,CAAhB;EAAA,QACMjW,IACF9R,KAAK6qD,eAAL7qD,CA
AqBw/B,EAAQ3xB,WAA7B7N,EAA0CtF,EAAEuD,KAA5C+B,CAFJ,CAGA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,EAAiC8R,CAAjC9R,CAAP;KAx0CFqnD,EA20CAA,WAAAA,QAAAA,GAAA,UAAQ3sD,CAAR,EAAqBqtB,CAArB;EACE,QAAMyX,IAAU,IAAIo0B,aAAJ,CAAkB7rC,CAAlB,EAA4B,KAA5B,GAAmC,CAAnC,CAAhB;EAAA,QACMjW,IAAS9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,SAA1CA,CADf,CAEA,OAAOA,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,EAAiC8R,CAAjC9R,CAAP;KA90CFqnD,EAi1CAA,WAAAA,gBAAAA,GAAA,UAAgB50C,CAAhB,EAA8B/X,CAA9B,EAA2C0W,CAA3C,EAAwD2W,CAAxD;EAEE,QACM8rC,IACF,IAAID,aAAJ,CAAkB7rC,CAAlB,EAA4B,KAA5B,GAFiB,CAEjB,CAFJ;EAAA,QAGM+rC,IACF9zD,KAAKkoD,aAALloD,CAAmB6zD,CAAnB7zD,GAA6CtF,EAA7CsF,CAJJ;EAAA,QAMM+zD,IAAyB,IAAIC,wBAAJ,CAA6BjsC,CAA7B,CAN/B;EAAA,QAOMjW,IACF9R,KAAK6qD,eAAL7qD,CAAqB+zD,EAAuBlmD,WAA5C7N,EAAyDtF,EAAEuD,KAA3D+B,CARJ;EAAA,QASM9E,IAAS8E,KAAKkoD,aAALloD,CACX+zD,CADW/zD,GACcyS,GAAIqhD,EADlB9zD,EACqC8R,CADrC9R,CATf,CAYA,OADA8zD,EAAiB9gD,OAAjB8gD,IACO54D,CAAP;KA/1CFmsD,EAk2CAA,WAAAA,gBAAAA,GAAA,UAAgB50C,CAAhB,EAA8B/X,CAA9B,EAA2CqtB,CAA3C;EACE,QAAMksC,IAAyB,IAAIC,wBAAJ,CAA6BnsC,CAA7B,CAA/B;EAAA,QACMjW,IACF9R,KAAK6qD,eAAL7qD,CAAqBi0D,EAAuBpmD,WAA5C7N,EAAyDtF,EAAEuD,KAA3D+B,CAFJ,CAGA,OAAOA,KAAKkoD,aAALloD,CAAmBi0D,CAAnBj0D,GAA4CyS,EAA5CzS,EAAiD8R,CAAjD9R,CAAP;KAt2CFqnD,EAy2CAA,WAAAA,KAAAA,GAAA,UAAuB3sD,CAAvB,EAA6BuD,CAA7B;EACE,WAAOk2D,WAAwBz5D,CAAxBy5D,EAA2Bl2D,CAA3Bk2D,EAAkCn0D,IAAlCm0D,CAAP;KA12CF9M,EA62CAA,WAAAA,QAAAA,GAAA,UAAwB3sD,CAAxB,EAAmCuB,CAAnC;EACE,WAAI+D,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBtF,EAAEgK,MAAnB1E,EAA2Bs0B,QAA3Bt0B,KACCo0D,cAAyB15D,EAAEuB,KAA3Bm4D,EAAkCn4D,CAAlCm4D,CADDp0D,GAEKA,KAAKq0D,aAALr0D,CAAmBtF,CAAnBsF,EAAsB/D,CAAtB+D,CAFLA,GAIGs0D,cAA2B55D,CAA3B45D,EAA8Br4D,CAA9Bq4D,CAJP;KA92CFjN,EAq3CAA,WAAAA,eAAAA,GAAA,UACI3sD,CADJ,EACiButB,CADjB,EACoCC,CADpC,EAEIhb,CAFJ;EAGE,QAAMsyB,IACF,IAAI+0B,qBAAJ,CAA0B75D,EAAEuB,KAA5B,EAAmCgsB,CAAnC,EAA8CC,CAA9C,EAAwDhb,CAAxD,CADJ,CAEA,OAAOlN,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA13CFqnD,EA63CAA,WAAAA,uBAAAA,GAAA,UAAuB50C,CAAvB,EAAqC/X,CAArC,EAAkDwS,CAAlD;EAEE,QAAMsyB,IAAU,IAAIg1B,6BAAJ,CAAkC/hD,CAAlC,EAAsC/X,CAAtC,EAAyCwS,CAAzC,CAAhB,CAEA,OAAOlN,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6ByS,EAA7BzS,CAAP;KAj4CFqnD,EAo4CAA,WAAAA,sBAAAA,GAAA,UACI3sD,CADJ,EACiButB,CADjB,EACoCC,CADpC,EAEIhb,CAFJ;EAGE,QAAMsyB,IAAU,IAAIi1B,4BAAJ,CACZ/5D,EAAEuB,KADU,EACHgsB,CADG,EACQC,CADR,EACkBhb,CADlB,CAAhB,CAEA,OAAOlN,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KAz4CFqnD,EA44CAA,WAAAA,8BAAAA,GAAA,UACI50C,CADJ,EACkB/X,CADlB,EAC+BwS,CAD/B;EAEE,QAAMsyB,IACF,IAAIk1B,mCAAJ,CAAwCjiD,CAAxC,EAA4C/X,CAA5C,EAA+CwS,CAA/C,CADJ,CAEA,OAAOlN,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6ByS,EAA7BzS,CAAP;KAh5CFqnD,EAm5CAA,WAAAA,YAAAA,GAAA,UACIpiC,CADJ,EACsBqD,CADtB,EAC2CC,CAD3C,EAEIC,CAFJ;EAGE,QAAMmsC,IAAQrsC,IAAarD,CAAbqD,GAAsBvb,QAAQkY,CAARlY,CAApC;EAAA,QACMwf,IAAYooC,EAAM14D,KAAN04D,CAAY,CAAZA,CADlB;EAAA,QAEMnkB,IAAcmkB,EAAM14D,KAAN04D,CAAY,CAAZA,CAFpB;EAAA,QAGMn1B,IAAU,IAAIiR,kBAAJ,CAAuBlkB,CAAvB,EAAkCikB,CAAlC,EAA+CjoB,CAA/C,CAHhB;EAAA,QAIMzW,IACF9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0C,OAA1CA,CALJ;EAAA,QAMMouC,IAAc5O,EAAQurB,kBAARvrB,CAA2BhX,CAA3BgX,CANpB,CAOA,OAAOx/B,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6B20D,EAA7B30D,EAAqC8R,CAArC9R,EAA6CouC,CAA7CpuC,CAAP;KA75CFqnD,EAg6CAA,WAAAA,OAAAA,GAAA,UAAO5gD,CAAP,EAA0B3O,CAA1B,EAAyC2wB,CAAzC,EAA0DC,CAA1D;EAEE,QAAM8W,IAAU,IAAIo1B,aAAJ,CAAkBnuD,EAAQvK,IAA1B,EAAgCpE,CAAhC,EAAuC2wB,CAAvC,EAAgDC,CAAhD,CAAhB,CACA,OAAO1oB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6ByG,EAA7BzG,CAAP;KAn6CFqnD,EAs6CAA,WAAAA,kBAAAA,GAAA,UACI1+B,CADJ,EACq
BC,CADrB,EACuCC,CADvC,EAEIC,CAFJ,EAE0BC,CAF1B;EAQE,WALA/N,KACI,+FADJA,GAKO65C,sBAFWlsC,EAAMtoB,QAANsoB,EAEXksC,EADYjsC,EAAOvoB,QAAPuoB,EACZisC,EACoBhsC,CADpBgsC,EACmC/rC,CADnC+rC,EACiD9rC,CADjD8rC,CAAP;KA96CFxN,EAk7CAA,WAAAA,cAAAA,GAAA,UACIl6C,CADJ,EACqBwb,CADrB,EACsCK,CADtC,EAEIC,CAFJ,EAEgCC,CAFhC,EAGIC,CAHJ;EAIE,QAAMqW,IAAU,IAAIs1B,oBAAJ,CACZ3nD,EAAMlR,KADM,EACC0sB,EAAM1sB,KADP,EACcgtB,CADd,EACwBC,CADxB,EACgCC,CADhC,CAAhB,CAEA,OAAOnpB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BmN,GAAOwb,GAAOK,EAA3ChpB,CAAP;KAx7CFqnD,EA27CAA,WAAAA,aAAAA,GAAA,UAAa3sD,CAAb,EAA0BqV,CAA1B,EAA6CxC,CAA7C;EAEE3J,WACImM,IAAY,CADhBnM,EAEI,wDAAsDmM,CAF1DnM,EAIA,IAAM2oB,IAAY7xB,EAAEuB,KAAFvB,CAAQ,CAARA,CAAlB;EAAA,QAKMq6D,KAJ8B,WAAfxnD,CAAe,GAAU7S,EAAEuB,KAAFvB,CAAQ,CAARA,CAAV,GAAuBA,EAAEuB,KAAFvB,CAAQ,CAARA,KAIxBqV,CALnC;EAAA,QAMMilD,KAJ6B,WAAfznD,CAAe,GAAU7S,EAAEuB,KAAFvB,CAAQ,CAARA,CAAV,GAAuBA,EAAEuB,KAAFvB,CAAQ,CAARA,KAIzBqV,CANjC;EAAA,QAOMklD,KAJ6B,WAAf1nD,CAAe,GAAU7S,EAAEuB,KAAFvB,CAAQ,CAARA,CAAV,GAAuBA,EAAEuB,KAAFvB,CAAQ,CAARA,MAIxBqV,IAAYA,EAP9C;EAAA,QAaMyvB,IAAU,IAAIzF,mBAAJ,CAJoB,WAAfxsB,CAAe,IAC/Bgf,GAAWwoC,GAAcC,GAAaC,EADP,IAE/B1oC,GAAW0oC,GAAaF,GAAcC,EAE3B,EAAqCjlD,CAArC,EAAgDxC,CAAhD,CAbhB,CAcA,OAAOvN,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6BtF,EAA7BsF,CAAP;KA/8CFqnD,EAk9CAA,WAAAA,MAAAA,GAAA,UAAwB3sD,CAAxB,EAA8B0uB,CAA9B,EAAoDtrB,CAApD;EACE,WAAOuJ,MAAM3M,CAAN2M,EAAS+hB,CAAT/hB,EAAqBvJ,CAArBuJ,CAAP;KAn9CFggD,EAs9CAA,WAAAA,UAAAA,GAAA,UACI5gD,CADJ,EACqB8b,CADrB,EACsCtmB,CADtC;EAEQ,QAAAkd,4BAAA;EAAA,QAAC8I,eAAD;EAAA,QAAYa,gBAAZ;EAAA,QAAwB/B,eAAxB;EAAA,QAAmC1hB,aAAnC;EAAA,QAA4C2jB,gBAA5C;EAAA,QAGA4qC,KAAgB5qC,IAAajC,GAAWA,EAHxC;EAAA,QAIAm0C,IAAiBzuD,EAAQtB,OAARsB,EAAiBqc,GAAYb,EAA7Bxb,CAJjB;EAAA,QAKA0uD,IAAW5yC,EAAQpd,OAARod,EAAiBO,GAAY/B,EAA7BwB,CALX,CAON,IAAmB,MAAfS,CAAJ,EACE,OAAOsxC,cAA2BrhD,SAAAA,CAA3BqhD,EAAuCr4D,CAAvCq4D,CAAP,CAEF,IAAM/qC,IAAeM,OAAO,CAAPA,CAArB;EAAA,QACM2V,IAAU,IAAI41B,cAAJ,CACZtyC,CADY,EACAb,CADA,EACWizC,EAAe91D,IAD1B,EACgC+1D,EAAS/1D,IADzC,EAC+CC,CAD/C,EAEZuuD,CAFY,CADhB,CAIA,OAAQ5tD,KAAKkoD,aAALloD,CACIw/B,CADJx/B,GACcm1D,GAAUD,GAAgB3rC,EADxCvpB,EAEHmF,OAFGnF,CAEK/D,CAFL+D,CAAR;KAt+CFqnD,EA2+CAA,WAAAA,cAAAA,GAAA,UACIh+B,CADJ,EAC2BC,CAD3B,EACiDzb,CADjD,EAEI0b,CAFJ;EAGQ,QAAApQ,4BAAA;EAAA,QAAC8I,eAAD;EAAA,QAAYa,gBAAZ;EAAA,QAAwBzjB,aAAxB;EAAA,QAAiC2jB,gBAAjC;EAAA,QAKAwc,IAAU,IAAI41B,cAAJ,CACZtyC,CADY,EACAb,CADA,EACWoH,EAAcjqB,IADzB,EAC+BkqB,EAAalqB,IAD5C,EACkDC,CADlD,GAEX2jB,GAAY,EAFD,GADO,CACP,CALV,CAQN,OAAQhjB,KAAKkoD,aAALloD,CACIw/B,CADJx/B,GACcspB,GAAcD,GAAeE,EAD3CvpB,EAEHmF,OAFGnF,CAEK6N,CAFL7N,CAAR;KAt/CFqnD,EA2/CAA,WAAAA,IAAAA,GAAA,UAAI3sD,CAAJ;EAEE,WAAOsF,KAAKq1D,OAALr1D,CAAatF,CAAbsF,GADS,CACTA,CAAP;KA7/CFqnD,EAggDAA,WAAAA,KAAAA,GAAA,UAAK3sD,CAAL;EAEE,WAAOsF,KAAKq1D,OAALr1D,CAAatF,CAAbsF,GADS,CACTA,CAAP;KAlgDFqnD,EAqgDQA,WAAAA,QAAAA,GAAR,UAAgB3sD,CAAhB,EAA6BqvB,CAA7B;EACE,QAAM2nC,IAAQ1xD,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiBtF,EAAEgK,MAAnB1E,CAAd;EAAA,QAEMosD,IACF,IAAIkJ,UAAJ,CAAeC,YAAoBpmC,IAAnC,EAAyCz0B,EAAEuB,KAA3C,EAAkD8tB,CAAlD,CAHJ;EAAA,QAIMwiC,IACF,IAAI+I,UAAJ,CAAeC,YAAoBnmC,IAAnC,EAAyC10B,EAAEuB,KAA3C,EAAkD8tB,CAAlD,CALJ;EAAA,QAMMtY,KACJzR,KAAKwsD,gCAALxsD,CAAsCtF,CAAtCsF,EAAyC0xD,EAAMpK,cAANoK,CAAqB/rC,IAA9D3lB,GACAA,KAAKwsD,gCAALxsD,CAAsCtF,CAAtCsF,EAAyC0xD,EAAMpK,cAANoK,CAAqB9rC,IAA9D5lB,EARF;EAAA,QAWM2lB,IAAO3lB,KAAKkoD,aAALloD,CAA2BosD,CAA3BpsD,EAAwCyR,CAAxCzR,CAXb;EAAA,QAYM4lB,IAAO5lB,KAAKkoD,aAALloD,CAA2BusD,CAA3BvsD,EAAwCyR,CAAxCzR,CAZb;EAAA,QAaMgmB,IAAUhmB,KAAKgmB,OAALhmB,CAAa2lB,CAAb3lB,EAAmB4lB,CAAnB5lB,EAAyB6hD,IAAzB7hD,CAA8BtF,EAAEuB,KAAFvB,CAAQ,CAARA,CAA9BsF,EAA0Ct
F,EAAEuB,KAAFvB,CAAQ,CAARA,CAA1CsF,CAbhB,CAgBA,OAFA2lB,EAAK3S,OAAL2S,IACAC,EAAK5S,OAAL4S,EADAD,EAEOK,CAAP;KAthDFqhC,EAyhDAA,WAAAA,SAAAA,GAAA,UAAS3sD,CAAT,EAAoB+L,CAApB;EACE,QAAMub,IAAevb,EAAQxK,KAA7B;EAAA,QACMgmB,IAAYD,EAAaA,EAAa5nB,MAAb4nB,GAAsB,CAAnCA,CADlB;EAAA,QAGM7I,4BAHN;EAAA,QAGOgJ,QAHP;EAAA,QAGoBqzC,QAHpB;EAAA,QAG+Bz0C,QAH/B;EAAA,QAG0C1hB,QAH1C;EAAA,QAMM61D,IAAiBzuD,EAAQtB,OAARsB,EAAiB+uD,GAAWvzC,EAA5Bxb,CANvB;EAAA,QAOM0uD,IAAWz6D,EAAEyK,OAAFzK,EAAWA,EAAEwB,IAAFxB,GAASqmB,GAAWA,EAA/BrmB,CAPjB;EAAA,QAQM8kC,IACF,IAAIi2B,eAAJ,CAAoBxzC,CAApB,EAA+B5iB,CAA/B,GAAyCm2D,GAAWz0C,EAApD,CATJ,CAUA,OAAQ/gB,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6Bm1D,GAAUD,EAAvCl1D,EACHmF,OADGnF,CACKmiB,CADLniB,CAAR;KApiDFqnD,EAwiDQA,WAAAA,gBAAAA,GAAR,UAA0CprD,CAA1C,EAA2DgC,CAA3D;EAEE,WAAOmG,OAAOC,IAAPD,CAAYnI,CAAZmI,IAAAA,EAAuBnG,CAAvBmG,CAAP;KA1iDFijD,EA6iDQA,WAAAA,iBAAAA,GAAR,UAA2CprD,CAA3C;EACE,QAAMy5D,IAAetxD,OAAOC,IAAPD,CAAYnI,CAAZmI,IAAAA,CAArB,CAEA,OADApE,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB01D,EAAahxD,MAA9B1E,EAAsCs0B,QAAtCt0B,IAAiD,CAAjDA,EACO01D,CAAP;KAhjDFrO,EAmjDQA,WAAAA,aAAAA,GAAR,UAAuC11C,CAAvC;EACE,QAAM6tB,IAAU,IAAIm2B,aAAJ,CAAkBhkD,EAAM1V,KAAxB,CAAhB,CACA,OAAO+D,KAAKkoD,aAALloD,CAAmBw/B,CAAnBx/B,GAA6B2R,EAA7B3R,CAAP;KArjDFqnD,EAwjDQA,WAAAA,YAAAA,GAAR,UAAoBprD,CAApB,EAAqC25D,CAArC;EACE,4BADmCA,QAC5BjyD,cAAmB1H,EAAM4B,KAAN5B,CAAY,CAAZA,EAAeA,EAAM7B,MAAN6B,GAAe25D,CAA9B35D,CAAnB0H,CAAP;KAzjDF0jD,EA4jDQA,WAAAA,YAAAA,GAAR,UAAoBprD,CAApB;EACE,QAAqB,MAAjBA,EAAM7B,MAAV,EACE,MAAMb,MAAM,sDAANA,CAAN,CAGF,QACE0C,EAAM7B,MAAN6B,GAAe,CAAfA,GAAmBA,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,CAAnBA,GAA6C,GAAGA,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,EADlD;KAjkDForD,EAskDQA,WAAAA,cAAAA,GAAR,UAAsC11C,CAAtC,EAAqDkkD,CAArD;EAEE,QAAMC,IAAYnkD,EAAMxM,OAANwM,EACb3R,KAAK+1D,WAAL/1D,CAAiB2R,EAAM1V,KAAvB+D,UAAkCA,KAAKg2D,WAALh2D,CAAiB2R,EAAM1V,KAAvB+D,EADrB2R,CAAlB;EAAA,QAEMskD,KACDj2D,KAAK+1D,WAAL/1D,CAAiB61D,CAAjB71D,UAAiCA,KAAKg2D,WAALh2D,CAAiB61D,CAAjB71D,EAHtC;EAAA,QAIMw/B,IAAU,IAAI02B,oBAAJ,CACZD,CADY,EAEZH,EAAU75D,KAFE,CAJhB,CAOA,OAAO+D,KACFkoD,aADEloD,CAECw/B,CAFDx/B,GAEW81D,EAFX91D,EAEuBA,KAAK+rD,gBAAL/rD,CAAsBi2D,CAAtBj2D,CAFvBA,EAGFmF,OAHEnF,CAGM61D,CAHN71D,CAAP;KA/kDFqnD,EAqlDOA,WAAAA,cAAAA,GAAP,UAEI7nB,CAFJ,EAE2B/tB,CAF3B,EAEmDK,CAFnD,EAGIs8B,CAHJ,EAII+nB,CAJJ;EAAA,gBAAA,CASE,qBALEA,SACY,QAAVrkD,CAAU,KACZA,IACI9R,KAAK6qD,eAAL7qD,CAAqBw/B,EAAQ3xB,WAA7B7N,EAA0CyR,EAAO,CAAPA,EAAUxT,KAApD+B,CAFQ,GAIM,MAAhB8R,EAAO5V,IAAX,EAKE,OAFA8D,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB8R,EAAOpN,MAAxB1E,EAAgC9B,MAAhC8B,GACIwmB,uBAAuB1U,EAAO7T,KAA9BuoB,EAAqC,CAArCA,CADJxmB,EAEO8R,CAAP,CAGF,IAAMskD,IAA2B3kD,EAAO3P,GAAP2P,CAAW,UAAAE,CAAA;EAC1C,UAAoB,gBAAhBA,EAAM1T,KAAV,EACE,MAAM,IAAI1E,KAAJ,CACF,iIADE,CAAN,CAMF,IAAIo0C,IAAUhtC,EAAKgtC,OAALhtC,CAAagW,GAAbhW,CAAiBgR,EAAMjN,MAAvB/D,CAAd,CAMA,IAAuB,QAAnBgtC,EAAQtyB,OAAW,KAChBsyB,EAAQrZ,QAARqZ,KAAoBnO,EAAQ/L,kBADZ,KAEnB9vB,cAAmBgO,EAAM1V,KAAzB0H,KACIwZ,IAAIxG,GAAJwG,CAAQ,2BAARA,CAHR,EAIE,SACElhB,OAAO0V,EAAM1V,OACb0xC,SAAS,MACT9Z,YAAW,GACX8a,eAAehuC,EAAK+E,QAAL/E,CAAcgR,EAAMjN,MAApB/D,GAJjB,CAQF,IAAIgtC,EAAQrZ,QAARqZ,OAAuBnO,EAAQ/L,kBAAnC,EAAuD;EACrD,YAAI4iC,UAAJ;EAAA,YACIC,UADJ,CAEI3oB,EAAQrZ,QAARqZ,IACF0oB,IAAoB,IAAIV,aAAJ,CAAkBhkD,EAAM1V,KAAxB,CAApBo6D,EACAC,IAAiB31D,EAAKunD,aAALvnD,CAAmB01D,CAAnB11D,GAAuCgR,EAAvChR,CAFfgtC,KAIF0oB,IAAoB,IAAIE,WAAJ,CAAgB5kD,EAAM1V,KAAtB,CAApBo6D,EACAC,IAAiB31D,EAAKunD,aAALvnD,CACb01D,CADa11D,GACOgR,EADPhR,EACeA,EAAKorD,gBAALprD,CAAsBgR,EAAM1V,KAA5B0E,CADfA,CALfgtC,GASJA,IAAUhtC,EAAKgtC,OAALhtC,CAAagW,GAAbhW,CAAiB21D,EAAe5xD,MAAhC/D,CATNgtC,EAUJh8B,IAAQ2kD,CAVJ3oB;EAcN,cADAhtC,EA
AKynD,WAALznD,CAAiBgR,EAAMjN,MAAvB/D,KACQ1E,OAAO0V,EAAM1V,OAAO0xC,YAAS9Z,YAAW,GAAhD;OA3C+BpiB,CAAjC,CA8CAzR,KAAKooD,WAALpoD,CAAiB8R,EAAOpN,MAAxB1E,EACA,IAWIwX,CAXJ;EAAA,QAAMg/C,MACJv6D,OAAO6V,EAAO7V,OACd0xC,SAAS3tC,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB8R,EAAOpN,MAAxB1E,GACT6zB,YAAW,GAHb;EAAA,QAKMtd,IAAMkgD,cAAyBj3B,CAAzBi3B,EAAkCL,CAAlCK,EAA8CD,CAA9CC,CALZ;EAAA,QAMMtoB,IAASnuC,KAAK02D,gBAAL12D,CAAsBuW,CAAtBvW,EAA2B;EACxC,aAAO22D,eACHh2D,EAAKiwB,KADF+lC,EACSn3B,CADTm3B,EACkBP,CADlBO,EAC8BH,CAD9BG,CAAP;OADa32D,CANf;EAAA,QAUMsoD,IAAyC,QAArBtoD,KAAKuoD,YAV/B,CAkBA,IANID,MACF9wC,IAAQxX,KAAK42D,UAAL52D,EADNsoD,GAIJuO,WAAsB1oB,CAAtB0oB,EAA8BT,CAA9BS,EAA0CL,CAA1CK,EAAsDzoB,CAAtDyoB,CAJIvO,EAMAnrC,IAAIxG,GAAJwG,CAAQ,sBAARA,KAAmCg5C,CAAnCh5C,IACAnd,KAAKqqD,aAALrqD,GAAqBA,KAAKknD,uBAD9B,EAGE,KADA,IAAI4P,IAAiB92D,KAAKqqD,aAALrqD,GAAqBA,KAAKknD,uBAC/C,EAAO4P,IAAiB,CAAjBA,IAAsB92D,KAAK+2D,UAAL/2D,CAAgB5F,MAAhB4F,GAAyB,CAAtD,GAAyD;EACvD,UAAM0E,IAAS1E,KAAK+2D,UAAL/2D,CAAgBq2C,KAAhBr2C,EAAf;EAAA,UACMmZ,uBADN;EAAA,UACOld,WADP;EAAA,UACcgC,WADd,CAEA64D,KAAkB92D,KAAKg3D,YAALh3D,CAAkB/D,CAAlB+D,EAAyB/B,CAAzB+B,CAAlB82D,EACA92D,KAAKyF,IAALzF,CAAU0E,CAAV1E,CADA82D;EAUJ,YALIxO,MACF9wC,IAAQxX,KAAKi3D,QAALj3D,CAAcwX,CAAdxX,CAARwX,EACAxX,KAAKuoD,YAALvoD,CAAkBhE,IAAlBgE,GACKzB,MAAMihC,EAAQxgC,WAARwgC,CAAoBjhC,MAAMiZ,OAAOxX,KAAKosC,YAALpsC,CAAkBwX,CAAlBxX,GAD5CA,CAFEsoD,GAKGx2C,CAAP;KAvrDFu1C,EA0rDQA,WAAAA,iBAAAA,GAAR,UAAyB9wC,CAAzB,EAAsC2gD,CAAtC;EAKE,WAHM3gD,KAAOvW,KAAKm3D,WAAZ5gD,KACJvW,KAAKm3D,WAALn3D,CAAiBuW,CAAjBvW,IAAwBk3D,GADpB3gD,GAGCvW,KAAKm3D,WAALn3D,CAAiBuW,CAAjBvW,CAAP;KA/rDFqnD,EAksDAA,WAAAA,kBAAAA,GAAA;EACE,WAAOrnD,KAAKonD,cAAZ;KAnsDFC,EAwsDAA,WAAAA,QAAAA,GAAA;EACE,SAAIrnD,KAAKknC,QAAT,EAAA;EAGA,WAAK,IAAM3wB,CAAX,IAAkBvW,KAAKm3D,WAAvB,EACEn3D,KAAK4wB,KAAL5wB,CAAWgqC,aAAXhqC,CAAyBA,KAAKm3D,WAALn3D,CAAiBuW,CAAjBvW,EAAsB6wB,YAA/C7wB,EAEFA,KAAKonD,cAALpnD,CAAoBgT,OAApBhT,IACAA,KAAKxG,MAALwG,CAAYo3D,MAAZp3D,EADAA,EAEgC,QAA5BA,KAAK0nD,mBAAuB,IAC9B1nD,KAAK0nD,mBAAL1nD,CAAyBxG,MAAzBwG,CAAgCo3D,MAAhCp3D,EAHFA,EAKIA,KAAKinD,mBAALjnD,IACFA,KAAK4wB,KAAL5wB,CAAWgT,OAAXhT,EANFA,EAQAA,KAAKknC,QAALlnC,IAAgB,CARhBA;;KA/sDFqnD,EA0tDAA,WAAAA,eAAAA,GAAA;EAAA,gBAAA,CACE,OAAO3uC,KAAK;EACV,aAAI/X,EAAK0K,GAAL1K,CAASkpB,OAAO,IAAPA,CAATlpB,EAAuBgW,GAAvBhW,KAA+B,CAA/BA,GACK,EADLA,GAGG,EAHP;OADK+X,CAAP;KA3tDF2uC,EAmuDQA,WAAAA,YAAAA,GAAR,UAAoB3iD,CAApB;EACE,QAAMipC,IAAU3tC,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB0E,CAAjB1E,CAAhB;EAAA,QACO/D,WADP;EAAA,QACciC,YADd;EAAA,QACsBmd,aADtB;EAAA,QAC+Bpd,WAD/B;EAAA,QACsCy3C,WADtC;EAAA,QAC6CphB,cAD7C,CAEA,IAAe,QAAXjZ,CAAJ,EAAA;EAYA,UACIlc,CADJ;EAAA,UAAMmpD,IAAyC,QAArBtoD,KAAKuoD,YAA/B,CAEID,MACFnpD,IAAQM,YAAYC,GAAZD,EADN6oD,EAGJ,IAAMn0B,IACFkjC,gCAA2Cp7D,CAA3Co7D,EAAkD/iC,CAAlD+iC,CADJ,CAEA1pB,EAAQxZ,QAARwZ,GAAmBxZ,CAAnBwZ,CACA,IAAMgI,IAAa31C,KAAKs3D,cAALt3D,CAAoB0E,CAApB1E,EAA4Bm0B,CAA5Bn0B,EAAsC01C,CAAtC11C,EAA6Cs0B,CAA7Ct0B,CAAnB,CAEA,IADA2tC,EAAQtyB,OAARsyB,GAAkBgI,CAAlBhI,EACc,QAAVzvC,CAAJ,EAAoB;EAElB,YAAIo2B,CAAJ,EAAc;EACZ,cAAMzI,IAAQloB,cAAmB1H,EAAM4B,KAAN5B,CAAY,CAAZA,EAAeA,EAAM7B,MAAN6B,GAAe,CAA9BA,CAAnB0H,CAAd;EAAA,cACMyB,IAAOnJ,EAAM7B,MAAN6B,GAAe,CAAfA,GAAmBA,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,CAAnBA,GAA6C,CAD1D;EAAA,cAEMmqC,IAAOnqC,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,CAFb,CAGA+D,KAAK4wB,KAAL5wB,CAAWu3D,2BAAXv3D,CACI21C,CADJ31C,EACgB6rB,CADhB7rB,EACuBoF,CADvBpF,EAC6BomC,CAD7BpmC,EACmCw3D,oBAAoBt5D,CAApBs5D,EAA4Bv5D,CAA5Bu5D,CADnCx3D;WAJF,MAOEA,KAAK4wB,KAAL5wB,CAAWy3D,qBAAXz3D,CACI21C,CADJ31C,EACgBm0B,EAAS,CAATA,CADhBn0B,EAC6Bm0B,EAAS,CAATA,CAD7Bn0B,EAEIw3D,oBAAoBt5D,CAApBs5D,EAA4Bv5D,CAA5Bu5D,CAFJx
3D,EAKF2tC,EAAQzvC,MAARyvC,GAAiB,IAAjBA,EACI2a,MACFtoD,KAAKiqD,YAALjqD,IAAqBP,YAAYC,GAAZD,KAAoBN,CADvCmpD,CADJ3a;;OApCF,MAAA;EAGMxwB,UAAIxG,GAAJwG,CAAQ,sBAARA,KACYnd,KAAK+2D,UAAL/2D,CAAgBshB,OAAhBthB,CAAwB0E,CAAxB1E,KACD,CAFXmd,KAGAnd,KAAK+2D,UAAL/2D,CAAgBm3C,MAAhBn3C,CAAuBA,KAAK+2D,UAAL/2D,CAAgBshB,OAAhBthB,CAAwB0E,CAAxB1E,CAAvBA,EAAwD,CAAxDA,GACAA,KAAK+2D,UAAL/2D,CAAgBhE,IAAhBgE,CAAqB0E,CAArB1E,CAJAmd;;KAzuDRkqC,EAixDQA,WAAAA,qBAAAA,GAAR,UAA6B3iD,CAA7B,EAA6CgzD,CAA7C;EAKE,QAAMC,IAAoB33D,KAAK+mD,cAA/B;EAAA,QACMpZ,IAAU3tC,KAAK2tC,OAAL3tC,CAAa2W,GAAb3W,CAAiB0E,CAAjB1E,CADhB;EAAA,QAEOqb,aAFP;EAAA,QAEgB8Y,cAFhB;EAAA,QAE0Bl2B,WAF1B;EAAA,QAEiCy3C,WAFjC;EAAA,QAEwCphB,cAFxC,CAYA,OATIqjC,KAAgC,QAAXt8C,CAArBs8C,KACF33D,KAAKmoD,cAALnoD,CAAoB0E,CAApB1E,EAA4Bqb,CAA5Brb,EAAqCm0B,CAArCn0B,EAA+C01C,CAA/C11C,EAAsDs0B,CAAtDt0B,GACA2tC,EAAQtyB,OAARsyB,GAAkB,IADlB3tC,EAEA2tC,EAAQxZ,QAARwZ,GAAmB,IAHjBgqB,GAKJhqB,EAAQ+H,KAAR/H,GAAgBjT,aAAa+c,MALzBkgB,EAMiB,QAAjBD,CAAiB,KACnB/pB,EAAQzvC,MAARyvC,GAAiBiqB,oBAAoBF,CAApBE,EAAmC35D,CAAnC25D,CADE,CANjBD,EASGhqB,EAAQzvC,MAAf;KAlyDFmpD,EAqyDQA,WAAAA,eAAAA,GAAR,UACI3iD,CADJ,EACoB2W,CADpB,EAC2C8Y,CAD3C,EAEI0jC,CAFJ,EAE2BvjC,CAF3B;EAGQ,QAAAnb,uBAAA;EAAA,QAACld,WAAD;EAAA,QAAQgC,WAAR,CAEN,IAAIkf,IAAIxG,GAAJwG,CAAQ,sBAARA,CAAJ,EAAqC;EACnC,UAAMrF,IAAM9X,KAAK+2D,UAAL/2D,CAAgBshB,OAAhBthB,CAAwB0E,CAAxB1E,CAAZ,CACI8X,KAAO,CAAPA,IACF9X,KAAK+2D,UAAL/2D,CAAgBm3C,MAAhBn3C,CAAuB8X,CAAvB9X,EAA4B,CAA5BA,CADE8X;EAIN9X,UAAKqqD,aAALrqD,IAAsBA,KAAKg3D,YAALh3D,CAAkB/D,CAAlB+D,EAAyB/B,CAAzB+B,CAAtBA,EACAA,KAAKonD,cAALpnD,CAAoBmoD,cAApBnoD,CAAmCqb,CAAnCrb,EAA4Cm0B,CAA5Cn0B,EAAsD63D,CAAtD73D,EAA+Ds0B,CAA/Dt0B,CADAA;KAhzDFqnD,EAozDQA,WAAAA,eAAAA,GAAR,UACI3iD,CADJ,EACoByvB,CADpB,EACgD0jC,CADhD,EAEIvjC,CAFJ;EAGQ,QAAAnb,uBAAA;EAAA,QAACld,WAAD;EAAA,QAAQgC,WAAR,CAKN,OAJIkf,IAAIxG,GAAJwG,CAAQ,sBAARA,KACFnd,KAAK+2D,UAAL/2D,CAAgBhE,IAAhBgE,CAAqB0E,CAArB1E,CADEmd,EAGJnd,KAAKqqD,aAALrqD,IAAsBA,KAAKg3D,YAALh3D,CAAkB/D,CAAlB+D,EAAyB/B,CAAzB+B,CAHlBmd,EAIGnd,KAAKonD,cAALpnD,CAAoBs3D,cAApBt3D,CAAmCm0B,CAAnCn0B,EAA6C63D,CAA7C73D,EAAsDs0B,CAAtDt0B,CAAP;KA5zDFqnD,EA+zDQA,WAAAA,aAAAA,GAAR,UAAqBprD,CAArB,EAAsCgC,CAAtC;EACE,WAAO0F,cAAmB1H,CAAnB0H,IAA4BkT,gBAAqB5Y,CAArB4Y,CAAnC;KAh0DFwwC,GAk0DF;KAz2DA,CAi3DA,4BAAA,CACI3tD,CADJ,EACqBuE,CADrB;EAEE,MAAc,cAAVA,CAAU,IAAuB,gBAAVA,CAA3B,EACE,OAAOvE,CAAP,CACK,IAAc,YAAVuE,CAAU,IAAqB,WAAVA,CAAzB,EAA2C;EAGhD,SAFA,IAAM/C,IAAoB,YAAV+C,CAAU,GAAW,IAAIG,UAAJ,CAAe1E,EAAEU,MAAjB,CAAX,GACW,IAAIiE,UAAJ,CAAe3E,EAAEU,MAAjB,CADrC,EAESW,IAAI,CAAb,EAAgBA,IAAIG,EAAOd,MAA3B,IAAqCW,CAArC,EACEG,EAAOH,CAAPG,IAAYX,KAAKuE,KAALvE,CAAWb,EAAEqB,CAAFrB,CAAXa,CAAZW,CAEF,OAAOA,CAAP;EAEA,SAAM,IAAI3B,KAAJ,CAAU,mBAAiB0E,CAA3B,CAAN;EAIJ,6BAAA,CACIvE,CADJ,EACuBuE,CADvB;EAEE,SAAQvE,aAAayE,YAAbzE,GAA6BA,CAA7BA,GAAiC,IAAIyE,YAAJ,CAAiBzE,CAAjB,CAAzC;ECn+DF,cAAA,CAAgCgB,CAAhC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQhJ,GAARgJ,CAAY4S,CAAZ5S,CAAA;KAAhCsJ,IAAkDsJ,OAAlDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAG5H,GAAH4H,EAAA;WAAlB;KAEK0K,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAMA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ/I,IAAR+I,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAA1a,UAAU0G,CAAV1G,CAAA;WAAlB;KAEKoR,CAAP;EAcF,gBAAA,CAAkCziB,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAOA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQpX,KA
ARoX,CAAc4S,CAAd5S,CAAA;KAAhCsJ,IAAoDsJ,OAApDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAA1a,UAAU0G,CAAV1G,CAAA;WAAlB;KAEKoR,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ9I,IAAR8I,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAA1a,UAAU0G,CAAV1G,CAAA;WAAlB;KAEKoR,CAAP;EAeF,gBAAA,CAAkCziB,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAOA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ/U,KAAR+U,CAAc4S,CAAd5S,CAAA;KAAhCsJ,IAAoDsJ,OAApDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAA1a,UAAU0G,CAAV1G,CAAA;WAAlB;KAEKoR,CAAP;EAcF,cAAA,CAAgCziB,CAAhC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CAMA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQrX,GAARqX,CAAY4S,CAAZ5S,CAALikD,CAAA;KADhB36C,IACwCsJ,OADxCtJ,EAJK,UAAC1K,CAAD,EAAQ2C,CAAR;EACH,QAAAhE,QAAA,CACP,SAAQqV,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CAAarB,CAAbqB,CAAA;WAAlB;KAEK0K,CAAP;EAgBF,gBAAA,CAAkCziB,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ7I,KAAR6I,CAAc4S,CAAd5S,CAAA;KAAhCsJ,IAAoDsJ,OAApDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CAAagU,EAAGjqB,GAAHiqB,EAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAcF,cAAA,CAAgCziB,CAAhC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1S,GAAR0S,CAAY4S,CAAZ5S,CAAA;KAAhCsJ,IAAkDsJ,OAAlDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAagU,EAAGrB,OAAHqB,EAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAeF,gBAAA,CAAkCziB,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ5I,KAAR4I,CAAc4S,CAAd5S,CAAA;KAAhCsJ,IAAoDsJ,OAApDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAagU,EAAGpe,GAAHoe,CAAOoD,OAAO,CAAPA,CAAPpD,CAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQnX,IAARmX,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAagU,EAAGrB,OAAHqB,GAAa/pB,IAAb+pB,GAAoB7d,GAApB6d,CAAwBoD,OAAO,CAAPA,CAAxBpD,CAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAeF,gBAAA,CAAkCziB,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ3I,KAAR2I,CAAc4S,CAAd5S,CAAA;KAAhCsJ,IAAoDsJ,OAApDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAagU,EAAG/d,GAAH+d,CAAOoD,OAAO,GAAPA,CAAPpD,EAAoB7d,GAApB6d,CAAwBoD,OAAO,CAAPA,CAAxBpD,CAAbhU,EAAiD5H,GAAjD4H,EAAA;WAAlB;KAEK0K,CAAP;EAcF,iBAAA,CAAmCziB,CAAnC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,QAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1I,MAAR0I,CAAe4S,CAAf5S,CAAA;KAAhCsJ,IAAqDsJ,OAArDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CAAagU,EAAGrB,OAAHqB,GAAa7d,GAAb6d,CAAiBoD,OAAO,CAAPA,CAAjBpD,CAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAcF,qBAAA,CAAuCziB,CAAvC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,YAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQzI,UAARyI,CAAmB4S,CAAnB5S,CAAA;KAAhCsJ,IAAyDsJ,OAAzDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQ
gU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAagU,EAAGtb,MAAHsb,GAAY5b,GAAZ4b,EAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAcF,cAAA,CAAgCziB,CAAhC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CAEA,IAAiB,gBAAb8B,EAAGxoB,KAAP,EACE,OAAOkf,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQkkD,UAARlkD,CAAmB4S,CAAnB5S,CAAA;KAAhCsJ,IAAyDsJ,OAAzDtJ,CAAP,CAMF,OAAOA,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQxI,GAARwI,CAAY4S,CAAZ5S,CAAA;KAAhCsJ,IAAkDsJ,OAAlDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CAAagU,EAAGrB,OAAHqB,GAAa5Z,IAAb4Z,EAAmB,CAAnBA,CAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAgBF,sBAAA,CACIziB,CADJ,EACqBs9D,CADrB,EAC2CC,CAD3C;EAEE,MAAMxxC,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,aAAxBA,CAAX,CACA/gB,OACKo0D,KAAgBC,CADrBr0D,EAEI,yBAAuBo0D,CAAvB,0CAAA,GACkCC,CADlC,OAFJr0D,EAaA,OAAOuZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQqkD,IAARrkD,CAAa4S,CAAb5S,EAAiBmkD,CAAjBnkD,EAA+BokD,CAA/BpkD,CAAA;KADRsJ,IACuDsJ,OADvDtJ,EARM,UAAC1K,CAAD;EACX,aACEgU,IAAI;EAAM,eAAAhU,EAAG7H,KAAH6H,CACIgU,EAAGpc,YAAHoc,CAAgBoD,OAAOmuC,CAAPnuC,CAAhBpD,EACKlc,UADLkc,CACgBA,EAAGxc,SAAHwc,CAAaoD,OAAOouC,CAAPpuC,CAAbpD,CADhBA,CADJhU,EAGI1G,UAAU0G,CAAV1G,CAHJ0G,CAAA;WADZ;KAOK0K,CAAP;EAeF,kBAAA,CAAoCziB,CAApC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAMA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQjI,OAARiI,CAAgB4S,CAAhB5S,CAALikD,CAAA;KADhB36C,IAC4CsJ,OAD5CtJ,EAJM,UAAC1K,CAAD,EAAQ2C,CAAR;EACJ,QAAAhE,QAAA,CACP,SAAQqV,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CAAarB,EAAExI,GAAFwI,CAAMyY,OAAO,CAAPA,EAAUrhB,GAAVqhB,CAAczY,CAAdyY,CAANzY,CAAbqB,CAAA;WAAlB;KAEK0K,CAAP;EAgBF,qBAAA,CAAuCziB,CAAvC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,YAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ/H,QAAR+H,CAAiB4S,EAAG5b,GAAH4b,EAAjB5S,EAA2BhJ,GAA3BgJ,EAAA;KADRsJ,IAC2CsJ,OAD3CtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CAAagU,EAAG5b,GAAH4b,GAAS7a,OAAT6a,EAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAeF,mBAAA,CAAqCziB,CAArC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,UAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ/H,QAAR+H,CAAiB4S,CAAjB5S,CAAA;KAAhCsJ,IAAuDsJ,OAAvDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CAAagU,EAAG7a,OAAH6a,EAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAcF,cAAA,CAAgCziB,CAAhC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ5H,GAAR4H,CAAY4S,CAAZ5S,CAAA;KAAhCsJ,IAAkDsJ,OAAlDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAA,EAAGrB,OAAHqB,GAAava,GAAbua,GAAmB5d,SAAnB4d,CAA6BhU,CAA7BgU,CAAA;WAAlB;KAEKtJ,CAAP;EAcF,cAAA,CAAgCziB,CAAhC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ3H,GAAR2H,CAAY4S,CAAZ5S,CAAA;KAAhCsJ,IAAkDsJ,OAAlDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAA,EAAGrB,OAAHqB,GAAaxa,GAAbwa,GAAmB5b,GAAnB4b,GAAyB5d,SAAzB4d,CAAmChU,CAAnCgU,CAAA;WAAlB;KAEKtJ,CAAP;EAcF,cAAA,CAAgCziB,CAAhC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1H,GAAR0H,CAAY4S,CAAZ5S,CAAA;KAAhCsJ,IAAkDsJ,OAAlDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAagU,EAAGva,GAAHua,GAAStb,MAATsb,EAAbhU,CAAA;WAAlB;KAEK0K,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAOA,OAAOxH,IAAIE,MAAJF,C
AAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQzH,IAARyH,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EALM,UAAC1K,CAAD;EACX,aACEgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAaoX,OAAO,CAAPA,EAAUrhB,GAAVqhB,CAAcpD,EAAGrB,OAAHqB,GAAatb,MAAbsb,EAAdoD,EAAqCntB,IAArCmtB,EAAbpX,CAAA;WADZ;KAIK0K,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAQA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQxH,IAARwH,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EANM,UAAC1K,CAAD;EACX,aACEgU,IAAI;EACA,eAAAhU,EAAGzJ,SAAHyJ,CAAaoX,OAAO,CAAPA,EAAUrhB,GAAVqhB,CAAcpD,EAAGrB,OAAHqB,GAAatb,MAAbsb,EAAdoD,EAAqCntB,IAArCmtB,EAAbpX,EAA+D5H,GAA/D4H,EAAA;WAFN;KAKK0K,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQvH,IAARuH,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAaoX,OAAO,CAAPA,EAAUxhB,GAAVwhB,CAAcpD,EAAGrB,OAAHqB,GAAatb,MAAbsb,EAAdoD,CAAbpX,CAAA;WAAlB;KAEK0K,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQtH,IAARsH,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAA,EAAGrB,OAAHqB,GAAaja,IAAbia,GAAoB5d,SAApB4d,CAA8BhU,CAA9BgU,CAAA;WAAlB;KAEKtJ,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQrH,IAARqH,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAA,EAAGrB,OAAHqB,GAAala,IAAbka,GAAoB5d,SAApB4d,CAA8BhU,CAA9BgU,CAAA;WAAlB;KAEKtJ,CAAP;EAcF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAMA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQxX,IAARwX,CAAa4S,CAAb5S,CAALikD,CAAA;KADhB36C,IACyCsJ,OADzCtJ,EAJM,UAAC1K,CAAD,EAAQ2C,CAAR;EACJ,QAAAhE,QAAA,CACP,SAAQqV,IAAI;EAAM,eAAAoD,OAAO,CAAPA,EAAUrhB,GAAVqhB,CAAczY,EAAEjG,MAAFiG,EAAdyY,EAA0BhhB,SAA1BghB,CAAoCpX,CAApCoX,CAAA;WAAlB;KAEK1M,CAAP;EAgBF,gBAAA,CAAkCziB,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAOA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQpH,KAARoH,CAAc4S,CAAd5S,CAAA;KAAhCsJ,IAAoDsJ,OAApDtJ,EALM,UAAC1K,CAAD;EACX,aACEgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAaoX,OAAO,CAAPA,EAAUxhB,GAAVwhB,CAAcpD,EAAGrB,OAAHqB,GAAatb,MAAbsb,EAAdoD,EAAqCntB,IAArCmtB,EAAbpX,CAAA;WADZ;KAIK0K,CAAP;EAeF,gBAAA,CAAkCziB,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAOA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQnH,KAARmH,CAAc4S,CAAd5S,CAAA;KAAhCsJ,IAAoDsJ,OAApDtJ,EALM,UAAC1K,CAAD;EACX,aACEgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAagU,EAAGrB,OAAHqB,GAAatb,MAAbsb,GAAsBje,GAAtBie,CAA0BoD,OAAO,CAAPA,CAA1BpD,EAAqC/pB,IAArC+pB,EAAbhU,CAAA;WADZ;KAIK0K,CAAP;EAeF,gBAAA,CAAkCziB,CAAlC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAKA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQlH,KAARkH,CAAc4S,CAAd5S,CAAA;KAAhCsJ,IAAoDsJ,OAApDtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGzJ,SAAHyJ,CAAaoX,OAAO,CAAPA,EAAUrhB,GAAVqhB,CAAcpD,EAAGrB,OAAHqB,GAAatb,MAAbsb,EAAdoD,CAAbpX,CAAA;WAAlB;KAEK0K,CAAP;EAeF,cAAA,CAAgCziB,CAAhC;EACE,MAAI+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAT,CACA/gB,OACiB,YAAb6iB,EAAGxoB,KAAU,IAAwB,cAAbwoB,E
AAGxoB,KAD/B2F,EAEI,2CAFJA,GAIiB,YAAb6iB,EAAGxoB,KAAU,KACfwoB,IAAKA,EAAGrB,OAAHqB,EADU,CAJjB7iB,CAcA,OAAOuZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQjH,GAARiH,CAAY4S,CAAZ5S,CAAA;KAAhCsJ,IAAkDsJ,OAAlDtJ,EANM,UAAC1K,CAAD;EACX,aACEgU,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CACNoX,OAAO,IAAItvB,KAAKmC,IAALnC,CAAUA,KAAKyvB,EAAfzvB,CAAXsvB,EAA+BjhB,GAA/BihB,CAAmCpD,EAAGtb,MAAHsb,GAAY5b,GAAZ4b,GAAkBjqB,GAAlBiqB,EAAnCoD,CADMpX,CAAA;WADZ;KAKK0K,CAAP;EAeF,eAAA,CAAiCziB,CAAjC,EAAkDhD,CAAlD;qBAAkDA,OAChD,IAAM+uB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAOA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQhH,IAARgH,CAAa4S,CAAb5S,EAAiBnc,CAAjBmc,CAAA;KAAhCsJ,IAA0DsJ,OAA1DtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAA1a,UAAU0G,CAAV1G,CAAA;WAAlB;KAEKoR,CAAP;ED6xCEA,KAAIxG,GAAJwG,CAAQ,YAARA,KACFA,IAAIg7C,eAAJh7C,CACI,OADJA,EACa;EAAM,SAAA,IAAIkqC,gBAAJ,EAAA;GADnBlqC,EAC2C,CAD3CA,EAEIkC,gBAFJlC,CADEA,CC1xCJ,IAAa9R,MAAMma,KAAI4yC,YAAJ5yC,CAAnB;EAAA,IACanZ,OAAOmZ,KAAI6yC,cAAJ7yC,CADpB;EAAA,IAEa9Y,QAAQ8Y,KAAI8yC,gBAAJ9yC,CAFrB;EAAA,IAGapZ,OAAOoZ,KAAI+yC,cAAJ/yC,CAHpB;EAAA,IAIa/Y,QAAQ+Y,KAAIgzC,gBAAJhzC,CAJrB;EAAA,IAKalZ,OAAOkZ,KAAIizC,cAAJjzC,CALpB;EAAA,IAMa7Y,QAAQ6Y,KAAIkzC,gBAAJlzC,CANrB;EAAA,IAOa1a,OAAO0a,KAAImzC,cAAJnzC,CAPpB;EAAA,IAQala,cAAcka,KAAIozC,4BAAJpzC,CAR3B;EAAA,IASatZ,MAAMsZ,KAAIqzC,YAAJrzC,CATnB;EAAA,IAUahZ,OAAOgZ,KAAIszC,cAAJtzC,CAVpB;EAAA,IAWa5Y,MAAM4Y,KAAIuzC,YAAJvzC,CAXnB;EAAA,IAYahpB,MAAMgpB,KAAIwzC,YAAJxzC,CAZnB;EAAA,IAaaxa,QAAQwa,KAAIyzC,gBAAJzzC,CAbrB;EAAA,IAca/oB,QAAQ+oB,KAAI0zC,gBAAJ1zC,CAdrB;EAAA,IAearkB,QAAMqkB,KAAI2zC,YAAJ3zC,CAfnB;EAAA,IAgBava,QAAQua,KAAI4zC,gBAAJ5zC,CAhBrB;EAAA,IAiBa3Z,aAAa2Z,KAAI6zC,0BAAJ7zC,CAjB1B;EAAA,IAkBa3a,MAAM2a,KAAI8zC,YAAJ9zC,CAlBnB;EAAA,IAmBapa,aAAaoa,KAAI+zC,0BAAJ/zC,CAnB1B;EAAA,IAoBa1mB,QAAQ0mB,KAAIg0C,gBAAJh0C,CApBrB;EAAA,IAqBata,QAAQsa,KAAIi0C,gBAAJj0C,CArBrB;EAAA,IAsBa5Z,UAAU4Z,KAAIk0C,oBAAJl0C,CAtBvB;EAAA,IAuBaza,OAAOya,KAAIm0C,cAAJn0C,CAvBpB;EAAA,IAwBavZ,MAAMuZ,KAAIo0C,YAAJp0C,CAxBnB;EAAA,IAyBajZ,OAAOiZ,KAAIq0C,cAAJr0C,CAzBpB;EAAA,IA0Ba1Z,WAAW0Z,KAAIs0C,sBAAJt0C,CA1BxB;EAAA,IA2Ba9oB,OAAO8oB,KAAIu0C,cAAJv0C,CA3BpB;EAAA,IA4Bara,SAASqa,KAAIw0C,kBAAJx0C,CA5BtB;EAAA,IA6Ba3Y,OAAO2Y,KAAIy0C,cAAJz0C,CA7BpB;EAAA,IA8BarZ,MAAMqZ,KAAI00C,YAAJ10C,CA9BnB;EAAA,IA+BanpB,SAAOmpB,KAAI20C,cAAJ30C,CA/BpB,CC/qBA,8BAAA,CACI9qB,CADJ,EAC4BgN,CAD5B,EAEIC,CAFJ,EAE4CC,CAF5C,EAGIC,CAHJ,EAIItF,CAJJ;qBAE4CqF,UAG1C,IAGIwyD,CAHJ;EAAA,MAOIC,CAPJ;EAAA,MAAM5zC,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,oBAAxBA,CAAX;EAAA,MACM21C,IAAQ31C,gBAAgBjd,CAAhBid,EAAsB,MAAtBA,EAA8B,oBAA9BA,CADd;EAAA,MAEM41C,IAAY51C,gBAAgBhd,CAAhBgd,EAA0B,UAA1BA,EAAsC,oBAAtCA,CAFlB,CAoCA,OAhCa,QAAT9c,CAAS,KACXuyD,IAASz1C,gBAAgB9c,CAAhB8c,EAAuB,OAAvBA,EAAgC,oBAAhCA,CADE,GAIC,QAAVpiB,CAAU,KACZ83D,IAAU11C,gBAAgBpiB,CAAhBoiB,EAAwB,QAAxBA,EAAkC,oBAAlCA,CADE,CAJD,EAOb/gB,OACgB,MAAZ6iB,EAAGrnB,IADPwE,EAEI,kEACO6iB,EAAGrnB,IADV,MAFJwE,CAPa,EAWbA,OACmB,MAAf02D,EAAMl7D,IAAS,IAAoB,MAAfk7D,EAAMl7D,IAD9BwE,EAEI,+EACgB02D,EAAMl7D,IADtB,MAFJwE,CAXa,EAebA,OACuB,MAAnB22D,EAAUn7D,IAAS,IAAwB,MAAnBm7D,EAAUn7D,IADtCwE,EAEI,mFACoB22D,EAAUn7D,IAD9B,MAFJwE,CAfa,EAmBC,QAAVw2D,CAAU,IACZx2D,OACoB,MAAhBw2D,EAAOh7D,IAAS,IAAqB,MAAhBg7D,EAAOh7D,IADhCwE,EAEI,gFACoBw2D,EAAOh7D,IAD3B,MAFJwE,CApBW,EAyBE,QAAXy2D,CAAW,IACbz2D,OACqB,MAAjBy2D,EAAQj7D,IAAS,IAAsB,MAAjBi7D,EAAQj7D,IADlCwE,EAEI,iFACoBy2D,EAAQj7D,IAD5B,MAFJwE,CA1BW,EAgCNkE,mBACH2e,CADG3e,EACCwyD,CADDxyD,EACQyyD,CADRzyD,EACmBF,CADnBE,EACoCsyD,CADpCtyD,EAC4CuyD,CAD5CvyD,CAAP;EAeF,+BAAA,CACIpN,CADJ,EAC4BgN,CAD5B,EAEIC,CAFJ,EAE4CC,CAF5C,EAGIC,CAHJ,
EAIItF,CAJJ;qBAE4CqF,UAG1C,IAGIwyD,CAHJ;EAAA,MAOIC,CAPJ;EAAA,MAAM5zC,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,oBAAxBA,CAAX;EAAA,MACM21C,IAAQ31C,gBAAgBjd,CAAhBid,EAAsB,MAAtBA,EAA8B,oBAA9BA,CADd;EAAA,MAEM41C,IAAY51C,gBAAgBhd,CAAhBgd,EAA0B,UAA1BA,EAAsC,oBAAtCA,CAFlB,CAoCA,OAhCa,QAAT9c,CAAS,KACXuyD,IAASz1C,gBAAgB9c,CAAhB8c,EAAuB,OAAvBA,EAAgC,oBAAhCA,CADE,GAIC,QAAVpiB,CAAU,KACZ83D,IAAU11C,gBAAgBpiB,CAAhBoiB,EAAwB,QAAxBA,EAAkC,oBAAlCA,CADE,CAJD,EAOb/gB,OACgB,MAAZ6iB,EAAGrnB,IADPwE,EAEI,kEACO6iB,EAAGrnB,IADV,MAFJwE,CAPa,EAWbA,OACmB,MAAf02D,EAAMl7D,IAAS,IAAoB,MAAfk7D,EAAMl7D,IAD9BwE,EAEI,+EACgB02D,EAAMl7D,IADtB,MAFJwE,CAXa,EAebA,OACuB,MAAnB22D,EAAUn7D,IAAS,IAAwB,MAAnBm7D,EAAUn7D,IADtCwE,EAEI,mFACoB22D,EAAUn7D,IAD9B,MAFJwE,CAfa,EAmBC,QAAVw2D,CAAU,IACZx2D,OACoB,MAAhBw2D,EAAOh7D,IAAS,IAAqB,MAAhBg7D,EAAOh7D,IADhCwE,EAEI,gFACoBw2D,EAAOh7D,IAD3B,MAFJwE,CApBW,EAyBE,QAAXy2D,CAAW,IACbz2D,OACqB,MAAjBy2D,EAAQj7D,IAAS,IAAsB,MAAjBi7D,EAAQj7D,IADlCwE,EAEI,iFACoBy2D,EAAQj7D,IAD5B,MAFJwE,CA1BW,EAgCNkE,mBACH2e,CADG3e,EACCwyD,CADDxyD,EACQyyD,CADRzyD,EACmBF,CADnBE,EACoCsyD,CADpCtyD,EAC4CuyD,CAD5CvyD,CAAP;EAeF,+BAAA,CACIpN,CADJ,EAC4BgN,CAD5B,EAEIC,CAFJ,EAE4CC,CAF5C,EAGIC,CAHJ,EAIItF,CAJJ;qBAE4CqF,UAG1C,IAGIwyD,CAHJ;EAAA,MAOIC,CAPJ;EAAA,MAAM5zC,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,oBAAxBA,CAAX;EAAA,MACM21C,IAAQ31C,gBAAgBjd,CAAhBid,EAAsB,MAAtBA,EAA8B,oBAA9BA,CADd;EAAA,MAEM41C,IAAY51C,gBAAgBhd,CAAhBgd,EAA0B,UAA1BA,EAAsC,oBAAtCA,CAFlB,CAmCA,OA/Ba,QAAT9c,CAAS,KACXuyD,IAASz1C,gBAAgB9c,CAAhB8c,EAAuB,OAAvBA,EAAgC,oBAAhCA,CADE,GAIC,QAAVpiB,CAAU,KACZ83D,IAAU11C,gBAAgBpiB,CAAhBoiB,EAAwB,QAAxBA,EAAkC,oBAAlCA,CADE,CAJD,EAOb/gB,OACgB,MAAZ6iB,EAAGrnB,IADPwE,EAEI,kEACO6iB,EAAGrnB,IADV,MAFJwE,CAPa,EAWbA,OACmB,MAAf02D,EAAMl7D,IAAS,IAAoB,MAAfk7D,EAAMl7D,IAD9BwE,EAEI,+EACgB02D,EAAMl7D,IADtB,MAFJwE,CAXa,EAebA,OACuB,MAAnB22D,EAAUn7D,IAAS,IAAwB,MAAnBm7D,EAAUn7D,IADtCwE,EAEI,mFACoB22D,EAAUn7D,IAD9B,MAFJwE,CAfa,EAmBC,QAAVw2D,CAAU,IACZx2D,OACoB,MAAhBw2D,EAAOh7D,IAAS,IAAqB,MAAhBg7D,EAAOh7D,IADhCwE,EAEI,gFACoBw2D,EAAOh7D,IAD3B,MAFJwE,CApBW,EAyBE,QAAXy2D,CAAW,IACbz2D,OACqB,MAAjBy2D,EAAQj7D,IAAS,IAAsB,MAAjBi7D,EAAQj7D,IADlCwE,EAEI,iFACoBy2D,EAAQj7D,IAD5B,MAFJwE,CA1BW,EA+BNkE,mBACH2e,CADG3e,EACCwyD,CADDxyD,EACQyyD,CADRzyD,EACmBF,CADnBE,EACoCsyD,CADpCtyD,EAC4CuyD,CAD5CvyD,CAAP;EA6BF,6BAAA,CACIpN,CADJ,EACsCgN,CADtC,EAEIC,CAFJ,EAE6CC,CAF7C,EAGIC,CAHJ,EAIItF,CAJJ;qBAE6CqF,UAG3C,IAGIwyD,CAHJ;EAAA,MAOIC,CAPJ;EAAA,MAyBIG,CAzBJ;EAAA,MAAM/zC,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,oBAAxBA,CAAX;EAAA,MACM21C,IAAQ31C,gBAAgBjd,CAAhBid,EAAsB,MAAtBA,EAA8B,oBAA9BA,CADd;EAAA,MAEM41C,IAAY51C,gBAAgBhd,CAAhBgd,EAA0B,UAA1BA,EAAsC,oBAAtCA,CAFlB,CAIa,QAAT9c,CAAS,KACXuyD,IAASz1C,gBAAgB9c,CAAhB8c,EAAuB,OAAvBA,EAAgC,oBAAhCA,CADE,GAIC,QAAVpiB,CAAU,KACZ83D,IAAU11C,gBAAgBpiB,CAAhBoiB,EAAwB,QAAxBA,EAAkC,oBAAlCA,CADE,CAJD,EAQb/gB,OACI02D,EAAMl7D,IAANk7D,KAAeC,EAAUn7D,IAD7BwE,EAEI,8EAFJA,CARa,EAYbA,OACe,QAAXy2D,CAAW,IAAQC,EAAMl7D,IAANk7D,KAAeD,EAAQj7D,IAD9CwE,EAEI,4EAFJA,CAZa,EAgBbA,OACc,QAAVw2D,CAAU,IAAQE,EAAMl7D,IAANk7D,KAAeF,EAAOh7D,IAD5CwE,EAEI,2EAFJA,CAhBa,EAuBX42D,IADc,MAAZ/zC,EAAGrnB,IAAS,IAAiB,MAAZqnB,EAAGrnB,IAAR,GACRqnB,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAW,CAAXA,EAAc,CAAdA,EAAiBA,EAAGvqB,IAApBuqB,CADQ,GAEO,MAAZA,EAAGrnB,IAAS,GACfqnB,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAW,CAAXA,EAAcA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAdA,EAA2BA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAA3BA,CADe,GAEA,MAAZA,EAAGrnB,IAAS,GACfqnB,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAArCA,CADe,GAGfA,CA7BK,CAwGb,OANYt
J,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ/L,kBAAR+L,CACP2mD,CADO3mD,EACF6mD,mBAAmBJ,CAAnBI,CADE7mD,EACyB6mD,mBAAmBH,CAAnBG,CADzB7mD,EAEPjM,CAFOiM,EAEU6mD,mBAAmBN,CAAnBM,CAFV7mD,EAGP6mD,mBAAmBL,CAAnBK,CAHO7mD,CAAA;KADHsJ,IAKPsJ,OAAI6zC,UAAOC,cAAWH,WAAQC,YALvBl9C,EAlEA,UAAC1K,CAAD;EACV,QAAMkoD,IAAuB,QAAVP,CAAU,GAAOvwC,OAAO,CAAPA,CAAP,GAAmBuwC,CAAhD;EAAA,QACMQ,IAAgBC,iBAAiBP,EAAMr+D,KAAvB4+D,EAA8BL,EAAIv+D,KAAlC4+D,CADtB;EAAA,QAEMC,MAFN,CAGA,IAAmB,MAAfR,EAAMl7D,IAAV,EAAsB;EACpB,WAAK,IAAIrE,IAAI,CAAb,EAAgBA,IAAIy/D,EAAIv+D,KAAJu+D,CAAUpgE,MAAVogE,GAAmB,CAAvC,IAA4Cz/D,CAA5C,EACE+/D,EAAU9+D,IAAV8+D,CAAeN,EAAIv+D,KAAJu+D,CAAUz/D,CAAVy/D,CAAfM,EAEFA,EAAU9+D,IAAV8+D,CAAe,CAAfA;EAGF,SAAMC,IAAat0C,EAAGje,GAAHie,CAAO6zC,CAAP7zC,CAAnB;EAAA,QACMu0C,IAAoBvoD,EAAG7J,GAAH6J,CAAOkoD,CAAPloD,CAD1B;EAAA,QAEMwoD,IAAsB/vD,MAAMqvD,EAAUlyD,GAAVkyD,CAAc1wC,OAAOjiB,CAAPiiB,CAAd0wC,CAANrvD,CAF5B;EAAA,QAGMgwD,IAAiBD,EAAoBryD,GAApBqyD,CAAwBA,CAAxBA,EACKryD,GADLqyD,CACSA,CADTA,EAEKryD,GAFLqyD,CAESpxC,QAAQ,EAARA,CAFToxC,CAHvB,CA8CA,SACEx0C,IAzCW;EACX,eAAmB,MAAf6zC,EAAMl7D,IAAS,GACVqT,EACF7J,GADE6J,CACEjM,KACDy0D,EAAoBR,IAApBQ,CAAyB,CAAzBA,EAA4B,CAA5BA,EAA+B,CAA/BA,EAAkCX,EAAMr+D,KAANq+D,CAAY,CAAZA,CAAlCW,CADCz0D,EACkDs0D,CADlDt0D,CADFiM,EAGF7J,GAHE6J,CAGEkoD,CAHFloD,EAIFtN,OAJEsN,CAIMgU,EAAGxqB,KAJTwW,CADU,GAOVA,EAAG7J,GAAH6J,CAAOwoD,CAAPxoD,EAA4B7J,GAA5B6J,CAAgCkoD,CAAhCloD,EAA4CtN,OAA5CsN,CAAoDgU,EAAGxqB,KAAvDwW,CAPT;WAyCA6nD,OA/Bc;EACd,YAAIa,IAAUF,EAAoBryD,GAApBqyD,CAAwBpxC,QAAQ,CAARA,CAAxBoxC,EAAoCryD,GAApCqyD,CAAwCD,CAAxCC,CAAd,CAIA,OAHmB,MAAfX,EAAMl7D,IAAS,KACjB+7D,IAAUA,EAAQrgE,GAARqgE,CAAYP,CAAZO,CADO,GAGZA,EAAQh2D,OAARg2D,CAAgBb,EAAMr+D,KAAtBk/D,CAAP;WA2BAZ,WAzBkB;EAClB,YAAIa,IAAcF,EAAetyD,GAAfsyD,CAAmBH,CAAnBG,EAA+BtyD,GAA/BsyD,CAAmCF,CAAnCE,CAAlB,CAIA,OAHmB,MAAfZ,EAAMl7D,IAAS,KACjBg8D,IAAcA,EAAYtgE,GAAZsgE,CAAgBR,CAAhBQ,CADG,GAGZA,EAAYj2D,OAAZi2D,CAAoBd,EAAMr+D,KAA1Bm/D,CAAP;WAqBAhB,QAnBe;EACf,YAAMiB,IAAwBN,EAAWnyD,GAAXmyD,CAAeE,CAAfF,CAA9B;EAAA,YACIO,IAAW7oD,EAAG7J,GAAH6J,CAAO4oD,CAAP5oD,CADf,CAKA,OAHmB,MAAf6nD,EAAMl7D,IAAS,KACjBk8D,IAAWA,EAASxgE,GAATwgE,CAAaV,CAAbU,CADM,GAGZA,EAASn2D,OAATm2D,CAAiBhB,EAAMr+D,KAAvBq/D,CAAP;WAcAjB,SAZgB;EAChB,YAAIkB,IAAY9oD,CAAhB,CAIA,OAHmB,MAAf6nD,EAAMl7D,IAAS,KACjBm8D,IAAYA,EAAUzgE,GAAVygE,CAAcX,CAAdW,CADK,GAGZA,EAAUp2D,OAAVo2D,CAAkBjB,EAAMr+D,KAAxBs/D,CAAP;WAEF;KASUp+C,EAMDhY,OANCgY,CAMOsJ,EAAGxqB,KANVkhB,CAMZ;EAGF,4BAAA,CAA4BziB,CAA5B;EACE,SAAS,QAALA,CAAK,GACA,IADA,GAGM,MAAXA,EAAE0E,IAAS,GACN1E,EAAEwK,IAAFxK,EADM,GAEO,MAAXA,EAAE0E,IAAS,GACb1E,CADa,GAEA,MAAXA,EAAE0E,IAAS,GACb1E,EAAE+/D,IAAF//D,CAAO,CAAPA,EAAU,CAAVA,EAAaA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAbA,EAAyBA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAzBA,CADa,GAEA,MAAXA,EAAE0E,IAAS,GACb1E,EAAE+/D,IAAF//D,CAAO,CAAPA,EAAUA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAVA,EAAsBA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAtBA,EAAkCA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAlCA,CADa,GAGfA,CAZP;EAeF,KAAa8gE,uBAAuBh2C,KAAIi2C,8CAAJj2C,CAApC;EAAA,IACak2C,uBAAuBl2C,KAAIm2C,8CAAJn2C,CADpC;EAAA,IAEao2C,uBAAuBp2C,KAAIq2C,8CAAJr2C,CAFpC;EAAA,IAGa1d,qBAAqB0d,KAAIs2C,0CAAJt2C,CAHlC,4BCzSIqH,GACA1e,GAAqC9O,GACrCsO,GAAoClL,GACpCs5D,GACAxuD;qBAAAA,oBACI,IAEF4jB,CAFE;EAAA,MAAAhY,sBAAA;EAAA,MAAC2T,QAAD;EAAA,MAAeC,QAAf,CAGN,IAAmB,mBAAfxf,CAAJ,EACE4jB,KAAerE,GAAcC,GAAaF,EAAQ,CAARA,GAAYA,EAAQ,CAARA,EAAtDsE,CADF,KAEO;EAAA,QAAmB,oBAAf5jB,CAAJ,EAGL,MAAM,IAAIhU,KAAJ,CAAU,wBAAsBgU,CAAhC,CAAN,CAFA4jB,KAAerE,GAAcC,GAAaF,EAAQ,CAARA,GAAYA,EAAQ,CAARA,EAAtDsE;EAKF,UAAO6qC,kBACHnvC,CADGmvC,EACM7qC,CADN6qC,EACmB38D,CADnB28D,EAC4BruD,CAD5BquD,EACuCv5D,CADvCu5D,EAC4CD,CAD5CC,GAC0D,CAD1DA,EAEHzuD,CAFGyuD
,CAAP;EASF,2BAAA,CACInvC,CADJ,EAEIsE,CAFJ,EAGI9xB,CAHJ,EAGsCsO,CAHtC,EAIIlL,CAJJ,EAIgCs5D,CAJhC,EAKIE,CALJ,EAMI1uD,CANJ;qBAKI0uD,0BACA1uD,oBACE,IAAA4L,oBAAA;EAAA,MAACoT,QAAD;EAAA,MAAY6E,QAAZ;EAAA,MAAsBC,QAAtB;EAAA,MAA+BG,QAA/B,CACJ,IAAmB,mBAAfjkB,CAAJ,EACGgf,QAAAA,EAAW6E,QAAX7E,EAAqB8E,QAArB9E,EAA8BiF,QAA9BjF,CADH,KAEO;EAAA,QAAmB,oBAAfhf,CAAJ,EAGL,MAAM,IAAIhU,KAAJ,CAAU,wBAAsBgU,CAAhC,CAAN,CAFCgf,QAAAA,EAAWiF,QAAXjF,EAAuB6E,QAAvB7E,EAAiC8E,QAAjC9E;EAKI,OAcH/K,CAdG;EAAA,MAAAsL,QAAA;EAAA,MAAcC,QAAd;EAAA,MAA6BmvC,QAA7B;EAAA,MACD3pC,sBADC;EAAA,MACAvF,QADA;EAAA,MACcC,QADd;EAAA,MAED0F,sBAFC;EAAA,MAEAzF,QAFA;EAAA,MAEgBC,QAFhB;EAAA,MAIDC,IACF+uC,uBAAuBrvC,CAAvBqvC,EAAqCjvC,CAArCivC,CALG;EAAA,MAMD9uC,IACF8uC,uBAAuBpvC,CAAvBovC,EAAoChvC,CAApCgvC,CAPG;EAAA,MAQDC,4CARC;EAAA,MAQA7uC,aARA;EAAA,MAQSK,eART;EAAA,MAQoBC,cARpB;EAAA,MAYDyD,IAAc2qC,IAAYC,IAAiB1qC,CAA7ByqC,GAA0CC,CAZvD,CAqBP,OANmB,oBAAf3uD,CAAe,GACjBiU,KAAY+K,GAAW+E,GAAa1D,GAAWC,EAD9B,GAEO,mBAAftgB,CAAe,KACxBiU,KAAY+K,GAAWqB,GAAWC,GAAUyD,EADpB,CAFP,IAOjB/E,cACAhf,eACA6jB,aACAC,YACAG,eACA5D,cACAC,aACAyD,gBACA/D,YACAP,iBACAC,gBACAH,iBACAC,gBACAK,0BACAC,yBACAH,mBACAC,kBACAN,YACArL,aACA2P,gBApBF;EAwBF,8BAAA,CACItE,CADJ,EACuCwvC,CADvC,EAC0DC,CAD1D,EAEIh5D,CAFJ,EAEoBi5D,CAFpB,EAGIR,CAHJ;EAIiB,UAAXQ,CAAW,KACbA,IAAUC,kBAAkB3vC,CAAlB2vC,EAA2BH,CAA3BG,EAAsCl5D,CAAtCk5D,CADG,EAGf,IAAMC,IAAY5vC,EAAQ,CAARA,CAAlB;EAAA,MACM6vC,IAAY7vC,EAAQ,CAARA,CADlB;EAAA,MAGM8vC,IAAaC,kBACdH,IAAYJ,CAAZI,GAAwB,IAAIF,KAAWj5D,IAAS,CADlCs5D,EACqCb,CADrCa,CAHnB,CAKAh5D,OACI+d,MAAWg7C,CAAXh7C,CADJ/d,EAEI,2BAAyB+4D,CAAzB,uEAFJ/4D,EAKA,IAAMi5D,IAAaD,kBACdF,IAAYL,CAAZK,GAAwB,IAAIH,KAAWj5D,IAAS,CADlCs5D,EACqCb,CADrCa,CAAnB,CAOA,OALAh5D,OACI+d,MAAWk7C,CAAXl7C,CADJ/d,EAEI,8BAA4Bi5D,CAA5B,uEAFJj5D,IAKQ+4D,GAAYE,GAAYP,EAAhC;EAGF,2BAAA,CACIn8C,CADJ,EAC0Ck8C,CAD1C,EAC6D/4D,CAD7D,EAEIkK,CAFJ;qBAEIA,OACF,IAAMsvD,IAAqBX,uBAAuBE,CAAvBF,EAAkC3uD,CAAlC2uD,CAA3B,CACA,OAAO5hE,KAAKkC,KAALlC,EACF4lB,EAAW,CAAXA,KAAiB7c,IAAS,CAA1B6c,IAA+B7c,CAA/B6c,GAAwC28C,KAAsB,CAD5DviE,CAAP;EAIF,yBAAA,CAAyBwiE,CAAzB;EACE,SAAwB,mBAAVA,CAAU,IAAYA,GAAOA,EAAnB,GAA4BA,CAApD;EAcF,gCAAA,CAAgC5uD,CAAhC,EAAoDX,CAApD;EACE,SAAIA,KAAY,CAAZA,GACKW,CADLX,GAIGW,KAAcA,IAAa,MAAMX,IAAW,EAJnD;EAOF,0BAAA,CACI/K,CADJ,EACgC2uB,CADhC,EACkDC,CADlD,EAEIrE,CAFJ,EAE0BC,CAF1B,EAE+CH,CAF/C,EAGIC,CAHJ,EAGyBgvC,CAHzB;EAKE,MAAIxuC,CAAJ,EACIK,CADJ,EAEIC,CAFJ,CAIA,IAAmB,mBAARprB,CAAX,EAA6B;EAE3B8qB,UAAWC,KAAK/qB,GAAKu6D,QAAQv6D,GAAKirB,MAAMjrB,GAAKw6D,OAAOx6D,GAAKqX,MADhC,MAARrX,CAAQ,GAAK,OAAL,GAAe,UACxC8qB,CACA,IAAM/L,IAAW07C,sBACZ9rC,GAAUC,GAAS,EADP6rC,EACWpwC,CADXowC,EACyB,CADzBA,EAC4BlwC,CAD5BkwC,EAC0Cz6D,CAD1Cy6D,EAEbnB,CAFamB,CAAjB,CAGAtvC,IAAYpM,EAAS,CAATA,CAAZoM,EACAC,IAAWrM,EAAS,CAATA,CADXoM;KANF,MAQO,IAAY,WAARnrB,CAAJ,EAAoB;EAGzB,QAAM06D,MAFNvvC,IAAYrzB,KAAKuQ,IAALvQ,CAAU62B,IAAWpE,CAArBzyB,KAGK,KAAKyyB,IAAeF,IAAesE,CADpD;EAAA,QAEMgsC,MAHNvvC,IAAWtzB,KAAKuQ,IAALvQ,CAAU82B,IAAUpE,CAApB1yB,KAGuB,KAAK0yB,IAAcF,IAAcsE,CAFnE;EAAA,QAGMgsC,IAAM9iE,KAAKkC,KAALlC,CAAW4iE,IAAiB,CAA5B5iE,CAHZ;EAAA,QAIMyiE,IAASG,IAAiBE,CAJhC;EAAA,QAKM3vC,IAAOnzB,KAAKkC,KAALlC,CAAW6iE,IAAgB,CAA3B7iE,CALb,CAOAgzB,MAAWC,QAAKwvC,WAAQtvC,SAAMuvC,OADhBG,IAAgB1vC,GACO5T,MAAM,QAA3CyT;KAVK,MAWA;EAAA,QAAY,YAAR9qB,CAAJ,EAKL,MAAMlJ,MAAM,gCAA8BkJ,CAApClJ,CAAN,CAJAg0B,MAAWC,KAAK,GAAGwvC,QAAQ,GAAGtvC,MAAM,GAAGuvC,OAAO,GAAGnjD,MAAM,SAAvDyT,EACAK,IAAYrzB,KAAKuQ,IAALvQ,EAAW62B,IAAWtE,CAAXsE,GAA0B,KAAKpE,CAA1CzyB,CADZgzB,EAEAM,IAAWtzB,KAAKuQ,IAALvQ,EAAW82B,IAAUtE,CAAVsE,GAAwB,KAAKpE,CAAxC1yB,CAFXgzB;EAMF,YAAQA,YAASK,cAAWC,aAA5B;EAQF,0BAAA,CACI9pB,CADJ,EACmBg4D,CADnB;EAEE,OAAKA,CAAL,EACE,
OAAOh4D,CAAP,CAEF,QAAQg4D,CAAR,GACE,KAAK,OAAL;EAEE,aAAOxhE,KAAKuE,KAALvE,CAAWwJ,CAAXxJ,CAAP,CACF,KAAK,MAAL;EAEE,aAAOA,KAAKuQ,IAALvQ,CAAUwJ,CAAVxJ,CAAP,CACF,KAAK,OAAL;EACE,aAAOA,KAAKkC,KAALlC,CAAWwJ,CAAXxJ,CAAP,CACF;EACE,YAAM,IAAIhB,KAAJ,CAAU,0BAAwBwiE,CAAlC,CAAN,CAVJ;EAcF,2BAAA,CAAkCgB,CAAlC;EACQ,MAAA5jD,sBAAA;EAAA,MAACmkD,QAAD;EAAA,MAAOC,QAAP,CACN,OAAgB,MAATD,CAAS,IAAc,MAATC,CAArB;EAGF,wCAAA,CACIl+D,CADJ,EAEIsO,CAFJ;EAGE,SAAO6vD,kBAAkBn+D,CAAlBm+D,KAA8BA,kBAAkB7vD,CAAlB6vD,CAArC;ECvOF,iBAAA,CACI9jE,CADJ,EACqBsB,CADrB,EACsC2L,CADtC,EAEIC,CAFJ;qBACsCD,0BAClCC,QACF,IAAM62D,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,QAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,QAAxBA,CADX;EAAA,MAGMg5C,IACFh3D,IAAa82D,EAAGxhE,KAAHwhE,CAASA,EAAGr+D,IAAHq+D,GAAU,CAAnBA,CAAb92D,GAAqC82D,EAAGxhE,KAAHwhE,CAASA,EAAGr+D,IAAHq+D,GAAU,CAAnBA,CAJzC;EAAA,MAKMG,IACFh3D,IAAa82D,EAAGzhE,KAAHyhE,CAASA,EAAGt+D,IAAHs+D,GAAU,CAAnBA,CAAb92D,GAAqC82D,EAAGzhE,KAAHyhE,CAASA,EAAGt+D,IAAHs+D,GAAU,CAAnBA,CANzC;EAAA,MAQMhuB,IACF/oC,IAAa82D,EAAGxhE,KAAHwhE,CAASA,EAAGr+D,IAAHq+D,GAAU,CAAnBA,CAAb92D,GAAqC82D,EAAGxhE,KAAHwhE,CAASA,EAAGr+D,IAAHq+D,GAAU,CAAnBA,CATzC;EAAA,MAUM9tB,IACF/oC,IAAa82D,EAAGzhE,KAAHyhE,CAASA,EAAGt+D,IAAHs+D,GAAU,CAAnBA,CAAb92D,GAAqC82D,EAAGzhE,KAAHyhE,CAASA,EAAGt+D,IAAHs+D,GAAU,CAAnBA,CAXzC;EAAA,MAaMG,IAAaJ,EAAGxhE,KAAHwhE,CAAS5/D,KAAT4/D,CAAe,CAAfA,GAAmB,CAAnBA,CAbnB;EAAA,MAcMK,IAAaJ,EAAGzhE,KAAHyhE,CAAS7/D,KAAT6/D,CAAe,CAAfA,GAAmB,CAAnBA,CAdnB;EAAA,MAeMK,IAAYp6D,cAAmBk6D,CAAnBl6D,CAflB;EAAA,MAgBMq6D,IAAYr6D,cAAmBm6D,CAAnBn6D,CAhBlB,CAkBAC,OACI65D,EAAGr+D,IAAHq+D,IAAW,CAAXA,IAAgBC,EAAGt+D,IAAHs+D,IAAW,CAA3BD,IAAgCA,EAAGr+D,IAAHq+D,KAAYC,EAAGt+D,IADnDwE,EAEI,8EACiB65D,EAAGr+D,IADpB,UAAA,GACgCs+D,EAAGt+D,IADnC,MAFJwE,GAKAA,OACImN,YAAiB8sD,CAAjB9sD,EAA6B+sD,CAA7B/sD,CADJnN,EAEI,wCAAsCi6D,CAAtC,YAAA,GACOC,CADP,8BAAA,GAC6CL,EAAGxhE,KADhD,UAAA,GAEOyhE,EAAGzhE,KAFV,iBAFJ2H,CALAA,EAWAA,OACI+5D,MAAgBC,CADpBh6D,EAEI,oCAAkC+5D,CAAlC,YAAA,GACOC,CADP,8BAAA,GAC8CH,EAAGxhE,KADjD,UAAA,GAEOyhE,EAAGzhE,KAFV,qBAAA,GAEkC0K,CAFlC,GAGI,kBAHJ,GAGuBC,CAHvB,iBAFJhD,CAXAA,CAkBA,IAAM4d,IAAWi8C,EAAGxhE,KAAHwhE,CAAS5/D,KAAT4/D,CAAe,CAAfA,GAAmB,CAAnBA,EAAsBt2D,MAAtBs2D,EAA8B/tB,GAAaC,EAA3C8tB,CAAjB;EAAA,MAEMjS,IAAM7kD,IAAa82D,EAAGhS,IAAHgS,CAAQM,CAARN,EAAmBE,CAAnBF,EAAgC/tB,CAAhC+tB,CAAb92D,GACa82D,EAAGhS,IAAHgS,CAAQM,CAARN,EAAmB/tB,CAAnB+tB,EAAgCE,CAAhCF,CAHzB;EAAA,MAIM/R,IAAM9kD,IAAa82D,EAAGjS,IAAHiS,CAAQM,CAARN,EAAmB/tB,CAAnB+tB,EAAgCE,CAAhCF,CAAb92D,GACa82D,EAAGjS,IAAHiS,CAAQM,CAARN,EAAmBE,CAAnBF,EAAgC/tB,CAAhC+tB,CALzB,CAkCA,OAHYvgD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQoqD,WAARpqD,CAAoB23C,CAApB33C,EAAyB63C,CAAzB73C,EAA8BlN,CAA9BkN,EAA0CjN,CAA1CiN,CAAA;KADHsJ,IAEPsgD,IAAIjS,GAAKkS,IAAIhS,GAFNvuC,EAxBC,UAAC1K,CAAD;EACX,WAAK9L,KAAeC,CAAfD,IAKOA,KAAcC,MAEtB62D,IAAI;EAAM,eAAAhrD,EAAG5L,MAAH4L,CAAUi5C,EAAItmC,OAAJsmC,EAAVj5C,GAAyB,CAAzBA,GAAgC,CAAhCA,CAAA;WACVirD,IAAI;EAAM,eAAAjrD,EAAG5L,MAAH4L,CAAU+4C,EAAIpmC,OAAJomC,EAAV/4C,GAAyB,CAAzBA,GAA+B,CAA/BA,CAAA;cAEH9L,MAAeC,CAAfD,KAEP82D,IAAI;EAAM,eAAA/R,EAAItmC,OAAJsmC,GAAc7kD,MAAd6kD,CAAqBj5C,CAArBi5C,GAAyB,CAAzBA,GAAgC,CAAhCA,CAAA;WACVgS,IAAI;EAAM,eAAAlS,EAAIpmC,OAAJomC,GAAc3kD,MAAd2kD,CAAqB/4C,CAArB+4C,GAAyB,CAAzBA,GAAgC,CAAhCA,CAAA;WAHH7kD,KAOP82D,IAAI;EAAM,eAAA/R,EAAItmC,OAAJsmC,GAAc7kD,MAAd6kD,CAAqBj5C,CAArBi5C,GAAyB,CAAzBA,GAA+B,CAA/BA,CAAA;WACVgS,IAAI;EAAM,eAAAjrD,EAAG5L,MAAH4L,CAAU+4C,EAAIpmC,OAAJomC,EAAV/4C,GAAyB,CAAzBA,GAA+B,CAA/BA,CAAA;WAlBT9L,KAED82D,IAAI;EAAM,eAAAhrD,EAAG5L,MAAH4L,CAAUi5C,EAAItmC,OAAJsmC,EAAVj5C,GAAyB,CAAzBA,GAAgC,CAAhCA
,CAAA;WACVirD,IAAI;EAAM,eAAAlS,EAAIpmC,OAAJomC,GAAc3kD,MAAd2kD,CAAqB/4C,CAArB+4C,GAAyB,CAAzBA,GAA+B,CAA/BA,CAAA;WAHd;KAuBUruC,EAGDhY,OAHCgY,CAGOqE,CAHPrE,CAGZ;EAgBF,uBAAA,CACImkC,CADJ,EAC6BC,CAD7B;EAEE,MAAM2c,IAAMv5C,gBAAgB28B,CAAhB38B,EAAoB,IAApBA,EAA0B,cAA1BA,CAAZ;EAAA,MACMw5C,IAAMx5C,gBAAgB48B,CAAhB58B,EAAoB,IAApBA,EAA0B,cAA1BA,CADZ,CAQA,OALA/gB,OACiB,MAAbs6D,EAAI9+D,IAAS,IAAkB,MAAb++D,EAAI/+D,IAD1BwE,EAEI,iEACOs6D,EAAI9+D,IADX,UAAA,GACuB++D,EAAI/+D,IAD3B,MAFJwE,GAKOs6D,EAAIrc,IAAJqc,EAAU,CAAVA,EAAa,CAAbA,EAAgBr3D,MAAhBq3D,CAAuBC,EAAItc,IAAJsc,CAAS,CAATA,GAAa,CAAbA,CAAvBD,CAAP;EAmBF,cAAA,CAAcE,CAAd,EAAqCC,CAArC;EACE,MAAMC,IAAM35C,gBAAgBy5C,CAAhBz5C,EAAoB,IAApBA,EAA0B,KAA1BA,CAAZ;EAAA,MACM45C,IAAM55C,gBAAgB05C,CAAhB15C,EAAoB,IAApBA,EAA0B,KAA1BA,CADZ,CAEA/gB,SACkB,MAAb06D,EAAIl/D,IAAS,IAAkB,MAAbk/D,EAAIl/D,IAAT,IAAsC,MAAbm/D,EAAIn/D,IAAS,IAAkB,MAAbm/D,EAAIn/D,KADjEwE,EAEI,iEACO06D,EAAIl/D,IADX,UAAA,GACuBm/D,EAAIn/D,IAD3B,MAFJwE,EAKA,IAAM46D,IAAwB,MAAbF,EAAIl/D,IAAS,GAAIk/D,EAAIpiE,IAAR,GAAeoiE,EAAIriE,KAAJqiE,CAAU,CAAVA,CAA7C;EAAA,MACMG,IAAwB,MAAbF,EAAIn/D,IAAS,GAAIm/D,EAAIriE,IAAR,GAAeqiE,EAAItiE,KAAJsiE,CAAU,CAAVA,CAD7C,CAQA,OALA36D,OACI46D,MAAYC,CADhB76D,EAEI,kEACO46D,CADP,UAAA,GACsBC,CADtB,MAFJ76D,GAKiB,MAAb06D,EAAIl/D,IAAS,IAAkB,MAAbm/D,EAAIn/D,IAAT,GACRk/D,EAAIzc,IAAJyc,CAAS,CAATA,GAAa,CAAbA,EAAgBz3D,MAAhBy3D,CAAuBC,EAAI1c,IAAJ0c,EAAU,CAAVA,EAAa,CAAbA,CAAvBD,EAAwCI,QAAxCJ,EADQ,GAEO,MAAbA,EAAIl/D,IAAS,IAAkB,MAAbm/D,EAAIn/D,IAAT,GACfk/D,EAAIzc,IAAJyc,CAAS,CAATA,GAAa,CAAbA,EAAgBz3D,MAAhBy3D,CAAuBC,EAAI1c,IAAJ0c,CAASA,EAAItiE,KAAJsiE,CAAU,CAAVA,CAATA,EAAuBA,EAAItiE,KAAJsiE,CAAU,CAAVA,CAAvBA,CAAvBD,EAA6Dp5D,IAA7Do5D,EADe,GAEA,MAAbA,EAAIl/D,IAAS,IAAkB,MAAbm/D,EAAIn/D,IAAT,GACfk/D,EAAIz3D,MAAJy3D,CAAWC,EAAI1c,IAAJ0c,EAAU,CAAVA,EAAa,CAAbA,CAAXD,EAA4Bp5D,IAA5Bo5D,EADe,GAGfA,EAAIz3D,MAAJy3D,CAAWC,EAAI1c,IAAJ0c,CAASA,EAAItiE,KAAJsiE,CAAU,CAAVA,CAATA,EAAuBA,EAAItiE,KAAJsiE,CAAU,CAAVA,CAAvBA,CAAXD,CAPT;EAWF,KAAaz3D,SAAS2e,KAAIm5C,kBAAJn5C,CAAtB;EAAA,IACa1e,MAAM0e,KAAIo5C,YAAJp5C,CADnB;EAAA,IAEaq5C,eAAer5C,KAAIs5C,8BAAJt5C,CAF5B,CClIA,gBAAA,CACI9qB,CADJ,EACqB4S,CADrB,EACkDhK,CADlD,EAEIb,CAFJ,EAEgC8K,CAFhC,EAEiEC,CAFjE,EAGIC,CAHJ;qBAEgCF,6BAAiCC,OAE/D,IAAMiZ,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,QAAxBA,CAAX;EAAA,MACMo6C,IAAUp6C,gBAAgBrX,CAAhBqX,EAAwB,QAAxBA,EAAkC,QAAlCA,CADhB;EAAA,MAGIq6C,IAAMv4C,CAHV;EAAA,MAIIw4C,KAAe,CAJnB,CAKgB,MAAZx4C,EAAGrnB,IAAS,KACd6/D,KAAe,CAAfA,EACAD,IAAMv4C,EAAGglC,IAAHhlC,CAAQ,CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,CAFQ,GAKhB7iB,OACiB,MAAbo7D,EAAI5/D,IADRwE,EAEI,yDAAuDo7D,EAAI5/D,IAA3D,MAFJwE,CALgB,EAQhBA,OACqB,MAAjBm7D,EAAQ3/D,IADZwE,EAEI,0DACOm7D,EAAQ3/D,IADf,MAFJwE,CARgB,EAYO,QAAnB6J,CAAmB,IACrB7J,OACI+d,MAAWlf,CAAXkf,CADJ/d,EAEI,yEACuB6J,CADvB,kBAAA,GACsDhL,CADtD,MAFJmB,CAbc,EAmBhBA,OACIo7D,EAAI/iE,KAAJ+iE,CAAU,CAAVA,MAAiBD,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CADrBn7D,EAEI,sCAAoCo7D,EAAI/iE,KAAJ+iE,CAAU,CAAVA,CAApC,yCAAA,GAC8BD,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CAD9B,MAFJn7D,CAnBgB,EAuBhBA,OACIs7D,+BAAyC57D,CAAzC47D,EAAiD1xD,CAAjD0xD,CADJt7D,EAEI,sEACkBN,CADlB,oBAAA,GAC0CkK,CAD1C,MAFJ5J,CAvBgB,EA2BhBA,OACmB,UAAf2J,CADJ3J,EAEI,wCACI2J,CADJ,0CAFJ3J,CA3BgB,CAgChB,IAAMu7D,IACFJ,EAAQtE,IAARsE,CAAa,CAAbA,EAAgBA,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CAAhBA,EAAkCA,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CAAlCA,EAAoDA,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CAApDA,CADJ;EAAA,MAEMK,IAAUJ,EAAIvE,IAAJuE,CAASA,EAAI/iE,KAAJ+iE,CAAU,CAAVA,CAATA,EAAuB,CAAvBA,EAA0BA,EAAI/iE,KAAJ+iE,CAAU,CAAVA,CAA1BA,EAAwCA,EAAI/iE,KAAJ+iE,CAAU,CAAVA,CAAxCA,CAFhB;EAAA,MAQM/pD,IAAMrH,OACRwxD,CADQxxD,EA
CCuxD,CADDvxD,GALuB,GAAGtK,EAK1BsK,EACoBnL,CADpBmL,EAFa,MAEbA,GAJyB,GAAGJ,EAI5BI,EAERH,CAFQG,CARZ,CAYA,OAAIqxD,IACKhqD,EAAI4sC,IAAJ5sC,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,CADLgqD,GAGGhqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CAHP;EAsCF,iBAAA,CACIva,CADJ,EACqB4S,CADrB,EAEIjO,CAFJ,EAEsCoD,CAFtC,EAGI8K,CAHJ,EAIII,CAJJ,EAKIF,CALJ;qBAGIF,8BACAI,KAAsC,GAAG,IAE3C,IAAM8Y,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,QAAxBA,CAAX;EAAA,MACMo6C,IAAUp6C,gBAAgBrX,CAAhBqX,EAAwB,QAAxBA,EAAkC,QAAlCA,CADhB;EAAA,MAGI61C,IAAM/zC,CAHV;EAAA,MAII44C,KAAe,CAJnB,CAMgB,MAAZ54C,EAAGrnB,IAAS,KACdigE,KAAe,CAAfA,EACA7E,IAAM/zC,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAArCA,CAFQ,GAIhB7iB,OACiB,MAAb42D,EAAIp7D,IADRwE,EAEI,yDAAuD42D,EAAIp7D,IAA3D,MAFJwE,CAJgB,EAOhBA,OACqB,MAAjBm7D,EAAQ3/D,IADZwE,EAEI,0DACOm7D,EAAQ3/D,IADf,MAFJwE,CAPgB,EAWO,QAAnB6J,CAAmB,IACrB7J,OACI+d,MAAWlf,CAAXkf,CADJ/d,EAEI,yEACuB6J,CADvB,kBAAA,GACsDhL,CADtD,MAFJmB,CAZc,EAkBhBA,OACI42D,EAAIv+D,KAAJu+D,CAAU,CAAVA,MAAiBuE,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CADrBn7D,EAEI,sCAAoC42D,EAAIv+D,KAAJu+D,CAAU,CAAVA,CAApC,yCAAA,GAC8BuE,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CAD9B,MAFJn7D,CAlBgB,EAsBhBA,OACIs7D,+BAAyC7/D,CAAzC6/D,EAAkDvxD,CAAlDuxD,CADJt7D,EAEI,yEACmBvE,CADnB,qBAAA,GAC6CsO,CAD7C,MAFJ/J,CAtBgB,EA0BhBA,OACmB,WAAf2J,CADJ3J,EAEI,wCACI2J,CADJ,2CAFJ3J,CA1BgB,CA+BhB,IAGIqR,CAHJ;EAAA,MAAM8S,IAAWu3C,kBACb9E,EAAIv+D,KADSqjE,EACFP,EAAQ9iE,KADNqjE,EACajgE,CADbigE,EACsB3xD,CADtB2xD,EACiC78D,CADjC68D,EACsC7xD,CADtC6xD,CAAjB,CAIA,IAA8B,MAA1Bv3C,EAAS+E,YAAiB,IAA8B,MAAzB/E,EAASgF,WAAd,IACE,MAA5BhF,EAASmF,cADiB,IACkC,MAA3BnF,EAASoF,aADhB,IAEA,MAA1BpF,EAASiF,YAFiB,IAE8B,MAAzBjF,EAASkF,WAFd,IAGC,WAA1BlF,EAASwF,OAATxF,CAAiBjO,IAAS,IAAoC,YAA1BiO,EAASwF,OAATxF,CAAiBjO,IAH1D,EASO;EAaL7E,QAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACF,UAAAtJ,CAAA;EAAW,aAAAA,EAAQjG,MAARiG,CAAe2mD,CAAf3mD,EAAoBkrD,CAApBlrD,EAA6BkU,CAA7BlU,CAAA;OADTsJ,IACkDziB,GAAG8/D,GAAKuE,YAD1D5hD,EAZO,UAAC1K,CAAD;EAMX,aALA7O,OACI27D,kBAA4B5xD,CAA5B4xD,CADJ37D,EAEI,kHACkD+J,CADlD,MAFJ/J,KAMElJ,GAAG;EAAM,iBAAA8kE,gBAAgBhF,EAAIv+D,KAApBujE,EAA2B/sD,CAA3B+sD,EAA+BT,CAA/BS,EAAwCngE,CAAxCmgE,EAAiD/8D,CAAjD+8D,CAAA;aACTT,SAAS;EAAM,iBAAAU,iBAAiBjF,CAAjBiF,EAAsBhtD,CAAtBgtD,EAA0BV,EAAQ9iE,KAAlCwjE,EAAyCpgE,CAAzCogE,EAAkDh9D,CAAlDg9D,CAAA;aAFjB;OAMItiD,CAANlI;KAtBF,MAG6E;EAC3E,QAAMyqD,IAAMlF,EAAIr1D,OAAJq1D,GAAc,GAAGzyC,EAASyJ,WAA1BgpC,CAAZ;EAAA,QACMmF,IAAMZ,EAAQ55D,OAAR45D,EAAiBh3C,EAASyJ,YAAYzJ,EAASuJ,YAA/CytC,CADZ,CAIA9pD,IAAMpO,OAAO64D,CAAP74D,EAAY84D,CAAZ94D,EAAiB1B,OAAjB0B,CAAkCkhB,EAASvG,QAA3C3a,CAANoO;EAkBF,UAAIoqD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EA2BF,yBAAA,CACIyW,CADJ,EACuEjZ,CADvE,EAEInF,CAFJ,EAEsBjO,CAFtB,EAGIoD,CAHJ,EAGgCgL,CAHhC;EAIE7J,SACI8nB,EAAOtxB,MAAPsxB,KAAkBjZ,EAAGrT,IADzBwE,EAEI,wBACQ8nB,EAAOtxB,MADf,uBAAA,GAC0CqY,EAAGrT,IAD7C,iBAFJwE,EAKA,IAAIg8D,IAAWl0C,CAAf;EAAA,MACIm0C,IAAOptD,CADX;EAAA,MAEI4sD,KAAe,CAFnB,CAGgB,MAAZ5sD,EAAGrT,IAAS,KACdigE,KAAe,CAAfA,EACAQ,IAAOptD,EAAGgoD,IAAHhoD,CAAQ,CAARA,EAAWA,EAAGxW,KAAHwW,CAAS,CAATA,CAAXA,EAAwBA,EAAGxW,KAAHwW,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxW,KAAHwW,CAAS,CAATA,CAArCA,CADP4sD,EAEAO,KAAY,GAAGl0C,EAAO,CAAPA,GAAWA,EAAO,CAAPA,GAAWA,EAAO,CAAPA,EAHvB,EAMhB,IAAMo0C,IAAUF,EAAS,CAATA,CAAhB;EAAA,MACMtD,IAAWuD,EAAK5jE,KAAL4jE,CAAW,CAAXA,CADjB,
CAEAj8D,OACwB,MAApBg8D,EAASxlE,MADbwJ,EAEI,uEACOg8D,EAASxlE,MADhB,MAFJwJ,GAIAA,OACkB,MAAdi8D,EAAKzgE,IADTwE,EAEI,8DACYi8D,EAAKzgE,IAHrBwE,CAJAA,EAQAA,OACoB,MAAhB0J,EAAOlO,IADXwE,EAEI,kEACY0J,EAAOlO,IAHvBwE,CARAA,EAYAA,OACIk8D,MAAYxyD,EAAOrR,KAAPqR,CAAa,CAAbA,CADhB1J,EAEI,8CAA4Ck8D,CAA5C,yCAAA,GACoCxyD,EAAOrR,KAAPqR,CAAa,CAAbA,CADpC,MAFJ1J,CAZAA,EAgBAA,OACI04D,MAAahvD,EAAOrR,KAAPqR,CAAa,CAAbA,CADjB1J,EAEI,+CAA6C04D,CAA7C,0CAAA,GACqChvD,EAAOrR,KAAPqR,CAAa,CAAbA,CADrC,MAFJ1J,CAhBAA,EAoBuB,QAAnB6J,CAAmB,IACrB7J,OACI+d,MAAWlf,CAAXkf,CADJ/d,EAEI,iFACuB6J,CADvB,kBAAA,GACsDhL,CADtD,MAFJmB,CArBFA,CA2BA,IAYMmkB,IAAWu3C,kBACbM,CADaN,EACHhyD,EAAOrR,KADJqjE,EACWjgE,CADXigE,EAZC,CAYDA,EAC+B78D,CAD/B68D,EACoC7xD,CADpC6xD,CAZjB;EAAA,MAcMrqD,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQksD,cAARlsD,CAAuBgsD,CAAvBhsD,EAA6BvG,CAA7BuG,EAAqCkU,CAArClU,CAAA;KADHsJ,IACoD0iD,SAAMvyD,WAD1D6P,EAZC,UAAC6iD,CAAD;EAEX,aACEH,MAAM;EAAM,eAAAjyD,OACRoyD,CADQpyD,EACHN,CADGM,EACKvO,CADLuO,EACcnL,CADdmL,EAFK,MAELA,EALE,CAKFA,EAC0CH,CAD1CG,CAAA;WAEZN,QAAQ;EAAM,eAAA2yD,gBACVD,CADUC,EACLJ,CADKI,EACC3yD,EAAOrR,KADRgkE,EACe5gE,CADf4gE,EACwBx9D,CADxBw9D,EAC6BxyD,CAD7BwyD,CAAA;WAHhB;KAUU9iD,CAdZ,CAiBA,OAAIkiD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EAwBF,0BAAA,CACIva,CADJ,EACU+X,CADV,EACiB0e,CADjB,EAEI9xB,CAFJ,EAEsCoD,CAFtC,EAGIgL,CAHJ;EAIE,MAAI+sD,IAAM9/D,CAAV,CACe,MAAXA,EAAE0E,IAAS,KACbo7D,IAAM9/D,EAAE+/D,IAAF//D,CAAO,CAAPA,EAAUA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAVA,EAAsBA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAtBA,EAAkCA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAlCA,CADO,EAGf,IAAImlE,IAAOptD,CAAX,CACkB,MAAdotD,EAAKzgE,IAAS,KAChBygE,IAAOptD,EAAGgoD,IAAHhoD,CAAQ,CAARA,EAAWA,EAAGxW,KAAHwW,CAAS,CAATA,CAAXA,EAAwBA,EAAGxW,KAAHwW,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxW,KAAHwW,CAAS,CAATA,CAArCA,CADS,GAGlB7O,OACiB,MAAb42D,EAAIp7D,IADRwE,EAEI,mEACO42D,EAAIv+D,KADX,MAFJ2H,CAHkB,EAOlBA,OACkB,MAAdi8D,EAAKzgE,IADTwE,EAEI,gEACOi8D,EAAK5jE,KADZ,MAFJ2H,CAPkB,EAWlBA,OAC2B,MAAvButB,EAAY/2B,MADhBwJ,EAEI,qEACOutB,CADP,MAFJvtB,CAXkB,EAelBA,OACI42D,EAAIv+D,KAAJu+D,CAAU,CAAVA,MAAiBrpC,EAAY,CAAZA,CADrBvtB,EAEI,8CAA4C42D,EAAIv+D,KAAJu+D,CAAU,CAAVA,CAA5C,yCAAA,GACoCrpC,EAAY,CAAZA,CADpC,MAFJvtB,CAfkB,EAmBlBA,OACIi8D,EAAK5jE,KAAL4jE,CAAW,CAAXA,MAAkB1uC,EAAY,CAAZA,CADtBvtB,EAEI,4CAA0Ci8D,EAAK5jE,KAAL4jE,CAAW,CAAXA,CAA1C,2CAAA,GACsC1uC,EAAY,CAAZA,CADtC,OAFJvtB,CAnBkB,EAuBK,QAAnB6J,CAAmB,IACrB7J,OACI+d,MAAWlf,CAAXkf,CADJ/d,EAEI,kFACuB6J,CADvB,kBAAA,GACsDhL,CADtD,MAFJmB,CAxBgB,CA8BlB,IAEMmkB,IAAWu3C,kBACb9E,EAAIv+D,KADSqjE,EACFnuC,CADEmuC,EACWjgE,CADXigE,EAFC,CAEDA,EAC+B78D,CAD/B68D,EACoC7xD,CADpC6xD,CAFjB,CAIA,OAAOniD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQosD,eAARpsD,CAAwB2mD,CAAxB3mD,EAA6BgsD,CAA7BhsD,EAAmCkU,CAAnClU,CAAA;KADRsJ,IACuDq9C,QAAKqF,SAD5D1iD,CAAP;EAwBF,0BAAA,CACIziB,CADJ,EACqB4S,CADrB,EAEIO,CAFJ,EAGIxO,CAHJ,EAGsCoD,CAHtC,EAIIgL,CAJJ;EAQE,SAAO+xD,gBACH3xD,CADG2xD,EAHI76C,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,iBAAxBA,CAGJ66C,EAFS76C,gBAAgBrX,CAAhBqX,EAAwB,QAAxBA,EAAkC,iBAAlCA,CAET66C,EACuBngE,CADvBmgE,EACgC/8D,CADhC+8D,EACqC/xD,CADrC+xD,CAAP;EAiDF,0BAAA,CACI9kE,CADJ,EACqB4S,CADrB,EAEIjO,CAFJ,EAEsCoD,CAFtC,EAGI8K,CAHJ,EAIII,CAJJ,EAKIF,CALJ;qBAGIF,8BACAI,KAAsC,GAAG,IAE3C,IAAM8Y,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,iBAAxBA,CAAX;EAAA,MACMo6C,IAAUp6C,gBAAgBrX,CAAhBqX,EAAwB,QAAxBA,EAAkC,iBAAlCA,CADhB;EAAA,MAGI61C,IAAM/zC,CAHV;EAAA,MAII44C,KAAe,CAJnB,CAKgB,MAAZ54C,EAAGrnB,IAAS,KACdigE,KAAe,CAAfA,EACA7E,IAAM/zC,EAAGg0C,IAAHh0C,CAAQ,
CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAArCA,CAFQ,GAIhB7iB,OACiB,MAAb42D,EAAIp7D,IADRwE,EAEI,kEACY42D,EAAIp7D,IADhB,MAFJwE,CAJgB,EAQhBA,OACqB,MAAjBm7D,EAAQ3/D,IADZwE,EAEI,mEACOm7D,EAAQ3/D,IADf,MAFJwE,CARgB,EAYhBA,OACI42D,EAAIv+D,KAAJu+D,CAAU,CAAVA,MAAiBuE,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CADrBn7D,EAEI,yDACQ42D,EAAIv+D,KAAJu+D,CAAU,CAAVA,CADR,qDAAA,GAEcuE,EAAQ9iE,KAAR8iE,CAAc,CAAdA,CAFd,MAFJn7D,CAZgB,EAiBC,QAAb+J,CAAa,KACfA,KAAa,GAAG,EADD,CAjBD,EAoBhB/J,OACIs7D,+BAAyC7/D,CAAzC6/D,EAAkDvxD,CAAlDuxD,CADJt7D,EAEI,kFACmBvE,CADnB,qBAAA,GAC6CsO,CAD7C,MAFJ/J,CApBgB,EAyBO,QAAnB6J,CAAmB,IACrB7J,OACI+d,MAAWlf,CAAXkf,CADJ/d,EAEI,kFACuB6J,CADvB,kBAAA,GACsDhL,CADtD,MAFJmB,CA1Bc,CAgChB,IAAMmkB,IAAWu3C,kBACb9E,EAAIv+D,KADSqjE,EACFP,EAAQ9iE,KADNqjE,EACajgE,CADbigE,EACsB3xD,CADtB2xD,EACiC78D,CADjC68D,EACsC7xD,CADtC6xD,GAEb,CAFaA,CAAjB;EAAA,MAeMrqD,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQqsD,eAARrsD,CAAwB2mD,CAAxB3mD,EAA6BkrD,CAA7BlrD,EAAsCkU,CAAtClU,CAAA;KADHsJ,IAEPziB,GAAG8/D,GAAKuE,YAFD5hD,EAXC,UAAC1K,CAAD;EAKX,WAJA7O,OACI27D,kBAA4B5xD,CAA5B4xD,CADJ37D,EAEI,+GAC+C+J,CAD/C,MAFJ/J,KAKElJ,GAAG;EAAM,eAAAylE,wBAAwB3F,EAAIv+D,KAA5BkkE,EAAmC1tD,CAAnC0tD,EAAuCpB,CAAvCoB,EAAgDp4C,CAAhDo4C,CAAA;WACTpB,SAAS;EAAM,eAAAqB,yBAAyB5F,CAAzB4F,EAA8B3tD,CAA9B2tD,EAAkCrB,EAAQ9iE,KAA1CmkE,EAAiDr4C,CAAjDq4C,CAAA;WAFjB;KAMUjjD,CAfZ,CAkBA,OAAIkiD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EAkDF,0BAAA,CACIva,CADJ,EACqBsT,CADrB,EAEIC,CAFJ,EAE0C5O,CAF1C,EAGIoD,CAHJ,EAGyB+K,CAHzB,EAIID,CAJJ;qBAGyBC,KAAqC,GAAG,sBAC7DD,YACF,IAAMkZ,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,iBAAxBA,CAAX;EAAA,MACM07C,IACF17C,gBAAgB3W,CAAhB2W,EAAiC,iBAAjCA,EAAoD,iBAApDA,CAFJ;EAAA,MAGM27C,IACF37C,gBAAgB1W,CAAhB0W,EAAiC,iBAAjCA,EAAoD,iBAApDA,CAJJ;EAAA,MAMI61C,IAAM/zC,CANV;EAAA,MAOI44C,KAAe,CAPnB,CAaA,IALgB,MAAZ54C,EAAGrnB,IAAS,KACdigE,KAAe,CAAfA,EACA7E,IAAM/zC,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAArCA,CAFQ,GAKG,WAAflZ,CAAJ,EACE,MAAM,IAAIhU,KAAJ,CACF,oFADE,CAAN,CAKFqK,OACiB,MAAb42D,EAAIp7D,IADRwE,EAEI,kEACY42D,EAAIp7D,IADhB,MAFJwE,GAIAA,OAC8B,MAA1By8D,EAAiBjhE,IADrBwE,EAEI,6EACYy8D,EAAiBjhE,IAD7B,MAFJwE,CAJAA,EAQAA,OAC8B,MAA1B08D,EAAiBlhE,IADrBwE,EAEI,6EACYy8D,EAAiBjhE,IAD7B,MAFJwE,CARAA,EAYAA,OACkC,MAA9B08D,EAAiBrkE,KAAjBqkE,CAAuB,CAAvBA,CADJ18D,EAEI,2FAC2B08D,EAAiBrkE,KAAjBqkE,CAAuB,CAAvBA,CAD3B,MAFJ18D,CAZAA,EAgBAA,OACkC,MAA9B08D,EAAiBrkE,KAAjBqkE,CAAuB,CAAvBA,CADJ18D,EAEI,4FAC2B08D,EAAiBrkE,KAAjBqkE,CAAuB,CAAvBA,CAD3B,MAFJ18D,CAhBAA,CAqBA,IAAM4tB,IAAa6uC,EAAiBpkE,KAAjBokE,CAAuB,CAAvBA,CAAnB;EAAA,MACME,IAAoBF,EAAiBpkE,KAAjBokE,CAAuB,CAAvBA,CAD1B,CAEAz8D,OACI08D,EAAiBrkE,KAAjBqkE,CAAuB,CAAvBA,MAA8B9uC,IAAa+uC,CAD/C38D,EAEI,+EACe4tB,IAAa+uC,CAD5B,eAAA,GAEeD,EAAiBrkE,KAAjBqkE,CAAuB,CAAvBA,CAFf,MAFJ18D,EAMA,IAAMq4D,IAAYluD,gBACdysD,CADczsD,EACTsyD,CADStyD,EACS1O,CADT0O,EACkBtL,CADlBsL,EACuBR,CADvBQ,EACmCP,CADnCO,CAAlB;EAAA,MAGMkH,IACFrH,OAAOquD,CAAPruD,EAAkB0yD,CAAlB1yD,EAFoB,CAEpBA,EAAqD,OAArDA,EAA8DL,CAA9DK,CAJJ,CAKA,OAAIyxD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EAMF,iCAAA,CACIyW,CADJ,EACuEjZ,CADvE,EAEInF,CAFJ,EAEsBya,CAFtB;EAGE,MAAI83C,IAAOptD,CAAX;EAAA,MACI4sD,KAAe,CADnB,CAEgB,MAAZ5sD,EAAGrT,IAAS,KACdigE,KAAe,CAAfA,E
ACAQ,IAAOptD,EAAGgoD,IAAHhoD,CAAQ,CAARA,EAAWA,EAAGxW,KAAHwW,CAAS,CAATA,CAAXA,EAAwBA,EAAGxW,KAAHwW,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxW,KAAHwW,CAAS,CAATA,CAArCA,CAFO,EAIhB,IAAMwC,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ2sD,uBAAR3sD,CAAgCgsD,CAAhChsD,EAAsCvG,CAAtCuG,EAA8CkU,CAA9ClU,CAAA;KADHsJ,IAEP0iD,SAFO1iD,CAAZ,CAGA,OAAIkiD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EAMF,kCAAA,CACIva,CADJ,EACU+X,CADV,EACiB0e,CADjB,EAEIpJ,CAFJ;EAGE,MAAIyyC,IAAM9/D,CAAV,CACe,MAAXA,EAAE0E,IAAS,KACbo7D,IAAM9/D,EAAE+/D,IAAF//D,CAAO,CAAPA,EAAUA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAVA,EAAsBA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAtBA,EAAkCA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAlCA,CADO,EAGf,IAAImlE,IAAOptD,CAAX,CAIA,OAHkB,MAAdotD,EAAKzgE,IAAS,KAChBygE,IAAOptD,EAAGgoD,IAAHhoD,CAAQ,CAARA,EAAWA,EAAGxW,KAAHwW,CAAS,CAATA,CAAXA,EAAwBA,EAAGxW,KAAHwW,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxW,KAAHwW,CAAS,CAATA,CAArCA,CADS,GAGX0K,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ4sD,wBAAR5sD,CAAiC2mD,CAAjC3mD,EAAsCgsD,CAAtChsD,EAA4CkU,CAA5ClU,CAAA;KADRsJ,IAEFq9C,QAAKqF,SAFH1iD,CAAP;EAKF,KAAazP,SAAS8X,KAAIk7C,kBAAJl7C,CAAtB;EAAA,IACa5X,SAAS4X,KAAIm7C,kBAAJn7C,CADtB;EAAA,IAEay6C,kBAAkBz6C,KAAIi6C,oCAAJj6C,CAF/B;EAAA,IAGazX,kBAAkByX,KAAIo7C,oCAAJp7C,CAH/B;EAAA,IAIatX,kBAAkBsX,KAAIq7C,oCAAJr7C,CAJ/B;EAAA,IAKa1X,kBAAkB0X,KAAIs7C,oCAAJt7C,CAL/B,CC5nBA,mBAAA,CAAoB9qB,CAApB;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAGA,OAFA/gB,OAAwB,MAAZ6iB,EAAGrnB,IAAfwE,EAA2B,qEACT6iB,EAAGrnB,IADM,MAA3BwE,GAEOsC,QAAQugB,CAARvgB,EAAY,CAAZA,CAAP;EAUF,oBAAA,CAAoBxL,CAApB,EAA4CoD,CAA5C;EACE,MAAM2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAGA,OAFA/gB,OAAwB,MAAZ6iB,EAAGrnB,IAAfwE,EAA2B,qEACT6iB,EAAGrnB,IADM,MAA3BwE,GAEOsC,QAAQugB,CAARvgB,EAAYpI,CAAZoI,CAAP;EAUF,oBAAA,CAAoBxL,CAApB,EAA4CoD,CAA5C;EACE,MAAM2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAGA,OAFA/gB,OAAwB,MAAZ6iB,EAAGrnB,IAAfwE,EAA2B,qEACT6iB,EAAGrnB,IADM,MAA3BwE,GAEOsC,QAAQugB,CAARvgB,EAAYpI,CAAZoI,CAAP;EAUF,oBAAA,CAAoBxL,CAApB,EAA4CoD,CAA5C;EACE,MAAM2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAGA,OAFA/gB,OAAwB,MAAZ6iB,EAAGrnB,IAAfwE,EAA2B,qEACT6iB,EAAGrnB,IADM,MAA3BwE,GAEOsC,QAAQugB,CAARvgB,EAAYpI,CAAZoI,CAAP;EAiCF,kBAAA,CACIxL,CADJ,EACqBoD,CADrB;EAEE,MAAM2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAEA,IAAgB,MAAZ8B,EAAGrnB,IAAP,EACE,OAAOqnB,EAAGpgB,KAAHogB,EAAP,CAEF,IAAMzF,IAAOu6B,eAAez9C,CAAfy9C,EAAqB90B,EAAGxqB,KAAxBs/C,CAAb,CAMA,OADIp+B,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ3N,OAAR2N,CAAgB4S,CAAhB5S,EAAoBmN,CAApBnN,CAAA;KAAhCsJ,IAA4DsJ,OAA5DtJ,EAJS,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAAhU,EAAGvM,OAAHuM,CAAWuO,CAAXvO,CAAA;WAAlB;KAGE0K,EACO4jD,SADP5jD,CACiBsJ,CADjBtJ,CACJ;EAGF,KAAajX,UAAUsf,KAAIw7C,oBAAJx7C,CAAvB;EAAA,IACay7C,YAAYz7C,KAAI07C,wBAAJ17C,CADzB;EAAA,IAEa27C,YAAY37C,KAAI47C,wBAAJ57C,CAFzB;EAAA,IAGa67C,YAAY77C,KAAI87C,wBAAJ97C,CAHzB;EAAA,IAIa+7C,YAAY/7C,KAAIg8C,wBAAJh8C,CAJzB,CCzEA,qBAAA,CACI9qB,CADJ,EACqByT,CADrB,EAEI9O,CAFJ,EAEsCsO,CAFtC,EAGIlL,CAHJ,EAGgCgL,CAHhC;EAIE,MAAMgZ,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX;EAAA,MAEI61C,IAAM/zC,CAFV;EAAA,MAGI44C,KAAe,CAHnB,CAIgB,MAAZ54C,EAAGrnB,IAAS,KACdigE,KAAe,CAAfA,EACA7E,IAAM/zC,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAArCA,CAFQ,GAIC,QAAb9Y,CAAa,KACfA,KAAa,GAAG,EADD,CAJD,EAOhB/J,OACiB,MAAb42D,
EAAIp7D,IADRwE,EAEI,yDAAuD42D,EAAIp7D,IAA3D,MAFJwE,CAPgB,EAUhBA,OACIs7D,+BAAyC7/D,CAAzC6/D,EAAkDvxD,CAAlDuxD,CADJt7D,EAEI,0EACmBvE,CADnB,qBAAA,GAC6CsO,CAD7C,MAFJ/J,CAVgB,EAcO,QAAnB6J,CAAmB,IACrB7J,OACI+d,MAAWlf,CAAXkf,CADJ/d,EAEI,0EACuB6J,CADvB,kBAAA,GACsDhL,CADtD,MAFJmB,CAfc,CAoBhB,IAAMmkB,IAAW05C,kBACbjH,EAAIv+D,KADSwlE,EACFtzD,CADEszD,EACUpiE,CADVoiE,EACmB9zD,CADnB8zD,EAC8Bh/D,CAD9Bg/D,EACmCh0D,CADnCg0D,CAAjB;EAAA,MAWMxsD,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQxF,OAARwF,CAAgB2mD,CAAhB3mD,EAAqBkU,CAArBlU,CAALikD,CAAA;KADX36C,IACkDziB,GAAG8/D,GADrDr9C,EARC,UAAC1K,CAAD,EAAe2C,CAAf;EACJ,QAAAssD,QAAA,CACP,SACEhnE,GAAG;EAAM,eAAAinE,gBACLlvD,CADKkvD,EACDnH,CADCmH,EACID,CADJC,EACqBxzD,CADrBwzD,EACiCtiE,CADjCsiE,EAC0Ch0D,CAD1Cg0D,EACqDl/D,CADrDk/D,CAAA;WADX;KAMUxkD,CAXZ,CAaA,OAAIkiD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EA4BF,kBAAA,CACIva,CADJ,EACqByT,CADrB,EAEI9O,CAFJ,EAEsCoD,CAFtC,EAGIgL,CAHJ;EAIE,SAAOm0D,aAAalnE,CAAbknE,EAAgBzzD,CAAhByzD,EAA4BviE,CAA5BuiE,EAAqC,CAArCA,EAAwCn/D,CAAxCm/D,EAA6Cn0D,CAA7Cm0D,CAAP;EA6BF,sBAAA,CACIlnE,CADJ,EACqByT,CADrB,EAEI9O,CAFJ,EAEsCsO,CAFtC,EAGIlL,CAHJ,EAGgCgL,CAHhC;EAIE,MAAMgZ,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CACA/gB,OACiB,cAAb6iB,EAAGxoB,KADP2F,EAC4B,4CAD5BA,GAEiB,QAAb+J,CAAa,KACfA,KAAa,GAAG,EADD,CAFjB/J,EAKAA,OACIs7D,+BAAyC7/D,CAAzC6/D,EAAkDvxD,CAAlDuxD,CADJt7D,EAEI,0EACmBvE,CADnB,qBAAA,GAC6CsO,CAD7C,MAFJ/J,CALAA,CASA,IAAI42D,IAAM/zC,CAAV;EAAA,MACI44C,KAAe,CADnB,CAEgB,MAAZ54C,EAAGrnB,IAAS,KACdigE,KAAe,CAAfA,EACA7E,IAAM/zC,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAArCA,CAFQ,GAIhB7iB,OACiB,MAAb42D,EAAIp7D,IADRwE,EAEI,qDAAmD42D,EAAIp7D,IAAvD,MAFJwE,CAJgB,EAOO,QAAnB6J,CAAmB,IACrB7J,OACI+d,MAAWlf,CAAXkf,CADJ/d,EAEI,0EACuB6J,CADvB,kBAAA,GACsDhL,CADtD,MAFJmB,CARc,CAchB,IAAMmkB,IAAW05C,kBACbjH,EAAIv+D,KADSwlE,EACFtzD,CADEszD,EACUpiE,CADVoiE,EACmB9zD,CADnB8zD,EAC8Bh/D,CAD9Bg/D,CAAjB;EAAA,MAQIxsD,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACN,UAAAtJ,CAAA;EAAW,WAAAA,EAAQzF,OAARyF,CAAgB2mD,CAAhB3mD,EAAqBkU,CAArBlU,CAAA;KADLsJ,IACsCziB,GAAG8/D,GADzCr9C,EALG,UAAC1K,CAAD;EACX,aACE/X,GAAG;EAAM,eAAAmnE,gBAAgBpvD,CAAhBovD,EAAoBrH,CAApBqH,EAAyB1zD,CAAzB0zD,EAAqCxiE,CAArCwiE,EAA8Cl0D,CAA9Ck0D,EAAyDp/D,CAAzDo/D,CAAA;WADX;KAIQ1kD,CARV,CAWA,OADAlI,IAAMA,EAAI1P,IAAJ0P,CAASwR,EAAGxoB,KAAZgX,CAANA,EACIoqD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EA4BF,kBAAA,CACIva,CADJ,EACqByT,CADrB,EAEI9O,CAFJ,EAEsCoD,CAFtC,EAGIgL,CAHJ;EAIE,SAAOq0D,aAAapnE,CAAbonE,EAAgB3zD,CAAhB2zD,EAA4BziE,CAA5ByiE,EAAqC,CAArCA,EAAwCr/D,CAAxCq/D,EAA6Cr0D,CAA7Cq0D,CAAP;EA4BF,eAAA,CACInwD,CADJ,EACyBjD,CADzB,EAEIC,CAFJ,EAE8BlM,CAF9B,EAGIkL,CAHJ,EAGyCtO,CAHzC;EAImB,UAAbsO,CAAa,KACfA,KAAa,GAAG,EADD,GAGF,QAAXtO,CAAW,KACbA,IAAU,CADG,CAHE,EAML,MAARoD,CAAQ,KACVA,IAAM,OADI,CANK,CASjB,IAAMgkB,IAAK9B,gBAAgBhT,CAAhBgT,EAAuB,GAAvBA,EAA4B,SAA5BA,CAAX;EAAA,MACI61C,IAAM/zC,CADV;EAAA,MAEI44C,KAAe,CAFnB,CAGgB,MAAZ54C,EAAGrnB,IAAS,KACdigE,KAAe,CAAfA,EACA7E,IAAM/zC,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAArCA,CAFQ,GAIhB7iB,OACIs7D,+BAAyC7/D,CAAzC6/D,EAAkDvxD,CAAlDuxD,CADJt7D,EAEI,uEACmBvE,CADnB,qBAAA,GAC6CsO,CAD7C,MAFJ/J,CAJgB,CAQhB,IAUIm
+D,CAVJ;EAAA,MAAMh6C,IAAW05C,kBACbjH,EAAIv+D,KADSwlE,EACF/yD,CADE+yD,EACWpiE,CADXoiE,EACoB9zD,CADpB8zD,EAC+Bh/D,CAD/Bg/D,CAAjB;EAAA,MAEMj0D,KACDua,EAASmF,gBAAgBnF,EAASoF,cAHvC,CAYE40C,IADU,WAARt/D,CAAQ,GACIu/D,8BACTj6C,EAAS+E,cAAc/E,EAASgF,YADvBi1C,EACqCx0D,CADrCw0D,CADJ,KAIM,GAAG,KAAK,GAAG,GAH3BD,CAKF,IAAME,IAAgC,MAAhBz0D,EAAS,CAATA,CAAgB,IAAqB,MAAhBA,EAAS,CAATA,CAA3C;EAAA,MACM2L,+DADN;EAAA,MACO+oD,QADP;EAAA,MACwBC,QADxB;EAAA,MAGMC,IAAeH,IAAgBx/D,CAAhBw/D,GAAsB,OAH3C;EAAA,MAIMI,IACFJ,IAAgBzH,CAAhByH,GAAsBzyD,eAAegrD,CAAfhrD,EAAoBhC,CAApBgC,EAA8B0yD,CAA9B1yD,CAL1B;EAAA,MAWM4B,KAL4B,UAAhBzC,CAAgB,GAC9B;EAAM,WAAAmzD,aACFO,CADEP,EACUpzD,CADVozD,EACuBziE,CADvByiE,EACgC,CADhCA,EACkDM,CADlDN,CAAA;KADwB,GAG9B;EAAM,WAAAF,aACFS,CADET,EACUlzD,CADVkzD,EACuBviE,CADvBuiE,EACgC,CADhCA,EACkDQ,CADlDR,CAAA;QATV;EAAA,MAYM3sD,IAAMgtD,IAAgB7wD,CAAhB6wD,GAAoB1yD,eAAe6B,CAAf7B,EAAkB/B,CAAlB+B,EAA4B4yD,CAA5B5yD,CAZhC,CAaA,OAAI8vD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EA2BF,yBAAA,CACIxC,CADJ,EAC6Bd,CAD7B,EAEIG,CAFJ,EAEiC3D,CAFjC,EAGI9O,CAHJ,EAGsCsO,CAHtC,EAIIlL,CAJJ,EAKIgL,CALJ;EAME,MAAM60D,IAAM39C,gBAAgBlS,CAAhBkS,EAAoB,IAApBA,EAA0B,iBAA1BA,CAAZ;EAAA,MACMsB,IAAStB,gBAAgBhT,CAAhBgT,EAAuB,OAAvBA,EAAgC,iBAAhCA,CADf;EAAA,MAEM49C,IAAU59C,gBAAgB7S,CAAhB6S,EAAwB,QAAxBA,EAAkC,iBAAlCA,CAFhB,CAGA/gB,OACIqiB,EAAO7mB,IAAP6mB,KAAgBq8C,EAAIljE,IADxBwE,EAEI,oBAAkBqiB,EAAO7mB,IAAzB,kCAAA,GAA6DkjE,EAAIljE,IAAjE,MAFJwE,GAGiB,QAAb+J,CAAa,KACfA,KAAa,GAAG,EADD,CAHjB/J,EAMAA,OACIs7D,+BAAyC7/D,CAAzC6/D,EAAkDvxD,CAAlDuxD,CADJt7D,EAEI,kFACmBvE,CADnB,qBAAA,GAC6CsO,CAD7C,MAFJ/J,CANAA,EAWAA,OACiB,MAAb0+D,EAAIljE,IADRwE,EAEI,8DACO0+D,EAAIljE,IADX,MAFJwE,CAXAA,EAeAA,OACoB,MAAhBqiB,EAAO7mB,IADXwE,EAEI,iEACOqiB,EAAO7mB,IADd,MAFJwE,CAfAA,EAmBuB,QAAnB6J,CAAmB,IACrB7J,OACI+d,MAAWlf,CAAXkf,CADJ/d,EAEI,kFACuB6J,CADvB,kBAAA,GACsDhL,CADtD,MAFJmB,CApBFA,CA0BA,IAAMmkB,IAAW05C,kBACbx7C,EAAOhqB,KADMwlE,EACCtzD,CADDszD,EACapiE,CADboiE,EACsB9zD,CADtB8zD,EACiCh/D,CADjCg/D,EACsCh0D,CADtCg0D,CAAjB,CAKA,OAHYtkD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ8tD,eAAR9tD,CAAwByuD,CAAxBzuD,EAA6BoS,CAA7BpS,EAAqC0uD,CAArC1uD,EAA8CkU,CAA9ClU,CAAA;KADHsJ,IAEPmlD,QAAKr8C,WAFE9I,CAGZ;EAmBF,yBAAA,CACI1K,CADJ,EACsBd,CADtB,EAC2CxD,CAD3C,EAEI9O,CAFJ,EAEsCsO,CAFtC,EAGIlL,CAHJ;EAIE,MAAM6/D,IAAM39C,gBAAgBlS,CAAhBkS,EAAoB,IAApBA,EAA0B,iBAA1BA,CAAZ;EAAA,MACMsB,IAAStB,gBAAgBhT,CAAhBgT,EAAuB,OAAvBA,EAAgC,iBAAhCA,CADf,CAEA/gB,OACIqiB,EAAO7mB,IAAP6mB,KAAgBq8C,EAAIljE,IADxBwE,EAEI,oBAAkBqiB,EAAO7mB,IAAzB,kCAAA,GAA6DkjE,EAAIljE,IAAjE,MAFJwE,GAGiB,QAAb+J,CAAa,KACfA,KAAa,GAAG,EADD,CAHjB/J,EAMAA,OACIs7D,+BAAyC7/D,CAAzC6/D,EAAkDvxD,CAAlDuxD,CADJt7D,EAEI,kFACmBvE,CADnB,qBAAA,GAC6CsO,CAD7C,MAFJ/J,CANAA,CAWA,IAAIw7D,IAAUn5C,CAAd;EAAA,MACI45C,IAAOyC,CADX;EAAA,MAEIjD,KAAe,CAFnB,CAGoB,MAAhBp5C,EAAO7mB,IAAS,KAClBigE,KAAe,CAAfA,EACAD,IAAUn5C,EAAOw0C,IAAPx0C,CAAY,CAAZA,EAAeA,EAAOhqB,KAAPgqB,CAAa,CAAbA,CAAfA,EAAgCA,EAAOhqB,KAAPgqB,CAAa,CAAbA,CAAhCA,EAAiDA,EAAOhqB,KAAPgqB,CAAa,CAAbA,CAAjDA,CADVo5C,EAEAQ,IAAOyC,EAAI7H,IAAJ6H,CAAS,CAATA,EAAYA,EAAIrmE,KAAJqmE,CAAU,CAAVA,CAAZA,EAA0BA,EAAIrmE,KAAJqmE,CAAU,CAAVA,CAA1BA,EAAwCA,EAAIrmE,KAAJqmE,CAAU,CAAVA,CAAxCA,CAHW,GAMpB1+D,OACkB,MAAdi8D,EAAKzgE,IADTwE,EAEI,8DACOi8D,EAAKzgE,IADZ,MAFJwE,CANoB,EAUpBA,OACqB,MAAjBw7D,EAAQhgE,IADZwE,EAEI,iEACOw7D,EAAQhgE,IADf,MAFJwE,CAVoB,CAepB,IAAMmkB,IAAW05C,kBACbrC,EAAQnjE,KADKwlE,EACEtzD,CADFszD,EACcpiE,CADdoiE,EACuB9zD,CADvB8zD,EACkCh/D,CADlCg/D,CAAjB;EAAA,MAEMxsD,IAAMkI,IAAIE,MAAJF,CAAW4I,SA
AX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQguD,eAARhuD,CAAwBgsD,CAAxBhsD,EAA8BurD,CAA9BvrD,EAAuCkU,CAAvClU,CAAA;KADHsJ,IAEP0iD,SAAMT,YAFCjiD,CAFZ,CAKA,OAAIkiD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EASF,sCAAA,CACIkL,CADJ,EACkC9Q,CADlC,EAEI0yD,CAFJ;EAGE,MAAMS,IAAWT,EAAYjgE,GAAZigE,CAAgB,UAAA/mE,CAAA;EAAK,WAAAA,EAAE,CAAFA,CAAA;KAArB+mE,CAAjB;EAAA,MACMU,IAAaV,EAAYjgE,GAAZigE,CAAgB,UAAA/mE,CAAA;EAAK,WAAAA,EAAE,CAAFA,CAAA;KAArB+mE,CADnB;EAAA,MAEMW,IAAiBviD,EAAWhZ,MAAXgZ,CAAkBqiD,CAAlBriD,EAA4BsiD,CAA5BtiD,CAFvB;EAAA,MAGMwiD,IAActzD,EAAWvN,GAAXuN,CAAe,UAACrU,CAAD,EAAID,CAAJ;EAAU,YAACC,IAAI0nE,EAAe3nE,CAAf2nE,IAAoB1nE,KAAKA,CAA9B;KAAzBqU,CAHpB;EAAA,MAIMuzD,IAASH,EAAW3gE,GAAX2gE,CAAe,UAAC/lD,CAAD,EAAI3hB,CAAJ;EAAU,WAAA2hB,IAAIimD,EAAY5nE,CAAZ4nE,CAAJ;KAAzBF,CAJf,CAOA,QAFiBpzD,EAAWvN,GAAXuN,CAAe,UAACwkC,CAAD,EAAI94C,CAAJ;EAAU,YAACynE,EAASznE,CAATynE,GAAaI,EAAO7nE,CAAP6nE,EAAd;KAAzBvzD,GACHA,EAAWvN,GAAXuN,CAAe,UAACwkC,CAAD,EAAI94C,CAAJ;EAAU,YAAC,GAAG4nE,EAAY5nE,CAAZ4nE,EAAJ;KAAzBtzD,EACd;EAMF,sCAAA,CACI8hB,CADJ,EACmC3jB,CADnC;EAIE,MAGMq1D,IAHqB1xC,EAAYrvB,GAAZqvB,CAAgB,UAACzU,CAAD,EAAI3hB,CAAJ;EACzC,WAAO2hB,KAAKA,IAAI,MAAMlP,EAASzS,CAATyS,IAAc,EAApC;KADyB2jB,EAGcrvB,GAHdqvB,CAGkB,UAAAzU,CAAA;EAAK,WAAAA,IAAI,CAAJ;KAHvByU,CAA3B;EAAA,MAOM2xC,IAAgBD,EAAc/gE,GAAd+gE,CAAkB,UAAAnmD,CAAA;EAAK,WAAAniB,KAAKkC,KAALlC,CAAWmiB,IAAI,CAAfniB,CAAA;KAAvBsoE,CAPtB;EAAA,MAQME,IAAcF,EAAc/gE,GAAd+gE,CAAkB,UAACnmD,CAAD,EAAI3hB,CAAJ;EAAU,WAAA2hB,IAAIomD,EAAc/nE,CAAd+nE,CAAJ;KAA5BD,CARpB,CASA,OAAOA,EAAc/gE,GAAd+gE,CAAkB,UAAChvB,CAAD,EAAI94C,CAAJ;EACvB,YAAQ+nE,EAAc/nE,CAAd+nE,GAAkBC,EAAYhoE,CAAZgoE,EAA1B;KADKF,CAAP;EAKF,KAAax0D,UAAUmX,KAAIw9C,oBAAJx9C,CAAvB;EAAA,IACapX,UAAUoX,KAAIy9C,oBAAJz9C,CADvB;EAAA,IAEa1W,OAAO0W,KAAI09C,cAAJ19C,CAFpB,CCjcA,iBAAA,CACI9qB,CADJ,EAC4BwM,CAD5B,EAC2ChL,CAD3C;EAEE,MAAMuqB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAIA,OAHA/gB,OACgB,MAAZ6iB,EAAGrnB,IADPwE,EAEI,qDAAmD6iB,EAAGrnB,IAAtD,YAFJwE,GAGO/F,MAAM4oB,CAAN5oB,GAAWqJ,EAAXrJ,GAAoB3B,EAApB2B,CAAP;EAOF,kBAAA,CACInD,CADJ,EAC4BwM,CAD5B,EAEIhL,CAFJ;EAGE,MAAMuqB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAIA,OAHA/gB,OACgB,MAAZ6iB,EAAGrnB,IADPwE,EAEI,qDAAmD6iB,EAAGrnB,IAAtD,YAFJwE,GAGO/F,MAAM4oB,CAAN5oB,EAAUqJ,CAAVrJ,EAAiB3B,CAAjB2B,CAAP;EAOF,kBAAA,CACInD,CADJ,EAC4BwM,CAD5B,EAEIhL,CAFJ;EAGE,MAAMuqB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAIA,OAHA/gB,OACgB,MAAZ6iB,EAAGrnB,IADPwE,EAEI,qDAAmD6iB,EAAGrnB,IAAtD,YAFJwE,GAGO/F,MAAM4oB,CAAN5oB,EAAUqJ,CAAVrJ,EAAiB3B,CAAjB2B,CAAP;EAOF,kBAAA,CACInD,CADJ,EAC4BwM,CAD5B,EAEIhL,CAFJ;EAGE,MAAMuqB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX,CAIA,OAHA/gB,OACgB,MAAZ6iB,EAAGrnB,IADPwE,EAEI,qDAAmD6iB,EAAGrnB,IAAtD,YAFJwE,GAGO/F,MAAM4oB,CAAN5oB,EAAUqJ,CAAVrJ,EAAiB3B,CAAjB2B,CAAP;EAoCF,gBAAA,CACInD,CADJ,EACqBwM,CADrB,EAC6ChL,CAD7C;EAEE,MAMIinE,CANJ;EAAA,MAcIC,CAdJ;EAAA,MAAM38C,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX,CAEA,IAAgB,MAAZ8B,EAAGrnB,IAAP,EACE,MAAM,IAAI7F,KAAJ,CAAU,gCAAV,CAAN,CAKA4pE,IADmB,mBAAVj8D,CAAU,IACTA,UAAU,IAAIrL,KAAJ,CAAU4qB,EAAGrnB,IAAHqnB,GAAU,CAApB,EAAuBtkB,IAAvB,CAA4B,CAA5B,EADD,GAEV+E,EAAM9M,MAAN8M,GAAeuf,EAAGrnB,IAAlB8H,GACAA,EAAMC,MAAND,CAAa,IAAIrL,KAAJ,CAAU4qB,EAAGrnB,IAAHqnB,GAAUvf,EAAM9M,MAA1B,EAAkC+H,IAAlC,CAAuC,CAAvC,CAAb+E,CADAA,GAGAA,EAAMrJ,KAANqJ,EAJTi8D,EAgBFC,KAREA,IADU,QAARlnE,CAAQ,GACF,IAAIL,KAAJ,CAAU4qB,EAAGrnB,IAAb,EAAmB+C,IAAnB,EAAyB,CAAzB,CADE,GAEe,mBAATjG,CAAS,IAChBA,UAAS,IAAIL,KAAJ,CAAU4qB,E
AAGrnB,IAAHqnB,GAAU,CAApB,EAAuBtkB,IAAvB,EAA6B,CAA7B,EADO,GAEhBjG,EAAK9B,MAAL8B,GAAcuqB,EAAGrnB,IAAjBlD,GACDA,EAAKiL,MAALjL,CAAY,IAAIL,KAAJ,CAAU4qB,EAAGrnB,IAAHqnB,GAAUvqB,EAAK9B,MAAzB,EAAiC+H,IAAjC,EAAuC,CAAvC,CAAZjG,CADCA,GAGDA,GAEI4F,IAAI,UAAC6V,CAAD,EAAI5c,CAAJ;EAChB,WAAI4c,KAAK,CAALA,GACKA,CADLA,IAGF/T,QAAmB,MAAP+T,CAAZ/T,EAAsB,mBAAtBA,GACO6iB,EAAGxqB,KAAHwqB,CAAS1rB,CAAT0rB,IAAc08C,EAAOpoE,CAAPooE,CAJnBxrD,CAAJ;MAjBAwrD,EAwBFE,kBAA6B58C,CAA7B48C,EAAiCF,CAAjCE,EAAyCD,CAAzCC,CAxBEF,CAyBF,IAAMhjD,IAAasG,EAAGxqB,KAAtB,CAcA,OAAOkhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQhW,KAARgW,CAAc4S,CAAd5S,EAAkBsvD,CAAlBtvD,EAA0BuvD,CAA1BvvD,CAAA;KADfsJ,IACkDsJ,OADlDtJ,EAbM,UAAC1K,CAAD;EAQX,SADA,IAAMjL,MAAN,EACSzM,IAAI,CAAb,EAAgBA,IAAI0X,EAAGrT,IAAvB,EAA6BrE,GAA7B,EACEyM,EAASxL,IAATwL,EAAe27D,EAAOpoE,CAAPooE,GAAWhjD,EAAWplB,CAAXolB,IAAgBgjD,EAAOpoE,CAAPooE,CAAhBhjD,GAA4BijD,EAAMroE,CAANqoE,EAAtD57D,EAEF,SAAQif,IAAI;EAAM,eAAAhU,EAAGhQ,GAAHgQ,CAAOjL,CAAPiL,CAAA;WAAlB;KAEK0K,CAAP;EAIF,KAAatf,QAAQ2nB,KAAI89C,gBAAJ99C,CAArB;EAAA,IACa+9C,UAAU/9C,KAAIg+C,oBAAJh+C,CADvB;EAAA,IAEai+C,UAAUj+C,KAAIk+C,oBAAJl+C,CAFvB;EAAA,IAGam+C,UAAUn+C,KAAIo+C,oBAAJp+C,CAHvB;EAAA,IAIaq+C,UAAUr+C,KAAIs+C,oBAAJt+C,CAJvB,CC/GA,mBAAA,CACI9qB,CADJ,EAC0BoD,CAD1B,EACwDkJ,CADxD;qBAC0BlJ,4BAA8BkJ,QACtD,IAAMyf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,WAAxBA,CAAX;EAAA,MAEM3D,IAAO+iD,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAFb;EAAA,MAGMz+C,IAAOmB,EAAG9rB,GAAH8rB,CAAOzF,CAAPyF,GAAa,CAAbA,CAHb;EAAA,MAOM9O,IAHI8O,EAAGje,GAAHie,CAAOnB,CAAPmB,EACEjqB,GADFiqB,GAEE3rB,GAFF2rB,CAEMzF,CAFNyF,EAGEtlB,GAHFslB,EAJV;EAAA,MAQMxR,IAAMqQ,EAAKngB,OAALmgB,CAAa3N,EAAE1b,KAAfqpB,EAAsBjd,GAAtBid,CAA0B3N,CAA1B2N,CARZ,CAUA,IAAIte,CAAJ,EAAc;EACZ,QAAMpJ,IAAWomE,qBAA+B/uD,EAAIhZ,KAAnC+nE,EAA0ChjD,CAA1CgjD,CAAjB,CACA,OAAO/uD,EAAI9P,OAAJ8P,CAAYrX,CAAZqX,CAAP;EAEF,UAAOA,CAAP;EAgCF,cAAA,CACIva,CADJ,EAC0BoD,CAD1B,EACwDkJ,CADxD;qBAC0BlJ,4BAA8BkJ,QACtD,IAAIyf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAT,CAEiB,WAAb8B,EAAGxoB,KAAU,KACfwoB,IAAKA,EAAGw9C,KAAHx9C,EADU,EAGjB,IAAMzF,IAAO+iD,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAAb,CA+BA,OA3BiB7jD,WAAW,UAAAxlB,CAAA;EAC1B,QAAM8oD,IAAc6K,mBAA6BrtC,CAA7BqtC,EAAmC3zD,EAAE0E,IAArCivD,CAApB;EAAA,QACIuM,IAAgB55C,CADpB;EAAA,QAEI0iC,IAAYhpD,CAFhB,CAGmB,QAAf8oD,CAAe,KACjBE,IAAYhpD,EAAEgP,SAAFhP,CAAY8oD,CAAZ9oD,CAAZgpD,EACAkX,IAAgBtM,iBAA2BsM,EAAcxgE,MAAzCk0D,EAAiD5zD,EAAE0E,IAAnDkvD,CAFC,EAInB,IAAIvqD,IAAQoZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,aAAAA,EAAQ/Y,GAAR+Y,CAAY6vC,CAAZ7vC,EAAuB+mD,CAAvB/mD,CAAA;OADHsJ,IAC2CumC,cAD3CvmC,CAAZ,CAEA,IAAInW,CAAJ,EAAc;EACZ,UAAMpJ,IAAWomE,qBAA+BjgE,EAAM9H,KAArC+nE,EAA4ChjD,CAA5CgjD,CAAjB,CACAjgE,IAAQA,EAAMoB,OAANpB,CAAcnG,CAAdmG,CAARA;EAYF,cAAQA,UAAOqV,UATE,UAAC3G,CAAD;EACf,YAAMyxD,IAAkBxpE,EAAEuB,KAAFvB,CAAQmD,KAARnD,EAAxB,CAMA,OALAsmB,EAAK5gB,OAAL4gB,CAAa,UAAAljB,CAAA;EACXomE,YAAgBpmE,CAAhBomE,IAAwB,CAAxBA;WADFljD,GAGmBvO,EAAGtN,OAAHsN,CAAWyxD,CAAXzxD,EACK7J,GADL6J,CACSoG,OAAKne,EAAEuB,KAAP4c,EAAc,SAAdA,CADTpG,CAEnB;WAEF;KAxBeyN,EA2BDuG,CA3BCvG,CA2BjB;EAgCF,eAAA,CACIxlB,CADJ,EAC0BoD,CAD1B,EACwDkJ,CADxD;qBAC0BlJ,4BAA8BkJ,QACtD,IAAIyf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAT,CAEiB,WAAb8B,EAAGxoB,KAAU,KACfwoB,IAAKA,EAAGw9C,KAAHx9C,EADU,EAGjB,IAAMzF,IAAO+iD,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAAb;EAAA,MAEMvgB,IAAc6K,mBAA6BrtC,CAA7BqtC,EAAmC5nC,EAAGrnB,IAAtCivD,CAFpB;EAAA,MAGIuM,IAAgB55C,CAHpB;EAAA,MAII0iC,IAAYj9B,CAJhB,CAKmB,QAAf+8B,CAAe,KACjBE,IAAYj9B,EAAG/c,SAAH+c,CAAa+8B,CAAb/8B,CAAZi9B,EACAkX,IAAgBtM,iBAA2BsM,EAAcxgE,MAAz
Ck0D,EAAiD7nC,EAAGrnB,IAApDkvD,CAFC,EAInB,IAAIvqD,IAAQoZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CACR,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ3L,IAAR2L,CAAa6vC,CAAb7vC,EAAwB+mD,CAAxB/mD,CAAA;KADHsJ,IAC4CumC,cAD5CvmC,CAAZ,CAEA,IAAInW,CAAJ,EAAc;EACZ,QAAMpJ,IAAWomE,qBAA+BjgE,EAAM9H,KAArC+nE,EAA4ChjD,CAA5CgjD,CAAjB,CACAjgE,IAAQA,EAAMoB,OAANpB,CAAcnG,CAAdmG,CAARA;EAGF,UAAOA,CAAP;EA8BF,eAAA,CACIrJ,CADJ,EAC0BoD,CAD1B,EACwDkJ,CADxD;qBAC0BlJ,4BAA8BkJ,QACtD,IAAMyf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX;EAAA,MAEM3D,IAAO+iD,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAFb;EAAA,MAKMI,IAAaxgE,cAFJygE,0BAAoC39C,EAAGxqB,KAAvCmoE,EAA8CpjD,CAA9CojD,EACY,CADZA,CAEIzgE,CALnB,CA8BA,OArBiBuc,WAAW,UAAAxlB,CAAA;EAC1B,QAAM2pE,IAAmBx6C,OAAOs6C,CAAPt6C,CAAzB,CAiBA,SAAQ9lB,QAdJsgE,EAAiBpmE,KAAjBomE,KAA2B3pE,EAAEuD,KAA7BomE,GAAqC3pE,CAArC2pE,GAAyC3pE,EAAE6K,IAAF7K,CAAO2pE,EAAiBpmE,KAAxBvD,GACzBoO,IAAIu7D,GACNvpE,IAAIgD,GAAMkJ,IAYboS,UAVE,UAAC3G,CAAD;EACf,YAAMyxD,IAAkBxpE,EAAEuB,KAAFvB,CAAQmD,KAARnD,EAAxB,CAOA,OANAsmB,EAAK5gB,OAAL4gB,CAAa,UAAAljB,CAAA;EACXomE,YAAgBpmE,CAAhBomE,IAAwB,CAAxBA;WADFljD,GAGmBvO,EAAGtN,OAAHsN,CAAWyxD,CAAXzxD,EAEJ7J,GAFI6J,CAEAoG,OAAKne,EAAEuB,KAAP4c,EAAc,SAAdA,CAFApG,EAE0B3J,GAF1B2J,CAE8B4xD,CAF9B5xD,CAGnB;WAEF;KAlBeyN,EAqBDuG,CArBCvG,CAqBjB;EAMF,0BAAA,CACIzN,CADJ,EACW2C,CADX,EAC4BkvD,CAD5B,EAC2CC,CAD3C,EAEIC,CAFJ;EAGO,MAAApzD,QAAA,CAOL,OANIA,EAAEhS,IAAFgS,GAASkzD,EAAMllE,IAAfgS,KACFA,IAAIA,EAAEjM,OAAFiM,CAAU4yD,qBAA+B5yD,EAAEnV,KAAjC+nE,EAAwCO,CAAxCP,CAAV5yD,CADFA,GAGAqB,EAAGrT,IAAHqT,GAAU6xD,EAAMllE,IAAhBqT,KACFA,IAAKA,EAAGtN,OAAHsN,CAAWuxD,qBAA+BvxD,EAAGxW,KAAlC+nE,EAAyCO,CAAzCP,CAAXvxD,CADHA,CAHArB,IAOFqV,IAAI;EACF,UAAM3T,IAAKL,EAAG7J,GAAH6J,CAAO6xD,EAAMv6D,KAANu6D,CAAYlzD,CAAZkzD,EAAe/+D,IAAf++D,CAAoB7xD,EAAGxU,KAAvBqmE,CAAP7xD,CAAX,CACA,OAAuB,QAAhB+xD,CAAgB,GAAO1xD,CAAP,GAAYA,EAAGpJ,SAAHoJ,CAAa0xD,CAAb1xD,CAAnC;SAHJ;EAoCF,cAAA,CACIpY,CADJ,EAC0BoD,CAD1B,EACwDkJ,CADxD;qBAC0BlJ,4BAA8BkJ,QACtD,IAAIyf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAT;EAAA,MACM2/C,IAAQ79C,CADd;EAAA,MAGM89C,IAAWR,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAHjB;EAAA,MAII/iD,IAAOujD,CAJX;EAAA,MAKMC,IAAenW,mBAA6BrtC,CAA7BqtC,EAAmC5nC,EAAGrnB,IAAtCivD,CALrB,CAMoB,QAAhBmW,CAAgB,KAClB/9C,IAAKA,EAAG/c,SAAH+c,CAAa+9C,CAAb/9C,CAALA,EACAzF,IAAOstC,iBAA2BttC,EAAK5mB,MAAhCk0D,EAAwC7nC,EAAGrnB,IAA3CkvD,CAFW,EAKpB,IAEIr5C,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACN,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQpZ,GAARoZ,CAAY4S,CAAZ5S,EAAgBmN,CAAhBnN,CAALikD,CAAA;KADb36C,IAC2CsJ,OAD3CtJ,EAFG,UAAC1K,CAAD,EAAQ2C,CAAR;EACT,WAAAqvD,iBAAiBhyD,CAAjBgyD,EAAqBrvD,CAArBqvD,EAA4BH,CAA5BG,EAAmCF,CAAnCE,EAA6CD,CAA7CC,CAAA;KACMtnD,CAFV,CAIA,IAAInW,CAAJ,EAAc;EACZ,QAAMpJ,IAAWomE,qBAA+B/uD,EAAIhZ,KAAnC+nE,EAA0CO,CAA1CP,CAAjB,CACA/uD,IAAMA,EAAI9P,OAAJ8P,CAAYrX,CAAZqX,CAANA;EAEF,UAAOA,CAAP;EA+BF,cAAA,CACIva,CADJ,EAC0BoD,CAD1B,EACwDkJ,CADxD;qBAC0BlJ,4BAA8BkJ,QACtD,IAAIyf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAT;EAAA,MACM2/C,IAAQ79C,CADd;EAAA,MAGM89C,IAAWR,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAHjB;EAAA,MAII/iD,IAAOujD,CAJX;EAAA,MAKMC,IAAenW,mBAA6BrtC,CAA7BqtC,EAAmC5nC,EAAGrnB,IAAtCivD,CALrB,CAMoB,QAAhBmW,CAAgB,KAClB/9C,IAAKA,EAAG/c,SAAH+c,CAAa+9C,CAAb/9C,CAALA,EACAzF,IAAOstC,iBAA2BttC,EAAK5mB,MAAhCk0D,EAAwC7nC,EAAGrnB,IAA3CkvD,CAFW,EAKpB,IAEIr5C,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CACN,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQlZ,GAARkZ,CAAY4S,CAAZ5S,EAAgBmN,CAAhBnN,CAALikD,CAAA;KADb36C,IAC2CsJ,OAD3CtJ,EAFG,UAAC1K,CAAD,EAAQ2C,CAAR;EACT,WAAAqvD,iBAAiBhyD,CAAjBgyD,EAAqBrvD,CAArBqvD,EAA4BH,CAA5BG,EAAmCF,CAAnCE,EAA6CD,CAA7CC
,CAAA;KACMtnD,CAFV,CAIA,IAAInW,CAAJ,EAAc;EACZ,QAAMpJ,IAAWomE,qBAA+B/uD,EAAIhZ,KAAnC+nE,EAA0CO,CAA1CP,CAAjB,CACA/uD,IAAMA,EAAI9P,OAAJ8P,CAAYrX,CAAZqX,CAANA;EAEF,UAAOA,CAAP;EA2BF,iBAAA,CAAmCva,CAAnC,EAAyDoD,CAAzD;qBAAyDA,OACvD,IAAI2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,QAAxBA,CAAT,CAEY,QAAR7mB,CAAQ,KACVA,IAAO,CADG,EAGZ,IAAIkjB,IAAO+iD,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAAX;EAAA,MACMS,IAAenW,mBAA6BrtC,CAA7BqtC,EAAmC5nC,EAAGrnB,IAAtCivD,CADrB,CAEoB,QAAhBmW,CAAgB,KAClB/9C,IAAKA,EAAG/c,SAAH+c,CAAa+9C,CAAb/9C,CAALA,EACAzF,IAAOstC,iBAA2BttC,EAAK5mB,MAAhCk0D,EAAwC7nC,EAAGrnB,IAA3CkvD,CAFW,EAOpB,OAAOnxC,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1L,MAAR0L,CAAe4S,CAAf5S,EAAmBmN,EAAK,CAALA,CAAnBnN,CAAA;KADfsJ,IAC6CsJ,OAD7CtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAA1a,UAAU0a,CAAV1a,CAAA;WAAlB;KAEKoR,CAAP;EA2BF,iBAAA,CAAmCziB,CAAnC,EAAyDoD,CAAzD;qBAAyDA,OACvD,IAAI2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,QAAxBA,CAAT,CAEY,QAAR7mB,CAAQ,KACVA,IAAO,CADG,EAGZ,IAAIkjB,IAAO+iD,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAAX;EAAA,MACMS,IAAenW,mBAA6BrtC,CAA7BqtC,EAAmC5nC,EAAGrnB,IAAtCivD,CADrB,CAEoB,QAAhBmW,CAAgB,KAClB/9C,IAAKA,EAAG/c,SAAH+c,CAAa+9C,CAAb/9C,CAALA,EACAzF,IAAOstC,iBAA2BttC,EAAK5mB,MAAhCk0D,EAAwC7nC,EAAGrnB,IAA3CkvD,CAFW,EAOpB,OAAOnxC,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQzL,MAARyL,CAAe4S,CAAf5S,EAAmBmN,EAAK,CAALA,CAAnBnN,CAAA;KADfsJ,IAC6CsJ,OAD7CtJ,EAHM,UAAC1K,CAAD;EACX,aAAQgU,IAAI;EAAM,eAAA1a,UAAU0a,CAAV1a,CAAA;WAAlB;KAEKoR,CAAP;EAgCF,cAAA,CACIziB,CADJ,EAC0BoD,CAD1B,EACwDkJ,CADxD;qBAC0BlJ,4BAA8BkJ,QACtD,IAAIyf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,EAA+B,MAA/BA,CAAT,CACA/gB,OACiB,WAAb6iB,EAAGxoB,KADP2F,EAEI,6CAA2C6iB,EAAGxoB,KAFlD2F,EAIA,IAAM2gE,IAAWR,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAAjB;EAAA,MACI/iD,IAAOujD,CADX;EAAA,MAEMC,IAAenW,mBAA6BrtC,CAA7BqtC,EAAmC5nC,EAAGrnB,IAAtCivD,CAFrB,CAGoB,QAAhBmW,CAAgB,KAClB/9C,IAAKA,EAAG/c,SAAH+c,CAAa+9C,CAAb/9C,CAALA,EACAzF,IAAOstC,iBAA2BttC,EAAK5mB,MAAhCk0D,EAAwC7nC,EAAGrnB,IAA3CkvD,CAFW,EAIpB,IAAMr5C,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ9L,GAAR8L,CAAY4S,CAAZ5S,EAAgBmN,CAAhBnN,CAAA;KAAhCsJ,IAAwDsJ,OAAxDtJ,CAAZ,CACA,IAAInW,CAAJ,EAAc;EACZ,QAAMpJ,IAAWomE,qBAA+B/uD,EAAIhZ,KAAnC+nE,EAA0CO,CAA1CP,CAAjB,CACA,OAAO/uD,EAAI9P,OAAJ8P,CAAYrX,CAAZqX,CAAP;EAEF,UAAOA,CAAP;EA+BF,cAAA,CACIva,CADJ,EAC0BoD,CAD1B,EACwDkJ,CADxD;qBAC0BlJ,4BAA8BkJ,QACtD,IAAIyf,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,EAA+B,MAA/BA,CAAT,CACA/gB,OACiB,WAAb6iB,EAAGxoB,KADP2F,EAEI,6CAA2C6iB,EAAGxoB,KAFlD2F,EAIA,IAAM2gE,IAAWR,eAAyBjmE,CAAzBimE,EAA+Bt9C,EAAGxqB,KAAlC8nE,CAAjB;EAAA,MACI/iD,IAAOujD,CADX;EAAA,MAEMC,IAAenW,mBAA6BrtC,CAA7BqtC,EAAmC5nC,EAAGrnB,IAAtCivD,CAFrB,CAGoB,QAAhBmW,CAAgB,KAClB/9C,IAAKA,EAAG/c,SAAH+c,CAAa+9C,CAAb/9C,CAALA,EACAzF,IAAOstC,iBAA2BttC,EAAK5mB,MAAhCk0D,EAAwC7nC,EAAGrnB,IAA3CkvD,CAFW,EAIpB,IAAMr5C,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ7L,GAAR6L,CAAY4S,CAAZ5S,EAAgBmN,CAAhBnN,CAAA;KAAhCsJ,IAAwDsJ,OAAxDtJ,CAAZ,CACA,IAAInW,CAAJ,EAAc;EACZ,QAAMpJ,IAAWomE,qBAA+B/uD,EAAIhZ,KAAnC+nE,EAA0CO,CAA1CP,CAAjB,CACA,OAAO/uD,EAAI9P,OAAJ8P,CAAYrX,CAAZqX,CAAP;EAEF,UAAOA,CAAP;EAgBF,kBAAA,CACIva,CADJ,EAC0BoD,CAD1B,EAEIkJ,CAFJ;qBAC0BlJ,4BACtBkJ,QAEF,IAAMga,IAAO+iD,eAAyBjmE,CAAzBimE,GADbrpE,IAAIiqB,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,SAAxBA,GAC0C1oB,KAAjC8nE,CAAb;EAAA,MACMr8D,IAAOhN,EAAEgN,IAAFhN,CAAOsmB,CAAPtmB,EAAasM,CAAbtM,CADb;EAAA,MAEIgqE,IAAgBh9D,EAAKzL,KAFzB,CAQA,OALK+K,MACH09D,IAAgBV,qBAA+Bt8D,EAAKzL,KAApC+nE,EAA2ChjD,CAA3CgjD,CADbh9D,KAKGU
,SAAMC,UAFKjN,EAAE0qB,OAAF1qB,GAAY8N,GAAZ9N,CAAgBgN,EAAKvC,OAALuC,CAAag9D,CAAbh9D,CAAhBhN,EAA6CyQ,MAA7CzQ,GACSgN,IADThN,CACcsmB,CADdtmB,EACoBsM,CADpBtM,GAEnB;EAGF,KAAaqN,MAAMyd,KAAIm/C,YAAJn/C,CAAnB;EAAA,IAEaxd,MAAMwd,KAAIo/C,YAAJp/C,CAFnB;EAAA,IAGapd,SAASod,KAAIq/C,kBAAJr/C,CAHtB;EAAA,IAIard,SAASqd,KAAIs/C,kBAAJt/C,CAJtB;EAAA,IAKavd,YAAYud,KAAIu/C,wBAAJv/C,CALzB;EAAA,IAMa7qB,MAAM6qB,KAAIw/C,YAAJx/C,CANnB;EAAA,IAOa9d,OAAO8d,KAAIy/C,cAAJz/C,CAPpB;EAAA,IAQa/qB,MAAM+qB,KAAI0/C,YAAJ1/C,CARnB;EAAA,IASa2/C,UAAU3/C,KAAI4/C,oBAAJ5/C,CATvB;EAAA,IAUa1qB,QAAM0qB,KAAI6/C,YAAJ7/C,CAVnB;EAAA,IAWatd,OAAOsd,KAAI8/C,cAAJ9/C,CAXpB,CC5iBA,kBAAA,CACI9rB,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,UAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,UAAxBA,CADX,CAIA,OAFA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GACAC,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CADAD,EAEOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQlK,QAARkK,CAAiB4pD,CAAjB5pD,EAAqB6pD,CAArB7pD,CAAA;KAAhCsJ,IAA2DsgD,OAAIC,OAA/DvgD,CAAP;EAYF,yBAAA,CACIzjB,CADJ,EACqBsB,CADrB;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,gBAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,gBAAxBA,CADX,CAGA,OADA0B,kBAAkBo3C,EAAGxhE,KAArBoqB,EAA4Bq3C,EAAGzhE,KAA/BoqB,EAAsC,2BAAtCA,GACOo3C,EAAG9zD,QAAH8zD,CAAYC,CAAZD,CAAP;EAmBF,eAAA,CACI/jE,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,MAAxBA,CADX,CAKA,OAHA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GACAC,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CADAD,EAGOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQhK,IAARgK,CAAa4pD,CAAb5pD,EAAiB6pD,CAAjB7pD,CAAA;KAAhCsJ,IAAuDsgD,OAAIC,OAA3DvgD,CAAP;EAWF,qBAAA,CAAuCzjB,CAAvC,EAAwDsB,CAAxD;EACE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,YAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,YAAxBA,CADX,CAGA,OADA0B,kBAAkBo3C,EAAGxhE,KAArBoqB,EAA4Bq3C,EAAGzhE,KAA/BoqB,EAAsC,uBAAtCA,GACOo3C,EAAG5zD,IAAH4zD,CAAQC,CAARD,CAAP;EAoBF,gBAAA,CACI/jE,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,OAAxBA,CADX,CAKA,OAHA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GACAC,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CADAD,EAGOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ9J,KAAR8J,CAAc4pD,CAAd5pD,EAAkB6pD,CAAlB7pD,CAAA;KAAhCsJ,IAAwDsgD,OAAIC,OAA5DvgD,CAAP;EAGF,sBAAA,CAAwCzjB,CAAxC,EAAyDsB,CAAzD;EACE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,aAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,aAAxBA,CADX,CAGA,OADA0B,kBAAkBo3C,EAAGxhE,KAArBoqB,EAA4Bq3C,EAAGzhE,KAA/BoqB,EAAsC,wBAAtCA,GACOo3C,EAAG1zD,KAAH0zD,CAASC,CAATD,CAAP;EAoBF,oBAAA,CACI/jE,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,WAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,WAAxBA,CADX,CAKA,OAHA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GACAC,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CADAD,EAGOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ5J,SAAR4J,CAAkB4pD,CAAlB5pD,EAAsB6pD,CAAtB7pD,CAAA;KAAhCsJ,IAA4DsgD,OAAIC,OAAhEvgD,CAAP;EAIF,0BAAA,CACIzjB,CADJ,EACqBsB,CADrB;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,iBAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,iBAAxBA,CADX,CAGA,OADA0B,kBAAkBo3C,EAAGxhE,KAArBoqB,EAA4Bq3C,EAAGzhE,KAA/BoqB,EAAsC
,4BAAtCA,GACOo3C,EAAGxzD,SAAHwzD,CAAaC,CAAbD,CAAP;EAoBF,kBAAA,CACI/jE,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,SAAxBA,CADX,CAKA,OAHA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GACAC,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CADAD,EAGOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1J,OAAR0J,CAAgB4pD,CAAhB5pD,EAAoB6pD,CAApB7pD,CAAA;KAAhCsJ,IAA0DsgD,OAAIC,OAA9DvgD,CAAP;EAIF,wBAAA,CAA0CzjB,CAA1C,EAA2DsB,CAA3D;EACE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,eAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,eAAxBA,CADX,CAGA,OADA0B,kBAAkBo3C,EAAGxhE,KAArBoqB,EAA4Bq3C,EAAGzhE,KAA/BoqB,EAAsC,0BAAtCA,GACOo3C,EAAGtzD,OAAHszD,CAAWC,CAAXD,CAAP;EAoBF,uBAAA,CACI/jE,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,cAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,cAAxBA,CADX,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GACAC,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CADAD,CAMA,OAAOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQxJ,YAARwJ,CAAqB4pD,CAArB5pD,EAAyB6pD,CAAzB7pD,CAAA;KADfsJ,IAC8CsgD,OAAIC,OADlDvgD,EAHM,UAAC1K,CAAD;EACX,aAAQgrD,IAAI;EAAM,eAAA1xD,UAAU0xD,CAAV1xD,CAAA;WAAe2xD,IAAI;EAAM,eAAA3xD,UAAU2xD,CAAV3xD,CAAA;WAA3C;KAEKoR,CAAP;EAIF,6BAAA,CACIzjB,CADJ,EACqBsB,CADrB;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,oBAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,oBAAxBA,CADX,CAGA,OADA0B,kBAAkBo3C,EAAGxhE,KAArBoqB,EAA4Bq3C,EAAGzhE,KAA/BoqB,EAAsC,+BAAtCA,GACOo3C,EAAGpzD,YAAHozD,CAAgBC,CAAhBD,CAAP;EAGF,KAAa1zD,QAAQyb,KAAIigD,gBAAJjgD,CAArB;EAAA,IACaxb,cAAcwb,KAAIkgD,4BAAJlgD,CAD3B;EAAA,IAEarb,UAAUqb,KAAImgD,oBAAJngD,CAFvB;EAAA,IAGanb,eAAemb,KAAIogD,8BAAJpgD,CAH5B;EAAA,IAIalb,qBAAqBkb,KAAIqgD,0CAAJrgD,CAJlC;EAAA,IAKapb,gBAAgBob,KAAIsgD,gCAAJtgD,CAL7B;EAAA,IAMa3b,OAAO2b,KAAIugD,cAAJvgD,CANpB;EAAA,IAOavb,YAAYub,KAAIwgD,wBAAJxgD,CAPzB;EAAA,IAQatb,kBAAkBsb,KAAIygD,oCAAJzgD,CAR/B;EAAA,IASa1b,aAAa0b,KAAI0gD,0BAAJ1gD,CAT1B;EAAA,IAUa7b,WAAW6b,KAAI2gD,sBAAJ3gD,CAVxB;EAAA,IAWa5b,iBAAiB4b,KAAI4gD,kCAAJ5gD,CAX9B,CCxMA,aAAA,CAAgC9rB,CAAhC,EAAsDsB,CAAtD;EACE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,KAAxBA,CADX,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,EAEA,IAAM/jD,IACFgN,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CADJ,CAsBA,OAAOrR,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQxL,GAARwL,CAAY4pD,CAAZ5pD,EAAgB6pD,CAAhB7pD,CAAA;KAAhCsJ,IAAsDsgD,OAAIC,OAA1DvgD,EAnBK,UAAC1K,CAAD;EAiBV,aAAQgrD,IAhBK;EACX,YAAIxoD,IAAMxC,CAAV;EAAA,YACM4zD,IAAaC,iBAAgC7I,EAAGxhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAKA,OAHID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,CADJoxD,GAGGpxD,EAAI9P,OAAJ8P,CAAYwoD,EAAGxhE,KAAfgZ,CAAP;WAUgByoD,IARL;EACX,YAAIzoD,IAAMxC,CAAV;EAAA,YACM4zD,IAAaC,iBAAgC5I,EAAGzhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAKA,OAHID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,CADJoxD,GAGGpxD,EAAI9P,OAAJ8P,CAAYyoD,EAAGzhE,KAAfgZ,CAAP;WAEF;KAEKkI,CAAP;EAiBF,eAAA,CAAiC2K,CAAjC;EACElkB,SACI/H,MAAMC,OAAND,CAAcisB,CAAdjsB,CADJ+H,EAEI;EAAM,WAAA,4DAAA;KAFVA,GAGAA,OACIkkB,EAAQ1tB,MAAR0tB,IAAkB,CADtBlkB,EAEI;EAAM,WAAA,yDACCkkB,EAAQ1tB,MADT;KAFVwJ,CAHAA,CAOA,IAAMy3C,IACFvzB,EAAQhmB,GAARgmB,CAAY,UAAC5O,CAAD,EAAIne,CAAJ;EAAU,WAAA4pB,gBAAgBzL,CAAhByL,EAAmB,YAAU5pB,CAA7B4pB,EAAkC,MAAlCA,CAAA;KAAtBmD,CADJ;EAAA,MAEMy+C,IAAclrB,EAAS,CA
ATA,CAFpB,CAGAA,EAASj7C,OAATi7C,CAAiB,UAAAniC,CAAA;EACf,QAAIA,EAAEjb,KAAFib,KAAYqtD,EAAYtoE,KAA5B,EACE,MAAM,IAAI1E,KAAJ,CACF,0DADE,CAAN;KAFJ8hD,GAMAA,EAASj7C,OAATi7C,CAAiB,UAAAniC,CAAA;EACf,SAAKnI,YAAiBmI,EAAEjd,KAAnB8U,EAA0Bw1D,EAAYtqE,KAAtC8U,CAAL,EACE,MAAM,IAAIxX,KAAJ,CACF,0DADE,CAAN;KAFJ8hD,CANAA,CAaA,IAOM5pC,IAAyB4pC,CAP/B,CAQA,OAAOl+B,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ2yD,IAAR3yD,CAAawnC,CAAbxnC,CAAA;KAAhCsJ,EAAwD1L,CAAxD0L,EARK,UAAC1K,CAAD;EACV,QAAMg0D,MAAN,CAIA,OAHAprB,EAASj7C,OAATi7C,CAAiB,UAACniC,CAAD,EAAIne,CAAJ;EACf0rE,QAAK1rE,CAAL0rE,IAAU;EAAM,eAAAh0D,EAAGpM,KAAHoM,EAAA;SAAhBg0D;OADFprB,GAGOorB,CAAP;KAGKtpD,CAAP;EAWF,oBAAA,CAAsCzjB,CAAtC,EAA4CsB,CAA5C;EAEE,SADAukB,kBAAuB7lB,EAAEuC,KAAzBsjB,EAAgCvkB,EAAEiB,KAAlCsjB,EAAyC,sBAAzCA,GACO7lB,EAAE2O,GAAF3O,CAAMsB,CAANtB,CAAP;EA4BF,cAAA,CAAgCA,CAAhC,EAAsDsB,CAAtD;EACE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,KAAxBA,CADX,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,EAEA,IAAM/jD,IACFgN,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CADJ,CAsBA,OAAOrR,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQw8C,QAARx8C,CAAiB4pD,CAAjB5pD,EAAqB6pD,CAArB7pD,CAAA;KADfsJ,IAC0CsgD,OAAIC,OAD9CvgD,EAnBK,UAAC1K,CAAD;EAiBV,aAAQgrD,IAhBK;EACX,YAAIxoD,IAAMxC,CAAV;EAAA,YACM4zD,IAAaC,iBAAgC7I,EAAGxhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAKA,OAHID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,CADJoxD,GAGGpxD,EAAI9P,OAAJ8P,CAAYwoD,EAAGxhE,KAAfgZ,CAAP;WAUgByoD,IARL;EACX,YAAIzoD,IAAMxC,CAAV;EAAA,YACM4zD,IAAaC,iBAAgC5I,EAAGzhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAKA,OAHID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,CADJoxD,GAGGpxD,EAAIpK,GAAJoK,GAAU9P,OAAV8P,CAAkByoD,EAAGzhE,KAArBgZ,CAAP;WAEF;KAEKkI,CAAP;EAaF,oBAAA,CAAsCzjB,CAAtC,EAA4CsB,CAA5C;EAEE,SADAukB,kBAAuB7lB,EAAEuC,KAAzBsjB,EAAgCvkB,EAAEiB,KAAlCsjB,EAAyC,sBAAzCA,GACO7lB,EAAE8O,GAAF9O,CAAMsB,CAANtB,CAAP;EA8BF,cAAA,CAAgCgtE,CAAhC,EAAoDlqE,CAApD;EACE,MAAMmqE,IAAQhiD,gBAAgB+hD,CAAhB/hD,EAAsB,MAAtBA,EAA8B,KAA9BA,CAAd;EAAA,MACMiiD,IAAOjiD,gBAAgBnoB,CAAhBmoB,EAAqB,KAArBA,EAA4B,KAA5BA,CADb;EAAA,MAGMnD,IACFgN,2BAA0Cm4C,EAAM1qE,KAAhDuyB,EAAuDo4C,EAAK3qE,KAA5DuyB,CAJJ,CAKAk4C,IAAOC,EAAMphE,IAANohE,CAAWj/C,WAAWi/C,EAAM1oE,KAAjBypB,EAAwBk/C,EAAK3oE,KAA7BypB,CAAXi/C,CAAPD,EACAlqE,IAAMoqE,EAAKrhE,IAALqhE,CAAUl/C,WAAWi/C,EAAM1oE,KAAjBypB,EAAwBk/C,EAAK3oE,KAA7BypB,CAAVk/C,CADNF,CAuBA,OAAOvpD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQnL,GAARmL,CAAY8yD,CAAZ9yD,EAAmB+yD,CAAnB/yD,CAALikD,CAAA;KADvB36C,IACwDwpD,UAAOC,SAD/DzpD,EArBM,UAAC1K,CAAD,EAAa2C,CAAb;EACJ,QAAAhE,QAAA,CAkBP,SAAQu1D,OAjBQ;EACd,YAAME,IAAWD,EAAKxhD,OAALwhD,EAAjB;EAAA,YACI3xD,IAAMxC,EAAG7J,GAAH6J,CAAOo0D,EAASj+D,GAATi+D,CAAaF,EAAMj+D,GAANi+D,CAAUE,EAASr+D,GAATq+D,CAAah9C,OAAO,CAAPA,CAAbg9C,CAAVF,CAAbE,CAAPp0D,CADV;EAAA,YAEM4zD,IAAaC,iBAAgCK,EAAM1qE,KAAtCqqE,EAA6C9kD,CAA7C8kD,CAFnB,CAMA,OAHID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,CADJoxD,GAGGpxD,EAAI9P,OAAJ8P,CAAY0xD,EAAM1qE,KAAlBgZ,CAAP;WAUsB2xD,MART;EACb,YAAI3xD,IAAMxC,EAAG7J,GAAH6J,CAAOrB,EAAExI,GAAFwI,CAAMu1D,EAAMxlE,GAANwlE,EAANv1D,EAAmBgU,OAAnBhU,EAAPqB,CAAV;EAAA,YACM4zD,IAAaC,iBAAgCM,EAAK3qE,KAArCqqE,EAA4C9kD,CAA5C8kD,CADnB,CAKA,OAHID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,CADJoxD,GAGGpxD,EAAI9P,OAAJ8P,CAAY2xD,EAAK3qE,KAAjBgZ,CAAP;WAEF;KAEKkI,CAAP;EAcF,oBAAA,CAAsCupD,CAAtC,EAA+ClqE,CAA/C;EAEE,SADA+iB,kBAAuBmnD,EAAKzqE,KAA5BsjB,EAAmC/i
B,EAAIP,KAAvCsjB,EAA8C,sBAA9CA,GACOmnD,EAAKh+D,GAALg+D,CAASlqE,CAATkqE,CAAP;EA2BF,cAAA,CAAgChtE,CAAhC,EAAsDsB,CAAtD;EACE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,KAAxBA,CADX,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,EAEA,IAAM/jD,IACFgN,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CADJ,CAsBA,OAAOrR,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ83C,QAAR93C,CAAiB4pD,CAAjB5pD,EAAqB6pD,CAArB7pD,CAAA;KADfsJ,IAC0CsgD,OAAIC,OAD9CvgD,EAnBK,UAAC1K,CAAD;EAiBV,aAAQgrD,IAhBK;EACX,YAAMxoD,IAAMxC,EAAG7J,GAAH6J,CAAOirD,EAAGt4C,OAAHs4C,EAAPjrD,CAAZ;EAAA,YACM4zD,IAAaC,iBAAgC7I,EAAGxhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAEA,OAAID,EAAWjsE,MAAXisE,GAAoB,CAApBA,GACKpxD,EAAIna,GAAJma,CAAQoxD,CAARpxD,EAAoB9P,OAApB8P,CAA4BwoD,EAAGxhE,KAA/BgZ,CADLoxD,GAGGpxD,CAHP;WAagByoD,IARL;EACX,YAAMzoD,IAAMxC,EAAG7J,GAAH6J,CAAOgrD,EAAGr4C,OAAHq4C,EAAPhrD,CAAZ;EAAA,YACM4zD,IAAaC,iBAAgC5I,EAAGzhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAEA,OAAID,EAAWjsE,MAAXisE,GAAoB,CAApBA,GACKpxD,EAAIna,GAAJma,CAAQoxD,CAARpxD,EAAoB9P,OAApB8P,CAA4ByoD,EAAGzhE,KAA/BgZ,CADLoxD,GAGGpxD,CAHP;WAKF;KAEKkI,CAAP;EAaF,oBAAA,CAAsCzjB,CAAtC,EAA4CsB,CAA5C;EAEE,SADAukB,kBAAuB7lB,EAAEuC,KAAzBsjB,EAAgCvkB,EAAEiB,KAAlCsjB,EAAyC,2BAAzCA,GACO7lB,EAAEkP,GAAFlP,CAAMsB,CAANtB,CAAP;EA6BF,cAAA,CAAgCA,CAAhC,EAAsDsB,CAAtD;EACE,MAIIka,CAJJ;EAAA,MAAMuoD,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,KAAxBA,CADX,CAKA,IAHA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GAGiB,YAAb9H,EAAGx/D,KAAU,IAAwB,YAAby/D,EAAGz/D,KAA/B,EACE,OAAO8K,SAAS00D,CAAT10D,EAAa20D,CAAb30D,CAAP,CAEAmM,IAAc,UAACrB,CAAD;EAA4B,WAAAA,EAAQizD,UAARjzD,CAAmB4pD,CAAnB5pD,EAAuB6pD,CAAvB7pD,CAAA;KAA1CqB,CAGF,IAAMsM,IACFgN,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CADJ,CAsBA,OAAOrR,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqBjI,CAArBiI,IAAmCsgD,OAAIC,OAAvCvgD,EApBK,UAAC1K,CAAD;EAkBV,aAAQgrD,IAjBK;EACX,YAAMxoD,IAAMxC,EAAG3J,GAAH2J,CAAOirD,EAAGt4C,OAAHs4C,EAAPjrD,CAAZ;EAAA,YACM4zD,IAAaC,iBAAgC7I,EAAGxhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAEA,OAAID,EAAWjsE,MAAXisE,GAAoB,CAApBA,GACKpxD,EAAIna,GAAJma,CAAQoxD,CAARpxD,EAAoB9P,OAApB8P,CAA4BwoD,EAAGxhE,KAA/BgZ,CADLoxD,GAGGpxD,CAHP;WAcgByoD,IATL;EACX,YAAIzoD,IAAMxC,EAAG7J,GAAH6J,CAAOgrD,EAAGr4C,OAAHq4C,EAAPhrD,CAAV;EAAA,YACM4zD,IAAaC,iBAAgC5I,EAAGzhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAEID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,EAAoB9P,OAApB8P,CAA4ByoD,EAAGzhE,KAA/BgZ,CADJoxD,EAGJ,IAAMU,IAAMrJ,EAAGvyD,MAAHuyD,EAAZ,CACA,OAAOzoD,EAAInM,GAAJmM,CAAQ8xD,EAAI3hD,OAAJ2hD,EAAR9xD,EAAuBpK,GAAvBoK,EAAP;WAEF;KAEKkI,CAAP;EA4BF,mBAAA,CACIzjB,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,UAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,UAAxBA,CADX,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,EAEA,IACM/jD,IACFgN,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CAFJ,CAuBA,OAAOrR,IAAIE,MAAJF,CAAW4I,SAAX5I,CAvBa,UAACtJ,CAAD;EAA4B,WAAAA,EAAQ9K,QAAR8K,CAAiB4pD,CAAjB5pD,EAAqB6pD,CAArB7pD,CAAA;KAuBzCsJ,IAAmCsgD,OAAIC,OAAvCvgD,EApBK,UAAC1K,CAAD;EAkBV,aAAQgrD,IAjBK;EACX,YAAMxoD,IAAMxC,EAAG3J,GAAH2J,CAAOirD,EAAGt4C,OAAHs4C,EAAPjrD,CAAZ;EAAA,YACM4zD,IAAaC,iBAAgC7I,EAAGxhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAEA,OAAID,EAAWjsE,MAAXisE,GAAoB,CAApBA,GACKpxD,EAAIna,GAAJma,CAAQoxD,CAARpxD,EAAoB9P,OAApB8P,CAA4BwoD,EAAGxhE,KAA/BgZ,CADLoxD,GAGGpxD,CAHP;WAcgByoD,IATL;EACX,YAAIzoD,IAAMxC,EAAG7J,GAAH6J,CAAOgrD,EAAGr4C,OAAHq4C,EAAPhrD,CAAV;EAAA,YACM4zD,IAAaC,iBAAgC5I,EAAGzhE,
KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAEID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,EAAoB9P,OAApB8P,CAA4ByoD,EAAGzhE,KAA/BgZ,CADJoxD,EAGJ,IAAMU,IAAMrJ,EAAGvyD,MAAHuyD,EAAZ,CACA,OAAOzoD,EAAInM,GAAJmM,CAAQ8xD,EAAI3hD,OAAJ2hD,EAAR9xD,EAAuBpK,GAAvBoK,EAAP;WAEF;KAEKkI,CAAP;EAUF,oBAAA,CAAsCzjB,CAAtC,EAA4CsB,CAA5C;EAEE,SADAukB,kBAAuB7lB,EAAEuC,KAAzBsjB,EAAgCvkB,EAAEiB,KAAlCsjB,EAAyC,yBAAzCA,GACO7lB,EAAEoP,GAAFpP,CAAMsB,CAANtB,CAAP;EA8BF,cAAA,CAAgCA,CAAhC,EAAsDsB,CAAtD;EACE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,KAAxBA,CADX,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,EAEA,IAAM/jD,IACFgN,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CADJ,CAoBA,OAAOrR,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQxK,GAARwK,CAAY4pD,CAAZ5pD,EAAgB6pD,CAAhB7pD,CAAA;KAAhCsJ,IAAsDsgD,OAAIC,OAA1DvgD,EAlBK,UAAC1K,CAAD;EAgBV,aAAQgrD,IAfK;EACX,YAAM4I,IAAaC,iBAAgC7I,EAAGxhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CAAnB,CACA,OAAID,EAAWjsE,MAAXisE,GAAoB,CAApBA,GACK5zD,EAAG3X,GAAH2X,CAAO4zD,CAAP5zD,EAAmBtN,OAAnBsN,CAA2BgrD,EAAGxhE,KAA9BwW,CADL4zD,GAGG5zD,CAHP;WAagBirD,IARL;EACX,YAAMzoD,IAAMxC,EAAG7J,GAAH6J,CAAOgrD,EAAG30D,GAAH20D,CAAOC,CAAPD,EAAWhhE,KAAXghE,GAAmB5yD,GAAnB4yD,EAAPhrD,CAAZ;EAAA,YACM4zD,IAAaC,iBAAgC5I,EAAGzhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CADnB,CAEA,OAAID,EAAWjsE,MAAXisE,GAAoB,CAApBA,GACKpxD,EAAIna,GAAJma,CAAQoxD,CAARpxD,EAAoB9P,OAApB8P,CAA4ByoD,EAAGzhE,KAA/BgZ,CADLoxD,GAGGpxD,CAHP;WAKF;KAEKkI,CAAP;EAWF,oBAAA,CAAsCzjB,CAAtC,EAA4CsB,CAA5C;EAEE,SADAukB,kBAAuB7lB,EAAEuC,KAAzBsjB,EAAgCvkB,EAAEiB,KAAlCsjB,EAAyC,sBAAzCA,GACO7lB,EAAE2P,GAAF3P,CAAMsB,CAANtB,CAAP;EA6BF,kBAAA,CACIA,CADJ,EAC0BsB,CAD1B;EAEE,MAAIyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAT;EAAA,MACI+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,SAAxBA,CADT,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GAEiB,WAAb9H,EAAGx/D,KAAU,KACfw/D,IAAKA,EAAGwG,KAAHxG,EADU,CAFjB8H,EAKiB,WAAb7H,EAAGz/D,KAAU,KACfy/D,IAAKA,EAAGuG,KAAHvG,EADU,CALjB6H,EAQA/2C,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CARA+2C,CAcA,OAAOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ5K,OAAR4K,CAAgB4pD,CAAhB5pD,EAAoB6pD,CAApB7pD,CAAA;KADfsJ,IACyCsgD,OAAIC,OAD7CvgD,EALK,UAAC1K,CAAD;EAGV,aAAQgrD,IAFK;EAAM,eAAAhrD,EAAG7J,GAAH6J,CAAOgrD,EAAGxzD,SAAHwzD,CAAaC,CAAbD,EAAiBr4C,OAAjBq4C,EAAPhrD,CAAA;WAEDirD,IADL;EAAM,eAAAjrD,EAAG7J,GAAH6J,CAAOgrD,EAAGtzD,OAAHszD,CAAWC,CAAXD,EAAer4C,OAAfq4C,EAAPhrD,CAAA;WACnB;KAEK0K,CAAP;EAWF,wBAAA,CAA0CzjB,CAA1C,EAAgDsB,CAAhD;EAEE,SADAukB,kBAAuB7lB,EAAEuC,KAAzBsjB,EAAgCvkB,EAAEiB,KAAlCsjB,EAAyC,0BAAzCA,GACO7lB,EAAEuP,OAAFvP,CAAUsB,CAAVtB,CAAP;EA6BF,kBAAA,CACIA,CADJ,EAC0BsB,CAD1B;EAEE,MAAIyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,SAAxBA,CAAT;EAAA,MACI+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,SAAxBA,CADT,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GAEiB,WAAb9H,EAAGx/D,KAAU,KACfw/D,IAAKA,EAAGwG,KAAHxG,EADU,CAFjB8H,EAKiB,WAAb7H,EAAGz/D,KAAU,KACfy/D,IAAKA,EAAGuG,KAAHvG,EADU,CALjB6H,EAQA/2C,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CARA+2C,CAcA,OAAOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1K,OAAR0K,CAAgB4pD,CAAhB5pD,EAAoB6pD,CAApB7pD,CAAA;KADfsJ,IACyCsgD,OAAIC,OAD7CvgD,EALK,UAAC1K,CAAD;EAGV,aAAQgrD,IAFK;EAAM,eAAAhrD,EAAG7J,GAAH6J,CAAOgrD,EAAGpzD,YAAHozD,CAAgBC,CAAhBD,EAAoBr4C,OAApBq4C,EAAPhrD,CAAA;WAEDirD,IADL;EAAM,eAAAjrD,EAAG7J,GAAH6J,CAAOgrD,EAAG5zD,IAAH4zD,CAAQC,CAARD,EAAYr4C,OAAZq4C,EAAPhrD,CAAA;WACnB;KAEK0K,CAAP;EAWF,wBAAA,CAA0CzjB,CAA1C,EAAgDsB,CAAhD;EAEE,SADAukB,kBAAuB7lB,EAAEuC
,KAAzBsjB,EAAgCvkB,EAAEiB,KAAlCsjB,EAAyC,0BAAzCA,GACO7lB,EAAEyP,OAAFzP,CAAUsB,CAAVtB,CAAP;EA8BF,4BAAA,CACIA,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,mBAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,mBAAxBA,CADX,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,GAEA/2C,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CAFA+2C,CASA,OAAOpoD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQtK,iBAARsK,CAA0B4pD,CAA1B5pD,EAA8B6pD,CAA9B7pD,CAAA;KADfsJ,IACmDsgD,OAAIC,OADvDvgD,EANK,UAAC1K,CAAD;EACV,QAAMu0D,IAAMn9C,OAAO,CAAPA,CAAZ,CAGA,SAAQ4zC,IAFK;EAAM,eAAAhrD,EAAG7J,GAAH6J,CAAOgrD,EAAGj1D,GAAHi1D,CAAOC,CAAPD,EAAW70D,GAAX60D,CAAeuJ,CAAfvJ,CAAPhrD,CAAA;WAEDirD,IADL;EAAM,eAAAjrD,EAAG7J,GAAH6J,CAAOirD,EAAGl1D,GAAHk1D,CAAOD,CAAPC,EAAW90D,GAAX80D,CAAesJ,CAAftJ,CAAPjrD,CAAA;WACnB;KAEK0K,CAAP;EAaF,kCAAA,CAAoDzjB,CAApD,EAA0DsB,CAA1D;EAGE,SAFAukB,kBACI7lB,EAAEuC,KADNsjB,EACavkB,EAAEiB,KADfsjB,EACsB,oCADtBA,GAEO7lB,EAAE6P,iBAAF7P,CAAoBsB,CAApBtB,CAAP;EAmBF,gBAAA,CACIA,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,OAAxBA,CADX,CAEA4gD,iBAAiB9H,CAAjB8H,EAAqB7H,CAArB6H,EAEA,IAAM/jD,IACFgN,2BAA0CivC,EAAGxhE,KAA7CuyB,EAAoDkvC,EAAGzhE,KAAvDuyB,CADJ,CAwBA,OAAOrR,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQtL,KAARsL,CAAc4pD,CAAd5pD,EAAkB6pD,CAAlB7pD,CAAA;KADfsJ,IACuCsgD,OAAIC,OAD3CvgD,EArBK,UAAC1K,CAAD;EAmBV,aAAQgrD,IAlBK;EACX,YAAM9lD,IAAItP,IAAIo1D,EAAGtyD,MAAHsyD,EAAJp1D,EAAiBq1D,EAAGvyD,MAAHuyD,EAAjBr1D,CAAV;EAAA,YACI4M,IAAMxC,EAAG7J,GAAH6J,CAAOirD,EAAG50D,GAAH40D,CAAO/lD,CAAP+lD,CAAPjrD,CADV;EAAA,YAEM4zD,IAAaC,iBAAgC7I,EAAGxhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CAFnB,CAMA,OAHID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,CADJoxD,GAGGpxD,EAAI9P,OAAJ8P,CAAYwoD,EAAGxhE,KAAfgZ,CAAP;WAWgByoD,IATL;EACX,YAAM/lD,IAAItP,IAAIo1D,EAAGtyD,MAAHsyD,EAAJp1D,EAAiBq1D,EAAGvyD,MAAHuyD,EAAjBr1D,CAAV;EAAA,YACI4M,IAAMpK,IAAI4H,EAAG7J,GAAH6J,CAAOgrD,EAAG30D,GAAH20D,CAAO9lD,CAAP8lD,CAAPhrD,CAAJ5H,CADV;EAAA,YAEMw7D,IAAaC,iBAAgC5I,EAAGzhE,KAAnCqqE,EAA0C9kD,CAA1C8kD,CAFnB,CAMA,OAHID,EAAWjsE,MAAXisE,GAAoB,CAApBA,KACFpxD,IAAMA,EAAIna,GAAJma,CAAQoxD,CAARpxD,CADJoxD,GAGGpxD,EAAI9P,OAAJ8P,CAAYyoD,EAAGzhE,KAAfgZ,CAAP;WAEF;KAEKkI,CAAP;EAIF,KAAa9U,MAAMmd,KAAIyhD,YAAJzhD,CAAnB;EAAA,IACaghD,OAAOhhD,KAAI0hD,cAAJ1hD,CADpB;EAAA,IAEald,YAAYkd,KAAI2hD,wBAAJ3hD,CAFzB;EAAA,IAGajd,QAAQid,KAAI4hD,gBAAJ5hD,CAHrB;EAAA,IAIa1c,MAAM0c,KAAI6hD,YAAJ7hD,CAJnB;EAAA,IAKaxc,YAAYwc,KAAI8hD,wBAAJ9hD,CALzB;EAAA,IAMazc,WAAWyc,KAAI+hD,sBAAJ/hD,CANxB;EAAA,IAOarc,UAAUqc,KAAIgiD,oBAAJhiD,CAPvB;EAAA,IAQapc,gBAAgBoc,KAAIiiD,gCAAJjiD,CAR7B;EAAA,IASavc,UAAUuc,KAAIkiD,oBAAJliD,CATvB;EAAA,IAUatc,gBAAgBsc,KAAImiD,gCAAJniD,CAV7B;EAAA,IAWanc,MAAMmc,KAAIoiD,YAAJpiD,CAXnB;EAAA,IAYalc,YAAYkc,KAAIqiD,wBAAJriD,CAZzB;EAAA,IAaa5c,MAAM4c,KAAIsiD,YAAJtiD,CAbnB;EAAA,IAca3c,YAAY2c,KAAIuiD,wBAAJviD,CAdzB;EAAA,IAea9c,MAAM8c,KAAIwiD,YAAJxiD,CAfnB;EAAA,IAgBa7c,YAAY6c,KAAIyiD,wBAAJziD,CAhBzB;EAAA,IAiBajc,oBAAoBic,KAAI0iD,wCAAJ1iD,CAjBjC;EAAA,IAkBahc,0BAA0Bgc,KAAI2iD,oDAAJ3iD,CAlBvC;EAAA,IAmBahd,MAAMgd,KAAI4iD,YAAJ5iD,CAnBnB;EAAA,IAoBa/c,YAAY+c,KAAI6iD,wBAAJ7iD,CApBzB,CC9uBA,oBAAA,CAAuC9qB,CAAvC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,YAAxBA,EAAsC,MAAtCA,CAAX,CAGA,OAFAjpB,OAAoB,WAAb+qB,EAAGxoB,KAAVvC,EAA4B,mCAA5BA,GAEOyhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQpJ,UAARoJ,CAAmB4S,CAAnB5S,CAAA;KAAhCsJ,IAAyDsJ,OAAzDtJ,CAAP;EAiBF,qBAAA,CACIzjB,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyi
E,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,YAAxBA,EAAsC,MAAtCA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,YAAxBA,EAAsC,MAAtCA,CADX,CAOA,OALAjpB,OACiB,WAAb+hE,EAAGx/D,KAAU,IAAuB,WAAby/D,EAAGz/D,KAD9BvC,EAEI,mCAFJA,GAGA8pE,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CAHA9pE,EAKOyhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQtJ,UAARsJ,CAAmB4pD,CAAnB5pD,EAAuB6pD,CAAvB7pD,CAAA;KADfsJ,IAC4CsgD,OAAIC,OADhDvgD,CAAP;EAiBF,oBAAA,CACIzjB,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,WAAxBA,EAAqC,MAArCA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,WAAxBA,EAAqC,MAArCA,CADX,CAOA,OALAjpB,OACiB,WAAb+hE,EAAGx/D,KAAU,IAAuB,WAAby/D,EAAGz/D,KAD9BvC,EAEI,mCAFJA,GAGA8pE,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CAHA9pE,EAKOyhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQrJ,SAARqJ,CAAkB4pD,CAAlB5pD,EAAsB6pD,CAAtB7pD,CAAA;KAAhCsJ,IAA4DsgD,OAAIC,OAAhEvgD,CAAP;EAkBF,qBAAA,CACIzjB,CADJ,EAC0BsB,CAD1B;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,YAAxBA,EAAsC,MAAtCA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,YAAxBA,EAAsC,MAAtCA,CADX,CAQA,OANAjpB,OACiB,WAAb+hE,EAAGx/D,KAAU,IAAuB,WAAby/D,EAAGz/D,KAD9BvC,EAEI,mCAFJA,GAGA8pE,2BAA2B/H,EAAGxhE,KAA9BupE,EAAqC9H,EAAGzhE,KAAxCupE,CAHA9pE,EAMO8O,UAAU9Q,CAAV8Q,EAAaxP,CAAbwP,EAAgBD,UAAhBC,CAA2BD,WAAW7Q,CAAX6Q,EAAcvP,CAAduP,EAAiBE,UAAjBF,EAA3BC,CAAP;EAsBF,gBAAA,CACIG,CADJ,EACkCjR,CADlC,EACmDsB,CADnD;EAEE,MAAMyiE,IAAK94C,gBAAgBjrB,CAAhBirB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX;EAAA,MACM+4C,IAAK/4C,gBAAgB3pB,CAAhB2pB,EAAmB,GAAnBA,EAAwB,OAAxBA,CADX;EAAA,MAEM2jD,IAAa3jD,gBAAgBha,CAAhBga,EAA2B,WAA3BA,EAAwC,OAAxCA,EAAiD,MAAjDA,CAFnB,CAIAjpB,OAA4B,WAArB4sE,EAAWrqE,KAAlBvC,EAAoC,uCAApCA,GACA2qB,kBAAkBo3C,EAAGxhE,KAArBoqB,EAA4Bq3C,EAAGzhE,KAA/BoqB,EAAsC,kBAAtCA,CADA3qB,EAGwB,MAApB4sE,EAAWlpE,IAAS,GAGtB1D,OACI4sE,EAAWrsE,KAAXqsE,CAAiB,CAAjBA,MAAwB7K,EAAGxhE,KAAHwhE,CAAS,CAATA,CAD5B/hE,EAEI,gEAFJA,CAHsB,GAQtB2qB,kBAAkBiiD,EAAWrsE,KAA7BoqB,EAAoCq3C,EAAGzhE,KAAvCoqB,EAA8C,kBAA9CA,CAXF3qB,CAsBA,OAAOyhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ00D,MAAR10D,CAAey0D,CAAfz0D,EAA2B4pD,CAA3B5pD,EAA+B6pD,CAA/B7pD,CAAA;KADfsJ,IAEKmrD,eAAY7K,OAAIC,OAFrBvgD,EANM,UAAC1K,CAAD;EAAW,aACtB61D,YAAY;EAAM,eAAAv8D,UAAUu8D,CAAVv8D,CAAA;WAClB0xD,IAAI;EAAM,eAAAhrD,EAAG7J,GAAH6J,CAAO61D,EAAW/iE,IAAX+iE,CAAgB7K,EAAGx/D,KAAnBqqE,CAAP71D,CAAA;WACVirD,IAAI;EAAM,eAAAjrD,EAAG7J,GAAH6J,CAAO61D,EAAW79D,UAAX69D,GAAwB/iE,IAAxB+iE,CAA6B5K,EAAGz/D,KAAhCqqE,CAAP71D,CAAA;WAHY;KAMjB0K,CAAP;EAuBF,qBAAA,CAA2BxS,CAA3B;;;;EAGe,iBADbjP,OAA4B,YADtB4sE,IAAa3jD,gBAAgBha,CAAhBga,EAA2B,WAA3BA,EAAwC,OAAxCA,EAAiD,MAAjDA,GACD1mB,KAAlBvC,EAAoC,iCAApCA,OACmB4sE,EAAWtjE,IAAXsjE,GAAN;EAKb,iBALMhqE,IAAO6a,MAAAA,EAAP7a,EACA2W,IAAMu6C,UAAU8Y,EAAWrsE,KAArBuzD,EAA4BlxD,CAA5BkxD,CADNlxD,EAEFqM,MAAc29D,CAAd39D,IACF29D,EAAWt1D,OAAXs1D,EAHIhqE,MAKC2W,EAAP;;;EAGF,KAAa1K,aAAaib,KAAIgjD,0BAAJhjD,CAA1B;EAAA,IACa/a,aAAa+a,KAAIijD,0BAAJjjD,CAD1B;EAAA,IAEahb,YAAYgb,KAAIkjD,wBAAJljD,CAFzB;EAAA,IAGa9a,aAAa8a,KAAImjD,0BAAJnjD,CAH1B;EAAA,IAIa5a,QAAQ4a,KAAIojD,gBAAJpjD,CAJrB;EAAA,IAKaqjD,aAAaC,WAL1B,CCxKA,cAAA,CAAiCpuE,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAEA,IAAiB,WAAb8B,EAAGxoB,KAAP,EACE,OAAOwoB,EAAGw9C,KAAHx9C,EAAP,CAMF,OAAOtJ,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQtI,IAARsI,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EAJM,UAAC1K,CAAD;EACX,QAAMs2D,IAAUtiD,EAAG5Z,IAAH4Z,EAAhB,CACA,SAAQA,IAAI;EAAM,eAAAhU,EAAG5J,SAAH4J,CAAas2D,EAAQ3jD,OAAR2jD,EAAbt2
D,CAAA;WAAlB;KAEK0K,CAAP;EAcF,cAAA,CAAgCziB,CAAhC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,KAAxBA,CAAX,CASA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQrI,GAARqI,CAAY4S,CAAZ5S,CAALikD,CAAA;KADhB36C,IACwCsJ,OADxCtJ,EAPM,UAAC1K,CAAD,EAAQ2C,CAAR;EACJ,QAAAhE,QAAA,CACP,SACEqV,IAAI;EACA,eAAAtJ,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,iBAAAA,EAAQm1D,MAARn1D,CAAepB,CAAfoB,EAAmBzC,CAAnByC,CAAA;WAAhCsJ,IAAwD1K,OAAIrB,MAA5D+L,CAAA;WAFN;KAKKA,CAAP;EAiBF,eAAA,CAAiCziB,CAAjC;EACE,MAAM+rB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CAiBA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQpI,IAARoI,CAAa4S,CAAb5S,CAAA;KAAhCsJ,IAAmDsJ,OAAnDtJ,EAfM,UAAC1K,CAAD;EACX,aACEgU,IAAI;EACF,YAAMo4B,IAAOp4B,EAAGtc,OAAHsc,CAAWoD,OAAO,CAAPA,CAAXpD,CAAb;EAAA,YAEMwiD,IAAap/C,OAAO0uB,eAAP1uB,CAFnB;EAAA,YAGMhiB,IAAQgiB,OAAO2uB,UAAP3uB,CAHd;EAAA,YAKMq/C,IAAqBz2D,EAAG7J,GAAH6J,CAAO5K,CAAP4K,CAL3B;EAAA,YAMM02D,IAAmB12D,EAAG7J,GAAH6J,CAAOw2D,CAAPx2D,EAAmB7J,GAAnB6J,CAAuBgU,EAAGrB,OAAHqB,GAAajqB,GAAbiqB,EAAvBhU,CANzB,CAQA,OAAO7H,MAAMi0C,CAANj0C,EAAYs+D,CAAZt+D,EAAgCu+D,CAAhCv+D,CAAP;WAVJ;KAcKuS,CAAP;EAmBF,oBAAA,CAAsCziB,CAAtC,EAAuDhD,CAAvD;qBAAuDA,QACrD,IAAM+uB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,WAAxBA,CAAX,CACA,OAAOxb,QAAQ0gB,OAAOnyB,CAAPmyB,EAAcjhB,GAAdihB,CAAkBpD,CAAlBoD,CAAR1gB,EAA+Bsd,CAA/Btd,CAAP;EAkBF,gBAAA,CAAkCzO,CAAlC,EAAmDhD,CAAnD;EACE,MAAM+uB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,OAAxBA,CAAX;EAAA,MACMykD,IAASzkD,gBAAgBjtB,CAAhBitB,EAAuB,OAAvBA,EAAgC,OAAhCA,CADf;EAAA,MAGMiF,IAAOC,OAAO,CAAPA,CAHb,CAIA,OAAO1gB,QAAQygB,CAARzgB,EAAcsd,CAAdtd,EAAkBd,GAAlBc,CAAsBigE,EAAOxgE,GAAPwgE,CAAWngE,QAAQ2gB,CAAR3gB,EAAcwd,CAAdxd,CAAXmgE,CAAtBjgE,CAAP;EAGF,KAAaqC,MAAMga,KAAI6jD,YAAJ7jD,CAAnB;EAAA,IACa9Z,YAAY8Z,KAAI8jD,wBAAJ9jD,CADzB;EAAA,IAEa7Z,QAAQ6Z,KAAI+jD,gBAAJ/jD,CAFrB;EAAA,IAGaja,OAAOia,KAAIgkD,cAAJhkD,CAHpB;EAAA,IAIa/Z,OAAO+Z,KAAIikD,cAAJjkD,CAJpB,CChHA,mBAAA,CAAsC9qB,CAAtC,EAAuD+O,CAAvD;EACE,MAAMgd,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,WAAxBA,CAAX,CAgBA,IAdY,QAARlb,CAAQ,KACVA,IAAOgd,EAAGxqB,KAAHwqB,CAAS3kB,GAAT2kB,CAAa,UAAC/J,CAAD,EAAI3hB,CAAJ;EAAU,WAAAA,CAAA;KAAvB0rB,EAA0BvgB,OAA1BugB,EADG,GAGZ7iB,OACI6iB,EAAGrnB,IAAHqnB,KAAYhd,EAAKrP,MADrBwJ,EAEI,uCAAqC6iB,EAAGrnB,IAAxC,gCAAA,GACiCqK,CADjC,MAFJ7F,CAHY,EAOZ6F,EAAKrJ,OAALqJ,CAAa,UAAA3L,CAAA;EACX8F,WACI9F,KAAQ,CAARA,IAAaA,IAAO2oB,EAAGrnB,IAD3BwE,EAEI,kDAA+C6iB,EAAGrnB,IAAHqnB,GAAU,CAAzD,IACI,WADJ,GACgBhd,CAHpB7F;KADF6F,CAPY,EAcRgd,EAAGrnB,IAAHqnB,IAAW,CAAf,EACE,OAAOA,EAAGpgB,KAAHogB,EAAP,CAOF,OAAOtJ,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQnK,SAARmK,CAAkB4S,CAAlB5S,EAAsBpK,CAAtBoK,CAAA;KADRsJ,IACsCsJ,OADtCtJ,EAJK,UAAC1K,CAAD;EACV,QAAMi3D,IAAWjb,uBAAiChlD,CAAjCglD,CAAjB,CACA,SAAQhoC,IAAI;EAAM,eAAAhU,EAAG/I,SAAH+I,CAAai3D,CAAbj3D,CAAA;WAAlB;KAEK0K,CAAP;EAIF,KAAazT,YAAY8b,KAAImkD,wBAAJnkD,CAAzB,CClCA,oCAAA,CACI9qB,CADJ,EACqB80C,CADrB,EACsCjhC,CADtC,EACgD7W,CADhD,EAC2D8W,CAD3D;qBACqBghC,yBAAiBjhC,yBAAU7W,yBAAW8W,QACzD,IAAMiY,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,4BAAxBA,CAAX,CACA/gB,OACgB,MAAZ6iB,EAAGrnB,IAAS,IAAiB,MAAZqnB,EAAGrnB,IADxBwE,EAEI,6FACgB6iB,EAAGrnB,IADnB,MAFJwE,GAIAA,OACI+d,MAAW6tB,CAAX7tB,CADJ/d,EAEI,mHACqC4rC,CADrC,MAFJ5rC,CAJAA,CASA,IAAI42D,IAAM/zC,CAAV;EAAA,MACI44C,KAAe,CADnB,CAEgB,MAAZ54C,EAAGrnB,IAAS,KACdigE,KAAe,CAAfA,EACA7E,IAAM/zC,EAAGg0C,IAAHh0C,CAAQ,CAARA,EAAWA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAXA,EAAwBA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAAxBA,EAAqCA,EAAGxqB,KAAHwqB,CAAS,CAATA,CAArCA,CAFQ,EAIhB,IASMxR,IAAMkI,IAAIE,MAAJF,CAAW4I,
SAAX5I,CACR,UAACtJ,CAAD,EAAUikD,CAAV;EAAmB,WAAAA,EAAKjkD,EAAQ+1D,4BAAR/1D,CACpB2mD,CADoB3mD,EACf27B,CADe37B,EACFtF,CADEsF,EACInc,CADJmc,EACWrF,CADXqF,CAALikD,CAAA;KADX36C,IAGPq9C,QAHOr9C,EATK,UAAC1K,CAAD,EAAe2C,CAAf;EACR,QAAAiT,QAAA,CACP,SACEmyC,KAAK;EAAM,eAAAr9C,IAAIE,MAAJF,CAAW4I,SAAX5I,CACP,UAAAtJ,CAAA;EAAW,iBAAAA,EAAQg2D,OAARh2D,CACPpB,CADOoB,EACH2mD,CADG3mD,EACEwU,CADFxU,EAC2B27B,CAD3B37B,EACwCtF,CADxCsF,EAC8Cnc,CAD9Cmc,EACqDrF,CADrDqF,CAAA;WADJsJ,IAAAA,CAAA;WADb;KAOUA,CATZ,CAaA,OAAIkiD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGKpqD,CAHT;EAOF,KAAaxG,6BAA6B+W,KAAIskD,0DAAJtkD,CAA1C,CCfA,cAAA,CACI9qB,CADJ,EAC0BqM,CAD1B,EAEIjJ,CAFJ,EAEkCkJ,CAFlC;qBAC0BD,mCACtBjJ,4BAA8BkJ,QAGhC,IAAMC,IAAO8iE,SAFbrvE,IAAIiqB,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAESolD,EAAYhjE,CAAZgjE,EAAiBjsE,CAAjBisE,CAAb;EAAA,MACIrF,IAAgBz9D,EAAKhL,KADzB,CAEA,IAAI+K,CAAJ,EAAc;EACZ,QAAMga,IAAO+iD,eAAyBjmE,CAAzBimE,EAA+BrpE,EAAEuB,KAAjC8nE,CAAb,CACAW,IAAgBV,qBAA+B/8D,EAAKhL,KAApC+nE,EAA2ChjD,CAA3CgjD,CAAhBU;EAEF,UAAOz9D,EAAK9B,OAAL8B,CAAay9D,CAAbz9D,CAAP;EAGF,kBAAA,CACIvM,CADJ,EACe+2C,CADf,EACiC3zC,CADjC;EAEE,uBAD+BA,WAChB,MAAXpD,EAAE0E,IAAN,EACE,OAAO1E,EAAE2Q,GAAF3Q,EAAP,CAIF,IAAe,MAAXA,EAAE0E,IAAS,IAAc,SAATtB,CAApB,EACE,OAAOisE,SAASrvE,EAAEyK,OAAFzK,GAAY,EAAZA,CAATqvE,EAA0Bt4B,CAA1Bs4B,EAA6BjsE,CAA7BisE,CAAP,CAIF,IAAe,MAAXrvE,EAAE0E,IAAS,IAAqB,mBAATtB,CAAZ,IACXA,aAAgBjC,KAAhBiC,IAAyC,MAAhBA,EAAK1D,MADlC,EACgD;EAC9C,QAAU,MAANq3C,CAAJ,EACE,OAAO/2C,EAAE2Q,GAAF3Q,GAAQI,GAARJ,CAAYoD,CAAZpD,CAAP,CAEF,IAAI+2C,MAAMn1C,IAAAA,CAAV,EACE,OAAO5B,EAAE2Q,GAAF3Q,GAAQC,GAARD,CAAYoD,CAAZpD,CAAP,CAEF,IAAI+2C,OAAOn1C,IAAAA,CAAX,EACE,OAAO5B,EAAE2Q,GAAF3Q,GAAQD,GAARC,CAAYoD,CAAZpD,CAAP,CAEF,IAAU,gBAAN+2C,CAAM,IAAqB,MAANA,CAAzB,EAEE,OAAO/2C,EAAE2Q,GAAF3Q,GAAQgO,GAARhO,CAAYmvB,OAAO,CAAPA,EAAU,OAAVA,CAAZnvB,EAAgCI,GAAhCJ,CAAoCoD,CAApCpD,EAA0CgC,IAA1ChC,EAAP,CAGF,MAAM,IAAInB,KAAJ,CAAU,uCAAqCk4C,CAA/C,CAAN;EAIF,OAAI3zC,aAAgBjC,KAAhBiC,IAAyC,MAAhBA,EAAK1D,MAAlC,EAAgD;EAC9C,QAAU,MAANq3C,CAAJ,EACE,OAAO/2C,EAAE2Q,GAAF3Q,GAAQI,GAARJ,CAAYoD,EAAK,CAALA,CAAZpD,EAAqBC,GAArBD,CAAyBoD,EAAK,CAALA,IAAU,CAAnCpD,CAAP,CAEF,IAAI+2C,MAAMn1C,IAAAA,CAAV,EACE,OAAO5B,EAAE2Q,GAAF3Q,GAAQI,GAARJ,CAAYoD,EAAK,CAALA,CAAZpD,EAAqBC,GAArBD,CAAyBoD,EAAK,CAALA,CAAzBpD,CAAP,CAEF,IAAI+2C,OAAOn1C,IAAAA,CAAX,EACE,OAAO5B,EAAE2Q,GAAF3Q,GAAQI,GAARJ,CAAYoD,EAAK,CAALA,CAAZpD,EAAqBD,GAArBC,CAAyBoD,EAAK,CAALA,CAAzBpD,CAAP,CAEF,IAAU,UAAN+2C,CAAM,IAAe,gBAANA,CAAnB,EAEE,OAAO/2C,EAAEyQ,MAAFzQ,GAAWI,GAAXJ,CAAeoD,CAAfpD,EAAqBgC,IAArBhC,EAAP,CAGF,MAAM,IAAInB,KAAJ,CAAU,uCAAqCk4C,CAA/C,CAAN;EAGF,SAAM,IAAIl4C,KAAJ,CAAU,kCAAgCuE,CAA1C,CAAN;EAGF,KAAamJ,OAAOue,KAAIwkD,cAAJxkD,CAApB,CCjFA,4BAAA,CACI9qB,CADJ,EACqBwU,CADrB,EACsDC,CADtD;EAEE,MAAMsX,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,oBAAxBA,CAAX;EAAA,MACMslD,IACFtlD,gBAAgBzV,CAAhByV,EAA4B,YAA5BA,EAA0C,oBAA1CA,EAAgE,OAAhEA,CAFJ,CAGAjpB,OAA6B,YAAtBuuE,EAAYhsE,KAAnBvC,EAAsC,qCAAtCA,GACAA,OAAOwuE,MAAM/6D,CAAN+6D,CAAPxuE,EAA2B,kCAA3BA,CADAA,CASA,OAAOyhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EACI,WAAAA,EAAQzE,kBAARyE,CAA2B4S,CAA3B5S,EAA+Bo2D,CAA/Bp2D,EAA4C1E,CAA5C0E,CAAA;KAFRsJ,IAGKsJ,OAHLtJ,EANU,UAAC1K,CAAD;EAIf,aAAQgU,IAHK;EACX,eAAO0jD,oBAAoB13D,CAApB03D,EAAwBF,CAAxBE,CAAP;WAEF;KAEKhtD,CAAP;EA2BF,iBAAA,CACIziB,CADJ,EACqB+L,CADrB,EACmD3I,CADnD;qBACmDA,OACjD,IAAM2oB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,QAAxBA,CAAX;EAAA,MACM49B,IAAW59B,gBAAgBle,CAAhBke,EAAyB,SAAzBA,EAAoC,QAApCA,EAA8C,OAA9CA,CADjB,CAGAjpB,OAA0B,YAAnB6mD,EAAStkD,
KAAhBvC,EAAmC,kCAAnCA,GACAoC,IAAOy9C,eAAez9C,CAAfy9C,EAAqB90B,EAAGxqB,KAAxBs/C,EAA+B,CAA/BA,CADP7/C,CAsCA,OAAOyhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQnN,MAARmN,CAAe4S,CAAf5S,EAAmB0uC,CAAnB1uC,EAAyC/V,CAAzC+V,CAAA;KADfsJ,IACgEsJ,OADhEtJ,EApCM,UAAC1K,CAAD;EAkCX,aAAQgU,IAjCK;EACX,YAAa,MAAT3oB,CAAJ,EACE,OAAOsR,mBAAmBqD,CAAnBrD,EAAuBmzC,CAAvBnzC,EAAiCqX,EAAGxqB,KAAHwqB,CAAS3oB,CAAT2oB,CAAjCrX,CAAP,CAEF,IAAMg7D,IAAc3jD,EAAGxqB,KAAvB;EAAA,YACMouE,IAAc9nB,EAASrmD,IAD7B;EAAA,YAGMouE,IAAaF,EAAYvsE,KAAZusE,CAAkB,CAAlBA,EAAqBtsE,CAArBssE,CAHnB;EAAA,YAIMG,IAAYD,EAAWlwE,MAJ7B;EAAA,YAKMowE,IAAaJ,EAAYvsE,KAAZusE,CAAkBtsE,CAAlBssE,EAAwBA,EAAYhwE,MAApCgwE,EAA4CvsE,KAA5CusE,CAAkD,CAAlDA,CALnB;EAAA,YAMM74B,IAAYi5B,EAAWpwE,MAN7B;EAAA,YAQMqwE,IAAmBC,WAAW,CAAXA,EAAcH,CAAdG,CARzB;EAAA,YASMC,IACFD,WAAWH,IAAY,CAAvBG,EAA0BH,IAAY,CAAZA,GAAgBh5B,CAA1Cm5B,CAVJ;EAAA,YAYME,IAAcC,aAAaP,IAAaD,IAAcG,EAAxCK,CAZpB;EAAA,YAcM3sE,IAASuU,EAAGtN,OAAHsN,CAAWm4D,CAAXn4D,CAdf;EAAA,YAeMq4D,IAAkBvoB,EAASp9C,OAATo9C,EAAkB8nB,EAAlB9nB,CAfxB;EAAA,YAiBMwoB,IACFF,cAAcN,IAAYE,GAAkBE,EAA5CE,CAlBJ;EAAA,YAmBMG,IAAkB9sE,EAAOwL,SAAPxL,CAAiB6sE,CAAjB7sE,CAnBxB;EAAA,YAqBI+sE,IAAa77D,mBACb47D,CADa57D,EACI07D,CADJ17D,EACiCqX,EAAGxqB,KAAHwqB,CAAS3oB,CAAT2oB,CADjCrX,CArBjB;EAAA,YAwBM87D,IAAsBC,uBAAuBJ,CAAvBI,CAxB5B,CA2BA,OAFAF,IAAaA,EAAWvhE,SAAXuhE,CAAqBC,CAArBD,CAEb;WAEF;KAEK9tD,CAAP;EAKF,oBAAA,CAAoBhe,CAApB,EAAmC8kB,CAAnC;EAEE,OADA,IAAM/oB,MAAN,EACSH,IAAIoE,CAAb,EAAoBpE,IAAIkpB,CAAxB,IAAgClpB,CAAhC,EACEG,EAAOc,IAAPd,CAAYH,CAAZG,EAEF,OAAOA,CAAP;EAGF,qBAAA,CAAqBkwE,CAArB;EAEE,OADA,IAAMlwE,MAAN,EACSH,IAAI,CAAb,EAAgBA,IAAIqwE,EAAOhxE,MAA3B,IAAqCW,CAArC,EACE,KAAK,IAAIiD,IAAI,CAAb,EAAgBA,IAAIotE,EAAOrwE,CAAPqwE,EAAUhxE,MAA9B,IAAwC4D,CAAxC,EACE9C,EAAOc,IAAPd,CAAYkwE,EAAOrwE,CAAPqwE,EAAUptE,CAAVotE,CAAZlwE,EAGJ,OAAOA,CAAP;EAGF,6BAAA,CAA+CR,CAA/C,EAAqD+L,CAArD;EAQE,OAJA,IAAM4kE,IAAqBliE,QAAQ1C,CAAR0C,EAAiB4C,UAAUtF,CAAVsF,CAAjB5C,CAA3B,EACMmiE,IAAW5kE,OAAOhM,CAAPgM,EAAU2kE,CAAV3kE,CADjB,EAEI6kE,IAAalhE,aAAa5D,CAAb4D,EAAsBwf,OAAO,CAAPA,EAAU,OAAVA,CAAtBxf,CAFjB,EAGMmhE,IAAWF,EAASlsE,IAATksE,GAAgBC,EAAWnsE,IAH5C,EAISrE,IAAI,CAAb,EAAgBA,IAAIywE,CAApB,IAAgCzwE,CAAhC,EACEwwE,IAAavlE,WAAWulE,CAAXvlE,EAAuBjL,IAAI,CAA3BiL,CAAbulE,CAEFA,IAAahhE,WAAWghE,CAAXhhE,EAAuBsO,OAAKyyD,EAASrvE,KAAd4c,EAAqB,MAArBA,CAAvBtO,CAAbghE,CACA,IAAME,IAAY1/D,UAAUu/D,CAAVv/D,CAAlB,CACA,OAAOnB,MAAM2gE,CAAN3gE,EAAkB0gE,CAAlB1gE,EAA4B6gE,CAA5B7gE,CAAP;EAGF,KAAalE,SAAS8e,KAAIkmD,kBAAJlmD,CAAtB;EAAA,IACapW,qBAAqBoW,KAAImmD,0CAAJnmD,CADlC,CC9HA,sBAAA,CACIomD,CADJ,EAC+B5mE,CAD/B,EAEImlB,CAFJ,EAGIib,CAHJ;EAUE,OANA,IAAMymC,IAAQlnD,gBAAgB3f,CAAhB2f,EAAsB,MAAtBA,EAA8B,cAA9BA,CAAd,EACMmnD,IAAKxwB,qBAAqBnxB,CAArBmxB,EAAwB,GAAxBA,EAA6B,cAA7BA,CADX,EAEMywB,IAAKzwB,qBAAqBlW,CAArBkW,EAAwB,GAAxBA,EAA6B,cAA7BA,CAFX,EAII3pC,IAAQk6D,CAJZ,EAKMG,MALN,EAMSjxE,IAAI,CAAb,EAAgBA,IAAI6wE,EAAUxxE,MAA9B,EAAsCW,GAAtC,EAA2C;EACzC,QAAM+W,IAAS85D,EAAU7wE,CAAV6wE,EAAaj6D,CAAbi6D,EAAoBE,EAAG/wE,CAAH+wE,CAApBF,EAA2BG,EAAGhxE,CAAHgxE,CAA3BH,CAAf,CACAI,EAAUhwE,IAAVgwE,CAAel6D,EAAO,CAAPA,CAAfk6D,GACAA,EAAUhwE,IAAVgwE,CAAel6D,EAAO,CAAPA,CAAfk6D,CADAA,EAEAr6D,IAAQG,EAAO,CAAPA,CAFRk6D;EAIF,OAAMC,MAAN;EAAA,MACMC,MADN,CAEA,KAASnxE,IAAI,CAAb,EAAgBA,IAAIixE,EAAU5xE,MAA9B,EAAsCW,KAAK,CAA3C,EACEkxE,EAAKjwE,IAALiwE,CAAUD,EAAUjxE,CAAVixE,CAAVC,GACAC,EAAKlwE,IAALkwE,CAAUF,EAAUjxE,IAAI,CAAdixE,CAAVE,CADAD,CAGF,QAAQA,GAAMC,EAAd;EAkBF,wBAAA,CACIC,CADJ,EACmCC,CADnC,EAEIC,CAFJ,EAEmCrnE,CAFnC,EAGImlB,CAHJ,EAG4Bib,CAH5B;EAIE,MAAMknC,IACF3nD,gBAAgBwnD,CAAhBxnD,EAA4B,YAA5BA,EAA0C,eAA1CA,CADJ;EAAA,MAEM4nD,IACF5nD,gBAAgBynD,CAAhBzn
D,EAA4B,YAA5BA,EAA0C,eAA1CA,CAHJ;EAAA,MAIM6nD,IAAY7nD,gBAAgB0nD,CAAhB1nD,EAA0B,UAA1BA,EAAsC,eAAtCA,CAJlB;EAAA,MAKMknD,IAAQlnD,gBAAgB3f,CAAhB2f,EAAsB,MAAtBA,EAA8B,eAA9BA,CALd;EAAA,MAMMmnD,IAAKnnD,gBAAgBwF,CAAhBxF,EAAmB,GAAnBA,EAAwB,eAAxBA,CANX;EAAA,MAOMonD,IAAKpnD,gBAAgBygB,CAAhBzgB,EAAmB,GAAnBA,EAAwB,eAAxBA,CAPX;EAAA,MAWM1P,IAFW42D,EAAM1kE,MAAN0kE,CAAaE,CAAbF,EAAiB,CAAjBA,EACShlE,MADTglE,CACgBU,CADhBV,EAEIxjE,GAFJwjE,CAEQW,CAFRX,CATjB;EAAA,MAcMt/C,IAAYtX,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAdlB;EAAA,MAeMw3D,IAAYx3D,EAAIhZ,KAAJgZ,CAAU,CAAVA,IAAe,CAfjC;EAAA,MAgBM8L,KAA+BwL,GAAWkgD,EAhBhD;EAAA,MAiBM1xE,IAAIka,EAAIpX,KAAJoX,EAAW,GAAG,EAAdA,EAAkB8L,CAAlB9L,CAjBV;EAAA,MAkBMjX,IAAIiX,EAAIpX,KAAJoX,EAAW,GAAGw3D,EAAdx3D,EAA0B8L,CAA1B9L,CAlBV;EAAA,MAmBMlW,IAAIkW,EAAIpX,KAAJoX,EAAW,GAAe,IAAZw3D,EAAdx3D,EAA8B8L,CAA9B9L,CAnBV;EAAA,MAoBM1C,IAAI0C,EAAIpX,KAAJoX,EAAW,GAAe,IAAZw3D,EAAdx3D,EAA8B8L,CAA9B9L,CApBV;EAAA,MAsBMg3D,IAAOlxE,EAAE6Q,OAAF7Q,GAAY8N,SAAZ9N,CAAsBiD,EAAE3B,IAAF2B,EAAtBjD,EAAgCuN,SAAhCvN,CACT+wE,EAAGjjE,SAAHijE,CAAaQ,EAAYjkE,GAAZikE,CAAgBvtE,CAAhButE,EAAmB1gE,OAAnB0gE,EAAbR,CADS/wE,CAtBb,CAyBA,QAAQkxE,GADKA,EAAK5vE,IAAL4vE,GAAYpjE,SAAZojE,CAAsB15D,EAAE3G,OAAF2G,EAAtB05D,EACb;EAGF,KAAaS,gBAAgBlnD,KAAImnD,gCAAJnnD,CAA7B;EAAA,IACaonD,eAAepnD,KAAIqnD,8BAAJrnD,CAD5B,CC/DA,uBAAA,CACIxO,CADJ,EACqBtc,CADrB,EACsCoyE,CADtC,EAEIjgE,CAFJ,EAE0BkgE,CAF1B;qBAE0BA,QACxB,IAAMC,IAAKroD,gBAAgB3N,CAAhB2N,EAAmB,GAAnBA,EAAwB,eAAxBA,CAAX;EAAA,MACM8B,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,eAAxBA,CADX;EAAA,MAEMsoD,IAAStoD,gBAAgBmoD,CAAhBnoD,EAAuB,OAAvBA,EAAgC,eAAhCA,CAFf,CAIA4gD,iBAAiByH,CAAjBzH,EAAqB9+C,CAArB8+C,GACA3hE,OACImN,YAAiBi8D,EAAG/wE,KAApB8U,EAA2B0V,EAAGxqB,KAA9B8U,CADJnN,EAC0C,2BAD1CA,CADA2hE,CAIA,IAAM2H,IAAMrjD,OAAO,CAAPA,CAAZ;EAAA,MACMsjD,IAAgBD,EAAI1kE,GAAJ0kE,CAAQD,CAARC,CADtB;EAAA,MAGIE,IAAS3mD,EAAGje,GAAHie,CAAOumD,CAAPvmD,EAAW7d,GAAX6d,CAAe0mD,CAAf1mD,CAHb,CAIA,IAAIsmD,CAAJ,EAAgB;EACdnpE,WAAoB,QAARiJ,CAAZjJ,EAA0B,gDAA1BA,EACA,IAAMypE,IAAQ1oD,gBAAgB9X,CAAhB8X,EAAsB,MAAtBA,EAA8B,eAA9BA,CAAd,CACAyoD,IAASA,EAAOtkE,GAAPskE,CAAWF,EAAI1kE,GAAJ0kE,CAAQxkE,IAAIukE,CAAJvkE,EAAY2kE,CAAZ3kE,CAARwkE,CAAXE,CAATA;EAEF,UAAOJ,EAAG3kE,GAAH2kE,CAAOI,CAAPJ,CAAP;EAGF,KAAaM,gBAAgB9nD,KAAI+nD,gCAAJ/nD,CAA7B,CCrBA,sBAAA,CACI9qB,CADJ,EACqBwM,CADrB,EACsC3D,CADtC,EACqDlE,CADrD,EAEIuQ,CAFJ,EAEmBC,CAFnB,EAEgCqT,CAFhC,EAEkDC,CAFlD,EAGIC,CAHJ;EAIE,uBAFExT,yBAAeC,yBAAaqT,yBAAkBC,yBAC9CC,QACmB,MAAjBF,CAAJ,EACE,MAAM,IAAI3pB,KAAJ,CAAU,oCAAV,CAAN,CAEF,IAAoB,MAAhB4pB,CAAJ,EACE,MAAM,IAAI5pB,KAAJ,CAAU,oCAAV,CAAN,CAEF,IAAMktB,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,cAAxBA,CAAX,CACA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ/D,YAAR+D,CACP4S,CADO5S,EACH3M,CADG2M,EACItQ,CADJsQ,EACSxU,CADTwU,EACkBjE,CADlBiE,EAC6BhE,CAD7BgE,EACsCqP,CADtCrP,EAEPsP,CAFOtP,EAEMuP,CAFNvP,CAAA;KADfsJ,IAIKsJ,OAJLtJ,CAAP;EAOF,KAAarN,eAAe0V,KAAIgoD,8BAAJhoD,CAA5B,CC3BA,cAAA,CACI9qB,CADJ,EACqB+U,CADrB,EAC4BC,CAD5B;qBACqBD,yBAAOC,QAC1B,IAAM+W,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,MAAxBA,CAAX,CACA,IAAgB,MAAZ8B,EAAGrnB,IAAP,EACE,MAAM,IAAI7F,KAAJ,CAAU,oDAAV,CAAN,CAEF,IAAMqyB,IAAUnF,EAAGxqB,KAAHwqB,CAASA,EAAGxqB,KAAHwqB,CAASrsB,MAATqsB,GAAkB,CAA3BA,CAAhB,CACA,IAAIhX,IAAImc,CAAR,EACE,MAAM,IAAIryB,KAAJ,CACF,yDAAuDqyB,CAAvD,eAAA,GACWnc,CAFT,CAAN,CAKI,IAAA0J;;iBAAA,CAEN,SAAQjb,cAAQuI,eAAhB;EAGF,KAAakJ,OAAO6V,KAAIioD,cAAJjoD,CAApB,CCrBA,mBAAA,CACI/e,CADJ,EACgC8b,CADhC,EAEItmB,CAFJ;EAGE,MAAMsmD,IAAW59B,gBAAgBle,CAAhBke,EAAyB,SAAzBA,EAAoC,WAApCA,EAAiD,OAAjDA,CAAjB;EAAA,MACM+oD,IAAW/oD,gBAAgBpC,CAAhBoC,EAAyB,SAAzBA,EAAoC,WAApCA,CADjB,CAIA,OAFAgpD,c
AA8BD,CAA9BC,EAAwCprB,CAAxCorB,EAAkD1xE,CAAlD0xE,GAEOxwD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ+5D,SAAR/5D,CAAkB0uC,CAAlB1uC,EAA4B65D,CAA5B75D,EAAsC5X,CAAtC4X,CAAA;KADfsJ,IAEKolC,aAAUmrB,aAFfvwD,CAAP;EAKF,KAAaywD,YAAYpoD,KAAIqoD,wBAAJroD,CAAzB,CCdA,aAAA,CAAc7T,CAAd;EACEjW,SAAuB,gBAAhBiW,EAAM1T,KAAbvC,EACO,+DACeiW,EAAM1T,KADrB,MADPvC,EAKA,IAAMoyE,IAAqBn8D,EAAM1V,KAAN0V,CAAYA,EAAM1V,KAAN0V,CAAYvX,MAAZuX,GAAqB,CAAjCA,CAA3B;EAAA,MACMka,IAAQla,EAAMzV,IAANyV,GAAam8D,CAD3B;EAAA,MAEMC,IAAUp8D,EAAMkwC,IAANlwC,CAAWka,CAAXla,EAAkBm8D,CAAlBn8D,CAFhB,CAMA,OAFYwL,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ3D,GAAR2D,CAAYk6D,CAAZl6D,CAAA;KAAhCsJ,IAAuDxL,UAAvDwL,EAEDhY,OAFCgY,CAEOxL,EAAM1V,KAFbkhB,CAEZ;EAqBF,eAAA,CAAexL,CAAf;EACEjW,SAAuB,gBAAhBiW,EAAM1T,KAAbvC,EACO,gEACeiW,EAAM1T,KADrB,MADPvC,EAKA,IAAMoyE,IAAqBn8D,EAAM1V,KAAN0V,CAAYA,EAAM1V,KAAN0V,CAAYvX,MAAZuX,GAAqB,CAAjCA,CAA3B;EAAA,MACMka,IAAQla,EAAMzV,IAANyV,GAAam8D,CAD3B;EAAA,MAEMC,IAAUp8D,EAAMkwC,IAANlwC,CAAWka,CAAXla,EAAkBm8D,CAAlBn8D,CAFhB,CAMA,OAFYwL,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ1D,IAAR0D,CAAak6D,CAAbl6D,CAAA;KAAhCsJ,IAAwDxL,UAAxDwL,EAEDhY,OAFCgY,CAEOxL,EAAM1V,KAFbkhB,CAEZ;EAmBF,eAAA,CAAexL,CAAf;EACEjW,SAAuB,cAAhBiW,EAAM1T,KAAbvC,EAAkC,0DAC1BiW,EAAM1T,KADdvC,EAGA,IAAMoyE,IAAqBn8D,EAAM1V,KAAN0V,CAAYA,EAAM1V,KAAN0V,CAAYvX,MAAZuX,GAAqB,CAAjCA,CAA3B;EAAA,MACMka,IAAQla,EAAMzV,IAANyV,GAAam8D,CAD3B;EAAA,MAIMvnD,IAAQ5U,EAAM5F,SAAN4F,EAJd;EAAA,MAKMq8D,IAAehoD,QAAQrU,CAARqU,EAAeO,CAAfP,EAAsB67B,IAAtB77B,CAA2B6F,CAA3B7F,EAAkC8nD,CAAlC9nD,CALrB;EAAA,MAOMpqB,IAAMuhB,IAAIE,MAAJF,CAAW4I,SAAX5I,CAAqB,UAAAtJ,CAAA;EAAW,WAAAA,EAAQ3D,GAAR2D,CAAYm6D,CAAZn6D,CAAA;KAAhCsJ,IACsB6wD,iBADtB7wD,CAPZ;EAAA,MAWM8wD,IAAO1zE,KAAKkC,KAALlC,CAAWuzE,IAAqB,CAAhCvzE,IAAqC,CAXlD;EAAA,MAYM2zE,IAAavoD,KAAK/pB,CAAL+pB,CAZnB;EAAA,MAaMwoD,IAAavoD,KAAKhqB,CAALgqB,CAbnB;EAAA,MAcMwoD,IAAuBF,EAAW7mE,KAAX6mE,EACvBD,GAAMH,IAAqBG,EADJC,EACYA,EAAWjyE,KAAXiyE,CAAiB9zE,MAAjB8zE,GAA0B,CADtCA,CAd7B;EAAA,MAgBMG,IAAuBF,EAAW9mE,KAAX8mE,EACvBF,GAAMH,IAAqBG,EADJE,EACYA,EAAWlyE,KAAXkyE,CAAiB/zE,MAAjB+zE,GAA0B,CADtCA,CAhB7B;EAAA,MAmBMtgE,IAAc8D,EAAM1V,KAAN0V,CAAY9T,KAAZ8T,EAnBpB,CAsBA,OAFA9D,EAAY8D,EAAM1V,KAAN0V,CAAYvX,MAAZuX,GAAqB,CAAjC9D,IAAsCogE,CAAtCpgE,EAEOmY,QAAQooD,EAAqB,CAArBA,CAARpoD,EAAiCqoD,EAAqB,CAArBA,CAAjCroD,EACF7gB,OADE6gB,CACMnY,CADNmY,CAAP;EAIF,KAAa9V,MAAMsV,KAAI8oD,YAAJ9oD,CAAnB;EAAA,IACarV,OAAOqV,KAAI+oD,cAAJ/oD,CADpB;EAAA,IAEagpD,OAAOhpD,KAAIipD,cAAJjpD,CAFpB;EAAA,sEAAA,0BCvGI6D,GAAuBC,GAAsBzb,GAC7C6gE;EACF,MAA4B,YAAxBrlD,EAAcprB,KAAlB,EACE,MAAM,IAAI1E,KAAJ,CACF,gFACsB8vB,EAAcprB,KADpC,MADE,CAAN,CAIF,IAAIorB,EAAcjqB,IAAdiqB,GAAqB,CAAzB,EACE,MAAM,IAAI9vB,KAAJ,CACF,wEACkB8vB,EAAcptB,KADhC,MADE,CAAN,CAKF,IAAM0yE,IAAWtlD,EAAcjqB,IAAdiqB,GAAqB,CAArBA,GAAyBA,EAAcptB,KAAdotB,CAAoB,CAApBA,CAAzBA,GAAkD,CAAnE;EAAA,MACMulD,IAAUvlD,EAAcjqB,IAAdiqB,GAAqB,CAArBA,GAAyBA,EAAcptB,KAAdotB,CAAoB,CAApBA,CAAzBA,GAAkD,CADlE,CAGA,IAAIxb,EAAYzT,MAAZyT,KAAuB+gE,CAA3B,EACE,MAAM,IAAIr1E,KAAJ,CACF,oDACIsU,EAAYzT,MADhB,kBAAA,GACsCw0E,CADtC,MADE,CAAN,CAKF,IAAMC,IAAYvlD,EAAaptB,IAA/B,CACA,IAA4B,MAAtBotB,EAAalqB,IAAS,KACA,MAAtBkqB,EAAalqB,IAAS,IAAKyvE,MAAcF,CADnB,CAA5B,EAEE,MAAM,IAAIp1E,KAAJ,CACF,sCACG+vB,EAAartB,KADhB,wBAAA,GAC2C0yE,CAD3C,MADE,CAAN,CAKF,IAAIrlD,EAAarrB,KAAbqrB,KAAuBolD,EAAczwE,KAAzC,EACE,MAAM,IAAI1E,KAAJ,CAAU,mDAAV,CAAN;ECJJ,wBAAA,CACI8vB,CADJ,EACsCC,CADtC,EAEIzb,CAFJ,EAE8B0b,CAF9B;EAGE,MAAMulD,IACFnqD,gBAAgB0E,CAAhB1E,EAA+B,eAA/BA,EAAgD,eAAhDA,EAAiE,OAAjEA,CADJ;EAAA,MAEMoqD,IACFpqD,gBAAgB2E,CAAhB3E,EAA8B,cAA9BA,EAA8C,eAA9CA,CAHJ;EAAA,MA
IMqqD,IAAgBrqD,gBAClB4E,CADkB5E,EACJ,cADIA,EACY,eADZA,EAC6BoqD,EAAc9wE,KAD3C0mB,CAJtB,CAUA,OAHAsqD,gBACIH,CADJG,EACoBF,CADpBE,EACmCphE,CADnCohE,EACgDD,CADhDC,GAGO9xD,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAtJ,CAAA;EAAW,WAAAA,EAAQq7D,aAARr7D,CACPi7D,CADOj7D,EACSk7D,CADTl7D,EACwBhG,CADxBgG,EACqCm7D,CADrCn7D,CAAA;KADRsJ,IAGF2xD,mBAAgBC,kBAAeC,kBAH7B7xD,CAAP;EAMF,KAAa+xD,gBAAgB1pD,KAAI2pD,gCAAJ3pD,CAA7B,CCnBA,kBAAA,CACI9qB,CADJ,EAC0B+L,CAD1B;EAEE,MAAM87C,IAAW59B,gBAAgBle,CAAhBke,EAAyB,SAAzBA,EAAoC,UAApCA,EAAgD,OAAhDA,CAAjB;EAAA,MACM8B,IAAK9B,gBAAgBjqB,CAAhBiqB,EAAmB,GAAnBA,EAAwB,UAAxBA,CADX,CAEA,OAAOxH,IAAIE,MAAJF,CAAW4I,SAAX5I,CACI,UAAAtJ,CAAA;EAAW,WAAAA,EAAQu7D,QAARv7D,CAAiB4S,CAAjB5S,EAAqB0uC,CAArB1uC,CAAA;KADfsJ,IACgDsJ,OAAI87B,aADpDplC,CAAP;EAIF,KAAaiyD,WAAW5pD,KAAI6pD,sBAAJ7pD,CAAxB,CCrBA,6BAAA,CACI8pD,CADJ,EAC0BC,CAD1B,EAEIC,CAFJ;qBAEIA,IAAYC,kBAAUC,wBACxB,IAAMC,IAAUhrD,gBAAgB2qD,CAAhB3qD,EAAwB,QAAxBA,EAAkC,qBAAlCA,CAAhB;EAAA,MACIirD,IAAmB,IADvB,CAEe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,qBAApCA,CADE,EAIf,IAAMkrD,IAA4B,QAAZD,CAAY,GAAQD,CAAR,GAAkBA,EAAQ/mE,GAAR+mE,CAAYC,CAAZD,CAApD,CAEA,IAAIH,MAAcC,kBAAUK,IAA5B,EACE,OAAOD,CAAP,CAEF,IAAIL,MAAcC,kBAAUM,GAA5B,EACE,OAAOF,EAAa/0E,GAAb+0E,EAAP,CAEF,IAAIL,MAAcC,kBAAUO,IAA5B,EAAkC;EAChC,QAAgB,QAAZJ,CAAJ,EACE,OAAOC,EAAanoE,IAAbmoE,EAAP,CAEA,IAAMI,IACFhuE,cAAc0tE,EAAQ1zE,KAAtBgG,IAA+BA,cAAc2tE,EAAS3zE,KAAvBgG,CADnC;EAAA,QAEM/G,IAAS20E,EAAa/0E,GAAb+0E,GAAmB/mE,GAAnB+mE,CAAuBD,EAAS90E,GAAT80E,EAAvBC,CAFf,CAGA,OAAOI,IAAkB,CAAlBA,GAAsB/0E,EAAO4N,GAAP5N,CAAW2uB,OAAOomD,CAAPpmD,CAAX3uB,CAAtB+0E,GACsB/0E,CAD7B;EAIJ,OAAIs0E,MAAcC,kBAAUC,sBAA5B,EAAoD;EAClD,QAAgB,QAAZE,CAAJ,EACE,OAAOC,EAAa/0E,GAAb+0E,GAAmB/mE,GAAnB+mE,CAAuBhmD,OAAO8lD,EAAQzzE,IAAf2tB,CAAvBgmD,CAAP,CAEA,IAEMK,IAFqBN,EAAShnE,GAATgnE,CAAa/2D,OAAK82D,EAAQ1zE,KAAb4c,CAAb+2D,EAGJjmE,QAHIimE,CAGK/lD,OAAO,CAAPA,CAHL+lD,EAGgB90E,GAHhB80E,GAGsBxqD,OAHtBwqD,EAA3B,CAIA,OAAOC,EAAa/0E,GAAb+0E,GAAmB/mE,GAAnB+mE,CAAuBK,CAAvBL,CAAP;EAIJ,SAAMt2E,MAAM,wBAAsBi2E,CAA5Bj2E,CAAN;EAiBF,6BAAA,CACI42E,CADJ,EAC0BC,CAD1B,EAEIb,CAFJ,EAGIC,CAHJ;qBAGIA,IAAYC,kBAAUC,wBACxB,IAAMW,IAAU1rD,gBAAgBwrD,CAAhBxrD,EAAwB,QAAxBA,EAAkC,oBAAlCA,CAAhB;EAAA,MACM2rD,IACF3rD,gBAAgByrD,CAAhBzrD,EAA6B,aAA7BA,EAA4C,oBAA5CA,CAFJ;EAAA,MAGIirD,IAAmB,IAHvB,CAIe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,oBAApCA,CADE,GAGf0B,kBACIgqD,EAAQp0E,KADZoqB,EACmBiqD,EAAar0E,KADhCoqB,EACuC,+BADvCA,CAHe,CAMf,IAAMipD,IAASe,EAAQ7nE,GAAR6nE,CAAYC,CAAZD,EAA0BhlE,GAA1BglE,EAAf,CACA,OAAOE,oBAAoBjB,CAApBiB,EAA4BX,CAA5BW,EAAsCf,CAAtCe,CAAP;EAiBF,2BAAA,CACIJ,CADJ,EAC0BC,CAD1B,EAEIb,CAFJ,EAGIC,CAHJ;qBAGIA,IAAYC,kBAAUC,wBACxB,IAAMW,IAAU1rD,gBAAgBwrD,CAAhBxrD,EAAwB,QAAxBA,EAAkC,kBAAlCA,CAAhB;EAAA,MACM2rD,IACF3rD,gBAAgByrD,CAAhBzrD,EAA6B,aAA7BA,EAA4C,kBAA5CA,CAFJ;EAAA,MAGIirD,IAAmB,IAHvB,CAIe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,kBAApCA,CADE,GAGf0B,kBACIgqD,EAAQp0E,KADZoqB,EACmBiqD,EAAar0E,KADhCoqB,EACuC,6BADvCA,CAHe,CAMf,IAAMipD,IAASe,EAAQ9mE,iBAAR8mE,CAA0BC,CAA1BD,CAAf,CACA,OAAOE,oBAAoBjB,CAApBiB,EAA4BX,CAA5BW,EAAsCf,CAAtCe,CAAP;EAkBF,yBAAA,CACIJ,CADJ,EAC0BC,CAD1B,EACqDtyE,CADrD,EAEIyxE,CAFJ,EAGIC,CAHJ;qBAGIA,IAAYC,kBAAUC,wBACxB,IAAMW,IAAU1rD,gBAAgBwrD,CAAhBxrD,EAAwB,QAAxBA,EAAkC,gBAAlCA,CAAhB;EAAA,MACM2rD,IACF3rD,gBAAgByrD,CAAhBzrD,EAA6B,aAA7BA,EAA4C,gBAA5CA,CAFJ;EAAA,MAGIirD,IAAmB,IAHvB,CAIe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,gBAApCA,CADE,GAGf0B,kBACIgqD,EAAQp0E,KADZoqB,EACmBiqD,EAAar0E,KADhCoqB,EACuC,2BADvCA,CAHe,CAMf,IACMipD,IADMzlD,OAAO,CAAPA,EACOrhB,GADPqhB,CACWwmD,EAAQz
nE,GAARynE,CAAYC,CAAZD,EAA0Bv1E,GAA1Bu1E,CAA8BvyE,CAA9BuyE,GAAoC,CAApCA,CADXxmD,CAAZ,CAEA,OAAO0mD,oBAAoBjB,CAApBiB,EAA4BX,CAA5BW,EAAsCf,CAAtCe,CAAP;EAiBF,oBAAA,CACIJ,CADJ,EAC0BC,CAD1B,EAEIb,CAFJ,EAGIC,CAHJ;qBAGIA,IAAYC,kBAAUC,wBACxB,IAAIW,IAAU1rD,gBAAgBwrD,CAAhBxrD,EAAwB,QAAxBA,EAAkC,WAAlCA,CAAd;EAAA,MACM2rD,IAAe3rD,gBAAgByrD,CAAhBzrD,EAA6B,aAA7BA,EAA4C,WAA5CA,CADrB;EAAA,MAEIirD,IAAmB,IAFvB,CAGe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,WAApCA,CADE,GAGf0B,kBAAkBgqD,EAAQp0E,KAA1BoqB,EAAiCiqD,EAAar0E,KAA9CoqB,EAAqD,sBAArDA,CAHe,CAKf,IAAM6mD,IAAMrjD,OAAO,CAAPA,CAAZ,CAEAwmD,IAAUxmD,OAAO,CAAPA,EAAUjhB,GAAVihB,CAAcwmD,CAAdxmD,EAAuBrhB,GAAvBqhB,CAA2BqjD,CAA3BrjD,CAAVwmD,CACA,IAAMf,IAASpC,EAAI1kE,GAAJ0kE,CAAQmD,EAAQznE,GAARynE,CAAYC,CAAZD,CAARnD,EAAmC3hE,IAAnC2hE,EAAf,CACA,OAAOqD,oBAAoBjB,CAApBiB,EAA4BX,CAA5BW,EAAsCf,CAAtCe,CAAP;EAkBF,kBAAA,CACIJ,CADJ,EAC0BC,CAD1B,EAEIb,CAFJ,EAEiCiB,CAFjC,EAGIhB,CAHJ;qBAEiCgB,4BAC7BhB,IAAYC,kBAAUC,wBACxB,IAAMW,IAAU1rD,gBAAgBwrD,CAAhBxrD,EAAwB,QAAxBA,EAAkC,SAAlCA,CAAhB;EAAA,MACM2rD,IAAe3rD,gBAAgByrD,CAAhBzrD,EAA6B,aAA7BA,EAA4C,SAA5CA,CADrB;EAAA,MAEIirD,IAAmB,IAFvB,CAGe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,SAApCA,CADE,GAGf0B,kBAAkBgqD,EAAQp0E,KAA1BoqB,EAAiCiqD,EAAar0E,KAA9CoqB,EAAqD,oBAArDA,CAHe,CAKf,IAAM6mD,IAAMrjD,OAAO,CAAPA,CAAZ;EAAA,MACM4mD,IAAgB5mD,OAAO2mD,CAAP3mD,CADtB;EAAA,MAEMylD,IAASe,EAAQznE,GAARynE,CAAYC,EAAajoE,GAAbioE,CAAiBG,CAAjBH,EAAgCnvE,GAAhCmvE,EAAZD,EACKxlE,GADLwlE,GAEK7nE,GAFL6nE,CAESnD,EAAI1kE,GAAJ0kE,CAAQmD,CAARnD,EAAiBtkE,GAAjBskE,CACDA,EAAI1kE,GAAJ0kE,CAAQoD,CAARpD,EAAsB7kE,GAAtB6kE,CAA0BuD,CAA1BvD,EAAyC/rE,GAAzC+rE,EADCA,CAFTmD,CAFf,CAMA,OAAOE,oBAAoBjB,CAApBiB,EAA4BX,CAA5BW,EAAsCf,CAAtCe,CAAP;EAGF,wCAAA,CACIJ,CADJ,EAC0BlrD,CAD1B;EAEE,MAAMorD,IACF1rD,gBAAgBwrD,CAAhBxrD,EAAwB,QAAxBA,EAAkC,+BAAlCA,CADJ;EAAA,MAEMO,IACFP,gBAAgBM,CAAhBN,EAAwB,QAAxBA,EAAkC,+BAAlCA,CAHJ,CAIA0B,kBACIgqD,EAAQp0E,KADZoqB,EACmBnB,EAAQjpB,KAD3BoqB,EACkC,0CADlCA,EAuBA,IAAMqqD,IAAYxrD,EAAQ3Z,IAAR2Z,EAAlB;EAAA,MACMyrD,IAAgBzrD,EAAQtc,GAARsc,CAAYmrD,CAAZnrD,CADtB;EAAA,MAEM0rD,IAAgB1rD,EAAQ7Z,GAAR6Z,GAAcra,GAAdqa,GAAoB1oB,GAApB0oB,GAA0Bja,KAA1Bia,EAFtB,CAIA,OAAOwrD,EAAUloE,GAAVkoE,CAAcC,CAAdD,EAA6BroE,GAA7BqoE,CAAiCE,CAAjCF,CAAP;EAuBF,8BAAA,CACIG,CADJ,EACoC5rD,CADpC,EAEIsqD,CAFJ,EAEiCuB,CAFjC,EAGItB,CAHJ;qBAEiCsB,yBAC7BtB,IAAYC,kBAAUC,wBACxB,IAAIqB,IAAoBpsD,gBACpBksD,CADoBlsD,EACF,kBADEA,EACkB,qBADlBA,CAAxB;EAAA,MAEMO,IAAUP,gBAAgBM,CAAhBN,EAAwB,QAAxBA,EAAkC,qBAAlCA,CAFhB;EAAA,MAGIirD,IAAmB,IAHvB,CAUA,IANe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,qBAApCA,CADE,GAGf0B,kBACI0qD,EAAkB90E,KADtBoqB,EAC6BnB,EAAQjpB,KADrCoqB,EAC4C,gCAD5CA,CAHe,EAMXyqD,IAAiB,CAArB,EAAwB;EACtB,QAAME,IAAuBnnD,OAAOinD,CAAPjnD,CAA7B;EAAA,QACMqjD,IAAMrjD,OAAO,CAAPA,CADZ;EAAA,QAEMokD,IAAOpkD,OAAO,EAAPA,CAFb,CAIAknD,IAAoBA,EAAkBnoE,GAAlBmoE,CAAsB7D,EAAI1kE,GAAJ0kE,CAAQ8D,CAAR9D,CAAtB6D,EACK1oE,GADL0oE,CACS9C,EAAKrlE,GAALqlE,CAAS+C,CAAT/C,CADT8C,CAApBA;EAGF,OAAMzB,IAAS2B,+BAA+BF,CAA/BE,EAAkD/rD,CAAlD+rD,CAAf,CAEA,OAAOV,oBAAoBjB,CAApBiB,EAA4BX,CAA5BW,EAAsCf,CAAtCe,CAAP;EAkBF,oBAAA,CACIJ,CADJ,EAC0BC,CAD1B,EAEIb,CAFJ,EAEiC2B,CAFjC,EAGI1B,CAHJ;qBAEiC0B,yBAC7B1B,IAAYC,kBAAUC,wBACxB,IAAMW,IAAU1rD,gBAAgBwrD,CAAhBxrD,EAAwB,QAAxBA,EAAkC,WAAlCA,CAAhB;EAAA,MACM2rD,IAAe3rD,gBAAgByrD,CAAhBzrD,EAA6B,aAA7BA,EAA4C,WAA5CA,CADrB;EAAA,MAEIirD,IAAmB,IAFvB,CAGe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,WAApCA,CADE,GAGf0B,kBAAkBgqD,EAAQp0E,KAA1BoqB,EAAiCiqD,EAAar0E,KAA9CoqB,EAAqD,sBAArDA,CAHe,CAKf,IAAM8qD,IAActnD,OAAOqnD,CAAPrnD,CAApB;EAAA,MACM
7U,IAAQs7D,EAAa9nE,GAAb8nE,CAAiBD,CAAjBC,EAA0BjlE,GAA1BilE,EADd;EAAA,MAEMc,IAAYnoE,QAAQ+L,CAAR/L,EAAekoE,CAAfloE,CAFlB;EAAA,MAGMooE,IAASr8D,EAAMxM,GAANwM,CAAUo8D,CAAVp8D,CAHf;EAAA,MAKMs6D,IACFzlD,OAAO,EAAPA,EAAYjhB,GAAZihB,CAAgBunD,EAAUjmE,MAAVimE,EAAhBvnD,EAAoCxhB,GAApCwhB,CAAwCsnD,EAAYvoE,GAAZuoE,CAAgBE,CAAhBF,CAAxCtnD,CANJ,CAOA,OAAO0mD,oBAAoBjB,CAApBiB,EAA4BX,CAA5BW,EAAsCf,CAAtCe,CAAP;EA2BF,wCAAA,CACIJ,CADJ,EACelrD,CADf,EAC0BnY,CAD1B;EAME,uBALwBA,KAAO,KAClB,MAATA,MACFA,IAAMmY,EAAO7lB,IAAP6lB,GAAc,IAGlBnY,MAAQmY,EAAO7lB,IAAP6lB,GAAc,CAA1B,EACE,MAAM1rB,MACF,qGACuC0rB,EAAO7lB,IAD9C,kBAAA,GAEe0N,CAHbvT,CAAN,CA4BF,OAtBiB2mB,WAAW,UAACiwD,CAAD,EAASlrD,CAAT;EAI1B,QACME,IAAMF,EAAOhd,SAAPgd,EAAkBnY,EAAlBmY,GADK,CACLA,CADZ;EAAA,QAGMqsD,IAAYrsD,EAAOG,OAAPH,GAAiBzc,GAAjByc,CAAqBE,CAArBF,CAHlB,CAeA,SAAQlhB,OAXWutE,EAAU1oE,GAAV0oE,CAAcnB,CAAdmB,EAAsBzmE,GAAtBymE,GAEMx2E,GAFNw2E,EAEWxkE,EAFXwkE,GAWJl4D,UAPE,UAAC3G,CAAD;EACf,YAAM8+D,IAAUC,qBAAqB/+D,EAAGxW,KAAxBu1E,GAAgC1kE,EAAhC0kE,CAAhB,CACA,QACE/+D,EAAGtN,OAAHsN,CAAW8+D,CAAX9+D,EAAoB7J,GAApB6J,CAAwB09D,EAAO/qD,OAAP+qD,GAAiB3nE,GAAjB2nE,CAAqBmB,EAAU90E,GAAV80E,EAArBnB,CAAxB19D,GACAA,EAAGtN,OAAHsN,CAAW8+D,CAAX9+D,EAAoB7J,GAApB6J,CAAwB6+D,EAAU90E,GAAV80E,GAAgB9oE,GAAhB8oE,CAAoBnB,EAAO/qD,OAAP+qD,EAApBmB,CAAxB7+D,EAFF;WAKF;KAnBeyN,EAsBDiwD,CAtBCjwD,EAsBO+E,CAtBP/E,CAsBjB;EAqBF,8BAAA,CACIuxD,CADJ,EACgCxsD,CADhC,EAEIsqD,CAFJ,EAEiCuB,CAFjC,EAGItB,CAHJ;qBAEiCsB,yBAC7BtB,IAAYC,kBAAUC,wBACxB,IAAIgC,IACA/sD,gBAAgB8sD,CAAhB9sD,EAA8B,cAA9BA,EAA8C,qBAA9CA,CADJ;EAAA,MAEMO,IAAUP,gBAAgBM,CAAhBN,EAAwB,QAAxBA,EAAkC,qBAAlCA,CAFhB;EAAA,MAGIirD,IAAmB,IAHvB,CAYA,IAPe,QAAXL,CAAW,KACbK,IAAWjrD,gBAAgB4qD,CAAhB5qD,EAAyB,SAAzBA,EAAoC,qBAApCA,CADE,GAIf0B,kBACIqrD,EAAcz1E,KADlBoqB,EACyBnB,EAAQjpB,KADjCoqB,EACwC,gCADxCA,CAJe,EAOXyqD,IAAiB,CAArB,EAAwB;EACtB,QAAME,IAAuBnnD,OAAOinD,CAAPjnD,CAA7B;EAAA,QACMqjD,IAAMrjD,OAAO,CAAPA,CADZ;EAAA,QAEM8nD,IAAa9nD,OAAO6nD,EAAcz1E,KAAdy1E,CAAoB,CAApBA,CAAP7nD,CAFnB,CAIA6nD,IAAgBA,EAAc9oE,GAAd8oE,CAAkBxE,EAAI1kE,GAAJ0kE,CAAQ8D,CAAR9D,CAAlBwE,EACKrpE,GADLqpE,CACSV,EAAqBloE,GAArBkoE,CAAyBW,CAAzBX,CADTU,CAAhBA;EAIF,OAAMpC,IAASsC,+BAA+BF,CAA/BE,EAA8C1sD,CAA9C0sD,CAAf,CAEA,OAAOrB,oBAAoBjB,CAApBiB,EAA4BX,CAA5BW,EAAsCf,CAAtCe,CAAP;IAtcF,UAAYd,CAAZ;EACEA,cAAAA,UAAAA,EACAA,YAAAA,UADAA,EAEAA,WAAAA,SAFAA,EAGAA,8BAAAA,4BAHAA;EADF,CAAA,CAAYA,sBAAAA,sBAAAA,CAAZ,EAycA,IAAaoC,qBAAqBrsD,KAAIssD,0CAAJtsD,CAAlC;EAAA,IACa+qD,sBAAsB/qD,KAAIusD,4CAAJvsD,CADnC;EAAA,IAEawsD,iBAAiBxsD,KAAIysD,kCAAJzsD,CAF9B;EAAA,IAGa0sD,YAAY1sD,KAAI2sD,wBAAJ3sD,CAHzB;EAAA,IAIa4sD,YAAY5sD,KAAI6sD,wBAAJ7sD,CAJzB;EAAA,IAKa8sD,UAAU9sD,KAAI+sD,oBAAJ/sD,CALvB;EAAA,IAMagtD,mBAAmBhtD,KAAIitD,sCAAJjtD,CANhC;EAAA,IAOaktD,sBAAsBltD,KAAImtD,4CAAJntD,CAPnC;EAAA,IAQaotD,sBAAsBptD,KAAIqtD,4CAAJrtD,CARnC;EAAA;;gTAAA,CCxaA,qBAAA,CAAsBrU,CAAtB;EACE,MAAI2hE,CAAJ,CACA,IAAIj3E,MAAMC,OAAND,CAAcsV,CAAdtV,CAAJ,EAAuB;EACrBi3E,SAAkB,CAAlBA,EACAp3E,OACU,QAANyV,CAAM,IAAQA,EAAG/W,MAAH+W,GAAY,CAD9BzV,EAEI,mEAFJA,CADAo3E,CAKA,KADA,IAAMhmE,IAAMqE,EAAG,CAAHA,EAAMlV,KAANkV,CAAY,CAAZA,CAAZ,EACSpW,IAAI,CAAb,EAAgBA,IAAIoW,EAAG/W,MAAvB,IAAiCW,CAAjC,EACEW,OACIyV,EAAGpW,CAAHoW,EAAMlV,KAANkV,CAAY,CAAZA,MAAmBrE,CADvBpR,EAEI,mEACQyV,EAAGpW,CAAHoW,EAAMlV,KAANkV,CAAY,CAAZA,CADR,UAAA,GAC8BrE,CAD9B,MAFJpR;KAPJ,MAaEo3E,KAAkB,CAAlBA,EACA3hE,IAAK9J,QAAM8J,CAAN9J,EAAU8J,EAAGlV,KAAHkV,CAAS,CAATA,CAAV9J,EAAuB,CAAvBA,EAA0BvF,GAA1BuF,CAA8B,UAAA3M,CAAA;EAAK,WAAA0L,QAAQ1L,CAAR0L,GAAY,EAAZA,CAAA;KAAnCiB,CADLyrE,CAIFp3E,OACIyV,EAAG/W,MAAH+W,IAAaA,EAAG,CAAHA,EAAMlV,KAANkV,CAAY,CAAZA,CADjBzV,EAEI,sCAAoCyV,EAAG/W,MAAvC,qCAAA,GAC6B+W,EAAG,CAAHA,EAAMlV,
KAANkV,CAAY,CAAZA,CAD7B,OAFJzV,EAKA,IAAMq3E,MAAN;EAAA,MACMC,IAAO7hE,CADb;EAAA,oBAESpW;EACPg4E,MAAG/2E,IAAH+2E,CAAQ51D,IAAIE,MAAJF,CAAWzE,IAAXyE,CAAgB;EACtB,UAAIziB,IAAIs4E,EAAKj4E,CAALi4E,CAAR,CACA,IAAIj4E,IAAI,CAAR,EACE,KAAK,IAAIiD,IAAI,CAAb,EAAgBA,IAAIjD,CAApB,IAAyBiD,CAAzB,EAA4B;EAC1B,YAAMi1E,IAAOn4E,MAAIi4E,EAAG/0E,CAAH+0E,EAAMlqE,SAANkqE,CAAgBr4E,CAAhBq4E,CAAJj4E,EAAwB8N,GAAxB9N,CAA4Bi4E,EAAG/0E,CAAH+0E,CAA5Bj4E,CAAb,CACAJ,IAAIA,EAAE8N,GAAF9N,CAAMu4E,CAANv4E,CAAJA;EAGJ,cAAOA,EAAEoO,GAAFpO,CAAMuM,KAAKvM,CAALuM,EAAQ,WAARA,CAANvM,CAAP;OARMyiB,CAAR41D;KAHF,CAEA,KAASh4E,IAAI,CAAb,EAAgBA,IAAIoW,EAAG/W,MAAvB,IAAiCW,CAAjC,IAASA,GAaT,OAAI+3E,IACKxrE,MAAMyrE,CAANzrE,EAAU,CAAVA,CADLwrE,GAGKC,CAHT;EAmDF,aAAA,CAAar4E,CAAb,EAAwBw4E,CAAxB;EACE,uBADsBA,SAClBx4E,EAAE0E,IAAF1E,GAAS,CAAb,EACE,MAAM,IAAInB,KAAJ,CACF,kEACImB,EAAE0E,IAFJ,CAAN,CAGK,IAAe,MAAX1E,EAAE0E,IAAN,EACL,OAAO+zE,KAAKz4E,CAALy4E,EAAoBD,CAApBC,CAAP,CAMA,IAAMC,IAAgB14E,EAAEuB,KAAFvB,CAAQmD,KAARnD,CAAc,CAAdA,EAAiBA,EAAEuB,KAAFvB,CAAQN,MAARM,GAAiB,CAAlCA,EACKihD,MADLjhD,CACY,UAACqJ,CAAD,EAAQsvE,CAAR;EAAiB,WAAAtvE,IAAQsvE,CAAR;KAD7B34E,CAAtB;EAAA,MAQM44E,MARN;EAAA,MASMC,MATN,CAiBA,OAfahsE,QACT7M,EAAEyK,OAAFzK,EACE04E,GAAe14E,EAAEuB,KAAFvB,CAAQA,EAAEuB,KAAFvB,CAAQN,MAARM,GAAiB,CAAzBA,GACfA,EAAEuB,KAAFvB,CAAQA,EAAEuB,KAAFvB,CAAQN,MAARM,GAAiB,CAAzBA,EAFFA,CADS6M,EAKT,CALSA,EAQRnH,OARQmH,CAQA,UAAAm4D,CAAA;EACL,QAAAvmD,cAAA;EAAA,QAACq6D,QAAD;EAAA,QAAMC,QAAN,CACNH,EAAKt3E,IAALs3E,CAAUE,CAAVF,GACAC,EAAKv3E,IAALu3E,CAAUE,CAAVF,CADAD;KAVW/rE,IAaHD,MAAMgsE,CAANhsE,EAAY,CAAZA,EAAenC,OAAfmC,CAAuB5M,EAAEuB,KAAzBqL,GACAA,MAAMisE,CAANjsE,EAAY,CAAZA,EAAenC,OAAfmC,CAAuB5M,EAAEuB,KAAzBqL,EACV;EAIJ,cAAA,CAAc5M,CAAd,EAA2Bw4E,CAA3B;EACE,0BADyBA,SAClB/1D,IAAIE,MAAJF,CAAWzE,IAAXyE,CAAgB;EACrB,QAAuB,MAAnBziB,EAAEuB,KAAFvB,CAAQN,MAAZ,EACE,MAAM,IAAIb,KAAJ,CACF,4CAA0CmB,EAAEuB,KAAFvB,CAAQN,MAAlD,cADE,CAAN,CAcF,KAVA,IAAMs5E,IAAIh5E,EAAEuB,KAAFvB,CAAQ,CAARA,CAAV,EACMiC,IAAIjC,EAAEuB,KAAFvB,CAAQ,CAARA,CADV,EAGIi5E,IAAIjvB,IAAIgvB,CAAJhvB,CAHR,EAIIzpD,IAAIP,EAAE2L,KAAF3L,EAJR,EAMMk5E,IAAQC,WAAW,GAAXA,GAAiB,GAAG,EAApBA,CANd,EAOI1uC,IAAcyuC,EAAMvtE,KAANutE,EAPlB,EASME,IAAQJ,KAAK/2E,CAAL+2E,GAAS/2E,CAAT+2E,GAAaA,CAT3B,gBAUS11E;;YAGD+1E,IAAQ94E;YACR+4E,IAAQ7uC;YACR8uC,IAAQN,EACdx6D;;;;;;;;;UAAAA,EAACgsB,QAADhsB,EAAIle,QAAJke,EAAOw6D,QAAPx6D,EAyCAnG,SAAS+gE,GAAOC,GAAOC,EAAvBjhE,CAzCAmG;OAhBF,EAUSnb,IAAI,CAAb,EAAgBA,IAAI81E,CAApB,IAA6B91E,CAA7B,IAASA,GAuDT,QALKk1E,KAAgBQ,IAAI/2E,MACvBg3E,IAAIA,EAAE91E,KAAF81E,EAAS,GAAG,EAAZA,GAAiBD,GAAG/2E,EAApBg3E,CAAJA,EACA14E,IAAIA,EAAE4C,KAAF5C,EAAS,GAAG,EAAZA,GAAiB0B,GAAGA,EAApB1B,KAGE04E,GAAG14E,EAAX;KAvEKkiB,CAAP;EA2EF,KAAa+2D,cAAc1uD,KAAI2uD,4BAAJ3uD,CAA3B;EAAA,IACa4uD,KAAK5uD,KAAI6uD,UAAJ7uD,CADlB;EAAA,oEAAA,CC9NA,wBAAA,CACI8uD,CADJ,EAC0Bp4E,CAD1B,EACkDgR,CADlD;qBACkDA,QAChD,IAAMqnE,IAAU5vD,gBAAgB2vD,CAAhB3vD,EAAwB,QAAxBA,EAAkC,gBAAlCA,CAAhB,CACA/gB,OACqB,MAAjB2wE,EAAQn1E,IAAS,IAAsB,MAAjBm1E,EAAQn1E,IADlCwE,EAEI,kEACY2wE,EAAQn1E,IADpB,MAFJwE,GAIAA,OACoB,MAAhB1H,EAAK9B,MADTwJ,EAEI,+DACO1H,CADP,MAFJ0H,CAJAA,CASA,IAAI4wE,IAAcD,CAAlB;EAAA,MACIlV,KAAe,CADnB,CAEqB,MAAjBkV,EAAQn1E,IAAS,KACnBigE,KAAe,CAAfA,EACAmV,IACID,EAAQ9Z,IAAR8Z,CAAa,CAAbA,EAAgBA,EAAQt4E,KAARs4E,CAAc,CAAdA,CAAhBA,EAAkCA,EAAQt4E,KAARs4E,CAAc,CAAdA,CAAlCA,EAAoDA,EAAQt4E,KAARs4E,CAAc,CAAdA,CAApDA,CAHe,EAMd,IAAAtsD,QAAA;EAAA,MAAWC,QAAX;EAAA,MAaDjT,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CAZ2B,UAACtJ,CAAD,EAAUikD,CAAV;EACnC,WAAAjkD,EAAQzG,cAARyG,CAAuB2gE,CAAvB3gE,EAAoCoU,CAApCpU,EAA+CqU,CAA/CrU,EAAyD3G,CAAzD2G,CAAA;KAWQsJ,IAA+Bq3D,gBAA/Br3D,EATK,UAAC1K,CAAD,EAAe2C,CAAf;EACf,aACEo/D,aAA
a;EAAM,eAAAr3D,IAAIE,MAAJF,CAAW4I,SAAX5I,CACf,UAAAtJ,CAAA;EACI,iBAAAA,EAAQ4gE,sBAAR5gE,CAA+BpB,CAA/BoB,EAAmC2gE,CAAnC3gE,EAAgD3G,CAAhD2G,CAAA;WAFWsJ,IAAAA,CAAA;WADrB;KAQUA,CAbL,CAcP,OAAIkiD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EAmBF,gCAAA,CACIq/D,CADJ,EAC0Bp4E,CAD1B,EACkDgR,CADlD;qBACkDA,QAChD,IAAMqnE,IAAU5vD,gBAAgB2vD,CAAhB3vD,EAAwB,QAAxBA,EAAkC,uBAAlCA,CAAhB,CACA/gB,OACqB,MAAjB2wE,EAAQn1E,IAAS,IAAsB,MAAjBm1E,EAAQn1E,IADlCwE,EAEI,yEACY2wE,EAAQn1E,IADpB,MAFJwE,GAIAA,OACoB,MAAhB1H,EAAK9B,MADTwJ,EAEI,sEACO1H,CADP,MAFJ0H,CAJAA,EAQAA,OACsB,cAAlB2wE,EAAQt2E,KAAU,IAA+B,YAAlBs2E,EAAQt2E,KAD3C2F,EAEI,kDAFJA,CARAA,CAYA,IAAI4wE,IAAcD,CAAlB;EAAA,MACIlV,KAAe,CADnB,CAEqB,MAAjBkV,EAAQn1E,IAAS,KACnBigE,KAAe,CAAfA,EACAmV,IACID,EAAQ9Z,IAAR8Z,CAAa,CAAbA,EAAgBA,EAAQt4E,KAARs4E,CAAc,CAAdA,CAAhBA,EAAkCA,EAAQt4E,KAARs4E,CAAc,CAAdA,CAAlCA,EAAoDA,EAAQt4E,KAARs4E,CAAc,CAAdA,CAApDA,CAHe,EAKd,IAAAtsD,QAAA;EAAA,MAAWC,QAAX;EAAA,MAeDjT,IAAMkI,IAAIE,MAAJF,CAAW4I,SAAX5I,CAb2B,UAACtJ,CAAD,EAAUikD,CAAV;EACnC,WAAAjkD,EAAQxG,qBAARwG,CACI2gE,CADJ3gE,EACiBoU,CADjBpU,EAC4BqU,CAD5BrU,EACsC3G,CADtC2G,CAAA;KAYQsJ,IAA+Bq3D,gBAA/Br3D,EATK,UAAC1K,CAAD,EAAe2C,CAAf;EACf,aACEo/D,aAAa;EAAM,eAAAr3D,IAAIE,MAAJF,CAAW4I,SAAX5I,CACf,UAAAtJ,CAAA;EAAW,iBAAAA,EAAQ6gE,6BAAR7gE,CACPpB,CADOoB,EACH2gE,CADG3gE,EACU3G,CADV2G,CAAA;WADIsJ,IAAAA,CAAA;WADrB;KAQUA,CAfL,CAiBP,OAAIkiD,IACKpqD,EAAIw2C,IAAJx2C,CAASA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAATA,EAAuBA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAAvBA,EAAqCA,EAAIhZ,KAAJgZ,CAAU,CAAVA,CAArCA,CADLoqD,GAGGpqD,CAHP;EAuBF,4BAAA,CACI0T,CADJ,EACgCC,CADhC,EAEIC,CAFJ,EAE2BC,CAF3B,EAGIC,CAHJ;qBAE2BD,0BACvBC,IAAiB3tB,OAAOu5E,mBAC1B,IAAMC,IAASjwD,gBAAgBgE,CAAhBhE,EAAuB,OAAvBA,EAAgC,mBAAhCA,CAAf;EAAA,MACMkwD,IAAUlwD,gBAAgBiE,CAAhBjE,EAAwB,QAAxBA,EAAkC,mBAAlCA,CADhB;EAAA,MAGMlT,IAASqjE,sBACXF,CADWE,EACHD,CADGC,EACMjsD,CADNisD,EACqBhsD,CADrBgsD,EACmC/rD,CADnC+rD,CAHf,CASA,OAJAjsD,IAAgBpX,EAAOoX,aAAvBA,EACAC,IAAerX,EAAOqX,YADtBD,EAEAE,IAAiBtX,EAAOsX,cAFxBF,EAIO1L,IAAIE,MAAJF,CAAW4I,SAAX5I,CACH,UAAAniB,CAAA;EAAK,WAAAA,EAAE+5E,iBAAF/5E,CACD45E,CADC55E,EACO65E,CADP75E,EACgB6tB,CADhB7tB,EAC+B8tB,CAD/B9tB,EAC6C+tB,CAD7C/tB,CAAA;KADFmiB,IAGFy3D,WAHEz3D,CAAP;EAOF,iCAAA,CACIwL,CADJ,EACgCC,CADhC,EAEIC,CAFJ,EAE2BC,CAF3B,EAGIC,CAHJ;4BAE2BD,0BACvBC,IAAiB3tB,OAAOu5E;;;EAUR,iBATZC,IAASjwD,gBAAgBgE,CAAhBhE,EAAuB,OAAvBA,EAAgC,wBAAhCA,CAATiwD,EACAC,IAAUlwD,gBAAgBiE,CAAhBjE,EAAwB,QAAxBA,EAAkC,wBAAlCA,CADViwD,EAGAnjE,IAASqjE,sBACXF,CADWE,EACHD,CADGC,EACMjsD,CADNisD,EACqBhsD,CADrBgsD,EACmC/rD,CADnC+rD,CAHTF,EAKN/rD,IAAgBpX,EAAOoX,aALjB+rD,EAMN9rD,IAAerX,EAAOqX,YANhB8rD,EAON7rD,IAAiBtX,EAAOsX,cAPlB6rD,MASkBA,EAAO5vE,IAAP4vE,GAAN;EACC,iBADbI,IAAY77D,MAAAA,EAAZ67D,MACmBH,EAAQ7vE,IAAR6vE,GAAN;EASnB,iBATMI,IAAa97D,MAAAA,EAAb87D,EACAhgE,IAAM4/C,sBACRmgB,CADQngB,EACGogB,CADHpgB,EACehsC,CADfgsC,EAC8B/rC,CAD9B+rC,EAC4C9rC,CAD5C8rC,CADNogB,EAGFL,MAAWjsD,CAAXisD,IACFA,EAAO5hE,OAAP4hE,EAJIK,EAMFJ,MAAYjsD,CAAZisD,IACFA,EAAQ7hE,OAAR6hE,EAPII,MASChgE,EAAP;;;EAGF,+BAAA,CACI0T,CADJ,EACqBC,CADrB,EACuCC,CADvC,EAEIC,CAFJ,EAE0BC,CAF1B;EAIsB,UAAhBD,CAAgB,KAClBA,IAAe,EADG,GAGE,QAAlBC,CAAkB,KACpBA,IAAiB3tB,OAAOu5E,iBADJ,CAHF,CAMpB,IAAM1iD,IAAWtJ,EAAM1sB,KAAN0sB,CAAY,CAAZA,CAAjB,CAiBA,OAhBAE,IAAgBtuB,KAAKE,GAALF,CAASsuB,CAATtuB,EAAwB03B,CAAxB13B,CAAhBsuB,EAEAjlB,OACI,KAAKklB,CAAL,IAAqBA,KAAgB,CADzCllB,EAEI,8CAA4CklB,CAA5C,MAFJllB,CAFAilB,EAKAjlB,OACmB,MAAf+kB,EAAMvpB,IADVwE,EAEI,iDAA+C+kB,EAAMvpB,IAArD,MAFJwE,CALAilB,EAQAjlB,OACuB,MAAnB+kB,EAAM1sB,KAAN0sB,CAAY,CAAZA,C
ADJ/kB,EAEI,sDAAoD+kB,EAAM1sB,KAAN0sB,CAAY,CAAZA,CAFxD/kB,CARAilB,EAWAjlB,OAA4B,MAAhBglB,EAAOxpB,IAAnBwE,EAA+B,4BAA/BA,CAXAilB,EAYAjlB,OACIglB,EAAO3sB,KAAP2sB,CAAa,CAAbA,MAAoBqJ,CADxBruB,EAEI,wDAAsDquB,CAAtD,eAAA,GACerJ,EAAO3sB,KAAP2sB,CAAa,CAAbA,CAHnBhlB,CAZAilB,IAgBQA,kBAAeC,iBAAcC,mBAArC;EAyBF,wBAAA,CACI5b,CADJ,EAEIwb,CAFJ,EAGIusD,CAHJ,EAIIjsD,CAJJ,EAKIC,CALJ,EAMIC,CANJ;EAQE,MAAMgsD,IAASxwD,gBAAgBxX,CAAhBwX,EAAuB,OAAvBA,EAAgC,eAAhCA,EAAiD,SAAjDA,CAAf;EAAA,MACMiwD,IAASjwD,gBAAgBgE,CAAhBhE,EAAuB,OAAvBA,EAAgC,eAAhCA,EAAiD,SAAjDA,CADf;EAAA,MAEMywD,IAAUzwD,gBAAgBuwD,CAAhBvwD,EAAwB,QAAxBA,EAAkC,eAAlCA,EAAmD,OAAnDA,CAFhB,CAGAuE,IAASA,KAAU,UAAnBA,EACAC,IAAqBA,KAAsB,CAD3CD,CAGA,IAAM+I,IAAW2iD,EAAO34E,KAAP24E,CAAa,CAAbA,CAAjB,CAEAhxE,OACoB,MAAhBuxE,EAAO/1E,IADXwE,EAEI,+DACoBuxE,EAAO/1E,IAD3B,MAFJwE,GAIAA,OACoB,MAAhBgxE,EAAOx1E,IAAS,IAAyB,MAApBw1E,EAAO34E,KAAP24E,CAAa,CAAbA,CADzBhxE,EAEI,sDAAoDquB,CAApD,uBAAA,GACqB2iD,EAAO34E,KAD5B,MAFJ2H,CAJAA,EAQAA,OACqB,MAAjBwxE,EAAQh2E,IAAS,IAAKg2E,EAAQn5E,KAARm5E,CAAc,CAAdA,MAAqBnjD,CAD/CruB,EAEI,uDAAqDquB,CAArD,qBAAA,GACqB2iD,EAAO34E,KAD5B,MAFJ2H,CARAA,EAYAA,OACsB,YAAlBwxE,EAAQn3E,KADZ2F,EAEI,0EACOwxE,EAAQn3E,KADf,MAFJ2F,CAZAA,EAgBAA,OACwB,MAApBqlB,EAAS7uB,MADbwJ,EAEI,0EACOqlB,EAAS7uB,MADhB,MAFJwJ,CAhBAA,EAoBAA,OACIqlB,EAAS,CAATA,KAAe,CAAfA,IAAoBA,EAAS,CAATA,KAAe,CADvCrlB,EAEI,6CAA2CqlB,CAF/CrlB,CApBAA,EAuBAA,OACe,eAAXslB,CAAW,IAAyB,cAAXA,CAD7BtlB,EAEI,iDAA+CslB,CAFnDtlB,CAvBAA,CAgCA,OADYuZ,IAAIE,MAAJF,CAAW4I,SAAX5I,CAJ2B,UAACtJ,CAAD,EAAUikD,CAAV;EACnC,WAAAjkD,EAAQwhE,aAARxhE,CACIshE,CADJthE,EACY+gE,CADZ/gE,EACoBuhE,CADpBvhE,EAC6BoV,CAD7BpV,EACuCqV,CADvCrV,EAC+CsV,CAD/CtV,CAAA;KAGQsJ,IAA+Bg4D,WAAQP,WAAvCz3D,CACZ;EAGF,KAAa/P,iBAAiBoY,KAAI8vD,kCAAJ9vD,CAA9B;EAAA,IACanY,wBAAwBmY,KAAI+vD,gDAAJ/vD,CADrC;EAAA,IAEauvD,oBAAoBvvD,KAAIgwD,wCAAJhwD,CAFjC;EAAA,IAGaiwD,yBAAyBC,uBAHtC;EAAA,IAIaL,gBAAgBM,cAJ7B;EAAA,mOAAA;EAAA,2lHAAA;EAAA;ECnPE,YAAA;EANO31E,kBAAAA,GAAY,EAAZA,EAICA,aAAAA,IAAW,CAJZA,EAODmd,IAAIxG,GAAJwG,CAAQ,YAARA,MACFnd,KAAK0nD,mBAAL1nD,GACI9H,SAASC,aAATD,CAAuB,QAAvBA,EAAiCuB,UAAjCvB,CAA4C,IAA5CA,CAFFilB,CAPCnd;EAu6FT,UA15FE41E,WAAAA,aAAAA,GAAA,UAAajuD,CAAb;EACE3nB,SAAKgF,IAALhF,GAAY,IAAI4nB,WAAJ,CAAgBD,CAAhB,CAAZ3nB;KADF41E,EAIAA,WAAAA,SAAAA,GAAA,UAASlxE,CAAT,EAAyBzI,CAAzB,EAA0CgC,CAA1C;EAiBE,QAhBI+B,KAAK61E,QAAL71E,KACFA,KAAK61E,QAAL71E,IAAgB,CAAhBA,EACImd,IAAIxG,GAAJwG,CAAQ,SAARA,KACFnC,KACI,2dADJA,CAHAhb,GAgBAA,KAAKgF,IAALhF,CAAU0T,GAAV1T,CAAc0E,CAAd1E,CAAJ,EACE,MAAM,IAAIzG,KAAJ,CAAU,mCAAV,CAAN,CAEFyG,KAAKgF,IAALhF,CAAU8W,GAAV9W,CAAc0E,CAAd1E,IAAuB/B,UAAvB+B;KAxBF41E,EA0BAA,WAAAA,MAAAA,GAAA,UAAMlxE,CAAN,EAAsBxG,CAAtB;EACE,QAAc,QAAVA,CAAJ,EACE,MAAM,IAAI3E,KAAJ,CAAU,gDAAV,CAAN,CAEFyG,KAAKgF,IAALhF,CAAU2W,GAAV3W,CAAc0E,CAAd1E,EAAsB9B,MAAtB8B,GAA+B9B,CAA/B8B;KA9BF41E,EAgCAA,WAAAA,WAAAA,GAAA,UACIt8D,CADJ,EAEIC,CAFJ;EAGE,QAAc,QAAVD,CAAJ,EACE,MAAM,IAAI/f,KAAJ,CAAU,kDAAV,CAAN,CAEF,IAAI+E,CAAJ,EAqCIJ,CArCJ,CAEA,IAAIif,IAAIxG,GAAJwG,CAAQ,SAARA,KAAoD,QAA7B7D,EAAe7f,UAA1C,EACE,MAAM,IAAIF,KAAJ,CACF,6GADE,CAAN,CAKF,IAAkC,QAA7B+f,EAAe7f,UAApB,EAEE6E,IAAQgb,EACI7f,UADJ6f,CACe,IADfA,EAEIw8D,YAFJx8D,CAEiB,CAFjBA,EAEoB,CAFpBA,EAEuBA,EAAO+gB,KAF9B/gB,EAEqCA,EAAO8gB,MAF5C9gB,EAGItU,IAHZ1G,CAFF,KAMO,IAAIgb,aAAkB4pC,SAAtB,EACL5kD,IAAOgb,EAAOtU,IAAd1G,CADK,KAEA;EAAA,YACHgb,aAAkBkuC,gBAAlBluC,IACAA,aAAkBiuC,iBAFf,EAgBL,MAAM,IAAIhuD,KAAJ,CACF,oIAEuB+f,EAActa,WAAdsa,CAA0B/a,IAH/C,CAAN,CAbA,IAAgC,QAA5ByB,KAAK0nD,mBAAT,EACE,MAAM,IAAInuD,KAAJ,CACF,8DADE,CAAN,CAIFyG,KAAK0nD,mBAAL1nD,CAAyBxG,MAAzBwG,CAAgCq6B,KAAhCr6B,GAAwCsZ,EAAO+gB,KAA/Cr6B,EACAA,KAAK0nD,mBAAL1nD,CAAyBxG,MAAzBwG,CAAgCo
6B,MAAhCp6B,GAAyCsZ,EAAO8gB,MADhDp6B,EAEAA,KAAK0nD,mBAAL1nD,CAAyB4nD,SAAzB5nD,CACIsZ,CADJtZ,EACY,CADZA,EACe,CADfA,EACkBsZ,EAAO+gB,KADzBr6B,EACgCsZ,EAAO8gB,MADvCp6B,CAFAA,EAIA1B,IAAO0B,KAAK0nD,mBAAL1nD,CACK81E,YADL91E,CACkB,CADlBA,EACqB,CADrBA,EACwBsZ,EAAO+gB,KAD/Br6B,EACsCsZ,EAAO8gB,MAD7Cp6B,EAEKgF,IANZhF;EAcF,SAAoB,MAAhBuZ,CAAJ,EACErb,IAAS,IAAIE,UAAJ,CAAeE,CAAf,CAATJ,CADF,KAEO;EACL,UAAM63E,IAAYz8D,EAAO+gB,KAAP/gB,GAAeA,EAAO8gB,MAAxC,CACAl8B,IAAS,IAAIE,UAAJ,CAAe23E,IAAYx8D,CAA3B,CAATrb,CACA,KAAK,IAAInD,IAAI,CAAb,EAAgBA,IAAIg7E,CAApB,EAA+Bh7E,GAA/B,EACE,KAAK,IAAIi7E,IAAU,CAAnB,EAAsBA,IAAUz8D,CAAhC,IAA+Cy8D,CAA/C,EACE93E,EAAOnD,IAAIwe,CAAJxe,GAAkBi7E,CAAzB93E,IAAoCI,EAAS,IAAJvD,CAAI,GAAIi7E,CAAb13E,CAApCJ;EAMN,YAAO+3E,SAAS/3E,CAAT+3E,GADF38D,EAAO8gB,QAAQ9gB,EAAO+gB,OAAO9gB,EAC3B08D,EAA2B,OAA3BA,CAAP;KAzFFL,EA2FMA,WAAAA,KAAAA,GAAN,UAAWlxE,CAAX;;;EACE,mBAAO1E,KAAK0F,QAAL1F,CAAc0E,CAAd1E,EAAP;;;KA5FF41E,EA8FAA,WAAAA,SAAAA,GAAA,UAASlxE,CAAT;EACQ,QAAAyU,oBAAA;EAAA,QAAClb,WAAD;EAAA,QAAQqpD,oBAAR,CACN,OAAc,gBAAVrpD,CAAU,GAGLi4E,uBAFY5uB,EAAe3hC,IAAf2hC,CAAoBjnD,QAApBinD,EAEZ4uB,EADY5uB,EAAe1hC,IAAf0hC,CAAoBjnD,QAApBinD,EACZ4uB,CAHK,GAKPl2E,KAAKgF,IAALhF,CAAU2W,GAAV3W,CAAc0E,CAAd1E,EAAsB9B,MAL7B;KAhGF03E,EAwGAA,WAAAA,YAAAA,GAAA,UAAYlxE,CAAZ;EACE,QAAI1E,KAAKgF,IAALhF,CAAU0T,GAAV1T,CAAc0E,CAAd1E,CAAJ,EAA2B;EAClB,UAAAsnD,mCAAA,CACe,QAAlBA,CAAkB,KACpBA,EAAe3hC,IAAf2hC,CAAoBt0C,OAApBs0C,IACAA,EAAe1hC,IAAf0hC,CAAoBt0C,OAApBs0C,EAFoB,GAItBtnD,KAAKgF,IAALhF,CAAUmX,MAAVnX,CAAiB0E,CAAjB1E,CAJsB;;KA3G1B41E,EAmHMA,WAAAA,KAAAA,GAAN,UAAW72E,CAAX;;;EAIE,eAHMI,IAAQO,KAARP,EACNJ,GADMI,QAGE0B,UADSnB,QAAQP,IACzB;;;KAvHFy2E,EA0HAA,WAAAA,OAAAA,GAAA;EACE,aAEExrB,aAAY,GAFd;KA3HFwrB,EAiIAA,WAAAA,QAAAA,GAAA,UAA0BjwD,CAA1B,EAAmCC,CAAnC;EACE,QAAM1qB,IAASkJ,OAAOC,IAAPD,CAAYuhB,EAAK1pB,KAAjBmI,IAAAA,EAA4B,WAA5BA,CAAf,CAWA,OATmBpE,KAAKgF,IAALhF,CAAU2W,GAAV3W,CAAc9E,EAAOwJ,MAArB1E,EAIRsnD,cAJQtnD,KAKjB2lB,MAAMxI,IAAIE,MAAJF,CAAWO,IAAXP,CAAgBwI,EAAKtf,KAALsf,EAAhBxI,GACNyI,MAAMzI,IAAIE,MAAJF,CAAWO,IAAXP,CAAgByI,EAAKvf,KAALuf,EAAhBzI,GANWnd,EASZ9E,CAAP;KA7IF06E,EA+IAA,WAAAA,KAAAA,GAAA,UAAuBjkE,CAAvB;EAEE,WADmB3R,KAAKgF,IAALhF,CAAU2W,GAAV3W,CAAc2R,EAAMjN,MAApB1E,EACDsnD,cADCtnD,CACc2lB,IADd3lB,CACmBqG,KADnBrG,EACnB;KAjJF41E,EAmJAA,WAAAA,KAAAA,GAAA,UAAuBjkE,CAAvB;EAEE,WADmB3R,KAAKgF,IAALhF,CAAU2W,GAAV3W,CAAc2R,EAAMjN,MAApB1E,EACDsnD,cADCtnD,CACc4lB,IADd5lB,CACmBqG,KADnBrG,EACnB;KArJF41E,EAwJQA,WAAAA,iBAAAA,GAAR,UAAyB3iE,CAAzB,EAAkD2R,CAAlD;EACO/oB,UAAMC,OAAND,CAAcoX,CAAdpX,MACHoX,KAAUA,EADPpX,GAGLoX,EAAO7S,OAAP6S,CAAe,UAAAiG,CAAA;EACJ,cAALA,CAAK,IACPtV,OACgB,gBAAZsV,EAAEjb,KADN2F,EAEOghB,0CAFPhhB,CADO;OADXqP,CAHKpX;KAzJP+5E,EAqKAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB,EAA8BwM,CAA9B,EAA+ChL,CAA/C;EACE8D,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAMwF,IAAS4wE,OAAWl6E,CAAXk6E,EAAiB17E,EAAEuD,KAAnBm4E,CAAf,EAESr7E,IAAI,CAAb,EAAgBA,IAAIyK,EAAOtJ,IAA3B,IAAmCnB,CAAnC,EAAsC;EACpC,UAAMomB,IAAM3b,EAAOmhD,UAAPnhD,CAAkBzK,CAAlByK,CAAZ;EAAA,UACM6wE,IAAOl1D,EAAIrf,GAAJqf,CAAQ,UAACrJ,CAAD,EAAM9Z,CAAN;EAAY,eAAA8Z,IAAM5Q,EAAMlJ,CAANkJ,CAAN;SAApBia,CADb,CAEA3b,EAAOsR,GAAPtR,MAAAA,CAAAA,CAAAA,GAAW9K,EAAEic,GAAFjc,MAAAA,CAAAA,CAAAA,EAAS27E,CAAT37E,UAAmBymB,EAA9B3b;EAEF,YAAOA,EAAOo8C,QAAPp8C,EAAP;KA/KFowE,EAkLAA,WAAAA,aAAAA,GAAA,UACIl7E,CADJ,EACUwM,CADV,EAC2B3D,CAD3B,EAC0ClE,CAD1C,EAEIuQ,CAFJ,EAEuBC,CAFvB,EAEwCqT,CAFxC,EAGIC,CAHJ,EAGyBC,CAHzB;EAIEpjB,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,cAAzBA,EAEM,IAAAmZ,yDAAA;EAAA,QAAC6xC,QAAD;EAAA,QAAa9uD,QAAb;EAAA,QAAmBqnB,QAAnB;EAAA,QAIAtnB,IAAQC,EAAKoR,MAALpR,CAAY,UAAC8a,CAAD,EAAI1c,CAAJ;EAAc,cAA+B,MAA/Bi
pB,EAAWjC,OAAXiC,CAAmBjpB,CAAnBipB,CAAA;OAA1BrnB,CAJR,CAMN,IAAID,EAAM8jB,IAAN9jB,CAAW,UAAA6B,CAAA;EAAQ,aAAS,MAATA,CAAA;OAAnB7B,CAAJ,EACE,OAAOq6E,SAAAA,EAAer6E,CAAfq6E,CAAP,CAKF,KAFA,IAAM9wE,IAAS4wE,OAAWl6E,CAAXk6E,EAAiB17E,EAAEuD,KAAnBm4E,CAAf,EAESr7E,IAAI,CAAb,EAAgBA,IAAIyK,EAAOtJ,IAA3B,EAAiCnB,GAAjC,EAAsC;EAIpC,WAHA,IAAMomB,IAAM3b,EAAOmhD,UAAPnhD,CAAkBzK,CAAlByK,CAAZ,EAEM+wE,IAAmB,IAAI16E,KAAJ,CAAUslB,EAAI/mB,MAAd,CAFzB,EAGS4D,IAAI,CAAb,EAAgBA,IAAIu4E,EAAOn8E,MAA3B,EAAmC4D,GAAnC,EACEu4E,EAAOv4E,CAAPu4E,IAAYp1D,EAAInjB,CAAJmjB,IAAS9hB,EAAQrB,CAARqB,CAAT8hB,GAAsB6pC,EAAWhtD,CAAXgtD,CAAlCurB,CAEF/wE,EAAOsR,GAAPtR,MAAAA,CAAAA,CAAAA,GAAW9K,EAAEic,GAAFjc,MAAAA,CAAAA,CAAAA,EAAS67E,CAAT77E,UAAqBymB,EAAhC3b;EAGF,YAAOA,EAAOo8C,QAAPp8C,GAAkBL,OAAlBK,CAA0BvJ,CAA1BuJ,CAAP;KA9MFowE,EAiNAA,WAAAA,QAAAA,GAAA,UAA0Bl7E,CAA1B,EAAgCoD,CAAhC;EACEkC,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,SAAzBA,EAKA,KAHA,IAAMwF,IAAS4wE,OAAW17E,EAAEuB,KAAbm6E,EAAoB17E,EAAEuD,KAAtBm4E,CAAf,EACMI,IAAU97E,EAAE8K,MAAF9K,EADhB,gBAGSK;EACP,UAAM07E,IAASjxE,EAAOmhD,UAAPnhD,CAAkBzK,CAAlByK,CAAf;EAAA,UACMkxE,IAAQD,EAAO54E,KAAP44E,EADd,CAEA34E,EAAKsC,OAALtC,CAAa,UAAA4jB,CAAA;EAAM,eAAAg1D,EAAMh1D,CAANg1D,IAAYh8E,EAAEuB,KAAFvB,CAAQgnB,CAARhnB,IAAc,CAAdA,GAAkBg8E,EAAMh1D,CAANg1D,CAA9B;SAAnB54E,GACA0H,EAAOsR,GAAPtR,MAAAA,CAAAA,CAAAA,GAAWgxE,EAAQ7/D,GAAR6/D,MAAAA,CAAAA,CAAAA,EAAeE,CAAfF,UAA0BC,EAArCjxE,CADA1H;OANF,EAGS/C,IAAI,CAAb,EAAgBA,IAAIyK,EAAOtJ,IAA3B,EAAiCnB,GAAjC,IAASA,GAOT,OAAOyK,EAAOo8C,QAAPp8C,EAAP;KA9NFowE,EAiOAA,WAAAA,OAAAA,GAAA,UAAO9tD,CAAP,EAA0BhqB,CAA1B;EACEkC,SAAKm2E,gBAALn2E,CAAsB8nB,CAAtB9nB,EAA+B,QAA/BA,EACA,IAAM22E,IAAY7uD,EAAQhmB,GAARgmB,CAAY,UAAA5O,CAAA;EAC5B,UAAM09D,IAAYjzE,cAAmBuV,EAAEjd,KAAFid,CAAQrb,KAARqb,CAAcpb,CAAdob,CAAnBvV,CAAlB,CACA,OAAOuV,EAAE2oC,IAAF3oC,EAAQ,CAARA,EAAW09D,CAAX19D,CAAP;OAFgB4O,CAAlB;EAAA,QAIMtG,IACF0P,gBAA4BylD,EAAU70E,GAAV60E,CAAc,UAAAz9D,CAAA;EAAK,aAAAA,EAAEjd,KAAF;OAAnB06E,CAA5BzlD,EAAyD,CAAzDA,CALJ;EAAA,QAMMhzB,IACFk4E,OAAoB50D,CAApB40D,EAAkDtuD,EAAQ,CAARA,EAAW7pB,KAA7Dm4E,EACKl4E,MART,CASA,IAA8B,MAA1By4E,EAAU,CAAVA,EAAa16E,KAAb06E,CAAmB,CAAnBA,CAAJ,EAAiC;EAE/B,UAAIE,IAAS,CAAb,CACAF,EAAUv2E,OAAVu2E,CAAkB,UAAAz9D,CAAA;EAChBhb,UAAO4Y,GAAP5Y,CAAWgb,EAAE7Y,QAAF6Y,EAAXhb,EAAyB24E,CAAzB34E,GACA24E,KAAU39D,EAAEhd,IADZgC;SADFy4E;OAHF,MAOO;EACL,UAAIG,IAAY,CAAhB,CACAH,EAAUv2E,OAAVu2E,CAAkB,UAAAz9D,CAAA;EAGhB,aAFA,IAAM69D,IAAQ79D,EAAE7Y,QAAF6Y,EAAd,EACI89D,IAAO,CADX,EAES10E,IAAM,CAAf,EAAkBA,IAAM4W,EAAEjd,KAAFid,CAAQ,CAARA,CAAxB,IAAsC5W,CAAtC,EAEE,KADA,IAAM20E,IAAS30E,IAAMkf,EAAS,CAATA,CAANlf,GAAoBw0E,CAAnC,EACS1lC,IAAM,CAAf,EAAkBA,IAAMl4B,EAAEjd,KAAFid,CAAQ,CAARA,CAAxB,IAAsCk4B,CAAtC,EACElzC,EAAO+4E,IAAS7lC,CAAhBlzC,IAAuB64E,EAAMC,GAAND,CAAvB74E,CAGJ44E,KAAa59D,EAAEjd,KAAFid,CAAQ,CAARA,CAAb49D;SATFH;EAYF,SAAMO,IACFhmD,gBAA4BpJ,EAAQhmB,GAARgmB,CAAY,UAAA5O,CAAA;EAAK,aAAAA,EAAEjd,KAAF;OAAjB6rB,CAA5BoJ,EAAuDpzB,CAAvDozB,CADJ,CAEA,OAAOje,OAAO/U,CAAP+U,EAAeikE,CAAfjkE,EAA8B6U,EAAQ,CAARA,EAAW7pB,KAAzCgV,CAAP;KAnQF2iE,EAsQAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EAGE,WAFAsF,KAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,GAEOA,KAAK2rD,QAAL3rD,CAAcm3E,QAAY,CAAZA,CAAdn3E,EAA8BtF,CAA9BsF,CAAP;KAzQF41E,EA4QAA,WAAAA,IAAAA,GAAA,UAAIl8E,CAAJ,EAAesB,CAAf;EACE,WAAgB,gBAAZtB,EAAEuE,KAAU,IAA2B,gBAAZjD,EAAEiD,KAAjB,GACP+B,KAAKo3E,0BAALp3E,CACItG,EAAE6L,IAAF7L,CAAO,WAAPA,CADJsG,EACyBhF,EAAEuK,IAAFvK,CAAO,WAAPA,CADzBgF,EAEI,UAACq3E,CAAD,EAAQC,CAAR,EAAeC,CAAf,EAAsBC,CAAtB;EACE,eAAQ7xD,MAAM0xD,IAAQE,GAAO3xD,MAAM0xD,IAAQE,GAA3C;OAHNx3E,CADO,GAQTA,KAAKy3E,mBAALz3E,CACItG,CADJsG,EACOhF,CADPgF,EACU0nB,WAAWhuB,EAAEuE,KAAbypB,EAAoB1sB,EAA
EiD,KAAtBypB,CADV1nB,EAEI,UAAC03E,CAAD,EAASC,CAAT;EAAoB,aAAAD,IAASC,CAAT;OAFxB33E,CARP;KA7QF41E,EA0RAA,WAAAA,KAAAA,GAAA,UAAuB9tD,CAAvB;EACE9nB,SAAKm2E,gBAALn2E,CAAsB8nB,CAAtB9nB,EAA+B,MAA/BA,EAKA,KAHA,IAAM1B,IAAOwpB,EAAQhmB,GAARgmB,CAAY,UAAA5O,CAAA;EAAK,aAAAA,EAAE7Y,QAAF6Y,EAAA;OAAjB4O,CAAb,EACM5sB,IAASk7E,OAAWtuD,EAAQ,CAARA,EAAW7rB,KAAtBm6E,EAA6BtuD,EAAQ,CAARA,EAAW7pB,KAAxCm4E,CADf,EAEMwB,IAAa18E,EAAOgD,MAF1B,EAGSnD,IAAI,CAAb,EAAgBA,IAAI+sB,EAAQ1tB,MAA5B,EAAoCW,GAApC,EAEE,KADA,IAAM88E,IAAWv5E,EAAKvD,CAALuD,CAAjB,EACSN,IAAI,CAAb,EAAgBA,IAAI45E,EAAWx9E,MAA/B,EAAuC4D,GAAvC,EACE45E,EAAW55E,CAAX45E,KAAiBC,EAAS75E,CAAT65E,CAAjBD,CAGJ,OAAO18E,EAAO0mD,QAAP1mD,EAAP;KAtSF06E,EAySAA,WAAAA,SAAAA,GAAA,UAASl8E,CAAT,EAAoBsB,CAApB;EACE,WAAgB,gBAAZtB,EAAEuE,KAAU,IAA2B,gBAAZjD,EAAEiD,KAAjB,GACP+B,KAAKo3E,0BAALp3E,CACItG,EAAE6L,IAAF7L,CAAO,WAAPA,CADJsG,EACyBhF,EAAEuK,IAAFvK,CAAO,WAAPA,CADzBgF,EAEI,UAACq3E,CAAD,EAAQC,CAAR,EAAeC,CAAf,EAAsBC,CAAtB;EACE,eAAQ7xD,MAAM0xD,IAAQE,GAAO3xD,MAAM0xD,IAAQE,GAA3C;OAHNx3E,CADO,GAQTA,KAAKy3E,mBAALz3E,CACItG,CADJsG,EACOhF,CADPgF,EACU0nB,WAAWhuB,EAAEuE,KAAbypB,EAAoB1sB,EAAEiD,KAAtBypB,CADV1nB,EAEI,UAAC03E,CAAD,EAASC,CAAT;EAAoB,aAAAD,IAASC,CAAT;OAFxB33E,CARP;KA1SF41E,EAuTAA,WAAAA,IAAAA,GAAA,UAAsBl8E,CAAtB,EAA4BsB,CAA5B;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,KAA9BA,GAEOA,KAAKy3E,mBAALz3E,CACItG,CADJsG,EACOhF,CADPgF,EACUtG,EAAEuE,KADZ+B,EACmB,UAAC03E,CAAD,EAASC,CAAT;EAAoB,aAAAp9E,KAAKmO,GAALnO,CAASm9E,CAATn9E,EAAiBo9E,CAAjBp9E,CAAA;OADvCyF,CAAP;KA1TF41E,EA+TAA,WAAAA,YAAAA,GAAA,UACIl8E,CADJ,EACiBsB,CADjB,EAC8B2L,CAD9B,EAEIC,CAFJ;EAGE5G,SAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,QAA9BA,EAqBA,KAnBA,IAAM4vC,IAAYjpC,IAAajN,EAAEuC,KAAFvC,CAAQ,CAARA,CAAbiN,GAA0BjN,EAAEuC,KAAFvC,CAAQ,CAARA,CAA5C,EACMo+E,IAAUnxE,IAAajN,EAAEuC,KAAFvC,CAAQ,CAARA,CAAbiN,GAA0BjN,EAAEuC,KAAFvC,CAAQ,CAARA,CAD1C,EAEMq+E,IAAWnxE,IAAa5L,EAAEiB,KAAFjB,CAAQ,CAARA,CAAb4L,GAA0B5L,EAAEiB,KAAFjB,CAAQ,CAARA,CAF3C,EAGMynB,IAAW/oB,EAAEuC,KAAFvC,CAAQ,CAARA,CAHjB,EAKMs+E,IAAUt+E,EAAE2G,QAAF3G,EALhB,EAMMu+E,IAAUj9E,EAAEqF,QAAFrF,EANhB,EAOMme,yEAPN,EAOO++D,QAPP,EAOeC,QAPf,EAO2BC,QAP3B,EAUM7lD,yEAVN,EAUO8lD,QAVP,EAUmBC,QAVnB,EAU+BC,QAV/B,EAcMr8E,IAAO47E,IAAUC,CAdvB,EAeM78E,IAAS,IAAIiD,YAAJ,CAAiBskB,IAAWvmB,CAA5B,CAff,EAiBM6T,IAAY/P,KAAK+P,SAjBvB,EAmBSyoE,IAAI,CAAb,EAAgBA,IAAI/1D,CAApB,EAA8B+1D,GAA9B,EACE,KAAK,IAAIC,IAAK,CAAd,EAAiBA,IAAKX,CAAtB,EAA+BW,KAAM1oE,CAArC,EACE,KAAK,IAAI2oE,IAAK,CAAd,EAAiBA,IAAKX,CAAtB,EAAgCW,KAAM3oE,CAAtC,EACE,KAAK,IAAI4oE,IAAK,CAAd,EAAiBA,IAAK/oC,CAAtB,EAAiC+oC,KAAM5oE,CAAvC,EAME,KAJA,IAAM6oE,IAASr+E,KAAKE,GAALF,CAASk+E,IAAK1oE,CAAdxV,EAAyBu9E,CAAzBv9E,CAAf,EACMs+E,IAASt+E,KAAKE,GAALF,CAASm+E,IAAK3oE,CAAdxV,EAAyBw9E,CAAzBx9E,CADf,EAEMu+E,IAASv+E,KAAKE,GAALF,CAASo+E,IAAK5oE,CAAdxV,EAAyBq1C,CAAzBr1C,CAFf,EAISQ,IAAI09E,CAAb,EAAiB19E,IAAI69E,CAArB,EAA6B79E,GAA7B,EACE,KAAK,IAAIiD,IAAI06E,CAAb,EAAiB16E,IAAI66E,CAArB,EAA6B76E,GAA7B,EAAkC;EAGhC,WAFA,IAAIlD,IAAM,CAAV,EAES2U,IAAIkpE,CAAb,EAAiBlpE,IAAIqpE,CAArB,EAA6BrpE,GAA7B,EACE3U,KAAOk9E,EAAQQ,IAAIN,CAAJM,GAAaz9E,IAAIo9E,CAAjBK,GAA8B/oE,IAAI2oE,CAA1CJ,IACHC,EAAQxoE,IAAI4oE,CAAJ5oE,GAAiBzR,IAAIs6E,CAArB7oE,GAAkC+oE,IAAID,CAA9CN,CADJn9E,CAGFI,EAAOs9E,IAAIt8E,CAAJs8E,IAAYz9E,IAAIg9E,CAAJh9E,GAAeiD,CAA3Bw6E,CAAPt9E,KAAyCJ,CAAzCI;EAQZ,YAAO69E,SAAa79E,CAAb69E,GAAsBt2D,GAAUq1D,GAASC,EAAzCgB,CAAP;KAhXFnD,EAmXAA,WAAAA,SAAAA,GAAA,UAASl8E,CAAT,EAAoBsB,CAApB;EACE,WAAgB,gBAAZtB,EAAEuE,KAAU,IAA2B,gBAAZjD,EAAEiD,KAAjB,GACP+B,KAAKo3E,0BAALp3E,CACItG,EAAE6L,IAAF7L,CAAO,WAAPA,CADJsG,EACyBhF,EAAEuK,IAAFvK,CAAO,WAAPA,CADzBgF,EAEI,UAACq3E,CAAD,EAAQC,CAAR,EAA
eC,CAAf,EAAsBC,CAAtB;EACE,eACE7xD,MAAM0xD,IAAQE,CAARF,GAAgBC,IAAQE,GAC9B5xD,MAAMyxD,IAAQG,CAARH,GAAgBC,IAAQC,GAFhC;OAHNv3E,CADO,GAWTA,KAAKy3E,mBAALz3E,CACItG,CADJsG,EACOhF,CADPgF,EACU0nB,WAAWhuB,EAAEuE,KAAbypB,EAAoB1sB,EAAEiD,KAAtBypB,CADV1nB,EAEI,UAAC03E,CAAD,EAASC,CAAT;EAAoB,aAAAD,IAASC,CAAT;OAFxB33E,CAXP;KApXF41E,EAoYAA,WAAAA,WAAAA,GAAA,UAAWl8E,CAAX,EAAsBsB,CAAtB;EACEgF,SAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,YAA9BA,EAIA,OAAOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EADa,SACbA,EAFI,UAACtG,CAAD,EAAYsB,CAAZ;EAA0B,aAAAtB,IAAIsB,CAAJ;OAE9BgF,CAAP;KAzYF41E,EA4YAA,WAAAA,SAAAA,GAAA,UAASl8E,CAAT,EAAoBsB,CAApB;EACEgF,SAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,UAA9BA,EAIA,OAAOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EADa,OACbA,EAFI,UAACtG,CAAD,EAAYsB,CAAZ;EAA0B,aAAAT,KAAKkC,KAALlC,CAAWb,IAAIsB,CAAfT,CAAA;OAE9ByF,CAAP;KAjZF41E,EAoZAA,WAAAA,IAAAA,GAAA,UAAIl7E,CAAJ,EAAesmB,CAAf;EACEhhB,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,GAEAkuD,2BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,CAFAluD,CAWA,KARM,IAAAmZ,yCAAA,EAACqI,QAAD,EAAWw3D,QAAX,EAGA99E,IAAS+9E,MAAUz3D,CAAVy3D,EADKvxD,WAAWhtB,EAAEuD,KAAbypB,EAAoB,OAApBA,CACLuxD,CAHT,EAIA9U,IAAaxgE,cAAmBq1E,CAAnBr1E,CAJb,EAKArF,IAAOpD,EAAOmF,QAAPnF,EALP,EAOAg+E,IAAQx+E,EAAE2F,QAAF3F,EAPR,EAQGK,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,IAAmCW,CAAnC,EAAsC;EAGpC,WAFA,IAAMwH,IAASxH,IAAIopE,CAAnB,EACIrpE,IAAM,CADV,EAESkD,IAAI,CAAb,EAAgBA,IAAImmE,CAApB,IAAkCnmE,CAAlC,EACElD,KAAOo+E,EAAM32E,IAASvE,CAAfk7E,CAAPp+E,CAEFwD,EAAKvD,CAALuD,IAAUxD,CAAVwD;EAEF,YAAOpD,CAAP;KAxaF06E,EA2aAA,WAAAA,KAAAA,GAAA,UAAKl7E,CAAL,EAAgBsmB,CAAhB;EACEhhB,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAUA,KARM,IAAAmZ,yCAAA,EAACqI,QAAD,EAAWw3D,QAAX,EAGA99E,IAAS+9E,MAAUz3D,CAAVy3D,EADKvxD,WAAWhtB,EAAEuD,KAAbypB,EAAoB,OAApBA,CACLuxD,CAHT,EAIA9U,IAAaxgE,cAAmBq1E,CAAnBr1E,CAJb,EAKArF,IAAOpD,EAAOmF,QAAPnF,EALP,EAOAg+E,IAAQx+E,EAAE2F,QAAF3F,EAPR,EAQGK,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,IAAmCW,CAAnC,EAAsC;EAGpC,WAFA,IAAMwH,IAASxH,IAAIopE,CAAnB,EACIj8D,IAAO,CADX,EAESlK,IAAI,CAAb,EAAgBA,IAAImmE,CAApB,IAAkCnmE,CAAlC,EACEkK,KAAQgxE,EAAM32E,IAASvE,CAAfk7E,CAARhxE,CAEF5J,EAAKvD,CAALuD,IAAU4J,CAAV5J;EAEF,YAAOpD,CAAP;KA9bF06E,EAicAA,WAAAA,mBAAAA,GAAA,UACIl7E,CADJ,EACUwU,CADV,EACgCC,CADhC;EAEEnP,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,oBAAzBA,EAOA,KALA,IAAMiV,MAAN,EAIMu2D,IAAW9wE,EAAE0E,IAAF1E,GAASwU,EAAW9P,IAJrC,EAKSrE,IAAI,CAAb,EAAgBA,IAAIywE,CAApB,IAAgCzwE,CAAhC,EACEmU,IAAaA,EAAWlJ,UAAXkJ,CAAsBnU,IAAI,CAA1BmU,CAAbA,CAGF,KAASnU,IAAI,CAAb,EAAgBA,IAAIoU,CAApB,IAAmCpU,CAAnC,EAAsC;EACpC,UAAMo+E,IAAYhC,OAAWp8E,CAAXo8E,EAAc,OAAdA,CAAlB;EAAA,UAEMr8E,IADOs+E,MAAUD,CAAVC,EAAqBlqE,CAArBkqE,EAAiCtzE,MAAjCszE,CAAwC,SAAxCA,EACIxwE,GADJwwE,CACQ1+E,CADR0+E,EACWt+E,GADXs+E,CACe,CADfA,CADb,CAGAnkE,EAAIjZ,IAAJiZ,CAASna,CAATma;EAGF,YAAOokE,MAAUpkE,CAAVokE,CAAP;KArdFzD,EAwdAA,WAAAA,OAAAA,GAAA,UAAOl7E,CAAP,EAAkBoD,CAAlB;EACEkC,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,QAAzBA,EAEA,IAAMghB,KAAQljB,EAAd,CACAowD,2BAAqC,QAArCA,EAA+CltC,CAA/CktC,EAAqDxzD,EAAE0E,IAAvD8uD,EAQA,KAPM,IAAA/0C,yCAAA,EAACqI,QAAD,EAAWw3D,QAAX,EAEA99E,IAAS+9E,MAAUz3D,CAAVy3D,EAAoB,OAApBA,CAFT,EAGA9U,IAAaxgE,cAAmBq1E,CAAnBr1E,CAHb,EAIArF,IAAOpD,EAAOmF,QAAPnF,EAJP,EAMAg+E,IAAQx+E,EAAE2F,QAAF3F,EANR,EAOGK,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,IAAmCW,CAAnC,EAAsC;EAIpC,WAHA,IAAMwH,IAASxH,IAAIopE,CAAnB,EACI1pE,IAAMy+E,EAAM32E,CAAN22E,CADV,EAEII,IAAW,CAFf,EAGSt7E,IAAI,CAAb,EAAgBA,IAAImmE,CAApB,IAAkCnmE,CAAlC,EAAqC;EACnC,YAAM+F,IAAQm1E,EAAM32E,IAASvE,CAAfk7E,CAAd,CACIn1E,IAAQtJ,CAARsJ,KACFtJ,IAAMsJ,CAANtJ,EACA6+E,IAAWt7E,CAFT+F;EAKN
zF,SAAKvD,CAALuD,IAAUg7E,CAAVh7E;EAEF,YAAOpD,CAAP;KAjfF06E,EAofAA,WAAAA,OAAAA,GAAA,UAAOl7E,CAAP,EAAkBoD,CAAlB;EACEkC,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,QAAzBA,EAEA,IAAMghB,KAAQljB,EAAd,CACAowD,2BAAqC,QAArCA,EAA+CltC,CAA/CktC,EAAqDxzD,EAAE0E,IAAvD8uD,EAQA,KAPM,IAAA/0C,yCAAA,EAACqI,QAAD,EAAWw3D,QAAX,EAEA99E,IAAS+9E,MAAUz3D,CAAVy3D,EAAoB,OAApBA,CAFT,EAGA9U,IAAaxgE,cAAmBq1E,CAAnBr1E,CAHb,EAIArF,IAAOpD,EAAOmF,QAAPnF,EAJP,EAMAg+E,IAAQx+E,EAAE2F,QAAF3F,EANR,EAOGK,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,IAAmCW,CAAnC,EAAsC;EAIpC,WAHA,IAAMwH,IAASxH,IAAIopE,CAAnB,EACIxpE,IAAMu+E,EAAM32E,CAAN22E,CADV,EAEIK,IAAW,CAFf,EAGSv7E,IAAI,CAAb,EAAgBA,IAAImmE,CAApB,IAAkCnmE,CAAlC,EAAqC;EACnC,YAAM+F,IAAQm1E,EAAM32E,IAASvE,CAAfk7E,CAAd,CACIn1E,IAAQpJ,CAARoJ,KACFpJ,IAAMoJ,CAANpJ,EACA4+E,IAAWv7E,CAFT+F;EAKNzF,SAAKvD,CAALuD,IAAUi7E,CAAVj7E;EAEF,YAAOpD,CAAP;KA7gBF06E,EAghBAA,WAAAA,OAAAA,GAAA,UAAOl7E,CAAP,EAAkBoD,CAAlB,EAAgCmI,CAAhC,EAAoDC,CAApD;EAIE,QAFAlG,KAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,QAAzBA,GAEIlC,MAASpD,EAAE0E,IAAF1E,GAAS,CAAtB,EACE,MAAM,IAAInB,KAAJ,CACF,uDAAoDmB,EAAE0E,IAAF1E,GAAS,CAA7D,oBAAA,GACgBoD,CAFd,CAAN,CAaF,KATA,IAAM07E,IAAc9xD,WAAWhtB,EAAEuD,KAAbypB,EAAoB,OAApBA,CAApB,EACMxsB,IAAS+9E,MAAUv+E,EAAEuB,KAAZg9E,EAAmBO,CAAnBP,CADf,EAEM36E,IAAOpD,EAAOmF,QAAPnF,EAFb,EAIMg+E,IAAQx+E,EAAE2F,QAAF3F,EAJd,EAKM2+B,IAAW3+B,EAAEuB,KAAFvB,CAAQA,EAAE0E,IAAF1E,GAAS,CAAjBA,CALjB,EAMM++E,IAAgBvzE,IAClB,UAACnL,CAAD,EAAYiD,CAAZ;EAA0B,aAAAjD,IAAIs+B,CAAJt+B,GAAeiD,CAAfjD,GAAmB,CAAnB;OADRmL,GAElB,UAACnL,CAAD,EAAYiD,CAAZ;EAA0B,aAAAjD,IAAIiD,CAAJ;OAR9B,EASSjD,IAAI,CAAb,EAAgBA,IAAIm+E,EAAM9+E,MAA1B,EAAkCW,KAAKs+B,CAAvC,EACE,KAAK,IAAIr7B,IAAI,CAAb,EAAgBA,IAAIq7B,CAApB,EAA8Br7B,GAA9B,EAAmC;EACjC,UAAM8Z,IAAM2hE,EAAc1+E,CAAd0+E,EAAiBz7E,CAAjBy7E,CAAZ,CACA,IAAU,MAANz7E,CAAJ,EACEM,EAAKwZ,CAALxZ,IAAY2H,IAAY,CAAZA,GAAgBizE,EAAMphE,CAANohE,CAA5B56E,CADF,KAEO;EACL,YAAMo7E,IAAUD,EAAc1+E,CAAd0+E,EAAiBz7E,IAAI,CAArBy7E,CAAhB,CACAn7E,EAAKwZ,CAALxZ,IAAY2H,IAAYizE,EAAMQ,CAANR,IAAiB56E,EAAKo7E,CAALp7E,CAA7B2H,GACYizE,EAAMphE,CAANohE,IAAa56E,EAAKo7E,CAALp7E,CADrCA;;EAKN,YAAOpD,CAAP;KA9iBF06E,EAijBAA,WAAAA,MAAAA,GAAA,UAAMl8E,CAAN,EAAiBsB,CAAjB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,OAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+B,MAA/BA,EAAuC,UAAC25E,CAAD,EAAOC,CAAP;EAC5C,aAAQD,MAASC,CAATD,GAAiB,CAAjBA,GAAqB,CAA7B;OADK35E,CAAP;KApjBF41E,EAyjBAA,WAAAA,SAAAA,GAAA,UAASl8E,CAAT,EAAoBsB,CAApB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,UAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+B,MAA/BA,EAAuC,UAAC25E,CAAD,EAAOC,CAAP;EAC5C,aAAQD,MAASC,CAATD,GAAiB,CAAjBA,GAAqB,CAA7B;OADK35E,CAAP;KA5jBF41E,EAikBAA,WAAAA,KAAAA,GAAA,UAAKl8E,CAAL,EAAgBsB,CAAhB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,MAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+B,MAA/BA,EAAuC,UAAC25E,CAAD,EAAOC,CAAP;EAC5C,aAAQD,IAAOC,CAAPD,GAAe,CAAfA,GAAmB,CAA3B;OADK35E,CAAP;KApkBF41E,EAykBAA,WAAAA,UAAAA,GAAA,UAAUl8E,CAAV,EAAqBsB,CAArB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,WAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+B,MAA/BA,EAAuC,UAAC25E,CAAD,EAAOC,CAAP;EAC5C,aAAQD,KAAQC,CAARD,GAAgB,CAAhBA,GAAoB,CAA5B;OADK35E,CAAP;KA5kBF41E,EAilBAA,WAAAA,QAAAA,GAAA,UAAQl8E,CAAR,EAAmBsB,CAAnB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,SAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+B,MAA/BA,EAAuC,UAAC25E,CAAD,EAAOC,CAAP;EAC5C,aAAQD,IAAOC,CAAPD,GAAe,CAAfA,GAAmB,CAA3B;OADK35E,CAAP;KAplBF41E,EAylBAA,WAAAA,aAAAA,GAAA,UAAal8E,CAAb,EAAwBsB,C
AAxB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,cAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+B,MAA/BA,EAAuC,UAAC25E,CAAD,EAAOC,CAAP;EAC5C,aAAQD,KAAQC,CAARD,GAAgB,CAAhBA,GAAoB,CAA5B;OADK35E,CAAP;KA5lBF41E,EAimBAA,WAAAA,WAAAA,GAAA,UAA6Bl7E,CAA7B;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,YAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAIz7E,UAAJ,CAAeF,EAAO9D,MAAtB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACE8+E,EAAU9+E,CAAV8+E,IAAe37E,EAAOnD,CAAPmD,IAAY,CAAZA,GAAgB,CAA/B27E,CAEF,OAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,EAA0C,MAA1CA,CAAP;KAzmBFwxE,EA4mBAA,WAAAA,WAAAA,GAAA,UAAWl8E,CAAX,EAAsBsB,CAAtB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,YAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+B,MAA/BA,EAAuC,UAAC25E,CAAD,EAAOC,CAAP;EAC5C,aAAOD,KAAQC,CAAf;OADK55E,CAAP;KA/mBF41E,EAonBAA,WAAAA,UAAAA,GAAA,UAAUl8E,CAAV,EAAqBsB,CAArB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,WAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+B,MAA/BA,EAAuC,UAAC25E,CAAD,EAAOC,CAAP;EAC5C,aAAOD,KAAQC,CAAf;OADK55E,CAAP;KAvnBF41E,EA4nBAA,WAAAA,OAAAA,GAAA,UAAOjrE,CAAP,EAA0BjR,CAA1B,EAAqCsB,CAArC;EACEgF,SAAKm2E,gBAALn2E,EAAuB2K,GAAWjR,GAAGsB,EAArCgF,EAAyC,QAAzCA,EAYA,KAVA,IAAM9B,IAASyM,EAAUtK,QAAVsK,EAAf,EACMqtE,IAAUt+E,EAAE2G,QAAF3G,EADhB,EAEMu+E,IAAUj9E,EAAEqF,QAAFrF,EAFhB,EAGME,IAAS+9E,MAAUv/E,EAAEuC,KAAZg9E,EAAmBvxD,WAAWhuB,EAAEuE,KAAbypB,EAAoB1sB,EAAEiD,KAAtBypB,CAAnBuxD,CAHf,EAIMY,IAAY3+E,EAAOmF,QAAPnF,EAJlB,EAKIZ,IAAQ,CALZ,EAMMiI,IAA4B,MAAnBoI,EAAUvL,IAAS,IAAKuL,EAAUvL,IAAVuL,GAAiB,CAAtB,IAAsC,MAAXjR,EAAE0F,IAA7B,GAC9B,CAD8B,GAE9B1F,EAAEuC,KAAFvC,CAAQ,CAARA,CARJ,EAUSqB,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,EAAmCW,GAAnC,EACE,KAAK,IAAIiD,IAAI,CAAb,EAAgBA,IAAIuE,CAApB,EAA4BvE,GAA5B,EACoB,MAAdE,EAAOnD,CAAPmD,CAAc,GAChB27E,EAAUv/E,GAAVu/E,IAAqB7B,EAAQj9E,CAARi9E,CADL,GAGhB6B,EAAUv/E,GAAVu/E,IAAqB5B,EAAQl9E,CAARk9E,CAHL,CAOtB,OAAO/8E,CAAP;KAlpBF06E,EAqpBAA,WAAAA,MAAAA,GAAA,UAAMjrE,CAAN;EACE3K,SAAKm2E,gBAALn2E,EAAuB2K,EAAvB3K,EAAmC,OAAnCA,EAEA,IAAMymD,IAAW97C,EAAUtK,QAAVsK,EAAjB,CACA,OAAO6kD,UAAU7kD,EAAU1O,KAApBuzD,EAA2B/I,CAA3B+I,CAAP;KAzpBFomB,EA4pBAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB,EAA6B+U,CAA7B,EAAwCC,CAAxC;EAIE,WAHA1P,KAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,GAGOyvD,SADO/0D,EAAE2F,QAAF3F,EACP+0D,EAAgB/0D,EAAEuB,KAAlBwzD,EAAyB/0D,EAAEuD,KAA3BwxD,EAAkChgD,CAAlCggD,EAAqC//C,CAArC+/C,CAAP;KAhqBFmmB,EAmqBAA,WAAAA,IAAAA,GAAA,UAAIl7E,CAAJ,EAAesmB,CAAf;EACEhhB,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,GAEAkuD,2BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,CAFAluD,CAUA,KAPM,IAAAmZ,yCAAA,EAACqI,QAAD,EAAWw3D,QAAX,EAEA99E,IAAS+9E,MAAUz3D,CAAVy3D,EAAoBv+E,EAAEuD,KAAtBg7E,CAFT,EAGA9U,IAAaxgE,cAAmBq1E,CAAnBr1E,CAHb,EAIArF,IAAOpD,EAAOmF,QAAPnF,EAJP,EAMAg+E,IAAQx+E,EAAE2F,QAAF3F,EANR,EAOGK,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,IAAmCW,CAAnC,EAAsC;EAGpC,WAFA,IAAMwH,IAASxH,IAAIopE,CAAnB,EACI1pE,IAAMy+E,EAAM32E,CAAN22E,CADV,EAESl7E,IAAI,CAAb,EAAgBA,IAAImmE,CAApB,IAAkCnmE,CAAlC,EAAqC;EACnC,YAAM+F,IAAQm1E,EAAM32E,IAASvE,CAAfk7E,CAAd,CACIn1E,IAAQtJ,CAARsJ,KACFtJ,IAAMsJ,CADJA;EAINzF,SAAKvD,CAALuD,IAAU7D,CAAV6D;EAEF,YAAOpD,CAAP;KAzrBF06E,EA4rBAA,WAAAA,QAAAA,GAAA,UAAQl8E,CAAR,EAAmBsB,CAAnB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,SAA9BA,GAEOA,KAAKy3E,mBAALz3E,CACHtG,CADGsG,EACAhF,CADAgF,EACGtG,EAAEuE,KADL+B,EACY,UAAC25E,CAAD,EAAOC,CAAP;EAAgB,aAAAr/E,KAAKE,GAALF,CAASo/E,CAATp/E,EAAeq/E,CAAfr/E,CAAA;OAD5ByF,CAAP;KA/rBF41E,EAmsBAA,WAAAA,IAAAA,GAAA,UAAIl8E,
CAAJ,EAAesB,CAAf;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,KAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+BtG,EAAEuE,KAAjC+B,EAAwC,UAAC25E,CAAD,EAAOC,CAAP;EAC7C,UAAME,IAAMH,IAAOC,CAAnB,CACA,OAAKD,IAAO,CAAPA,IAAYC,IAAO,CAAnBD,IAA0BA,KAAQ,CAARA,IAAaC,KAAQ,CAA/CD,GACIG,CADJH,IAGKG,IAAMF,KAAQA,CAHxB;OAFK55E,CAAP;KAtsBF41E,EAgtBAA,WAAAA,IAAAA,GAAA,UAAIl7E,CAAJ,EAAesmB,CAAf;EACEhhB,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,GAEAkuD,2BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,CAFAluD,CAUA,KAPM,IAAAmZ,yCAAA,EAACqI,QAAD,EAAWw3D,QAAX,EAEA99E,IAAS+9E,MAAUz3D,CAAVy3D,EAAoBv+E,EAAEuD,KAAtBg7E,CAFT,EAGA9U,IAAaxgE,cAAmBq1E,CAAnBr1E,CAHb,EAIArF,IAAOpD,EAAOmF,QAAPnF,EAJP,EAMAg+E,IAAQx+E,EAAE2F,QAAF3F,EANR,EAOGK,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,IAAmCW,CAAnC,EAAsC;EAGpC,WAFA,IAAMwH,IAASxH,IAAIopE,CAAnB,EACIxpE,IAAMu+E,EAAM32E,CAAN22E,CADV,EAESl7E,IAAI,CAAb,EAAgBA,IAAImmE,CAApB,IAAkCnmE,CAAlC,EAAqC;EACnC,YAAM+F,IAAQm1E,EAAM32E,IAASvE,CAAfk7E,CAAd,CACIn1E,IAAQpJ,CAARoJ,KACFpJ,IAAMoJ,CADJA;EAINzF,SAAKvD,CAALuD,IAAU3D,CAAV2D;EAEF,YAAOpD,CAAP;KAtuBF06E,EAyuBAA,WAAAA,QAAAA,GAAA,UAAQl8E,CAAR,EAAmBsB,CAAnB;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,SAA9BA,GAEOA,KAAKy3E,mBAALz3E,CACHtG,CADGsG,EACAhF,CADAgF,EACGtG,EAAEuE,KADL+B,EACY,UAAC25E,CAAD,EAAOC,CAAP;EAAgB,aAAAr/E,KAAKI,GAALJ,CAASo/E,CAATp/E,EAAeq/E,CAAfr/E,CAAA;OAD5ByF,CAAP;KA5uBF41E,EAgvBAA,WAAAA,IAAAA,GAAA,UAAIl7E,CAAJ,EAAesmB,CAAf;EACEhhB,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,GAEAkuD,2BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,CAFAluD,CAUA,KAPM,IAAAmZ,yCAAA,EAACqI,QAAD,EAAWw3D,QAAX,EAEA99E,IAAS+9E,MAAUz3D,CAAVy3D,EAAoBv+E,EAAEuD,KAAtBg7E,CAFT,EAGA9U,IAAaxgE,cAAmBq1E,CAAnBr1E,CAHb,EAIArF,IAAOpD,EAAOmF,QAAPnF,EAJP,EAMAg+E,IAAQx+E,EAAE2F,QAAF3F,EANR,EAOGK,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,IAAmCW,CAAnC,EAAsC;EAGpC,WAFA,IAAMwH,IAASxH,IAAIopE,CAAnB,EACIp8D,IAAMmxE,EAAM32E,CAAN22E,CADV,EAESl7E,IAAI,CAAb,EAAgBA,IAAImmE,CAApB,IAAkCnmE,CAAlC,EAAqC;EACnC,YAAM+F,IAAQm1E,EAAM32E,IAASvE,CAAfk7E,CAAd,CACAnxE,IAAMA,KAAOhE,CAAbgE;EAEFzJ,SAAKvD,CAALuD,IAAUyJ,CAAVzJ;EAEF,YAAOpD,CAAP;KApwBF06E,EAuwBAA,WAAAA,IAAAA,GAAA,UAAIl7E,CAAJ,EAAesmB,CAAf;EACEhhB,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,GAEAkuD,2BAAqC,KAArCA,EAA4CltC,CAA5CktC,EAAkDxzD,EAAE0E,IAApD8uD,CAFAluD,CAUA,KAPM,IAAAmZ,yCAAA,EAACqI,QAAD,EAAWw3D,QAAX,EAEA99E,IAAS+9E,MAAUz3D,CAAVy3D,EAAoBv+E,EAAEuD,KAAtBg7E,CAFT,EAGA9U,IAAaxgE,cAAmBq1E,CAAnBr1E,CAHb,EAIArF,IAAOpD,EAAOmF,QAAPnF,EAJP,EAMAg+E,IAAQx+E,EAAE2F,QAAF3F,EANR,EAOGK,IAAI,CAAb,EAAgBA,IAAIuD,EAAKlE,MAAzB,IAAmCW,CAAnC,EAAsC;EAGpC,WAFA,IAAMwH,IAASxH,IAAIopE,CAAnB,EACI4V,IAASb,EAAM32E,CAAN22E,CADb,EAESl7E,IAAI,CAAb,EAAgBA,IAAImmE,CAApB,IAAkCnmE,CAAlC,EAAqC;EACnC,YAAM+F,IAAQm1E,EAAM32E,IAASvE,CAAfk7E,CAAd,CACAa,IAASA,KAAUh2E,CAAnBg2E;EAEFz7E,SAAKvD,CAALuD,IAAUy7E,CAAVz7E;EAEF,YAAOpD,CAAP;KA3xBF06E,EA8xBAA,WAAAA,kBAAAA,GAAA,UAAkBl8E,CAAlB,EAA6BsB,CAA7B;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,mBAA9BA,GAEOA,KAAKy3E,mBAALz3E,CAAyBtG,CAAzBsG,EAA4BhF,CAA5BgF,EAA+BtG,EAAEuE,KAAjC+B,EAAwC,UAAC25E,CAAD,EAAOC,CAAP;EAC7C,UAAMz+E,IAAOw+E,IAAOC,CAApB,CACA,OAAOz+E,IAAOA,CAAd;OAFK6E,CAAP;KAjyBF41E,EAuyBAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACE8+E,EAAU9+E,CAAV8+E,IAAet/E,KAAKuQ,IAALvQ,CAAU2D,EAAOnD,CAAPmD,CAAV3D,CAAfs/E,CAEF,OAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG
,QAAQ27E,GAA9Bz1E,CAAP;KA/yBFwxE,EAkzBAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACE8+E,EAAU9+E,CAAV8+E,IAAet/E,KAAKkC,KAALlC,CAAW2D,EAAOnD,CAAPmD,CAAX3D,CAAfs/E,CAEF,OAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KA1zBFwxE,EA6zBAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,GAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACMmD,EAAOnD,CAAPmD,IAAY,CAAZA,GACF27E,EAAU9+E,CAAV8+E,KAAgB,CADd37E,GAEOA,EAAOnD,CAAPmD,IAAY,CAAZA,GACT27E,EAAU9+E,CAAV8+E,IAAe,CADN37E,GAGT27E,EAAU9+E,CAAV8+E,IAAe,CALb37E,CAQN,OAAOkG,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KA30BFwxE,EA80BAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EAEtC,UAAM2rE,IAAOnsE,KAAKkC,KAALlC,CAAW2D,EAAOnD,CAAPmD,CAAX3D,CAAb,CACI2D,EAAOnD,CAAPmD,IAAYwoE,CAAZxoE,GAAmB,EAAnBA,GACF27E,EAAU9+E,CAAV8+E,IAAet/E,KAAKkC,KAALlC,CAAW2D,EAAOnD,CAAPmD,CAAX3D,CADb2D,GAEOA,EAAOnD,CAAPmD,IAAYwoE,CAAZxoE,GAAmB,EAAnBA,GACT27E,EAAU9+E,CAAV8+E,IAAet/E,KAAKuQ,IAALvQ,CAAU2D,EAAOnD,CAAPmD,CAAV3D,CADN2D,GAIP27E,EAAU9+E,CAAV8+E,IADEnT,IAAO,CAAPA,IAAe,CAAfA,GACaA,CADbA,GAGaA,IAAO,CARtBxoE;EAYN,YAAOkG,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KAl2BFwxE,EAq2BAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACE8+E,EAAU9+E,CAAV8+E,IAAet/E,KAAKiC,GAALjC,CAAS2D,EAAOnD,CAAPmD,CAAT3D,CAAfs/E,CAEF,OAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KA72BFwxE,EAg3BAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACE8+E,EAAU9+E,CAAV8+E,IAAet/E,KAAKyQ,KAALzQ,CAAW2D,EAAOnD,CAAPmD,CAAX3D,CAAfs/E,CAEF,OAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KAx3BFwxE,EA23BAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMgJ,IAAQ7F,EAAOnD,CAAPmD,CAAd,CACA27E,EAAU9+E,CAAV8+E,IAAet/E,KAAK4G,GAAL5G,CAASwJ,CAATxJ,CAAfs/E;EAEF,YAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KAp4BFwxE,EAu4BAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMgJ,IAAQ7F,EAAOnD,CAAPmD,CAAd,CACA27E,EAAU9+E,CAAV8+E,IAAet/E,KAAK0Q,KAAL1Q,CAAWwJ,CAAXxJ,CAAfs/E;EAEF,YAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KAh5BFwxE,EAm5BAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IA
AY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMgJ,IAAQ7F,EAAOnD,CAAPmD,CAAd,CACA27E,EAAU9+E,CAAV8+E,IAAet/E,KAAKmC,IAALnC,CAAUwJ,CAAVxJ,CAAfs/E;EAEF,YAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KA55BFwxE,EA+5BAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMgJ,IAAQ7F,EAAOnD,CAAPmD,CAAd,CACA27E,EAAU9+E,CAAV8+E,IAAe,IAAIt/E,KAAKmC,IAALnC,CAAUwJ,CAAVxJ,CAAnBs/E;EAEF,YAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KAx6BFwxE,EA26BAA,WAAAA,OAAAA,GAAA,UAAyBl7E,CAAzB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,QAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMgJ,IAAQ7F,EAAOnD,CAAPmD,CAAd,CACA27E,EAAU9+E,CAAV8+E,IAAe91E,IAAQA,CAAvB81E;EAEF,YAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KAp7BFwxE,EAu7BAA,WAAAA,WAAAA,GAAA,UAA6Bl7E,CAA7B;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,YAAzBA,EAIA,KAFA,IAAM9B,IAASxD,EAAE2F,QAAF3F,EAAf,EACMm/E,IAAY,IAAI17E,YAAJ,CAAiBD,EAAO9D,MAAxB,CADlB,EAESW,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACE8+E,EAAU9+E,CAAV8+E,IAAe,IAAI37E,EAAOnD,CAAPmD,CAAnB27E,CAEF,OAAOz1E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQ27E,GAA9Bz1E,CAAP;KA/7BFwxE,EAk8BAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAKA,KAHA,IAAMiV,IAAMgkE,MAAUv+E,EAAEuB,KAAZg9E,EAAmBv+E,EAAEuD,KAArBg7E,CAAZ,EACMe,IAAU/kE,EAAI5U,QAAJ4U,EADhB,EAEMglE,IAASv/E,EAAE2F,QAAF3F,EAFf,EAGSK,IAAI,CAAb,EAAgBA,IAAIk/E,EAAO7/E,MAA3B,IAAqCW,CAArC,EACEi/E,EAAQj/E,CAARi/E,IAAaz/E,KAAKI,GAALJ,CAAS,CAATA,EAAY0/E,EAAOl/E,CAAPk/E,CAAZ1/E,CAAby/E,CAEF,OAAO/kE,CAAP;KA38BF2gE,EA88BAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMic,IAAI9Y,EAAOnD,CAAPmD,CAAV,CAEEg8E,EAAan/E,CAAbm/E,IADEljE,KAAK,CAALA,GACgBA,CADhBA,GAGiBzc,KAAKiC,GAALjC,CAASyc,CAATzc,IAAc,CAFjC2/E;EAKJ,YAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA39BFwxE,EA89BAA,WAAAA,OAAAA,GAAA,UAAyBnjE,CAAzB,EAAgCrB,CAAhC;EACEpR,SAAKm2E,gBAALn2E,EAAuByS,GAAIrB,EAA3BpR,EAA+B,QAA/BA,EAKA,KAHA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBiT,EAAElV,IAAnB,CAArB,EACMgC,IAASkT,EAAE/Q,QAAF+Q,EADf,EAEM+oE,IAAW1nE,EAAGpS,QAAHoS,EAFjB,EAGS1X,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMic,IAAI9Y,EAAOnD,CAAPmD,CAAV,CAEEg8E,EAAan/E,CAAbm/E,IADEljE,KAAK,CAALA,GACgBmjE,EAASp/E,CAATo/E,CADhBnjE,GAGgBmjE,EAASp/E,CAATo/E,KAAenjE,IAAI,CAAnBmjE,CAFlBD;EAKJ,YAAO91E,OAAOC,IAAPD,CAAYgN,EAAEnV,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA5+BFwxE,EA++BAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EASA,KALA,IAAMipE,IAAalwB,eAAnB,EACMlxC,IAAQmxC,UADd,EAGMkhC,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAHrB,EAIMgC,IAASxD,EAAE2F,QAAF3F,EAJf,EAKSK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMic,IAAI9Y,EAAOnD,CAAPmD,CAAV,CAEEg8E,EAAan/E,CAAbm/E,IADEljE,KAAK,CAALA,GACgBnP,IAAQmP,CADxBA,GAGgBiyD,KAAc1uE,KAAKiC,GAALjC,CAASyc,CAATzc,IAAc,CAA5B0uE,CAFlBiR;EAKJ,YAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,G
AA9B91E,CAAP;KAjgCFwxE,EAogCAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB,EAA6BD,CAA7B,EAA0CE,CAA1C;EACEqF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMic,IAAI9Y,EAAOnD,CAAPmD,CAAV,CACAg8E,EAAan/E,CAAbm/E,IAAkBljE,IAAIrc,CAAJqc,GAAUrc,CAAVqc,GAAiBA,IAAIvc,CAAJuc,GAAUvc,CAAVuc,GAAgBA,CAAnDkjE;EAEF,YAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA7gCFwxE,EAghCAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EAGE,SAFA,IAAMw/E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAK8Q,GAAL9Q,CAAS2D,EAAOnD,CAAPmD,CAAT3D,CAAlB2/E,CAGF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAvhCFwxE,EA0hCAA,WAAAA,WAAAA,GAAA,UAA6Bl7E,CAA7B;EAIE,SAHA,IAAMw/E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAGSK,IAAI,CAAb,EAAgBA,IAAIL,EAAEwB,IAAtB,IAA8BnB,CAA9B,EAAiC;EAC/B,UAAM4qB,IAAOznB,EAAW,IAAJnD,CAAPmD,CAAb;EAAA,UACM0nB,IAAO1nB,EAAW,IAAJnD,CAAI,GAAI,CAAfmD,CADb,CAEAg8E,EAAan/E,CAAbm/E,IAAkB3/E,KAAKmC,IAALnC,CAAUorB,IAAOA,CAAPA,GAAcC,IAAOA,CAA/BrrB,CAAlB2/E;EAEF,YAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAniCFwxE,EAsiCAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI97E,UAAJ,CAAe1D,EAAEwB,IAAjB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkBh8E,EAAOnD,CAAPmD,CAAlBg8E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,EAA6C,OAA7CA,CAAP;KA9iCFwxE,EAijCAA,WAAAA,QAAAA,GAAA,UAA0Bl7E,CAA1B;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,SAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB,KAAK,IAAI3/E,KAAKiC,GAALjC,EAAU2D,EAAOnD,CAAPmD,CAAV3D,CAAT,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAzjCFwxE,EA4jCAA,WAAAA,SAAAA,GAAA,UAA2Bl7E,CAA3B;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,UAAzBA,EAaA,KANA,IACMo6E,IAAY7/E,KAAK4G,GAAL5G,CADF,qBACEA,IAAoB,CADtC,EAGM2/E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAHrB,EAIMgC,IAASxD,EAAE2F,QAAF3F,EAJf,EAMSK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EAGtC,UAAMs/E,IAAWn8E,EAAOnD,CAAPmD,KAAak8E,CAA9B;EAAA,UAIME,IAAWp8E,EAAOnD,CAAPmD,IAAYk8E,CAJ7B;EAAA,UAMMG,IAAOhgF,KAAKiC,GAALjC,CAAS2D,EAAOnD,CAAPmD,CAAT3D,CANb;EAAA,UAOIW,UAPJ,CAUEA,IADEo/E,IACOC,CADPD,GAEOD,IACAn8E,EAAOnD,CAAPmD,CADAm8E,GAGA9/E,KAAK4G,GAAL5G,CAAS,IAAMggF,CAAfhgF,CAJTW,EAMFg/E,EAAan/E,CAAbm/E,IAAkBh/E,CANhBA;EAQJ,YAAOkJ,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA/lCFwxE,EAkmCAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAK0R,GAAL1R,CAAS2D,EAAOnD,CAAPmD,CAAT3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA1mCFwxE,EA6mCAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgB
A,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAK2R,GAAL3R,CAAS2D,EAAOnD,CAAPmD,CAAT3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KArnCFwxE,EAwnCAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAK4R,GAAL5R,CAAS2D,EAAOnD,CAAPmD,CAAT3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAhoCFwxE,EAmoCAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAK6R,IAAL7R,CAAU2D,EAAOnD,CAAPmD,CAAV3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA3oCFwxE,EA8oCAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAK8R,IAAL9R,CAAU2D,EAAOnD,CAAPmD,CAAV3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAtpCFwxE,EAypCAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAK+R,IAAL/R,CAAU2D,EAAOnD,CAAPmD,CAAV3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAjqCFwxE,EAoqCAA,WAAAA,MAAAA,GAAA,UAAwBl8E,CAAxB,EAA8BsB,CAA9B;EAGE,WAFAgF,KAAKm2E,gBAALn2E,EAAuBtG,GAAGsB,EAA1BgF,EAA8B,OAA9BA,GAEOA,KAAKy3E,mBAALz3E,CACItG,CADJsG,EACOhF,CADPgF,EACUtG,EAAEuE,KADZ+B,EACmB,UAAC03E,CAAD,EAASC,CAAT;EAAoB,aAAAp9E,KAAKgO,KAALhO,CAAWm9E,CAAXn9E,EAAmBo9E,CAAnBp9E,CAAA;OADvCyF,CAAP;KAvqCF41E,EA4qCAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAKgS,IAALhS,CAAU2D,EAAOnD,CAAPmD,CAAV3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAprCFwxE,EAurCAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAKiS,IAALjS,CAAU2D,EAAOnD,CAAPmD,CAAV3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA/rCFwxE,EAksCAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkBM,KAAUt8E,EAAOnD,CAAPmD,CAAVs8E,CAAlBN,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA1sCFwxE,EA6sCAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,
CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAKkS,KAALlS,CAAW2D,EAAOnD,CAAPmD,CAAX3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KArtCFwxE,EAwtCAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAKmS,KAALnS,CAAW2D,EAAOnD,CAAPmD,CAAX3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAhuCFwxE,EAmuCAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,OAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEm/E,EAAan/E,CAAbm/E,IAAkB3/E,KAAKoS,KAALpS,CAAW2D,EAAOnD,CAAPmD,CAAX3D,CAAlB2/E,CAEF,OAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KA3uCFwxE,EA8uCAA,WAAAA,IAAAA,GAAA,UAAsBl7E,CAAtB;EACEsF,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAUA,KARA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAEM+2C,IAAIkJ,KAFV,EAGM8/B,IAAK7/B,MAHX,EAIM8/B,IAAK7/B,MAJX,EAKM8/B,IAAK7/B,MALX,EAMM8/B,IAAK7/B,MANX,EAOM8/B,IAAK7/B,MAPX,EAQSjgD,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMic,IAAI9Y,EAAOnD,CAAPmD,CAAV;EAAA,UACMgb,IAAI,KAAO,IAAMu4B,IAAIz6B,CAAjB,CADV,CAEAkjE,EAAan/E,CAAbm/E,IAAkB,QACTW,IAAK3hE,CAAL2hE,GAASD,KAAM1hE,IAAKyhE,KAAMzhE,IAAIwhE,KAAMxhE,IAAIuhE,KAAMvhE,IAC/C3e,KAAKiC,GAALjC,EAAUyc,IAAIA,CAAdzc,CAFR2/E;EAIF,YAAO91E,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAhwCFwxE,EAmwCAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB,EAA6BhD,CAA7B;uBAA6BA,QAC3BsI,KAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAIA,KAFA,IAAMk6E,IAAe,IAAI/7E,YAAJ,CAAiBzD,EAAEwB,IAAnB,CAArB,EACMgC,IAASxD,EAAE2F,QAAF3F,EADf,EAESK,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,UAAMgJ,IAAQ7F,EAAOnD,CAAPmD,CAAd,CACIM,MAAMuF,CAANvF,IACF07E,EAAan/E,CAAbm/E,IAAkBjpD,GADhBzyB,GAGF07E,EAAan/E,CAAbm/E,IAAkBn2E,IAAQ,CAARA,GAAY,CAAZA,GAAgBrM,CAHhC8G;EAMN,YAAO4F,OAAOC,IAAPD,CAAY1J,EAAEuB,KAAdmI,IAAsBlG,QAAQg8E,GAA9B91E,CAAP;KAhxCFwxE,EAmxCAA,WAAAA,OAAAA,GAAA,UAAOl7E,CAAP,EAAoB4S,CAApB,EAAsCya,CAAtC;EACE/nB,SAAKm2E,gBAALn2E,EAAuBtF,GAAG4S,EAA1BtN,EAAmC,QAAnCA,EAcA,KAZA,IAAM8sB,IAAe/E,EAAS+E,YAA9B,EACMC,IAAchF,EAASgF,WAD7B,EAEMG,IAAiBnF,EAASmF,cAFhC,EAGMC,IAAgBpF,EAASoF,aAH/B,EAIMM,IAAU1F,EAASwF,OAATxF,CAAiB2F,IAJjC,EAKMJ,IAASvF,EAASwF,OAATxF,CAAiByF,GALhC,EAMMpc,IAAIglE,OAAoBruD,EAASvG,QAA7B40D,EAAuC17E,EAAEuD,KAAzCm4E,CANV,EAQMnyB,IAAQvpD,EAAE2F,QAAF3F,EARd,EASMogF,IAAQxtE,EAAOjN,QAAPiN,EATd,EAUM42C,IAAQ9yC,EAAElT,MAVhB,EAYSlD,IAAI,CAAb,EAAgBA,IAAI+sB,EAASwE,SAA7B,IAA0CvxB,CAA1C,EAGE,KAFA,IAAM+/E,IAAW//E,IAAIN,EAAE2E,OAAF3E,CAAU,CAAVA,CAArB,EACMsgF,IAAWhgF,IAAIoW,EAAE/R,OAAF+R,CAAU,CAAVA,CADrB,EAES6pE,IAAK,CAAd,EAAiBA,IAAKlzD,EAAS6F,SAA/B,IAA4CqtD,CAA5C,EAGE,KAFA,IAAMC,IAAWF,IAAWC,IAAK7pE,EAAE/R,OAAF+R,CAAU,CAAVA,CAAjC,EACM+pE,IAAWF,IAAKlzD,EAASiF,YAAdiuD,GAA6BxtD,CAD9C,EAES2tD,IAAK,CAAd,EAAiBA,IAAKtuD,CAAtB,EAAoCsuD,GAApC,EAA0C;EACxC,UAAMC,IAAKF,IAAWC,IAAKluD,CAA3B,CACA,MAAImuD,IAAK,CAALA,IAAUA,KAAMtzD,EAASqJ,SAA7B,EAKA,KAFA,IAAMkqD,IAAWF,IAAK9tE,EAAOjO,OAAPiO,CAAe,CAAfA,CAAtB,EACMiuE,IAAWR,IAAWM,IAAK3gF,EAAE2E,OAAF3E,CAAU,CAAVA,CADjC,EAES8gF,IAAK,CAAd,EAAiBA,IAAKzzD,EAAS8F,QAA/B,IAA2C2tD,CAA3C,EAGE,KAFA,IAAMC,IAAWP,IAAWM,
IAAKzzD,EAASuJ,WAA1C,EACMoqD,IAAWF,IAAKzzD,EAASkF,WAAduuD,GAA4BluD,CAD7C,EAESquD,IAAK,CAAd,EAAiBA,IAAK5uD,CAAtB,EAAmC4uD,GAAnC,EAAyC;EACvC,YAAMC,IAAKF,IAAWC,IAAKxuD,CAA3B,CACA,MAAIyuD,IAAK,CAALA,IAAUA,KAAM7zD,EAASsJ,QAA7B,EAMA,KAHA,IAAMwqD,IAAWP,IAAWK,IAAKruE,EAAOjO,OAAPiO,CAAe,CAAfA,CAAjC,EACMwuE,IAAWP,IAAWK,IAAK7zD,EAASyJ,UAD1C,EAEIuqD,IAAWF,CAFf,EAGSG,IAAK,CAAd,EAAiBA,IAAKj0D,EAASyJ,UAA/B,IAA6CwqD,CAA7C,EAAiD;EAE/C,eADA,IAAMC,IAAOh4B,EAAM63B,IAAWE,CAAjB/3B,CAAb,EACSi4B,IAAK,CAAd,EAAiBA,IAAKn0D,EAASuJ,WAA/B,IAA8C4qD,CAA9C,EACEh4B,EAAMu3B,IAAWS,CAAjBh4B,KAAwB+3B,IAAOnB,EAAMiB,IAAWG,CAAjBpB,CAA/B52B,CAEF63B,KAAYh0D,EAASuJ,WAArByqD;;;EAOZ,YAAO3qE,EAAEwwC,QAAFxwC,EAAP;KAt0CFwkE,EAy0CAA,WAAAA,eAAAA,GAAA,UAAenjE,CAAf,EAA6BnF,CAA7B,EAA+Cya,CAA/C;EAEE/nB,SAAKm2E,gBAALn2E,EAAuByS,GAAInF,EAA3BtN,EAAoC,gBAApCA,EAyBA,KAvBA,IAAM8S,IAAKsjE,OAAoBruD,EAAS8E,OAA7BupD,EAAsC,SAAtCA,CAAX,EACM+F,IAAWrpE,EAAG5U,MADpB,EAEMib,aAFN,EAEOijE,QAFP,EAEaC,QAFb,EAEmBC,QAFnB,EAGMnC,IAAW1nE,EAAGpS,QAAHoS,EAHjB,EAIM8f,aAJN,EAIOgqD,QAJP,EAIaC,QAJb,EAImBC,QAJnB,EAKMC,IAAYpvE,EAAOjN,QAAPiN,EALlB,EAMMqlB,aANN,EAMOgqD,QANP,EAMcC,QANd,EAMqBC,QANrB,EAQEtwD,eARF,EASEO,kBATF,EAUEC,iBAVF,EAWEyE,gBAXF,EAYEJ,cAZF,EAaEC,aAbF,EAcEC,iBAdF,EAeE1D,eAfF,EAgBEC,cAhBF,EAiBEb,kBAjBF,EAkBEC,iBAlBF,EAoBM6vD,IAAShwD,IAAe,CAAfA,GAAmB/E,EAASwF,OAATxF,CAAiByF,GApBnD,EAqBMuvD,IAAUhwD,IAAc,CAAdA,GAAkBhF,EAASwF,OAATxF,CAAiB2F,IArBnD,EAuBS1yB,IAAI,CAAb,EAAgBA,IAAIuxB,CAApB,IAAiCvxB,CAAjC,EACE,KAAK,IAAIghF,IAAK,CAAd,EAAiBA,IAAKxqD,CAAtB,IAAoCwqD,CAApC,EACE,KAAK,IAAIX,IAAK,CAAd,EAAiBA,IAAKjqD,CAAtB,IAAkCiqD,CAAlC,EAME,KALA,IAAMF,IAAWE,IAAKyB,CAAtB,EACME,IAAQziF,KAAKI,GAALJ,CAAS,CAATA,EAAYA,KAAKuQ,IAALvQ,CAAU4gF,IAAWnuD,CAArBzyB,CAAZA,CADd,EAEM0iF,IACF1iF,KAAKE,GAALF,CAASqzB,CAATrzB,GAAqBuyB,IAAequD,KAAYnuD,CAAhDzyB,CAHJ,EAKSqhF,IAAK,CAAd,EAAiBA,IAAKvqD,CAAtB,IAAiCuqD,CAAjC,EAAqC;EAOnC,WANA,IAAMF,IAAWE,IAAKmB,CAAtB,EACMG,IAAQ3iF,KAAKI,GAALJ,CAAS,CAATA,EAAYA,KAAKuQ,IAALvQ,CAAUmhF,IAAWzuD,CAArB1yB,CAAZA,CADd,EAEM4iF,IACF5iF,KAAKE,GAALF,CAASszB,CAATtzB,GAAoBwyB,IAAc2uD,KAAYzuD,CAA9C1yB,CAHJ,EAKI6iF,IAAU,CALd,EAMSnC,IAAK+B,CAAd,EAAqB/B,IAAKgC,CAA1B,IAAmChC,CAAnC,EAGE,KAFA,IAAMG,IAAKH,IAAKjuD,CAALiuD,GAAoBE,CAA/B,EAESK,IAAK0B,CAAd,EAAqB1B,IAAK2B,CAA1B,IAAmC3B,CAAnC,EAME,KALA,IACM6B,IAAWd,IAAOvhF,CAAPuhF,GAAWC,IAAOvB,CAAlBsB,GAAuBE,IAAOjB,CAD/C,EAEM8B,IAAYX,KAAS7vD,IAAe,CAAfA,GAAmBsuD,CAA5BuB,IACdC,KAAS7vD,IAAc,CAAdA,IAHFyuD,IAAKvuD,CAALuuD,GAAmBE,CAGjB3uD,CAAT6vD,CADcD,GACmBE,IAAQb,CAH7C,EAKSE,IAAK,CAAd,EAAiBA,IAAK5qD,CAAtB,IAAqC4qD,CAArC,EAAyC;EAGvCkB,aAFcjD,EAASkD,IAAWnB,CAApB/B,IACCuC,EAAUY,IAAYpB,CAAtBQ,CACfU;EAINjB,SAASC,IAAOphF,CAAPohF,GAAWC,IAAOhB,CAAlBe,GAAuBE,IAAOV,CAA9BQ,GAAmCJ,CAA5CG,IAAkDiB,CAAlDjB;EAKR,YAAOrpE,EAAG8uC,QAAH9uC,EAAP;KAx4CF8iE,EA24CAA,WAAAA,gBAAAA,GAAA,UAAgBl7E,CAAhB,EAA6B+X,CAA7B,EAA2CsV,CAA3C;EACE/nB,SAAKm2E,gBAALn2E,EAAuBtF,GAAG+X,EAA1BzS,EAA+B,iBAA/BA,EAWA,KATA,IAAMgtB,IAAejF,EAASiF,YAA9B,EACMC,IAAclF,EAASkF,WAD7B,EAEMH,IAAe/E,EAAS+E,YAF9B,EAGMC,IAAchF,EAASgF,WAH7B,EAIMwwD,IAAKnH,OAAoBruD,EAASoJ,WAA7BilD,EAA0C,SAA1CA,CAJX,EAMM2G,IAAUh1D,EAASwF,OAATxF,CAAiB2F,IANjC,EAOMovD,IAAS/0D,EAASwF,OAATxF,CAAiByF,GAPhC,EASS4tD,IAAK,CAAd,EAAiBA,IAAKtuD,CAAtB,IAAsCsuD,CAAtC,EAKE,KAJA,IAAMoC,IAAQjjF,KAAKI,GAALJ,CAAS,CAATA,EAAYA,KAAKuQ,IAALvQ,EAAWuiF,IAAS1B,KAAMpuD,CAA1BzyB,CAAZA,CAAd,EACM0iF,IAAQ1iF,KAAKE,GAALF,CACVwtB,EAAS6F,SADCrzB,GACWwtB,EAASqJ,QAATrJ,GAAoB+0D,CAApB/0D,GAA6BqzD,KAAMpuD,CAD9CzyB,CADd,EAISohF,IAAK,CAAd,EAAiBA,IAAK5uD,CAAtB,IAAqC4uD,CAArC,EAKE,KAJA,IAAM8B,IAAQljF,KAAKI,GAALJ,CAAS,CAATA,EAAYA,KAAKuQ,IAALvQ,EAAWwiF,IAAUpB,KAAM1u
D,CAA3B1yB,CAAZA,CAAd,EACM4iF,IAAQ5iF,KAAKE,GAALF,CACVwtB,EAAS8F,QADCtzB,GACUwtB,EAASsJ,OAATtJ,GAAmBg1D,CAAnBh1D,GAA6B4zD,KAAM1uD,CAD7C1yB,CADd,EAISyhF,IAAK,CAAd,EAAiBA,IAAKj0D,EAASyJ,UAA/B,IAA6CwqD,CAA7C,EACE,KAAK,IAAIE,IAAK,CAAd,EAAiBA,IAAKn0D,EAASuJ,WAA/B,IAA8C4qD,CAA9C,EAAkD;EAGhD,WADA,IAAIkB,IAAU,CAAd,EACSpiF,IAAI,CAAb,EAAgBA,IAAI+sB,EAASwE,SAA7B,IAA0CvxB,CAA1C,EACE,KAAK,IAAIigF,IAAKuC,CAAd,EAAqBvC,IAAKgC,CAA1B,IAAmChC,CAAnC,EAEE,KADA,IAAMI,IAAKD,IAAKH,IAAKjuD,CAAVouD,GAAyB0B,CAApC,EACStB,IAAKiC,CAAd,EAAqBjC,IAAK2B,CAA1B,IAAmC3B,CAAnC,EAAuC;EACrC,YAAMI,IAAKD,IAAKH,IAAKvuD,CAAV0uD,GAAwBoB,CAAnC,CACAK,KAAW1iF,EAAEic,GAAFjc,CAAMM,CAANN,EAAS2gF,CAAT3gF,EAAakhF,CAAblhF,EAAiBshF,CAAjBthF,IAAuB+X,EAAGkE,GAAHlE,CAAOzX,CAAPyX,EAAUwoE,CAAVxoE,EAAc+oE,CAAd/oE,EAAkBypE,CAAlBzpE,CAAlC2qE;EAING,SAAGzmE,GAAHymE,CAAOH,CAAPG,EAAgBnC,CAAhBmC,EAAoB5B,CAApB4B,EAAwBvB,CAAxBuB,EAA4BrB,CAA5BqB;EAKR,YAAOA,EAAG37B,QAAH27B,EAAP;KAn7CF3H,EAs7CAA,WAAAA,gBAAAA,GAAA,UAAgBl7E,CAAhB,EAA6B4S,CAA7B,EAA+Cya,CAA/C;EAEE/nB,SAAKm2E,gBAALn2E,EAAuBtF,GAAG4S,EAA1BtN,EAAmC,iBAAnCA,EAcA,KAZA,IAAM8sB,IAAe/E,EAAS+E,YAA9B,EACMC,IAAchF,EAASgF,WAD7B,EAEMG,IAAiBnF,EAASmF,cAFhC,EAGMC,IAAgBpF,EAASoF,aAH/B,EAIMM,IAAU1F,EAASwF,OAATxF,CAAiB2F,IAJjC,EAKMJ,IAASvF,EAASwF,OAATxF,CAAiByF,GALhC,EAMMkwD,IAAQ31D,EAASuJ,WAATvJ,GAAuBA,EAASyJ,UAN9C,EAOMpgB,IAAIglE,OAAoBruD,EAASvG,QAA7B40D,EAAuC17E,EAAEuD,KAAzCm4E,CAPV,EAQMnyB,IAAQvpD,EAAE2F,QAAF3F,EARd,EASMogF,IAAQxtE,EAAOjN,QAAPiN,EATd,EAUM42C,IAAQ9yC,EAAElT,MAVhB,EAYSlD,IAAI,CAAb,EAAgBA,IAAI+sB,EAASwE,SAA7B,IAA0CvxB,CAA1C,EAGE,KAFA,IAAM+/E,IAAW//E,IAAIN,EAAE2E,OAAF3E,CAAU,CAAVA,CAArB,EACMsgF,IAAWhgF,IAAIoW,EAAE/R,OAAF+R,CAAU,CAAVA,CADrB,EAES6pE,IAAK,CAAd,EAAiBA,IAAKlzD,EAAS6F,SAA/B,IAA4CqtD,CAA5C,EAGE,KAFA,IAAMC,IAAWF,IAAWC,IAAK7pE,EAAE/R,OAAF+R,CAAU,CAAVA,CAAjC,EACM+pE,IAAWF,IAAKlzD,EAASiF,YAAdiuD,GAA6BxtD,CAD9C,EAES2tD,IAAK,CAAd,EAAiBA,IAAKtuD,CAAtB,IAAsCsuD,CAAtC,EAA0C;EACxC,UAAMC,IAAKF,IAAWC,IAAKluD,CAA3B,CACA,MAAImuD,IAAK,CAALA,IAAUA,KAAMtzD,EAASqJ,SAA7B,EAKA,KAFA,IAAMkqD,IAAWF,IAAK9tE,EAAOjO,OAAPiO,CAAe,CAAfA,CAAtB,EACMiuE,IAAWR,IAAWM,IAAK3gF,EAAE2E,OAAF3E,CAAU,CAAVA,CADjC,EAES8gF,IAAK,CAAd,EAAiBA,IAAKzzD,EAAS8F,QAA/B,IAA2C2tD,CAA3C,EAGE,KAFA,IAAMC,IAAWP,IAAWM,IAAKpqE,EAAE/R,OAAF+R,CAAU,CAAVA,CAAjC,EACMsqE,IAAWF,IAAKzzD,EAASkF,WAAduuD,GAA4BluD,CAD7C,EAESquD,IAAK,CAAd,EAAiBA,IAAK5uD,CAAtB,IAAqC4uD,CAArC,EAAyC;EACvC,YAAMC,IAAKF,IAAWC,IAAKxuD,CAA3B,CACA,MAAIyuD,IAAK,CAALA,IAAUA,KAAM7zD,EAASsJ,QAA7B,EAOA,KAJA,IAAMwqD,IAAWP,IAAWK,IAAKruE,EAAOjO,OAAPiO,CAAe,CAAfA,CAAjC,EACMwuE,IAAWP,IAAWK,IAAK7zD,EAASyJ,UAD1C,EAEImsD,IAAWlC,CAFf,EAGIM,IAAWF,CAHf,EAISG,IAAK,CAAd,EAAiBA,IAAKj0D,EAASyJ,UAA/B,IAA6CwqD,CAA7C,EAAiD;EAE/C,eADA,IAAMC,IAAOh4B,EAAM63B,IAAWE,CAAjB/3B,CAAb,EACS0vB,IAAI,CAAb,EAAgBA,IAAI+J,CAApB,IAA6B/J,CAA7B,EACEzvB,EAAMy5B,IAAWhK,CAAjBzvB,KAAuB+3B,IAAOnB,EAAMiB,IAAWpI,CAAjBmH,CAA9B52B,CAEFy5B,KAAYD,CAAZC,EACA5B,KAAY2B,CADZC;;;EASZ,YAAOvsE,EAAEwwC,QAAFxwC,EAAP;KA7+CFwkE,EAg/CAA,WAAAA,wBAAAA,GAAA,UAAwBnjE,CAAxB,EAAsCnF,CAAtC,EAAwDya,CAAxD;EAEE/nB,SAAKm2E,gBAALn2E,EAAuByS,GAAInF,EAA3BtN,EAAoC,yBAApCA,EA0BA,KAxBA,IAAM8S,IAAKsjE,OAAoBruD,EAAS8E,OAA7BupD,EAAsC,SAAtCA,CAAX,EACM+F,IAAWrpE,EAAG5U,MADpB,EAEMib,aAFN,EAEOijE,QAFP,EAEaC,QAFb,EAEmBC,QAFnB,EAGMnC,IAAW1nE,EAAGpS,QAAHoS,EAHjB,EAIM8f,aAJN,EAIOgqD,QAJP,EAIaC,QAJb,EAImBC,QAJnB,EAKMC,IAAYpvE,EAAOjN,QAAPiN,EALlB,EAMMqlB,aANN,EAMOgqD,QANP,EAMcC,QANd,EAMqBC,QANrB,EAQEtwD,eARF,EASEO,kBATF,EAUEC,iBAVF,EAWEyE,gBAXF,EAYEJ,cAZF,EAaEC,aAbF,EAcEC,iBAdF,EAeE1D,eAfF,EAgBEC,cAhBF,EAiBEb,kBAjBF,EAkBEC,iBAlBF,EAoBM6vD,IAAShwD,IAAe,CAAfA,GAAmB/E,EAASwF,OAATxF,CA
AiByF,GApBnD,EAqBMuvD,IAAUhwD,IAAc,CAAdA,GAAkBhF,EAASwF,OAATxF,CAAiB2F,IArBnD,EAsBMgwD,IAAQpsD,IAAcE,CAtB5B,EAwBSx2B,IAAI,CAAb,EAAgBA,IAAIuxB,CAApB,IAAiCvxB,CAAjC,EACE,KAAK,IAAIghF,IAAK,CAAd,EAAiBA,IAAKxqD,CAAtB,IAAoCwqD,CAApC,EACE,KAAK,IAAIX,IAAK,CAAd,EAAiBA,IAAKjqD,CAAtB,IAAkCiqD,CAAlC,EAME,KALA,IAAMF,IAAWE,IAAKyB,CAAtB,EACME,IAAQziF,KAAKI,GAALJ,CAAS,CAATA,EAAYA,KAAKuQ,IAALvQ,CAAU4gF,IAAWnuD,CAArBzyB,CAAZA,CADd,EAEM0iF,IACF1iF,KAAKE,GAALF,CAASqzB,CAATrzB,GAAqBuyB,IAAequD,KAAYnuD,CAAhDzyB,CAHJ,EAKSqhF,IAAK,CAAd,EAAiBA,IAAKvqD,CAAtB,IAAiCuqD,CAAjC,EAAqC;EAOnC,WANA,IAAMF,IAAWE,IAAKmB,CAAtB,EACMG,IAAQ3iF,KAAKI,GAALJ,CAAS,CAATA,EAAYA,KAAKuQ,IAALvQ,CAAUmhF,IAAWzuD,CAArB1yB,CAAZA,CADd,EAEM4iF,IACF5iF,KAAKE,GAALF,CAASszB,CAATtzB,GAAoBwyB,IAAc2uD,KAAYzuD,CAA9C1yB,CAHJ,EAKI6iF,IAAU,CALd,EAMSnC,IAAK+B,CAAd,EAAqB/B,IAAKgC,CAA1B,IAAmChC,CAAnC,EAGE,KAFA,IAAMG,IAAKH,IAAKjuD,CAALiuD,GAAoBE,CAA/B,EAESK,IAAK0B,CAAd,EAAqB1B,IAAK2B,CAA1B,IAAmC3B,CAAnC,EAME,KALA,IACM6B,IAAWd,IAAOvhF,CAAPuhF,GAAWC,IAAOvB,CAAlBsB,GAAuBE,IAAOjB,CAD/C,EAEM8B,IAAYX,KAAS7vD,IAAe,CAAfA,GAAmBsuD,CAA5BuB,IACdC,KAAS7vD,IAAc,CAAdA,IAHFyuD,IAAKvuD,CAALuuD,GAAmBE,CAGjB3uD,CAAT6vD,CADcD,GACmBE,IAAQb,CAH7C,EAKS4B,IAAK,CAAd,EAAiBA,IAAKF,CAAtB,IAA+BE,CAA/B,EAAmC;EAIjCR,aAFcjD,EAASkD,KADZrB,IAAK0B,CAAL1B,GAAa4B,CACDP,CAATlD,IACCuC,EAAUY,IAAYM,CAAtBlB,CACfU;EAINjB,SAASC,IAAOphF,CAAPohF,GAAWC,IAAOhB,CAAlBe,GAAuBE,IAAOV,CAA9BQ,GAAmCJ,CAA5CG,IAAkDiB,CAAlDjB;EAKR,YAAOrpE,EAAG8uC,QAAH9uC,EAAP;KAjjDF8iE,EAojDAA,WAAAA,yBAAAA,GAAA,UAAyBl7E,CAAzB,EAAsC+X,CAAtC,EAAoDsV,CAApD;EAEE/nB,SAAKm2E,gBAALn2E,EAAuBtF,GAAG+X,EAA1BzS,EAA+B,0BAA/BA,EAYA,KAVA,IAAMgtB,IAAejF,EAASiF,YAA9B,EACMC,IAAclF,EAASkF,WAD7B,EAEMH,IAAe/E,EAAS+E,YAF9B,EAGMC,IAAchF,EAASgF,WAH7B,EAIMwwD,IAAKnH,OAAoBruD,EAASoJ,WAA7BilD,EAA0C,SAA1CA,CAJX,EAMM2G,IAAUh1D,EAASwF,OAATxF,CAAiB2F,IANjC,EAOMovD,IAAS/0D,EAASwF,OAATxF,CAAiByF,GAPhC,EAQMkwD,IAAQ31D,EAASuJ,WAATvJ,GAAuBA,EAASyJ,UAR9C,EAUS4pD,IAAK,CAAd,EAAiBA,IAAKtuD,CAAtB,IAAsCsuD,CAAtC,EAKE,KAJA,IAAMoC,IAAQjjF,KAAKI,GAALJ,CAAS,CAATA,EAAYA,KAAKuQ,IAALvQ,EAAWuiF,IAAS1B,KAAMpuD,CAA1BzyB,CAAZA,CAAd,EACM0iF,IAAQ1iF,KAAKE,GAALF,CACVwtB,EAAS6F,SADCrzB,GACWwtB,EAASqJ,QAATrJ,GAAoB+0D,CAApB/0D,GAA6BqzD,KAAMpuD,CAD9CzyB,CADd,EAISohF,IAAK,CAAd,EAAiBA,IAAK5uD,CAAtB,IAAqC4uD,CAArC,EAKE,KAJA,IAAM8B,IAAQljF,KAAKI,GAALJ,CAAS,CAATA,EAAYA,KAAKuQ,IAALvQ,EAAWwiF,IAAUpB,KAAM1uD,CAA3B1yB,CAAZA,CAAd,EACM4iF,IAAQ5iF,KAAKE,GAALF,CACVwtB,EAAS8F,QADCtzB,GACUwtB,EAASsJ,OAATtJ,GAAmBg1D,CAAnBh1D,GAA6B4zD,KAAM1uD,CAD7C1yB,CADd,EAIS2hF,IAAK,CAAd,EAAiBA,IAAKn0D,EAASuJ,WAA/B,IAA8C4qD,CAA9C,EAAkD;EAKhD,WAJA,IAAMF,IAAKzhF,KAAKsjF,KAALtjF,CAAW2hF,IAAKwB,CAAhBnjF,CAAX,EACMqjF,IAAK1B,IAAKwB,CADhB,EAGIN,IAAU,CAHd,EAISpiF,IAAI,CAAb,EAAgBA,IAAI+sB,EAASwE,SAA7B,IAA0CvxB,CAA1C,EACE,KAAK,IAAIigF,IAAKuC,CAAd,EAAqBvC,IAAKgC,CAA1B,IAAmChC,CAAnC,EAEE,KADA,IAAMI,IAAKD,IAAKH,IAAKjuD,CAAVouD,GAAyB0B,CAApC,EACStB,IAAKiC,CAAd,EAAqBjC,IAAK2B,CAA1B,IAAmC3B,CAAnC,EAAuC;EACrC,YAAMI,IAAKD,IAAKH,IAAKvuD,CAAV0uD,GAAwBoB,CAAnC,CACAK,KAAW1iF,EAAEic,GAAFjc,CAAMM,CAANN,EAAS2gF,CAAT3gF,EAAakhF,CAAblhF,EAAiBshF,CAAjBthF,IAAuB+X,EAAGkE,GAAHlE,CAAOzX,CAAPyX,EAAUwoE,CAAVxoE,EAAc+oE,CAAd/oE,EAAkBypE,CAAlBzpE,CAAlC2qE;EAING,SAAGzmE,GAAHymE,CAAOH,CAAPG,EAAgBnC,CAAhBmC,EAAoB5B,CAApB4B,EAAwBvB,CAAxBuB,EAA4BK,CAA5BL;EAIN,YAAOA,EAAG37B,QAAH27B,EAAP;KA9lDF3H,EAimDAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB,EAA6B6L,CAA7B;EACEvG,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAGA,KADA,IAAMpC,IAAqB,IAAI/B,KAAJ,CAAUnB,EAAE0E,IAAZ,CAA3B,EACSrE,IAAI,CAAb,EAAgBA,IAAI6C,EAASxD,MAA7B,EAAqCW,GAArC,EACE6C,EAAS7C,CAAT6C,IAAclD,EAAEuB,KAAFvB,CAAQ
K,CAARL,IAAa6L,EAAKxL,CAALwL,CAA3B3I,CAEF,IAAM1C,IAASk7E,OAAWx4E,CAAXw4E,EAAqB17E,EAAEuD,KAAvBm4E,CAAf;EAAA,QACM0H,IAAOpjF,EAAE8K,MAAF9K,EADb,CAEA,KAASK,IAAI,CAAb,EAAgBA,IAAIG,EAAOgD,MAAPhD,CAAcd,MAAlC,IAA4CW,CAA5C,EAA+C;EAI7C,WAHA,IAAMw7E,IAASr7E,EAAOyrD,UAAPzrD,CAAkBH,CAAlBG,CAAf,EAEM6iF,IAAwB,IAAIliF,KAAJ,CAAUnB,EAAE0E,IAAZ,CAF9B,EAGS4+E,IAAI,CAAb,EAAgBA,IAAID,EAAY3jF,MAAhC,EAAwC4jF,GAAxC,EACED,EAAYC,CAAZD,IAAiBxH,EAAOyH,CAAPzH,IAAY77E,EAAEuB,KAAFvB,CAAQsjF,CAARtjF,CAA7BqjF,CAGF,IAAME,IAAgBH,EAAK55E,UAAL45E,CAAgBC,CAAhBD,CAAtB,CAEA5iF,EAAOgD,MAAPhD,CAAcH,CAAdG,IAAmB4iF,EAAK5/E,MAAL4/E,CAAYG,CAAZH,CAAnB5iF;EAEF,YAAOA,EAAO0mD,QAAP1mD,EAAP;KAtnDF06E,EAynDAA,WAAAA,IAAAA,GAAA,UACIl7E,CADJ,EACU8M,CADV,EAC6CC,CAD7C;EAEEzH,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,KAAzBA,EAEA,IAAMwhB,IAAWha,EAAS1F,GAAT0F,CACb,UAACiqC,CAAD,EAAI12C,CAAJ;EAAU,aAAA02C,EAAE,CAAFA,IAAuB/2C,EAAEuB,KAAFvB,CAAQK,CAARL,CAAvB+2C,GAAoCA,EAAE,CAAFA,CAApC;OADGjqC,CAAjB;EAAA,QAEMrI,IAAQqI,EAAS1F,GAAT0F,CAAa,UAAAiqC,CAAA;EAAK,aAAAA,EAAE,CAAFA,CAAA;OAAlBjqC,CAFd;EAAA,QAGMgvE,IAAU97E,EAAE8K,MAAF9K,EAHhB;EAAA,QAIM8K,IAAS4wE,OAAW50D,CAAX40D,EAAqB17E,EAAEuD,KAAvBm4E,CAJf,CAKsB,MAAlB3uE,CAAkB,IACpBjC,EAAOtH,MAAPsH,CAAcrD,IAAdqD,CAAmBiC,CAAnBjC,CADoB,CAItB,KAAK,IAAIzK,IAAI,CAAb,EAAgBA,IAAIL,EAAEwB,IAAtB,EAA4BnB,GAA5B,EAAiC;EAC/B,UAAMg4B,IAASyjD,EAAQ7vB,UAAR6vB,CAAmBz7E,CAAnBy7E,CAAf;EAAA,UACM0H,IAAYnrD,EAAOjxB,GAAPixB,CAAW,UAAC5I,CAAD,EAAIpvB,CAAJ;EAAU,eAAAovB,IAAIhrB,EAAMpE,CAANoE,CAAJ;SAArB4zB,CADlB,CAEAvtB,EAAOsR,GAAPtR,MAAAA,CAAAA,CAAAA,GAAW9K,EAAEic,GAAFjc,MAAAA,CAAAA,CAAAA,EAASq4B,CAATr4B,UAAqBwjF,EAAhC14E;EAEF,YAAOA,EAAOo8C,QAAPp8C,EAAP;KA3oDFowE,EA8oDAA,WAAAA,UAAAA,GAAA,UAA4Bl7E,CAA5B,EAAkC+O,CAAlC;EACEzJ,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,WAAzBA,EAGA,KADA,IAAMpC,IAAqB,IAAI/B,KAAJ,CAAUnB,EAAE0E,IAAZ,CAA3B,EACSrE,IAAI,CAAb,EAAgBA,IAAI6C,EAASxD,MAA7B,EAAqCW,GAArC,EACE6C,EAAS7C,CAAT6C,IAAclD,EAAEuB,KAAFvB,CAAQ+O,EAAK1O,CAAL0O,CAAR/O,CAAdkD,CAEF,IAAMM,IAASxD,EAAE2F,QAAF3F,EAAf;EAAA,QACMQ,IAASsK,OAAO5H,CAAP4H,EAAiB9K,EAAEuD,KAAnBuH,CADf;EAAA,QAGMs4E,IAAOpjF,EAAE8K,MAAF9K,EAHb,CAIA,KAASK,IAAI,CAAb,EAAgBA,IAAIL,EAAEwB,IAAtB,IAA8BnB,CAA9B,EAAiC;EAK/B,WAJA,IAAMomB,IAAM28D,EAAKn3B,UAALm3B,CAAgB/iF,CAAhB+iF,CAAZ,EAGMvH,IAAmB,IAAI16E,KAAJ,CAAUslB,EAAI/mB,MAAd,CAHzB,EAIS+jF,IAAI,CAAb,EAAgBA,IAAI5H,EAAOn8E,MAA3B,EAAmC+jF,GAAnC,EACE5H,EAAO4H,CAAP5H,IAAYp1D,EAAI1X,EAAK00E,CAAL10E,CAAJ0X,CAAZo1D,CAGF,IAAM6H,IAAWljF,EAAOgJ,UAAPhJ,CAAkBq7E,CAAlBr7E,CAAjB,CACAA,EAAOgD,MAAPhD,CAAckjF,CAAdljF,IAA0BgD,EAAOnD,CAAPmD,CAA1BhD;EAEF,YAAOA,EAAO0mD,QAAP1mD,EAAP;KArqDF06E,EAwqDAA,WAAAA,OAAAA,GAAA,UAAyBl7E,CAAzB,EAA+B+L,CAA/B,EAAkD3I,CAAlD;EACEkC,SAAKm2E,gBAALn2E,EAAuBtF,GAAG+L,EAA1BzG,EAAoC,QAApCA,EAEA,IAAMpC,IAAqBlD,EAAEuB,KAAFvB,CAAQmD,KAARnD,EAA3B;EAAA,QACM2jF,IAAgB53E,EAAQpG,QAARoG,EADtB,CAEA7I,EAASE,CAATF,IAAiBygF,EAAcjkF,MAA/BwD,CAIA,KAHA,IAAM1C,IAASsK,OAAO5H,CAAP4H,EAAiB9K,EAAEuD,KAAnBuH,CAAf,EACMs4E,IAAOpjF,EAAE8K,MAAF9K,EADb,EAGSK,IAAI,CAAb,EAAgBA,IAAIG,EAAOgB,IAA3B,IAAmCnB,CAAnC,EAAsC;EACpC,UAAMw7E,IAASr7E,EAAOyrD,UAAPzrD,CAAkBH,CAAlBG,CAAf;EAAA,UAEM6iF,IAAwBxH,EAAO14E,KAAP04E,EAF9B,CAGAwH,EAAYjgF,CAAZigF,IAAoBM,EAAc9H,EAAOz4E,CAAPy4E,CAAd8H,CAApBN,CAEA,IAAME,IAAgBH,EAAK55E,UAAL45E,CAAgBC,CAAhBD,CAAtB,CACA5iF,EAAOgD,MAAPhD,CAAcH,CAAdG,IAAmB4iF,EAAK5/E,MAAL4/E,CAAYG,CAAZH,CAAnB5iF;EAEF,YAAOA,EAAO0mD,QAAP1mD,EAAP;KA1rDF06E,EA6rDAA,WAAAA,eAAAA,GAAA,UACIl7E,CADJ,EACU2U,CADV,EACgCC,CADhC;EAEEtP,SAAKm2E,gBAALn2E,EAAuBtF,EAAvBsF,EAA2B,gBAA3BA,EAEA,IAAMkI,IAAOmH,EAAWssC,MAAXtsC,CAAkB,UAAC3V,CAAD,EAAIsB,CAAJ;EAAU,aAAAtB,IAAIsB,CAAJ;OAA5BqU,CAAb;EAAA,QAEMg
R,IAAW8sC,YAA2BzyD,EAAEuB,KAA7BkxD,EAAoC99C,CAApC89C,EAAgDjlD,CAAhDilD,CAFjB;EAAA,QAGM1sC,IACF2sC,YAA2B/sC,EAASjmB,MAApCgzD,EAA4C/9C,EAAWjV,MAAvDgzD,CAJJ;EAAA,QAKMxsC,IACFysC,oBAAmC3yD,EAAEuB,KAArCoxD,EAA4Ch+C,CAA5Cg+C,EAAwDnlD,CAAxDmlD,CANJ;EAAA,QAOMxsC,IACFysC,oBAAmCh+C,CAAnCg+C,EAA0Cj+C,EAAWjV,MAArDkzD,CARJ;EAAA,QASMvsC,IACFwsC,aAA4B3sC,CAA5B2sC,EAA8Cj+C,CAA9Ci+C,EAAqDl+C,EAAWjV,MAAhEmzD,CAVJ,CAYA,OAAO7yD,EAAEyK,OAAFzK,CAAU2lB,CAAV3lB,EACKgP,SADLhP,CACe+lB,CADf/lB,EAEKyK,OAFLzK,CAEakmB,CAFblmB,EAGKmD,KAHLnD,CAGWmmB,CAHXnmB,EAG6BqmB,CAH7BrmB,CAAP;KA7sDFk7E,EAmtDAA,WAAAA,eAAAA,GAAA,UACIl7E,CADJ,EACU2U,CADV,EACgC7H,CADhC;EAEExH,SAAKm2E,gBAALn2E,EAAuBtF,EAAvBsF,EAA2B,gBAA3BA,EAEA,IAAMkI,IAAOmH,EAAWssC,MAAXtsC,CAAkB,UAAC3V,CAAD,EAAIsB,CAAJ;EAAU,aAAAtB,IAAIsB,CAAJ;OAA5BqU,CAAb;EAAA,QAEMm+C,MAA8C,GAAG,GAFvD,CAGAA,EAAiBxxD,IAAjBwxD,MAAAA,CAAAA,CAAAA,EAAyBhmD,CAAzBgmD,EACA,KAAK,IAAIzyD,IAAI,IAAIsU,EAAWjV,MAA5B,EAAoCW,IAAIL,EAAEuB,KAAFvB,CAAQN,MAAhD,IAA0DW,CAA1D,EACEyyD,EAAiBxxD,IAAjBwxD,EAAuB,GAAG,EAA1BA,EAGF,IAAMC,IAAU/yD,EAAE+H,GAAF/H,CAAM8yD,CAAN9yD,CAAhB;EAAA,QAEMgzD,IACFP,YAA2BM,EAAQxxD,KAAnCkxD,EAA0C99C,CAA1C89C,EAAsDjlD,CAAtDilD,GAA4D,CAA5DA,CAHJ;EAAA,QAIMQ,IAAoCP,YACtCM,EAAoBtzD,MADkBgzD,EACV/9C,EAAWjV,MADDgzD,GACS,CADTA,CAJ1C;EAAA,QAMMQ,IAAeP,oBACjBI,EAAQxxD,KADSoxD,EACFh+C,CADEg+C,EACUnlD,CADVmlD,GACgB,CADhBA,CANrB,CASA,OAAOI,EAAQtoD,OAARsoD,CAAgBC,CAAhBD,EACK/jD,SADL+jD,CACeE,CADfF,EAEKtoD,OAFLsoD,CAEaG,CAFbH,CAAP;KAxuDFmoB,EA6uDQA,WAAAA,KAAAA,GAAR,UAAal7E,CAAb,EAA0BqtB,CAA1B,EAAgD4pB,CAAhD;EAEE3xC,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,MAAzBA,EAwBA,KAtBA,IAAMgtB,IAAejF,EAASiF,YAA9B,EACMC,IAAclF,EAASkF,WAD7B,EAEMC,IAAiBnF,EAASmF,cAFhC,EAGMC,IAAgBpF,EAASoF,aAH/B,EAIMC,IAAwBrF,EAASqF,qBAJvC,EAKMC,IAAuBtF,EAASsF,oBALtC,EAMMC,IAASvF,EAASwF,OAATxF,CAAiByF,GANhC,EAOMC,IAAU1F,EAASwF,OAATxF,CAAiB2F,IAPjC,EASMld,IACY,UAAbmhC,CAAa,GAAQv2C,OAAOu5E,iBAAf,GACQv5E,OAAOkjF,iBAXjC,EAaMC,IAAU7jF,EAAE2F,QAAF3F,EAbhB,EAcMoX,IAASskE,OAAoBruD,EAASvG,QAA7B40D,EAAuC17E,EAAEuD,KAAzCm4E,CAdf,EAeMoI,IAAa1sE,EAAO5T,MAf1B,EAiBMugF,IACF12D,EAASvG,QAATuG,CAAkB,CAAlBA,IAAuBA,EAASvG,QAATuG,CAAkB,CAAlBA,CAAvBA,GAA8CA,EAASvG,QAATuG,CAAkB,CAAlBA,CAlBlD,EAmBM22D,IAAmB32D,EAASvG,QAATuG,CAAkB,CAAlBA,IAAuBA,EAASvG,QAATuG,CAAkB,CAAlBA,CAnBhD,EAoBM42D,IAAmB52D,EAASvG,QAATuG,CAAkB,CAAlBA,CApBzB,EAsBS/sB,IAAI,CAAb,EAAgBA,IAAI+sB,EAASwE,SAA7B,IAA0CvxB,CAA1C,EAGE,KAFA,IAAM4jF,IAAoB5jF,IAAIyjF,CAA9B,EACMI,IAAmB7jF,IAAIN,EAAE2E,OAAF3E,CAAU,CAAVA,CAD7B,EAESid,IAAI,CAAb,EAAgBA,IAAIoQ,EAASyJ,UAA7B,IAA2C7Z,CAA3C,EACE,KAAK,IAAIsjE,IAAK,CAAd,EAAiBA,IAAKlzD,EAAS6F,SAA/B,IAA4CqtD,CAA5C,EAME,KALA,IAAME,IAAWF,IAAKjuD,CAALiuD,GAAoB3tD,CAArC,EACM0vD,IAAQziF,KAAKI,GAALJ,CAAS,CAATA,EAAY4gF,CAAZ5gF,CADd,EAEMukF,IACFvkF,KAAKE,GAALF,CAASwtB,EAASqJ,QAAlB72B,EAA4B6yB,IAAwB+tD,CAApD5gF,CAHJ,EAIMwkF,IAAkBH,IAAoB3D,IAAKyD,CAJjD,EAKSlD,IAAK,CAAd,EAAiBA,IAAKzzD,EAAS8F,QAA/B,IAA2C2tD,CAA3C,EAA+C;EAQ7C,WAPA,IAAME,IAAWF,IAAKvuD,CAALuuD,GAAmB/tD,CAApC,EACMyvD,IAAQ3iF,KAAKI,GAALJ,CAAS,CAATA,EAAYmhF,CAAZnhF,CADd,EAEMykF,IACFzkF,KAAKE,GAALF,CAASwtB,EAASsJ,OAAlB92B,EAA2B8yB,IAAuBquD,CAAlDnhF,CAHJ,EAII0kF,IAAczuE,CAJlB,EAKI0uE,IAAW,CALf,EAMIx7D,IAAQ,CANZ,EAOS23D,IAAK2B,CAAd,EAAqB3B,IAAKyD,CAA1B,EAAiCzD,KAAMnuD,CAAvC,EAAuD;EAErD,aADA,IAAMiyD,IAAWN,IAAmBxD,IAAK3gF,EAAE2E,OAAF3E,CAAU,CAAVA,CAAzC,EACSkhF,IAAKsB,CAAd,EAAqBtB,IAAKoD,CAA1B,EAAiCpD,KAAMzuD,CAAvC,EAAsD;EACpD,cACMiyD,IAAQb,EADGY,IAAWvD,IAAKlhF,EAAE2E,OAAF3E,CAAU,CAAVA,CAAhBykF,GACgBxnE,CAAnB4mE,CADd,CAEkB,UAAb5sC,CAAa,IAASytC,IAAQH,CAAjB,GAChBA,IAAcG,CADE,GAEM,UAAbztC,CAAa,KACtButC,KAAYE,CAAZF,EACAx7D,GAFsB
,CAFN;EAOpB,aAAIllB,MAAMygF,CAANzgF,CAAJ,EACE;EAIJggF,SADqBO,IAAkBvD,IAAKmD,CAAvBI,GAA0CpnE,CAC/D6mE,IACiB,UAAb7sC,CAAa,GAAQutC,IAAWx7D,CAAnB,GAA2Bu7D,CAD5CT;EAMR,YAAO1sE,EAAO8vC,QAAP9vC,EAAP;KAhzDF8jE,EAmzDAA,WAAAA,QAAAA,GAAA,UAAQl7E,CAAR,EAAqBqtB,CAArB;EACE,WAAO/nB,KAAK8O,IAAL9O,CAAUtF,CAAVsF,EAAa+nB,CAAb/nB,EAAuB,KAAvBA,CAAP;KApzDF41E,EAuzDQA,WAAAA,iBAAAA,GAAR,UAAyBl7E,CAAzB,EAAsCqtB,CAAtC;EAWE,SAVA,IAAMs3D,IAAejJ,OAAoBruD,EAASvG,QAA7B40D,EAAuC,OAAvCA,CAArB,EACMppD,IAAejF,EAASiF,YAD9B,EAEMC,IAAclF,EAASkF,WAF7B,EAGMC,IAAiBnF,EAASmF,cAHhC,EAIMC,IAAgBpF,EAASoF,aAJ/B,EAKMC,IAAwBrF,EAASqF,qBALvC,EAMMC,IAAuBtF,EAASsF,oBANtC,EAOMC,IAASvF,EAASwF,OAATxF,CAAiByF,GAPhC,EAQMC,IAAU1F,EAASwF,OAATxF,CAAiB2F,IARjC,EAUS1yB,IAAI,CAAb,EAAgBA,IAAI+sB,EAASwE,SAA7B,IAA0CvxB,CAA1C,EACE,KAAK,IAAI2c,IAAI,CAAb,EAAgBA,IAAIoQ,EAASyJ,UAA7B,IAA2C7Z,CAA3C,EACE,KAAK,IAAIsjE,IAAK,CAAd,EAAiBA,IAAKlzD,EAAS6F,SAA/B,IAA4CqtD,CAA5C,EAAgD;EAG9C,WAFA,IAAME,IAAWF,IAAKjuD,CAALiuD,GAAoB3tD,CAArC,EACI0vD,IAAQ7B,CACZ,EAAO6B,IAAQ,CAAf,GACEA,KAAS9vD,CAAT8vD,CAKF,KAFA,IAAM8B,IACFvkF,KAAKE,GAALF,CAASwtB,EAASqJ,QAAlB72B,EAA4B6yB,IAAwB+tD,CAApD5gF,CADJ,EAESihF,IAAK,CAAd,EAAiBA,IAAKzzD,EAAS8F,QAA/B,IAA2C2tD,CAA3C,EAA+C;EAG7C,aAFA,IAAME,IAAWF,IAAKvuD,CAALuuD,GAAmB/tD,CAApC,EACIyvD,IAAQxB,CACZ,EAAOwB,IAAQ,CAAf,GACEA,KAAS/vD,CAAT+vD,CAOF,KALA,IAAM8B,IACFzkF,KAAKE,GAALF,CAASwtB,EAASsJ,OAAlB92B,EAA2B8yB,IAAuBquD,CAAlDnhF,CADJ,EAEI+kF,IAAWlkF,OAAOu5E,iBAFtB,EAGI4K,KAAe,CAHnB,EAKSlE,IAAK2B,CAAd,EAAqB3B,IAAKyD,CAA1B,EAAiCzD,KAAMnuD,CAAvC,EAEE,KADA,IAAMkuD,IAAKC,IAAKF,CAAhB,EACSS,IAAKsB,CAAd,EAAqBtB,IAAKoD,CAA1B,EAAiCpD,KAAMzuD,CAAvC,EAAsD;EACpD,cAAMwuD,IAAKC,IAAKF,CAAhB;EAAA,cACM0D,IAAQ1kF,EAAEic,GAAFjc,CAAMM,CAANN,EAAS2gF,CAAT3gF,EAAakhF,CAAblhF,EAAiBid,CAAjBjd,CADd,CAEI0kF,IAAQE,CAARF,KACFE,IAAWF,CAAXE,EACAC,IAAcnE,IAAK/tD,CAAL+tD,GAA4BO,CAFxCyD;EAMRC,WAAavoE,GAAbuoE,CAAiBE,CAAjBF,EAA8BrkF,CAA9BqkF,EAAiCpE,CAAjCoE,EAAqC7D,CAArC6D,EAAyC1nE,CAAzC0nE;;EAKR,YAAOA,EAAaz9B,QAAby9B,EAAP;KAx2DFzJ,EA22DAA,WAAAA,gBAAAA,GAAA,UAAgBnjE,CAAhB,EAA8B/X,CAA9B,EAA2C0W,CAA3C,EAAwD2W,CAAxD;EAEE/nB,SAAKm2E,gBAALn2E,EAAuBtF,GAAG0W,EAA1BpR,EAA8B,iBAA9BA,EAaA,KAXA,IAAMq/E,IAAer/E,KAAK8zD,gBAAL9zD,CAAsBtF,CAAtBsF,EAAyB+nB,CAAzB/nB,CAArB,EACMgtB,IAAejF,EAASiF,YAD9B,EAEMC,IAAclF,EAASkF,WAF7B,EAGMC,IAAiBnF,EAASmF,cAHhC,EAIMC,IAAgBpF,EAASoF,aAJ/B,EAKMC,IAAwBrF,EAASqF,qBALvC,EAMMC,IAAuBtF,EAASsF,oBANtC,EAOMI,IAAUJ,IAAuB,CAAvBA,GAA2BtF,EAASwF,OAATxF,CAAiB2F,IAP5D,EAQMJ,IAASF,IAAwB,CAAxBA,GAA4BrF,EAASwF,OAATxF,CAAiByF,GAR5D,EASM1a,IAAKsjE,OAAoB17E,EAAEuB,KAAtBm6E,EAA6B,SAA7BA,CATX,EAWSp7E,IAAI,CAAb,EAAgBA,IAAI+sB,EAASwE,SAA7B,IAA0CvxB,CAA1C,EACE,KAAK,IAAI2c,IAAI,CAAb,EAAgBA,IAAIoQ,EAASyJ,UAA7B,IAA2C7Z,CAA3C,EACE,KAAK,IAAI6nE,IAAM,CAAf,EAAkBA,IAAMz3D,EAASqJ,QAAjC,IAA6CouD,CAA7C,EACE,KAAK,IAAIC,IAAM,CAAf,EAAkBA,IAAM13D,EAASsJ,OAAjC,IAA4CouD,CAA5C,EAAiD;EAK/C,WAHA,IAAMC,IAAYF,IAAMlyD,CAAxB,EACMqyD,IAAYF,IAAMhyD,CADxB,EAEI2vD,IAAU,CAFd,EAGShC,IAAK,CAAd,EAAiBA,IAAKhuD,CAAtB,EAA6CguD,KAAMluD,CAAnD,EAAmE;EACjE,YAAM0yD,KAAOF,IAAYtE,KAAMpuD,CAA/B,CACA,MAAI4yD,IAAM,CAANA,IAAWA,KAAO73D,EAAS6F,SAA3BgyD,IACArlF,KAAKkC,KAALlC,CAAWqlF,CAAXrlF,MAAoBqlF,EADxB,EAIA,KAAK,IAAIjE,IAAK,CAAd,EAAiBA,IAAKtuD,CAAtB,EAA4CsuD,KAAMxuD,CAAlD,EAAiE;EAC/D,cAAM0yD,KAAOF,IAAYhE,KAAM1uD,CAA/B,CACA,MAAI4yD,IAAM,CAANA,IAAWA,KAAO93D,EAAS8F,QAA3BgyD,IACAtlF,KAAKkC,KAALlC,CAAWslF,CAAXtlF,MAAoBslF,EADxB,EAAA;EAIA,gBAIMhhC,IAJSzxB,IAAwBC,CAAxBD,GACX,CADWA,GACPiyD,EAAa1oE,GAAb0oE,CAAiBrkF,CAAjBqkF,EAAoBO,CAApBP,EAAyBQ,CAAzBR,EAA8B1nE,CAA9B0nE,CADOjyD,KAEAguD,IAAK/tD,CAAL+tD,GAA4BO,CAF5BvuD,GAIkB,CAJlBA,GAIsB,CAJrC,CAK
A,IAAa,MAATyxB,CAAJ,EAKAu+B,KADc3qE,EAAGkE,GAAHlE,CAAOzX,CAAPyX,EAAUmtE,CAAVntE,EAAeotE,CAAfptE,EAAoBkF,CAApBlF,IACKosC,CAAnBu+B;;;EAGJtqE,SAAGgE,GAAHhE,CAAOsqE,CAAPtqE,EAAgB9X,CAAhB8X,EAAmB0sE,CAAnB1sE,EAAwB2sE,CAAxB3sE,EAA6B6E,CAA7B7E;EAKR,YAAOA,EAAG8uC,QAAH9uC,EAAP;KAh6DF8iE,EAm6DAA,WAAAA,gBAAAA,GAAA,UAAgBnjE,CAAhB,EAA8B/X,CAA9B,EAA2CqtB,CAA3C;EACE/nB,SAAKm2E,gBAALn2E,EAAuByS,GAAI/X,EAA3BsF,EAA+B,iBAA/BA,EAgBA,KAdA,IAAMgtB,IAAejF,EAASiF,YAA9B,EACMC,IAAclF,EAASkF,WAD7B,EAEMH,IAAe/E,EAAS+E,YAF9B,EAGMC,IAAchF,EAASgF,WAH7B,EAIMG,IAAiBnF,EAASmF,cAJhC,EAKMC,IAAgBpF,EAASoF,aAL/B,EAMMC,IAAwBrF,EAASqF,qBANvC,EAOMC,IAAuBtF,EAASsF,oBAPtC,EAQMI,IAAUJ,IAAuB,CAAvBA,GAA2BtF,EAASwF,OAATxF,CAAiB2F,IAR5D,EASMJ,IAASF,IAAwB,CAAxBA,GAA4BrF,EAASwF,OAATxF,CAAiByF,GAT5D,EAUM1a,IAAKsjE,OAAoB17E,EAAEuB,KAAtBm6E,EAA6B,SAA7BA,CAVX,EAYMzoD,IAAgB,KAAKb,IAAeC,CAApB,CAZtB,EAcS/xB,IAAI,CAAb,EAAgBA,IAAI+sB,EAASwE,SAA7B,IAA0CvxB,CAA1C,EACE,KAAK,IAAI2c,IAAI,CAAb,EAAgBA,IAAIoQ,EAASyJ,UAA7B,IAA2C7Z,CAA3C,EACE,KAAK,IAAI6nE,IAAM,CAAf,EAAkBA,IAAMz3D,EAASqJ,QAAjC,IAA6CouD,CAA7C,EACE,KAAK,IAAIC,IAAM,CAAf,EAAkBA,IAAM13D,EAASsJ,OAAjC,IAA4CouD,CAA5C,EAAiD;EAK/C,WAHA,IAAMC,IAAYF,IAAMlyD,CAAxB,EACMqyD,IAAYF,IAAMhyD,CADxB,EAEI2vD,IAAU,CAFd,EAGShC,IAAK,CAAd,EAAiBA,IAAKhuD,CAAtB,EAA6CguD,KAAMluD,CAAnD,EAAmE;EACjE,YAAM0yD,KAAOF,IAAYtE,KAAMpuD,CAA/B,CACA,MAAI4yD,IAAM,CAANA,IAAWA,KAAO73D,EAAS6F,SAA3BgyD,IACArlF,KAAKkC,KAALlC,CAAWqlF,CAAXrlF,MAAoBqlF,EADxB,EAIA,KAAK,IAAIjE,IAAK,CAAd,EAAiBA,IAAKtuD,CAAtB,EAA4CsuD,KAAMxuD,CAAlD,EAAiE;EAC/D,cAAM0yD,KAAOF,IAAYhE,KAAM1uD,CAA/B,CACA,MAAI4yD,IAAM,CAANA,IAAWA,KAAO93D,EAAS8F,QAA3BgyD,IACAtlF,KAAKkC,KAALlC,CAAWslF,CAAXtlF,MAAoBslF,EADxB,EAMAzC,KADc3qE,EAAGkE,GAAHlE,CAAOzX,CAAPyX,EAAUmtE,CAAVntE,EAAeotE,CAAfptE,EAAoBkF,CAApBlF,CACd2qE;;EAGJtqE,SAAGgE,GAAHhE,CAAOsqE,IAAUzvD,CAAjB7a,EAAgC9X,CAAhC8X,EAAmC0sE,CAAnC1sE,EAAwC2sE,CAAxC3sE,EAA6C6E,CAA7C7E;EAKR,YAAOA,EAAG8uC,QAAH9uC,EAAP;KAl9DF8iE,EAq9DAA,WAAAA,KAAAA,GAAA,UAAuBl7E,CAAvB,EAA6BuD,CAA7B;EACE,WAAOk2D,WAAwBz5D,CAAxBy5D,EAA2Bl2D,CAA3Bk2D,EAAkCn0D,IAAlCm0D,CAAP;KAt9DFyhB,EAy9DAA,WAAAA,QAAAA,GAAA,UAAwBl7E,CAAxB,EAAmCuB,CAAnC;EACE,WAAOq4D,cAA2B55D,CAA3B45D,EAA8Br4D,CAA9Bq4D,CAAP;KA19DFshB,EA69DAA,WAAAA,QAAAA,GAAA,UAAQl7E,CAAR,EAAqBqtB,CAArB;EAGE,WAFA/nB,KAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,SAAzBA,GAEOA,KAAK8O,IAAL9O,CAAUtF,CAAVsF,EAAa+nB,CAAb/nB,EAAuB,KAAvBA,EAA8BolB,OAA9BplB,EAAP;KAh+DF41E,EAm+DAA,WAAAA,eAAAA,GAAA,UACIl7E,CADJ,EACiButB,CADjB,EACoCC,CADpC,EAEIhb,CAFJ;EAGElN,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,gBAAzBA,EAqBA,KAnBM,IAAAmZ,WAAA,EAAC0S,QAAD,EAAQ0nB,QAAR,EAAmBC,QAAnB,EAA6Bj6B,QAA7B,EACAglE,IAAU7jF,EAAE2F,QAAF3F,EADV,EAEAQ,IAAS,IAAIiD,YAAJ,CACXwF,eAAoBkoB,GAAO5D,GAAWC,GAAU3O,EAAhD5V,CADW,CAFT,EAKAm8E,KACH5yE,KAAgB+a,IAAY,CAA5B/a,GAAiCqmC,IAAY,CAA7CrmC,GAAiDqmC,GACjDrmC,KAAgBgb,IAAW,CAA3Bhb,GAAgCsmC,IAAW,CAA3CtmC,GAA+CsmC,EAP5C,EAUAusC,KACH7yE,KAAgB+a,IAAY,CAA5B/a,GAAiC+a,IAAY,CAA7C/a,GAAiD+a,GACjD/a,KAAgBgb,IAAW,CAA3Bhb,GAAgCgb,IAAW,CAA3Chb,GAA+Cgb,EAZ5C,EAcF83D,IAAY,CAdV,EAeAC,IACFH,EAAmB,CAAnBA,IAAwBC,EAAoB,CAApBA,CAhBtB,EAiBAG,IACFJ,EAAmB,CAAnBA,IAAwBC,EAAoB,CAApBA,CAlBtB,EAmBG/kF,IAAI,CAAb,EAAgBA,IAAI6wB,CAApB,EAA2B7wB,GAA3B,EACE,KAAK,IAAIC,IAAI,CAAb,EAAgBA,IAAIgtB,CAApB,EAA+BhtB,GAA/B,EAOE,KANA,IAAMklF,IAAgBF,IAAwBhlF,CAA9C,EACMmlF,IAAiB7lF,KAAKkC,KAALlC,CAAW4lF,CAAX5lF,CADvB,EAEM8lF,IAAUF,IAAgBC,CAFhC,EAGME,IAAgB/lF,KAAKE,GAALF,CAASg5C,IAAY,CAArBh5C,EAAwBA,KAAKuQ,IAALvQ,CAAU4lF,CAAV5lF,CAAxBA,CAHtB,EAIMgmF,IAAevlF,IAAIN,EAAE2E,OAAF3E,CAAU,CAAVA,CAAJM,GAAmBolF,IAAiB1lF,EAAE2E,OAAF3E,CAAU,CAAVA,CAJzD,EAKM8lF,IAAexlF,IAAIN,EAAE2E,OAAF3
E,CAAU,CAAVA,CAAJM,GAAmBslF,IAAgB5lF,EAAE2E,OAAF3E,CAAU,CAAVA,CALxD,EAMSyvB,IAAI,CAAb,EAAgBA,IAAIjC,CAApB,EAA8BiC,GAA9B,EAUE,KATA,IAAMs2D,IAAgBP,IAAwB/1D,CAA9C,EACMu2D,IAAiBnmF,KAAKkC,KAALlC,CAAWkmF,CAAXlmF,CADvB,EAEMomF,IAAUF,IAAgBC,CAFhC,EAGME,IACFrmF,KAAKE,GAALF,CAASi5C,IAAW,CAApBj5C,EAAuBA,KAAKuQ,IAALvQ,CAAUkmF,CAAVlmF,CAAvBA,CAJJ,EAKMsmF,IAAgBN,IAAeG,IAAiBhmF,EAAE2E,OAAF3E,CAAU,CAAVA,CALtD,EAMMomF,IAAgBN,IAAeE,IAAiBhmF,EAAE2E,OAAF3E,CAAU,CAAVA,CANtD,EAOMqmF,IAAiBR,KAAgBK,IAAgBlmF,EAAE2E,OAAF3E,CAAU,CAAVA,CAPvD,EAQMsmF,IAAiBR,IAAeI,IAAgBlmF,EAAE2E,OAAF3E,CAAU,CAAVA,CARtD,EASSid,IAAI,CAAb,EAAgBA,IAAI4B,CAApB,EAAiC5B,GAAjC,EAAsC;EAIpC,UAAMspE,IAAU1C,EAAQsC,IAAgBlpE,CAAxB4mE,CAAhB;EAAA,UACM2C,IAAa3C,EAAQuC,IAAgBnpE,CAAxB4mE,CADnB;EAAA,UAKMlhB,IAAM4jB,KAHK1C,EAAQwC,IAAiBppE,CAAzB4mE,IAGiB0C,KAAWN,CAL7C;EAAA,UAOM7vE,IAAWusD,KADF6jB,KAHK3C,EAAQyC,IAAiBrpE,CAAzB4mE,IAGuB2C,KAAcP,CAA1CO,GACkB7jB,KAAOgjB,CAPxC,CASAnlF,EAAO8kF,GAAP9kF,IAAsB4V,CAAtB5V;EAKR,YAAOo7E,OAAWp7E,CAAXo7E,GAAoBzqD,GAAO5D,GAAWC,GAAU3O,EAAhD+8D,CAAP;KA/hEFV,EAkiEAA,WAAAA,uBAAAA,GAAA,UAAuBnjE,CAAvB,EAAqC/X,CAArC,EAAkDwS,CAAlD;EACElN,SAAKm2E,gBAALn2E,EAAuByS,GAAI/X,EAA3BsF,EAA+B,wBAA/BA,EA+BA,KA7BM,IAAAmZ,WAAA,EAAC0S,QAAD,EAAQgnB,QAAR,EAAiBC,QAAjB,EAAyBh7C,QAAzB,EACAy6B,WADA,EACGwgB,QADH,EACYC,QADZ,EAGAlhC,IAAS,IAAI3T,YAAJ,CAAiB0tB,IAAQgnB,CAARhnB,GAAkBinB,CAAlBjnB,GAA2B/zB,CAA5C,CAHT,EAUAm7C,KACH/lC,KAAgB6lC,IAAU,CAA1B7lC,GAA+B2lC,IAAU,CAAzC3lC,GAA6C2lC,GAC7C3lC,KAAgB8lC,IAAS,CAAzB9lC,GAA8B4lC,IAAS,CAAvC5lC,GAA2C4lC,EAZxC,EAeAI,KACHhmC,KAAgB6lC,IAAU,CAA1B7lC,GAA+B6lC,IAAU,CAAzC7lC,GAA6C6lC,GAC7C7lC,KAAgB8lC,IAAS,CAAzB9lC,GAA8B8lC,IAAS,CAAvC9lC,GAA2C8lC,EAjBxC,EAoBAvgB,IAAcwgB,EAAe,CAAfA,IAAoBC,EAAe,CAAfA,CApBlC,EAqBArgB,IAAaogB,EAAe,CAAfA,IAAoBC,EAAe,CAAfA,CArBjC,EA2BAinC,IAAW1nE,EAAGpS,QAAHoS,EA3BX,EA4BFlQ,IAAS,CA5BP,EA6BGvH,IAAI,CAAb,EAAgBA,IAAI6wB,CAApB,EAA2B7wB,GAA3B,EAEE,KADA,IAAMmmF,IAAUnmF,IAAIN,EAAE2E,OAAF3E,CAAU,CAAVA,CAApB,EACSO,IAAI,CAAb,EAAgBA,IAAI83C,CAApB,EAA6B93C,GAA7B,EAUE,KATA,IAAMukF,IAAMvkF,IAAIw3B,CAAhB,EACM2uD,IAAc7mF,KAAKkC,KAALlC,CAAWilF,CAAXjlF,CADpB,EAEM8mF,IAAiB9mF,KAAKE,GAALF,CAASA,KAAKuQ,IAALvQ,CAAUilF,CAAVjlF,CAATA,EAAyBs4C,IAAU,CAAnCt4C,CAFvB,EAIM+mF,IAAeH,IAAUC,IAAc1mF,EAAE2E,OAAF3E,CAAU,CAAVA,CAJ7C,EAKM6mF,IAAkBJ,IAAUE,IAAiB3mF,EAAE2E,OAAF3E,CAAU,CAAVA,CALnD,EAOM8mF,IAAUhC,IAAM4B,CAPtB,EAQMK,IAAiB,IAAMD,CAR7B,EASSr3D,IAAI,CAAb,EAAgBA,IAAI6oB,CAApB,EAA4B7oB,GAA5B,EAmBE,KAlBA,IAAMs1D,IAAMt1D,IAAI0I,CAAhB,EACM6uD,IAAennF,KAAKkC,KAALlC,CAAWklF,CAAXllF,CADrB,EAEMonF,IAAgBpnF,KAAKE,GAALF,CAASA,KAAKuQ,IAALvQ,CAAUklF,CAAVllF,CAATA,EAAyBu4C,IAAS,CAAlCv4C,CAFtB,EAGMqnF,IAAUnC,IAAMiC,CAHtB,EAIMG,IAAiB,IAAMD,CAJ7B,EAMME,IAAkBR,IAAeI,IAAehnF,EAAE2E,OAAF3E,CAAU,CAAVA,CANtD,EAOMqnF,IAAmBT,IAAeK,IAAgBjnF,EAAE2E,OAAF3E,CAAU,CAAVA,CAPxD,EAQMsnF,IACFT,IAAkBG,IAAehnF,EAAE2E,OAAF3E,CAAU,CAAVA,CATrC,EAUMunF,IACFV,IAAkBI,IAAgBjnF,EAAE2E,OAAF3E,CAAU,CAAVA,CAXtC,EAaMwnF,IACFT,IAAiBI,CAdrB,EAeMM,IAA6BV,IAAiBG,CAfpD,EAgBMQ,IAA6BZ,IAAUK,CAhB7C,EAiBMQ,IAAsBb,IAAUI,CAjBtC,EAkBSjqE,IAAI,CAAb,EAAgBA,IAAI7f,CAApB,EAA2B6f,GAA3B,EAAgC;EAC9B,UAAM2qE,IAAQnI,EAAS53E,GAAT43E,CAAd,CACAroE,EAAOgwE,IAAkBnqE,CAAzB7F,KACIwwE,IAAQJ,CADZpwE,EAEAA,EAAOiwE,IAAmBpqE,CAA1B7F,KAAgCwwE,IAAQH,CAFxCrwE,EAGAA,EAAOkwE,IAAqBrqE,CAA5B7F,KACIwwE,IAAQF,CAJZtwE,EAKAA,EAAOmwE,IAAsBtqE,CAA7B7F,KAAmCwwE,IAAQD,CAL3CvwE;EAUR,YAAOywE,SAAazwE,CAAbywE,GAAsB12D,GAAOinB,GAAQD,GAAS/6C,EAA9CyqF,EAAsD7nF,EAAEuD,KAAxDskF,CAAP;KA7mEF3M,EAgnEAA,WAAAA,sBAAAA,GAAA,UACIl7E,CADJ,EACiButB,CADjB,EACoCC,CADpC,EAEIhb,CAFJ;EAGElN,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,uBAAzBA,EA
sBA,KApBM,IAAAmZ,WAAA,EAAC0S,QAAD,EAAQ0nB,QAAR,EAAmBC,QAAnB,EAA6Bj6B,QAA7B,EACAglE,IAAU7jF,EAAE2F,QAAF3F,EADV,EAEAoX,IAAS,IAAI3T,YAAJ,CAAiB0tB,IAAQ5D,CAAR4D,GAAoB3D,CAApB2D,GAA+BtS,CAAhD,CAFT,EAIAumE,KACH5yE,KAAgB+a,IAAY,CAA5B/a,GAAiCqmC,IAAY,CAA7CrmC,GAAiDqmC,GACjDrmC,KAAgBgb,IAAW,CAA3Bhb,GAAgCsmC,IAAW,CAA3CtmC,GAA+CsmC,EAN5C,EASAusC,KACH7yE,KAAgB+a,IAAY,CAA5B/a,GAAiC+a,IAAY,CAA7C/a,GAAiD+a,GACjD/a,KAAgBgb,IAAW,CAA3Bhb,GAAgCgb,IAAW,CAA3Chb,GAA+Cgb,EAX5C,EAcA+3D,IACFH,EAAmB,CAAnBA,IAAwBC,EAAoB,CAApBA,CAftB,EAgBAG,IACFJ,EAAmB,CAAnBA,IAAwBC,EAAoB,CAApBA,CAjBtB,EAmBFyC,IAAe,CAnBb,EAoBGxnF,IAAI,CAAb,EAAgBA,IAAI6wB,CAApB,EAA2B7wB,GAA3B,EAEE,KADA,IAAMohC,IAAcphC,IAAIN,EAAE2E,OAAF3E,CAAU,CAAVA,CAAxB,EACSO,IAAI,CAAb,EAAgBA,IAAIgtB,CAApB,EAA+BhtB,GAA/B,EAOE,KANA,IAAMklF,IAAgBF,IAAwBhlF,CAA9C,EAKMwnF,IAAYrmD,IAJO7hC,KAAKE,GAALF,CACrBg5C,IAAY,CADSh5C,EAErB2S,IAAe3S,KAAKuE,KAALvE,CAAW4lF,CAAX5lF,CAAf2S,GACe3S,KAAKkC,KAALlC,CAAW4lF,CAAX5lF,CAHMA,IAI0BG,EAAE2E,OAAF3E,CAAU,CAAVA,CALnD,EAMSyvB,IAAI,CAAb,EAAgBA,IAAIjC,CAApB,EAA8BiC,GAA9B,EAOE,KANA,IAAMs2D,IAAgBP,IAAwB/1D,CAA9C,EAKMu4D,IAAYD,IAJOloF,KAAKE,GAALF,CACrBi5C,IAAW,CADUj5C,EAErB2S,IAAe3S,KAAKuE,KAALvE,CAAWkmF,CAAXlmF,CAAf2S,GACe3S,KAAKkC,KAALlC,CAAWkmF,CAAXlmF,CAHMA,IAIwBG,EAAE2E,OAAF3E,CAAU,CAAVA,CALjD,EAMSid,IAAI,CAAb,EAAgBA,IAAI4B,CAApB,EAAiC5B,GAAjC,EAAsC;EAGpC,UAAMgrE,IAASpE,EAAQmE,IAAY/qE,CAApB4mE,CAAf,CACAzsE,EAAO0wE,GAAP1wE,IAAyB6wE,CAAzB7wE;EAKR,YAAOwkE,OACHxkE,CADGwkE,GACMzqD,GAAO5D,GAAWC,GAAU3O,EADlC+8D,EACgD57E,EAAEuD,KADlDq4E,CAAP;KAlqEFV,EAsqEAA,WAAAA,8BAAAA,GAAA,UACInjE,CADJ,EACkB/X,CADlB,EAC+BwS,CAD/B;EAEElN,SAAKm2E,gBAALn2E,EAAuByS,GAAI/X,EAA3BsF,EAA+B,+BAA/BA,EAiCA,KA/BM,IAAAmZ,WAAA,EAAC0S,QAAD,EAAQgnB,QAAR,EAAiBC,QAAjB,EAAyBh7C,QAAzB,EACAy6B,WADA,EACGwgB,QADH,EACYC,QADZ,EAGAlhC,IAAS,IAAI3T,YAAJ,CAAiB0tB,IAAQgnB,CAARhnB,GAAkBinB,CAAlBjnB,GAA2B/zB,CAA5C,CAHT,EAIAqiF,IAAW1nE,EAAGpS,QAAHoS,EAJX,EASAwgC,KACH/lC,KAAgB6lC,IAAU,CAA1B7lC,GAA+B2lC,IAAU,CAAzC3lC,GAA6C2lC,GAC7C3lC,KAAgB8lC,IAAS,CAAzB9lC,GAA8B4lC,IAAS,CAAvC5lC,GAA2C4lC,EAXxC,EAcAI,KACHhmC,KAAgB6lC,IAAU,CAA1B7lC,GAA+B6lC,IAAU,CAAzC7lC,GAA6C6lC,GAC7C7lC,KAAgB8lC,IAAS,CAAzB9lC,GAA8B8lC,IAAS,CAAvC9lC,GAA2C8lC,EAhBxC,EAmBAvgB,IAAcwgB,EAAe,CAAfA,IAAoBC,EAAe,CAAfA,CAnBlC,EAoBArgB,IAAaogB,EAAe,CAAfA,IAAoBC,EAAe,CAAfA,CApBjC,EAsBAC,IAAiB,IAAI1gB,CAtBrB,EAuBA2gB,IAAgB,IAAIvgB,CAvBpB,EA2BAwgB,IAAyC,IAA5B94C,KAAKuQ,IAALvQ,CAAU44C,CAAV54C,CAA4B,GAAK,CA3B9C,EA4BA+4C,IAAuC,IAA3B/4C,KAAKuQ,IAALvQ,CAAU64C,CAAV74C,CAA2B,GAAK,CA5B5C,EA+BGS,IAAI,CAAb,EAAgBA,IAAI6wB,CAApB,EAA2B7wB,GAA3B,EAEE,KADA,IAAMohC,IAAcphC,IAAIN,EAAE2E,OAAF3E,CAAU,CAAVA,CAAxB,EACSO,IAAI,CAAb,EAAgBA,IAAI43C,CAApB,EAA6B53C,GAA7B,EAME,KALA,IAAMwnF,IAAYrmD,IAAcnhC,IAAIP,EAAE2E,OAAF3E,CAAU,CAAVA,CAApC,EAGMkoF,IAAaroF,KAAKkC,KAALlC,CAAWU,IAAIk4C,CAAf54C,CAHnB,EAIMsoF,IAAWtoF,KAAKkC,KAALlC,CAAWqoF,IAAcvvC,IAAY,CAArC94C,CAJjB,EAKS4vB,IAAI,CAAb,EAAgBA,IAAI2oB,CAApB,EAA4B3oB,GAA5B,EAOE,KANA,IAAMu4D,IAAYD,IAAYt4D,IAAIzvB,EAAE2E,OAAF3E,CAAU,CAAVA,CAAlC,EAGMooF,IAAavoF,KAAKkC,KAALlC,CAAW4vB,IAAIipB,CAAf74C,CAHnB,EAIMwoF,IAAWxoF,KAAKkC,KAALlC,CAAWuoF,IAAcxvC,IAAW,CAApC/4C,CAJjB,EAMSod,IAAI,CAAb,EAAgBA,IAAI7f,CAApB,EAA2B6f,GAA3B,EAAgC;EAI9B,WAHA,IAAIqrE,IAAQ,CAAZ,EAGSC,IAAW,CAApB,EAAuBA,IAAW5vC,CAAlC,EAA6C4vC,GAA7C,EAAyD;EACvD,YAAMrD,IAAMqD,IAAWJ,CAAvB,CAEA,MAAIjD,IAAM,CAANA,IAAWA,KAAO7sC,EAAtB,EAAA;EAIA,cAAMmwC,IAAY9mD,IAAcwjD,IAAMntE,EAAGpT,OAAHoT,CAAW,CAAXA,CAAtC;EAAA,cACM0tE,IAAgBP,IAAMntD,CAD5B,CAMA,IAAIx3B,MAJqBV,KAAKE,GAALF,CACrBs4C,IAAU,CADWt4C,EAErB2S,IAAe3S,KAAKuE,KAALvE,CAAW4lF,CAAX5lF,CAAf2S,GACe3S,KAAKkC,KAALlC,CAAW4lF,CAAX5l
F,CAHMA,CAIzB,EAGA,KAAK,IAAI4oF,IAAW,CAApB,EAAuBA,IAAW7vC,CAAlC,EAA4C6vC,GAA5C,EAAwD;EACtD,gBAAMtD,IAAMsD,IAAWJ,CAAvB,CAEA,MAAIlD,IAAM,CAANA,IAAWA,KAAO7sC,EAAtB,EAAA;EAIA,kBAAMowC,IAAYF,IAAYrD,IAAMptE,EAAGpT,OAAHoT,CAAW,CAAXA,CAApC;EAAA,kBACMguE,IAAgBZ,IAAMhtD,CAD5B,CAOI1I,MALqB5vB,KAAKE,GAALF,CACrBu4C,IAAS,CADYv4C,EAErB2S,IAAe3S,KAAKuE,KAALvE,CAAWkmF,CAAXlmF,CAAf2S,GACe3S,KAAKkC,KAALlC,CAAWkmF,CAAXlmF,CAHMA,CAKrB4vB,KACF64D,KAAS7I,EAASiJ,IAAYzrE,CAArBwiE,CADPhwD;;;;EAKRrY,SAAO4wE,IAAY/qE,CAAnB7F,IAAwBkxE,CAAxBlxE;EAKR,YAAOywE,SAAazwE,CAAbywE,EAAqB7nF,EAAEuB,KAAvBsmF,EAA8B7nF,EAAEuD,KAAhCskF,CAAP;KApwEF3M,EAuwEAA,WAAAA,mBAAAA,GAAA,UACIl7E,CADJ,EACiBgN,CADjB,EAC0CC,CAD1C,EAEIC,CAFJ,EAE6BC,CAF7B,EAGItF,CAHJ;EAIEvC,SAAKm2E,gBAALn2E,EACKtF,GAAGgN,GAAMC,GAAUE,GAAOtF,EAD/BvC,EACwC,oBADxCA,EAmBA,KAhBA,IAAMikD,IAAQvpD,EAAE2F,QAAF3F,EAAd,EACM2oF,IAAQ37E,EAAKrH,QAALqH,EADd,EAEM47E,IAAU37E,EAAStH,QAATsH,EAFhB,EAGM47E,IAAQ17E,IAAQA,EAAMxH,QAANwH,EAARA,GAA2B,IAAI1J,YAAJ,EAAkB,EAAlB,CAHzC,EAIMqlF,IAAUjhF,IAASA,EAAOlC,QAAPkC,EAATA,GAA6B,IAAIpE,YAAJ,EAAkB,EAAlB,CAJ7C,EAKMslF,IAAU,IAAItlF,YAAJ,CAAiB8lD,EAAM7pD,MAAvB,CALhB,EAOMspF,IAAgBF,EAAQppF,MAP9B,EAQMupF,IAAcJ,EAAMnpF,MAR1B,EASMwpF,IAAgBN,EAAQlpF,MAT9B,EAUMypF,IAAcR,EAAMjpF,MAV1B,EAYI0pF,IAAO,CAZX,EAaIC,IAAK,CAbT,EAcIC,IAAK,CAdT,EAeIC,IAAK,CAfT,EAgBSlpF,IAAI,CAAb,EAAgBA,IAAIkpD,EAAM7pD,MAA1B,IAAoCW,CAApC,EACE0oF,EAAQ1oF,CAAR0oF,IAAaD,EAAQM,GAARN,KACRv/B,EAAMlpD,CAANkpD,IAAWo/B,EAAMU,GAANV,KAAeE,EAAMS,GAANT,IACvBhpF,KAAKmC,IAALnC,CAAU+oF,EAAQW,GAARX,IAAgB17E,CAA1BrN,CAFRkpF,EAGIK,KAAQJ,CAARI,KACFA,IAAO,CADLA,CAHJL,EAMIM,KAAMF,CAANE,KACFA,IAAK,CADHA,CANJN,EASIO,KAAML,CAANK,KACFA,IAAK,CADHA,CATJP,EAYIQ,KAAML,CAANK,KACFA,IAAK,CADHA,CAZJR,CAgBF,OAAOS,SAAST,CAATS,EAAkBxpF,EAAEuB,KAApBioF,CAAP;KA/yEFtO,EAkzEAA,WAAAA,6BAAAA,GAAA,UACIl7E,CADJ,EACiB80C,CADjB,EACsCjhC,CADtC,EACoD7W,CADpD,EAEI8W,CAFJ;EAGExO,SAAKm2E,gBAALn2E,CAAsBtF,CAAtBsF,EAAyB,8BAAzBA,EAEA,IAAM6wC,IAAWn2C,EAAEuB,KAAFvB,CAAQ,CAARA,CAAjB;EAAA,QACM40C,IAAOuB,IAAW,CADxB;EAAA,QAEM0tC,IAAU7jF,EAAE2F,QAAF3F,EAFhB;EAAA,QAGMwB,IAAOyH,cAAmBjJ,EAAEuB,KAArB0H,CAHb;EAAA,QAIMzI,IAAS,IAAIiD,YAAJ,CAAiBjC,CAAjB,CAJf,CAMA,UAAA,CAA2BqG,CAA3B;EAQE,WAPA,IAAM4hF,IAAiB5hF,IAASsuC,CAAhC,EACIuzC,IACA7hF,IAAS4hF,CAAT5hF,GAA0BhI,KAAKI,GAALJ,CAAS,CAATA,EAAY4pF,IAAiB30C,CAA7Bj1C,CAF9B,EAGM8pF,IAAe9hF,IAAS4hF,CAAT5hF,GACjBhI,KAAKE,GAALF,CAAS4pF,IAAiB30C,CAA1Bj1C,EAAuC+0C,CAAvC/0C,CAJJ,EAMIO,IAAM,CACV,EAAOspF,KAAkBC,CAAzB,EAAuCD,GAAvC,EAAyD;EACvD,YAAMzmC,IAAI4gC,EAAQ6F,CAAR7F,CAAV,CACAzjF,KAAO6iD,IAAIA,CAAX7iD;EAEF,cAAOA,CAAP;EAGF,UAAK,IAAIyH,IAAS,CAAlB,EAAqBA,IAASrG,CAA9B,EAAoCqG,GAApC,EAA8C;EAC5C,UAAMzH,IAAMwpF,EAAkB/hF,CAAlB+hF,CAAZ;EAAA,UACM1pF,IAAM2jF,EAAQh8E,CAARg8E,IAAkBhkF,KAAKmO,GAALnO,CAASgU,IAAO7W,IAAQoD,CAAxBP,GAA8BiU,CAA9BjU,CAD9B,CAEAW,EAAOqH,CAAPrH,IAAiBN,CAAjBM;EAGF,YAAOqnF,SAAarnF,CAAbqnF,EAAqB7nF,EAAEuB,KAAvBsmF,CAAP;KAl1EF3M,EAq1EAA,WAAAA,QAAAA,GAAA,UACInjE,CADJ,EACkB2V,CADlB,EACwCC,CADxC,EAEImnB,CAFJ,EAEyBjhC,CAFzB,EAEuC7W,CAFvC,EAGI8W,CAHJ;EAIExO,SAAKm2E,gBAALn2E,CAAsByS,CAAtBzS,EAA0B,SAA1BA,EAQA,KAPA,IAAM6wC,IAAWp+B,EAAGxW,KAAHwW,CAAS,CAATA,CAAjB,EACM0nE,IAAW1nE,EAAGpS,QAAHoS,EADjB,EAEM8xE,IAAmBn8D,EAAW/nB,QAAX+nB,EAFzB,EAGMo8D,IAAoBn8D,EAAYhoB,QAAZgoB,EAH1B,EAIMntB,IAAS,IAAIiD,YAAJ,CAAiBwF,cAAmB8O,EAAGxW,KAAtB0H,CAAjB,CAJf,EAKMzH,IAAOyH,cAAmB8O,EAAGxW,KAAtB0H,CALb,EAOSpB,IAAS,CAAlB,EAAqBA,IAASrG,CAA9B,EAAoCqG,GAApC,EAA8C;EAQ5C,WAPA,IAAM4hF,IAAiB5hF,IAASsuC,CAAhC,EACM4zC,IACDliF,IAAS4hF,CAAT5hF,GAA2BhI,KAAKI,GAALJ,CAAS,CAATA,EAAY4pF,IAAiB30C,CAA7Bj1C,CAFhC,EAGMmqF,IAAYniF,IAAS4hF,CAAT5hF,GACdhI,KAAKE,GAAL
F,CAASs2C,CAATt2C,EAAmB4pF,IAAiB30C,CAAjB20C,GAA+B,CAAlD5pF,CAJJ,EAMI0M,IAAO,CANX,EAOSwI,IAAIg1E,CAAb,EAAyBh1E,IAAIi1E,CAA7B,EAAuCj1E,GAAvC,EACExI,KAAQ1M,KAAKmO,GAALnO,CAASgqF,EAAiB90E,CAAjB80E,CAAThqF,EAA8B,CAA9BA,CAAR0M,CAEFA,IAAOvP,IAAQuP,CAARvP,GAAe6W,CAAtBtH,CAEA,KAASwI,IAAIg1E,CAAb,EAAyBh1E,IAAIi1E,CAA7B,EAAuCj1E,GAAvC,EAA4C;EAC1C,YAAIk1E,KAAO,IAAIjtF,IAAQ8W,IAAO+1E,EAAiB90E,CAAjB80E,IAC1BC,EAAkBjiF,CAAlBiiF,IAA4Bv9E,CADhC,CAEI1E,MAAWkN,CAAXlN,KACFoiF,KAAOpqF,KAAKmO,GAALnO,CAAS0M,CAAT1M,GAAgBiU,CAAhBjU,CADLgI,GAGJoiF,KAAOxK,EAAS53E,CAAT43E,CAHH53E,EAIJrH,EAAOuU,CAAPvU,KAAaypF,CAJTpiF;;EAOR,YAAOggF,SAAarnF,CAAbqnF,EAAqB9vE,EAAGxW,KAAxBsmF,CAAP;KAx3EF3M,EA23EAA,WAAAA,YAAAA,GAAA,UACI3wD,CADJ,EACsBqD,CADtB,EAC2CC,CAD3C,EAEIC,CAFJ;EAGExoB,SAAKm2E,gBAALn2E,CAAsBilB,CAAtBjlB,EAA8B,aAA9BA,EASA,KAPA,IAAM4kF,IAAgBt8D,IAAarD,CAAbqD,GAAsBu8D,QAAY5/D,CAAZ4/D,CAA5C,EACMt4D,IAAYq4D,EAAc3oF,KAAd2oF,CAAoB,CAApBA,CADlB,EAEME,IAAYF,EAAc3oF,KAAd2oF,CAAoB,CAApBA,CAFlB,EAGM3vE,IAAMgkE,OAAoB1sD,GAAWhE,EAA/B0wD,EAA4C,OAA5CA,CAHZ,EAIMe,IAAU/kE,EAAI5U,QAAJ4U,EAJhB,EAKM8vE,IAAWH,EAAcvkF,QAAdukF,EALjB,EAOS5pF,IAAI,CAAb,EAAgBA,IAAIuxB,CAApB,IAAiCvxB,CAAjC,EAAoC;EAClC,UAAMuH,IAASvH,IAAI8pF,CAAnB;EAAA,UAGME,IAAM,IAAI7mF,YAAJ,CAAiB2mF,IAAY,CAA7B,CAHZ,CAIAE,EAAI,CAAJA,IAASD,EAASxiF,CAATwiF,CAATC,CACA,KAAK,IAAIC,IAAQ,CAAjB,EAAoBA,IAAQD,EAAI5qF,MAAhC,IAA0C6qF,CAA1C,EACED,EAAIC,CAAJD,IAAaA,EAAIC,IAAQ,CAAZD,IAAiBD,EAASxiF,IAAS0iF,CAAlBF,CAA9BC,CAKF,KAFA,IAAMxqF,IAASymD,aAAgBz4B,EAAKvnB,QAALunB,EAAhBy4B,CAAf,EACMh1B,IAAYjxB,IAAIutB,CADtB,EAES28D,IAAW,CAApB,EAAuBA,IAAW38D,CAAlC,IAAgD28D,CAAhD,EAA0D;EACxD,YAAMjqF,IAAIT,GAAV,CAGAw/E,EAAQ/tD,IAAYi5D,CAApBlL,IAAgCgL,EAAI5qF,MAApC4/E,CAEA,KAAK,IAAImL,IAAQ,CAAjB,EAAoBA,IAAQH,EAAI5qF,MAAhC,EAAwC+qF,GAAxC,EACE,IAAIlqF,IAAI+pF,EAAIG,CAAJH,CAAR,EAAoB;EAClBhL,YAAQ/tD,IAAYi5D,CAApBlL,IAAgCmL,CAAhCnL,CACA;;;EAKR,YAAO/kE,CAAP;KAj6EF2gE,EAo6EAA,WAAAA,OAAAA,GAAA,UAAOnvE,CAAP,EAA0B3O,CAA1B,EAAyC2wB,CAAzC,EAA0DC,CAA1D;EAEE1oB,SAAKm2E,gBAALn2E,CAAsByG,CAAtBzG,EAA+B,QAA/BA,EAEA,IAAMiV,IAAM,IAAI9W,YAAJ,CAAiBsI,EAAQvK,IAARuK,GAAe3O,CAAhC,CAAZ,CACAmd,EAAI9S,IAAJ8S,CAASyT,CAATzT,EAEA,KAAK,IAAImwE,IAAQ,CAAjB,EAAoBA,IAAQ3+E,EAAQvK,IAApC,IAA4CkpF,CAA5C,EACM3+E,EAAQkQ,GAARlQ,CAAY2+E,CAAZ3+E,KAAsB,CAAtBA,IAA2BA,EAAQkQ,GAARlQ,CAAY2+E,CAAZ3+E,IAAqB3O,CAAhD2O,KACFwO,EAAImwE,IAAQttF,CAARstF,GAAgB3+E,EAAQkQ,GAARlQ,CAAY2+E,CAAZ3+E,CAApBwO,IAA0CwT,CADxChiB,EAIN,OAAO4+E,SAAapwE,CAAbowE,GAAmB5+E,EAAQvK,MAAMpE,EAAjCutF,EAAyC,OAAzCA,CAAP;KAh7EFzP,EAm7EAA,WAAAA,kBAAAA,GAAA,UACIjtD,CADJ,EACqBC,CADrB,EACuCC,CADvC,EAEIC,CAFJ,EAE0BC,CAF1B;EAOE,WAJA/oB,KAAKm2E,gBAALn2E,CAAsB2oB,CAAtB3oB,EAA6B,mBAA7BA,GAIO60D,sBAFWlsC,EAAMtoB,QAANsoB,EAEXksC,EADYjsC,EAAOvoB,QAAPuoB,EACZisC,EACoBhsC,CADpBgsC,EACmC/rC,CADnC+rC,EACiD9rC,CADjD8rC,CAAP;KA17EF+gB,EA87EAA,WAAAA,IAAAA,GAAA,UAAIl7E,CAAJ;EACE,QAAmB,MAAfA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAJ,EACE,MAAM,IAAInB,KAAJ,CAAU,wCAAV,CAAN,CAGF,OAAOyG,KAAKq1D,OAALr1D,CAAatF,CAAbsF,GADS,CACTA,CAAP;KAn8EF41E,EAs8EAA,WAAAA,KAAAA,GAAA,UAAKl7E,CAAL;EACE,QAAmB,MAAfA,EAAEuB,KAAFvB,CAAQ,CAARA,CAAJ,EACE,MAAM,IAAInB,KAAJ,CAAU,yCAAV,CAAN,CAGF,OAAOyG,KAAKq1D,OAALr1D,CAAatF,CAAbsF,GADS,CACTA,CAAP;KA38EF41E,EA88EQA,WAAAA,QAAAA,GAAR,UAAgBl7E,CAAhB,EAA6BqvB,CAA7B;EACE,QAAMu7D,IAAM5qF,EAAEwK,IAAFxK,EAAZ;EAAA,QAEMiC,IAAI2oF,EAAIppF,IAFd,CAIA,IAAI8D,KAAKulF,aAALvlF,CAAmBrD,CAAnBqD,CAAJ,EAA2B;EACzB,UAAI9E,IAAS8E,KAAKwlF,SAALxlF,CAAeslF,CAAftlF,EAAoBrD,CAApBqD,EAAuB+pB,CAAvB/pB,EAAgC6hD,IAAhC7hD,CAAqCtF,EAAEuB,KAAFvB,CAAQ,CAARA,CAArCsF,EAAiDtF,EAAEuB,KAAFvB,CAAQ,CAARA,CAAjDsF,CAAb,CAMA,OALI+pB,MACF7uB,IAASuqF,QACIC,KA
ASxqF,CAATwqF,EAAiB58E,GAAjB48E,CAAqB77D,OAAOltB,CAAPktB,CAArB67D,CADJD,EAEIE,KAASzqF,CAATyqF,EAAiB78E,GAAjB68E,CAAqB97D,OAAOltB,CAAPktB,CAArB87D,CAFJF,CADP17D,GAKG7uB,CAAP;EAEA,SAAM8J,IAAOtK,EAAE2F,QAAF3F,EAAb;EAAA,QAGMoX,IAAS8zE,uBADX5lF,KAAK6lF,wBAAL7lF,CAA8BgF,CAA9BhF,EAAoCrD,CAApCqD,EAAuC+pB,CAAvC/pB,CACW4lF,CAHf,CAIA,OAAOH,QAAY3zE,EAAO6T,IAAnB8/D,EAAyB3zE,EAAO8T,IAAhC6/D,EAAsC5jC,IAAtC4jC,CAA2C/qF,EAAEuB,KAAFvB,CAAQ,CAARA,CAA3C+qF,EAAuD/qF,EAAEuB,KAAFvB,CAAQ,CAARA,CAAvD+qF,CAAP;KAh+EJ7P,EAo+EQA,WAAAA,cAAAA,GAAR,UAAsB15E,CAAtB;EACE,WAA6B,MAArBA,IAAOA,IAAO,CAAO,CAA7B;KAr+EF05E,EAy+EQA,WAAAA,UAAAA,GAAR,UAAkBjkE,CAAlB,EAAmCzV,CAAnC,EAAiD6tB,CAAjD;EACE,QAAa,MAAT7tB,CAAJ,EACE,OAAOyV,CAAP,CAEF,IAAM3M,IAAO2M,EAAMtR,QAANsR,EAAb;EAAA,QACMs8D,IAAO/xE,IAAO,CADpB;EAAA,QAEM4pF,IAAcC,qBAAkC/gF,CAAlC+gF,CAFpB;EAAA,QAGIC,IAAaP,QAAYK,EAAYngE,IAAxB8/D,EAA8BK,EAAYlgE,IAA1C6/D,EAAgDvgF,IAAhDugF,EAHjB;EAAA,QAIMQ,IAAaC,oBAAiClhF,CAAjCkhF,CAJnB;EAAA,QAKIC,IAAYV,QAAYQ,EAAWtgE,IAAvB8/D,EAA6BQ,EAAWrgE,IAAxC6/D,EAA8CvgF,IAA9CugF,EALhB,CAQAO,IAAahmF,KAAKwlF,SAALxlF,CAAegmF,CAAfhmF,EAA2BiuE,CAA3BjuE,EAAiC+pB,CAAjC/pB,CAAbgmF,EACAG,IAAYnmF,KAAKwlF,SAALxlF,CAAemmF,CAAfnmF,EAA0BiuE,CAA1BjuE,EAAgC+pB,CAAhC/pB,CADZgmF,CAGA,IAAM9rE,IAAIksE,UAAuBlqF,CAAvBkqF,EAA6Br8D,CAA7Bq8D,CAAV;EAAA,QACMC,IAAWZ,QAAYvrE,EAAEyL,IAAd8/D,EAAoBvrE,EAAE0L,IAAtB6/D,EAA4B78E,GAA5B68E,CAAgCU,CAAhCV,CADjB;EAAA,QAGMa,IAAUN,EAAW39E,GAAX29E,CAAeK,CAAfL,CAHhB;EAAA,QAIMO,IAAUP,EAAWx9E,GAAXw9E,CAAeK,CAAfL,CAJhB;EAAA,QAMMQ,IAAad,KAASY,CAATZ,EAAkBv+E,MAAlBu+E,CAAyBA,KAASa,CAATb,CAAzBA,CANnB;EAAA,QAOMe,IAAad,KAASW,CAATX,EAAkBx+E,MAAlBw+E,CAAyBA,KAASY,CAATZ,CAAzBA,CAPnB,CASA,OAAOF,QAAYe,CAAZf,EAAwBgB,CAAxBhB,EAAoCvgF,IAApCugF,EAAP;KAjgFF7P,EAqgFQA,WAAAA,yBAAAA,GAAR,UACI5wE,CADJ,EACsB9I,CADtB,EACoC6tB,CADpC;EAIE,SAFA,IAAMnuB,IAAM,IAAIuC,YAAJ,CAAwB,IAAPjC,CAAjB,CAAZ,EAESjB,IAAI,CAAb,EAAgBA,IAAIiB,CAApB,EAA0BjB,GAA1B,EAA+B;EAG7B,WAFA,IAAI0qB,IAAO,CAAX,EACIC,IAAO,CADX,EAESuE,IAAI,CAAb,EAAgBA,IAAIjuB,CAApB,EAA0BiuB,GAA1B,EAA+B;EAC7B,YAAMjQ,IAAIwsE,SAAsBzrF,IAAIkvB,CAA1Bu8D,EAA6BxqF,CAA7BwqF,EAAmC38D,CAAnC28D,CAAV;EAAA,YACMC,IAAOC,oBAAiC5hF,CAAjC4hF,EAAuDz8D,CAAvDy8D,CADb,CAEAjhE,KAAQghE,EAAKhhE,IAALghE,GAAYzsE,EAAEyL,IAAdghE,GAAqBA,EAAK/gE,IAAL+gE,GAAYzsE,EAAE0L,IAA3CD,EACAC,KAAQ+gE,EAAKhhE,IAALghE,GAAYzsE,EAAE0L,IAAd+gE,GAAqBA,EAAK/gE,IAAL+gE,GAAYzsE,EAAEyL,IAD3CA;EAGEoE,aACFpE,KAAQzpB,CAARypB,EACAC,KAAQ1pB,CAFN6tB,GAIJ88D,mBAAgCjrF,CAAhCirF,EAAqClhE,CAArCkhE,EAA2CjhE,CAA3CihE,EAAiD5rF,CAAjD4rF,CAJI98D;EAMN,YAAOnuB,CAAP;KAxhFFg6E,EA2hFAA,WAAAA,aAAAA,GAAA,UAAal7E,CAAb,EAA0BqV,CAA1B,EAA6CxC,CAA7C;EAEE3J,WACmB,WAAf2J,CADJ3J,EAEI,iEACI2J,CAHR3J,GAIAA,OACImM,IAAY,CADhBnM,EAEI,wDAAsDmM,CAF1DnM,CAJAA,CAsBA,KAdA,IAAM2oB,IAAY7xB,EAAEuB,KAAFvB,CAAQ,CAARA,CAAlB,EACMmpD,IAAcnpD,EAAEuB,KAAFvB,CAAQ,CAARA,CADpB,EAEMopD,IAAappD,EAAEuB,KAAFvB,CAAQ,CAARA,CAFnB,EAGMqpD,IAAarpD,EAAEuB,KAAFvB,CAAQ,CAARA,CAHnB,EAKMq6D,IAAelR,IAAc9zC,CALnC,EAMMilD,IAAclR,IAAa/zC,CANjC,EAOMklD,IAAclR,KAAch0C,IAAYA,CAA1Bg0C,CAPpB,EASMw6B,IAAU7jF,EAAE2F,QAAF3F,EAThB,EAUMQ,IACF,IAAIiD,YAAJ,CAAiBouB,IAAYwoC,CAAZxoC,GAA2ByoC,CAA3BzoC,GAAyC0oC,CAA1D,CAXJ,EAaI+qB,IAAY,CAbhB,EAcShlF,IAAI,CAAb,EAAgBA,IAAIuxB,CAApB,IAAiCvxB,CAAjC,EACE,KAAK,IAAIoqC,IAAI,CAAb,EAAgBA,IAAI2vB,CAApB,IAAoC3vB,CAApC,EAGE,KAFA,IAAM0hD,IAAMvsF,KAAKkC,KAALlC,CAAW6qC,IAAIr1B,CAAfxV,CAAZ,EACMwsF,IAAW3hD,IAAIr1B,CADrB,EAESo1B,IAAI,CAAb,EAAgBA,IAAI6vB,CAApB,IAAmC7vB,CAAnC,EAIE,KAHA,IAAM6hD,IAAMzsF,KAAKkC,KAALlC,CAAW4qC,IAAIp1B,CAAfxV,CAAZ,EAEM0sF,KAAWF,IAAUh3E,CAAVg3E,GADA5hD,IAAIp1B,KAC6BklD,CAFlD,EAGSt9C,IAAI,CAAb,EAAgBA,IAAIs9C,CAApB,I
AAmCt9C,CAAnC,EAAsC;EACpC,UACMuvE,IADMvvE,IAAIsvE,CAAJtvE,GAEFosC,KAAcijC,IAAMljC,KAAcgjC,IAAMjjC,IAAc7oD,CAAlC8oD,CAApBC,CAFV,CAGA7oD,EAAO8kF,GAAP9kF,IAAsBqjF,EAAQ2I,CAAR3I,CAAtBrjF;EAKR,YAAOqnF,SACHrnF,CADGqnF,GACMh2D,GAAWwoC,GAAcC,GAAaC,EAD5CstB,CAAP;KApkFF3M,EAwkFQA,WAAAA,oBAAAA,GAAR,UACIl8E,CADJ,EACesB,CADf,EAC0BiD,CAD1B,EAEIunB,CAFJ;EAGE,QAAM5nB,IACF4wB,2BAA0C90B,EAAEuC,KAA5CuyB,EAAmDxzB,EAAEiB,KAArDuyB,CADJ;EAAA,QAEMtzB,IAASk7E,OAAWx4E,CAAXw4E,EAAqBn4E,CAArBm4E,CAFf;EAAA,QAGM8C,IAAQx/E,EAAE2G,QAAF3G,EAHd;EAAA,QAIMytF,IAAQnsF,EAAEqF,QAAFrF,EAJd;EAAA,QAKMosF,IAAiB5uD,iBAAgC9+B,EAAEuC,KAAlCu8B,EAAyC56B,CAAzC46B,CALvB;EAAA,QAMM6uD,IAAiB7uD,iBAAgCx9B,EAAEiB,KAAlCu8B,EAAyC56B,CAAzC46B,CANvB;EAAA,QAQMwhD,IAAU9+E,EAAOgD,MARvB,CASA,IAAIkpF,EAAehtF,MAAfgtF,GAAwBC,EAAejtF,MAAvCgtF,KAAkD,CAAtD,EACE,KAAK,IAAIrsF,IAAI,CAAb,EAAgBA,IAAIi/E,EAAQ5/E,MAA5B,IAAsCW,CAAtC,EACEi/E,EAAQj/E,CAARi/E,IAAax0D,EAAG0zD,EAAMn+E,IAAIm+E,EAAM9+E,MAAhB8+E,CAAH1zD,EAA4B2hE,EAAMpsF,IAAIosF,EAAM/sF,MAAhB+sF,CAA5B3hE,CAAbw0D,CAFJ,KAKE;EAAA,UAAMsN,IAAO5tF,EAAE8L,MAAF9L,EAAb;EAAA,UACM6tF,IAAOvsF,EAAEwK,MAAFxK,EADb;EAAA,wBAESD;EACP,YAAMomB,IAAMjmB,EAAOyrD,UAAPzrD,CAAkBH,CAAlBG,CAAZ;EAAA,YAEMssF,IAAOrmE,EAAItjB,KAAJsjB,EAAWznB,EAAE0F,IAAb+hB,CAFb,CAGAimE,EAAehnF,OAAfgnF,CAAuB,UAAAzvE,CAAA;EAAK,iBAAA6vE,EAAK7vE,CAAL6vE,IAAU,CAAV;WAA5BJ,EACA,IAAMK,IAASH,EAAKpjF,UAALojF,CAAgBE,CAAhBF,CAAf;EAAA,YAEMI,IAAOvmE,EAAItjB,KAAJsjB,EAAWnmB,EAAEoE,IAAb+hB,CAFb,CAGAkmE,EAAejnF,OAAfinF,CAAuB,UAAA1vE,CAAA;EAAK,iBAAA+vE,EAAK/vE,CAAL+vE,IAAU,CAAV;WAA5BL,EACA,IAAMM,IAASJ,EAAKrjF,UAALqjF,CAAgBG,CAAhBH,CAAf,CAEAvN,EAAQj/E,CAARi/E,IAAax0D,EAAG0zD,EAAMuO,CAANvO,CAAH1zD,EAAkB2hE,EAAMQ,CAANR,CAAlB3hE,CAAbw0D;SAbF,CAEA,KAASj/E,IAAI,CAAb,EAAgBA,IAAIi/E,EAAQ5/E,MAA5B,IAAsCW,CAAtC,IAASA;EAcX,YAAOG,EAAO0mD,QAAP1mD,EAAP;KAzmFF06E,EA4mFQA,WAAAA,2BAAAA,GAAR,UACIl8E,CADJ,EACesB,CADf,EAEIwqB,CAFJ;EAKE,QAAM5nB,IACF4wB,2BAA0C90B,EAAEuC,KAA5CuyB,EAAmDxzB,EAAEiB,KAArDuyB,CADJ;EAAA,QAEMo5D,IAAaxR,OAAWx4E,CAAXw4E,EAAqB,SAArBA,CAFnB;EAAA,QAGMyR,IAAazR,OAAWx4E,CAAXw4E,EAAqB,SAArBA,CAHnB;EAAA,QAKM8C,IAAQx/E,EAAE2G,QAAF3G,EALd;EAAA,QAMMytF,IAAQnsF,EAAEqF,QAAFrF,EANd;EAAA,QAOMosF,IAAiB5uD,iBAAgC9+B,EAAEuC,KAAlCu8B,EAAyC56B,CAAzC46B,CAPvB;EAAA,QAQM6uD,IAAiB7uD,iBAAgCx9B,EAAEiB,KAAlCu8B,EAAyC56B,CAAzC46B,CARvB;EAAA,QAUMsvD,IAAWF,EAAW1pF,MAV5B;EAAA,QAWM6pF,IAAWF,EAAW3pF,MAX5B,CAaA,IAAIkpF,EAAehtF,MAAfgtF,GAAwBC,EAAejtF,MAAvCgtF,KAAkD,CAAtD,EACE,KAAK,IAAIrsF,IAAI,CAAb,EAAgBA,IAAI+sF,EAAS1tF,MAA7B,EAAqCW,GAArC,EAA0C;EACxC,UAAMitF,IAAOjtF,IAAIm+E,EAAM9+E,MAAvB;EAAA,UACM6tF,IAAOltF,IAAIosF,EAAM/sF,MADvB;EAAA,UAGMc,IACFsqB,EAAG0zD,EAAa,IAAP8O,CAAN9O,CAAH1zD,EAAoB0zD,EAAa,IAAP8O,CAAO,GAAI,CAAjB9O,CAApB1zD,EAAyC2hE,EAAa,IAAPc,CAANd,CAAzC3hE,EACG2hE,EAAa,IAAPc,CAAO,GAAI,CAAjBd,CADH3hE,CAJJ,CAOAsiE,EAAS/sF,CAAT+sF,IAAc5sF,EAAOyqB,IAArBmiE,EACAC,EAAShtF,CAATgtF,IAAc7sF,EAAO0qB,IADrBkiE;OATJ,MAaE;EAAA,UAAMI,IAAWloF,KAAKgF,IAALhF,CAAU2W,GAAV3W,CAActG,EAAEgL,MAAhB1E,EAAwBsnD,cAAxBtnD,CAAuC2lB,IAAvC3lB,CAA4CwF,MAA5CxF,EAAjB;EAAA,UACMmoF,IAAWnoF,KAAKgF,IAALhF,CAAU2W,GAAV3W,CAAchF,EAAE0J,MAAhB1E,EAAwBsnD,cAAxBtnD,CAAuC2lB,IAAvC3lB,CAA4CwF,MAA5CxF,EADjB;EAAA,wBAESjF;EACP,YAAMomB,IAAMymE,EAAWjhC,UAAXihC,CAAsB7sF,CAAtB6sF,CAAZ;EAAA,YAEMJ,IAAOrmE,EAAItjB,KAAJsjB,EAAWznB,EAAE0F,IAAb+hB,CAFb,CAGAimE,EAAehnF,OAAfgnF,CAAuB,UAAAzvE,CAAA;EAAK,iBAAA6vE,EAAK7vE,CAAL6vE,IAAU,CAAV;WAA5BJ,EACA,IAAMK,IAASS,EAAShkF,UAATgkF,CAAoBV,CAApBU,CAAf;EAAA,YAEMR,IAAOvmE,EAAItjB,KAAJsjB,EAAWnmB,EAAEoE,IAAb+hB,CAFb,CAGAkmE,EAAejnF,OAAfinF,CAAuB,UAAA1vE,CAAA;EAAK,iBAAA+vE,EAAK/vE,CAAL+vE,IAAU,CAAV;WAA5BL,EACA,IAAMM,IA
ASQ,EAASjkF,UAATikF,CAAoBT,CAApBS,CAAf;EAAA,YAEMC,IACF5iE,EAAG0zD,EAAe,IAATuO,CAANvO,CAAH1zD,EAAsB0zD,EAAe,IAATuO,CAAS,GAAI,CAAnBvO,CAAtB1zD,EAA6C2hE,EAAe,IAATQ,CAANR,CAA7C3hE,EACG2hE,EAAe,IAATQ,CAAS,GAAI,CAAnBR,CADH3hE,CAHJ,CAMAsiE,EAAS/sF,CAAT+sF,IAAcM,EAASziE,IAAvBmiE,EACAC,EAAShtF,CAATgtF,IAAcK,EAASxiE,IADvBkiE;SAjBF,CAEA,KAAS/sF,IAAI,CAAb,EAAgBA,IAAI+sF,EAAS1tF,MAA7B,EAAqCW,GAArC,IAASA;EAmBX,YAAOiF,KAAKgmB,OAALhmB,CAAa4nF,EAAWhmC,QAAXgmC,EAAb5nF,EAAoC6nF,EAAWjmC,QAAXimC,EAApC7nF,CAAP;KAhqFF41E,EAmqFAA,WAAAA,MAAAA,GAAA,UAAwBl7E,CAAxB,EAA8B0uB,CAA9B,EAAoDtrB,CAApD;EACE,WAAOuJ,MAAM3M,CAAN2M,EAAS+hB,CAAT/hB,EAAqBvJ,CAArBuJ,CAAP;KApqFFuuE,EAuqFAA,WAAAA,QAAAA,GAAA,cAvqFAA,EAyqFAA,WAAAA,eAAAA,GAAA;EACE,WAAO,EAAP;KA1qFFA,EA6qFAA,WAAAA,cAAAA,GAAA,UACItB,CADJ,EAEI3rD,CAFJ,EAGIK,CAHJ,EAIIC,CAJJ,EAKIC,CALJ,EAMIC,CANJ;EAyBE,SAjBM,IAAAhQ,WAAA,EAAC0S,QAAD,EAAQkG,QAAR,EAAqBC,QAArB,EAAiCzY,QAAjC,EACA0Y,IAAWtJ,EAAM1sB,KAAN0sB,CAAY,CAAZA,CADX,EAGCuJ,QAHD,EAGaC,QAHb,EAIArgB,IACFskE,QAAqBnkD,GAAUC,GAAYC,GAAW5Y,EAAtD68D,CALE,EAOAiS,IAAU1/D,EAAMtoB,QAANsoB,EAPV,EAQA2/D,IAAat/D,EAAS3oB,QAAT2oB,EARb,EASAu/D,IAAYjU,EAAOj0E,QAAPi0E,EATZ,EAWAkU,IAAWlU,EAAOj1E,OAXlB,EAYAopF,IAAY32E,EAAOzS,OAZnB,EAiBGrE,IAAI,CAAb,EAAgBA,IAAIi3B,CAApB,EAA8Bj3B,GAA9B,EAAmC;EACjC,UAAM0tF,IAAe,IAAJ1tF,CAAjB;EAAA,UACM2tF,IAAKN,EAAQK,CAARL,CADX;EAAA,UAEMO,IAAKP,EAAQK,IAAW,CAAnBL,CAFX;EAAA,UAGMQ,IAAKR,EAAQK,IAAW,CAAnBL,CAHX;EAAA,UAIMS,IAAKT,EAAQK,IAAW,CAAnBL,CAJX;EAAA,UAMMU,IAAeT,EAAWttF,CAAXstF,CANrB,CAOA,MAAIS,KAAQl9D,EAAZ,EAUA,KANA,IAAM4G,IAAeP,IAAa,CAAbA,IAChB22D,IAAKF,MAAO52D,IAAc,MAAMG,IAAa,EAD7BA,GAEjB,CAFJ,EAGMW,IACDV,IAAY,CAAZA,IAAkB22D,IAAKF,MAAO52D,IAAa,MAAMG,IAAY,EAA7DA,GAAkE,CAJvE,EAMS/gB,IAAI,CAAb,EAAgBA,IAAI8gB,CAApB,EAAgC9gB,GAAhC,EAAqC;EACnC,YAAM43E,IAAgB92D,IAAa,CAAbA,GAClBy2D,KAAM52D,IAAc,CAApB42D,IAAyBv3E,KADP8gB,GAElB,MAAOy2D,IAAKE,CAAZ,KAAmB92D,IAAc,CAAjC,CAFJ,CAIA,IAAIi3D,IAAO,CAAPA,IAAYA,IAAOj3D,IAAc,CAArC,EACE,KAAK,IAAIr3B,IAAI,CAAb,EAAgBA,IAAIy3B,CAApB,EAA+Bz3B,GAA/B,EACE,KAAK,IAAIyvB,IAAI,CAAb,EAAgBA,IAAI5Q,CAApB,EAAiC4Q,GAAjC,EAAsC;EACpC,cAAM8+D,IACF9+D,IAAIzvB,IAAI+tF,EAAU,CAAVA,CAARt+D,GAAuB/Y,IAAIq3E,EAAU,CAAVA,CAA3Bt+D,GAA0CnvB,IAAIytF,EAAU,CAAVA,CADlD,CAEA32E,EAAO5T,MAAP4T,CAAcm3E,CAAdn3E,IAAqBqX,CAArBrX;WALN,MAWA,IAAe,eAAXoX,CAAJ,EACE;EAAA,cAAMggE,IAAS3uF,KAAKkC,KAALlC,CAAWyuF,CAAXzuF,CAAf;EAAA,cACM4uF,IAAY5uF,KAAKuQ,IAALvQ,CAAUyuF,CAAVzuF,CADlB;EAAA,cAEM6uF,IAAQJ,IAAOE,CAFrB,CAIA,KAASxuF,IAAI,CAAb,EAAgBA,IAAIy3B,CAApB,EAA+Bz3B,GAA/B,EAAoC;EAKlC,iBAJM2uF,IAAQl3D,IAAY,CAAZA,GACVy2D,KAAM52D,IAAa,CAAnB42D,IAAwBluF,IAAIm4B,CADlBV,GAEV,MAAOy2D,IAAKE,CAAZ,KAAmB92D,IAAa,CAAhC,KAEO,KAAKq3D,IAAOr3D,IAAa,CAApC,EACE,KAAS7H,IAAI,CAAb,EAAgBA,IAAI5Q,CAApB,EAAiC4Q,GAAjC,EAAsC;EAC9B8+D,kBACF9+D,IAAIzvB,IAAI+tF,EAAU,CAAVA,CAARt+D,GAAuB/Y,IAAIq3E,EAAU,CAAVA,CAA3Bt+D,GAA0CnvB,IAAIytF,EAAU,CAAVA,CAD5CQ,CAENn3E,EAAO5T,MAAP4T,CAAcm3E,CAAdn3E,IAAqBqX,CAArBrX;eAJJ,MASA;EAAA,kBAAMw3E,IAAU/uF,KAAKkC,KAALlC,CAAW8uF,CAAX9uF,CAAhB;EAAA,kBACMgvF,IAAWhvF,KAAKuQ,IAALvQ,CAAU8uF,CAAV9uF,CADjB;EAAA,kBAEMivF,IAAQH,IAAOC,CAFrB,CAIA,KAASn/D,IAAI,CAAb,EAAgBA,IAAI5Q,CAApB,EAAiC4Q,GAAjC,EAAsC;EACpC,oBAEM82D,IAAUsH,EAFZU,IAAM9+D,IAAIm/D,IAAUd,EAAS,CAATA,CAAdr+D,GAA4B++D,IAASV,EAAS,CAATA,CAArCr+D,GACN4+D,IAAOP,EAAS,CAATA,CACKD,CAFhB;EAAA,oBAMMkB,IAAWlB,EAFjBU,IAAM9+D,IAAIo/D,IAAWf,EAAS,CAATA,CAAfr+D,GAA6B++D,IAASV,EAAS,CAATA,CAAtCr+D,GACF4+D,IAAOP,EAAS,CAATA,CACMD,CANjB;EAAA,oBAUMrH,IAAaqH,EAFnBU,IAAM9+D,IAAIm/D,IAAUd,EAAS,CAATA,CAAdr+D,GAA4Bg/D,IAAYX,EAAS,CAATA,CAAxCr+D,GACF4+D,IAAOP,EAAS,CAATA,CACQD,CAVnB;EAAA,oBAgBMmB,IAAMzI,KAAWwI,IAAWxI,KAAWu
I,CAhB7C;EAAA,oBAiBMxsB,IAASkkB,KAHKqH,EAFpBU,IAAM9+D,IAAIo/D,IAAWf,EAAS,CAATA,CAAfr+D,GAA6Bg/D,IAAYX,EAAS,CAATA,CAAzCr+D,GACF4+D,IAAOP,EAAS,CAATA,CACSD,IAGuBrH,KAAcsI,CAjBzD,CAmBAP,IAAM9+D,IAAIzvB,IAAI+tF,EAAU,CAAVA,CAARt+D,GAAuB/Y,IAAIq3E,EAAU,CAAVA,CAA3Bt+D,GAA0CnvB,IAAIytF,EAAU,CAAVA,CAApDQ,EACAn3E,EAAO5T,MAAP4T,CAAcm3E,CAAdn3E,IAAqB43E,KAAQ1sB,IAAS0sB,KAAON,CAD7CH;;;;WA3CN,MAgDE,KAASvuF,IAAI,CAAb,EAAgBA,IAAIy3B,CAApB,IAAiCz3B,CAAjC,EAAoC;EAClC,cAAM2uF,CAAN,CAIA,KAJMA,IAAQl3D,IAAY,CAAZA,GACVy2D,KAAM52D,IAAa,CAAnB42D,IAAwBluF,IAAIm4B,CADlBV,GAEV,MAAOy2D,IAAKE,CAAZ,KAAmB92D,IAAa,CAAhC,KAEO,KAAKq3D,IAAOr3D,IAAa,CAApC,EACE,KAAS7H,IAAI,CAAb,EAAgBA,IAAI5Q,CAApB,EAAiC4Q,GAAjC,EAAsC;EAC9B8+D,gBACF9+D,IAAIzvB,IAAI+tF,EAAU,CAAVA,CAARt+D,GAAuB/Y,IAAIq3E,EAAU,CAAVA,CAA3Bt+D,GAA0CnvB,IAAIytF,EAAU,CAAVA,CAD5CQ,CAENn3E,EAAO5T,MAAP4T,CAAcm3E,CAAdn3E,IAAqBqX,CAArBrX;aAJJ,MASA;EAAA,gBAAM63E,IAAWpvF,KAAKuE,KAALvE,CAAW8uF,CAAX9uF,CAAjB;EAAA,gBACMqvF,IAAWrvF,KAAKuE,KAALvE,CAAWyuF,CAAXzuF,CADjB,CAEA,KAAS4vB,IAAI,CAAb,EAAgBA,IAAI5Q,CAApB,EAAiC4Q,GAAjC,EAAsC;EACpC,kBAAM0/D,IAAQ1/D,IAAIw/D,IAAWnB,EAAS,CAATA,CAAfr+D,GACVy/D,IAAWpB,EAAS,CAATA,CADDr+D,GACe4+D,IAAOP,EAAS,CAATA,CADpC;EAAA,kBAEMsB,IACF3/D,IAAIzvB,IAAI+tF,EAAU,CAAVA,CAARt+D,GAAuB/Y,IAAIq3E,EAAU,CAAVA,CAA3Bt+D,GAA0CnvB,IAAIytF,EAAU,CAAVA,CAHlD,CAIA32E,EAAO5T,MAAP4T,CAAcg4E,CAAdh4E,IAAwBy2E,EAAUsB,CAAVtB,CAAxBz2E;;;;;EAMV,YAAOA,EAAO8vC,QAAP9vC,EAAP;KAnzFF8jE,EAszFAA,WAAAA,cAAAA,GAAA,UACIvsD,CADJ,EAC2BC,CAD3B,EACiDzb,CADjD,EAEI0b,CAFJ;EAGQ,QAAApQ,4BAAA;EAAA,QAAC8I,eAAD;EAAA,QAAYa,gBAAZ;EAAA,QAAwB/B,eAAxB;EAAA,QAAmC1hB,aAAnC;EAAA,QAA4C2jB,gBAA5C,CAIN,OAAOhjB,KAAK+pF,OAAL/pF,CACHqpB,CADGrpB,EACYspB,CADZtpB,EAC0B6N,CAD1B7N,EACuCgjB,CADvChjB,EACmD+gB,CADnD/gB,EAEH8iB,CAFG9iB,EAESiiB,CAFTjiB,EAEoBX,CAFpBW,EAE6BupB,CAF7BvpB,GADgB,CAChBA,CAAP;KA7zFF41E,EAk0FAA,WAAAA,SAAAA,GAAA,UAASl7E,CAAT,EAAoB+L,CAApB;EACE,QAAMub,IAAevb,EAAQxK,KAA7B;EAAA,QACMgmB,IAAYD,EAAaA,EAAa5nB,MAAb4nB,GAAsB,CAAnCA,CADlB;EAAA,QAGM7I,4BAHN;EAAA,QAGOgJ,QAHP;EAAA,QAGoBqzC,QAHpB;EAAA,QAG+Bz0C,QAH/B;EAAA,QAG0C1hB,QAH1C,CAKA,IAAkB,MAAdm2D,CAAJ,EACE,OAAOviD,SAAAA,EAAWkP,CAAXlP,EAAwBvY,EAAEuD,KAA1BgV,CAAP,CAOF,KAJA,IAAMzN,IAAS,IAAI1B,YAAJ,EAAkB0xD,GAAWz0C,EAA7B,EAAyCrmB,EAAEuD,KAA3C,CAAf,EACM+rF,IAAcvjF,EAAQpG,QAARoG,EADpB,EAEMirD,IAAQh3D,EAAE2F,QAAF3F,EAFd,EAISK,IAAI,CAAb,EAAgBA,IAAIy6D,CAApB,EAA+Bz6D,GAA/B,EAAoC;EAGlC,WAFA,IAAMT,MAAN,EACI2vF,IAAe,CADnB,EAESjsF,IAAI,CAAb,EAAgBA,IAAIikB,CAApB,EAA+BjkB,GAA/B,EAAoC;EAClC,YAAM8O,IAAMk9E,EAAYjvF,IAAIknB,CAAJlnB,GAAgBiD,CAA5BgsF,CAAZ,CACAC,KAAgBn9E,IAAMzN,EAAQrB,CAARqB,CAAtB4qF,EACA3vF,EAAM0B,IAAN1B,CAAWwS,CAAXxS,CADA2vF;EAGF,WAAIA,IAAe,CAAfA,IAAoBA,KAAgBvvF,EAAEwB,IAAFxB,GAASqmB,CAAjD,EACE,MAAM,IAAIxnB,KAAJ,CACF,sBAAoBe,CAApB,0BAAA,GAAiDI,EAAEuB,KADjD,CAAN,CAIF,KAAK,IAAIwT,IAAI,CAAb,EAAgBA,IAAIsR,CAApB,EAA+BtR,GAA/B,EACEjK,EAAOtH,MAAPsH,CAAczK,IAAIgmB,CAAJhmB,GAAgB0U,CAA9BjK,IAAmCksD,EAAMu4B,IAAelpE,CAAfkpE,GAA2Bx6E,CAAjCiiD,CAAnClsD;EAGJ,YAAOA,EAAOo8C,QAAPp8C,GAAkBL,OAAlBK,CAA0B2c,CAA1B3c,CAAP;KAj2FFowE,EAo2FAA,WAAAA,UAAAA,GAAA,UACInvE,CADJ,EACqB8b,CADrB,EACsCtmB,CADtC;EAEQ,QAAAkd,4BAAA;EAAA,QAAC8I,eAAD;EAAA,QAAYa,gBAAZ;EAAA,QAAwB/B,eAAxB;EAAA,QAAmC1hB,aAAnC;EAAA,QAA4C2jB,gBAA5C;EAAA,QAEAuG,IAAeM,OAAO,CAAPA,CAFf,CAIN,OAAO7pB,KAAK+pF,OAAL/pF,CACHyG,CADGzG,EACMuiB,CADNviB,EACe/D,CADf+D,EACsBgjB,CADtBhjB,EACkC+gB,CADlC/gB,EAC6C8iB,CAD7C9iB,EACyDiiB,CADzDjiB,EAEHX,CAFGW,EAEMupB,CAFNvpB,GADgB,CAChBA,CAAP;KA12FF41E,EA+2FQA,WAAAA,QAAAA,GAAR,UACInvE,CADJ,EACqB8b,CADrB,EACsCtmB,CADtC,EAC0D+mB,CAD1D,EAEIjC,CAFJ,EAEuB+B,CAFvB,EAE2Cb,CAF3C,EAGI5iB,CAHJ,EAGuBkqB
,CAHvB,EAII2gE,CAJJ;EAKE,QAAMt8B,KAAgB5qC,IAAajC,GAAWA,EAA9C;EAAA,QACMipE,IAAcvjF,EAAQpG,QAARoG,EADpB;EAAA,QAEM0jF,IAAc5nE,EAAQliB,QAARkiB,EAFpB,CAIA,IAAmB,MAAfS,CAAJ,EACE,OAAO/P,SAAAA,EAAWhX,CAAXgX,EAAkBsP,EAAQtkB,KAA1BgV,CAAP,CAGF,IAAMzN,IAAS,IAAI1B,YAAJ,CAAiB8pD,CAAjB,EAA+BrrC,EAAQtkB,KAAvC,CAAf,CACAuH,EAAOtH,MAAPsH,CAAcrD,IAAdqD,CAAmB+jB,EAAalpB,QAAbkpB,GAAwB,CAAxBA,CAAnB/jB,EAEA,KAAK,IAAIzK,IAAI,CAAb,EAAgBA,IAAI+nB,CAApB,EAAgC/nB,GAAhC,EAAqC;EAGnC,WAFA,IAAMT,MAAN,EACI2vF,IAAe,CADnB,EAESjsF,IAAI,CAAb,EAAgBA,IAAIikB,CAApB,EAA+BjkB,GAA/B,EAAoC;EAClC,YAAM8O,IAAMk9E,EAAYjvF,IAAIknB,CAAJlnB,GAAgBiD,CAA5BgsF,CAAZ,CACA1vF,EAAM0B,IAAN1B,CAAWwS,CAAXxS,GACA2vF,KAAgBn9E,IAAMzN,EAAQrB,CAARqB,CADtB/E;EAIF,WAAI2vF,IAAe,CAAfA,IAAoBA,KAAgBjnE,IAAajC,CAArD,EACE,MAAM,IAAIxnB,KAAJ,CACF,sBAAoBe,CAApB,0BAAA,GAAiD2B,CAD/C,CAAN,CAIF,KAAK,IAAIwT,IAAI,CAAb,EAAgBA,IAAIsR,CAApB,EAA+BtR,GAA/B,EACMy6E,IACF1kF,EAAOtH,MAAPsH,CAAcykF,IAAelpE,CAAfkpE,GAA2Bx6E,CAAzCjK,KACI2kF,EAAYpvF,IAAIgmB,CAAJhmB,GAAgB0U,CAA5B06E,CAFFD,GAIF1kF,EAAOtH,MAAPsH,CAAcykF,IAAelpE,CAAfkpE,GAA2Bx6E,CAAzCjK,IAA+D,MAAjB+c,EAAQnjB,IAAS,GAC3D+qF,EAAY,CAAZA,CAD2D,GAE3DA,EAAYpvF,IAAIgmB,CAAJhmB,GAAgB0U,CAA5B06E,CANFD;EAUR,YAAO1kF,EAAOo8C,QAAPp8C,GAAkBL,OAAlBK,CAA0BvJ,CAA1BuJ,CAAP;KAx5FFowE,GA05FF;KD9qFA,KCgrFIzd,gBACA,OAAO;EAAM,SAAA,IAAIyd,cAAJ,EAAA;KAAsB,GAAkBv2D,kBC/8FzD,IAAM+qE,gBAAiD,sBAA1BC,qBAA0B,GACnDA,qBADmD,GAEnDC,YAFJ,CAaA,kBAAA;EACE,SAAO,IAAIntF,OAAJ,CAAkB,UAAAC,CAAA;EAAW,WAAAgtF,cAAc;EAAM,aAAAhtF,GAAA;OAApBgtF,CAAA;KAA7B,CAAP;ECTK,KAAMG,yBACXljE,SAAW,GACXC,OAAS,GACTkjE,QAAU,GACVC,OAAS,GACT5rF,MAAQ,GALH,wBCkB6BipB;;;;EAKlC,eAAW4iE,CAAX,IAFMC,MAAAA,EACAC,MADAD,EAEa7iE,CAAnB,EAA4B;EAG1B,gBAAgB,eAFV5O,IAAI4O,EAAQ4iE,CAAR5iE,GAEJ7pB,KAAU,IAAyB,YAAZib,EAAEjb,KAAf,IAAgD,WAAZib,EAAEjb,KAAtD,EACE,MAAM,IAAI1E,KAAJ,CAAU,kCAAgCmxF,CAAhC,QAAA,GAA0CxxE,EAAEjb,KAAtD,CAAN,CAEF0sF,EAAM3uF,IAAN2uF,GAAYpsF,SAAMtC,OAAOid,EAAEjd,OAAOgC,OAAOib,EAAEjb,OAA3C0sF,GACAC,EAAa5uF,IAAb4uF,CAAkB1xE,EAAElU,IAAFkU,EAAlB0xE,CADAD;EAGmB,sBAAMxtF,QAAQ4K,GAAR5K,CAAYytF,CAAZztF,EAAN;EACrB,uBAAQ6H,MAAM6lF,uBADO1xE,MAAAA,EACP0xE,GAAsCF,WAApD;;;EAkBF,uBAAA,CACInlF,CADJ,EACyBmlF,CADzB;EAKE,OAFA,IAAMprC,MAAN,EACIh9C,IAAS,CADb,gBAEWuoF;EACT,QAAMC,IAAOD,EAAKvsF,IAAlB;EAAA,QACMN,IAAQ6sF,EAAK7sF,KADnB;EAAA,QAEMhC,IAAQ6uF,EAAK7uF,KAFnB;EAAA,QAGMC,IAAO+F,cAAchG,CAAdgG,CAHb;EAAA,QAII+oF,UAJJ,CAMA,uBAAsBF,CAAlB,GAAwB;EAC1B,UAAMG,IAAeH,EAAKI,YAA1B,CACA,IAA2B,YAAvBD,EAAahtF,KAAU,IAAkC,aAAvBgtF,EAAahtF,KAAnD,EACE,MAAM,IAAI1E,KAAJ,CACF,YAAUuxF,EAAKvsF,IAAf,qCAAA,GACsB0sF,EAAahtF,KADnC,+DADE,CAAN,CAKF,IAAMktF,IAAyBZ,qBAAqBU,EAAahtF,KAAlCssF,CAA/B;EAAA,UACMa,IACF5lF,EAAO3H,KAAP2H,CAAajD,CAAbiD,EAAqBjD,IAASrG,IAAOivF,CAArC3lF,CAFJ;EAAA,UAGM6lF,IAAyC,YAAvBJ,EAAahtF,KAAU,GAC3C,IAAII,UAAJ,CAAe+sF,CAAf,CAD2C,GAE3C,IAAIpoD,WAAJ,CAAgBooD,CAAhB,CALJ,CAMA,IAAc,cAAVntF,CAAJ,EACE+sF,IAAa7sF,aAAa8E,IAAb9E,CACTktF,CADSltF,EACO,UAAA6Y,CAAA;EAAK,eAAAA,IAAIi0E,EAAapjF,KAAjBmP,GAAyBi0E,EAAaxwF,GAAtC;SADZ0D,CAAb6sF,CADF,KAGO;EAAA,YAAc,YAAV/sF,CAAJ,EAKL,MAAM,IAAI1E,KAAJ,CAAU,kCAAgCwxF,CAAhC,QAAA,GAA0C9sF,CAApD,CAAN,CAJA+sF,IAAa5sF,WAAW6E,IAAX7E,CACTitF,CADSjtF,EAET,UAAA4Y,CAAA;EAAK,iBAAAzc,KAAKuE,KAALvE,CAAWyc,IAAIi0E,EAAapjF,KAAjBmP,GAAyBi0E,EAAaxwF,GAAjDF,CAAA;WAFI6D,CAAb4sF;EAMFzoF,YAAUrG,IAAOivF,CAAjB5oF;OAxBF,MAyBO;EACL,UAAM+oF,IAAcf,qBAAqBtsF,CAArBssF,CAApB,CACMa,IAAa5lF,EAAO3H,KAAP2H,CAAajD,CAAbiD,EAAqBjD,IAASrG,IAAOovF,CAArC9lF,CAAb4lF,CAEN,IAAc,cAAVntF,CAAJ,EACE+sF,IAAa,IAAI7sF,YAAJ,CAAiBitF,CAAjB,CAAbJ,CADF,KAEO,IAAc,YAAV/sF,CAAJ,EACL+sF,IAAa,IAAI5sF,UAAJ,CAAegtF,CAAf,CAAbJ,CADK,KAEA;EAAA,YAAc,WAAV/sF
,CAAJ,EAGL,MAAM,IAAI1E,KAAJ,CAAU,kCAAgCwxF,CAAhC,QAAA,GAA0C9sF,CAApD,CAAN,CAFA+sF,IAAa,IAAI3sF,UAAJ,CAAe+sF,CAAf,CAAbJ;EAIFzoF,YAAUrG,IAAOovF,CAAjB/oF;EAGF,SAAIwB,UAAJ,CACA,IAAc,cAAV9F,CAAJ,EACE8F,IAAQkP,OAAO+3E,CAAP/3E,EAAmBhX,CAAnBgX,EAA0B,SAA1BA,CAARlP,CADF,KAEO,IAAc,YAAV9F,CAAJ,EACL8F,IAAQkP,OAAO+3E,CAAP/3E,EAAmBhX,CAAnBgX,EAA0B,OAA1BA,CAARlP,CADK,KAEA;EAAA,UAAc,WAAV9F,CAAJ,EAGL,MAAM,IAAI1E,KAAJ,CAAU,kCAAgCwxF,CAAhC,QAAA,GAA0C9sF,CAApD,CAAN,CAFA8F,IAAQkP,OAAO+3E,CAAP/3E,EAAmBhX,CAAnBgX,EAA0B,MAA1BA,CAARlP;EAIFw7C,OAAIwrC,CAAJxrC,IAAYx7C,CAAZw7C;KA5DF,OAAA,EAEmBgsC,KAAnB,EAAmBvnF,YAAnB,EAAmBA,GAAnB;;EA4DA,UAAOu7C,CAAP;EAMF,gCAAA,CAAuCpuC,CAAvC;EAEE,MAAW,SAAPA,CAAJ,EACE,MAAM,IAAI5X,KAAJ,CAAU,0BAAwB2/B,KAAKE,SAALF,CAAe/nB,CAAf+nB,CAAlC,CAAN,CAGF,IAAIsyD,IAAkB,CAAtB;EAAA,MASMC,MATN,CAUAt6E,EAAG/Q,OAAH+Q,CAAW,UAACzW,CAAD;EAMT,QALA8wF,KAAmB9wF,EAAEgxF,UAArBF,EAEAC,EAAazvF,IAAbyvF,CACI/wF,EAAEgxF,UAAFhxF,KAAiBA,EAAE8K,MAAF9K,CAASgxF,UAA1BhxF,GAAuCA,CAAvCA,GACuC,IAAKA,EAAEsE,WAAP,CAA2BtE,CAA3B,CAF3C+wF,CAFAD,IAKM9wF,aAAoByD,YAApBzD,IAAoCA,aAAoB0D,UAAxD1D,IACAA,aAAoB2D,WAD1B,EAEE,MAAM,IAAI9E,KAAJ,CAAU,qCAAmCmB,EAAEsE,WAAFtE,CAAc6D,IAA3D,CAAN;KARJ4S,EAaA,IAAMC,IAAI,IAAI/S,UAAJ,CAAemtF,CAAf,CAAV;EAAA,MACIjpF,IAAS,CADb,CAOA,OALAkpF,EAAarrF,OAAbqrF,CAAqB,UAAC/wF,CAAD;EACnB0W,MAAE0F,GAAF1F,CAAM,IAAI/S,UAAJ,CAAe3D,EAAE8K,MAAjB,CAAN4L,EAAgC7O,CAAhC6O,GACA7O,KAAU7H,EAAEgxF,UADZt6E;KADFq6E,GAKOr6E,EAAE5L,MAAT;EAIF,KAAMmmF,gBAAkC,sBAAXC,MAAW,KACnB,sBAATC,IAAS,IAA+B,sBAATC,IAAtB,IACA,sBAATC,IAF4B,CAAxC,CAaA,yBAAA,CAAiCC,CAAjC;EACE,SAAIL,gBACKC,OAAOF,UAAPE,CAAkBI,CAAlBJ,CADLD,GAGG,IAAIE,IAAJ,EAAUG,EAAV,EAAgB9vF,IAHvB;EAYF,mCAAA,CAA0CsJ,CAA1C;EACE,SAAImmF,gBACKC,OAAO3oF,IAAP2oF,CAAYpmF,CAAZomF,EAAoB3qF,QAApB2qF,CAA6B,QAA7BA,CADLD,GAGGI,KAAKn3E,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0B,IAA1BA,EAAgC,IAAIvW,UAAJ,CAAemH,CAAf,CAAhCoP,CAALm3E,CAHP;EAYF,mCAAA,CAA0CC,CAA1C;EACE,MAAIL,aAAJ,EAAmB;EACjB,QAAMM,IAAML,OAAO3oF,IAAP2oF,CAAYI,CAAZJ,EAAiB,QAAjBA,CAAZ,CACA,OAAOK,EAAIzmF,MAAJymF,CAAWpuF,KAAXouF,CAAiBA,EAAIC,UAArBD,EAAiCA,EAAIC,UAAJD,GAAiBA,EAAIP,UAAtDO,CAAP;EAIF,QAFA,IAAMvvE,IAAIovE,KAAKE,CAALF,CAAV,EACMtmF,IAAS,IAAInH,UAAJ,CAAeqe,EAAEtiB,MAAjB,CADf,EAESW,IAAI,CAAb,EAAgBA,IAAI2hB,EAAEtiB,MAAtB,IAAgCW,CAAhC,EACEyK,EAAOsR,GAAPtR,EAAYkX,EAAEwgC,UAAFxgC,CAAa3hB,CAAb2hB,EAAZlX,EAA8BzK,CAA9ByK,EAEF,OAAOA,EAAOA,MAAd;EASF,iCAAA,CAAwC2mF,CAAxC;EACE,MAAIX,IAAkB,CAAtB,CACAW,EAAQ/rF,OAAR+rF,CAAgB,UAAC3mF,CAAD;EACdgmF,SAAmBhmF,EAAOkmF,UAA1BF;KADFW,EAIA,IAAM9xF,IAAO,IAAIgE,UAAJ,CAAemtF,CAAf,CAAb;EAAA,MACIjpF,IAAS,CADb,CAMA,OAJA4pF,EAAQ/rF,OAAR+rF,CAAgB,UAAC3mF,CAAD;EACdnL,MAAKyc,GAALzc,CAAS,IAAIgE,UAAJ,CAAemH,CAAf,CAATnL,EAAiCkI,CAAjClI,GACAkI,KAAUiD,EAAOkmF,UADjBrxF;KADF8xF,GAIO9xF,EAAKmL,MAAZ;EAUF,kBAAA,CAAyB4mF,CAAzB;EAGE,OADAA,IAAOA,EAAKC,IAALD,EACP,EAAOA,EAAKvnE,QAALunE,CAFW,GAEXA,CAAP,GACEA,IAAOA,EAAKvuF,KAALuuF,CAAW,CAAXA,EAAcA,EAAKhyF,MAALgyF,GAAc,CAA5BA,CAAPA,CAEF,IAAME,IAAQF,EAAK/kF,KAAL+kF,CALI,GAKJA,CAAd,CACA,OAAOE,EAAMA,EAAMlyF,MAANkyF,GAAe,CAArBA,CAAP;EAQF,sCAAA,CAA6CC,CAA7C;EAEE,MAAIA,EAAeC,aAAfD,YAAwCE,WAA5C,EACE,MAAM,IAAIlzF,KAAJ,CAAU,qDAAV,CAAN,CAGF,SACEmzF,WAAW,IAAI3uC,IAAJ,IACX4uC,mBAAmB,QACnBC,oBAAoD,QAAhCL,EAAeC,aAAiB,GAChD,CADgD,GAEhDK,iBAAiB3zD,KAAKE,SAALF,CAAeqzD,EAAeC,aAA9BtzD,CAAjB2zD,GACJC,kBAAgD,QAA9BP,EAAeQ,WAAe,GAC5C,CAD4C,GAE5CF,iBAAiB3zD,KAAKE,SAALF,CAAeqzD,EAAeQ,WAA9B7zD,CAAjB2zD,GACJG,iBAA8C,QAA7BT,EAAeU,UAAc,GAC1C,CAD0C,GAE1CV,EAAeU,UAAfV,CAA0Bb,YAXhC;ECxQF;EAOE,YAAA;EACE1rF,SAAKktF,WAALltF,KAAAA,EACAA,KAAKmtF,WAALntF,KADAA;EAmEJ,UA/DiBotF,aAAAA,GAAf;EAIE,WAHiC,QAA7BA,EAAiB78E,QAAY,KAC/B68E,EA
AiB78E,QAAjB68E,GAA4B,IAAIA,CAAJ,EADG,GAG1BA,EAAiB78E,QAAxB;KAJa68E,EAaRA,oBAAAA,GAAP,UAA0BC,CAA1B;EACED,MAAiBE,WAAjBF,GAA+BF,WAA/BE,CAA2CpxF,IAA3CoxF,CAAgDC,CAAhDD;KAdaA,EAuBRA,oBAAAA,GAAP,UAA0BG,CAA1B;EACEH,MAAiBE,WAAjBF,GAA+BD,WAA/BC,CAA2CpxF,IAA3CoxF,CAAgDG,CAAhDH;KAxBaA,EAmCRA,iBAAAA,GAAP,UAAuBI,CAAvB;EACE,WAAOJ,EAAiBK,WAAjBL,CAA6BI,CAA7BJ,EAAkC,MAAlCA,CAAP;KApCaA,EA8CRA,iBAAAA,GAAP,UAAuBI,CAAvB;EACE,WAAOJ,EAAiBK,WAAjBL,CAA6BI,CAA7BJ,EAAkC,MAAlCA,CAAP;KA/CaA,EAkDAA,aAAAA,GAAf,UAA2BI,CAA3B,EAAiDE,CAAjD;EAEE,QAAMC,MAAN,CASA,QARgC,WAAhBD,CAAgB,GAAS1tF,KAAKstF,WAALttF,GAAmBmtF,WAA5B,GACSntF,KAAKstF,WAALttF,GAAmBktF,aACpD9sF,QAAQ,UAAAwtF,CAAA;EACd,UAAMnpF,IAAUmpF,EAAOJ,CAAPI,CAAhB,CACgB,SAAZnpF,CAAY,IACdkpF,EAAc3xF,IAAd2xF,CAAmBlpF,CAAnBkpF,CADc;UAIXA,CAAP;KA7DaP,GA+DjB;KA3EA;EAAA,ICYMS,oBAAoB,KDZ1B;EAAA;ECoBE,YAAA;EACE7tF,SAAK8tF,QAAL9tF,KAAAA;EAwCJ,UArCiB+tF,aAAAA,GAAf;EAIE,WAH0C,QAAtCA,EAA0Bx9E,QAAY,KACxCw9E,EAA0Bx9E,QAA1Bw9E,GAAqC,IAAIA,CAAJ,EADG,GAGnCA,EAA0Bx9E,QAAjC;KAJaw9E,EAaRA,iBAAAA,GAAP,UAAuBC,CAAvB,EAAuCC,CAAvC;EACEvyF,WAAiB,QAAVsyF,CAAPtyF,EAAuB,uCAAvBA,GACIsyF,EAAOnpE,QAAPmpE,CAAgBH,iBAAhBG,MACFA,IAASA,EAAOnwF,KAAPmwF,CAAa,CAAbA,EAAgBA,EAAO1sE,OAAP0sE,CAAeH,iBAAfG,CAAhBA,CADPA,CADJtyF,EAIAA,OAAOsyF,EAAO5zF,MAAP4zF,GAAgB,CAAvBtyF,EAA0B,qCAA1BA,CAJAA,CAKA,IAAM0hB,IAAW2wE,EAA0BT,WAA1BS,EAAjB,CACAryF,OACiC,QAA7B0hB,EAAS0wE,QAAT1wE,CAAkB4wE,CAAlB5wE,CADJ1hB,EAEI,6DAA2DsyF,CAA3D,OAFJtyF,GAGA0hB,EAAS0wE,QAAT1wE,CAAkB4wE,CAAlB5wE,IAA4B6wE,CAH5BvyF;KApBaqyF,EA0BRA,YAAAA,GAAP,UAAkBC,CAAlB;EACE,QAAMC,IAAUjuF,KAAKstF,WAALttF,GAAmB8tF,QAAnB9tF,CAA4BguF,CAA5BhuF,CAAhB,CACA,IAAe,QAAXiuF,CAAJ,EACE,MAAM,IAAI10F,KAAJ,CAAU,2CAAyCy0F,CAAzC,MAAV,CAAN,CAEF,OAAOC,CAAP;KA/BaF,EAkCRA,YAAAA,GAAP;EACE,WAAO5pF,OAAO0O,IAAP1O,CAAYnE,KAAKstF,WAALttF,GAAmB8tF,QAA/B3pF,CAAP;KAnCa4pF,GAqCjB;KD7DA,mBCuEkBP;EAChB,OAAwC,MAApCA,EAAIlsE,OAAJksE,CAAYK,iBAAZL,CAAJ,EACE,MAAM,IAAIj0F,KAAJ,CACF,+EAEGw0F,0BAA0BG,UAA1BH,GAAuC/rF,IAAvC+rF,CAA4C,GAA5CA,CAHD,CAAN,CAKF,SACEC,QAAQR,EAAInmF,KAAJmmF,CAAUK,iBAAVL,EAA6B,CAA7BA,GACRpB,MAAMoB,EAAInmF,KAAJmmF,CAAUK,iBAAVL,EAA6B,CAA7BA,GAFR;EAMF,4BAAA,CACIW,CADJ,EACuBC,CADvB,EAEIC,CAFJ;4BAEIA;;;EA+BqB,iBA9BvB3yF,OACIyyF,MAAcC,CADlB1yF,EAEI,0CAAwCyyF,CAAxC,MAFJzyF,GAKAA,QADM4yF,IAAelB,iBAAiBmB,eAAjBnB,CAAiCe,CAAjCf,GAEJhzF,SAAS,CAD1BsB,EAEI,oEACIyyF,CADJ,MAFJzyF,CALAA,EASAA,OACI4yF,EAAal0F,MAAbk0F,GAAsB,CAD1B5yF,EAEI,2CAAyC4yF,EAAal0F,MAAtD,oCAAA,GACoC+zF,CADpC,MAFJzyF,CATAA,EAaM8yF,IAAcF,EAAa,CAAbA,CAbpB5yF,EAgBAA,QADM+yF,IAAerB,iBAAiBsB,eAAjBtB,CAAiCgB,CAAjChB,GAEJhzF,SAAS,CAD1BsB,EAEI,yEACO0yF,CADP,MAFJ1yF,CAhBAA,EAoBAA,OACI+yF,EAAar0F,MAAbq0F,GAAsB,CAD1B/yF,EAEI,2CAAyC4yF,EAAal0F,MAAtD,yCAAA,GACyCg0F,CADzC,MAFJ1yF,CApBAA,EAwBMizF,IAAcF,EAAa,CAAbA,CAxBpB/yF,EA0BMkzF,IAAeC,SAASV,CAATU,EAAoBb,MA1BzCtyF,EA2BMozF,IAAaD,SAASV,CAATU,EAAoBzC,IA3BvC1wF,EA4BMqzF,IAAaH,MAAiBC,SAASV,CAATU,EAAoBb,MA5BxDtyF,MA8B6B8yF,EAAYQ,IAAZR,GAAN;mBAAjBjC,IAAiBpzE,MAAAA,EAAjBozE,EAKF8B,KAAgBU,CAAhBV,OACIN,0BAA0BkB,UAA1BlB,CAAqCa,CAArCb,EACDmB,WADCnB,CACWe,CADXf,EADJM;EACFl1E,gBAAAA,eAAAA;EAIiB,qBAAMw1E,EAAY72B,IAAZ62B,CAAiBpC,CAAjBoC,EAAN;mBAAbQ,IAAah2E,MAAAA,EAAbg2E,GAKFd,KAAiBU,iBACbhB,0BAA0BkB,UAA1BlB,CAAqCa,CAArCb,EACDmB,WADCnB,CACWe,CADXf;EAAN50E,gBAAAA,eAAAA;EAIF,qBAAOg2E,EAAWC,mBAAlB;;;EAgCF,oBAAA;;;;EACQC,cAAUtB,0BAA0BG,UAA1BH,EAAVsB,EACA9vC,MADA8vC,OAAAA,EAEeC,KAFfD,aAAAA;mBAEerrF,YAAAA,IAAVgqF,QAAAA,MAECD,0BAA0BkB,UAA1BlB,CAAqCC,CAArCD,EAA6CwB,UAA7CxB,GAFS/pF;EAGnB,eAAWooF,CAAX,IAFMoD,IACFr2E,MAAAA,EACJ,EAEEomC,EADYyuC,IAASH,iBAATG,GAA6B5B,CACzC7sC,IAAWiwC,EAAUpD,CAAVoD,CAAXjwC;mBALiBv7C;EAQrB,qBAA
Ou7C,EAAP;;;EA8BF,qBAAA,CAA2BiuC,CAA3B;;;;EAGS,iBAFDiC,IAAgBZ,SAASrB,CAATqB,CAAhBY,MACU1B,0BAA0BkB,UAA1BlB,CAAqC0B,EAAczB,MAAnDD,EACKmB,WADLnB,CACiB0B,EAAcrD,IAD/B2B,EACT;EAAP,qBAAO50E,MAAAA,GAAP;;;EA4CF,mBAAA,CACIg1E,CADJ,EACuBC,CADvB;;;;EAGS,qBAAMsB,mBAAmBvB,CAAnBuB,EAA8BtB,CAA9BsB,GADQ,CACRA,EAAN;EAAP,qBAAOv2E,MAAAA,GAAP;;;EA2CF,mBAAA,CACIg1E,CADJ,EACuBC,CADvB;;;;EAGS,qBAAMsB,mBAAmBvB,CAAnBuB,EAA8BtB,CAA9BsB,GADQ,CACRA,EAAN;EAAP,qBAAOv2E,MAAAA,GAAP;;;EChTF,KAAMw2E,gBAAgB,cAAtB;EAAA,IACMC,mBAAmB,CADzB;EAAA,IAMMC,mBAAmB,cANzB;EAAA,IAUMC,kBAAkB,kBAVxB,CAeA,4BAAA;EAWE,OAAK3yE,IAAIxG,GAAJwG,CAAQ,YAARA,CAAL,EAIE,MAAM,IAAI5jB,KAAJ,CACF,yFADE,CAAN,CAKF,IAAMw2F,IAAiBj2F,MAAvB;EAAA,MACM+kB,IAAUkxE,EAAUC,SAAVD,IAAuBA,EAAUE,YAAjCF,IACZA,EAAUG,eADEH,IACiBA,EAAUI,WAD3BJ,IAEZA,EAAUK,aAHd,CAIA,IAAe,QAAXvxE,CAAJ,EACE,MAAM,IAAItlB,KAAJ,CACF,2DADE,CAAN,CAGF,OAAOslB,CAAP;EAGF,uBAAA,CAAuBwxE,CAAvB;EACE,MAAMC,IAAKD,EAAYn1F,MAAvB,CACAo1F,EAAGC,iBAAHD,CAAqBT,gBAArBS,IAAwCE,SAAS,aAAjDF,GACAA,EAAGC,iBAAHD,CAAqBR,eAArBQ,IAAuCE,SAAS,aAAhDF,CADAA;EASF;EAME,YAAA,CAAYG,CAAZ;EAGE,QAFAzwF,KAAKgwF,SAALhwF,GAAiB0wF,qBAAjB1wF,EAEiB,QAAbywF,CAAa,KAASA,CAA1B,EACE,MAAM,IAAIl3F,KAAJ,CACF,gEADE,CAAN,CAGFyG,KAAKywF,SAALzwF,GAAiBywF,CAAjBzwF;EAiHJ,UA9GQ2wF,WAAAA,KAAAA,GAAN,UAAWpE,CAAX;;;EAEE,YAAIA,EAAeC,aAAfD,YAAwCE,WAA5C,EACE,MAAM,IAAIlzF,KAAJ,CACF,0FADE,CAAN,CAKF,WAAOyG,KAAK4wF,cAAL5wF,CAAoBA,KAAKywF,SAAzBzwF,EAAoCusF,CAApCvsF,EAAP;;;KARI2wF,EAYAA,WAAAA,KAAAA,GAAN;;;EACE,mBAAO3wF,KAAK4wF,cAAL5wF,CAAoBA,KAAKywF,SAAzBzwF,EAAP;;;KAbI2wF,EA8BEA,WAAAA,eAAAA,GAAR,UAAuBF,CAAvB,EAA0ClE,CAA1C;EAAA,gBAAA,CAEE,OAAO,IAAIpvF,OAAJ,CAAuC,UAACC,CAAD,EAAUC,CAAV;EAC5C,UAAMgzF,IAAc1vF,EAAKqvF,SAALrvF,CAAekwF,IAAflwF,CAAoBgvF,aAApBhvF,EAAmCivF,gBAAnCjvF,CAApB,CACA0vF,EAAYS,eAAZT,GAA8B;EAAM,eAAAU,cAAcV,CAAdU,CAAA;SAApCV,EAEAA,EAAYW,SAAZX,GAAwB;EACtB,YAAMC,IAAKD,EAAYn1F,MAAvB,CAEA,IAAsB,QAAlBqxF,CAAJ,EAA4B;EAE1B,cAAM0E,IAAUX,EAAGY,WAAHZ,CAAeT,gBAAfS,EAAiC,UAAjCA,CAAhB;EAAA,cAEMa,IADaF,EAAQG,WAARH,CAAoBpB,gBAApBoB,EACWt6E,GADXs6E,CACetwF,EAAK8vF,SADpBQ,CADnB,CAGAE,EAAWH,SAAXG,GAAuB;EACrB,gBAAyB,QAArBA,EAAWj2F,MAAf,EAEE,OADAo1F,EAAGe,KAAHf,IACOjzF,EAAO,IAAI9D,KAAJ,CACV,kCAAgCoH,EAAK8vF,SAArC,oBADU,CAAPpzF,CAAP,CAIAD,EAAQ+zF,EAAWj2F,MAAXi2F,CAAkB5E,cAA1BnvF;aAPJ+zF,EAUAA,EAAWG,OAAXH,GAAqB,UAAAn8E,CAAA;EAEnB,mBADAs7E,EAAGe,KAAHf,IACOjzF,EAAO8zF,EAAWn8E,KAAlB3X,CAAP;aAZF8zF,EAcAF,EAAQM,UAARN,GAAqB;EAAM,mBAAAX,EAAGe,KAAHf,EAAA;aAd3Ba;WALF,MAoBO;EAEL,cAOIK,CAPJ;EAAA,cAAMC,IACFC,6BAA6BnF,CAA7BmF,CADJ;EAAA,cAGMC,IAASrB,EAAGY,WAAHZ,CAAeR,eAAfQ,EAAgC,WAAhCA,CAHf;EAAA,cAIIsB,IAAYD,EAAOP,WAAPO,CAAmB7B,eAAnB6B,CAJhB;EAAA,cAKME,IACFD,EAAUE,GAAVF,GAAenB,WAAW9vF,EAAK8vF,WAAWrB,uBAA1CwC,CANJ,CAQAC,EAAeb,SAAfa,GAA2B;EAGzB,gBACME,KAFNP,IAAUlB,EAAGY,WAAHZ,CAAeT,gBAAfS,EAAiC,WAAjCA,GACiBc,YAAYvB,kBACJiC,MACjCrB,WAAW9vF,EAAK8vF,WAChBlE,mBACA6C,wBAJF,CAMA2C,EAAgBf,SAAhBe,GAA4B;EAAM,qBAAA30F,IAASgyF,uBAAThyF,CAAA;eAAlC20F,EACAA,EAAgBT,OAAhBS,GAA0B,UAAA/8E,CAAA;EAIxB,kBAAMg9E,KADNJ,IAAYD,EAAOP,WAAPO,CAAmB7B,eAAnB6B,GACwBx6E,OAAOxW,EAAK8vF,UAAhD,CACAuB,EAAkBhB,SAAlBgB,GAA8B;EAE5B,uBADA1B,EAAGe,KAAHf,IACOjzF,EAAO00F,EAAgB/8E,KAAvB3X,CAAP;iBAFF20F,EAIAA,EAAkBV,OAAlBU,GAA4B,UAAAh9E,CAAA;EAE1B,uBADAs7E,EAAGe,KAAHf,IACOjzF,EAAO00F,EAAgB/8E,KAAvB3X,CAAP;iBANF20F;eANFD;aATFF,EAyBAA,EAAeP,OAAfO,GAAyB,UAAA78E,CAAA;EAEvB,mBADAs7E,EAAGe,KAAHf,IACOjzF,EAAOw0F,EAAe78E,KAAtB3X,CAAP;aA3BFw0F,EA6BAF,EAAOJ,UAAPI,GAAoB;EACH,oBAAXH,CAAW,GACblB,EAAGe,KAAHf,EADa,GAGbkB,EAAQD,UAARC,GAAqB;EAAM,qBAAAlB,EAAGe,KAAHf,EAAA;eAHd;aA9BjBuB;;SAnCJxB,EAyEAA,EAAYiB,OAAZjB,GAAsB,UAAAr7E,CAAA;EAAS,eAAA3X,EAAOgz
F,EAAYr7E,KAAnB3X,CAAA;SAzE/BgzF;OAFK,CAAP;KAhCIM,EAZUA,YAAAA,GAAa,cAYvBA,GA8GR;KA9HA;EAAA,IAgIasB,kBAA4B,UAACzE,CAAD;EACvC,SAAKrwE,IAAIxG,GAAJwG,CAAQ,YAARA,MAGEthB,MAAMC,OAAND,CAAc2xF,CAAd3xF,CAHFshB,IAGwBqwE,EAAI0E,UAAJ1E,CAAemD,iBAAiBwB,UAAhC3E,CAHxBrwE,GAIMi1E,iBAAiB5E,EAAI3vF,KAAJ2vF,CAAUmD,iBAAiBwB,UAAjBxB,CAA4Bv2F,MAAtCozF,CAAjB4E,CAJNj1E,GACI,IADT;GAjIF,CA+JA,yBAAA,CAAiCszE,CAAjC;EACE,SAAO,IAAIE,gBAAJ,CAAqBF,CAArB,CAAP;EAGF,0BAAA,CAA0Bl6E,CAA1B;EACE,SAAOA,EAAI27E,UAAJ37E,CAAeo6E,iBAAiBwB,UAAhC57E,IACHA,EAAI1Y,KAAJ0Y,CAAUo6E,iBAAiBwB,UAAjBxB,CAA4Bv2F,MAAtCmc,CADGA,GAEHA,CAFJ;EAzBF62E,kBAAiBiF,kBAAjBjF,CAAoC6E,eAApC7E,GACAA,iBAAiBkF,kBAAjBlF,CAAoC6E,eAApC7E,CADAA,CA8BA;EAGE,YAAA;EACEptF,SAAKgwF,SAALhwF,GAAiB0wF,qBAAjB1wF;EAkGJ,UA/FQuyF,WAAAA,WAAAA,GAAN;;;EACE,mBAAO,IAAIp1F,OAAJ,CACH,UAACC,CAAD,EAAUC,CAAV;EACE,cAAMgzF,IACF1vF,EAAKqvF,SAALrvF,CAAekwF,IAAflwF,CAAoBgvF,aAApBhvF,EAAmCivF,gBAAnCjvF,CADJ,CAEA0vF,EAAYS,eAAZT,GAA8B;EAAM,mBAAAU,cAAcV,CAAdU,CAAA;aAApCV,EAEAA,EAAYW,SAAZX,GAAwB;EACtB,gBAAMC,IAAKD,EAAYn1F,MAAvB;EAAA,gBACMs3F,IAAKlC,EAAGY,WAAHZ,CAAeR,eAAfQ,EAAgC,UAAhCA,CADX;EAAA,gBAWMmC,IATQD,EAAGpB,WAAHoB,CAAe1C,eAAf0C,EAS2BE,MAT3BF,EAFd,CAYAC,EAAkBzB,SAAlByB,GAA8B;EAE5B,mBADA,IAAMlzC,MAAN,OAAA,EACmBpmC,IAAAs5E,EAAkBv3F,MAArC,EAAmB8I,YAAnB,EAAmBA,GAAnB,EAA6C;EAAxC,oBAAMwS,QAAN,CACH+oC,EAAI/oC,EAAKi6E,SAATlxC,IAAsB/oC,EAAK44E,kBAA3B7vC;EAEFniD,iBAAQmiD,CAARniD;eALFq1F,EAOAA,EAAkBnB,OAAlBmB,GAA4B,UAAAz9E,CAAA;EAE1B,qBADAs7E,EAAGe,KAAHf,IACOjzF,EAAOo1F,EAAkBz9E,KAAzB3X,CAAP;eATFo1F,EAWAD,EAAGjB,UAAHiB,GAAgB;EAAM,qBAAAlC,EAAGe,KAAHf,EAAA;eAXtBmC;aAfFpC,EA4BAA,EAAYiB,OAAZjB,GAAsB,UAAAr7E,CAAA;EAAS,mBAAA3X,EAAOgzF,EAAYr7E,KAAnB3X,CAAA;aA5B/BgzF;WAJC,EAAP;;;KADIkC,EAqCAA,WAAAA,YAAAA,GAAN,UAAkBnG,CAAlB;;;EAEE,eADAA,IAAOuG,iBAAiBvG,CAAjBuG,CAAPvG,MACO,IAAIjvF,OAAJ,CAAgC,UAACC,CAAD,EAAUC,CAAV;EACrC,cAAMgzF,IAAc1vF,EAAKqvF,SAALrvF,CAAekwF,IAAflwF,CAAoBgvF,aAApBhvF,EAAmCivF,gBAAnCjvF,CAApB,CACA0vF,EAAYS,eAAZT,GAA8B;EAAM,mBAAAU,cAAcV,CAAdU,CAAA;aAApCV,EAEAA,EAAYW,SAAZX,GAAwB;EACtB,gBAKIY,CALJ;EAAA,gBAAMX,IAAKD,EAAYn1F,MAAvB;EAAA,gBACM03F,IAAStC,EAAGY,WAAHZ,CAAeR,eAAfQ,EAAgC,WAAhCA,CADf;EAAA,gBAEMuC,IAAYD,EAAOxB,WAAPwB,CAAmB9C,eAAnB8C,CAFlB;EAAA,gBAIME,IAAiBD,EAAUl8E,GAAVk8E,CAAczG,CAAdyG,CAJvB,CAMAC,EAAe9B,SAAf8B,GAA2B;EACzB,kBAA6B,QAAzBA,EAAe53F,MAAnB,EAEE,OADAo1F,EAAGe,KAAHf,IACOjzF,EAAO,IAAI9D,KAAJ,CACV,kCAAgC6yF,CAAhC,oBADU,CAAP/uF,CAAP,CAKA,IAAM20F,IAAoBa,EAAU17E,MAAV07E,CAAiBzG,CAAjByG,CAA1B;EAAA,kBACME,IAAkB;EAGtB,oBACMC,KAFN/B,IAAUX,EAAGY,WAAHZ,CAAeT,gBAAfS,EAAiC,WAAjCA,GACiBc,YAAYvB,kBACD14E,OAAOi1E,EAD7C,CAEA4G,EAAmBhC,SAAnBgC,GAA+B;EAC3B,yBAAA51F,EAAQ01F,EAAe53F,MAAf43F,CAAsB1D,kBAA9BhyF,CAAA;mBADJ41F,EAEAA,EAAmB1B,OAAnB0B,GAA6B,UAAAh+E,CAAA;EACzB,yBAAA3X,EAAOy1F,EAAe99E,KAAtB3X,CAAA;mBAHJ21F;iBANF,CAaAhB,EAAkBhB,SAAlBgB,GAA8Be,CAA9Bf,EACAA,EAAkBV,OAAlBU,GAA4B,UAAAh9E,CAAA;EAG1B,uBAFA+9E,KACAzC,EAAGe,KAAHf,EADAyC,EAEO11F,EAAOy1F,EAAe99E,KAAtB3X,CAAP;iBAJF20F;eArBJc,EA6BAA,EAAexB,OAAfwB,GAAyB,UAAA99E,CAAA;EAEvB,qBADAs7E,EAAGe,KAAHf,IACOjzF,EAAOy1F,EAAe99E,KAAtB3X,CAAP;eA/BFy1F,EAkCAF,EAAOrB,UAAPqB,GAAoB;EACH,sBAAX3B,CAAW,GACbX,EAAGe,KAAHf,EADa,GAGbW,EAAQM,UAARN,GAAqB;EAAM,uBAAAX,EAAGe,KAAHf,EAAA;iBAHd;eAnCjBwC;aATFzC,EAmDAA,EAAYiB,OAAZjB,GAAsB,UAAAr7E,CAAA;EAAS,mBAAA3X,EAAOgzF,EAAYr7E,KAAnB3X,CAAA;aAnD/BgzF;WAFK,EAAP;;;KAvCIkC,GA+FR;KAtGA,KAwGIp1E,IAAIxG,GAAJwG,CAAQ,YAARA,GAGF;EACE4wE,4BAA0BkF,eAA1BlF,CACI4C,iBAAiBwB,UADrBpE,EACiC,IAAIwE,uBAAJ,EADjCxE;EAEA,CAHF,CAGE,OAAO9uE,CAAP,QC/UEi0E,iBAAiB,GAAvB;EAAA,IACMC,cAAc,qBADpB;EAAA,IAEMC,cAAc,MAFpB;EAAA,IAGMC,wBAAwB,gBAH9B;EAAA,IAIMC,sBAA
sB,cAJ5B;EAAA,IAKMC,qBAAqB,aAL3B,CAYA,qBAAA,CAsBsBnH,CAtBtB;EAwBE,WACE90E,OAAO67E,aAAa/G,GAAMgH,aAAapxF,KAAKkxF,iBAC5CM,WAAWL,aAAa/G,GAAMiH,uBAAuBrxF,KAAKkxF,iBAC1DnG,cAAcoG,aAAa/G,GAAMkH,qBAAqBtxF,KAAKkxF,iBAC3DjG,aAAakG,aAAa/G,GAAMmH,oBAAoBvxF,KAAKkxF,iBAJ3D;EAeF,6BAAA,CAA6B38E,CAA7B;EACE,MAAM+1E,IAAQ/1E,EAAIlP,KAAJkP,CAAU28E,cAAV38E,CAAd,CACA,IAAI+1E,EAAMlyF,MAANkyF,GAAe,CAAnB,EACE,MAAM,IAAI/yF,KAAJ,CAAU,yBAAuBgd,CAAjC,CAAN,CAEF,OAAO+1E,EAAMzuF,KAANyuF,CAAY,CAAZA,EAAeA,EAAMlyF,MAANkyF,GAAe,CAA9BA,EAAiCtqF,IAAjCsqF,CAAsC4G,cAAtC5G,CAAP;EAGF,4BAAA,CAA0B/1E,CAA1B;EACE,SAAOA,EAAI27E,UAAJ37E,CAAek9E,oBAAoBtB,UAAnC57E,IACHA,EAAI1Y,KAAJ0Y,CAAUk9E,oBAAoBtB,UAApBsB,CAA+Br5F,MAAzCmc,CADGA,GAEHA,CAFJ;EAUF;EAOE,YAAA,CAAYk6E,CAAZ;EACE,SAAKtzE,IAAIxG,GAAJwG,CAAQ,YAARA,UAAwD,MAAxBrjB,OAAO45F,YAA5C,EAKE,MAAM,IAAIn6F,KAAJ,CACF,yDADE,CAAN,CAKF,IAFAyG,KAAK2zF,EAAL3zF,GAAUlG,OAAO45F,YAAjB1zF,EAEiB,QAAbywF,CAAa,KAASA,CAA1B,EACE,MAAM,IAAIl3F,KAAJ,CACF,oEADE,CAAN,CAGFyG,KAAKywF,SAALzwF,GAAiBywF,CAAjBzwF,EACAA,KAAK6S,IAAL7S,GAAY4zF,aAAa5zF,KAAKywF,SAAlBmD,CADZ5zF;EAuGJ,UA1FQyzF,WAAAA,KAAAA,GAAN,UAAWlH,CAAX;;;EACE,YAAIA,EAAeC,aAAfD,YAAwCE,WAA5C,EACE,MAAM,IAAIlzF,KAAJ,CACF,0FADE,CAAN,CAIMi6F,IAAWt6D,KAAKE,SAALF,CAAeqzD,EAAeC,aAA9BtzD,CAAXs6D,EACAzG,IAAc7zD,KAAKE,SAALF,CAAeqzD,EAAeQ,WAA9B7zD,CADds6D,EAGApE,IACFsC,6BAA6BnF,CAA7BmF,CAJE8B,CAMN;EAQE,iBAPAxzF,KAAK2zF,EAAL3zF,CAAQ6zF,OAAR7zF,CAAgBA,KAAK6S,IAAL7S,CAAUsX,IAA1BtX,EAAgCk5B,KAAKE,SAALF,CAAek2D,CAAfl2D,CAAhCl5B,GACAA,KAAK2zF,EAAL3zF,CAAQ6zF,OAAR7zF,CAAgBA,KAAK6S,IAAL7S,CAAUwzF,QAA1BxzF,EAAoCwzF,CAApCxzF,CADAA,EAEAA,KAAK2zF,EAAL3zF,CAAQ6zF,OAAR7zF,CAAgBA,KAAK6S,IAAL7S,CAAU+sF,WAA1B/sF,EAAuC+sF,CAAvC/sF,CAFAA,EAGAA,KAAK2zF,EAAL3zF,CAAQ6zF,OAAR7zF,CACIA,KAAK6S,IAAL7S,CAAUitF,UADdjtF,EAEI8zF,0BAA0BvH,EAAeU,UAAzC6G,CAFJ9zF,CAHAA,QAOQovF,wBAAR;EACA,SATF,CASE,OAAOnwE,CAAP;EAEA,eAAW1I,CAAX,IAAkBvW,KAAK6S,IAAvB,EACE7S,KAAK2zF,EAAL3zF,CAAQ+zF,UAAR/zF,CAAmBA,KAAK6S,IAAL7S,CAAUuW,CAAVvW,CAAnBA,EAGF,MAAM,IAAIzG,KAAJ,CACF,2BAAyByG,KAAKywF,SAA9B,2GAAA,GAEsBrB,EAAmBxC,kBAFzC,wBAAA,GAGoBwC,EAAmBtC,gBAHvC,uBAAA,GAImBsC,EAAmBpC,eAJtC,MADE,CAAN;;;;KA3BAyG,EA6CAA,WAAAA,KAAAA,GAAN;;;EAGE,YAAY,SAFNn8E,IACF4hB,KAAKC,KAALD,CAAWl5B,KAAK2zF,EAAL3zF,CAAQg0F,OAARh0F,CAAgBA,KAAK6S,IAAL7S,CAAUsX,IAA1BtX,CAAXk5B,CACQ,CAAZ,EACE,MAAM,IAAI3/B,KAAJ,CACF,oDAAkDyG,KAAKywF,SAAvD,MADE,CAAN,CAIF,IAA+B,WAA3Bn5E,EAAKq1E,iBAAT,EACE,MAAM,IAAIpzF,KAAJ,CACF,2EADE,CAAN,CASF,IAJMgmD,MAAAA,EAIU,SADVi0C,IAAWt6D,KAAKC,KAALD,CAAWl5B,KAAK2zF,EAAL3zF,CAAQg0F,OAARh0F,CAAgBA,KAAK6S,IAAL7S,CAAUwzF,QAA1BxzF,CAAXk5B,CACD,CAAhB,EACE,MAAM,IAAI3/B,KAAJ,CACF,8CAA4CyG,KAAKywF,SAAjD,kBADE,CAAN,CAQF,IAJAlxC,EAAIitC,aAAJjtC,GAAoBi0C,CAApBj0C,EAImB,SADbwtC,IAAc7zD,KAAKC,KAALD,CAAWl5B,KAAK2zF,EAAL3zF,CAAQg0F,OAARh0F,CAAgBA,KAAK6S,IAAL7S,CAAU+sF,WAA1B/sF,CAAXk5B,CACD,CAAnB,EACE,MAAM,IAAI3/B,KAAJ,CACF,kDAAgDyG,KAAKywF,SAArD,mBADE,CAAN,CAQF,IAJAlxC,EAAIwtC,WAAJxtC,GAAkBwtC,CAAlBxtC,EAIwB,SADlB00C,IAAmBj0F,KAAK2zF,EAAL3zF,CAAQg0F,OAARh0F,CAAgBA,KAAK6S,IAAL7S,CAAUitF,UAA1BjtF,CACD,CAAxB,EACE,MAAM,IAAIzG,KAAJ,CACF,0DACIyG,KAAKywF,SADT,mBADE,CAAN,CAMF,OAFAlxC,EAAI0tC,UAAJ1tC,GAAiB20C,0BAA0BD,CAA1BC,CAAjB30C,MAEOA,EAAP;;;KAxFIk0C,EA9BUA,YAAAA,GAAa,iBA8BvBA,GA0FR;KA7HA;EAAA,IA+HaU,qBAA+B,UAAC3G,CAAD;EAC1C,SAAKrwE,IAAIxG,GAAJwG,CAAQ,YAARA,MAGEthB,MAAMC,OAAND,CAAc2xF,CAAd3xF,CAHFshB,IAICqwE,EAAI0E,UAAJ1E,CAAeiG,oBAAoBtB,UAAnC3E,CAJDrwE,GAKMi3E,oBACH5G,EAAI3vF,KAAJ2vF,CAAUiG,oBAAoBtB,UAApBsB,CAA+Br5F,MAAzCozF,CADG4G,CALNj3E,GACI,IADT;GAhIF,CAuKA,4BAAA,CAAoCszE,CAApC;EACE,SAAO,IAAIgD,mBAAJ,CAAwBhD,CAAxB,CAAP;EA5BFrD,kBAAiBiF,kBAAjBjF,CAAoC
+G,kBAApC/G,GACAA,iBAAiBkF,kBAAjBlF,CAAoC+G,kBAApC/G,CADAA,CA+BA;EAGE,YAAA;EACE1xF,WAAOyhB,IAAIxG,GAAJwG,CAAQ,YAARA,CAAPzhB,EAA8B,0CAA9BA,GACAA,YACmC,MAAxB5B,OAAO45F,YADlBh4F,EAEI,yDAFJA,CADAA,EAIAsE,KAAK2zF,EAAL3zF,GAAUlG,OAAO45F,YAJjBh4F;EAmCJ,UA5BQ24F,WAAAA,WAAAA,GAAN;;;EAIE,aAHM90C,MAAAA,EACA+0C,IAASnB,cAAcD,cADvB3zC,EAEAg1C,IAASrB,iBAAiBE,WAF1B7zC,EAGGxkD,IAAI,CAAb,EAAgBA,IAAIiF,KAAK2zF,EAAL3zF,CAAQ5F,MAA5B,IAAsCW,CAAtC,GACQwb,IAAMvW,KAAK2zF,EAAL3zF,CAAQuW,GAARvW,CAAYjF,CAAZiF,GACJkyF,WAAWoC,MAAW/9E,EAAIsO,QAAJtO,CAAag+E,CAAbh+E,MACtBk6E,IAAY+D,oBAAoBj+E,CAApBi+E,CAAZ/D,EACNlxC,EAAIkxC,CAAJlxC,IAAiBrmB,KAAKC,KAALD,CAAWl5B,KAAK2zF,EAAL3zF,CAAQg0F,OAARh0F,CAAgBuW,CAAhBvW,CAAXk5B,GAGrB,WAAOqmB,EAAP;;;KAXI80C,EAcAA,WAAAA,YAAAA,GAAN,UAAkBjI,CAAlB;;;EAGE,YAFAA,IAAOuG,mBAAiBvG,CAAjBuG,CAAPvG,EACMv5E,IAAO+gF,aAAaxH,CAAbwH,CADbxH,EAEkC,QAA9BpsF,KAAK2zF,EAAL3zF,CAAQg0F,OAARh0F,CAAgB6S,EAAKyE,IAArBtX,CAAJ,EACE,MAAM,IAAIzG,KAAJ,CAAU,gCAA8B6yF,CAA9B,MAAV,CAAN,CAQF,OANM90E,IAAO4hB,KAAKC,KAALD,CAAWl5B,KAAK2zF,EAAL3zF,CAAQg0F,OAARh0F,CAAgB6S,EAAKyE,IAArBtX,CAAXk5B,CAAP5hB,EAENtX,KAAK2zF,EAAL3zF,CAAQ+zF,UAAR/zF,CAAmB6S,EAAKyE,IAAxBtX,CAFMsX,EAGNtX,KAAK2zF,EAAL3zF,CAAQ+zF,UAAR/zF,CAAmB6S,EAAK2gF,QAAxBxzF,CAHMsX,EAINtX,KAAK2zF,EAAL3zF,CAAQ+zF,UAAR/zF,CAAmB6S,EAAKk6E,WAAxB/sF,CAJMsX,EAKNtX,KAAK2zF,EAAL3zF,CAAQ+zF,UAAR/zF,CAAmB6S,EAAKo6E,UAAxBjtF,CALMsX,MAMCA,EAAP;;;KA1BI+8E,GA4BR;KAvCA,KAyCIl3E,IAAIxG,GAAJwG,CAAQ,YAARA,GAGF;EACE4wE,4BAA0BkF,eAA1BlF,CACI0F,oBAAoBtB,UADxBpE,EACoC,IAAIsG,0BAAJ,EADpCtG;EAEA,CAHF,CAGE,OAAO9uE,CAAP,QC7REw1E,2BAA2B,OAAjC;EAAA,IACMC,8BAA8B,OADpC;EAAA,IAEMC,qCAAqC,cAF3C;EAAA;EAYE,YAAA,CAAYC,CAAZ;EACE,SAAKz3E,IAAIxG,GAAJwG,CAAQ,YAARA,CAAL,EAGE,MAAM,IAAI5jB,KAAJ,CACF,qFADE,CAAN,CAKEq7F,EAAe1C,UAAf0C,CAA0BC,EAAiB1C,UAA3CyC,MACFA,IAAiBA,EAAe/2F,KAAf+2F,CAAqBC,EAAiB1C,UAAjB0C,CAA4Bz6F,MAAjDw6F,CADfA,GAGkB,QAAlBA,CAAkB,IAAkC,MAA1BA,EAAex6F,MAAvB,KACpBw6F,IAAiBH,wBADG,CAHlBG,EAOJ50F,KAAK80F,qBAAL90F,GAA6B40F,IAAiBF,2BAP1CE,EAQJ50F,KAAK+0F,kBAAL/0F,GACI40F,IAAiBD,kCATjBC;EAwDR,UA5CQC,WAAAA,KAAAA,GAAN,UAAWtI,CAAX;;;EAIE,YAHMyI,IAAal7F,OAAOm7F,GAAPn7F,CAAWo7F,eAAXp7F,CAA2B,IAAI+xF,IAAJ,EACzCU,EAAeU,WAD0B,IACZnzE,MAAM,4BADM,CAA3BhgB,CAAbk7F,EAGFzI,EAAeC,aAAfD,YAAwCE,WAA5C,EACE,MAAM,IAAIlzF,KAAJ,CACF,sFADE,CAAN,CAoCA,OAhCM47F,OACJC,QAAQ,OAAOp1F,KAAK+0F,qBACpBxlB,SAASgd,EAAeQ,cAFpBoI,EAIAE,MACJ7I,eAAeD,EAAeC,eAC9B2I,oBANIA,EAQAG,IACFx7F,OAAOm7F,GAAPn7F,CAAWo7F,eAAXp7F,CAA2B,IAAI+xF,IAAJ,EACtB3yD,KAAKE,SAALF,CAAem8D,CAAfn8D,EADsB,IAEtBpf,MAAM,oBAFgB,CAA3BhgB,CATEq7F,GAeAI,IAAgC,QAAnBv1F,KAAKu1F,UAAc,GAAOr9F,SAASC,aAATD,CAAuB,GAAvBA,CAAP,GACO8H,KAAKu1F,YACvCC,WAAWx1F,KAAK80F,qBAjBrBK,EAkBNI,EAAWE,IAAXF,GAAkBD,CAlBZH,EAqBNI,EAAWG,KAAXH,EArBMJ,EAuB2B,QAA7B5I,EAAeU,UAAc,MACzB0I,IAA4C,QAAzB31F,KAAK21F,gBAAoB,GAC9Cz9F,SAASC,aAATD,CAAuB,GAAvBA,CAD8C,GAE9C8H,KAAK21F,kBACQH,WAAWx1F,KAAK+0F,oBACjCY,EAAiBF,IAAjBE,GAAwBX,GACxBW,EAAiBD,KAAjBC,EAN+B,CAvB3BR,QAgCE/F,oBAAoBsC,6BAA6BnF,CAA7BmF,IAA5B;;;KAzCEmD,EAvBUA,YAAAA,GAAa,cAuBvBA,GA4CR;KA7EA;EAAA;EAkFE,YAAA,CAAYe,CAAZ;EACE,QAAa,QAATA,CAAS,IAAQA,EAAMx7F,MAANw7F,GAAe,CAApC,EACE,MAAM,IAAIr8F,KAAJ,CACF,0EACgBq8F,CAFd,CAAN,CAIF51F,KAAK41F,KAAL51F,GAAa41F,CAAb51F;EAwHJ,UArHQ61F,WAAAA,KAAAA,GAAN;;;;;EAIE,eAHMC,IAAW91F,KAAK41F,KAAL51F,CAAW,CAAXA,CAAX81F,EACAC,IAAc/1F,KAAK41F,KAAL51F,CAAWnC,KAAXmC,CAAiB,CAAjBA,CADd81F,MAGC,IAAI34F,OAAJ,CAA4B,UAACC,CAAD,EAAUC,CAAV;EACjC,cAAM24F,IAAa,IAAIC,UAAJ,EAAnB,CACAD,EAAWE,MAAXF,GAAoB,UAACG,CAAD;EAElB,gBAAMC,IAAYl9D,KAAKC,KAALD,CAAYi9D,EAAME,MAANF,CAAqBj7F,MAAjCg+B,CAAlB;EAAA,gBACMszD,IAAgB4J,EAAU5J,aADhC,CAEA,IAAqB,QAAjBA,CAAJ
,EAAA;EAM2B,oBAAvBuJ,EAAY37F,MAAW,IACzBgD,IAASovF,kBAATpvF,CADyB,CAI3B,IAAM+3F,IACFiB,EAAUjB,eADd,CAEA,IAAuB,QAAnBA,CAAJ,EAAA;EAMA,oBAAImB,CAAJ,CACA;EACEA,sBACI31F,EAAK41F,2BAAL51F,CAAiCw0F,CAAjCx0F,EAAkDo1F,CAAlDp1F,CADJ21F;EAEA,iBAHF,CAGE,OAAOr3E,CAAP;EAEA,8BADA5hB,EAAO4hB,CAAP5hB,CACA;EAGF,qBAAM0vF,MAAN;EAAA,oBACMqI,MADN;EAAA,oBAEMoB,MAFN,CAGArB,EAAgB/0F,OAAhB+0F,CAAwB,UAAAsB,CAAA;EACtBA,oBAAarB,KAAbqB,CAAmBr2F,OAAnBq2F,CAA2B,UAAArK,CAAA;EACzBgJ,sBAAMp5F,IAANo5F,CAAWhJ,CAAXgJ,GACAoB,EAAex6F,IAAfw6F,CAAoB,IAApBA,CADApB;qBADFqB,GAIA1J,EAAY/wF,IAAZ+wF,MAAAA,CAAAA,CAAAA,EAAoB0J,EAAalnB,OAAjCwd,CAJA0J;mBADFtB,GAQAA,EAAgB/0F,OAAhB+0F,CAAwB,UAAAsB,CAAA;EACtBA,oBAAarB,KAAbqB,CAAmBr2F,OAAnBq2F,CAA2B,UAAArK,CAAA;EACzB,wBAAMsK,IAAmB,IAAIT,UAAJ,EAAzB,CACAS,EAAiBR,MAAjBQ,GAA0B,UAACP,CAAD;EAExB,0BAAMlJ,IAAckJ,EAAME,MAANF,CAAqBj7F,MAAzC;EAAA,0BACMZ,IAAQ86F,EAAM9zE,OAAN8zE,CAAchJ,CAAdgJ,CADd,CAEAoB,EAAel8F,CAAfk8F,IAAwBvJ,CAAxBuJ,GACsC,MAAlCA,EAAel1E,OAAfk1E,CAAuB,IAAvBA,KACFp5F,IACEovF,kBACAO,gBACAE,YAAY0J,wBAAwBH,CAAxBG,GAHdv5F,CAFFo5F;uBAJFE,EAaAA,EAAiBpF,OAAjBoF,GAA2B,UAAC1hF,CAAD;EACzB3X,wBAAO,+CAA6C+uF,CAA7C,OAAP/uF;uBAdFq5F,EAiBAA,EAAiBE,iBAAjBF,CAAmCJ,EAAWlK,CAAXkK,CAAnCI,CAjBAA;qBAFFD;mBADFtB,CARAA;iBAlBA,MACE93F,EAAO,IAAI9D,KAAJ,CACH,+CAA6Cu8F,EAASv3F,IADnD,CAAPlB;eAbF,MACEA,EAAO,IAAI9D,KAAJ,CACH,8CAA4Cu8F,EAASv3F,IADlD,CAAPlB;aALJ24F,EAkEAA,EAAW1E,OAAX0E,GAAqB,UAAChhF,CAAD;EACnB3X,cACI,wEACcy4F,EAASv3F,IADvB,0EADJlB;aAnEF24F,EAyEAA,EAAWa,UAAXb,CAAsBF,CAAtBE,CAzEAA;WAFK,EAAP;;;KAJIH,EAsFEA,WAAAA,4BAAAA,GAAR,UACIiB,CADJ,EACqClB,CADrC;EAKE,SAHA,IAAMmB,MAAN,EACMC,IAAYpB,EAAM9zF,GAAN8zF,CAAU,UAAAqB,CAAA;EAAQ,aAAAC,SAASD,EAAK14F,IAAd24F,CAAA;OAAlBtB,CADlB,EAEMU,MAFN,OAAA,EAGoBa,KAApB,EAAoBnzF,YAApB,EAAoBA,GAApB,EAA8B;aACtBoxF,MAAMh1F,QAAQ,UAAAgsF,CAAA;EAClB,YAAMgL,IAAeF,SAAS9K,CAAT8K,CAArB,CACA,KAAyC,MAArCH,EAAUz1E,OAAVy1E,CAAkBK,CAAlBL,CAAJ,EACE,MAAM,IAAIx9F,KAAJ,CACF,yDACI69F,CADJ,MADE,CAAN,CAKF,IADAL,EAAU/6F,IAAV+6F,CAAeK,CAAfL,IACyC,MAArCC,EAAU11E,OAAV01E,CAAkBI,CAAlBJ,CAAJ,EACE,MAAM,IAAIz9F,KAAJ,CACF,gCAA8B69F,CAA9B,uBADE,CAAN,CAGAd,EAAWlK,CAAXkK,IAAmBV,EAAMoB,EAAU11E,OAAV01E,CAAkBI,CAAlBJ,CAANpB,CAAnBU;;EAKN,SAAIS,EAAU38F,MAAV28F,KAAqBnB,EAAMx7F,MAA/B,EACE,MAAM,IAAIb,KAAJ,CACF,0DACIw9F,EAAU38F,MADd,gDAAA,GAEIw7F,EAAMx7F,MAFV,OADE,CAAN,CAKF,OAAOk8F,CAAP;KAnHIT,GAqHR;KAhNA;EAAA,IAkNawB,yBAAmC,UAAC7J,CAAD;EAC9C,SAAKrwE,IAAIxG,GAAJwG,CAAQ,YAARA,MAGEthB,MAAMC,OAAND,CAAc2xF,CAAd3xF,CAHFshB,IAGwBqwE,EAAI0E,UAAJ1E,CAAeqH,iBAAiB1C,UAAhC3E,CAHxBrwE,GAIMm6E,iBAAiB9J,EAAI3vF,KAAJ2vF,CAAUqH,iBAAiB1C,UAAjB0C,CAA4Bz6F,MAAtCozF,CAAjB8J,CAJNn6E,GACI,IADT;GAnNF,CAgQA,yBAAA,CAAiCy3E,CAAjC;EACE,0BAD+BA,cACxB,IAAIC,gBAAJ,CAAqBD,CAArB,CAAP;EAqCF,sBAAA,CAA6BgB,CAA7B;EACE,SAAO,IAAIC,YAAJ,CAAiBD,CAAjB,CAAP;qCClSE2B,GAAqBC;;;;EAGL,iBADZC,IAAWF,EAAUz1F,GAAVy1F,CAAc,UAAAG,CAAA;EAAY,mBAAAC,MAAMD,CAANC,EAAgBH,CAAhBG,CAAA;aAA1BJ,CAAXE,MACkBt6F,QAAQ4K,GAAR5K,CAAYs6F,CAAZt6F,EAAN;EAEd,iBAFEy6F,IAAYz+E,MAAAA,EAAZy+E,MAEIz6F,QAAQ4K,GAAR5K,CAAYy6F,EAAU91F,GAAV81F,CAAc,UAAAC,CAAA;EAAY,mBAAAA,EAASC,WAATD,EAAA;aAA1BD,CAAZz6F,EAAN;EACJ,qBADIgc,MAAAA,GACJ;;;EAYF,qBAAA,CACI29E,CADJ,EACqCiB,CADrC,EAEIC,CAFJ,EAGIR,CAHJ;4BACqCO;;;EA2DnC,cAhDME,IAAyBnB,EAASh1F,GAATg1F,CAAa;EAAM,oBAAA,CAAA;aAAnBA,CAAzBmB,EACAC,MADAD,EAOAE,IAA8B,QAAfH,CAAe,GAAOA,EAAYl2F,GAAZk2F,CAAgB;EAAM,oBAAA,CAAA;aAAtBA,CAAP,KAP9BC,EAQAG,MARAH,EASNnB,EAAS12F,OAAT02F,CAAiB,UAACuB,CAAD,EAAsBC,CAAtB;EACf,gBAAIC,IAAc,CAAlB,CACAF,EAAoB9oB,OAApB8oB,CAA4Bj4F,OAA5Bi4F,CAAoC,UAAAG,CAAA;EAClC,kBAAMC,IAAY,kBAAkBD,CAAlB,GACdA,EAAatN,YAAbsN,CAA0Bv6F,KADZ,GAEdu6F,EAAav6F,KAFjB;EAAA,k
BAIMy6F,IAAenO,qBAAqBkO,CAArBlO,IACjB5mF,cAAmB60F,EAAav8F,KAAhC0H,CALJ;EAAA,kBAOMg1F,IAA8B;EAClCV,kBAAuBK,CAAvBL,KAAqC,CAArCA,EACuC,QAAnCC,EAAoBI,CAApBJ,CAAmC,KACrCA,EAAoBI,CAApBJ,MADqC,CADvCD,EAKAC,EAAoBI,CAApBJ,EAAgCl8F,IAAhCk8F,GACEU,eAAeJ,GACfD,gBACAM,WAAWH,GAHbR,CALAD;iBARF,CAoBmB,QAAfD,CAAe,GACjBA,EAAY53F,OAAZ43F,CAAoB,UAACc,CAAD,EAAaC,CAAb;EACdD,sBAAeN,EAAaj6F,IAA5Bu6F,KACFH,KACAR,EAAaY,CAAbZ,KAA4B,CAF1BW;iBADNd,CADiB,GAQjBW,GARiB,EAWnBP,EAAuBp8F,IAAvBo8F,CAA4BI,EAAaj6F,IAAzC65F,CAXmB,EAYnBG,KAAeG,CAZI;eArBrBL;aAFFvB,CATMmB,GAgDDE,EAAal/E,KAAbk/E,CAAmB,UAAAa,CAAA;EAAS,mBAAAA,CAAA;aAA5Bb,CAAL,EAEE,MADMc,IAAkBjB,EAAY1qF,MAAZ0qF,CAAmB,UAACkB,CAAD,EAASn+F,CAAT;EAAe,oBAACo9F,EAAap9F,CAAbo9F,CAAD;aAAlCH,CAAlBiB,EACA,IAAI1/F,KAAJ,CACF,oDACG0/F,EAAgBj3F,IAAhBi3F,CAAqB,IAArBA,CADH,+CAAA,GAGGb,EAAuBp2F,IAAvBo2F,CAA4B,IAA5BA,CAHH,MADE,CAAN,CAyBc,OAhBVe,IACFlB,EAAuBt8C,MAAvBs8C,CAA8B,UAACmB,CAAD,EAAcC,CAAd,EAA2Bt+F,CAA3B;EAI5B,mBAHIs+F,KACFD,EAAYp9F,IAAZo9F,CAAiBr+F,CAAjBq+F,CADEC,EAGGD,CAAP;aAJFnB,IAAAA,CADEkB,EAQAG,MARAH,EASNA,EAAoB/4F,OAApB+4F,CAA4B,UAAAp+F,CAAA;EAC1B+7F,cAAS/7F,CAAT+7F,EAAY1B,KAAZ0B,CAAkB12F,OAAlB02F,CAA0B,UAAAyC,CAAA;EACxB,kBAAMC,IAAWzB,KACXA,EAAelzE,QAAfkzE,CAAwB,GAAxBA,IAAqC,EAArCA,GAA+B,GADpBA,IACgCwB,CADjD,CAEAD,EAAUt9F,IAAVs9F,CAAeE,CAAfF;eAHFxC;aADFqC,CATMA,MAgBgBM,yBAAyBH,CAAzBG,EAAoCjC,CAApCiC,EAAN;EAqChB,iBArCMtN,IAAUhzE,MAAAA,EAAVgzE,EAEAuN,MAFAvN,EAGFwN,IAAoB,CAHlBxN,EAINgN,EAAoB/4F,OAApB+4F,CAA4B,UAAAp+F,CAAA;EAI1B,iBAHA,IAAM6+F,IAAa9C,EAAS/7F,CAAT+7F,EAAY1B,KAAZ0B,CAAkB18F,MAArC,EAEIy/F,IAAa,CAFjB,EAGS7b,IAAI,CAAb,EAAgBA,IAAI4b,CAApB,EAAgC5b,GAAhC,EACE6b,KAAc1N,EAAQwN,IAAoB3b,CAA5BmO,EAA+BT,UAA7CmO,CAOF,KAHA,IAAMC,IAAc,IAAIrN,WAAJ,CAAgBoN,CAAhB,CAApB,EACME,IAAkB,IAAI17F,UAAJ,CAAey7F,CAAf,CADxB,EAEIE,IAAoB,CAFxB,EAGS7b,IAAI,CAAb,EAAgBA,IAAIyb,CAApB,EAAgCzb,GAAhC,EAAqC;EACnC,kBAAM34E,IAAS,IAAInH,UAAJ,CAAe8tF,EAAQwN,IAAoBxb,CAA5BgO,CAAf,CAAf,CACA4N,EAAgBjjF,GAAhBijF,CAAoBv0F,CAApBu0F,EAA4BC,CAA5BD,GACAC,KAAqBx0F,EAAOkmF,UAD5BqO;EAIqB7B,eAAoBn9F,CAApBm9F,EACR93F,OADQ83F,CACA,UAAAM,CAAA;EACrB,kBAGMyB,IACFC,cAJeJ,EAAYj8F,KAAZi8F,CACftB,EAAaD,WADEuB,EAEftB,EAAaD,WAAbC,GAA2BA,EAAaK,SAFzBiB,CAIfI,GAA2B1B,EAAaI,cAAxCsB,CAJJ,CAKA,KAAK,IAAMxP,CAAX,IAAmBuP,CAAnB,EACEP,EAAiBhP,CAAjBgP,IAAyBO,EAAgBvP,CAAhBuP,CAAzBP;eARmBxB,GAYvByB,KAAqBC,CAZE1B;aAlBzBiB,CAJMhN,MAqCCuN,EAAP;;;ED0EFtM,kBAAiBiF,kBAAjBjF,CAAoCiK,sBAApCjK,EE3NA;EAQE,YAAA,CACIhB,CADJ,EAC2B+N,CAD3B,EAEqBC,CAFrB;EAGE,QADmBp6F,qBAAAA,GAAAo6F,CAAAp6F,EANZA,mBAAAA,GAAiB,MAMLA,EACE,sBAAV23F,KAAX,EACE,MAAM,IAAIp+F,KAAJ,CAEF,uFAFE,CAAN,CAkBF,IAbAmC,OACY,QAAR0wF,CAAQ,IAAQA,EAAKhyF,MAALgyF,GAAc,CADlC1wF,EAEI,uEAFJA,GAKIG,MAAMC,OAAND,CAAcuwF,CAAdvwF,KACFH,OACoB,MAAhB0wF,EAAKhyF,MADTsB,EAEI,iFACyB0wF,EAAKhyF,MAD9B,OAFJsB,CANFA,EAWAsE,KAAKosF,IAALpsF,GAAYosF,CAXZ1wF,EAamB,QAAfy+F,CAAe,IAA4B,QAApBA,EAAYE,IAAvC,EACE,MAAM,IAAI9gG,KAAJ,CACF,oEADE,CAAN,CAGFyG,KAAKm6F,WAALn6F,GAAmBm6F,OAAnBn6F;EA+JJ,UA5JQs6F,WAAAA,KAAAA,GAAN,UAAW/N,CAAX;;;;EACE,gBAAIA,EAAeC,aAAfD,YAAwCE,WAA5C,EACE,MAAM,IAAIlzF,KAAJ,CACF,yFADE,CAAN,CAgCe,QA3BX0kD,IAAO95C,OAAO6M,MAAP7M,GAAe+kB,QAAQlpB,KAAKu6F,gBAA5Bp2F,EAA6CnE,KAAKm6F,WAAlDh2F,GACRk2F,OAAO,IAAIG,QAAJ,IAENrF,OACJC,QAAQ,wBACR7lB,SAASgd,EAAeQ,gBAEpBsI,MACJ7I,eAAeD,EAAeC,eAC9B2I,sBAGFl3C,EAAKo8C,IAALp8C,CAAUw8C,MAAVx8C,CACI,YADJA,EAEI,IAAI4tC,IAAJ,EACK3yD,KAAKE,SAALF,CAAem8D,CAAfn8D,EADL,IAEKpf,MAAM,oBAFX,CAFJmkC,EAKI,YALJA,GAOiC,QAA7BsuC,EAAeU,UAAc,IAC/BhvC,EAAKo8C,IAALp8C,CAAUw8C,MAAVx8C,CACI,mBADJA,EAEI,IAAI4tC,IAAJ,EACKU,EAAeU,WADpB,IACkCnzE,MAAM,4BADxC,CAFJmkC,EAII,mBAJJA,OAOqB05C,MAAM33F,KAAKosF,IAAX
uL,EAA2B15C,CAA3B05C,EAAN;EAEjB,iBAFME,IAAW1+E,MAAAA,IAEJuhF,EAAb,EACE,aACEtL,oBAAoBsC,6BAA6BnF,CAA7BmF,GACpBkG,YAAYC,KAFd,CAKA,MAAM,IAAIt+F,KAAJ,CACF,kEACGs+F,EAASx6D,MADZ,MADE,CAAN;;;KA1CEi9D,EAwDAA,WAAAA,KAAAA,GAAN;;;EACE,mBAAOz+F,MAAMC,OAAND,CAAcmE,KAAKosF,IAAnBvwF,IAA2BmE,KAAK26F,eAAL36F,EAA3BnE,GAC2BmE,KAAK46F,aAAL56F,GADlC;;;KAzDIs6F,EAgEQA,WAAAA,mBAAAA,GAAd;;;;EAEqB,gDAAM3C,MAAM33F,KAAKosF,IAALpsF,CAAU,CAAVA,CAAN23F,EAAoB33F,KAAKm6F,WAAzBxC,EAAN;EACjB,kBADME,IAAW1+E,MAAAA,IACHuhF,EAAd,EACE,MAAM,IAAInhG,KAAJ,CACF,4DACIs+F,EAASgD,UAFX,CAAN,CAIK,WAAMhD,EAASC,WAATD,GAAN;EAAP,uBAAO1+E,MAAAA,GAAP;EAEA,gCAAM,IAAI5f,KAAJ,CAAayG,KAAKosF,IAALpsF,CAAU,CAAVA,kBAAAA,GAA2B86F,CAAxC,CAAN;;;;KA1EER,EA8EUA,WAAAA,gBAAAA,GAAhB;;;;EAE0B,mBADlBS,IAAe/6F,KAAKg7F,kBAALh7F,EAAf+6F,MACwBpD,MAAM33F,KAAKosF,IAALpsF,CAAU,CAAVA,CAAN23F,EAAoB33F,KAAKm6F,WAAzBxC,EAAN;EACxB,kBADMsD,IAAkB9hF,MAAAA,IACHuhF,EAArB,EACE,MAAM,IAAInhG,KAAJ,CAAU,4DACZ0hG,EAAgBJ,UADd,CAAN,CAIc,WAAM19F,QAAQ4K,GAAR5K,EAAa49F,GAAcE,EAA3B99F,EAAN;EAIZ,mBAJE+9F,IAAU/hF,MAAAA,EAAV+hF,EACC1O,IAA0C0O,GAAAA,CAD3CA,MAC2CA,GAAAA,EAGfC,IAHeD,GAG7C;qBAImB,SALjB/F,IACFh8E,MAAAA,EAImB,UAAA,OACCnZ,KAAKo7F,WAALp7F,CAAiBm1F,CAAjBn1F;EAAhBq7F,gBAAUliF,MAAAA,EAAVkiF,EACLtO,QADKsO,EACQpO,QADRoO,aAAAA;EAIR,yBAAQ7O,kBAAeO,gBAAaE,gBAApC;;;KAnGIqN,EAsGUA,WAAAA,cAAAA,GAAhB;;;;EAEM,uBAAM3C,MAAM33F,KAAKosF,IAAXuL,EAA2B33F,KAAKm6F,WAAhCxC,EAAN;EACJ,kBAFM2D,IACFniF,MAAAA,IACoBuhF,EAAxB,EACE,MAAM,IAAInhG,KAAJ,CAAU,4DACZ+hG,EAAmBT,UADjB,CAAN,CAGkB,WAAMS,EAAmBH,IAAnBG,GAAN;EAKpB,gBALMC,IAAcpiF,MAAAA,EAAdoiF,EACA/O,IAAgB+O,EAA2B/O,aAD3C+O,EAEApG,IAAkBoG,EAA6BpG,eAF/CoG,EAKe,QAAjB/O,CAAiB,IAA2B,QAAnB2I,CAA7B,EACE,MAAM,IAAI57F,KAAJ,CACF,6BAA2ByG,KAAKosF,IAAhC,8DADE,CAAN,QAOqB,QAAnB+I,CAAmB,SAAA,IACfqG,IACFD,EAA6BpG,eAD3BqG,MAEgBx7F,KAAKo7F,WAALp7F,CAAiBw7F,CAAjBx7F,EAHD;EAGfk7F,gBAAU/hF,MAAAA,EAAV+hF,EACLnO,QADKmO,EACQjO,QADRiO,aAAAA;EAIR,yBAAQ1O,kBAAeO,gBAAaE,gBAApC;;;KAjIIqN,EAoIQA,WAAAA,YAAAA,GAAd,UAA0BnF,CAA1B;;;;EAOE,iBALMsG,IAAa5/F,MAAMC,OAAND,CAAcmE,KAAKosF,IAAnBvwF,IAA2BmE,KAAKosF,IAALpsF,CAAU,CAAVA,CAA3BnE,GAA0CmE,KAAKosF,IAA5DqP,EACAtiF,IAAmBuiF,SAASD,CAATC,CADnBD,EACCnH,QADDmH,EACSlH,QADTkH,EAEAE,IAAa37F,KAAKo6F,gBAALp6F,IAAyBs0F,CAFtCmH,EAIA1O,MAJA0O,OAAAA,EAKcG,KAApB,EAAoB53F,YAApB,EAAoBA,GAApB,EAAW6Z,QAAAA,EACTkvE,EAAY/wF,IAAZ+wF,MAAAA,CAAAA,CAAAA,EAAoBlvE,EAAM0xD,OAA1Bwd,CADSlvE,CAcL,OAVA05E,MAAAA,EACNpC,EAAgB/0F,OAAhB+0F,CAAwB,UAAAsB,CAAA;EACtBA,gBAAarB,KAAbqB,CAAmBr2F,OAAnBq2F,CAA2B,UAAArK,CAAA;EACzBmL,kBAAUv7F,IAAVu7F,CAAeoE,IAAavP,CAAbuP,GAAoBpH,CAAnCgD;iBADFd;eADFtB,CADMoC,OAQJxK,EARIwK,EASJ5kE,IAAAgkE,uBATIY,MAUMkC,yBAAyBlC,CAAzBkC,EAAoCz5F,KAAKm6F,WAAzCV,EAAN;EAHN,iCAEE9mE,OAAAA,OAAAA,GACIypC,MAAAA,GADJzpC,IAFF;;;KAtJI2nE,EA/BUA,kBAAAA,GAAmB,cA+B7BA,GA4JR;KAjMA,mBA8MyB9M;EACvB,MAAMqO,IAAYrO,EAAIsO,WAAJtO,CAAgB,GAAhBA,CAAlB;EAAA,MACMuO,IAAkBvO,EAAIsO,WAAJtO,CAAgB,GAAhBA,CADxB,CAKA,QAHeA,EAAI1oE,SAAJ0oE,CAAc,CAAdA,EAAiBqO,CAAjBrO,IAGE,KADbuO,IAAkBF,CAAlBE,GAA8BvO,EAAI1oE,SAAJ0oE,CAAcuO,CAAdvO,CAA9BuO,GAA+D,GACnE;EAGF,sBAAA,CAAsBvO,CAAtB;EACE,SAAyD,QAAlDA,EAAIwO,KAAJxO,CAAU8M,mBAAmB2B,gBAA7BzO,CAAP;EAGF,KAAa0O,oBAA8B,UAAC1O,CAAD;EACzC,MAAqB,sBAAVmK,KAAX,EAGE,OAAO,IAAP,CAQA,QALI97F,MAAMC,OAAND,CAAc2xF,CAAd3xF,IACO2xF,EAAIv0E,KAAJu0E,CAAU,UAAA2O,CAAA;EAAW,WAAAC,aAAaD,CAAbC,CAAA;KAArB5O,CADP3xF,GAGOugG,aAAa5O,CAAb4O,KAGFC,mBAAmB7O,CAAnB6O,IAGJ,IAJL;GAZJ,CAiKA,2BAAA,CACIjQ,CADJ,EAC2B+N,CAD3B,EAEIC,CAFJ;EAGE,SAAO,IAAIE,kBAAJ,CAAuBlO,CAAvB,EAA6B+N,CAA7B,EAA0CC,CAA1C,CAAP;EAlJFhN,kBAAiBiF,kBAAjBjF,CAAoC8O,iBAApC9O,GACAA,iBAAiBkF,kBAAjBlF,CAAoC8O,iBAApC9O,CADAA,CC
nPA;EACE,YAAA,CACqBZ,CADrB,EAEqBO,CAFrB,EAGqBE,CAHrB;EACqBjtF,sBAAAA,GAAAwsF,CAAAxsF,EACAA,gBAAAA,GAAA+sF,CADA/sF,EAEAA,eAAAA,GAAAitF,CAFAjtF;EAiBvB,UAbQs8F,WAAAA,KAAAA,GAAN;;;EAWE,eAVIphG,MAAAA,EACsB,QAAtB8E,KAAKwsF,aAAiB,KACxBtxF,eAAUsxF,eAAexsF,KAAKwsF,iBAAkBtxF,EADxB,CADtBA,EAIoB,QAApB8E,KAAK+sF,WAAe,IAAQ/sF,KAAK+sF,WAAL/sF,CAAiB5F,MAAjB4F,GAA0B,CAAlC,KACtB9E,eAAU6xF,aAAa/sF,KAAK+sF,eAAgB7xF,EADtB,CAJpBA,EAOmB,QAAnB8E,KAAKitF,UAAc,IAAQjtF,KAAKitF,UAALjtF,CAAgB0rF,UAAhB1rF,GAA6B,CAArC,KACrB9E,eAAU+xF,YAAYjtF,KAAKitF,cAAe/xF,EADrB,CAPnBA,MAUGA,EAAP;;;KAXIohG,GAaR;KAnBA;EAAA;EAsBE,YAAA,CACqB3N,CADrB;EACqB3uF,oBAAAA,GAAA2uF,CAAA3uF;EAMvB,UAHQu8F,WAAAA,KAAAA,GAAN,UAAWhQ,CAAX;;;EACE,mBAAOvsF,KAAK2uF,WAAL3uF,CAAiBusF,CAAjBvsF,EAAP;;;KADIu8F,GAGR;KA7BA,CAmDA,mBAAA,CACI/P,CADJ,EACuBO,CADvB,EAEIE,CAFJ;EAGE,SAAO,IAAIqP,iBAAJ,CAAsB9P,CAAtB,EAAqCO,CAArC,EAAkDE,CAAlD,CAAP;EAkBF,yBAAA,CACI0B,CADJ;EAGE,SAAO,IAAI4N,gBAAJ,CAAqB5N,CAArB,CAAP;ECpEF,KAAM0D,qBAAqBjF,iBAAiBiF,kBAA5C;EAAA,IACMC,qBAAqBlF,iBAAiBkF,kBAD5C;EAAA,IAEM5D,kBAAkBtB,iBAAiBsB,eAFzC;EAAA,IAGMH,kBAAkBnB,iBAAiBmB,eAHzC;EAAA,8kBAAA,2BCwBIpe,GAA6BC,GAC7BuB;EACF,MAAMtB,IAAU1rD,gBAAgBwrD,CAAhBxrD,EAAwB,OAAxBA,EAAiC,iBAAjCA,EAAoD,OAApDA,CAAhB;EAAA,MACM2rD,IACF3rD,gBAAgByrD,CAAhBzrD,EAA6B,OAA7BA,EAAsC,iBAAtCA,EAAyD,OAAzDA,CAFJ,CAIA/gB,OACkB,QAAd+tE,CAAc,IAAQA,IAAa,CAAbA,IAAkBv2E,OAAOohG,SAAPphG,CAAiBu2E,CAAjBv2E,CAD5CwI,EAEI,iEACe+tE,CAHnB/tE,GAIAA,OACqB,MAAjBysE,EAAQjxE,IADZwE,EAEI,kDAAgDysE,EAAQjxE,IAF5DwE,CAJAA,EAOAA,OAC0B,MAAtB0sE,EAAalxE,IADjBwE,EAEI,uDACe0sE,EAAalxE,IAHhCwE,CAPAA,EAWAA,OACIysE,EAAQp0E,KAARo0E,CAAc,CAAdA,MAAqBC,EAAar0E,KAAbq0E,CAAmB,CAAnBA,CADzB1sE,EAEI,yCACOysE,EAAQp0E,KAARo0E,CAAc,CAAdA,CADP,UAAA,GAC+BC,EAAar0E,KAAbq0E,CAAmB,CAAnBA,CAD/B,sEAFJ1sE,CAXAA,EAgBAA,OACI+tE,IAAa,CAAbA,IAAkBv2E,OAAOohG,SAAPphG,CAAiBu2E,CAAjBv2E,CADtBwI,EAEI,8DAA4D+tE,CAFhE/tE,CAhBAA,CAsBA,IAAM64F,IAAej6C,OAAO6tB,EAAQvqE,MAARuqE,CAAe,OAAfA,CAAP7tB,EAAgCmvB,CAAhCnvB,CAArB;EAAA,MACMk6C,IAAoBl6C,OAAO8tB,EAAaxqE,MAAbwqE,CAAoB,OAApBA,CAAP9tB,EAAqCmvB,CAArCnvB,CAD1B,CAEA,OAAOi6C,EAAa/yF,SAAb+yF,GAAyB51F,MAAzB41F,CAAgCC,CAAhCD,EAAmD32F,MAAnD22F,CAA0D,OAA1DA,CAAP;EAGF,KAAaE,kBAAkBn3E,KAAIo3E,oCAAJp3E,CAA/B;EAAA,8DAAA;EAAA;ECzBA,YAAA,YAYEq3E,WAAAA,aAAAA,GAAA;EACE,WAAQ78F,KAAKhB,WAALgB,CACH88F,SADL;KADFD,EAkBOA,YAAAA,GAAP,UACIE,CADJ,EACqCC,CADrC;EAEE,WAAO,IAAID,CAAJ,CAAQC,CAAR,CAAP;KApBFH,GAsBF;KDTA;EAAA;EC2BE,YAAA;EACE78F,SAAKi9F,YAALj9F,KAAAA;EAoBJ,UAdSk9F,QAAAA,GAAP;EAIE,WAHiC,QAA7BA,EAAiB3sF,QAAY,KAC/B2sF,EAAiB3sF,QAAjB2sF,GAA4B,IAAIA,CAAJ,EADG,GAG1BA,EAAiB3sF,QAAxB;KAJK2sF,EAUAA,UAAAA,GAAP,UAAwCH,CAAxC;EACEG,MAAiBC,MAAjBD,GAA0BD,YAA1BC,CAAuCH,EAAID,SAA3CI,KACKH,GAAKA,EAAIK,WADdF;KAXKA,GAcT;KDhDA,wBC0EIH;EACFrhG,SACqB,QAAjBqhG,EAAID,SADRphG,EAEI,6EAFJA,GAIAA,OAC6B,mBAAlBqhG,EAAID,SADfphG,EAEI,+DACWqhG,EAAID,SAHnBphG,CAJAA,EAQAA,OACIqhG,EAAID,SAAJC,CAAc3iG,MAAd2iG,GAAuB,CAD3BrhG,EAEI,mFAFJA,CARAA,EAaAwhG,iBAAiBnmF,QAAjBmmF,CAA0BH,CAA1BG,CAbAxhG;;MC1IW2hG,eACXC,YAAa;MAEFC,cACXC,UAAW;MAEAC,gBACXC,YAAa;MAEFC,iBACXC,aAAc;MAEHC,aACXP,YAAa;MAEFQ,qBACXC,SAAW;MAGAC,cAEb,0BAAA,CACIC,CADJ,EAEIC,CAFJ,EAEoD1tB,CAFpD;EAME,MAHe,QAAXA,CAAW,KACbA,IAAUrzD,IAAIxG,GAAJwG,CAAQ,cAARA,CADG,GAGT8gF,aAAkB75F,MAAlB65F,IAA+BC,aAAoB95F,MAAzD;EASO,QAAI65F,aAAkB75F,MAAlB65F,IAA4BC,aAAoB95F,MAApD,EAA4D;EACjE,UAAI65F,EAAOhgG,KAAPggG,KAAiBC,EAASjgG,KAA9B,EACE,MAAM,IAAI1E,KAAJ,CACF,0CAAwC0kG,EAAOhgG,KAA/C,mBAAA,GACgBigG,EAASjgG,KADzB,MADE,CAAN,CAIF,KAAK8S,YAAiBktF,EAAOhiG,KAAxB8U,EAA+BmtF,EAASjiG,KAAxC8U,CAAL,EACE,MAAM,IAAIxX,KAAJ,CACF,2CAAyC0kG,EAAOhiG,KAAhD,mBAAA,GACgBiiG,EAASjiG,KADzB
,MADE,CAAN;;KAhBJ,MAAkE;EAChE,QAAMkiG,IAAQF,EAAOj/F,WAAPi/F,CAAmB1/F,IAAjC;EAAA,QACM6/F,IAAQF,EAASl/F,WAATk/F,CAAqB3/F,IADnC,CAGA,IAAI4/F,MAAUC,CAAd,EACE,MAAM,IAAI7kG,KAAJ,CACF,0CAAwC4kG,CAAxC,mBAAA,GACgBC,CAFd,CAAN;EAiBJ,OAAIC,CAAJ,EACIC,CADJ,CAaA,IAVED,IADEJ,aAAkB75F,MAAlB65F,GACaA,EAAO59F,QAAP49F,EADbA,GAGaA,CAFfI,EAKAC,IADEJ,aAAoB95F,MAApB85F,GACeA,EAAS79F,QAAT69F,EADfA,GAGeA,CAPjBG,EAUEA,EAAajkG,MAAbikG,KAAwBC,EAAelkG,MAA3C,EACE,MAAM,IAAIb,KAAJ,CACF,2CAAyC8kG,EAAajkG,MAAtD,mBAAA,GACakkG,EAAelkG,MAD5B,kBAAA,GAEaikG,CAFb,kBAAA,GAGaC,CAHb,MADE,CAAN,CAMF,KAAK,IAAIvjG,IAAI,CAAb,EAAgBA,IAAIujG,EAAelkG,MAAnC,IAA6CW,CAA7C,EAAgD;EAC9C,QAAMrB,IAAI2kG,EAAatjG,CAAbsjG,CAAV;EAAA,QACMnkF,IAAIokF,EAAevjG,CAAfujG,CADV,CAGA,KAAKC,SAAS7kG,CAAT6kG,EAAYnjG,OAAO8e,CAAP9e,CAAZmjG,EAAuB/tB,CAAvB+tB,CAAL,EACE,MAAM,IAAIhlG,KAAJ,CACF,2BAAyBwB,CAAzB,SAAA,GAAiCrB,CAAjC,gBAAA,GAAgDqB,CAAhD,SAAA,GAAwDmf,CAAxD,kBAAA,GACamkF,CADb,kBAAA,GAEaC,CAFb,MADE,CAAN;;EAaN,6BAAA,CAAoC95F,CAApC,EAA2Dye,CAA3D;EACEze,MAAKjE,IAALiE,CAAU;EAAM,WAAAye,EAAKu7E,IAALv7E,EAAA;KAAhBze,EAA6B;EAAM,WAAAye,GAAA;KAAnCze;EAGF,2BAAA,CACIy5F,CADJ,EAEIC,CAFJ;EAGE,SAAOO,kBAAkBR,CAAlBQ,EAA0BP,CAA1BO,EAAoC,CAApCA,CAAP;EAGF,4BAAA,CAAmC/kG,CAAnC,EAA8CwgB,CAA9C,EAAyDs2D,CAAzD;EAIE,MAHe,QAAXA,CAAW,KACbA,IAAUrzD,IAAIxG,GAAJwG,CAAQ,cAARA,CADG,IAGVohF,SAAS7kG,CAAT6kG,EAAYrkF,CAAZqkF,EAAe/tB,CAAf+tB,CAAL,EACE,MAAM,IAAIhlG,KAAJ,CAAU,gCAA8BG,CAA9B,oBAAA,GAAiDwgB,CAA3D,CAAN;EAIJ,kBAAA,CAAkBxgB,CAAlB,EAA6BwgB,CAA7B,EAAwCs2D,CAAxC;EACE,YAAIhyE,MAAM9E,CAAN8E,MAAYA,MAAM0b,CAAN1b,QAGZA,MAAM9E,CAAN8E,KAAYA,MAAM0b,CAAN1b,CAAZA,IAAwBjE,KAAK8Q,GAAL9Q,CAASb,IAAIwgB,CAAb3f,IAAkBi2E,EAH9C;EASF,6BAAA,CACIytB,CADJ,EACwCS,CADxC,EACqDC,CADrD;EAEE,MAAIC,CAAJ,CAEEA,IADEX,aAAkB75F,MAAlB65F,GACWA,EAAO59F,QAAP49F,EADXA,GAGWA,CAFbW,CAIF,KAAK,IAAI7jG,IAAI,CAAb,EAAgBA,IAAI6jG,EAAWxkG,MAA/B,EAAuCW,GAAvC,EACE,IAAI6jG,EAAW7jG,CAAX6jG,IAAgBF,CAAhBE,IAAuBA,EAAW7jG,CAAX6jG,IAAgBD,CAA3C,EACE,MAAM,IAAIplG,KAAJ,CACF,wBAAsBqlG,EAAW7jG,CAAX6jG,CAAtB,WAAA,GAA4CF,CAA5C,aAAA,GAA0DC,CADxD,CAAN;EAMN,iCAAA,CACIV,CADJ,EACyBC,CADzB;EAIEW,SAAO,IAAI1gG,YAAJ,CAAiB8/F,CAAjB,CAAPY,EAAiCC,OAAjCD,CAAyC,IAAI1gG,YAAJ,CAAiB+/F,CAAjB,CAAzCW;;MC7JIE,UAAU;;;ECoBhB,YAAA;;EAsDA,UAtDwCluF,WAAAA,GAAAA,GActCmuF,WAAAA,SAAAA,GAAA,UAASjgG,CAAT,EAA0BkgG,CAA1B,EAA8Cp/E,CAA9C;uBAA0Bo/E,QAElB,IAAA9lF,+BAAA;EAAA,QAACpV,WAAD;EAAA,QAAQgV,WAAR,CAQN,OANA/Y,KAAKk/F,cAALl/F,CAAoB+Y,CAApB/Y,GAGiBmE,OAAO0O,IAAP1O,CAAY4U,CAAZ5U,EACR/D,OADQ+D,CACA,UAAAkT,CAAA;EAAW,aAAA0B,EAAM1B,CAAN0B,EAAe/F,OAAf+F,EAAA;OADX5U,CAHjBnE,EAMIi/F,IACKl7F,CADLk7F,IAGFl7F,EAAMiP,OAANjP,IACO,IAJLk7F,CAAJ;KAxBoCpuF,EA2CtCmuF,WAAAA,iBAAAA,GAAA,UAAiBjgG,CAAjB,EAAkC8gB,CAAlC;EAEE,WAAOs/E,cAAcpgG,CAAdogG,EAAiBt/E,CAAjBs/E,CAAP;KA7CoCtuF,GAsDxC;IAtDwCgsF;;ECatC,YAAA,CACcuC,CADd,EAC8CC,CAD9C,EAEc7uB,CAFd;uBAEcA,UAFd,QAGE//D,MAAAA,KAAAA,SAHF,QACc9P,cAAAA,GAAAy+F,CAAAz+F,EAAgCA,KAAAA,GAAA0+F,CAAhC1+F,EACAA,SAAAA,GAAA6vE,CADA7vE,EAJNA,kBAAAA,KAIMA,EAHNA,oBAAAA,KAGMA,EAIZA,EAAKwpB,CAALxpB,GAAS+c,KAAKmM,QAAQu1E,CAARv1E,CAALnM,CAJG/c,EAKZA,EAAK2+F,SAAL3+F,GAAiB+c,KAAKmM,OAAOw1E,CAAPx1E,CAALnM,CALL/c,EAMZA,EAAK4+F,WAAL5+F,GAAmB+c,KAAKmM,OAAO,IAAIw1E,CAAXx1E,CAALnM,CANP/c,EAQI,SAAZ6vE,CAAY,KACdA,IAAUrzD,IAAIxG,GAAJwG,CAAQ,SAARA,CADI,CARJxc,EAYZA,EAAK8vE,aAAL9vE,GAAqB+c,KAAKmM,OAAO2mD,CAAP3mD,CAALnM,CAZT/c;EAmFhB,UA9FuCkQ,WAAAA,GAAAA,GA0BrC2uF,WAAAA,eAAAA,GAAA,UAAeC,CAAf;EAAA,gBAAA;EAAA,sBACahxD;EACT,UAAM1qC,IAAQoZ,IAAIE,MAAJF,CAAWlG,mBAAXkG,CAA+BsxB,CAA/BtxB,CAAd,CACA,IAA2C,QAAvCuiF,EAAKC,gBAALD,CAAsBjxD,CAAtBixD,CAAJ,EAAiD;EAE/ChnF,aAAK;EACH/X,YAAKg/F,gBAALh/F,CAAsB8tC,CAAtB9t
C,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,EAHY,CAGZA,CADJpL;WADF+X;EAKF,WAA6C,QAAzCgnF,EAAKE,kBAALF,CAAwBjxD,CAAxBixD,CAAJ,EAAmD;EAEjDhnF,aAAK;EACH/X,YAAKi/F,kBAALj/F,CAAwB8tC,CAAxB9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,EAHY,CAGZA,CADJpL;WADF+X;EAMF,WAAM/F,IAAW8sF,EAAkBhxD,CAAlBgxD,CAAjB;EAAA,UACMI,IAAkBH,EAAKC,gBAALD,CAAsBjxD,CAAtBixD,CADxB;EAAA,UAEMI,IAAoBJ,EAAKE,kBAALF,CAAwBjxD,CAAxBixD,CAF1B,CAIAhnF,KAAK;EACH,YAAMqnF,IACFp/F,EAAK2+F,SAAL3+F,CAAeiI,GAAfjI,CAAmBk/F,CAAnBl/F,EACK0H,GADL1H,CACSA,EAAK4+F,WAAL5+F,CAAiBiI,GAAjBjI,CAAqBgS,EAASxH,MAATwH,EAArBhS,CADTA,CADJ;EAAA,YAIM4hB,IAAUu9E,EAAkBz3F,GAAlBy3F,CAAsBn/F,EAAK8vE,aAA3BqvB,EACKpjG,IADLojG,GAEKh3F,GAFLg3F,CAESD,EAAgBx3F,GAAhBw3F,CAAoBl/F,EAAK8vE,aAAzBovB,EAAwCnjG,IAAxCmjG,EAFTC,EAGKl3F,GAHLk3F,CAGSntF,CAHTmtF,CAJhB;EAAA,YASME,IACFr/F,EAAK2+F,SAAL3+F,CAAeiI,GAAfjI,CAAmBm/F,CAAnBn/F,EACK0H,GADL1H,CACSA,EAAK4+F,WAAL5+F,CAAiBiI,GAAjBjI,CAAqB4hB,EAAQpX,MAARoX,EAArB5hB,CADTA,CAVJ,CAaAA,EAAKg/F,gBAALh/F,CAAsB8tC,CAAtB9tC,EAAoCqQ,MAApCrQ,CAA2Co/F,CAA3Cp/F,GACAA,EAAKi/F,kBAALj/F,CAAwB8tC,CAAxB9tC,EAAsCqQ,MAAtCrQ,CAA6Cq/F,CAA7Cr/F,CADAA,CAGA,IAAMmQ,IAAWnQ,EAAKwpB,CAALxpB,CAAOiI,GAAPjI,CAAW4hB,CAAX5hB,EAAoB0H,GAApB1H,CAAwBoD,CAAxBpD,CAAjB,CACAoD,EAAMiN,MAANjN,CAAa+M,CAAb/M;SAlBF2U;OAtBJ;EAAA,gBAAA,CACE,KAAK,IAAM+1B,CAAX,IAA2BgxD,CAA3B,IAAWhxD;KA3BwB59B,EAuErC2uF,WAAAA,QAAAA,GAAA;EAAA,gBAAA,CACEx/F,KAAKmqB,CAALnqB,CAAOgT,OAAPhT,IACAA,KAAKywE,aAALzwE,CAAmBgT,OAAnBhT,EADAA,EAEAA,KAAKs/F,SAALt/F,CAAegT,OAAfhT,EAFAA,EAGAA,KAAKu/F,WAALv/F,CAAiBgT,OAAjBhT,EAHAA,EAI+B,QAA3BA,KAAK4/F,kBAAsB,KAC7Bz7F,OAAO0O,IAAP1O,CAAYnE,KAAK4/F,kBAAjBz7F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKi/F,kBAALj/F,CAAwBpC,CAAxBoC,EAA8BqS,OAA9BrS,EAAA;OADrBwD,GAEAA,OAAO0O,IAAP1O,CAAYnE,KAAK2/F,gBAAjBx7F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKg/F,gBAALh/F,CAAsBpC,CAAtBoC,EAA4BqS,OAA5BrS,EAAA;OADrBwD,CAH6B,CAJ/BnE;KAxEmC6Q,EAmFrC2uF,WAAAA,UAAAA,GAAA;EACE,aACEJ,cAAcp/F,KAAKo/F,cACnBC,KAAKr/F,KAAKq/F,KACV7uB,SAASxwE,KAAKwwE,SAHhB;KApFmC3/D,EA0F9B2uF,YAAAA,GAAP,UACIzC,CADJ,EACqCC,CADrC;EAEE,WAAO,IAAID,CAAJ,CAAQC,EAAOoC,YAAf,EAA6BpC,EAAOqC,GAApC,EAAyCrC,EAAOxsB,OAAhD,CAAP;KA5FmC3/D,EAC9B2uF,WAAAA,GAAY,mBADkB3uF,GA8FvC;IA9FuCmuF,yBA+FzBQ;ECxFZ,YAAA,CACcJ,CADd,EAC4Ca,CAD5C;uBAC4CA,QAD5C,QAEExvF,MAAAA,KAAAA,SAFF,QACc9P,cAAAA,GAAAy+F,CAAAz+F,EAA8BA,yBAAAA,GAAAs/F,CAA9Bt/F,EAHNA,kBAAAA,KAGMA,EAEZA,EAAKwpB,CAALxpB,GAAS+c,KAAKmM,QAAQu1E,CAARv1E,CAALnM,CAFG/c,EAIZA,EAAK6vE,OAAL7vE,GAAe+c,KAAKmM,OAAO1M,IAAIxG,GAAJwG,CAAQ,SAARA,CAAP0M,CAALnM,CAJH/c;EAqDhB,UA7DsCkQ,WAAAA,GAAAA,GAepCqvF,WAAAA,eAAAA,GAAA,UAAeT,CAAf;EAAA,gBAAA;EAAA,sBACahxD;EACT,UAAM1qC,IAAQoZ,IAAIE,MAAJF,CAAWlG,mBAAXkG,CAA+BsxB,CAA/BtxB,CAAd,CACA,IAA2C,QAAvCuiF,EAAKC,gBAALD,CAAsBjxD,CAAtBixD,CAAJ,EAAiD;EAE/ChnF,aAAK;EACH/X,YAAKg/F,gBAALh/F,CAAsB8tC,CAAtB9tC,IACIwB,KAAK4B,EAAM9H,KAAXkG,EAAkBxB,EAAKs/F,uBAAvB99F,EACK8M,QADL9M,EAHY,CAGZA,CADJxB;WADF+X;EAOF,WAAM/F,IAAW8sF,EAAkBhxD,CAAlBgxD,CAAjB;EAAA,UACMI,IAAkBH,EAAKC,gBAALD,CAAsBjxD,CAAtBixD,CADxB,CAGAhnF,KAAK;EACH,YAAMqnF,IAAqBF,EAAgBx3F,GAAhBw3F,CAAoBltF,EAASxH,MAATwH,EAApBktF,CAA3B,CACAl/F,EAAKg/F,gBAALh/F,CAAsB8tC,CAAtB9tC,EAAoCqQ,MAApCrQ,CAA2Co/F,CAA3Cp/F,EAEA,IAAMmQ,IACFnQ,EAAKwpB,CAALxpB,CACKiI,GADLjI,CACSgS,EAAS7J,GAAT6J,CAAaotF,EAAmB13F,GAAnB03F,CAAuBp/F,EAAK6vE,OAA5BuvB,EAAqCrjG,IAArCqjG,EAAbptF,CADThS,EAEK0H,GAFL1H,CAESoD,CAFTpD,CADJ,CAIAoD,EAAMiN,MAANjN,CAAa+M,CAAb/M;SARF2U;OAfJ;EAAA,gBAAA,CACE,KAAK,IAAM+1B,CAAX,IAA2BgxD,CAA3B,IAAWhxD;KAhBuB59B,EA2CpCqvF,WAAAA,QAAAA,GAAA;EAAA,gBAAA,CACElgG,KAAKwwE,OAALxwE,CAAagT,OAAbhT,IACAA,KAAKmqB,CAALnqB,CAAOgT,OAAPhT,EA
DAA,EAE6B,QAAzBA,KAAK2/F,gBAAoB,IAC3Bx7F,OAAO0O,IAAP1O,CAAYnE,KAAK2/F,gBAAjBx7F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKg/F,gBAALh/F,CAAsBpC,CAAtBoC,EAA4BqS,OAA5BrS,EAAA;OADrBwD,CAHFnE;KA5CkC6Q,EAmDpCqvF,WAAAA,UAAAA,GAAA;EACE,aACEd,cAAcp/F,KAAKo/F,cACnBa,yBAAyBjgG,KAAKigG,yBAFhC;KApDkCpvF,EAyD7BqvF,YAAAA,GAAP,UACInD,CADJ,EACqCC,CADrC;EAEE,WAAO,IAAID,CAAJ,CAAQC,EAAOoC,YAAf,EAA6BpC,EAAOiD,uBAApC,CAAP;KA3DkCpvF,EAC7BqvF,WAAAA,GAAY,kBADiBrvF,GA6DtC;IA7DsCmuF,yBA8DxBkB;EChDZ,YAAA,CACcd,CADd,EAC8Ce,CAD9C,EAEcC,CAFd,EAEuC5vB,CAFvC;uBAEuCA,UAFvC,QAGE//D,MAAAA,KAAAA,SAHF,QACc9P,cAAAA,GAAAy+F,CAAAz+F,EAAgCA,OAAAA,GAAAw/F,CAAhCx/F,EACAA,OAAAA,GAAAy/F,CADAz/F,EACyBA,SAAAA,GAAA6vE,CADzB7vE,EAJNA,wBAAAA,KAIMA,EAHNA,yBAAAA,KAGMA,EAGZA,EAAKwpB,CAALxpB,GAAS+c,KAAKmM,QAAQu1E,CAARv1E,CAALnM,CAHG/c,EAKZA,EAAK0/F,WAAL1/F,GAAmB+c,KAAKmM,OAAOs2E,CAAPt2E,CAALnM,CALP/c,EAMZA,EAAK2/F,WAAL3/F,GAAmB+c,KAAKmM,OAAOu2E,CAAPv2E,CAALnM,CANP/c,EAOZ+X,KAAK;EAEH/X,QAAK4/F,QAAL5/F,GAAgBkpB,OAAOs2E,CAAPt2E,EAAc5a,QAAd4a,EAAhBlpB,EACAA,EAAK6/F,QAAL7/F,GAAgBkpB,OAAOu2E,CAAPv2E,EAAc5a,QAAd4a,EADhBlpB;OAFF+X,CAPY/X,EAYZA,EAAK8/F,aAAL9/F,GAAqB+c,KAAKmM,OAAO,IAAIs2E,CAAXt2E,CAALnM,CAZT/c,EAaZA,EAAK+/F,aAAL//F,GAAqB+c,KAAKmM,OAAO,IAAIu2E,CAAXv2E,CAALnM,CAbT/c,EAcZA,EAAKusE,GAALvsE,GAAW+c,KAAKmM,OAAO,CAAPA,CAALnM,CAdC/c,EAgBI,SAAZ6vE,CAAY,KACdA,IAAUrzD,IAAIxG,GAAJwG,CAAQ,SAARA,CADI,CAhBJxc,EAoBZA,EAAKggG,SAALhgG,GAAiB+c,KAAKmM,OAAO2mD,CAAP3mD,CAALnM,CApBL/c;EAwGhB,UAxHmCkQ,WAAAA,GAAAA,GAuCjC+vF,WAAAA,eAAAA,GAAA,UAAenB,CAAf;EAAA,gBAAA,CACE/mF,KAAK;EACH,UAAMmoF,IAAmBlgG,EAAKusE,GAALvsE,CAAS6H,GAAT7H,CAAaA,EAAK4/F,QAAlB5/F,CAAzB;EAAA,UACMmgG,IAAmBngG,EAAKusE,GAALvsE,CAAS6H,GAAT7H,CAAaA,EAAK6/F,QAAlB7/F,CADzB,CAGA,KAAK,IAAM8tC,CAAX,IAA2BgxD,CAA3B,EAA8C;EAC5C,YAAM17F,IAAQoZ,IAAIE,MAAJF,CAAWlG,mBAAXkG,CAA+BsxB,CAA/BtxB,CAAd,CACA,IAAiD,QAA7Cxc,EAAKogG,sBAALpgG,CAA4B8tC,CAA5B9tC,CAAJ,EAAuD;EACrD,cAAMoO,KAAY,CAAlB,CACApO,EAAKogG,sBAALpgG,CAA4B8tC,CAA5B9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,CAA0BgD,CAA1BhD,CADJpL;EAGF,aAAkD,QAA9CA,EAAKqgG,uBAALrgG,CAA6B8tC,CAA7B9tC,CAAJ,EAAwD;EAChDoO,eAAY,CAAZA,CACNpO,EAAKqgG,uBAALrgG,CAA6B8tC,CAA7B9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,CAA0BgD,CAA1BhD,CADJpL;EAIF,aAAMgS,IAAW8sF,EAAkBhxD,CAAlBgxD,CAAjB;EAAA,YACMwB,IAActgG,EAAKogG,sBAALpgG,CAA4B8tC,CAA5B9tC,CADpB;EAAA,YAEMugG,IAAevgG,EAAKqgG,uBAALrgG,CAA6B8tC,CAA7B9tC,CAFrB;EAAA,YAIMwgG,IAAiBxgG,EAAK0/F,WAAL1/F,CAAiBiI,GAAjBjI,CAAqBsgG,CAArBtgG,EACK0H,GADL1H,CACSA,EAAK8/F,aAAL9/F,CAAmBiI,GAAnBjI,CAAuBgS,CAAvBhS,CADTA,CAJvB;EAAA,YAMMygG,IACFzgG,EAAK2/F,WAAL3/F,CAAiBiI,GAAjBjI,CAAqBugG,CAArBvgG,EACK0H,GADL1H,CACSA,EAAK+/F,aAAL//F,CAAmBiI,GAAnBjI,CAAuBgS,EAASxH,MAATwH,EAAvBhS,CADTA,CAPJ;EAAA,YAUM0gG,IAA2BF,EAAer4F,GAAfq4F,CAAmBN,CAAnBM,CAVjC;EAAA,YAWMG,IAA4BF,EAAgBt4F,GAAhBs4F,CAAoBN,CAApBM,CAXlC,CAaAzgG,EAAKogG,sBAALpgG,CAA4B8tC,CAA5B9tC,EAA0CqQ,MAA1CrQ,CAAiDwgG,CAAjDxgG,GACAA,EAAKqgG,uBAALrgG,CAA6B8tC,CAA7B9tC,EAA2CqQ,MAA3CrQ,CAAkDygG,CAAlDzgG,CADAA,CAGA,IAAMmQ,IACFnQ,EAAKwpB,CAALxpB,CACKiI,GADLjI,CACS0gG,EAAyBv4F,GAAzBu4F,CACD1gG,EAAKggG,SAALhgG,CAAe0H,GAAf1H,CAAmB2gG,EAA0B5kG,IAA1B4kG,EAAnB3gG,CADC0gG,CADT1gG,EAGK0H,GAHL1H,CAGSoD,CAHTpD,CADJ,CAKAoD,EAAMiN,MAANjN,CAAa+M,CAAb/M;EAGFpD,SAAK4/F,QAAL5/F,CAAcqQ,MAAdrQ,CAAqBA,EAAK4/F,QAAL5/F,CAAciI,GAAdjI,CAAkBA,EAAK0/F,WAAvB1/F,CAArBA,GACAA,EAAK6/F,QAAL7/F,CAAcqQ,MAAdrQ,CAAqBA,EAAK6/F,QAAL7/F,CAAciI,GAAdjI,CAAkBA,EAAK2/F,WAAvB3/F,CAArBA,CADAA;OAzCF+X;KAxC+B7H,EAsFjC+vF,WAAAA,QAAAA,GAAA;EAAA,gBAAA,CACE5gG,KAAKmqB,CAALnqB,CAAOgT,OAAPhT,IACAA,KAAK2gG,SAAL3gG,CAAegT,OAAfhT,EADAA,EAEAA,KAAKqgG,WAALrgG,CAAiBgT,OA
AjBhT,EAFAA,EAGAA,KAAKsgG,WAALtgG,CAAiBgT,OAAjBhT,EAHAA,EAIAA,KAAKugG,QAALvgG,CAAcgT,OAAdhT,EAJAA,EAKAA,KAAKwgG,QAALxgG,CAAcgT,OAAdhT,EALAA,EAMAA,KAAKygG,aAALzgG,CAAmBgT,OAAnBhT,EANAA,EAOAA,KAAK0gG,aAAL1gG,CAAmBgT,OAAnBhT,EAPAA,EAQAA,KAAKktE,GAALltE,CAASgT,OAAThT,EARAA,EAUmC,QAA/BA,KAAK+gG,sBAA0B,IACjC58F,OAAO0O,IAAP1O,CAAYnE,KAAK+gG,sBAAjB58F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKogG,sBAALpgG,CAA4BpC,CAA5BoC,EAAkCqS,OAAlCrS,EAAA;OADrBwD,CAXFnE,EAeoC,QAAhCA,KAAKghG,uBAA2B,IAClC78F,OAAO0O,IAAP1O,CAAYnE,KAAKghG,uBAAjB78F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKqgG,uBAALrgG,CAA6BpC,CAA7BoC,EAAmCqS,OAAnCrS,EAAA;OADrBwD,CAhBFnE;KAvF+B6Q,EA2GjC+vF,WAAAA,UAAAA,GAAA;EACE,aACExB,cAAcp/F,KAAKo/F,cACnBe,OAAOngG,KAAKmgG,OACZC,OAAOpgG,KAAKogG,OACZ5vB,SAASxwE,KAAKwwE,SAJhB;KA5G+B3/D,EAmH1B+vF,YAAAA,GAAP,UACI7D,CADJ,EACqCC,CADrC;EAEE,WAAO,IAAID,CAAJ,CACHC,EAAOoC,YADJ,EACkBpC,EAAOmD,KADzB,EACgCnD,EAAOoD,KADvC,EAC8CpD,EAAOxsB,OADrD,CAAP;KArH+B3/D,EAC1B+vF,WAAAA,GAAY,eADc/vF,GAwHnC;IAxHmCmuF,yBAyHrB4B;EC1GZ,YAAA,CACcxB,CADd,EAC8Ce,CAD9C,EAEcC,CAFd,EAEuC5vB,CAFvC,EAGc1D,CAHd;uBAEuC0D,4BACzB1D,OAHd,QAIEr8D,MAAAA,KAAAA,SAJF,QACc9P,cAAAA,GAAAy+F,CAAAz+F,EAAgCA,OAAAA,GAAAw/F,CAAhCx/F,EACAA,OAAAA,GAAAy/F,CADAz/F,EACyBA,SAAAA,GAAA6vE,CADzB7vE,EAEAA,OAAAA,GAAAmsE,CAFAnsE,EAJNA,wBAAAA,KAIMA,EAHNA,4BAAAA,KAGMA,EAIZA,EAAKwpB,CAALxpB,GAAS+c,KAAKmM,QAAQu1E,CAARv1E,CAALnM,CAJG/c,EAOZA,EAAK0/F,WAAL1/F,GAAmB+c,KAAKmM,OAAOs2E,CAAPt2E,CAALnM,CAPP/c,EAQZA,EAAK2/F,WAAL3/F,GAAmB+c,KAAKmM,OAAOu2E,CAAPv2E,CAALnM,CARP/c,EAUZA,EAAK4gG,WAAL5gG,GAAmB+c,KAAKmM,OAAOijD,CAAPjjD,CAALnM,CAVP/c,EAYZ+X,KAAK;EACH/X,QAAK6gG,SAAL7gG,GAAiBkpB,OAAO,CAAPA,EAAU5a,QAAV4a,EAAjBlpB,EACAA,EAAK4/F,QAAL5/F,GAAgBkpB,OAAOs2E,CAAPt2E,EAAc5a,QAAd4a,EADhBlpB;OADF+X,CAZY/X,EAiBZA,EAAK8/F,aAAL9/F,GAAqB+c,KAAKmM,OAAO,IAAIs2E,CAAXt2E,CAALnM,CAjBT/c,EAkBZA,EAAKusE,GAALvsE,GAAW+c,KAAKmM,OAAO,CAAPA,CAALnM,CAlBC/c,EAoBI,SAAZ6vE,CAAY,KACdA,IAAUrzD,IAAIxG,GAAJwG,CAAQ,SAARA,CADI,CApBJxc,EAwBZA,EAAKggG,SAALhgG,GAAiB+c,KAAKmM,OAAO2mD,CAAP3mD,CAALnM,CAxBL/c;EAgHhB,UAhIqCkQ,WAAAA,GAAAA,GA2CnC4wF,WAAAA,eAAAA,GAAA,UAAehC,CAAf;EAAA,gBAAA,CACE/mF,KAAK;EACH,UAAMmoF,IAAmBlgG,EAAKusE,GAALvsE,CAAS6H,GAAT7H,CAAaA,EAAK4/F,QAAlB5/F,CAAzB;EAAA,UACM+gG,IAAK/gG,EAAKwpB,CAALxpB,CAAOmI,GAAPnI,CAAWA,EAAKusE,GAALvsE,CAAS0H,GAAT1H,CAAaA,EAAK4gG,WAAL5gG,CAAiBiI,GAAjBjI,CAAqBA,EAAK6gG,SAA1B7gG,CAAbA,CAAXA,CADX,CAGA,KAAK,IAAM8tC,CAAX,IAA2BgxD,CAA3B,EAA8C;EAC5C,YAAM17F,IAAQoZ,IAAIE,MAAJF,CAAWlG,mBAAXkG,CAA+BsxB,CAA/BtxB,CAAd,CACA,IAAiD,QAA7Cxc,EAAKogG,sBAALpgG,CAA4B8tC,CAA5B9tC,CAAJ,EAAuD;EACrD,cAAMoO,KAAY,CAAlB,CACApO,EAAKogG,sBAALpgG,CAA4B8tC,CAA5B9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,CAA0BgD,CAA1BhD,CADJpL;EAGF,aAAqD,QAAjDA,EAAKghG,0BAALhhG,CAAgC8tC,CAAhC9tC,CAAJ,EAA2D;EACnDoO,eAAY,CAAZA,CACNpO,EAAKghG,0BAALhhG,CAAgC8tC,CAAhC9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,CAA0BgD,CAA1BhD,CADJpL;EAIF,aAAMgS,IAAW8sF,EAAkBhxD,CAAlBgxD,CAAjB;EAAA,YACMwB,IAActgG,EAAKogG,sBAALpgG,CAA4B8tC,CAA5B9tC,CADpB;EAAA,YAEMihG,IAAkBjhG,EAAKghG,0BAALhhG,CAAgC8tC,CAAhC9tC,CAFxB;EAAA,YAIMwgG,IAAiBxgG,EAAK0/F,WAAL1/F,CAAiBiI,GAAjBjI,CAAqBsgG,CAArBtgG,EACK0H,GADL1H,CACSA,EAAK8/F,aAAL9/F,CAAmBiI,GAAnBjI,CAAuBgS,CAAvBhS,CADTA,CAJvB;EAAA,YAOMkhG,IAAMlhG,EAAK2/F,WAAL3/F,CAAiBiI,GAAjBjI,CAAqBihG,CAArBjhG,CAPZ;EAAA,YAQMmhG,IAAMnvF,EAAStH,GAATsH,EARZ;EAAA,YAUMovF,IAAqBF,EAAI14F,OAAJ04F,CAAYC,CAAZD,CAV3B,CAYAlhG,EAAKogG,sBAALpgG,CAA4B8tC,CAA5B9tC,EAA0CqQ,MAA1CrQ,CAAiDwgG,CAAjDxgG,GACAA,EAAKghG,0BAALhhG,CAAgC8tC,CAAhC9tC,EAA8CqQ,MAA9CrQ,CACIohG,CADJphG,CADAA,CAIA,IAAMmQ,IACF4wF,EAAG54F,GAAH44F,CAAOb,CAAPa,EACK94
F,GADL84F,CACSP,EAAer4F,GAAfq4F,CAAmBxgG,EAAKggG,SAALhgG,CAAe0H,GAAf1H,CAAmBohG,CAAnBphG,CAAnBwgG,CADTO,EAEKr5F,GAFLq5F,CAES39F,CAFT29F,CADJ,CAKA39F,EAAMiN,MAANjN,CAAa+M,CAAb/M;EAGFpD,SAAK6gG,SAAL7gG,CAAeqQ,MAAfrQ,CAAsBA,EAAK6gG,SAAL7gG,CAAe0H,GAAf1H,CAAmBA,EAAKusE,GAAxBvsE,CAAtBA,GACAA,EAAK4/F,QAAL5/F,CAAcqQ,MAAdrQ,CAAqBA,EAAK4/F,QAAL5/F,CAAciI,GAAdjI,CAAkBA,EAAK0/F,WAAvB1/F,CAArBA,CADAA;OAzCF+X;KA5CiC7H,EA0FnC4wF,WAAAA,QAAAA,GAAA;EAAA,gBAAA,CACEzhG,KAAKmqB,CAALnqB,CAAOgT,OAAPhT,IACAA,KAAK2gG,SAAL3gG,CAAegT,OAAfhT,EADAA,EAEAA,KAAKugG,QAALvgG,CAAcgT,OAAdhT,EAFAA,EAGAA,KAAKqgG,WAALrgG,CAAiBgT,OAAjBhT,EAHAA,EAIAA,KAAKsgG,WAALtgG,CAAiBgT,OAAjBhT,EAJAA,EAKAA,KAAKygG,aAALzgG,CAAmBgT,OAAnBhT,EALAA,EAOAA,KAAKuhG,WAALvhG,CAAiBgT,OAAjBhT,EAPAA,EAQAA,KAAKwhG,SAALxhG,CAAegT,OAAfhT,EARAA,EAUAA,KAAKktE,GAALltE,CAASgT,OAAThT,EAVAA,EAYmC,QAA/BA,KAAK+gG,sBAA0B,IACjC58F,OAAO0O,IAAP1O,CAAYnE,KAAK+gG,sBAAjB58F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKogG,sBAALpgG,CAA4BpC,CAA5BoC,EAAkCqS,OAAlCrS,EAAA;OADrBwD,CAbFnE,EAiBuC,QAAnCA,KAAK2hG,0BAA8B,IACrCx9F,OAAO0O,IAAP1O,CAAYnE,KAAK2hG,0BAAjBx9F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKghG,0BAALhhG,CAAgCpC,CAAhCoC,EAAsCqS,OAAtCrS,EAAA;OADrBwD,CAlBFnE;KA3FiC6Q,EAiHnC4wF,WAAAA,UAAAA,GAAA;EACE,aACErC,cAAcp/F,KAAKo/F,cACnBe,OAAOngG,KAAKmgG,OACZC,OAAOpgG,KAAKogG,OACZ5vB,SAASxwE,KAAKwwE,SACd1D,OAAO9sE,KAAK8sE,OALd;KAlHiCj8D,EA0H5B4wF,YAAAA,GAAP,UACI1E,CADJ,EACqCC,CADrC;EAEE,WAAO,IAAID,CAAJ,CACHC,EAAOoC,YADJ,EACkBpC,EAAOmD,KADzB,EACgCnD,EAAOoD,KADvC,EAC8CpD,EAAOxsB,OADrD,EAEHwsB,EAAOlwB,KAFJ,CAAP;KA5HiCj8D,EAC5B4wF,WAAAA,GAAY,iBADgB5wF,GAgIrC;IAhIqCmuF,yBAiIvByC;EC5HZ,YAAA,CAAsBrC,CAAtB;EAAA,YACE3uF,MAAAA,KAAAA,SADF,QAAsB9P,cAAAA,GAAAy+F,CAAAz+F,EAEpBA,EAAKqhG,eAALrhG,CAAqBy+F,CAArBz+F,CAFoBA;EAwCxB,UA5CkCkQ,WAAAA,GAAAA,GAShCoxF,WAAAA,eAAAA,GAAA,UAAexC,CAAf;EAAA,gBAAA,CACmBt7F,OAAO0O,IAAP1O,CAAYs7F,CAAZt7F,EACR/D,OADQ+D,CACA,UAAAkT,CAAA;EACf,UAAM1E,IAAW8sF,EAAkBpoF,CAAlBooF,CAAjB;EAAA,UACM17F,IAAQoZ,IAAIE,MAAJF,CAAWlG,mBAAXkG,CAA+B9F,CAA/B8F,CADd,CAGAzE,KAAK;EACH,YAAM5H,IAAWnQ,EAAKwpB,CAALxpB,CAAOiI,GAAPjI,CAAWgS,CAAXhS,EAAqB0H,GAArB1H,CAAyBoD,CAAzBpD,CAAjB,CACAoD,EAAMiN,MAANjN,CAAa+M,CAAb/M;SAFF2U;OALevU;KAVa0M,EAyBhCoxF,WAAAA,gBAAAA,GAAA,UAAgB7C,CAAhB;EACEp/F,SAAKo/F,YAALp/F,GAAoBo/F,CAApBp/F,EACc,QAAVA,KAAKmqB,CAAK,IACZnqB,KAAKmqB,CAALnqB,CAAOgT,OAAPhT,EAFFA,EAIAA,KAAKmqB,CAALnqB,GAAS0d,KAAKmM,QAAQu1E,CAARv1E,CAALnM,CAJT1d;KA1B8B6Q,EAiChCoxF,WAAAA,QAAAA,GAAA;EACEjiG,SAAKmqB,CAALnqB,CAAOgT,OAAPhT;KAlC8B6Q,EAqChCoxF,WAAAA,UAAAA,GAAA;EACE,aAAQ7C,cAAcp/F,KAAKo/F,cAA3B;KAtC8BvuF,EAwCzBoxF,YAAAA,GAAP,UACIlF,CADJ,EACqCC,CADrC;EAEE,WAAO,IAAID,CAAJ,CAAQC,EAAOoC,YAAf,CAAP;KA1C8BvuF,EACzBoxF,WAAAA,GAAY,cADapxF,GA4ClC;IA5CkCmuF,yBA6CpBiD;ECxCZ,YAAA,CACc7C,CADd,EAC4C8C,CAD5C,EAEYC,CAFZ;uBAEYA,QAFZ,QAGE1xF,MAAAA,KAAAA,EAAM2uF,CAAN3uF,SAHF,QACc9P,cAAAA,GAAAy+F,CAAAz+F,EAA8BA,UAAAA,GAAAuhG,CAA9BvhG,EACFA,aAAAA,GAAAwhG,CADExhG,EAGZA,EAAK+yE,CAAL/yE,GAASkpB,OAAOlpB,EAAKuhG,QAAZr4E,CAHGlpB,EAIZA,EAAKyhG,aAALzhG,KAJYA;EAkEhB,UAxEuCkQ,WAAAA,GAAAA,GAarCwxF,WAAAA,eAAAA,GAAA,UAAe5C,CAAf;EAAA,gBAAA;EAAA,sBACahxD;EACT,UAAM1qC,IAAQoZ,IAAIE,MAAJF,CAAWlG,mBAAXkG,CAA+BsxB,CAA/BtxB,CAAd,CACA,IAAwC,QAApCuiF,EAAK0C,aAAL1C,CAAmBjxD,CAAnBixD,CAAJ,EAA8C;EAE5ChnF,aAAK;EACH/X,YAAKyhG,aAALzhG,CAAmB8tC,CAAnB9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,EAHY,CAGZA,CADJpL;WADF+X;EAMF,WAAM4pF,IAAe5C,EAAK0C,aAAL1C,CAAmBjxD,CAAnBixD,CAArB;EAAA,UACM/sF,IAAW8sF,EAAkBhxD,CAAlBgxD,CADjB,CAGA/mF,KAAK;EACH,YAAI5H,CAAJ;EAAA,YACMyxF,IAAkB5hG,EAAK+yE,CAAL/yE,CAAOiI,GAAPjI,CAAW2hG,CAAX3hG,EAAyB0H,GAAzB1H,CAA6BgS,CAA
7BhS,CADxB,CAGEmQ,IADEnQ,EAAKwhG,WAALxhG,GAEEA,EAAKwpB,CAALxpB,CAAOiI,GAAPjI,CAAWgS,EAAStK,GAATsK,CAAa4vF,EAAgB35F,GAAhB25F,CAAoB5hG,EAAK+yE,CAAzB6uB,CAAb5vF,CAAXhS,EAAsD0H,GAAtD1H,CAA0DoD,CAA1DpD,CAFFA,GAISA,EAAKwpB,CAALxpB,CAAOiI,GAAPjI,CAAW4hG,CAAX5hG,EAA4B0H,GAA5B1H,CAAgCoD,CAAhCpD,CAHXmQ,EAKFnQ,EAAKyhG,aAALzhG,CAAmB8tC,CAAnB9tC,EAAiCqQ,MAAjCrQ,CAAwC4hG,CAAxC5hG,CALEmQ,EAMF/M,EAAMiN,MAANjN,CAAa+M,CAAb/M,CANE+M;SAJJ4H;OAdJ;EAAA,gBAAA,CACE,KAAK,IAAM+1B,CAAX,IAA2BgxD,CAA3B,IAAWhxD;KAdwB59B,EA0CrCwxF,WAAAA,QAAAA,GAAA;EAGE,QAFA5xF,WAAAA,CAAMuC,OAANvC,KAAAA,KAAAA,GACAzQ,KAAK0zE,CAAL1zE,CAAOgT,OAAPhT,EADAyQ,EAE0B,QAAtBzQ,KAAKoiG,aAAT,EACE,KAAK,IAAM3zD,CAAX,IAA2BzuC,KAAKoiG,aAAhC,EACEpiG,KAAKoiG,aAALpiG,CAAmByuC,CAAnBzuC,EAAiCgT,OAAjChT;KA/C+B6Q,EAyDrCwxF,WAAAA,YAAAA,GAAA,UAAYH,CAAZ;EACEliG,SAAKkiG,QAALliG,GAAgBkiG,CAAhBliG;KA1DmC6Q,EA6DrCwxF,WAAAA,UAAAA,GAAA;EACE,aACEjD,cAAcp/F,KAAKo/F,cACnB8C,UAAUliG,KAAKkiG,UACfC,aAAaniG,KAAKmiG,aAHpB;KA9DmCtxF,EAoE9BwxF,YAAAA,GAAP,UACItF,CADJ,EACqCC,CADrC;EAEE,WAAO,IAAID,CAAJ,CAAQC,EAAOoC,YAAf,EAA6BpC,EAAOkF,QAApC,EAA8ClF,EAAOmF,WAArD,CAAP;KAtEmCtxF,EAC9BwxF,WAAAA,GAAY,mBADkBxxF,GAwEvC;IAxEuCoxF,4BAyEzBI;EC5DZ,YAAA,CACcjD,CADd,EAC8CtyB,CAD9C,EAEco1B,CAFd,EAEwC1xB,CAFxC,EAGIgyB,CAHJ;uBAC8C11B,0BAChCo1B,yBAA0B1xB,4BACpCgyB,QAHJ,QAIE/xF,MAAAA,KAAAA,SAJF,QACc9P,cAAAA,GAAAy+F,CAAAz+F,EAAgCA,OAAAA,GAAAmsE,CAAhCnsE,EACAA,UAAAA,GAAAuhG,CADAvhG,EAC0BA,SAAAA,GAAA6vE,CAD1B7vE,EALNA,wBAAAA,KAKMA,EAJNA,sBAAAA,KAIMA,EAHNA,oBAAAA,KAGMA,EAKZA,EAAKwpB,CAALxpB,GAAS+c,KAAKmM,OAAOu1E,CAAPv1E,CAALnM,CALG/c,EAMZA,EAAK4gG,WAAL5gG,GAAmB+c,KAAKmM,OAAOijD,CAAPjjD,CAALnM,CANP/c,EAOZA,EAAK8hG,cAAL9hG,GAAsB+c,KAAKmM,OAAOq4E,CAAPr4E,CAALnM,CAPV/c,EAQZA,EAAKwsE,aAALxsE,GAAqB+c,KAAKmM,OAAO,IAAIijD,CAAXjjD,CAALnM,CART/c,EASZA,EAAK6hG,QAAL7hG,GAAgB6hG,CATJ7hG,EAWI,SAAZ6vE,CAAY,KACdA,IAAUrzD,IAAIxG,GAAJwG,CAAQ,SAARA,CADI,CAXJxc,EAeZA,EAAK8vE,aAAL9vE,GAAqB+c,KAAKmM,OAAO2mD,CAAP3mD,CAALnM,CAfT/c;EAoIhB,UAlJsCkQ,WAAAA,GAAAA,GAgCpC6xF,WAAAA,eAAAA,GAAA,UAAejD,CAAf;EAAA,gBAAA;EAAA,sBACahxD;EACT,UAAM1qC,IAAQoZ,IAAIE,MAAJF,CAAWlG,mBAAXkG,CAA+BsxB,CAA/BtxB,CAAd,CACA,IAAiD,QAA7CuiF,EAAKiD,sBAALjD,CAA4BjxD,CAA5BixD,CAAJ,EAAuD;EAErDhnF,aAAK;EACH/X,YAAKgiG,sBAALhiG,CAA4B8tC,CAA5B9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,EAHY,CAGZA,CADJpL;WADF+X;EAKF,WAA+C,QAA3CgnF,EAAKkD,oBAALlD,CAA0BjxD,CAA1BixD,CAA2C,IAAQA,EAAK8C,QAA5D,EAAsE;EAEpE9pF,aAAK;EACH/X,YAAKiiG,oBAALjiG,CAA0B8tC,CAA1B9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,EAHY,CAGZA,CADJpL;WADF+X;EAKF,WAA6C,QAAzCgnF,EAAKmD,kBAALnD,CAAwBjxD,CAAxBixD,CAAJ,EAAmD;EAEjDhnF,aAAK;EACH/X,YAAKkiG,kBAALliG,CAAwB8tC,CAAxB9tC,IACIoL,UAAUhI,CAAVgI,EAAiBkD,QAAjBlD,EAHY,CAGZA,CADJpL;WADF+X;EAMF,WAAMoqF,IAAwBpD,EAAKiD,sBAALjD,CAA4BjxD,CAA5BixD,CAA9B;EAAA,UACMqD,IAAsBrD,EAAKkD,oBAALlD,CAA0BjxD,CAA1BixD,CAD5B;EAAA,UAEMmD,IAAqBnD,EAAKmD,kBAALnD,CAAwBjxD,CAAxBixD,CAF3B;EAAA,UAGM/sF,IAAW8sF,EAAkBhxD,CAAlBgxD,CAHjB,CAKA/mF,KAAK;EACH,YAAMsqF,IACFriG,EAAK4gG,WAAL5gG,CAAiBiI,GAAjBjI,CAAqBmiG,CAArBniG,EACK0H,GADL1H,CACSA,EAAKwsE,aAALxsE,CAAmBiI,GAAnBjI,CAAuBgS,EAASxH,MAATwH,EAAvBhS,CADTA,CADJ,CAIA,IAAIA,EAAK6hG,QAAT,EAAmB;EAEjB,cAAMS,IACFtiG,EAAK4gG,WAAL5gG,CAAiBiI,GAAjBjI,CAAqBoiG,CAArBpiG,EACK0H,GADL1H,CACSA,EAAKwsE,aAALxsE,CAAmBiI,GAAnBjI,CAAuBgS,CAAvBhS,CADTA,CADJ;EAAA,cAIMuiG,IACFviG,EAAK8hG,cAAL9hG,CAAoBiI,GAApBjI,CAAwBkiG,CAAxBliG,EACK0H,GADL1H,CACSA,EAAKwpB,CAALxpB,CAAOiI,GAAPjI,CAAWgS,CAAXhS,EAAqBmI,GAArBnI,CACDqiG,EACKx6F,GADLw6F,CACSC,EAAuB93F,MAAvB83F,GAAgC56F,GAAhC46F,CACDtiG,EAAK8vE,aADJwyB,CADTD,EAGKtmG,IAHLsmG,EADCriG,CADTA,CALJ,CAYAA,EAAKgiG,sBAALhiG,CAA4B8tC,CAA5B9tC
,EAA0CqQ,MAA1CrQ,CACIqiG,CADJriG,GAEAA,EAAKiiG,oBAALjiG,CAA0B8tC,CAA1B9tC,EAAwCqQ,MAAxCrQ,CACIsiG,CADJtiG,CAFAA,EAIAA,EAAKkiG,kBAALliG,CAAwB8tC,CAAxB9tC,EAAsCqQ,MAAtCrQ,CAA6CuiG,CAA7CviG,CAJAA,CAMA,IAAMmQ,IAAW/M,EAAMyE,GAANzE,CAAUm/F,CAAVn/F,CAAjB,CACAA,EAAMiN,MAANjN,CAAa+M,CAAb/M;WArBF,MAsBO;EAEL,cAAMo/F,IACFxiG,EAAK4gG,WAAL5gG,CAAiBiI,GAAjBjI,CAAqBmiG,CAArBniG,EACK0H,GADL1H,CACSA,EAAKwsE,aAALxsE,CAAmBiI,GAAnBjI,CAAuBgS,EAASxH,MAATwH,EAAvBhS,CADTA,CADJ,CAIMuiG,IACFviG,EAAK8hG,cAAL9hG,CAAoBiI,GAApBjI,CAAwBkiG,CAAxBliG,EACK0H,GADL1H,CACSA,EAAKwpB,CAALxpB,CAAOiI,GAAPjI,CAAWgS,CAAXhS,EAAqBmI,GAArBnI,CACDwiG,EAAyB96F,GAAzB86F,CAA6BxiG,EAAK8vE,aAAlC0yB,EAAiDzmG,IAAjDymG,EADCxiG,CADTA,CADEuiG,CAKNviG,EAAKgiG,sBAALhiG,CAA4B8tC,CAA5B9tC,EAA0CqQ,MAA1CrQ,CACIwiG,CADJxiG,GAEAA,EAAKkiG,kBAALliG,CAAwB8tC,CAAxB9tC,EAAsCqQ,MAAtCrQ,CAA6CuiG,CAA7CviG,CAFAA,CAIMmQ,IAAW/M,EAAMyE,GAANzE,CAAUm/F,CAAVn/F,CAAX+M,CACN/M,EAAMiN,MAANjN,CAAa+M,CAAb/M;;SA3CJ2U;OA9BJ;EAAA,gBAAA,CACE,KAAK,IAAM+1B,CAAX,IAA2BgxD,CAA3B,IAAWhxD;KAjCuB59B,EA+GpC6xF,WAAAA,QAAAA,GAAA;EAAA,gBAAA,CACE1iG,KAAKmqB,CAALnqB,CAAOgT,OAAPhT,IACAA,KAAKywE,aAALzwE,CAAmBgT,OAAnBhT,EADAA,EAEAA,KAAKuhG,WAALvhG,CAAiBgT,OAAjBhT,EAFAA,EAGAA,KAAKyiG,cAALziG,CAAoBgT,OAApBhT,EAHAA,EAIAA,KAAKmtE,aAALntE,CAAmBgT,OAAnBhT,EAJAA,EAKmC,QAA/BA,KAAK2iG,sBAA0B,IACjCx+F,OAAO0O,IAAP1O,CAAYnE,KAAK2iG,sBAAjBx+F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKgiG,sBAALhiG,CAA4BpC,CAA5BoC,EAAkCqS,OAAlCrS,EAAA;OADrBwD,CANFnE,EASiC,QAA7BA,KAAK4iG,oBAAwB,IAAQ5iG,KAAKwiG,QAAb,IAC/Br+F,OAAO0O,IAAP1O,CAAYnE,KAAK4iG,oBAAjBz+F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKiiG,oBAALjiG,CAA0BpC,CAA1BoC,EAAgCqS,OAAhCrS,EAAA;OADrBwD,CAVFnE,EAa+B,QAA3BA,KAAK6iG,kBAAsB,IAC7B1+F,OAAO0O,IAAP1O,CAAYnE,KAAK6iG,kBAAjB1+F,EACK/D,OADL+D,CACa,UAAA5F,CAAA;EAAQ,aAAAoC,EAAKkiG,kBAALliG,CAAwBpC,CAAxBoC,EAA8BqS,OAA9BrS,EAAA;OADrBwD,CAdFnE;KAhHkC6Q,EAmIpC6xF,WAAAA,UAAAA,GAAA;EACE,aACEtD,cAAcp/F,KAAKo/F,cACnBtyB,OAAO9sE,KAAK8sE,OACZo1B,UAAUliG,KAAKkiG,UACf1xB,SAASxwE,KAAKwwE,SACdgyB,UAAUxiG,KAAKwiG,UALjB;KApIkC3xF,EA4I7B6xF,YAAAA,GAAP,UACI3F,CADJ,EACqCC,CADrC;EAEE,WAAO,IAAID,CAAJ,CACHC,EAAOoC,YADJ,EACkBpC,EAAOlwB,KADzB,EACgCkwB,EAAOkF,QADvC,EACiDlF,EAAOxsB,OADxD,EAEHwsB,EAAOwF,QAFJ,CAAP;KA9IkC3xF,EAC7B6xF,WAAAA,GAAY,kBADiB7xF,GAkJtC;IAlJsCmuF,yBAmJxB0D,kBCpJd;EAAA,YAAA,YAuCSU,KAAAA,GAAP,UAAWhE,CAAX;EACE,WAAO,IAAI6C,YAAJ,CAAiB7C,CAAjB,CAAP;KADKgE,EAoBAA,UAAAA,GAAP,UAAgBhE,CAAhB,EAAsC8C,CAAtC,EAAwDC,CAAxD;EAEE,4BAFsDA,SAE/C,IAAIE,iBAAJ,CAAsBjD,CAAtB,EAAoC8C,CAApC,EAA8CC,CAA9C,CAAP;KAtBKiB,EA8CAA,SAAAA,GAAP,UACIhE,CADJ,EAC0BtyB,CAD1B,EACsCo1B,CADtC,EACsD1xB,CADtD,EAEIgyB,CAFJ;EAGE,4BAFwB11B,0BAAYo1B,yBAAgB1xB,4BAClDgyB,SACK,IAAIE,gBAAJ,CACHtD,CADG,EACWtyB,CADX,EACkBo1B,CADlB,EAC4B1xB,CAD5B,EACqCgyB,CADrC,CAAP;KAjDKY,EAkEAA,MAAAA,GAAP,UACIhE,CADJ,EAC0Be,CAD1B,EACuCC,CADvC,EAEI5vB,CAFJ;EAGE,4BAFE4uB,4BAAsBe,0BAAaC,4BACnC5vB,WACK,IAAIowB,aAAJ,CAAkBxB,CAAlB,EAAgCe,CAAhC,EAAuCC,CAAvC,EAA8C5vB,CAA9C,CAAP;KArEK4yB,EAqFAA,UAAAA,GAAP,UAAgBhE,CAAhB,EAAqCC,CAArC,EAAgD7uB,CAAhD;EAEE,4BAFc4uB,4BAAqBC,2BAAW7uB,WAEvC,IAAIgvB,iBAAJ,CAAsBJ,CAAtB,EAAoCC,CAApC,EAAyC7uB,CAAzC,CAAP;KAvFK4yB,EAwGAA,QAAAA,GAAP,UACIhE,CADJ,EAC0Be,CAD1B,EACuCC,CADvC,EACsD5vB,CADtD,EAEI1D,CAFJ;EAGE,4BAFEsyB,4BAAsBe,0BAAaC,4BAAe5vB,4BAClD1D,QACK,IAAI20B,eAAJ,CAAoBrC,CAApB,EAAkCe,CAAlC,EAAyCC,CAAzC,EAAgD5vB,CAAhD,EAAyD1D,CAAzD,CAAP;KA3GKs2B,EA+HAA,SAAAA,GAAP,UAAehE,CAAf,EAAqCa,CAArC;EAEE,4BAFmCA,SAE5B,IAAIC,gBAAJ,CAAqBd,CAArB,EAAmCa,CAAnC,CAAP;KAjIKmD,GAmIT;KA1KA;EAAA,ICMaC,UACXC,KAAKF,sBAAsBE,KAC3BpB,UAAUkB,sBAAsBlB,UAChCqB,UAAUH,sBAAsBG,UAChCC,SAASJ,s
BAAsBI,SAC/BC,SAASL,sBAAsBK,SAC/BC,QAAQN,sBAAsBM,QAC9BC,MAAMP,sBAAsBO,MDb9B;EAAA,IEsCaC,aAAa3mF,YAAY2mF,UFtCtC;EAAA,IEuCaC,aAAa5mF,YAAY4mF,UFvCtC;EAAA,IEwCarmF,mBAAmBP,YAAYO,gBFxC5C;EAAA,IEyCajG,SAAS0F,YAAY1F,MFzClC,CE0CAusF,aAWaC,GAXbD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ECrDA,KAAIE,WAAW7mF,IAAIxG,GAAJwG,CAAQ,SAARA,CAAf,CAKA,gBAAA;EACE,SAAO6mF,QAAP;EAOF,yBAAA;EAQE,SAAO,cAAP;ECdF,KAAIC,sBAAsB,CAA1B,CAEA,8BAAA;EACE,SAAOA,qBAAP;EAGF,KAAMC,iBAAN,CAOA,eAAA,CAAuB5P,CAAvB;EAKE,0BALqBA,SACfA,KAAU4P,YAAV5P,KACJ4P,aAAa5P,CAAb4P,IAAuB,CADnB5P,GAGN4P,aAAa5P,CAAb4P,KAAwB,GACjB5P,IAAS4P,aAAa5P,CAAb4P,EAAqBjjG,QAArBijG,EAAhB;EAGF,KAAMC,gBACJ98E,aACAC,WAFF;EAAA,IAKM88E,gBAA0B,SALhC,CAUA,kBAAA,CAA0BrgG,CAA1B,EAAyC9F,CAAzC;EAQE,cAPcomG,MAAVpmG,MACFA,IAAQmmG,gBAEuB,QAA7BD,YAAYlmG,CAAZkmG,EAAmBpgG,CAAnBogG,CAA6B,KAC/BA,YAAYlmG,CAAZkmG,EAAmBpgG,CAAnBogG,IAA4Bt6E,OAAO9lB,CAAP8lB,EAAc5rB,CAAd4rB,CAA5Bs6E,EACAzmF,KAAKymF,YAAYlmG,CAAZkmG,EAAmBpgG,CAAnBogG,CAALzmF,CAF+B,GAI1BymF,YAAYlmG,CAAZkmG,EAAmBpgG,CAAnBogG,CAAP;ECrCF;EACE,YAAA,CAAYjlF,CAAZ;EAAA,YACEzO,MAAAA,KAAAA,EAAMyO,CAANzO,SADF,QAGEtM,OAAOmgG,cAAPngG,CAAsBxD,CAAtBwD,EAA4BogG,EAAeC,SAA3CrgG;EAEJ,UANoC0M,aAAAA,GAAAA,IAMpC;IANoCtX,MAApC;EAAA;EAYE,YAAA,CAAY2lB,CAAZ;EAAA,YACEzO,MAAAA,KAAAA,EAAMyO,CAANzO,SADF,QAGEtM,OAAOmgG,cAAPngG,CAAsBxD,CAAtBwD,EAA4BsgG,EAAaD,SAAzCrgG;EAEJ,UANkC0M,aAAAA,GAAAA,IAMlC;IANkCtX,MAXlC;EAAA;EAuBE,YAAA,CAAY2lB,CAAZ;EAAA,YACEzO,MAAAA,KAAAA,EAAMyO,CAANzO,SADF,QAGEtM,OAAOmgG,cAAPngG,CAAsBxD,CAAtBwD,EAA4BugG,EAAWF,SAAvCrgG;EAEJ,UANgC0M,aAAAA,GAAAA,IAMhC;IANgCtX,MAtBhC;EAAA;EAkCE,YAAA,CAAY2lB,CAAZ;EAAA,YACEzO,MAAAA,KAAAA,EAAMyO,CAANzO,SADF,QAGEtM,OAAOmgG,cAAPngG,CAAsBxD,CAAtBwD,EAA4BwgG,EAAoBH,SAAhDrgG;EAEJ,UANyC0M,aAAAA,GAAAA,IAMzC;IANyCtX,MAjCzC;EAAA;EA6CE,YAAA,CAAY2lB,CAAZ;EAAA,YACEzO,MAAAA,KAAAA,EAAMyO,CAANzO,SADF,QAGEtM,OAAOmgG,cAAPngG,CAAsBxD,CAAtBwD,EAA4BygG,EAAeJ,SAA3CrgG;EAEJ,UANoC0M,aAAAA,GAAAA,IAMpC;IANoCtX,MA5CpC;EAAA;EAwDE,YAAA,CAAY2lB,CAAZ;EAAA,YACEzO,MAAAA,KAAAA,EAAMyO,CAANzO,SADF,QAGEtM,OAAOmgG,cAAPngG,CAAsBxD,CAAtBwD,EAA4B0gG,EAAWL,SAAvCrgG;EAEJ,UANgC0M,aAAAA,GAAAA,IAMhC;IANgCtX,MAvDhC,uBCD6BwK,GAAY8qE;EACvC,MAAIhzE,MAAMC,OAAND,CAAckI,CAAdlI,CAAJ,EAA0B;EAGxB,SADA,IAAIipG,MAAJ,EACS/pG,IAAI,CAAb,EAAgBA,IAAI8zE,CAApB,EAA+B9zE,GAA/B,EACE+pG,IAAWA,EAAS39F,MAAT29F,CAAgB/gG,CAAhB+gG,CAAXA,CAEF,OAAOA,CAAP;EAIA,WAFMA,IAAW,IAAIjpG,KAAJ,CAAUgzE,CAAV,GACR1sE,KAAK4B,IACP+gG,CAAP;EAIJ,kBAAA,CAAuBlqG,CAAvB,EAAqCskB,CAArC;EACE,OAAKtkB,CAAL,EACE,MAAM,IAAIgqG,cAAJ,CAAmB1lF,CAAnB,CAAN;EAOJ,eAAA,CAAyBhlB,CAAzB,EAAqC6qG,CAArC;EAEE,OADA,IAAI5qG,IAAU,CAAd,OAAA,EACmB6qG,KAAnB,EAAmBhhG,YAAnB,EAAmBA,GAAnB;eACe+gG,KACX5qG;EAGJ,UAAOA,CAAP;EAQF,0BAAA,CAAoCgX,CAApC;EACE,SAAkB,MAAdA,EAAG/W,MAAW,GACT+W,EAAG,CAAHA,CADS,GAGXA,CAHP;EAeF,gBAAA,CAAuBzW,CAAvB;EACE,SAAImB,MAAMC,OAAND,CAAcnB,CAAdmB,IACKnB,CADLmB,IAGInB,EAHR;EAUF,qBAAA,CAmB4B6D,CAnB5B;EAoBE,MACM0mG,IADe1mG,EAAKke,OAALle,CAAa,sBAAbA,EAAqC,OAArCA,EAEJke,OAFIle,CAEI,iBAFJA,EAEuB,OAFvBA,EAEgC2mG,WAFhC3mG,EAArB,CAOA,OAAoB,QAAhB0mG,EAAS,CAATA,CAAgB,GACXA,CADW,GAGb,YAAYA,CAHnB;EAMF,qBAAA,CAA4BE,CAA5B;EAEE,SAAIA,EAAW/qG,MAAX+qG,IAAqB,CAArBA,GACKA,CADLA,IAI6B,MAA7BA,EAAW7jF,OAAX6jF,CAAmB,GAAnBA,IACKA,IAEFA,EAAW1oF,OAAX0oF,CAAmB,aAAnBA,EAAkC,UAACzxB,CAAD,EAAI0xB,CAAJ;EAAW,WAAAA,EAAGn2E,WAAHm2E,EAAA;KAA7CD,CAPP;EAWF,KAAIE,2BAAJ,CAEA,6BAAA,CAAqC90F,CAArC;EAEE,SAAiB,SAAbA,CAAa,SAAqB8zF,MAAb9zF,CAAR,GACR,IADQ,KAGTusF,WAAWvsF,EAAS+0F,YAAT/0F,IAAyBysF,QAAQzsF,EAASg1F,SAATh1F,IAHpD;EAeF,gCAAA,CACI40F,CADJ,EAEIK,CAFJ,EAGIC,CAHJ,EAIIC,CAJJ;EAME,uBAJEF,0BACAC,0BACA
C,eAEwB,mBAAfP,CAAX,EAAoC;EAClC,QAAM5gF,IAAe4gF,CAArB;EAAA,QACI3gG,UADJ,CAEA,IAAI+f,KAAgBkhF,CAApB,EACEjhG,IAAKihG,EAAclhF,CAAdkhF,CAALjhG,CADF,KAEO,IAAI+f,KAAgB8gF,sBAApB,EACL7gG,IAAK6gG,uBAAuB9gF,CAAvB8gF,CAAL7gG,CADK,KAIL,IAAU,SADVA,IAAKghG,EAAcjhF,CAAdihF,CACK,CAAV,EACE,MAAM,IAAId,UAAJ,CACF,aAAWgB,CAAX,OAAA,GAAmCP,CAAnC,gEAAA,GAEUO,CAFV,yHAAA,GAKiBA,CALjB,qGADE,CAAN,CAYJ,OAAOlhG,CAAP;EAGA,OAAMw4F,IAASmI,CAAf,CACA,IAAwB,QAApBnI,EAAOF,SAAa,IAAyB,QAAjBE,EAAOA,MAAvC,EACE,MAAM,IAAI0H,UAAJ,CACCgB,gCAAAA,GACAxsE,KAAKE,SAALF,CAAe8jE,CAAf9jE,CADAwsE,0CADD,CAAN,CAKF,IAAM5I,IAAYE,EAAOF,SAAzB;EAAA,MACIC,UADJ;EAAA,MACSK,UADT,CASA,IAPIN,KAAa2I,CAAb3I,IACDC,KAAD5jF,gBAAC4jF,EAAKK,QADJN,IAEOA,KAAauI,sBAAbvI,IACRC,KAADxqE,wCAACwqE,EAAKK,QADGN,IAEAA,KAAa0I,CAAb1I,KACRC,KAADpqE,YAACoqE,EAAKK,QADGN,CAJPA,EAOO,QAAPC,CAAJ,EACE,MAAM,IAAI2H,UAAJ,CACF,aAAWgB,CAAX,OAAA,GAAmC5I,CAAnC,gEAAA,GAEU4I,CAFV,yHAAA,GAKiBA,CALjB,qGADE,CAAN,CAWF,IAAkB,QAAdtI,CAAJ,EAAwB;EAQtB,SADA,IAAMuI,MAAN,OAAA,EACkBvpC,IAAAj4D,OAAO0O,IAAP1O,CAAYkhG,sBAAZlhG,CAAlB,EAAkBH,YAAlB,EAAkBA,GAAlB;EACE2hG,QADSpvF,QACTovF,IAA6BN,uBAAuB9uF,CAAvB8uF,CAA7BM;EAEF,UAAkB,SAAA,EAAAC,IAAAzhG,OAAO0O,IAAP1O,CAAYshG,CAAZthG,CAAlB,EAAkB0hG,YAAlB,EAAkBA,GAAlB;EACEF,QADSpvF,QACTovF,IAA6BF,EAAclvF,CAAdkvF,CAA7BE;EAGmB3I,OAAOA,MAAPA,CACRyI,aADQzI,GACQ2I,CADR3I,CAIrB,KADA,IAAM8I,mBAA0BT,uBAAhC,OAAA,EACkBU,IAAA5hG,OAAO0O,IAAP1O,CAAYshG,CAAZthG,CAAlB,EAAkB6hG,YAAlB,EAAkBA,GAAlB;EAAK,UAAMzvF,QAAN,CACH8uF,uBAAuB9uF,CAAvB8uF,IAA8BI,EAAclvF,CAAdkvF,CAA9BJ;EAEF,SAAMY,IAAY7I,EAAWL,CAAXK,EAAgBJ,EAAOA,MAAvBI,CAAlB,CAGA,OAFAiI,wCAA6BS,EAA7BT,EAEOY,CAAP;EAKMH,sBAA0BT,uBAA1BS,CACN,KADA,SAAA,EACkBI,IAAA/hG,OAAO0O,IAAP1O,CAAYshG,CAAZthG,CAAlB,EAAkBgiG,YAAlB,EAAkBA,GAAlB;EAAW5vF,YAAAA,CACT8uF,uBAAuB9uF,CAAvB8uF,IAA8BI,EAAclvF,CAAdkvF,CAA9BJ;EAKF,QAAA,GAAA,GAAA,CAAMY,IAAY,IAAIlJ,CAAJ,CAAQC,EAAOA,MAAf,CAAZiJ,CAEN,OADAZ,wCAA6BS,EAA7BT,EACOY,CAAP;EAUN,uBAAA,CAA8BvsG,CAA9B,EAAyCsB,CAAzC;EACE,SAAQtB,IAAIsB,CAAJtB,IAAU,CAAVA,GAAgBA,IAAIsB,CAAJtB,GAAS,CAATA,GAAa,CAArC;EAQF,8BAAA,CAAqCA,CAArC,EAAgDsB,CAAhD;EACE,UAAQ,IAAIorG,cAAc1sG,CAAd0sG,EAAiBprG,CAAjBorG,CAAZ;EAQF,uBAAA,CAA8BnoG,CAA9B;EACE,UAAQA,CAAR,GACE,KAAK,SAAL;EACE,aAAO,SAAP,CACF;EACE,YAAM,IAAIymG,UAAJ,CAAe,oBAAkBzmG,CAAjC,CAAN,CAJJ;EAcF,gBAAA,CAoB0BkT,CApB1B;EAqBE,MAAU,QAANA,CAAJ,EACE,OAAOA,CAAP,CAIF,KAFA,IAAMouC,MAAN,OAAA,EAEgB8mD,KAAhB,EAAgBriG,YAAhB,EAAgBA,GAAhB;EAAK,QAAMtJ,QAAN,EACqB,MAApB6kD,EAAIj+B,OAAJi+B,CAAY7kD,CAAZ6kD,KACFA,EAAIvjD,IAAJujD,CAAS7kD,CAAT6kD;EAGJ,UAAOA,CAAP;EASF,uBAAA,CAA8B5rC,CAA9B;EACE,MAAW,QAAPA,CAAJ,EACE,MAAM,IAAI+wF,UAAJ,CAAe,2BAAyBxrE,KAAKE,SAALF,CAAevlB,CAAfulB,CAAxC,CAAN,CAEF,KAAK,IAAM3iB,CAAX,IAAkB5C,CAAlB,EACE,IAAIA,EAAI2yF,cAAJ3yF,CAAmB4C,CAAnB5C,CAAJ,EACE,QAAO,CAAP,CAGJ,QAAO,CAAP;EAUF,mCAAA,CACIzV,CADJ,EACsBqoG,CADtB,EACqCxiG,CADrC;EAEE,MAAa,QAATA,CAAS,IAGT7F,EAAOojB,OAAPpjB,CAAe6F,CAAf7F,IAAwB,CAH5B,EAIE,MAAM,IAAIwmG,UAAJ,CAAkB3gG,sBAAAA,GAAwBwiG,CAAxBxiG,yBAAAA,GACpB7F,CADoB6F,wBAAlB,CAAN;EAoBJ,iCAAA,CACIrJ,CADJ,EACY8rG,CADZ,EACkCC,CADlC,EAEIC,CAFJ;EAKE,0BAJgCD,yBAC9BC,YACFhrG,SAAO+qG,KAAa,CAApB/qG,GACAA,SAAOgrG,KAAaD,CAApB/qG,GAEIG,MAAMC,OAAND,CAAcnB,CAAdmB,KAAoBnB,EAAEN,MAAFM,IAAY+rG,CAAhC5qG,IAA6CnB,EAAEN,MAAFM,IAAYgsG,CAAzD7qG,IACAnB,EAAEue,KAAFve,CAAQ,UAAAwf,CAAA;EAAK,kBAAOA,MAAMssF,CAAb;KAAb9rG,CAFJ;EC1WF,qBAAA,CAAqByqC,CAArB,EAAgCrnC,CAAhC;EACE,SAAO4a,KAAK;EAAM,WAAAiuF,KAASC,MAAQC,UAAc1hE,CAAd0hE,EAAiB1hE,CAAjB0hE,CAARD,EAA6B9oG,CAA7B8oG,GAAmC,CAAnCA,CAATD,CAAA;KAAXjuF,CAAP;EAaF;EAAA,YAAA;;EAMA,UANyC7H,aAAAA,GAAAA,GAGvCi2F,WAAAA,UAAAA,GAAA;EACE,aAAA;KAJqCj2F,GAMzC;IANyCk2F,cAAclK,aAAv
D;EAAA;EAgDE,YAAA,CAAYG,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,QAHiB9P,iBAAAA,GAAkB,CAAlBA,EACAA,aAAAA,GAAc,CADdA,EAKfA,EAAK2+E,QAAL3+E,GACuB,QAAnBq8F,EAAO1d,QAAY,GAAO0d,EAAO1d,QAAd,GAAyB3+E,EAAKqmG,eANtCrmG,EAOfA,EAAK7C,IAAL6C,GAA2B,QAAfq8F,EAAOl/F,IAAQ,GAAOk/F,EAAOl/F,IAAd,GAAqB6C,EAAKsmG,WAPtCtmG;EAqBnB,UAzB6BkQ,aAAAA,GAAAA,GAc3Bq2F,WAAAA,MAAAA,GAAA,UAAM/hE,CAAN;EAAA,gBAAA,CACE,OAAOzsB,KAAK;EACV,UAAMyuF,IAAQC,YAAYjiE,CAAZiiE,EAAezmG,EAAK7C,IAApBspG,CAAd;EAAA,UACMC,IAAUC,YAAgBH,CAAhBG,EAAuB,CAAvBA,EAA0B3mG,EAAK2+E,QAA/BgoB,CADhB,CAEA,OAAOC,IAAQpiE,CAARoiE,EAAWC,IAAQH,CAARG,EAAiBC,IAAQC,UAAUl3B,SAAVk3B,CAARD,EAA8BN,CAA9BM,CAAjBD,CAAXD,CAAP;OAHK7uF,CAAP;KAfyB7H,EAsB3Bq2F,WAAAA,UAAAA,GAAA;EACE,aAAQ5nB,UAAUt/E,KAAKs/E,UAAUxhF,MAAMkC,KAAKlC,MAA5C;KAvByB+S,EACXq2F,WAAAA,GAAY,SADDr2F,GAyB7B;IAzB6Bi2F,WAzC7B,eAmEca,cAAcT,SAuB5B;EAIE,YAAA,CAAYlK,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,QADiB9P,aAAAA,GAAc,CAAdA,EAGfA,EAAK7C,IAAL6C,GAA2B,QAAfq8F,EAAOl/F,IAAQ,GAAOk/F,EAAOl/F,IAAd,GAAqB6C,EAAKsmG,WAHtCtmG;EAenB,UAlB8BkQ,aAAAA,GAAAA,GAS5B+2F,WAAAA,MAAAA,GAAA,UAAMziE,CAAN;EAAA,gBAAA,CACE,OAAOzsB,KACH;EAAM,aAAA8uF,IACFriE,CADEqiE,EACCC,IAAQC,UAAUl3B,SAAVk3B,CAARD,EAA8BL,YAAYjiE,CAAZiiE,EAAezmG,EAAK7C,IAApBspG,CAA9BK,CADDD,CAAA;OADH9uF,CAAP;KAV0B7H,EAe5B+2F,WAAAA,UAAAA,GAAA;EACE,aAAQ9pG,MAAMkC,KAAKlC,MAAnB;KAhB0B+S,EACZ+2F,WAAAA,GAAY,UADA/2F,GAkB9B;IAlB8Bi2F,WAA9B,eAmBca,cAAcC,UAK5B;EAAA,YAAA;;EAMA,UAN4B/2F,aAAAA,GAAAA,GAG1Bg3F,WAAAA,MAAAA,GAAA,UAAM1iE,CAAN;EACE,WAAO2iE,KAAS3iE,CAAT2iE,CAAP;KAJwBj3F,EACVg3F,WAAAA,GAAY,QADFh3F,GAM5B;IAN4Bi2F,WAA5B,eAOca,cAAcE,QAoC5B;EAWE,YAAA,CAAY7K,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,QALiB9P,iBAAAA,GAAkB,CAAlBA,EACAA,iBAAAA,GAAkB,CADlBA,EAEAA,aAAAA,GAAc,CAFdA,EAGAA,aAAAA,GAAc,CAHdA,EAOfA,EAAKonG,QAALpnG,GACuB,QAAnBq8F,EAAO+K,QAAY,GAAO/K,EAAO+K,QAAd,GAAyBpnG,EAAKqnG,eARtCrnG,EASfA,EAAK2+E,QAAL3+E,GACuB,QAAnBq8F,EAAO1d,QAAY,GAAO0d,EAAO1d,QAAd,GAAyB3+E,EAAKqmG,eAVtCrmG,EAWfA,EAAKsnG,IAALtnG,GAA2B,QAAfq8F,EAAOiL,IAAQ,GAAOjL,EAAOiL,IAAd,GAAqBtnG,EAAKunG,WAXtCvnG,EAYfA,EAAK7C,IAAL6C,GAA2B,QAAfq8F,EAAOl/F,IAAQ,GAAOk/F,EAAOl/F,IAAd,GAAqB6C,EAAKsmG,WAZtCtmG;EAmCnB,UAzCgCkQ,aAAAA,GAAAA,GAqB9Bs3F,WAAAA,MAAAA,GAAA,UAAMhjE,CAAN;EAAA,gBAAA,CACE,OAAOzsB,KAAK;EACV,UAAMyuF,IAAQC,YAAYjiE,CAAZiiE,EAAezmG,EAAK7C,IAApBspG,CAAd;EAAA,UACMC,IAAUI,IACZF,IACIG,UAAU/mG,EAAKsnG,IAAfP,CADJH,EAEID,YAAgBH,CAAhBG,EAAuB3mG,EAAKonG,QAA5BT,EAAsC3mG,EAAK2+E,QAA3CgoB,CAFJC,CADYE,EAIZF,IAAQG,UAAU,IAAM/mG,EAAKsnG,IAArBP,CAARH,EAAoCJ,CAApCI,CAJYE,CADhB,CAMA,OAAOF,IAAQpiE,CAARoiE,EAAWC,IAAQH,CAARG,EAAiBC,IAAQC,UAAUl3B,SAAVk3B,CAARD,EAA8BN,CAA9BM,CAAjBD,CAAXD,CAAP;OAPK7uF,CAAP;KAtB4B7H,EAiC9Bs3F,WAAAA,UAAAA,GAAA;EACE,aACEJ,UAAU/nG,KAAK+nG,UACfzoB,UAAUt/E,KAAKs/E,UACf2oB,MAAMjoG,KAAKioG,MACXnqG,MAAMkC,KAAKlC,MAJb;KAlC4B+S,EACds3F,WAAAA,GAAY,YADEt3F,GAyChC;IAzCgCi2F,WAAhC,eA0Cca,cAAcQ,YAQ5B,IAAaC,8CAEPC,SAAW,WACXC,YAAc,cACdC,QAAU,UACVC,UAAY,YALlB,CAQA,4BAAA,CAAoCC,CAApC;EAEE,SAAOC,qBAAqBD,CAArBC,CAAP;EAGF,+BAAA,CACI1L,CADJ,EAEIyI,CAFJ;EAGE,0BADEA,SACKkD,uBACH3L,CADG2L,EACK5B,cAAc7J,gBAAd6J,CAA+B5J,MAA/B4J,GAAwC9J,YAD7C0L,EAEHlD,CAFGkD,EAEY,YAFZA,CAAP;EAKF,uBAAA,CAA8BxD,CAA9B;EAEE,SAAkB,QAAdA,CAAc,GACT,IADS,GAGQ,mBAAfA,CAAe,GAKjByD,wBADS9L,WAHEqI,KAAciD,yCAAdjD,GACdiD,0CAA0CjD,CAA1CiD,CADcjD,GAEdA,GACuBnI,YACpB4L,CALiB,GAMfzD,aAAsB2B,UAAtB3B,GACFA,CADEA,GAGFyD,sBAAsBzD,CAAtByD,CAZT;oBCtPsB5L;EACtB,SAAO,IAAIkK,OAAJ,CAAYlK,CAAZ,CAAP;EAWF,kBAAA,CAAyBA,CAAzB;EACE,SAAO,IAAI4K,QAAJ,CAAa5K,CAAb,CAAP;EAUF,gBAAA;EACE,SAAO,IAAI6K,MAAJ,EAAP;EAWF,oBAAA,CAA2B7K,CAA3B;EACE,SAAO,IAAImL,UAAJ,CAAenL,CAAf,CAAP;;MCvCI6L,UAA+B,IAAIC,GAAJ;MAMxBC,4
BAA4B,iBAAiB,gBAC1D,wBAAA,CAAgChlG,CAAhC;EACEilG,4BAA0BD,wBAA1BC,EAAoD,YAApDA,EAAkEjlG,CAAlEilG;EAKF,KAAaC,6BAA6B,SAAS,QAAQ,SAA3D,CACA,yBAAA,CAAiCllG,CAAjC;EACEilG,4BAA0BC,yBAA1BD,EAAqD,aAArDA,EAAoEjlG,CAApEilG;EAKF,KAAaE,0BAA0B,OAAO,MAA9C,CACA,sBAAA,CAA8BnlG,CAA9B;EACEilG,4BAA0BE,sBAA1BF,EAAkD,UAAlDA,EAA8DjlG,CAA9DilG;EAGF,KAAMG,oBAAN;EAAA,IACMC,oBAAoB,GAD1B,CAMA,kBAAA,CAA6B7qG,CAA7B,EAA2CiG,CAA3C;EACE2kG,kBAAgBntG,IAAhBmtG,CAAqB5qG,CAArB4qG,EACA;EACE,QAAMvuG,IAAS4J,GAAf,CAEA,OADA2kG,gBAAgB5wF,GAAhB4wF,IACOvuG,CAAP;EACA,GAJF,CAIE,OAAOsf,CAAP;EAEA,UADAivF,gBAAgB5wF,GAAhB4wF,IACMjvF,CAAN;;EAOJ,gCAAA;EACE,SAA+B,MAA3BivF,gBAAgB/uG,MAAW,GACtB,EADsB,GAGtB+uG,gBAAgBnnG,IAAhBmnG,CAAqBC,iBAArBD,IAA0CC,iBAHnD;EAYF,6BAAA,CAAoCC,CAApC;EACE,OAAKC,kBAAkBD,CAAlBC,CAAL,EACE,MAAM,IAAI/vG,KAAJ,CAAU,+BAAgC8vG,CAAhC,GAA6C,GAAvD,CAAN,CAEF,OAAOE,2BAA2BF,CAAlC;EAaF,6BAAA,CAAoCG,CAApC;EACE,OAAKF,kBAAkBE,CAAlBF,CAAL,EACE,MAAM,IAAI/vG,KAAJ,CAAU,+BAAgCiwG,CAAhC,GAA6C,GAAvD,CAAN,CAEGX,QAAQn1F,GAARm1F,CAAYW,CAAZX,KACHA,QAAQ/xF,GAAR+xF,CAAYW,CAAZX,EAAwB,CAAxBA,CADGA,CAGL,IAAMvuG,IAAQuuG,QAAQlyF,GAARkyF,CAAYW,CAAZX,CAAd,CAGA,IAFAA,QAAQ/xF,GAAR+xF,CAAYW,CAAZX,EAAwBA,QAAQlyF,GAARkyF,CAAYW,CAAZX,IAA0B,CAAlDA,GAEIvuG,IAAQ,CAAZ,EAAe;EACb,QAAMY,IAASsuG,IAAa,GAAbA,GAAmBlvG,CAAlC,CAIA,OADAuuG,QAAQ/xF,GAAR+xF,CAAY3tG,CAAZ2tG,EAAoB,CAApBA,GACO3tG,CAAP;EAEA,UAAOsuG,CAAP;EAIJ,KAAMC,kBAAkB,IAAIC,MAAJ,CAAW,8BAAX,CAAxB,CAOA,0BAAA,CAAkCnrG,CAAlC;EACE,WAAOA,EAAKy9F,KAALz9F,CAAWkrG,eAAXlrG,CAAP;sBC5FwB7D;EACxB,SAAOA,MAAMivG,SAASjvG,EAAEuG,QAAFvG,EAATivG,EAAuB,EAAvBA,CAAb;EAUF,mBAAA,CACEzvG,CADF,EACgCgN,CADhC,EACgD3D,CADhD;EAEe,UAAT2D,CAAS,KACXA,IAAQ,CADG,GAGF,QAAP3D,CAAO,KACTA,IAAMrJ,EAAME,MADH,CAHE,CAQb,KADA,IAAI8N,IAAO,CAAX,EACSnN,IAAImM,CAAb,EAAoBnM,IAAIwI,CAAxB,IAA+BxI,CAA/B,EACEmN,KAAQhO,EAAMa,CAANb,CAARgO,CAEF,OAAOA,CAAP;EAQF,mBAAA,CAAmBhO,CAAnB;EAEE,SADAA,IAAQ2B,MAAMC,OAAND,CAAc3B,CAAd2B,IAAuB,IAAIsC,YAAJ,CAAiBjE,CAAjB,CAAvB2B,GAAiD3B,CAAzDA,EACOysB,SAASzsB,CAATysB,CAAP;EAQF,eAAA,CAAoBzsB,CAApB;EACE,SAAO0vG,IAAQC,UAAU3vG,CAAV2vG,CAARD,EAA0BvpG,QAA1BupG,GAAqC,CAArCA,CAAP;EAQF,eAAA,CAAoB1vG,CAApB;EACE,SAAO4vG,IAAQD,UAAU3vG,CAAV2vG,CAARC,EAA0BzpG,QAA1BypG,GAAqC,CAArCA,CAAP;EAQF,iBAAA,CA8CsB5iG,CA9CtB,EA8CqC3D,CA9CrC;EA+CE,MAAIA,IAAM2D,CAAV,EACE,MAAM,IAAIw9F,UAAJ,CAAe,UAAQnhG,CAAR,gBAAA,GAAyB2D,CAAzB,oBAAf,CAAN,CAGF,KADA,IAAMq4C,MAAN,EACSxkD,IAAImM,CAAb,EAAoBnM,IAAIwI,CAAxB,IAA+BxI,CAA/B,EACEwkD,EAAIvjD,IAAJujD,CAASxkD,CAATwkD,EAEF,OAAOA,CAAP;mBCtEmB7kD,GAAWuD;EAC9B,SAAOvD,EAAEoL,MAAFpL,CAASuD,CAATvD,CAAP;EASF,sBAAA,CAA2BA,CAA3B,EAAsCoD,CAAtC;qBAAsCA,KAAQ,GAC5C,IAAM0jB,IAAW9mB,EAAEuB,KAAFvB,CAAQmD,KAARnD,EAAjB,CAKA,OAJIoD,IAAO,CAAPA,KACFA,IAAO0jB,EAASpnB,MAATonB,GAAkB1jB,CAAlB0jB,GAAyB,CAD9B1jB,GAGJ0jB,EAAS21B,MAAT31B,CAAgB1jB,CAAhB0jB,EAAsB,CAAtBA,EAAyB,CAAzBA,CAHI1jB,EAIGpD,EAAEyK,OAAFzK,CAAU8mB,CAAV9mB,CAAP;EAcF,gBAAA,CAAuBA,CAAvB,EAAkCiC,CAAlC;EACE,SAAO+b,KAAK;EACV,QAAuB,MAAnBhe,EAAEuB,KAAFvB,CAAQN,MAAZ,EACE,MAAM,IAAIsqG,UAAJ,CACF,2DACQhqG,EAAEuB,KAAFvB,CAAQN,MADhB,aADE,CAAN,CAKF,OAAOoM,OADGR,aAAWtL,CAAXsL,EAAc,CAAdA,CACHQ,GAAS,GAAG7J,GAAG,EAAf6J,CAAP;KAPKkS,CAAP;EAgBF,mBAAA,CAAwBhe,CAAxB;EACE,MAAMkD,KAAYmsG,UAAqBrvG,EAAEuB,KAAvB8tG,EAAlB,CACA,OAAOrvG,EAAEyK,OAAFzK,CAAUkD,CAAVlD,CAAP;EAWF,sBAAA,CAA6BA,CAA7B;EACE,MAAIA,EAAE0E,IAAF1E,IAAU,CAAd,EACE,MAAM,IAAIgqG,UAAJ,CACF,0DAAwDhqG,EAAE0E,IAA1D,MADE,CAAN,CAGF,IAAMxB,KAAYlD,EAAEuB,KAAFvB,CAAQ,CAARA,GAAYqvG,UAAqBrvG,EAAEuB,KAAvB8tG,EAA8B,CAA9BA,EAA9B,CACA,OAAOrvG,EAAEyK,OAAFzK,CAAUkD,CAAVlD,CAAP;EAWF,6BAAA,CACIR,CADJ,EACmBiF,CADnB,EACkCjD,CADlC;EAEE,SAAOwc,KAAK;EACV,YAAQxe,EAAMkF,IAAd,
GACE,KAAK,CAAL;EACE,eAAO4qG,QAAY9vG,CAAZ8vG,EAA+B7qG,CAA/B6qG,EAAsC9tG,CAAtC8tG,CAAP,CACF,KAAK,CAAL;EACE,eAAOC,QACH/vG,CADG+vG,GACiB9qG,GAAO,EADxB8qG,GAC6B/tG,GAAMhC,EAAM+B,KAAN/B,CAAY,CAAZA,EADnC+vG,CAAP,CAEF,KAAK,CAAL;EACE,eAAOC,QACHhwG,CADGgwG,GACiB/qG,GAAO,GAAG,EAD3B+qG,GAEFhuG,GAAMhC,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBA,EAAM+B,KAAN/B,CAAY,CAAZA,EAFpBgwG,CAAP,CAGF,KAAK,CAAL;EACE,eAAOC,QACHjwG,CADGiwG,GACiBhrG,GAAO,GAAG,GAAG,EAD9BgrG,GAEFjuG,GAAMhC,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBA,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBA,EAAM+B,KAAN/B,CAAY,CAAZA,EAFpCiwG,CAAP,CAGF;EACE,cAAM,IAAIzF,UAAJ,CACF,gEACGxqG,EAAMkF,IAFP,CAAN,CAfJ;KADKsZ,CAAP;EA+BF,4BAAA,CACIxe,CADJ,EACmBiF,CADnB,EACkCjD,CADlC;EAEE,SAAOwc,KAAK;EACV,YAAQxe,EAAMkF,IAAd,GACE,KAAK,CAAL;EACE,eAAO4qG,QAAY9vG,CAAZ8vG,EAA+B7qG,CAA/B6qG,EAAsC9tG,CAAtC8tG,CAAP,CACF,KAAK,CAAL;EACE,eAAOC,QACH/vG,CADG+vG,GACiB,GAAG9qG,EADpB8qG,GAC6B/vG,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBgC,EAD7C+tG,CAAP,CAEF,KAAK,CAAL;EACE,eAAOC,QACHhwG,CADGgwG,GACiB,GAAG,GAAG/qG,EADvB+qG,GAEFhwG,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBA,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBgC,EAF9BguG,CAAP,CAGF,KAAK,CAAL;EACE,eAAOC,QACHjwG,CADGiwG,GACiB,GAAG,GAAG,GAAGhrG,EAD1BgrG,GAEFjwG,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBA,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBA,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBgC,EAF9CiuG,CAAP,CAGF;EACE,cAAM,IAAIzF,UAAJ,CACF,+DACGxqG,EAAMkF,IAFP,CAAN,CAfJ;KADKsZ,CAAP;EAgCF,wBAAA,CACIxe,CADJ,EACmBiF,CADnB,EACkCjD,CADlC,EACgD4B,CADhD;EAEE,SAAO4a,KAAK;EACV,YAAQxe,EAAMkF,IAAd,GACE,KAAK,CAAL;EACE,eAAO4qG,QAAY9vG,CAAZ8vG,EAA+B7qG,CAA/B6qG,EAAsC9tG,CAAtC8tG,CAAP,CACF,KAAK,CAAL;EACE,gBAAQlsG,CAAR,GACE,KAAK,CAAL;EACE,mBAAOssG,oBAAoBlwG,CAApBkwG,EAA2BjrG,CAA3BirG,EAAkCluG,CAAlCkuG,CAAP,CACF,KAAK,CAAL;EACE,mBAAOC,mBAAmBnwG,CAAnBmwG,EAA0BlrG,CAA1BkrG,EAAiCnuG,CAAjCmuG,CAAP,CACF;EACE,kBAAM,IAAI3F,UAAJ,CACF,mDACG5mG,CAFD,CAAN,CANJ,CAUF,KAAK,CAAL;EACE,gBAAQA,CAAR,GACE,KAAK,CAAL;EACE,mBAAOssG,oBAAoBlwG,CAApBkwG,EAA2BjrG,CAA3BirG,EAAkCluG,CAAlCkuG,CAAP,CACF,KAAK,CAAL;EACE,mBAAOF,QACHhwG,CADGgwG,GACiB,GAAG/qG,GAAO,EAD3B+qG,GAEFhwG,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBgC,GAAMhC,EAAM+B,KAAN/B,CAAY,CAAZA,EAFpBgwG,CAAP,CAGF,KAAK,CAAL;EACE,mBAAOG,mBAAmBnwG,CAAnBmwG,EAA0BlrG,CAA1BkrG,EAAiCnuG,CAAjCmuG,CAAP,CACF;EACE,kBAAM,IAAI3F,UAAJ,CACF,mDACG5mG,CAFD,CAAN,CAVJ,CAcF,KAAK,CAAL;EACE,gBAAQA,CAAR,GACE,KAAK,CAAL;EACE,mBAAOssG,oBAAoBlwG,CAApBkwG,EAA2BjrG,CAA3BirG,EAAkCluG,CAAlCkuG,CAAP,CACF,KAAK,CAAL;EACE,mBAAOD,QACHjwG,CADGiwG,GACiB,GAAGhrG,GAAO,GAAG,EAD9BgrG,GAEFjwG,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBgC,GAAMhC,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBA,EAAM+B,KAAN/B,CAAY,CAAZA,EAFpCiwG,CAAP,CAGF,KAAK,CAAL;EACE,mBAAOA,QACHjwG,CADGiwG,GACiB,GAAG,GAAGhrG,GAAO,EAD9BgrG,GAEFjwG,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBA,EAAM+B,KAAN/B,CAAY,CAAZA,GAAgBgC,GAAMhC,EAAM+B,KAAN/B,CAAY,CAAZA,EAFpCiwG,CAAP,CAGF,KAAK,CAAL;EACE,mBAAOE,mBAAmBnwG,CAAnBmwG,EAA0BlrG,CAA1BkrG,EAAiCnuG,CAAjCmuG,CAAP,CACF;EACE,kBAAM,IAAI3F,UAAJ,CACF,mDACG5mG,CAFD,CAAN,CAdJ,CAkBF;EACE,cAAM,IAAI4mG,UAAJ,CACF,+DACGxqG,EAAMkF,IAFP,CAAN,CAjDJ;KADKsZ,CAAP;EA+DF,qBAAA,CAA4BoP,CAA5B,EAA+ChqB,CAA/C;EACE,MAAIsB,CAAJ,CAeA,wBAhB6CtB,KAAQ,IAEjDA,IAAO,CAAPA,KAGAA,IADW,OADbsB,IAAO0oB,EAAQ,CAARA,EAAW1oB,IACL,IACJA,CADI,GAGJ,CALPtB,GAQAA,MAASgqB,EAAQ,CAARA,EAAW1oB,IAApBtB,KAGFA,KAAQ,CAHNA,GAMGwsG,OAAWxiF,CAAXwiF,EAAoBxsG,CAApBwsG,CAAP;EAUF,8BAAA,CAAqC5wG,CAArC,EAAgDsB,CAAhD;EACE,UAAQtB,EAAE0F,IAAV,GACE,KAAK,CAAL;EACE,aAAOmrG,UAAc7wG,GAAesB,EAA7BuvG,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,UAAc9wG,GAAesB,EAA7BwvG,EAA6C,CAA7CA,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,UAAc/wG,GAAesB,EAA7ByvG,EAA6C,CAA7CA,CAAP,CACF,KAAK,CAAL;EACE,aAAOC,UAAchxG,GAAesB,EAA7B0vG,EAA6C
,CAA7CA,CAAP,CACF;EACE,YAAM,IAAIhG,UAAJ,CACF,iEACAhrG,EAAE0F,IAFA,CAAN,CAVJ;EAuBF,gBAAA,CAAqB1E,CAArB,EAAgCiC,CAAhC;EAIE,MAHKd,MAAMC,OAAND,CAAcc,CAAdd,MACHc,KAAKA,EADFd,GAGDnB,EAAE0E,IAAF1E,KAAWiC,EAAEvC,MAAjB,EACE,MAAM,IAAIsqG,UAAJ,CACF,4BAA0B/nG,EAAEvC,MAA5B,2DAAA,GACwCM,EAAE0E,IAD1C,MADE,CAAN,CAIF,OAAOurG,KAASjwG,CAATiwG,EAAYhuG,CAAZguG,CAAP;EAgBF,wBAAA,CACI1uG,CADJ,EACkByL,CADlB,EAC8BkjG,CAD9B,EAC4C3sG,CAD5C,EAEIuqB,CAFJ;EAGE,0BAFgB9gB,yBAAYkjG,QAErBC,aAAiB5uG,CAAjB4uG,EAAwBnjG,CAAxBmjG,EAA8BD,CAA9BC,EAAsC5sG,CAAtC4sG,EAA6CriF,CAA7CqiF,CAAP;EAmBF,eAAA,CAAoBnwG,CAApB,EAA+B0W,CAA/B;EACE,MAAK1W,EAAE0E,IAAF1E,GAAS,CAATA,IAAgB0W,EAAEhS,IAAFgS,GAAS,CAA9B,EACE,MAAM,IAAIuzF,mBAAJ,CACF,gEACsBjqG,EAAEuB,KADxB,oBAAA,GAC+CmV,EAAEnV,KAF/C,CAAN,CAIF,IAAImV,EAAEhS,IAAFgS,IAAU,CAAVA,KACI05F,IAAWpwG,EAAEuB,KAAFvB,CAAQmD,KAARnD,EAAe,CAAfA,EAAkB,CAAlBA,QACXqwG,IAAiB35F,EAAEnV,KAAFmV,CAAQvT,KAARuT,EAAe,CAAfA,EAAkB,CAAlBA,EAFzB,EAII,MAAM,IAAIuzF,mBAAJ,CACF,kGAEIjqG,EAAEuB,KAFN,qBAAA,GAGcmV,EAAEnV,KAJd,CAAN,CAQJ,IAAgB,MAAXvB,EAAE0E,IAAS,IAAkB,MAAXgS,EAAEhS,IAAzB,EACE,OAAO4rG,OAAWtwG,CAAXswG,EAA0B55F,CAA1B45F,CAAP,CAGA,IAAMC,IAAavwG,EAAEuB,KAAFvB,CAAQmD,KAARnD,EAAnB;EAAA,MACMowG,IAAWG,EAAW1yF,GAAX0yF,EADjB,CAEAvwG,IAAIA,EAAEyK,OAAFzK,GAAY,GAAGowG,EAAfpwG,CAAJA,CAIA,IAAMwwG,IAAS95F,EAAEnV,KAAFmV,CAAQvT,KAARuT,EAAf;EAAA,MACM+5F,IAAWD,EAAO3yF,GAAP2yF,EADjB;EAAA,MAEMH,IAAiBG,EAAO3yF,GAAP2yF,EAFvB;EAAA,MAGME,IAAiBF,QAAAA,EAAQC,EAARD,CAHvB;EAAA,MAMMzhG,IAAO5N,MAAMoH,IAANpH,GAAYzB,QAAQgX,EAAEhS,MAAtBvD,EAA6B,UAACg4C,CAAD,EAAI94C,CAAJ;EACxC,WAAU,MAANA,CAAM,GACDqW,EAAEhS,IAAFgS,GAAS,CADR,GAECrW,KAAKqW,EAAEhS,IAAFgS,GAAS,CAAdrW,GACFA,IAAI,CADFA,GAGJA,CALP;KADWc,CANb,CAcAuV,IAAIA,EAAE1H,SAAF0H,CAAY3H,CAAZ2H,EAAkBjM,OAAlBiM,EAA2B25F,IAAiB,EAA5C35F,CAAJA,CAGA,IAAMvD,IAAkBo9F,QAAAA,CAAeG,CAAfH,CAAxB,CACA,OAAOD,OAAWtwG,CAAXswG,EAA0B55F,CAA1B45F,EAAyC7lG,OAAzC6lG,CAAiDn9F,CAAjDm9F,CAAP;EAcJ,kBAAA,CA2CIK,CA3CJ,EA2CuB5kG,CA3CvB,EA2CmD3I,CA3CnD;EA4CE,SAAO4a,KAAK;EAMV,WAJEjS,IADE5K,MAAMC,OAAND,CAAc4K,CAAd5K,IACQ8qB,SAASlgB,CAATkgB,EAAkB,OAAlBA,CADR9qB,GAGQ4K,EAAQw9D,KAARx9D,EAFVA,EAIK6kG,OAAWD,CAAXC,EAAsB7kG,CAAtB6kG,EAA+BxtG,CAA/BwtG,CAAP;KANK5yF,CAAP;EAeF,kBAAA,CAAuBhe,CAAvB;EACE,SAAOmsG,UAAcnsG,CAAdmsG,EAAiBnsG,CAAjBmsG,CAAP;EAeF,iBAAA,CAwBInsG,CAxBJ,EAwBe6T,CAxBf,EAwB6BhB,CAxB7B;EAyBE,SAAOmL,KAAK;EAMV,QALkB,QAAdnL,CAAc,KAChBA,IAAag+F,iBADG,GAGlBC,gBAAgBj+F,CAAhBi+F,CAHkB,EAKA,MAAdj9F,EAAKnP,IAAS,IAAKmP,EAAKnP,IAALmP,KAAc7T,EAAE0E,IAAvC,EACE,MAAM,IAAIslG,UAAJ,CACF,iCAAiCn2F,EAAKnP,IAAtC,GACA,2BADA,GAC8B1E,EAAE0E,IAF9B,CAAN,CAIF,IAEIgS,CAFJ;EAAA,QAAMq6F,IAAYl9F,EAAKtS,KAAvB,CAGA,IAAe,MAAXvB,EAAE0E,IAAN,EACqB,oBAAfmO,CAAe,GAEf6D,IADuB,MAArBq6F,EAAUrxG,MAAW,GACnBM,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAGk9F,EAAU,CAAVA,GAAc,GAAG,GAAG,EAArCl9F,CAAN7T,CADmB,GAGnBA,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EACL,GAAGk9F,EAAU,CAAVA,GAAcA,EAAU,CAAVA,GAAcA,EAAU,CAAVA,GAAcA,EAAU,CAAVA,EADxCl9F,CAAN7T,CAJW,GAOO,mBAAf6S,CAAe,KAEtB6D,IADuB,MAArBq6F,EAAUrxG,MAAW,GACnBM,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAG,GAAG,GAAG,GAAGk9F,EAAU,CAAVA,EAA1Bl9F,CAAN7T,CADmB,GAGnBA,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAGpH,OAAOskG,EAAxBl9F,CAAN7T,CAJkB,CAPP,CADrB,KAeO,IAAe,MAAXA,EAAE0E,IAAN,EACc,oBAAfmO,CAAe,GAEf6D,IADuB,MAArBq6F,EAAUrxG,MAAW,GACnBM,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAGk9F,EAAU,CAAVA,GAAc,GAAG,EAAlCl9F,CAAN7T,CADmB,GAGnBA,EAAE2N,GAAF3N,CACA6T,EAAKpJ,OAALoJ,EAAc,GAAGk9F,EAAU,CAAVA,GAAcA,EAAU,CAAVA,GAAcA,EAAU,CAAVA,EAA7Cl9F,CADA7T,CAJW,GAOO,mBAAf6S,CAAe,KAEtB6D,IADuB,MAArBq6F,EAAUrxG,MAAW,GACnBM,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ
,EAAc,GAAG,GAAG,GAAGk9F,EAAU,CAAVA,EAAvBl9F,CAAN7T,CADmB,GAGnBA,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAGpH,OAAOskG,EAAxBl9F,CAAN7T,CAJkB,CAPP,CADd,KAeA,IAAe,MAAXA,EAAE0E,IAAN,EACc,oBAAfmO,CAAe,GAEf6D,IADuB,MAArBq6F,EAAUrxG,MAAW,GACnBM,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAGk9F,EAAU,CAAVA,GAAc,EAA/Bl9F,CAAN7T,CADmB,GAGnBA,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAGk9F,EAAU,CAAVA,GAAcA,EAAU,CAAVA,EAA/Bl9F,CAAN7T,CAJW,GAMO,mBAAf6S,CAAe,KAEtB6D,IADuB,MAArBq6F,EAAUrxG,MAAW,GACnBM,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAG,GAAGk9F,EAAU,CAAVA,EAApBl9F,CAAN7T,CADmB,GAGnBA,EAAE2N,GAAF3N,CAAM6T,EAAKpJ,OAALoJ,EAAc,GAAGpH,OAAOskG,EAAxBl9F,CAAN7T,CAJkB,CANP,CADd,KAcA;EAAA,YAAIA,EAAE0E,IAAF1E,GAAS,EAAb,EAGL,MAAM,IAAIgqG,UAAJ,CAAe,wCAAsChqG,EAAE0E,IAAvD,CAAN,CAFAgS,IAAI1W,EAAE2N,GAAF3N,CAAM6T,CAAN7T,CAAJ0W;EAIF,YAAOA,CAAP;KA/DKsH,CAAP;EAyEF,eAAA,CAAoBhe,CAApB,EAA+BhD,CAA/B;EAEE,uBAF6BA,QAEf,MAAVA,CAAJ,EACE,MAAM,IAAIitG,mBAAJ,CACF,4CAA0CjtG,CAA1C,8BADE,CAAN,CAIF,OAAOg0G,IAAQhxG,CAARgxG,CAAP;EAWF,kBAAA,CAAyBhxG,CAAzB;EACE,SAAOge,KAAK;EAAM,WAAA8uF,IAAQ9sG,CAAR8sG,EAAWC,IAAQC,UAAU,CAAVA,CAARD,EAAsBkE,IAAQjxG,CAARixG,CAAtBlE,CAAXD,CAAA;KAAX9uF,CAAP;EAaF,iBAAA,CACIhe,CADJ,EACekxG,CADf,EAC8BC,CAD9B,EACqDrjF,CADrD;EAEE,SAAO9P,KAAK;EAGV,QAAkB,QAAdmzF,CAAc,KAASC,KAAKnwG,WAALmwG,CAAiBpxG,EAAEuB,KAAnB6vG,EAA0BD,CAA1BC,CAA3B,EACE,MAAM,IAAInH,mBAAJ,CACF,qDACAzrE,KAAKE,SAALF,CAAe2yE,CAAf3yE,CAFE,CAAN,CAIF,IAAY,QAAR1Q,CAAJ,EACE,MAAM,IAAIm8E,mBAAJ,CAAwB,0CAAxB,CAAN,CAEF,IAAI9hD,IAAakpD,KAAStE,IACtBuE,IAAQJ,CAARI,CADsBvE,EACIwE,cAAkBvxG,EAAEuB,KAApBgwG,EAA2B,CAA3BA,EAA8B,CAA9BA,EAAiC,SAAjCA,CADJxE,CAATsE,CAAjB,CAMA,OAHAlpD,IAAa0kD,IACTC,IAAQE,UAAU,CAAVA,CAARF,EAAsB0E,IAAQxE,UAAU,CAAVA,CAARwE,EAAsBN,CAAtBM,CAAtB1E,CADSD,EAET1kD,CAFS0kD,CAAb1kD,EAGO0kD,IAAQ7sG,CAAR6sG,EAAW1kD,CAAX0kD,CAAP;KAjBK7uF,CAAP;EA8BF,qBAAA,CAA4Bhe,CAA5B;EACE,SAAOge,KAAK;EACV,QAAMtH,IAAIq2F,IAAQC,UAAU,EAAVA,CAARD,EAAwBF,IAAQG,UAAU,EAAVA,CAARH,EAAwB7sG,CAAxB6sG,CAAxBE,CAAV,CACA,OAAOH,YAAgBl2F,CAAhBk2F,EAAmB,CAAnBA,EAAsB,CAAtBA,CAAP;KAFK5uF,CAAP;EAkBF,sBAAA,CAAgChe,CAAhC,EAA4CyxG,CAA5C,EAA0DC,CAA1D;EACE,0BADwDA,SACjDA,IAAW1xG,GAAX0xG,GAAiBD,GAAxB;EClqBK,KAAME,yBAAyB,SAAS,UAAU,SAAlD,CACP,qBAAA,CAA6BtoG,CAA7B;EACEilG,4BAA0BqD,qBAA1BrD,EAAiD,SAAjDA,EAA4DjlG,CAA5DilG;EAKF,KAAasD,6BAA6B,UAAU,UAApD,CACA,0BAAA,CAAkCvoG,CAAlC;EACEilG,4BAA0BsD,yBAA1BtD,EAAqD,cAArDA,EAAqEjlG,CAArEilG;EASF;EAAA,YAAA;;EAeA,UAf0Cn4F,aAAAA,GAAAA,GACjC07F,WAAAA,4BAAAA,GAAP;EACE,YAAO,CAAP;KAFsC17F,EAYxC07F,WAAAA,UAAAA,GAAA;EACE,aAAA;KAbsC17F,GAe1C;IAf0Ck2F,cAAclK,aAAxD;EAAA;EAoBA,YAAA;;EAMA,UAN2BhsF,aAAAA,GAAAA,GAGzB27F,WAAAA,MAAAA,GAAA,UAAMvwG,CAAN,EAAoBgC,CAApB;EACE,WAAOsoB,MAAMtqB,CAANsqB,EAAatoB,CAAbsoB,CAAP;KAJuB1V,EAClB27F,WAAAA,GAAY,OADM37F,GAM3B;IAN2B07F,YApB3B,eA2Bc5E,cAAc6E,OAK5B;EAAA,YAAA;;EAMA,UAN0B37F,aAAAA,GAAAA,GAGxB47F,WAAAA,MAAAA,GAAA,UAAMxwG,CAAN,EAAoBgC,CAApB;EACE,WAAO4a,OAAK5c,CAAL4c,EAAY5a,CAAZ4a,CAAP;KAJsBhI,EACjB47F,WAAAA,GAAY,MADK57F,GAM1B;IAN0B07F,YAA1B,eAOc5E,cAAc8E,MAU5B;EAGE,YAAA,CAAYzP,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,CAEE,IAAsB,mBAAXusF,CAAX,EACE,MAAM,IAAI0H,UAAJ,CACF,sDAAoD1H,CADlD,CAAN,CAGF,SAAqBqH,MAAjBrH,EAAOj5F,KAAX,EACE,MAAM,IAAI2gG,UAAJ,CAAe,wCAAsC1H,CAArD,CAAN,QAEFr8F,EAAKoD,KAALpD,GAAaq8F,EAAOj5F,KAApBpD;EAYJ,UAxB8BkQ,aAAAA,GAAAA,GAe5B67F,WAAAA,MAAAA,GAAA,UAAMzwG,CAAN,EAAoBgC,CAApB;EAAA,gBAAA,CACE,OAAOya,KAAK;EAAM,aAAA9P,IAAIihB,OAAOlpB,EAAKoD,KAAZ8lB,CAAJjhB,EAAwBiQ,OAAK5c,CAAL4c,EAAY5a,CAAZ4a,CAAxBjQ,CAAA;OAAX8P,CAAP;KAhB0B7H,EAmB5B67F,WAAAA,UAAAA,GAAA;EACE,aACE3oG,OAAO/D,KAAK+D,OADd;KApB0B8M,EACrB67F,WAAAA,GAAY,UADS77F,GAwB9B;IAxB
8B07F,YAA9B,eAyBc5E,cAAc+E,UAkB5B;EAQE,YAAA,CAAY1P,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,QANS9P,gBAAAA,IAAkB,GAAlBA,EACAA,gBAAAA,GAAiB,GADjBA,EAQPA,EAAKqhD,MAALrhD,GAAcq8F,EAAOh7C,MAAPg7C,IAAiBr8F,EAAKgsG,cAR7BhsG,EASPA,EAAKshD,MAALthD,GAAcq8F,EAAO/6C,MAAP+6C,IAAiBr8F,EAAKisG,cAT7BjsG,EAUPA,EAAK6nB,IAAL7nB,GAAYq8F,EAAOx0E,IAVZ7nB;EAoBX,UAtBmCkQ,aAAAA,GAAAA,GAejCg8F,WAAAA,MAAAA,GAAA,UAAM5wG,CAAN,EAAoBgC,CAApB;EACE,WAAO2nD,cAAc3pD,CAAd2pD,EAAqB5lD,KAAKgiD,MAA1B4D,EAAkC5lD,KAAKiiD,MAAvC2D,EAA+C3nD,CAA/C2nD,CAAP;KAhB+B/0C,EAmBjCg8F,WAAAA,UAAAA,GAAA;EACE,aAAQ7qD,QAAQhiD,KAAKgiD,QAAQC,QAAQjiD,KAAKiiD,QAAQz5B,MAAMxoB,KAAKwoB,MAA7D;KApB+B3X,EAC1Bg8F,WAAAA,GAAY,eADch8F,GAsBnC;IAtBmC07F,YAAnC,eAuBc5E,cAAckF,eAe5B;EAQE,YAAA,CAAY7P,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,QANS9P,cAAAA,GAAe,CAAfA,EACAA,gBAAAA,GAAiB,GADjBA,EAQPA,EAAK+G,IAAL/G,GAAYq8F,EAAOt1F,IAAPs1F,IAAer8F,EAAKmsG,YARzBnsG,EASPA,EAAKiqG,MAALjqG,GAAcq8F,EAAO4N,MAAP5N,IAAiBr8F,EAAKosG,cAT7BpsG,EAUPA,EAAK6nB,IAAL7nB,GAAYq8F,EAAOx0E,IAVZ7nB;EA0BX,UA5BkCkQ,aAAAA,GAAAA,GAehCm8F,WAAAA,MAAAA,GAAA,UAAM/wG,CAAN,EAAoBgC,CAApB;EAEE,QAAc,eADdA,IAAQA,KAAS,SACH,KAAuB,YAAVA,CAA3B,EACE,MAAM,IAAI0mG,mBAAJ,CACF,yCAAuC1mG,CAAvC,MADE,CAAN,CAIF,OAAOgvG,eAAehxG,CAAfgxG,EAAsBjtG,KAAK0H,IAA3BulG,EAAiCjtG,KAAK4qG,MAAtCqC,EAA8ChvG,CAA9CgvG,EAAqDjtG,KAAKwoB,IAA1DykF,CAAP;KAtB8Bp8F,EAyBhCm8F,WAAAA,UAAAA,GAAA;EACE,aAAQtlG,MAAM1H,KAAK0H,MAAMkjG,QAAQ5qG,KAAK4qG,QAAQpiF,MAAMxoB,KAAKwoB,MAAzD;KA1B8B3X,EACzBm8F,WAAAA,GAAY,cADan8F,GA4BlC;IA5BkC07F,YAAlC,eA6Bc5E,cAAcqF,cAmB5B;EASE,YAAA,CAAYhQ,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,QANS9P,cAAAA,GAAe,CAAfA,EACAA,gBAAAA,GAAiB,GADjBA,EAQPA,EAAK+G,IAAL/G,GAAYq8F,EAAOt1F,IAAPs1F,IAAer8F,EAAKmsG,YARzBnsG,EASPA,EAAKiqG,MAALjqG,GAAcq8F,EAAO4N,MAAP5N,IAAiBr8F,EAAKosG,cAT7BpsG,EAUPA,EAAK6nB,IAAL7nB,GAAYq8F,EAAOx0E,IAVZ7nB;EAyBX,UA5BqCkQ,aAAAA,GAAAA,GAgBnCq8F,WAAAA,MAAAA,GAAA,UAAMjxG,CAAN,EAAoBgC,CAApB;EAEE,QAAc,eADdA,IAAQA,KAAS,SACH,KAAuB,YAAVA,CAA3B,EACE,MAAM,IAAI0mG,mBAAJ,CACF,4CAA0C1mG,CAA1C,MADE,CAAN,CAGF,OAAOkoD,gBAAgBlqD,CAAhBkqD,EAAuBnmD,KAAK0H,IAA5By+C,EAAkCnmD,KAAK4qG,MAAvCzkD,EAA+CloD,CAA/CkoD,EAAsDnmD,KAAKwoB,IAA3D29B,CAAP;KAtBiCt1C,EAyBnCq8F,WAAAA,UAAAA,GAAA;EACE,aAAQxlG,MAAM1H,KAAK0H,MAAMkjG,QAAQ5qG,KAAK4qG,QAAQpiF,MAAMxoB,KAAKwoB,MAAzD;KA1BiC3X,EAC5Bq8F,WAAAA,GAAY,iBADgBr8F,GA4BrC;IA5BqC07F,YAArC,eA6Bc5E,cAAcuF,iBAa5B;EAGE,YAAA,CAAYlQ,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,QAEE9P,EAAKwsG,IAALxsG,GAA2B,QAAfq8F,EAAOmQ,IAAQ,GAAOtjF,OAAOmzE,EAAOmQ,IAAdtjF,CAAP,GAA6B69E,UAAU,CAAVA,CAAxD/mG;EAkBJ,UAvB8BkQ,aAAAA,GAAAA,GAQ5Bu8F,WAAAA,MAAAA,GAAA,UAAMnxG,CAAN,EAAoBgC,CAApB;EAAA,gBAAA,CACE,OAAOya,KAAK;EACV,UAAqB,MAAjBzc,EAAM7B,MAAW,IAAK6B,EAAM,CAANA,MAAaA,EAAM,CAANA,CAAvC,EACE,MAAM,IAAIyoG,UAAJ,CACF,sEADE,CAAN,CAIA,OAAO97F,IAAIjI,EAAKwsG,IAATvkG,EAAe87C,IAAIzoD,EAAM,CAANA,CAAJyoD,CAAf97C,CAAP;OANG8P,CAAP;KAT0B7H,EAoB5Bu8F,WAAAA,UAAAA,GAAA;EACE,aAAQD,MAAMntG,KAAKmtG,IAALntG,CAAU2W,GAAV3W,IAAd;KArB0B6Q,EACrBu8F,WAAAA,GAAY,UADSv8F,GAuB9B;IAvB8B07F,YAA9B,CAkCA,oBAAA,CACItwG,CADJ,EACkBsR,CADlB;EAEE,MAAI8/F,CAAJ,EACIC,CADJ,CAGA,qBAJgB//F,qBAGhBi+F,gBAAgBj+F,CAAhBi+F,GACqB,MAAjBvvG,EAAM7B,MAAV,EACEizG,IAAQpxG,EAAM,CAANA,CAARoxG,EACAC,IAASrxG,EAAM,CAANA,CADToxG,CADF,KAGO,KAAyC,OAApC,GAAG,GAAG,GAAG/rF,QAAQrlB,EAAM7B,OAA5B;EACL,QAAmB,oBAAfmT,CAAJ,EAAoC;EAClC,UAAMggG,IAAqBC,UAAUvxG,CAAVuxG,EAAiB,CAAjBA,CAA3B,CACAH,IAAQpxG,EAAM,CAANA,IAAWsxG,CAAnBF,EACAC,IAASrxG,EAAM,CAANA,IAAWsxG,CADpBF;OAFF,MAIO,IAAmB,mBAAf9/F,CAAJ,EAAmC;EAClCggG,UAAqBC,UAAUvxG,CAAVuxG,EAAiB,CAAjBA,EAAoBvxG,EAAM7B,MAAN6B,GAAe,CAAnCuxG,CAArBD,CACNF,IAAQpxG,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,IAA
0BsxG,CAAlCF,EACAC,IAASrxG,EAAMA,EAAM7B,MAAN6B,GAAe,CAArBA,IAA0BsxG,CADnCF;;KAPG,MAUA;EACL,QAAM3vG,IAAY8vG,UAAUvxG,CAAVuxG,CAAlB,CACAH,IAAQ9yG,KAAKmC,IAALnC,CAAUmD,CAAVnD,CAAR8yG,EACAC,IAAS/yG,KAAKmC,IAALnC,CAAUmD,CAAVnD,CADT8yG;EAIF,WAAQA,GAAOC,EAAf;iBAlCY3F,cAAcyF,UA+D5B;EAWE,YAAA,CAAYpQ,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,CAEE,IAAIusF,EAAOn1F,KAAPm1F,GAAe,CAAnB,EACE,MAAM,IAAI0H,UAAJ,CACF,0CAAwC1H,EAAOn1F,KAD7C,CAAN,QAGFlH,EAAKkH,KAALlH,GAA6B,QAAhBq8F,EAAOn1F,KAAS,GAAO,CAAP,GAAam1F,EAAOn1F,KAAjDlH,EACAA,EAAK8sG,IAAL9sG,GAAYq8F,EAAOyQ,IADnB9sG,EAEA+sG,aAAa/sG,EAAK8sG,IAAlBC,CAFA/sG,EAGAA,EAAKgtG,YAALhtG,GAAoBq8F,EAAO2Q,YAH3BhtG,EAIAitG,kBAAkBjtG,EAAKgtG,YAAvBC,CAJAjtG,EAKAA,EAAK6nB,IAAL7nB,GAAYq8F,EAAOx0E,IALnB7nB;EA2CJ,UA5DqCkQ,aAAAA,GAAAA,GAyBnCg9F,WAAAA,MAAAA,GAAA,UAAM5xG,CAAN,EAAoBgC,CAApB;EACE,QAAM6vG,IAAOC,YAAY9xG,CAAZ8xG,CAAb;EAAA,QACMV,IAAQS,EAAK,CAALA,CADd;EAAA,QAEMR,IAASQ,EAAK,CAALA,CAFf;EAAA,QAGIjmG,IAAQ7H,KAAK6H,KAHjB,CAYA,IARkB,YAAd7H,KAAKytG,IAAS,GAChB5lG,KAAStN,KAAKI,GAALJ,CAAS,CAATA,EAAY8yG,CAAZ9yG,CADO,GAEO,aAAdyF,KAAKytG,IAAS,GACvB5lG,KAAStN,KAAKI,GAALJ,CAAS,CAATA,EAAY+yG,CAAZ/yG,CADc,GAGvBsN,KAAStN,KAAKI,GAALJ,CAAS,CAATA,GAAa8yG,IAAQC,KAAU,CAA/B/yG,CALO,EAQQ,aAAtByF,KAAK2tG,YAAT,EAAoC;EAClC,UAAM/C,IAASrwG,KAAKmC,IAALnC,CAAUsN,CAAVtN,CAAf,CAEA,IAAc,eADd0D,IAAQA,KAAS,SACH,KAAuB,YAAVA,CAA3B,EACE,MAAM,IAAI0mG,mBAAJ,CACC3kG,KAAKslG,YAALtlG,+BAAAA,GAA8C/B,CAA9C+B,MADD,CAAN,CAGF,OAAOmmD,gBAAgBlqD,CAAhBkqD,EAAuB,CAAvBA,EAA0BykD,CAA1BzkD,EAAkCloD,CAAlCkoD,EAAyCnmD,KAAKwoB,IAA9C29B,CAAP;EAEA,SAAMhI,IAAQ5jD,KAAKmC,IAALnC,CAAU,IAAIsN,CAAdtN,CAAd,CACA,OAAOqrD,cAAc3pD,CAAd2pD,GAAsBzH,CAAtByH,EAA6BzH,CAA7ByH,EAAoC3nD,CAApC2nD,CAAP;KAhD+B/0C,EAoDnCg9F,WAAAA,UAAAA,GAAA;EACE,aACEhmG,OAAO7H,KAAK6H,OACZ4lG,MAAMztG,KAAKytG,MACXE,cAAc3tG,KAAK2tG,cACnBnlF,MAAMxoB,KAAKwoB,MAJb;KArDiC3X,EAC5Bg9F,WAAAA,GAAY,iBADgBh9F,GA4DrC;IA5DqC07F,YAArC,eA6Dc5E,cAAckG,iBAkB5B;EAUE,YAAA,CAAY7Q,CAAZ;aACEvsF,MAAAA,KAAAA,IACE5I,OAAO,GACP4lG,MAAM,UACNE,cAAc,WACdnlF,MAAgB,QAAVw0E,CAAU,GAAO,IAAP,GAAcA,EAAOx0E,MAJvC/X;EAcJ,UAzBmCI,aAAAA,GAAAA,GAmBjCm9F,WAAAA,aAAAA,GAAA;EAIE,WAAOH,gBAAgB/Q,SAAvB;KAvB+BjsF,EAC1Bm9F,WAAAA,GAAY,eADcn9F,GAyBnC;IAzBmCg9F,gBAAnC,eA0BclG,cAAcqG,eAa5B;EAUE,YAAA,CAAYhR,CAAZ;aACEvsF,MAAAA,KAAAA,IACE5I,OAAO,GACP4lG,MAAM,UACNE,cAAc,UACdnlF,MAAgB,QAAVw0E,CAAU,GAAO,IAAP,GAAcA,EAAOx0E,MAJvC/X;EAcJ,UAzBkCI,aAAAA,GAAAA,GAmBhCo9F,WAAAA,aAAAA,GAAA;EAIE,WAAOJ,gBAAgB/Q,SAAvB;KAvB8BjsF,EACzBo9F,WAAAA,GAAY,cADap9F,GAyBlC;IAzBkCg9F,gBAAlC,eA0BclG,cAAcsG,cAY5B;EAGE,YAAA,CAAYjR,CAAZ;aACEvsF,MAAAA,KAAAA,IACE5I,OAAO,GACP4lG,MAAM,SACNE,cAAc,UACdnlF,MAAgB,QAAVw0E,CAAU,GAAO,IAAP,GAAcA,EAAOx0E,MAJvC/X;EAcJ,UAlB8BI,aAAAA,GAAAA,GAY5Bq9F,WAAAA,aAAAA,GAAA;EAIE,WAAOL,gBAAgB/Q,SAAvB;KAhB0BjsF,EACrBq9F,WAAAA,GAAY,UADSr9F,GAkB9B;IAlB8Bg9F,gBAA9B,eAmBclG,cAAcuG,UAa5B;EAGE,YAAA,CAAYlR,CAAZ;aACEvsF,MAAAA,KAAAA,IACE5I,OAAO,GACP4lG,MAAM,SACNE,cAAc,UACdnlF,MAAgB,QAAVw0E,CAAU,GAAO,IAAP,GAAcA,EAAOx0E,MAJvC/X;EAcJ,UAlBiCI,aAAAA,GAAAA,GAY/Bs9F,WAAAA,aAAAA,GAAA;EAIE,WAAON,gBAAgB/Q,SAAvB;KAhB6BjsF,EACxBs9F,WAAAA,GAAY,aADYt9F,GAkBjC;IAlBiCg9F,gBAAjC,eAmBclG,cAAcwG,aAe5B;EAME,YAAA,CAAYnR,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,CAKE,IATO9P,cAAAA,GAAe,CAAfA,EAMPA,EAAKwsG,IAALxsG,GAA2B,QAAfq8F,EAAOmQ,IAAQ,GAAOxsG,EAAKytG,YAAZ,GAA2BpR,EAAOmQ,IANtDxsG,EAOPA,EAAK6nB,IAAL7nB,GAAYq8F,EAAOx0E,IAPZ7nB,EASU,QAAbA,EAAK6nB,IAAT,EACE,MAAM,IAAIm8E,mBAAJ,CACF,gEADE,CAAN;EAoCN,UAhDgC9zF,aAAAA,GAAAA,GAiB9Bw9F,WAAAA,MAAAA,GAAA,UAAMpyG,CAAN,EAAoBgC,CAApB;EAAA,gBAAA,CACE,OAAOya,KAAK;EACV,UAAqB,MAAjBzc,EAAM7B,MAAV,EACE,MAAM,IAAIuqG,mBAAJ,CACF,gEADE,CAAN,CAGE1oG,EAAM
,CAANA,IAAWA,EAAM,CAANA,CAAXA,GAAsB,GAAtBA,IACFiF,QAAQ8Z,IAAR9Z,CACI,6EACcjF,EAAM,CAANA,IAAWA,EAAM,CAANA,CADzB,qCADJiF,CADEjF,CAQJ,IAEMvC,IAAIuzG,eADNhxG,EAAM,CAANA,IAAWA,EAAM,CAANA,CAAXA,IAAuBA,EAAM,CAANA,GAAUA,EAAM,CAANA,EAAjCA,GAA6CA,CACvCgxG,EAAgC,CAAhCA,EAAmC,CAAnCA,EAAsC,SAAtCA,CAFV;EAAA,UAGIt5B,IAAI26B,WAAOp6B,WAAPo6B,CAAmB50G,CAAnB40G,CAHR,CAOA,OAHIryG,EAAM,CAANA,IAAWA,EAAM,CAANA,CAAXA,KACF03E,IAAIA,EAAEjqE,SAAFiqE,EADF13E,GAGG2M,IAAI8+F,UAAU/mG,EAAKwsG,IAAfzF,CAAJ9+F,EAA0B+qE,CAA1B/qE,CAAP;OApBK8P,CAAP;KAlB4B7H,EA0C9Bw9F,WAAAA,UAAAA,GAAA;EACE,aACElB,MAAMntG,KAAKmtG,MACX3kF,MAAMxoB,KAAKwoB,MAFb;KA3C4B3X,EACvBw9F,WAAAA,GAAY,YADWx9F,GAgDhC;IAhDgC07F,YAAhC,eAiDc5E,cAAc0G,YAS5B,IAAaE,+CAEPC,UAAY,YACZC,cAAgB,gBAChBC,eAAiB,iBACjBC,UAAY,YACZC,UAAY,YACZC,aAAe,eACfh2F,MAAQ,QACRi2F,YAAc,cACdppD,cAAgB,gBAChBE,eAAiB,iBACjBO,iBAAmB,mBACnB4oD,iBAAmB,mBACnBxoF,OAAS,SAdf,CAiBA,+BAAA,CACIy2E,CADJ,EAEIyI,CAFJ;EAGE,0BADEA,SACKkD,uBACH3L,CADG2L,EACK5B,cAAc7J,gBAAd6J,CAA+B5J,MAA/B4J,GAAwC9J,YAD7C0L,EAEHlD,CAFGkD,EAEY,aAFZA,CAAP;EAKF,8BAAA,CAAqCqG,CAArC;EAEE,SAAOtG,qBAAqBsG,CAArBtG,CAAP;EAGF,wBAAA,CAA+BvD,CAA/B;EAEE,MAA0B,mBAAfA,CAAX,EAAoC;EAClC,QAAMrI,IAAYqI,KAAcoJ,0CAAdpJ,GACdoJ,2CAA2CpJ,CAA3CoJ,CADcpJ,GAEdA,CAFJ,CAMA,OAAkB,oBAAdrI,CAAc,GACT,IAAIkR,aAAJ,EADS,GAEO,mBAAdlR,CAAc,GAChB,IAAImR,YAAJ,EADgB,GAEA,eAAdnR,CAAc,GAChB,IAAIoR,QAAJ,EADgB,GAEA,kBAAdpR,CAAc,GAChB,IAAIqR,WAAJ,EADgB,GAIhBc,yBADSnS,cAAWE,YACpBiS,CAVT;EAYK,UAAI9J,aAAsBoH,WAAtBpH,GACFA,CADEA,GAGF8J,uBAAuB9J,CAAvB8J,CAHF;;ECnpBP,SAAO,IAAIzC,KAAJ,EAAP;EAUF,kBAAA;EACE,SAAO,IAAIC,IAAJ,EAAP;EAWF,kBAAA,CAAyBzP,CAAzB;EACE,SAAO,IAAI0P,QAAJ,CAAa1P,CAAb,CAAP;EAWF,yBAAA,CAA8BA,CAA9B;EACE,SAAO,IAAI6P,aAAJ,CAAkB7P,CAAlB,CAAP;EAWF,wBAAA,CAA6BA,CAA7B;EACE,SAAO,IAAIgQ,YAAJ,CAAiBhQ,CAAjB,CAAP;EAWF,2BAAA,CAAgCA,CAAhC;EACE,SAAO,IAAIkQ,eAAJ,CAAoBlQ,CAApB,CAAP;EAWF,kBAAA,CAAyBA,CAAzB;EACE,SAAO,IAAIoQ,QAAJ,CAAapQ,CAAb,CAAP;EAWF,yBAAA,CAAgCA,CAAhC;EACE,SAAO,IAAI6Q,eAAJ,CAAoB7Q,CAApB,CAAP;EAWF,uBAAA,CAA8BA,CAA9B;EACE,SAAO,IAAIgR,aAAJ,CAAkBhR,CAAlB,CAAP;EAWF,sBAAA,CAA6BA,CAA7B;EACE,SAAO,IAAIiR,YAAJ,CAAiBjR,CAAjB,CAAP;EAWF,kBAAA,CAAyBA,CAAzB;EACE,SAAO,IAAIkR,QAAJ,CAAalR,CAAb,CAAP;EAWF,qBAAA,CAA4BA,CAA5B;EACE,SAAO,IAAImR,WAAJ,CAAgBnR,CAAhB,CAAP;EAWF,oBAAA,CAA2BA,CAA3B;EACE,SAAO,IAAIqR,UAAJ,CAAerR,CAAf,CAAP;qZC9I8BtiG;EAC9B,SAAOmB,MAAMC,OAAND,CAAcnB,CAAdmB,KAAoBA,MAAMC,OAAND,CAAcnB,EAAE,CAAFA,CAAdmB,CAA3B;EASF,4BAAA,CAAmCnB,CAAnC;EACE,SAAiB,MAAbA,EAAEN,MAAW,KAAA,GAGZyB,MAAMC,OAAND,CAAcnB,EAAE,CAAFA,CAAdmB,IAGEnB,CAHFmB,IACKnB,EAJV;EAeF,6BAAA,CAAoCyW,CAApC;EACE,MAAIzW,CAAJ,CACA,IAAImB,MAAMC,OAAND,CAAcsV,CAAdtV,CAAJ,EAAuB;EACrB,QAAkB,MAAdsV,EAAG/W,MAAP,EACE,MAAM,IAAIsqG,UAAJ,CAAe,yCAAuCvzF,EAAG/W,MAAzD,CAAN,CAEFM,IAAIyW,EAAG,CAAHA,CAAJzW;KAJF,MAMEA,IAAIyW,CAAJzW,CAEF,OAAOA,CAAP;EAaF,4BAAA,CAAmConB,CAAnC;EACE,MAAIjmB,MAAMC,OAAND,CAAcimB,CAAdjmB,KAAyBA,MAAMC,OAAND,CAAcimB,EAAO,CAAPA,CAAdjmB,CAA7B,EAAuD;EACrD,QAAsB,MAAlBimB,EAAO1nB,MAAX,EAEE,QADA0nB,IAASA,GACK,EAAd,CAEA,MAAM,IAAI4iF,UAAJ,CAAe,mCAAiC5iF,EAAO1nB,MAAvD,CAAN;EAGF,UAAO0nB,CAAP;iCC5DiCytD;EAEnC,OADA,IAAI7rD,IAAQ,CAAZ,OAAA,EACqBwrF,KAArB,EAAqBlrG,YAArB,EAAqBA,GAArB;EAAK,QAAMk1F,QAAN,CACyB,MAAxBA,EAAOj9F,KAAPi9F,CAAa9+F,MAAW,GAC1BspB,KAAS,CADiB,GAG1BA,KAASw1E,EAAOj9F,KAAPi9F,CAAav9C,MAAbu9C,CAAoB,UAACx/F,CAAD,EAAIsB,CAAJ;EAAU,aAAAtB,IAAIsB,CAAJ;OAA9Bk+F,CAHiB;EAM9B,UAAOx1E,CAAP;ECTF,KAAMyrF,+BAA+B,UAArC;EAAA;EAqCE,YAAA,CACIv0G,CADJ,EACiBqD,CADjB,EAEIM,CAFJ,EAEyCwQ,CAFzC,EAGI05F,CAHJ;uBACiBxqG,iCACbM,oDAAqCwQ,0BACrC05F,WACFzoG,KAAK/B,KAAL+B,GAAsB,QAAT/B,CAAS,GAAO,SAAP,GAAmBA,GACzC+B,KAAK/D,KAAL+D,GAAapF,EAAIqB,OACj
B+D,KAAK2E,EAAL3E,GAAUovG,yBAEV7wG,IAAe,QAARA,CAAQ,GAAO4wG,4BAAP,GAAsC5wG,GACrDyB,KAAKqvG,YAALrvG,GAAoBsvG,oBAAoB/wG,CAApB+wG,GACpBtvG,KAAKzB,IAALyB,GAAYuvG,oBAAoBvvG,KAAKqvG,YAAzBE,GAEZvvG,KAAK+O,SAAL/O,GAAiB+O,GACjB/O,KAAKyoG,UAALzoG,GAAkByoG,GAElBzoG,KAAKpF,GAALoF,GAAWwvG,SAAa50G,CAAb40G,EAAkBxvG,KAAK+O,SAAvBygG,EAAkCxvG,KAAKzB,IAAvCixG,EAA6CxvG,KAAK/B,KAAlDuxG;EAiDf,UAvCEC,WAAAA,KAAAA,GAAA;EAEE,WADAzvG,KAAK0vG,iBAAL1vG,IACOA,KAAKpF,GAAZ;KAFF60G,EAYAA,WAAAA,MAAAA,GAAA,UAAM9sB,CAAN;EAWE,WATA3iF,KAAK0vG,iBAAL1vG,IACA2vG,iBAAiB3vG,KAAKpF,GAAtB+0G,EAA2BhtB,CAA3BgtB,CADA3vG,EAGIA,KAAKpF,GAALoF,CAAS2E,EAAT3E,KAAgB2iF,EAAOh+E,EAAvB3E,KACFA,KAAKpF,GAALoF,CAASgR,MAAThR,CAAgB2iF,CAAhB3iF,GACuB,QAAnBA,KAAKyoG,UAAc,IACrBzoG,KAAKpF,GAALoF,CAASgR,MAAThR,CAAgBA,KAAKyoG,UAALzoG,CAAgBd,KAAhBc,CAAsBA,KAAKpF,GAA3BoF,CAAhBA,CAHAA,CAHJA,EASOA,IAAP;KAvBFyvG,EA6BAA,WAAAA,QAAAA,GAAA;EACEzvG,SAAK0vG,iBAAL1vG,IACAA,KAAKpF,GAALoF,CAASgT,OAAThT,EADAA;KA9BFyvG,EAkCUA,WAAAA,kBAAAA,GAAV;EACE,QAAIzvG,KAAKpF,GAALoF,CAAS2F,UAAb,EACE,MAAM,IAAIpM,KAAJ,CAAU,oBAAkByG,KAAKzB,IAAvB,0BAAV,CAAN;KApCJkxG,GAuCF;KArGA,2BAuG0B/0G,GAAa0W;EACrC,MAAI1W,EAAEuB,KAAFvB,CAAQuG,QAARvG,OAAuB0W,EAAEnV,KAAFmV,CAAQnQ,QAARmQ,EAA3B,EACE,MAAM,IAAI7X,KAAJ,CACF,qBAAqB2/B,KAAKE,SAALF,CAAex+B,EAAEuB,KAAjBi9B,CAArB,GAA+C,OAA/C,GACAA,KAAKE,SAALF,CAAe9nB,EAAEnV,KAAjBi9B,CAFE,CAAN;EAeJ,uBAAA,CA6K8B/nB,CA7K9B;EA8KE,SAAOA,EAAGrP,GAAHqP,CAAO,UAAAzW,CAAA;EAAK,WAAAA,EAAE+K,IAAF/K,EAAA;KAAZyW,CAAP;EAWF,uBAAA,CACIy+F,CADJ;EAEEA,IAAmB9tG,GAAnB8tG,CAAuB,UAACC,CAAD;EACWA,MAAiB,CAAjBA,EACvB9qG,KADuB8qG,CACjBA,EAAiB,CAAjBA,CADiBA;KADlCD;;EChPF,SAhBE,UAAY5S,CAAZ;EACEh9F,SAAK/B,KAAL+B,GAAag9F,EAAO/+F,KAApB+B,EACAA,KAAK/D,KAAL+D,GAAag9F,EAAO/gG,KADpB+D,EAMoB,QAAhBg9F,EAAO/gG,KAAS,GAClB+D,KAAK8vG,IAAL9vG,GAAYg9F,EAAO/gG,KAAP+gG,CAAa5iG,MADP,GAGlB4F,KAAK8vG,IAAL9vG,GAAYg9F,EAAO8S,IATrB9vG,EAWAA,KAAK+vG,OAAL/vG,GAAeg9F,EAAO+S,OAXtB/vG,EAYAA,KAAKgwG,OAALhwG,GAAeg9F,EAAOgT,OAZtBhwG,EAaAA,KAAKghB,IAALhhB,GAAYg9F,EAAOh8E,IAAPg8E,MAbZh9F;KAeJ;;;EAwDA,SAZE,UACa/B,CADb,EACuChC,CADvC,EAEWg0G,CAFX,EAEwCx+F,CAFxC,EAGay+F,CAHb,EAG+B3xG,CAH/B,EAIa4xG,CAJb;EACanwG,cAAAA,GAAA/B,CAAA+B,EAA0BA,UAAAA,GAAA/D,CAA1B+D,EACFA,gBAAAA,GAAAiwG,CADEjwG,EAC2BA,WAAAA,GAAAyR,CAD3BzR,EAEAA,aAAAA,GAAAkwG,CAFAlwG,EAGAA,sBAAAA,GAAAmwG,CAHAnwG,EAIXA,KAAK2E,EAAL3E,GAAUovG,uBAJCpvG,EAKC,QAARzB,CAAQ,KACVyB,KAAKqvG,YAALrvG,GAAoBsvG,oBAAoB/wG,CAApB+wG,CAApBtvG,EACAA,KAAKzB,IAALyB,GAAYuvG,oBAAoBvvG,KAAKqvG,YAAzBE,CAFF,CALDvvG,EASXA,KAAKZ,IAALY,GAAY/D,EAAM7B,MATP4F;KAWf;;MA2DIowG,cAAc;;EA8DhB,YAAA,CACIpT,CADJ,EAGWkT,CAHX;EAGWlwG,iBAAAA,GAAAkwG,CAAAlwG,EACTA,KAAK2E,EAAL3E,GAAUowG,aADDpwG,EASTA,KAAKqwG,aAALrwG,GAAqBg9F,EAAOqT,aATnBrwG,EAmBTA,KAAKswG,aAALtwG,GAAqBg9F,EAAOsT,aAnBnBtwG,EAqBTA,KAAKuwG,WAALvwG,GAAmBg9F,EAAOuT,WArBjBvwG,EAuBTA,KAAKwwG,aAALxwG,GAAqBg9F,EAAOwT,aAvBnBxwG,EA+BTA,KAAKywG,YAALzwG,GAAoBg9F,EAAOyT,YA/BlBzwG,EAiCTA,KAAK0wG,aAAL1wG,GAAqBg9F,EAAO0T,aAjCnB1wG,EAuCTA,KAAK2wG,UAAL3wG,GAAkBg9F,EAAO2T,UAvChB3wG,EAyCTA,KAAK4wG,WAAL5wG,GAAmBg9F,EAAO4T,WAzCjB5wG,EA8CTA,KAAKsW,WAALtW,GAAmBg9F,EAAO1mF,WA9CjBtW,EAgDTA,KAAK6wG,YAAL7wG,GAAoBg9F,EAAO6T,YAhDlB7wG,CAmDT,KAAoB,SAAA,EAAAmZ,IAAA6jF,EAAOsT,aAA3B,EAAoBtsG,YAApB,EAAoBA,GAApB;EAAK,UAAM8sG,QAAN,CACU,QAATA,CAAS,IACXA,EAAMC,aAAND,CAAoB90G,IAApB80G,CAAyB9wG,IAAzB8wG,CADW;EAIf9T,OAAOqT,aAAPrT,CAAqBgU,YAArBhU,CAAkChhG,IAAlCghG,CAAuCh9F,IAAvCg9F;EAmBJ,UAhBEiU,WAAAA,UAAAA,GAAA;EAEE,SADA,IAAMC,MAAN,OAAA,EACoB/3F,IAAAnZ,KAAKswG,aAAzB,EAAoBtsG,YAApB,EAAoBA,GAApB;EAAK,UAAM8sG,QAAN,CACU,QAATA,CAAS,GACXI,EAAal1G,IAAbk1G,CAAkBJ,EAAMvyG,IAAxB2yG,CADW,GAGXA,EAAal1G,IAAbk
1G,CAAkB,IAAlBA,CAHW;EAMf,cACEb,eAAerwG,KAAKqwG,aAALrwG,GAAqBA,KAAKqwG,aAALrwG,CAAmBzB,IAAxCyB,GAA+C,MAC9DswG,eAAeY,GACfX,aAAavwG,KAAKuwG,aAClBC,eAAexwG,KAAKwwG,eAJtB;KATFS,GAgBF;;MAiDIE,eAAe;;EAuDjB,YAAA,CAAYnU,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,CAdQ9P,WAAAA,GAAsB,IAAtBA,EAEAA,mBAAAA,KAFAA,EAUEA,WAAAA,IAAY,CAVdA,EAgBNA,EAAKgE,EAALhE,GAAUwwG,cAhBJxwG,EAkBNA,EAAKywG,mBAALzwG,GAA2B,IAlBrBA,EAoBNA,EAAK0wG,SAAL1wG,GAAiB,IApBXA,EAqBNA,EAAK2wG,eAAL3wG,IAAuB,CArBjBA,EAwBNA,EAAK4wG,iBAAL5wG,KAxBMA,EAyBNA,EAAK6wG,oBAAL7wG,KAzBMA,EA0BNA,EAAK8wG,OAAL9wG,KA1BMA,EA2BNA,EAAK+wG,QAAL/wG,KA3BMA,EA4BNA,EAAKgxG,MAALhxG,IAAc,CA5BRA,EAkCNA,EAAKqwG,YAALrwG,KAlCMA,EAmCNA,EAAKowG,aAALpwG,KAnCMA,CAqCN,IAAIpC,IAAOy+F,EAAOz+F,IAAlB,CACA,KAAKA,CAAL,EAAW;EACT,UAAM+1F,IAAS3zF,EAAK2kG,YAAL3kG,EAAf,CACApC,IAAOqzG,YAA0Btd,CAA1Bsd,IAAoC,GAApCA,GAA0CC,OAAOvd,CAAPud,CAAjDtzG;EAOF,SALAoC,EAAKpC,IAALoC,GAAYpC,CAAZoC,EAEAA,EAAKoO,SAALpO,GAAqC,QAApBq8F,EAAOjuF,SAAa,IAAciuF,EAAOjuF,SAF1DpO,EAGAA,EAAKmxG,SAALnxG,GAAqC,QAApBq8F,EAAO8U,SAAa,IAAc9U,EAAO8U,SAH1DnxG,EAKyB,QAArBq8F,EAAO78E,UAAc,IAAkC,QAA1B68E,EAAO+U,eAAxC,EAAiE;EAK/D,UAAIA,UAAJ,CACA,IAA8B,QAA1B/U,EAAO+U,eAAX,EACEA,IAAkB/U,EAAO+U,eAAzBA,CADF,KAEO,IAAyB,QAArB/U,EAAO78E,UAAX,EAA+B;EACpC,YAAIoM,IAAoB,IAAxB,CACwB,QAApBywE,EAAOzwE,SAAa,KACtBA,IAAYywE,EAAOzwE,SADG,GAGxBwlF,KAAmBxlF,GAAWplB,OAAO61F,EAAO78E,WAHpB;EAK1Bxf,SAAKoxG,eAALpxG,GAAuBoxG,CAAvBpxG,CAGA,IAAI1C,IAAQ++F,EAAO/+F,KAAnB,CACa,QAATA,CAAS,KACXA,IAAQ++F,EAAOgV,UADJ,GAGA,QAAT/zG,CAAS,KACXA,IAAQ,SADG,CAHA,EAMb0C,EAAK1C,KAAL0C,GAAa1C,CANA;cASO,QAAlB++F,EAAOztB,OAAW,GACpB5uE,EAAKsxG,cAALtxG,GAAsBq8F,EAAOztB,OADT,GAGpB5uE,EAAKsxG,cAALtxG,GAAsB,IAHF,EAQtBA,EAAKuxG,SAALvxG,GAAiB,IARK;EA8+B1B,UAxlCoCkQ,aAAAA,GAAAA,GA8HjBshG,SAAAA,GAAjB,UAAyBrB,CAAzB,EAAuCsB,CAAvC;EACE,WAAOtB,EAAMvyG,IAANuyG,GAAa,MAAbA,GAAsBsB,EAAUnxG,QAAVmxG,EAA7B;KA/HgCvhG,EAyI1BshG,WAAAA,eAAAA,GAAR,UAAuBC,CAAvB,EAA0CC,CAA1C;EACE,QAAiC,MAA7BryG,KAAKgxG,YAALhxG,CAAkB5F,MAAtB,EACE,MAAM,IAAIqqG,YAAJ,CACF,6DAC2B4N,CAD3B,MADE,CAAN,CAIF,IAAIryG,KAAKgxG,YAALhxG,CAAkB5F,MAAlB4F,IAA4BoyG,CAAhC,EACE,MAAM,IAAI1N,UAAJ,CACF,kBAAgB2N,CAAhB,cAAA,GAAoCD,CAApC,8BAAA,GAC0BpyG,KAAKgxG,YAALhxG,CAAkB5F,MAD5C,oBADE,CAAN,CAIF,OAAO4F,KAAKgxG,YAALhxG,CAAkBoyG,CAAlBpyG,CAAP;KApJgC6Q,EAgKlCshG,WAAAA,WAAAA,GAAA,UAAWC,CAAX;EACE,WAAOE,iBACHtyG,KAAKuyG,cAALvyG,CAAoBoyG,CAApBpyG,EAA+B,OAA/BA,EAAwCywG,YADrC6B,CAAP;KAjKgCzhG,EA8KlCshG,WAAAA,YAAAA,GAAA,UAAYC,CAAZ;EACE,WAAOE,iBACHtyG,KAAKuyG,cAALvyG,CAAoBoyG,CAApBpyG,EAA+B,QAA/BA,EAAyC0wG,aADtC4B,CAAP;KA/KgCzhG,EAgMlC1M,qBAAAA,CAAIguG,WAAJhuG,SAAAA,SAAA;EACE,UAAInE,KAAKgxG,YAALhxG,CAAkB5F,MAAlB4F,GAA2B,CAA/B,EACE,MAAM,IAAIukG,cAAJ,CACF,WAASvkG,KAAKzB,IAAd,GACA,qHAFE,CAAN,CAMK,IAAiC,MAA7ByB,KAAKgxG,YAALhxG,CAAkB5F,MAAtB,EACL,MAAM,IAAImqG,cAAJ,CACF,WAASvkG,KAAKzB,IAAd,GACA,wCAFE,CAAN,CAIF,OAAO+zG,iBACHtyG,KAAKuyG,cAALvyG,CAAoB,CAApBA,EAAuB,OAAvBA,EAAgCywG,YAD7B6B,CAAP;2CAbFnuG,CAhMkC0M,EA4NlC1M,qBAAAA,CAAIguG,WAAJhuG,UAAAA,SAAA;EACE,UAAiC,MAA7BnE,KAAKgxG,YAALhxG,CAAkB5F,MAAtB,EACE,MAAM,IAAImqG,cAAJ,CACF,WAASvkG,KAAKzB,IAAd,GACA,wBAFE,CAAN,CAIF,IAAIyB,KAAKgxG,YAALhxG,CAAkB5F,MAAlB4F,GAA2B,CAA/B,EACE,MAAM,IAAIukG,cAAJ,CACF,WAASvkG,KAAKzB,IAAd,GACA,uHAFE,CAAN,CAOF,OAAO+zG,iBACHtyG,KAAKuyG,cAALvyG,CAAoB,CAApBA,EAAuB,QAAvBA,EAAiC0wG,aAD9B4B,CAAP;2CAdFnuG,CA5NkC0M,EA8OlC1M,qBAAAA,CAAIguG,WAAJhuG,UAAAA,SAAA;EACE,aAAOnE,KAAKyxG,OAAZ;2CADFttG,CA9OkC0M,EAuPlCshG,WAAAA,gBAAAA,GAAA;EAKE,WAAOnyG,KAAKsvE,MAALtvE,CAAY8B,GAAZ9B,CAAgB,UAAAwyG,CAAA;EAAU,aAAAA,GAAA;OAA1BxyG,CAAP;KA5PgC6Q,EA+PlC1M,qBAAAA,CAAIguG,WAAJhuG,WAAAA,SAAA;EACE,aAAOnE,KAAK0xG,QAAZ;2CADFv
tG,CA/PkC0M,EAmQlC1M,qBAAAA,CAAIguG,WAAJhuG,SAAAA,SAAA;EACE,aAAOnE,KAAK2xG,MAAZ;cAGF,UAAUc,CAAV;EACEzyG,WAAK2xG,MAAL3xG,GAAcyyG,CAAdzyG;2CALFmE,CAnQkC0M,EA2QlC1M,qBAAAA,CAAIguG,WAAJhuG,oBAAAA,SAAA;EACE,aAAInE,KAAK+O,SAAL/O,GACKA,KAAKuxG,iBADVvxG,KAAJ;cAOF,UAAqBuvE,CAArB;EACEvvE,WAAKuxG,iBAALvxG,GAAyBuvE,CAAzBvvE;2CATFmE,CA3QkC0M,EAuRlC1M,qBAAAA,CAAIguG,WAAJhuG,uBAAAA,SAAA;EACE,aAAKnE,KAAK+O,SAAL/O,GAGIA,KAAKwxG,oBAHTxxG,GACIA,KAAKuxG,iBAALvxG,CAAuBmH,MAAvBnH,CAA8BA,KAAKwxG,oBAAnCxxG,CADT;cAOF,UAAwBuvE,CAAxB;EACEvvE,WAAKwxG,oBAALxxG,GAA4BuvE,CAA5BvvE;2CATFmE,CAvRkC0M,EAuSlC1M,qBAAAA,CAAIguG,WAAJhuG,WAAAA,SAAA;EACE,aAAOnE,KAAK0yG,gBAAL1yG,CAAsBmH,MAAtBnH,CAA6BA,KAAK2yG,mBAAlC3yG,CAAP;2CADFmE,CAvSkC0M,EA2SlC1M,qBAAAA,CAAIguG,WAAJhuG,YAAAA,SAAA;EACE,aAAOnE,KAAK4yG,SAAZ;2CADFzuG,CA3SkC0M,EAsTlCshG,WAAAA,YAAAA,GAAA;EACE,SAAKnyG,KAAK6yG,QAAV,EACE,MAAM,IAAIt5G,KAAJ,CACF,sEADE,CAAN;KAxT8BsX,EA0UxBshG,WAAAA,yBAAAA,GAAV,UAAmC1gG,CAAnC;EAGE,QADAA,IAASqhG,OAAqBrhG,CAArBqhG,CAATrhG,EACsB,QAAlBzR,KAAKqxG,SAAa,IAAkC,MAA1BrxG,KAAKqxG,SAALrxG,CAAe5F,MAA7C,EAAA;EAGA,UAAMi3G,IAAYyB,OAAqB9yG,KAAKqxG,SAA1ByB,CAAlB,CACA,IAAIrhG,EAAOrX,MAAPqX,KAAkB4/F,EAAUj3G,MAAhC,EACE,MAAM,IAAIsqG,UAAJ,CACF,WAAS1kG,KAAKzB,IAAd,cAAA,GAA8B8yG,EAAUj3G,MAAxC,8BAAA,GACmBqX,EAAOrX,MAD1B,qCAAA,GAEmBqX,CAHjB,CAAN,CAKF,KAAK,IAAIshG,IAAa,CAAtB,EAAyBA,IAAathG,EAAOrX,MAA7C,EAAqD24G,GAArD,EAAmE;EACjE,YAAMr4G,IAAI+W,EAAOshG,CAAPthG,CAAV;EAAA,YACMq5E,IAAkBumB,EAAU0B,CAAV1B,CADxB,CAEA,IAAY,QAARvmB,CAAJ,EAAA;EAKA,cAAMglB,IAAOp1G,EAAE0E,IAAf,CACA,IAAiB,QAAb0rF,EAAKglB,IAAQ,IACXA,MAAShlB,EAAKglB,IADpB,EAEI,MAAM,IAAIpL,UAAJ,CACF,WAASqO,CAAT,iCAAA,GAAkD/yG,KAAKzB,IAAvD,qBAAA,GACiBusF,EAAKglB,IADtB,kBAAA,GAC0CA,CAFxC,CAAN,CAKJ,IAAoB,QAAhBhlB,EAAKilB,OAAW,IACdD,IAAOhlB,EAAKilB,OADlB,EAEI,MAAM,IAAIrL,UAAJ,CACF,WAASqO,CAAT,iCAAA,GAAkD/yG,KAAKzB,IAAvD,GACA,sBADA,GACuBusF,EAAKilB,OAD5B,kBAAA,GACmDD,CAFjD,CAAN,CAKJ,IAAoB,QAAhBhlB,EAAKklB,OAAW,IACdF,IAAOhlB,EAAKklB,OADlB,EAEI,MAAM,IAAItL,UAAJ,CACF,WAASqO,CAAT,iCAAA,GAAkD/yG,KAAKzB,IAAvD,GACA,sBADA,GACuBusF,EAAKklB,OAD5B,kBAAA,GACmDF,CADnD,MADE,CAAN,CAOJ,IAAkB,QAAdhlB,EAAK7sF,KAAS,IACZvD,EAAEuD,KAAFvD,KAAYowF,EAAK7sF,KADvB,EAEI,MAAM,IAAIymG,UAAJ,CACF,WAASqO,CAAT,iCAAA,GAAkD/yG,KAAKzB,IAAvD,uBAAA,GACoBusF,EAAK7sF,KADzB,mBAAA,GAC+CvD,EAAEuD,KADjD,MADE,CAAN,CAOJ,IAAI6sF,EAAK9pE,IAAT,EAAe;EACb,gBAAM0K,IAAShxB,EAAEuB,KAAjB,CACA,KAAK,IAAMsa,CAAX,IAAkBu0E,EAAK9pE,IAAvB,EAA6B;EAC3B,kBAAMljB,IAAO1C,OAAOmb,CAAPnb,CAAb;EAAA,kBACM2I,IAAQ+mF,EAAK9pE,IAAL8pE,CAAUv0E,CAAVu0E,CADd;EAAA,kBAKMkoB,IACFl1G,KAAQ,CAARA,GAAY4tB,EAAO5tB,CAAP4tB,CAAZ5tB,GAA2B4tB,EAAOA,EAAOtxB,MAAPsxB,GAAgB5tB,CAAvB4tB,CAN/B,CAOA,IAAa,QAAT3nB,CAAS,KAAiD,OAAxCA,GAAO,MAAMud,QAAQ0xF,EAA3C,EACE,MAAM,IAAItO,UAAJ,CACF,WAASqO,CAAT,iCAAA,GACG/yG,KAAKzB,IADR,qBAAA,GAC+BT,CAD/B,mCAAA,GAEciG,CAFd,oBAAA,GAEqC2nB,CAFrC,MADE,CAAN;;EASN,eAAkB,QAAdo/D,EAAK7uF,KAAT,EACE,KAAK,IAAIlB,IAAI,CAAb,EAAgBA,IAAI+vF,EAAK7uF,KAAL6uF,CAAW1wF,MAA/B,IAAyCW,CAAzC,EAA4C;EAC1C,gBAAMk4G,IAAUnoB,EAAK7uF,KAAL6uF,CAAW/vF,CAAX+vF,CAAhB;EAAA,gBACMh+E,IAAMpS,EAAEuB,KAAFvB,CAAQK,CAARL,CADZ,CAEA,IAAe,QAAXu4G,CAAW,IAAe,QAAPnmG,CAAR,IACTmmG,MAAYnmG,CADlB,EAEI,MAAM,IAAI43F,UAAJ,CACF,WAASqO,CAAT,iCAAA,GACG/yG,KAAKzB,IADR,sBAAA,GACgCusF,EAAK7uF,KADrC,6BADE,CAAN;;;;;KA1ZsB4U,EA6alCshG,WAAAA,KAAAA,GAAA,UAAK1gG,CAAL,EAA8ByhG,CAA9B;EACE,WAAOzhG,CAAP;KA9agCZ,EAibxBshG,WAAAA,eAAAA,GAAV,UAAyB1gG,CAAzB,EAAkDyhG,CAAlD;EACwB,YAAlBlzG,KAAKmzG,SAAa,IACpBnzG,KAAKmzG,SAALnzG,CAAeyR,CAAfzR,EAAuBkzG,CAAvBlzG,CADoB;KAlbU6Q,EA4blCshG,WAAAA,YAAAA,GAAA,UAAYiB,CAAZ;EACEpzG,SAAKmzG,SAALnzG,GAAiBozG,CAAjBpzG;KA7bgC6Q,EA
oclCshG,WAAAA,cAAAA,GAAA;EACEnyG,SAAKmzG,SAALnzG,GAAiB,IAAjBA;KArcgC6Q,EA4gBlCshG,WAAAA,MAAAA,GAAA,UACI1gG,CADJ,EAEIyhG,CAFJ;EAAA,gBAAA,CAGEA,IAASA,OAATA,EAEAlzG,KAAK0vG,iBAAL1vG,EAFAkzG,CAQA,KAHA,IAAMG,IAAaP,OAAqBrhG,CAArBqhG,CAAnB,EAEIQ,KAAiB,CAFrB,OAAA,EAGoBC,KAApB,EAAoBvvG,YAApB,EAAoBA,GAApB;EACE,4BAAuBwvG,eAAvB,EAAwC;EACtCF,aAAiB,CAAjBA,CACA;;EAIJ,UADA,IAAIG,KAAkB,CAAtB,OAAA,EACoBC,KAApB,EAAoBv6F,YAApB,EAAoBA,GAApB;EACE,0BAAqBq6F,cAArB,EAAqC;EACnCC,aAAkB,CAAlBA,CACA;;EAIJ,SAAIH,MAAmBG,CAAvB,EACE,MAAM,IAAI/O,UAAJ,CACF,iEADE,CAAN,CAMF,OAAOiP,UAAU3zG,KAAKzB,IAAfo1G,EAAqB;EAE1B,WAAKhzG,EAAK8xG,KAAV,EAAiB;EAKf9xG,UAAKizG,wBAALjzG,CAA8B8Q,CAA9B9Q,EAIA,KADA,IAAM2V,MAAN,OAAA,EACoB6C,IAAA25F,OAAqBrhG,CAArBqhG,CAApB,EAAoB9uG,YAApB,EAAoBA,GAApB;EAAK,cAAM6vG,QAAN,CACHv9F,EAAYta,IAAZsa,CAAiBu9F,EAAM53G,KAAvBqa;EAEF3V,WAAKmzG,KAALnzG,CAAW2xG,iBAA+Bh8F,CAA/Bg8F,CAAX3xG,GACAA,EAAK8xG,KAAL9xG,IAAa,CADbA,EAIIA,EAAKsxG,cAALtxG,IACFA,EAAKozG,UAALpzG,CAAgBA,EAAKsxG,cAArBtxG,CALFA,EAQuB,SAAnBA,EAAKuxG,SAAc,IAAQuB,CAAR,KAIrB9yG,EAAKuxG,SAALvxG,GAAiB,CAJI,CARvBA;EA0BF,WANAA,EAAKizG,wBAALjzG,CAA8B8Q,CAA9B9Q,GAMI8yG,CAAJ,EAAqB;EAUnB,aATA,IAMMO,MANN,OAAA,EAScC,IAJenB,OALzBhhG,IAASnR,EAAK1B,IAAL0B,CAAU8Q,CAAV9Q,EAAuCuyG,CAAvCvyG,CAKgBmyG,CAI7B,EAAcvgF,YAAd,EAAcA,GAAd;EAAK,cAAI73B,QAAJ,EAC4B,MAA3B24G,EAAW/xF,OAAX+xF,CAAmB34G,CAAnB24G,MACF34G,IAAIA,EAAE2L,KAAF3L,KAENs5G,EAAeh4G,IAAfg4G,CAAoBt5G,CAApBs5G;EAIF,aAFAliG,IAASwgG,iBAA+B0B,CAA/B1B,CAATxgG,EAEgC,QAA5BnR,EAAKywG,mBAAT,EACE,MAAM,IAAIzM,mBAAJ,CACF,mFADE,CAAN,CAMF,OAAO7yF,CAAP;EAEA,WAAMqO,IAAa+zF,kBAAkBziG,CAAlByiG,CAAnB;EAAA,UACMrmG,IAAclN,EAAKwzG,kBAALxzG,CAAwBwf,CAAxBxf,CADpB;EAAA,UAEImR,UAFJ;EAAA,UAGMsiG,IAAcC,iBAAiB5iG,CAAjB4iG,CAHpB,CAmCA,IA/BA1zG,EAAK2zG,4BAAL3zG,CACI9E,MAAMC,OAAND,CAAc4V,CAAd5V,IAAwBskB,EAAW,CAAXA,CAAxBtkB,GACwBskB,CAF5Bxf,GAOEmR,IAHiB,QAAfjE,CAAe,IAAQA,EAAYzT,MAAZyT,GAAqB,CAA7B,IACfhS,MAAMC,OAAND,CAAcgS,EAAY,CAAZA,CAAdhS,CADe,GAGPgS,EACI/L,GADJ+L,CAEO,UAAC5R,CAAD,EAAQ3B,CAAR;EAAkB,eAAA,IAAIk5G,cAAJ,CACdY,CADc,EACDn4G,CADC,EACM0E,CADN,EAEdmyG,OAAqBrhG,CAArBqhG,CAFc,EAEgBI,CAFhB,EAEwBvyG,EAAKpC,IAF7B,EAGdjE,CAHc,CAAA;SAFzBuT,CAHO,GAUR,IAAI2lG,cAAJ,CACLY,CADK,EACQvmG,CADR,EAC8BlN,CAD9B,EAELmyG,OAAqBrhG,CAArBqhG,CAFK,EAEyBI,CAFzB,EAEiCvyG,EAAKpC,IAFtC,CAdXoC,EA0BAA,EAAK4zG,cAAL5zG,CACI8Q,CADJ9Q,EACiDmR,CADjDnR,EACyD,IADzDA,EAC+D,IAD/DA,EAEIwf,CAFJxf,EAEgBkN,CAFhBlN,EAE6BuyG,CAF7BvyG,CA1BAA,EA6BAA,EAAKuxG,SAALvxG,EA7BAA,EA+BgC,QAA5BA,EAAKywG,mBAAT,EACE,MAAM,IAAIzM,mBAAJ,CACF,mFADE,CAAN,CAKF,OAAO7yF,CAAP;OA5GG6hG,CAAP;KA5iBgC9iG,EAoqBxBshG,WAAAA,6BAAAA,GAAV,UAAuChyF,CAAvC;EACE,QAA4B,QAAxBngB,KAAK+xG,eAAT,EAEO,IAAI5xF,EAAW/lB,MAAX+lB,KAAsBngB,KAAK+xG,eAAL/xG,CAAqB5F,MAA/C,EACL8G,QAAQ8Z,IAAR9Z,CACI,mDACGg4B,KAAKE,SAALF,CAAe/Y,CAAf+Y,CADH,mDAAA,GAEoBA,KAAKE,SAALF,CAAel5B,KAAK+xG,eAApB74E,CAFpB,oBAAA,GAGgBl5B,KAAKzB,IAJzB2C,EADK,KAMA;EACL,UAAIszG,KAAc,CAAlB,CACAx0G,KAAK+xG,eAAL/xG,CAAqBI,OAArBJ,CAA6B,UAACy0G,CAAD,EAAY15G,CAAZ;EACV,gBAAb05G,CAAa,IAAyB,QAAjBt0F,EAAWplB,CAAXolB,CAAR,IACbA,EAAWplB,CAAXolB,MAAkBs0F,CADL,KAEfD,KAAc,CAFC;SADnBx0G,GAMIw0G,KACFtzG,QAAQ8Z,IAAR9Z,CACI,oCACIg4B,KAAKE,SAALF,CAAe/Y,CAAf+Y,CADJ,+CAAA,GAEkCl5B,KAAKzB,IAFvC,OAAA,GAGG26B,KAAKE,SAALF,CAAel5B,KAAK+xG,eAApB74E,CAJPh4B,CAPFlB;;KA/qB8B6Q,EA0sBlC1M,qBAAAA,CAAIguG,WAAJhuG,eAAAA,SAAA;EACE,UAAyB,QAArBnE,KAAKgxG,YAAgB,IAAqC,MAA7BhxG,KAAKgxG,YAALhxG,CAAkB5F,MAAnD,EACE,MAAM,IAAImqG,cAAJ,CACF,eAAavkG,KAAKzB,IAAlB,iEADE,CAAN,CAKF,KADA,IAAMm2G,MAAN,OAAA,EACmBv7F,IAAAnZ,KAAKgxG,YAAxB,EAAmBhtG,YAAnB,EAAmBA,GAAnB;EAAK,YAAMwN,QAAN;EAAA,YACGmjG,IAAcz7E,KAAKE,SAALF,CAAe1nB,EAAKq/F,Y
AApB33E,CADjB,EAE2C,MAA1Cw7E,EAAgBpzF,OAAhBozF,CAAwBC,CAAxBD,KACFA,EAAgB14G,IAAhB04G,CAAqBC,CAArBD;EAGJ,WAA+B,MAA3BA,EAAgBt6G,MAApB,EAAkC;EAChC,YAAMy2G,IAAe7wG,KAAKgxG,YAALhxG,CAAkB,CAAlBA,EAAqB6wG,YAA1C,CACA,OAAIh1G,MAAMC,OAAND,CAAcg1G,CAAdh1G,KAA+BA,MAAMC,OAAND,CAAcg1G,EAAa,CAAbA,CAAdh1G,CAA/BA,IACwB,MAAxBg1G,EAAaz2G,MADbyB,GAEMg1G,EAAyB,CAAzBA,CAFNh1G,GAIKg1G,CAJT;EAQA,aAAM,IAAItM,cAAJ,CACF,eAAavkG,KAAKzB,IAAlB,8HADE,CAAN;2CAvBJ4F,CA1sBkC0M,EAkvBlCshG,WAAAA,YAAAA,GAAA;EACE,SAAKnyG,KAAKyyG,KAAV,EACE,MAAM,IAAIhO,YAAJ,CACF,wCAAsCzkG,KAAKzB,IAA3C,wFADE,CAAN,CAKF,OAAOq2G,qBAAoC50G,KAAKuvE,OAAzCqlC,CAAP;KAzvBgC/jG,EAswBlCshG,WAAAA,MAAAA,GAAA,UAAMhyF,CAAN;EACEngB,SAAKyyG,KAALzyG,IAAa,CAAbA;KAvwBgC6Q,EAixBlCshG,WAAAA,WAAAA,GAAA,UAAW0C,CAAX;EACE,4BADSA,SACFC,cAAcD,IAAgB70G,KAAK0yG,gBAArBmC,GAAwC70G,KAAKuvE,OAA3DulC,CAAP;KAlxBgCjkG,EAgyBlCshG,WAAAA,WAAAA,GAAA,UAAW5iC,CAAX;EAAA,gBAAA,CACE72D,KAAK;EACH,UAAM8D,IAAS7b,EAAK4uE,OAApB,CACA,IAAI/yD,EAAOpiB,MAAPoiB,KAAkB+yD,EAAQn1E,MAA9B,EAKE,MAAM,IAAIsqG,UAAJ,CACF,8CAA4C/jG,EAAKpC,IAAjD,oCAAA,GACgCgxE,EAAQn1E,MADxC,mCAAA,GAE+BoiB,EAAOpiB,MAFtC,iCAAA,GAGqBm1E,CAHrB,QADE,CAAN,CAMF,IAAsB,MAAlB/yD,EAAOpiB,MAAX,EAAA;EAKA,aAFA,IAAM26G,MAAN,EACMC,IAAcF,cAAct4F,CAAds4F,CADpB,EAES/5G,IAAI,CAAb,EAAgBA,IAAIi6G,EAAY56G,MAAhC,IAA0CW,CAA1C,EAA6C;EAC3C,cAAMk6G,IAAKD,EAAYj6G,CAAZi6G,CAAX;EAAA,cACMvjE,IAAIj1B,EAAOzhB,CAAPyhB,CADV;EAAA,cAEM2oB,IAAIoqC,EAAQx0E,CAARw0E,CAFV,CAGA,KAAKu8B,KAAKnwG,WAALmwG,CAAiBmJ,EAAGh5G,KAApB6vG,EAA2B3mE,EAAElpC,KAA7B6vG,CAAL,EACE,MAAM,IAAIpH,UAAJ,CACF,wBAAsBuQ,EAAGh5G,KAAzB,gDAAA,GAC6CkpC,EAAElpC,KAF7C,CAAN,CAIF84G,EAAkB/4G,IAAlB+4G,EAAwBtjE,GAAGtM,EAA3B4vE;EAEFG,uBAAcH,CAAdG;;OA7BFx8F;KAjyBgC7H,EAg1BxBshG,WAAAA,UAAAA,GAAV,UACI5zG,CADJ,EACkBtC,CADlB,EACgCgC,CADhC,EACkD+wG,CADlD,EAEImG,CAFJ,EAE+BpmG,CAF/B,EAGI05F,CAHJ;EAKE,SAA8C,MAA1CzoG,KAAKo1G,iBAALp1G,CAAuBshB,OAAvBthB,CAA+BzB,CAA/ByB,CAAJ,EACE,MAAM,IAAI0kG,UAAJ,CACF,2BAAyBnmG,CAAzB,gBAAA,GAA2CyB,KAAKzB,IAD9C,CAAN,CAGFyB,KAAKo1G,iBAALp1G,CAAuBhE,IAAvBgE,CAA4BzB,CAA5ByB,GAEa,QAAT/B,CAAS,KACXA,IAAQ,SADG,CAFb+B,CAKA,IAAMk5F,IAAS,IAAIuW,aAAJ,CACXT,EAAY9vG,KAAZ8vG,CAAkB/yG,CAAlB+yG,EAAyB/wG,CAAzB+wG,CADW,EACsB/wG,CADtB,EAC6BM,CAD7B,EACmCwQ,CADnC,EAC8C05F,CAD9C,CAAf,CAcA,OAXmB,QAAf0M,CAAe,IACjBn1G,KAAKq1G,OAALr1G,CAAa;EAAM,aAAAm1G,EAAYj2G,KAAZi2G,CAAkBjc,EAAOzzF,IAAPyzF,EAAlBic,CAAA;OAAnBn1G,CADiB,EAGF,QAAb+O,CAAa,KACfA,KAAY,CADG,CAHE,EAMfA,IACF/O,KAAKuxG,iBAALvxG,CAAuBhE,IAAvBgE,CAA4Bk5F,CAA5Bl5F,CADE+O,GAGF/O,KAAKwxG,oBAALxxG,CAA0BhE,IAA1BgE,CAA+Bk5F,CAA/Bl5F,CATiB,EAWZk5F,CAAP;KA52BgCroF,EAs3BlCshG,WAAAA,QAAAA,GAAA,UAAQ7iC,CAAR;YACgB,QAAVA,CAAU,IAAQzzE,MAAMC,OAAND,CAAcyzE,CAAdzzE,KAA2C,MAAlByzE,EAAOl1E,MAAxC,KAIdk1E,IAASwjC,OAAqBxjC,CAArBwjC,CAATxjC,OACqB+0B,MAAjBrkG,KAAKyxG,WAA0C,SAAjBzxG,KAAKyxG,YACrCt4F,IAAAnZ,KAAKsvE,QAAOtzE,cAAQszE,EANR;KAv3BkBz+D,EA24BlCshG,WAAAA,mBAAAA,GAAA,UAAmBhyF,CAAnB;EACE,WAAOA,CAAP;KA54BgCtP,EAw5BlCshG,WAAAA,YAAAA,GAAA,UAAY1gG,CAAZ,EAAqCotC,CAArC;EAAA,gBAAA,CAEE,KAAK7+C,KAAKsxG,eAAV,EAA2B;EACzB,UAAY,QAARzyD,CAAJ,EAAkB;EAChB,aAAIhjD,MAAMC,OAAND,CAAcgjD,CAAdhjD,CAAJ,EASE,MAAM,IAAIy5G,SAAJ,CACF,WAASt1G,KAAKzB,IAAd,2DADE,CAAN,CARAsgD,EAAKz+C,OAALy+C,CAAa,UAAA02D,CAAA;EACX,cAAmB,QAAfA,CAAJ,EACE,MAAM,IAAID,SAAJ,CACF,WAAS30G,EAAKpC,IAAd,2DADE,CAAN;WAFJsgD;EAcJ,cAAO,IAAP;EAIF,YAAOA,CAAP;KA/6BgChuC,EA87B1BshG,WAAAA,eAAAA,GAAR,UACI1B,CADJ,EAEIC,CAFJ,EAGIC,CAHJ,EAGiCC,CAHjC,EAIIt6F,CAJJ,EAIgCu6F,CAJhC,EAKIqC,CALJ;uBAKIA,UACF,IAAMsC,IACF1C,OAAqBrC,CAArBqC,CADJ,CAEApC,IAAgBoC,OAAqBpC,CAArBoC,CAAhBpC,EACAC,IAAamC,OAAqBnC,CAArBmC,CADbpC,EAEAE,IAAckC,OAAqBlC,CAArBkC,CAFdpC,EAGAp6
F,IAAcm/F,mBAA+Bn/F,CAA/Bm/F,CAHd/E,EAIAG,IAAe4E,mBAA+B5E,CAA/B4E,CAJf/E,CAUA,KAHA,IAAMJ,MAAN,EACMC,MADN,EAEMC,MAFN,OAAA,EAGgBkF,KAAhB,EAAgB1xG,YAAhB,EAAgBA,GAAhB;EAAK,UAAMtJ,QAAN,CAKH41G,EAAct0G,IAAds0G,CAAmB51G,EAAEu1G,WAArBK,GACAC,EAAYv0G,IAAZu0G,CAAiB71G,EAAE03G,SAAnB7B,CADAD,EAEAE,EAAcx0G,IAAdw0G,CAAmB91G,EAAEi7G,WAArBnF,CAFAF;EAQF,SAAIW,IAAJ,GAEMZ,eAAerwG,MACfswG,kBACAC,gBACAC,kBACAC,cAAc+E,GACd9E,kBACAC,eACAC,gBACAt6F,gBACAu6F,iBAXN,EAaIqC,CAbJ,EAgBA,KAAK,IAAIn4G,IAAI,CAAb,EAAgBA,IAAI21G,EAAct2G,MAAlC,EAA0CW,GAA1C,EAEE21G,EAAc31G,CAAd21G,EAAiBT,WAAjBS,GAA+B1wG,IAA/B0wG,EACAA,EAAc31G,CAAd21G,EAAiB0B,SAAjB1B,GAA6B1wG,KAAKgxG,YAALhxG,CAAkB5F,MAAlB4F,GAA2B,CADxD0wG,EAEAA,EAAc31G,CAAd21G,EAAiBiF,WAAjBjF,GAA+B31G,CAF/B21G;KA/+B8B7/F,EAygClCshG,WAAAA,UAAAA,GAAA;EACE,QAAMnV,MAC0Bz+F,MAAMyB,KAAKzB,MAAMwQ,WAAW/O,KAAK+O,WADjE,CAQA,OAN4B,QAAxB/O,KAAK+xG,eAAmB,KAC1B/U,EAAwB+U,eAAxB/U,GAA4Bh9F,KAAK+xG,eADP,GAGV,QAAd/xG,KAAK/B,KAAS,KAChB++F,EAAc/+F,KAAd++F,GAAkBh9F,KAAK/B,KADP,CAHU,EAMrB++F,CAAP;KAlhCgCnsF,EA0hCxBshG,WAAAA,eAAAA,GAAV;EAEE,WADAnyG,KAAKuvE,OAALvvE,CAAaI,OAAbJ,CAAqB,UAAAk5F,CAAA;EAAU,aAAAA,EAAOlmF,OAAPkmF,EAAA;OAA/Bl5F,GACOA,KAAKuvE,OAALvvE,CAAa5F,MAApB;KA5hCgCyW,EA+hCxBshG,WAAAA,kBAAAA,GAAV;EACE,QAAuB,MAAnBnyG,KAAKkyG,SAAT,EACE,MAAM,IAAI34G,KAAJ,CAAU,YAAUyG,KAAKzB,IAAf,2BAAV,CAAN;KAjiC8BsS,EAkkClCshG,WAAAA,QAAAA,GAAA;EACE,SAAKnyG,KAAKyyG,KAAV,EACE,MAAM,IAAIl5G,KAAJ,CACF,0BAAwByG,KAAKzB,IAA7B,wCADE,CAAN,CAKF,IAAuB,SAAnByB,KAAKkyG,SAAT,EACE,MAAM,IAAI34G,KAAJ,CACF,0BAAwByG,KAAKzB,IAA7B,uCADE,CAAN,CAKFyB,KAAK0vG,iBAAL1vG,GAEA,IAAI41G,IAAuB,CAA3B,CAKA,OAJyB,OAAnB51G,KAAKkyG,SAAc,KACvB0D,IAAuB51G,KAAK61G,cAAL71G,EADA,KAIjB81G,sBAAsB91G,KAAKkyG,WAAW0D,yBAA9C;KAtlCgC/kG,GAwlCpC;IAxlCoCk2F,cAAclK,yCAomCvB4T;EAKzB,OADA,IAAM3uF,MAAN,OAAA,EACgBi0F,IAHhBtF,IACIqC,OAAqBrC,CAArBqC,CAEJ,EAAgB9uG,YAAhB,EAAgBA,GAAhB;EAAK,QAAMtJ,QAAN,CACHonB,EAAO9lB,IAAP8lB,CAAYpnB,EAAEuB,KAAd6lB;EAEF,UAAOwwF,iBAA+BxwF,CAA/BwwF,CAAP;EAYF,0BAAA,CAA0B7B,CAA1B;EAEE,SAAO,SAAP;EAcF,yBAAA,CACIx9F,CADJ,EAC4B69F,CAD5B,EAEIsB,CAFJ;EAOE,OAJa,QAATtB,CAAS,IAAsB,QAAbsB,CAAa,IAAQA,IAAY,OACrDtB,IAAQ79F,EAAOg9F,WAAfa,EACAsB,IAAYn/F,EAAOm/F,YAEa,MAA9BtB,EAAME,YAANF,CAAmB12G,MAAvB,EACE,QAAQ6Y,EAAR,CAEA,IAAMzB,IAAOs/F,EAAME,YAANF,CAAmBsB,CAAnBtB,CAAb,CACA,IAAkC,MAA9Bt/F,EAAK8+F,aAAL9+F,CAAmBpX,MAAvB,EACE,OAAOoX,EAAKi/F,YAAZ,CAGA,KADA,IAAMuF,MAAN,EACSj7G,IAAI,CAAb,EAAgBA,IAAIyW,EAAK8+F,aAAL9+F,CAAmBpX,MAAvC,EAA+CW,GAA/C,EAME,KALA,SAAA,EAKgBk7G,IAFQC,gBAHd1kG,EAAKi/F,YAALj/F,CAAkBzW,CAAlByW,CAGc0kG,EAFV1kG,EAAK8+F,aAAL9+F,CAAmBzW,CAAnByW,CAEU0kG,EADN1kG,EAAK++F,WAAL/+F,CAAiBzW,CAAjByW,CACM0kG,CAExB,EAAgBlyG,YAAhB,EAAgBA,GAAhB;EAAK,QAAMmyG,QAAN,EAC+B,MAA9BH,EAAc10F,OAAd00F,CAAsBG,CAAtBH,KACFA,EAAch6G,IAAdg6G,CAAmBG,CAAnBH;EAIN,UAAOA,CAAP;ECx+CN,KCtDYI,qBDsDZ;EAAA;EAGE,YAAA,CAAYpZ,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,IACExS,OAAO++F,EAAO/+F,OACdM,MAAqB,QAAfy+F,EAAOz+F,IAAQ,GAAOy+F,EAAOz+F,IAAd,GAAqBszG,OAAO,OAAPA,EAAgB5wG,QAAhB4wG,IAF5CphG,SADF,CAiBE,IAXwB,QAApBusF,EAAOzwE,SAAa,KACtBywE,EAAOzwE,SAAPywE,GAAmB,IADG,GAGH,QAAjBA,EAAOqZ,MAAU,KACnBrZ,EAAOqZ,MAAPrZ,IAAgB,CADG,CAHG,EAOxBr8F,EAAKoO,SAALpO,IAAiB,CAPO,EAQxBA,EAAK8xG,KAAL9xG,IAAa,CARW,EASxBA,EAAK01G,MAAL11G,GAAcq8F,EAAOqZ,MATG,EAWC,QAArBrZ,EAAO78E,UAAc,IAAkC,QAA1B68E,EAAO+U,eAAxC,EACE,MAAM,IAAIrN,UAAJ,CACF,mGADE,CAAN,CAIF,IAAIqN,IAAkB/U,EAAO+U,eAA7B,CACA,IAAuB,QAAnBA,CAAJ,EAA6B;EAC3B,UAAyB,QAArB/U,EAAO78E,UAAX,EACE,MAAM,IAAIukF,UAAJ,CACF,+EADE,CAAN,CAIAqN,KAAmB/U,EAAOzwE,WAAWplB,OAAO61F,EAAO78E,WAAnD4xF;OANJ,MAUE,IAAwB,QAApB/U,EAAOzwE,SAAX,EACE,MAAM,IAAIm4E,UAAJ,CACF,sFADE,CAAN,CAMJ,IAAMzmG,IAA
Q++F,EAAO/+F,KAAP++F,IAAgB,SAA9B,CAEAr8F,EAAKoxG,eAALpxG,GAAuBoxG,CAAvBpxG,EACAA,EAAK1C,KAAL0C,GAAa1C,CADb0C,EAGAA,EAAK0wG,SAAL1wG,MAAmB1E,OAAO81G,IAH1BpxG,CAKA,IAAM21G,IAAc,IAAI9C,cAAJ,CAChB7yG,EAAK1C,KADW,EACJ0C,EAAKoxG,eADD,EACkBpxG,CADlB,IAAA,IAAA,EACgCA,EAAKpC,IADrC,CAApB,QAEA+3G,EAAYlE,SAAZkE,GAAwB,CAAxBA,EACAA,EAAYX,WAAZW,GAA0B,CAD1BA,EAMA,IAAIrF,IAAJ,GACEZ,eAAe1vG,GACf2vG,mBACAC,iBACAC,mBACAC,eAAe6F,IACf5F,gBAAgB4F,IAChB3F,aAAa,OACbC,cAAc,OACdt6F,cAAcy7F,IACdlB,eAAekB,IAVjB,CANAuE;EAyCJ,UA7FgCzlG,aAAAA,GAAAA,GAwE9B0lG,WAAAA,MAAAA,GAAA,UACI9kG,CADJ,EAEIyhG,CAFJ;EAGE,UAAM,IAAIxO,UAAJ,CACF,+EACiD1kG,KAAKzB,IAFpD,CAAN;KA3E4BsS,EAgF9B0lG,WAAAA,QAAAA,GAAA;EAEE,aAAQT,sBAAsB91G,KAAKkyG,WAAW0D,sBAAsB,GAApE;KAlF4B/kG,EAqF9B0lG,WAAAA,UAAAA,GAAA;EACE,aACExE,iBAAiB/xG,KAAK+xG,iBACtB9zG,OAAO+B,KAAK/B,OACZo4G,QAAQr2G,KAAKq2G,QACb93G,MAAMyB,KAAKzB,MAJb;KAtF4BsS,EACd0lG,WAAAA,GAAY,YADE1lG,GA6FhC;IA7FgCshG,MAAhC,CAsJA,cAAA,CAAsBnV,CAAtB;EACE,MAAyB,QAArBA,EAAOt7C,UAAc,IAAwB,QAAhBs7C,EAAO/gG,KAAxC,EACE,MAAM,IAAI1C,KAAJ,CACF,8HADE,CAAN,CAMF,IAAyB,QAArByjG,EAAOt7C,UAAc,IAAwB,QAAhBs7C,EAAO/gG,KAAxC,EAEE,MAAM,IAAIyoG,UAAJ,CACF,kFADE,CAAN,CAIF,IAAIhjD,IAAas7C,EAAOt7C,UAAxB,CACoB,QAAhBs7C,EAAO/gG,KAAS,IAAsB,QAAdylD,CAAR,KAClBA,KAAc,MAAMv6C,OAAO61F,EAAO/gG,MADhB,EAIpB,IAAIgC,IAAQ++F,EAAO/+F,KAAnB,CAaA,OAZa,QAATA,CAAS,KACXA,IAAQ,SADG,GAIM,IAAIs4G,UAAJ,GACjBxE,iBAAiBrwD,GACjBnjD,MAAMy+F,EAAOz+F,MACbN,UACAo4G,QAAQrZ,EAAOqZ,QAJE,EAOQrF,YAPR,CAOqB,CAPrB,EAOwBN,aAPxB,CAQJ,CARI,CAQnB;iCEvOyC8F;;;;EACzC,cAAY,QAARA,CAAJ,EACE,UAAA,CAKF,KAAWjgG,CAAX,IAHMkgG,MAAAA,EACA5jG,MADA4jG,EAEAC,MAFAD,EAGYD,CAAlB,EAEuB,oBADfzyG,IAAQyyG,EAAKjgG,CAALigG,EACO,KACbG,IAAc5yG,CAAd4yG,EACNF,EAASz6G,IAATy6G,CAAcE,EAAY3xG,IAAZ2xG,EAAdF,CADME,EAEN9jG,EAAK7W,IAAL6W,CAAU0D,CAAV1D,CAFM8jG,EAGND,EAAiB16G,IAAjB06G,CAAsBC,CAAtBD,CAJmB,EAOR,WAAMv5G,QAAQ4K,GAAR5K,CAAYs5G,CAAZt5G,EAAN;EACf,eADMe,IAASib,MAAAA,EAATjb,EACGnD,IAAI,CAAb,EAAgBA,IAAImD,EAAO9D,MAA3B,IAAqCW,CAArC,EACEy7G,EAAK3jG,EAAK9X,CAAL8X,CAAL2jG,IAAgBt4G,EAAOnD,CAAPmD,EAAU,CAAVA,CAAhBs4G,QAIFxjG,QAAQ0jG,CAAR1jG;;;EASF,8BAAA,CAAqCwjG,CAArC;EACE,MAAY,QAARA,CAAJ,EAGA,KAAK,IAAMjgG,CAAX,IAAkBigG,CAAlB,EAAwB;EACtB,QAAMzyG,IAAQyyG,EAAKjgG,CAALigG,CAAd,CACqB,mBAAVzyG,CAAU,IACnBA,EAAMiP,OAANjP,EADmB;;iBF0GX4jG,cAAc4O,aCpJ5B,UAAYH,CAAZ;EACEA,gBAAAA,YAAAA,EACAA,eAAAA,aADAA;EADF,CAAA,CAAYA,0BAAAA,0BAAAA,CAAZ,EA6BA;EAAA,YAAA;EAEEp2G,uBAAAA,GAAkC,IAAlCA;EA+BF,UAzBE42G,WAAAA,UAAAA,GAAA,UAAUp6F,CAAV;EACExc,SAAKwc,MAALxc,GAAcwc,CAAdxc;KADF42G,EAIMA,WAAAA,aAAAA,GAAN,UAAmBC,CAAnB,EAAkCL,CAAlC;;;;;;KAJAI,EAMMA,WAAAA,WAAAA,GAAN,UAAiBC,CAAjB,EAAgCL,CAAhC;;;;;;KANAI,EAQMA,WAAAA,aAAAA,GAAN,UAAmB/qF,CAAnB,EAAkC2qF,CAAlC;;;;;;KARAI,EAUMA,WAAAA,WAAAA,GAAN,UAAiB/qF,CAAjB,EAAgC2qF,CAAhC;;;;;;KAVAI,EAYMA,WAAAA,aAAAA,GAAN,UAAmBJ,CAAnB;;;;;;KAZAI,EAcMA,WAAAA,WAAAA,GAAN,UAAiBJ,CAAjB;;;;;;KAdAI,EAsBAA,WAAAA,SAAAA,GAAA,UAASE,CAAT,IAtBAF,GAyBF;KAjCA;EAAA;EAsDE,YAAA,CAAYG,CAAZ,EAAwCC,CAAxC;uBAAwCA,SAGrB,QAAbD,CAAa,KACfA,MADe,GAGjB/2G,KAAK+2G,SAAL/2G,GAAiB+2G,GACjB/2G,KAAKg3G,WAALh3G,GAAmBg3G;EAqGvB,UAlGEC,WAAAA,OAAAA,GAAA,UAAOj4D,CAAP;EACEh/C,SAAK+2G,SAAL/2G,CAAehE,IAAfgE,CAAoBg/C,CAApBh/C;KADFi3G,EAIAA,WAAAA,UAAAA,GAAA,UAAUz6F,CAAV;EACE,SAAuB,SAAA,EAAArD,IAAAnZ,KAAK+2G,SAA5B,EAAuB/yG,YAAvB,EAAuBA,GAAvB;aACWkzG,UAAU16F;;KANvBy6F,EAUAA,WAAAA,SAAAA,GAAA,UAASH,CAAT;EACE,SAAuB,SAAA,EAAA39F,IAAAnZ,KAAK+2G,SAA5B,EAAuB/yG,YAAvB,EAAuBA,GAAvB;aACWmzG,SAASL;;KAZtBG,EAqBMA,WAAAA,aAAAA,GAAN,UAAmBJ,CAAnB,EAAkCL,CAAlC;;;;EACc,oBAARA,CAAQ,KACVA,MADU,QAAA,EAGWr9F,IAAAnZ,KAAK+2G,SAHhB,aAAA;qBAGW/yG,YAAAA,YACNozG,aAAaP,GAAOL,GADdxyG;EACrBuu
B,kBAAAA,eAAAA;qBADqBvuB;;;;KAzBzBizG,EAmCMA,WAAAA,WAAAA,GAAN,UAAiBJ,CAAjB,EAAgCL,CAAhC;;;;EACc,oBAARA,CAAQ,KACVA,MADU,QAAA,EAGWr9F,IAAAnZ,KAAK+2G,SAHhB,aAAA;qBAGW/yG,YAAAA,YACNqzG,WAAWR,GAAOL,GADZxyG;EACrBuuB,kBAAAA,eAAAA;qBADqBvuB;;;;KAvCzBizG,EAiDMA,WAAAA,aAAAA,GAAN,UAAmBprF,CAAnB,EAAkC2qF,CAAlC;;;;EACc,oBAARA,CAAQ,KACVA,MADU,QAAA,EAGWr9F,IAAAnZ,KAAK+2G,SAHhB,aAAA;qBAGW/yG,YAAAA,YACNszG,aAAazrF,GAAO2qF,GADdxyG;EACrBuuB,kBAAAA,eAAAA;qBADqBvuB;;;;KArDzBizG,EA+DMA,WAAAA,WAAAA,GAAN,UAAiBprF,CAAjB,EAAgC2qF,CAAhC;;;;EAIE,mBAHY,QAARA,CAAQ,KACVA,MADU,OAGNe,qBAAqBf,CAArBe,EAAN;EAAAhlF,kBAAAA,SAAAA,EACuBpZ,IAAAnZ,KAAK+2G,SAD5BxkF,aAAAA;qBACuBvuB,YAAAA,YACNwzG,WAAW3rF,GAAO2qF,GADZxyG;EACrBuuB,kBAAAA,eAAAA;qBADqBvuB;;;;KApEzBizG,EA6EMA,WAAAA,aAAAA,GAAN,UAAmBT,CAAnB;;;;EACc,oBAARA,CAAQ,KACVA,MADU,QAAA,EAGWr9F,IAAAnZ,KAAK+2G,SAHhB,aAAA;qBAGW/yG,YAAAA,YACNyzG,aAAajB,GADPxyG;EACrBuuB,kBAAAA,eAAAA;qBADqBvuB;;;;KAjFzBizG,EA0FMA,WAAAA,WAAAA,GAAN,UAAiBT,CAAjB;;;;EACc,oBAARA,CAAQ,KACVA,MADU,QAAA,EAGWr9F,IAAAnZ,KAAK+2G,SAHhB,aAAA;qBAGW/yG,YAAAA,YACN0zG,WAAWlB,GADLxyG;EACrBuuB,kBAAAA,eAAAA;qBADqBvuB;;;;KA9FzBizG,GAkGF;KAlKA;EAAA;EAoME,YAAA,CAAYU,CAAZ;EACE33G,SAAK23G,UAAL33G,GAAkB23G,CAAlB33G,EACAA,KAAK43G,UAAL53G,GAAkB,CADlBA,EAEAA,KAAK63G,oBAAL73G,KAFAA,EAGAA,KAAK83G,qBAAL93G,GAA6B,IAH7BA,EAIAA,KAAK+3G,gBAAL/3G,GAAwB8rG,KAAKpsG,GAALosG,EAJxB9rG;EAwFJ,UA3EgBg4G,WAAAA,uBAAAA,GAAd,UAAqCxB,CAArC;;;;gCACoBA;+CAEK,oBADfzyG,IAAQyyG,EAAKjgG,CAALigG,EACO,SAAA,OACZzyG,EAAiBiB,IAAjBjB;EACP,mBADA4uB,MAAAA,UACA;;;;;KALQqlF,EA4BRA,WAAAA,kBAAAA,GAAN,UAAwBxB,CAAxB;;;;qBAC0B,WAApBx2G,KAAK23G,UAAe,SAAA,IACtB33G,KAAK43G,UAAL53G,IACkC,QAA9BA,KAAK83G,qBAAyB,SAAA,OAG1B93G,KAAKi4G,sBAALj4G,CAA4Bw2G,CAA5Bx2G,EALc;EAOpB,mBAFAmZ,MAAAA,IACMD,IAAI4yF,KAAKpsG,GAALosG,EADV3yF,MAEM++F,YAAN;qBAAA/+F,MAAAA,IAGInZ,KAAK43G,UAAL53G,GAAkBg4G,EAAqBG,kBAAvCn4G,KACFA,KAAK63G,oBAAL73G,CAA0BhE,IAA1BgE,CAA+BkZ,IAAIlZ,KAAK+3G,gBAAxC/3G,GACIA,KAAK63G,oBAAL73G,CAA0B5F,MAA1B4F,IACAg4G,EAAqBI,oBADrBp4G,KAEIq4G,IACFr4G,KAAK63G,oBAAL73G,CAA0B27C,MAA1B37C,CAAiC,UAACs4G,CAAD,EAAMjlC,CAAN;EAAe,qBAAAilC,IAAMjlC,CAAN;eAAhDrzE,IACAA,KAAK63G,oBAAL73G,CAA0B5F,MAFxBi+G,EAGNr4G,KAAK83G,qBAAL93G,GAA6BzF,KAAKuE,KAALvE,CACzBy9G,EAAqBO,gBAArBP,GAAwCK,CADf99G,CAHvB89G,EAKFr4G,KAAK83G,qBAAL93G,GAA6B,CAA7BA,KACFA,KAAK83G,qBAAL93G,GAA6B,CAD3BA,CAPFA,CAFFA,CAHJmZ,EAiBAnZ,KAAK+3G,gBAAL/3G,GAAwB8rG,KAAKpsG,GAALosG,EAjBxB3yF,EAkBAnZ,KAAKw4G,mBAALx4G,GAA2BA,KAAK43G,UAlBhCz+F;qBAsBInZ,KAAK43G,UAAL53G,GAAkBA,KAAKw4G,mBAAvBx4G,IACAA,KAAK83G,qBADL93G,OAEIk4G,YAFJl4G;EAGF,mBADAmZ,MAAAA,QACMnZ,KAAKi4G,sBAALj4G,CAA4Bw2G,CAA5Bx2G,EAAN;EAAAmZ,kBAAAA,IACAnZ,KAAKw4G,mBAALx4G,GAA2BA,KAAK43G,UADhCz+F,aAAAA;;qBAIyB,YAApBnZ,KAAK23G,UAAe,SAAA,OACvBO;EAAN/+F,kBAAAA,eAAAA;;;;KAlEU6+F,EAsERA,WAAAA,kBAAAA,GAAN;;;;qBAC0B,YAApBh4G,KAAK23G,UAAe,SAAA,OAChBO;EAAN/+F,kBAAAA,eAAAA;;;;KAxEU6+F,EAnCEA,oBAAAA,GAAqB,CAmCvBA,EA/BEA,sBAAAA,GAAuB,CA+BzBA,EA5BEA,kBAAAA,GAAmB,EA4BrBA,GA2EhB;KA7RA;EAAA;EA0SE,YAAA,CAAYL,CAAZ;EAAA,YACElnG,MAAAA,KAAAA,SADF,QAGE9P,EAAKg3G,UAALh3G,GAAkBg3G,KAAc,MAAhCh3G;EAkEJ,UA3EgCkQ,aAAAA,GAAAA,GAYxB4nG,WAAAA,aAAAA,GAAN,UAAmBjC,CAAnB;;;iBACEx2G,KAAK04G,WAAL14G,GAAmB,IAAIg4G,oBAAJ,CAAyBh4G,KAAK23G,UAA9B,CAAnB33G;;;KAb4B6Q,EAgBxB4nG,WAAAA,aAAAA,GAAN,UAAmB5B,CAAnB;;;iBACE72G,KAAKuT,IAALvT,GAAY,CAAZA,EACAA,KAAK24G,MAAL34G,KADAA;;;KAjB4B6Q,EAqBxB4nG,WAAAA,WAAAA,GAAN,UAAiB5sF,CAAjB,EAAgC2qF,CAAhC;;;;;;;;EACE,uBAAMx2G,KAAK04G,WAAL14G,CAAiB44G,iBAAjB54G,CAAmCw2G,CAAnCx2G,EAAN;EAOA,iBAAWuW,CAAX,IAPA4C,MAAAA,IAEY,QAARq9F,CAAQ,KACVA,MADU,CAFZr9F,EAKMoT,IAA4B,QAAhBiqF,EAAWt6G,IAAK,GAAO,CAAP,GAAWs6G
,EAAWt6G,IALxDid,EAMAnZ,KAAKuT,IAALvT,IAAausB,CANbpT,gBAOW5C;EACT,kBAAMxS,IAAQyyG,EAAKjgG,CAALigG,CAAd,CACA,IAAqB,mBAAVzyG,CAAX,EACO27F,EAAKiZ,MAALjZ,CAAY4G,cAAZ5G,CAA2BnpF,CAA3BmpF,MACHA,EAAKiZ,MAALjZ,CAAYnpF,CAAZmpF,IAAmB,CADhBA,GAGLA,EAAKiZ,MAALjZ,CAAYnpF,CAAZmpF,IAAmBA,EAAKiZ,MAALjZ,CAAYnpF,CAAZmpF,IAA6B37F,IAAQwoB,CAHnDmzE,CADP,KAKO;EACL,oBAAImZ,UAAJ,CACItiG,KAAOmpF,EAAKiZ,MAAZpiG,GACFsiG,IAAqBnZ,EAAKiZ,MAALjZ,CAAYnpF,CAAZmpF,CADnBnpF,GAGFmpF,EAAKiZ,MAALjZ,CAAYnpF,CAAZmpF,IAAmBgI,UAAU,CAAVA,CAHjBnxF,EAMJmpF,EAAKiZ,MAALjZ,CAAYnpF,CAAZmpF,IAAmBhnF,KACf;EAAM,yBAAArQ,IAAK1H,EAAKg4G,MAALh4G,CAAY4V,CAAZ5V,CAAL0H,EACIO,IAAI7E,CAAJ6E,EAAW8+F,UAAUn7E,CAAVm7E,CAAX9+F,CADJP,CAAA;mBADSqQ,CANfnC,EASsB,QAAtBsiG,CAAsB,IACxBA,EAAmB7lG,OAAnB6lG,EAVEtiG;;eAhBR4C,UAAAA,EAOkBq9F,CAAlB,IAAWjgG;;;KA7BiB1F,EAsDxB4nG,WAAAA,WAAAA,GAAN,UAAiB5B,CAAjB,EAAgCL,CAAhC;;;;;;;;;EACE,uBAAMx2G,KAAK04G,WAAL14G,CAAiB84G,iBAAjB94G,GAAN;EAEA,gBAFAuyB,MAAAA,IAEY,QAARikF,CAAJ,EACE,mBAAWjgG;EACT,kBAAwB,QAApBwiG,EAAKJ,MAALI,CAAYxiG,CAAZwiG,CAAJ,oBAGgC,mBAArBA,EAAKJ,MAALI,CAAYxiG,CAAZwiG,CAAqB,GAC9BvC,EAAKjgG,CAALigG,IAAYuC,EAAKJ,MAALI,CAAYxiG,CAAZwiG,IAA6BA,EAAKxlG,IADhB,GAG9BmF,KAAK;EACH89F,kBAAKjgG,CAALigG,IAAY5tG,IAAIE,IAAI4+F,UAAU,CAAVA,CAAJ5+F,EAAkB4+F,UAAU/mG,EAAK4S,IAAfm0F,CAAlB5+F,CAAJF,EACIjI,EAAKg4G,MAALh4G,CAAY4V,CAAZ5V,CADJiI,CAAZ4tG,EAEC71G,EAAKg4G,MAALh4G,CAAY4V,CAAZ5V,EAA4BqS,OAA5BrS,EAFD61G,EAGA94F,KAAK84F,EAAKjgG,CAALigG,CAAL94F,CAHA84F;iBADF99F,CAH8B;kCAJhBS,IAAAnZ,KAAKwc,MAALxc,CAAqBg5G,OAAvC,EAAkBh1G,YAAlB,EAAkBA,GAAlB,EAAWuS,QAAAA,IAAAA,EAAAA;;;KA1De1F,GA2EhC;IA3EgC+lG,aApShC;EAAA;EAsXA,YAAA;;EA+CA,UA/C6B/lG,aAAAA,GAAAA,GAIrBooG,WAAAA,aAAAA,GAAN,UAAmBzC,CAAnB;;;iBACEx2G,KAAK62G,KAAL72G,KAAAA,EACAA,KAAKk5G,OAALl5G,KADAA;;;KALyB6Q,EASrBooG,WAAAA,WAAAA,GAAN,UAAiBpC,CAAjB,EAAgCL,CAAhC;;;EAKE,aAAWjgG,CAAX,IAJY,QAARigG,CAAQ,KACVA,MADU,GAGZx2G,KAAK62G,KAAL72G,CAAWhE,IAAXgE,CAAgB62G,CAAhB72G,CAHY,EAIMw2G,CAAlB,EAC2B,QAArBx2G,KAAKk5G,OAALl5G,CAAauW,CAAbvW,CAAqB,KACvBA,KAAKk5G,OAALl5G,CAAauW,CAAbvW,MADuB,GAGzBA,KAAKk5G,OAALl5G,CAAauW,CAAbvW,EAAkBhE,IAAlBgE,CAAuBw2G,EAAKjgG,CAALigG,CAAvBx2G,CAHyB;;;KAfF6Q,EAyBrBooG,WAAAA,SAAAA,GAAN;;;;EAIE,iBAAW1iG,CAAX,IAHMkgG,MAAAA,EACA5jG,MADA4jG,EAEAhwG,MAFAgwG,EAGYz2G,KAAKk5G,OAAvB,EAEE,KADMC,IAAan5G,KAAKk5G,OAALl5G,CAAauW,CAAbvW,CAAbm5G,EACGp+G,IAAI,CAAb,EAAgBA,IAAIo+G,EAAW/+G,MAA/B,IAAyCW,CAAzC,EAC+B,mBAAlBo+G,EAAWp+G,CAAXo+G,CAAkB,KACrBxC,IAAcwC,EAAWp+G,CAAXo+G,CAAdxC,EACNF,EAASz6G,IAATy6G,CAAcE,EAAY3xG,IAAZ2xG,EAAdF,CADME,EAEN9jG,EAAK7W,IAAL6W,CAAU0D,CAAV1D,CAFM8jG,EAGNlwG,EAAQzK,IAARyK,CAAa1L,CAAb0L,CAJ2B,EAQlB,WAAMtJ,QAAQ4K,GAAR5K,CAAYs5G,CAAZt5G,EAAN;EACf,iBADMe,IAASib,MAAAA,EAATjb,EACGvB,IAAI,CAAb,EAAgBA,IAAIuB,EAAO9D,MAA3B,IAAqCuC,CAArC,EAC0BqD,KAAKk5G,OAALl5G,CAAa6S,EAAKlW,CAALkW,CAAb7S,EAAsByG,EAAQ9J,CAAR8J,CAAtBzG,EACRgT,OADQhT,IAExBA,KAAKk5G,OAALl5G,CAAa6S,EAAKlW,CAALkW,CAAb7S,EAAsByG,EAAQ9J,CAAR8J,CAAtBzG,IAAoC9B,EAAOvB,CAAPuB,EAAU,CAAVA,CAFZ8B;;;KA1CD6Q,GA+C7B;IA/C6B+lG,aAtX7B;EAAA;EA2bE,YAAA,CAAY5Z,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF,QAEE9P,EAAKy4G,UAALz4G,GAAkBq8F,EAAOya,YAAzB92G,EACAA,EAAK04G,QAAL14G,GAAgBq8F,EAAO0a,UADvB/2G,EAEAA,EAAK24G,UAAL34G,GAAkBq8F,EAAOoa,YAFzBz2G,EAGAA,EAAK44G,QAAL54G,GAAgBq8F,EAAOqa,UAHvB12G,EAIAA,EAAK64G,UAAL74G,GAAkBq8F,EAAOsa,YAJzB32G,EAKAA,EAAK84G,QAAL94G,GAAgBq8F,EAAOwa,UALvB72G;EAiDJ,UA3DoCkQ,aAAAA,GAAAA,GAkB5B6oG,WAAAA,aAAAA,GAAN,UAAmB7C,CAAnB,EAAkCL,CAAlC;;;;qBACyB,QAAnBx2G,KAAKs5G,UAAc,SAAA,OACf/B,qBAAqBf,CAArBe;EACN,mBADAp+F,MAAAA,QACMnZ,KAAKs5G,UAALt5G,CAAgB62G,CAAhB72G,EAAuBw2G,CAAvBx2G,EAAN;EAAAmZ,kBAAAA,eAAAA;;;;KArB8BtI,EAy
B5B6oG,WAAAA,WAAAA,GAAN,UAAiB7C,CAAjB,EAAgCL,CAAhC;;;;qBACuB,QAAjBx2G,KAAKu5G,QAAY,SAAA,OACbhC,qBAAqBf,CAArBe;EACN,mBADAp+F,MAAAA,QACMnZ,KAAKu5G,QAALv5G,CAAc62G,CAAd72G,EAAqBw2G,CAArBx2G,EAAN;EAAAmZ,kBAAAA,eAAAA;;;;KA5B8BtI,EAgC5B6oG,WAAAA,aAAAA,GAAN,UAAmB7tF,CAAnB,EAAkC2qF,CAAlC;;;;qBACyB,QAAnBx2G,KAAKw5G,UAAc,SAAA,OACfjC,qBAAqBf,CAArBe;EACN,mBADAp+F,MAAAA,QACMnZ,KAAKw5G,UAALx5G,CAAgB6rB,CAAhB7rB,EAAuBw2G,CAAvBx2G,EAAN;EAAAmZ,kBAAAA,eAAAA;;;;KAnC8BtI,EAuC5B6oG,WAAAA,WAAAA,GAAN,UAAiB7tF,CAAjB,EAAgC2qF,CAAhC;;;;qBACuB,QAAjBx2G,KAAKy5G,QAAY,SAAA,OACblC,qBAAqBf,CAArBe;EACN,mBADAp+F,MAAAA,QACMnZ,KAAKy5G,QAALz5G,CAAc6rB,CAAd7rB,EAAqBw2G,CAArBx2G,EAAN;EAAAmZ,kBAAAA,eAAAA;;;;KA1C8BtI,EA8C5B6oG,WAAAA,aAAAA,GAAN,UAAmBlD,CAAnB;;;;qBACyB,QAAnBx2G,KAAKo5G,UAAc,SAAA,OACf7B,qBAAqBf,CAArBe;EACN,mBADAp+F,MAAAA,QACMnZ,KAAKo5G,UAALp5G,CAAgBw2G,CAAhBx2G,EAAN;EAAAmZ,kBAAAA,eAAAA;;;;KAjD8BtI,EAqD5B6oG,WAAAA,WAAAA,GAAN,UAAiBlD,CAAjB;;;;qBACuB,QAAjBx2G,KAAKq5G,QAAY,SAAA,OACb9B,qBAAqBf,CAArBe;EACN,mBADAp+F,MAAAA,QACMnZ,KAAKq5G,QAALr5G,CAAcw2G,CAAdx2G,EAAN;EAAAmZ,kBAAAA,eAAAA;;;;KAxD8BtI,GA2DpC;IA3DoC+lG,aAnbpC,+BAmfqCG;EAGnC,SAAiB,QAAbA,CAAa,GACR,IADQ,GAGbA,aAAqBH,YAArBG,IACMA,EADNA,GAGAl7G,MAAMC,OAAND,CAAck7G,CAAdl7G,KAA4Bk7G,EAAU,CAAVA,aAAwBH,YAApD/6G,GACKk7G,CADLl7G,GAKAi3G,OAAqBiE,CAArBjE,EACmBhxG,GADnBgxG,CAEA,UAAA6G,CAAA;EAAkB,WAAA,IAAID,cAAJ,CAAmBC,CAAnB,CAAA;KAFlB7G,CAXJ;EAuBF;EAOE,YAAA,YAcO8G,6BAAAA,GAAP,UACIC,CADJ,EAC4BC,CAD5B;EAEEhO,SAAKpwG,MAALowG,CACI+N,KAAkB,CAAlBA,IAAuBz+G,OAAOohG,SAAPphG,CAAiBy+G,CAAjBz+G,CAD3B0wG,EAEI,gEACe+N,CAHnB/N,GAIA8N,EAA4BG,iBAA5BH,CAA8CE,CAA9CF,CAJA9N,EAKgE,QAA5D8N,EAA4BI,YAA5BJ,CAAyCC,CAAzCD,CAA4D,KAC9DA,EAA4BI,YAA5BJ,CAAyCC,CAAzCD,MAD8D,CALhE9N,EAQA8N,EAA4BI,YAA5BJ,CAAyCC,CAAzCD,EAAyD59G,IAAzD49G,CACIE,CADJF,CARA9N;KAFK8N,EAcQA,mBAAAA,GAAf,UAAiCE,CAAjC;EAEE,SAAK,IAAMG,CAAX,IAAwBL,EAA4BI,YAApD,EAAkE;EAC3CJ,QAA4BI,YAA5BJ,EAA0CK,CAA1CL,EACRx5G,OADQw5G,CACA,UAAAM,CAAA;EACnB,YAAIA,MAASJ,CAAb,EACE,MAAM,IAAIpV,UAAJ,CAAe,iCAAf,CAAN;SAHiBkV;;KAjBlBA,EA6BUA,OAAAA,GAAjB;EACEA,MAA4BI,YAA5BJ,KAAAA;KA9BKA,EAyCAA,iBAAAA,GAAP,UAAuBC,CAAvB;EACE,QAAMG,MAAN,CACA,KAAK,IAAMC,CAAX,IAAwBL,EAA4BI,YAApD,EAAkE;EAChE,UAAMpO,KAASqO,CAAf,CACIJ,KAAkBjO,CAAlBiO,IACFG,EAAah+G,IAAbg+G,MAAAA,CAAAA,CAAAA,EAAqBJ,EAA4BI,YAA5BJ,CAAyChO,CAAzCgO,CAArBI,CADEH;EAIN,YAAOG,EAAal4G,GAAbk4G,CAAiB,UAAAE,CAAA;EAAQ,aAAA,IAAIA,CAAJ,EAAA;OAAzBF,CAAP;KAjDKJ,EApBQA,cAAAA,KAoBRA,GAmDT;KAxEA,6BA2EI7C,GAA2BY,GAC3Bp2G,GAAgC44G,GAAgBC,GAChDC,GAAyBC,GAAuB/tF,GAChDguF,GACAC;EACF,MAAMtB,IAAU,IAAID,OAAJ,EAAhB;EAAA,MACMwB,KACJ,IAAIhC,UAAJ,CAAed,CAAf,UACGiC,4BAA4Bc,eAA5Bd,CAA4Cr4G,CAA5Cq4G,EAHL,CAKiB,QAAb7C,CAAa,IACf0D,EAAgBz+G,IAAhBy+G,MAAAA,CAAAA,CAAAA,EAAwB1D,CAAxB0D,CADe,EAGjBA,EAAgBz+G,IAAhBy+G,CAAqBvB,CAArBuB,CAHiB,CAIjB,IAAME,IAAe,IAAI1D,YAAJ,CAAiBwD,CAAjB,CAArB,CAeA,OAVAE,EAAazD,SAAbyD,GACER,WACAC,iBACAQ,SAASP,GACTQ,OAAOP,GACP/tF,cACAhrB,YACAg5G,iBACAvB,SAASwB,GARXG,KAUQA,iBAAczB,YAAtB;wBE7oB0Bx+G,GAAWoD;EACrC,SAAO4a,KAAK;EACV,QAAMoiG,IAAYlU,MAAQmU,SAASrgH,CAATqgH,CAARnU,EAAqB9oG,CAArB8oG,GAA2B,CAA3BA,CAAlB;EAAA,QACMoU,IAAgBzT,IAAQ19E,OAAO2mD,SAAP3mD,CAAR09E,EAA2B0T,SAAavgH,CAAbugH,CAA3B1T,CADtB;EAAA,QAEMtgG,IAAO0/F,KAASuU,QAAYJ,CAAZI,EAAuBF,CAAvBE,CAATvU,CAFb,CAGA,OAAOa,IAAQ9sG,CAAR8sG,EAAWvgG,CAAXugG,CAAP;KAJK9uF,CAAP;EAwBF,4BAAA,CAAiCyiG,CAAjC,EAAgDC,CAAhD;EACE,SAAO1iG,KAAK;EAAM,WAAA2iG,KAASN,SAAS7O,IAAQkP,CAARlP,EAAeiP,CAAfjP,CAAT6O,CAATM,GAA2C,CAA3CA,CAAA;KAAX3iG,CAAP;EAqBF,2BAAA,CAAkCyiG,CAAlC,EAAiDC,CAAjD;EACE,SAAO1iG,KAAK;EAAM,WAAA2iG,KAAS1P,IAAQO,IAAQkP,CAARlP,EAAeiP,CAAfjP,CAARP,CAAT0P,GAA0C,CAA1CA,CAAA;KAAX3iG,CAAP
;EAmBF,qCAAA,CACIyiG,CADJ,EACmBC,CADnB;EAEE,SAAO1iG,KAAK;EACV,QAAMvd,IAAO+wG,IAAQiP,CAARjP,EAAekP,CAAflP,CAAb;EAAA,QACMoP,IACFhU,YAAgBqE,IAAQwP,CAARxP,CAAhBrE,EAAgC92B,SAAhC82B,EAA2ClsG,OAAOmgH,SAAlDjU,CAFJ;EAAA,QAGMkU,IAAY7P,IAAQnE,IAAQrsG,CAARqsG,EAAc8T,CAAd9T,CAARmE,CAHlB,CAIA,OAAOpE,IAAQG,UAAU,GAAVA,CAARH,EAA0B8T,KAASG,CAATH,GAAqB,CAArBA,CAA1B9T,CAAP;KALK7uF,CAAP;EASF,qCAAA,CACIyiG,CADJ,EACmBC,CADnB;EAEE,SAAO1iG,KAAK;EACV,QAAMw0D,IAAMw6B,UAAU,CAAVA,CAAZ;EAAA,QAEM+T,IAAcnU,YAAgB8T,CAAhB9T,EAAuB92B,SAAvB82B,EAAkClsG,OAAOmgH,SAAzCjU,CAFpB;EAAA,QAGMoU,IAAWC,MAAQlU,IAAQv6B,CAARu6B,EAAagU,CAAbhU,CAARkU,CAHjB;EAAA,QAKML,IAAchU,YAAgB6T,CAAhB7T,EAAuB92B,SAAvB82B,EAAkClsG,OAAOmgH,SAAzCjU,CALpB;EAAA,QAMMsU,IAAYD,MAAQlU,IAAQv6B,CAARu6B,EAAa6T,CAAb7T,CAARkU,CANlB,CAQA,OAAON,KAASN,SAAS7O,IAAQwP,CAARxP,EAAkB0P,CAAlB1P,CAAT6O,CAATM,GAAkD,CAAlDA,CAAP;KATK3iG,CAAP;EAaF,sBAAA,CAA6ByiG,CAA7B,EAA4CC,CAA5C;EACE,SAAO1iG,KAAK;EACV,QAAMmjG,IAAanU,UAAU,CAAVA,CAAnB;EAAA,QACMx6B,IAAMw6B,UAAU,CAAVA,CADZ;EAAA,QAEMoU,IACFZ,QAAYW,CAAZX,EAAwBhP,IAAQh/B,CAARg/B,EAAa3E,IAAQ4T,CAAR5T,EAAe6T,CAAf7T,CAAb2E,CAAxBgP,CAHJ,CAIA,OAAOG,KAASN,SAASe,CAATf,CAATM,GAA+B,CAA/BA,CAAP;KALK3iG,CAAP;EASF,eAAA,CAAsByiG,CAAtB,EAAqCC,CAArC;EACE,SAAO1iG,KAAK;EACV,QAAMmjG,IAAanU,UAAU,CAAVA,CAAnB;EAAA,QACMx6B,IAAMw6B,UAAU,CAAVA,CADZ;EAAA,QAEMoU,IACFZ,QAAYW,CAAZX,EAAwBhP,IAAQh/B,CAARg/B,EAAa3E,IAAQ4T,CAAR5T,EAAe6T,CAAf7T,CAAb2E,CAAxBgP,CAHJ,CAIA,OAAOG,KAASS,CAATT,GAAqB,CAArBA,CAAP;KALK3iG,CAAP;EASF,0BAAA,CAAiCyiG,CAAjC,EAAgDC,CAAhD;EACE,SAAO1iG,KAAK;EACV,QAAMmjG,IAAanU,UAAU,CAAVA,CAAnB;EAAA,QACMx6B,IAAMw6B,UAAU,CAAVA,CADZ;EAAA,QAEMqU,IAAMnV,MAAQW,IAAQ4T,CAAR5T,EAAe6T,CAAf7T,CAARX,GAAgC,CAAhCA,CAFZ;EAAA,QAGM/7F,IAAMi/F,IAAQvC,IAAQ2E,IAAQh/B,CAARg/B,EAAaiP,CAAbjP,CAAR3E,EAA6B6T,CAA7B7T,CAARuC,GAA8C,CAA9CA,CAHZ,CAIA,OAAOoR,QAAYW,CAAZX,EAAwBzT,IAAQv6B,CAARu6B,EAAayE,IAAQrhG,CAARqhG,EAAa6P,CAAb7P,CAAbzE,CAAxByT,CAAP;KALKxiG,CAAP;EAiBF,iBAAA,CAAwByiG,CAAxB,EAAuCC,CAAvC;EACE,SAAO1iG,KAAK;EACV,QAAMsjG,IAAOtU,UAAUntG,KAAK4G,GAAL5G,CAAS,CAATA,CAAVmtG,CAAb;EAAA,QACMuU,IAAiB/P,IAAQkP,CAARlP,EAAeiP,CAAfjP,CADvB;EAAA,QAEMgQ,IAAgBhQ,IAClBzE,IACIwU,CADJxU,EAEI0U,SAAa5U,IAAQG,WAAW,CAAXA,CAARH,EAAyB0U,CAAzB1U,CAAb4U,CAFJ1U,CADkByE,EAIlB8P,CAJkB9P,CAFtB,CAOA,OAAOmP,KAASa,CAATb,GAAyB,CAAzBA,CAAP;KARK3iG,CAAP;EAqBF,iCAAA,CACI29E,CADJ,EACoBvkF,CADpB,EACoCsqG,CADpC;EAEE,0BADkCA,SAC3B1jG,KAAK;EACV,QAAI0jG,CAAJ,EACEtqG,IAASuqG,QAAYvqG,CAAZuqG,CAATvqG,CADF,KAEO;EAEL,UAAMwqG,IAAY1V,MAAQ90F,CAAR80F,EAAgB90F,EAAO7V,KAAP6V,CAAa1X,MAAb0X,GAAsB,CAAtC80F,GAAyC,CAAzCA,CAAlB,CACA90F,IAAS01F,IAAQ11F,CAAR01F,EAAgB8U,CAAhB9U,CAAT11F;EAGF,YADAA,IAASw1F,YAAgBx1F,CAAhBw1F,EAAwB92B,SAAxB82B,EAAmC,IAAI92B,SAAvC82B,CAATx1F,EACOk6F,IAAQpF,MACXW,IAAQlR,EAAOjxE,OAAPixE,EAARkR,EAA0BoU,MAAQ7pG,CAAR6pG,CAA1BpU,CADWX,EACiC90F,EAAO7V,KAAP6V,CAAa1X,MAAb0X,GAAsB,CADvD80F,CAARoF,CAAP;KATKtzF,CAAP;EAuBF,uCAAA,CACI29E,CADJ,EACoBvkF,CADpB,EACoCsqG,CADpC;EAEE,0BADkCA,SAC3B1jG,KAAK;EACV,QAAM6jG,IAAaC,MAAUC,UAAUpmB,CAAVomB,CAAVD,EAA6Bv4C,KAA7Bu4C,EAAnB;EAAA,QACM3uG,IAAciE,EAAO7V,KAD3B,CAKA,OAAOygH,wBAFHC,OAAWJ,CAAXI,EAAuB9uG,EAAYA,EAAYzT,MAAZyT,GAAqB,CAAjCA,CAAvB8uG,EACKx3G,OADLw3G,CACa9uG,CADb8uG,CAEGD,EAAsC5qG,CAAtC4qG,EAA8CN,CAA9CM,CAAP;KANKhkG,CAAP;EA+BF,uCAAA,CACI29E,CADJ,EACoBvkF,CADpB;EAEE,SAAO4G,KAAK;EACV,QAAMg4D,IAAYwqC,QAAYppG,CAAZopG,EAAoB0B,UAAc9qG,CAAd8qG,CAApB1B,CAAlB;EAAA,QACMvqC,IAAgB42B,IAAQz1F,CAARy1F,EAAgBlR,CAAhBkR,CADtB;EAAA,QAEM32B,IACF+qC,MAAQlU,IAAQC,UAAU,CAAVA,CAARD,EAAsBoV,IAAQ7Q,IAAQL,IAAQ75F,CAAR65F,CAARK,CAAR6Q,CAAtBpV,CAARkU,CAHJ,CAKA,OADelU,IAAQyE,IAAQx7B,CAARw7B,EAAmBv7B,CAA
nBu7B,CAARzE,EAA2C72B,CAA3C62B,CACf;KANK/uF,CAAP;EAUF,4BAAA,CAAmCyiG,CAAnC,EAAkDC,CAAlD;EACE,SAAO1iG,KAAK;EACV,QAAItH,CAAJ,CAGA,OAFAA,IAAIk2F,YAAgB8T,CAAhB9T,EAAuB92B,SAAvB82B,EAAkC,IAAI92B,SAAtC82B,CAAJl2F,EACAA,IAAIuqG,MAAQnU,IAAQp2F,CAARo2F,EAAW0E,IAAQ+O,SAAa7pG,CAAb6pG,CAAR/O,EAAyB96F,CAAzB86F,CAAX1E,CAARmU,CADJvqG,EAEOiqG,KAASyB,8BAA8B3B,CAA9B2B,EAAqC1rG,CAArC0rG,CAATzB,GAAmD,CAAnDA,CAAP;KAJK3iG,CAAP;EAQF,mCAAA,CACIyiG,CADJ,EACmBC,CADnB;EAEE,SAAO1iG,KAAK;EACV,QAAM4iG,IAAchU,YAAgB6T,CAAhB7T,EAAuB92B,SAAvB82B,EAAkC,CAAlCA,CAApB;EAAA,QACMmU,IAAcnU,YAAgB8T,CAAhB9T,EAAuB92B,SAAvB82B,EAAkC,CAAlCA,CADpB,CAEA,OAAOV,MACHW,IAAQ4T,CAAR5T,EAAeoU,MAAQnU,IAAQ8T,CAAR9T,EAAqBiU,CAArBjU,CAARmU,CAAfpU,CADGX,GAC0D,CAD1DA,CAAP;KAHKluF,CAAP;EAQF,iBAAA,CAAwByiG,CAAxB,EAAuCC,CAAvC;EACE,SAAO1iG,KAAK;EACV,QAAMqkG,IAAUpB,MAAQlU,IAAQC,UAAUl3B,SAAVk3B,CAARD,EAA8B2T,CAA9B3T,CAARkU,CAAhB,CACA,OAAON,KAASnP,IAAQkP,CAARlP,EAAe3E,IAAQ4T,CAAR5T,EAAewV,CAAfxV,CAAf2E,CAATmP,GAAmD,CAAnDA,CAAP;KAFK3iG,CAAP;EAyBF,yBAAA,CAAgCyiG,CAAhC,EAA+CC,CAA/C;EACE,SAAO1iG,KAAK;EACV,QAAMskG,IAAiBC,YAAY9B,CAAZ8B,GAAoB,CAApBA,CAAvB;EAAA,QACMC,IAAiBD,YAAY7B,CAAZ6B,GAAoB,CAApBA,CADvB;EAAA,QAEME,IAAY5V,IAAQyV,CAARzV,EAAwB2V,CAAxB3V,CAFlB,CAGA,OAAOyE,IAAQpF,MAAQuW,CAARvW,GAAoB,CAApBA,CAARoF,CAAP;KAJKtzF,CAAP;EAQF,aAAA,CAgBoB0kG,CAhBpB;EAiBE,MAAMC,MACJ7qC,sCACA8qC,sCACAC,0DACAC,0DACAC,4BACAC,cACAC,oCACAC,kBACAlB,kDACAmB,8DACAC,wCACAC,sDACAC,kBACAC,kCAdF,CAgBA,IAA8B,mBAAnBb,CAAX,EAAwC;EACtC,QAAIA,KAAkBC,CAAtB,EACE,OAAOA,EAAUD,CAAVC,CAAP,CAEF,IAAIa,IAAS,kBAAgBd,CAA7B,CAMA,MALIA,EAAelY,WAAfkY,GAA6Be,QAA7Bf,CAAsC,qBAAtCA,MACFc,IAAS,kBAAgBd,CAAhB,yFADPA,GAKE,IAAI1Y,UAAJ,CAAewZ,CAAf,CAAN;EAEA,UAAOd,CAAP;2BCzS2BjC,GAAeC;EAC5C,SAAO1iG,KAAK;EACV,QAAM0hE,IAAYmtB,IAAQG,UAAU,EAAVA,CAARH,EAAwB0T,SAAaG,CAAbH,CAAxB1T,CAAlB;EAAA,QACM6W,IAAmBC,OAAOC,QAAYlD,CAAZkD,EAAmBlkC,CAAnBkkC,CAAPD,EAAsClD,EAAMl9G,KAA5CogH,CADzB,CAEA,OAAOhD,KAASkD,MAAUpD,CAAVoD,EAAiBH,CAAjBG,CAATlD,GAA8C,CAA9CA,CAAP;KAHK3iG,CAAP;EAuBF,6BAAA,CAAoCyiG,CAApC,EAAmDC,CAAnD;EACE,SAAO1iG,KACH;EAAM,WAAA2lG,OACFE,MAAUC,OAAWrD,CAAXqD,GAAmB,CAAnBA,CAAVD,EAAiCC,OAAWpD,CAAXoD,GAAmB,CAAnBA,CAAjCD,CADEF,EACuD,SADvDA,CAAA;KADH3lG,CAAP;EAKF,uBAAA,CAAuByiG,CAAvB,EAAsCC,CAAtC;EACE,SAAO1iG,KAAK;EACV,QAAMw0D,IAAMw6B,UAAU,CAAVA,CAAZ,CACA,OAAO+W,WAAetD,EAAMpxG,KAANoxG,CAAYjuC,CAAZiuC,CAAfsD,EAAiCrD,EAAMrxG,KAANqxG,CAAYluC,CAAZkuC,CAAjCqD,EACF3jH,GADE2jH,GAEFl5G,IAFEk5G,CAEG,SAFHA,CAAP;KAFK/lG,CAAP;EAQF,wBAAA,CAAwByiG,CAAxB,EAAuCC,CAAvC;EACE,SAAO1iG,KAAK;EACV,QAAMw0D,IAAMw6B,UAAU,CAAVA,CAAZ;EAAA,QACM99E,IAAO89E,UAAU,CAAVA,CADb,CAEA,OAAO+W,WAAetD,EAAMpxG,KAANoxG,CAAYjuC,CAAZiuC,CAAfsD,EAAiCrD,EAAMrxG,KAANqxG,CAAYxxF,CAAZwxF,CAAjCqD,EACF3jH,GADE2jH,GAEFl5G,IAFEk5G,CAEG,SAFHA,CAAP;KAHK/lG,CAAP;EASF,wBAAA,CAAwByiG,CAAxB,EAAuCC,CAAvC;EACE,SAAO1iG,KAAK;EACV,QAAMw0D,IAAMw6B,UAAU,CAAVA,CAAZ;EAAA,QACM99E,IAAO89E,UAAU,CAAVA,CADb,CAEA,OAAO+W,WAAetD,EAAMpxG,KAANoxG,CAAYvxF,CAAZuxF,CAAfsD,EAAkCrD,EAAMrxG,KAANqxG,CAAYluC,CAAZkuC,CAAlCqD,EACF3jH,GADE2jH,GAEFl5G,IAFEk5G,CAEG,SAFHA,CAAP;KAHK/lG,CAAP;EA0CF,mBAAA,CAA0ByiG,CAA1B,EAAyCC,CAAzC;EACE,SAAO1iG,KAAK;EACV,QAAMkR,IAAO89E,UAAU,CAAVA,CAAb;EAAA,QAEMgX,IAAKC,cAAcxD,CAAdwD,EAAqBvD,CAArBuD,CAFX;EAAA,QAGMC,IAAKC,eAAe1D,CAAf0D,EAAsBzD,CAAtByD,CAHX;EAAA,QAKMC,IAAcJ,EAAGr2G,GAAHq2G,CAAOE,CAAPF,CALpB,CAOA,OAAOK,MAAUT,QAAYQ,CAAZR,EAAyB10F,CAAzB00F,CAAVS,EAA0CL,EAAG51G,GAAH41G,CAAOI,CAAPJ,CAA1CK,EAA+Dn1F,CAA/Dm1F,EACFx5G,IADEw5G,CACG,SADHA,CAAP;KARKrmG,CAAP;EA8CF,gBAAA,CAAuByiG,CAAvB,EAAsCC,CAAtC;EACE,SAAO1iG,KAAK;EACV,QAAMkR,IAAO89E,UAAU,CAAVA,CAAb;EAAA,QAEMgX,IAAKC,cAAcxD,CAAdwD,EAAqBvD
,CAArBuD,CAFX;EAAA,QAGMn6G,IAAKw6G,eAAe7D,CAAf6D,EAAsB5D,CAAtB4D,CAHX;EAAA,QAKMF,IAAcJ,EAAGr2G,GAAHq2G,CAAOl6G,CAAPk6G,CALpB,CAOA,OAAOK,MAAUT,QAAYQ,CAAZR,EAAyB10F,CAAzB00F,CAAVS,EAA0CL,EAAG51G,GAAH41G,CAAOI,CAAPJ,CAA1CK,EAA+Dn1F,CAA/Dm1F,EACFx5G,IADEw5G,CACG,SADHA,CAAP;KARKrmG,CAAP;EA4BF,8BAAA,CAAmCyiG,CAAnC,EAAkDC,CAAlD;EACE,SAAO6D,mBAAuB9D,CAAvB8D,EAA8B7D,CAA9B6D,CAAP;EAGF,mCAAA,CACI9D,CADJ,EACmBC,CADnB;EAEE,QAAM,IAAIzW,mBAAJ,EAAN;EAGF,KAUaua,QAAM1sC,kBAVnB;EAAA,IAWa2sC,QAAM3sC,kBAXnB;EAAA,IAYa4sC,QAAM9B,iBAZnB;EAAA,IAaa+B,QAAM/B,iBAbnB;EAAA,IAcagC,SAAO/B,2BAdpB;EAAA,IAeagC,SAAOhC,2BAfpB;EAAA,IAgBab,4BAA0B8C,uBAhBvC;EAAA,IAiBaC,WAASxB,eAjBtB;EAAA,IAkBaJ,kCAAgC6B,6BAlB7C,CAsBA,cAAA,CAAoBva,CAApB;EACE,MAAMwa,MACJC,gCACAC,0CACAC,sBACApD,oDACAmB,gEACAqB,YACAC,YACAC,YACAC,YACAC,cACAC,cACAE,kBAZF,CAcA,IAA0B,mBAAfta,CAAe,IAAYA,KAAcwa,CAApD,EACE,OAAOA,EAAWxa,CAAXwa,CAAP,CACK,IAA0B,mBAAfxa,CAAe,IAA0B,QAAdA,CAAtC,EACL,OAAOA,CAAP,CAEA,MAAM,IAAIT,UAAJ,CAAe,oBAAkBS,CAAjC,CAAN;yBCtPyBA;EAC3B,MAAM4a,MACJC,SAAW;EAAM,aAAA3c,MAAMG,OAANH,CAAc,GAAdA,CAAA;SACjB4c,UAAY;EAAM,aAAA5c,MAAME,QAANF,CAAe,CAAfA,EAAkB,GAAlBA,EAAwB7yB,SAAxB6yB,CAAA;SAClB6c,MAAQ;EAAM,aAAA7c,MAAMM,IAANN,CAAW,IAAXA,EAAkB,EAAlBA,EAAuB,IAAvBA,EAA8B7yB,SAA9B6yB,CAAA;SACd8c,QAAU;EAAM,aAAA9c,MAAMK,MAANL,CAAa,IAAbA,EAAoB,EAApBA,EAAyB,IAAzBA,EAAgC7yB,SAAhC6yB,EAA2C,CAA3CA,CAAA;SAChB+c,SAAW;EAAM,aAAA/c,MAAMI,OAANJ,CAAc,IAAdA,EAAqB,EAArBA,EAA0B,CAA1BA,EAA6B7yB,SAA7B6yB,CAAA;SACjBgd,KAAO;EAAM,aAAAhd,MAAMC,GAAND,CAAU,GAAVA,CAAA;SANf,CAeA,IAPA0c,EAAsBvc,OAAtBuc,GAA0BA,EAAsBC,OAAhDD,EACAA,EAAuBxc,QAAvBwc,GAA2BA,EAAuBE,QADlDF,EAEAA,EAAmBpc,IAAnBoc,GAAuBA,EAAmBG,IAF1CH,EAGAA,EAAqBrc,MAArBqc,GAAyBA,EAAqBI,MAH9CJ,EAIAA,EAAsBtc,OAAtBsc,GAA0BA,EAAsBK,OAJhDL,EAKAA,EAAkBzc,GAAlByc,GAAsBA,EAAkBM,GALxCN,EAOI5a,KAAc4a,CAAlB,EACE,OAAOA,EAAa5a,CAAb4a,GAAP,CAEF,MAAM,IAAIrb,UAAJ,CAAe,uBAAqBS,CAApC,CAAN;yBCbE2R,GAAkBwJ,GAAqBC,GAEvCC;qBAAAA,IACIt/G,QAAQC,KACd,IAkBIs/G,CAlBJ;EAAA,MAAMC,IAAiBC,sBAAsB7J,CAAtB6J,CAAvB;EAAA,MAGMC,KAAuB,gBAAgB,gBAAgB,UAH7D,CAmBA,IAfIF,KACFJ,IAAaA,KAAc,EAA3BA,EACAC,IAAYA,MAAc,KAAM,KAAM,EAFpCG,KAIFJ,IAAaA,KAAc,EAA3BA,EACAC,IAAYA,MAAc,KAAM,KAAM,KAAM,EAL1CG,GASAH,EAAUA,EAAUnmH,MAAVmmH,GAAmB,CAA7BA,KAAmC,CAAnCA,KAEFA,IAAYA,EAAUz+G,GAAVy+G,CAAc,UAAA9uE,CAAA;EAAK,WAAAl3C,KAAKkC,KAALlC,CAAW+lH,IAAa7uE,CAAxBl3C,CAAA;KAAnBgmH,CAFVA,CATAG,GAeCA,CAAL,EAGE,KAAK,IAAM5oH,CAAX,IAFA8oH,EAAU5kH,IAAV4kH,CAAe,iBAAfA,GACAH,MADAG,EAEoB9J,EAAM+J,YAA1B,EACEJ,EAAczkH,IAAdykH,MAAAA,CAAAA,CAAAA,EAAsB3J,EAAM+J,YAAN/J,CAAmBh/G,CAAnBg/G,CAAtB2J,EAIJD,EAAQ,IAAIzjH,MAAJ,CAAWujH,CAAX,CAARE,GACAM,SAASF,CAATE,EAAoBP,CAApBO,EAA+BN,CAA/BM,CADAN,EAEAA,EAAQ,IAAIzjH,MAAJ,CAAWujH,CAAX,CAARE,CAFAA,CAKA,KADA,IAAMO,IAASjK,EAAMiK,MAArB,EACShmH,IAAI,CAAb,EAAgBA,IAAIgmH,EAAO3mH,MAA3B,IAAqCW,CAArC,EACM2lH,IACFM,kBAAkBD,EAAOhmH,CAAPgmH,CAAlBC,EAA6BT,CAA7BS,EAAwCR,CAAxCQ,CADEN,GAGFO,iCACIF,EAAOhmH,CAAPgmH,CADJE,EACeV,CADfU,EAC0BR,CAD1BQ,EACyCT,CADzCS,CAHEP,EAMJF,GAASzlH,MAAMgmH,EAAO3mH,MAAP2mH,GAAgB,CAAtBhmH,GAA0B,GAA1BA,GAAgC,KAAKgC,OAAOujH,EAArDE,CANIE,CAUL5J,EAAcoK,gCAAdpK,GAED,IAAMqK,IAAiBC,qBAAqBtK,CAArBsK,CAAvB;EAAA,MACMC,IAAoBC,qBAAqBxK,EAAMnE,mBAA3B2O,CAD1B,CAGAd,EAAQ,oBAAiBW,IAAiBE,CAAlC,CAARb,GACAA,EAAQ,uBAAqBW,CAA7BX,CADAA,EAEAA,EAAQ,2BAAyBa,CAAjCb,CAFAA,EAGAA,EAAQ,IAAIzjH,MAAJ,CAAWujH,CAAX,CAARE,CAHAA;EAMF,8BAAA,CAA8B1J,CAA9B;EAUE,SAPgD,QAA3CA,EAAcyK,yBAA6B,GAE1CD,qBAAsBxK,EAAcyK,yBAApCD,CAF0C,GAI7BA,qBAAqBxK,EAAMpE,gBAA3B4O,CAGnB;EAGF,+BAAA,CAA+BxK,CAA/B;EACE,MAAI4J,KAAiB,CAArB;EAAA,MACMG,MADN;EAAA,MAEMW,MAFN,CAGA,KAAK,IAAM1pH,CAAX,IAAoBg/G,EAAM+J,YAA1B,EACEA,EAAa7kH,IAAb6kH,CA
AkB/J,EAAM+J,YAAN/J,CAAmBh/G,CAAnBg/G,CAAlB+J,EAEF,KAAyB,SAAA,EAAAY,KAAzB,EAAyBz9G,YAAzB,EAAyBA,GAAzB;EAAK,QAAM09G,QAAN,CACH,IAAIA,EAAWtnH,MAAXsnH,GAAoB,CAApBA,IACsB,MAAtBA,EAAWtnH,MAAW,IAAKsnH,EAAW,CAAXA,EAAcpR,aAAdoR,CAA4BtnH,MAA5BsnH,GAAqC,CADpE,EACuE;EACrEhB,WAAiB,CAAjBA,CACA;EAEFc,OAAMxlH,IAANwlH,MAAAA,CAAAA,CAAAA,EAAcE,CAAdF;EAEF,OAAId,CAAJ,EAEE,KAAoB,SAAA,EAAAnuF,IAAAukF,EAAMiK,MAA1B,EAAoB5nG,YAApB,EAAoBA,GAApB;EAEE,SAFG,IACCwoG,KAAO,CADR,OAAA,EAEgBvlD,SAAM40C,YAAzB,EAAmBr+E,YAAnB,EAAmBA,GAAnB;EAAK,UAAMnhB,QAAN,CACH,KAA6B,MAAzBgwG,EAAMlgG,OAANkgG,CAAchwG,CAAdgwG,CAAJ,EAAgC;EAC9B,YAAIG,CAAJ,EAAU;EACRjB,eAAiB,CAAjBA,CACA;EAEAiB,cAAO,CAAPA;;EAIN,UAAKjB,CAAL,EACE;EAIN,UAAOA,CAAP;EAGF,kBAAA,CACIkB,CADJ,EACsBrB,CADtB,EAGIC,CAHJ;qBAGIA,IAA6Dt/G,QAAQC,KAEvE,KADA,IAAI+9B,IAAO,EAAX,EACSnkC,IAAI,CAAb,EAAgBA,IAAI6mH,EAAOxnH,MAA3B,IAAqCW,CAArC,EACMA,IAAI,CAAJA,KACFmkC,IAAOA,EAAKrhC,KAALqhC,CAAW,CAAXA,EAAcA,EAAK9kC,MAAL8kC,GAAc,CAA5BA,IAAiC,GADtCnkC,GAIJmkC,KADAA,KAAQ0iF,EAAO7mH,CAAP6mH,GACI/jH,MAAM,GAAG0iH,EAAUxlH,CAAVwlH,EAJjBxlH,EAKJmkC,KAAQ,IAAIniC,MAAJ,CAAWwjH,EAAUxlH,CAAVwlH,IAAerhF,EAAK9kC,MAA/B,CALJW,CAONylH,EAAQthF,CAARshF;EAQF,2BAAA,CACI1P,CADJ,EACkByP,CADlB,EAGIC,CAHJ;EAIE,MAAI3yG,CAAJ,CACA;EACEA,QAAcqrB,KAAKE,SAALF,CAAe43E,EAAMjjG,WAArBqrB,CAAdrrB;EACA,GAFF,CAEE,OAAOoR,CAAP;EACApR,QAAc,UAAdA;EAOFizG,aAJahQ,EAAMvyG,IAANuyG,OAAAA,GACKA,EAAMxL,YAANwL,EADLA,QAGkBjjG,GAAaijG,EAAM+Q,WAAN/Q,GAAoB7vG,QAApB6vG,GAC5CgQ,EAAiBP,CAAjBO,EAA4BN,CAA5BM;EAMF,0CAAA,CACIhQ,CADJ,EACkByP,CADlB,EACuCE,CADvC,EAGID,CAHJ;EAIE,MAAI3yG,CAAJ,CACA;EACEA,QAAcqrB,KAAKE,SAALF,CAAe43E,EAAMjjG,WAArBqrB,CAAdrrB;EACA,GAFF,CAEE,OAAOoR,CAAP;EACApR,QAAc,UAAdA;EAIF,QADA,IAAMi0G,MAAN,OAAA,EACmB3oG,IAAA23F,EAAME,YAAzB,EAAmBhtG,YAAnB,EAAmBA,GAAnB;EAAK,QAAMwN,QAAN,CACH,MAAqB,QAAjBivG,CAAiB,IAAQA,EAAcrmH,MAAdqmH,GAAuB,CAA/B,KACgB,MAAjCA,EAAcn/F,OAAdm/F,CAAsBjvG,CAAtBivG,EADJ,EAIA,KAAK,IAAI1lH,IAAI,CAAb,EAAgBA,IAAIyW,EAAK8+F,aAAL9+F,CAAmBpX,MAAvC,IAAiDW,CAAjD,EAAoD;EAClD,UAAMgnH,IAAevwG,EAAK8+F,aAAL9+F,CAAmBzW,CAAnByW,EAAsBjT,IAA3C;EAAA,UACMyjH,IAAoBxwG,EAAK++F,WAAL/+F,CAAiBzW,CAAjByW,CAD1B;EAAA,UAEMywG,IAAqBzwG,EAAKg/F,aAALh/F,CAAmBzW,CAAnByW,CAF3B,CAGAswG,EAAY9lH,IAAZ8lH,CACOC,OAAAA,GAAgBC,CAAhBD,OAAAA,GAAsCE,CAAtCF,MADPD;;EAIJ,OAAMvjH,IAAOuyG,EAAMvyG,IAAnB;EAAA,MACMu+F,IAAYgU,EAAMxL,YAANwL,EADlB;EAAA,MAEMoR,IAAyC,MAAvBJ,EAAY1nH,MAAW,GAAI,EAAJ,GAAS0nH,EAAY,CAAZA,CAFxD,CAQAhB,UAJKviH,QAAAA,GAASu+F,CAATv+F,QAAuBsP,GAAaijG,EAAM+Q,WAAN/Q,GAAoB7vG,QAApB6vG,IACvCoR,EAGFpB,EAAiBP,CAAjBO,EAA4BN,CAA5BM,EACA,KAAS/lH,IAAI,CAAb,EAAgBA,IAAI+mH,EAAY1nH,MAAhC,IAA0CW,CAA1C,EACE+lH,UAAU,IAAI,IAAI,IAAIgB,EAAY/mH,CAAZ+mH,EAAtBhB,EAAuCP,CAAvCO,EAAkDN,CAAlDM;wBChMA9jB,GACAyI;EAEF,0BAFEA,SAEKkD,uBACH3L,CADG2L,EACK5B,cAAc7J,gBAAd6J,CAA+B5J,MAA/B4J,GAAwC9J,YAD7C0L,EAEHlD,CAFGkD,EAEY,OAFZA,CAAP;ECIF,sCAAA,CACIpyF,CADJ,EACiBjc,CADjB,EACgCyJ,CADhC;EAEE,UAAgB,mBAARwS,CAAQ,IAA0B,mBAARA,CAAlB,IACA,kBAARA,MACM,MAAVjc,KAAgC,mBAAVyJ,CAF1B;EAWF,6BAAA,CACIo+G,CADJ,EAC+B5rG,CAD/B;EAEE,MAAuB,SAAnB4rG,CAAJ,EACE,OAAO,IAAP,CACK,IAA8B,mBAAnBA,CAAX,EACL,OAAOC,YAA0BD,CAA1BC,CAAP,CACK,IACwB,mBAAnBD,CAAmB,IACA,oBAAnBA,CAFL,EAGL,OAAOA,CAAP,CACK,IAAIA,aAA0BtmH,KAA9B,EAAqC;EAG1C,SAFA,IAAMwmH,MAAN,EACMC,IAAcH,EAAe/nH,MADnC,EAESW,IAAI,CAAb,EAAgBA,IAAIunH,CAApB,IAAmCvnH,CAAnC,EAAsC;EACpC,UAAMyb,IAAO2rG,EAAepnH,CAAfonH,CAAb,CACII,6BAA6BhsG,CAA7BgsG,EAAkCxnH,CAAlCwnH,EAAqC/rG,CAArC+rG,IACFF,EAAQrmH,IAARqmH,CAAa7rG,CAAb6rG,CADEE,GAGFF,EAAQrmH,IAARqmH,CAAaG,oBAAoBhsG,CAApBgsG,EAA0BjsG,CAA1BisG,CAAbH,CAHEE;EAMN,YAAOF,CAAP;EAGA,QADA,IAAMI,MAAN,OAAA,EAC0BtpG,IAAAhV,OAAO0O,IAAP1O,CAAYg+
G,CAAZh+G,CAA1B,EAA0BH,YAA1B,EAA0BA,GAA1B;EAAK,QAAM0+G,QAAN;EAAA,QACGC,IAAgBR,EAAeO,CAAfP,CADnB,CAEH,IAAoB,WAAhBO,CAAgB,IAAmC,mBAAlBC,CAArC,EAIEF,EAAOC,CAAPD,IAAsBE,CAAtBF,CAJF,KAKO;EACL,UAAMG,IAAQR,YAA0BM,CAA1BN,CAAd,CACAK,EAAOG,CAAPH,IAAgBD,oBAAoBG,CAApBH,EAAmCI,CAAnCJ,CAAhBC;;EAGJ,UAAOA,CAAP;EAUJ,6BAAA,CACII,CADJ,EAC6CtsG,CAD7C;EAEE,MAAiB,SAAbssG,CAAa,SAAqBxe,MAAbwe,CAAzB,EACE,OAAO,IAAP,CACK,IAAwB,mBAAbA,CAAX,EACL,OAAOjR,YAA0BiR,CAA1BjR,CAAP,CACK,IACkB,mBAAbiR,CAAa,IAAkC,oBAAbA,CADvC,EAEL,OAAOA,CAAP,CACK,IAAIA,aAAoBhnH,KAAxB,EAA+B;EAGpC,SAFA,IAAMinH,MAAN,EACMR,IAAcO,EAASzoH,MAD7B,EAESW,IAAI,CAAb,EAAgBA,IAAIunH,CAApB,IAAmCvnH,CAAnC,EAAsC;EACpC,UAAMyb,IAAOqsG,EAAS9nH,CAAT8nH,CAAb,CACIN,6BAA6BhsG,CAA7BgsG,EAAkCxnH,CAAlCwnH,EAAqC/rG,CAArC+rG,IACFO,EAAQ9mH,IAAR8mH,CAAatsG,CAAbssG,CADEP,GAGFO,EAAQ9mH,IAAR8mH,CAAaC,oBAAoBvsG,CAApBusG,EAA0BxsG,CAA1BwsG,CAAbD,CAHEP;EAMN,YAAOO,CAAP;EAGA,QADA,IAAME,MAAN,OAAA,EACoB7pG,IAAAhV,OAAO0O,IAAP1O,CAAY0+G,CAAZ1+G,CAApB,EAAoBH,YAApB,EAAoBA,GAApB;EAAK,QAAM4+G,QAAN;EAAA,QACGK,IAAUJ,EAASD,CAATC,CADb;EAAA,QAEGK,IAAQtR,YAA0BgR,CAA1BhR,CAFX,CAUDoR,EAAOE,CAAPF,IAPa,WAAVJ,CAAU,IAAoB,gBAAVA,CAAV,IACQ,mBAAZK,CADI,GAOGF,oBAAoBE,CAApBF,EAA6BH,CAA7BG,CAPH,GAKGE,CAEhBD;EAGJ,UAAOA,CAAP;OC1HEjkB,YAAU,QCsChB,oCAAA,CACI+R,CADJ,EACkBvhC,CADlB,EAC4C4zC,CAD5C,EAEIC,CAFJ;EAGE,OAAKD,EAAqBjxB,UAArBixB,CAAgC,IAAhCA,CAAL,EACE,MAAM,IAAIze,UAAJ,CACF,wDACAye,CAFE,CAAN,CAIF,OAAO5zC,CAAP;EAYF,oBAAA,CAAoBtxE,CAApB,EAAmChC,CAAnC,EAAiD8H,CAAjD;EACE,MAAMs/G,IAAWC,cAA4BrlH,CAA5BqlH,CAAjB,CACA,OAAOl/G,OAAOC,IAAPD,CACHnI,CADGmI,IACKlG,QAAyB,MAAjBjC,EAAM7B,MAAW,GAAI2J,CAAJ,GAAY+nG,KAAK/vG,OAAL+vG,CAAa/nG,CAAb+nG,GAD1C1nG,EAEHi/G,CAFGj/G,CAAP;EAmBF,6BAAA,CACIm/G,CADJ,EAC2BxC,CAD3B,EAC4CyC,CAD5C;qBAC4CA,QAO1C,KANA,IAAML,IAAuBI,EAA2BE,aAAxD,EACML,IAAkBG,EAAqB1vG,OAD7C,EAEM6vG,IAAa3C,EAAOj/G,GAAPi/G,CAAW,UAAAjQ,CAAA;EAAS,WAAAA,EAAMvyG,IAAN;KAApBwiH,CAFnB,EAKMzmH,MALN,OAAA,EAMoBqpH,KAApB,EAAoB3/G,YAApB,EAAoBA,GAApB;EACoB,aADT8sG,UACCvyG,IAAQ,KACS,QAArBjE,EAAMw2G,EAAMvyG,IAAZjE,CAAqB,KACvBA,EAAMw2G,EAAMvyG,IAAZjE,MADuB,GAGzBA,EAAMw2G,EAAMvyG,IAAZjE,EAAkB0B,IAAlB1B,CAAuBw2G,CAAvBx2G,CAJgB;EAWpB,QAFA,IAAMspH,IAAgBL,EAAqBh0C,OAA3C,EACMwlC,MADN,EAEStlG,IAAI,CAAb,EAAgBA,IAAIi0G,EAAWtpH,MAA/B,IAAyCqV,CAAzC,EAA4C;EAC1C,QAAMi7E,IAAOg5B,EAAWj0G,CAAXi0G,CAAb;EAAA,QACIG,IAAeD,EAAcl5B,CAAdk5B,CADnB,CAEoB,QAAhBC,CAAgB,KAClBA,MADkB,EAKpB,KADA,IAAIC,MAAJ,EACSnnH,IAAI,CAAb,EAAgBA,IAAIknH,EAAazpH,MAAjC,IAA2CuC,CAA3C,EAA8C;EAE5C,UAAMonH,IACFF,EAAalnH,CAAbknH,CADJ,CAGAC,EAAa9nH,IAAb8nH,CAAkB,IAAIrU,aAAJ,CAAkBuU,WAChCD,EAAmB9lH,KADa+lH,EACVD,EAAmB9nH,KADT+nH,EAEhCD,EAAmBhgH,KAFaigH,CAAlB,CAAlBF;EAIF,UAAoB,SAAA,EAAAvxF,IAAAj4B,EAAMowF,CAANpwF,CAApB,EAAoB6e,YAApB,EAAoBA,GAApB;EAAK,UAAM23F,CAAN;EAAA,UACGmT,KADGnT,UACqBvhC,OAD3B,CAIH,KAFAu0C,IAAeI,4BACXpT,CADWoT,EACJJ,CADII,EACUf,CADVe,EACgCd,CADhCc,GAEE9pH,WAAW6pH,EAAgB7pH,MAA5C,EAAoD;EAClD,aAAIopH,CAAJ,EAME,MAAM,IAAI9e,UAAJ,CACF,YAAUj1F,CAAV,cAAA,GAAuBqhG,EAAMvyG,IAA7B,gBAAA,GACG0lH,EAAgB7pH,MADnB,4CAAA,GAEQ0pH,EAAa1pH,MAFrB,iBADE,CAAN,CALA8G,QAAQ8Z,IAAR9Z,CACI,0CAAwC4vG,EAAMvyG,IAA9C,6CAAA,GAC0CulH,EAAa1pH,MADvD,SAAA,GAEM6pH,EAAgB7pH,MAFtB,OADJ8G;EAaJ,YAAK,IAAInG,IAAI,CAAb,EAAgBA,IAAI+oH,EAAa1pH,MAAjC,IAA2CW,CAA3C,GACMyoH,KACG1X,KAAKnwG,WAALmwG,CACGmY,EAAgBlpH,CAAhBkpH,EAAmBhoH,KADtB6vG,EAC6BgY,EAAa/oH,CAAb+oH,EAAgB7nH,KAD7C6vG,IASPiJ,EAAkB/4G,IAAlB+4G,EAAwBkP,EAAgBlpH,CAAhBkpH,GAAoBH,EAAa/oH,CAAb+oH,EAAgBr+G,IAAhBq+G,GAA5C/O,IAPI7zG,QAAQ8Z,IAAR9Z,CACI,2CAAyC4vG,EAAMvyG,IAA/C,gCAAA,GACyB0lH,EAAgBlpH,CAAhBkpH,EAAmBhoH,KAD5C,SAAA,GAEG6nH,EAAa/oH,CAAb+oH,EAAgB7nH,KAFnB,MADJiF;
;EAWVg0G,iBAAcH,CAAdG;EAgBF,uCAAA,CACI3lC,CADJ,EAC6BwxC,CAD7B,EAC8CoD,CAD9C;qBAC8CA,QAI5C,KAFA,IAAMC,MAAN,EACIC,IAAoB,CADxB,OAAA,EAEoBC,KAApB,EAAoBtgH,YAApB,EAAoBA,GAApB,EACE,KADG,SAAA,EACkBuuB,SAAMg9C,OAA3B,EAAqBp2D,YAArB,EAAqBA,GAArB;EAAK,QAAM+/E,QAAN,CACH,IAAyC,QAArCkrB,EAAalrB,EAAOmW,YAApB+U,CAAJ,EACE,MAAM,IAAI1f,UAAJ,CAAe,4BAA0BxL,EAAOmW,YAAhD,CAAN,CAEF+U,EAAalrB,EAAOmW,YAApB+U,IAAoClrB,CAApCkrB,EACAC,GADAD;EAKJ,OAAMrP,MAAN,CACA,KAAK,IAAMhqB,CAAX,IAAmBxb,CAAnB,EAA4B;EAC1B,QAA0B,QAAtB60C,EAAar5B,CAAbq5B,CAAJ,EACErP,EAAkB/4G,IAAlB+4G,EAAwBqP,EAAar5B,CAAbq5B,GAAoB70C,EAAQwb,CAARxb,EAA5CwlC,EADF,KAEO,IAAIoP,CAAJ,EACL,MAAM,IAAIzf,UAAJ,CACF,kDAAgD3Z,CAD9C,CAAN,QAGKq5B,EAAar5B,CAAbq5B;EAGT,OAAID,CAAJ,EAAY;EAEV,QAAMI,MAAN,CACA,KAAK,IAAMC,CAAX,IAAmBJ,CAAnB,EACEG,EAAWvoH,IAAXuoH,CAAgBC,CAAhBD,EAEF,IAAIA,EAAWnqH,MAAXmqH,GAAoB,CAAxB,EACE,MAAM,IAAI7f,UAAJ,CACC6f,EAAWnqH,MAAXmqH,SAAAA,GAAwBF,CAAxBE,2BAAAA,GACAA,CAFD,CAAN;EAMJrP,iBAAcH,CAAdG;EAiBF;EAkCE,YAAA,CAAYlY,CAAZ;EAAA,YAEEvsF,MAAAA,KAAAA,IAAAA,SAFF,CAIE,IAtBF9P,gBAAAA,GAAiB,IAAI0S,GAAJ,EAAjB1S,EAqBEA,EAAKpC,IAALoC,GAAYq8F,EAAOz+F,IArBrBoC,EAsBmB,QAAbA,EAAKpC,IAAT,EAAuB;EACrB,UAAM+1F,IAAS3zF,EAAK2kG,YAAL3kG,GAAoBukG,WAApBvkG,EAAf,CACAA,EAAKpC,IAALoC,GAAYkxG,OAAOvd,CAAPud,CAAZlxG;EAsBF,SAnBAA,EAAK2wG,eAAL3wG,IAAuB,CAAvBA,EACAA,EAAKoO,SAALpO,IAAiB,CADjBA,EAEAA,EAAKmxG,SAALnxG,IAAiB,CAFjBA,EAOI9E,MAAMC,OAAND,CAAcmhG,EAAOvrF,MAArB5V,IACF8E,EAAK8Q,MAAL9Q,GAAcq8F,EAAOvrF,MAAPurF,CAAcn/F,KAAdm/F,EADZnhG,GAGF8E,EAAK8Q,MAAL9Q,IAAeq8F,EAAOvrF,OAVxB9Q,EAYI9E,MAAMC,OAAND,CAAcmhG,EAAOnrF,OAArBhW,IACF8E,EAAKkR,OAALlR,GAAeq8F,EAAOnrF,OAAPmrF,CAAen/F,KAAfm/F,EADbnhG,GAGF8E,EAAKkR,OAALlR,IAAgBq8F,EAAOnrF,QAfzBlR,EAmBI8jH,OAAqB9jH,EAAK8Q,MAA1BgzG,EAAkCrqH,MAAlCqqH,KAA6C9jH,EAAK8Q,MAAL9Q,CAAYvG,MAA7D,EACE,MAAM,IAAIsqG,UAAJ,CACF,qGAEA/jG,EAAK8Q,MAAL9Q,CAAYmB,GAAZnB,CAAgB,UAAAjG,CAAA;EAAK,aAAAA,EAAE6D,IAAF;OAArBoC,CAHE,CAAN,CAOE8jH,OAAqB9jH,EAAKkR,OAA1B4yG,EAAmCrqH,MAAnCqqH,KAA8C9jH,EAAKkR,OAALlR,CAAavG,MAA3DqqH,IACFvjH,QAAQ8Z,IAAR9Z,CACI,uGAEAP,EAAKkR,OAALlR,CAAamB,GAAbnB,CAAiB,UAAAjG,CAAA;EAAK,aAAAA,EAAE6D,IAAF;OAAtBoC,CAHJO,CADEujH,EAWJ9jH,EAAK+jH,WAAL/jH,KAXI8jH,EAYJ9jH,EAAKgkH,sBAALhkH,KAZI8jH,EAaJ9jH,EAAKikH,wBAALjkH,KAbI8jH,EAkBJ9jH,EAAKkkH,YAALlkH,KAlBI8jH,EAmBJ9jH,EAAKmkH,uBAALnkH,KAnBI8jH,EAoBJ9jH,EAAKokH,yBAALpkH,KApBI8jH,EAyBJ9jH,EAAKogH,MAALpgH,KAzBI8jH,CAwCJ,KAAgB,SAAA,EAAAtrG,IAAAxY,EAAKkR,OAArB,EAAgB7N,YAAhB,EAAgBA,GAAhB;EAAK,UACG8sG,KADGp2G,UACOu1G,WADb;EAAA,UAEGmC,IAAY13G,EAAE03G,SAFjB;EAAA,UAGGuD,IAAcj7G,EAAEi7G,WAHnB,CAIHh1G,EAAKkkH,YAALlkH,CAAkB3E,IAAlB2E,CAAuBmwG,CAAvBnwG,GACAA,EAAKmkH,uBAALnkH,CAA6B3E,IAA7B2E,CAAkCyxG,CAAlCzxG,CADAA,EAEAA,EAAKokH,yBAALpkH,CAA+B3E,IAA/B2E,CAAoCg1G,CAApCh1G,CAFAA;EAQF,UAAgB,SAAA,EAAAgyB,IAAAhyB,EAAK8Q,MAArB,EAAgB8gB,YAAhB,EAAgBA,GAAhB;EACQu+E,WADGp2G,UACOu1G,WAAVa,EACAsB,IAAY13G,EAAE03G,SADdtB,EAEA6E,IAAcj7G,EAAEi7G,WAFhB7E,CAONkU,SAAmC,MAAd5S,CAArB4S,EAAsC,0BAAtCA,GACAA,SAAqC,MAAhBrP,CAArBqP,EAAwC,4BAAxCA,CADAA,EAEArkH,EAAK+jH,WAAL/jH,CAAiB3E,IAAjB2E,CAAsBmwG,CAAtBnwG,CAFAqkH,EAGArkH,EAAKgkH,sBAALhkH,CAA4B3E,IAA5B2E,CAAiCyxG,CAAjCzxG,CAHAqkH,EAIArkH,EAAKikH,wBAALjkH,CAA8B3E,IAA9B2E,CAAmCg1G,CAAnCh1G,CAJAqkH;EAQFrkH,OAAKskH,UAALtkH,KAAAA,EACAA,EAAKukH,WAALvkH,KADAA,EAEAA,EAAKwkH,eAALxkH,KAFAA,EAGAA,EAAKykH,cAALzkH,KAHAA,EAIAA,EAAK0kH,eAAL1kH,KAJAA,CAKA,KAAK,IAAI5F,IAAI,CAAb,EAAgBA,IAAI4F,EAAK+jH,WAAL/jH,CAAiBvG,MAArC,EAA6CW,GAA7C,EAAkD;EAGhD,aAFM+1G,IAAQnwG,EAAK+jH,WAAL/jH,CAAiB5F,CAAjB4F,cAES41G,WAAvB,EACE,MAAM,IAAIjB,SAAJ,CACF,0EACoBtY,EAAOvrF,MAD3B,aAAA,GAES1W,CAFT,2CAAA,GAGmB+1G,EAAMxL,YAANwL,EAHnB,MADE,CAA
N,CAMFnwG,EAAKskH,UAALtkH,CAAgB3E,IAAhB2E,CAAqBmwG,EAAMvyG,IAA3BoC,GACAA,EAAKwkH,eAALxkH,CAAqB3E,IAArB2E,CAA0BmwG,EAAMiB,eAAhCpxG,CADAA,EAGAA,EAAKykH,cAALzkH,CAAoB3E,IAApB2E,CAAyBmwG,EAAMvyG,IAA/BoC,CAHAA;EAKF,UAAoB,SAAA,EAAAklG,IAAAllG,EAAKkkH,YAAzB,EAAoBzoD,YAApB,EAAoBA,GAApB;EAAW00C,cAAAA,CACTnwG,EAAKukH,WAALvkH,CAAiB3E,IAAjB2E,CAAsBmwG,EAAMvyG,IAA5BoC;EAGFA,OAAK2kH,mBAAL3kH,GAA2BA,EAAK8Q,MAAL9Q,CAAYmB,GAAZnB,CAAgB,UAAAjG,CAAA;EAAK,aAAAA,EAAEuB,KAAF;OAArB0E,CAA3BA,EACAA,EAAK4kH,oBAAL5kH,GAA4BA,EAAKkR,OAALlR,CAAamB,GAAbnB,CAAiB,UAAAjG,CAAA;EAAK,aAAAA,EAAEuB,KAAF;OAAtB0E,CAD5BA,CAyFA,KAjFA,IAAM6kH,MAAN,EAEMC,MAFN,EAGMC,MAHN,EAKMC,MALN,EAMMC,MANN,EAOMC,MAPN,EA2BMC,IACF,UAAC7yG,CAAD,EAAyB8yG,CAAzB,EAAgDC,CAAhD,EACClV,CADD,EACgBsB,CADhB,EACoCuD,CADpC;EAEe,cAAT7E,CAAS,IAAqB,QAAbsB,CAAR,IAA4C,QAAfuD,CAA7B,KACX7E,IAAQ79F,EAAOg9F,WAAfa,EACAsB,IAAYn/F,EAAOm/F,SADnBtB,EAEA6E,IAAc1iG,EAAO0iG,WAHV,EAKb,IAAMnkG,IAAOs/F,EAAME,YAANF,CAAmBsB,CAAnBtB,CAAb,CAGA,KAAuC,MAAnCkV,EAAgB1kG,OAAhB0kG,CAAwBx0G,CAAxBw0G,CAAJ,EACE,MAAM,IAAIvhB,YAAJ,CACF,gBAAcxxF,EAAO1U,IAArB,gBAAA,GAAuCuyG,EAAMvyG,IAA7C,0BADE,CAAN,CAMF,KAAqC,MAAjCwnH,EAAczkG,OAAdykG,CAAsBv0G,CAAtBu0G,CAAJ,EAAA;EAKAplH,UAAKslH,cAALtlH,CAAoB0H,GAApB1H,CAAwBulH,EAAUC,OAAVD,CAAkBpV,CAAlBoV,EAAyB9T,CAAzB8T,CAAxBvlH,GAGMmwG,GAAMnsG,EAANmsG,IAAY8U,CAAZ9U,MACJ8U,EAAa9U,EAAMnsG,EAAnBihH,IAAyBzhH,OAAO0O,IAAP1O,CAAYyhH,CAAZzhH,EAA0B/J,MAD/C02G,CAHNnwG,GAOuC,MAAnCqlH,EAAgB1kG,OAAhB0kG,CAAwBx0G,CAAxBw0G,KACFA,EAAgBhqH,IAAhBgqH,CAAqBx0G,CAArBw0G,CARFrlH,CAaA,KADA,IAAMylH,IAAmB50G,EAAK8+F,aAAL9+F,CAAmBpX,MAA5C,EACSW,IAAI,CAAb,EAAgBA,IAAIqrH,CAApB,EAAsCrrH,GAAtC,EAA2C;EACzC,cAAML,IAAI8W,EAAKi/F,YAALj/F,CAAkBzW,CAAlByW,CAAV;EAAA,cACM60G,IAAQ70G,EAAK8+F,aAAL9+F,CAAmBzW,CAAnByW,CADd;EAAA,cAEM80G,IAAY90G,EAAK++F,WAAL/+F,CAAiBzW,CAAjByW,CAFlB;EAAA,cAGM+0G,IAAc/0G,EAAKg/F,aAALh/F,CAAmBzW,CAAnByW,CAHpB,CAIAs0G,EACIprH,CADJorH,EACOC,CADPD,EACsBE,CADtBF,EACuCO,CADvCP,EAC8CQ,CAD9CR,EAEIS,CAFJT;EAKF,cADAC,EAAc/pH,IAAd+pH,CAAmBv0G,CAAnBu0G,CACA,EAAOC,EAAgB1kG,OAAhB0kG,CAAwBx0G,CAAxBw0G,KAAiC,CAAxC,GACEA,EAAgB7uE,MAAhB6uE,CAAuBA,EAAgB1kG,OAAhB0kG,CAAwBx0G,CAAxBw0G,CAAvBA,EAAsD,CAAtDA,EAEFH,EAAuB7pH,IAAvB6pH,CAA4Br0G,CAA5Bq0G;;OA5EN,EA+EME,MA/EN,EAgFMC,MAhFN,OAAA,EAiFgBhgB,IAAArlG,EAAKkR,OAArB,EAAgB+zF,YAAhB,EAAgBA,GAAhB;EAAK,UAAMlrG,QAAN,CACHorH,EAAgBprH,CAAhBorH,EAAmBC,CAAnBD,EAAkCE,CAAlCF;EAKF,UAFA,SAAA,EAEmBU,IADfX,EAAuBhoH,KAAvBgoH,GAA+B3/G,OAA/B2/G,EACJ,EAAmB9f,YAAnB,EAAmBA,GAAnB;EACE0f,SADSj0G,UACS7M,EAAlB8gH,IAAwBj0G,CAAxBi0G,EAEMj0G,EAAK7M,EAAL6M,IAAWg0G,CAAXh0G,KACJg0G,EAAYh0G,EAAK7M,EAAjB6gH,IAAuB,CADnBh0G,CAFNi0G,CAKA,IAAI3tH,IAAQ0tH,EAAYh0G,EAAK7M,EAAjB6gH,CAAZ;EAAA,UAGMiB,IACsC,QAAvCf,EAAal0G,EAAK6+F,aAAL7+F,CAAmB7M,EAAhC+gH,CAAuC,GACnC,CADmC,GAEnCA,EAAal0G,EAAK6+F,aAAL7+F,CAAmB7M,EAAhC+gH,CANT,CAaA5tH,IAAQyC,KAAKI,GAALJ,CAASzC,CAATyC,EAAgBksH,CAAhBlsH,CAARzC,EACA4tH,EAAal0G,EAAK6+F,aAAL7+F,CAAmB7M,EAAhC+gH,IAAsC5tH,CADtCA,EAEA6tH,EAAen0G,EAAK6+F,aAAL7+F,CAAmB7M,EAAlCghH,IAAwCn0G,EAAK6+F,aAF7Cv4G,EAGA0tH,EAAYh0G,EAAK7M,EAAjB6gH,IAAuB1tH,CAHvBA,CAMA,KAASiD,IAAI,CAAb,EAAgBA,IAAIyW,EAAK8+F,aAAL9+F,CAAmBpX,MAAvC,EAA+CW,GAA/C,EAAoD;EAClD,YAAMgnH,IAAevwG,EAAK8+F,aAAL9+F,CAAmBzW,CAAnByW,CAArB;EAAA,YAEMk1G,KADAtU,IAAY5gG,EAAK++F,WAAL/+F,CAAiBzW,CAAjByW,CAAZ4gG,EACc2P,EAAa/Q,YAAb+Q,CAA0B3P,CAA1B2P,CAAd2E,CAFN;EAAA,YAGMC,IAC8B,QAA/BnB,EAAYkB,EAAY/hH,EAAxB6gH,CAA+B,GAAO,CAAP,GACOA,EAAYkB,EAAY/hH,EAAxB6gH,CAL3C,CAMAA,EAAYkB,EAAY/hH,EAAxB6gH,IAA8BjrH,KAAKI,GAALJ,CAASzC,IAAQ,CAAjByC,EAAoBosH,CAApBpsH,CAA9BirH,EACAC,EAAaiB,EAAY/hH,EAAzB8gH,IAA+BiB,CAD/BlB;;EAMJ,SAAM3E,MAAN,CACA,KA
AK,IAAM+F,CAAX,IAAqBpB,CAArB,EAAkC;SAC1B1tH,IAAQ0tH,EAAYoB,CAAZpB,MACC3E,MACbA,EAAa/oH,CAAb+oH,SAEFA,EAAa/oH,CAAb+oH,EAAoB7kH,IAApB6kH,CAAyB4E,EAAamB,CAAbnB,CAAzB5E;EAIF,SAAMgG,MAAN,CACA,KAAK,IAAMC,CAAX,IAAsBpB,CAAtB,EAAoC;SAC5B5tH,IAAQ4tH,EAAaoB,CAAbpB,MACCmB,MACbA,EAAc/uH,CAAd+uH,SAEFA,EAAc/uH,CAAd+uH,EAAqB7qH,IAArB6qH,CAA0BlB,EAAemB,CAAfnB,CAA1BkB;EAIF,SAAIE,IAAY5iH,OAAO0O,IAAP1O,CAAY0iH,CAAZ1iH,EACKrC,GADLqC,CACS,UAAAzJ,CAAA;EAAK,aAAAivG,SAASjvG,CAATivG,EAAY,EAAZA,CAAA;OADdxlG,EAEK2Z,IAFL3Z,CAEU6iH,oBAFV7iH,CAAhB,CAKAxD,EAAKogH,MAALpgH,KAAAA,CACA,KAAoB,SAAA,EAAAsmH,KAApB,EAAoB9gB,YAApB,EAAoBA,GAApB;EAAK,UACG+gB,IAAiBL,EADd/uH,QACc+uH,CADpB,CAIHK,EAAeppG,IAAfopG,CAAoB,UAACxtH,CAAD,EAAIsB,CAAJ;EAClB,YAAMysF,IAASm+B,EAAalsH,EAAEiL,EAAfihH,CAAf;EAAA,YACMj+B,IAASi+B,EAAa5qH,EAAE2J,EAAfihH,CADf,CAEA,OAAIn+B,IAASE,CAATF,IACM,CADNA,GAGAA,IAASE,CAATF,GACK,CADLA,GAGG,CANP;SAHFy/B,EAWA,KAAoB,SAAA,EAAAC,KAApB,EAAoBjhB,YAApB,EAAoBA,GAApB;EAAW4K,gBAAAA,CACTnwG,EAAKogH,MAALpgH,CAAY3E,IAAZ2E,CAAiBmwG,CAAjBnwG;;EAGJA,OAAKkmH,aAALlmH,GAAqBkmH,CAArBlmH,EAGAomH,IAAY5iH,OAAO0O,IAAP1O,CAAY08G,CAAZ18G,EACKrC,GADLqC,CACS,UAAAzJ,CAAA;EAAK,aAAAivG,SAASjvG,CAATivG,EAAY,EAAZA,CAAA;OADdxlG,EAEK2Z,IAFL3Z,CAEU6iH,oBAFV7iH,CAHZxD,CAcA,KAJA,IAAMymH,IAAoBzmH,EAAK8Q,MAAL9Q,CAAY9C,KAAZ8C,EAA1B,EAGM0mH,MAHN,OAAA,EAIoBC,KAApB,EAAoBC,YAApB,EAAoBA,GAApB,EACE,KADG,SAAA,EACgBC,IAAA3G,EADV/oH,QACU+oH,CAAnB,EAAmB4G,YAAnB,EAAmBA,GAAnB;EAAK,UAAMj2G,CAAN,CAEH,IAAa,SADPs/F,KADGt/F,UACU6+F,aACN,CAAb,EAAmB;EACjB,aAAgB,SAAA,EAAAqX,IAAAl2G,EAAKi/F,YAArB,EAAgBkX,YAAhB,EAAgBA,GAAhB;EAAWjtH,kBAAAA,CACT,KAAsC,MAAlC0sH,EAAkB9lG,OAAlB8lG,CAA0B1sH,CAA1B0sH,CAAJ,EACE,MAAM,IAAI3iB,YAAJ,CACF,wDAAsD/pG,CAAtD,GACA,aADA,GACco2G,EAAMvyG,IADpB,mEAAA,GAGU8oH,CAJR,CAAN;EAOJ,cAAgB,SAAA,EAAAO,KAAAp2G,EAAKk/F,aAArB,EAAgBmX,aAAhB,EAAgBA,GAAhB;EAAWntH,mBAAAA,CACT0sH,EAAkBprH,IAAlBorH,CAAuB1sH,CAAvB0sH;EAEFC,WAAwBrrH,IAAxBqrH,CAA6BvW,EAAMvyG,IAAnC8oH;;EAMN1mH,OAAKkgH,YAALlgH,GAAoBkgH,CAApBlgH,CAKA,KADA,IAAMmnH,KAAWnnH,EAAKogH,MAALpgH,CAAYmB,GAAZnB,CAAgB,UAAAjG,CAAA;EAAK,aAAAA,EAAE6D,IAAF;OAArBoC,CAAjB,iBACWonH;EACT,UAAMC,IAAiBF,GAASx6G,MAATw6G,CAAgB,UAAAptH,CAAA;EAAK,eAAAA,MAAMqtH,CAAN;SAArBD,EAAiC1tH,MAAxD,CACA,IAAuB,MAAnB4tH,CAAJ,EACE,MAAM,IAAIvjB,YAAJ,CACF,eAAasjB,CAAb,eAAA,GAA8BC,CAA9B,yEAAA,GAEA9uF,KAAKE,SAALF,CAAe4uF,EAAf5uF,CAHE,CAAN;OAJJ,QAAA,EACmB+uF,OAAnB,EAAmBC,cAAnB,EAAmBA,IAAnB;;cAcAvnH,EAAKowG,aAALpwG,KAAAA,EAEAA,EAAKqwG,YAALrwG,KAFAA,EAOA,IAAIswG,IAAJ,GACEZ,eAAe1vG,GACf2vG,mBACAC,iBACAC,mBACAC,cAAc9vG,EAAK8Q,QACnBi/F,eAAe/vG,EAAKkR,SACpB8+F,YAAYhwG,EAAK8Q,MAAL9Q,CAAYmB,GAAZnB,CAAgB,UAAAjG,CAAA;EAAK,eAAA,IAAA;SAArBiG,GACZiwG,aAAajwG,EAAKkR,OAALlR,CAAamB,GAAbnB,CAAiB,UAAAjG,CAAA;EAAK,eAAA,IAAA;SAAtBiG,GACb2V,aAAa3V,EAAK8Q,MAAL9Q,CAAYmB,GAAZnB,CAAgB,UAAAjG,CAAA;EAAK,eAAAA,EAAEuB,KAAF;SAArB0E,GACbkwG,cAAclwG,EAAKkR,OAALlR,CAAamB,GAAbnB,CAAiB,UAAAjG,CAAA;EAAK,eAAAA,EAAEuB,KAAF;SAAtB0E,GAVhB,CAPAA,EAmBAA,EAAK8xG,KAAL9xG,IAAa,CAnBbA,EAoBAA,EAAKuxG,SAALvxG,GAAiB,CApBjBA;EAszBJ,UAjsCwCkQ,aAAAA,GAAAA,GAka5Bq1G,WAAAA,kBAAAA,GAAV;EACE,QAAuB,MAAnBlmH,KAAKkyG,SAAT,EACE,MAAM,IAAI34G,KAAJ,CAAU,gBAAcyG,KAAKzB,IAAnB,2BAAV,CAAN;KApakCsS,EAkctCq1G,WAAAA,QAAAA,GAAA;EACElmH,SAAK0vG,iBAAL1vG,GACA,IAAM9E,MACe46G,sBAAsB,MAAMF,sBAAsB,GADvE,CAEA,IAAyB,OAAnB51G,KAAKkyG,SAAX,EACE,KAAoB,SAAA,EAAA/4F,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EAAK,UAAM8sG,QAAN,CACH51G,EAAO06G,oBAAP16G,IAA+B41G,EAAM99F,OAAN89F,GAAgB8E,oBAA/C16G;EAIJ,YADAA,EAAO46G,oBAAP56G,GAA8B8E,KAAKkyG,SAAnCh3G,EACOA,CAAP;KA5coC2V,EA+ctC1M,qBAAAA,CAAI+hH,WAAJ/hH,oBAAAA,SAAA;EAIE,UAAInE,KAAKuxG,iBAALvxG
,CAAuB5F,MAAvB4F,GAAgC,CAApC,EACE,MAAM,IAAI0kG,UAAJ,CACF,sNADE,CAAN,CAOF,KAAK1kG,KAAK+O,SAAV,EACE,SAAA,CAGF,KADA,IAAIwgE,MAAJ,OAAA,EACoBp2D,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EAAK,YAAM8sG,QAAN,CACHvhC,IAAUA,EAAQpoE,MAARooE,CAAeuhC,EAAM4B,gBAArBnjC,CAAVA;EAEF,cAAOA,CAAP;2CAnBFprE,CA/csC0M,EAqetC1M,qBAAAA,CAAI+hH,WAAJ/hH,uBAAAA,SAAA;EAEE,WADA,IAAMorE,MAAN,OAAA,EACoBp2D,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EAAK,YAAM8sG,QAAN,CACHvhC,EAAQvzE,IAARuzE,MAAAA,CAAAA,CAAAA,EAAgBuhC,EAAM6B,mBAAtBpjC;EAEF,YAAKvvE,KAAK+O,SAAV,EAAqB;EAEnB,aADA,IAAM2jG,MAAN,OAAA,EACoB//E,IAAA3yB,KAAK+gH,MAAzB,EAAoBxuF,YAApB,EAAoBA,GAApB;EAAWu+E,kBAAAA,CACT4B,EAAiB12G,IAAjB02G,MAAAA,CAAAA,CAAAA,EAAyB5B,EAAM4B,gBAA/BA;EAEF,gBAAOA,EAAiBvrG,MAAjBurG,CAAwBnjC,CAAxBmjC,CAAP;EAEF,cAAOnjC,CAAP;2CAZFprE,CAresC0M,EAoftC1M,qBAAAA,CAAI+hH,WAAJ/hH,WAAAA,SAAA;EACE,aAAOnE,KAAK0yG,gBAAL1yG,CAAsBmH,MAAtBnH,CAA6BA,KAAK2yG,mBAAlC3yG,CAAP;2CADFmE,CApfsC0M,EA4gBtCq1G,WAAAA,YAAAA,GAAA,UACI3C,CADJ,EAC0CC,CAD1C,EAEI2E,CAFJ,EAE8BhE,CAF9B;uBAC0CX,0BACtC2E,0BAA0BhE,SAMxBgE,IACFC,8BACI7E,CADJ6E,EACmCpoH,KAAK+gH,MADxCqH,EACgDjE,CADhDiE,CADED,GAIFE,oBAAoB9E,CAApB8E,EAA6CroH,KAAK+gH,MAAlDsH,EAA0D7E,CAA1D6E;KAxhBkCx3G,EAgiB9Bq1G,WAAAA,cAAAA,GAAR;EACE,QAAMoC,IAAYtoH,KAAKulG,SAALvlG,EAAlB,CASA,SAPE88F,WAAW98F,KAAKslG,YAALtlG,IACXg9F,QAAQsrB,GACRC,cAAc,iBAAeC,WAG7B30G,SAAS,iBAEX;KA1iBoChD,EAyjBtCq1G,WAAAA,OAAAA,GAAA,UAAOuC,CAAP,EAAqBC,CAArB;uBAAqBA,QACnB,IAAMntB,IAAcwnB,oBAAoB/iH,KAAK2oH,aAAL3oH,EAApB+iH,CAApB,CACA,OAAO2F,IAAexvF,KAAKE,SAALF,CAAeqiE,CAAfriE,CAAfwvF,GAA6CntB,CAApD;KA3jBoC1qF,EA2kBtCq1G,WAAAA,KAAAA,GAAA,UAAKz0G,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,UAAIkwG,CAAJ,CAQA,OATAn3G,IAASqhG,OAAqBrhG,CAArBqhG,CAATrhG,EAIEm3G,IADE,UAAU1V,CAAV,GACMJ,OAAqBI,EAAar0D,IAAlCi0D,CADN,GAGM+V,aAA2B,IAA3BA,EAAiCp3G,EAAOrX,MAAxCyuH,CANVp3G,EASO9Q,EAAKmoH,gBAALnoH,CAAsB8Q,CAAtB9Q,EAA8BioH,CAA9BjoH,EAAqC,CAArCA,CAAP;OAVK+X,CAAP;KA5kBoC7H,EAmmBtCq1G,WAAAA,YAAAA,GAAA,UAAYz0G,CAAZ,EAAqCotC,CAArC;EAAA,gBAAA,CAEE,OAAOnmC,KAAK;EAEV,UAAIkwG,CAAJ,CAOA,OARAn3G,IAASqhG,OAAqBrhG,CAArBqhG,CAATrhG,EAGEm3G,IADU,QAAR/pE,CAAQ,GACFgqE,aAA2B,IAA3BA,EAAiCp3G,EAAOrX,MAAxCyuH,CADE,GAGF/V,OAAqBj0D,CAArBi0D,CALVrhG,EAQO9Q,EAAKmoH,gBAALnoH,CAAsB8Q,CAAtB9Q,EAA8BioH,CAA9BjoH,EAAqC,CAArCA,CAAP;OATK+X,CAAP;KArmBoC7H,EA2nBtCq1G,WAAAA,mBAAAA,GAAA,UAAmB/lG,CAAnB;EACE,QAAM7J,IAAcm/F,mBAA+Bt1F,CAA/Bs1F,CAApB,CACA,IAAIn/F,EAAYlc,MAAZkc,KAAuBtW,KAAK0kH,WAAL1kH,CAAiB5F,MAA5C,EACE,MAAM,IAAIsqG,UAAJ,CACF,iCAA+BvkF,CAA/B,iBAAA,GACangB,KAAK0kH,WAAL1kH,CAAiB5F,MAD9B,oBADE,CAAN,CAOF,KADA,IAAM2uH,MAAN,EACShuH,IAAI,CAAb,EAAgBA,IAAIub,EAAYlc,MAAhC,EAAwCW,GAAxC,EAA6C;EAC3C,UAAM+1G,IAAQ9wG,KAAK0kH,WAAL1kH,CAAiBjF,CAAjBiF,CAAd;EAAA,UACMgpH,IAAa1yG,EAAYvb,CAAZub,CADnB,CAKAyyG,EADMjzE,IAAWg7D,EAAMvyG,IAANuyG,GAAa,MAC9BiY,IAAiCC,CAAjCD;EAGF,SAAMhC,IAAY5iH,OAAO0O,IAAP1O,CAAYnE,KAAK6gH,YAAjB18G,EACKrC,GADLqC,CACS,UAAAzJ,CAAA;EAAK,aAAAivG,SAASjvG,CAATivG,EAAY,EAAZA,CAAA;OADdxlG,EAEK2Z,IAFL3Z,CAEU6iH,oBAFV7iH,CAAlB,CAIA,IAAI4iH,EAAU3sH,MAAV2sH,GAAmB,CAAvB,EACE,KAAoB,SAAA,EAAAkC,KAApB,EAAoBjlH,YAApB,EAAoBA,GAApB,EAEE,KAFG,IAAMlM,QAAN,OAAA,EAEgBoxH,IADLlpH,KAAK6gH,YAAL7gH,CAAkBlI,CAAlBkI,CACd,EAAmBmZ,YAAnB,EAAmBA,GAAnB;EAAK,UAAM3H,QAAN,CAEGs/F,IAAQt/F,EAAK6+F,aAAbS,CACN,KAA2D,MAAvD9wG,KAAK0kH,WAAL1kH,CAAiB8B,GAAjB9B,CAAqB,UAAAtF,CAAA;EAAK,eAAAA,EAAEiK,EAAF;SAA1B3E,EAAgCshB,OAAhCthB,CAAwC8wG,EAAMnsG,EAA9C3E,CAAJ,EAAA;EAMA,aADA,IAAMmpH,MAAN,EACSnrH,IAAI,CAAb,EAAgBA,IAAIwT,EAAK8+F,aAAL9+F,CAAmBpX,MAAvC,EAA+C4D,GAA/C,EAAoD;EAClD,cAAM+jH,IAAevwG,EAAK8+F,aAAL9+F,CAAmBxT,CAAnBwT,CAArB;EA
AA,cACM43G,IAAY53G,EAAK++F,WAAL/+F,CAAiBxT,CAAjBwT,CADlB;EAAA,cAEMmkG,IAAcnkG,EAAKg/F,aAALh/F,CAAmBxT,CAAnBwT,CAFpB;EAAA,cAIM63G,IAAaN,EADbjzE,IAAcisE,EAAaxjH,IAAbwjH,MAAAA,GAAqBqH,CAArBrH,MAAAA,GAAkCpM,CACnCoT,CAJnB,CAKAI,EAAYntH,IAAZmtH,CAAiBE,CAAjBF;EAGF,aAGMG,IAAe7T,mBAHD3E,EAAMqD,kBAANrD,CAChBwB,iBAA+B6W,CAA/B7W,CADgBxB,CAGC2E,CAHrB;EAAA,YAIMrD,IAAYtB,EAAME,YAANF,CAAmBxvF,OAAnBwvF,CAA2Bt/F,CAA3Bs/F,CAJlB,CAKA,KAAS9yG,IAAI,CAAb,EAAgBA,IAAIsrH,EAAalvH,MAAjC,EAAyC4D,GAAzC,EAA8C;EAE5C+qH,YADMjzE,IAAcg7D,EAAMvyG,IAANuyG,MAAAA,GAAcsB,CAAdtB,MAAAA,GAA2B9yG,CAC/C+qH,IAAiCO,EAAatrH,CAAbsrH,CAAjCP;;;EAOR,SAAMlY,MAAN;EAAA,QACM0Y,MADN,CAEA,KAASxuH,IAAI,CAAb,EAAgBA,IAAIiF,KAAK6kH,YAAL7kH,CAAkB5F,MAAtC,EAA8CW,GAA9C,EAAmD;EAC3C+1G,UAAQ9wG,KAAK6kH,YAAL7kH,CAAkBjF,CAAlBiF,CAAR8wG,EACAsB,IAAYpyG,KAAK8kH,uBAAL9kH,CAA6BjF,CAA7BiF,CADZ8wG,EAEA6E,IAAc31G,KAAK+kH,yBAAL/kH,CAA+BjF,CAA/BiF,CAFd8wG,CAAN,IAGMh7D,IAAcg7D,EAAMvyG,IAANuyG,MAAAA,GAAcsB,CAAdtB,MAAAA,GAA2B6E,CAH/C,CAIA4T,EAAgBvtH,IAAhButH,CAAqBzzE,CAArByzE;EAGF,UAASxuH,IAAI,CAAb,EAAgBA,IAAIwuH,EAAgBnvH,MAApC,EAA4CW,GAA5C,EAAiD;EAC/C,UAAMwb,IAAMgzG,EAAgBxuH,CAAhBwuH,CAAZ,CACAvE,SAAqBzuG,KAAOwyG,CAA5B/D,GACAnU,EAAa70G,IAAb60G,CAAkBkY,EAAqBxyG,CAArBwyG,CAAlBlY,CADAmU;EAKF,YAAO1S,iBAA+BzB,CAA/ByB,CAAP;KAtsBoCzhG,EAmtB5Bq1G,WAAAA,iBAAAA,GAAV,UAA2Bz0G,CAA3B,EAA6Cm3G,CAA7C;EAEe,YAATA,CAAS,KACXA,IAAQC,aAA2B,IAA3BA,EAAiCp3G,EAAOrX,MAAxCyuH,CADG,EAUb,KADA,IAAMW,MAAN,EACSzuH,IAAI,CAAb,EAAgBA,IAAIiF,KAAKyR,MAALzR,CAAY5F,MAAhC,IAA0CW,CAA1C,EAA6C;EAC3C,UAAML,IAAIsF,KAAKyR,MAALzR,CAAYjF,CAAZiF,CAAV;EAAA,UACMoR,IAAIK,EAAO1W,CAAP0W,CADV;EAAA,UAEMotC,IAAO+pE,EAAM7tH,CAAN6tH,CAFb,CAGAY,EAAU9uH,EAAEiK,EAAZ6kH,KAAmBp4G,GAAGytC,EAAtB2qE;EAMF,UAHA,SAAA,EAGoBC,IAHFtlH,OAAO0O,IAAP1O,CAAYnE,KAAK6gH,YAAjB18G,EACKrC,GADLqC,CACS,UAAAzJ,CAAA;EAAK,aAAAivG,SAASjvG,CAATivG,EAAY,EAAZA,CAAA;OADdxlG,EAEK2Z,IAFL3Z,CAEU6iH,oBAFV7iH,CAGlB,EAAoBH,YAApB,EAAoBA,GAApB,EAEE,KAFG,IAAMlM,QAAN,OAAA,EAEgB4xH,IADL1pH,KAAK6gH,YAAL7gH,CAAkBlI,CAAlBkI,CACd,EAAmBmZ,YAAnB,EAAmBA,GAAnB;EAUE,WAVG,IAAM3H,QAAN,EAEGs/F,IAAQt/F,EAAK6+F,aAFhB,EAGGsZ,IAAwBn4G,EAAKi/F,YAHhC,EAIGmZ,IAAyBp4G,EAAKk/F,aAJjC,EASGmZ,IAAe,IAAIhuH,KAAJ,EATlB,OAAA,EAUaiuH,KAAhB,EAAgBv3F,YAAhB,EAAgBA,GAAhB;WAAW73B,UACHiK,MAAM6kH,KACVK,EAAa7tH,IAAb6tH,CAAkBL,EAAU9uH,EAAEiK,EAAZ6kH,CAAlBK;EAGJ,WAAIA,EAAazvH,MAAbyvH,KAAwBF,EAAsBvvH,MAAlD,EAA0D;EAExD,YAAI84G,MAAJ;EAAA,YACI6W,UADJ;EAAA,YAEIC,UAFJ;EAAA,YAGIC,UAHJ;EAAA,YAIIC,UAJJ,CASA,IAHqB,QAAjB14G,EAAK0+F,QAAY,KACnBgD,IAAS1hG,EAAK0+F,QADK,GAGO,MAAxB2Z,EAAazvH,MAAjB,EAA+B;EACvB,cAAAu4B,QAAA;EAAA,cAACw3F,QAAD;EAAA,cAAiBC,QAAjB,CACa,QAAflX,EAAOr0D,IAAQ,KACjBq0D,EAAar0D,IAAbq0D,GAAiBkX,CADA,GAGnBH,IACInX,OAAqBhC,EAAM7xG,IAAN6xG,CAAWqZ,CAAXrZ,EAA2BoC,CAA3BpC,CAArBgC,CAJe,EAKnBoX,IAAcpX,OACVhC,EAAMuZ,WAANvZ,CAAkBqZ,CAAlBrZ,EAAkCsZ,CAAlCtZ,CADUgC,CALK,EAOnBiX,KAAmBI,EAPA,EAQnBH,KAAiBI,EARE;WAFrB,MAYEL,IAAkBF,EAAa/nH,GAAb+nH,CAAiB,UAAAnvH,CAAA;EAAK,iBAAAA,EAAE,CAAFA,CAAA;WAAtBmvH,CAAlBE,EACAC,IAAgBH,EAAa/nH,GAAb+nH,CAAiB,UAAAnvH,CAAA;EAAK,iBAAAA,EAAE,CAAFA,CAAA;WAAtBmvH,CADhBE,EAEmB,QAAf7W,EAAOr0D,IAAQ,KACjBq0D,EAAar0D,IAAbq0D,GAAiB8W,CADA,CAFnBD,EAKAE,IACInX,OAAqBhC,EAAM7xG,IAAN6xG,CAAWiZ,CAAXjZ,EAA4BoC,CAA5BpC,CAArBgC,CANJiX,EAOAG,IAAcpX,OACVhC,EAAMuZ,WAANvZ,CAAkBiZ,CAAlBjZ,EAAmCkZ,CAAnClZ,CADUgC,CAPdiX,CAWF,IAAIjZ,EAAMM,mBAAV,EACE,MAAM,IAAIzM,mBAAJ,CACF,iHADE,CAAN,CAOF,KAAS5pG,IAAI,CAAb,EAAgBA,IAAI6uH,EAAuBxvH,MAA3C,IAAqDW,CAArD,EAAwD;EAChDL,cAAIkvH,EAAuB7uH,CAAvB6uH,CAAJlvH,EACA0W,IAAI64G,EAAclvH,CAAdkvH,CADJvvH,EAEAmkD,IAAOqrE,EAAYnvH,CAAZmvH,CAFPxvH,CAGN8uH,EAAU9uH,EAAEiK,EAAZ6kH,KAAmBp4G,GAAGytC
,EAAtB2qE;;;EASR,UAHA,IAAM9Y,MAAN,EACME,MADN,EAEMC,MAFN,OAAA,EAGgBhL,IAAA7lG,KAAK6R,OAArB,EAAgBuqD,YAAhB,EAAgBA,GAAhB;EACE4oD,gBADStqH,UAEHiK,MAAM6kH,CADZxE,EACuB,8BAA4BtqH,EAAE6D,IAA9B,QAAA,GAAwC7D,EAAEiK,EADjEqgH,EAEM,IAAApf,WAAA;EAAA,UAAC3yF,QAAD,CAAS4rC,QAAAA,CACfgyD,EAAa70G,IAAb60G,CAAkB59F,EAAOhX,KAAzB40G,GACAH,EAAc10G,IAAd00G,CAAmBz9F,CAAnBy9F,CADAG,EAEAD,EAAY50G,IAAZ40G,CAAiB/xD,CAAjB+xD,CAFAC;EAMF,aAAQH,GAAeE,GAAaC,EAApC;KA3zBoChgG,EAs0B9Bq1G,WAAAA,uBAAAA,GAAR,UAA+BnF,CAA/B;EAGE,SAFA,IACIuJ,CADJ,EAAMC,MAAN,OAAA,EAEoBpxG,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EAAK,UAAM8sG,QAAN,CACHwZ,IAAYxZ,aAAiBoV,CAAjBpV,GAA6B,CAA7BA,GAAiC,CAA7CwZ,CACA,KAAK,IAAIE,IAAoB,CAA7B,EACKA,IAAoB1Z,EAAME,YAANF,CAAmB12G,MAD5C,EACoDowH,GADpD,EACyE;EACvE,YAAMrE,IAAUD,EAAUC,OAAVD,CAAkBpV,CAAlBoV,EAAyBsE,CAAzBtE,CAAhB,CACIlmH,KAAKimH,cAALjmH,CAAoB0T,GAApB1T,CAAwBmmH,CAAxBnmH,MAEFuqH,EAAkBpE,CAAlBoE,IAA6BD,CAA7BC,EACAD,KAAa,CAHXtqH;;EAOR,YAAOuqH,CAAP;KAr1BoC15G,EA42BtCq1G,WAAAA,SAAAA,GAAA,UAAS3nH,CAAT,EAAwBjE,CAAxB;EACE,QAAa,QAATA,CAAJ,EAAmB;EACjB,UAAI0F,KAAK+gH,MAAL/gH,CAAY5F,MAAZ4F,IAAsB1F,CAA1B,EACE,MAAM,IAAIoqG,UAAJ,CACF,0CAAwCpqG,CAAxC,0BAAA,GACO0F,KAAK+gH,MAAL/gH,CAAY5F,MADnB,eADE,CAAN,CAIA,OAAO4F,KAAK+gH,MAAL/gH,CAAY1F,CAAZ0F,CAAP;EAGF,SAAY,QAARzB,CAAJ,EACE,MAAM,IAAImmG,UAAJ,CAAe,4CAAf,CAAN,CAIJ,KAAoB,SAAA,EAAAvrF,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EAAK,UAAM8sG,QAAN,CACH,IAAIA,EAAMvyG,IAANuyG,KAAevyG,CAAnB,EACE,OAAOuyG,CAAP;EAGJ,WAAM,IAAIpM,UAAJ,CAAe,oBAAkBnmG,CAAjC,CAAN;KAh4BoCsS,EAw4BtCq1G,WAAAA,gBAAAA,GAAA;EAAA,gBAAA,CAKE,OAAOxtG,KAAK;EAEV,WADA,IAAM42D,MAAN,OAAA,EACoBn2D,IAAAxY,EAAKogH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB,EACE,KADG,IAAM8sG,QAAN,EACMsB,IAAY,CAArB,EAAwBA,IAAYtB,EAAME,YAANF,CAAmB12G,MAAvD,IACOg4G,CADP,EACkB;EAChB,YAAM+T,IAAUD,EAAUC,OAAVD,CAAkBpV,CAAlBoV,EAAyB9T,CAAzB8T,CAAhB,CACIvlH,EAAKslH,cAALtlH,CAAoB+S,GAApB/S,CAAwBwlH,CAAxBxlH,KACF2uE,EAAOtzE,IAAPszE,MAAAA,CAAAA,CAAAA,EAAewhC,EAAM2Z,eAAN3Z,EAAfxhC,CADE3uE;EAMR,cAAO2uE,CAAP;OAZK52D,CAAP;KA74BoC7H,EA65BtCq1G,WAAAA,UAAAA,GAAA;EAWE,SAVA,IAAMlpB,MAAoCz+F,MAAMyB,KAAKzB,MAArD,EAKMgsH,IACFvqH,KAAK0qH,sBAAL1qH,CAA4BA,KAAK+gH,MAAjC/gH,CANJ,EASM2qH,MATN,OAAA,EAUoBxxG,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EAIE,WAJG,IACG4mH,KADG9Z,UACoBxL,cAD1B,EAEGulB,IAAc/Z,EAAMvL,SAANuL,EAFjB,EAGGga,MAHH,EAIMN,IAAoB,CAA7B,EACKA,IAAoB1Z,EAAME,YAANF,CAAmB12G,MAD5C,EACoDowH,GADpD,EACyE;EACvE,YAAMh5G,IAAOs/F,EAAME,YAANF,CAAmB0Z,CAAnB1Z,CAAb;EAAA,YACMqV,IAAUD,EAAUC,OAAVD,CAAkBpV,CAAlBoV,EAAyBsE,CAAzBtE,CADhB;EAAA,YAEIhT,MAFJ,CAGA,IAAIlzG,KAAKimH,cAALjmH,CAAoB0T,GAApB1T,CAAwBmmH,CAAxBnmH,CAAJ,EAAsC;EAGpC,cAAIwR,EAAK0+F,QAAT,EACE;EACEh3E,iBAAKE,SAALF,CAAe1nB,EAAK0+F,QAApBh3E,GACAg6E,IAAS1hG,EAAK0+F,QADdh3E;EAEA,WAHF,CAGE,OAAOja,CAAP;EACA/d,oBAAQ8Z,IAAR9Z,CACI,WAAS4vG,EAAMvyG,IAAf,qDAAA,GAEGiT,EAAK0+F,QAFR,4GADJhvG,GAMAgyG,MANAhyG;EASJ,eAAIsQ,EAAK8+F,aAAL9+F,CAAmBpX,MAAnBoX,GAA4B,CAAhC,EAAmC;EAEjC,iBADA,IAAMu5G,MAAN,EACShwH,IAAI,CAAb,EAAgBA,IAAIyW,EAAK8+F,aAAL9+F,CAAmBpX,MAAvC,EAA+CW,GAA/C,EAAoD;EAClD,kBAAMgnH,IAAevwG,EAAK8+F,aAAL9+F,CAAmBzW,CAAnByW,CAArB;EAAA,kBACM4gG,IAAY5gG,EAAK++F,WAAL/+F,CAAiBzW,CAAjByW,CADlB;EAAA,kBAEMmkG,IAAcnkG,EAAKg/F,aAALh/F,CAAmBzW,CAAnByW,CAFpB,CAKoB,SADhBw5G,IAAeT,EADHrE,EAAUC,OAAVD,CAAkBnE,CAAlBmE,EAAgC9T,CAAhC8T,CACGqE,CACC,MAClBS,IAAe,CADG,GAGpBD,EAAS/uH,IAAT+uH,EACKhJ,EAAaxjH,MAAMysH,GAAcrV,GAAazC,EADnD6X,CAHoB;EAMtBD,eAAqB9uH,IAArB8uH,CAA0BC,CAA1BD;;;EAINH,SAAa3uH,IAAb2uH,GACEpsH,MAAMuyG,EAAMvyG,MACZu+F,WAAW8tB,GACX5tB,QAAQ6tB,GACR7Z,cAAc8Z,GAJhBH;EAOF3tB,OAAe+jB,MAAf/jB,GAAmB2tB,CAAnB3tB,CAEA,IAAMiuB,M
AAN,CACA,KAASlwH,IAAI,CAAb,EAAgBA,IAAIiF,KAAK0kH,WAAL1kH,CAAiB5F,MAArC,EAA6CW,GAA7C,EAAkD;EAChD,UAAM+1G,IAAQ9wG,KAAK0kH,WAAL1kH,CAAiBjF,CAAjBiF,CAAd,CACMoyG,IAAYpyG,KAAK2kH,sBAAL3kH,CAA4BjF,CAA5BiF,CAAZoyG,EAEA+T,IAAUD,EAAUC,OAAVD,CAAkBpV,CAAlBoV,EAAyB9T,CAAzB8T,CAFV9T,CAGN,IAAKpyG,KAAKimH,cAALjmH,CAAoB0T,GAApB1T,CAAwBmmH,CAAxBnmH,CAAL,EAAA;EAIqB,kBADjBgrH,IAAeT,EAAkBpE,CAAlBoE,CACE,UAAyBlmB,MAAjB2mB,CAAR,KACnBA,IAAe,CADI,EAGfrV,IAAc31G,KAAK4kH,wBAAL5kH,CAA8BjF,CAA9BiF,CAAd21G,CACNsV,EAAYjvH,IAAZivH,EAAkBna,EAAMvyG,MAAMysH,GAAcrV,EAA5CsV;;EAEFjuB,OAAoB0nB,WAApB1nB,GAAwBiuB,CAAxBjuB,CAEA,IAAMkuB,MAAN,CACA,KAASnwH,IAAI,CAAb,EAAgBA,IAAIiF,KAAK6kH,YAAL7kH,CAAkB5F,MAAtC,EAA8CW,GAA9C,EAAmD;EAC3C+1G,UAAQ9wG,KAAK6kH,YAAL7kH,CAAkBjF,CAAlBiF,CAAR8wG,EACAsB,IAAYpyG,KAAK8kH,uBAAL9kH,CAA6BjF,CAA7BiF,CADZ8wG,EAGAqV,IAAUD,EAAUC,OAAVD,CAAkBpV,CAAlBoV,EAAyB9T,CAAzB8T,CAHVpV,CAIN,IAAK9wG,KAAKimH,cAALjmH,CAAoB0T,GAApB1T,CAAwBmmH,CAAxBnmH,CAAL,EAAA;EAGA,YAAIgrH,CAAJ,CACqB,UADjBA,IAAeT,EAAkBpE,CAAlBoE,CACE,UAAyBlmB,MAAjB2mB,CAAR,KACnBA,IAAe,CADI,EAGfrV,IAAc31G,KAAK+kH,yBAAL/kH,CAA+BjF,CAA/BiF,CAAd21G,CACNuV,EAAalvH,IAAbkvH,EAAmBpa,EAAMvyG,MAAMysH,GAAcrV,EAA7CuV;;EAGF,YADAluB,EAAqB6nB,YAArB7nB,GAAyBkuB,CAAzBluB,EACOA,CAAP;KAhgCoCnsF,EA0gC/Bq1G,YAAAA,GAAP,UACInpB,CADJ,EAEIC,CAFJ;EAKE,QAAMmuB,MAAN;EAAA,QAOMC,MAPN,CASA,UAAA,CACIta,CADJ,EACkBia,CADlB;EAEQja,QAAMvyG,IAANuyG,IAAcsa,CAAdta,GAGJsa,EAAiBta,EAAMvyG,IAAvB6sH,EAA6BpvH,IAA7BovH,CAAkCL,CAAlCK,CAHIta,GACJsa,EAAiBta,EAAMvyG,IAAvB6sH,KAAgCL,EAD5Bja;EAOR,eAAA,CAAqBA,CAArB,EAAmCia,CAAnC;EAGE,WAFA,IACI7X,CADJ,EAAMzC,MAAN,OAAA,EAEwB4a,KAAxB,EAAwBrnH,YAAxB,EAAwBA,GAAxB;EAAK,YAAMsnH,QAAN;EAAA,YACGC,IAAmBD,EAAU,CAAVA,CADtB;EAAA,YAEGE,IAAmBF,EAAU,CAAVA,CAFtB;EAAA,YAGGrJ,IAAqBqJ,EAAU,CAAVA,CAHxB,CAIH,IAAyB,MAArBA,EAAUlxH,MAAd,EACE84G,MAAAA,CADF,KAEO;EAAA,cAAyB,MAArBoY,EAAUlxH,MAAd,EAGL,MAAM,IAAIsqG,UAAJ,CAAe,iDACjBxrE,KAAKE,SAALF,CAAe43E,CAAf53E,CADiB,OAAA,GACSA,KAAKE,SAALF,CAAeoyF,CAAfpyF,CADxB,CAAN,CAFAg6E,IAASoY,EAAU,CAAVA,CAATpY;EAKF,eAAMqY,KAAoBJ,EAA1B,EAEE,YADAM,EAAmB3a,CAAnB2a,EAA0BV,CAA1BU,CACA,CAEF,IAAM1J,IAAeoJ,EAAcI,CAAdJ,CAArB,CACA,IAAIpJ,EAAa/Q,YAAb+Q,CAA0B3nH,MAA1B2nH,IAAoCyJ,CAAxC,EAEE,YADAC,EAAmB3a,CAAnB2a,EAA0BV,CAA1BU,CACA,CAEF,IAAM/E,IAAc3E,EAAa/Q,YAAb+Q,CAA0ByJ,CAA1BzJ,CAApB,CACAtR,EAAaz0G,IAAby0G,CAAkBiW,EAAYhW,aAAZgW,CAA0BzE,CAA1ByE,CAAlBjW;EAKEA,SAAar2G,MAAbq2G,GAAsB,CAAtBA,IACFK,EAAM5xG,KAAN4xG,CACIwB,iBAA+B7B,CAA/B6B,CADJxB,EAEIoC,CAFJpC,CADEL;EAaN,eAAA,CAAsBib,CAAtB;EACE,UAAMC,IAAYD,EAAUntH,IAA5B;EAAA,UAEMuyG,IAAQ8a,YACIF,CADJE,EAE4B,QAAxB5uB,EAAOyI,aAAiB,GACpBzI,EAAOyI,aADa,KAF5BmmB,CAFd,CAOAT,EAAcQ,CAAdR,IAA2Bra,CAA3Bqa,CAIA,KAFA,SAAA,EAEuBU,IADnBH,EAAU1a,YACd,EAAuBhtG,YAAvB,EAAuBA,GAAvB;EAAK,YAAM+mH,QAAN,CACH,MAAMA,aAAoBlvH,MAA1B,EACE,MAAM,IAAI6oG,UAAJ,CACF,2DACIqmB,CAFF,CAAN,CAQFU,EAAmB3a,CAAnB2a,EAA0BV,CAA1BU;;EAOJ,UAFA,IAAMltH,IAAOy+F,EAAOz+F,IAApB,EACMutH,IAAmB9uB,EAAO+jB,MADhC,OAAA,EAEwBgL,KAAxB,EAAwB/nH,YAAxB,EAAwBA,GAAxB;EACEgoH,QADSN,QACTM;EAOF,aAAQC,cAA4Bb,CAA5Ba,CAAR,GACE,KAAwB,SAAA,EAAAC,KAAxB,EAAwB/yG,YAAxB,EAAwBA,GAAxB;EAAK,UAAMuyG,QAAN,CAEH,KADM5a,IAAQqa,EAAcO,EAAUntH,IAAxB4sH,GACJ5sH,QAAQ6sH,CAAlB,EAAoC;EAClC,YAAMe,IAAkCf,EAAiBta,EAAMvyG,IAAvB6sH,CAAxC,QACOA,EAAiBta,EAAMvyG,IAAvB6sH,EACP,KAAuB,SAAA,EAAAgB,KAAvB,EAAuB75F,YAAvB,EAAuBA,GAAvB;EACE85F,YAAYvb,CAAZub,MAAAA;;;EAUR,UAJA,IAAM5b,MAAN,EACMC,MADN,OAAA,EAIwB4b,IADpBtvB,EAAO0nB,WACX,EAAwB/xF,YAAxB,EAAwBA,GAAxB;EAAK,UACGg5F,KADGD,UACmB,EADzB;EAAA,UAEGtZ,IAAYsZ,EAAU,CAAVA,CAFf;EAAA,UAGG/V,IAAc+V,EAAU,CAAVA,CAHjB,CAIH1G,SAAqB2G,KAAaR,CAAlCnG,EACA,IACMuH,KADAzb,IAAQqa,EAAcQ,CAAdR
,GACmBna,aAAaoB,GAAW1B,aADzD,CAEAD,EAAaz0G,IAAby0G,CAAkB8b,EAAmB5W,CAAnB4W,CAAlB9b;EAIF,UAFA,SAAA,EAEwB+b,IADpBxvB,EAAO6nB,YACX,EAAwBzoD,YAAxB,EAAwBA,GAAxB;EACQuvD,WADGD,UACmB,EAAtBC,EACAvZ,IAAYsZ,EAAU,CAAVA,CADZC,EAEAhW,IAAc+V,EAAU,CAAVA,CAFdC,CAGN3G,SAAqB2G,KAAaR,CAAlCnG,EACA,IAAMlU,CAAN,CACMyb,KADAzb,IAAQqa,EAAcQ,CAAdR,GACmBna,aAAaoB,GAAW1B,aAAnD6b,CACN7b,EAAc10G,IAAd00G,CAAmB6b,EAAmB5W,CAAnB4W,CAAnB7b;EAEF,YAAO,IAAI3T,CAAJ,GAAStrF,QAAQg/F,GAAc5+F,SAAS6+F,GAAenyG,SAAvD,CAAP;KAtpCoCsS,EA+pCtC1M,qBAAAA,CAAI+hH,WAAJ/hH,YAAAA,SAAA;EAGE,UAAInE,KAAK4yG,SAAT,EACE,MAAM,IAAIlO,UAAJ,CACF,sLADE,CAAN,CAKF,KAAoB,SAAA,EAAAvrF,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EACE,iBAAU6uG,QAAV,EACE,QAAO,CAAP;EAGJ,eAAO,CAAP;2CAdF1uG,CA/pCsC0M,EAsrCtCq1G,WAAAA,YAAAA,GAAA;EAAA,gBAAA,CACExtG,KAAK;EACH/X,QAAKogH,MAALpgH,CAAYP,OAAZO,CAAoB,UAAAmwG,CAAA;EAEdA,UAAM+B,QAAN/B,IACFA,EAAM2b,WAAN3b,EADEA;SAFNnwG;OADF+X;KAvrCoC7H,GAisCxC;IAjsCwCshG,MAAxC,CCxMA,gCAAA,CAAiC57F,CAAjC,EAAsD3b,CAAtD;EAEE,MAAiB,QAAb2b,EAAIta,KAAR,EAAuB;EACrB,QAAIsa,EAAIta,KAAJsa,CAAUnc,MAAVmc,KAAqB3b,EAAIqB,KAAJrB,CAAUR,MAAnC,EACE,MAAM,IAAIsqG,UAAJ,CACF,uBAAqB9pG,EAAIqB,KAAJrB,CAAUR,MAA/B,2CAAA,GACYmc,EAAIta,KAAJsa,CAAUnc,MADtB,OADE,CAAN,CAKF,KAAK,IAAIW,IAAI,CAAb,EAAgBA,IAAIwb,EAAIta,KAAJsa,CAAUnc,MAA9B,IAAwCW,CAAxC,EACE,IAAoB,QAAhBwb,EAAIta,KAAJsa,CAAUxb,CAAVwb,CAAgB,IAAQA,EAAIta,KAAJsa,CAAUxb,CAAVwb,MAAiB3b,EAAIqB,KAAJrB,CAAUG,CAAVH,CAA7C,EACE,MAAM,IAAI8pG,UAAJ,CACF,SAAO3pG,CAAP,gCAAA,GAAsCH,EAAIqB,KAAJrB,CAAUG,CAAVH,CAAtC,6CAAA,GACsC2b,EAAIta,KAAJsa,CAAUxb,CAAVwb,CADtC,OADE,CAAN;EAON,OAAiB,QAAbA,EAAItY,KAAS,IAAQsY,EAAItY,KAAJsY,KAAc3b,EAAIqD,KAA3C,EAEE,OAAOrD,CAAP,CAEF;EAEE,WAAO2K,KAAK3K,CAAL2K,EAAUgR,EAAItY,KAAdsH,CAAP;EACA,GAHF,CAGE,OAAO0Z,CAAP;EAEA,UAAM,IAAIylF,UAAJ,CACF,4BAA0B9pG,EAAIqD,KAA9B,gDAAA,GACesY,EAAIhY,IADnB,QAAA,GAC6BgY,EAAItY,KADjC,OADE,CAAN;;EAkBJ;EAQE,YAAA,CAAYyuH,CAAZ;EACE,QARM1sH,aAAAA,KAAAA,EAQF0sH,aAAiBC,CAArB,EACE,KAAK,IAAMhoH,CAAX,IAAiB+nH,EAAME,QAAvB,EACE5sH,KAAK4sH,QAAL5sH,CAAc2E,CAAd3E,IAAoB0sH,EAAME,QAANF,CAAe/nH,CAAf+nH,CAApB1sH,CAFJ,KAIO;EACL,UAAa,QAAT0sH,CAAJ,EACE,OAEF,KAAmB,SAAA,EAAAG,KAAnB,EAAmB7oH,YAAnB,EAAmBA,GAAnB;EAAK,YAAM8oH,QAAN,CACH9sH,KAAKqI,GAALrI,CAAS8sH,EAAKv2G,GAAdvW,EAAmB8sH,EAAK/oH,KAAxB/D;;;EAoDR,UAvCE2sH,WAAAA,IAAAA,GAAA,UAAIp2G,CAAJ,EAAyBxS,CAAzB;EACE,QAA6B,QAAzB/D,KAAK4sH,QAAL5sH,CAAcuW,EAAI5R,EAAlB3E,CAAJ,EAGE,MAAM,IAAI0kG,UAAJ,CAAe,yBAAuBnuF,EAAIhY,IAA3B,UAAA,GAAuCgY,EAAI5R,EAA1D,CAAN,CAEF,OAJE3E,KAAK4sH,QAAL5sH,CAAcuW,EAAI5R,EAAlB3E,IAAwB+sH,wBAAwBx2G,CAAxBw2G,EAA6BhpH,CAA7BgpH,CAAxB/sH,EAIKA,IAAP;KANF2sH,EAcAA,WAAAA,QAAAA,GAAA,UAAQG,CAAR;EACE9sH,SAAKqI,GAALrI,CAAS8sH,EAAKv2G,GAAdvW,EAAmB8sH,EAAK/oH,KAAxB/D;KAfF2sH,EAsBAA,WAAAA,OAAAA,GAAA,UAAOp2G,CAAP;EACE,WAAgC,QAAzBvW,KAAK4sH,QAAL5sH,CAAcuW,EAAI5R,EAAlB3E,CAAP;KAvBF2sH,EAgCAA,WAAAA,SAAAA,GAAA,UAASp2G,CAAT;EACE,QAA6B,QAAzBvW,KAAK4sH,QAAL5sH,CAAcuW,EAAI5R,EAAlB3E,CAAJ,EACE,MAAM,IAAI0kG,UAAJ,CAAe,sBAAoBxrE,KAAKE,SAALF,CAAe3iB,CAAf2iB,CAAnC,CAAN,CAEA,OAAOl5B,KAAK4sH,QAAL5sH,CAAcuW,EAAI5R,EAAlB3E,CAAP;KApCJ2sH,GAuCF;KAtEA,kBA4FIK,GAA0CC,GAC1C/Z;EAQF,OAPA,IAAMga,IAAerxH,MAAMC,OAAND,CAAcmxH,CAAdnxH,CAArB,EACMsxH,IACFD,IAAeF,CAAfE,IAA8CF,EAFlD,EAIMn7G,MAJN,EAKMu7G,IAAmB,IAAIT,QAAJ,CAAaM,CAAb,CALzB,OAAA,EAOoBI,KAApB,EAAoBrpH,YAApB,EAAoBA,GAApB;EAAK,QAAMspH,QAAN,CACHz7G,EAAQ7V,IAAR6V,CAAa07G,gBAAgBD,CAAhBC,EAAuBH,CAAvBG,EAAyCra,CAAzCqa,CAAb17G;EAEF,UAAOq7G,IAAer7G,CAAfq7G,GAAyBr7G,EAAQ,CAARA,CAAhC;EAGF,yBAAA,CACI8lF,CADJ,EAC2By1B,CAD3B,EAEIla,CAFJ;EAGE,MAAIka,EAAiBI,MAAjBJ,CAAwBz1B,CAAxBy1B,CAAJ,EACE,OAAOA,EAAiBK,QAAjBL,CAA0Bz1B,CAA1By1B,CAA
P,CAEF,IAAIz1B,EAAMsY,WAANtY,YAA6B4e,UAAjC,EACE,MAAM,IAAI7R,UAAJ,CACF,8DACI6R,WAAWh4G,IADf,MADE,CAAN,CAOF,KAFA,IACMmvH,MADN,OAAA,EAEoBC,IAFLh2B,EAAMlmF,MAErB,EAAoBzN,YAApB,EAAoBA,GAApB;EAAK,QAEG4pH,IAAWL,oBAAAA,EAAuBH,CAAvBG,EAAyCra,CAAzCqa,CAFd,CAGHG,EAAY1xH,IAAZ0xH,CAAiBE,CAAjBF;EAGF,OAAI57G,IACA6lF,EAAMsY,WAANtY,CAAkBz4F,KAAlBy4F,CAAwB+1B,CAAxB/1B,EAAqCub,CAArCvb,CADJ,CAEK97F,MAAMC,OAAND,CAAciW,CAAdjW,MACHiW,KAAUA,EADPjW,EAML,KAHA,IAAMgyH,IAAeC,eAAen2B,CAAfm2B,CAArB,EACMC,IACFlyH,MAAMC,OAAND,CAAcgyH,CAAdhyH,IAA8BgyH,CAA9BhyH,IAA8CgyH,EAFlD,EAGS9yH,IAAI,CAAb,EAAgBA,IAAIgzH,EAAsB3zH,MAA1C,IAAoDW,CAApD,EACEqyH,EAAiB/kH,GAAjB+kH,CAAqBW,EAAsBhzH,CAAtBgzH,CAArBX,EAA+Ct7G,EAAO/W,CAAP+W,CAA/Cs7G,EAEF,OAAyB,MAAlBt7G,EAAO1X,MAAW,GAAI0X,EAAO,CAAPA,CAAJ,GAAgBA,EAAO6lF,EAAMwY,iBAAbr+F,CAAzC;EASF,wBAAA,CAAwB6lF,CAAxB;EAEE,MAAIk2B,CAAJ,CACA,IAA8C,MAA1Cl2B,EAAMsY,WAANtY,CAAkBqZ,YAAlBrZ,CAA+Bv9F,MAAnC,EACEyzH,IAAel2B,EAAMsY,WAANtY,CAAkB7lF,MAAjC+7G,CADF,KAEO;EAEL,SADA,IAAIzb,IAAoB,IAAxB,EACSr3G,IAAI,CAAb,EAAgBA,IAAI48F,EAAMsY,WAANtY,CAAkBqZ,YAAlBrZ,CAA+Bv9F,MAAnD,IAA6DW,CAA7D,EACE,KAA2B,SAAA,EAAAoe,IAAAw+E,EAAMsY,WAANtY,CAAkBqZ,YAAlBrZ,CAA+B58F,CAA/B48F,EACjB+Y,aADV,EAA2B1sG,YAA3B,EAA2BA,GAA3B;EAEE,eAAiBW,OAAOgzF,EAAMhzF,EAA9B,EAAkC;EAChCytG,YAAYr3G,CAAZq3G,CACA;;EAINyb,SAAel2B,EAAMsY,WAANtY,CAAkBq2B,WAAlBr2B,CAA8Bya,CAA9Bza,CAAfk2B;EAEF,UAAOA,CAAP;EC7EF,KAAMI,gCAAgC,EAAtC,CAYA,sCAAA,CAGInX,CAHJ,EAGgBoX,CAHhB;EAIE,MAAIpX,EAAMjlG,OAANilG,CAAc18G,MAAd08G,GAAuB,CAA3B,EACE,MAAM,IAAInS,mBAAJ,CACF,yGADE,CAAN,CAKFwpB,KAASzyH,MAATyyH,CACItyH,MAAMC,OAAND,CAAcqyH,CAAdryH,KAAqD,MAAvBqyH,EAAY9zH,MAD9C+zH,EAEI,uHAEID,CAJRC,EAQA,IAAMp7C,KADNm7C,IAAcA,GACS,EAAvB;EAAA,MACI/8G,IAAK+8G,EAAY,CAAZA,CADT,CAEA,IAAI/8G,aAAci9G,MAAlB,EAWE,OAVAD,KAASzyH,MAATyyH,CAC4B,MAAxBrX,EAAMrlG,MAANqlG,CAAa18G,MADjB+zH,EAEI,wBAAsBrX,EAAMrlG,MAANqlG,CAAa18G,MAAnC,iGAAA,GAEmB8+B,KAAKE,SAALF,CAAe49E,EAAMmO,UAArB/rF,CAFnB,oCAFJi1F,GAMAA,KAASzyH,MAATyyH,CACIh9G,EAAGlV,KAAHkV,CAAS,CAATA,MAAgB4hE,EAAG92E,KAAH82E,CAAS,CAATA,CADpBo7C,EAEI,qDAAmDh9G,EAAGlV,KAAHkV,CAAS,CAATA,CAAnD,UAAA,GACO4hE,EAAG92E,KAAH82E,CAAS,CAATA,CADP,MAFJo7C,CANAA,GAUQh9G,GAAI4hE,EAAZ,CAEA,IAAIxmD,UAAJ,CACApb,IAAKA,CAALA,CAIA,KAHA,IAAMk9G,MAAN,OAAA,EAGwBl1G,IAAA29F,EAAMmO,UAA9B,EAAwBjhH,YAAxB,EAAwBA,GAAxB;EAAK,QAAM0N,QAAN,CACH,IAAqB,QAAjBP,EAAGO,CAAHP,CAAJ,EACE,MAAM,IAAIuzF,UAAJ,CACF,6EACchzF,CADd,OADE,CAAN,CAIF28G,EAAWryH,IAAXqyH,CAAgBl9G,EAAGO,CAAHP,CAAhBk9G,GACiB,QAAb9hG,CAAa,GACfA,IAAYpb,EAAGO,CAAHP,EAAclV,KAAdkV,CAAoB,CAApBA,CADG,GAGfg9G,KAASzyH,MAATyyH,CACIh9G,EAAGO,CAAHP,EAAclV,KAAdkV,CAAoB,CAApBA,MAA2Bob,CAD/B4hG,EAEI,qDACQh9G,EAAGO,CAAHP,EAAclV,KAAdkV,CAAoB,CAApBA,CADR,UAAA,GACsC4hE,EAAG92E,KAAH82E,CAAS,CAATA,CADtC,MAFJo7C,CAJFE;EAUF,UAAOA,EAAWlnH,MAAXknH,CAAkBt7C,CAAlBs7C,CAAP;EAMJ,yCAAA,CACIrpH,CADJ;EAOE,MAAoB,MAAhBA,EAAK5K,MAAT,EACE,MAAM,IAAIuqG,mBAAJ,CACF,wDADE,CAAN,CAGF,SAAQxzF,IAAInM,EAAK,CAALA,GAAS+tE,IAAI/tE,EAAK,CAALA,GAAzB;EAGF,oBAAA,CAGI8xG,CAHJ,EAGgBwX,CAHhB,EAIItxB,CAJJ;;;;EAgCE,cA3BMuxB,IAA+C,QAA1BvxB,EAAOwxB,eAA5BD,EACNJ,KAASzyH,MAATyyH,CACuB,QAAnBrX,EAAM2X,SADVN,EAEI,0FAFJA,CADMI,EAMNJ,KAASzyH,MAATyyH,CACc,QAAVnxB,CADJmxB,EAEI,+FAFJA,CANMI,EAUNJ,KAASzyH,MAATyyH,CACqB,QAAjBnxB,EAAOmd,MAAU,IAAQnd,EAAOmd,MAAPnd,GAAgB,CAAxB,IACb5hG,OAAOohG,SAAPphG,CAAiB4hG,EAAOmd,MAAxB/+G,CAFR+yH,EAGI,mFACwBnxB,EAAOmd,MAJnCgU,CAVMI,EAeNJ,KAASzyH,MAATyyH,EACKI,KACIvxB,EAAOwxB,eAAPxxB,GAAyB,CAAzBA,IACA5hG,OAAOohG,SAAPphG,CAAiB4hG,EAAOwxB,eAAxBpzH,CAHT+yH,EAII,yGAC8CnxB,EAAOwxB,eALzDL,CAfMI,EAqBNJ,KAASzyH,MAATyyH,CAE0C,QAArCnxB,EAAgC0xB,eAFrCP,EAGI,mFAHJA,CArBMI,EA2BFzX,EAAM6
X,UAAV,EACE,MAAM,IAAIp1H,KAAJ,CACF,8DADE,CAAN,CAGFu9G,EAAM6X,UAAN7X,IAAmB,CAAnBA,aAAAA;EAgDE,4CA7CMyD,IAAwC,QAAzBvd,EAAO4xB,gBACxBC,YACAC,YACAvU,MACEwU,gBAAgB/xB,EAAO4xB,cAAvBG,IACFZ,KAASzyH,MAATyyH,CACgC,QAA5BnxB,EAAOgyB,iBAAqB,IACvBhyB,EAAOgyB,iBAAPhyB,GAA2B,CAA3BA,IACA5hG,OAAOohG,SAAPphG,CAAiB4hG,EAAOgyB,iBAAxB5zH,CAHT+yH,EAII,mJAGenxB,EAAOgyB,iBAP1Bb,CADEY,IAUIH,IAAiBK,gCACnBjyB,EAAO4xB,cADYK,CAAjBL,EAONC,IAAQD,EAAez9G,EAPjBy9G,EAQNE,IAAQF,EAAe77C,EAlBrBg8C,CADFxU,GAuBE2U,IAAgBpY,EAAMqY,iBAANrY,IAChBsY,IAAYtY,EAAMuY,sBAANvY,IAEd0D,YAEFA,IADED,IAEE6U,EAAUvxH,KAAVuxH,GAAkBjoH,MAAlBioH,CAAyBA,EAAUttH,GAAVstH,CAAc,UAAAzyH,CAAA;EAAK,mBAAA,SAASA,CAAT;aAAnByyH,CAAzBA,CAFF7U,GAIgB6U,EAAUvxH,KAAVuxH,IAGdrY,IAAYuY,qBAAqBtyB,EAAO+Z,SAA5BuY,GACZn2G,IAA0Bo2G,mBAC5BxY,CAD4BwY,EACjBvyB,EAAO2a,UADU4X,EACEvyB,EAAOz7F,OADTguH,EACkBvyB,EAAOmd,MADzBoV,EACiC,IADjCA,EACuC,IADvCA,EAE5BvyB,EAAOwxB,eAFqBe,EAG5B,IAH4BA,EAI5BhV,CAJ4BgV,EAId/U,CAJc+U,GAAzB5U,oBAAc6U,eAKrB1Y,EAAMoC,OAANpC,GAAgB0Y,OAEV7U,EAAalD,YAAbkD,GAAN;EAImB,iBAJnBhoF,MAAAA,IACIkkF,IAA+B,QAAvB7Z,EAAOod,YAAgB,GAAO,CAAP,GAAWpd,EAAOod,YADrDznF,EAEM88F,MAFN98F,MAIyB27F,EAAQoB,QAARpB,GAAN;EAAfqB,cAAeh9F,MAAAA,EAAfg9F,aAAAA;mBACG9Y,IAAQ7Z,EAAOmd,MAAftD,OACC8D,EAAavD,YAAbuD,CAA0B9D,CAA1B8D,EADD9D;mBACLlkF,MAAAA,IACIi9F,IAAY,CADhBj9F,EAEIk9F,IAAa,CAFjBl9F,EAGK47F,UAAAA,OACkBD,EAAQoB,QAARpB;EAArBqB,cAAeh9F,MAAAA,EAAfg9F,aAAAA;oBAEKpB,KAAqBqB,IAAY5yB,EAAOwxB,sBACnBmB,EAAaxyE,IAAbwyE;EAI1B,cAJMzB,IAAcv7F,MAAAA,EAAdu7F,EAIFK,KAAsBL,EAAYjrG,IAAtC,EAYE,OAXA/hB,QAAQ8Z,IAAR9Z,CACI,uCACG87F,EAAOwxB,eADV,uDAAA,GAGGoB,CAHH,0IAAA,GAOG5yB,EAAOwxB,eAAPxxB,GAAyBA,EAAOmd,MAPnC,qFADJj5G,UAWA,KAIuB,QAArBgtH,EAAYnqH,OAAZ,cAAA,CAaF,KAZM+rH,IACFC,8BAA8BjZ,CAA9BiZ,EAAqC7B,EAAYnqH,KAAjDgsH,CADED,GAEAE,QACWnkG,QAAIgkG,CAHfC,EAINE,EAAgB9zH,IAAhB8zH,GAAoBF,EAAQ,CAARA,EAAW7zH,KAAX6zH,CAAiB,CAAjBA,CAJdA,EAMNnV,EAAarD,YAAbqD,CAA0BkV,CAA1BlV,EAAsCqV,CAAtCrV,CANMmV,EAUAG,IAAOf,EAAcY,CAAdZ,CAVPY,EAWNI,QAAYJ,CAAZI,CAXMJ,EAYG/0H,IAAI,CAAb,EAAgBA,IAAIq0H,EAAUh1H,MAA9B,IAAwCW,CAAxC,EACQwrG,IAAQ6oB,EAAUr0H,CAAVq0H,CAAR7oB,EACAhnD,IAAM0wE,EAAKl1H,CAALk1H,CADN1pB,EAENypB,EAAUzpB,CAAVypB,IAAmBzwE,CAFbgnD,EAGN4pB,KAAS5wE,CAAT4wE,CAHM5pB,CAMR,WAAMoU,EAAanD,UAAbmD,CAAwBkV,CAAxBlV,EAAoCqV,CAApCrV,EAAN;EAAAhoF,gBAAAA,IACAy9F,qBAAqBJ,CAArBI,CADAz9F,EAGAk9F,GAHAl9F,EAIAi9F,GAJAj9F,cAAAA;oBAOE47F,IAAqBqB,KAAa5yB,EAAOwxB,eAAzCD,GACqBL,EAAYjrG,QAE/Bs3F,KACE8V,UAAAA,EACAtB,gBAAgB/xB,EAAO4xB,cAAvBG,KACQx8F,IAAA+9F,MAAA/9F,MAAaukF,EAAMyZ,eAANzZ,CACnB9Z,EAAO4xB,cADY9X,IACKt7E,SAASwhE,EAAOgyB,mBADrBlY,EADrBiY,WAFFxU;mBAGA8V,IAAU99F,OAAAA,OAAAA,GAAOI,MAAAA,GAAPJ,CAAV89F;EAGAA,cAAUC,OAAOxZ,EAAM0Z,QAAN1Z,CAAe+X,CAAf/X,EAAsBgY,CAAtBhY,IACfvqF,WAAyC,QAA9BywE,EAAOyzB,mBAAuB,GACrCxC,6BADqC,GAErCjxB,EAAOyzB,qBACXlvH,SAAS,GAJMu1G,CAAPwZ,CAAVD,cAAAA;EAOF,eAASt1H,IAAI,CAAb,EAAgBA,IAAI+7G,EAAM4Z,YAAN5Z,CAAmB18G,MAAvC,IAAiDW,CAAjD,EACE00H,EAAU,SAAO3Y,EAAM4Z,YAAN5Z,CAAmB/7G,CAAnB+7G,CAAjB2Y,IAA4CY,EAAQt1H,CAARs1H,CAA5CZ;EAQJ,wBAAA;EAGF,iBAAI3Y,EAAM6Z,aAAN7Z,UAAAA,SAAJ;EAIF,qBAAM6D,EAAatD,UAAbsD,CAAwB9D,CAAxB8D,EAA+B8U,CAA/B9U,EAAN;EAEA,iBAFAhoF,MAAAA,IACAkkF,GADAlkF,EAEImkF,EAAM6Z,aAAN7Z,UAAAA,SAAJ;EAIF,qBAAM6D,EAAajD,UAAbiD,GAAN;EACA,iBADAhoF,MAAAA,QACMmkF,EAAMoC,OAANpC,CAAc8Z,QAAd9Z,GAAN;EACA,iBADAnkF,MAAAA,QACOmkF,EAAMoC,QAAb;mBAEApC,EAAM6X,UAAN7X,IAAmB,CAAnBA;;;;EAMJ,yBAAA,CACIwX,CADJ;EAOE,SAAoD,qBAApCA,EAAuBoB,QAAvC;EAKF,8BAAA,CACIA,CADJ;EAEE,SAAsD,qBAAtCA,EAA6BvyE,IAA7C;EAGF,yBAAA,CAGI25D,CAHJ,EAGgBwX,CAHhB,EAIItxB,CAJJ;;;;EAQE,cAHM6zB,IAA+B,QAAlB7zB,EAAOxhE,OAApBq1F,EACA9xH,IAAI+3G,EAAMga,YADVD,EAEAZ,MAFAY,EAGF7zB
,EAAOz7F,OAAPy7F,GAAiB,CAArB,EACE,MAAM,IAAI2H,mBAAJ,CAAwB,sCAAxB,CAAN,QAGFwpB,KAASzyH,MAATyyH,EACK0C,KAAe7zB,EAAOxhE,OAAPwhE,GAAiB,CAAjBA,IAAsB5hG,OAAOohG,SAAPphG,CAAiB4hG,EAAOxhE,OAAxBpgC,CAD1C+yH,EAEI,wEACgBj1F,KAAKE,SAALF,CAAe8jE,EAAOxhE,OAAtBtC,CAHpBi1F,GAIqB4C,qBAAqBzC,CAArByC,KACjB53G,IAAAm1G,CAAAn1G,QADiB43G;EAEjB,qBAAOzC,EAAuBoB,QAAvBpB,GAAP;EAAAn1G,cAAAoZ,MAAAA,EAAApZ,aAAAA;EAFEw2G,eAAAA,EAIFqB,IAAc,CAJZrB,EAKF9jG,IAAQ,CALN8jG;;;EAOgB,6BAAMA,EAAaxyE,IAAbwyE,GAAN;EACpB,uBADMzB,IAAc/0G,MAAAA,IACJpV,KAAhB,EAAuB;EAOrB,wBAJMktH,IAAUlB,8BAA8BjZ,CAA9BiZ,EAAqC7B,EAAYnqH,KAAjDgsH,CAAVkB,EACAC,IAAYC,KAAS;EAAM,6BAAApyH,EAAEkyH,CAAFlyH,CAAA;uBAAfoyH,CADZF,EAENf,QAAYe,CAAZf,CAFMe,EAIQ,MAAVplG,CAAJ,EACE,KAAS9wB,IAAI,CAAb,EAAgBA,IAAIm2H,EAAU92H,MAA9B,IAAwCW,CAAxC,EACEk1H,EAAKj0H,IAALi0H,CAAUvoB,UAAU,CAAVA,CAAVuoB,EAIJ,KADMmB,IAAYH,EAAQ,CAARA,EAAWh1H,KAAXg1H,CAAiB,CAAjBA,CAAZG,gBACGr2H;EACP,0BAAMs2H,IAAWH,EAAUn2H,CAAVm2H,CAAjB;EAAA,0BACMI,IAAYrB,EAAKl1H,CAALk1H,CADlB,CAEAA,EAAKl1H,CAALk1H,IAAUkB,KACN;EAAM,+BAAA1pB,IAAQwoB,EAAKl1H,CAALk1H,CAARxoB,EAAiBF,IAAQG,UAAU0pB,CAAV1pB,CAARH,EAA8B8pB,CAA9B9pB,CAAjBE,CAAA;yBADA0pB,CAAVlB,EAGIpkG,IAAQ,CAARA,IACFqkG,QAAYoB,CAAZpB,CAJFD;uBAJImB,EACGr2H,IAAI,CAAb,EAAgBA,IAAIm2H,EAAU92H,MAA9B,IAAwCW,CAAxC,IAASA,GAUTm1H,QAAYgB,CAAZhB,GACAc,KAAeI,CADflB,IAGErkG,CAHFqkG;EAKF,0BAAIhC,EAAYjrG,IAAZirG,IACE2C,KACF3vH,QAAQ8Z,IAAR9Z,CACI,gLAG0B87F,EAAOxhE,OAHjC,qFADJt6B,CADE2vH,cADF3C,OAAJ;;aApCIyB,aAAAA;oBAMCkB,KAAahlG,IAAQmxE,EAAOxhE;;EA2CnC,6BAASzgC;EACP,gBAAMu2H,IAAYrB,EAAKl1H,CAALk1H,CAAlB,CACAA,EAAKl1H,CAALk1H,IACIkB,KAAS;EAAM,qBAAA3pB,IAAQyoB,EAAKl1H,CAALk1H,CAARzoB,EAAiBE,UAAUspB,CAAVtpB,CAAjBF,CAAA;eAAf2pB,CADJlB,EAEAC,QAAYoB,CAAZpB,CAFAD;eAFOl1H,IAAI,CAAb,EAAgBA,IAAIk1H,EAAK71H,MAAzB,IAAmCW,CAAnC,IAASA,GAOT,WAAOw2H,iBAAiBtB,CAAjBsB,EAAP;;;2BChY6BhlG;EAC7B4hG,OAASzyH,MAATyyH,CACI5hG,IAAY,CAAZA,IAAiBnxB,OAAOohG,SAAPphG,CAAiBmxB,CAAjBnxB,CADrB+yH,EAEI,6DAA2D5hG,CAF/D4hG;EAmBF,qBAAA,CACI/iD,CADJ,EAC6BjsE,CAD7B,EAC4C8kB,CAD5C;EAEE,SAAc,QAAVmnD,CAAU,IACJ,KADI,GAEHvvE,MAAMC,OAAND,CAAcuvE,CAAdvvE,IACFuvE,EAAOtpE,GAAPspE,CAAW,UAAAlxE,CAAA;EAAS,WAAAkwG,oBAAoBlwG,CAApBkwG,EAA2BjrG,CAA3BirG,EAAkCnmF,IAAO9kB,CAAzCirG,CAAA;KAApBh/B,CADEvvE,GAGFuuG,oBAAoBh/B,CAApBg/B,EAA4BjrG,CAA5BirG,EAAmCnmF,IAAO9kB,CAA1CirG,CALT;EAsBF,8BAAA,CACIh/B,CADJ,EAC6B3kE,CAD7B;EAEE,SAAO0qH,KAAS;EACd,WAAc,QAAV/lD,CAAU,GACL,IADK,GAEHvvE,MAAMC,OAAND,CAAcuvE,CAAdvvE,IACFuvE,EAAOtpE,GAAPspE,CACH,UAAAlxE,CAAA;EAAS,aAACs3H,qBAAqBt3H,CAArBs3H,EAA4B/qH,CAA5B+qH,CAAD;OADNpmD,CADEvvE,GAMF6K,SACH0kE,CADG1kE,EACuB,YAAlBD,EAAQxI,KAAU,GAAUwI,CAAV,GAAoBA,EAAQw9D,KAARx9D,EAD3CC,CART;KADKyqH,CAAP;EAuBF,qBAAA,CACIj1H,CADJ,EACkBqwB,CADlB;EAKE,OAHA,IAAMza,MAAN,EACI2/G,IAAa,CADjB,EAEIhY,IAAmB,IACvB,EAAOgY,IAAav1H,CAApB,IACEu9G,IAAWgY,IAAallG,MACRrwB,MACdu9G,IAAWv9G,IAEb4V,EAAO9V,IAAP8V,EAAa2/G,GAAYhY,EAAzB3nG,GACA2/G,IAAahY,EAEf,OAAO3nG,CAAP;EA8BF,iBAAA,CAGIglG,CAHJ,EAGgB/3G,CAHhB,EAGiD2yH,CAHjD,EAIItC,CAJJ,EAI0B7iG,CAJ1B,EAI8C4tF,CAJ9C,EAI+D54G,CAJ/D,EAKIw1G,CALJ,EAKgC4a,CALhC,EAMIC,CANJ,EAMuB90H,CANvB,EAMiD09G,CANjD,EAOIJ,CAPJ,EAO2BE,CAP3B,EAOmDuX,CAPnD,EAQIla,CARJ;;;;EA4BE,cAnBiB,QAAbprF,CAAa,KACfA,IAAY,EADG,GAGH,QAAV4tF,CAAU,KACZA,IAAS,CADG,CAHG,EAMF,QAAXr9G,CAAW,KACbA,KAAU,CADG,CANE,EASG,QAAhBs9G,CAAgB,KAClBA,IAAe,CADG,CATH,EAcbG,KAAe,CAdF,EAeL,QAARoX,CAAQ,IAAkB,QAAVC,CAAR,KACVrX,KAAe,CADL,CAfK,EAmBM,QAAnBsX,CAAmB,KACrBtX,KAAe,CAAfA,EACqB,QAAjBD,CAFiB,CAAvB,EAGI,MAAM,IAAI5V,UAAJ,CACF,kGADE,CAAN,CAsBJ,OAbuB,SAHjB2V,IACFvD,EAAMgb,eAANhb,CAAsB4a,CAAtB5a,EAA2BvqF,CAA3BuqF,EAAsCwD,CAAtCxD,EAAqD,iBAArDA,CAEmB,MACrBib,IAAanj
E,QAAM,CAANA,EAASyrD,CAATzrD,CADQ,GAIR,QAAXrtD,CAAW,KACbA,IAAU,CADG,CAJQ,EAQjB4X,IAA0Bo2G,mBAC5BxY,CAD4BwY,EACjB5X,CADiB4X,EACLhuH,CADKguH,EACIpV,CADJoV,EACYnV,CADZmV,EAC0BlV,CAD1BkV,EAE5BjV,CAF4BiV,EAEbhjG,CAFagjG,EAEFhV,CAFEgV,EAEY/U,CAFZ+U,CART,EAQhB5U,kBARgB,EAQFzB,aARE,EAWvByB,EAAaxD,QAAbwD,CAAsB7D,CAAtB6D,CAXuB,EAYvB7D,EAAMoC,OAANpC,GAAgBoC,CAZO,MAajByB,EAAalD,YAAbkD,GAAN;EAAApoF,gBAAAA,IACAukF,EAAM6Z,aAAN7Z,IAAsB,CADtBvkF,gBAKSskF;;;EACP,6BAAM8D,EAAavD,YAAbuD,CAA0B9D,CAA1B8D,EAAN;wBAAAxhG,MAAAA,IACMs2G,MADNt2G,EAEqB,QAAjBmhG,GAAA,aAAA,CACF,MAAM,IAAI3V,mBAAJ,CACF,4CADE,CAAN;EAGA,sBAAgB,YAAZ7nG,CAAJ,EACE,MAAM,IAAI6nG,mBAAJ,CAAwB,wCAAxB,CAAN,CACS7nG,KACTgvG,KAAKhvG,OAALgvG,CAAaimB,CAAbjmB,CADShvG,EAKLk1H,IAAoBrrG,SAASorG,CAATprG,CALf7pB,EAOLm1H,IAAUC,YAAY7X,CAAZ6X,EAA6B3lG,CAA7B2lG,CAPLp1H,gBAQF+yH;;;EAEP,iCADMG,MAAAA,MACArV,EAAarD,YAAbqD,CAA0BkV,CAA1BlV,EAAsCqV,CAAtCrV,EAAN;EAsCA,iCAtCAxhG,MAAAA,IAEAg4G,KAAS;EACP,gCAAMM,IAAaQ,EAAQpC,CAARoC,EAAoB,CAApBA,CAAnB;EAAA,gCACMxY,IAAWwY,EAAQpC,CAARoC,EAAoB,CAApBA,CADjB;EAAA,gCAEME,IAAW/nB,oBACI4nB,CADJ5nB,EACuBqnB,CADvBrnB,EAEIqP,IAAWgY,CAFfrnB,CAFjB,CAKA4lB,EAAiBnkG,KAAjBmkG,GAAqBH,CAArBG,EACAA,EAAgB9zH,IAAhB8zH,GAAoBvW,IAAWgY,CAD/BzB,CAOA,KAFA,IAAMoC,IAAWZ,qBAAqBE,CAArBF,EAA0BW,CAA1BX,CAAjB,EACMvB,IAAOlxH,EAAEqzH,CAAFrzH,CADb,EAEShE,IAAI,CAAb,EAAgBA,IAAIq0H,EAAUh1H,MAA9B,IAAwCW,CAAxC,EAA2C;EACzC,kCAAMwrG,IAAQ6oB,EAAUr0H,CAAVq0H,CAAd;EAAA,kCACM7vE,IAAM0wE,EAAKl1H,CAALk1H,CADZ,CAEAD,EAAUzpB,CAAVypB,IAAmBzwE,CAAnBywE,EACAG,KAAS5wE,CAAT4wE,CADAH;EAKF,iCAAIH,MAAeoC,EAAQ73H,MAAR63H,GAAiB,CAAhCpC,IACEtV,CADN,EAEI;EAAA,kCAAM8V,IAAUvZ,EAAMub,QAANvb,CAAe6a,CAAf7a,EAAqB8a,CAArB9a,EAA6BvqF,CAA7BuqF,CAAhB,CAEA,KAAS/7G,IAAI,CAAb,EAAgBA,IAAIq0H,EAAUh1H,MAA9B,IAAwCW,CAAxC,EAA2C;EACnCwrG,oCAAQ6oB,EAAUr0H,CAAVq0H,CAAR7oB,EACAhnD,IAAM8wE,EAAQt1H,CAARs1H,CADN9pB,CAEN4pB,KAAS5wE,CAAT4wE,GAEAV,EAAU,SAASlpB,CAAnBkpB,IAA4BlwE,CAF5B4wE;;;6BA5BRgB,CAFAh4G,MAsCMwhG,EAAanD,UAAbmD,CAAwBkV,CAAxBlV,EAAoCqV,CAApCrV,EAAN;EAGA,iCAHAxhG,MAAAA,IACAi3G,qBAAqBJ,CAArBI,CADAj3G,EAGI29F,EAAM6Z,aAAN7Z,eAAAA,MAAJ;;qBAnDSh6G,EAQF+yH,IAAa,CARX/yH,aAAAA;2BAQc+yH,IAAaoC,EAAQ73H,MAArBy1H,SAAhBA,GAAgBA;;6BAA+BA;EAiDxDmC,oBAAkBh/G,OAAlBg/G,eAAAA;EAGF,6BAAMrX,EAAatD,UAAbsD,CAAwB9D,CAAxB8D,EAA+B8U,CAA/B9U,EAAN;EACA,yBADAxhG,MAAAA,IACI29F,EAAM6Z,aAAN7Z,eAAAA,MAAJ;;aA3EFvkF,EAKSskF,IAAQuD,CALjB7nF,aAAAA;mBAK+BskF,IAAQsD,CAARtD,SAAtBA,GAAsBA;;qBAAkBA;EA0EjD,qBAAM8D,EAAajD,UAAbiD,GAAN;EAEA,iBAFApoF,MAAAA,QAEMukF,EAAMoC,OAANpC,CAAc8Z,QAAd9Z,GAAN;EACA,iBADAvkF,MAAAA,QACOukF,EAAMoC,QAAb;;;EAGF,oBAAA,CAGIpC,CAHJ,EAGgBp8G,CAHhB,EAII0W,CAJJ,EAKI4rF,CALJ;4BAKIA;;;EACF,cAAI8Z,EAAM6X,UAAV,EACE,MAAM,IAAIp1H,KAAJ,CACF,8DADE,CAAN,CAGFu9G,EAAM6X,UAAN7X,IAAmB,CAAnBA,aAAAA;EAwBE,uCAfAwb,eADM/lG,IAAgC,QAApBywE,EAAOzwE,SAAa,GAAO,EAAP,GAAYywE,EAAOzwE,SACzD+lG,GAIMC,IACFzb,EAAM0b,mBAAN1b,CACIp8G,CADJo8G,EACO1lG,CADP0lG,GACU,CADVA,EACiBvqF,CADjBuqF,GAEJrlG,IAAS8gH,EAAiB,CAAjBA,GACTE,IAAUF,EAAiB,CAAjBA,GAKNhY,KAAe,GACfqX,YACyB,QAAzB50B,EAAO4xB,cAAkB,IAAQ5xB,EAAO4xB,cAAP5xB,CAAsB5iG,MAAtB4iG,GAA+B,CAApE,EAAuE;EAErE,gBADAud,KAAe,CAAfA,EACqC,MAAjCvd,EAAO4xB,cAAP5xB,CAAsB5iG,MAA1B,EAIO,MAAqC,MAAjC4iG,EAAO4xB,cAAP5xB,CAAsB5iG,MAAW,GACpC,IAAIuqG,mBAAJ,CACF,+DADE,CADoC,GAIpC,IAAID,UAAJ,CACF,4GAEG1H,EAAO4xB,cAFV,iBADE,CAJD,CAFL8D,IAAY11B,EAAO4xB,cAAP5xB,CAAsB,CAAtBA,CAAZ01B,EACAC,IAAY31B,EAAO4xB,cAAP5xB,CAAsB,CAAtBA,CADZ01B,EAYIE,IAAkB9b,EAAM0b,mBAAN1b,CACI4b,CADJ5b,EACe6b,CADf7b,GAC0B,CAD1BA,EAEIvqF,CAFJuqF,CAZtB4b,EAeFG,IAAOD,EAAgB,CAAhBA,CAfLF,EAgBFI,IAAOF,EAAgB,CAAhBA,CAhBLF,EAoBFd,IAASiB,EAAK1rH,MAAL0rH,CAAYC,CAAZD,CApBP
H;aAJJ,MA2B8B,QAA1B11B,EAAO0xB,eAAmB,IAAQ1xB,EAAO0xB,eAAP1xB,GAAyB,CAAjC,IAC1BA,EAAO0xB,eAAP1xB,GAAyB,CADC,IAE5Bud,KAAe,CAAfA,EAEMwY,IACFx4H,KAAKkC,KAALlC,CAAWkX,EAAO,CAAPA,EAAUxV,KAAVwV,CAAgB,CAAhBA,KAAsB,IAAIurF,EAAO0xB,eAAjCj9G,CAAXlX,CAHJggH,EAIMyY,IAAoBvhH,EAAO,CAAPA,EAAUxV,KAAVwV,CAAgB,CAAhBA,CAJ1B8oG,EAKAsY,IAAOI,YAAYxhH,CAAZwhH,EAAoBF,CAApBE,EAA6BD,CAA7BC,CALP1Y,EAMA9oG,IAASwhH,YAAYxhH,CAAZwhH,EAAoB,CAApBA,EAAuBF,CAAvBE,CANT1Y,EAOAuY,IAAOG,YAAYR,CAAZQ,EAAqBF,CAArBE,EAA8BD,CAA9BC,CAPP1Y,EAQAkY,IAAUQ,YAAYR,CAAZQ,EAAqB,CAArBA,EAAwBF,CAAxBE,CARV1Y,EAWAqX,IAASiB,EAAK1rH,MAAL0rH,CAAYC,CAAZD,CAbmB,IAgBO,QAA1B71B,EAAO60B,eAAmB,KACnCtX,KAAe,CADoB,CAhBP,CAsDlB,OAjCNmX,IAAMjgH,EAAOtK,MAAPsK,CAAcghH,CAAdhhH,CAANigH,EAEN5a,EAAMoK,gCAANpK,EAFM4a,EAgBAxC,IAAgBpY,EAAMqY,iBAANrY,EAhBhB4a,EAiBAtC,IAAYtY,EAAMuY,sBAANvY,EAjBZ4a,EAmBFwB,UAnBExB,EAoBFlX,UApBEkX,EAqBFnX,KACFzD,EAAMqc,gBAANrc,IACAoc,IAAcpc,EAAMga,YADpBha,EAEA0D,IACI4U,EAAUvxH,KAAVuxH,GAAkBjoH,MAAlBioH,CAAyBA,EAAUttH,GAAVstH,CAAc,UAAAzyH,CAAA;EAAK,mBAAA,SAASA,CAAT;aAAnByyH,CAAzBA,CAJF7U,KAMF2Y,IAAc,IAAdA,EACAtB,MADAsB,EAEA1Y,IAAkB4U,EAAUvxH,KAAVuxH,EARhB7U,CArBEmX,EAgCA3a,IAAYuY,qBAAqBtyB,EAAO+Z,SAA5BuY,CAhCZoC,MAiCY0B,QACdtc,CADcsc,EACPlE,CADOkE,EACQ1B,CADR0B,EACahE,CADbgE,EACwB7mG,CADxB6mG,EACmCp2B,EAAOmd,MAD1CiZ,EAEdp2B,EAAOz7F,OAFO6xH,EAEErc,CAFFqc,EAEaF,CAFbE,EAE0BxB,CAF1BwB,EAEkCp2B,EAAOlgG,OAFzCs2H,EAGd5Y,CAHc4Y,EAGGp2B,EAAOod,YAHVgZ,EAGwB,IAHxBA,EAG8B,IAH9BA,EAGoCp2B,EAAO2a,UAH3Cyb,EAAN;EAKZ,iBALM7zE,IAAMpmC,MAAAA,EAANomC,EAINu3D,EAAM6X,UAAN7X,IAAmB,CAJbv3D,MAKCA,EAAP;mBAEAu3D,EAAM6X,UAAN7X,IAAmB,CAAnBA,EAEAuc,kBAAkB5hH,CAAlB4hH,EAA0B34H,CAA1B24H,CAFAvc,EAGAuc,kBAAkBZ,CAAlBY,EAA2BjiH,CAA3BiiH,CAHAvc,EAIAuc,kBAAkBR,CAAlBQ,EAAoCX,CAApCW,CAJAvc,EAKAuc,kBAAkBP,CAAlBO,EAAoCV,CAApCU,CALAvc;;;;EAgBJ,oCAAA,CAA2ChvF,CAA3C;EACE,MAAMmoG,MAAN,CACInoG,aAAmB1jB,MAAnB0jB,KACFA,KAAWA,EADTA,EAKJ,KAAK,IAAI/sB,IAAI,CAAb,EAAgBA,IAAI+sB,EAAQ1tB,MAA5B,IAAsCW,CAAtC,EAAyC;EACvC,QAAMkY,IAAS6U,EAAQ/sB,CAAR+sB,CAAf,CACA,IAAoB,MAAhB7U,EAAO7T,IAAX,EACE6wH,EAAKj0H,IAALi0H,CAAUjqH,aAAWiN,CAAXjN,EAAmB,CAAnBA,CAAViqH,EADF,KAEO;EAAA,UAAoB,MAAhBh9G,EAAO7T,IAAX,EACL,MAAM,IAAI7F,KAAJ,CACF,uEADE,CAAN,CAIA02H,EAAKj0H,IAALi0H,CAAUh9G,CAAVg9G;;EAGJ,UAAOA,CAAP;EAcF,2BAAA,CACInoG,CADJ,EAEIwrG,CAFJ;EAGE,MAAe,QAAXxrG,CAAJ,EAAA;EAGA,QAAMyrG,MAAN,CACA,IAAID,aAAsBlvH,MAA1B,EACEmvH,EAAav3H,IAAbu3H,CAAkBD,EAAW3uH,EAA7B4uH,EADF,KAEO,IAAI13H,MAAMC,OAAND,CAAcy3H,CAAdz3H,CAAJ,EACLy3H,EAAWlzH,OAAXkzH,CAAmB,UAAAp6G,CAAA;EAAK,aAAAq6G,EAAav3H,IAAbu3H,CAAkBr6G,EAAEvU,EAApB4uH,CAAA;OAAxBD,EADK,KAEA,IAAkB,QAAdA,CAAJ,EAEL,KAAK,IAAM5oC,CAAX,IAAmB4oC,CAAnB,EAA+B;EAC7B,UAAME,IAAYF,EAAW5oC,CAAX4oC,CAAlB,CACAC,EAAav3H,IAAbu3H,CAAkBC,EAAU7uH,EAA5B4uH;EAIJ,SAAME,MAAN,CACA,IAAI3rG,aAAmB1jB,MAAvB,GAC4C,MAAtCmvH,EAAajyG,OAAbiyG,CAAqBzrG,EAAQnjB,EAA7B4uH,KACFE,EAAiBz3H,IAAjBy3H,CAAsB3rG,CAAtB2rG,EAFJ,KAIO,IAAI53H,MAAMC,OAAND,CAAcisB,CAAdjsB,CAAJ,EACLisB,EAAQ1nB,OAAR0nB,CAAgB,UAAA5O,CAAA;SACsB,MAAhCq6G,EAAajyG,OAAbiyG,CAAqBr6G,EAAEvU,EAAvB4uH,KACFE,EAAiBz3H,IAAjBy3H,CAAsBv6G,CAAtBu6G;OAFJ3rG,EADK,KAMA,IAAe,QAAXA,CAAJ,EAEL,KAAK,IAAMijE,CAAX,IAAmBjjE,CAAnB,EAA4B;EAC1B,UAAM7U,IAAS6U,EAAQijE,CAARjjE,CAAf,EACyC,MAArCyrG,EAAajyG,OAAbiyG,CAAqBtgH,EAAOtO,EAA5B4uH,KACFE,EAAiBz3H,IAAjBy3H,CAAsBxgH,CAAtBwgH;EAKNA,OAAiBrzH,OAAjBqzH,CAAyB,UAAAv6G,CAAA;EAClBA,QAAEvT,UAAFuT,IACHA,EAAElG,OAAFkG,EADGA;OADPu6G;;yBCljB2B/4H;EAE3B,SAAOA,aAAa0J,MAApB;EAMF,qBAAA,CAA4B1J,CAA5B;EAEE,SAAOmB,MAAMC,OAAND,CAAcnB,CAAdmB,CAAP;EAMF,oBAAA,CAA2BnB,CAA3B;EAEE,UAAQg5H,aAAah5H,CAAbg5H,MAAoBC,YAAYj5H,CAAZi5H,CAA5B;EAcF,8BAAA,CACI3uH,CADJ,EAC
yD4uH,CADzD,EAEI9xG,CAFJ,EAEsB+xG,CAFtB,EAE6CC,CAF7C;EAGE,uBADoBD,0BAAuBC,SAC9B,QAATF,CAAS,IAAyB,MAAjBA,EAAMx5H,MAA3B,EAAyC;EAGvC,QAAY,QAAR4K,CAAJ,EAAkB;EAChB,UAAI+uH,KAAoB,CAAxB,CACA,IAAIJ,YAAY3uH,CAAZ2uH,KAAsB3uH,EAAkB5K,MAAlB4K,GAA2B,CAArD,EACE+uH,KAAoB,CAApBA,CADF,KAEO,IAAIC,WAAWhvH,CAAXgvH,CAAJ;EACL,aAAK,IAAMz9G,CAAX,IAAkBvR,CAAlB,EACE,IAAIA,EAAKshG,cAALthG,CAAoBuR,CAApBvR,CAAJ,EAA8B;EAC5B+uH,eAAoB,CAApBA,CACA;;SAJC,MASLA,KAAoB,CAApBA,CAEF,IAAIA,CAAJ,EACE,MAAM,IAAIrvB,UAAJ,CACF,+BAA6BovB,CAA7B,gCAAA,GACW9uH,CAFT,CAAN;EAKJ,cAAA;EAEF,OAAY,QAARA,CAAJ,EACE,OAAO4uH,EAAM9xH,GAAN8xH,CAAU,UAAAr1H,CAAA;EAAQ,WAAA,IAAA;KAAlBq1H,CAAP,CAGF,IAAIxoD,CAAJ,CACA,IAAI4oD,WAAWhvH,CAAXgvH,CAAJ,EAAsB;EACpBhvH,QAAOA,CAAPA,EACAomE,MADApmE,CAEA,KAAmB,SAAA,EAAAivH,KAAnB,EAAmBjwH,YAAnB,EAAmBA,GAAnB;EAAK,UAAM0mF,QAAN,CACH,IAAkB,QAAd1lF,EAAK0lF,CAAL1lF,CAAJ,EACE,MAAM,IAAI0/F,UAAJ,CACF,2BAAyBha,CAAzB,mCAAA,GACGkpC,CAFD,CAAN,CAIFxoD,EAAOpvE,IAAPovE,CAAYpmE,EAAK0lF,CAAL1lF,CAAZomE;;KATJ,MAWO,IAAIuoD,YAAY3uH,CAAZ2uH,CAAJ,EAAuB;EAE5B,SADA3uH,IAAOA,GACE5K,WAAWw5H,EAAMx5H,MAA1B,EACE,MAAM,IAAIsqG,UAAJ,CACF,+BAA6BovB,CAA7B,mHAAA,GAEmCF,EAAMx5H,MAFzC,kEAAA,GAGgD4K,CAJ9C,CAAN,CAMFomE,IAASpmE,CAATomE;KATK,MAUA;EAEL,QADApmE,IAAOA,CAAPA,EACI4uH,EAAMx5H,MAANw5H,GAAe,CAAnB,EACE,MAAM,IAAIlvB,UAAJ,CACF,eAAaovB,CAAb,cAAA,GAAwCF,EAAMx5H,MAA9C,wEAAA,GAEI4K,EAAK/I,KAHP,CAAN,CAKFmvE,KAAUpmE,EAAVomE;EAMF,OAHAA,IAAS8oD,2BAA2B9oD,CAA3B8oD,CAAT9oD,EAGc,QAAVtpD,CAAJ,EACE,KAAK,IAAI/mB,IAAI,CAAb,EAAgBA,IAAI64H,EAAMx5H,MAA1B,IAAoCW,CAApC,EACE,IAAiB,QAAb+mB,EAAO/mB,CAAP+mB,CAAJ,EAAA;EAGA,QAAM5nB,IAAQkxE,EAAOrwE,CAAPqwE,CAAd,CACA,IAAIlxE,EAAM+B,KAAN/B,CAAYE,MAAZF,KAAuB4nB,EAAO/mB,CAAP+mB,EAAU1nB,MAArC,EACE,MAAM,IAAIsqG,UAAJ,CACF,yBAAuBovB,CAAvB,gBAAA,GAAoDF,EAAM74H,CAAN64H,CAApD,cAAA,GACW9xG,EAAO/mB,CAAP+mB,EAAU1nB,MADrB,6CAAA,GAESF,EAAM+B,KAHb,CAAN,CAKF,KAAK,IAAI+B,IAAI,CAAb,EAAgBA,IAAI8jB,EAAO/mB,CAAP+mB,EAAU1nB,MAA9B,IAAwC4D,CAAxC,EACE,IAAU,MAANA,CAAM,IAAM61H,CAAhB,EAAA;EAIA,UAAM/mH,IAAM5S,EAAM+B,KAAN/B,CAAY8D,CAAZ9D,CAAZ;EAAA,UACMi6H,IAASryG,EAAO/mB,CAAP+mB,EAAU9jB,CAAV8jB,CADf,CAEA,IAAc,QAAVqyG,CAAU,IAAQA,KAAU,CAAlB,IAAuBrnH,MAAQqnH,CAA7C,EACE,MAAM,IAAIzvB,UAAJ,CACF,yBAAuBovB,CAAvB,gBAAA,GAAoDF,EAAM74H,CAAN64H,CAApD,qBAAA,GACkB9xG,EAAO/mB,CAAP+mB,CADlB,kCAAA,GAEI5nB,EAAM+B,KAFV,OADE,CAAN;;EAQR,UAAOmvE,CAAP;EAUF,2BAAA,CACI35D,CADJ,EACsBghH,CADtB,EACyCljD,CADzC;EAEE,MAAM6kD,IAAOC,OAAO5iH,EAAO3P,GAAP2P,CAAW,UAAAE,CAAA;EAAS,WAAAA,EAAM1V,KAAN0V,CAAY,CAAZA,CAAA;KAApBF,CAAP4iH,CAAb,CACAD,EAAKt2G,IAALs2G,GACA,IAAME,IAAOD,OAAO5B,EAAQ3wH,GAAR2wH,CAAY,UAAAp8B,CAAA;EAAU,WAAAA,EAAOp6F,KAAPo6F,CAAa,CAAbA,CAAA;KAAtBo8B,CAAP4B,CAAb,CAGA,IAFAC,EAAKx2G,IAALw2G,IAEIF,EAAKh6H,MAALg6H,GAAc,CAAlB,EACE,MAAM,IAAI1vB,UAAJ,CACF,qFAEGxrE,KAAKE,SAALF,CAAeznB,EAAO3P,GAAP2P,CAAW,UAAAE,CAAA;EAAS,WAAAA,EAAM1V,KAAN;KAApBwV,CAAfynB,CAHD,CAAN,CAKF,IAAIo7F,EAAKl6H,MAALk6H,GAAc,CAAlB,EACE,MAAM,IAAI5vB,UAAJ,CACF,sFAEGxrE,KAAKE,SAALF,CAAeu5F,EAAQ3wH,GAAR2wH,CAAY,UAAAp8B,CAAA;EAAU,WAAAA,EAAOp6F,KAAP;KAAtBw2H,CAAfv5F,CAHD,CAAN,CAKF,IAAIk7F,EAAKh6H,MAALg6H,GAAc,CAAdA,IAAmBE,EAAKl6H,MAALk6H,GAAc,CAAjCF,KAAuCtoB,KAAKnwG,WAALmwG,CAAiBsoB,CAAjBtoB,EAAuBwoB,CAAvBxoB,CAA3C,EACE,MAAM,IAAIpH,UAAJ,CACF,mFACkB0vB,EAAK,CAALA,CADlB,0BAAA,GACiDE,EAAK,CAALA,CADjD,uBADE,CAAN;EAgBJ,yCAAA,CACI7B,CADJ,EACuB8B,CADvB,EACkD1jB,CADlD;EAOE,OAJA,IAAM2jB,KACJC,oBAAyBC,oBACzBC,wBAFF,EAIS55H,IAAI,CAAb,EAAgBA,IAAI03H,EAAQr4H,MAA5B,IAAsCW,CAAtC,EAAyC;EACvC,QAAMqW,IAAIqhH,EAAQ13H,CAAR03H,CAAV;EAAA,QACMmC,IAAOL,EAAQx5H,CAARw5H,CADb;EAAA,QAEMt4H,IAAQ40G,EAAa91G,CAAb81G,CAFd,CAGA,IAAY,QAAR+jB,CAAJ,EAAA;EAGA,UAAIA,M
AASD,uBAATC,IACkC,MAAhCxjH,EAAEnV,KAAFmV,CAAQA,EAAEnV,KAAFmV,CAAQhX,MAARgX,GAAiB,CAAzBA,CADN,EAEI,MAAM,IAAIszF,UAAJ,CACF,6CAA2CtzF,EAAEnV,KAA7C,6JADE,CAAN,CAQJ,KAAiC,MAA7Bu4H,EAAUlzG,OAAVkzG,CAAkBI,CAAlBJ,CAAJ,EAGE,KAFA,IAAMK,IAAezjH,EAAEnV,KAAFmV,CAAQvT,KAARuT,CAAc,CAAdA,CAArB,EACM0jH,IAAc74H,EAAM4B,KAAN5B,CAAY,CAAZA,CADpB,EAES+B,IAAI,CAAb,EAAgBA,IAAI62H,EAAaz6H,MAAjC,IAA2C4D,CAA3C,EAA8C;EAC5C,YAAM+2H,IAAYF,EAAa72H,CAAb62H,CAAlB;EAAA,YACM1mG,IAAS2mG,EAAY92H,CAAZ82H,CADf,CAEA,IAAc,QAAV3mG,CAAU,IAAQ4mG,MAAc5mG,CAApC,EACE,MAAM,IAAIu2E,UAAJ,CACF,gCAA8BtzF,EAAEnV,KAAhC,wCAAA,GACmBA,CADnB,6FADE,CAAN;;;;EAoCV,wBAAA,CACI+I,CADJ,EAC2B4uH,CAD3B,EAC4C9xG,CAD5C,EAEI+xG,CAFJ,EAE2BC,CAF3B;EAGE,MAAI1oD,CAAJ,CACA,qBAFEyoD,0BAAuBC,SAErBj4H,MAAMC,OAAND,CAAcmJ,CAAdnJ,CAAJ,EAAyB;EACvB,QAAImJ,EAAK5K,MAAL4K,KAAgB4uH,EAAMx5H,MAA1B,EACE,MAAM,IAAIsqG,UAAJ,CACF,+BAA6BovB,CAA7B,uHAAA,GAEuCF,EAAMx5H,MAF7C,iCAAA,GAGoB4K,EAAK5K,MAHzB,iBADE,CAAN,CAMFgxE,IAASpmE,CAATomE;KARF,MASO;EACL,QAAIwoD,EAAMx5H,MAANw5H,GAAe,CAAnB,EACE,MAAM,IAAIlvB,UAAJ,CACF,uBAAqBkvB,EAAMx5H,MAA3B,MAAA,GAAqC05H,CAArC,qEAAA,GAEG56F,KAAKE,SAALF,CAAel0B,EAAK/I,KAApBi9B,CAFH,MADE,CAAN,CAKFkyC,KAAUpmE,EAAVomE;EAGF,OAAc,QAAVtpD,CAAJ,EACE,KAAK,IAAI/mB,IAAI,CAAb,EAAgBA,IAAI64H,EAAMx5H,MAA1B,IAAoCW,CAApC,EACE,IAAiB,QAAb+mB,EAAO/mB,CAAP+mB,CAAJ,EAAA;EAGA,QAAM5nB,IAAQkxE,EAAOrwE,CAAPqwE,CAAd,CACA,IAAIlxE,EAAM+B,KAAN/B,CAAYE,MAAZF,KAAuB4nB,EAAO/mB,CAAP+mB,EAAU1nB,MAArC,EACE,MAAM,IAAIsqG,UAAJ,CACF,yBAAuBovB,CAAvB,gBAAA,GAAoDF,EAAM74H,CAAN64H,CAApD,cAAA,GACW9xG,EAAO/mB,CAAP+mB,EAAU1nB,MADrB,6CAAA,GAES8+B,KAAKE,SAALF,CAAeh/B,EAAM+B,KAArBi9B,CAHP,CAAN,CAKF,KAAK,IAAIl7B,IAAI,CAAb,EAAgBA,IAAI8jB,EAAO/mB,CAAP+mB,EAAU1nB,MAA9B,IAAwC4D,CAAxC,EACE,IAAU,MAANA,CAAM,IAAM61H,CAAhB,EAAA;EAGA,UAAM/mH,IAAM5S,EAAM+B,KAAN/B,CAAY8D,CAAZ9D,CAAZ;EAAA,UACMi6H,IAASryG,EAAO/mB,CAAP+mB,EAAU9jB,CAAV8jB,CADf,CAEA,IAAc,QAAVqyG,CAAU,IACRA,MAAWrnH,CADjB,EAEI,MAAM,IAAI43F,UAAJ,CACF,yBAAuBovB,CAAvB,gBAAA,GACGF,EAAM74H,CAAN64H,CADH,oBAAA,GAC6B16F,KAAKE,SAALF,CAAepX,EAAO/mB,CAAP+mB,CAAfoX,CAD7B,+BAAA,GAEwBA,KAAKE,SAALF,CAAeh/B,EAAM+B,KAArBi9B,CAFxB,MADE,CAAN;;;EAuBZ,wBAAA,CACI8/E,CADJ,EAEIkM,CAFJ;EAGE,MAAe,QAAXlM,CAAW,IAAQn9G,MAAMC,OAAND,CAAcm9G,CAAdn9G,KAA6C,MAAnBm9G,EAAQ5+G,MAAzD,EACE,OAAO8qH,EAAYpjH,GAAZojH,CAAgB,UAAA3mH,CAAA;EAAQ,aAAA;KAAxB2mH,CAAP,CAEF,IAAIrpH,MAAMC,OAAND,CAAcm9G,CAAdn9G,CAAJ,EAEE,OAAOqpH,EAAYpjH,GAAZojH,CAAgB,UAAA3mH,CAAA;EAAQ,WAAAy6G,CAAA;KAAxBkM,CAAP,CACK,IAAe,QAAXlM,CAAJ,EAAqB;EAG1B,SADA,IAAMgc,MAAN,OAAA,EACmBC,KAAnB,EAAmBjxH,YAAnB,EAAmBA,GAAnB;EAAK,UAAM+mF,QAAN;EAAA,UACCmqC,IACAlc,EAAQ1S,cAAR0S,CAAuBjuB,CAAvBiuB,IAA+BA,EAAQjuB,CAARiuB,CAA/BA,KAFD,CAGEn9G,MAAMC,OAAND,CAAcq5H,CAAdr5H,MACHq5H,KAAiBA,EADdr5H,GAGLm5H,EAAch5H,IAAdg5H,CAAmBE,CAAnBF,CAHKn5H;EAKP,YAAOm5H,CAAP;EAEA,SAAM,IAAI1f,SAAJ,CACF,kFACoB0D,CAFlB,CAAN;EAyEJ;EAiCE,YAAA,CAAYhc,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEE9P,EAAKguH,UAALhuH,IAAkB,CAAlBA;EAsmCJ,UAzoC2BkQ,aAAAA,GAAAA,GAwEzBskH,WAAAA,QAAAA,GAAA,UACI7U,CADJ,EACyBC,CADzB,EAEIC,CAFJ;EAKE,yBAHEA,IAEoDt/G,QAAQC,OACzDnB,KAAKyyG,KAAV,EACE,MAAM,IAAI/N,UAAJ,CACF,8KADE,CAAN,CAKF0wB,aAAap1H,IAAbo1H,EAAmB9U,CAAnB8U,EAA+B7U,CAA/B6U,EAA0C5U,CAA1C4U;KAnFuBvkH,EAiGzBskH,WAAAA,QAAAA,GAAA,UAAQn4B,CAAR;EAAA,gBAAA,CAME,IALmB,QAAfA,EAAO43B,IAAQ,KACjB53B,EAAO43B,IAAP53B,KADiB,GAGnBh9F,KAAK40H,IAAL50H,GAAYg9F,EAAO43B,IAHA,EAKa,mBAArB53B,EAAOyxB,SAAlB,EACEzuH,KAAKyuH,SAALzuH,GAAiBq1H,aAAwBr4B,EAAOyxB,SAA/B4G,CAAjBr1H,CADF,KAEO;EACL,YAAMg9F,EAAOyxB,SAAPzxB,YAA4BgC,UAAlC,EACE,MAAM,IAAI0F,UAAJ,CACF,6DADE,CAAN,CAGF1kG,KAAKyuH,SAALzuH,GAAiBg9F,EAAOyxB,SAAxBzuH;EAO
F,SAAIs1H,MAAJ,CACA,IAAKz5H,MAAMC,OAAND,CAAcmhG,EAAO43B,IAArB/4H,KAAqD,mBAAhBmhG,EAAO43B,IAA5C/4H,IACsB,qBAAhBmhG,EAAO43B,IADlB;EAmBO,UAAI/4H,MAAMC,OAAND,CAAcmhG,EAAO43B,IAArB/4H,CAAJ,EAAgC;EACrC,YAAImhG,EAAO43B,IAAP53B,CAAY5iG,MAAZ4iG,KAAuBh9F,KAAK6R,OAAL7R,CAAa5F,MAAxC,EACE,MAAM,IAAIsqG,UAAJ,CACF,6FAC+B1kG,KAAK6R,OAAL7R,CAAa5F,MAD5C,qCAAA,GAEuB4iG,EAAO43B,IAF9B,MADE,CAAN,CAKF,IAAMW,IAAYv4B,EAAO43B,IAAzB,CACAU,IAAgBC,EAAUzzH,GAAVyzH,CAAc,UAAAxzH,CAAA;EAAK,iBAAAyzH,IAAWzzH,CAAXyzH,CAAA;WAAnBD,CAAhBD;SARK,MASA;EACL,YAAMG,IAAeD,IAAWx4B,EAAO43B,IAAlBY,CAArB,CACAx1H,KAAK6R,OAAL7R,CAAa8B,GAAb9B,CAAiB,UAAA8wG,CAAA;EACfwkB,YAAct5H,IAAds5H,CAAmBG,CAAnBH;WADFt1H;;EA9BF,WACuC;EAErC,WAAK,IAAMwkH,CAAX,IADAxnB,EAAO43B,IAAP53B,GAAcA,EAAO43B,IAArB53B,EACmBA,EAAO43B,IAA1B,EACE,KAAwC,MAApC50H,KAAKklH,WAALllH,CAAiBshB,OAAjBthB,CAAyBwkH,CAAzBxkH,CAAJ,EACE,MAAM,IAAI0kG,UAAJ,CACF,wCAAsC8f,CAAtC,wCAAA,GACmBxkH,KAAKklH,WAFtB,CAAN,CAKJ,KAAK,IAAM6C,CAAX,IAAmB/nH,KAAKklH,WAAxB,EAC2B,QAArBloB,EAAO43B,IAAP53B,CAAY+qB,CAAZ/qB,CAAqB,IACvB97F,QAAQ8Z,IAAR9Z,CACI,aAAW6mH,CAAX,8HAAA,GAEmBA,CAFnB,qBADJ7mH,CADuB,EAMzBo0H,EAAct5H,IAAds5H,CAAmBE,IAAWx4B,EAAO43B,IAAP53B,CAAY+qB,CAAZ/qB,CAAXw4B,CAAnBF,CANyB;EAwB7Bt1H,UAAKs1H,aAALt1H,GAAqBs1H,CAArBt1H,EAEAA,KAAKqlH,eAALrlH,KAFAA,EAGAA,KAAK01H,gBAAL11H,KAHAA,EAIAA,KAAK21H,WAAL31H,KAJAA,CAKA,KAAK,IAAIjF,IAAI,CAAb,EAAgBA,IAAIiF,KAAK6R,OAAL7R,CAAa5F,MAAjC,IAA2CW,CAA3C,EAA8C;EAE5C,UAAMkB,IAAQ+D,KAAKulH,oBAALvlH,CAA0BjF,CAA1BiF,CAAd;EAAA,UACM41H,IAAO51H,KAAKklH,WAALllH,CAAiBjF,CAAjBiF,CADb,CAEAA,KAAKqlH,eAALrlH,CAAqBhE,IAArBgE,CAA0B41H,CAA1B51H,GACAA,KAAK01H,gBAAL11H,CAAsBhE,IAAtBgE,CAA2B/D,CAA3B+D,CADAA,EAEAA,KAAK21H,WAAL31H,CAAiBhE,IAAjBgE,CAAsBA,KAAKs1H,aAALt1H,CAAmBjF,CAAnBiF,CAAtBA,CAFAA;EAQF,SAAM61H,MAAN,CAGA71H,KAAKg5G,OAALh5G,GAAeg9F,EAAOgc,OAAtBh5G,EAEAA,KAAK0wH,YAAL1wH,IAAqB,OAFrBA,EAGAA,KAAK81H,cAAL91H,KAHAA,EASA2zG,UAAU,MAAVA,EAAkB;EAChB,WAAK,IAAI54G,IAAI,CAAb,EAAgBA,IAAI4F,EAAKkR,OAALlR,CAAavG,MAAjC,IAA2CW,CAA3C,EACE,KAAsC,MAAlC86H,EAAkBv0G,OAAlBu0G,CAA0B96H,CAA1B86H,CAAJ,EAAA;EAKA,YAAMhmD,IAAelvE,EAAK20H,aAAL30H,CAAmB5F,CAAnB4F,CAArB,CACIA,EAAKkR,OAALlR,CAAavG,MAAbuG,GAAsB,CAAtBA,KACFA,EAAKm1H,cAALn1H,CAAoB3E,IAApB2E,EAA0BkvE,GAAc90E,EAAxC4F,GACAA,EAAK+vH,YAAL/vH,CAAkB3E,IAAlB2E,CAAuBA,EAAKukH,WAALvkH,CAAiB5F,CAAjB4F,IAAsB,OAA7CA,CAFEA;;OARRgzG,CATA3zG,CA2BA,IAAMg1H,IAAgBe,eAAe/4B,EAAOgc,OAAtB+c,EAA+B/1H,KAAKklH,WAApC6Q,CAAtB,CAgBApiB,UAAU,QAAVA,EAAoB;EAClB,6BAAS54G;EACP,aAAsC,MAAlC86H,EAAkBv0G,OAAlBu0G,CAA0B96H,CAA1B86H,CAAJ,qBAOsB,UAAC7c,CAAD;EAOpB,eANA,IACIgd,CADJ,EAEIC,CAFJ,EAGIC,CAHJ,gBAMWC;EACT,iBACK,OADA,YAAY,OAAO,gBAAgB,MAAM70G,QAAQ60G,EAAtD,EACQ;EACN,kBAAMtoH,IAAclN,EAAK4kH,oBAAL5kH,CAA0B5F,CAA1B4F,CAApB,CAE4C,MAAxCkN,EAAYA,EAAYzT,MAAZyT,GAAqB,CAAjCA,CAAwC,IACxClN,EAAK20H,aAAL30H,CAAmB5F,CAAnB4F,MAA0B+zH,kBADc,IAGG,OAAxC,YAAY,OAAOpzG,QAAQ60G,KAC9BF,IAAQG,kBAC6C,OAA3C,gBAAgB,MAAM90G,QAAQ60G,OACxCF,IAAQI,qBANgC,GASxC11H,EAAK20H,aAAL30H,CAAmB5F,CAAnB4F,MACA21H,6BADA31H,IAI2C,OAAxC,YAAY,OAAO2gB,QAAQ60G,KAC9BF,IAAQM,6BAC6C,OAA3C,gBAAgB,MAAMj1G,QAAQ60G,OACxCF,IAAQO,gCAPR71H,IAW2C,OAAxC,YAAY,OAAO2gB,QAAQ60G,KAC9BF,IAAQQ,uBAC6C,OAA3C,gBAAgB,MAAMn1G,QAAQ60G,OACxCF,IAAQS,0BAvBgC,CA0B5C,IAAIniC,UAAJ,EAC6C,OAAxC,YAAY,OAAOjzE,QAAQ60G,KAC9B5hC,IAAS,SAC4C,OAA3C,gBAAgB,MAAMjzE,QAAQ60G,OACxC5hC,IAAS,OAGX2hC,IAAmBD,GACnBD,IA7CqB,KA6CWzhC;eAtClC,MAuCO;EACL,kBAAMoiC,IAAWC,MAAYT,CAAZS,CAAjB,CAEAV,IAAmBS,CAAnBT,EACAF,IAlDqB,KAkDWG,CADhCD;EAKF,iBAAIW,CAAJ,CACAljB,UAAUqiB,CAAVriB,EAAsB;EACpBkjB,kBAAeX,CAAfW;eADFljB,GA1EJ,UAACmjB,CAAD,EAAsBd,CAAtB,EACCe,CADD;EAEMp2H,gBAAKukH,WAALvkH,CAAiB
vG,MAAjBuG,GAA0B,CAA1BA,KACFq1H,IAAar1H,EAAKukH,WAALvkH,CAAiBm2H,CAAjBn2H,IAAgC,GAAhCA,GAAsCq1H,CADjDr1H,GAGJA,EAAK+vH,YAAL/vH,CAAkB3E,IAAlB2E,CAAuBq1H,CAAvBr1H,CAHIA,EAIJA,EAAKm1H,cAALn1H,CAAoB3E,IAApB2E,EAA0Bo2H,GAAcD,EAAxCn2H,CAJIA;EA2EFq2H,aA7EJ,CA6EiBj8H,CA7EjB,EA6EoBi7H,CA7EpB,EA6EgCa,CA7EhC,CA0EIljB;aAvDF,OAAA,EAMqBsjB,KAArB,EAAqBjzH,YAArB,EAAqBA,GAArB;EAwDFkzH,SA/DsB,CAJAlC,EAAcj6H,CAAdi6H,CAIA;WARfj6H,IAAI,CAAb,EAAgBA,IAAI4F,EAAKkR,OAALlR,CAAavG,MAAjC,IAA2CW,CAA3C,IAASA;OADX44G,GA+EA3zG,KAAKuhH,yBAALvhH,GAAiCA,KAAK0yG,gBA/EtCiB;KAxNuB9iG,EAmTfskH,WAAAA,iCAAAA,GAAV;EACwC,YAAlCn1H,KAAKuhH,yBAA6B,IAGlCvhH,KAAK0yG,gBAAL1yG,CAAsB5F,MAAtB4F,KACAA,KAAKuhH,yBAALvhH,CAA+B5F,MAJG,IAKpC8G,QAAQ8Z,IAAR9Z,CACI,mJADJA,CALoC;KApTf2P,EAgWzBskH,WAAAA,SAAAA,GAAA,UACIz6H,CADJ,EACwB0W,CADxB,EAEI4rF,CAFJ;uBAEIA,QACF,IAAMzwE,IAAgC,QAApBywE,EAAOzwE,SAAa,GAAO,EAAP,GAAYywE,EAAOzwE,SAAzD,CACA+lG,eAAe/lG,CAAf+lG,EAIA,IAAMC,IAAmBvyH,KAAKwyH,mBAALxyH,CAAyBtF,CAAzBsF,EAA4BoR,CAA5BpR,GAA+B,CAA/BA,EAAqCusB,CAArCvsB,CAAzB,CACA;EAGE,UAAM0xH,IAAMa,EAAiB,CAAjBA,EAAoBprH,MAApBorH,CAA2BA,EAAiB,CAAjBA,CAA3BA,CAAZ,CACAvyH,KAAKmzH,gBAALnzH,GACA,IAAMjB,IAAIiB,KAAK8wH,YAAf,CAGA,OAAOS,iBADHvxH,KAAKqyH,QAALryH,CAAcjB,CAAdiB,EAAiB0xH,CAAjB1xH,EAAsBusB,CAAtBvsB,EAAiCg9F,EAAOz7F,OAAxCvB,EAAiDg9F,EAAO6d,KAAxD76G,CACGuxH,CAAP;OARF;EAUE8B,wBAAkBd,EAAiB,CAAjBA,CAAlBc,EAAuC34H,CAAvC24H,GACAA,kBAAkBd,EAAiB,CAAjBA,CAAlBc,EAAuCjiH,CAAvCiiH,CADAA;;KAnXqBxiH,EA+YnBskH,WAAAA,gBAAAA,GAAN,UACI7G,CADJ,EAEItxB,CAFJ;;;EAIE,eADAh9F,KAAKmzH,gBAALnzH,QACOuwH,gBAAgBvwH,IAAhBuwH,EAAsBjC,CAAtBiC,EAA+BvzB,CAA/BuzB,EAAP;;;KAnZuB1/G,EAgajBskH,WAAAA,gBAAAA,GAAR,UACIzD,CADJ,EAC0BnlG,CAD1B,EAC8CsuF,CAD9C,EAEIsc,CAFJ;EAGE,QAAI5uG,CAAJ,CACA,qBAFE4uG,cAEW,QAATtc,CAAJ;EAEE,UADAtyF,IAAa,IAAbA,EACiB,QAAbgE,CAAJ,EACE,MAAM,IAAIm4E,UAAJ,CACF,QAAMyyB,CAAN,kEAAA,GACmB5qG,CAFjB,CAAN;OAHJ,MAOO;EAAA,UAAW,QAAPmlG,CAAJ,EAOL,MAAM,IAAIhtB,UAAJ,CACF,2DACGyyB,CADH,yBADE,CAAN,CALE5uG,IADE1sB,MAAMC,OAAND,CAAc61H,CAAd71H,IACW61H,EAAI,CAAJA,EAAOz1H,KAAPy1H,CAAa,CAAbA,CADX71H,GAGW61H,EAAIz1H,KAAJy1H,CAAU,CAAVA,CAFbnpG;EASJ,YAAOA,CAAP;KAtbuB1X,EAgczBskH,WAAAA,QAAAA,GAAA,UAAQ1jH,CAAR,EAAgDI,CAAhD;EAEE,QAAIhW,MAAMC,OAAND,CAAcgW,CAAdhW,KAA6C,MAAnBgW,EAAQzX,MAAtC,EACE,MAAM,IAAIsqG,UAAJ,CACF,oDADE,CAAN,CAIF,IAAM0yB,IAAiBv7H,MAAMC,OAAND,CAAcgW,CAAdhW,CAAvB;EAAA,QACMqpH,IAAekS,IAAiBvlH,CAAjBulH,IACkBvlH,EAFvC;EAAA,QAGMk8G,IAAwB/tH,KAAKq3H,uBAALr3H,CAA6BklH,CAA7BllH,CAH9B;EAAA,QAMMitH,IAAW,IAAIN,QAAJ,EANjB,CAUA,IAHIl7G,aAAkBrN,MAAlBqN,KACFA,KAAUA,EADRA,GAGA5V,MAAMC,OAAND,CAAc4V,CAAd5V,CAAJ,EAA2B;EACzB,UAAK4V,EAAoBrX,MAApBqX,KAA+BzR,KAAKyR,MAALzR,CAAY5F,MAAhD,EACE,MAAM,IAAIsqG,UAAJ,CACF,oCAAmCjzF,EAAoBrX,MAAvD,0DAAA,GAEI4F,KAAKyR,MAALzR,CAAY5F,MAFhB,OADE,CAAN,CAKF,KAAK,IAAIW,IAAI,CAAb,EAAgBA,IAAIiF,KAAKyR,MAALzR,CAAY5F,MAAhC,IAA0CW,CAA1C,EACEkyH,EAAS5kH,GAAT4kH,CAAajtH,KAAKyR,MAALzR,CAAYjF,CAAZiF,CAAbitH,EAA8Bx7G,EAAoB1W,CAApB0W,CAA9Bw7G;OARJ,MAWE,KAAoB,SAAA,EAAA9zG,IAAAnZ,KAAKyR,MAAzB,EAAoBzN,YAApB,EAAoBA,GAApB;EAAK,UAAM2N,QAAN;EAAA,UACG2lH,IAAe7lH,EAA0BE,EAAMpT,IAAhCkT,CADlB,CAEH,IAAmB,QAAf6lH,CAAJ,EACE,MAAM,IAAI5yB,UAAJ,CACF,gDAA8C/yF,EAAMpT,IADlD,CAAN,CAGF0uH,EAAS5kH,GAAT4kH,CAAat7G,CAAbs7G,EAAoBqK,CAApBrK;EAKJ,SAAMsK,IAAiBC,QAAQzJ,CAARyJ,EAA+BvK,CAA/BuK,CAAvB,CACA,OAAOJ,IAAiBG,CAAjBH,GAAkCG,EAAe,CAAfA,CAAzC;KAxeuB1mH,EA8ejBskH,WAAAA,wBAAAA,GAAR,UAAgCsC,CAAhC;EAKE,SAHA,IAAM1J,IACF2J,aAAa,IAAbA,EAAmBD,EAAoBr9H,MAAvCs9H,CADJ,EAEIC,IAAmBF,EAAoBr9H,MAF3C,OAAA,EAGoB+e,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EAKE,WALG,IAAM8sG,QAAN,EACG+c,IAAiChyH,MAAMC,OAAND,CAAci1G,EAAMh/F,MAApBjW,IA
CnCi1G,EAAMh/F,MAD6BjW,IAElCi1G,EAAMh/F,OAHR,EAIG8lH,IAAmB/J,EAAa/rH,GAAb+rH,CAAiB,UAAA/7G,CAAA;EAAU,eAAAA,EAAOvT,IAAP;SAA3BsvH,CAJtB,EAKM9yH,IAAI,CAAb,EAAgBA,IAAI08H,EAAoBr9H,MAAxC,IAAkDW,CAAlD,EAAqD;EACnD,YAAMT,IAAQs9H,EAAiBt2G,OAAjBs2G,CAAyBH,EAAoB18H,CAApB08H,CAAzBG,CAAd,CAKA,KAJe,MAAXt9H,MACFyzH,EAAsBhzH,CAAtBgzH,IAA2BF,EAAavzH,CAAbuzH,CAA3BE,EACA4J,MAEuB,MAArBA,CAAJ,EACE;EAGJ,WAAyB,MAArBA,CAAJ,EACE;EAIJ,SAAIA,IAAmB,CAAvB,EAA0B;EACxB,UAAME,MAAN,CAMA,MALA9J,EAAsB3tH,OAAtB2tH,CAA8B,UAAC96G,CAAD,EAASlY,CAAT;EACd,gBAAVkY,CAAU,IACZ4kH,EAAe77H,IAAf67H,CAAoBJ,EAAoB18H,CAApB08H,CAApBI,CADY;SADhB9J,GAKM,IAAIrpB,UAAJ,CACF,qDACGxrE,KAAKE,SAALF,CAAe2+F,CAAf3+F,CAFD,CAAN;EAIF,YAAO60F,CAAP;KAlhBuBl9G,EAkiBjBskH,WAAAA,YAAAA,GAAR,UAAoBzD,CAApB,EAA0CnlG,CAA1C,EAA0DhrB,CAA1D;EAAA,gBAAA,CAEE,wBAFwCgrB,0BAAgBhrB,SAEjD4vH,KAAS;EACd,UAAM5oG,IAAa5nB,EAAKmxH,eAALnxH,CAAqB+wH,CAArB/wH,CAAnB,CACA,IAAIY,CAAJ,EACE,MAAM,IAAIojG,mBAAJ,CACF,+CADE,CAAN,CAYF,KAHA,IAAMnpE,IAAU02F,YAAY3pG,CAAZ2pG,EAAwB3lG,CAAxB2lG,CAAhB,EACMjC,MADN,gBAGSJ;EACP,YAAMqB,IAAYC,KAAS;EACzB,cAAMM,IAAaj2F,EAAQq0F,CAARr0F,EAAoB,CAApBA,CAAnB;EAAA,cACMi+E,IAAWj+E,EAAQq0F,CAARr0F,EAAoB,CAApBA,CADjB;EAAA,cAIM42F,IAAWa,YAAYvB,CAAZuB,EAAiBxB,CAAjBwB,EAA6BxZ,CAA7BwZ,CAJjB;EAAA,cAOMvG,MAPN,CAQA,IAAI7wH,MAAMC,OAAND,CAAcu2H,CAAdv2H,CAAJ,EACE,KAAK,IAAId,IAAI,CAAb,EAAgBA,IAAIq3H,EAASh4H,MAA7B,IAAuCW,CAAvC,EACE2xH,EAAM1wH,IAAN0wH,GAAYn2G,KAAK5V,EAAK8Q,MAAL9Q,CAAY5F,CAAZ4F,GAAgBoD,OAAOquH,EAASr3H,CAATq3H,GAAxC1F,EAFJ,KAKEA,EAAM1wH,IAAN0wH,GAAYn2G,KAAK5V,EAAK8Q,MAAL9Q,CAAY,CAAZA,GAAgBoD,OAAOquH,GAAxC1F,EAEF,IAAMO,IAAW,IAAIN,QAAJ,CAAaD,CAAb,CAAjB,CACA,OAAO8K,QAAQ72H,EAAKkR,OAAb2lH,EAAsBvK,CAAtBuK,CAAP;WAjBgBrG,CAAlB,CAmBA,IAAmB,MAAftB,CAAJ,EAEE,KAAuB,SAAA,EAAAiI,KAAvB,EAAuB9zH,YAAvB,EAAuBA,GAAvB;EAAK,cAAMqtH,QAAN,CACHpB,EAAKj0H,IAALi0H,CAAUoB,CAAVpB;WAHJ,MAME,KAAK,IAAIl1H,IAAI,CAAb,EAAgBA,IAAIm2H,EAAU92H,MAA9B,IAAwCW,CAAxC,EACEk1H,EAAKl1H,CAALk1H,IAAU8H,qBAAuB9H,EAAKl1H,CAALk1H,CAAvB8H,EAAgC7G,EAAUn2H,CAAVm2H,CAAhC6G,CAAV9H;SA9BN,EAGSJ,IAAa,CAAtB,EAAyBA,IAAar0F,EAAQphC,MAA9C,IAAwDy1H,CAAxD,IAASA,GA+BT,OAAO0B,iBAAiBtB,CAAjBsB,CAAP;OA9CKJ,CAAP;KApiBuBtgH,EAknBzBskH,WAAAA,QAAAA,GAAA,UAAQz6H,CAAR,EAA4BsiG,CAA5B;uBAA4BA,QAE1B,IAAMg7B,IAAkB9D,2BAA2Bx5H,CAA3Bw5H,CAAxB,CACA+D,eACID,CADJC,EACqBj4H,KAAKilH,UAD1BgT,EACsCj4H,KAAKmlH,eAD3C8S,GAC4D,CAD5DA,EAEA;EAKE,UAAM1rG,IAAgC,QAApBywE,EAAOzwE,SAAa,GAAO,EAAP,GAAYywE,EAAOzwE,SAAzD,CAEA,OADA+lG,eAAe/lG,CAAf+lG,GACOtyH,KAAKk4H,WAALl4H,CAAiBg4H,CAAjBh4H,EAAkCusB,CAAlCvsB,CAAP;OAPF;EASEqzH,wBAAkB2E,CAAlB3E,EAAmC34H,CAAnC24H;;KAhoBqBxiH,EAipBzBskH,WAAAA,eAAAA,GAAA,UAAez6H,CAAf;EAIE,WAHAu9H,eAAev9H,CAAfu9H,EAAkBj4H,KAAKilH,UAAvBgT,EAAmCj4H,KAAKmlH,eAAxC8S,GAAyD,CAAzDA,GAGOj4H,KAAKk4H,WAALl4H,CAAiBtF,CAAjBsF,EAAoBtF,EAAEuB,KAAFvB,CAAQ,CAARA,CAApBsF,CAAP;KArpBuB6Q,EAwpBfskH,WAAAA,oBAAAA,GAAV,UACIz6H,CADJ,EAEI0W,CAFJ,EAEsDyiH,CAFtD,EAGItnG,CAHJ;EAKE,yBAHoDsnG,SAG9B,QAAlB7zH,KAAKyuH,SAAT,EACE,MAAM,IAAIhqB,YAAJ,CACF,0FADE,CAAN,CAKF,KADA,IAAMoM,MAAN,EACS91G,IAAI,CAAb,EAAgBA,IAAIiF,KAAK01H,gBAAL11H,CAAsB5F,MAA1C,IAAoDW,CAApD,EAAuD;EACrD,UAAM8S,IAAc7N,KAAK01H,gBAAL11H,CAAsBjF,CAAtBiF,CAApB,CACeA,KAAK21H,WAAL31H,CAAiBjF,CAAjBiF,MACAs2H,6BADAt2H,GAEb6wG,EAAa70G,IAAb60G,CACIhjG,EAAYhQ,KAAZgQ,CAAkB,CAAlBA,EAAqBA,EAAYzT,MAAZyT,GAAqB,CAA1CA,EAA6C1G,MAA7C0G,EAAqD,EAArDA,CADJgjG,CAFa7wG,GAMb6wG,EAAa70G,IAAb60G,CAAkBhjG,CAAlBgjG,CANa7wG;EAkBjB,SAHAm4H,kBANAz9H,IAAI09H,qBACI19H,CADJ09H,EACOp4H,KAAKolH,cADZgT,EAC4Bp4H,KAAKmlH,eADjCiT,GACkD,CADlDA,EACyD,OADzDA,CAMJD,EAHA/mH,IAAIgnH,qBACIhnH,CADJgnH,EACOp4H,KAAKqlH,eADZ+S,EAC6BvnB,CAD7BunB,GAC2C,CAD3
CA,EACkD,QADlDA,CAGJD,EAAwB,IAAxBA,GAEAE,gCAAgCjnH,CAAhCinH,EAAmCr4H,KAAK21H,WAAxC0C,EAAqDr4H,KAAK01H,gBAA1D2C,CAFAF,EAGIn4H,KAAK6yG,QAAL7yG,IAA8B,QAAbusB,CAAjBvsB,IAAsCusB,IAAY,CAAlDvsB,IACEtF,EAAE,CAAFA,EAAKuB,KAALvB,CAAW,CAAXA,IAAgB6xB,CAAhB7xB,IAA8B,CADpC,EAEI,MAAM,IAAIgqG,UAAJ,CACF,qHAEGn4E,CAFH,cAAA,GAEwB7xB,EAAE,CAAFA,EAAKuB,KAALvB,CAAW,CAAXA,CAFxB,gBADE,CAAN,CAOJ,QAAQA,GAAG0W,GAAG,KAAd;KAhsBuBP,EA8sBjBskH,WAAAA,SAAAA,GAAR,UACIp2H,CADJ,EACqC2yH,CADrC,EACoDnlG,CADpD,EAEIhrB,CAFJ,EAEiBs5G,CAFjB;EAAA,gBAAA,CAGE,wBADEt5G,QACK4vH,KAAS;EACd,UAAM5oG,IAAa5nB,EAAKmxH,eAALnxH,CAAqB+wH,CAArB/wH,EAA0B4rB,CAA1B5rB,EAAqCk6G,CAArCl6G,EAA4C,OAA5CA,CAAnB;EAAA,UACMsvH,MADN,CAEA,IAAI1uH,IAAU,CAAd,EACE,MAAM,IAAIojG,mBAAJ,CAAwB,sCAAxB,CAAN,CAGF,IAAa,QAATkW,CAAJ,EACE,MAAM,IAAIlW,mBAAJ,CACF,iDADE,CAAN,CAKA,KAFA,IAAMnpE,IAAU02F,YAAY3pG,CAAZ2pG,EAAwB3lG,CAAxB2lG,CAAhB,EACMH,IAAaprG,SAASioC,QAAM,CAANA,EAASrmC,CAATqmC,CAATjoC,CADnB,EAESkpG,IAAa,CAAtB,EAAyBA,IAAar0F,EAAQphC,MAA9C,IAAwDy1H,CAAxD,EAAoE;EAClE,YAAM4B,IAAaj2F,EAAQq0F,CAARr0F,EAAoB,CAApBA,CAAnB;EAAA,YACMi+E,IAAWj+E,EAAQq0F,CAARr0F,EAAoB,CAApBA,CADjB;EAAA,YAEM22F,IACFmG,oBACIvG,CADJuG,EACgB7G,CADhB6G,EAC4B7e,IAAWgY,CADvC6G,CAHJ;EAAA,YAOMlG,IAAWZ,qBAAqBE,CAArBF,EAA0BW,CAA1BX,CAPjB;EAAA,YAQMN,IAAYnyH,EAAEqzH,CAAFrzH,CARlB,CASA,IAAmB,MAAf8wH,CAAJ,EACE,KAAK,IAAI90H,IAAI,CAAb,EAAgBA,IAAIm2H,EAAU92H,MAA9B,IAAwCW,CAAxC,EACEk1H,EAAKj0H,IAALi0H,CAAUvoB,UAAU,CAAVA,CAAVuoB,EAGJ,KAASl1H,IAAI,CAAb,EAAgBA,IAAIm2H,EAAU92H,MAA9B,IAAwCW,CAAxC,EAA2C;EACzC,cAAMs2H,IAAWH,EAAUn2H,CAAVm2H,CAAjB,CACAjB,EAAKl1H,CAALk1H,IACIxoB,IACIwoB,EAAKl1H,CAALk1H,CADJxoB,EAEIF,IAAQG,UAAU+R,IAAWgY,CAArB/pB,CAARH,EAA0C8pB,CAA1C9pB,CAFJE,CADJwoB;;EAOJ,YAASl1H,IAAI,CAAb,EAAgBA,IAAIk1H,EAAK71H,MAAzB,IAAmCW,CAAnC,EACEk1H,EAAKl1H,CAALk1H,IAAUzoB,IAAQyoB,EAAKl1H,CAALk1H,CAARzoB,EAAiBE,UAAUn/E,CAAVm/E,CAAjBF,CAAVyoB,CAGJ,OAAOA,CAAP;OAzCKkB,CAAP;KAjtBuBtgH,EA8vBfskH,WAAAA,uBAAAA,GAAV;EAKE,SAJA,IAAM/F,IAAYpvH,KAAK0wH,YAAvB,EAGM6H,MAHN,EAISx9H,IAAI,CAAb,EAAgBA,IAAIq0H,EAAUh1H,MAA9B,IAAwCW,CAAxC,EAA2C;EACzC,UAAMwrG,IAAQ6oB,EAAUr0H,CAAVq0H,CAAd;EAAA,UACIoJ,IAAWjyB,CADf,CAEA,IAAI7iF,MAAM0rG,CAAN1rG,EAAiB6iF,CAAjB7iF,IAA0B,CAA9B,EAEE80G,KAAY,MADK90G,MAAM0rG,EAAUvxH,KAAVuxH,CAAgB,CAAhBA,EAAmBr0H,CAAnBq0H,CAAN1rG,EAA6B6iF,CAA7B7iF,CACjB80G,CAEFD,EAAiBv8H,IAAjBu8H,CAAsBC,CAAtBD;EAEF,YAAOA,CAAP;KA5wBuB1nH,EAyxBfskH,WAAAA,kBAAAA,GAAV;EAAA,gBAAA,CACE,OAAO,UAACnwH,CAAD;EAIL,UAAMyM,IAASzM,EAAKnH,KAALmH,CAAW,CAAXA,EAAcrE,EAAK8Q,MAAL9Q,CAAYvG,MAA1B4K,CAAf;EAAA,UACMytH,IAAUztH,EAAKnH,KAALmH,CACZrE,EAAK8Q,MAAL9Q,CAAYvG,MADA4K,EACQrE,EAAK8Q,MAAL9Q,CAAYvG,MAAZuG,GAAqBA,EAAKkR,OAALlR,CAAavG,MAD1C4K,CADhB;EAAA,UAIMyzH,MAJN;EAAA,UA8DMC,IAAY/3H,EAAK4gH,yBAAL5gH,CAA+BmB,GAA/BnB,CACd,UAAAo8D,CAAA;EAAS,eAAAA,EAAMt3D,IAANs3D,EAAA;SADKp8D,CA9DlB,CAoEA,QAFIA,EAAK8tH,SAAL9tH,CAAeg4H,QAAfh4H,CAzDsB;EAExB,aADA,IAAM+rH,MAAN,EACS3xH,IAAI,CAAb,EAAgBA,IAAI4F,EAAK8Q,MAAL9Q,CAAYvG,MAAhC,IAA0CW,CAA1C,EACE2xH,EAAM1wH,IAAN0wH,GAAYn2G,KAAK5V,EAAK8Q,MAAL9Q,CAAY5F,CAAZ4F,GAAgBoD,OAAO0N,EAAO1W,CAAP0W,GAAxCi7G,EAEF,IAMIkM,CANJ;EAAA,YAAM3L,IAAW,IAAIN,QAAJ,CAAaD,CAAb,CAAjB;EAAA,YACM76G,IACF2lH,QAAQ72H,EAAKkR,OAAb2lH,EAAsBvK,CAAtBuK,IAAiCprB,WAAY,GAA7CorB,CAFJ,CAOA,KAASz8H,IAAI,CAAb,EAAgBA,IAAI4F,EAAK20H,aAAL30H,CAAmBvG,MAAvC,IAAiDW,CAAjD,EAAoD;EAClD,cACM65H,KAAOiE,GADQl4H,EAAK20H,aAAL30H,CAAmB5F,CAAnB4F,GACK8xH,EAAQ13H,CAAR03H,GAAY5gH,EAAQ9W,CAAR8W,EADtC,CAIiBwpG,KAASuZ,CAATvZ,GAIfud,IADQ,MAAN79H,CAAM,GACI65H,CADJ,GAGIntB,IAAQmxB,CAARnxB,EAAmBmtB,CAAnBntB,CANG4T;EAanB,cAAStgH,IAAI,CAAb,EAAgBA,IAAI4F,EAAKm1H,cAALn1H,CAAoBvG,MAAxC,IAAkDW,CAAlD,EAA
qD;EACnD,cAAMo7H,IAASx1H,EAAKm1H,cAALn1H,CAAoB5F,CAApB4F,EAAuB,CAAvBA,CAAf;EAAA,cACMm2H,IAAcn2H,EAAKm1H,cAALn1H,CAAoB5F,CAApB4F,EAAuB,CAAvBA,CADpB;EAAA,cAIMm4H,IACFzd,KAAS8a,EAAO1D,EAAQqE,CAARrE,CAAP0D,EAA6BtkH,EAAQilH,CAARjlH,CAA7BskH,CAAT9a,CALJ,CAOA8U,KAAS2I,CAAT3I,GAEAsI,EAAcz8H,IAAdy8H,CAAmBK,CAAnBL,CAFAtI;EAYF,gBAPAyI,IAAYvd,KAASud,CAATvd,CAAZud,EAGAj4H,EAAK8pH,eAAL9pH,GAAuBP,OAAvBO,CAA+B,UAAAo4H,CAAA;EAC7BH,cAAYnxB,IAAQmxB,CAARnxB,EAAmBsxB,CAAnBtxB,CAAZmxB;WADFj4H,CAHAi4H,EAOOA,CAAP;SAOEj4H,GAFe,CAEfA,EAAuD+3H,CAAvD/3H,GAEoBwG,OAAOsxH,EAA/B;OAxEF;KA1xBuB5nH,EA22BjBskH,WAAAA,iBAAAA,GAAR;EAAA,gBAAA,CACEn1H,KAAK8wH,YAAL9wH,GAAoB,UAACgF,CAAD;EAClB,aAAOmsH,KAAS;EAOd,aANA,IACIyH,CADJ,EAAMI,MAAN,EAEMvnH,IAASzM,EAAKnH,KAALmH,CAAW,CAAXA,EAAcrE,EAAK8Q,MAAL9Q,CAAYvG,MAA1B4K,CAFf,EAGMytH,IAAUztH,EAAKnH,KAALmH,CACZrE,EAAK8Q,MAAL9Q,CAAYvG,MADA4K,EACQrE,EAAK8Q,MAAL9Q,CAAYvG,MAAZuG,GAAqBA,EAAKkR,OAALlR,CAAavG,MAD1C4K,CAHhB,EAKM0nH,MALN,EAMS3xH,IAAI,CAAb,EAAgBA,IAAI4F,EAAK8Q,MAAL9Q,CAAYvG,MAAhC,IAA0CW,CAA1C,EACE2xH,EAAM1wH,IAAN0wH,GAAYn2G,KAAK5V,EAAK8Q,MAAL9Q,CAAY5F,CAAZ4F,GAAgBoD,OAAO0N,EAAO1W,CAAP0W,GAAxCi7G,EAEF,IAAMO,IAAW,IAAIN,QAAJ,CAAaD,CAAb,CAAjB;EAAA,YACM76G,IAAU2lH,QAAQ72H,EAAKkR,OAAb2lH,EAAsBvK,CAAtBuK,CADhB,CAGA,KAASz8H,IAAI,CAAb,EAAgBA,IAAI4F,EAAK20H,aAAL30H,CAAmBvG,MAAvC,IAAiDW,CAAjD,EAAoD;EAClD,cAAM89H,IAAel4H,EAAK20H,aAAL30H,CAAmB5F,CAAnB4F,CAArB;EAAA,cAGMi0H,IAAOvZ,KAASwd,EAAapG,EAAQ13H,CAAR03H,CAAboG,EAAyBhnH,EAAQ9W,CAAR8W,CAAzBgnH,CAATxd,CAHb,CAKEud,IADQ,MAAN79H,CAAM,GACI65H,CADJ,GAGIntB,IAAQmxB,CAARnxB,EAAmBmtB,CAAnBntB,CAFZmxB,EAIFI,EAAWh9H,IAAXg9H,CAAgBJ,CAAhBI,CAJEJ;EAOJ,cAAS79H,IAAI,CAAb,EAAgBA,IAAI4F,EAAKm1H,cAALn1H,CAAoBvG,MAAxC,IAAkDW,CAAlD,EAAqD;EACnD,cAAMo7H,IAASx1H,EAAKm1H,cAALn1H,CAAoB5F,CAApB4F,EAAuB,CAAvBA,CAAf;EAAA,cACMm2H,IAAcn2H,EAAKm1H,cAALn1H,CAAoB5F,CAApB4F,EAAuB,CAAvBA,CADpB;EAAA,cAGMm4H,IACFzd,KAAS8a,EAAO1D,EAAQqE,CAARrE,CAAP0D,EAA6BtkH,EAAQilH,CAARjlH,CAA7BskH,CAAT9a,CAJJ,CAKA2d,EAAWh9H,IAAXg9H,CAAgBF,CAAhBE;EAEF,gBAAOA,CAAP;SAlCK7H,CAAP;OADFnxH;KA52BuB6Q,EAu7BnBskH,WAAAA,IAAAA,GAAN,UACIz6H,CADJ,EAEI0W,CAFJ,EAGI4rF,CAHJ;8BAGIA;;EACF,mBAAOi8B,WAAWj5H,IAAXi5H,EAAiBv+H,CAAjBu+H,EAAoB7nH,CAApB6nH,EAAuBj8B,CAAvBi8B,EAAP;;;KA37BuBpoH,EAs9BnBskH,WAAAA,WAAAA,GAAN,UACI7G,CADJ,EACyBtxB,CADzB;;;EAEE,mBAAOk8B,WAAWl5H,IAAXk5H,EAAiB5K,CAAjB4K,EAA0Bl8B,CAA1Bk8B,EAAP;;;KAx9BuBroH,EAo+BfskH,WAAAA,gBAAAA,GAAV,UAA0Bn4B,CAA1B;EAME,SALA,IAAMm8B,MAAN,EAEMtkB,IAA0B,QAAV7X,CAAU,IAAQA,EAAO6X,aAF/C,EAGMtlC,IAAUslC,IAAgB70G,KAAK0yG,gBAArBmC,GAAwC70G,KAAKuvE,OAH7D,EAIMu0C,IAAe9jH,KAAKo5H,UAALp5H,CAAgB60G,CAAhB70G,CAJrB,EAKSjF,IAAI,CAAb,EAAgBA,IAAIw0E,EAAQn1E,MAA5B,IAAsCW,CAAtC,EACM85G,MAAkBtlC,EAAQx0E,CAARw0E,EAAWxgE,SAA7B8lG,KAIJskB,EAAa5pD,EAAQx0E,CAARw0E,EAAW8/B,YAAxB8pB,IAAwCrV,EAAa/oH,CAAb+oH,CAJpCjP,EAMN,OAAOskB,CAAP;KAj/BuBtoH,EAkhCzB1M,qBAAAA,CAAIgxH,WAAJhxH,gBAAAA,SAAA,UAAiB8f,CAAjB;EACEjkB,WAAK2wH,aAAL3wH,GAAqBikB,CAArBjkB;2CADFmE,CAlhCyB0M,EAwmCnBskH,WAAAA,KAAAA,GAAN,UAAWkE,CAAX,EAA8Cr8B,CAA9C;;;;EAEE,gBAA4B,mBAAjBq8B,CAAX,EAAsC;EAEpC,kBAAwB,OADlBC,IAAWC,GAAG7qC,eAAH6qC,CAAmBF,CAAnBE,GACJn/H,MAAb,EACE,MAAM,IAAIsqG,UAAJ,CACF,4CAA0C20B,CAA1C,MADE,CAAN,CAEK,IAAIC,EAASl/H,MAATk/H,GAAkB,CAAtB,EACL,MAAM,IAAI50B,UAAJ,CACF,0BAAwB40B,EAASl/H,MAAjC,8BAAA,GACQi/H,CADR,MADE,CAAN,CAIFA,IAAeC,EAAS,CAATA,CAAfD;EAEF,iBAAyB,QAArBA,EAAavhE,IAAjB,EACE,MAAM,IAAI4sC,UAAJ,CACF,wGADE,CAAN,CAME,WAAM60B,GAAGC,aAAHD,CAAiBv5H,KAAKy5H,eAALz5H,CAAqBg9F,CAArBh9F,CAAjBu5H,EAAN;EAMJ,mBAPMG,IACFvgH,MAAAA,EADEugH,EAGAhR,KAAe,CAHfgR,EAIAC,IAAgB,IAJhBD,EAKAn+B,IAAcv7F,KAAK45H,MAAL55H,CAAY25H,CAAZ35H,EAAuB0oH,CAA
vB1oH,CALd05H,MAOCL,EAAavhE,IAAbuhE,GACL7sC,eAAe+O,GACftO,YAAYysC,EAAmB10H,MAC/B+nF,aAAa2sC,EAAmB/uC,OAH3B0uC,EAAP;;;KAnoCuBxoH,EAClBskH,WAAAA,GAAY,OADMtkH,GAyoC3B;IAzoC2Bq1G,UAA3B,wBC1XI2T,GACAp0B;;;;mBACI,mBAAmBo0B,CAAnB,KACJA,MAAyBrtC,eAAeqtC,GADpC,GAM+B,SADjCrtC,KAFJqtC,IAAwBA,GAEkBrtC,eACVstC,YAAK,KAMnCttC,IAAgBA,EAA4BstC,YANT,CAN/B,EAcAjX,IACFL,oBAAoBh2B,CAApBg2B,CAfE,EAgBA1L,IAAQijB,YAAYlX,CAAZkX,EAAsBt0B,CAAtBs0B,CAhBR,EAkBuC,QAAzCF,EAAsB1kC,eAAmB,SAAA,OAKjCokC,GAAGn+B,WAAHm+B,CACFM,EAAsB1kC,eADpBokC,EAEFM,EAAsBl+B,UAFpB49B,EAGFziB,EAAMvnC,OAANunC,CAAch1G,GAAdg1G,CAAkB,UAAA5d,CAAA;EAAU,mBAAAA,EAAOmW,YAAP;aAA5ByH,CAHEyiB;EAOV,eARMzV,IACFvxF,MAAAA,EADEuxF,EAOAkW,MAPAlW,OAAAA,EAQe3qG,IAAA29F,EAAMvnC,OAA3B,EAAqBvrE,YAArB,EAAqBA,GAArB,EAAWk1F,QAAAA,EACT8gC,EAAmB9gC,EAAOmW,YAA1B2qB,IACIlW,EAAa5qB,EAAOmW,YAApByU,CAFK5qB,CAKL+gC,IAA0B,IAA1BA,EACA9R,KAAmB,CADnB8R,EAENnjB,EAAM1b,WAAN0b,CAAkBkjB,CAAlBljB,EAAsCmjB,CAAtCnjB,EAAsDqR,CAAtDrR,CAFMmjB,aAAAA;EAIR,qBAAOnjB,EAAP;;;EAgIF,2BAAA,CACIojB,CADJ,EAC0C/V,CAD1C;4BAC0CA;;EACxC,UAA+B,mBAApB+V,CAAX,EAAyC;EAEvC,YAAwB,OADlBZ,IAAWC,GAAGhrC,eAAHgrC,CAAmBW,CAAnBX,GACJn/H,MAAb,EAGEk/H,EAASt9H,IAATs9H,CAAcC,GAAGl9B,kBAAHk9B,CAAsBW,CAAtBX,CAAdD,EAHF,KAIO,IAAIA,EAASl/H,MAATk/H,GAAkB,CAAtB,EACL,MAAM,IAAI50B,UAAJ,CACF,0BAAwB40B,EAASl/H,MAAjC,8BAAA,GACQ8/H,CADR,MADE,CAAN,CAIFA,IAAkBZ,EAAS,CAATA,CAAlBY;EAEF,kBAAOC,uBACHD,CADGC,OAC8B91B,CAD9B81B,EACyChW,CADzCgW,EAAP;;;EAOF,gCAAA,CACI11H,CADJ,EAC2BghG,CAD3B,EAEI0e,CAFJ;4BAEIA;;;EACF,cAAoB,QAAhB1/G,EAAQuqF,IAAZ,EACE,MAAM,IAAI0V,UAAJ,CACF,+GADE,CAAN,CAIgB,WAAMjgG,EAAQuqF,IAARvqF,GAAN;EAWlB,cAXM21H,IAAYjhH,MAAAA,EAAZihH,EAE+B,SADjC5tC,IAAgB4tC,EAAU5tC,eACEstC,YAAK,KACnCttC,IAAgBA,EAA4BstC,YADT,CAF/BM,EAKAtjB,IACFijB,YACIvX,oBAAoBh2B,CAApBg2B,CADJuX,EAEIt0B,CAFJs0B,CANEK,EAWsB,QAAxBA,EAAUntC,UAAd,EAAkC;EAEhC,gBAA6B,QAAzBmtC,EAAUrtC,WAAd,EACE,MAAM,IAAI2X,UAAJ,CACF,0GADE,CAAN,CAKI8e,KAAe,CAAfA,EACA2E,KAAmB,CADnB3E,EAEN1M,EAAM1b,WAAN0b,CACIyiB,GAAGr/B,aAAHq/B,CAAiBa,EAAUntC,UAA3BssC,EAAuCa,EAAUrtC,WAAjDwsC,CADJziB,EAEI0M,CAFJ1M,EAEkBqR,CAFlBrR,EAEoCqN,CAFpCrN,CAFM0M;EAMR,sBAAO1M,EAAP;;;iBDoyCYnP,cAAcwtB,OC7vC5B;EAIE,YAAA,CAAYn4B,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,IAAOgB,YAAYI,aAAnBpB,SADF,CAYE,IAVAusF,IAASA,OAATA,EAEAr8F,EAAKoO,SAALpO,IAAiB,CAFjBq8F,EAGAr8F,EAAK05H,UAAL15H,IAAkB,CAHlBq8F,EAIAr8F,EAAK8xG,KAAL9xG,IAAa,CAJbq8F,EAOAr8F,EAAKpC,IAALoC,GAA4B,QAAfq8F,EAAOz+F,IAAQ,GAAQy+F,EAAOz+F,IAAf,GAAsBszG,OAAO,aAAPA,CAPlD7U,EAUqB,QAAjBA,EAAO+jB,MAAX,EACE,KAAoB,SAAA,EAAA5nG,IAAA6jF,EAAO+jB,MAA3B,EAAoB/8G,YAApB,EAAoBA,GAApB;EAAK,UAAM8sG,QAAN,CACHnwG,EAAK0H,GAAL1H,CAASmwG,CAATnwG;;EA4kBR,UA9lBgCkQ,aAAAA,GAAAA,GAyBtBypH,WAAAA,WAAAA,GAAR,UAAmBxpB,CAAnB;EAEE,QADcA,EAAME,YAANF,CAAmB,CAAnBA,EAAsBJ,aAAtBI,CAAoC,CAApCA,EAAuC70G,KAAvC60G,CACJ/wF,IADI+wF,CACC,UAAAp2G,CAAA;EAAK,aAAAA,IAAI,CAAJ;OADNo2G,CACd,EACE,MAAM,IAAIpM,UAAJ,CACF,oDACGoM,EAAMvyG,IADT,wBAAA,GAEGuyG,EAAME,YAANF,CAAmB,CAAnBA,EAAsBL,YAAtBK,CAAmC,CAAnCA,EAAsC70G,KAFzC,MADE,CAAN;KA5B0B4U,EAuD9BypH,WAAAA,IAAAA,GAAA,UAAIxpB,CAAJ;EACE,QAEIypB,CAFJ;EAAA,QAAMC,IACF1pB,aAAiBwpB,CAAjBxpB,IAA+BA,aAAiBqkB,KADpD,CAGA,IAAIqF,CAAJ,EAA0B;EAExB,UAAkC,OADlCD,IAAazpB,GACEj/F,QAAQzX,MAAvB,EACE,MAAM,IAAIsqG,UAAJ,CACF,uHADE,CAAN,CAMF,IAAiC,MAA7B61B,EAAW9oH,MAAX8oH,CAAkBngI,MAAtB,EACE,MAAM,IAAIsqG,UAAJ,CACF,qHADE,CAAN;EAQJ,SAA4B,MAAxB1kG,KAAK6R,OAAL7R,CAAa5F,MAAjB,EAA+B;EAE7B,UAAkC,MAA9B02G,EAAME,YAANF,CAAmB12G,MAAvB,EAAqC;EAEnC,YAA6B,QAAzB02G,EAAMiB,eAAV,EACE,MAAM,IAAIrN,UAAJ,CACF,+FADE,CAAN,CAKF,IAAMhqG,IAAI+/H,QACR/4E,YAAYovD,EAAMiB,iBAClB9zG,OAAO6yG,EAAM7yG,OACbM,MAAMuyG,EAAMvyG,IAANuyG,GAAa,
UAHX2pB,CAAV,CAOA3pB,EAAM5xG,KAAN4xG,CAAYp2G,CAAZo2G;EAGF,WAAI0pB,CAAJ,EACEx6H,KAAK6R,OAAL7R,GAAeu6H,EAAW1oH,OAA1B7R,EACAA,KAAKyR,MAALzR,GAAcu6H,EAAW9oH,MADzBzR,CADF,KAGO;EACL,YAAkC,MAA9B8wG,EAAME,YAANF,CAAmB12G,MAAvB,EACE,MAAM,IAAIsqG,UAAJ,CACF,4GACkDoM,EAAMvyG,IADxD,gBAAA,GAEauyG,EAAME,YAANF,CAAmB12G,MAFhC,uCADE,CAAN,CAOF,IAAmD,MAA/C02G,EAAME,YAANF,CAAmB,CAAnBA,EAAsBJ,aAAtBI,CAAoC12G,MAAxC,EACE,MAAM,IAAIsqG,UAAJ,CACF,uHADE,CAAN,CAMF1kG,KAAK06H,UAAL16H,CAAgB8wG,CAAhB9wG,GACAA,KAAK6R,OAAL7R,IAAgB8wG,EAAME,YAANF,CAAmB,CAAnBA,EAAsBJ,aAAtBI,CAAoC,CAApCA,EADhB9wG,EAEAA,KAAKyR,MAALzR,GAAck2G,gBAAgBl2G,KAAK6R,OAAL7R,CAAa,CAAbA,CAAhBk2G,CAFdl2G;EAKFA,YAAKgxG,YAALhxG,KAAAA,EAKA,IAAIixG,IAAJ,GACEZ,eAAerwG,MACfswG,mBACAC,iBACAC,mBACAC,cAAczwG,KAAKyR,QACnBi/F,eAAe1wG,KAAK6R,SAEpB8+F,YAAYkY,aAA2B,IAA3BA,EAAiC7oH,KAAKyR,MAALzR,CAAY5F,MAA7CyuH,GACZjY,cAAc,OACdt6F,aAAatW,KAAKyR,MAALzR,CAAY8B,GAAZ9B,CAAgB,UAAAtF,CAAA;EAAK,iBAAAA,EAAEuB,KAAF;WAArB+D,GACb6wG,cAAc7wG,KAAK6R,OAAL7R,CAAa,CAAbA,EAAgB/D,OAXhC,CALA+D;OA5CF,MA8DO;EACL,UAAM26H,IAAe7pB,EAAM5xG,KAAN4xG,CAAY9wG,KAAK6R,OAAL7R,CAAa,CAAbA,CAAZ8wG,CAArB,CACA,IAAIj1G,MAAMC,OAAND,CAAc8+H,CAAd9+H,CAAJ,EACE,MAAM,IAAIy5G,SAAJ,CACF,uHADE,CAAN,CAMFt1G,KAAK06H,UAAL16H,CAAgB8wG,CAAhB9wG,GACAA,KAAK6R,OAAL7R,IAAgB26H,EADhB36H,EAGAA,KAAKgxG,YAALhxG,CAAkB,CAAlBA,EAAqB0wG,aAArB1wG,GAAqCA,KAAK6R,OAH1C7R,EAIAA,KAAKgxG,YAALhxG,CAAkB,CAAlBA,EAAqB6wG,YAArB7wG,IAAqCA,KAAK6R,OAAL7R,CAAa,CAAbA,EAAgB/D,MAJrD+D;EAOFA,UAAK+gH,MAAL/gH,CAAYhE,IAAZgE,CAAiB8wG,CAAjB9wG,GACAA,KAAKyyG,KAALzyG,IAAa,CADbA;KA3J4B6Q,EAoK9BypH,WAAAA,IAAAA,GAAA;EACE,QAA2B,MAAvBt6H,KAAK+gH,MAAL/gH,CAAY5F,MAAhB,EACE,MAAM,IAAIk7G,SAAJ,CAAc,mCAAd,CAAN,CAIF,IADAt1G,KAAK+gH,MAAL/gH,CAAYuY,GAAZvY,IAC2B,MAAvBA,KAAK+gH,MAAL/gH,CAAY5F,MAAhB,EACE4F,KAAK6R,OAAL7R,KAAAA,EACAA,KAAKgxG,YAALhxG,KADAA,EAEAA,KAAK+wG,aAAL/wG,KAFAA,CADF,KAIO;EACL,UAAM46H,IAAiB56H,KAAK+gH,MAAL/gH,CAAY5F,MAAZ4F,GAAqB,CAA5C,CACAA,KAAK+gH,MAAL/gH,CAAY46H,CAAZ56H,EAA4B+wG,aAA5B/wG,KAAAA,EACAA,KAAK6R,OAAL7R,IAAgBA,KAAK+gH,MAAL/gH,CAAY46H,CAAZ56H,EAA4B8R,OAD5C9R,EAGAA,KAAKgxG,YAALhxG,CAAkB,CAAlBA,EAAqB0wG,aAArB1wG,GAAqCA,KAAK6R,OAH1C7R,EAIAA,KAAKgxG,YAALhxG,CAAkB,CAAlBA,EAAqB6wG,YAArB7wG,IAAqCA,KAAK6R,OAAL7R,CAAa,CAAbA,EAAgB/D,MAJrD+D;;KAhL0B6Q,EAwL9BypH,WAAAA,KAAAA,GAAA,UAAK7oH,CAAL,EAA8ByhG,CAA9B;EAIE,WAHkB,QAAdlzG,KAAK82G,KAAS,IAChB92G,KAAK8zG,KAAL9zG,EADgB,EAGXA,KAAK82G,KAAL92G,CAAWf,IAAXe,CAAgByR,CAAhBzR,EAAwBkzG,CAAxBlzG,CAAP;KA5L4B6Q,EA+L9BypH,WAAAA,MAAAA,GAAA,UAAMn6G,CAAN;EAKE,QAFA06G,mBAAmB16G,CAAnB06G,GAE2B,MAAvB76H,KAAKyR,MAALzR,CAAY5F,MAAW,IAA6B,MAAxB4F,KAAK6R,OAAL7R,CAAa5F,MAA7C,EACE,MAAM,IAAIk7G,SAAJ,CACF,0EADE,CAAN,CAKFt1G,KAAK82G,KAAL92G,GAAa,IAAIm1H,KAAJ,GACX1jH,QAAQzR,KAAKyR,QACbI,SAAS7R,KAAK6R,OAAL7R,CAAa,CAAbA,GACTzB,MAAMyB,KAAKzB,IAALyB,GAAY,UAHP,CAAbA,EAKAA,KAAK82G,KAAL92G,CAAW+O,SAAX/O,GAAuBA,KAAK+O,SAL5B/O,EAMAA,KAAK82G,KAAL92G,CAAW8xG,SAAX9xG,GAAuBA,KAAK8xG,SAN5B9xG,EASAA,KAAKsxG,eAALtxG,GAAuBA,KAAK82G,KAAL92G,CAAWsxG,eATlCtxG,EAWAA,KAAK0kH,WAAL1kH,GAAmBA,KAAK82G,KAAL92G,CAAW0kH,WAX9B1kH,EAYAA,KAAK2kH,sBAAL3kH,GAA8BA,KAAK82G,KAAL92G,CAAW2kH,sBAZzC3kH,EAaAA,KAAK4kH,wBAAL5kH,GAAgCA,KAAK82G,KAAL92G,CAAW4kH,wBAb3C5kH,EAcAA,KAAK6kH,YAAL7kH,GAAoBA,KAAK82G,KAAL92G,CAAW6kH,YAd/B7kH,EAeAA,KAAK8kH,uBAAL9kH,GAA+BA,KAAK82G,KAAL92G,CAAW8kH,uBAf1C9kH,EAgBAA,KAAK+kH,yBAAL/kH,GAAiCA,KAAK82G,KAAL92G,CAAW+kH,yBAhB5C/kH,EAiBAA,KAAK6gH,YAAL7gH,GAAoBA,KAAK82G,KAAL92G,CAAW6gH,YAjB/B7gH,EAkBAA,KAAKimH,cAALjmH,GAAsBA,KAAK82G,KAAL92G,CAAWimH,cAlBjCjmH,EAmBAA,KAAKklH,WAALllH,GAAmBA,KAAK82G,KAAL92G,CAAWklH,WAnB9BllH,EAoBAA,KAAKilH,UAALjlH,GAAkBA,KAAK82G,
KAAL92G,CAAWilH,UApB7BjlH,EAuBAA,KAAKyyG,KAALzyG,IAAa,CAvBbA;KA1M4B6Q,EAoO9BypH,WAAAA,YAAAA,GAAA;EAIE,WAHKt6H,KAAKyyG,KAALzyG,IACHA,KAAK8zG,KAAL9zG,EADGA,EAGEyQ,WAAAA,CAAMoxG,WAANpxG,KAAAA,KAAAA,CAAP;KAxO4BI,EAwQ9BypH,WAAAA,QAAAA,GAAA,UACIha,CADJ,EACyBC,CADzB,EAEIC,CAFJ;uBAEIA,IAEoDt/G,QAAQC,MACzDnB,KAAKyyG,KAALzyG,IACHA,KAAK8zG,KAAL9zG,IAEFyQ,WAAAA,CAAMqqH,OAANrqH,KAAAA,KAAAA,EAAc6vG,CAAd7vG,EAA0B8vG,CAA1B9vG,EAAqC+vG,CAArC/vG;KAhR4BI,EAyR9BypH,WAAAA,WAAAA,GAAA,UAAW/qD,CAAX;EACoB,YAAdvvE,KAAK82G,KAAS,IAChB92G,KAAK8zG,KAAL9zG,EADgB,EAGlBA,KAAK82G,KAAL92G,CAAW+zG,UAAX/zG,CAAsBuvE,CAAtBvvE,CAHkB;KA1RU6Q,EAgS9B1M,qBAAAA,CAAIm2H,WAAJn2H,aAAAA,SAAA;EACE,aAAOnE,KAAKq6H,UAAZ;cAGF,UAAct2H,CAAd;EACM/D,WAAKyyG,KAALzyG,KACFA,KAAK82G,KAAL92G,CAAW8xG,SAAX9xG,GAAuB+D,CADrB/D,GAGJA,KAAKq6H,UAALr6H,GAAkB+D,CAHd/D;2CALNmE,CAhS8B0M,EA4U9BypH,WAAAA,SAAAA,GAAA,UACI5/H,CADJ,EACwB0W,CADxB,EAEI4rF,CAFJ;EAGE,yBADEA,UACGh9F,KAAKyyG,KAAV,EACE,MAAM,IAAIhO,YAAJ,CACF,mDADE,CAAN,CAGF,OAAOzkG,KAAK82G,KAAL92G,CAAWwwH,QAAXxwH,CAAoBtF,CAApBsF,EAAuBoR,CAAvBpR,EAA0Bg9F,CAA1Bh9F,CAAP;KAnV4B6Q,EA6WxBypH,WAAAA,gBAAAA,GAAN,UACIhM,CADJ,EAEItxB,CAFJ;;;EAGE,aAAKh9F,KAAKyyG,KAAV,EACE,MAAM,IAAIhO,YAAJ,CACF,mDADE,CAAN,CAGF,WAAOzkG,KAAK82G,KAAL92G,CAAWuwH,eAAXvwH,CAA2BsuH,CAA3BtuH,EAAoCg9F,CAApCh9F,EAAP;;;KApX4B6Q,EAmZ9BypH,WAAAA,QAAAA,GAAA,UAAQ5/H,CAAR,EAA4BsiG,CAA5B;EAKE,4BAL0BA,SAER,QAAdh9F,KAAK82G,KAAS,IAChB92G,KAAK8zG,KAAL9zG,IAEKA,KAAK82G,KAAL92G,CAAW+6H,OAAX/6H,CAAmBtF,CAAnBsF,EAAsBg9F,CAAtBh9F,CAAP;KAxZ4B6Q,EAka9BypH,WAAAA,eAAAA,GAAA,UAAe5/H,CAAf;EAIE,WAHkB,QAAdsF,KAAK82G,KAAS,IAChB92G,KAAK8zG,KAAL9zG,EADgB,EAGXA,KAAK82G,KAAL92G,CAAWg7H,cAAXh7H,CAA0BtF,CAA1BsF,CAAP;KAta4B6Q,EA8a9BypH,WAAAA,QAAAA,GAAA,UAAQt9B,CAAR;EACEh9F,SAAK8zG,KAAL9zG,IACAA,KAAK82G,KAAL92G,CAAWi7H,OAAXj7H,CAAmBg9F,CAAnBh9F,CADAA,EAEAA,KAAKyuH,SAALzuH,GAAiBA,KAAK82G,KAAL92G,CAAWyuH,SAF5BzuH,EAGAA,KAAK40H,IAAL50H,GAAYA,KAAK82G,KAAL92G,CAAW40H,IAHvB50H,EAIAA,KAAKg5G,OAALh5G,GAAeA,KAAK82G,KAAL92G,CAAWg5G,OAJ1Bh5G,EAOAA,KAAK81H,cAAL91H,GAAsBA,KAAK82G,KAAL92G,CAAW81H,cAPjC91H,EAQAA,KAAK0wH,YAAL1wH,GAAoBA,KAAK82G,KAAL92G,CAAW0wH,YAR/B1wH;KA/a4B6Q,EA2dxBypH,WAAAA,IAAAA,GAAN,UACI5/H,CADJ,EAEI0W,CAFJ,EAGI4rF,CAHJ;8BAGIA;;EACF,aAAKh9F,KAAKyyG,KAAV,EACE,MAAM,IAAIhO,YAAJ,CACF,mDADE,CAAN,CAIF,WAAOzkG,KAAK82G,KAAL92G,CAAWk7H,GAAXl7H,CAAetF,CAAfsF,EAAkBoR,CAAlBpR,EAAqBg9F,CAArBh9F,EAAP;;;KApe4B6Q,EA8fxBypH,WAAAA,WAAAA,GAAN,UACIhM,CADJ,EACyBtxB,CADzB;;;EAEE,aAAKh9F,KAAKyyG,KAAV,EACE,MAAM,IAAIhO,YAAJ,CACF,mDADE,CAAN,CAIF,WAAOzkG,KAAK82G,KAAL92G,CAAWk5H,UAAXl5H,CAAsBsuH,CAAtBtuH,EAA+Bg9F,CAA/Bh9F,EAAP;;;KArgB4B6Q,EAygBvBypH,YAAAA,GAAP,UACIv9B,CADJ,EAEIC,CAFJ;EAGE,QAAIm+B,CAAJ;EAAA,QACIC,MADJ,CAEA,IAAIp+B,aAAkBnhG,KAAtB,EAA6B;EAC3B,UAA6B,QAAvBmhG,EAAO,CAAPA,EAAUF,SAAa,IACE,YAA3BE,EAAO,CAAPA,EAAqBF,SADzB,EAEE,MAAM,IAAI4H,UAAJ,CAAe,gDAAf,CAAN,CAEFy2B,IAAcn+B,CAAdm+B;OALF,MAOErvB,KAAKpwG,MAALowG,CACwB,QAApB9O,EAAe+jB,MADnBjV,EAEI,qHAFJA,GAIAqvB,IAAcn+B,EAAe+jB,MAJ7BjV,SAKO9O,EAAe+jB,MALtBjV,EAMAsvB,IAAmBp+B,CANnB8O,CASF,IAAMgL,IAAQ,IAAI/Z,CAAJ,CAAQq+B,CAAR,CAAd,CACA,MAAMtkB,aAAiBwjB,EAAvB,EACE,MAAM,IAAI31B,mBAAJ,CACF,2DAAyDmS,CADvD,CAAN,CAIF,KAAmB,SAAA,EAAAukB,KAAnB,EAAmBr3H,YAAnB,EAAmBA,GAAnB;EAAK,UACG8sG,IAAQipB,gBAAAA,CADX,CAEHjjB,EAAMzuG,GAANyuG,CAAUhG,CAAVgG;EAEF,YAAOA,CAAP;KAxiB4BjmG,EAukB9B1M,qBAAAA,CAAIm2H,WAAJn2H,gBAAAA,SAAA,UAAiB8f,CAAjB;EAGEjkB,WAAK82G,KAAL92G,CAAWs7H,YAAXt7H,GAA0BikB,CAA1BjkB;2CAHFmE,CAvkB8B0M,EAglB9BypH,WAAAA,UAAAA,GAAA;EAME,SADA,IAAMt9B,MAAN,OAAA,EACoB7jF,IAAAnZ,KAAK+gH,MAAzB,EAAoB/8G,YAApB,EAAoBA,GAApB;EAAK,UAAM8sG,QAAN,CACH9T,EAAOhhG,IAAPghG,GACEF,WA
AWgU,EAAMxL,YAANwL,IACX9T,QAAQ8T,EAAMvL,SAANuL,IAFV9T;EAKF,YAAOA,CAAP;KA5lB4BnsF,EACvBypH,WAAAA,GAAY,YADWzpH,GA8lBhC;IA9lBgCskH,MAAhC,gBCjQsBn4B;EACpB,SAAO,IAAIm4B,KAAJ,CAAUn4B,CAAV,CAAP;EAgEF,oBAAA,CAA2BA,CAA3B;EACE,SAAO,IAAIs9B,UAAJ,CAAet9B,CAAf,CAAP;EAUF,mBAAA,CACIk9B,CADJ,EAC0C/V,CAD1C;EAEE,0BADwCA,SACjCoX,kBAAkBrB,CAAlBqB,EAAmCpX,CAAnCoX,CAAP;EAWF,eAAA,CAAsBv+B,CAAtB;EACE,SAAOy9B,MAAMz9B,CAANy9B,CAAP;EAGF,qCAAA,CACI5gB,CADJ,EAEIC,CAFJ;EAGEF,8BAA4B4hB,2BAA5B5hB,CACIC,CADJD,EACoBE,CADpBF;iBDgwBYjS,cAAc2yB,YE94B5B;EAAA,YAAA;;EAKA,UALyCzpH,aAAAA,GAAAA,GAEvC4qH,WAAAA,UAAAA,GAAA;EACE,aAAA;KAHqC5qH,GAKzC;IALyCk2F,cAAclK,aAAvD;EAAA;EAeA,YAAA;;EAYA,UAZyBhsF,aAAAA,GAAAA,GASvB6qH,WAAAA,MAAAA,GAAA,UAAMhhI,CAAN,EAAiBhD,CAAjB;EACE,4BADeA,QACRikI,MAAMjhI,CAANihI,EAASjkI,CAATikI,CAAP;KAVqB9qH,EACP6qH,WAAAA,GAAY,KADL7qH,GAYzB;IAZyB4qH,WAfzB,eA4Bc9zB,cAAc+zB,KAS5B;EAAA,YAAA;;EAKA,UAL0B7qH,aAAAA,GAAAA,GAExB+qH,WAAAA,MAAAA,GAAA,UAAMlhI,CAAN;EACE,WAAOmhI,KAASnhI,CAATmhI,CAAP;KAHsBhrH,EACR+qH,WAAAA,GAAY,MADJ/qH,GAK1B;IAL0B4qH,WAA1B,eAMc9zB,cAAci0B,MAK5B;EAAA,YAAA;;EAKA,UAL0B/qH,aAAAA,GAAAA,GAExBirH,WAAAA,MAAAA,GAAA,UAAMphI,CAAN;EACE,WAAOotG,KAASptG,CAATotG,CAAP;KAHsBj3F,EACRirH,WAAAA,GAAY,MADJjrH,GAK1B;IAL0B4qH,WAA1B,eAMc9zB,cAAcm0B,MAK5B;EAAA,YAAA;;EAKA,UAL2BjrH,aAAAA,GAAAA,GAEzBkrH,WAAAA,MAAAA,GAAA,UAAMrhI,CAAN;EACE,WAAOge,KAAK;EAAM,aAAAsjH,QAAYt0B,UAAU,CAAVA,CAAZs0B,EAA4Bl0B,KAASptG,CAATotG,CAA5Bk0B,CAAA;OAAXtjH,CAAP;KAHuB7H,EACTkrH,WAAAA,GAAY,OADHlrH,GAK3B;IAL2B4qH,WAA3B,eAMc9zB,cAAco0B,OAG5B;EAAA,YAAA;;EAKA,UAL4BlrH,aAAAA,GAAAA,GAE1BorH,WAAAA,MAAAA,GAAA,UAAMvhI,CAAN;EACE,WAAOA,CAAP;KAHwBmW,EACVorH,WAAAA,GAAY,QADFprH,GAK5B;IAL4B4qH,WAA5B,eAMc9zB,cAAcs0B,QAK5B;EAAA,YAAA;;EAKA,UAL6BprH,aAAAA,GAAAA,GAE3BqrH,WAAAA,MAAAA,GAAA,UAAMxhI,CAAN;EACE,WAAOyhI,QAAYzhI,CAAZyhI,CAAP;KAHyBtrH,EACXqrH,WAAAA,GAAY,SADDrrH,GAK7B;IAL6B4qH,WAA7B,eAMc9zB,cAAcu0B,SAK5B;EAAA,YAAA;;EAKA,UALiCrrH,aAAAA,GAAAA,GAE/BurH,WAAAA,MAAAA,GAAA,UAAM1hI,CAAN;EACE,WAAO2hI,YAAc3hI,CAAd2hI,CAAP;KAH6BxrH,EACfurH,WAAAA,GAAY,aADGvrH,GAKjC;IALiC4qH,WAAjC,eAMc9zB,cAAcy0B,aAK5B;EAAA,YAAA;;EAKA,UAL8BvrH,aAAAA,GAAAA,GAE5ByrH,WAAAA,MAAAA,GAAA,UAAM5hI,CAAN;EACE,WAAOyhH,SAAazhH,CAAbyhH,CAAP;KAH0BtrG,EACZyrH,WAAAA,GAAY,UADAzrH,GAK9B;IAL8B4qH,WAA9B,eAMc9zB,cAAc20B,UAK5B;EAAA,YAAA;;EAKA,UAL8BzrH,aAAAA,GAAAA,GAE5B0rH,WAAAA,MAAAA,GAAA,UAAM7hI,CAAN;EACE,WAAO8hI,SAAW9hI,CAAX8hI,CAAP;KAH0B3rH,EACZ0rH,WAAAA,GAAY,UADA1rH,GAK9B;IAL8B4qH,WAA9B,eAMc9zB,cAAc40B,UAK5B;EAAA,YAAA;;EAKA,UAL0B1rH,aAAAA,GAAAA,GAExB4rH,WAAAA,MAAAA,GAAA,UAAM/hI,CAAN;EACE,WAAOgiI,OAAShiI,CAATgiI,CAAP;KAHsB7rH,EACR4rH,WAAAA,GAAY,MADJ5rH,GAK1B;IAL0B4qH,WAA1B,eAMc9zB,cAAc80B,MAK5B;EAAA,YAAA;;EAiBA,UAjB6B5rH,aAAAA,GAAAA,GAc3B8rH,WAAAA,MAAAA,GAAA,UAAMjiI,CAAN,EAAiBoD,CAAjB;EACE,4BADeA,KAAiB,IACzBu+G,QAAY3hH,CAAZ2hH,EAAev+G,CAAfu+G,CAAP;KAfyBxrG,EACX8rH,WAAAA,GAAY,SADD9rH,GAiB7B;IAjB6B4qH,WAA7B,CAoBA,4BAAA,CAAoCmB,CAApC;EACE,SAAOA,EAAWt3B,YAAXs3B,EAAP;EAGF,+BAAA,CACI5/B,CADJ,EAEIyI,CAFJ;EAGE,0BADEA,SACKkD,uBACH3L,CADG2L,EACK5B,cAAc7J,gBAAd6J,CAA+B5J,MAA/B4J,GAAwC9J,YAD7C0L,EAEHlD,CAFGkD,EAEY,YAFZA,CAAP;EAKF,uBAAA,CAA8BxD,CAA9B;EAEE,SAAkB,QAAdA,CAAc,GAET03B,wBADS//B,WAAW,UAAUE,YAC9B6/B,CAFS,GAIQ,mBAAf13B,CAAe,GAEjB03B,wBADS//B,WAAWqI,GAAYnI,YAChC6/B,CAFiB,GAGf13B,aAAsBs2B,UAAtBt2B,GACFA,CADEA,GAGF03B,sBAAsB13B,CAAtB03B,CAVT;iBAhBYl1B,cAAcg1B,SCvI5B;EAIE,YAAA,CAAY3/B,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAgB,QAAVusF,CAAU,KAAA,GAAYA,CAA5BvsF,SADF,QAEE9P,EAAK2wG,eAAL3wG,IAAuB,CAAvBA,EACc,QAAVq8F,CAAU,KACZr8F,EAAK2+E,QAAL3+E,GAAgBq8F,EAAO1d,QADX,CADd3+E;EAyBJ,UA/B0BkQ,aAAAA,GAAAA,GAYxBisH,WAAAA,KAAAA,GAAA,UAAKrr
H,CAAL,EAA8ByhG,CAA9B;EACEzhG,QAASsrH,oBAAoBtrH,CAApBsrH,CAATtrH,CACA,IAAIK,IAASvG,KAAKkG,CAALlG,CAAb,CAIA,OAHqB,QAAjBvL,KAAKs/E,QAAY,KACnBxtE,IAASxG,YAAYwG,CAAZxG,EAAoB,CAApBA,EAAuBtL,KAAKs/E,QAA5Bh0E,CADU,GAGdwG,CAAP;KAlBsBjB,EAqBxBisH,WAAAA,mBAAAA,GAAA,UAAmB38G,CAAnB;EACE,WAAOA,CAAP;KAtBsBtP,EAyBxBisH,WAAAA,UAAAA,GAAA;EACE,QAAM9/B,MAAoC1d,UAAUt/E,KAAKs/E,UAAzD;EAAA,QACM09C,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA7BsBnsF,EACjBisH,WAAAA,GAAY,MADKjsH,GA+B1B;IA/B0BshG,MAA1B,eAgCcxK,cAAcm1B,MAuB5B;EAME,YAAA,CAAY9/B,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAgB,QAAVusF,CAAU,KAAA,GAAYA,CAA5BvsF,SADF,QAFS9P,eAAAA,GAAgB,EAAhBA,EAIO,QAAVq8F,CAAU,KACZA,MADY,CAJPr8F,EAQPA,EAAKjJ,KAALiJ,GAA6B,QAAhBq8F,EAAOtlG,KAAS,GAAOiJ,EAAKs8H,aAAZ,GAA4BjgC,EAAOtlG,KARzDiJ;EA0BX,UA9B+BkQ,aAAAA,GAAAA,GAe7BqsH,WAAAA,KAAAA,GAAA,UAAKzrH,CAAL,EAA8ByhG,CAA9B;EACE,QAAMx4G,IAAIqiI,oBAAoBtrH,CAApBsrH,CAAV,CACA,OAAOrxH,UAAUhR,CAAVgR,EAAa1L,KAAKtI,KAAlBgU,CAAP;KAjB2BmF,EAoB7BqsH,WAAAA,mBAAAA,GAAA,UAAmB/8G,CAAnB;EACE,WAAOA,CAAP;KArB2BtP,EAwB7BqsH,WAAAA,UAAAA,GAAA;EACE,QAAMlgC,MAAoCtlG,OAAOsI,KAAKtI,OAAtD;EAAA,QACMslI,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA5B2BnsF,EACtBqsH,WAAAA,GAAY,WADUrsH,GA8B/B;IA9B+BshG,MAA/B,eA+BcxK,cAAcu1B,WA6B5B;EAME,YAAA,CAAYlgC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAgB,QAAVusF,CAAU,KAAA,GAAYA,CAA5BvsF,SADF,CAME,IARO9P,eAAAA,GAAgB,CAAhBA,EAIO,QAAVq8F,CAAU,KACZA,MADY,CAJPr8F,EAQa,QAAhBq8F,EAAOtlG,KAAS,IAAQslG,EAAOtlG,KAAPslG,KAAiBr8F,EAAKs8H,aAAlD,EACE,MAAM,IAAIt4B,mBAAJ,CACF,8BAA4B3H,EAAOtlG,KAAnC,6CADE,CAAN,QAKFiJ,EAAKjJ,KAALiJ,GAA6B,QAAhBq8F,EAAOtlG,KAAS,GAAOiJ,EAAKs8H,aAAZ,GAA4BjgC,EAAOtlG,KAAhEiJ;EAkBJ,UApCyBkQ,aAAAA,GAAAA,GAqBvBgoC,WAAAA,KAAAA,GAAA,UAAKpnC,CAAL,EAA8ByhG,CAA9B;EACE,QAAMx4G,IAAIqiI,oBAAoBtrH,CAApBsrH,CAAV,CACA,OAAOvxH,IAAI9Q,CAAJ8Q,CAAP;KAvBqBqF,EA0BvBgoC,WAAAA,mBAAAA,GAAA,UAAmB14B,CAAnB;EACE,WAAOA,CAAP;KA3BqBtP,EA8BvBgoC,WAAAA,UAAAA,GAAA;EACE,QAAMmkD,MAAoCtlG,OAAOsI,KAAKtI,OAAtD;EAAA,QACMslI,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAlCqBnsF,EAChBgoC,WAAAA,GAAY,KADIhoC,GAoCzB;IApCyBshG,MAAzB,eAqCcxK,cAAc9uD,OA2B5B;EAOE,YAAA,CAAYmkD,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAgB,QAAVusF,CAAU,KAAA,GAAYA,CAA5BvsF,SADF,QAFS9P,eAAAA,GAAgB,CAAhBA,EAIO,QAAVq8F,CAAU,KACZA,MADY,CAJPr8F,EAQPA,EAAKw8H,KAALx8H,GAA6B,QAAhBq8F,EAAOmgC,KAAS,GAAOx8H,EAAKy8H,aAAZ,GAA4BpgC,EAAOmgC,KARzDx8H,EASPA,EAAK08H,WAAL18H,GAAmB+mG,UAAU/mG,EAAKw8H,KAAfz1B,CATZ/mG;EA2BX,UAhCqCkQ,aAAAA,GAAAA,GAiBnCysH,WAAAA,KAAAA,GAAA,UAAK7rH,CAAL,EAA8ByhG,CAA9B;EACE,QAAMx4G,IAAIqiI,oBAAoBtrH,CAApBsrH,CAAV,CACA,OAAOriI,EAAEkO,GAAFlO,CAAM6K,OAAK7K,EAAEyP,OAAFzP,CAAUsF,KAAKq9H,WAAf3iI,CAAL6K,EAAkC,SAAlCA,CAAN7K,CAAP;KAnBiCmW,EAsBnCysH,WAAAA,mBAAAA,GAAA,UAAmBn9G,CAAnB;EACE,WAAOA,CAAP;KAvBiCtP,EA0BnCysH,WAAAA,UAAAA,GAAA;EACE,QAAMtgC,MAAoCmgC,OAAOn9H,KAAKm9H,OAAtD;EAAA,QACMH,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA9BiCnsF,EAC5BysH,WAAAA,GAAY,iBADgBzsH,GAgCrC;IAhCqCshG,MAArC,eAiCcxK,cAAc21B,iBAoB5B;EAME,YAAA,CAAYtgC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAgB,QAAVusF,CAAU,KAAA,GAAYA,CAA5BvsF,SADF,QAFS9P,cAAAA,GAAe,CAAfA,EAIO,QAAVq8F,CAAU,KACZA,MADY,CAJPr8F,EAOPA,EAAKoM,OAALpM,GAAe,IAAI48H,OAAJ,GAAwBr+H,KAPhCyB,EAQPA,EAAK7C,IAAL6C,GAA2B,QAAfq8F,EAAOl/F,IAAQ,GAAO6C,EAAK68H,YAAZ,GAA2BxgC,EAAOl/F,IARtD6C;EA0BX,UA9B6BkQ,aAAAA,GAAAA,GAe3B8rH,WAAAA,KAAA
A,GAAA,UAAKlrH,CAAL,EAA8ByhG,CAA9B;EACE,QAAMx4G,IAAIqiI,oBAAoBtrH,CAApBsrH,CAAV,CACA,OAAO/8H,KAAK+M,OAAL/M,CAAatF,CAAbsF,EAAgBA,KAAKlC,IAArBkC,CAAP;KAjByB6Q,EAoB3B8rH,WAAAA,mBAAAA,GAAA,UAAmBx8G,CAAnB;EACE,WAAOA,CAAP;KArByBtP,EAwB3B8rH,WAAAA,UAAAA,GAAA;EACE,QAAM3/B,MAAoCl/F,MAAMkC,KAAKlC,MAArD;EAAA,QACMk/H,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA5ByBnsF,EACpB8rH,WAAAA,GAAY,SADQ9rH,GA8B7B;IA9B6BshG,MAA7B,eA+BcxK,cAAcg1B,WC3R5B;EAAA,YAAA;;EAEA,UAF0C9rH,aAAAA,GAAAA,IAE1C;IAF0Ck2F,cAAclK,aAAxD;EAAA;EAmCE,YAAA,CAAYG,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,SADF;EAAA,QAGQgtH,IAAe,QAAVzgC,CAAU,IAAqB,QAAbA,EAAOygC,EAAf,GAA4B,GAA5B,GAAmCzgC,EAAOygC,EAHjE;EAAA,QAIQC,IAAe,QAAV1gC,CAAU,IAAqB,QAAbA,EAAO0gC,EAAf,GAA4B,GAA5B,GAAmC1gC,EAAO0gC,EAJjE,QAKE/8H,EAAKg9H,KAALh9H,GAAoB,MAAP88H,CAAb98H,EACAA,EAAKi9H,KAALj9H,GAAoB,MAAP+8H,CADb/8H,EAGAA,EAAK88H,EAAL98H,GAAU+mG,UAAU+1B,CAAV/1B,CAHV/mG,EAIAA,EAAK+8H,EAAL/8H,GAAU+mG,UAAUg2B,CAAVh2B,CAJV/mG;EAkCJ,UA9C0BkQ,aAAAA,GAAAA,GAuBxBgtH,WAAAA,MAAAA,GAAA,UAAMnjI,CAAN;EAAA,gBAAA,CACE,OAAOge,KAAK;EACV,UAAIolH,IAAyBv3G,OAAO,EAAPA,CAA7B,CAQA,OAPI5lB,EAAKg9H,KAALh9H,KACFm9H,IAAiBz1H,IAAIy1H,CAAJz1H,EAAoBvN,MAAIysG,IAAQ5mG,EAAK88H,EAAbl2B,EAAiBl8F,IAAI3Q,CAAJ2Q,CAAjBk8F,CAAJzsG,CAApBuN,CADf1H,GAGAA,EAAKi9H,KAALj9H,KACFm9H,IACIz1H,IAAIy1H,CAAJz1H,EAAoBvN,MAAIysG,IAAQ5mG,EAAK+8H,EAAbn2B,EAAiBwT,SAASrgH,CAATqgH,CAAjBxT,CAAJzsG,CAApBuN,CAFF1H,CAHAA,EAOGm9H,EAAep/D,QAAfo/D,EAAP;OATKplH,CAAP;KAxBsB7H,EAqCxBgtH,WAAAA,UAAAA,GAAA;EACE,aAAQJ,IAAMz9H,KAAKy9H,EAALz9H,CAAQK,QAARL,GAAmB,CAAnBA,GAAuB09H,IAAM19H,KAAK09H,EAAL19H,CAAQK,QAARL,GAAmB,CAAnBA,GAA3C;KAtCsB6Q,EAyCjBgtH,YAAAA,GAAP,UACI9gC,CADJ,EAEIC,CAFJ;EAGE,WAAO,IAAID,CAAJ,GAAS0gC,IAAIzgC,EAAOygC,IAAcC,IAAI1gC,EAAO0gC,IAA7C,CAAP;KA5CsB7sH,EACjBgtH,WAAAA,GAAY,MADKhtH,GA8C1B;IA9C0BktH,YA5B1B,CAoFA,WAAA,CAAmB/gC,CAAnB;EACE,SAAO,IAAI6gC,IAAJ,GAAUJ,IAAc,QAAVzgC,CAAU,GAAOA,EAAOygC,EAAd,GAAmB,MAAMC,IAAI,GAArD,CAAP;EAUF,YAAA,CAAmB1gC,CAAnB;EACE,SAAO,IAAI6gC,IAAJ,GAAUH,IAAc,QAAV1gC,CAAU,GAAOA,EAAO0gC,EAAd,GAAmB,MAAMD,IAAI,GAArD,CAAP;iBArBY91B,cAAck2B,MA4B5B,IAAaG,+CAEPC,MAAQ,QAFd,CAKA,6BAAA,CAAqCx1B,CAArC;EAEE,SAAOC,qBAAqBD,CAArBC,CAAP;EAGF,gCAAA,CACI1L,CADJ,EAEIyI,CAFJ;EAGE,0BADEA,SACKkD,uBACH3L,CADG2L,EACK5B,cAAc7J,gBAAd6J,CAA+B5J,MAA/B4J,GAAwC9J,YAD7C0L,EAEHlD,CAFGkD,EAEY,aAFZA,CAAP;EAKF,wBAAA,CAA+BxD,CAA/B;EAGE,SAAkB,QAAdA,CAAc,GACT,IADS,GAGQ,mBAAfA,CAAe,GAKjB+4B,yBADSphC,WAHEqI,KAAc64B,0CAAd74B,GACd64B,2CAA2C74B,CAA3C64B,CADc74B,GAEdA,GACuBnI,YACpBkhC,CALiB,GAMf/4B,aAAsB44B,WAAtB54B,GACFA,CADEA,GAGF+4B,uBAAuB/4B,CAAvB+4B,CAZT;2BC1HEn6H,GAAwBpH,GAAW4B;EACrC,MAAqB,mBAAVwF,CAAX,EACE,OAAO2zH,aAAa3zH,CAAb2zH,EAAoB/6H,CAApB+6H,CAAP,CAEA,IAAI3zH,EAAM3J,MAAN2J,KAAiBpH,CAArB,EACE,MAAM,IAAI+nG,UAAJ,CACF,SAAOnmG,CAAP,kCAAA,GAA2C5B,CAA3C,0BAAA,GACGoH,EAAM3J,MADT,eADE,CAAN,CAIF,KAAK,IAAIW,IAAI,CAAb,EAAgBA,IAAI4B,CAApB,IAAyB5B,CAAzB,EAA4B;EAC1B,QAAMojI,IAAcp6H,EAAMhJ,CAANgJ,CAApB,CACA,KAAKy4F,UAAU2hC,CAAV3hC,CAAL,EACE,MAAM,IAAIkI,UAAJ,CACF,SAAOnmG,CAAP,kCAAA,GACI5B,CADJ,0BAAA,GAEGu8B,KAAKE,SAALF,CAAen1B,CAAfm1B,CAFH,qCAAA,GAGGilG,CAJD,CAAN;EAOJ,UAAOp6H,CAAP;EAYJ,0BAAA,CACIq6H,CADJ,EACyBjwH,CADzB,EAC6CS,CAD7C,EAEItL,CAFJ,EAEoBkK,CAFpB;EAGE,0BADkBA,QACC,QAAf4wH,CAAe,GACVA,CADU,IAMjBC,IADc,WAAZzvH,CAAY,GACCwvH,CADD,GAGCA,KALSjwH,KAAcA,IAAa,MAAMX,IAAW,EAKrD4wH,IAAkC,CAFjDC,EAIK9jI,KAAKkC,KAALlC,EAAY8jI,IAAe/6H,CAAf+6H,GAAwB,KAAK/6H,CAAzC/I,CAVY,CAAnB,CAGA,IACI8jI,CADJ;EAUF,sBAAA,CACIC,CADJ,EACqBC,CADrB,EACyCC,CADzC,EAEI5vH,CAFJ;EAGE,MAAe,QAAX0vH,CAAJ,EACE,OAAO,IAAP,CAGF,IAAgB,YAA
Z1vH,CAAJ,EACE0vH,IAAUA,IAAUC,CAAVD,GAAuB3jI,OAAK6jI,IAAaD,GAAY,EAA9B5jI,CAAjC2jI,CADF,KAEO;EAAA,QAAgB,WAAZ1vH,CAAJ,EAGL,MAAM,IAAI81F,UAAJ,CAAe,6BAA2B91F,CAA3B,MAAf,CAAN,CAFA0vH,KAAoBC,CAApBD;EAIF,UAAOA,CAAP;kCC/CE5jI,GAAW6S;EAEb,SAAOmL,KAAK;EAEV,WADA8yF,gBAAgBj+F,CAAhBi+F,GACmB,oBAAfj+F,CAAe,GACVkxH,UAAc/jI,CAAd+jI,GAAkB,GAAG,GAAG,GAAG,EAA3BA,CADU,GAGV/jI,CAHT;KAFKge,CAAP;EA0BF,wBAAA,CACIhe,CADJ,EACegkI,CADf,EAC+BnwH,CAD/B,EAC6ClP,CAD7C,EAC0DuP,CAD1D,EAEIrB,CAFJ,EAE6BsB,CAF7B;EAGE,0BAF2CxP,yBAAauP,+BAC7BC,QACpB6J,KAAK;EAMV,QALkB,QAAdnL,CAAc,KAChBA,IAAag+F,iBADG,GAGlBC,gBAAgBj+F,CAAhBi+F,CAHkB,EAKK,MAAnB9wG,EAAEuB,KAAFvB,CAAQN,MAAZ,EACE,MAAM,IAAIsqG,UAAJ,CACF,iEACGhqG,EAAEuB,KAAFvB,CAAQN,MADX,cADE,CAAN,CAIF,IAA4B,MAAxBskI,EAAOziI,KAAPyiI,CAAatkI,MAAjB,EACE,MAAM,IAAIsqG,UAAJ,CACF,mEACGg6B,EAAOziI,KAAPyiI,CAAatkI,MADhB,aADE,CAAN,CAIF,IAAY,QAARmU,CAAQ,IAA8B,MAAtBA,EAAKtS,KAALsS,CAAWnU,MAA/B,EACE,MAAM,IAAIsqG,UAAJ,CACF,iEACGg6B,EAAOziI,KAAPyiI,CAAatkI,MADhB,aADE,CAAN,CAQF,IAHmB,oBAAfmT,CAAe,KACjB7S,IAAI+jI,UAAc/jI,CAAd+jI,GAAkB,GAAG,GAAG,EAAxBA,CADa,GAGH,aAAZ7vH,CAAJ,EACE,MAAM,IAAI+1F,mBAAJ,CACF,+EADE,CAAN,CAIF,IAAIvzF,IAAYutH,OACZjkI,CADYikI,EACcD,CADdC,EACkCt/H,CADlCs/H,EAEA,WAAZ/vH,CAAY,GAAS,MAAT,GAAkB,OAFlB+vH,EAE2B,KAF3BA,EAEkC9vH,CAFlC8vH,CAAhB,CAMA,OAHY,QAARpwH,CAAQ,KACV6C,IAAIwtH,QAAUxtH,CAAVwtH,EAAarwH,CAAbqwH,CADM,GAGLxtH,CAAP;KApCKsH,CAAP;EAoDF,wBAAA,CAoCIhe,CApCJ,EAoCegkI,CApCf,EAoC+BnwH,CApC/B,EAoC6ClP,CApC7C,EAqCIuP,CArCJ,EAqCuBrB,CArCvB,EAsCIsB,CAtCJ;EAuCE,0BAH2CxP,KAAW,GAAG,sBACvDuP,cAEK8J,KAAK;EAKV,QAJkB,QAAdnL,CAAc,KAChBA,IAAag+F,iBADG,GAGlBC,gBAAgBj+F,CAAhBi+F,CAHkB,EAIH,MAAX9wG,EAAE0E,IAAS,IAAgB,MAAX1E,EAAE0E,IAAtB,EACE,MAAM,IAAIslG,UAAJ,CACF,qEACGhqG,EAAE0E,IADL,MADE,CAAN,CAIF,IAAoB,MAAhBs/H,EAAOt/H,IAAS,IAAqB,MAAhBs/H,EAAOt/H,IAAhC,EACE,MAAM,IAAIslG,UAAJ,CACF,sEACGhqG,EAAE0E,IADL,MADE,CAAN,CAIF,IAAIgS,IAAIytH,sBAAsBnkI,CAAtBmkI,EAAyBtxH,CAAzBsxH,CAAR,CACA,IAAgB,aAAZjwH,CAAJ,EACE,MAAM,IAAI+1F,mBAAJ,CACF,+EADE,CAAN,CAcF,OAVAvzF,IAAI0tH,OACA1tH,CADA0tH,EAC0BJ,CAD1BI,EAEAz/H,CAFAy/H,EAEyC,WAAZlwH,CAAY,GAAS,MAAT,GAAkB,OAF3DkwH,EAGA,MAHAA,EAGQjwH,CAHRiwH,CAAJ1tH,EAIY,QAAR7C,CAAQ,KACV6C,IAAIwtH,QAAUxtH,CAAVwtH,EAAarwH,CAAbqwH,CADM,CAJZxtH,EAOmB,oBAAf7D,CAAe,KACjB6D,IAAIqtH,UAAcrtH,CAAdqtH,GAAkB,GAAG,GAAG,GAAG,EAA3BA,CADa,CAPnBrtH,EAUOA,CAAP;KA/BKsH,CAAP;EAkJF;EAwBE,YAAA,CAAYtZ,CAAZ,EAA0B49F,CAA1B;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,CAIE,IATQ9P,MAAAA,GAAsB,IAAtBA,EAEDA,4BAAAA,GAAoD,cAFnDA,EAGDA,0BAAAA,GAAkD,OAHjDA,EAORo+H,EAASC,YAATD,CAAsB/hC,CAAtB+hC,CAPQp+H,EAQRA,EAAKvB,IAALuB,GAAYvB,CARJuB,EASU,MAAdA,EAAKvB,IAAS,IAAmB,MAAduB,EAAKvB,IAA5B,EACE,MAAM,IAAIulG,mBAAJ,CACF,mDAAiDhkG,EAAKvB,IAAtD,8BADE,CAAN,CAsBF,IAlBAuB,EAAK69H,UAAL79H,GAAkBs+H,eAAejiC,EAAOwhC,UAAtBS,EAAkC7/H,CAAlC6/H,EAAwC,YAAxCA,CAAlBt+H,EACAA,EAAKtB,OAALsB,GAAes+H,eACO,QAAlBjiC,EAAO39F,OAAW,GAAO,CAAP,GAAW29F,EAAO39F,OADzB4/H,EACkC7/H,CADlC6/H,EACwC,SADxCA,CADft+H,EAGAA,EAAKiO,OAALjO,GAAiC,QAAlBq8F,EAAOpuF,OAAW,GAAO,OAAP,GAAiBouF,EAAOpuF,OAHzDjO,EAIAu+H,iBAAiBv+H,EAAKiO,OAAtBswH,CAJAv+H,EAKAA,EAAK4M,UAAL5M,GACyB,QAArBq8F,EAAOzvF,UAAc,GAAO,cAAP,GAAwByvF,EAAOzvF,UANxD5M,EAOA6qG,gBAAgB7qG,EAAK4M,UAArBi+F,CAPA7qG,EAQAA,EAAKi8H,UAALj8H,GAAkBw+H,cAAcniC,EAAO4/B,UAArBuC,CARlBx+H,EASAA,EAAKy+H,OAALz+H,GAAiC,QAAlBq8F,EAAOoiC,OAAW,IAAcpiC,EAAOoiC,OATtDz+H,EAUAA,EAAK0+H,eAAL1+H,GACI2+H,eAAetiC,EAAOqiC,eAAPriC,IAA0Br8F,EAAK4+H,wBAA9CD,CAXJ3+H,EAYAA,EAAK6+H,cAAL7+H,GAAsB8+H,cAAcziC,EAAOwiC,cAArBC,CAZtB9+H,EAaAA,EAAK++H,eAAL/+H,GAAuBg/H,eAAe3iC,EAAO0iC,eAAtBC,CAbvBh/H,EAcAA,EAAKywG,mBAALzwG,GAA2Bg/H,eAAe3iC,EAAOoU,mBAAt
BuuB,CAd3Bh/H,EAeAA,EAAKkO,YAALlO,GAAoBs+H,eACO,QAAvBjiC,EAAOnuF,YAAgB,GAAO,CAAP,GAAWmuF,EAAOnuF,YADzBowH,EACuC7/H,CADvC6/H,EAEhB,cAFgBA,CAfpBt+H,EAkBkB,MAAdA,EAAKvB,IAAS,IACbvD,MAAMC,OAAND,CAAc8E,EAAKkO,YAAnBhT,CADa,IAE8B,MAA1C8E,EAAKkO,YAALlO,CAA+BvG,MAFrC,EAGE,MAAM,IAAIsqG,UAAJ,CACF,mGAEGxrE,KAAKE,SAALF,CAAev4B,EAAKkO,YAApBqqB,CAHD,CAAN,CAKF,IAAkB,MAAdv4B,EAAKvB,IAAT,EACE,IAAiC,mBAAtBuB,EAAKkO,YAAhB,EACElO,EAAKkO,YAALlO,IAAqBA,EAAKkO,cAAclO,EAAKkO,aAA7ClO,CADF,KAEO,IAAiC,MAA7BA,EAAKkO,YAALlO,CAAkBvG,MAAtB,EACL,MAAM,IAAIsqG,UAAJ,CACF,4FAC6BxrE,KAAKE,SAALF,CAAev4B,EAAKkO,YAApBqqB,CAF3B,CAAN;EAqCR,UApGuCroB,aAAAA,GAAAA,GAsEpBkuH,cAAAA,GAAjB,UAA8B/hC,CAA9B;EAIE,QAFAgoB,SACI,gBAAgBhoB,CADpBgoB,EAC4B,yCAD5BA,GAEiC,mBAAtBhoB,EAAOwhC,UAAe,KAC5BoB,wBACG5iC,EAAOwhC,UADVoB,EACsB,QADtBA,EACgC,CADhCA,EACmC,CADnCA,CADL,EAGE,MAAM,IAAIl7B,UAAJ,CACF,kGAC+BxrE,KAAKE,SAALF,CAAe8jE,EAAOwhC,UAAtBtlG,CAD/B,MADE,CAAN;KA7EiCroB,EAkFrCkuH,WAAAA,UAAAA,GAAA;EACE,QAAM/hC,MACJwhC,YAAYx+H,KAAKw+H,YACjBn/H,SAASW,KAAKX,SACduP,SAAS5O,KAAK4O,SACdrB,YAAYvN,KAAKuN,YACjBsB,cAAc7O,KAAK6O,cACnB+tH,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GACZT,SAASp/H,KAAKo/H,SACdC,iBAAiBS,qBAAqB9/H,KAAKq/H,eAA1BS,GACjBJ,iBAAiBK,qBAAqB//H,KAAK0/H,eAA1BK,GACjB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrBP,gBAAgBQ,oBAAoBhgI,KAAKw/H,cAAzBQ,GAXlB;EAAA,QAaMhD,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAbnB,CAeA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAlGmCnsF,GAoGvC;IApGuCshG,MAAvC;EAAA;EAwHE,YAAA,CAAY/yG,CAAZ,EAA0B49F,CAA1B;EAAA,YACEvsF,MAAAA,KAAAA,EAAMrR,CAANqR,EAAYusF,CAAZvsF,SADF,QAXU9P,QAAAA,GAAwB,IAAxBA,EAaRs/H,EAAKjB,YAALiB,CAAkBjjC,CAAlBijC,CAbQt/H,EAcRA,EAAKu/H,OAALv/H,GAAeq8F,EAAOkjC,OAddv/H,EAeRA,EAAKw/H,iBAALx/H,GAAyB2+H,eACrBtiC,EAAOmjC,iBAAPnjC,IAA4Br8F,EAAKy/H,0BADZd,CAfjB3+H,EAiBRA,EAAK0/H,gBAAL1/H,GAAwB8+H,cAAcziC,EAAOqjC,gBAArBZ,CAjBhB9+H,EAkBRA,EAAK2/H,iBAAL3/H,GAAyBg/H,eAAe3iC,EAAOsjC,iBAAtBX,CAlBjBh/H;EAuHZ,UA1HmCkQ,aAAAA,GAAAA,GAwBjCovH,WAAAA,MAAAA,GAAA,UAAM9/G,CAAN;EACEA,QAAa06G,mBAAmB16G,CAAnB06G,CAAb16G,CACA,IAAMogH,IACkB,oBAApBvgI,KAAKuN,UAAe,GAAkB,CAAlB,GAAsB4S,EAAW/lB,MAAX+lB,GAAoB,CADlE,CAEA,IAA+B,QAA3BA,EAAWogH,CAAXpgH,CAAJ,EACE,MAAM,IAAIukF,UAAJ,CACF,iEACSvkF,EAAWogH,CAAXpgH,CAFP,CAAN,CAIF,KAAA;EAAA,QAAMqgH,IAAWrgH,EAAWogH,CAAXpgH,CAAjB;EAAA,QAEMsgH,IAAczgI,KAAKw+H,UAALx+H,CAAgBmH,MAAhBnH,EAAwBwgI,GAAUxgI,KAAKkgI,QAAvClgI,CAFpB,CAIAA,KAAK0+H,MAAL1+H,GAAcA,KAAK0gI,SAAL1gI,CACV,QADUA,EACAygI,CADAzgI,EACa,IADbA,EACmBA,KAAKmgI,iBADxBngI,EAEVA,KAAKsgI,iBAFKtgI,GAEc,CAFdA,EAEoBA,KAAKqgI,gBAFzBrgI,CAAdA,EAGIA,KAAKo/H,OAALp/H,KACFA,KAAKuO,IAALvO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,GACCA,KAAKkgI,QADNlgI,EACgB,IADhBA,EACsBA,KAAKq/H,eAD3Br/H,EAERA,KAAK0/H,eAFG1/H,GAEc,CAFdA,EAEoBA,KAAKw/H,cAFzBx/H,CADVA,CAHJA,EASAA,KAAKqxG,SAALrxG,MAAmB8vG,MAAM9vG,KAAKZ,IAALY,GAAY,GAAGghB,eAAO7H,EAAConH,CAADpnH,IAAeqnH,IAAtBx/G,IATxChhB,EAUAA,KAAKyyG,KAALzyG,IAAa,CAVbA;KArC+B6Q,EAkDjCovH,WAAAA,KAAAA,GAAA,UAAKxuH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,UAAI7G,CAAJ,CADAJ,IAASsrH,oBAAoBtrH,CAApBsrH,CAATtrH,CAEA,IAAMkvH,IAAyB,QAAbhgI,EAAK4N,IAAQ,GAAO,IAAP,GAAc5N,EAAK4N,IAAL5N,CAAU8E,IAAV9E,EAA7C,CAEA,IAAkB,MAAdA,EAAKvB,IAAT,EACEyS,IAAU+uH,eACNnvH,CADMmvH,EACEjgI,EAAK+9H,MAAL/9H,CAAY8E,IAAZ9E,EADFigI,EACsBD,CADtBC,EACiCjgI,EAAKtB,OAALsB,CAAa,CAAbA,CADjCigI,EAENjgI,EAAKiO,OAFCgyH,EAEQjgI,EAAK4M,UAFbqzH,EAEyBjgI,EAAKkO,YAALlO,CAAkB,CAAlBA,CAFzBigI,CAAV/uH,CADF,KAIO,IAAkB,MAAdlR,EAAKvB,IAAT,EAELyS,IAAUgvH,eACNpvH,CADMovH,EACElgI,EAAK+9H,MAAL/9H,CAAY8E,IAAZ9E,EADFkgI,EACsBF,CADtBE,EACiClgI,EAAKtB,OADtCwhI,EAC+ClgI,EAAKi
O,OADpDiyH,EAENlgI,EAAK4M,UAFCszH,EAEWlgI,EAAKkO,YAFhBgyH,CAAVhvH,CAFK,KAKA,IAAkB,MAAdlR,EAAKvB,IAAT,EACL,MAAM,IAAIulG,mBAAJ,CAAwB,wCAAxB,CAAN,CAMF,OAHuB,QAAnBhkG,EAAKi8H,UAAc,KACrB/qH,IAAUlR,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsBkR,CAAtBlR,CADW,GAGhBkR,CAAP;OArBK6G,CAAP;KAnD+B7H,EA4EjCovH,WAAAA,mBAAAA,GAAA,UAAmB9/G,CAAnB;EACEA,QAAa06G,mBAAmB16G,CAAnB06G,CAAb16G,CAKA,KAJA,IAAM2gH,MAAN,EACMC,IAA6B,mBAApB/gI,KAAKuN,UAAe,GAC/B4S,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,EAAoBA,EAAW/lB,MAAX+lB,GAAoB,CAAxCA,CAD+B,GAE/BA,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,CAHJ,EAISplB,IAAI,CAAb,EAAgBA,IAAIgmI,EAAM3mI,MAA1B,IAAoCW,CAApC,EAAuC;EACrC,UAAM68C,IAASopF,iBACXD,EAAMhmI,CAANgmI,CADWC,EACDhhI,KAAKw+H,UAALx+H,CAAgBjF,CAAhBiF,CADCghI,EACmBhhI,KAAK4O,OADxBoyH,EACiChhI,KAAKX,OAALW,CAAajF,CAAbiF,CADjCghI,EAEkB,mBAAtBhhI,KAAK6O,YAAiB,GAAW7O,KAAK6O,YAAhB,GACW7O,KAAK6O,YAAL7O,CAAkBjF,CAAlBiF,CAH7BghI,CAAf,CAIAF,EAAS9kI,IAAT8kI,CAAclpF,CAAdkpF;EAGF,SAAIjzH,KAAesS,EAAW,CAAXA,EAAnB,CAQA,OAPwB,mBAApBngB,KAAKuN,UAAe,IACtBM,IAAcA,EAAY1G,MAAZ0G,CAAmBizH,CAAnBjzH,GACF7R,KAAKgE,KAAKkgI,QAFA,IAItBryH,EAAY7R,IAAZ6R,CAAiB7N,KAAKkgI,OAAtBryH,GACAA,IAAcA,EAAY1G,MAAZ0G,CAAmBizH,CAAnBjzH,CALQ,GAOjBA,CAAP;KAlG+BgD,EAqGjCovH,WAAAA,UAAAA,GAAA;EACE,QAAMjjC,MACJkjC,SAASlgI,KAAKkgI,SACdC,mBAAmBL,qBAAqB9/H,KAAKmgI,iBAA1BL,GACnBQ,mBAAmBP,qBAAqB//H,KAAKsgI,iBAA1BP,GACnBM,kBAAkBL,oBAAoBhgI,KAAKqgI,gBAAzBL,GAJpB;EAAA,QAMMhD,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CANnB,CAQA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA9G+BnsF,EAiHhBovH,cAAAA,GAAjB,UAA8BjjC,CAA9B;EAEE,UAAM,aAAaA,MAAqC,mBAAnBA,EAAOkjC,WACxCljC,EAAOkjC,OAAPljC,GAAiB,CADrB,EAEE,MAAM,IAAI0H,UAAJ,CACF,4EACWxrE,KAAKE,SAALF,CAAe8jE,EAAOkjC,OAAtBhnG,CAFT,CAAN;KArH6BroB,GA0HnC;IA1HmCkuH,SA1GnC;EAAA;EAyPE,YAAA,CAAY/hC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAM,CAANA,EAASusF,CAATvsF,SADF,QAEEwwH,EAAOjC,YAAPiC,CAAoBjkC,CAApBikC;EAkBJ,UAtB4BpwH,aAAAA,GAAAA,GAO1BowH,WAAAA,UAAAA,GAAA;EACE,QAAMjkC,IAASvsF,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAAf,CAEA,cADOusF,EAAa59F,MACb49F,CAAP;KAVwBnsF,EAaTowH,cAAAA,GAAjB,UAA8BjkC,CAA9B;EAEE,QAAkC,mBAAtBA,EAAOwhC,UAAe,KAC7BoB,wBACG5iC,EAAOwhC,UADVoB,EACsB,QADtBA,EACgC,CADhCA,EACmC,CADnCA,CADL,EAGE,MAAM,IAAIl7B,UAAJ,CACF,gGAC+BxrE,KAAKE,SAALF,CAAe8jE,EAAOwhC,UAAtBtlG,CAD/B,MADE,CAAN;KAlBsBroB,EACnBowH,WAAAA,GAAY,QADOpwH,GAsB5B;IAtB4BovH,KAvP5B,eA8Qct4B,cAAcs5B,QAmC5B;EAIE,YAAA,CAAYjkC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,CAIE,IAFA9P,EAAK0wG,SAAL1wG,IAAkB,IAAIugI,SAAJ,GAAepxB,MAAM,GAArB,EAAlBnvG,EAEqB,WAAjBA,EAAKiO,OAAY,IAA2B,YAAjBjO,EAAKiO,OAApC,EACE,MAAM,IAAI81F,UAAJ,CACF,yGAC0C/jG,EAAKiO,OAF7C,CAAN;EAwIN,UAjJqCiC,aAAAA,GAAAA,GAenCswH,WAAAA,MAAAA,GAAA,UAAMhhH,CAAN;EAGE,QAA0B,OAF1BA,IAAa06G,mBAAmB16G,CAAnB06G,GAEEzgI,MAAf,EACE,MAAM,IAAIsqG,UAAJ,CACF,qDACAxrE,KAAKE,SAALF,CAAe/Y,CAAf+Y,CAFE,CAAN,CAKF,IAAMqnG,IACkB,oBAApBvgI,KAAKuN,UAAe,GAAkB,CAAlB,GAAsB4S,EAAW/lB,MAAX+lB,GAAoB,CADlE,CAEA,IAA+B,QAA3BA,EAAWogH,CAAXpgH,CAAJ,EACE,MAAM,IAAIukF,UAAJ,CACF,sEADE,CAAN,CAIF,KAAA;EAAA,QAAM87B,IAAWrgH,EAAWogH,CAAXpgH,CAAjB;EAAA,QACMsgH,IAAczgI,KAAKw+H,UAALx+H,CAAgBmH,MAAhBnH,EAAwBA,KAAKkgI,SAASM,EAAtCxgI,CADpB,CAGAA,KAAK0+H,MAAL1+H,GAAcA,KAAK0gI,SAAL1gI,CACV,QADUA,EACAygI,CADAzgI,EACa,SADbA,EACwBA,KAAKmgI,iBAD7BngI,EAEVA,KAAKsgI,iBAFKtgI,GAEc,CAFdA,EAEoBA,KAAKqgI,gBAFzBrgI,CAAdA,EAGIA,KAAKo/H,OAALp/H,KACFA,KAAKuO,IAALvO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,GACCA,KAAKkgI,QADNlgI,EACgB,SADhBA,EAC2BA,KAAKq/H,eADhCr/H,EAERA,KAAK0/H,eAFG1/H,GAEc,CAFdA,EAEoBA,KAAKw/H,cAFzBx/H,CADVA,CAHJA,EAUAA,KAAKqxG,SAALrxG,IACK,IAAIkhI,SAAJ,GAAepxB,MAAM,GAAG9uF,eAAO7H,EAAConH,CAADpn
H,IAAeqnH,IAAtBx/G,GAAxB,EAXLhhB,EAYAA,KAAKyyG,KAALzyG,IAAa,CAZbA;KAlCiC6Q,EAiDnCswH,WAAAA,KAAAA,GAAA,UAAK1vH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOie,KAAS;EACd,UAAIx/G,IAAQorH,oBAAoBtrH,CAApBsrH,CAAZ,CACA,IAA2B,MAAvBprH,EAAM1V,KAAN0V,CAAYvX,MAAhB,EACE,MAAM,IAAIsqG,UAAJ,CACF,6FAC6B/yF,EAAM1V,KAAN0V,CAAYvX,MAFvC,CAAN,CAKF,IAGIgnI,CAHJ;EAAA,UAIIC,CAJJ;EAAA,UAAMlhH,IAAaxO,EAAM1V,KAAzB;EAAA,UACMswB,IAAYpM,EAAW,CAAXA,CADlB,CAKwB,oBAApBxf,EAAK4M,UAAe,IACtB6zH,IAAQ,CAARA,EACAC,IAAQ,CAFc,KAItBD,IAAQ,CAARA,EACAC,IAAQ,CALc,EAQxB,IAAMjnG,IAASja,EAAWihH,CAAXjhH,CAAf;EAAA,UACMka,IAAQla,EAAWkhH,CAAXlhH,CADd;EAAA,UAEMmhH,IAAU3gI,EAAK69H,UAAL79H,CAAgB,CAAhBA,CAFhB;EAAA,UAGM4gI,IAAU5gI,EAAK69H,UAAL79H,CAAgB,CAAhBA,CAHhB;EAAA,UAIM6gI,IAAU7gI,EAAKtB,OAALsB,CAAa,CAAbA,CAJhB;EAAA,UAKM8gI,IAAU9gI,EAAKtB,OAALsB,CAAa,CAAbA,CALhB;EAAA,UAeMkN,KACD0e,GARam1G,aAAatnG,CAAbsnG,EAAqBF,CAArBE,EAA8BJ,CAA9BI,EAAuC/gI,EAAKiO,OAA5C8yH,GACDA,aAAarnG,CAAbqnG,EAAoBD,CAApBC,EAA6BH,CAA7BG,EAAsC/gI,EAAKiO,OAA3C8yH,GAOoB/gI,EAAKu/H,QAhB1C,CAkBwB,mBAApBv/H,EAAK4M,UAAe,KACtBoE,IAAQ8sH,UAAc9sH,CAAd8sH,GAAsB,GAAG,GAAG,GAAG,EAA/BA,CADc,EAGxB,IAAI5sH,IAAU8vH,gBACVhwH,CADUgwH,EACShhI,EAAK+9H,MAAL/9H,CAAY8E,IAAZ9E,EADTghI,EACyC9zH,CADzC8zH,EAEVhhI,EAAKtB,OAFKsiI,EAEwBhhI,EAAKiO,OAF7B+yH,CAAd,CAcA,OAXwB,mBAApBhhI,EAAK4M,UAAe,KACtBsE,IAAU4sH,UAAc5sH,CAAd4sH,GAAwB,GAAG,GAAG,GAAG,EAAjCA,CADY,GAIP,QAAb99H,EAAK4N,IAAQ,KACfsD,IACI+sH,QAAU/sH,CAAV+sH,EAAmBj+H,EAAK4N,IAAL5N,CAAU8E,IAAV9E,EAAnBi+H,EAAqCj+H,EAAK4M,UAA1CqxH,CAFW,CAJO,EAQD,QAAnBj+H,EAAKi8H,UAAc,KACrB/qH,IAAUlR,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsBkR,CAAtBlR,CADW,CARC,EAWjBkR,CAAP;OAxDKs/G,CAAP;KAlDiCtgH,EA8GnCswH,WAAAA,mBAAAA,GAAA,UAAmBhhH,CAAnB;EAEE,QAEIogH,CAFJ;EAAA,QAGIqB,CAHJ;EAAA,QAIIC,CAJJ;EAAA,QAAMh0H,KADNsS,IAAa06G,mBAAmB16G,CAAnB06G,GACkBh9H,OAA/B,CAKwB,oBAApBmC,KAAKuN,UAAe,IACtBgzH,IAAc,CAAdA,EACAqB,IAAa,CADbrB,EAEAsB,IAAY,CAHU,KAKtBtB,IAAc,CAAdA,EACAqB,IAAa,CADbrB,EAEAsB,IAAY,CAPU,EAUxB,IAAMP,IAAUthI,KAAKw+H,UAALx+H,CAAgB,CAAhBA,CAAhB;EAAA,QACMuhI,IAAUvhI,KAAKw+H,UAALx+H,CAAgB,CAAhBA,CADhB;EAAA,QAEMwhI,IAAUxhI,KAAKX,OAALW,CAAa,CAAbA,CAFhB;EAAA,QAGMyhI,IAAUzhI,KAAKX,OAALW,CAAa,CAAbA,CAHhB,CAUA,OALA6N,EAAY0yH,CAAZ1yH,IAA2B7N,KAAKkgI,OAAhCryH,EACAA,EAAY+zH,CAAZ/zH,IACI6zH,aAAa7zH,EAAY+zH,CAAZ/zH,CAAb6zH,EAAsCF,CAAtCE,EAA+CJ,CAA/CI,EAAwD1hI,KAAK4O,OAA7D8yH,CAFJ7zH,EAGAA,EAAYg0H,CAAZh0H,IACI6zH,aAAa7zH,EAAYg0H,CAAZh0H,CAAb6zH,EAAqCD,CAArCC,EAA8CH,CAA9CG,EAAuD1hI,KAAK4O,OAA5D8yH,CAJJ7zH,EAKOA,CAAP;KAzIiCgD,EA4InCswH,WAAAA,UAAAA,GAAA;EACE,QAAMnkC,IAASvsF,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAAf,CAEA,cADOusF,EAAqBnuF,cACrBmuF,CAAP;KA/IiCnsF,EAC5BswH,WAAAA,GAAY,iBADgBtwH,GAiJrC;IAjJqCowH,OAArC,eAkJct5B,cAAcw5B,iBA2C5B;EAoBE,YAAA,CAAY/hI,CAAZ,EAA0B49F,CAA1B;EAAA,YACEvsF,MAAAA,KAAAA,EAAMrR,CAANqR,EAAYusF,CAAZvsF,SADF,CAGE,IAXO9P,+BAAAA,GACL,eADKA,EAEAA,+BAAAA,GACL,eAHKA,EAKCA,iBAAAA,GAAiC,IALlCA,EAMCA,iBAAAA,GAAiC,IANlCA,EAWe,QAAlBq8F,EAAOkjC,OAAX,EACE,MAAM,IAAIx7B,UAAJ,CACF,qFADE,CAAN,CAIF,IAAgC,QAA5B1H,EAAOmjC,iBAAqB,IAAoC,QAA5BnjC,EAAOsjC,iBAAf,IACD,QAA3BtjC,EAAOqjC,gBADX,EAEE,MAAM,IAAI37B,UAAJ,CACF,oPADE,CAAN,CAMF,IAAsB,QAAlB1H,EAAOpuF,OAAW,IAA2B,WAAnBouF,EAAOpuF,OAAf,IACC,YAAnBouF,EAAOpuF,OADX,EAEE,MAAM,IAAI81F,UAAJ,CACF,kBAAgB/jG,EAAKvB,IAArB,qEAAA,GACoC85B,KAAKE,SAALF,CAAe8jE,EAAOpuF,OAAtBsqB,CAFlC,CAAN,QAKFv4B,EAAKmhI,eAALnhI,GAC8B,QAA1Bq8F,EAAO8kC,eAAmB,GAAO,CAAP,GAAW9kC,EAAO8kC,eADhDnhI,EAEAA,EAAKohI,oBAALphI,GAA4B2+H,eACxBtiC,EAAO+kC,oBAAP/kC,IAA+Br8F,EAAKqhI,6BADZ1C,CAF5B3+H,EAIAA,EAAKshI,oBAALthI,GAA4Bg/H,eAAe3iC,EAAOilC,oBAAtBtC,CAJ5Bh/H,EAKAA,EAAKuhI,mBAALvhI,GAA2B8+H,cAA
cziC,EAAOklC,mBAArBzC,CAL3B9+H,EAMAA,EAAKwhI,oBAALxhI,GAA4B2+H,eACxBtiC,EAAO+kC,oBAAP/kC,IAA+Br8F,EAAKyhI,6BADZ9C,CAN5B3+H,EAQAA,EAAK0hI,oBAAL1hI,GAA4Bg/H,eAAe3iC,EAAOqlC,oBAAtB1C,CAR5Bh/H,EASAA,EAAK2hI,mBAAL3hI,GAA2B8+H,cAAcziC,EAAOslC,mBAArB7C,CAT3B9+H;EAiHJ,UA5JmCkQ,aAAAA,GAAAA,GAuDjC0xH,WAAAA,MAAAA,GAAA,UAAMpiH,CAAN;EAEE,SADAA,IAAa06G,mBAAmB16G,CAAnB06G,GACEzgI,SAAS4F,KAAKZ,IAALY,GAAY,CAApC,EACE,MAAM,IAAI0kG,UAAJ,CACF,4BAA0B1kG,KAAKZ,IAA/B,wBAAA,IACGY,KAAKZ,IAALY,GAAY,CADf,kCAAA,GAEGk5B,KAAKE,SAALF,CAAe/Y,CAAf+Y,CAHD,CAAN,CAKF,IAAMqnG,IACkB,oBAApBvgI,KAAKuN,UAAe,GAAkB,CAAlB,GAAsB4S,EAAW/lB,MAAX+lB,GAAoB,CADlE,CAEA,IAA+B,QAA3BA,EAAWogH,CAAXpgH,CAA2B,IAAQA,EAAWogH,CAAXpgH,IAA0B,CAAjE,EACE,MAAM,IAAIukF,UAAJ,CACF,sEACaxrE,KAAKE,SAALF,CAAe/Y,EAAWogH,CAAXpgH,CAAf+Y,CAFX,CAAN,CASF,KAJA,IAAMsnG,IAAWrgH,EAAWogH,CAAXpgH,CAAjB,EACMqiH,IACFxiI,KAAKw+H,UAALx+H,CAAgBmH,MAAhBnH,EAAwBwgI,GAAUxgI,KAAK8hI,gBAAvC9hI,CAFJ,EAGMyiI,MAHN,EAIS1nI,IAAI,CAAb,EAAgBA,IAAIiF,KAAKZ,IAAzB,IAAiCrE,CAAjC,EACE0nI,EAAqBzmI,IAArBymI,CAA0B,CAA1BA,EAEFA,EAAqBzmI,IAArBymI,CAA0BjC,IAAWxgI,KAAK8hI,eAA1CW,EAA2DziI,KAAKkgI,OAAhEuC,EAEA,KAAA,CACAziI,KAAK0iI,eAAL1iI,GAAuBA,KAAK0gI,SAAL1gI,CACnB,kBADmBA,EACCwiI,CADDxiI,EACuB,SADvBA,EAEnBA,KAAK+hI,oBAFc/hI,EAEQA,KAAKiiI,oBAFbjiI,GADL,CACKA,EAGnBA,KAAKkiI,mBAHcliI,CAAvBA,EAIAA,KAAK2iI,eAAL3iI,GAAuBA,KAAK0gI,SAAL1gI,CACnB,kBADmBA,EACCyiI,CADDziI,EACuB,SADvBA,EAEnBA,KAAKmiI,oBAFcniI,EAEQA,KAAKqiI,oBAFbriI,GALL,CAKKA,EAGnBA,KAAKsiI,mBAHctiI,CAJvBA,EAQIA,KAAKo/H,OAALp/H,GACFA,KAAKuO,IAALvO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,GACCA,KAAKkgI,QADNlgI,EACgB,SADhBA,EAC2BA,KAAKq/H,eADhCr/H,EAERA,KAAK0/H,eAFG1/H,GAVI,CAUJA,EAEyBA,KAAKw/H,cAF9Bx/H,CADVA,GAKFA,KAAKuO,IAALvO,GAAY,IAbdA,EAgBAA,KAAKqxG,SAALrxG,IACK,IAAIkhI,SAAJ,GAAepxB,MAAM9vG,KAAKZ,IAALY,GAAY,GAAGghB,eAAO7H,EAAConH,CAADpnH,IAAeqnH,IAAtBx/G,GAApC,EAjBLhhB,EAkBAA,KAAKyyG,KAALzyG,IAAa,CAlBbA;KAjF+B6Q,EAsGjC0xH,WAAAA,KAAAA,GAAA,UAAK9wH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAGV,UAAI5G,CAAJ,CACA,IAHAL,IAASsrH,oBAAoBtrH,CAApBsrH,CAATtrH,EAGkB,MAAd9Q,EAAKvB,IAAT,EACE,MAAM,IAAIulG,mBAAJ,CACF,kDADE,CAAN,CAwBF,OAtByB,MAAdhkG,EAAKvB,IAAS,KACC,oBAApBuB,EAAK4M,UAAe,KACtBkE,IAASgtH,UAAchtH,CAAdgtH,GAAuB,GAAG,GAAG,GAAG,EAAhCA,CADa,GAIxB3sH,IAAS8wH,gBACLnxH,CADKmxH,EACejiI,EAAK+hI,eAAL/hI,CAAqB8E,IAArB9E,EADfiiI,EAELjiI,EAAKgiI,eAALhiI,CAAqB8E,IAArB9E,EAFKiiI,EAGLjiI,EAAKtB,OAHAujI,EAG6BjiI,EAAKiO,OAHlCg0H,EAILjiI,EAAKkO,YAJA+zH,EAIkC,MAJlCA,CALc,GAYrBjiI,EAAKy+H,OAALz+H,KACFmR,IAAS8sH,QAAU9sH,CAAV8sH,EAAkBj+H,EAAK4N,IAAL5N,CAAU8E,IAAV9E,EAAlBi+H,EAAoCj+H,EAAK4M,UAAzCqxH,CADPj+H,CAZqB,EAeF,QAAnBA,EAAKi8H,UAAc,KACrB9qH,IAASnR,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsBmR,CAAtBnR,CADY,CAfE,EAmBD,oBAApBA,EAAK4M,UAAe,KACtBuE,IAAS2sH,UAAc3sH,CAAd2sH,GAAuB,GAAG,GAAG,GAAG,EAAhCA,CADa,CAnBC,EAsBlB3sH,CAAP;OA7BK4G,CAAP;KAvG+B7H,EAwIjC0xH,WAAAA,UAAAA,GAAA;EACE,QAAMvlC,IAASvsF,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAAf,CAiBA,cAhBOusF,EAAa59F,aACb49F,EAA0BmjC,0BAC1BnjC,EAA0BsjC,0BAC1BtjC,EAAyBqjC,kBAChCrjC,EAA6B+kC,oBAA7B/kC,GACI8iC,qBAAqB9/H,KAAK+hI,oBAA1BjC,GACJ9iC,EAA6BmlC,oBAA7BnlC,GACI8iC,qBAAqB9/H,KAAKmiI,oBAA1BrC,GACJ9iC,EAA6BilC,oBAA7BjlC,GACI+iC,qBAAqB//H,KAAKiiI,oBAA1BlC,GACJ/iC,EAA6BqlC,oBAA7BrlC,GACI+iC,qBAAqB//H,KAAKqiI,oBAA1BtC,GACJ/iC,EAA4BklC,mBAA5BllC,GACIgjC,oBAAoBhgI,KAAKkiI,mBAAzBlC,GACJhjC,EAA4BslC,mBAA5BtlC,GACIgjC,oBAAoBhgI,KAAKsiI,mBAAzBtC,GACGhjC,CAAP;KA1J+BnsF,EAC1B0xH,WAAAA,GAAY,eADc1xH,GA4JnC;IA5JmCovH,KAAnC;EAAA;EA2LE,YAAA,CAAYjjC,CAAZ;aACEvsF,MAAAA,KAAAA,EAAM,CAANA,EAASusF,CAATvsF;EAEJ,UALqCI,aAAAA,GAAAA,GAC5BgyH,WAAAA,GAAY,iBADgBhy
H,GAKrC;IALqC0xH,cAzLrC,eA+Lc56B,cAAck7B,iBAoB5B;EAEE,YAAA,CAAY7lC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAM,CAANA,EAASusF,CAATvsF,SADF,QAEEqyH,EAAO9D,YAAP8D,CAAoB9lC,CAApB8lC,GACAniI,EAAK0wG,SAAL1wG,MAAmBmvG,MAAM,IADzBgzB;EAoBJ,UAxB4BjyH,aAAAA,GAAAA,GAQ1BiyH,WAAAA,UAAAA,GAAA;EACE,QAAM9lC,IAASvsF,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAAf,CAGA,cAFOusF,EAAa59F,aACb49F,EAAmBzvF,YACnByvF,CAAP;KAZwBnsF,EAenBiyH,cAAAA,GAAP,UAAoB9lC,CAApB;EAEE,QAAiC,mBAAtBA,EAAOwhC,UAAe,KAC5BoB,wBACG5iC,EAAOwhC,UADVoB,EACsB,QADtBA,EACgC,CADhCA,EACmC,CADnCA,CADL,EAGE,MAAM,IAAIl7B,UAAJ,CACF,2FAC0BxrE,KAAKE,SAALF,CAAe8jE,EAAOwhC,UAAtBtlG,CAD1B,MADE,CAAN;KApBsBroB,EACnBiyH,WAAAA,GAAY,QADOjyH,GAwB5B;IAxB4BovH,KAA5B,eAyBct4B,cAAcm7B,QA6D5B;EAKE,YAAA,CAAY9lC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEiC,mBAApBusF,EAAO+lC,QAAa,GAC7BpiI,EAAKoiI,QAALpiI,KACGq8F,EAAO+lC,UAAU/lC,EAAO+lC,YAAY/lC,EAAO+lC,UAAU/lC,EAAO+lC,UAFlC,GAIQ,mBAAvB/lC,EAAO+lC,QAAP/lC,CAAgB,CAAhBA,CAAuB,GACrCr8F,EAAKoiI,QAALpiI,KACGq8F,EAAO+lC,QAAP/lC,CAAgB,CAAhBA,GAA8BA,EAAO+lC,QAAP/lC,CAAgB,CAAhBA,KAC9BA,EAAO+lC,QAAP/lC,CAAgB,CAAhBA,GAA8BA,EAAO+lC,QAAP/lC,CAAgB,CAAhBA,GAHI,GAMrCr8F,EAAKoiI,QAALpiI,GAAgBq8F,EAAO+lC,QAVM,EAW/BpiI,EAAK4M,UAAL5M,QAC0B0jG,MAAtBrH,EAAOzvF,aAA2B,iBAAiByvF,EAAOzvF,UAZ/B,EAa/B5M,EAAK0wG,SAAL1wG,MAAmBmvG,MAAM,IAbM;EA6DnC,UApEgCj/F,aAAAA,GAAAA,GAuB9BmyH,WAAAA,mBAAAA,GAAA,UAAmB7iH,CAAnB;EACE,WAAwB,oBAApBngB,KAAKuN,UAAe,IAEpB4S,EAAW,CAAXA,GACAA,EAAW,CAAXA,GACAA,EAAW,CAAXA,IAAgBngB,KAAK+iI,QAAL/iI,CAAc,CAAdA,EAAiB,CAAjBA,CAAhBmgB,GAAsCngB,KAAK+iI,QAAL/iI,CAAc,CAAdA,EAAiB,CAAjBA,GACtCmgB,EAAW,CAAXA,IAAgBngB,KAAK+iI,QAAL/iI,CAAc,CAAdA,EAAiB,CAAjBA,CAAhBmgB,GAAsCngB,KAAK+iI,QAAL/iI,CAAc,CAAdA,EAAiB,CAAjBA,EALlB,IASpBmgB,EAAW,CAAXA,GACAA,EAAW,CAAXA,IAAgBngB,KAAK+iI,QAAL/iI,CAAc,CAAdA,EAAiB,CAAjBA,CAAhBmgB,GAAsCngB,KAAK+iI,QAAL/iI,CAAc,CAAdA,EAAiB,CAAjBA,GACtCmgB,EAAW,CAAXA,IAAgBngB,KAAK+iI,QAAL/iI,CAAc,CAAdA,EAAiB,CAAjBA,CAAhBmgB,GAAsCngB,KAAK+iI,QAAL/iI,CAAc,CAAdA,EAAiB,CAAjBA,GACtCmgB,EAAW,CAAXA,EAZJ;KAxB4BtP,EAwC9BmyH,WAAAA,KAAAA,GAAA,UAAKvxH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAGV,aAFAjH,IAASsrH,oBAAoBtrH,CAApBsrH,CAATtrH,EAEwB,mBAApB9Q,EAAK4M,UAAe,GAIf01H,eAHSA,eACZxxH,CADYwxH,EACJtiI,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CADIsiI,EAEZxxH,EAAOxV,KAAPwV,CAAa,CAAbA,IAAkB9Q,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CAAlB8Q,GAAwC9Q,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CAF5BsiI,EAEiD,CAFjDA,CAGTA,EACMtiI,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CADNsiI,EAEHxxH,EAAOxV,KAAPwV,CAAa,CAAbA,IAAkB9Q,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CAAlB8Q,GAAwC9Q,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CAFrCsiI,EAE0D,CAF1DA,CAJe,GAWfA,eAHSA,eACZxxH,CADYwxH,EACJtiI,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CADIsiI,EAEZxxH,EAAOxV,KAAPwV,CAAa,CAAbA,IAAkB9Q,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CAAlB8Q,GAAwC9Q,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CAF5BsiI,EAEiD,CAFjDA,CAGTA,EACMtiI,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CADNsiI,EAEHxxH,EAAOxV,KAAPwV,CAAa,CAAbA,IAAkB9Q,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CAAlB8Q,GAAwC9Q,EAAKoiI,QAALpiI,CAAc,CAAdA,EAAiB,CAAjBA,CAFrCsiI,EAE0D,CAF1DA,CAXT;OAHKvqH,CAAP;KAzC4B7H,EA8D9BmyH,WAAAA,UAAAA,GAAA;EACE,QAAMhmC,MAAU+lC,UAAU/iI,KAAK+iI,UAAUx1H,YAAYvN,KAAKuN,YAA1D;EAAA,QACMyvH,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAlE4BnsF,EACvBmyH,WAAAA,GAAY,YADWnyH,GAoEhC;IApEgCshG,MAAhC,eAqEcxK,cAAcq7B,YA8C5B;EAME,YAAA,CAAYhmC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAJmB9P,cAAAA,IAAgB,GAAG,EAAnBA,EAMjBA,EAAK0wG,SAAL1wG,MA
AmBmvG,MAAM,IANRnvG,EAOjBA,EAAKzE,IAALyE,GAA2B,QAAfq8F,EAAO9gG,IAAQ,GAAOyE,EAAKuiI,YAAZ,GAA2BlmC,EAAO9gG,IAP5CyE,EAQjBA,EAAK4M,UAAL5M,GACyB,QAArBq8F,EAAOzvF,UAAc,GAAO,cAAP,GAAwByvF,EAAOzvF,UATvC5M;EAmDrB,UArDkCkQ,aAAAA,GAAAA,GAchCsyH,WAAAA,mBAAAA,GAAA,UAAmBhjH,CAAnB;EACE,QAAwB,oBAApBngB,KAAKuN,UAAT,EAAyC;EACvC,UAAM6sB,IACe,QAAjBja,EAAW,CAAXA,CAAiB,GAAO,IAAP,GAAcngB,KAAK9D,IAAL8D,CAAU,CAAVA,IAAemgB,EAAW,CAAXA,CADlD;EAAA,UAEMka,IAAyB,QAAjBla,EAAW,CAAXA,CAAiB,GAAO,IAAP,GAAcngB,KAAK9D,IAAL8D,CAAU,CAAVA,IAAemgB,EAAW,CAAXA,CAF5D,CAGA,QAAQA,EAAW,CAAXA,GAAeA,EAAW,CAAXA,GAAeia,GAAQC,EAA9C;EAEMD,SACe,QAAjBja,EAAW,CAAXA,CAAiB,GAAO,IAAP,GAAcngB,KAAK9D,IAAL8D,CAAU,CAAVA,IAAemgB,EAAW,CAAXA,CAD5Cia,EAEAC,IAAyB,QAAjBla,EAAW,CAAXA,CAAiB,GAAO,IAAP,GAAcngB,KAAK9D,IAAL8D,CAAU,CAAVA,IAAemgB,EAAW,CAAXA,CAFtDia,CAGN,QAAQja,EAAW,CAAXA,GAAeia,GAAQC,GAAOla,EAAW,CAAXA,EAAtC;KAxB4BtP,EA4BhCsyH,WAAAA,KAAAA,GAAA,UAAK1xH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOie,KAAS;EACd,UAAIx/G,IAAQorH,oBAAoBtrH,CAApBsrH,CAAZ;EAAA,UACM58G,IAAaxO,EAAM1V,KADzB,CAGA,IAAwB,oBAApB0E,EAAK4M,UAAT,EAAyC;EACvCoE,YAAQ8sH,UAAc9sH,CAAd8sH,GAAsB,GAAG,GAAG,GAAG,EAA/BA,CAAR9sH,CACA,IAAMyoB,IAASz5B,EAAKzE,IAALyE,CAAU,CAAVA,IAAewf,EAAW,CAAXA,CAA9B;EAAA,YACMka,IAAQ15B,EAAKzE,IAALyE,CAAU,CAAVA,IAAewf,EAAW,CAAXA,CAD7B;EAAA,YAEMijH,IAAUzxH,EAAMtE,qBAANsE,EAA6ByoB,GAAQC,EAArC1oB,CAFhB,CAGA,OAAO8sH,UAAc2E,CAAd3E,GAAwB,GAAG,GAAG,GAAG,EAAjCA,CAAP;EAEMrkG,WAASz5B,EAAKzE,IAALyE,CAAU,CAAVA,IAAewf,EAAW,CAAXA,CAAxBia,EACAC,IAAQ15B,EAAKzE,IAALyE,CAAU,CAAVA,IAAewf,EAAW,CAAXA,CADvBia,CAEN,OAAOzoB,EAAMtE,qBAANsE,EAA6ByoB,GAAQC,EAArC1oB,CAAP;OAbGw/G,CAAP;KA7B8BtgH,EA+ChCsyH,WAAAA,UAAAA,GAAA;EACE,QAAMnmC,MAAU9gG,MAAM8D,KAAK9D,MAAMqR,YAAYvN,KAAKuN,YAAlD;EAAA,QACMyvH,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAnD8BnsF,EACzBsyH,WAAAA,GAAY,cADatyH,GAqDlC;IArDkCshG,MAAlC,4BC/oCIz3G,GAAWgoI,GAAyBrjI,GACpCuP,GAAmBrB,GACnBsB;EACF,0BAHsCxP,KAA6B,GAAG,sBACpEuP,cAEK8J,KAAK;EACQ,YAAdnL,CAAc,KAChBA,IAAag+F,iBADG,GAGlBC,gBAAgBj+F,CAAhBi+F,CAHkB,CAIlB,IAAIp6F,IAAIytH,sBAAsBnkI,CAAtBmkI,EAAyBtxH,CAAzBsxH,CAAR,CACA,IAAe,MAAXnkI,EAAE0E,IAAN,EACE,MAAM,IAAIslG,UAAJ,CACF,qEACGhqG,EAAE0E,IADL,OADE,CAAN,CAIF,IAA6B,MAAzBsjI,EAAgBtjI,IAApB,EACE,MAAM,IAAIslG,UAAJ,CACF,2DACGg+B,EAAgBtjI,IADnB,OADE,CAAN,CAUF,OANAgS,IAAIiyH,gBACAjyH,CADAiyH,EACeX,CADfW,EAC4ChkI,CAD5CgkI,EAEY,WAAZz0H,CAAY,GAAS,MAAT,GAAkB,OAF9By0H,EAEuC,MAFvCA,EAE+Cx0H,CAF/Cw0H,CAAJjyH,EAGmB,oBAAf7D,CAAe,KACjB6D,IAAIqtH,UAAcrtH,CAAdqtH,GAAkB,GAAG,GAAG,GAAG,EAA3BA,CADa,CAHnBrtH,EAMOA,CAAP;KAtBKsH,CAAP;iBDksCYivF,cAAcw7B,cC9nC5B;EASE,YAAA,CAAYnmC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAM,CAANA,EAASusF,CAATvsF,SADF,QAFQ9P,iBAAAA,GAAiC,IAAjCA,EAINA,EAAKmhI,eAALnhI,GAC8B,QAA1Bq8F,EAAO8kC,eAAmB,GAAO,CAAP,GAAW9kC,EAAO8kC,eAL1CnhI,EAMNA,EAAKohI,oBAALphI,GAA4B2+H,eACxBtiC,EAAO+kC,oBAAP/kC,IAA+Br8F,EAAKy/H,0BADZd,CANtB3+H,EAQNA,EAAKuhI,mBAALvhI,GAA2B8+H,cAAcziC,EAAOklC,mBAArBzC,CARrB9+H,EASNA,EAAKshI,oBAALthI,GAA4Bg/H,eAAe3iC,EAAOilC,oBAAtBtC,CATtBh/H;EA6FV,UApGqCkQ,aAAAA,GAAAA,GAmBnCyyH,WAAAA,MAAAA,GAAA,UAAMnjH,CAAN;EAEE,SADAA,IAAa06G,mBAAmB16G,CAAnB06G,GACEzgI,SAAS,CAAxB,EACE,MAAM,IAAIsqG,UAAJ,CACF,yEACyBxrE,KAAKE,SAALF,CAAe/Y,CAAf+Y,CADzB,MADE,CAAN,CAIF,IAAMqnG,IAAkC,oBAApBvgI,KAAKuN,UAAe,GAAkB,CAAlB,GAAsB,CAA9D,CACA,IAA+B,QAA3B4S,EAAWogH,CAAXpgH,CAA2B,IAAQA,EAAWogH,CAAXpgH,IAA0B,CAAjE,EACE,MAAM,IAAIukF,UAAJ,CACF,2FAC2BvkF,EAAWogH,CAAXpgH,CAD3B,OADE,CAAN,CAIF,IAAMqgH,IAAWrgH,EAAWogH,CAAXpgH,CAAjB;EAAA,QACMqiH,KACJxiI,KAAKw+H,UAALx+H,CAAgB,CAAhBA,GAAoBA,KAAKw+H,UAALx+H,C
AAgB,CAAhBA,GAAoBwgI,GAAUxgI,KAAK8hI,gBAFzD,CAKA9hI,KAAK0iI,eAAL1iI,GAAuBA,KAAK0gI,SAAL1gI,CACnB,kBADmBA,EACCwiI,CADDxiI,EACuB,IADvBA,EAEnBA,KAAK+hI,oBAFc/hI,EAEQA,KAAKiiI,oBAFbjiI,GAEmC,CAFnCA,EAGnBA,KAAKkiI,mBAHcliI,CAAvBA,EAIIA,KAAKo/H,OAALp/H,GACFA,KAAKuO,IAALvO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,GACCwgI,IAAWxgI,KAAK8hI,gBADjB9hI,EACmC,IADnCA,EACyCA,KAAKq/H,eAD9Cr/H,EAERA,KAAK0/H,eAFG1/H,GAEc,CAFdA,EAEoBA,KAAKw/H,cAFzBx/H,CADVA,GAKFA,KAAKuO,IAALvO,GAAY,IATdA,EAWAA,KAAKyyG,KAALzyG,IAAa,CAXbA;KArCiC6Q,EAmDnCyyH,WAAAA,KAAAA,GAAA,UAAK7xH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,UAAI7G,IAAU9D,kBADd0D,IAASsrH,oBAAoBtrH,CAApBsrH,CACKhvH,EACFpN,EAAK+hI,eAAL/hI,CAAqB8E,IAArB9E,EADEoN,EAC2BpN,EAAKtB,OADhC0O,EAEVpN,EAAKiO,OAFKb,EAEIpN,EAAK4M,UAFTQ,EAEqB,IAFrBA,CAAd,CAUA,OANIpN,EAAKy+H,OAALz+H,KACFkR,IAAU+sH,QAAU/sH,CAAV+sH,EAAmBj+H,EAAK4N,IAAL5N,CAAU8E,IAAV9E,EAAnBi+H,EAAqCj+H,EAAK4M,UAA1CqxH,CADRj+H,GAGmB,QAAnBA,EAAKi8H,UAAc,KACrB/qH,IAAUlR,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsBkR,CAAtBlR,CADW,CAHnBA,EAMGkR,CAAP;OAZK6G,CAAP;KApDiC7H,EAoEnCyyH,WAAAA,mBAAAA,GAAA,UAAmBnjH,CAAnB;EACEA,QAAa06G,mBAAmB16G,CAAnB06G,CAAb16G,CACA,IAAM/a,IACkB,oBAApBpF,KAAKuN,UAAe,GAAkB4S,EAAW,CAAXA,CAAlB,GAAkCA,EAAW,CAAXA,CAD1D;EAAA,QAEMimB,IACkB,oBAApBpmC,KAAKuN,UAAe,GAAkB4S,EAAW,CAAXA,CAAlB,GAAkCA,EAAW,CAAXA,CAH1D;EAAA,QAIMojH,IAAiC,oBAApBvjI,KAAKuN,UAAe,GACnC4S,EAAW,CAAXA,IAAgBngB,KAAK8hI,eADc,GAEnC3hH,EAAW,CAAXA,IAAgBngB,KAAK8hI,eANzB;EAAA,QAOM0B,IAAUxC,iBACZ57H,CADY47H,EACNhhI,KAAKw+H,UAALx+H,CAAgB,CAAhBA,CADMghI,EACchhI,KAAK4O,OADnBoyH,EAC4BhhI,KAAKX,OAALW,CAAa,CAAbA,CAD5BghI,CAPhB;EAAA,QASMyC,IAAUzC,iBACZ56F,CADY46F,EACNhhI,KAAKw+H,UAALx+H,CAAgB,CAAhBA,CADMghI,EACchhI,KAAK4O,OADnBoyH,EAC4BhhI,KAAKX,OAALW,CAAa,CAAbA,CAD5BghI,CAThB,CAWA,OAAwB,oBAApBhhI,KAAKuN,UAAe,IACd4S,EAAW,CAAXA,GAAeojH,GAAYC,GAASC,EADtB,IAIdtjH,EAAW,CAAXA,GAAeqjH,GAASC,GAASF,EAJ3C;KAjFiC1yH,EAyFnCyyH,WAAAA,UAAAA,GAAA;EACE,QAAMtmC,IAASvsF,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAAf,CAQA,OAPAusF,EAAwB8kC,eAAxB9kC,GAA4Bh9F,KAAK8hI,eAAjC9kC,EACAA,EAA6B+kC,oBAA7B/kC,GACI8iC,qBAAqB9/H,KAAK+hI,oBAA1BjC,CAFJ9iC,EAGAA,EAA6BilC,oBAA7BjlC,GACI+iC,qBAAqB//H,KAAKiiI,oBAA1BlC,CAJJ/iC,EAKAA,EAA4BklC,mBAA5BllC,GACIgjC,oBAAoBhgI,KAAKiiI,oBAAzBjC,CANJhjC,EAOOA,CAAP;KAlGiCnsF,EAC5ByyH,WAAAA,GAAY,iBADgBzyH,GAoGrC;IApGqCkuH,SAArC,eAqGcp3B,cAAc27B,iBCjK5B;EAOE,YAAA,CAAYtmC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,CAOE,IALA9P,EAAKsnG,IAALtnG,GAAYpG,KAAKI,GAALJ,CAASA,KAAKE,GAALF,CAASyiG,EAAOiL,IAAhB1tG,EAAsB,CAAtBA,CAATA,EAAmC,CAAnCA,CAAZoG,EACAA,EAAK+iI,UAAL/iI,GAAkB+mG,UAAU/mG,EAAKsnG,IAAfP,CADlB/mG,EAGAA,EAAKkrG,UAALlrG,GAAkBq8F,EAAO6O,UAHzBlrG,EAIAA,EAAK6nB,IAAL7nB,GAAYq8F,EAAOx0E,IAJnB7nB,EAKiB,QAAbA,EAAK6nB,IAAT,EACE,MAAM,IAAIm8E,mBAAJ,CACF,+DACAhkG,EAAK6nB,IAFH,CAAN,QAIF7nB,EAAK2wG,eAAL3wG,IAAuB,CAAvBA;EAkDJ,UArE6BkQ,aAAAA,GAAAA,GAsBnB8yH,WAAAA,cAAAA,GAAR,UAAsBhyH,CAAtB;EACE,QAAuB,QAAnB3R,KAAK6rG,UAAT,EACE,OAAO7rG,KAAK6rG,UAAZ,CAIF,KAFA,IAAM1rF,IAAaxO,EAAM1V,KAAzB,EACM4vG,MADN,EAES9wG,IAAI,CAAb,EAAgBA,IAAIiF,KAAK6rG,UAAL7rG,CAAgB5F,MAApC,IAA8CW,CAA9C,EACE8wG,EAAW7vG,IAAX6vG,CAC0B,QAAtB7rG,KAAK6rG,UAAL7rG,CAAgBjF,CAAhBiF,CAAsB,GAAOmgB,EAAWplB,CAAXolB,CAAP,GAAuBngB,KAAK6rG,UAAL7rG,CAAgBjF,CAAhBiF,CADjD6rG,EAGF,OAAOA,CAAP;KAhCyBh7F,EAmC3B8yH,WAAAA,KAAAA,GAAA,UAAKlyH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV/X,QAAKijI,cAALjjI,CAAoB8Q,CAApB9Q,EAA4BuyG,CAA5BvyG,EACA,IAAMgR,IAAQorH,oBAAoBtrH,CAApBsrH,CAAd,CACA,IAAuB,QAAnBp8H,EAAKkrG,UAAc,KAClBC,KAAKnwG,WAALmwG,CAAiBn6F,EAAM1V,KAAvB6vG,EAA8BnrG,EAAKkrG,UAAnCC,CADL,EAEE,MAAM,IAAInH,mBAAJ,CACF,sEA
CgBzrE,KAAKE,SAALF,CAAev4B,EAAKkrG,UAApB3yE,CAFd,CAAN,CAIF,IAAI,IAAIv4B,EAAKsnG,IAAT,IAAiBtnG,EAAKsnG,IAALtnG,GAAY,CAAjC,EAAoC;EAClC,YAAMyrG,IACoB,QAAtB8G,EAAiB9G,QAAK,IAAe8G,EAAiB9G,QAD1D;EAAA,YAEMy3B,IAAaljI,EAAKmjI,aAALnjI,CAAmBgR,CAAnBhR,CAFnB,CAOA,OAHIojI,aACI;EAAM,iBAAAC,QAAUryH,CAAVqyH,EAAiBrjI,EAAK+iI,UAAtBM,EAAkCH,CAAlCG,EAA8CrjI,EAAK6nB,IAAnDw7G,CAAA;WADVD,EAEI;EAAM,iBAAApyH,CAAA;WAFVoyH,EAEiB33B,CAFjB23B,CAGJ;EAEF,cAAOtyH,CAAP;OAnBKiH,CAAP;KApCyB7H,EA2D3B8yH,WAAAA,UAAAA,GAAA;EACE,QAAM3mC,MACJiL,MAAMjoG,KAAKioG,MACX4D,YAAY7rG,KAAK6rG,YACjBrjF,MAAMxoB,KAAKwoB,MAHb;EAAA,QAKMw0G,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CALnB,CAOA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAnEyBnsF,EACpB8yH,WAAAA,GAAY,SADQ9yH,GAqE7B;IArE6BshG,MAA7B,eAsEcxK,cAAcg8B,SAmF5B;EAkBE,YAAA,CAAY3mC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,CAEE,IAhBM9P,YAAAA,GAA2B,IAA3BA,EACAA,SAAAA,IAAU,CADVA,EAIAA,QAAAA,GAAwB,IAJxBA,EAKAA,MAAAA,GAAsB,IALtBA,EAOCA,4BAAAA,GAAoD,cAPrDA,EAQCA,0BAAAA,GAAkD,OARnDA,EAgBwB,QAA1Bq8F,EAAO+U,eAAmB,IAA6B,QAArB/U,EAAO78E,UAAf,IACP,QAAnB68E,EAAOwjC,QADX,EAC6B;EAG3B,UAAIj0G,IAAoB,IAAxB,CACwB,QAApBywE,EAAOzwE,SAAa,KACtBA,IAAYywE,EAAOzwE,SADG,GAGxB5rB,EAAKoxG,eAALpxG,IAAwB4rB,GAAWywE,EAAOwjC,SAHlB;cAM1B7/H,EAAKsjI,KAALtjI,GAAaq8F,EAAOinC,KAApBtjI,EACAA,EAAKi8H,UAALj8H,GAAkBw+H,cAAcniC,EAAO4/B,UAArBuC,CADlBx+H,EAEsB,QAAlBq8F,EAAOoiC,OAAW,KACpBz+H,EAAKy+H,OAALz+H,GAAeq8F,EAAOoiC,OADF,CAFtBz+H,EAKAA,EAAKw/H,iBAALx/H,GAAyB2+H,eACrBtiC,EAAOmjC,iBAAPnjC,IAA4Br8F,EAAKy/H,0BADZd,CALzB3+H,EAOAA,EAAK0+H,eAAL1+H,GACI2+H,eAAetiC,EAAOqiC,eAAPriC,IAA0Br8F,EAAK4+H,wBAA9CD,CARJ3+H,EASAA,EAAK0/H,gBAAL1/H,GAAwB8+H,cAAcziC,EAAOqjC,gBAArBZ,CATxB9+H,EAUAA,EAAK6+H,cAAL7+H,GAAsB8+H,cAAcziC,EAAOwiC,cAArBC,CAVtB9+H,EAWAA,EAAK2/H,iBAAL3/H,GAAyBg/H,eAAe3iC,EAAOsjC,iBAAtBX,CAXzBh/H,EAYAA,EAAK++H,eAAL/+H,GAAuBg/H,eAAe3iC,EAAO0iC,eAAtBC,CAZvBh/H,EAaAA,EAAKywG,mBAALzwG,GAA2Bg/H,eAAe3iC,EAAOoU,mBAAtBuuB,CAb3Bh/H,EAeAA,EAAK0wG,SAAL1wG,MAAmBqvG,SAAS,IAf5BrvG;EA4EJ,UA3G2BkQ,aAAAA,GAAAA,GAiDlBqzH,WAAAA,MAAAA,GAAP,UAAa/jH,CAAb;EAEE,SAAA;EAAA,QAAMgkH,KADNhkH,IAAa06G,mBAAmB16G,CAAnB06G,GACmB16G,EAAW/lB,MAAX+lB,GAAoB,EAApD,CACmB,QAAfngB,KAAK0+H,MAAU,KACjB1+H,KAAK0+H,MAAL1+H,GAAcA,KAAK0gI,SAAL1gI,CACV,QADUA,GACCmkI,GAAcnkI,KAAKikI,MADpBjkI,EAC4B,IAD5BA,EACkCA,KAAKmgI,iBADvCngI,EAEVA,KAAKsgI,iBAFKtgI,GAEc,CAFdA,EAEoBA,KAAKqgI,gBAFzBrgI,CAAdA,EAGIA,KAAKo/H,OAALp/H,KACFA,KAAKuO,IAALvO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,GACCA,KAAKikI,MADNjkI,EACc,IADdA,EACoBA,KAAKq/H,eADzBr/H,EAERA,KAAK0/H,eAFG1/H,GAEc,CAFdA,EAEoBA,KAAKw/H,cAFzBx/H,CADVA,CAJa,GAWnBA,KAAKqxG,SAALrxG,MAAmBgwG,SAAS,GAAGhvF,eAAO7H,GAAE,CAAFA,IAAMgrH,IAAbnjH,IAXZ,EAYnBhhB,KAAKyyG,KAALzyG,IAAa,CAZM;KApDI6Q,EAmEzBqzH,WAAAA,mBAAAA,GAAA,UAAmB/jH,CAAnB;EAEE,QAAMtS,KADNsS,IAAa06G,mBAAmB16G,CAAnB06G,GACkBh9H,OAA/B,CAEA,OADAgQ,EAAYA,EAAYzT,MAAZyT,GAAqB,CAAjCA,IAAsC7N,KAAKikI,KAA3Cp2H,EACOA,CAAP;KAvEuBgD,EA0EzBqzH,WAAAA,KAAAA,GAAA,UAAKzyH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV/X,QAAKijI,cAALjjI,CAAoB8Q,CAApB9Q,EAA4BuyG,CAA5BvyG,EAEA,IACImR,IAASsyH,MADCrH,oBAAoBtrH,CAApBsrH,CACDqH,EAAazjI,EAAK+9H,MAAL/9H,CAAY8E,IAAZ9E,EAAbyjI,CADb,CAQA,OANiB,QAAbzjI,EAAK4N,IAAQ,KACfuD,IAAS8sH,QAAU9sH,CAAV8sH,EAAkBj+H,EAAK4N,IAAL5N,CAAU8E,IAAV9E,EAAlBi+H,CADM,GAGM,QAAnBj+H,EAAKi8H,UAAc,KACrB9qH,IAASnR,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsBmR,CAAtBnR,CADY,CAHN,EAMVmR,CAAP;OAXK4G,CAAP;KA3EuB7H,EA0FzBqzH,WAAAA,UAAAA,GAAA;EACE,QAAMlnC,MACJinC,OAAOjkI,KAAKikI,OACZrH,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GACZT,SAASp/H,KAAKo/H,SACde,mBAAmBL,qBAAqB9/H,KAAKmgI,iBAA1BL
,GACnBT,iBAAiBS,qBAAqB9/H,KAAKq/H,eAA1BS,GACjBQ,mBAAmBP,qBAAqB//H,KAAKsgI,iBAA1BP,GACnBL,iBAAiBK,qBAAqB//H,KAAK0/H,eAA1BK,GACjB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrBM,kBAAkBL,oBAAoBhgI,KAAKqgI,gBAAzBL,GAClBR,gBAAgBQ,oBAAoBhgI,KAAKw/H,cAAzBQ,GAVlB;EAAA,QAYMhD,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAZnB,CAcA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAzGuBnsF,EAClBqzH,WAAAA,GAAY,OADMrzH,GA2G3B;IA3G2BshG,MAA3B,eA4GcxK,cAAcu8B,OAmB5B;EAEE,YAAA,CAAYlnC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,OAANvsF,SADF,QAEE9P,EAAK0wG,SAAL1wG,MAAmBqvG,SAAS,IAA5BrvG;EAuBJ,UA3B6BkQ,aAAAA,GAAAA,GAO3BwzH,WAAAA,mBAAAA,GAAA,UAAmBlkH,CAAnB;EAEE,SAAkB,SAAA,EAAAhH,KADlBgH,IAAa06G,mBAAmB16G,CAAnB06G,GACgBh9H,MAAM,EAAnC,EAAkBmG,YAAlB,EAAkBA,GAAlB;EACE,UAAW,YAAX,EACE,MAAM,IAAI0gG,UAAJ,CACF,mEACQvkF,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,CADR,oHADE,CAAN;EAOJ,aAAQA,EAAW,CAAXA,GAAeqtF,UAAUrtF,CAAVqtF,EAAsB,CAAtBA,EAAvB;KAlByB38F,EAqB3BwzH,WAAAA,KAAAA,GAAA,UAAK5yH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,aADA/X,EAAKijI,cAALjjI,CAAoB8Q,CAApB9Q,EAA4BuyG,CAA5BvyG,GACO2jI,aAAevH,oBAAoBtrH,CAApBsrH,CAAfuH,CAAP;OAFK5rH,CAAP;KAtByB7H,EACpBwzH,WAAAA,GAAY,SADQxzH,GA2B7B;IA3B6BshG,MAA7B,eA4BcxK,cAAc08B,SAuC5B;EAIE,YAAA,CAAYrnC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEE9P,EAAK2wG,eAAL3wG,IAAuB,CAAvBA,EACAA,EAAKi8H,UAALj8H,GAAkBw+H,cAAcniC,EAAO4/B,UAArBuC,CADlBx+H;EAkBJ,UAxBgCkQ,aAAAA,GAAAA,GAU9B4qH,WAAAA,KAAAA,GAAA,UAAKhqH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV/X,QAAKijI,cAALjjI,CAAoB8Q,CAApB9Q,EAA4BuyG,CAA5BvyG,EACA,IAAMgR,IAAQorH,oBAAoBtrH,CAApBsrH,CAAd,CACA,OAAOp8H,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsBgR,CAAtBhR,CAAP;OAHK+X,CAAP;KAX4B7H,EAkB9B4qH,WAAAA,UAAAA,GAAA;EACE,QAAMz+B,MAAU4/B,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GAA5B;EAAA,QACM7C,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAtB4BnsF,EACvB4qH,WAAAA,GAAY,YADW5qH,GAwBhC;IAxBgCshG,MAAhC,eAyBcxK,cAAc8zB,cA0B5B;EAIE,YAAA,CAAYz+B,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEE9P,EAAKhE,CAALgE,GAASq8F,EAAOrgG,CAAhBgE,EACAA,EAAK0wG,SAAL1wG,MAAmBmvG,MAAM,IADzBnvG;EAuBJ,UA7BkCkQ,aAAAA,GAAAA,GAUhC0zH,WAAAA,mBAAAA,GAAA,UAAmBpkH,CAAnB;EACE,YAAQA,EAAW,CAAXA,GAAengB,KAAKrD,GAAGwjB,EAAW,CAAXA,EAA/B;KAX8BtP,EAchC0zH,WAAAA,KAAAA,GAAA,UAAK9yH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,aAAO8rH,OADP/yH,IAASsrH,oBAAoBtrH,CAApBsrH,CACFyH,EAAiB7jI,EAAKhE,CAAtB6nI,CAAP;OAFK9rH,CAAP;KAf8B7H,EAqBhC0zH,WAAAA,UAAAA,GAAA;EACE,QAAMvnC,MACJrgG,GAAGqD,KAAKrD,GADV;EAAA,QAGMqgI,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAHnB,CAKA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA3B8BnsF,EACzB0zH,WAAAA,GAAY,cADa1zH,GA6BlC;IA7BkCshG,MAAlC,eA8BcxK,cAAc48B,cAsB5B;EAIE,YAAA,CAAYvnC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,CAEE9P,EAAK8jI,WAAL9jI,GAAmBq8F,EAAOynC,WAA1B9jI,CAGA,KAAK,IAAI5F,IAAI,CAAb,EAAgBA,IAAI4F,EAAK8jI,WAAL9jI,CAAiBvG,MAArC,IAA+CW,CAA/C,EACM4F,EAAK+jI,SAAL/jI,CAAeA,EAAK8jI,WAAL9jI,CAAiB5F,CAAjB4F,CAAfA,MACFA,EAAK8jI,WAAL9jI,CAAiB5F,CAAjB4F,IAAsB,IADpBA;EA2FV,UArG6BkQ,aAAAA,GAAAA,GAgBnB8zH,WAAAA,UAAAA,GAAR,UAAkB73H,CAAlB;EACE,WAAOA,IAAM,CAANA,IAAkB,QAAPA,CAAlB;KAjByB+D,EAkCnB8zH,WAAAA,oBAAAA,GAAR,UAA4BxkH,CAA5B,EAA+CtS,CAA/C;EAKE,SAJA,IAAM+2H,IAAW,4CAAjB,EACMC,IAAah3H,EAAYhQ,KAAZgQ,EADnB,EAEIi3H,IAAQ,CAFZ,EAGIC,IAAU,IAHd,EAIShqI,IAAI,CAAb,EAAgBA,IAAI8pI,EAAWzqI,MAA/B,IAAyCW,CAAzC,EAA4C;EAC1C,UAAM+R,IAAM+3H,EAAW9pI,CAAX8pI,CAAZ,CACA,IAAI7kI,KAAK0kI,SAAL1kI,CAAe8M,CAAf9M,CAAJ,EAAyB;EACvB,YAAgB,SAAZ+kI,CAAJ,EAGE,MAAM,I
AAIrgC,UAAJ,CAAe,0CAAf,CAAN,CAFAqgC,IAAUhqI,CAAVgqI;SAFJ,MAOED,KAASh4H,CAATg4H;EAIJ,SAAME,IAAex3B,UAAUrtF,CAAVqtF,CAArB,CACA,IAAgB,SAAZu3B,CAAJ,EAAsB;EACpB,UAAc,MAAVD,CAAU,IAAKE,IAAeF,CAAfE,IAAyB,CAA5C,EACE,MAAM,IAAItgC,UAAJ,CAAekgC,CAAf,CAAN,CAEFC,EAAWE,CAAXF,IAAsBG,IAAeF,CAArCD;OAJF,MAKO,IAAIG,MAAiBF,CAArB,EACL,MAAM,IAAIpgC,UAAJ,CAAekgC,CAAf,CAAN,CAGF,OAAOC,CAAP;KA9DyBh0H,EAiE3B8zH,WAAAA,mBAAAA,GAAA,UAAmBxkH,CAAnB;EAEE,SADA,IAAI8kH,KAAiB,CAArB,EACSlqI,IAAI,CAAb,EAAgBA,IAAIolB,EAAW/lB,MAA/B,IAAyCW,CAAzC,EACE,IAAIiF,KAAK0kI,SAAL1kI,CAAemgB,EAAWplB,CAAXolB,CAAfngB,CAAJ,EAAmC;EACjCilI,WAAiB,CAAjBA,CACA;EAIJ,YAAIA,IACK9kH,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,EAAoB,CAApBA,EAAuBhZ,MAAvBgZ,CAA8BngB,KAAKykI,WAAnCtkH,CADL8kH,GAGK9kH,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,EAAoB,CAApBA,EAAuBhZ,MAAvBgZ,CACHngB,KAAKklI,mBAALllI,CAAyBmgB,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,CAAzBngB,EAA8CA,KAAKykI,WAAnDzkI,CADGmgB,CAHT;KA1EyBtP,EAkF3B8zH,WAAAA,KAAAA,GAAA,UAAKlzH,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV/X,QAAKijI,cAALjjI,CAAoB8Q,CAApB9Q,EAA4BuyG,CAA5BvyG,EACA,IAAMgR,IAAQorH,oBAAoBtrH,CAApBsrH,CAAd;EAAA,UACM58G,IAAaxO,EAAM1V,KADzB;EAAA,UAEM4R,IAAcsS,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,EAAoB,CAApBA,EAAuBhZ,MAAvBgZ,CAChBxf,EAAKukI,mBAALvkI,CAAyBwf,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,CAAzBxf,EAA8CA,EAAK8jI,WAAnD9jI,CADgBwf,CAFpB,CAIA,OAAOxO,EAAMxM,OAANwM,CAAc9D,CAAd8D,CAAP;OANK+G,CAAP;KAnFyB7H,EA6F3B8zH,WAAAA,UAAAA,GAAA;EACE,QAAM3nC,MACJynC,aAAazkI,KAAKykI,aADpB;EAAA,QAGMzH,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAHnB,CAKA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAnGyBnsF,EACpB8zH,WAAAA,GAAY,SADQ9zH,GAqG7B;IArG6BshG,MAA7B,eAsGcxK,cAAcg9B,SAsC5B;EAKE,YAAA,CAAY3nC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,CAEE,IAAmB,QAAfusF,EAAOjvE,IAAX,EACE,MAAM,IAAIx0B,KAAJ,CACF,iFADE,CAAN,CAIF,KAAKsC,MAAMC,OAAND,CAAcmhG,EAAOjvE,IAArBlyB,CAAL,EACE,MAAM,IAAItC,KAAJ,CACF,sEACGyjG,EAAOjvE,IADV,cADE,CAAN,CAMF,IAAMo3G,IAAwBv2E,QAAM,CAANA,EAASouC,EAAOjvE,IAAPivE,CAAY5iG,MAAZ4iG,GAAqB,CAA9BpuC,CAA9B,CACA,KAAKk9C,KAAKnwG,WAALmwG,CAAiB9O,EAAOjvE,IAAPivE,CAAYn/F,KAAZm/F,GAAoBl/E,IAApBk/E,EAAjB8O,EAA6Cq5B,CAA7Cr5B,CAAL,EACE,MAAM,IAAIvyG,KAAJ,CACF,iCAAiC2/B,KAAKE,SAALF,CAAe8jE,EAAOjvE,IAAtBmL,CAAjC,GACA,4DAFE,CAAN,QAKFv4B,EAAKotB,IAALptB,GAAYq8F,EAAOjvE,IAAnBptB,EACAA,EAAKykI,kBAALzkI,IAA2B,GAAGwG,OAAOxG,EAAKotB,KAD1CptB,EAEAA,EAAK0wG,SAAL1wG,IAAkB,IAAIugI,SAAJ,GAAepxB,MAAMnvG,EAAKotB,IAALptB,CAAUvG,MAAVuG,GAAmB,GAAxC,EAFlBA;EA0BJ,UApD6BkQ,aAAAA,GAAAA,GA+B3Bw0H,WAAAA,mBAAAA,GAAA,UAAmBllH,CAAnB;EAEE,QAAMtS,KADNsS,IAAa06G,mBAAmB16G,CAAnB06G,GACkBh9H,OAA/B,CAIA,OAHAmC,KAAK+tB,IAAL/tB,CAAUI,OAAVJ,CAAkB,UAAC8M,CAAD,EAAc/R,CAAd;EAChB8S,QAAY9S,IAAI,CAAhB8S,IAAsBsS,EAAqBrT,CAArBqT,CAAtBtS;OADF7N,GAGO6N,CAAP;KArCyBgD,EAwC3Bw0H,WAAAA,KAAAA,GAAA,UAAK5zH,CAAL,EAA8ByhG,CAA9B;EACE,WAAOxpG,UAAUqzH,oBAAoBtrH,CAApBsrH,CAAVrzH,EAAuC1J,KAAKolI,kBAA5C17H,CAAP;KAzCyBmH,EA4C3Bw0H,WAAAA,UAAAA,GAAA;EACE,QAAMroC,MACJjvE,MAAM/tB,KAAK+tB,MADb;EAAA,QAGMivG,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAHnB,CAKA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAlDyBnsF,EACpBw0H,WAAAA,GAAY,SADQx0H,GAoD7B;IApD6BshG,MAA7B,eAqDcxK,cAAc09B,SCvmB5B;EAeE,YAAA,CAAYroC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,CAEE,IATM9P,YAAAA,GAA4B,IAA5BA,EAECA,gCAAAA,GACL,eAHIA,EASwB,QAA1Bq8F,EAAO+U,eAAmB,IAA6B,QAArB/U,EAAO78E,UAA7C,EAAiE;EAK/D,UAAIoM,IAAoB,IAAxB,CACwB,QAApBywE,EAAOzwE,SAAa,KACtBA,IAAYywE,EAAOzwE,SADG,GAGE,QAAtBywE,EAAOohC,WAAe,GAGxBz9H,EAAKoxG,eAALpxG,IAAwB4rB,GAAW,KAHX,GAOxB5rB,EAAKoxG,eAALpxG,IACK4rB,GAAWplB,OAAO2rG,OAAqB9V,EAAOo
hC,WAA5BtrB,EAXD;cAc1BnyG,EAAK6/H,QAAL7/H,GAAgBq8F,EAAOwjC,QAAvB7/H,EACAA,EAAK2kI,SAAL3kI,GAAiBq8F,EAAOsoC,SADxB3kI,EAEAA,EAAK4kI,qBAAL5kI,GAA6B2+H,eACzBtiC,EAAOuoC,qBAAPvoC,IAAgCr8F,EAAK6kI,8BADZlG,CAF7B3+H,EAIAA,EAAK8kI,qBAAL9kI,GAA6Bg/H,eAAe3iC,EAAOyoC,qBAAtB9F,CAJ7Bh/H,EAKAA,EAAKywG,mBAALzwG,GAA2Bg/H,eAAe3iC,EAAOoU,mBAAtBuuB,CAL3Bh/H,EAMAA,EAAK+kI,oBAAL/kI,GAA4B8+H,cAAcziC,EAAO0oC,oBAArBjG,CAN5B9+H,EAOAA,EAAKglI,QAALhlI,GAAgBq8F,EAAO2oC,QAPvBhlI,EAQAA,EAAKy9H,WAALz9H,GAAmBq8F,EAAOohC,WAR1Bz9H;EAsFJ,UA3H+BkQ,aAAAA,GAAAA,GAgDtB+0H,WAAAA,MAAAA,GAAP,UAAazlH,CAAb;EACEngB,SAAK6lI,UAAL7lI,GAAkBA,KAAK0gI,SAAL1gI,CACd,YADcA,GACCA,KAAKwgI,UAAUxgI,KAAKslI,UADrBtlI,EACiCA,KAAK/B,KADtC+B,EAEdA,KAAKulI,qBAFSvlI,EAEcA,KAAKylI,qBAFnBzlI,GAE0C,CAF1CA,EAGdA,KAAK0lI,oBAHS1lI,CAAlBA,EAIAA,KAAKyyG,KAALzyG,IAAa,CAJbA;KAjD2B6Q,EA0DnB+0H,WAAAA,6BAAAA,GAAV,UAAuCzlH,CAAvC,IA1D6BtP,EA4D7B+0H,WAAAA,YAAAA,GAAA,UAAYn0H,CAAZ,EAAqCotC,CAArC;EACE,UAAM,IAAI8lD,mBAAJ,CACF,wDADE,CAAN;KA7D2B9zF,EAiE7B+0H,WAAAA,mBAAAA,GAAA,UAAmBzlH,CAAnB;EAEE,QADAA,IAAa06G,mBAAmB16G,CAAnB06G,CAAb16G,EACwB,QAApBngB,KAAKo+H,WAAT,EACE,OAAWj+G,QAAAA,EAAYngB,KAAKslI,UAAjBnlH,CAAX,CAGF,IAAM2lH,IAAmBhzB,OAAqB9yG,KAAKo+H,WAA1BtrB,CAAzB,CACA,IAAIgzB,EAAO1rI,MAAP0rI,KAAkB3lH,EAAW/lB,MAAX+lB,GAAoB,CAA1C,EACE,MAAM,IAAIukF,UAAJ,CACF,sBAAoB1kG,KAAKo+H,WAAzB,0CAAA,GACyBj+G,CAFvB,CAAN,CAKA,KADA,IAAIplB,IAAI,CAAR,EACS0U,IAAI,CAAb,EAAgBA,IAAIq2H,EAAO1rI,MAA3B,IAAqCqV,CAArC,EAAwC;EACtC,UAAMktC,IAAKmpF,EAAOr2H,CAAPq2H,CAAX;EAAA,UACMlpF,IAAKz8B,EAAW1Q,IAAI,CAAf0Q,CADX,CAEA,IAAW,QAANw8B,CAAM,IAAgB,QAANC,CAAV,IAA0BD,MAAOC,CAA5C,EACE,MAAM,IAAI8nD,UAAJ,CACF,sBAAoB1kG,KAAKo+H,WAAzB,0CAAA,GACyBj+G,CAFvB,CAAN,CAGe,QAANw8B,CAAM,KACfmpF,EAAO/qI,CAAP+qI,IAAYlpF,CADG,GAGjB7hD,GAHiB;EAMrB,aAAQolB,EAAW,CAAXA,UAAkB2lH,IAAQ9lI,KAAKslI,WAAvC;KA3F2Bz0H,EA8F7B+0H,WAAAA,KAAAA,GAAA,UAAKn0H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV/X,QAAKijI,cAALjjI,CAAoB8Q,CAApB9Q,EAA4BuyG,CAA5BvyG,EAEA,IAAIgR,IAAQorH,oBAAoBtrH,CAApBsrH,CAAZ,CAKA,OAJoB,YAAhBprH,EAAM1T,KAAU,KAClB0T,IAAQ0sG,OAAO1sG,CAAP0sG,EAAc,OAAdA,CADU,GAGL0nB,SAASplI,EAAKklI,UAALllI,CAAgB8E,IAAhB9E,EAATolI,EAAiCp0H,EAAMzM,IAANyM,EAAjCo0H,EACD5gI,OADC4gI,CAEXlL,mBAAmBl6H,EAAKwzG,kBAALxzG,CAAwBgR,EAAM1V,KAA9B0E,CAAnBk6H,CAFWkL,CACf;OARKrtH,CAAP;KA/F2B7H,EA4G7B+0H,WAAAA,UAAAA,GAAA;EACE,QAAM5oC,MACJwjC,UAAUxgI,KAAKwgI,UACf8E,WAAWtlI,KAAKslI,WAChBC,uBAAuBzF,qBAAqB9/H,KAAKulI,qBAA1BzF,GACvB2F,uBAAuB1F,qBAAqB//H,KAAKylI,qBAA1B1F,GACvB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrB2F,sBAAsB1F,oBAAoBhgI,KAAK0lI,oBAAzB1F,GACtB2F,UAAU3lI,KAAK2lI,UACfvH,aAAap+H,KAAKo+H,aARpB;EAAA,QAUMpB,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAVnB,CAYA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAzH2BnsF,EACtB+0H,WAAAA,GAAY,WADU/0H,GA2H/B;IA3H+BshG,MAA/B,eA4HcxK,cAAci+B,WChL5B;EAGE,YAAA,CAAY5oC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,OAANvsF,SADF,QAEE9P,EAAK2wG,eAAL3wG,IAAuB,CAAvBA;EAsMJ,UA3MoCkQ,aAAAA,GAAAA,GAYxBm1H,WAAAA,cAAAA,GAAV,UAAwBv0H,CAAxB;EACE,UAAM,IAAIkzF,mBAAJ,EAAN;KAbgC9zF,EA0B1Bm1H,WAAAA,gCAAAA,GAAR,UAAwCtjG,CAAxC,EAAuDC,CAAvD;EACE,QAAc,QAAVD,CAAU,IAAkB,QAAVC,CAAtB,EACE,OAAO,IAAP,CACK,IAAID,EAAOtoC,MAAPsoC,GAAgBC,EAAOvoC,MAA3B,EACL,OAAO4F,KAAKimI,+BAALjmI,CAAqC2iC,CAArC3iC,EAA6C0iC,CAA7C1iC,CAAP,CACK,IAAsB,MAAlB2iC,EAAOvoC,MAAX,EACL,OAAOsoC,CAAP,CAGF,KADA,IAAM70B,IAAqB60B,EAAO7kC,KAAP6kC,CAAa,CAAbA,EAAgBA,EAAOtoC,MAAPsoC,GAAgBC,EAAOvoC,MAAvCsoC,CAA3B,EACSjzB,IAAI,CAAb,EAAgBA,IAAIkzB,EAAOvoC,MAA3B,IAAqCqV,CAArC,EAAwC;EACtC,UAAM1U,IAAI2nC,EAAOA,EAAOtoC,MAAPsoC,GAAgBC,EAAOvoC,MAAvBsoC,GAAgCjzB,CAAvCizB,CAAV;E
AAA,UACM1kC,IAAI2kC,EAAOlzB,CAAPkzB,CADV,CAEA,IAAS,QAAL5nC,CAAK,IAAa,QAALiD,CAAR,IAAqBjD,IAAI,CAAzB,IAA8BiD,IAAI,CAA3C,EACE6P,EAAY7R,IAAZ6R,CAAiB,IAAjBA,EADF,KAEO,IAAU,MAAN9S,CAAJ,EACL8S,EAAY7R,IAAZ6R,CAAiB7P,CAAjB6P,EADK,KAEA,IAAU,MAAN7P,CAAJ,EACL6P,EAAY7R,IAAZ6R,CAAiB9S,CAAjB8S,EADK,KAEA;EACL,YAAI9S,MAAMiD,CAAV,EACE,MAAM,IAAI0mG,UAAJ,CACF,0DACAxrE,KAAKE,SAALF,CAAewJ,CAAfxJ,CADA,GACyB,GADzB,GAC+BA,KAAKE,SAALF,CAAeyJ,CAAfzJ,CAF7B,CAAN,CAIFrrB,EAAY7R,IAAZ6R,CAAiB9S,CAAjB8S;;EAGJ,YAAOA,CAAP;KArDgCgD,EAwDlCm1H,WAAAA,MAAAA,GAAA,UAAM7lH,CAAN;EAOE,QALItkB,MAAMC,OAAND,CAAcskB,CAAdtkB,MAA8BA,MAAMC,OAAND,CAAcskB,EAAW,CAAXA,CAAdtkB,CAA9BA,KAEFskB,KAAc06G,mBAAmB16G,CAAnB06G,EAFZh/H,IAIJskB,IAAaA,GACE/lB,SAAS,CAAxB,EACE,MAAM,IAAIsqG,UAAJ,CACF,0EACQvkF,EAAW/lB,MADnB,eADE,CAAN,CAQF,KADA,IAAI8rI,MAAJ,OAAA,EACoBld,KAApB,EAAoBhlH,YAApB,EAAoBA,GAApB;EACe,eADJ/H,QACI,KAAqB,SAAbA,EAAM,CAANA,CAAR,IACXiqI,EAAWlqI,IAAXkqI,CAAgBjqI,EAAM,CAANA,CAAhBiqI,CADW;EAKf,UADAA,IAAazhB,OAAqByhB,CAArBzhB,GACErqH,SAAS,CAAxB,EACE,MAAM,IAAIsqG,UAAJ,CACF,gFAC4BxrE,KAAKE,SAALF,CAAe/Y,CAAf+Y,CAD5B,MADE,CAAN,CAOF,KAFA,IAAIrrB,IACiB,QAAjBsS,EAAW,CAAXA,CAAiB,GAAO,IAAP,GAAcA,EAAW,CAAXA,EAActiB,KAAdsiB,CAAoB,CAApBA,CADnC,EAESplB,IAAI,CAAb,EAAgBA,IAAIolB,EAAW/lB,MAA/B,IAAyCW,CAAzC,EAA4C;EAC1C,UAAMkB,IAAyB,QAAjBkkB,EAAWplB,CAAXolB,CAAiB,GAAO,IAAP,GAAcA,EAAWplB,CAAXolB,EAActiB,KAAdsiB,CAAoB,CAApBA,CAA7C,CACAtS,IAAc7N,KAAKimI,+BAALjmI,CAAqC6N,CAArC7N,EAAkD/D,CAAlD+D,CAAd6N;EAIF,SAAMs4H,IAAWhmH,EAAWre,GAAXqe,CAAe,UAAAlkB,CAAA;EAAS,aAAAA,EAAM7B,MAAN;OAAxB+lB,CAAjB,EACkC,MAA9BA,EAAWmB,OAAXnB,CAAmB,IAAnBA,KAC0C,MAA1CskG,OAAqB0hB,CAArB1hB,EAA+BrqH,SACjC4F,KAAKomI,eAALpmI,IAAuB,IAEvBA,KAAKomI,eAALpmI,IAAuB;KAjGO6Q,EAqGlCm1H,WAAAA,KAAAA,GAAA,UAAKv0H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,UADAjH,IAASA,CAATA,EACI9Q,EAAKylI,eAAT,EAA0B;EACxB,YAAMC,MAAN;EAAA,YACMC,IAAY70H,EAAO3P,GAAP2P,CAAW,UAAAE,CAAA;EAAS,iBAAAA,EAAMvS,IAAN;WAApBqS,CADlB,CAEA,KAAiC,MAA7B60H,EAAUhlH,OAAVglH,CAAkB,IAAlBA,CAAJ,EAAoC;EAIlC,eADA,IAAMv2B,IAAUw2B,MAAcD,CAAdC,CAAhB,OAAA,EACc5Y,KAAd,EAAc3pH,YAAd,EAAcA,GAAd;EAEE,iBAFG,IACGwiI,KADC9rI,UACS0E,IADb,EAEMqQ,IAAI,CAAb,EAAgBA,IAAIsgG,IAAUy2B,CAA9B,IAAuC/2H,CAAvC,EACE/U,IAAI+rI,aAAa/rI,CAAb+rI,EAAgB,CAAhBA,CAAJ/rI,CAEF2rI,EAAerqI,IAAfqqI,CAAoB3rI,CAApB2rI;EAEF,kBAAO1lI,EAAK+lI,aAAL/lI,CAAmB0lI,CAAnB1lI,CAAP;EAKA,cADA,IAAIgmI,KAAa,CAAjB,OAAA,EACgBC,KAAhB,EAAgBztH,YAAhB,EAAgBA,GAAhB;EAAK,cAAMze,CAAN,CAEH,IAAa,SADP8rI,KADG9rI,UACO0E,IACH,CAAb,EAAmB;EACjB,gBAAMssB,IAAShxB,EAAEuB,KAAjB;EAAA,gBACMswB,IAAYb,EAAO,CAAPA,CADlB;EAAA,gBAEM9tB,IAAW8tB,EAAO7tB,KAAP6tB,CAAa,CAAbA,EAAgBvkB,MAAhBukB,EAAwBa,EAAxBb,CAFjB;EAAA,gBAGIm7G,IAAcnsI,EAAEyK,OAAFzK,EACb6xB,GAAWplB,OAAO2/H,UAAoBp7G,EAAO7tB,KAAP6tB,CAAa,CAAbA,CAApBo7G,EADLpsI,CAHlB,CAMAmsI,KADAA,IAAcpI,UAAcoI,CAAdpI,GAA4B,GAAG,EAA/BA,GACYt5H,QAAQvH,EAAlCipI,EACAR,EAAerqI,IAAfqqI,CAAoBQ,CAApBR,CADAQ,EAEAF,KAAa,CAFbE;aAPF,MAUO,IAAIL,IAAQ,CAAZ,EAAe;EACpB,gBAAMz4G,IAAOg5G,QAAgB,CAAhBA,EAAmBP,CAAnBO,EAA0B5/H,MAA1B4/H,EAAkC,EAAlCA,CAAb,CACAV,EAAerqI,IAAfqqI,CAAoB5H,UAAc/jI,CAAd+jI,EAAiB1wG,CAAjB0wG,CAApB4H,GACAM,KAAa,CADbN;aAFK,MAMLA,EAAerqI,IAAfqqI,CAAoB3rI,CAApB2rI;EAGJ,aAAIj1H,IAAIzQ,EAAK+lI,aAAL/lI,CAAmB0lI,CAAnB1lI,CAAR;EAAA,YACMqmI,IAAQ51H,EAAEhS,IADhB,CAEA,IAAIunI,CAAJ,EAGE,IAAa,QAATK,CAAJ,EAAmB;EACjB,cAAM97B,IAAS95F,EAAEnV,KAAjB,CAGM2B,KADA2uB,IAAY2+E,EADJA,EAAO9wG,MAAP8wG,GACmB,CAAfA,GAEF/jG,OAAO+jG,EAAOrtG,KAAPqtG,CAAa,CAAbA,EAAgBA,EAAO9wG,MAAP8wG,GAAgB,CAAhCA,EADjBttG,CAENwT,IAAIqtH,UAAcrtH,EAAEjM,OAAFiM,GAAY,GAAGmb,EAAfnb,CAAdqtH,GAA2C,GAAG,EAA9CA,EACKt5H,OADLs5H,CACa7gI,CADb6g
I,CAAJrtH;WANF,MAQO,IAAI41H,IAAQ,CAAZ,EAAe;EACdj5G,eAAQi5G,IAAQ,GAAG7/H,OAAO4/H,QAAgB,CAAhBA,EAAmBC,IAAQ,CAA3BD,EAA1Bh5G,CACN3c,IAAIqtH,UAAcrtH,CAAdqtH,EAAiB1wG,CAAjB0wG,CAAJrtH;EAGJ,gBAAOA,CAAP;EAGF,cAAOzQ,EAAK+lI,aAAL/lI,CAAmB8Q,CAAnB9Q,CAAP;OA/DG+X,CAAP;KAtGgC7H,EA0KlCm1H,WAAAA,mBAAAA,GAAA,UAAmB7lH,CAAnB;EAEE,QAAItS,CAAJ,CAEEA,IADmB,SAFrBsS,IAAaA,GAEE,EAAM,GACL,IADK,GAGLA,EAAW,CAAXA,EAActiB,KAAdsiB,CAAoB,CAApBA,CAFdtS,CAIF,KAAK,IAAI9S,IAAI,CAAb,EAAgBA,IAAIolB,EAAW/lB,MAA/B,IAAyCW,CAAzC,EAA4C;EAC1C,UAAMkB,IAAyB,QAAjBkkB,EAAWplB,CAAXolB,CAAiB,GAAO,IAAP,GAAcA,EAAWplB,CAAXolB,EAActiB,KAAdsiB,CAAoB,CAApBA,CAA7C,CACAtS,IAAc7N,KAAKimI,+BAALjmI,CAAqC6N,CAArC7N,EAAkD/D,CAAlD+D,CAAd6N;EAIF,UADA,IAAIq4H,MAAJ,OAAA,EACoB7c,KAApB,EAAoBrlH,YAApB,EAAoBA,GAApB;EACe,eADJ/H,QACI,KAAqB,SAAbA,EAAM,CAANA,CAAR,IACXiqI,EAAWlqI,IAAXkqI,CAAgBjqI,EAAM,CAANA,CAAhBiqI,CADW;EAUf,YAJEr4H,IADwB,OAD1Bq4H,IAAazhB,OAAqByhB,CAArBzhB,GACErqH,MAAW,GACV8rI,EAAW/+H,MAAX++H,CAAkBr4H,CAAlBq4H,CADU,IAGT,MAAM/+H,OAAO0G,EAE9B;KAnMgCgD,EAsMlCm1H,WAAAA,YAAAA,GAAA,UAAYv0H,CAAZ,EAAqCotC,CAArC;EAEE,UAAM,IAAI8lD,mBAAJ,CACF,oDADE,CAAN;KAxMgC9zF,GA2MpC;IA3MoCshG,MAApC;EAAA;EAiOE,YAAA,CAAYnV,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAYJ,UAfyBI,aAAAA,GAAAA,GAMbo2H,WAAAA,cAAAA,GAAV,UAAwBx1H,CAAxB;EACE,WAAOiH,KAAK;EAEV,WADA,IAAI5G,IAASL,EAAO,CAAPA,EAAUpL,KAAVoL,EAAb,EACS1W,IAAI,CAAb,EAAgBA,IAAI0W,EAAOrX,MAA3B,IAAqCW,CAArC,EACE+W,IAAS21F,IAAQ31F,CAAR21F,EAAgBh2F,EAAO1W,CAAP0W,CAAhBg2F,CAAT31F,CAEF,OAAOA,CAAP;OALK4G,CAAP;KAPqB7H,EAChBo2H,WAAAA,GAAY,KADIp2H,GAezB;IAfyBm1H,MA/NzB,eA+Ocr+B,cAAcs/B,KAgD5B;EA+BE,YAAA,CAAYjqC,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAYJ,UAf8BI,aAAAA,GAAAA,GAMlBq2H,WAAAA,cAAAA,GAAV,UAAwBz1H,CAAxB;EACE,WAAOiH,KAAK;EAEV,WADA,IAAI5G,IAASL,EAAO,CAAPA,EAAUpL,KAAVoL,EAAb,EACS1W,IAAI,CAAb,EAAgBA,IAAI0W,EAAOrX,MAA3B,IAAqCW,CAArC,EACE+W,IAASy1F,IAAQz1F,CAARy1F,EAAgB91F,EAAO1W,CAAP0W,CAAhB81F,CAATz1F,CAEF,OAAOA,CAAP;OALK4G,CAAP;KAP0B7H,EACrBq2H,WAAAA,GAAY,UADSr2H,GAe9B;IAf8Bm1H,MA7B9B,eA6Ccr+B,cAAcu/B,UAgD5B;EA8BE,YAAA,CAAYlqC,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAYJ,UAf6BI,aAAAA,GAAAA,GAMjBs2H,WAAAA,cAAAA,GAAV,UAAwB11H,CAAxB;EACE,WAAOiH,KAAK;EAEV,WADA,IAAI5G,IAASL,EAAO,CAAPA,EAAUpL,KAAVoL,EAAb,EACS1W,IAAI,CAAb,EAAgBA,IAAI0W,EAAOrX,MAA3B,IAAqCW,CAArC,EACE+W,IAAS21F,IAAQ31F,CAAR21F,EAAgBh2F,EAAO1W,CAAP0W,CAAhBg2F,CAAT31F,CAEF,OAAOy1F,IAAQG,UAAU,IAAIj2F,EAAOrX,MAArBstG,CAARH,EAAsCz1F,CAAtCy1F,CAAP;OALK7uF,CAAP;KAPyB7H,EACpBs2H,WAAAA,GAAY,SADQt2H,GAe7B;IAf6Bm1H,MA5B7B,eA4Ccr+B,cAAcw/B,SAiD5B;EA8BE,YAAA,CAAYnqC,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAYJ,UAf6BI,aAAAA,GAAAA,GAMjBu2H,WAAAA,cAAAA,GAAV,UAAwB31H,CAAxB;EACE,WAAOiH,KAAK;EAEV,WADA,IAAI5G,IAASL,EAAO,CAAPA,CAAb,EACS1W,IAAI,CAAb,EAAgBA,IAAI0W,EAAOrX,MAA3B,IAAqCW,CAArC,EACE+W,IAASopG,QAAYppG,CAAZopG,EAAoBzpG,EAAO1W,CAAP0W,CAApBypG,CAATppG,CAEF,OAAOA,CAAP;OALK4G,CAAP;KAPyB7H,EACpBu2H,WAAAA,GAAY,SADQv2H,GAe7B;IAf6Bm1H,MA5B7B,eA4Ccr+B,cAAcy/B,SAgD5B;EA8BE,YAAA,CAAYpqC,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAYJ,UAf6BI,aAAAA,GAAAA,GAMjBw2H,WAAAA,cAAAA,GAAV,UAAwB51H,CAAxB;EACE,WAAOiH,KAAK;EAEV,WADA,IAAI5G,IAASL,EAAO,CAAPA,CAAb,EACS1W,IAAI,CAAb,EAAgBA,IAAI0W,EAAOrX,MAA3B,IAAqCW,CAArC,EACE+W,IAASkqH,QAAYlqH,CAAZkqH,EAAoBvqH,EAAO1W,CAAP0W,CAApBuqH,CAATlqH,CAEF,OAAOA,CAAP;OALK4G,CAAP;KAPyB7H,EACpBw2H,WAAAA,GAAY,SADQx2H,GAe7B;IAf6Bm1H,MA5B7B,eA4Ccr+B,cAAc0/B,SAgD5B;EA0CE,YAAA,CAAYrqC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAHS9P,cAAAA,IAAgB,CAAhBA,EAKO,QAAVq8F,CAAU,KACZA,MADY,CALPr8F,EAQPA,EAAK7C,IAAL6C,GAA2B,QAAfq8F,EAAOl/F,IAAQ,GAAO6C,EAAK68H,YAAZ,GAA2BxgC,EAAOl/F
,IARtD6C,EASPA,EAAK2wG,eAAL3wG,IAAuB,CAThBA,EAUPA,EAAKylI,eAALzlI,IAAuB,CAVhBA;EAiGX,UAnGiCkQ,aAAAA,GAAAA,GAe/By2H,WAAAA,MAAAA,GAAA,UAAMnnH,CAAN;EAEE,SAAMtkB,MAAMC,OAAND,CAAcskB,CAAdtkB,MAA6BA,MAAMC,OAAND,CAAcskB,EAAW,CAAXA,CAAdtkB,KACT,MAAtBskB,EAAW/lB,MADf,EAEE,MAAM,IAAIsqG,UAAJ,CACF,uEADE,CAAN,CAOF,KADA,IAAI6iC,KAAe,CAAnB,OAAA,EACoBC,IAHpBrnH,IAAaA,CAGb,EAAoBnc,YAApB,EAAoBA,GAApB;EACE,UAAa,SADJ/H,QACI,CAAb,EAAmB;EACjBsrI,aAAe,CAAfA,CACA;;EAGJ,UAAIA,CAAJ,EAAA;EAKA,WADA,IAAME,MAAN,EACS1sI,IAAI,CAAb,EAAgBA,IAAIolB,EAAW/lB,MAA/B,IAAyCW,CAAzC,EAA4C;EAC1C,YAAM2sI,IAAyBvnH,EAAWplB,CAAXolB,EAActiB,KAAdsiB,EAA/B,CACAunH,EAAuBvwF,MAAvBuwF,CAA8B1nI,KAAKlC,IAAnC4pI,EAAyC,CAAzCA,EAEA,KADA,IAAIC,KAAS,CAAb,OAAA,EACoBC,KAApB,EAAoBzuH,YAApB,EAAoBA,GAApB;EAAK,cAAMld,QAAN,CACH,IAAI6vG,KAAKnwG,WAALmwG,CAAiB7vG,CAAjB6vG,EAAwB47B,CAAxB57B,CAAJ,EAAqD;EACnD67B,iBAAS,CAATA,CACA;;EAGCA,cACHF,EAASzrI,IAATyrI,CAAcC,CAAdD,CADGE;EAIP,WAAIF,EAASrtI,MAATqtI,GAAkB,CAAtB,EACE,MAAM,IAAI/iC,UAAJ,CACF,8GAEAxrE,KAAKE,SAALF,CAAe/Y,CAAf+Y,CAHE,CAAN;;KApD2BroB,EA2DrBy2H,WAAAA,cAAAA,GAAV,UAAwB71H,CAAxB;EAAA,gBAAA,CACE,OAAOiH,KAAK;EACV,aAAOmvH,YAAcp2H,CAAdo2H,EAAsBlnI,EAAK7C,IAA3B+pI,CAAP;OADKnvH,CAAP;KA5D6B7H,EAiE/By2H,WAAAA,mBAAAA,GAAA,UAAmBnnH,CAAnB;EACE,SAAMtkB,MAAMC,OAAND,CAAcskB,CAAdtkB,MAA6BA,MAAMC,OAAND,CAAcskB,EAAW,CAAXA,CAAdtkB,CAAnC,EACE,MAAM,IAAI6oG,UAAJ,CACF,6DADE,CAAN,CAQF,KALA,IAAMpuF,IAAc6J,CAApB,EACMtS,IAAcyI,EAAY,CAAZA,EAAezY,KAAfyY,EADpB,EAEMxY,IAAOkC,KAAKlC,IAALkC,GAAY,CAAZA,GAAgB6N,EAAYzT,MAAZyT,GAAqB7N,KAAKlC,IAA1CkC,GAAiDA,KAAKlC,IAFnE,OAAA,EAKoBqb,IAAA7C,EAAYzY,KAAZyY,CAAkB,CAAlBA,CAApB,EAAoBtS,YAApB,EAAoBA,GAApB;EAAK,UAAM/H,QAAN,CACH,IAAyB,QAArB4R,EAAY/P,CAAZ+P,CAAqB,IAAuB,QAAf5R,EAAM6B,CAAN7B,CAAjC,EAAsD;EACpD4R,UAAY/P,CAAZ+P,IAAoB,IAApBA,CACA;EAEFA,SAAY/P,CAAZ+P,KAAqB5R,EAAM6B,CAAN7B,CAArB4R;EAEF,YAAOA,CAAP;KAlF6BgD,EAqF/By2H,WAAAA,YAAAA,GAAA,UAAY71H,CAAZ,EAAqCotC,CAArC;EAEE,UAAM,IAAI8lD,mBAAJ,CACF,0DADE,CAAN;KAvF6B9zF,EA2F/By2H,WAAAA,UAAAA,GAAA;EACE,QAAMtqC,MACJl/F,MAAQkC,KAAKlC,MADf;EAAA,QAGMk/H,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAHnB,CAKA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAjG6BnsF,EACxBy2H,WAAAA,GAAY,aADYz2H,GAmGjC;IAnGiCm1H,MArCjC,CA2LA,sBAAA,CAwCuBloI,CAxCvB,EAwCqCgP,CAxCrC;EAyCE,SAAOhP,IAAO,CAAd,GACEA,KAAQgP,CAARhP,CAEF,OAAOA,CAAP;EAGF,kBAAA,CAAkBpD,CAAlB,EAA6B0W,CAA7B,EAAwC4P,CAAxC;EACE,MAAItmB,EAAEuB,KAAFvB,CAAQN,MAARM,GAAiB,CAAjBA,IAAsB0W,EAAEnV,KAAFmV,CAAQhX,MAARgX,GAAiB,CAA3C,EACE,MAAM,IAAIuzF,mBAAJ,CACF,kEADE,CAAN,CAgBF,IAbAwpB,KAASzyH,MAATyyH,CACIzzH,EAAEuB,KAAFvB,CAAQN,MAARM,IAAkB,CADtByzH,EAEI,yDACezzH,EAAEuB,KAAFvB,CAAQN,MAH3B+zH,GAIAA,KAASzyH,MAATyyH,CACIzzH,EAAEuB,KAAFvB,CAAQN,MAARM,IAAkB,CADtByzH,EAEI,yDACe/8G,EAAEnV,KAAFmV,CAAQhX,MAH3B+zH,CAJAA,EASoB,mBAATntG,CAAS,KAClBA,KAAQA,GAAMA,EADI,CATpBmtG,EAagB,gBAAZzzH,EAAEuD,KAAU,IAA2B,gBAAZmT,EAAEnT,KAAjC,EACE,MAAM,IAAI0mG,mBAAJ,CACF,6DADE,CAAN,CAIF,IAAM6hC,IAAQ9rI,EAAEuB,KAAFvB,CAAQN,MAAtB;EAAA,MACM4sI,IAAQ51H,EAAEnV,KAAFmV,CAAQhX,MADtB,CAEY,QAAR4mB,CAAQ,KAEVA,KAAQwlH,IAAQ,GAAGQ,IAAQ,EAFjB,EAIZ,IAAMc,IAAY9mH,CAAlB,CAEA,OAAOmwG,KAAS;EACd,QAAIh2H,CAAJ,EAmBIokD,CAnBJ,CACA,IAAIinF,IAAQQ,CAAZ,EAAmB;EACjB7rI,UAAOqrI,IAAQQ,CAAf7rI,CAEA,KADA,IAAM4sI,MAAN,EACShtI,IAAI,CAAb,EAAgBA,IAAII,CAApB,IAA4BJ,CAA5B,EACEgtI,EAAU/rI,IAAV+rI,CAAe,CAAfA,EAEF32H,IAAIA,EAAEjM,OAAFiM,CAAUA,EAAEnV,KAAFmV,CAAQjK,MAARiK,CAAe22H,CAAf32H,CAAVA,CAAJA;OANF,MAOO,IAAI41H,IAAQR,CAAZ,EAAmB;EACxBrrI,UAAO6rI,IAAQR,CAAfrrI,CAEA,KADM4sI,MAAAA,EACGhtI,IAAI,CAAb,EAAgBA,IAAII,CAApB,IAA4BJ,CAA5B,EACEgtI,EAAU/rI,IAAV+rI,CAAe,CAAfA,EAEFrtI,IAA
IA,EAAEyK,OAAFzK,CAAUA,EAAEuB,KAAFvB,CAAQyM,MAARzM,CAAeqtI,CAAfrtI,CAAVA,CAAJA;OANK,MAQLS,IAAO,CAAPA,CAIF,IAAuB,MAAnBT,EAAEuB,KAAFvB,CAAQN,MAAW,IAAwB,MAAnBgX,EAAEnV,KAAFmV,CAAQhX,MAApC,EAEImlD,IADEuoF,EAAU,CAAVA,MAAiBA,EAAU,CAAVA,CAAjBA,GACIptI,EAAEmO,SAAFnO,CAAY0W,CAAZ1W,EAAeI,GAAfJ,CAAmBotI,EAAU,CAAVA,CAAnBptI,CADJotI,GAGIptI,EAAEgP,SAAFhP,EAAa,GAAG,EAAhBA,EAAoBmO,SAApBnO,CAA8B0W,CAA9B1W,EAAiCI,GAAjCJ,CAAqCotI,EAAU,CAAVA,CAArCptI,CAFN6kD,CAFJ,KAMO;EACL,UAAMyoF,IAAOF,EAAU,CAAVA,MAAiBptI,EAAEuB,KAAFvB,CAAQN,MAARM,GAAiB,CAAlCotI,IAAsC,IAAnD;EAAA,UACMG,IAAOH,EAAU,CAAVA,MAAiB12H,EAAEnV,KAAFmV,CAAQhX,MAARgX,GAAiB,CAAlC02H,IAA6C,IAD1D,CAEAvoF,IAAM7kD,EAAEmM,MAAFnM,CAAS0W,CAAT1W,EAAYstI,CAAZttI,EAAkButI,CAAlBvtI,CAAN6kD;EAGF,SAAIpkD,IAAO,CAAX,EAAc;EACZ,UAAI2c,UAAJ;EAAA,UAMMowH,MANN,CAOA,KAASntI,IALP+c,IADE0uH,IAAQQ,CAARR,GACIA,IAAQQ,CAARR,GAAgB,CADpBA,GAGIA,IAAQ,CAGhB,EAAkBzrI,IAAI+c,IAAM3c,CAA5B,IAAoCJ,CAApC,EACEmtI,EAAYlsI,IAAZksI,CAAiBntI,CAAjBmtI,EAEF3oF,IAAMA,EAAIn5C,OAAJm5C,CAAY2oF,CAAZ3oF,CAANA;EAKF,YAHyB,MAArBA,EAAItjD,KAAJsjD,CAAUnlD,MAAW,KACvBmlD,IAAMA,EAAIv5C,UAAJu5C,CAAe,CAAfA,CADiB,GAGlBA,CAAP;KAjDK4xE,CAAP;iBAhIYxpB,cAAc2/B,aAwM5B;EAME,YAAA,CAAYtqC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEE9P,EAAKqgB,IAALrgB,GAAYq8F,EAAOh8E,IAAnBrgB,EACAA,EAAKwnI,SAALxnI,GAAqC,QAApBq8F,EAAOmrC,SAAa,IAAenrC,EAAOmrC,SAD3DxnI,EAEAA,EAAK2wG,eAAL3wG,IAAuB,CAFvBA,EAGAA,EAAKylI,eAALzlI,IAAuB,CAHvBA;EA0GJ,UAlHyBkQ,aAAAA,GAAAA,GAcvBu3H,WAAAA,MAAAA,GAAA,UAAMjoH,CAAN;EACEguG,SAASzyH,MAATyyH,CACItyH,MAAMC,OAAND,CAAcskB,CAAdtkB,KAAmD,MAAtBskB,EAAW/lB,MAAxCyB,IACIA,MAAMC,OAAND,CAAcskB,EAAW,CAAXA,CAAdtkB,CADJA,IACoCA,MAAMC,OAAND,CAAcskB,EAAW,CAAXA,CAAdtkB,CAFxCsyH,EAGI,+DAHJA,EAIA,IAAMzrF,IAASviB,EAAW,CAAXA,CAAf;EAAA,QACMwiB,IAASxiB,EAAW,CAAXA,CADf,CAEA,IAAIuiB,EAAOtoC,MAAPsoC,GAAgB,CAAhBA,IAAqBC,EAAOvoC,MAAPuoC,GAAgB,CAAzC,EACE,MAAM,IAAIgiE,mBAAJ,CACF,8DADE,CAAN,CAIF,IAAM3jF,IAAOhhB,KAAKqoI,aAALroI,CAAmB0iC,CAAnB1iC,EAA2B2iC,CAA3B3iC,CAAb,CACA,IAAI0iC,EAAO1hB,EAAK,CAALA,CAAP0hB,MAAoBC,EAAO3hB,EAAK,CAALA,CAAP2hB,CAAxB,EACE,MAAM,IAAI+hE,UAAJ,CACF,gCACGhiE,EAAO1hB,EAAK,CAALA,CAAP0hB,CADH,UAAA,GAC0BC,EAAO3hB,EAAK,CAALA,CAAP2hB,CAFxB,CAAN;KA5BmB9xB,EAkCbu3H,WAAAA,cAAAA,GAAV,UAAwB32H,CAAxB;EACE,QAAsB,MAAlBA,EAAOrX,MAAX,EACE,MAAM,IAAIsqG,UAAJ,CACF,oEACgBjzF,EAAOrX,MADvB,eADE,CAAN,CAKF,IAEI4mB,CAFJ;EAAA,QAAI4nE,IAAKn3E,EAAO,CAAPA,CAAT;EAAA,QACIq3E,IAAKr3E,EAAO,CAAPA,CADT,CAiBA,OAREuP,IANGnlB,MAAMC,OAAND,CAAcmE,KAAKghB,IAAnBnlB,IAMImE,KAAKghB,IAALhhB,CAAU8B,GAAV9B,CACI,UAAClC,CAAD,EAAO/C,CAAP;EAAa,aAAAutI,cACTxqI,CADSwqI,EACH72H,EAAO1W,CAAP0W,EAAUxV,KAAVwV,CAAgBrX,MADbkuI,CAAA;OADjBtoI,CANJnE,IAEDysI,cAActoI,KAAKghB,IAAnBsnH,EAAyB1/C,EAAG3sF,KAAH2sF,CAASxuF,MAAlCkuI,GACAA,cAActoI,KAAKghB,IAAnBsnH,EAAyBx/C,EAAG7sF,KAAH6sF,CAAS1uF,MAAlCkuI,EAGFtnH,EAIEhhB,KAAKmoI,SAALnoI,KACF4oF,IAAKq0B,YAAYr0B,CAAZq0B,EAAgBj8F,EAAK,CAALA,CAAhBi8F,CAALr0B,EACAE,IAAKm0B,YAAYn0B,CAAZm0B,EAAgBj8F,EAAK,CAALA,CAAhBi8F,CAFHj9G,CAJFghB,EAQKunH,SAAS3/C,CAAT2/C,EAAaz/C,CAAby/C,EAAiBvnH,CAAjBunH,CAAP;KA1DqB13H,EA6Dfu3H,WAAAA,cAAAA,GAAR,UAAsB1lG,CAAtB,EAAqCC,CAArC;EAYE,WAVK9mC,MAAMC,OAAND,CAAcmE,KAAKghB,IAAnBnlB,IAQImE,KAAKghB,IARTnlB,IAGDysI,cAActoI,KAAKghB,IAAnBsnH,EAAyB5lG,EAAOtoC,MAAhCkuI,GACAA,cAActoI,KAAKghB,IAAnBsnH,EAAyB3lG,EAAOvoC,MAAhCkuI,EAMJ;KAzEqBz3H,EA4EvBu3H,WAAAA,mBAAAA,GAAA,UAAmBjoH,CAAnB;EACEguG,SAASzyH,MAATyyH,CACItyH,MAAMC,OAAND,CAAcskB,CAAdtkB,KAAmD,MAAtBskB,EAAW/lB,MAAxCyB,IACIA,MAAMC,OAAND,CAAcskB,EAAW,CAAXA,CAAdtkB,CADJA,IACoCA,MAAMC,OAAND,CAAcskB,EAAW,CAAXA,CAAdtkB,CAFxCsyH,EAGI,+DAHJA,EAIA,IAAMzrF,IAAUviB,EAAW,CA
AXA,EAAwBtiB,KAAxBsiB,EAAhB;EAAA,QACMwiB,IAAUxiB,EAAW,CAAXA,EAAwBtiB,KAAxBsiB,EADhB,CAEA,IAAIuiB,EAAOtoC,MAAPsoC,GAAgB,CAAhBA,IAAqBC,EAAOvoC,MAAPuoC,GAAgB,CAAzC,EACE,MAAM,IAAIgiE,mBAAJ,CACF,8DADE,CAAN,CAIF,IAAM3jF,IAAOhhB,KAAKqoI,aAALroI,CAAmB0iC,CAAnB1iC,EAA2B2iC,CAA3B3iC,CAAb,CACA0iC,EAAOyU,MAAPzU,CAAc1hB,EAAK,CAALA,CAAd0hB,EAAuB,CAAvBA,GACAC,EAAOwU,MAAPxU,CAAc3hB,EAAK,CAALA,CAAd2hB,EAAuB,CAAvBA,CADAD,EAEAC,EAAOwU,MAAPxU,CAAc,CAAdA,EAAiB,CAAjBA,CAFAD,CAGA,IAAM70B,IAAc60B,EAAOv7B,MAAPu7B,CAAcC,CAAdD,CAApB,CAIA,OAH2B,MAAvB70B,EAAYzT,MAAW,IACzByT,EAAY7R,IAAZ6R,CAAiB,CAAjBA,CADyB,EAGpBA,CAAP;KAhGqBgD,EAmGvBu3H,WAAAA,YAAAA,GAAA,UAAY32H,CAAZ,EAAqCotC,CAArC;EAEE,UAAM,IAAI8lD,mBAAJ,CACF,kDADE,CAAN;KArGqB9zF,EAyGvBu3H,WAAAA,UAAAA,GAAA;EACE,QAAMprC,MACJh8E,MAAQhhB,KAAKghB,MACbmnH,WAAanoI,KAAKmoI,WAFpB;EAAA,QAIMnL,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAJnB,CAMA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAhHqBnsF,EAChBu3H,WAAAA,GAAY,KADIv3H,GAkHzB;IAlHyBm1H,MAAzB,+BCr9BItrI,GAAWgN,GAAcC,GAAkB6G,GAAeg6H,GAC1Dh4D;EACF,MAAIjxB,CAAJ,CACA,qBAFEixB,WAEa,MAAX91E,EAAE0E,IAAN,EACEmgD,IAAMkpF,qBACF/tI,CADE+tI,EACa/gI,CADb+gI,EAEF9gI,CAFE8gI,EAE+Bj4D,CAF/Bi4D,EAEwCD,CAFxCC,EAGFj6H,CAHEi6H,CAANlpF,CADF,KAKO,IAAe,MAAX7kD,EAAE0E,IAAN,EAELmgD,IAAMmpF,qBACFhuI,CADEguI,EACahhI,CADbghI,EAEF/gI,CAFE+gI,EAE+Bl4D,CAF/Bk4D,EAEwCF,CAFxCE,EAGFl6H,CAHEk6H,CAANnpF,CAFK,KAMA;EAAA,QAAe,MAAX7kD,EAAE0E,IAAN,EAML,MAAM,IAAIulG,mBAAJ,CACF,6DAA2DjqG,EAAE0E,IAA7D,SADE,CAAN,CALAmgD,IAAMopF,qBACFjuI,CADEiuI,EACajhI,CADbihI,EAEFhhI,CAFEghI,EAE+Bn4D,CAF/Bm4D,EAEwCH,CAFxCG,EAGFn6H,CAHEm6H,CAANppF;EASF,UAAOA,CAAP;EAoBF,yCAAA,CACI7kD,CADJ,EACe8tI,CADf,EAC8Bh6H,CAD9B,EAC4CosD,CAD5C,EAEI4V,CAFJ;EAGE,0BADEA,WACK93D,KAAK;EACH,QAAMkwH,IAAkBC,QAAYnuI,CAAZmuI,EAAejuE,CAAfiuE,CAAxB;EAAA,QACMnhI,IAAOkhI,EAAgBlhI,IAD7B;EAAA,QAEMC,IAAWihI,EAAgBjhI,QAFjC,CAKA,QADIG,qBAAmBpN,CAAnBoN,EAAsBJ,CAAtBI,EAA4BH,CAA5BG,EAAsC0G,CAAtC1G,EAA4C0gI,CAA5C1gI,EAAmD0oE,CAAnD1oE,GACYJ,GAAMC,EAAtB;KANF+Q,CAAP;EA2BF,2CAAA,CACIhe,CADJ,EACe8tI,CADf,EAC8Bh6H,CAD9B,EAC4CosD,CAD5C,EAEI4V,CAFJ;EAGE,0BADEA,WACK93D,KAAK;EAKH,SAJA,IAAMkwH,IAAkBC,QAAYnuI,CAAZmuI,EAAejuE,CAAfiuE,CAAxB,EACMnhI,IAAOkhI,EAAgBlhI,IAD7B,EAEMC,IAAWihI,EAAgBjhI,QAFjC,EAGM88H,MAHN,OAAA,EAImBtrH,IAAA2vH,QAAiB,CAAjBA,EAAoBpuI,EAAE0E,IAAtB0pI,CAAnB,EAAmB9kI,YAAnB,EAAmBA,GAAnB;EAAK,UAAMlG,QAAN,EACkC,MAAjC88D,EAAct5C,OAAds5C,CAAsB98D,CAAtB88D,IACF6pE,EAAYzoI,IAAZyoI,CAAiB,CAAjBA,IAEAA,EAAYzoI,IAAZyoI,CAAiB/pI,EAAEuB,KAAFvB,CAAQoD,CAARpD,CAAjB+pI;EAGJ,SAAMsE,IAAgBrhI,EAAKvC,OAALuC,CAAa+8H,CAAb/8H,CAAtB;EAAA,QACMshI,IAAoBrhI,EAASxC,OAATwC,CAAiB88H,CAAjB98H,CAD1B;EAAA,QAEMshI,IACO,QAATT,CAAS,GAAO,IAAP,GAAcA,EAAMrjI,OAANqjI,CAAc/D,CAAd+D,CAH3B;EAAA,QAIMU,IACM,QAAR16H,CAAQ,GAAO,IAAP,GAAcA,EAAKrJ,OAALqJ,CAAai2H,CAAbj2H,CAL1B,CASA,QAHe1G,qBACXpN,CADWoN,EACRihI,CADQjhI,EACOkhI,CADPlhI,EAC0BohI,CAD1BphI,EAEXmhI,CAFWnhI,EAEK0oE,CAFL1oE,GAGCJ,GAAMC,EAAtB;KArBF+Q,CAAP;EAoCF,kCAAA,CACIhe,CADJ,EACe8tI,CADf,EAC8Bh6H,CAD9B,EAC4CosD,CAD5C,EAEI4V,CAFJ;EAGE,0BADEA,WACEs7B,KAAKnwG,WAALmwG,CACIlxC,EAAc/8D,KAAd+8D,GAAsB98C,IAAtB88C,EADJkxC,EACkCg9B,QAAiB,CAAjBA,EAAoBpuI,EAAE0E,IAAF1E,GAAS,CAA7BouI,CADlCh9B,IAEKq9B,gCACHzuI,CADGyuI,EACAX,CADAW,EACO36H,CADP26H,EACavuE,CADbuuE,EAC4B34D,CAD5B24D,CAFLr9B,GAKKs9B,kCACH1uI,CADG0uI,EACAZ,CADAY,EACO56H,CADP46H,EACaxuE,CADbwuE,EAC4B54D,CAD5B44D,CALT;iBDo9BYzhC,cAAcygC,KCp2B5B;EAqBE,YAAA,CAAYprC,CAAZ;EAAA,gBAAA,QACgB,QAAVA,CAAU,KACZA,MADY,IAGdr8F,IAAA8P,MAAAA,KAAAA,EAAMusF,CAANvsF,WAEK6gG,mBAAkB,CALT,EAMd3wG,EAAK7C,IAAL6C,GAA2B,QAAfq8F,EAAOl/F,IAAQ,IAAQ,CAAR,GAAYk/F
,EAAOl/F,IANhC,EAOd6C,EAAKuhG,QAALvhG,GAAmC,QAAnBq8F,EAAOkF,QAAY,GAAO,GAAP,GAAclF,EAAOkF,QAP1C,EAQdvhG,EAAK6vE,OAAL7vE,GAAiC,QAAlBq8F,EAAOxsB,OAAW,GAAO,IAAP,GAAcwsB,EAAOxsB,OARxC,EASd7vE,EAAK0oI,MAAL1oI,GAA+B,QAAjBq8F,EAAOqsC,MAAU,IAAcrsC,EAAOqsC,MATtC,EAUd1oI,EAAKkH,KAALlH,GAA6B,QAAhBq8F,EAAOn1F,KAAS,IAAcm1F,EAAOn1F,KAVpC,EAWdlH,EAAK2oI,eAAL3oI,GAAuB2+H,eAAetiC,EAAOssC,eAAPtsC,IAA0B,OAAzCsiC,CAXT,EAYd3+H,EAAK4oI,gBAAL5oI,GAAwB2+H,eAAetiC,EAAOusC,gBAAPvsC,IAA2B,MAA1CsiC,CAZV,EAad3+H,EAAK6oI,qBAAL7oI,GACI2+H,eAAetiC,EAAOwsC,qBAAPxsC,IAAgC,OAA/CsiC,CAdU,EAed3+H,EAAK8oI,yBAAL9oI,GACI2+H,eAAetiC,EAAOysC,yBAAPzsC,IAAoC,MAAnDsiC,CAhBU,EAiBd3+H,EAAK+oI,cAAL/oI,GAAsB8+H,cAAcziC,EAAO0sC,cAArBjK,CAjBR,EAkBd9+H,EAAKgpI,eAALhpI,GAAuB8+H,cAAcziC,EAAO2sC,eAArBlK,CAlBT,EAmBd9+H,EAAKipI,eAALjpI,GAAuBg/H,eAAe3iC,EAAO4sC,eAAtBjK,CAnBT,EAoBdh/H,EAAKkpI,gBAALlpI,GAAwBg/H,eAAe3iC,EAAO6sC,gBAAtBlK,CApBV,EAqBdh/H,EAAKmpI,SAALnpI,GAAiB,CArBH;EAsJlB,UA5KwCkQ,aAAAA,GAAAA,GA8C/Bk5H,WAAAA,MAAAA,GAAP,UAAa5pH,CAAb;EACEA,QAAa06G,mBAAmB16G,CAAnB06G,CAAb16G,CACA,IAAMriB,IAAOkC,KAAKlC,IAALkC,IAAa,CAAbA,GAAiBA,KAAKlC,IAAtBkC,GAA8BA,KAAKlC,IAALkC,GAAYmgB,EAAW/lB,MAAlE;EAAA,QACM0S,IAAMqT,EAAWriB,CAAXqiB,CADZ,CAEA,IAAW,QAAPrT,CAAJ,EACE,MAAM,IAAI43F,UAAJ,CACF,UAAQ5mG,CAAR,iGAAA,GAEGo7B,KAAKE,SAALF,CAAe/Y,CAAf+Y,CAFH,MADE,CAAN,CAKFl5B,KAAKqxG,SAALrxG,IACK,IAAIkhI,SAAJ,GAAepxB,MAAM3vF,EAAW/lB,QAAQ4mB,eAAO7H,EAACrb,CAADqb,IAAQrM,IAAfkU,GAAxC,EADLhhB,CAEA,KAAA;EAAA,QAAM/D,KAAS6Q,EAAf,CACI9M,KAAK6H,KAAL7H,KACFA,KAAKwoI,KAALxoI,GAAaA,KAAK0gI,SAAL1gI,CACT,OADSA,EACA/D,CADA+D,EACO,IADPA,EACaA,KAAKupI,gBADlBvpI,EACoCA,KAAK6pI,gBADzC7pI,GAET,CAFSA,EAEHA,KAAK2pI,eAFF3pI,CADXA,GAKAA,KAAKqpI,MAALrpI,KACFA,KAAKwO,IAALxO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,EACA/D,CADA+D,EACO,IADPA,EACaA,KAAKspI,eADlBtpI,EACmCA,KAAK4pI,eADxC5pI,GACyD,CADzDA,EAERA,KAAK0pI,cAFG1pI,CADVA,CALAA,EAUJA,KAAKgqI,UAALhqI,GAAkBA,KAAK0gI,SAAL1gI,CACd,aADcA,EACC/D,CADD+D,EACQ,IADRA,EACcA,KAAKwpI,qBADnBxpI,EAC0C,IAD1CA,GACgD,CADhDA,CAVdA,EAYJA,KAAKiqI,cAALjqI,GAAsBA,KAAK0gI,SAAL1gI,CAClB,iBADkBA,EACC/D,CADD+D,EACQ,IADRA,EACcA,KAAKypI,yBADnBzpI,EAC8C,IAD9CA,GAElB,CAFkBA,CAZlBA,EAeJA,KAAKyyG,KAALzyG,IAAa,CAfTA;KA3DgC6Q,EA6EtCk5H,WAAAA,KAAAA,GAAA,UAAKt4H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV,UAAM0zF,IAAiC,QAAtB8G,EAAiB9G,QAAK,IAAe8G,EAAiB9G,QAAvE;EAAA,UACMz6F,IAAQorH,oBAAoBtrH,CAApBsrH,CADd;EAAA,UAEM58G,IAAaxO,EAAM1V,KAFzB;EAAA,UAGM6zG,IAAO3vF,EAAW/lB,MAHxB;EAAA,UAIMwgE,IAAgBkuE,QAAiB,CAAjBA,EAAoBh5B,CAApBg5B,CAJtB;EAAA,UAKMhrI,IAAO6C,EAAK7C,IAAL6C,IAAa,CAAbA,GAAiBA,EAAK7C,IAAtB6C,GAA8BA,EAAK7C,IAAL6C,GAAYmvG,CALvD,CAMAl1C,EAAczjB,MAAdyjB,CAAqB98D,CAArB88D,EAA2B,CAA3BA,EACA,IAAMsvE,IAAiBrhB,aAA2B,CAA3BA,EAA8B/Y,CAA9B+Y,CAAvB,CACAqhB,EAAepsI,CAAfosI,IAAuB/pH,EAAWriB,CAAXqiB,CAAvB+pH,CAEA,IAAMC,IAAsBvvE,EAAc/8D,KAAd+8D,EAA5B,CACAuvE,EAAoBrsH,IAApBqsH,GACA,IAAMC,KAAqBt+B,KAAKnwG,WAALmwG,CACvBq+B,CADuBr+B,EACFg9B,QAAiB,CAAjBA,EAAoBh5B,CAApBg5B,EAA0BjrI,KAA1BirI,CAAgC,CAAhCA,EAAmCh5B,IAAO,CAA1Cg5B,CADEh9B,CAA3B,CAwBA,KAAKM,CAAL,EACE,OAtBuC;EACvC,YAAIg+B,CAAJ,EAAuB;EACrB,cAAMC,IACF1pI,EAAKqpI,UAALrpI,CAAgB8E,IAAhB9E,GAAuBwE,OAAvBxE,CAA+BupI,CAA/BvpI,CADJ;EAAA,cAEM2pI,IACF3pI,EAAKspI,cAALtpI,CAAoB8E,IAApB9E,GAA2BwE,OAA3BxE,CAAmCupI,CAAnCvpI,CAHJ;EAAA,cAIMuoI,IACFvoI,EAAK0oI,MAAL1oI,GAAcA,EAAK6N,IAAL7N,CAAU8E,IAAV9E,GAAiBwE,OAAjBxE,CAAyBupI,CAAzBvpI,CAAdA,GAAyD,IAL7D;EAAA,cAMMsoI,IACFtoI,EAAKkH,KAALlH,GAAaA,EAAK6nI,KAAL7nI,CAAW8E,IAAX9E,GAAkBwE,OAAlBxE,CAA0BupI,CAA1BvpI,CAAbA,GAAyD,IAP7D,CAQA,OAAOmH,qBACH6J,CADG7J,EACIuiI,CADJviI,EACyBwiI,CADzBxiI,EAEHohI,CAFGphI,EAEYmhI,
CAFZnhI,EAE4BnH,EAAK6vE,OAFjC1oE,CAAP;EAIA,gBAAOA,qBACH6J,CADG7J,EACInH,EAAKqpI,UAALrpI,CAAgB8E,IAAhB9E,EADJmH,EAC4BnH,EAAKspI,cAALtpI,CAAoB8E,IAApB9E,EAD5BmH,EAEU,QAAbnH,EAAK6N,IAAQ,GAAO,IAAP,GAAc7N,EAAK6N,IAAL7N,CAAU8E,IAAV9E,EAFxBmH,EAGW,QAAdnH,EAAK6nI,KAAS,GAAO,IAAP,GAAc7nI,EAAK6nI,KAAL7nI,CAAW8E,IAAX9E,EAHzBmH,EAG4CnH,EAAK6vE,OAHjD1oE,CAAP;EAQKyiI,OAtBgC,EAsBvC,CAGI,IAAApxH,4EAAA;EAAA,UAACqxH,QAAD;EAAA,UAAiB9iI,QAAjB;EAAA,UAAuBC,QAAvB;EAAA,UAKA8iI,IACF1gC,UAAqBnvC,EAAc94D,GAAd84D,CAAkB,UAAA98D,CAAA;EAAQ,eAAA6T,EAAM1V,KAAN0V,CAAY7T,CAAZ6T,CAAA;SAA1BipD,CAArBmvC,CANE;EAAA,UAOA2gC,IAAmB/iI,EAASiB,GAATjB,CACrB+/F,UAAU+iC,KAAcA,KAAc,IAAI9pI,EAAK6vE,OAAvBi6D,CAAdA,CAAV/iC,CADqB//F,CAPnB,CA4BN,OAZoC;EAClChH,UAAKmpI,SAALnpI,GACA,IAAMgqI,IAAgBC,cAClBjqI,EAAKqpI,UAALrpI,CAAgB8E,IAAhB9E,EADkBiqI,EACMljI,CADNkjI,EACYjqI,EAAKuhG,QADjB0oC,EAC2BjqI,EAAKmpI,SADhCc,CAAtB,CAEAjqI,EAAKqpI,UAALrpI,CAAgBoE,KAAhBpE,CAAsBgqI,CAAtBhqI,EACA,IAAMkqI,IAAoBD,cACtBjqI,EAAKspI,cAALtpI,CAAoB8E,IAApB9E,EADsBiqI,EACMF,CADNE,EACwBjqI,EAAKuhG,QAD7B0oC,EAEtBjqI,EAAKmpI,SAFiBc,CAA1B,CAGAjqI,EAAKspI,cAALtpI,CAAoBoE,KAApBpE,CAA0BkqI,CAA1BlqI;EAEFmqI,OAVoC,IAY7BN,CAAP;OArEK9xH,CAAP;KA9EoC7H,EAuJtCk5H,WAAAA,UAAAA,GAAA;EACE,QAAM/sC,MACJl/F,MAAMkC,KAAKlC,MACXokG,UAAUliG,KAAKkiG,UACf1xB,SAASxwE,KAAKwwE,SACd64D,QAAQrpI,KAAKqpI,QACbxhI,OAAO7H,KAAK6H,OACZyhI,iBAAiBxJ,qBAAqB9/H,KAAKspI,eAA1BxJ,GACjByJ,kBAAkBzJ,qBAAqB9/H,KAAKupI,gBAA1BzJ,GAClB0J,uBAAuB1J,qBAAqB9/H,KAAKwpI,qBAA1B1J,GACvB2J,2BACI3J,qBAAqB9/H,KAAKypI,yBAA1B3J,GACJ8J,iBAAiB7J,qBAAqB//H,KAAK4pI,eAA1B7J,GACjB8J,kBAAkB9J,qBAAqB//H,KAAK6pI,gBAA1B9J,GAClB2J,gBAAgB1J,oBAAoBhgI,KAAK0pI,cAAzB1J,GAChB2J,iBAAiB3J,oBAAoBhgI,KAAK2pI,eAAzB3J,GAdnB;EAAA,QAgBMhD,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAhBnB,CAkBA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA1KoCnsF,EAC/Bk5H,WAAAA,GAAY,oBADmBl5H,GA4KxC;IA5KwCshG,MAAxC,2BC5MIz3G,GAAWkU,GACXrB;EACF,SAAOmL,KAAK;EACV,QAAe,MAAXhe,EAAE0E,IAAN,EACE,MAAM,IAAIslG,UAAJ,CACF,oEACGhqG,EAAE0E,IADL,eADE,CAAN,CAQF,IAHe,QAAXwP,CAAW,KACbA,MAAY,GAAG,KAAK,GAAG,GADV,GAGQ,MAAnBA,EAAQxU,MAAW,IAA2B,MAAtBwU,EAAQ,CAARA,EAAWxU,MAAhB,IACG,MAAtBwU,EAAQ,CAARA,EAAWxU,MADf,EAEE,MAAM,IAAIsqG,UAAJ,CACF,6GADE,CAAN,CAQF,IAHkB,QAAdn3F,CAAc,KAChBA,IAAag+F,iBADG,GAGC,mBAAfh+F,CAAe,IAAiC,oBAAfA,CAArC,EACE,MAAM,IAAIm3F,UAAJ,CACF,0BAAwBn3F,CAAxB,oEADE,CAAN,CAKF,IAAIw9H,CAAJ,CAOA,OALEA,IADiB,oBAAfx9H,CAAe,KACL,GAAG,KAAK,GAAG,IAAIqB,EAAQ,CAARA,GAAYA,EAAQ,CAARA,EADtB,KAGL,GAAG,IAAIA,EAAQ,CAARA,GAAYA,EAAQ,CAARA,IAAa,GAAG,GAF/Cm8H,EAKKC,IAAQtwI,CAARswI,EAAWD,CAAXC,CAAP;KAjCKtyH,CAAP;iBDuXYivF,cAAcoiC,oBCrS5B;EAKE,YAAA,CAAY/sC,CAAZ;EAAA,gBAAA,CAUE,IATc,QAAVA,CAAU,KACZA,MADY,IAGdr8F,IAAA8P,MAAAA,KAAAA,EAAMusF,CAANvsF,WAEKlD,aACoB,QAArByvF,EAAOzvF,UAAc,GAAOg+F,iBAAP,GAA2BvO,EAAOzvF,UAN7C,EASQ,QAAlByvF,EAAOpuF,OAAX,EACEjO,EAAKiO,OAALjO,KAAiB,GAAG,KAAK,GAAG,GAA5BA,CADF,KAEO,IAA8B,mBAAnBq8F,EAAOpuF,OAAlB,EACLjO,EAAKiO,OAALjO,KACMq8F,EAAOpuF,SAASouF,EAAOpuF,WAAWouF,EAAOpuF,SAASouF,EAAOpuF,SAD/DjO,CADK,KAGA;EAGL,UAFAq8F,EAAOpuF,OAAPouF,GAAiBA,EAAOpuF,OAAxBouF,EAE8B,MAA1BA,EAAOpuF,OAAPouF,CAAe5iG,MAAnB,EACE,MAAM,IAAIsqG,UAAJ,CACF,iFACqB1H,EAAOpuF,OAAPouF,CAAe5iG,MADpC,YADE,CAAN,CAKF,IAAI6wI,UAAJ;EAAA,UACIC,UADJ,CAEA,IAAiC,mBAAtBluC,EAAOpuF,OAAPouF,CAAe,CAAfA,CAAX,EACEiuC,KACKjuC,EAAOpuF,OAAPouF,CAAe,CAAfA,GAA6BA,EAAOpuF,OAAPouF,CAAe,CAAfA,EADlCiuC,EAEAC,KACKluC,EAAOpuF,OAAPouF,CAAe,CAAfA,GAA6BA,EAAOpuF,OAAPouF,CAAe,CAAfA,EAHlCiuC,CADF,KAKO;EAGL,YAFAjuC,EAAOpuF,OAAPouF,GAAiBA,EAAOpuF,OAAxBouF,EAEiC,MAA7BA,EAAOpuF,OAAPouF,CAAe,CAAfA,EAAkB5iG,MAAtB,EACE,MAAM,IAAIsqG
,UAAJ,CACF,wFACyB1H,EAAOpuF,OAAPouF,CAAe,CAAfA,EAAkB5iG,MAD3C,YADE,CAAN,CAMF,IAFA6wI,IAAgBjuC,EAAOpuF,OAAPouF,CAAe,CAAfA,CAAhBiuC,EAEiC,MAA7BjuC,EAAOpuF,OAAPouF,CAAe,CAAfA,EAAkB5iG,MAAtB,EACE,MAAM,IAAIsqG,UAAJ,CACF,uFACyB1H,EAAOpuF,OAAPouF,CAAe,CAAfA,EAAkB5iG,MAD3C,YADE,CAAN,CAIF8wI,IAAeluC,EAAOpuF,OAAPouF,CAAe,CAAfA,CAAfkuC;EAEFvqI,SAAKiO,OAALjO,IAAgBsqI,GAAeC,EAA/BvqI;cAEFA,EAAK0wG,SAAL1wG,IAAkB,IAAIugI,SAAJ,GAAepxB,MAAM,GAArB,EAAlBnvG;EAkDJ,UAzGmCkQ,aAAAA,GAAAA,GA0DjCs6H,WAAAA,mBAAAA,GAAA,UAAmBhrH,CAAnB;EAGE,QAAI/a,CAAJ,EACIghC,CADJ,CAEA,OAJAjmB,IAAa06G,mBAAmB16G,CAAnB06G,CAAb16G,EAIwB,oBAApBngB,KAAKuN,UAAe,IAEpBnI,IADmB,QAAjB+a,EAAW,CAAXA,CAAiB,IAAQA,EAAW,CAAXA,KAAiB,CAAzB,GACZA,EAAW,CAAXA,IAAgBngB,KAAK4O,OAAL5O,CAAa,CAAbA,EAAgB,CAAhBA,CAAhBmgB,GAAqCngB,KAAK4O,OAAL5O,CAAa,CAAbA,EAAgB,CAAhBA,CADzB,GAGZ,IAFPoF,EAKAghC,IADmB,QAAjBjmB,EAAW,CAAXA,CAAiB,IAAQA,EAAW,CAAXA,KAAiB,CAAzB,GACZA,EAAW,CAAXA,IAAgBngB,KAAK4O,OAAL5O,CAAa,CAAbA,EAAgB,CAAhBA,CAAhBmgB,GAAqCngB,KAAK4O,OAAL5O,CAAa,CAAbA,EAAgB,CAAhBA,CADzB,GAGZ,IAPPoF,GASM+a,EAAW,CAAXA,GAAeA,EAAW,CAAXA,GAAe/a,GAAMghC,EAXtB,KAcpBhhC,IADmB,QAAjB+a,EAAW,CAAXA,CAAiB,IAAQA,EAAW,CAAXA,KAAiB,CAAzB,GACZA,EAAW,CAAXA,IAAgBngB,KAAK4O,OAAL5O,CAAa,CAAbA,EAAgB,CAAhBA,CAAhBmgB,GAAqCngB,KAAK4O,OAAL5O,CAAa,CAAbA,EAAgB,CAAhBA,CADzB,GAGZ,IAFPoF,EAKAghC,IADmB,QAAjBjmB,EAAW,CAAXA,CAAiB,IAAQA,EAAW,CAAXA,KAAiB,CAAzB,GACZA,EAAW,CAAXA,IAAgBngB,KAAK4O,OAAL5O,CAAa,CAAbA,EAAgB,CAAhBA,CAAhBmgB,GAAqCngB,KAAK4O,OAAL5O,CAAa,CAAbA,EAAgB,CAAhBA,CADzB,GAGZ,IAPPoF,GASM+a,EAAW,CAAXA,GAAe/a,GAAMghC,GAAMjmB,EAAW,CAAXA,EAvBb,CAAxB;KA/D+BtP,EA0FjCs6H,WAAAA,KAAAA,GAAA,UAAK15H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KACH;EAAM,aAAA0yH,iBACFrO,oBAAoBtrH,CAApBsrH,CADEqO,EAC2BzqI,EAAKiO,OADhCw8H,EACyCzqI,EAAK4M,UAD9C69H,CAAA;OADH1yH,CAAP;KA3F+B7H,EAgGjCs6H,WAAAA,UAAAA,GAAA;EACE,QAAMnuC,MACJpuF,SAAS5O,KAAK4O,SACdrB,YAAYvN,KAAKuN,YAFnB;EAAA,QAIMyvH,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAJnB,CAMA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAvG+BnsF,EAC1Bs6H,WAAAA,GAAY,eADct6H,GAyGnC;IAzGmCshG,MAAnC,iBChHIz3G,GAAW2wI,GAA4BhsI,GACvCuP,GAAuBrB,GACvB+9H;EACF,SAAO5yH,KAAK;EAoBV,QAAItH,CAAJ,CAnBAo6F,gBAAgBj+F,CAAhBi+F,GACA+/B,cAAcD,CAAdC,CADA//B,EAEA0zB,iBAAiBtwH,CAAjBswH,CAFA1zB,EAGe,QAAXnsG,CAAW,KACbA,KAAW,GAAG,EADD,CAHfmsG,EAMe,QAAX58F,CAAW,KACbA,IAAU,OADG,CANf48F,EASkB,QAAdj+F,CAAc,KAChBA,IAAag+F,iBADG,CATlBC,EAYgB,QAAZ8/B,CAAY,KACdA,IAAW,KADG,CAZhB9/B,EAkBA9wG,IAAImkI,sBAAsBnkI,CAAtBmkI,EAAyBtxH,CAAzBsxH,CAlBJrzB,CAoBA,IAAMggC,IAA6B,WAAZ58H,CAAY,GAAU,MAAV,GAAmB,OAAtD,CAcA,OAXEwC,IAFe,UAAbk6H,CAAa,GAEXG,QAAY/wI,CAAZ+wI,EAA2BJ,CAA3BI,EAAqCpsI,CAArCosI,EAA8CD,CAA9CC,CAFW,GAMXC,QAEAhxI,CAFAgxI,EAE0BL,CAF1BK,EAEoCrsI,CAFpCqsI,EAE6CF,CAF7CE,CAJJt6H,EAQiB,oBAAf7D,CAAe,KACjB6D,IAAIqtH,UAAcrtH,CAAdqtH,GAAkB,GAAG,GAAG,GAAG,EAA3BA,CADa,CARjBrtH,EAWKA,CAAP;KAnCKsH,CAAP;iBDuNYivF,cAAcwjC,eC7J5B;EAWE,YAAA,CAAYnuC,CAAZ;EAAA,gBAAA,CAKE,IAJuB,QAAnBA,EAAOquC,QAAY,KACrBruC,EAAOquC,QAAPruC,GAAkB,CADG,GAGvBr8F,IAAA8P,MAAAA,KAAAA,EAAMusF,CAANvsF,SAHuB,EAIQ,mBAApBusF,EAAOquC,QAAlB,EACE1qI,EAAK0qI,QAAL1qI,IAAiBq8F,EAAOquC,SAAxB1qI,CADF,KAEO;EAAA,WACH9E,MAAMC,OAAND,CAAcmhG,EAAOquC,QAArBxvI,KACyC,MAAxCmhG,EAAOquC,QAAPruC,CAA6B5iG,UACc,mBAApC4iG,EAAOquC,QAAPruC,CAA6B,CAA7BA,CAHL,EAML,MAAM,IAAI0H,UAAJ,CACF,uGAEGxrE,KAAKE,SAALF,CAAe8jE,EAAOquC,QAAtBnyG,CAHD,CAAN,CAFAv4B,EAAK0qI,QAAL1qI,GAAgBq8F,EAAOquC,QAAvB1qI;EAOF,SAAsB,QAAlBq8F,EAAO39F,OAAX,EACEsB,EAAKtB,OAALsB,GAAeA,EAAK0qI,QAApB1qI,CADF,KAGE,IAA8B,mBAAnBq8F,EAAO39F,OAAlB,EACEsB,EAAKtB,OAALsB,IAAgBq8F,EAAO39F,QAAvBsB,CADF,KAEO;EAAA,WA
CH9E,MAAMC,OAAND,CAAcmhG,EAAO39F,OAArBxD,KACwC,MAAvCmhG,EAAO39F,OAAP29F,CAA4B5iG,UACc,mBAAnC4iG,EAAO39F,OAAP29F,CAA4B,CAA5BA,CAHL,EAML,MAAM,IAAI0H,UAAJ,CACF,sGAEGxrE,KAAKE,SAALF,CAAe8jE,EAAO39F,OAAtB65B,CAHD,CAAN,CAFAv4B,EAAKtB,OAALsB,GAAeq8F,EAAO39F,OAAtBsB;cASJA,EAAKiO,OAALjO,GAAiC,QAAlBq8F,EAAOpuF,OAAW,GAAO,OAAP,GAAiBouF,EAAOpuF,OAAzDjO,EACAu+H,iBAAiBv+H,EAAKiO,OAAtBswH,CADAv+H,EAEAA,EAAK0wG,SAAL1wG,IAAkB,IAAIugI,SAAJ,GAAepxB,MAAM,GAArB,EAFlBnvG;EAuCJ,UAtFwCkQ,aAAAA,GAAAA,GAoDtC86H,WAAAA,mBAAAA,GAAA,UAAmBxrH,CAAnB;EAEE,QAAM/lB,IAAS4mI,kBADf7gH,IAAa06G,mBAAmB16G,CAAnB06G,GAEE,EADAmG,EACIhhI,KAAKqrI,QAALrrI,CAAc,CAAdA,CADJghI,EACsBhhI,KAAK4O,OAD3BoyH,EACoChhI,KAAKX,OAALW,CAAa,CAAbA,CADpCghI,CAAf,CAEA,QAAQ7gH,EAAW,CAAXA,GAAe/lB,GAAQ+lB,EAAW,CAAXA,EAA/B;KAxDoCtP,EA+DtC86H,WAAAA,KAAAA,GAAA,UAAKl6H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV/X,QAAKijI,cAALjjI,CAAoB8Q,CAApB9Q,EAA4BuyG,CAA5BvyG,GAEA8Q,IAASg1H,aAAa1J,oBAAoBtrH,CAApBsrH,CAAb0J,EAA0C,CAA1CA,CAFT9lI,CAGA,IAAMmR,IAASnR,EAAKirI,eAALjrI,CACXo8H,oBAAoBtrH,CAApBsrH,CADWp8H,GACmBA,EAAK0qI,QAAL1qI,CAAc,CAAdA,GAAkB,EADrCA,GAEVA,EAAKtB,OAALsB,CAAa,CAAbA,GAAiB,EAFPA,EAEWA,EAAKiO,OAFhBjO,EAEyB,cAFzBA,CAAf,CAIA,OAAOkrI,QAAY/5H,CAAZ+5H,GAAqB,EAArBA,CAAP;OARKnzH,CAAP;KAhEoC7H,EA4EtC86H,WAAAA,UAAAA,GAAA;EACE,QAAM3uC,MACJquC,UAAUrrI,KAAKqrI,UACfz8H,SAAS5O,KAAK4O,SACdvP,SAASW,KAAKX,SAHhB;EAAA,QAKM29H,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CALnB,CAOA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KApFoCnsF,GAsFxC;IAtFwCshG,MAAxC;EAAA;EAiGE,YAAA,CAAYnV,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAUJ,UAbkCI,aAAAA,GAAAA,GAMtBi7H,WAAAA,gBAAAA,GAAV,UACIr6H,CADJ,EACoB45H,CADpB,EACgDhsI,CADhD,EAEIuP,CAFJ,EAE0BrB,CAF1B;EAKE,WAFAi+F,gBAAgBj+F,CAAhBi+F,GACA0zB,iBAAiBtwH,CAAjBswH,CADA1zB,EAEOugC,OAAOt6H,CAAPs6H,EAAeV,CAAfU,EAAyB1sI,CAAzB0sI,EAAkCn9H,CAAlCm9H,EAA2Cx+H,CAA3Cw+H,EAAuD,KAAvDA,CAAP;KAX8Bl7H,EACzBi7H,WAAAA,GAAY,cADaj7H,GAalC;IAbkC86H,UA/FlC,eA6GchkC,cAAcmkC,cAW5B;EAEE,YAAA,CAAY9uC,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAUJ,UAbsCI,aAAAA,GAAAA,GAM1Bm7H,WAAAA,gBAAAA,GAAV,UACIv6H,CADJ,EACoB45H,CADpB,EACgDhsI,CADhD,EAEIuP,CAFJ,EAE0BrB,CAF1B;EAKE,WAFAi+F,gBAAgBj+F,CAAhBi+F,GACA0zB,iBAAiBtwH,CAAjBswH,CADA1zB,EAEOugC,OAAOt6H,CAAPs6H,EAAeV,CAAfU,EAAyB1sI,CAAzB0sI,EAAkCn9H,CAAlCm9H,EAA2Cx+H,CAA3Cw+H,EAAuD,KAAvDA,CAAP;KAXkCl7H,EAC7Bm7H,WAAAA,GAAY,kBADiBn7H,GAatC;IAbsC86H,UAAtC,eAcchkC,cAAcqkC,kBA+B5B;EAME,YAAA,CAAYhvC,CAAZ;EAAA,gBAAA,CAQE,IAPuB,QAAnBA,EAAOquC,QAAY,KACrBruC,EAAOquC,QAAPruC,IAAmB,GAAG,EADD,IAGvBr8F,IAAA8P,MAAAA,KAAAA,EAAMusF,CAANvsF,WACK46H,WAAWxvI,MAAMC,OAAND,CAAcmhG,EAAOquC,QAArBxvI,IACZmhG,EAAOquC,QADKxvI,IAEXmhG,EAAOquC,UAAUruC,EAAOquC,SANN,EAOD,QAAlBruC,EAAO39F,OAAX,EACEsB,EAAKtB,OAALsB,GAAeA,EAAK0qI,QAApB1qI,CADF,KAEO,IAAI9E,MAAMC,OAAND,CAAcmhG,EAAO39F,OAArBxD,CAAJ,EAAmC;EACxC,UAA8B,MAA1BmhG,EAAO39F,OAAP29F,CAAe5iG,MAAnB,EACE,MAAM,IAAIsqG,UAAJ,CACF,0HAEG1H,EAAO39F,OAAP29F,CAAe5iG,MAFlB,MADE,CAAN,CAKFuG,EAAKtB,OAALsB,GAAeq8F,EAAO39F,OAAtBsB;OAPK,MAULA,EAAKtB,OAALsB,IAAgBq8F,EAAO39F,SAAS29F,EAAO39F,QAAvCsB,QAEFA,EAAKiO,OAALjO,GAAiC,QAAlBq8F,EAAOpuF,OAAW,GAAO,OAAP,GAAiBouF,EAAOpuF,OAAzDjO,EACAA,EAAK4M,UAAL5M,GACyB,QAArBq8F,EAAOzvF,UAAc,GAAO,cAAP,GAAwByvF,EAAOzvF,UAFxD5M,EAGA6qG,gBAAgB7qG,EAAK4M,UAArBi+F,CAHA7qG,EAIAu+H,iBAAiBv+H,EAAKiO,OAAtBswH,CAJAv+H,EAMAA,EAAK0wG,SAAL1wG,IAAkB,IAAIugI,SAAJ,GAAepxB,MAAM,GAArB,EANlBnvG;EAkDJ,UA9EwCkQ,aAAAA,GAAAA,GAqCtCo7H,WAAAA,mBAAAA,GAAA,UAAmB9rH,CAAnB;EACEA,QAAa06G,mBAAmB16G,CAAnB06G,CAAb16G,CACA,IAAI/a,IACoB,oBAApBpF,KAAKuN,UAAe,GAAkB4S,EAAW,CAAXA,CAAlB,GAAkCA,EAAW,CAAXA
,CAD1D;EAAA,QAEIimB,IACoB,oBAApBpmC,KAAKuN,UAAe,GAAkB4S,EAAW,CAAXA,CAAlB,GAAkCA,EAAW,CAAXA,CAH1D,CAQA,OAJA/a,IACI47H,iBAAiB57H,CAAjB47H,EAAuBhhI,KAAKqrI,QAALrrI,CAAc,CAAdA,CAAvBghI,EAAyChhI,KAAK4O,OAA9CoyH,EAAuDhhI,KAAKX,OAALW,CAAa,CAAbA,CAAvDghI,CADJ57H,EAEAghC,IACI46F,iBAAiB56F,CAAjB46F,EAAuBhhI,KAAKqrI,QAALrrI,CAAc,CAAdA,CAAvBghI,EAAyChhI,KAAK4O,OAA9CoyH,EAAuDhhI,KAAKX,OAALW,CAAa,CAAbA,CAAvDghI,CAHJ57H,EAIwB,oBAApBpF,KAAKuN,UAAe,IACd4S,EAAW,CAAXA,GAAeA,EAAW,CAAXA,GAAe/a,GAAMghC,EADtB,IAGdjmB,EAAW,CAAXA,GAAe/a,GAAMghC,GAAMjmB,EAAW,CAAXA,EAHrC;KA/CoCtP,EA0DtCo7H,WAAAA,KAAAA,GAAA,UAAKx6H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,aADA/X,EAAKijI,cAALjjI,CAAoB8Q,CAApB9Q,EAA4BuyG,CAA5BvyG,GACOA,EAAKirI,eAALjrI,CACHo8H,oBAAoBtrH,CAApBsrH,CADGp8H,EAC0BA,EAAK0qI,QAD/B1qI,EACyCA,EAAKtB,OAD9CsB,EAEHA,EAAKiO,OAFFjO,EAEWA,EAAK4M,UAFhB5M,CAAP;OAFK+X,CAAP;KA3DoC7H,EAmEtCo7H,WAAAA,UAAAA,GAAA;EACE,QAAMjvC,MACJquC,UAAUrrI,KAAKqrI,UACfz8H,SAAS5O,KAAK4O,SACdvP,SAASW,KAAKX,SACdkO,YAAYvN,KAAKuN,YAJnB;EAAA,QAMMyvH,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CANnB,CAQA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA5EoCnsF,GA8ExC;IA9EwCshG,MAAxC;EAAA;EAqGE,YAAA,CAAYnV,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAUJ,UAbkCI,aAAAA,GAAAA,GAMtBq7H,WAAAA,gBAAAA,GAAV,UACIz6H,CADJ,EACoB45H,CADpB,EACgDhsI,CADhD,EAEIuP,CAFJ,EAE0BrB,CAF1B;EAKE,WAFAi+F,gBAAgBj+F,CAAhBi+F,GACA0zB,iBAAiBtwH,CAAjBswH,CADA1zB,EAEOugC,OAAOt6H,CAAPs6H,EAAeV,CAAfU,EAAyB1sI,CAAzB0sI,EAAkCn9H,CAAlCm9H,EAA2Cx+H,CAA3Cw+H,EAAuD,KAAvDA,CAAP;KAX8Bl7H,EACzBq7H,WAAAA,GAAY,cADar7H,GAalC;IAbkCo7H,UAnGlC,eAiHctkC,cAAcukC,cAuB5B;EAEE,YAAA,CAAYlvC,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAUJ,UAbsCI,aAAAA,GAAAA,GAM1Bs7H,WAAAA,gBAAAA,GAAV,UACI16H,CADJ,EACoB45H,CADpB,EACgDhsI,CADhD,EAEIuP,CAFJ,EAE0BrB,CAF1B;EAKE,WAFAi+F,gBAAgBj+F,CAAhBi+F,GACA0zB,iBAAiBtwH,CAAjBswH,CADA1zB,EAEOugC,OAAOt6H,CAAPs6H,EAAeV,CAAfU,EAAyB1sI,CAAzB0sI,EAAkCn9H,CAAlCm9H,EAA2Cx+H,CAA3Cw+H,EAAuD,KAAvDA,CAAP;KAXkCl7H,EAC7Bs7H,WAAAA,GAAY,kBADiBt7H,GAatC;IAbsCo7H,UAAtC,eAcctkC,cAAcwkC,kBAK5B;EACE,YAAA,CAAYnvC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEE9P,EAAK0wG,SAAL1wG,IAAkB,IAAIugI,SAAJ,GAAepxB,MAAM,GAArB,EAAlBnvG;EAUJ,UAb8CkQ,aAAAA,GAAAA,GAM5Cu7H,WAAAA,mBAAAA,GAAA,UAAmBjsH,CAAnB;EACE,YAAQA,EAAW,CAAXA,GAAeA,EAAW,CAAXA,EAAvB;KAP0CtP,EAU5Cu7H,WAAAA,KAAAA,GAAA,UAAK36H,CAAL,EAA8ByhG,CAA9B;EACE,UAAM,IAAIvO,mBAAJ,EAAN;KAX0C9zF,GAa9C;IAb8CshG,MAA9C;EAAA;EAwBE,YAAA,CAAYnV,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EASJ,UAZ4CI,aAAAA,GAAAA,GAM1Cw7H,WAAAA,KAAAA,GAAA,UAAK56H,CAAL,EAA8ByhG,CAA9B;EACE,WAAOx6F,KAAK;EACV,UAAM/G,IAAQorH,oBAAoBtrH,CAApBsrH,CAAd,CACA,OAAO1hB,KAAS1pG,CAAT0pG,EAAgB,CAAhBA,CAAP;OAFK3iG,CAAP;KAPwC7H,EACnCw7H,WAAAA,GAAY,wBADuBx7H,GAY5C;IAZ4Cu7H,gBAtB5C,eAmCczkC,cAAc0kC,wBAS5B;EAEE,YAAA,CAAYrvC,CAAZ;aACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EASJ,UAZwCI,aAAAA,GAAAA,GAMtCy7H,WAAAA,KAAAA,GAAA,UAAK76H,CAAL,EAA8ByhG,CAA9B;EACE,WAAOx6F,KAAK;EACV,UAAM/G,IAAQorH,oBAAoBtrH,CAApBsrH,CAAd,CACA,OAAOjzB,IAAQn4F,CAARm4F,EAAe,CAAfA,CAAP;OAFKpxF,CAAP;KAPoC7H,EAC/By7H,WAAAA,GAAY,oBADmBz7H,GAYxC;IAZwCu7H,gBAAxC,eAaczkC,cAAc2kC,oBAiB5B;EAEE,YAAA,CAAYtvC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEE9P,EAAK4M,UAAL5M,GACyB,QAArBq8F,EAAOzvF,UAAc,GAAO,cAAP,GAAwByvF,EAAOzvF,UADxD5M,EAEA6qG,gBAAgB7qG,EAAK4M,UAArBi+F,CAFA7qG,EAGAA,EAAK0wG,SAAL1wG,IAAkB,IAAIugI,SAAJ,GAAepxB,MAAM,GAArB,EAHlBnvG;EAyBJ,UA7B8CkQ,aAAAA,GAAAA,GAU5C07H,WAAAA,mBAAAA,GAAA,UAAmBpsH,CAAnB;EAEE,WADAA,IAAaA,CAAbA,EACwB,mBAApBngB,KAAKuN,UAAe,IACd4S,EAAW,CAAXA,GAAeA,EAAW,CAAXA,EADD,IAGdA,EAAW,CAAXA
,GAAeA,EAAW,CAAXA,EAHzB;KAZ0CtP,EAmB5C07H,WAAAA,KAAAA,GAAA,UAAK96H,CAAL,EAA8ByhG,CAA9B;EACE,UAAM,IAAIvO,mBAAJ,EAAN;KApB0C9zF,EAuB5C07H,WAAAA,UAAAA,GAAA;EACE,QAAMvvC,MAAUzvF,YAAYvN,KAAKuN,YAAjC;EAAA,QACMyvH,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA3B0CnsF,GA6B9C;IA7B8CshG,MAA9C;EAAA;EA2CA,YAAA;;EAaA,UAb4CthG,aAAAA,GAAAA,GAG1C27H,WAAAA,KAAAA,GAAA,UAAK/6H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV,UAAM/G,IAAQorH,oBAAoBtrH,CAApBsrH,CAAd,CACA,OAAwB,mBAApBp8H,EAAK4M,UAAe,GACf8tG,KAAS1pG,CAAT0pG,GAAiB,GAAG,EAApBA,CADe,GAGfA,KAAS1pG,CAAT0pG,GAAiB,GAAG,EAApBA,CAHT;OAFK3iG,CAAP;KAJwC7H,EACnC27H,WAAAA,GAAY,wBADuB37H,GAa5C;IAb4C07H,gBA3C5C,eAyDc5kC,cAAc6kC,wBAc5B;EAAA,YAAA;;EAaA,UAbwC37H,aAAAA,GAAAA,GAGtC47H,WAAAA,KAAAA,GAAA,UAAKh7H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV,UAAM/G,IAAQorH,oBAAoBtrH,CAApBsrH,CAAd,CACA,OAAwB,mBAApBp8H,EAAK4M,UAAe,GACfu8F,IAAQn4F,CAARm4F,GAAgB,GAAG,EAAnBA,CADe,GAGfA,IAAQn4F,CAARm4F,GAAgB,GAAG,EAAnBA,CAHT;OAFKpxF,CAAP;KAJoC7H,EAC/B47H,WAAAA,GAAY,oBADmB57H,GAaxC;IAbwC07H,gBAAxC,0BC9fI96H,GACAi7H,GACAC,GACAC;EAKF,MAAI/wI,MAAMC,OAAND,CAAc4V,CAAd5V,CAAJ,EAA2B;EACzB,QAAoB,QAAhB6wI,CAAgB,IAAqB,QAAbC,CAA5B,EACE,MAAM,IAAIjoC,UAAJ,CACF,+EADE,CAAN,CAIkB,QAAhBkoC,CAAgB,KAClBD,IAAYl7H,EAAO5T,KAAP4T,CAAaA,EAAOrX,MAAPqX,GAAgBm7H,CAA7Bn7H,EAA2CA,EAAOrX,MAAlDqX,CAAZk7H,EACAl7H,IAASA,EAAO5T,KAAP4T,CAAa,CAAbA,EAAgBA,EAAOrX,MAAPqX,GAAgBm7H,CAAhCn7H,CAFS,GAIhBA,EAAOrX,MAAPqX,GAAgB,CAAhBA,KACFi7H,IAAej7H,EAAO5T,KAAP4T,CAAa,CAAbA,EAAgBA,EAAOrX,MAAvBqX,CADbA,CAJgB,EAOpBA,IAASA,EAAO,CAAPA,CAPW;EAUtB,aAAA,CAAsB/W,CAAtB;EAEE,WAAS,QAALA,CAAK,IAAQmB,MAAMC,OAAND,CAAcnB,CAAdmB,CAAR,GACAnB,CADA,IAGCA,EAHV;EAUF,YAAQ+W,WAAQi7H,cAHhBA,IAAeG,EAAaH,CAAbG,GAGeF,WAF9BA,IAAYE,EAAaF,CAAbE,GAEZ;EA8CF,aAAA,CACIC,CADJ,EACmCr7H,CADnC,EACmDs7H,CADnD,EAEIC,CAFJ,EAEyBnuF,CAFzB,EAEwC8tF,CAFxC,EAE8DM,CAF9D,EAGIC,CAHJ;qBAEIF,0BAA0DC,0BAC1DC,QACF,IAAMp9B,IAAOr+F,EAAOxV,KAAPwV,CAAarX,MAA1B,CACA,IAAI01G,IAAO,CAAX,EACE,MAAM,IAAIpL,UAAJ,CAAe,yCAAuCoL,CAAvC,OAAf,CAAN,CAKF,IAsCIj+F,CAtCJ;EAAA,MAuCIs7H,CAvCJ;EAAA,MAAMnsH,KAAQ,GAAG,GAAG7Z,OAAO2hI,QAAiB,CAAjBA,EAAoBh5B,CAApBg5B,EAA3B,CAGA,IAFAr3H,IAASgtH,UAAchtH,CAAdgtH,EAAsBz9G,CAAtBy9G,CAAThtH,EAEY,QAARotC,CAAJ,EACE,MAAM,IAAI8lD,mBAAJ,CACF,8EADE,CAAN,CAKF,IAAiB,QAAbgoC,CAAJ,EACE,MAAM,IAAIhoC,mBAAJ,CACF,gFADE,CAAN,CAMEsoC,KACF/rI,QAAQ8Z,IAAR9Z,CACI,mGADJA,CADE+rI,EAMAD,MACFv7H,IAAS27H,QAAY37H,CAAZ27H,EAAoB,CAApBA,CADPJ,CANAC,CA0BJ,KAFA,IAAII,IAASN,CAAb,EACMO,IAAY77H,EAAOxV,KAAPwV,CAAa,CAAbA,CADlB,gBAESyH;EACP,QAAIq0H,IAAejV,oBAAsB7mH,CAAtB6mH,EAA8Bp/G,CAA9Bo/G,EAAiC,CAAjCA,CAAnB,CACAiV,IAAeA,EAAapoI,OAAbooI,CAAqBA,EAAatxI,KAAbsxI,CAAmB1vI,KAAnB0vI,CAAyB,CAAzBA,CAArBA,CAAfA,CACA,IAAMC,IAAcrc,KAAS;EAAM,aAAA2b,EAAaS,CAAbT,EAA2BO,CAA3BP,CAAA;OAAf3b,CAApB,CAEA,IADAgc,IAAaK,EAAY,CAAZA,CAAbL,EACID,CAAJ,EACE,IAAU,MAANh0H,CAAJ,EACErH,IAAUs7H,EAAWnnI,UAAXmnI,CAAsB,CAAtBA,CAAVt7H,CADF,KAEO;EACL,UAAM47H,IAAanjC,QAAYz4F,GAASs7H,EAAWnnI,UAAXmnI,CAAsB,CAAtBA,EAArB7iC,EAAgD,CAAhDA,CAAnB,CACAz4F,EAAQmB,OAARnB,IACAA,IAAU47H,CADV57H;EAMJw7H,SAASG,EAAY,CAAZA,CAATH;KAlBF,EAESn0H,IAAI,CAAb,EAAgBA,IAAIo0H,CAApB,IAAiCp0H,CAAjC,IAASA,GAkBT,QAAQi0H,GAAYt7H,GAASw7H,EAA7B;iBDmXY1lC,cAAc8kC,oBCvM5B;EAoBE,YAAA,CAAYzvC,CAAZ;EAAA,QAEM0wC,CAFN;EAAA,YACEj9H,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,CAGE,IAAmB,QAAfusF,EAAO0wC,IAAX,EACE,MAAM,IAAIhpC,UAAJ,CACF,sDADE,CAAN,CAOF,IAAmC,SAJjCgpC,IADS7xI,MAAMC,OAAND,CAAcmhG,EAAO0wC,IAArB7xI,IACF,IAAI8xI,eAAJ,GAAqBC,OAAO5wC,EAAO0wC,MAAnC,CADE7xI,GAGFmhG,EAAO0wC,MAEMG,SAAtB,EACE,M
AAM,IAAInpC,UAAJ,CACF,mGADE,CAAN,QAIF/jG,EAAK+sI,IAAL/sI,GAAY+sI,CAAZ/sI,EACAA,EAAKmtI,eAALntI,GAC8B,QAA1Bq8F,EAAO8wC,eAAmB,IAAe9wC,EAAO8wC,eAFpDntI,EAGAA,EAAKotI,WAALptI,GAAyC,QAAtBq8F,EAAO+wC,WAAe,IAAe/wC,EAAO+wC,WAH/DptI,EAIAA,EAAKqsI,WAALrsI,GAAyC,QAAtBq8F,EAAOgwC,WAAe,IAAehwC,EAAOgwC,WAJ/DrsI,EAKAA,EAAKiyG,SAALjyG,GAAoC,QAAnBq8F,EAAO6V,QAAY,IAAe7V,EAAO6V,QAL1DlyG,EAMAA,EAAKssI,MAALtsI,GAA+B,QAAjBq8F,EAAOiwC,MAAU,IAAejwC,EAAOiwC,MANrDtsI,EAQAA,EAAK2wG,eAAL3wG,IAAuB,CARvBA,EASAA,EAAK0wG,SAAL1wG,IAAkB,IAAIugI,SAAJ,GAAepxB,MAAM,GAArB,EATlBnvG,EAUAA,EAAKqtI,SAALrtI,GAAiB,IAVjBA,EAWAA,EAAK0sI,MAAL1sI,GAAc,IAXdA,EAaAA,EAAKisI,YAALjsI,GAAoB,IAbpBA,EAiBAA,EAAKstI,UAALttI,KAjBAA;EA6ZJ,UAjcyBkQ,aAAAA,GAAAA,GA0DvBq9H,WAAAA,UAAAA,GAAA;EACE,WAAmB,QAAfluI,KAAKqtI,MAAU,GAGVvE,QAAiB,CAAjBA,EADHjtI,MAAMC,OAAND,CAAcmE,KAAK0tI,IAAL1tI,CAAU6tI,SAAxBhyI,IAAqCmE,KAAK0tI,IAAL1tI,CAAU6tI,SAAV7tI,CAAoB5F,MAAzDyB,GAAkE,CAC/DitI,EAA+BhnI,GAA/BgnI,CAAmC,UAAApuI,CAAA;EAAK,aAAA,IAAA;OAAxCouI,CAHU,GAKV9oI,KAAKqtI,MALd;KA3DqBx8H,EAsEvBq9H,WAAAA,UAAAA,GAAA,UAAUb,CAAV;EACErtI,SAAKqtI,MAALrtI,GAAcqtI,CAAdrtI;KAvEqB6Q,EA0EvBq9H,WAAAA,mBAAAA,GAAA,UAAmB/tH,CAAnB;EACMguH,oBAAgBhuH,CAAhBguH,MACFhuH,IAAcA,EAAuB,CAAvBA,CADZguH,GAGJhuH,IAAaA,CAHTguH,CAMJ,IAAIN,IAAY7tI,KAAK0tI,IAAL1tI,CAAU6tI,SAA1B,CACKhyI,MAAMC,OAAND,CAAcgyI,CAAdhyI,MACHgyI,KAAaA,EADVhyI,EAGL,IACIgS,CADJ;EAAA,QAAMy3H,IAAYuI,EAAU,CAAVA,CAAlB,CAQA,IALEhgI,IADE7N,KAAK8tI,eAAL9tI,IACamgB,EAAW,CAAXA,GAAeA,EAAW,CAAXA,GAAemlH,EAD3CtlI,IAGamgB,EAAW,CAAXA,GAAemlH,EAF9Bz3H,EAKE7N,KAAK+tI,WAAT,EAAsB;EAEpB,WADA,IAAMK,MAAN,OAAA,EACkBC,KAAlB,EAAkBrqI,YAAlB,EAAkBA,GAAlB;EAAK,YAAM8I,QAAN,CACHshI,EAAWpyI,IAAXoyI,EAAiBjuH,EAAW,CAAXA,GAAerT,EAAhCshI;EAEF,eAAQvgI,GAAa1G,OAAOinI,EAA5B;EAEA,YAAOvgI,CAAP;KApGmBgD,EAwGvBq9H,WAAAA,YAAAA,GAAA,UAAYz8H,CAAZ,EAAqCotC,CAArC;EACE,UAAM,IAAI8lD,mBAAJ,CACF,kDADE,CAAN;KAzGqB9zF,EA6GhBq9H,WAAAA,MAAAA,GAAP,UAAa/tH,CAAb;EAIE,QAAyB,QAArBngB,KAAK4sI,YAAT,EACE,MAAM,IAAIjoC,mBAAJ,CACF,kDADE,CAAN,CAIEwpC,gBAAgBhuH,CAAhBguH,MACFhuH,IAAcA,EAAuB,CAAvBA,CADZguH,GAGJhuH,IAAaA,CAHTguH,CAKJ,IAAM5hH,IAAoBvsB,KAAK6yG,QAAL7yG,GAAgBmgB,EAAW,CAAXA,CAAhBngB,GAAgC,IAA1D;EAAA,QACMwgI,IAAWrgH,EAAWA,EAAW/lB,MAAX+lB,GAAoB,CAA/BA,CADjB,CAEAngB,KAAKqxG,SAALrxG,CAAe,CAAfA,IAAoB,IAAIkhI,SAAJ,GAAejlI,QAAQswB,GAAW,MAAMi0G,IAAxC,CAApBxgI,CAIA,IASI6tI,CATJ;EAAA,QAAMS,KAAkBnuH,EAAW,CAAXA,GAAehZ,OAAOgZ,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,EAA9C,CAgBA,IAXEngB,KAAK0tI,IAAL1tI,CAAU8zG,KAAV9zG,CAAgBsuI,CAAhBtuI,GAMA6tI,IADEhyI,MAAMC,OAAND,CAAemE,KAAK0tI,IAAL1tI,CAAsB6tI,SAArChyI,IACUmE,KAAK0tI,IAAL1tI,CAAU6tI,SADpBhyI,IAGWmE,KAAK0tI,IAAL1tI,CAAU6tI,UARvB7tI,EAWoB,QAAlBA,KAAKguI,SAAT;EACE,WAAKliC,KAAKnwG,WAALmwG,CACG9rG,KAAKguI,SAALhuI,CAAe8B,GAAf9B,CAAmB,UAAA8qF,CAAA;EAAQ,eAAAA,EAAK7uF,KAAL6uF,CAAWA,EAAK7uF,KAAL6uF,CAAW1wF,MAAX0wF,GAAoB,CAA/BA,CAAA;SAA3B9qF,CADH8rG,EAEG+hC,CAFH/hC,CAAL,EAGE,MAAM,IAAIpH,UAAJ,CACF,+FACsC1kG,KAAKguI,SAD3C,iCAAA,GAE6BhuI,KAAK0tI,IAAL1tI,CAAU6tI,SAHrC,CAAN;OAJJ,MAUE7tI,KAAKguI,SAALhuI,GACI6tI,EAAU/rI,GAAV+rI,CAAc,UAAA/gI,CAAA;EAAO,aAAA,IAAIo0H,SAAJ,GAAejlI,QAAQ,MAAM6Q,IAA7B,CAAA;OAArB+gI,CADJ7tI,CAGEA,KAAK6yG,QAAL7yG,IACFA,KAAKysH,WAALzsH,EADEA;KA9JiB6Q,EAoLvBq9H,WAAAA,YAAAA,GAAA,UAAYb,CAAZ,EAAsCjhC,CAAtC;EAAA,gBAAA,kBAAsCA,SACpC1zF,KAAK;EACH,WAAK/X,EAAKkyG,QAAV,EACE,MAAM,IAAItO,cAAJ,CACF,iEADE,CAAN,CAGF,IAAMh4E,IAAY5rB,EAAK0wG,SAAL1wG,CAAe,CAAfA,EAAkB1E,KAAlB0E,CAAwB,CAAxBA,CAAlB,CACA,IAAiB,QAAb4rB,CAAJ,EACE,MAAM,IAAIm4E,UAAJ,CACF,uUADE,CAAN,CASF,IAAmB,QAAf/jG,EAAK0sI,MAAT,EACMxxI,MAAMC,OAAND,CAAc8E,EAAK+sI,IAAL/sI,CAAUktI,SAAxBhyI,IACF8E,EAAK0sI,MAAL1sI,GACIA,EAA
K+sI,IAAL/sI,CAAUktI,SAAVltI,CAAoBmB,GAApBnB,CAAwB,UAAAmM,CAAA;EAAO,eAAAyhI,OAAWhiH,GAAWzf,EAAtByhI,CAAA;SAA/B5tI,CAFF9E,GAIF8E,EAAK0sI,MAAL1sI,IAAe4tI,OAAWhiH,GAAW5rB,EAAK+sI,IAAL/sI,CAAUktI,UAAhCU,EAJb1yI,CADN,KAOO,IAAc,QAAVwxI,CAAJ,EAELnd,QAAYvvH,EAAK0sI,MAAjBnd,GAEuB,QAAnBvvH,EAAKstI,UAAc,KACrB/d,QAAYvvH,EAAKstI,UAAjB/d,GACAvvH,EAAKstI,UAALttI,KAFqB,CAFvBuvH,EAOIr0H,MAAMC,OAAND,CAAc8E,EAAK+sI,IAAL/sI,CAAUktI,SAAxBhyI,IACF8E,EAAK0sI,MAAL1sI,GACIA,EAAK+sI,IAAL/sI,CAAUktI,SAAVltI,CAAoBmB,GAApBnB,CAAwB,UAAAmM,CAAA;EAAO,eAAAyhI,OAAWhiH,GAAWzf,EAAtByhI,CAAA;SAA/B5tI,CAFF9E,GAIF8E,EAAK0sI,MAAL1sI,CAAY,CAAZA,IAAiB4tI,OAAWhiH,GAAW5rB,EAAK+sI,IAAL/sI,CAAUktI,UAAhCU,CAXnBre,CAFK,KAeA;EAIL,YAHKr0H,MAAMC,OAAND,CAAcwxI,CAAdxxI,MACHwxI,KAAUA,EADPxxI,GAGDwxI,EAAOjzI,MAAPizI,KAAkB1sI,EAAK0sI,MAAL1sI,CAAYvG,MAAlC,EACE,MAAM,IAAIsqG,UAAJ,CACF,WAAS/jG,EAAKpC,IAAd,cAAA,GAA8BoC,EAAK0sI,MAAL1sI,CAAYvG,MAA1C,gCAAA,GACmBizI,EAAOjzI,MAD1B,sCAAA,GAEaizI,CAHX,CAAN,EAMe,MAAbjhC,IAKFzrG,EAAKstI,UAALttI,CAAgB3E,IAAhB2E,CAAqBA,EAAK0sI,MAAL1sI,CAAY9C,KAAZ8C,EAArBA,IAEAuvH,QAAYvvH,EAAK0sI,MAAjBnd,EAGF,KAAK,IAAI51H,IAAQ,CAAjB,EAAoBA,IAAQqG,EAAK0sI,MAAL1sI,CAAYvG,MAAxC,IAAkDE,CAAlD,EAAyD;EACvD,cAAMyJ,IAAQspI,EAAO/yI,CAAP+yI,CAAd;EAAA,cACMvgI,IAAMjR,MAAMC,OAAND,CAAc8E,EAAK+sI,IAAL/sI,CAAUktI,SAAxBhyI,IACR8E,EAAK+sI,IAAL/sI,CAAUktI,SAAVltI,CAAoBrG,CAApBqG,CADQ9E,GAER8E,EAAK+sI,IAAL/sI,CAAUktI,SAHd;EAAA,cAIMW,KAAiBjiH,GAAWzf,EAJlC,CAKA,KAAKg/F,KAAKnwG,WAALmwG,CAAiB/nG,EAAM9H,KAAvB6vG,EAA8B0iC,CAA9B1iC,CAAL,EACE,MAAM,IAAIpH,UAAJ,CACF,WAASpqG,CAAT,iCAAA,GAA6CqG,EAAKpC,IAAlD,sBAAA,GACkBiwI,CADlB,sBAAA,GAEIzqI,EAAM9H,KAHR,CAAN,CAKF0E,EAAK0sI,MAAL1sI,CAAYrG,CAAZqG,IAAqBoD,CAArBpD;;EAGJA,SAAK0sI,MAAL1sI,CAAYP,OAAZO,CAAoB,UAAAy8C,CAAA;EAAS,eAAA+yE,KAAS/yE,CAAT+yE,CAAA;SAA7BxvH;OA1EF+X;KArLqB7H,EAmQvBq9H,WAAAA,MAAAA,GAAA,UACIz8H,CADJ,EAEIyhG,CAFJ;EAIE,QAAIw5B,IACU,QAAVx5B,CAAU,GAAO,IAAP,GAAcA,EAAqBw5B,YADjD;EAAA,QAEIC,IACU,QAAVz5B,CAAU,GAAO,IAAP,GAAcA,EAAkBy5B,SAH9C,CAIc,QAAVz5B,CAAU,KACZA,MADY,EAId,IAAMu7B,IACFC,gBAAgBj9H,CAAhBi9H,EAAwBhC,CAAxBgC,EAAsC/B,CAAtC+B,EAAiD1uI,KAAK4sI,YAAtD8B,CADJ,CAEAj9H,IAASg9H,EAAah9H,MAAtBA,EACAi7H,IAAe+B,EAAa/B,YAD5Bj7H,EAEAk7H,IAAY8B,EAAa9B,SAFzBl7H,CAQA,IAAIk9H,MAAJ;EAAA,QACIC,MADJ,CAEA,IAAoB,QAAhBlC,CAAJ,EAA0B;EACxBx5B,QAAqBw5B,YAArBx5B,GAAyBw5B,CAAzBx5B,EACAy7B,IAAmBA,EAAiBxnI,MAAjBwnI,CAAwBjC,CAAxBiC,CADnBz7B,EAEAlzG,KAAKguI,SAALhuI,KAFAkzG,CAGA,KAAoB,SAAA,EAAA27B,KAApB,EAAoB7qI,YAApB,EAAoBA,GAApB;EAAK,YAAMo5C,QAAN,CACHp9C,KAAKguI,SAALhuI,CAAehE,IAAfgE,CAAoB,IAAIkhI,SAAJ,GAAejlI,OAAOmhD,EAAMnhD,OAA5B,CAApB+D;EAKF4uI,WAAkBA,EAAgBznI,MAAhBynI,CAAuB5uI,KAAKguI,SAA5BY,CAAlBA;EAUF,SARiB,QAAbjC,CAAa,KACfz5B,EAAkBy5B,SAAlBz5B,GAAsBy5B,CAAtBz5B,EACAy7B,IAAmBA,EAAiBxnI,MAAjBwnI,CAAwBhC,CAAxBgC,CADnBz7B,EAGAlzG,KAAK4sI,YAAL5sI,GAAoB2sI,EAAUvyI,MAJf,GAOAu0I,EAAiB,CAAjBA,aAA+Bn7B,cAChD,EAAc;EAEZ,UAAMs7B,KACDr9H,GAAQtK,OAAOwnI,EADpB;EAAA,UAEMI,IAAgB/uI,KAAKqxG,SAALrxG,CAAemH,MAAfnH,CAAsB4uI,CAAtB5uI,CAFtB;EAAA,UAIMgvI,IAAoBhvI,KAAKqxG,SAJ/B,CAKArxG,KAAKqxG,SAALrxG,GAAiB+uI,CAAjB/uI,CACA,IAAM8R,IAASrB,WAAAA,CAAMvR,KAANuR,KAAAA,KAAAA,EAAYq+H,CAAZr+H,EAAuByiG,CAAvBziG,CAAf,CAEA,OADAzQ,KAAKqxG,SAALrxG,GAAiBgvI,CAAjBhvI,EACO8R,CAAP;EAEA,YAAOrB,WAAAA,CAAMvR,KAANuR,KAAAA,KAAAA,EAAYgB,CAAZhB,EAAoByiG,CAApBziG,CAAP;KA3TmBI,EAgUvBq9H,WAAAA,KAAAA,GAAA,UAAKz8H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CAIE,OAAOx6F,KAAK;EACV,UAAMmmC,IAAiB,QAAVq0D,CAAU,GAAO,IAAP,GAAcA,EAAar0D,IAAlD;EAAA,UACMutD,IAAqB,QAAV8G,CAAU,GAAO,IAAP,GAAcA,EAAiB9G,QAD1D;EAAA,UAEIsgC,IACU,QAAVx5B,CAAU,GAAO,IAAP,GAAcA,EAAqBw5B,YAHjD,CAcA,IATAj7H,IAASsrH,oBAAoBtrH
,CAApBsrH,CAATtrH,EACoB,QAAhBi7H,CAAgB,KAEhBA,IADE/rI,EAAKkyG,QAALlyG,GACaA,EAAK0sI,MADlB1sI,GAGaA,EAAKsuI,eAALtuI,CAAqB8Q,CAArB9Q,CAJC,CADpB8Q,EASY,QAARotC,CAAJ,EACE,MAAM,IAAI8lD,mBAAJ,CAAwB,wCAAxB,CAAN,CAGF,IAAMuqC,IACFrzI,MAAMC,OAAND,CAAc8E,EAAK+sI,IAAL/sI,CAAUktI,SAAxBhyI,IAAqC8E,EAAK+sI,IAAL/sI,CAAUktI,SAAVltI,CAAoBvG,MAAzDyB,GAAkE,CADtE,CAEA,IAAI6wI,EAAatyI,MAAbsyI,KAAwBwC,CAA5B,EACE,MAAM,IAAIxqC,UAAJ,CACF,mBAAiBwqC,CAAjB,8BAAA,GACGxC,EAAatyI,MADhB,uBADE,CAAN,CAIEuG,EAAKssI,MAALtsI,IACFO,QAAQ8Z,IAAR9Z,CACI,kEADJA,CADEP,CAKJ,IAAMwuI,MAA0B/iC,aAAhC;EAAA,UAeMgjC,IACFC,IAbS,UAAC59H,CAAD,EAAiB47H,CAAjB;EAGX,YAAMx7H,IACFlR,EAAK+sI,IAAL/sI,CAAU1B,IAAV0B,EAAgB8Q,GAAQtK,OAAOkmI,EAA/B1sI,EAAwCwuI,CAAxCxuI,CADJ,CAGA,QAAQkR,EAAQ,CAARA,GAAYA,EAAQhU,KAARgU,CAAc,CAAdA,EAApB;SAOEw9H,EAAU59H,CAAV49H,EAAkB3C,CAAlB2C,EAAgC1uI,EAAKqsI,WAArCqC,EAAkD,IAAlDA,EAAwD,IAAxDA,EACI1uI,EAAKssI,MADToC,EACiB1uI,EAAKmtI,eADtBuB,CAhBJ;EAAA,UAkBMlC,IAAaiC,EAAW,CAAXA,CAlBnB;EAAA,UAmBMv9H,IAAUu9H,EAAW,CAAXA,CAnBhB;EAAA,UAoBM/B,IAAS+B,EAAW,CAAXA,CApBf,CAsBIzuI,EAAKkyG,QAALlyG,IACFA,EAAK8rH,WAAL9rH,CAAiB0sI,CAAjB1sI,EAAyByrG,CAAzBzrG,CADEA,CAIJ,IAAMmR,IAASnR,EAAKmtI,eAALntI,GAAuBkR,CAAvBlR,GAAiCwsI,CAAhD,CAIA,OAAIxsI,EAAKotI,WAALptI,IACMmR,GAAQ3K,OAAOkmI,EADrB1sI,GAGKmR,CAHT;OA7DK4G,CAAP;KApUqB7H,EAyYvBq9H,WAAAA,gBAAAA,GAAA,UAAgBz8H,CAAhB;EAAA,gBAAA,CACE,OAAOiH,KAAK;EAGV,UAAIg0H,IAAe6B,MAAU98H,EAAOxV,KAAjBsyI,CAAnB,CAKA,OAFA7B,IAAejG,aADfiG,IAAe9lC,MAAQ8lC,CAAR9lC,GAAuB,GAAG,EAA1BA,CACA6/B,CAAfiG,EAEI7wI,MAAMC,OAAND,CAAc8E,EAAK+sI,IAAL/sI,CAAUktI,SAAxBhyI,IACK8E,EAAK+sI,IAAL/sI,CAAUktI,SAAVltI,CAAoBmB,GAApBnB,CACH,UAAAmM,CAAA;EAAO,eAAAA,IAAM,CAANA,GAAUwiI,OAAO5C,CAAP4C,GAAsB,GAAGxiI,EAAzBwiI,CAAVxiI,GAA2C4/H,CAA3C;SADJ/rI,CADL9E,GAIK8E,EAAK+sI,IAAL/sI,CAAUktI,SAAVltI,GAAsB,CAAtBA,IACF2uI,OAAO5C,CAAP4C,GAAsB,GAAG3uI,EAAK+sI,IAAL/sI,CAAUktI,UAAnCyB,EADE3uI,IAEF+rI,EANP;OARKh0H,CAAP;KA1YqB7H,EA6ZvB1M,qBAAAA,CAAI+pI,WAAJ/pI,oBAAAA,SAAA;EACE,aAAKnE,KAAK+O,SAAL/O,GAIEA,KAAK0tI,IAAL1tI,CAAU0yG,gBAJZ1yG,KAAL;2CADFmE,CA7ZuB0M,EAqavB1M,qBAAAA,CAAI+pI,WAAJ/pI,uBAAAA,SAAA;EAEE,aAAKnE,KAAK+O,SAAL/O,GAGEA,KAAK0tI,IAAL1tI,CAAU2yG,mBAHZ3yG,GACIA,KAAK0tI,IAAL1tI,CAAUuvE,OADnB;2CAFFprE,CArauB0M,EA6avBq9H,WAAAA,UAAAA,GAAA;EACE,QAAMlxC,MACJ8wC,iBAAiB9tI,KAAK8tI,iBACtBC,aAAa/tI,KAAK+tI,aAClBf,aAAahtI,KAAKgtI,aAClBn6B,UAAU7yG,KAAK6yG,UACfo6B,QAAQjtI,KAAKitI,QALf,CAOyB,QAArBjtI,KAAK4sI,YAAgB,KACvB5vC,EAAO4vC,YAAP5vC,GAAsBh9F,KAAK4sI,YADJ,EAGzB,IAAM2C,IAAavvI,KAAK0tI,IAAL1tI,CAAUulG,SAAVvlG,EAAnB,CACAg9F,EAAO0wC,IAAP1wC,KACEF,WAAW98F,KAAK0tI,IAAL1tI,CAAUslG,YAAVtlG,IACXg9F,QAAQuyC,GAFVvyC,CAIA,IAAMggC,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAAnB,CAEA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA/bqBnsF,EAChBq9H,WAAAA,GAAY,KADIr9H,GAiczB;IAjcyBshG,MAAzB,eAkccxK,cAAcumC,KAS5B;EAAA,YAAA;;EAQA,UARsCr9H,aAAAA,GAAAA,IAQtC;IARsCshG,MAAtC;EAAA;EAmKE,YAAA,CAAYnV,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QALS9P,oBAAAA,GAAqB,MAArBA,EACAA,4BAAAA,GAA6B,cAD7BA,EAEAA,+BAAAA,GAAgC,YAFhCA,EAGAA,0BAAAA,GAAkD,OAHlDA,EAOPA,EAAKsjI,KAALtjI,GAAaq8F,EAAOinC,KAPbtjI,EAQPA,EAAKi8H,UAALj8H,GAAkBw+H,cACO,QAArBniC,EAAO4/B,UAAc,GAAOj8H,EAAK6uI,kBAAZ,GACOxyC,EAAO4/B,UAFrBuC,CARXx+H,EAWPA,EAAKy+H,OAALz+H,GAAiC,QAAlBq8F,EAAOoiC,OAAW,IAAcpiC,EAAOoiC,OAX/Cz+H,EAaPA,EAAKw/H,iBAALx/H,GAAyB2+H,eACrBtiC,EAAOmjC,iBAAPnjC,IAA4Br8F,EAAKy/H,0BADZd,CAblB3+H,EAePA,EAAK8uI,oBAAL9uI,GAA4B2+H,eACxBtiC,EAAOyyC,oBAAPzyC,IAA+Br8F,EAAK+uI,6BADZpQ,CAfrB3+H,EAkBPA,EAAK0+H,eAAL1+H,GACI2+H,eAAetiC,EAAOqiC,eAAPriC,IAA0Br8F,EAAK4+H,wBAA9CD,CAnBG3+H,EAqBPA,EAAK2/H,iB
AAL3/H,GAAyBg/H,eAAe3iC,EAAOsjC,iBAAtBX,CArBlBh/H,EAsBPA,EAAKgvI,oBAALhvI,GAA4Bg/H,eAAe3iC,EAAO2yC,oBAAtBhQ,CAtBrBh/H,EAuBPA,EAAK++H,eAAL/+H,GAAuBg/H,eAAe3iC,EAAO0iC,eAAtBC,CAvBhBh/H,EAyBPA,EAAK0/H,gBAAL1/H,GAAwB8+H,cAAcziC,EAAOqjC,gBAArBZ,CAzBjB9+H,EA0BPA,EAAKivI,mBAALjvI,GAA2B8+H,cAAcziC,EAAO4yC,mBAArBnQ,CA1BpB9+H,EA2BPA,EAAK6+H,cAAL7+H,GAAsB8+H,cAAcziC,EAAOwiC,cAArBC,CA3Bf9+H,EA6BPA,EAAKkvI,OAALlvI,GAAemvI,OACV,GAAGC,OAAgB,GAAqB,QAAlB/yC,EAAO6yC,OAAW,GAAO,CAAP,GAAW7yC,EAAO6yC,QAAvDE,EADOD,CA7BRnvI,EA+BPA,EAAKqvI,gBAALrvI,GAAwBmvI,OACtB,GACAC,OACK,GAA8B,QAA3B/yC,EAAOgzC,gBAAoB,GAAO,CAAP,GAAWhzC,EAAOgzC,iBADrDD,EAFsBD,CA/BjBnvI,EAoCPA,EAAKktI,SAALltI,GAAiBA,EAAKsjI,KApCftjI,EAqCPA,EAAKsvI,WAALtvI,GAAmB,IArCZA,EAsCPA,EAAKuvI,oBAALvvI,GAA4B,IAtCrBA;EAyIX,UApKmCkQ,aAAAA,GAAAA,GAoEjCs/H,WAAAA,MAAAA,GAAA,UAAMhwH,CAAN;EACEA,QAAa06G,mBAAmB16G,CAAnB06G,CAAb16G,EAEAngB,KAAK0+H,MAAL1+H,GAAcA,KAAK0gI,SAAL1gI,CACV,QADUA,GACCmgB,EAAWA,EAAW/lB,MAAX+lB,GAAoB,CAA/BA,GAAmCngB,KAAKikI,MADzCjkI,EACiD,IADjDA,EAEVA,KAAKmgI,iBAFKngI,EAEcA,KAAKsgI,iBAFnBtgI,GAEsC,CAFtCA,EAGVA,KAAKqgI,gBAHKrgI,CAFdmgB,EAMAngB,KAAKowI,eAALpwI,GAAuBA,KAAK0gI,SAAL1gI,CACnB,kBADmBA,GACEA,KAAKikI,OAAOjkI,KAAKikI,MADnBjkI,EAC2B,IAD3BA,EAEnBA,KAAKyvI,oBAFczvI,EAEQA,KAAK2vI,oBAFb3vI,GAEmC,CAFnCA,EAGnBA,KAAK4vI,mBAHc5vI,CANvBmgB,EAUIngB,KAAKo/H,OAALp/H,GACFA,KAAKuO,IAALvO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,GACCA,KAAKikI,MADNjkI,EACc,IADdA,EACoBA,KAAKq/H,eADzBr/H,EAERA,KAAK0/H,eAFG1/H,GAEc,CAFdA,EAEoBA,KAAKw/H,cAFzBx/H,CADVA,GAKFA,KAAKuO,IAALvO,GAAY,IAfdmgB,EAiBAngB,KAAKyyG,KAALzyG,IAAa,CAjBbmgB;KArE+BtP,EA+FjCs/H,WAAAA,KAAAA,GAAA,UAAK1+H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,UAAsB,OADtBjH,IAASA,GACErX,MAAX,EACE,MAAM,IAAIsqG,UAAJ,CACF,gDAA8CjzF,EAAOrX,MAArD,MADE,CAAN,CAGF,IAAIi2I,IAAa5+H,EAAO,CAAPA,CAAjB,CACAA,IAASA,EAAO,CAAPA,CAATA,CACA,IAcI2zB,CAdJ;EAAA,UAAMgnE,IAAiC,QAAtB8G,EAAiB9G,QAAK,IAAe8G,EAAiB9G,QAAvE,CAEI,IAAIzrG,EAAKkvI,OAAT,IAAoBlvI,EAAKkvI,OAALlvI,GAAe,CAAnC,IAA4D,QAApBA,EAAKsvI,WAA7C,KACFtvI,EAAKsvI,WAALtvI,GAAmB2vI,oBACI;EAAM,eAAAr1B,SAAaxpG,CAAbwpG,CAAA;SADVq1B,EAEI3vI,EAAKkvI,OAFTS,EAEkBlkC,CAFlBkkC,CADjB,GAKA,IAAI3vI,EAAKqvI,gBAAT,IAA6BrvI,EAAKqvI,gBAALrvI,GAAwB,CAArD,IAC6B,QAA7BA,EAAKuvI,oBADL,KAEFvvI,EAAKuvI,oBAALvvI,GACI2vI,oBACI;EAAM,eAAAr1B,SAAao1B,CAAbp1B,CAAA;SADVq1B,EACoC3vI,EAAKqvI,gBADzCM,EAEIlkC,CAFJkkC,CAHF,CALA,CAaJ,IAAMC,IAAiB5vI,EAAKsvI,WAA5B;EAAA,UACMO,IAAoB7vI,EAAKuvI,oBAD/B,CAGE9qG,IAAIg/F,MADQ,QAAVmM,CAAU,GACFhpC,IAAQ91F,CAAR81F,EAAgBgpC,CAAhBhpC,CADE,GAGF91F,CAFN2yH,EAA+BzjI,EAAK+9H,MAAL/9H,CAAY8E,IAAZ9E,EAA/ByjI,CAAJh/F,EAIe,QAAbzkC,EAAK4N,IAAQ,KACf62B,IAAIw5F,QAAUx5F,CAAVw5F,EAAaj+H,EAAK4N,IAAL5N,CAAU8E,IAAV9E,EAAbi+H,CADW,CAJfx5F,EAOe,QAAborG,CAAa,KACfH,IAAa9oC,IAAQ8oC,CAAR9oC,EAAoBipC,CAApBjpC,CADE,CAPfniE,CAUF,IAAItzB,IAAS21F,IAAQriE,CAARqiE,EAAW28B,MAAMiM,CAANjM,EAAkBzjI,EAAKyvI,eAALzvI,CAAqB8E,IAArB9E,EAAlByjI,CAAX38B,CAAb,CAMA,OALuB,QAAnB9mG,EAAKi8H,UAAc,KACrB9qH,IAASnR,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsBmR,CAAtBnR,CADY,IAKfmR,GAAQA,EAAhB;OA1CK4G,CAAP;KAhG+B7H,EA8IjCs/H,WAAAA,UAAAA,GAAA;EACE,QAAMnzC,MACJinC,OAAOjkI,KAAKikI,OACZrH,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GACZT,SAASp/H,KAAKo/H,SACde,mBAAmBL,qBAAqB9/H,KAAKmgI,iBAA1BL,GACnB2P,sBAAsB3P,qBAAqB9/H,KAAKyvI,oBAA1B3P,GACtBT,iBAAiBS,qBAAqB9/H,KAAKq/H,eAA1BS,GACjBQ,mBAAmBP,qBAAqB//H,KAAKsgI,iBAA1BP,GACnB4P,sBAAsB5P,qBAAqB//H,KAAK2vI,oBAA1B5P,GACtBL,iBAAiBK,qBAAqB//H,KAAK0/H,eAA1BK,GACjB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrBM,kBAAkBL,oBAAoBhgI,KAAKqgI,gBAAzBL,GAClB4P,qBAAqB5P,oBAAoBhgI,KAAK4vI,mBAAzB5P,GACrBR,gBAAgBQ,oBAAoB
hgI,KAAKw/H,cAAzBQ,GAChB6P,SAAS7vI,KAAK6vI,SACdG,kBAAkBhwI,KAAKgwI,kBAfzB;EAAA,QAiBMhT,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAjBnB,CAmBA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAlK+BnsF,EAC1Bs/H,WAAAA,GAAY,eADct/H,GAoKnC;IApKmC4/H,QAnInC,eAwSc9oC,cAAcwoC,eAwG5B;EAEE,YAAA,CAAYnzC,CAAZ;aACEA,EAAO0wC,IAAP1wC,GAAc,IAAImzC,aAAJ,CAAkBnzC,CAAlB,CAAdA,EACAvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAuGJ,UA3G+BI,aAAAA,GAAAA,GAQ7B6/H,WAAAA,KAAAA,GAAA,UAAKj/H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACmB,cAAzB/X,EAAK+sI,IAAL/sI,CAAUsvI,WAAe,KAC3B/f,QAAYvvH,EAAK+sI,IAAL/sI,CAAUsvI,WAAtB/f,GACAvvH,EAAK+sI,IAAL/sI,CAAUsvI,WAAVtvI,GAAwB,IAFG,GAIS,QAAlCA,EAAK+sI,IAAL/sI,CAAUuvI,oBAAwB,KACpChgB,QAAYvvH,EAAK+sI,IAAL/sI,CAAUuvI,oBAAtBhgB,GACAvvH,EAAK+sI,IAAL/sI,CAAUuvI,oBAAVvvI,GAAiC,IAFG,CAJT,CAQ7B,IAAMk+C,IAAiB,QAAVq0D,CAAU,GAAO,IAAP,GAAcA,EAAar0D,IAAlD;EAAA,UACMutD,IAAqB,QAAV8G,CAAU,GAAO,IAAP,GAAcA,EAAiB9G,QAD1D;EAAA,UAEMsgC,IACQ,QAAVx5B,CAAU,GAAO,IAAP,GAAcA,EAAqBw5B,YAHjD,CAIA,OAAOj8H,WAAAA,CAAMxR,IAANwR,KAAAA,EAAAA,EAAWgB,CAAXhB,IAAoBouC,SAAMutD,aAAUsgC,iBAApCj8H,CAAP;OAbKiI,CAAP;KAT2B7H,EA4B7B1M,qBAAAA,CAAIusI,WAAJvsI,SAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4BikI,KAApC;2CADF9/H,CA5B6B0M,EAgC7B1M,qBAAAA,CAAIusI,WAAJvsI,cAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4B48H,UAApC;2CADFz4H,CAhC6B0M,EAoC7B1M,qBAAAA,CAAIusI,WAAJvsI,WAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4Bo/H,OAApC;2CADFj7H,CApC6B0M,EAwC7B1M,qBAAAA,CAAIusI,WAAJvsI,qBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4BmgI,iBAApC;2CADFh8H,CAxC6B0M,EA4C7B1M,qBAAAA,CAAIusI,WAAJvsI,wBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4ByvI,oBAApC;2CADFtrI,CA5C6B0M,EAgD7B1M,qBAAAA,CAAIusI,WAAJvsI,mBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4Bq/H,eAApC;2CADFl7H,CAhD6B0M,EAoD7B1M,qBAAAA,CAAIusI,WAAJvsI,qBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4BsgI,iBAApC;2CADFn8H,CApD6B0M,EAwD7B1M,qBAAAA,CAAIusI,WAAJvsI,wBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4B2vI,oBAApC;2CADFxrI,CAxD6B0M,EA4D7B1M,qBAAAA,CAAIusI,WAAJvsI,mBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4B0/H,eAApC;2CADFv7H,CA5D6B0M,EAgE7B1M,qBAAAA,CAAIusI,WAAJvsI,oBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4BqgI,gBAApC;2CADFl8H,CAhE6B0M,EAoE7B1M,qBAAAA,CAAIusI,WAAJvsI,uBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4B4vI,mBAApC;2CADFzrI,CApE6B0M,EAwE7B1M,qBAAAA,CAAIusI,WAAJvsI,kBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4Bw/H,cAApC;2CADFr7H,CAxE6B0M,EA4E7B1M,qBAAAA,CAAIusI,WAAJvsI,WAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4B6vI,OAApC;2CADF1rI,CA5E6B0M,EAgF7B1M,qBAAAA,CAAIusI,WAAJvsI,oBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAA4BgwI,gBAApC;2CADF7rI,CAhF6B0M,EAoF7B6/H,WAAAA,UAAAA,GAAA;EACE,QAAM1zC,MACJinC,OAAOjkI,KAAKikI,OACZrH,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GACZT,SAASp/H,KAAKo/H,SACde,mBAAmBL,qBAAqB9/H,KAAKmgI,iBAA1BL,GACnB2P,sBAAsB3P,qBAAqB9/H,KAAKyvI,oBAA1B3P,GACtBT,iBAAiBS,qBAAqB9/H,KAAKq/H,eAA1BS,GACjBQ,mBAAmBP,qBAAqB//H,KAAKsgI,iBAA1BP,GACnB4P,sBAAsB5P,qBAAqB//H,KAAK2vI,oBAA1B5P,GACtBL,iBAAiBK,qBAAqB//H,KAAK0/H,eAA1BK,GACjB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrBM,kBAAkBL,oBAAoBhgI,KAAKqgI,gBAAzBL,GAClB4P,qBAAqB5P,oBAAoBhgI,KAAK4vI,mBAAzB5P,GACrBR,gBAAgBQ,oBAAoBhgI,KAAKw/H,cAAzBQ,GAChB6P,SAAS7vI,KAAK6vI,SACdG,kBAAkBhwI,KAAKgwI,kBAfzB;EAAA,QAiBMhT,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAjBnB,CAoBA,cAFOusH,EAAiB0Q,MACxBvpI,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAzG2BnsF,EACtB6/H,WAAAA,GAAY,WADU7/H,GA2G/B;IA3G+Bq9H,IAA/B,eA4GcvmC,cAAc+oC,WAyE5B;EAoCE,YAAA,CAAY1zC,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAXS9P,oBAAAA,GAAqB,MAArBA,EACAA,8BAAAA,GAA+B,aA
D/BA,EAGAA,4BAAAA,GAA6B,cAH7BA,EAIAA,+BAAAA,GAAgC,YAJhCA,EAKAA,0BAAAA,GAAkD,OALlDA,EAcPA,EAAKsjI,KAALtjI,GAAaq8F,EAAOinC,KAdbtjI,EAePA,EAAKi8H,UAALj8H,GAAkBw+H,mBACQ96B,MAAtBrH,EAAO4/B,aAA2Bj8H,EAAK6uI,qBACLxyC,EAAO4/B,UAF3BuC,CAfXx+H,EAkBPA,EAAKgwI,mBAALhwI,GAA2Bw+H,mBACQ96B,MAA/BrH,EAAO2zC,sBACHhwI,EAAKiwI,+BACL5zC,EAAO2zC,mBAHYxR,CAlBpBx+H,EAsBPA,EAAKy+H,OAALz+H,GAAiC,QAAlBq8F,EAAOoiC,OAAW,IAAcpiC,EAAOoiC,OAtB/Cz+H,EAwBPA,EAAKw/H,iBAALx/H,GAAyB2+H,eACrBtiC,EAAOmjC,iBAAPnjC,IAA4Br8F,EAAKy/H,0BADZd,CAxBlB3+H,EA0BPA,EAAK8uI,oBAAL9uI,GAA4B2+H,eACxBtiC,EAAOyyC,oBAAPzyC,IAA+Br8F,EAAK+uI,6BADZpQ,CA1BrB3+H,EA6BPA,EAAK0+H,eAAL1+H,GACI2+H,eAAetiC,EAAOqiC,eAAPriC,IAA0Br8F,EAAK4+H,wBAA9CD,CA9BG3+H,EAgCPA,EAAK2/H,iBAAL3/H,GAAyBg/H,eAAe3iC,EAAOsjC,iBAAtBX,CAhClBh/H,EAiCPA,EAAKgvI,oBAALhvI,GAA4Bg/H,eAAe3iC,EAAO2yC,oBAAtBhQ,CAjCrBh/H,EAkCPA,EAAK++H,eAAL/+H,GAAuBg/H,eAAe3iC,EAAO0iC,eAAtBC,CAlChBh/H,EAoCPA,EAAK0/H,gBAAL1/H,GAAwB8+H,cAAcziC,EAAOqjC,gBAArBZ,CApCjB9+H,EAqCPA,EAAKivI,mBAALjvI,GAA2B8+H,cAAcziC,EAAO4yC,mBAArBnQ,CArCpB9+H,EAsCPA,EAAK6+H,cAAL7+H,GAAsB8+H,cAAcziC,EAAOwiC,cAArBC,CAtCf9+H,EAwCPA,EAAKkvI,OAALlvI,GAAemvI,OACV,GAAGC,OAAgB,GAAqB,QAAlB/yC,EAAO6yC,OAAW,GAAO,CAAP,GAAW7yC,EAAO6yC,QAAvDE,EADOD,CAxCRnvI,EA0CPA,EAAKqvI,gBAALrvI,GAAwBmvI,OACtB,GACAC,OACK,GAA8B,QAA3B/yC,EAAOgzC,gBAAoB,GAAO,CAAP,GAAWhzC,EAAOgzC,iBADrDD,EAFsBD,CA1CjBnvI,EA+CPA,EAAKkwI,cAALlwI,GAAsBq8F,EAAO6zC,cA/CtBlwI,EAgDPA,EAAKktI,SAALltI,GAAiBA,EAAKsjI,KAhDftjI,EAiDPA,EAAKsvI,WAALtvI,GAAmB,IAjDZA,EAkDPA,EAAKuvI,oBAALvvI,GAA4B,IAlDrBA;EAsKX,UA/L6BkQ,aAAAA,GAAAA,GA8EpBigI,WAAAA,MAAAA,GAAP,UAAa3wH,CAAb;EAEE,QAAMqgH,KADNrgH,IAAa06G,mBAAmB16G,CAAnB06G,GACe16G,EAAW/lB,MAAX+lB,GAAoB,EAAhD,CACAngB,KAAK0+H,MAAL1+H,GAAcA,KAAK0gI,SAAL1gI,CACV,QADUA,GACCwgI,GAAuB,IAAbxgI,KAAKikI,MADhBjkI,EAC4B,IAD5BA,EACkCA,KAAKmgI,iBADvCngI,EAEVA,KAAKsgI,iBAFKtgI,GAEc,CAFdA,EAEoBA,KAAKqgI,gBAFzBrgI,CAAdA,EAGAA,KAAKowI,eAALpwI,GAAuBA,KAAK0gI,SAAL1gI,CACnB,kBADmBA,GACEA,KAAKikI,OAAoB,IAAbjkI,KAAKikI,MADnBjkI,EAC+B,IAD/BA,EAEnBA,KAAKyvI,oBAFczvI,EAEQA,KAAK2vI,oBAFb3vI,GAEmC,CAFnCA,EAGnBA,KAAK4vI,mBAHc5vI,CAHvBA,EAOIA,KAAKo/H,OAALp/H,GACFA,KAAKuO,IAALvO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,GACc,IAAbA,KAAKikI,MADNjkI,EACkB,IADlBA,EACwBA,KAAKq/H,eAD7Br/H,EAERA,KAAK0/H,eAFG1/H,GAEc,CAFdA,EAEoBA,KAAKw/H,cAFzBx/H,CADVA,GAKFA,KAAKuO,IAALvO,GAAY,IAZdA,EAgBAA,KAAKyyG,KAALzyG,IAAa,CAhBbA;KAjFyB6Q,EAoG3BigI,WAAAA,KAAAA,GAAA,UAAKr/H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAEV,UAAsB,OADtBjH,IAASA,GACErX,MAAX,EACE,MAAM,IAAIsqG,UAAJ,CACF,yDACGjzF,EAAOrX,MADV,MADE,CAAN,CAKF,IAAMgyG,IAAiC,QAAtB8G,EAAiB9G,QAAK,IAAe8G,EAAiB9G,QAAvE;EAAA,UACI2kC,IAAWt/H,EAAO,CAAPA,CADf,CAEAA,IAASA,EAAO,CAAPA,CAATA,EAKI,IAAI9Q,EAAKkvI,OAAT,IAAoBlvI,EAAKkvI,OAALlvI,GAAe,CAAnC,IAA4D,QAApBA,EAAKsvI,WAA7C,KACFtvI,EAAKsvI,WAALtvI,GAAmB2vI,oBACI;EAAM,eAAAr1B,SAAaxpG,CAAbwpG,CAAA;SADVq1B,EAEI3vI,EAAKkvI,OAFTS,EAEkBlkC,CAFlBkkC,EAE4B,CAF5BA,CADjB,CALJ7+H,EAUI,IAAI9Q,EAAKqvI,gBAAT,IAA6BrvI,EAAKqvI,gBAALrvI,GAAwB,CAArD,IAC6B,QAA7BA,EAAKuvI,oBADL,KAEFvvI,EAAKuvI,oBAALvvI,GACI2vI,oBACI;EAAM,eAAAr1B,SAAa81B,CAAb91B,CAAA;SADVq1B,EACkC3vI,EAAKqvI,gBADvCM,EACyDlkC,CADzDkkC,EAEI,CAFJA,CAHF,CAVJ7+H,CAiBA,IAEIksC,CAFJ;EAAA,UAGI1iD,CAHJ;EAAA,UAII+1I,CAJJ;EAAA,UAAMT,IAAS5vI,EAAKsvI,WAApB;EAAA,UACMO,IAAY7vI,EAAKuvI,oBADvB,CAMI,IAAIvvI,EAAKkvI,OAAT,IAAoBlvI,EAAKkvI,OAALlvI,GAAe,CAAnC,KACF8Q,IAAS81F,IAAQ91F,CAAR81F,EAAgBgpC,EAAO,CAAPA,CAAhBhpC,CADP,EAGJ,IAAI0pC,IAAU7M,MAAM3yH,CAAN2yH,EAAczjI,EAAK+9H,MAAL/9H,CAAY8E,IAAZ9E,EAAdyjI,CAAd,CACIzjI,EAAKy+H,OAALz+H,KACFswI,IAAUrS,QAAUqS,CAAVrS,EAAmBj+H,EAAK4N,IAAL5N,CAA
U8E,IAAV9E,EAAnBi+H,CADRj+H,GAGA,IAAIA,EAAKqvI,gBAAT,IAA6BrvI,EAAKqvI,gBAALrvI,GAAwB,CAArD,KACFowI,IAAWxpC,IAAQwpC,CAARxpC,EAAkBipC,EAAU,CAAVA,CAAlBjpC,CADT,CAHA5mG,CAOJ,IAAMuwI,IAAuBvwI,EAAKyvI,eAALzvI,CAAqB8E,IAArB9E,EAA7B;EAAA,UACMwY,kDADN;EAAA,UACOg4H,QADP;EAAA,UACYC,QADZ;EAAA,UAIMC,IAAcjN,MAAM2M,CAAN3M,EAAgB+M,CAAhB/M,CAJpB;EAAA,UAMM7xG,6BANN;EAAA,UAMO++G,QANP;EAAA,UAMWj2D,QANX;EAAA,UAMek2D,QANf;EAAA,UAOM5+G,6BAPN;EAAA,UAOO6+G,QAPP;EAAA,UAOmBC,QAPnB,CASA9zF,IAAIh9C,EAAKgwI,mBAALhwI,CAAyBzB,KAAzByB,CAA+B8mG,IAAQ6pC,CAAR7pC,EAAY+pC,CAAZ/pC,CAA/B9mG,CAAJg9C,EACA1iD,IAAI0F,EAAKgwI,mBAALhwI,CAAyBzB,KAAzByB,CAA+B8mG,IAAQpsB,CAARosB,EAAYgqC,CAAZhqC,CAA/B9mG,CADJg9C,CAGA,IAAM+zF,IAAatN,MAAM78B,IAAQtsG,CAARssG,EAAWwpC,CAAXxpC,CAAN68B,EAA4BgN,CAA5BhN,CAAnB,CACA4M,IAAKrwI,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsB8mG,IAAQ8pC,CAAR9pC,EAAYiqC,CAAZjqC,CAAtB9mG,CAALqwI,CAEA,IAAM5rG,IAAIqiE,IACNF,IAAQ5pD,CAAR4pD,EAAWwpC,CAAXxpC,CADME,EACgBF,IAAQE,IAAQC,UAAU,CAAVA,CAARD,EAAsBuE,IAAQruD,CAARquD,CAAtBvE,CAARF,EAA2CypC,CAA3CzpC,CADhBE,CAAV,CAGA,QAAQriE,GAAGA,EAAX;OA9DK1sB,CAAP;KArGyB7H,EAuK3BigI,WAAAA,UAAAA,GAAA;EACE,QAAM9zC,MACJinC,OAAOjkI,KAAKikI,OACZrH,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GACZ8Q,qBAAqB9Q,oBAAoB7/H,KAAK2wI,mBAAzB9Q,GACrBT,SAASp/H,KAAKo/H,SACde,mBAAmBL,qBAAqB9/H,KAAKmgI,iBAA1BL,GACnB2P,sBAAsB3P,qBAAqB9/H,KAAKyvI,oBAA1B3P,GACtBT,iBAAiBS,qBAAqB9/H,KAAKq/H,eAA1BS,GACjBQ,mBAAmBP,qBAAqB//H,KAAKsgI,iBAA1BP,GACnB4P,sBAAsB5P,qBAAqB//H,KAAK2vI,oBAA1B5P,GACtBL,iBAAiBK,qBAAqB//H,KAAK0/H,eAA1BK,GACjB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrBM,kBAAkBL,oBAAoBhgI,KAAKqgI,gBAAzBL,GAClB4P,qBAAqB5P,oBAAoBhgI,KAAK4vI,mBAAzB5P,GACrBR,gBAAgBQ,oBAAoBhgI,KAAKw/H,cAAzBQ,GAChB6P,SAAS7vI,KAAK6vI,SACdG,kBAAkBhwI,KAAKgwI,kBACvBa,gBAAgB7wI,KAAK6wI,gBAjBvB;EAAA,QAmBM7T,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAnBnB,CAqBA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA7LyBnsF,EACpBigI,WAAAA,GAAY,SADQjgI,GA+L7B;IA/L6B4/H,QAA7B,eAgMc9oC,cAAcmpC,SAmD5B;EAEE,YAAA,CAAY9zC,CAAZ;aACgC,MAA1BA,EAAO6zC,cAAmB,IAC5B3vI,QAAQ8Z,IAAR9Z,CACI,gHADJA,CAD4B,EAK9B87F,EAAO0wC,IAAP1wC,GAAc,IAAI8zC,OAAJ,CAAY9zC,CAAZ,CALgB,EAM9BvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EAwHJ,UAjIyBI,aAAAA,GAAAA,GAavB8gI,WAAAA,KAAAA,GAAA,UAAKlgI,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACmB,cAAzB/X,EAAK+sI,IAAL/sI,CAAUsvI,WAAe,KAC3B/f,QAAYvvH,EAAK+sI,IAAL/sI,CAAUsvI,WAAtB/f,GACAvvH,EAAK+sI,IAAL/sI,CAAUsvI,WAAVtvI,GAAwB,IAFG,GAIS,QAAlCA,EAAK+sI,IAAL/sI,CAAUuvI,oBAAwB,KACpChgB,QAAYvvH,EAAK+sI,IAAL/sI,CAAUuvI,oBAAtBhgB,GACAvvH,EAAK+sI,IAAL/sI,CAAUuvI,oBAAVvvI,GAAiC,IAFG,CAJT,CAQ7B,IAAMk+C,IAAiB,QAAVq0D,CAAU,GAAO,IAAP,GAAcA,EAAar0D,IAAlD;EAAA,UACMutD,IAAqB,QAAV8G,CAAU,GAAO,IAAP,GAAcA,EAAiB9G,QAD1D;EAAA,UAEMsgC,IACQ,QAAVx5B,CAAU,GAAO,IAAP,GAAcA,EAAqBw5B,YAHjD,CAIA,OAAOj8H,WAAAA,CAAMxR,IAANwR,KAAAA,EAAAA,EAAWgB,CAAXhB,IAAoBouC,SAAMutD,aAAUsgC,iBAApCj8H,CAAP;OAbKiI,CAAP;KAdqB7H,EA+BvB1M,qBAAAA,CAAIwtI,WAAJxtI,SAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsBikI,KAA9B;2CADF9/H,CA/BuB0M,EAmCvB1M,qBAAAA,CAAIwtI,WAAJxtI,cAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsB48H,UAA9B;2CADFz4H,CAnCuB0M,EAuCvB1M,qBAAAA,CAAIwtI,WAAJxtI,uBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsB2wI,mBAA9B;2CADFxsI,CAvCuB0M,EA2CvB1M,qBAAAA,CAAIwtI,WAAJxtI,WAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsBo/H,OAA9B;2CADFj7H,CA3CuB0M,EA+CvB1M,qBAAAA,CAAIwtI,WAAJxtI,qBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsBmgI,iBAA9B;2CADFh8H,CA/CuB0M,EAmDvB1M,qBAAAA,CAAIwtI,WAAJxtI,wBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsByvI,oBAA9B;2CADFtrI,CAnDuB0M,EAuDvB1M,qBAAAA,CAAIwtI,WAAJxtI,mBAAAA,SAAA;EACE,aA
AQnE,KAAK0tI,IAAL1tI,CAAsBq/H,eAA9B;2CADFl7H,CAvDuB0M,EA2DvB1M,qBAAAA,CAAIwtI,WAAJxtI,qBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsBsgI,iBAA9B;2CADFn8H,CA3DuB0M,EA+DvB1M,qBAAAA,CAAIwtI,WAAJxtI,wBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsB2vI,oBAA9B;2CADFxrI,CA/DuB0M,EAmEvB1M,qBAAAA,CAAIwtI,WAAJxtI,mBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsB0/H,eAA9B;2CADFv7H,CAnEuB0M,EAuEvB1M,qBAAAA,CAAIwtI,WAAJxtI,oBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsBqgI,gBAA9B;2CADFl8H,CAvEuB0M,EA2EvB1M,qBAAAA,CAAIwtI,WAAJxtI,uBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsB4vI,mBAA9B;2CADFzrI,CA3EuB0M,EA+EvB1M,qBAAAA,CAAIwtI,WAAJxtI,kBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsBw/H,cAA9B;2CADFr7H,CA/EuB0M,EAmFvB1M,qBAAAA,CAAIwtI,WAAJxtI,WAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsB6vI,OAA9B;2CADF1rI,CAnFuB0M,EAuFvB1M,qBAAAA,CAAIwtI,WAAJxtI,oBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsBgwI,gBAA9B;2CADF7rI,CAvFuB0M,EA2FvB1M,qBAAAA,CAAIwtI,WAAJxtI,kBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAsB6wI,cAA9B;2CADF1sI,CA3FuB0M,EA+FvB8gI,WAAAA,UAAAA,GAAA;EACE,QAAM30C,MACJinC,OAAOjkI,KAAKikI,OACZrH,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GACZ8Q,qBAAqB9Q,oBAAoB7/H,KAAK2wI,mBAAzB9Q,GACrBT,SAASp/H,KAAKo/H,SACde,mBAAmBL,qBAAqB9/H,KAAKmgI,iBAA1BL,GACnB2P,sBAAsB3P,qBAAqB9/H,KAAKyvI,oBAA1B3P,GACtBT,iBAAiBS,qBAAqB9/H,KAAKq/H,eAA1BS,GACjBQ,mBAAmBP,qBAAqB//H,KAAKsgI,iBAA1BP,GACnB4P,sBAAsB5P,qBAAqB//H,KAAK2vI,oBAA1B5P,GACtBL,iBAAiBK,qBAAqB//H,KAAK0/H,eAA1BK,GACjB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrBM,kBAAkBL,oBAAoBhgI,KAAKqgI,gBAAzBL,GAClB4P,qBAAqB5P,oBAAoBhgI,KAAK4vI,mBAAzB5P,GACrBR,gBAAgBQ,oBAAoBhgI,KAAKw/H,cAAzBQ,GAChB6P,SAAS7vI,KAAK6vI,SACdG,kBAAkBhwI,KAAKgwI,kBACvBa,gBAAgB7wI,KAAK6wI,gBAjBvB;EAAA,QAmBM7T,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAnBnB,CAsBA,cAFOusH,EAAiB0Q,MACxBvpI,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAtHqBnsF,EAyHhB8gI,YAAAA,GAAP,UACI50C,CADJ,EAEIC,CAFJ;EAME,WAHgC,MAA5BA,EAAsB40C,aAAM,KAC9B50C,EAAuB6zC,cAAvB7zC,GAA2B,CADG,GAGzB,IAAID,CAAJ,CAAQC,CAAR,CAAP;KA/HqBnsF,EAChB8gI,WAAAA,GAAY,KADI9gI,GAiIzB;IAjIyBq9H,IAAzB,eAkIcvmC,cAAcgqC,KAkF5B;EAqCE,YAAA,CAAY30C,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAXS9P,oBAAAA,GAAqB,MAArBA,EACAA,8BAAAA,GAA+B,aAD/BA,EAEAA,4BAAAA,GAA6B,cAF7BA,EAGAA,+BAAAA,GAAgC,YAHhCA,EAKAA,0BAAAA,GAA2B,OAL3BA,EAcPA,EAAKsjI,KAALtjI,GAAaq8F,EAAOinC,KAdbtjI,EAePA,EAAKi8H,UAALj8H,GAAkBw+H,mBACQ96B,MAAtBrH,EAAO4/B,aAA2Bj8H,EAAK6uI,qBACLxyC,EAAO4/B,UAF3BuC,CAfXx+H,EAkBPA,EAAKgwI,mBAALhwI,GAA2Bw+H,mBACQ96B,MAA/BrH,EAAO2zC,sBACHhwI,EAAKiwI,+BACL5zC,EAAO2zC,mBAHYxR,CAlBpBx+H,EAsBPA,EAAKy+H,OAALz+H,GAAiC,QAAlBq8F,EAAOoiC,OAAW,IAAcpiC,EAAOoiC,OAtB/Cz+H,EAwBPA,EAAKw/H,iBAALx/H,GAAyB2+H,eACrBtiC,EAAOmjC,iBAAPnjC,IAA4Br8F,EAAKy/H,0BADZd,CAxBlB3+H,EA0BPA,EAAK8uI,oBAAL9uI,GAA4B2+H,eACxBtiC,EAAOyyC,oBAAPzyC,IAA+Br8F,EAAK+uI,6BADZpQ,CA1BrB3+H,EA6BPA,EAAK0+H,eAAL1+H,GACI2+H,eAAetiC,EAAOqiC,eAAPriC,IAA0Br8F,EAAK4+H,wBAA9CD,CA9BG3+H,EA+BPA,EAAKkxI,cAALlxI,GAAsBq8F,EAAO60C,cA/BtBlxI,EAiCPA,EAAK2/H,iBAAL3/H,GAAyBg/H,eAAe3iC,EAAOsjC,iBAAtBX,CAjClBh/H,EAkCPA,EAAKgvI,oBAALhvI,GAA4Bg/H,eAAe3iC,EAAO2yC,oBAAtBhQ,CAlCrBh/H,EAmCPA,EAAK++H,eAAL/+H,GAAuBg/H,eAAe3iC,EAAO0iC,eAAtBC,CAnChBh/H,EAqCPA,EAAK0/H,gBAAL1/H,GAAwB8+H,cAAcziC,EAAOqjC,gBAArBZ,CArCjB9+H,EAsCPA,EAAKivI,mBAALjvI,GAA2B8+H,cAAcziC,EAAO4yC,mBAArBnQ,CAtCpB9+H,EAuCPA,EAAK6+H,cAAL7+H,GAAsB8+H,cAAcziC,EAAOwiC,cAArBC,CAvCf9+H,EAyCPA,EAAKkvI,OAALlvI,GAAemvI,OACV,GAAGC,OAAgB,GAAqB,QAAlB/yC,EAAO6yC,OAAW,GAAO,CAAP,GAAW7yC,EAAO6yC,QAAvDE,EADOD,CAzCRnvI,EA2CPA,EAAKqvI,gBAALrvI,GAAwBmvI,OACtB,GACAC,OACK,GAA8B,QAA3B/yC,EAAOgzC,gBAAoB,GAAO,CAAP,GAA
WhzC,EAAOgzC,iBADrDD,EAFsBD,CA3CjBnvI,EAgDPA,EAAKkwI,cAALlwI,GAAsBq8F,EAAO6zC,cAhDtBlwI,EAiDPA,EAAKktI,SAALltI,IAAkBA,EAAKsjI,OAAOtjI,EAAKsjI,MAjD5BtjI,EAkDPA,EAAKsvI,WAALtvI,GAAmB,IAlDZA,EAmDPA,EAAKuvI,oBAALvvI,GAA4B,IAnDrBA;EAoLX,UA9M8BkQ,aAAAA,GAAAA,GAgFrBihI,WAAAA,MAAAA,GAAP,UAAa3xH,CAAb;EAEE,QAQIk/G,CARJ;EAAA,SAAA;EAAA,QAAMmB,KADNrgH,IAAa06G,mBAAmB16G,CAAnB06G,GACe16G,EAAW/lB,MAAX+lB,GAAoB,EAAhD,CASA,IARAngB,KAAK0+H,MAAL1+H,GAAcA,KAAK0gI,SAAL1gI,CACV,QADUA,GACCwgI,GAAuB,IAAbxgI,KAAKikI,MADhBjkI,EAC4B,IAD5BA,EACkCA,KAAKmgI,iBADvCngI,EAEVA,KAAKsgI,iBAFKtgI,GAEc,CAFdA,EAEoBA,KAAKqgI,gBAFzBrgI,CAAdA,EAGAA,KAAKowI,eAALpwI,GAAuBA,KAAK0gI,SAAL1gI,CACnB,kBADmBA,GACEA,KAAKikI,OAAoB,IAAbjkI,KAAKikI,MADnBjkI,EAC+B,IAD/BA,EAEnBA,KAAKyvI,oBAFczvI,EAEQA,KAAK2vI,oBAFb3vI,GAEmC,CAFnCA,EAGnBA,KAAK4vI,mBAHc5vI,CAHvBA,EAQIA,KAAKo/H,OAAT,EAAkB;EAChB,UAAIp/H,KAAK6xI,cAAT,EAAyB;EACvB,YAAME,IAAmB/xI,KAAKq/H,eAA9B;EAAA,YACM2S,IAAgBhyI,KAAKikI,KAD3B,CAEA5E,IAAkB;EAAK,oBAAA;;EAWvB,kBAXgDxuH,aAAAA,GAAAA,GAG9CohI,WAAAA,MAAAA,GAAA,UAAMh2I,CAAN,EAAoBgC,CAApB;EAEE,gBAAMi0I,IAAKH,EAAiB7yI,KAAjB6yI,EAAwBC,EAAxBD,CAAX;EAAA,gBACMI,IAAK,IAAK1lC,IAAL,GAAavtG,OAAO8yI,GAD/B;EAAA,gBAEMI,IAASL,EAAiB7yI,KAAjB6yI,EAAwC,IAAhBC,EAAxBD,CAFf,CAGA,OAAOha,qBACHA,qBAAuBma,CAAvBna,EAA2Boa,CAA3Bpa,CADGA,EAC6Bqa,CAD7Bra,CAAP;aAR4ClnH,GAWhD;YAXgD07F,0BAC3B,eADH,GAAlB8yB;SAHF,MAgBEA,IAAkBr/H,KAAKq/H,eAAvBA,CAEFr/H,KAAKuO,IAALvO,GAAYA,KAAK0gI,SAAL1gI,CACR,MADQA,GACc,IAAbA,KAAKikI,MADNjkI,EACkB,IADlBA,EACwBq/H,CADxBr/H,EACyCA,KAAK0/H,eAD9C1/H,GAER,CAFQA,EAEFA,KAAKw/H,cAFHx/H,CAAZA;OAnBF,MAuBEA,KAAKuO,IAALvO,GAAY,IAAZA,CAIFA,KAAKyyG,KAALzyG,IAAa,CAAbA;KAtH0B6Q,EAyH5BihI,WAAAA,KAAAA,GAAA,UAAKrgI,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV,UAAM0zF,IAAiC,QAAtB8G,EAAiB9G,QAAK,IAAe8G,EAAiB9G,QAAvE,CAEA,IAAsB,OADtB36F,IAASA,GACErX,MAAX,EACE,MAAM,IAAIsqG,UAAJ,CACF,0DACGjzF,EAAOrX,MADV,MADE,CAAN,CAIF,IAAI22I,IAAWt/H,EAAO,CAAPA,CAAf;EAAA,UACM4gI,IAAW5gI,EAAO,CAAPA,CADjB,CAEAA,IAASA,EAAO,CAAPA,CAATA,EACI,IAAI9Q,EAAKkvI,OAAT,IAAoBlvI,EAAKkvI,OAALlvI,GAAe,CAAnC,IAA4D,QAApBA,EAAKsvI,WAA7C,KACFtvI,EAAKsvI,WAALtvI,GAAmB2vI,oBACI;EAAM,eAAAr1B,SAAaxpG,CAAbwpG,CAAA;SADVq1B,EAEI3vI,EAAKkvI,OAFTS,EAEkBlkC,CAFlBkkC,EAE4B,CAF5BA,CADjB,CADJ7+H,EAMI,IAAI9Q,EAAKqvI,gBAAT,IAA6BrvI,EAAKqvI,gBAALrvI,GAAwB,CAArD,IAC6B,QAA7BA,EAAKuvI,oBADL,KAEFvvI,EAAKuvI,oBAALvvI,GACI2vI,oBACI;EAAM,eAAAr1B,SAAa81B,CAAb91B,CAAA;SADVq1B,EACkC3vI,EAAKqvI,gBADvCM,EACyDlkC,CADzDkkC,EAEI,CAFJA,CAHF,CANJ7+H,CAaA,IAOI1W,CAPJ;EAAA,UAQIgE,CARJ;EAAA,UASIorB,CATJ;EAAA,UAUI5X,CAVJ;EAAA,UAAMg+H,IAAS5vI,EAAKsvI,WAApB;EAAA,UACMO,IACF7vI,EAAKuvI,oBAFT,CAWI,IAAIvvI,EAAKkvI,OAAT,IAAoBlvI,EAAKkvI,OAALlvI,GAAe,CAAnC,KACF8Q,IAAS81F,IAAQ91F,CAAR81F,EAAgBgpC,EAAO,CAAPA,CAAhBhpC,CADP,EAGJ,IAAI5pD,IAAIymF,MAAM3yH,CAAN2yH,EAAczjI,EAAK+9H,MAAL/9H,CAAY8E,IAAZ9E,EAAdyjI,CAAR,CACI,IAAIzjI,EAAKqvI,gBAAT,IAA6BrvI,EAAKqvI,gBAALrvI,GAAwB,CAArD,KACFowI,IAAWxpC,IAAQwpC,CAARxpC,EAAkBipC,EAAU,CAAVA,CAAlBjpC,CADT,GAGJ5pD,IAAI8pD,IAAQ9pD,CAAR8pD,EAAW28B,MAAM2M,CAAN3M,EAAgBzjI,EAAKyvI,eAALzvI,CAAqB8E,IAArB9E,EAAhByjI,CAAX38B,CAHA,EAIA9mG,EAAKy+H,OAALz+H,KACFg9C,IAAIihF,QAAUjhF,CAAVihF,EAAaj+H,EAAK4N,IAAL5N,CAAU8E,IAAV9E,EAAbi+H,CADFj+H,CAJA,CAQE,IAAAwY,6BAAA;EAAA,UAACm5H,QAAD;EAAA,UAAKC,QAAL;EAAA,UAASC,QAAT;EAAA,UAAaC,QAAb,CAEN13I,IAAI4F,EAAKgwI,mBAALhwI,CAAyBzB,KAAzByB,CAA+B2xI,CAA/B3xI,CAAJ5F,EACAgE,IAAI4B,EAAKgwI,mBAALhwI,CAAyBzB,KAAzByB,CAA+B4xI,CAA/B5xI,CADJ5F,EAEAovB,IAAIs9E,IAAQF,IAAQxoG,CAARwoG,EAAW8qC,CAAX9qC,CAARE,EAA8BF,IAAQxsG,CAARwsG,EAAW5mG,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsB6xI,CAAtB7xI,CAAX4mG,CAA9BE,
CAFJ1sG,EAGAwX,IAAI5R,EAAKgwI,mBAALhwI,CAAyBzB,KAAzByB,CAA+B8xI,CAA/B9xI,CAHJ5F,CAKA,IAAMqqC,IAAImiE,IAAQh1F,CAARg1F,EAAW5mG,EAAKi8H,UAALj8H,CAAgBzB,KAAhByB,CAAsBwpB,CAAtBxpB,CAAX4mG,CAAV,CAEA,QAAQniE,GAAGA,GAAGjb,EAAd;OAvDKzR,CAAP;KA1H0B7H,EAqL5BihI,WAAAA,UAAAA,GAAA;EACE,QAAM90C,MACJinC,OAAOjkI,KAAKikI,OACZrH,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GACZ8Q,qBAAqB9Q,oBAAoB7/H,KAAK2wI,mBAAzB9Q,GACrBT,SAASp/H,KAAKo/H,SACde,mBAAmBL,qBAAqB9/H,KAAKmgI,iBAA1BL,GACnB2P,sBAAsB3P,qBAAqB9/H,KAAKyvI,oBAA1B3P,GACtBT,iBAAiBS,qBAAqB9/H,KAAKq/H,eAA1BS,GACjB+R,gBAAgB7xI,KAAK6xI,gBACrBvR,mBAAmBP,qBAAqB//H,KAAKsgI,iBAA1BP,GACnB4P,sBAAsB5P,qBAAqB//H,KAAK2vI,oBAA1B5P,GACtBL,iBAAiBK,qBAAqB//H,KAAK0/H,eAA1BK,GACjB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrBM,kBAAkBL,oBAAoBhgI,KAAKqgI,gBAAzBL,GAClB4P,qBAAqB5P,oBAAoBhgI,KAAK4vI,mBAAzB5P,GACrBR,gBAAgBQ,oBAAoBhgI,KAAKw/H,cAAzBQ,GAChB6P,SAAS7vI,KAAK6vI,SACdG,kBAAkBhwI,KAAKgwI,kBACvBa,gBAAgB7wI,KAAK6wI,gBAlBvB;EAAA,QAoBM7T,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CApBnB,CAsBA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA5M0BnsF,EACrBihI,WAAAA,GAAY,UADSjhI,GA8M9B;IA9M8B4/H,QAA9B,eA+Mc9oC,cAAcmqC,UA0D5B;EAEE,YAAA,CAAY90C,CAAZ;aAC0C,MAApCA,EAAO6zC,cAA6B,IACtC3vI,QAAQ8Z,IAAR9Z,CACI,gHADJA,CADsC,EAKxC87F,EAAO0wC,IAAP1wC,GAAc,IAAI80C,QAAJ,CAAa90C,CAAb,CAL0B,EAMxCvsF,MAAAA,KAAAA,EAAMusF,CAANvsF;EA6HJ,UAtI0BI,aAAAA,GAAAA,GAaxB6hI,WAAAA,KAAAA,GAAA,UAAKjhI,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACmB,cAAzB/X,EAAK+sI,IAAL/sI,CAAUsvI,WAAe,KAC3B/f,QAAYvvH,EAAK+sI,IAAL/sI,CAAUsvI,WAAtB/f,GACAvvH,EAAK+sI,IAAL/sI,CAAUsvI,WAAVtvI,GAAwB,IAFG,GAIS,QAAlCA,EAAK+sI,IAAL/sI,CAAUuvI,oBAAwB,KACpChgB,QAAYvvH,EAAK+sI,IAAL/sI,CAAUuvI,oBAAtBhgB,GACAvvH,EAAK+sI,IAAL/sI,CAAUuvI,oBAAVvvI,GAAiC,IAFG,CAJT,CAQ7B,IAAMk+C,IAAiB,QAAVq0D,CAAU,GAAO,IAAP,GAAcA,EAAar0D,IAAlD;EAAA,UACMutD,IAAqB,QAAV8G,CAAU,GAAO,IAAP,GAAcA,EAAiB9G,QAD1D;EAAA,UAEMsgC,IACQ,QAAVx5B,CAAU,GAAO,IAAP,GAAcA,EAAqBw5B,YAHjD,CAIA,OAAOj8H,WAAAA,CAAMxR,IAANwR,KAAAA,EAAAA,EAAWgB,CAAXhB,IAAoBouC,SAAMutD,aAAUsgC,iBAApCj8H,CAAP;OAbKiI,CAAP;KAdsB7H,EA+BxB1M,qBAAAA,CAAIuuI,WAAJvuI,SAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuBikI,KAA/B;2CADF9/H,CA/BwB0M,EAmCxB1M,qBAAAA,CAAIuuI,WAAJvuI,cAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuB48H,UAA/B;2CADFz4H,CAnCwB0M,EAuCxB1M,qBAAAA,CAAIuuI,WAAJvuI,uBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuB2wI,mBAA/B;2CADFxsI,CAvCwB0M,EA2CxB1M,qBAAAA,CAAIuuI,WAAJvuI,WAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuBo/H,OAA/B;2CADFj7H,CA3CwB0M,EA+CxB1M,qBAAAA,CAAIuuI,WAAJvuI,qBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuBmgI,iBAA/B;2CADFh8H,CA/CwB0M,EAmDxB1M,qBAAAA,CAAIuuI,WAAJvuI,wBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuByvI,oBAA/B;2CADFtrI,CAnDwB0M,EAuDxB1M,qBAAAA,CAAIuuI,WAAJvuI,mBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuBq/H,eAA/B;2CADFl7H,CAvDwB0M,EA2DxB1M,qBAAAA,CAAIuuI,WAAJvuI,kBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuB6xI,cAA/B;2CADF1tI,CA3DwB0M,EA+DxB1M,qBAAAA,CAAIuuI,WAAJvuI,qBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuBsgI,iBAA/B;2CADFn8H,CA/DwB0M,EAmExB1M,qBAAAA,CAAIuuI,WAAJvuI,wBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuB2vI,oBAA/B;2CADFxrI,CAnEwB0M,EAuExB1M,qBAAAA,CAAIuuI,WAAJvuI,mBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuB0/H,eAA/B;2CADFv7H,CAvEwB0M,EA2ExB1M,qBAAAA,CAAIuuI,WAAJvuI,oBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuBqgI,gBAA/B;2CADFl8H,CA3EwB0M,EA+ExB1M,qBAAAA,CAAIuuI,WAAJvuI,uBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuB4vI,mBAA/B;2CADFzrI,CA/EwB0M,EAmFxB1M,qBAAAA,CAAIuuI,WAAJvuI,kBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuBw/H,cAA/B;2CADFr7H,CAnFwB0M,EAuFxB1M,qBAAAA,CAAIu
uI,WAAJvuI,WAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuB6vI,OAA/B;2CADF1rI,CAvFwB0M,EA2FxB1M,qBAAAA,CAAIuuI,WAAJvuI,oBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuBgwI,gBAA/B;2CADF7rI,CA3FwB0M,EA+FxB1M,qBAAAA,CAAIuuI,WAAJvuI,kBAAAA,SAAA;EACE,aAAQnE,KAAK0tI,IAAL1tI,CAAuB6wI,cAA/B;2CADF1sI,CA/FwB0M,EAmGxB6hI,WAAAA,UAAAA,GAAA;EACE,QAAM11C,MACJinC,OAAOjkI,KAAKikI,OACZrH,YAAYiD,oBAAoB7/H,KAAK48H,UAAzBiD,GACZ8Q,qBAAqB9Q,oBAAoB7/H,KAAK2wI,mBAAzB9Q,GACrBT,SAASp/H,KAAKo/H,SACde,mBAAmBL,qBAAqB9/H,KAAKmgI,iBAA1BL,GACnB2P,sBAAsB3P,qBAAqB9/H,KAAKyvI,oBAA1B3P,GACtBT,iBAAiBS,qBAAqB9/H,KAAKq/H,eAA1BS,GACjB+R,gBAAgB7xI,KAAK6xI,gBACrBvR,mBAAmBP,qBAAqB//H,KAAKsgI,iBAA1BP,GACnB4P,sBAAsB5P,qBAAqB//H,KAAK2vI,oBAA1B5P,GACtBL,iBAAiBK,qBAAqB//H,KAAK0/H,eAA1BK,GACjB3uB,qBAAqB2uB,qBAAqB//H,KAAKoxG,mBAA1B2uB,GACrBM,kBAAkBL,oBAAoBhgI,KAAKqgI,gBAAzBL,GAClB4P,qBAAqB5P,oBAAoBhgI,KAAK4vI,mBAAzB5P,GACrBR,gBAAgBQ,oBAAoBhgI,KAAKw/H,cAAzBQ,GAChB6P,SAAS7vI,KAAK6vI,SACdG,kBAAkBhwI,KAAKgwI,kBACvBa,gBAAgB7wI,KAAK6wI,gBAlBvB;EAAA,QAoBM7T,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CApBnB,CAuBA,cAFOusH,EAAiB0Q,MACxBvpI,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KA3HsBnsF,EA8HjB6hI,YAAAA,GAAP,UACI31C,CADJ,EAEIC,CAFJ;EAME,WAHgC,MAA5BA,EAAsB40C,aAAM,KAC9B50C,EAAuB6zC,cAAvB7zC,GAA2B,CADG,GAGzB,IAAID,CAAJ,CAAQC,CAAR,CAAP;KApIsBnsF,EACjB6hI,WAAAA,GAAY,MADK7hI,GAsI1B;IAtI0Bq9H,IAA1B,eAuIcvmC,cAAc+qC,MAc5B;EAIE,YAAA,CAAY11C,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEE9P,EAAKitI,KAALjtI,GAAaq8F,EAAO4wC,KAApBjtI;EAoKJ,UA1KqCkQ,aAAAA,GAAAA,GASnC1M,qBAAAA,CAAIwpI,WAAJxpI,aAAAA,SAAA;EAME,WADA,IAAM0pI,MAAN,OAAA,EACmB10H,IAAAnZ,KAAK4tI,KAAL5tI,CAAWnC,KAAXmC,GAAmBkG,OAAnBlG,EAAnB,EAAmBgE,YAAnB,EAAmBA,GAAnB;EAAK,YAAM0pI,QAAN,CACC7xI,MAAMC,OAAND,CAAc6xI,EAAKG,SAAnBhyI,IACFgyI,EAAU7xI,IAAV6xI,MAAAA,CAAAA,CAAAA,EAAkBH,EAAKG,SAAvBA,CADEhyI,GAGFgyI,EAAU7xI,IAAV6xI,CAAeH,EAAKG,SAApBA,CAHEhyI;EAMN,cAAOgyI,CAAP;2CAbF1pI,CATmC0M,EAyBnC88H,WAAAA,KAAAA,GAAA,UAAKl8H,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAMV,WAJA,IAAI20H,KADJ57H,IAASA,GACW5T,MAAM,EAA1B,EAGM80I,MAHN,OAAA,EAImBx5H,IAAAxY,EAAKitI,KAALjtI,CAAW9C,KAAX8C,GAAmBuF,OAAnBvF,EAAnB,EAAmBqD,YAAnB,EAAmBA,GAAnB;EAAK,YAAM0pI,QAAN,CACC7xI,MAAMC,OAAND,CAAc6xI,EAAKG,SAAnBhyI,IACF82I,EAAa32I,IAAb22I,CAAkBtF,EAAOl2F,MAAPk2F,CAAc,CAAdA,EAAiBK,EAAKG,SAALH,CAAetzI,MAAhCizI,CAAlBsF,CADE92I,GAGF82I,EAAa32I,IAAb22I,CAAkBtF,EAAOl2F,MAAPk2F,CAAc,CAAdA,EAAiB,CAAjBA,CAAlBsF,CAHE92I;EAMN82I,SAAazsI,OAAbysI,GAKA,KAFA,IACIC,CADJ,EAAMC,MAAN,EAES93I,IAAI,CAAb,EAAgBA,IAAI4F,EAAKitI,KAALjtI,CAAWvG,MAA/B,IAAyCW,CAAzC,EAA4C;EACpC2yI,YAAO/sI,EAAKitI,KAALjtI,CAAW5F,CAAX4F,CAAP+sI,CACNL,IAASsF,EAAa53I,CAAb43I,CAATtF,EAGEuF,IADQ,MAAN73I,CAAM,IACM0W,EAAO,CAAPA,GAAWtK,OAAOkmI,EADxB,IAGMuF,EAAW,CAAXA,GAAezrI,OAAOkmI,EALtCA,EAOAuF,IAAalF,EAAKzuI,IAALyuI,CAAUkF,CAAVlF,EAAsBx6B,CAAtBw6B,CAPbL,EAQAwF,EAAgB72I,IAAhB62I,CAAqBD,EAAW/0I,KAAX+0I,CAAiB,CAAjBA,CAArBC,CARAxF;EAYFA,aAAAA,CACA,KAAyB,SAAA,EAAA16G,IAAAkgH,EAAgBh1I,KAAhBg1I,GAAwB3sI,OAAxB2sI,EAAzB,EAAyBtgH,YAAzB,EAAyBA,GAAzB;EAAK,YAAMugH,QAAN,CACHzF,EAAOrxI,IAAPqxI,MAAAA,CAAAA,CAAAA,EAAeyF,CAAfzF;EAEF,eAAQuF,EAAW,CAAXA,GAAezrI,OAAOkmI,EAA9B;OApCK30H,CAAP;KA1BiC7H,EAkE5B88H,WAAAA,MAAAA,GAAP,UAAaxtH,CAAb;EAOE,QAAImlH,CAAJ,CANI6I,gBAAgBhuH,CAAhBguH,MAGFhuH,IAAcA,EAAuB,CAAvBA,CAHZguH,GAKJhuH,IAAaA,CALTguH,CAOJ,KAAmB,SAAA,EAAAh1H,IAAAnZ,KAAK4tI,KAAxB,EAAmB5pI,YAAnB,EAAmBA,GAAnB;EAAK,UAAM0pI,QAAN,CAEHA,EAAK55B,KAAL45B,CAAWvtH,CAAXutH,GAEEpI,IADEzpI,MAAMC,OAAND,CAAc6xI,EAAKG,SAAnBhyI,IACU6xI,EAAKG,SAALH,CAAe,CAAfA,CADV7xI,GAGU6xI,EAAKG,SAJnBH,EAMAvtH,KAAcA,EAAW,CA
AXA,GAAemlH,EAN7BoI;EAQF1tI,UAAKyyG,KAALzyG,IAAa,CAAbA;KApFiC6Q,EAuFnC88H,WAAAA,UAAAA,GAAA;EAEE,SADA,IAAMoF,MAAN,OAAA,EACmB55H,IAAAnZ,KAAK4tI,KAAxB,EAAmB5pI,YAAnB,EAAmBA,GAAnB;EAAK,UAAM0pI,QAAN,CACHqF,EAAY/2I,IAAZ+2I,GACEj2C,WAAa98F,KAAKslG,YAALtlG,IACbg9F,QAAU0wC,EAAKnoC,SAALmoC,IAFZqF;EAKF,SAAM/1C,MAAoC4wC,OAASmF,GAAnD;EAAA,QACM/V,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CADnB,CAGA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAlGiCnsF,EAqG5B88H,YAAAA,GAAP,UACI5wC,CADJ,EAEIC,CAFJ,EAGIyI,CAHJ;uBAGIA,QAEF,KADA,IAAMmoC,MAAN,OAAA,EACyBz0H,IAAC6jF,EAAc4wC,KAAxC,EAAyB5pI,YAAzB,EAAyBA,GAAzB;EAAK,UAAMurI,QAAN,CACH3B,EAAM5xI,IAAN4xI,CAAW7T,YAAYwV,CAAZxV,EAAwBt0B,CAAxBs0B,CAAX6T;EAEF,YAAO,IAAI7wC,CAAJ,GAAS6wC,UAAT,CAAP;KA7GiC/8H,EAgHnC1M,qBAAAA,CAAIwpI,WAAJxpI,oBAAAA,SAAA;EACE,WAAKnE,KAAK+O,SAAV,EACE,SAAA,CAGF,KADA,IAAMwgE,MAAN,OAAA,EACmBp2D,IAAAnZ,KAAK4tI,KAAxB,EAAmB5pI,YAAnB,EAAmBA,GAAnB;EAAK,YAAM0pI,QAAN,CACHn+D,EAAQvzE,IAARuzE,MAAAA,CAAAA,CAAAA,EAAgBm+D,EAAKh7B,gBAArBnjC;EAEF,cAAOA,CAAP;2CARFprE,CAhHmC0M,EA2HnC1M,qBAAAA,CAAIwpI,WAAJxpI,uBAAAA,SAAA;EAEE,WADA,IAAMorE,MAAN,OAAA,EACmBp2D,IAAAnZ,KAAK4tI,KAAxB,EAAmB5pI,YAAnB,EAAmBA,GAAnB;EAAK,YAAM0pI,QAAN,CACHn+D,EAAQvzE,IAARuzE,MAAAA,CAAAA,CAAAA,EAAgBm+D,EAAK/6B,mBAArBpjC;EAEF,YAAKvvE,KAAK+O,SAAV,EAAqB;EAEnB,aADA,IAAM2jG,MAAN,OAAA,EACmB//E,IAAA3yB,KAAK4tI,KAAxB,EAAmBr7G,YAAnB,EAAmBA,GAAnB;EAAWm7G,kBAAAA,CACTh7B,EAAiB12G,IAAjB02G,MAAAA,CAAAA,CAAAA,EAAyBg7B,EAAKh7B,gBAA9BA;EAEF,gBAAOA,EAAiBvrG,MAAjBurG,CAAwBnjC,CAAxBmjC,CAAP;EAEF,cAAOnjC,CAAP;2CAZFprE,CA3HmC0M,EA+InC88H,WAAAA,WAAAA,GAAA;EAEE,SADA,IAAMp+D,MAAN,OAAA,EACmBp2D,IAAAnZ,KAAK4tI,KAAxB,EAAmB5pI,YAAnB,EAAmBA,GAAnB;EAAK,UAAM0pI,QAAN,CACHn+D,EAAQvzE,IAARuzE,MAAAA,CAAAA,CAAAA,EAAgBm+D,EAAKn+D,OAArBA;EAEF,YAAOulC,cAAcvlC,CAAdulC,CAAP;KApJiCjkG,EA6JnC88H,WAAAA,WAAAA,GAAA,UAAWp+D,CAAX;EAEE,SADA,IAAMyjE,MAAN,OAAA,EACmB75H,IAAAnZ,KAAK4tI,KAAxB,EAAmB5pI,YAAnB,EAAmBA,GAAnB,EAGE,KAHG,IAAM0pI,QAAN,EACGuF,IAAYvF,EAAKn+D,OAALm+D,CAAatzI,MAD5B,EAEG84I,IAAe3jE,EAAQp4B,MAARo4B,CAAe0jE,CAAf1jE,CAFlB,EAGMx0E,IAAI,CAAb,EAAgBA,IAAI2yI,EAAKn+D,OAALm+D,CAAatzI,MAAjC,IAA2CW,CAA3C,EACEi4I,EAAOh3I,IAAPg3I,EAAatF,EAAKn+D,OAALm+D,CAAa3yI,CAAb2yI,GAAiBwF,EAAan4I,CAAbm4I,EAA9BF,EAGJ99B,cAAc89B,CAAd99B;KAtKiCrkG,EAC5B88H,WAAAA,GAAY,iBADgB98H,GA0KrC;IA1KqC4/H,QAArC,CA6KA,4BAAA,CACI53H,CADJ,EACwBovF,CADxB,EACsCmE,CADtC,EAEI1oF,CAFJ;EAGE,YAAA;EACE,WAAOsgH,QAAUnrH,GAAVmrH,EAAkBt8B,UAAUO,CAAVP,CAAlBs8B,CAAP;EAEF,wBALoC53B,4BAClC1oF,QAIEA,IAAQ,CAAZ,EAAe;EAEb,SADA,IAAMm7B,MAAN,EACS9jD,IAAI,CAAb,EAAgBA,IAAI2oB,CAApB,EAA2B3oB,GAA3B,EACE8jD,EAAK7iD,IAAL6iD,CAAUklF,aAAeoP,CAAfpP,EAA8BlrH,CAA9BkrH,EAAoC33B,CAApC23B,CAAVllF,EAGF,OADAA,EAAKz+C,OAALy+C,CAAa,UAAA60B,CAAA;EAAK,aAAAy8C,KAASz8C,CAATy8C,CAAA;OAAlBtxE,GACOA,CAAP;EAEA,UAAOsxE,KAAS4T,aAAeoP,CAAfpP,EAA8BlrH,CAA9BkrH,EAAoC33B,CAApC23B,CAAT5T,CAAP;iBAhBUxoB,cAAcgmC,iBCh3E5B;EAGE,YAAA,CAAY3wC,CAAZ;EAAA,YAQEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SARF,QASE9P,EAAKmwG,KAALnwG,GAAaq8F,EAAO8T,KAApBnwG;EAmFJ,UA/FsCkQ,aAAAA,GAAAA,GAepCuiI,WAAAA,MAAAA,GAAA,UAAMjzH,CAAN;EACEngB,SAAKyyG,KAALzyG,IAAa,CAAbA;KAhBkC6Q,EAqBpC1M,qBAAAA,CAAIivI,WAAJjvI,aAAAA,SAAA;EAIE,aAAkB,QAAdnE,KAAK8wG,KAAS,IACT9wG,KAAK8wG,KAAL9wG,CAAW+O,SADpB;cAOF,UAAchL,CAAd;EAIoB,cAAd/D,KAAK8wG,KAAS,KAChB9wG,KAAK8wG,KAAL9wG,CAAW+O,SAAX/O,GAAuB+D,CADP;2CAfpBI,CArBoC0M,EAyCpC1M,qBAAAA,CAAIivI,WAAJjvI,oBAAAA,SAAA;EACE,aAAOnE,KAAK8wG,KAAL9wG,CAAW0yG,gBAAlB;2CADFvuG,CAzCoC0M,EA8CpC1M,qBAAAA,CAAIivI,WAAJjvI,uBAAAA,SAAA;EACE,aAAOnE,KAAK8wG,KAAL9wG,CAAW2yG,mBAAlB;2CADFxuG,CA9CoC0M,EAmDpC1M,qBAAAA,CAAIivI,WAAJjvI,WAAAA,SA
AA;EAEE,aAAQnE,KAAK8wG,KAAL9wG,CAAmB0xG,QAA3B;2CAFFvtG,CAnDoC0M,EA0DpC1M,qBAAAA,CAAIivI,WAAJjvI,UAAAA,SAAA;EACE,aAAOnE,KAAK8wG,KAAL9wG,CAAWsvE,MAAlB;2CADFnrE,CA1DoC0M,EAgEpCuiI,WAAAA,WAAAA,GAAA;EACE,WAAOpzI,KAAK8wG,KAAL9wG,CAAWo5H,UAAXp5H,EAAP;KAjEkC6Q,EAoEpCuiI,WAAAA,WAAAA,GAAA,UAAW7jE,CAAX;EACEvvE,SAAK8wG,KAAL9wG,CAAW+zG,UAAX/zG,CAAsBuvE,CAAtBvvE;KArEkC6Q,EAwEpCuiI,WAAAA,UAAAA,GAAA;EACE,QAAMp2C,MACJ8T,SACEhU,WAAa98F,KAAK8wG,KAAL9wG,CAAWslG,YAAXtlG,IACbg9F,QAAUh9F,KAAK8wG,KAAL9wG,CAAWulG,SAAXvlG,MAHd;EAAA,QAMMg9H,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CANnB,CAQA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KAjFkCnsF,EAoF7BuiI,YAAAA,GAAP,UACIr2C,CADJ,EAEIC,CAFJ,EAGIyI,CAHJ;uBAGIA,QACF,IACMqL,IAAQipB,YADM/8B,EAAc8T,KACpBipB,EAAyBt0B,CAAzBs0B,CADd,QAEO/8B,EAAc8T,MACrB,IAAMuiC,MAAaviC,UAAnB,CAEA,OADA3sG,OAAO6M,MAAP7M,CAAckvI,CAAdlvI,EAAyB64F,CAAzB74F,GACO,IAAI44F,CAAJ,CAAQs2C,CAAR,CAAP;KA7FkCxiI,GA+FtC;IA/FsCshG,MAAtC;EAAA;EA+IE,YAAA,CAAYnV,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF,QAEE9P,EAAK2wG,eAAL3wG,IAAuB,CAAvBA;EAqDJ,UAzDqCkQ,aAAAA,GAAAA,GAOnCyiI,WAAAA,MAAAA,GAAA,UAAMnzH,CAAN;EAEE,SADAA,IAAa06G,mBAAmB16G,CAAnB06G,GACEzgI,SAAS,CAAxB,EACE,MAAM,IAAIsqG,UAAJ,CACF,kFACexrE,KAAKE,SAALF,CAAe/Y,CAAf+Y,CAFb,CAAN,CAIFl5B,KAAKqxG,SAALrxG,MAAmB/D,OAAOkkB,IAA1BngB,CACA,IAAMuzI,KAAmBpzH,EAAW,CAAXA,GAAehZ,OAAOgZ,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,EAA/C,CACKngB,KAAK8wG,KAAL9wG,CAAWyyG,KAAXzyG,KACHA,KAAK8wG,KAAL9wG,CAAW8zG,KAAX9zG,CAAiBuzI,CAAjBvzI,GACAA,KAAK8wG,KAAL9wG,CAAWyyG,KAAXzyG,IAAmB,CAFhBA,GAILyQ,WAAAA,CAAMqjG,KAANrjG,KAAAA,KAAAA,EAAY0P,CAAZ1P,CAJKzQ;KAhB4B6Q,EAuBnCyiI,WAAAA,mBAAAA,GAAA,UAAmBnzH,CAAnB;EAEE,QAAMozH,MADNpzH,IAAa06G,mBAAmB16G,CAAnB06G,GACuB,IAAI1zH,OAAOgZ,EAAWtiB,KAAXsiB,CAAiB,CAAjBA,EAA/C;EAAA,QACMqzH,IACFxzI,KAAK8wG,KAAL9wG,CAAWm0G,kBAAXn0G,CAA8BuzI,CAA9BvzI,CAFJ;EAAA,QAGMyzI,IAAYtzH,EAAW,CAAXA,CAHlB,CAIA,QAAQqzH,EAAiB,CAAjBA,GAAqBC,GAAWtsI,OAAOqsI,EAAiB31I,KAAjB21I,CAAuB,CAAvBA,EAA/C;KA7BiC3iI,EAgCnCyiI,WAAAA,KAAAA,GAAA,UAAK7hI,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EAqBV,aANI22H,IAT0B,UAAC59H,CAAD,EAAiB47H,CAAjB;EAM5B,gBADetQ,oBAAoBp8H,EAAKmwG,KAALnwG,CAAW1B,IAAX0B,CAAgB8Q,CAAhB9Q,EAAwBuyG,CAAxBvyG,CAApBo8H,MACf;SAGEsS,EAbJ59H,IAASsrH,oBAAoBtrH,CAApBsrH,CAaLsS,IAAAA,GAAsB,CAAtBA,EAA+C,IAA/CA,EACI,IADJA,GAC0B,CAD1BA,GAEI,CAFJA,EAGiB,CAHjBA,CAMJ;OArBK32H,CAAP;KAjCiC7H,EAC5ByiI,WAAAA,GAAY,iBADgBziI,GAyDrC;IAzDqCuiI,QA7IrC,eAuMczrC,cAAc2rC,iBAG5B,IAAaI,mCAAmC,OAAO,OAAO,UAAU,MAAxE,CACA,oCAAA,CAA4C3vI,CAA5C;EACE4vI,4BACID,+BADJC,EACqC,wBADrCA,EAC+D5vI,CAD/D4vI;EAkBF;EAUE,YAAA,CAAY32C,CAAZ;EAAA,YACEvsF,MAAAA,KAAAA,EAAMusF,CAANvsF,SADF;EAAA,QAWQo6G,IAAc7tB,EAAO8T,KAAP9T,CAAauI,SAAbvI,EAXtB,CA0BE,IAdAr8F,EAAKizI,YAALjzI,GACIo5H,cACKj9B,WAAWE,EAAO8T,KAAP9T,CAAasI,YAAbtI,IAA6BA,QAAQ6tB,GADrDkP,CADJp5H,EAIAkqH,EAAyBmiB,WAAzBniB,IACmC,MAA/BA,EAAyBmiB,WAL7BrsI,EAMAA,EAAKkzI,aAALlzI,GACIo5H,cACKj9B,WAAWE,EAAO8T,KAAP9T,CAAasI,YAAbtI,IAA6BA,QAAQ6tB,GADrDkP,CAPJp5H,EAUAA,EAAKizI,YAALjzI,CAAkBpC,IAAlBoC,GAAyB,aAAaA,EAAKizI,YAALjzI,CAAkBpC,IAVxDoC,EAWAA,EAAKkzI,aAALlzI,CAAmBpC,IAAnBoC,GAA0B,cAAcA,EAAKkzI,aAALlzI,CAAmBpC,IAX3DoC,EAYAmzI,4BAA4B92C,EAAO+2C,SAAnCD,CAZAnzI,EAaAA,EAAKozI,SAALpzI,GAAiBq8F,EAAO+2C,SAbxBpzI,EAcIq8F,EAAOztB,OAAX,EACE,MAAM,IAAIo1B,mBAAJ,CACF,iEADE,CAAN,QAGFhkG,EAAKiyG,SAALjyG,GAAiBq8F,EAAO8T,KAAP9T,CAAa6V,QAA9BlyG,EACAA,EAAKmtI,eAALntI,GAAuBq8F,EAAO8T,KAAP9T,CAAa8wC,eADpCntI,EAEAA,EAAKotI,WAALptI,GAAmBq8F,EAAO8T,KAAP9T,CAAa+wC,WAFhCptI,EAGAA,EAAK2wG,eAAL3wG,IAAuB,CAHvBA,EAIAA,EAAKqzI,UAALrzI,IAAkB,CAJlBA,EAKAA,EAAK0wG,SAAL1wG,GAAiBq8F,E
AAO8T,KAAP9T,CAAaqU,SAL9B1wG,EAMAA,EAAKisI,YAALjsI,GAAoB,IANpBA;EAiRJ,UAzTmCkQ,aAAAA,GAAAA,GAiDjC1M,qBAAAA,CAAI8vI,WAAJ9vI,aAAAA,SAAA;EACE,aAAOnE,KAAKg0I,UAAZ;cAGF,UAAcjwI,CAAd;EAIE/D,WAAKg0I,UAALh0I,GAAkB+D,CAAlB/D,EACyB,QAArBA,KAAK4zI,YAAgB,KACvB5zI,KAAK4zI,YAAL5zI,CAAkB+O,SAAlB/O,GAA8B+D,CADP,CADzB/D,EAI0B,QAAtBA,KAAK6zI,aAAiB,KACxB7zI,KAAK6zI,aAAL7zI,CAAmB+O,SAAnB/O,GAA+B+D,CADP,CAJ1B/D;2CARFmE,CAjDiC0M,EAkEjCojI,WAAAA,WAAAA,GAAA;EACE,WAAOj0I,KAAK4zI,YAAL5zI,CAAkBo5H,UAAlBp5H,GAA+BmH,MAA/BnH,CACHA,KAAK6zI,aAAL7zI,CAAmBo5H,UAAnBp5H,EADGA,CAAP;KAnE+B6Q,EAuEjCojI,WAAAA,WAAAA,GAAA,UAAW1kE,CAAX;EACE,QAAM2kE,IAAa3kE,EAAQn1E,MAA3B;EAAA,QACM+5I,IAAiB55I,KAAKkC,KAALlC,CAAW25I,IAAa,CAAxB35I,CADvB,CAEAyF,KAAK4zI,YAAL5zI,CAAkB+zG,UAAlB/zG,CAA6BuvE,EAAQ1xE,KAAR0xE,CAAc,CAAdA,EAAiB4kE,CAAjB5kE,CAA7BvvE,GACAA,KAAK6zI,aAAL7zI,CAAmB+zG,UAAnB/zG,CAA8BuvE,EAAQ1xE,KAAR0xE,CAAc4kE,CAAd5kE,CAA9BvvE,CADAA;KA1E+B6Q,EA8EjCojI,WAAAA,mBAAAA,GAAA,UAAmB9zH,CAAnB;EACE,QAOItS,CAPJ;EAAA,QAQIgjG,CARJ;EAAA,QASIu9B,CATJ;EAAA,QAAIgG,IACAp0I,KAAK4zI,YAAL5zI,CAAkBm0G,kBAAlBn0G,CAAqCmgB,CAArCngB,CADJ,CA0BA,OAxBMnE,MAAMC,OAAND,CAAcu4I,CAAdv4I,KAA8BA,MAAMC,OAAND,CAAcu4I,EAAY,CAAZA,CAAdv4I,CAA9BA,KACJu4I,KAAeA,EADXv4I,GAGNu4I,IAAcA,CAHRv4I,EAQFmE,KAAK+tI,WAAL/tI,IACFouI,IAAagG,EAAYv2I,KAAZu2I,CAAkB,CAAlBA,CAAbhG,EACAvgI,IAAcumI,EAAY,CAAZA,CAFZp0I,IAIF6N,IAAcumI,EAAY,CAAZA,CAZVv4I,EAcNgS,IAAcA,CAdRhS,EAeiB,aAAnBmE,KAAK+zI,SAAc,IACrBlmI,EAAYA,EAAYzT,MAAZyT,GAAqB,CAAjCA,KAAuC,CAAvCA,EACAgjG,KAAgBhjG,EAFK,IAIrBgjG,IAD2B,QAAlB7wG,KAAK+zI,SAAa,IACXlmI,GAAaA,EAAYhQ,KAAZgQ,GADF,IAGXA,EArBZhS,EAwBFmE,KAAK+tI,WAAL/tI,GACoB,QAAlBA,KAAK+zI,SAAa,GACbljC,EAAa1pG,MAAb0pG,CAAoBu9B,CAApBv9B,EAAgC1pG,MAAhC0pG,CAAuCu9B,EAAWvwI,KAAXuwI,EAAvCv9B,CADa,IAGdhjG,GAAa1G,OAAOinI,GAAYjnI,OAAOinI,EAAWvwI,KAAXuwI,GAJ7CpuI,GAMGsyG,iBAA+BzB,CAA/ByB,CANP;KAzG+BzhG,EAkHjCojI,WAAAA,MAAAA,GAAA,UACIxiI,CADJ,EAEIyhG,CAFJ;EAGE,QAAIw5B,IACU,QAAVx5B,CAAU,GAAO,IAAP,GAAcA,EAAqBw5B,YADjD;EAAA,QAEIC,IACU,QAAVz5B,CAAU,GAAO,IAAP,GAAcA,EAAkBy5B,SAH9C,CAIc,QAAVz5B,CAAU,KACZA,MADY,EAGd,IAAMu7B,IACFC,gBAAgBj9H,CAAhBi9H,EAAwBhC,CAAxBgC,EAAsC/B,CAAtC+B,EAAiD1uI,KAAK4sI,YAAtD8B,CADJ,CAWA,IATAj9H,IAASg9H,EAAah9H,MAAtBA,EACAi7H,IAAe+B,EAAa/B,YAD5Bj7H,EAEAk7H,IAAY8B,EAAa9B,SAFzBl7H,EAII5V,MAAMC,OAAND,CAAc4V,CAAd5V,MACF6wI,IAAgBj7H,EAAuC5T,KAAvC4T,CAA6C,CAA7CA,CAAhBi7H,EACAj7H,IAAUA,EAAuC,CAAvCA,CAFR5V,CAJJ4V,GASqB,QAAhBi7H,CAAgB,IAAgC,MAAxBA,EAAatyI,WACzB,QAAbuyI,CADJ,EAEE,OAAOl8H,WAAAA,CAAMvR,KAANuR,KAAAA,KAAAA,EAAYgB,CAAZhB,EAAoByiG,CAApBziG,CAAP,CAEF,IAAMk+H,MAAN;EAAA,QACMC,MADN,CAEA,IAAoB,QAAhBlC,CAAJ,EAA0B;EACxB,UAAMwC,IAAYxC,EAAatyI,MAA/B,CACA,IAAI80I,IAAY,CAAZA,GAAgB,CAApB,EACE,MAAM,IAAIxqC,UAAJ,CACF,+HADE,CAAN,CAKFwO,EAAqBw5B,YAArBx5B,GAAyBw5B,CAAzBx5B,EACAy7B,EAAiB3yI,IAAjB2yI,MAAAA,CAAAA,CAAAA,EAAyBjC,CAAzBiC,CADAz7B,CAEA,IAAMmhC,IAAc3H,EACI5qI,GADJ4qI,CACQ,UAAAtvF,CAAA;EAAS,eAAA,IAAI8jF,SAAJ,GAAejlI,OAAOmhD,EAAMnhD,OAA5B,CAAA;SADjBywI,CAApB,CAEA1sI,KAAK4zI,YAAL5zI,CAAkBguI,SAAlBhuI,GAA8Bq0I,EAAWx2I,KAAXw2I,CAAiB,CAAjBA,EAAoBnF,IAAY,CAAhCmF,CAA9Br0I,EACAA,KAAK6zI,aAAL7zI,CAAmBguI,SAAnBhuI,GAA+Bq0I,EAAWx2I,KAAXw2I,CAAiBnF,IAAY,CAA7BmF,CAD/Br0I,EAEA4uI,EAAgB5yI,IAAhB4yI,MAAAA,CAAAA,CAAAA,EAAwByF,CAAxBzF,CAFA5uI;EAIF,SAAiB,QAAb2sI,CAAJ,EACE,MAAM,IAAIhoC,mBAAJ,CACF,uEADE,CAAN,CAMF,KADA,IAAM2vC,IAAmB3F,EAAiB,CAAjBA,aAA+Bn7B,cAAxD,OAAA,EACqB+gC,KAArB,EAAqBvwI,YAArB,EAAqBA,GAArB;EACE,0BAAsBwvG,mBAAmB8gC,CAAzC,EACE,MAAM,IAAI5vC,UAAJ,CACF,8GADE,CAAN;EAMJ,SAAI4vC,CAAJ,EAAsB;EAEpB,UAAMxF,KAAar9H,GAAQtK,OAAOwnI,EAAlC;EAAA,UACMI,IAAgB/uI,KAAKqxG,SAALrxG,CAAemH,MAAfnH,CAAsB4uI,CAAtB5uI,C
ADtB;EAAA,UAWMgvI,IAAoBhvI,KAAKqxG,SAX/B,CAYArxG,KAAKqxG,SAALrxG,GAAiB+uI,CAAjB/uI,CACA,IAAM8R,IACFrB,WAAAA,CAAMvR,KAANuR,KAAAA,KAAAA,EAAYq+H,CAAZr+H,EAAsDyiG,CAAtDziG,CADJ,CAGA,OADAzQ,KAAKqxG,SAALrxG,GAAiBgvI,CAAjBhvI,EACO8R,CAAP;EAEA,YAAOrB,WAAAA,CAAMvR,KAANuR,KAAAA,KAAAA,EAAYgB,CAAZhB,EAAoByiG,CAApBziG,CAAP;KAhM6BI,EAoMjCojI,WAAAA,KAAAA,GAAA,UAAKxiI,CAAL,EAA8ByhG,CAA9B;EAAA,gBAAA,CACE,OAAOx6F,KAAK;EACV,UAAsB,QAAlBw6F,EAAar0D,IAAjB,EACE,MAAM,IAAI8lD,mBAAJ,CACF,0EADE,CAAN,CAIF,IAEIvzF,CAFJ;EAAA,UAGIojI,CAHJ;EAAA,UAgBInH,CAhBJ;EAAA,UA8BIv7H,CA9BJ;EAAA,UAAM46H,IAAex5B,EAAqBw5B,YAA1C,CAIA,IAAoB,QAAhBA,CAAJ,EACEt7H,IAAIzQ,EAAKizI,YAALjzI,CAAkB1B,IAAlB0B,CAAuB8Q,CAAvB9Q,EAA+BuyG,CAA/BvyG,CAAJyQ,EACAojI,IAAO7zI,EAAKkzI,aAALlzI,CAAmB1B,IAAnB0B,CAAwB8Q,CAAxB9Q,EAAgCuyG,CAAhCvyG,CADPyQ,CADF,KAGO;EACL,YAAMqjI,IAAe/H,EAAa7uI,KAAb6uI,CAAmB,CAAnBA,EAAsBA,EAAatyI,MAAbsyI,GAAsB,CAA5CA,CAArB;EAAA,YACMgI,IAAgBhI,EAAa7uI,KAAb6uI,CAAmBA,EAAatyI,MAAbsyI,GAAsB,CAAzCA,CADtB,CAEAt7H,IAAIzQ,EAAKizI,YAALjzI,CAAkB1B,IAAlB0B,CACA8Q,CADA9Q,EACQwD,OAAO6M,MAAP7M,CAAc+uG,CAAd/uG,IAAuBuoI,cAAc+H,GAArCtwI,CADRxD,CAAJyQ,EAEAojI,IAAO7zI,EAAKkzI,aAALlzI,CAAmB1B,IAAnB0B,CACH8Q,CADG9Q,EACKwD,OAAO6M,MAAP7M,CAAc+uG,CAAd/uG,IAAuBuoI,cAAcgI,GAArCvwI,CADLxD,CAFPyQ;EAkCF,cA3BIzQ,EAAKotI,WAALptI,KACE9E,MAAMC,OAAND,CAAcuV,CAAdvV,MACFwxI,IAAUj8H,EAAevT,KAAfuT,CAAqB,CAArBA,EAAwBjK,MAAxBiK,CAAgCojI,EAAkB32I,KAAlB22I,CAAwB,CAAxBA,CAAhCpjI,CADRvV,GAIJuV,IAAKA,EAAe,CAAfA,CAJDvV,EAKJ24I,IAAQA,EAAkB,CAAlBA,CANN7zI,GASAA,EAAKmtI,eAALntI,KACF6zI,IAAOpH,QAAYoH,CAAZpH,EAA4B,CAA5BA,CADLzsI,CATAA,EAcmB,aAAnBA,EAAKozI,SAAc,GACrBjiI,IAAS+1H,aAAez2H,GAAaojI,EAA5B3M,CADY,GAEO,UAAnBlnI,EAAKozI,SAAc,GAC5BjiI,IAAS21F,IAAQr2F,CAARq2F,EAAqB+sC,CAArB/sC,CADmB,GAEA,UAAnB9mG,EAAKozI,SAAc,GAC5BjiI,IAASy1F,IAAQG,UAAU,EAAVA,CAARH,EAAwBE,IAAQr2F,CAARq2F,EAAqB+sC,CAArB/sC,CAAxBF,CADmB,GAEA,UAAnB5mG,EAAKozI,SAAc,GAC5BjiI,IAASy1F,IAAQn2F,CAARm2F,EAAqBitC,CAArBjtC,CADmB,GAED,QAAlB5mG,EAAKozI,SAAa,KAC3BjiI,KAAUV,GAAaojI,EADI,CAtBzB7zI,EA2BAA,EAAKotI,WAALptI,GACoB,QAAlBA,EAAKozI,SAAa,GACZjiI,EAAoB3K,MAApB2K,CAA2Bu7H,CAA3Bv7H,CADY,IAGdA,GAAkB3K,OAAOkmI,EAJ/B1sI,GAMGmR,CANP;OAlDK4G,CAAP;KArM+B7H,EAiQjCojI,WAAAA,YAAAA,GAAA,UAAY5G,CAAZ;EACErtI,SAAK4zI,YAAL5zI,CAAkBysH,WAAlBzsH,IACAA,KAAK6zI,aAAL7zI,CAAmBysH,WAAnBzsH,EADAA;KAlQ+B6Q,EAsQjCojI,WAAAA,MAAAA,GAAA,UAAM9zH,CAAN;EAAA,gBAAA,CACEwzF,UAAU3zG,KAAK4zI,YAAL5zI,CAAkBzB,IAA5Bo1G,EAAkC;EAChChzG,QAAKizI,YAALjzI,CAAkBmzG,KAAlBnzG,CAAwBwf,CAAxBxf;OADFgzG,GAGAA,UAAU3zG,KAAK6zI,aAAL7zI,CAAmBzB,IAA7Bo1G,EAAmC;EACjChzG,QAAKkzI,aAALlzI,CAAmBmzG,KAAnBnzG,CAAyBwf,CAAzBxf;OADFgzG,CAHAA,EAMA3zG,KAAKyyG,KAALzyG,IAAa,CANb2zG;KAvQ+B9iG,EAkRjC1M,qBAAAA,CAAI8vI,WAAJ9vI,oBAAAA,SAAA;EACE,aAAOnE,KAAK4zI,YAAL5zI,CAAkB0yG,gBAAlB1yG,CAAmCmH,MAAnCnH,CACHA,KAAK6zI,aAAL7zI,CAAmB0yG,gBADhB1yG,CAAP;2CADFmE,CAlRiC0M,EAuRjC1M,qBAAAA,CAAI8vI,WAAJ9vI,uBAAAA,SAAA;EACE,aAAOnE,KAAK4zI,YAAL5zI,CAAkB2yG,mBAAlB3yG,CAAsCmH,MAAtCnH,CACHA,KAAK6zI,aAAL7zI,CAAmB2yG,mBADhB3yG,CAAP;2CADFmE,CAvRiC0M,EA8RjCojI,WAAAA,UAAAA,GAAA;EACE,QAAMj3C,MACJ+2C,WAAa/zI,KAAK+zI,WADpB;EAAA,QAIM/W,IAAavsH,WAAAA,CAAM80F,SAAN90F,KAAAA,KAAAA,CAJnB,CAMA,OADAtM,OAAO6M,MAAP7M,CAAc64F,CAAd74F,EAAsB64H,CAAtB74H,GACO64F,CAAP;KArS+BnsF,EAwS1BojI,YAAAA,GAAP,UACIl3C,CADJ,EAEIC,CAFJ;EAGE,QAAM23C,IACF5a,YAAY/8B,EAAc8T,KAA1BipB,CADJ,CAIA,WAFO/8B,EAAc8T,OAES,QAA1B9T,EAAqB4vC,YAAzB,EACE,MAAM,IAAIjoC,mBAAJ,CACF,0FADE,CAAN,CAKF,IAAM0uC,IAAkCr2C,CAAxC,CAEA,OADAq2C,EAAiBviC,KAAjBuiC,GAAqBsB,CAArBtB,EACO,IAAIt2C,CAAJ,CAAQs2C,CAAR,CAAP;KAvT+BxiI,EAC1BojI,WAAAA,GAAY,eADcpjI,GAyTnC;IAzTmCuiI,QAAnC,qBCpO2Bp2C;EAC
zB,SAAO,IAAIuZ,UAAJ,CAAevZ,CAAf,CAAP;EAcF,eAAA,CAAoBA,CAApB;EACE,SAAO,IAAInkD,KAAJ,CAAQmkD,CAAR,CAAP;EAYF,cAAA,CAAqBA,CAArB;EACE,SAAO,IAAI8/B,IAAJ,CAAS9/B,CAAT,CAAP;EAYF,mBAAA,CAA0BA,CAA1B;EACE,SAAO,IAAIkgC,SAAJ,CAAclgC,CAAd,CAAP;EAYF,mBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAI2/B,SAAJ,CAAY3/B,CAAZ,CAAP;EAYF,yBAAA,CAAgCA,CAAhC;EACE,SAAO,IAAIsgC,eAAJ,CAAoBtgC,CAApB,CAAP;EAcF,kBAAA,CAAuBA,CAAvB;EACE,SAAO,IAAI8lC,MAAJ,CAAW9lC,CAAX,CAAP;EAYF,kBAAA,CAAuBA,CAAvB;EACE,SAAO,IAAIikC,MAAJ,CAAWjkC,CAAX,CAAP;EAYF,2BAAA,CAAgCA,CAAhC;EACE,SAAO,IAAImkC,eAAJ,CAAoBnkC,CAApB,CAAP;EAYF,2BAAA,CAAgCA,CAAhC;EACE,SAAO,IAAI6lC,eAAJ,CAAoB7lC,CAApB,CAAP;EAYF,oBAAA,CAA2BA,CAA3B;EACE,SAAO,IAAIgmC,UAAJ,CAAehmC,CAAf,CAAP;EAYF,sBAAA,CAA6BA,CAA7B;EACE,SAAO,IAAImmC,YAAJ,CAAiBnmC,CAAjB,CAAP;EAeF,2BAAA,CAAgCA,CAAhC;EACE,SAAO,IAAIsmC,eAAJ,CAAoBtmC,CAApB,CAAP;EAcF,oBAAA,CAA2BA,CAA3B;EACE,SAAO,IAAIy+B,YAAJ,CAAez+B,CAAf,CAAP;EAYF,eAAA,CAAsBA,CAAtB;EACE,SAAO,IAAIknC,KAAJ,CAAUlnC,CAAV,CAAP;EAYF,mBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAI2mC,OAAJ,CAAY3mC,CAAZ,CAAP;EAYF,qBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAIqnC,OAAJ,CAAYrnC,CAAZ,CAAP;EAYF,sBAAA,CAA6BA,CAA7B;EACE,SAAO,IAAIunC,YAAJ,CAAiBvnC,CAAjB,CAAP;EAYF,mBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAI2nC,OAAJ,CAAY3nC,CAAZ,CAAP;EAYF,iBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAIqoC,OAAJ,CAAYroC,CAAZ,CAAP;EAYF,mBAAA,CAA0BA,CAA1B;EACE,SAAO,IAAI4oC,SAAJ,CAAc5oC,CAAd,CAAP;EAcF,eAAA,CAAoBA,CAApB;EACE,SAAO,IAAIiqC,GAAJ,CAAQjqC,CAAR,CAAP;EAYF,mBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAImqC,OAAJ,CAAYnqC,CAAZ,CAAP;EAYF,uBAAA,CAA4BA,CAA5B;EACE,SAAO,IAAIsqC,WAAJ,CAAgBtqC,CAAhB,CAAP;EAYF,mBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAIoqC,OAAJ,CAAYpqC,CAAZ,CAAP;EAYF,mBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAIqqC,OAAJ,CAAYrqC,CAAZ,CAAP;EAYF,oBAAA,CAAyBA,CAAzB;EACE,SAAO,IAAIkqC,QAAJ,CAAalqC,CAAb,CAAP;EAYF,iBAAA,CAAoBA,CAApB;EACE,SAAO,IAAIorC,GAAJ,CAAQprC,CAAR,CAAP;EAcF,gCAAA,CAAmCA,CAAnC;EAEE,SAAO,IAAI+sC,kBAAJ,CAAuB/sC,CAAvB,CAAP;EAcF,uBAAA,CAA8BA,CAA9B;EACE,SAAO,IAAImuC,aAAJ,CAAkBnuC,CAAlB,CAAP;EAaF,0BAAA,CAAiCA,CAAjC;EACE,SAAO,IAAIgvC,gBAAJ,CAAqBhvC,CAArB,CAAP;EAEF,mBAAA,CAA0BA,CAA1B;EACE,SAAO43C,iBAAiB53C,CAAjB43C,CAAP;EAIF,sBAAA,CAA6B53C,CAA7B;EACE,SAAO43C,iBAAiB53C,CAAjB43C,CAAP;EAYF,0BAAA,CAAiC53C,CAAjC;EACE,SAAO,IAAImvC,gBAAJ,CAAqBnvC,CAArB,CAAP;EAEF,mBAAA,CAA0BA,CAA1B;EACE,SAAO63C,iBAAiB73C,CAAjB63C,CAAP;EAIF,sBAAA,CAA6B73C,CAA7B;EACE,SAAO63C,iBAAiB73C,CAAjB63C,CAAP;EAYF,gCAAA,CAAuC73C,CAAvC;EACE,SAAO,IAAIqvC,sBAAJ,CAA2BrvC,CAA3B,CAAP;EAYF,gCAAA,CAAuCA,CAAvC;EAEE,SAAO,IAAIwvC,sBAAJ,CAA2BxvC,CAA3B,CAAP;EAYF,4BAAA,CAAmCA,CAAnC;EACE,SAAO,IAAIsvC,kBAAJ,CAAuBtvC,CAAvB,CAAP;EAYF,4BAAA,CAAmCA,CAAnC;EACE,SAAO,IAAIyvC,kBAAJ,CAAuBzvC,CAAvB,CAAP;EAYF,sBAAA,CAA6BA,CAA7B;EACE,SAAO,IAAI8uC,YAAJ,CAAiB9uC,CAAjB,CAAP;EAYF,sBAAA,CAA6BA,CAA7B;EACE,SAAO,IAAIkvC,YAAJ,CAAiBlvC,CAAjB,CAAP;EAcF,aAAA,CAAoBA,CAApB;EACE,SAAO,IAAI20C,GAAJ,CAAQ30C,CAAR,CAAP;EAYF,iBAAA,CAAwBA,CAAxB;EACE,SAAO,IAAI8zC,OAAJ,CAAY9zC,CAAZ,CAAP;EAYF,cAAA,CAAqBA,CAArB;EACE,SAAO,IAAI01C,IAAJ,CAAS11C,CAAT,CAAP;EAYF,kBAAA,CAAyBA,CAAzB;EACE,SAAO,IAAI80C,QAAJ,CAAa90C,CAAb,CAAP;EAYF,mBAAA,CAA0BA,CAA1B;EACE,SAAO,IAAI0zC,SAAJ,CAAc1zC,CAAd,CAAP;EAYF,uBAAA,CAA8BA,CAA9B;EACE,SAAO,IAAImzC,aAAJ,CAAkBnzC,CAAlB,CAAP;EAYF,eAAA,CAAoBA,CAApB;EACE,SAAO,IAAIkxC,GAAJ,CAAQlxC,CAAR,CAAP;EAYF,yBAAA,CAAgCA,CAAhC;EACE,SAAO,IAAI2wC,eAAJ,CAAoB3wC,CAApB,CAAP;EAcF,uBAAA,CAA8BA,CAA9B;EACE,SAAO,IAAIi3C,aAAJ,CAAkBj3C,CAAlB,CAAP;EAYF,yBAAA,CAAgCA,CAAhC;EACE,SAAO,IAAIs2C,eAAJ,CAAoBt2C,CAApB,CAAP;iBD5GY2K,cAAcssC,eCgH5B,IAAaa,kBAAkBC,kBAA/B;EAAA,IACaC,kBAAkBC,kBAD/B;EAAA,IAEaC,YAAYC,YAFzB;EAAA,IAGaC,YAAYC,YAHzB;EAAA,0/CAAA;EAAA,oEAAA,2BCjqB+Bl6B,GAAeC;EAC5C,SAAOk
6B,eAAuBn6B,CAAvBm6B,EAA8Bl6B,CAA9Bk6B,CAAP;EAUF,8BAAA,CAAmCn6B,CAAnC,EAAkDC,CAAlD;EACE,SAAOm6B,qBAA2Bp6B,CAA3Bo6B,EAAkCn6B,CAAlCm6B,CAAP;EAUF,+BAAA,CAAoCp6B,CAApC,EAAmDC,CAAnD;EACE,SAAOo6B,oBAA4Br6B,CAA5Bq6B,EAAmCp6B,CAAnCo6B,CAAP;EAUF,mCAAA,CAAwCr6B,CAAxC,EAAuDC,CAAvD;EACE,SAAOq6B,0BAAgCt6B,CAAhCs6B,EAAuCr6B,CAAvCq6B,CAAP;EAUF,qBAAA,CAA0Bt6B,CAA1B,EAAyCC,CAAzC;EACE,SAAOs6B,UAAkBv6B,CAAlBu6B,EAAyBt6B,CAAzBs6B,CAAP;EAUF,kBAAA,CAAuBv6B,CAAvB,EAAsCC,CAAtC;EACE,SAAOu6B,OAAex6B,CAAfw6B,EAAsBv6B,CAAtBu6B,CAAP;EAUF,2BAAA,CAAgCx6B,CAAhC,EAA+CC,CAA/C;EACE,SAAOw6B,gBAAuBz6B,CAAvBy6B,EAA8Bx6B,CAA9Bw6B,CAAP;EAUF,6BAAA,CAAkCz6B,CAAlC,EAAiDC,CAAjD;EACE,SAAOy6B,kBAAyB16B,CAAzB06B,EAAgCz6B,CAAhCy6B,CAAP;EAUF,uCAAA,CACI16B,CADJ,EACmBC,CADnB;EAEE,SAAO06B,4BAAmC36B,CAAnC26B,EAA0C16B,CAA1C06B,CAAP;EAGF,gBAAA,CAAqB36B,CAArB,EAAoCC,CAApC;EACE,SAAO06B,4BAAmC36B,CAAnC26B,EAA0C16B,CAA1C06B,CAAP;EAGF,gBAAA,CAAqB36B,CAArB,EAAoCC,CAApC;EACE,SAAO06B,4BAAmC36B,CAAnC26B,EAA0C16B,CAA1C06B,CAAP;EAUF,8BAAA,CAAiC36B,CAAjC,EAAgDC,CAAhD;EACE,SAAOqZ,mBAAwBtZ,CAAxBsZ,EAA+BrZ,CAA/BqZ,CAAP;EAGF,eAAA,CAAoBtZ,CAApB,EAAmCC,CAAnC;EACE,SAAOqZ,mBAAwBtZ,CAAxBsZ,EAA+BrZ,CAA/BqZ,CAAP;EAGF,eAAA,CAAoBtZ,CAApB,EAAmCC,CAAnC;EACE,SAAOqZ,mBAAwBtZ,CAAxBsZ,EAA+BrZ,CAA/BqZ,CAAP;mfCrHmBz3B;EACnB,SAAO,IAAI6gC,IAAJ,CAAS7gC,CAAT,CAAP;EAWF,cAAA,CAAmBA,CAAnB;EACE,SAAO+4C,GAAgB/4C,CAAhB+4C,CAAP;EAWF,cAAA,CAAmB/4C,CAAnB;EACE,SAAOg5C,GAAgBh5C,CAAhBg5C,CAAP;;;EC9BF,YAAA;EAAA,0DAAA,QAEEr1I,OAAAA,GAAe,IAAfA;EAQF,UAVuCkQ,aAAAA,GAAAA,GAIrColI,WAAAA,SAAAA,GAAA,UAASn/B,CAAT;EACE,UAAMA,aAAiBqe,MAAvB,EACE,MAAM,IAAI57H,KAAJ,CAAU,iDAAV,CAAN,CAEFyG,KAAK82G,KAAL92G,GAAa82G,CAAb92G;KARmC6Q,GAUvC;IAVuC+lG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ECfvC,iBAAiBs/B,SAAjB,CAmBA,SAASA,SAAT,CAAmB1xI,CAAnB,EAAuBw+C,CAAvB;EAKI,OAJA,IAAIxmC,IAAU,IAAI3gB,KAAJ,CAAUs6I,UAAU/7I,MAAV+7I,GAAmB,CAA7B,CAAd,EACI5zI,IAAU,CADd,EAEIjI,IAAU,CAFd,EAGI87I,KAAU,CACd,EAAO97I,IAAQ67I,UAAU/7I,MAAzB,GACIoiB,EAAOja,GAAPia,IAAmB25H,UAAU77I,GAAV67I,CAAnB35H,CACJ,OAAO,IAAIrf,OAAJ,CAAY,UAAkBC,CAAlB,EAA2BC,CAA3B;EACfmf,MAAOja,CAAPia,IAAiB,UAAkByC,CAAlB;EACb,UAAIm3H,CAAJ,EAEI,IADAA,KAAU,CAAVA,EACIn3H,CAAJ,EACI5hB,EAAO4hB,CAAP5hB,EADJ,KAEK;EAGD,aAFA,IAAImf,IAAS,IAAI3gB,KAAJ,CAAUs6I,UAAU/7I,MAAV+7I,GAAmB,CAA7B,CAAb,EACI5zI,IAAS,CACb,EAAOA,IAASia,EAAOpiB,MAAvB,GACIoiB,EAAOja,GAAPia,IAAmB25H,UAAU5zI,CAAV4zI,CAAnB35H,CACJpf,EAAQ8B,KAAR9B,CAAc,IAAdA,EAAoBof,CAApBpf;;OAVZof,CAcA;EACIhY,QAAGtF,KAAHsF,CAASw+C,KAAO,IAAhBx+C,EAAsBgY,CAAtBhY;EACF,KAFF,CAEE,OAAOya,CAAP;EACMm3H,YACAA,KAAU,CAAVA,EACA/4I,EAAO4hB,CAAP5hB,CAFA+4I;;KAlBL,CAAP;;ECpBJ,MAAIC,IAAS74F,CAAb,CAOA64F,EAAOj8I,MAAPi8I,GAAgB,UAAgBC,CAAhB;EACZ,QAAI7kG,IAAI6kG,EAAOl8I,MAAf,CACA,KAAKq3C,CAAL,EACI,OAAO,CAAP,CAEJ,KADA,IAAI90C,IAAI,CACR,IAAS80C,IAAI,IAAI,KAA0B,QAArB6kG,EAAOtnH,MAAPsnH,CAAc7kG,CAAd6kG,CAAtB,KACM35I,EACN,OAAOpC,KAAKuQ,IAALvQ,CAA0B,IAAhB+7I,EAAOl8I,MAAjBG,IAA+B,CAA/BA,GAAmCoC,CAA1C;KAPJ05I,CAiBA,KANA,IAAIE,IAAM,IAAI16I,KAAJ,CAAU,EAAV,CAAV,EAGI26I,IAAM,IAAI36I,KAAJ,CAAU,GAAV,CAHV,EAMSd,IAAI,CAAb,EAAgBA,IAAI,EAApB,GACIy7I,EAAID,EAAIx7I,CAAJw7I,IAASx7I,IAAI,EAAJA,GAASA,IAAI,EAAbA,GAAkBA,IAAI,EAAJA,GAASA,IAAI,EAAbA,GAAkBA,IAAI,EAAJA,GAASA,IAAI,CAAbA,GAAiBA,IAAI,EAAJA,GAAS,EAA3Ey7I,IAAiFz7I,GAAjFy7I,CASJH,EAAOI,MAAPJ,GAAgB,UAAgB7wI,CAAhB,EAAwBrG,CAAxB,EAA+BoE,CAA/B;EAMZ,SALA,IAII2V,CAJJ,EAAIw9H,IAAQ,IAAZ,EACIC,MADJ,EAEI57I,IAAI,CAFR,EAGIiD,IAAI,CAER,EAAOmB,IAAQoE,CAAf,GAAoB;EAChB,UAAIvI,IAAIwK,EAAOrG,GAAPqG,CAAR,CACA,QAAQxH,CAAR,GACI,KAAK,CAAL;EACI24I,YAAM57I,GAAN47I,IAAaJ,EAAIv7I,KAAK,CAATu7I,CAAbI,EACAz9H,KAAS,IAA
Jle,MAAU,CADf27I,EAEA34I,IAAI,CAFJ24I,CAGA,MACJ,KAAK,CAAL;EACIA,YAAM57I,GAAN47I,IAAaJ,EAAIr9H,IAAIle,KAAK,CAAbu7I,CAAbI,EACAz9H,KAAS,KAAJle,MAAW,CADhB27I,EAEA34I,IAAI,CAFJ24I,CAGA,MACJ,KAAK,CAAL;EACIA,YAAM57I,GAAN47I,IAAaJ,EAAIr9H,IAAIle,KAAK,CAAbu7I,CAAbI,EACAA,EAAM57I,GAAN47I,IAAaJ,EAAQ,KAAJv7I,CAAJu7I,CADbI,EAEA34I,IAAI,CAFJ24I,CAZR,CAiBI57I,IAAI,IAAJA,MACC27I,MAAUA,MAAVA,GAAuB16I,KAAK4Y,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0BA,MAA1BA,EAAkC+hI,CAAlC/hI,IAC7B7Z,IAAI,CAFJA;EAWR,YANIiD,MACA24I,EAAM57I,GAAN47I,IAAaJ,EAAIr9H,CAAJq9H,CAAbI,EACAA,EAAM57I,GAAN47I,IAAa,EADbA,EAEU,MAAN34I,CAAM,KACN24I,EAAM57I,GAAN47I,IAAa,EADP,CAHV34I,GAMA04I,KACI37I,KACA27I,EAAM16I,IAAN06I,CAAW9hI,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0BA,MAA1BA,EAAkC+hI,EAAM94I,KAAN84I,CAAY,CAAZA,EAAe57I,CAAf47I,CAAlC/hI,CAAX8hI,CADA37I,EAEG27I,EAAM10I,IAAN00I,CAAW,EAAXA,CAHPA,IAKG9hI,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0BA,MAA1BA,EAAkC+hI,EAAM94I,KAAN84I,CAAY,CAAZA,EAAe57I,CAAf47I,CAAlC/hI,CALP;KApCJyhI,CAsDAA,EAAOO,MAAPP,GAAgB,UAAgBC,CAAhB,EAAwB9wI,CAAxB,EAAgCjD,CAAhC;EAIZ,SAHA,IAEI2W,CAFJ,EAAI/Z,IAAQoD,CAAZ,EACIvE,IAAI,CADR,EAGSjD,IAAI,CAAb,EAAgBA,IAAIu7I,EAAOl8I,MAA3B,GAAoC;EAChC,UAAI+vB,IAAImsH,EAAOp5F,UAAPo5F,CAAkBv7I,GAAlBu7I,CAAR,CACA,IAAU,OAANnsH,CAAM,IAAMnsB,IAAI,CAApB,EACI,MACJ,SAAqBqmG,OAAhBl6E,IAAIqsH,EAAIrsH,CAAJqsH,EAAT,EACI,MAAMj9I,MAnBI,kBAmBJA,CAAN,CACJ,QAAQyE,CAAR,GACI,KAAK,CAAL;EACIkb,cAAIiR,CAAJjR,EACAlb,IAAI,CADJkb,CAEA,MACJ,KAAK,CAAL;EACI1T,YAAOjD,GAAPiD,IAAmB0T,KAAK,CAALA,IAAc,KAAJiR,MAAW,CAAxC3kB,EACA0T,IAAIiR,CADJ3kB,EAEAxH,IAAI,CAFJwH,CAGA,MACJ,KAAK,CAAL;EACIA,YAAOjD,GAAPiD,KAAwB,KAAJ0T,MAAW,KAAS,KAAJiR,MAAW,CAA/C3kB,EACA0T,IAAIiR,CADJ3kB,EAEAxH,IAAI,CAFJwH,CAGA,MACJ,KAAK,CAAL;EACIA,YAAOjD,GAAPiD,KAAwB,IAAJ0T,MAAU,IAAIiR,CAAlC3kB,EACAxH,IAAI,CADJwH,CAhBR;EAqBJ,SAAU,MAANxH,CAAJ,EACI,MAAMzE,MA1CQ,kBA0CRA,CAAN,CACJ,OAAOgJ,IAASpD,CAAhB;KAjCJk3I,EAyCAA,EAAOr8I,IAAPq8I,GAAc,UAAcC,CAAd;EACV,+EAA0Et8I,IAAnE,CAAwEs8I,CAAxE;EAAP;KA1CJD;;qBC9FiBQ,aAQjB,SAASA,YAAT;EAOI72I,OAAK82I,UAAL92I,KAAAA;EAUJ62I,cAAaryC,SAAbqyC,CAAuBE,EAAvBF,GAA4B,UAAYG,CAAZ,EAAiBxyI,CAAjB,EAAqBw+C,CAArB;EAKxB,UAJChjD,KAAK82I,UAAL92I,CAAgBg3I,CAAhBh3I,MAAyBA,KAAK82I,UAAL92I,CAAgBg3I,CAAhBh3I,MAAzBA,GAAqDhE,OAClDwI,IAAMA,GACNw+C,KAAMA,KAAOhjD,SAEVA,IAAP;GALJ62I,EAcAA,aAAaryC,SAAbqyC,CAAuBI,GAAvBJ,GAA6B,UAAaG,CAAb,EAAkBxyI,CAAlB;EACzB,WAAY6/F,MAAR2yC,CAAJ,EACIh3I,KAAK82I,UAAL92I,KAAAA,CADJ,KAGI,SAAWqkG,MAAP7/F,CAAJ,EACIxE,KAAK82I,UAAL92I,CAAgBg3I,CAAhBh3I,MAAAA,CADJ,KAII,KADA,IAAIk3I,IAAYl3I,KAAK82I,UAAL92I,CAAgBg3I,CAAhBh3I,CAAhB,EACSjF,IAAI,CAAb,EAAgBA,IAAIm8I,EAAU98I,MAA9B,GACQ88I,EAAUn8I,CAAVm8I,EAAa1yI,EAAb0yI,KAAoB1yI,CAApB0yI,GACAA,EAAU//F,MAAV+/F,CAAiBn8I,CAAjBm8I,EAAoB,CAApBA,CADAA,KAGEn8I,CAHFm8I,CAMhB,OAAOl3I,IAAP;GA7BJ62I,EAsCAA,aAAaryC,SAAbqyC,CAAuBM,IAAvBN,GAA8B,UAAcG,CAAd;EAC1B,MAAIE,IAAYl3I,KAAK82I,UAAL92I,CAAgBg3I,CAAhBh3I,CAAhB,CACA,IAAIk3I,CAAJ,EAAe;EAGX,SAFA,IAAIz3H,MAAJ,EACI1kB,IAAI,CACR,EAAOA,IAAIo7I,UAAU/7I,MAArB,GACIqlB,EAAKzjB,IAALyjB,CAAU02H,UAAUp7I,GAAVo7I,CAAV12H,EACJ,KAAK1kB,IAAI,CAAT,EAAYA,IAAIm8I,EAAU98I,MAA1B,GACI88I,EAAUn8I,CAAVm8I,EAAa1yI,EAAb0yI,CAAgBh4I,KAAhBg4I,CAAsBA,EAAUn8I,GAAVm8I,EAAel0F,GAArCk0F,EAA0Cz3H,CAA1Cy3H;EAER,UAAOl3I,IAAP;GAhDJ62I,CCxBA,cAAiBh4H,QAAQA,OAARA,CAAjB,CAqFA,SAASA,OAAT,CAAiB2+B,CAAjB;EAwNI,SArN4B,sBAAjBr/C,YAAiB,GAAa;EAErC,QAAIi5I,IAAM,IAAIj5I,YAAJ,GAAoB,EAApB,CAAV;EAAA,QACIk5I,IAAM,IAAIh5I,UAAJ,CAAe+4I,EAAI5xI,MAAnB,CADV;EAAA,QAEI8xI,IAAiB,QAAXD,EAAI,CAAJA,CAFV,CAIA,SAASE,CAAT,CAA4B38I,CAA5B,EAAiCqxF,CAAjC,EAAsC8vB,CAAtC;EACIq7B,QAAI,CAAJA,IAASx8I,CAATw8I,EACAnrD,EAAI8vB,CAAJ9vB,IAAeorD,
EAAI,CAAJA,CADfD,EAEAnrD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAFfD,EAGAnrD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAHfD,EAIAnrD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAJfD;EAOJ,cAASI,CAAT,CAA4B58I,CAA5B,EAAiCqxF,CAAjC,EAAsC8vB,CAAtC;EACIq7B,QAAI,CAAJA,IAASx8I,CAATw8I,EACAnrD,EAAI8vB,CAAJ9vB,IAAeorD,EAAI,CAAJA,CADfD,EAEAnrD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAFfD,EAGAnrD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAHfD,EAIAnrD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAJfD;EAYJ,cAASK,CAAT,CAA2BxrD,CAA3B,EAAgC8vB,CAAhC;EAKI,aAJAs7B,EAAI,CAAJA,IAASprD,EAAI8vB,CAAJ9vB,CAATorD,EACAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CADTorD,EAEAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAFTorD,EAGAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAHTorD,EAIOD,EAAI,CAAJA,CAAP;EAGJ,cAASM,CAAT,CAA2BzrD,CAA3B,EAAgC8vB,CAAhC;EAKI,aAJAs7B,EAAI,CAAJA,IAASprD,EAAI8vB,CAAJ9vB,CAATorD,EACAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CADTorD,EAEAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAFTorD,EAGAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAHTorD,EAIOD,EAAI,CAAJA,CAAP;EAjBJ55F,OAAQm6F,YAARn6F,GAAuB85F,IAAKC,CAALD,GAA0BE,CAAjDh6F,EAEAA,EAAQo6F,YAARp6F,GAAuB85F,IAAKE,CAALF,GAA0BC,CAFjD/5F,EAqBAA,EAAQq6F,WAARr6F,GAAsB85F,IAAKG,CAALH,GAAyBI,CArB/Cl6F,EAuBAA,EAAQs6F,WAARt6F,GAAsB85F,IAAKI,CAALJ,GAAyBG,CAvB/Cj6F;EAvBqC,GAAA,EAAb,GAiDjB;EAEP,aAASu6F,CAAT,CAA4BC,CAA5B,EAAuCp9I,CAAvC,EAA4CqxF,CAA5C,EAAiD8vB,CAAjD;EACI,UAAIhxG,IAAOnQ,IAAM,CAANA,GAAU,CAAVA,GAAc,CAAzB,CAGA,IAFImQ,MACAnQ,KAAOA,CADPmQ,GAEQ,MAARnQ,CAAJ,EACIo9I,EAAU,IAAIp9I,CAAJ,GAAU,CAAV,GAA6B,CAA7B,GAAkD,UAA5Do9I,EAAwE/rD,CAAxE+rD,EAA6Ej8B,CAA7Ei8B,EADJ,KAEK,IAAIx5I,MAAM5D,CAAN4D,CAAJ,EACDw5I,EAAU,UAAVA,EAAsB/rD,CAAtB+rD,EAA2Bj8B,CAA3Bi8B,EADC,KAEA,IAAIp9I,IAAM,qBAAV,EACDo9I,GAAWjtI,KAAQ,EAARA,GAAa,gBAAgB,CAAxCitI,EAA2C/rD,CAA3C+rD,EAAgDj8B,CAAhDi8B,EADC,KAEA,IAAIp9I,IAAM,sBAAV,EACDo9I,GAAWjtI,KAAQ,EAARA,GAAaxQ,KAAKuE,KAALvE,CAAWK,IAAM,qBAAjBL,OAA6C,CAArEy9I,EAAwE/rD,CAAxE+rD,EAA6Ej8B,CAA7Ei8B,EADC,KAEA;EACD,YAAI3xD,IAAW9rF,KAAKkC,KAALlC,CAAWA,KAAK4G,GAAL5G,CAASK,CAATL,IAAgBA,KAAK09I,GAAhC19I,CAAf,CAEAy9I,GAAWjtI,KAAQ,EAARA,GAAas7E,IAAW,GAAXA,IAAkB,EAA/Bt7E,GADyD,UAArDxQ,KAAKuE,KAALvE,CAAWK,IAAML,KAAKmO,GAALnO,CAAS,CAATA,GAAa8rF,CAAb9rF,CAANK,GAA+B,OAA1CL,OAC8C,CAA7Dy9I,EAAgE/rD,CAAhE+rD,EAAqEj8B,CAArEi8B;;EAOR,cAASE,CAAT,CAA2BC,CAA3B,EAAqClsD,CAArC,EAA0C8vB,CAA1C;EACI,UAAIq8B,IAAOD,EAASlsD,CAATksD,EAAcp8B,CAAdo8B,CAAX;EAAA,UACIptI,IAAsB,KAAdqtI,KAAQ,EAAM,IAAI,CAD9B;EAAA,UAEI/xD,IAAW+xD,MAAS,EAATA,GAAc,GAF7B;EAAA,UAGIC,IAAkB,UAAPD,CAHf,CAIA,OAAoB,QAAb/xD,CAAa,GACdgyD,IACApnH,GADAonH,GAEAttI,KAAOzO,IAAAA,CAAPyO,CAHc,GAID,MAAbs7E,CAAa,GACN,wBAAPt7E,CAAO,GAAwBstI,CADlB,GAEbttI,IAAOxQ,KAAKmO,GAALnO,CAAS,CAATA,EAAY8rF,IAAW,GAAvB9rF,CAAPwQ,IAAsCstI,IAAW,OAAjDttI,CANN;EARJyyC,OAAQm6F,YAARn6F,GAAuBu6F,EAAmBO,IAAnBP,CAAwB,IAAxBA,EAA8BQ,WAA9BR,CAAvBv6F,EACAA,EAAQo6F,YAARp6F,GAAuBu6F,EAAmBO,IAAnBP,CAAwB,IAAxBA,EAA8BS,WAA9BT,CADvBv6F,EAiBAA,EAAQq6F,WAARr6F,GAAsB06F,EAAkBI,IAAlBJ,CAAuB,IAAvBA,EAA6BO,UAA7BP,CAjBtB16F,EAkBAA,EAAQs6F,WAARt6F,GAAsB06F,EAAkBI,IAAlBJ,CAAuB,IAAvBA,EAA6BQ,UAA7BR,CAlBtB16F;EArBO,GAAA,EAjDiB,EA6FA,sBAAjBm7F,YAAiB,GAAa;EAErC,QAAIC,IAAM,IAAID,YAAJ,GAAmB,EAAnB,CAAV;EAAA,QACItB,IAAM,IAAIh5I,UAAJ,CAAeu6I,EAAIpzI,MAAnB,CADV;EAAA,QAEI8xI,IAAiB,QAAXD,EAAI,CAAJA,CAFV,CAIA,SAASwB,CAAT,CAA6Bj+I,CAA7B,EAAkCqxF,CAAlC,EAAuC8vB,CAAvC;EACI68B,QAAI,CAAJA,IAASh+I,CAATg+I,EACA3sD,EAAI8vB,CAAJ9vB,IAAeorD,EAAI,CAAJA,CADfuB,EAEA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAFfuB,EAGA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAHfuB,EAIA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,C
AAJA,CAJfuB,EAKA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CALfuB,EAMA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CANfuB,EAOA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAPfuB,EAQA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CARfuB;EAWJ,cAASE,CAAT,CAA6Bl+I,CAA7B,EAAkCqxF,CAAlC,EAAuC8vB,CAAvC;EACI68B,QAAI,CAAJA,IAASh+I,CAATg+I,EACA3sD,EAAI8vB,CAAJ9vB,IAAeorD,EAAI,CAAJA,CADfuB,EAEA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAFfuB,EAGA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAHfuB,EAIA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAJfuB,EAKA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CALfuB,EAMA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CANfuB,EAOA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CAPfuB,EAQA3sD,EAAI8vB,IAAM,CAAV9vB,IAAeorD,EAAI,CAAJA,CARfuB;EAgBJ,cAASG,CAAT,CAA4B9sD,CAA5B,EAAiC8vB,CAAjC;EASI,aARAs7B,EAAI,CAAJA,IAASprD,EAAI8vB,CAAJ9vB,CAATorD,EACAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CADTorD,EAEAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAFTorD,EAGAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAHTorD,EAIAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAJTorD,EAKAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CALTorD,EAMAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CANTorD,EAOAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAPTorD,EAQOuB,EAAI,CAAJA,CAAP;EAGJ,cAASI,CAAT,CAA4B/sD,CAA5B,EAAiC8vB,CAAjC;EASI,aARAs7B,EAAI,CAAJA,IAASprD,EAAI8vB,CAAJ9vB,CAATorD,EACAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CADTorD,EAEAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAFTorD,EAGAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAHTorD,EAIAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAJTorD,EAKAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CALTorD,EAMAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CANTorD,EAOAA,EAAI,CAAJA,IAASprD,EAAI8vB,IAAM,CAAV9vB,CAPTorD,EAQOuB,EAAI,CAAJA,CAAP;EAzBJp7F,OAAQy7F,aAARz7F,GAAwB85F,IAAKuB,CAALvB,GAA2BwB,CAAnDt7F,EAEAA,EAAQ07F,aAAR17F,GAAwB85F,IAAKwB,CAALxB,GAA2BuB,CAFnDr7F,EA6BAA,EAAQ27F,YAAR37F,GAAuB85F,IAAKyB,CAALzB,GAA0B0B,CA7BjDx7F,EA+BAA,EAAQ47F,YAAR57F,GAAuB85F,IAAK0B,CAAL1B,GAA0ByB,CA/BjDv7F;EA/BqC,GAAA,EAAb,GAiEjB;EAEP,aAAS67F,CAAT,CAA6BrB,CAA7B,EAAwCsB,CAAxC,EAA8CC,CAA9C,EAAoD3+I,CAApD,EAAyDqxF,CAAzD,EAA8D8vB,CAA9D;EACI,UAAIhxG,IAAOnQ,IAAM,CAANA,GAAU,CAAVA,GAAc,CAAzB,CAGA,IAFImQ,MACAnQ,KAAOA,CADPmQ,GAEQ,MAARnQ,CAAJ,EACIo9I,EAAU,CAAVA,EAAa/rD,CAAb+rD,EAAkBj8B,IAAMu9B,CAAxBtB,GACAA,EAAU,IAAIp9I,CAAJ,GAAU,CAAV,GAA6B,CAA7B,GAAkD,UAA5Do9I,EAAwE/rD,CAAxE+rD,EAA6Ej8B,IAAMw9B,CAAnFvB,CADAA,CADJ,KAGO,IAAIx5I,MAAM5D,CAAN4D,CAAJ,EACHw5I,EAAU,CAAVA,EAAa/rD,CAAb+rD,EAAkBj8B,IAAMu9B,CAAxBtB,GACAA,EAAU,UAAVA,EAAsB/rD,CAAtB+rD,EAA2Bj8B,IAAMw9B,CAAjCvB,CADAA,CADG,KAGA,IAAIp9I,IAAM,sBAAV,EACHo9I,EAAU,CAAVA,EAAa/rD,CAAb+rD,EAAkBj8B,IAAMu9B,CAAxBtB,GACAA,GAAWjtI,KAAQ,EAARA,GAAa,gBAAgB,CAAxCitI,EAA2C/rD,CAA3C+rD,EAAgDj8B,IAAMw9B,CAAtDvB,CADAA,CADG,KAGA;EACH,YAAIK,CAAJ,CACA,IAAIz9I,IAAM,uBAAV,EAEIo9I,GADAK,IAAWz9I,IAAM,YACM,CAAvBo9I,EAA0B/rD,CAA1B+rD,EAA+Bj8B,IAAMu9B,CAArCtB,GACAA,GAAWjtI,KAAQ,EAARA,GAAastI,IAAW,gBAAgB,CAAnDL,EAAsD/rD,CAAtD+rD,EAA2Dj8B,IAAMw9B,CAAjEvB,CADAA,CAFJ,KAIO;EACH,cAAI3xD,IAAW9rF,KAAKkC,KAALlC,CAAWA,KAAK4G,GAAL5G,CAASK,CAATL,IAAgBA,KAAK09I,GAAhC19I,CAAf,CACiB,SAAb8rF,CAAa,KACbA,IAAW,IADE,GAGjB2xD,EAAqB,oBADrBK,IAAWz9I,IAAML,KAAKmO,GAALnO,CAAS,CAATA,GAAa8rF,CAAb9rF,CACI,MAAqB,CAA1Cy9I,EAA6C/rD,CAA7C+rD,EAAkDj8B,IAAMu9B,CAAxDtB,CAHiB,EAIjBA,GAAWjtI,KAAQ,EAARA,GAAas7E,IAAW,IAAXA,IAAmB,EAAhCt7E,GAAgD,UAAXstI,CAAW,GAAU,aAAa,CAAlFL,EAAqF/rD,CAArF+rD,EAA0Fj8B,IAAMw9B,CAAhGvB,CAJiB;;;EAY7B,cAASwB,CAAT,CAA4BrB,CAA5B,EAAsCmB,CAAtC,EAA4CC,CAA5C,EAAkDttD,CAAlD,EAAuD8vB,CAAvD;EACI,UAAI09B,IAAKtB,EAAS
lsD,CAATksD,EAAcp8B,IAAMu9B,CAApBnB,CAAT;EAAA,UACIuB,IAAKvB,EAASlsD,CAATksD,EAAcp8B,IAAMw9B,CAApBpB,CADT;EAAA,UAEIptI,IAAoB,KAAZ2uI,KAAM,EAAM,IAAI,CAF5B;EAAA,UAGIrzD,IAAWqzD,MAAO,EAAPA,GAAY,IAH3B;EAAA,UAIIrB,IAAW,cAAmB,UAALqB,CAAd,IAA8BD,CAJ7C,CAKA,OAAoB,SAAbpzD,CAAa,GACdgyD,IACApnH,GADAonH,GAEAttI,KAAOzO,IAAAA,CAAPyO,CAHc,GAID,MAAbs7E,CAAa,GACN,SAAPt7E,CAAO,GAASstI,CADH,GAEbttI,IAAOxQ,KAAKmO,GAALnO,CAAS,CAATA,EAAY8rF,IAAW,IAAvB9rF,CAAPwQ,IAAuCstI,IAAW,gBAAlDttI,CANN;EATJyyC,OAAQy7F,aAARz7F,GAAwB67F,EAAoBf,IAApBe,CAAyB,IAAzBA,EAA+Bd,WAA/Bc,EAA4C,CAA5CA,EAA+C,CAA/CA,CAAxB77F,EACAA,EAAQ07F,aAAR17F,GAAwB67F,EAAoBf,IAApBe,CAAyB,IAAzBA,EAA+Bb,WAA/Ba,EAA4C,CAA5CA,EAA+C,CAA/CA,CADxB77F,EAkBAA,EAAQ27F,YAAR37F,GAAuBg8F,EAAmBlB,IAAnBkB,CAAwB,IAAxBA,EAA8Bf,UAA9Be,EAA0C,CAA1CA,EAA6C,CAA7CA,CAlBvBh8F,EAmBAA,EAAQ47F,YAAR57F,GAAuBg8F,EAAmBlB,IAAnBkB,CAAwB,IAAxBA,EAA8Bd,UAA9Bc,EAA0C,CAA1CA,EAA6C,CAA7CA,CAnBvBh8F;EAhCO,GAAA,EA9JiB,EAqNrBA,CAAP;EAKJ,UAAS+6F,WAAT,CAAqB39I,CAArB,EAA0BqxF,CAA1B,EAA+B8vB,CAA/B;EACI9vB,IAAI8vB,CAAJ9vB,IAA6B,MAAbrxF,CAAhBqxF,EACAA,EAAI8vB,IAAM,CAAV9vB,IAAgBrxF,MAAQ,CAARA,GAAa,GAD7BqxF,EAEAA,EAAI8vB,IAAM,CAAV9vB,IAAgBrxF,MAAQ,EAARA,GAAa,GAF7BqxF,EAGAA,EAAI8vB,IAAM,CAAV9vB,IAAgBrxF,MAAQ,EAHxBqxF;EAMJ,UAASusD,WAAT,CAAqB59I,CAArB,EAA0BqxF,CAA1B,EAA+B8vB,CAA/B;EACI9vB,IAAI8vB,CAAJ9vB,IAAgBrxF,MAAQ,EAAxBqxF,EACAA,EAAI8vB,IAAM,CAAV9vB,IAAgBrxF,MAAQ,EAARA,GAAa,GAD7BqxF,EAEAA,EAAI8vB,IAAM,CAAV9vB,IAAgBrxF,MAAQ,CAARA,GAAa,GAF7BqxF,EAGAA,EAAI8vB,IAAM,CAAV9vB,IAA6B,MAAbrxF,CAHhBqxF;EAMJ,UAASwsD,UAAT,CAAoBxsD,CAApB,EAAyB8vB,CAAzB;EACI,UAAQ9vB,EAAI8vB,CAAJ9vB,IACAA,EAAI8vB,IAAM,CAAV9vB,KAAgB,CADhBA,GAEAA,EAAI8vB,IAAM,CAAV9vB,KAAgB,EAFhBA,GAGAA,EAAI8vB,IAAM,CAAV9vB,KAAgB,QAAQ,CAHhC;EAMJ,UAASysD,UAAT,CAAoBzsD,CAApB,EAAyB8vB,CAAzB;EACI,UAAQ9vB,EAAI8vB,CAAJ9vB,KAAgB,EAAhBA,GACAA,EAAI8vB,IAAM,CAAV9vB,KAAgB,EADhBA,GAEAA,EAAI8vB,IAAM,CAAV9vB,KAAgB,CAFhBA,GAGAA,EAAI8vB,IAAM,CAAV9vB,OAAkB,CAH1B;ECzUJ,iBAAiB0tD,OAAjB,CAQA,SAASA,OAAT,CAAiBC,UAAjB;EACI;EACI,QAAIvwI,SAAMwwI,KAAK,QAAQp9H,OAAR,CAAgB,GAAhB,EAAoB,IAApB,CAALo9H,EAAgCD,UAAhCC,CAAV,CACA,IAAIxwI,WAAQA,OAAIjP,MAAJiP,IAAclF,OAAO0O,IAAP1O,CAAYkF,MAAZlF,EAAiB/J,MAAvCiP,CAAJ,EACI,OAAOA,MAAP;EACN,GAJF,CAIE,OAAO6Q,CAAP,WACK,IAAP;;ECRJ,MAAI4/H,IAAOt8F,CAAX,CAOAs8F,EAAK1/I,MAAL0/I,GAAc,UAAqBxD,CAArB;EAGV,SAFA,IAAIxsH,IAAM,CAAV,EACIK,IAAI,CADR,EAESpvB,IAAI,CAAb,EAAgBA,IAAIu7I,EAAOl8I,MAA3B,IAAqCW,CAArC,GACIovB,IAAImsH,EAAOp5F,UAAPo5F,CAAkBv7I,CAAlBu7I,KACI,MACJxsH,KAAO,IACFK,IAAI,IAAJA,GACLL,KAAO,CADFK,GAEiB,UAAZ,QAAJA,CAAgB,KAAkD,UAAZ,QAA3BmsH,EAAOp5F,UAAPo5F,CAAkBv7I,IAAI,CAAtBu7I,CAAuC,CAAlD,MACpBv7I,GACF+uB,KAAO,CAFe,IAItBA,KAAO,EAEf,OAAOA,CAAP;KAfJgwH,EAyBAA,EAAKr0I,IAALq0I,GAAY,UAAmBt0I,CAAnB,EAA2BrG,CAA3B,EAAkCoE,CAAlC;EAER,QADUA,IAAMpE,CAANoE,GACA,CAAV,EACI,OAAO,EAAP,CAKJ,KAJA,IAGI2V,CAHJ,EAAIw9H,IAAQ,IAAZ,EACIC,MADJ,EAEI57I,IAAI,CAER,EAAOoE,IAAQoE,CAAf,IACI2V,IAAI1T,EAAOrG,GAAPqG,KACI,MACJmxI,EAAM57I,GAAN47I,IAAaz9H,IACRA,IAAI,GAAJA,IAAWA,IAAI,GAAfA,GACLy9H,EAAM57I,GAAN47I,KAAkB,KAAJz9H,MAAW,IAAsB,KAAlB1T,EAAOrG,GAAPqG,CADxB0T,GAEAA,IAAI,GAAJA,IAAWA,IAAI,GAAfA,IACLA,MAAU,IAAJA,MAAU,MAAwB,KAAlB1T,EAAOrG,GAAPqG,MAAyB,MAAwB,KAAlBA,EAAOrG,GAAPqG,MAAyB,IAAsB,KAAlBA,EAAOrG,GAAPqG,KAAwB,KAA1G0T,EACAy9H,EAAM57I,GAAN47I,IAAa,SAAUz9H,KAAK,EAAf,CADbA,EAEAy9H,EAAM57I,GAAN47I,IAAa,SAAc,OAAJz9H,CAAV,CAHRA,IAKLy9H,EAAM57I,GAAN47I,KAAkB,KAAJz9H,MAAW,MAAwB,KAAlB1T,EAAOrG,GAAPqG,MAAyB,IAAsB,KAAlBA,EAAOrG,GAAPqG,GAC5DzK,IAAI,IAAJA,MACC27I,MAAUA,MAAVA,GAAuB16I,KAAK4Y,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0BA,MAA1BA,EAAkC+hI,CAAlC/hI,IAC7B7Z,IAAI,CAFJA,EAKR,OAAI27I,KACI37I
,KACA27I,EAAM16I,IAAN06I,CAAW9hI,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0BA,MAA1BA,EAAkC+hI,EAAM94I,KAAN84I,CAAY,CAAZA,EAAe57I,CAAf47I,CAAlC/hI,CAAX8hI,CADA37I,EAEG27I,EAAM10I,IAAN00I,CAAW,EAAXA,CAHPA,IAKG9hI,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0BA,MAA1BA,EAAkC+hI,EAAM94I,KAAN84I,CAAY,CAAZA,EAAe57I,CAAf47I,CAAlC/hI,CALP;KAlDJklI,EAiEAA,EAAK/0I,KAAL+0I,GAAa,UAAoBxD,CAApB,EAA4B9wI,CAA5B,EAAoCjD,CAApC;EAIT,SAHA,IACI6nB,CADJ,EAEIC,CAFJ,EAAIlrB,IAAQoD,CAAZ,EAGSxH,IAAI,CAAb,EAAgBA,IAAIu7I,EAAOl8I,MAA3B,IAAqCW,CAArC,GACIqvB,IAAKksH,EAAOp5F,UAAPo5F,CAAkBv7I,CAAlBu7I,KACI,MACL9wI,EAAOjD,GAAPiD,IAAmB4kB,IACZA,IAAK,IAALA,IACP5kB,EAAOjD,GAAPiD,IAAmB4kB,KAAM,CAANA,GAAgB,GAAnC5kB,EACAA,EAAOjD,GAAPiD,IAA8B,KAAX4kB,CAAW,GAAK,GAF5BA,IAGkB,UAAZ,QAALA,CAAiB,KAAyD,UAAZ,SAAjCC,IAAKisH,EAAOp5F,UAAPo5F,CAAkBv7I,IAAI,CAAtBu7I,CAA4B,CAAY,CAAzD,IACzBlsH,IAAK,UAAiB,OAALA,MAAgB,EAA5B,KAAwC,OAALC,CAAnC,CAALD,IACErvB,CADFqvB,EAEA5kB,EAAOjD,GAAPiD,IAAmB4kB,KAAM,EAANA,GAAgB,GAFnCA,EAGA5kB,EAAOjD,GAAPiD,IAAmB4kB,KAAM,EAANA,GAAW,EAAXA,GAAgB,GAHnCA,EAIA5kB,EAAOjD,GAAPiD,IAAmB4kB,KAAM,CAANA,GAAW,EAAXA,GAAgB,GAJnCA,EAKA5kB,EAAOjD,GAAPiD,IAA8B,KAAX4kB,CAAW,GAAK,GANV,KAQzB5kB,EAAOjD,GAAPiD,IAAmB4kB,KAAM,EAANA,GAAgB,GAAnC5kB,EACAA,EAAOjD,GAAPiD,IAAmB4kB,KAAM,CAANA,GAAW,EAAXA,GAAgB,GADnC5kB,EAEAA,EAAOjD,GAAPiD,IAA8B,KAAX4kB,CAAW,GAAK,GAVV,EAajC,OAAO7nB,IAASpD,CAAhB;KAzFJ26I;;eCbiBhrI,OA6BjB,SAASA,MAAT,CAAcirI,CAAd,EAAqBl8I,CAArB,EAA4B3B,CAA5B;EACI,MAAI89I,IAAS99I,KAAQ,IAArB;EAAA,MACIo0B,IAAS0pH,MAAS,CADtB;EAAA,MAEIC,IAAS,IAFb;EAAA,MAGI13I,IAASy3I,CAHb,CAIA,OAAO,UAAoB99I,CAApB;EACH,QAAIA,IAAO,CAAPA,IAAYA,IAAOo0B,CAAvB,EACI,OAAOypH,EAAM79I,CAAN69I,CAAP,CACAx3I,IAASrG,CAATqG,GAAgBy3I,CAAhBz3I,KACA03I,IAAOF,EAAMC,CAAND,CAAPE,EACA13I,IAAS,CAFTA,EAIJ,IAAI0pF,IAAMpuF,EAAMoB,IAANpB,CAAWo8I,CAAXp8I,EAAiB0E,CAAjB1E,EAAyB0E,KAAUrG,CAAnC2B,CAAV,CAGA,OAFa,IAAT0E,CAAS,KACTA,IAAwB,KAAL,IAATA,CAAc,CADf,GAEN0pF,CAAP;KAVJ;EClCJ,gBAAiBiuD,QAAjB,CAYA,SAASA,QAAT,CAAkBT,CAAlB,EAAsBC,CAAtB;EASI15I,OAAKy5I,EAALz5I,GAAUy5I,MAAO,CAAjBz5I,EAMAA,KAAK05I,EAAL15I,GAAU05I,MAAO,CANjB15I;EAcJ,KAAI4pB,OAAOswH,SAAStwH,IAATswH,GAAgB,IAAIA,QAAJ,CAAa,CAAb,EAAgB,CAAhB,CAA3B,CAEAtwH,KAAKuwH,QAALvwH,GAAgB;EAAa,SAAO,CAAP;GAA7BA,EACAA,KAAKwwH,QAALxwH,GAAgBA,KAAKywH,QAALzwH,GAAgB;EAAa,SAAO5pB,IAAP;GAD7C4pB,EAEAA,KAAKxvB,MAALwvB,GAAc;EAAa,SAAO,CAAP;GAF3BA,CASA,IAAI0wH,WAAWJ,SAASI,QAATJ,GAAoB,kBAAnC,CAOAA,SAASK,UAATL,GAAsB,UAAoBn2I,CAApB;EAClB,MAAc,MAAVA,CAAJ,EACI,OAAO6lB,IAAP,CACJ,IAAI7e,IAAOhH,IAAQ,CAAnB,CACIgH,MACAhH,KAASA,CADTgH,EAEJ,IAAI0uI,IAAK11I,MAAU,CAAnB;EAAA,MACI21I,KAAM31I,IAAQ01I,KAAM,eAAe,CADvC,CAWA,OATI1uI,MACA2uI,KAAMA,MAAO,CAAbA,EACAD,KAAMA,MAAO,CADbC,IAEMD,IAAK,eACPA,IAAK,CAALA,IACMC,IAAK,eACPA,IAAK,GANb3uI,GASG,IAAImvI,QAAJ,CAAaT,CAAb,EAAiBC,CAAjB,CAAP;GAjBJQ,EAyBAA,SAASj3I,IAATi3I,GAAgB,UAAcn2I,CAAd;EACZ,MAAqB,mBAAVA,CAAX,EACI,OAAOm2I,SAASK,UAATL,CAAoBn2I,CAApBm2I,CAAP,CACJ,IAAIpuC,QAAK0uC,QAAL1uC,CAAc/nG,CAAd+nG,CAAJ,EAA0B;EAEtB,SAAIA,QAAK2uC,IAAT,EAGI,OAAOP,SAASK,UAATL,CAAoBvwC,SAAS5lG,CAAT4lG,EAAgB,EAAhBA,CAApBuwC,CAAP,CAFAn2I,IAAQ+nG,QAAK2uC,IAAL3uC,CAAU4uC,UAAV5uC,CAAqB/nG,CAArB+nG,CAAR/nG;EAIR,UAAOA,EAAM26F,GAAN36F,IAAaA,EAAM46F,IAAnB56F,GAA0B,IAAIm2I,QAAJ,CAAan2I,EAAM26F,GAAN36F,KAAc,CAA3B,EAA8BA,EAAM46F,IAAN56F,KAAe,CAA7C,CAA1BA,GAA4E6lB,IAAnF;GAnCJswH,EA2CAA,SAAS11C,SAAT01C,CAAmBC,QAAnBD,GAA8B,UAAkBS,CAAlB;EAC1B,OAAKA,KAAY36I,KAAK05I,EAAL15I,KAAY,EAA7B,EAAiC;EAC7B,QAAIy5I,IAAgB,KAAVz5I,KAAKy5I,EAAK,KAAM,CAA1B;EAAA,QACIC,KAAM15I,KAAK05I,OAAW,CAD1B,CAIA,OAFKD,MACDC,IAAKA,IAAK,CAALA,KAAW,CADfD,KAEIA,IAAU,aAALC,EAAd;EAEJ,UAAO15I,KAAKy5I,EAALz5I,GAAoB,aAAVA
,KAAK05I,EAAtB;GAnDJQ,EA2DAA,SAAS11C,SAAT01C,CAAmBU,MAAnBV,GAA4B,UAAgBS,CAAhB;EACxB,SAAO7uC,QAAK2uC,IAAL3uC,GACD,IAAIA,QAAK2uC,IAAT,CAAwB,IAAVz6I,KAAKy5I,EAAnB,EAAqC,IAAVz5I,KAAK05I,EAAhC,EAAwCmB,QAAQF,CAARE,CAAxC,CADC/uC,KAGCpN,KAAe,IAAV1+F,KAAKy5I,IAAQ96C,MAAgB,IAAV3+F,KAAK05I,IAAQiB,UAAUE,QAAQF,CAARE,GAHvD;GA5DJX,CAkEA,IAAIh9F,aAAatoC,OAAO4vF,SAAP5vF,CAAiBsoC,UAAlC,CAOAg9F,SAASY,QAATZ,GAAoB,UAAkBa,CAAlB;EAChB,SAAIA,MAAST,QAATS,GACOnxH,IADPmxH,GAEG,IAAIb,QAAJ,EACDh9F,WAAWj+C,IAAXi+C,CAAgB69F,CAAhB79F,EAAsB,CAAtBA,IACAA,WAAWj+C,IAAXi+C,CAAgB69F,CAAhB79F,EAAsB,CAAtBA,KAA4B,CAD5BA,GAEAA,WAAWj+C,IAAXi+C,CAAgB69F,CAAhB79F,EAAsB,CAAtBA,KAA4B,EAF5BA,GAGAA,WAAWj+C,IAAXi+C,CAAgB69F,CAAhB79F,EAAsB,CAAtBA,KAA4B,QAAQ,CAJnC,GAMDA,WAAWj+C,IAAXi+C,CAAgB69F,CAAhB79F,EAAsB,CAAtBA,IACAA,WAAWj+C,IAAXi+C,CAAgB69F,CAAhB79F,EAAsB,CAAtBA,KAA4B,CAD5BA,GAEAA,WAAWj+C,IAAXi+C,CAAgB69F,CAAhB79F,EAAsB,CAAtBA,KAA4B,EAF5BA,GAGAA,WAAWj+C,IAAXi+C,CAAgB69F,CAAhB79F,EAAsB,CAAtBA,KAA4B,QAAQ,CATnC,CAFP;GADJg9F,EAoBAA,SAAS11C,SAAT01C,CAAmBc,MAAnBd,GAA4B;EACxB,SAAOtlI,OAAO2rC,YAAP3rC,CACc,MAAjB5U,KAAKy5I,EADF7kI,EAEH5U,KAAKy5I,EAALz5I,KAAY,CAAZA,GAAiB,GAFd4U,EAGH5U,KAAKy5I,EAALz5I,KAAY,EAAZA,GAAiB,GAHd4U,EAIH5U,KAAKy5I,EAALz5I,KAAY,EAJT4U,EAKc,MAAjB5U,KAAK05I,EALF9kI,EAMH5U,KAAK05I,EAAL15I,KAAY,CAAZA,GAAiB,GANd4U,EAOH5U,KAAK05I,EAAL15I,KAAY,EAAZA,GAAiB,GAPd4U,EAQH5U,KAAK05I,EAAL15I,KAAY,EART4U,CAAP;GArBJslI,EAqCAA,SAAS11C,SAAT01C,CAAmBE,QAAnBF,GAA8B;EAC1B,MAAIr7F,IAAS7+C,KAAK05I,EAAL15I,IAAW,EAAxB,CAGA,OAFAA,KAAK05I,EAAL15I,KAAaA,KAAK05I,EAAL15I,IAAW,CAAXA,GAAeA,KAAKy5I,EAALz5I,KAAY,MAAM6+C,OAAU,CAAxD7+C,EACAA,KAAKy5I,EAALz5I,IAAaA,KAAKy5I,EAALz5I,IAAW,CAAXA,GAAiC6+C,OAAU,CADxD7+C,EAEOA,IAAP;GAzCJk6I,EAgDAA,SAAS11C,SAAT01C,CAAmBG,QAAnBH,GAA8B;EAC1B,MAAIr7F,MAAmB,IAAV7+C,KAAKy5I,GAAlB,CAGA,OAFAz5I,KAAKy5I,EAALz5I,KAAaA,KAAKy5I,EAALz5I,KAAY,CAAZA,GAAgBA,KAAK05I,EAAL15I,IAAW,MAAM6+C,OAAU,CAAxD7+C,EACAA,KAAK05I,EAAL15I,IAAaA,KAAK05I,EAAL15I,KAAY,CAAZA,GAAiC6+C,OAAU,CADxD7+C,EAEOA,IAAP;GApDJk6I,EA2DAA,SAAS11C,SAAT01C,CAAmB9/I,MAAnB8/I,GAA4B;EACxB,MAAIe,IAASj7I,KAAKy5I,EAAlB;EAAA,MACIyB,KAASl7I,KAAKy5I,EAALz5I,KAAY,EAAZA,GAAiBA,KAAK05I,EAAL15I,IAAW,OAAO,CADhD;EAAA,MAEIm7I,IAASn7I,KAAK05I,EAAL15I,KAAY,EAFzB,CAGA,OAAiB,MAAVm7I,CAAU,GACA,MAAVD,CAAU,GACRD,IAAQ,KAARA,GACEA,IAAQ,GAARA,GAAc,CAAdA,GAAkB,CADpBA,GAEEA,IAAQ,OAARA,GAAkB,CAAlBA,GAAsB,CAHhB,GAIRC,IAAQ,KAARA,GACEA,IAAQ,GAARA,GAAc,CAAdA,GAAkB,CADpBA,GAEEA,IAAQ,OAARA,GAAkB,CAAlBA,GAAsB,CAPhB,GAQVC,IAAQ,GAARA,GAAc,CAAdA,GAAkB,EARzB;GA/DJjB;EC/HA,MACIpuC,IAAOtuD,CADX,CA6NA,SAAS49F,CAAT,CAAe//G,CAAf,EAAoBC,CAApB,EAAyB+/G,CAAzB;EACI,SAAK,IAAIxoI,IAAO1O,OAAO0O,IAAP1O,CAAYm3B,CAAZn3B,CAAX,EAA6BpJ,IAAI,CAAtC,EAAyCA,IAAI8X,EAAKzY,MAAlD,IAA4DW,CAA5D,OACyBspG,MAAjBhpE,EAAIxoB,EAAK9X,CAAL8X,CAAJwoB,KAA+BggH,MAC/BhgH,EAAIxoB,EAAK9X,CAAL8X,CAAJwoB,IAAeC,EAAIzoB,EAAK9X,CAAL8X,CAAJyoB,GACvB,OAAOD,CAAP;EAoBJ,YAASigH,CAAT,CAAkB/8I,CAAlB;EAEI,aAASg9I,CAAT,CAAqBr8H,CAArB,EAA8Bs8H,CAA9B;EAEI,YAAMx7I,gBAAgBu7I,EAAtB,EACI,OAAO,IAAIA,CAAJ,CAAgBr8H,CAAhB,EAAyBs8H,CAAzB,CAAP,CAKJr3I,OAAOiM,cAAPjM,CAAsBnE,IAAtBmE,EAA4B,SAA5BA,IAAyCwS,KAAK;EAAa,iBAAOuI,CAAP;aAA3D/a,GAGI5K,MAAMkiJ,iBAANliJ,GACAA,MAAMkiJ,iBAANliJ,CAAwByG,IAAxBzG,EAA8BgiJ,CAA9BhiJ,CADAA,GAGA4K,OAAOiM,cAAPjM,CAAsBnE,IAAtBmE,EAA4B,OAA5BA,IAAuCJ,OAAO,IAAKxK,KAAL,GAAc+N,SAAS,IAArEnD,CANJA,EAQIq3I,KACAJ,EAAMp7I,IAANo7I,EAAYI,CAAZJ,CATJj3I;EAoBJ,aARCo3I,EAAY/2C,SAAZ+2C,GAAwBp3I,OAAOu3I,MAAPv3I,CAAc5K,MAAMirG,SAApBrgG,GAAgCnF,cAAcu8I,GAEvEp3I,OAAOiM,cAAPjM,CAAsBo3I,EAAY/2C,SAAlCrgG,EAA6C,MAA7CA,IAAuDwS,KAAK;EAAa,eAAOpY,CAAP;WAAzE4F,GAEAo3I,EAAY/2C,SAAZ
+2C,CAAsBt6I,QAAtBs6I,GAAiC;EAC7B,aAAOv7I,KAAKzB,IAALyB,GAAY,IAAZA,GAAmBA,KAAKkf,OAA/B;SAGGq8H,CAAP;EA/QJzvC,KAAKoqC,SAALpqC,GAAiB6vC,SAAjB7vC,EAGAA,EAAKuqC,MAALvqC,GAAc8vC,QAHd9vC,EAMAA,EAAK+qC,YAAL/qC,GAAoB+vC,YANpB/vC,EASAA,EAAKgwC,KAALhwC,GAAaiwC,OATbjwC,EAYAA,EAAK6tC,OAAL7tC,GAAekwC,SAZflwC,EAeAA,EAAKguC,IAALhuC,GAAYmwC,MAfZnwC,EAkBAA,EAAKh9F,IAALg9F,GAAYowC,MAlBZpwC,EAqBAA,EAAKouC,QAALpuC,GAAgBqwC,QArBhBrwC,EA6BAA,EAAKswC,UAALtwC,GAAkB3nG,OAAOk4I,MAAPl4I,GAAgBA,OAAOk4I,MAAPl4I,GAAAA,CAAhBA,KA7BlB2nG,EAoCAA,EAAKwwC,WAALxwC,GAAmB3nG,OAAOk4I,MAAPl4I,GAAgBA,OAAOk4I,MAAPl4I,GAAAA,CAAhBA,KApCnB2nG,EA4CAA,EAAKywC,MAALzwC,GAAc+uC,QAAQv+F,iBAAO38C,OAAP28C,IAAkBA,iBAAO38C,OAAP28C,CAAet+B,QAAjCs+B,IAA6CA,iBAAO38C,OAAP28C,CAAet+B,QAAfs+B,CAAwB9qC,IAA7EqpI,CA5Cd/uC,EAoDAA,EAAKtP,SAALsP,GAAiB1wG,OAAOohG,SAAPphG,IAA+C,UAAmB2I,CAAnB;EAC5D,WAAwB,mBAAVA,CAAU,IAAYy4I,SAASz4I,CAATy4I,CAAZ,IAA+BjiJ,KAAKkC,KAALlC,CAAWwJ,CAAXxJ,MAAsBwJ,CAA7E;KArDJ+nG,EA6DAA,EAAK0uC,QAAL1uC,GAAgB,UAAkB/nG,CAAlB;EACZ,WAAwB,mBAAVA,CAAU,IAAYA,aAAiB6Q,MAArD;KA9DJk3F,EAsEAA,EAAK2wC,QAAL3wC,GAAgB,UAAkB/nG,CAAlB;EACZ,WAAOA,KAA0B,mBAAVA,CAAvB;KAvEJ+nG,EAkFAA,EAAK4wC,KAAL5wC,GAQAA,EAAK6wC,KAAL7wC,GAAa,UAAen4F,CAAf,EAAoBwrC,CAApB;EACT,QAAIp7C,IAAQ4P,EAAIwrC,CAAJxrC,CAAZ,CACA,SAAa,QAAT5P,CAAS,KAAQ4P,EAAI2yF,cAAJ3yF,CAAmBwrC,CAAnBxrC,OACO,mBAAV5P,CAAU,KAAalI,MAAMC,OAAND,CAAckI,CAAdlI,IAAuBkI,EAAM3J,MAA7ByB,GAAsCsI,OAAO0O,IAAP1O,CAAYJ,CAAZI,EAAmB/J,UAAU,EAD5G;KA5FJ0xG,EA4GAA,EAAKlgB,MAALkgB,GAAc;EACV;EACI,UAAIlgB,IAASkgB,EAAK6tC,OAAL7tC,CAAa,QAAbA,EAAuBlgB,MAApC,CAEA,OAAOA,EAAO4Y,SAAP5Y,CAAiBgxD,SAAjBhxD,GAA6BA,CAA7BA,GAAiE,IAAxE;EACF,KAJF,CAIE,OAAO1xE,CAAP;EAEE,aAAO,IAAP;;EAPM,GAAA,EA5Gd4xF,EAwHAA,EAAK+wC,YAAL/wC,GAAoB,IAxHpBA,EA2HAA,EAAKgxC,mBAALhxC,GAA2B,IA3H3BA,EAkIAA,EAAKixC,SAALjxC,GAAiB,UAAmBkxC,CAAnB;EAEb,WAA8B,mBAAhBA,CAAgB,GACxBlxC,EAAKlgB,MAALkgB,GACIA,EAAKgxC,mBAALhxC,CAAyBkxC,CAAzBlxC,CADJA,GAEI,IAAIA,EAAKjwG,KAAT,CAAemhJ,CAAf,CAHoB,GAIxBlxC,EAAKlgB,MAALkgB,GACIA,EAAK+wC,YAAL/wC,CAAkBkxC,CAAlBlxC,CADJA,GAE0B,sBAAfztG,UAAe,GAClB2+I,CADkB,GAElB,IAAI3+I,UAAJ,CAAe2+I,CAAf,CARd;KApIJlxC,EAmJAA,EAAKjwG,KAALiwG,GAAmC,sBAAfztG,UAAe,GAAcA,UAAd,GAAsDxC,KAnJzFiwG,EAkKAA,EAAK2uC,IAAL3uC,GAAuCxvD,iBAAO2gG,OAAP3gG,IAA6CA,iBAAO2gG,OAAP3gG,CAAem+F,IAA5Dn+F,IAAoEwvD,EAAK6tC,OAAL7tC,CAAa,MAAbA,CAlK3GA,EAyKAA,EAAKoxC,MAALpxC,GAAc,kBAzKdA,EAgLAA,EAAKqxC,OAALrxC,GAAe,uBAhLfA,EAuLAA,EAAKsxC,OAALtxC,GAAe,4CAvLfA,EA8LAA,EAAKuxC,UAALvxC,GAAkB,UAAoB/nG,CAApB;EACd,WAAOA,IACD+nG,EAAKouC,QAALpuC,CAAc7oG,IAAd6oG,CAAmB/nG,CAAnB+nG,EAA0BkvC,MAA1BlvC,EADC/nG,GAED+nG,EAAKouC,QAALpuC,CAAcwuC,QAFpB;KA/LJxuC,EA0MAA,EAAKwxC,YAALxxC,GAAoB,UAAsBivC,CAAtB,EAA4BJ,CAA5B;EAChB,QAAI4C,IAAOzxC,EAAKouC,QAALpuC,CAAcgvC,QAAdhvC,CAAuBivC,CAAvBjvC,CAAX,CACA,OAAIA,EAAK2uC,IAAL3uC,GACOA,EAAK2uC,IAAL3uC,CAAU0xC,QAAV1xC,CAAmByxC,EAAK9D,EAAxB3tC,EAA4ByxC,EAAK7D,EAAjC5tC,EAAqC6uC,CAArC7uC,CADPA,GAEGyxC,EAAKpD,QAALoD,CAAc1C,QAAQF,CAARE,CAAd0C,CAFP;KA5MJzxC,EAgOAA,EAAKsvC,KAALtvC,GAAasvC,CAhObtvC,EAuOAA,EAAK2xC,OAAL3xC,GAAe,UAAiB9f,CAAjB;EACX,WAAOA,EAAIh9D,MAAJg9D,CAAW,CAAXA,EAAckZ,WAAdlZ,KAA8BA,EAAIlnE,SAAJknE,CAAc,CAAdA,CAArC;KAxOJ8f,EAkRAA,EAAKwvC,QAALxvC,GAAgBwvC,CAlRhBxvC,EAqSAA,EAAK4xC,aAAL5xC,GAAqBwvC,EAAS,eAATA,CArSrBxvC,EAyTAA,EAAK6xC,WAAL7xC,GAAmB,UAAkB8xC,CAAlB;EAEf,SADA,IAAIC,MAAJ,EACS9iJ,IAAI,CAAb,EAAgBA,IAAI6iJ,EAAWxjJ,MAA/B,IAAyCW,CAAzC,EACI8iJ,EAASD,EAAW7iJ,CAAX6iJ,CAATC,IAA0B,CAA1BA,CAOJ,OAAO;EACH,WAAK,IAAIhrI,IAAO1O,OAAO0O,IAAP1O,CAAYnE,IAAZmE,CAAX,EAA8BpJ,IAAI8X,EAAKzY,MAALyY,GAAc,CAArD,EAAwD9X,KAAK,CAA7D,IAAkEA,CAAlE,EACI,IAA0B,MAAtB8iJ,EAAShrI,EAAK9X,CAAL8X,CAAT
grI,CAAsB,SAAuBx5C,MAAlBrkG,KAAK6S,EAAK9X,CAAL8X,CAAL7S,CAAL,IAAsD,SAAlBA,KAAK6S,EAAK9X,CAAL8X,CAAL7S,CAA9D,EACI,OAAO6S,EAAK9X,CAAL8X,CAAP;OAHZ;KAnUJi5F,EAuVAA,EAAKgyC,WAALhyC,GAAmB,UAAkB8xC,CAAlB;EAQf,WAAO,UAASr/I,CAAT;EACH,WAAK,IAAIxD,IAAI,CAAb,EAAgBA,IAAI6iJ,EAAWxjJ,MAA/B,IAAyCW,CAAzC,EACQ6iJ,EAAW7iJ,CAAX6iJ,MAAkBr/I,CAAlBq/I,WACO59I,KAAK49I,EAAW7iJ,CAAX6iJ,CAAL59I,CADP49I;OAFZ;KA/VJ9xC,EAsXAA,EAAKiyC,aAALjyC,KACIkyC,OAAOppI,QACPqpI,OAAOrpI,QACPkuC,OAAOluC,QACPumF,OAAM,GA1XV2Q,EA6XAA,EAAKoyC,UAALpyC,GAAkB;EACd,QAAIlgB,IAASkgB,EAAKlgB,MAAlB,CAEKA,KAMLkgB,EAAK+wC,YAAL/wC,GAAoBlgB,EAAO3oF,IAAP2oF,KAAgBvtF,WAAW4E,IAA3B2oF,IAAmCA,EAAO3oF,IAA1C2oF,IAEhB,UAAqB7nF,CAArB,EAA4Bo6I,CAA5B;EACI,aAAO,IAAIvyD,CAAJ,CAAW7nF,CAAX,EAAkBo6I,CAAlB,CAAP;OAHRryC,EAKAA,EAAKgxC,mBAALhxC,GAA2BlgB,EAAOwyD,WAAPxyD,IAEvB,UAA4B1vF,CAA5B;EACI,aAAO,IAAI0vF,CAAJ,CAAW1vF,CAAX,CAAP;OAdH0vF,IACDkgB,EAAK+wC,YAAL/wC,GAAoBA,EAAKgxC,mBAALhxC,GAA2B,IAD9ClgB;KAhYTkgB;;eCHiBuyC;MAIbC;MAEApE,aAAYpuC,QAAKouC;MACjB7D,SAAYvqC,QAAKuqC;MACjByD,OAAYhuC,QAAKguC,KAWrB,SAASyE,EAAT,CAAY/5I,CAAZ,EAAgBslB,CAAhB,EAAqBlvB,CAArB;EAMIoF,OAAKwE,EAALxE,GAAUwE,CAAVxE,EAMAA,KAAK8pB,GAAL9pB,GAAW8pB,CANX9pB,EAYAA,KAAKm9C,IAALn9C,QAAYqkG,CAZZrkG,EAkBAA,KAAKpF,GAALoF,GAAWpF,CAlBXoF;EAsBJ,UAASw+I,IAAT,cAUSC,KAAT,CAAeC,CAAf;EAMI1+I,OAAK2+I,IAAL3+I,GAAY0+I,EAAOC,IAAnB3+I,EAMAA,KAAK4+I,IAAL5+I,GAAY0+I,EAAOE,IANnB5+I,EAYAA,KAAK8pB,GAAL9pB,GAAW0+I,EAAO50H,GAZlB9pB,EAkBAA,KAAKm9C,IAALn9C,GAAY0+I,EAAOrR,MAlBnBrtI;EA0BJ,UAASq+I,MAAT;EAMIr+I,OAAK8pB,GAAL9pB,GAAW,CAAXA,EAMAA,KAAK2+I,IAAL3+I,GAAY,IAAIu+I,EAAJ,CAAOC,IAAP,EAAa,CAAb,EAAgB,CAAhB,CANZx+I,EAYAA,KAAK4+I,IAAL5+I,GAAYA,KAAK2+I,IAZjB3+I,EAkBAA,KAAKqtI,MAALrtI,GAAc,IAlBdA;EAuEJ,UAAS6+I,SAAT,CAAmBjkJ,CAAnB,EAAwBqxF,CAAxB,EAA6B8vB,CAA7B;EACI9vB,IAAI8vB,CAAJ9vB,IAAiB,MAANrxF,CAAXqxF;EAGJ,UAAS6yD,aAAT,CAAuBlkJ,CAAvB,EAA4BqxF,CAA5B,EAAiC8vB,CAAjC;EACI,SAAOnhH,IAAM,GAAb,GACIqxF,EAAI8vB,GAAJ9vB,IAAmB,MAANrxF,CAAM,GAAM,GAAzBqxF,EACArxF,OAAS,CADTqxF,CAGJA,EAAI8vB,CAAJ9vB,IAAWrxF,CAAXqxF;EAYJ,UAAS8yD,QAAT,CAAkBj1H,CAAlB,EAAuBlvB,CAAvB;EACIoF,OAAK8pB,GAAL9pB,GAAW8pB,CAAX9pB,EACAA,KAAKm9C,IAALn9C,QAAYqkG,CADZrkG,EAEAA,KAAKpF,GAALoF,GAAWpF,CAFXoF;EAgDJ,UAASg/I,aAAT,CAAuBpkJ,CAAvB,EAA4BqxF,CAA5B,EAAiC8vB,CAAjC;EACI,SAAOnhH,EAAI8+I,EAAX,GACIztD,EAAI8vB,GAAJ9vB,IAAsB,MAATrxF,EAAI6+I,EAAK,GAAM,GAA5BxtD,EACArxF,EAAI6+I,EAAJ7+I,IAAUA,EAAI6+I,EAAJ7+I,KAAW,CAAXA,GAAeA,EAAI8+I,EAAJ9+I,IAAU,QAAQ,CAD3CqxF,EAEArxF,EAAI8+I,EAAJ9+I,MAAY,CAFZqxF,CAIJ,OAAOrxF,EAAI6+I,EAAJ7+I,GAAS,GAAhB,GACIqxF,EAAI8vB,GAAJ9vB,IAAsB,MAATrxF,EAAI6+I,EAAK,GAAM,GAA5BxtD,EACArxF,EAAI6+I,EAAJ7+I,GAASA,EAAI6+I,EAAJ7+I,KAAW,CADpBqxF,CAGJA,EAAI8vB,GAAJ9vB,IAAarxF,EAAI6+I,EAAjBxtD;EA2CJ,UAASgzD,YAAT,CAAsBrkJ,CAAtB,EAA2BqxF,CAA3B,EAAgC8vB,CAAhC;EACI9vB,IAAI8vB,CAAJ9vB,IAA8B,MAAdrxF,CAAhBqxF,EACAA,EAAI8vB,IAAM,CAAV9vB,IAAgBrxF,MAAQ,CAARA,GAAc,GAD9BqxF,EAEAA,EAAI8vB,IAAM,CAAV9vB,IAAgBrxF,MAAQ,EAARA,GAAc,GAF9BqxF,EAGAA,EAAI8vB,IAAM,CAAV9vB,IAAgBrxF,MAAQ,EAHxBqxF;EAnKJoyD,QAAO3C,MAAP2C,GAAgBvyC,QAAKlgB,MAALkgB,GACV;EACE,UAAQuyC,OAAO3C,MAAP2C,GAAgB;EACpB,WAAO,IAAIC,YAAJ,EAAP;QADJ;GAFQxyC,GAOV;EACE,SAAO,IAAIuyC,MAAJ,EAAP;GARRA,EAgBAA,OAAOtE,KAAPsE,GAAe,UAAeniJ,CAAf;EACX,SAAO,IAAI4vG,QAAKjwG,KAAT,CAAeK,CAAf,CAAP;GAjBJmiJ,EAsBIvyC,QAAKjwG,KAALiwG,KAAejwG,KAAfiwG,KACAuyC,OAAOtE,KAAPsE,GAAevyC,QAAKh9F,IAALg9F,CAAUuyC,OAAOtE,KAAjBjuC,EAAwBA,QAAKjwG,KAALiwG,CAAWtH,SAAXsH,CAAqB5oG,QAA7C4oG,CADfA,CAtBJuyC,EAiCAA,OAAO75C,SAAP65C,CAAiBa,KAAjBb,GAAyB,UAAc75I,CAAd,EAAkBslB,CAAlB,EAAuBlvB,CAAvB;EAGrB,SAFAoF,KAAK4+I,IAAL5+I,GAAYA,KAAK4+I,IAAL5+I,CAAUm9C,IAAVn9C,GAAiB,IAAIu+I,EAAJ,CAAO/
5I,CAAP,EAAWslB,CAAX,EAAgBlvB,CAAhB,CAA7BoF,EACAA,KAAK8pB,GAAL9pB,IAAY8pB,CADZ9pB,EAEOA,IAAP;GApCJq+I,EAkEAU,SAASv6C,SAATu6C,GAAqB56I,OAAOu3I,MAAPv3I,CAAco6I,GAAG/5C,SAAjBrgG,CAlErBk6I,EAmEAU,SAASv6C,SAATu6C,CAAmBv6I,EAAnBu6I,GAAwBD,aAnExBT,EA0EAA,OAAO75C,SAAP65C,CAAiBc,MAAjBd,GAA0B,UAAsBt6I,CAAtB;EAWtB,SARA/D,KAAK8pB,GAAL9pB,KAAaA,KAAK4+I,IAAL5+I,GAAYA,KAAK4+I,IAAL5+I,CAAUm9C,IAAVn9C,GAAiB,IAAI++I,QAAJ,EACrCh7I,OAAkB,KACT,MAAY,IACpBA,IAAQ,KAARA,GAAoB,CAApBA,GACAA,IAAQ,OAARA,GAAoB,CAApBA,GACAA,IAAQ,SAARA,GAAoB,CAApBA,GACoB,CANgB,EAO1CA,CAP0C,GAOlC+lB,GAPR9pB,EAQOA,IAAP;GArFJq+I,EA8FAA,OAAO75C,SAAP65C,CAAiB/2H,KAAjB+2H,GAAyB,UAAqBt6I,CAArB;EACrB,SAAOA,IAAQ,CAARA,GACD/D,KAAKk/I,KAALl/I,CAAWg/I,aAAXh/I,EAA0B,EAA1BA,EAA8Bk6I,WAASK,UAATL,CAAoBn2I,CAApBm2I,CAA9Bl6I,CADC+D,GAED/D,KAAKm/I,MAALn/I,CAAY+D,CAAZ/D,CAFN;GA/FJq+I,EAyGAA,OAAO75C,SAAP65C,CAAiBe,MAAjBf,GAA0B,UAAsBt6I,CAAtB;EACtB,SAAO/D,KAAKm/I,MAALn/I,EAAa+D,KAAS,CAATA,GAAaA,KAAS,QAAQ,CAA3C/D,CAAP;GA1GJq+I,EAgIAA,OAAO75C,SAAP65C,CAAiBgB,MAAjBhB,GAA0B,UAAsBt6I,CAAtB;EACtB,MAAIw5I,IAAOrD,WAASj3I,IAATi3I,CAAcn2I,CAAdm2I,CAAX,CACA,OAAOl6I,KAAKk/I,KAALl/I,CAAWg/I,aAAXh/I,EAA0Bu9I,EAAKnjJ,MAALmjJ,EAA1Bv9I,EAAyCu9I,CAAzCv9I,CAAP;GAlIJq+I,EA4IAA,OAAO75C,SAAP65C,CAAiBiB,KAAjBjB,GAAyBA,OAAO75C,SAAP65C,CAAiBgB,MA5I1ChB,EAoJAA,OAAO75C,SAAP65C,CAAiBkB,MAAjBlB,GAA0B,UAAsBt6I,CAAtB;EACtB,MAAIw5I,IAAOrD,WAASj3I,IAATi3I,CAAcn2I,CAAdm2I,EAAqBE,QAArBF,EAAX,CACA,OAAOl6I,KAAKk/I,KAALl/I,CAAWg/I,aAAXh/I,EAA0Bu9I,EAAKnjJ,MAALmjJ,EAA1Bv9I,EAAyCu9I,CAAzCv9I,CAAP;GAtJJq+I,EA8JAA,OAAO75C,SAAP65C,CAAiBx/I,IAAjBw/I,GAAwB,UAAoBt6I,CAApB;EACpB,SAAO/D,KAAKk/I,KAALl/I,CAAW6+I,SAAX7+I,EAAsB,CAAtBA,EAAyB+D,IAAQ,CAARA,GAAY,CAArC/D,CAAP;GA/JJq+I,EA8KAA,OAAO75C,SAAP65C,CAAiBmB,OAAjBnB,GAA2B,UAAuBt6I,CAAvB;EACvB,SAAO/D,KAAKk/I,KAALl/I,CAAWi/I,YAAXj/I,EAAyB,CAAzBA,EAA4B+D,MAAU,CAAtC/D,CAAP;GA/KJq+I,EAwLAA,OAAO75C,SAAP65C,CAAiBoB,QAAjBpB,GAA4BA,OAAO75C,SAAP65C,CAAiBmB,OAxL7CnB,EAgMAA,OAAO75C,SAAP65C,CAAiBqB,OAAjBrB,GAA2B,UAAuBt6I,CAAvB;EACvB,MAAIw5I,IAAOrD,WAASj3I,IAATi3I,CAAcn2I,CAAdm2I,CAAX,CACA,OAAOl6I,KAAKk/I,KAALl/I,CAAWi/I,YAAXj/I,EAAyB,CAAzBA,EAA4Bu9I,EAAK9D,EAAjCz5I,EAAqCk/I,KAArCl/I,CAA2Ci/I,YAA3Cj/I,EAAyD,CAAzDA,EAA4Du9I,EAAK7D,EAAjE15I,CAAP;GAlMJq+I,EA4MAA,OAAO75C,SAAP65C,CAAiBsB,QAAjBtB,GAA4BA,OAAO75C,SAAP65C,CAAiBqB,OA5M7CrB,EAoNAA,OAAO75C,SAAP65C,CAAiBvC,KAAjBuC,GAAyB,UAAqBt6I,CAArB;EACrB,SAAO/D,KAAKk/I,KAALl/I,CAAW8rG,QAAKgwC,KAALhwC,CAAW6rC,YAAtB33I,EAAoC,CAApCA,EAAuC+D,CAAvC/D,CAAP;GArNJq+I,EA8NAA,OAAO75C,SAAP65C,CAAiB/gG,MAAjB+gG,GAA0B,UAAsBt6I,CAAtB;EACtB,SAAO/D,KAAKk/I,KAALl/I,CAAW8rG,QAAKgwC,KAALhwC,CAAWmtC,aAAtBj5I,EAAqC,CAArCA,EAAwC+D,CAAxC/D,CAAP;GA/NJq+I,CAkOA,IAAIuB,aAAa9zC,QAAKjwG,KAALiwG,CAAWtH,SAAXsH,CAAqBh1F,GAArBg1F,GACX,UAAwBlxG,CAAxB,EAA6BqxF,CAA7B,EAAkC8vB,CAAlC;EACE9vB,IAAIn1E,GAAJm1E,CAAQrxF,CAARqxF,EAAa8vB,CAAb9vB;GAFS6f,GAKX,UAAwBlxG,CAAxB,EAA6BqxF,CAA7B,EAAkC8vB,CAAlC;EACE,OAAK,IAAIhhH,IAAI,CAAb,EAAgBA,IAAIH,EAAIR,MAAxB,IAAkCW,CAAlC,EACIkxF,EAAI8vB,IAAMhhH,CAAVkxF,IAAerxF,EAAIG,CAAJH,CAAfqxF;GAPZ,CAeAoyD,OAAO75C,SAAP65C,CAAiBv7F,KAAjBu7F,GAAyB,UAAqBt6I,CAArB;EACrB,MAAI+lB,IAAM/lB,EAAM3J,MAAN2J,KAAiB,CAA3B,CACA,KAAK+lB,CAAL,EACI,OAAO9pB,KAAKk/I,KAALl/I,CAAW6+I,SAAX7+I,EAAsB,CAAtBA,EAAyB,CAAzBA,CAAP,CACJ,IAAI8rG,QAAK0uC,QAAL1uC,CAAc/nG,CAAd+nG,CAAJ,EAA0B;EACtB,QAAI7f,IAAMoyD,OAAOtE,KAAPsE,CAAav0H,IAAMusH,OAAOj8I,MAAPi8I,CAActyI,CAAdsyI,CAAnBgI,CAAV,CACAhI,OAAOO,MAAPP,CAActyI,CAAdsyI,EAAqBpqD,CAArBoqD,EAA0B,CAA1BA,GACAtyI,IAAQkoF,CADRoqD;EAGJ,UAAOr2I,KAAKm/I,MAALn/I,CAAY8pB,CAAZ9pB,EAAiBk/I,KAAjBl/I,CAAuB4/I,UAAvB5/I,EAAmC8pB,CAAnC9pB,EAAwC+D,CA
AxC/D,CAAP;GATJq+I,EAiBAA,OAAO75C,SAAP65C,CAAiB/H,MAAjB+H,GAA0B,UAAsBt6I,CAAtB;EACtB,MAAI+lB,IAAMgwH,KAAK1/I,MAAL0/I,CAAY/1I,CAAZ+1I,CAAV,CACA,OAAOhwH,IACD9pB,KAAKm/I,MAALn/I,CAAY8pB,CAAZ9pB,EAAiBk/I,KAAjBl/I,CAAuB85I,KAAK/0I,KAA5B/E,EAAmC8pB,CAAnC9pB,EAAwC+D,CAAxC/D,CADC8pB,GAED9pB,KAAKk/I,KAALl/I,CAAW6+I,SAAX7+I,EAAsB,CAAtBA,EAAyB,CAAzBA,CAFN;GAnBJq+I,EA6BAA,OAAO75C,SAAP65C,CAAiBwB,IAAjBxB,GAAwB;EAIpB,SAHAr+I,KAAKqtI,MAALrtI,GAAc,IAAIy+I,KAAJ,CAAUz+I,IAAV,CAAdA,EACAA,KAAK2+I,IAAL3+I,GAAYA,KAAK4+I,IAAL5+I,GAAY,IAAIu+I,EAAJ,CAAOC,IAAP,EAAa,CAAb,EAAgB,CAAhB,CADxBx+I,EAEAA,KAAK8pB,GAAL9pB,GAAW,CAFXA,EAGOA,IAAP;GAjCJq+I,EAwCAA,OAAO75C,SAAP65C,CAAiByB,KAAjBzB,GAAyB;EAUrB,SATIr+I,KAAKqtI,MAALrtI,IACAA,KAAK2+I,IAAL3+I,GAAcA,KAAKqtI,MAALrtI,CAAY2+I,IAA1B3+I,EACAA,KAAK4+I,IAAL5+I,GAAcA,KAAKqtI,MAALrtI,CAAY4+I,IAD1B5+I,EAEAA,KAAK8pB,GAAL9pB,GAAcA,KAAKqtI,MAALrtI,CAAY8pB,GAF1B9pB,EAGAA,KAAKqtI,MAALrtI,GAAcA,KAAKqtI,MAALrtI,CAAYm9C,IAJ1Bn9C,KAMAA,KAAK2+I,IAAL3+I,GAAYA,KAAK4+I,IAAL5+I,GAAY,IAAIu+I,EAAJ,CAAOC,IAAP,EAAa,CAAb,EAAgB,CAAhB,CAAxBx+I,EACAA,KAAK8pB,GAAL9pB,GAAY,CAPZA,GASGA,IAAP;GAlDJq+I,EAyDAA,OAAO75C,SAAP65C,CAAiB0B,MAAjB1B,GAA0B;EACtB,MAAIM,IAAO3+I,KAAK2+I,IAAhB;EAAA,MACIC,IAAO5+I,KAAK4+I,IADhB;EAAA,MAEI90H,IAAO9pB,KAAK8pB,GAFhB,CASA,OANA9pB,KAAK8/I,KAAL9/I,GAAam/I,MAAbn/I,CAAoB8pB,CAApB9pB,GACI8pB,MACA9pB,KAAK4+I,IAAL5+I,CAAUm9C,IAAVn9C,GAAiB2+I,EAAKxhG,IAAtBn9C,EACAA,KAAK4+I,IAAL5+I,GAAY4+I,CADZ5+I,EAEAA,KAAK8pB,GAAL9pB,IAAY8pB,CAHZA,CADJ9pB,EAMOA,IAAP;GAnEJq+I,EA0EAA,OAAO75C,SAAP65C,CAAiBj3G,MAAjBi3G,GAA0B;EAItB,OAHA,IAAIM,IAAO3+I,KAAK2+I,IAAL3+I,CAAUm9C,IAArB,EACI8uC,IAAOjsF,KAAKhB,WAALgB,CAAiB+5I,KAAjB/5I,CAAuBA,KAAK8pB,GAA5B9pB,CADX,EAEI+7G,IAAO,CACX,EAAO4iC,CAAP,GACIA,EAAKn6I,EAALm6I,CAAQA,EAAK/jJ,GAAb+jJ,EAAkB1yD,CAAlB0yD,EAAuB5iC,CAAvB4iC,GACA5iC,KAAO4iC,EAAK70H,GADZ60H,EAEAA,IAAOA,EAAKxhG,IAFZwhG,CAKJ,OAAO1yD,CAAP;GApFJoyD,EAuFAA,OAAOH,UAAPG,GAAoB,UAAS2B,CAAT;EAChB1B,iBAAe0B,CAAf1B;GAxFJD,CChXA,oBAAiBC,cAAjB,EAICA,eAAa95C,SAAb85C,GAAyBn6I,OAAOu3I,MAAPv3I,CAAck6I,OAAO75C,SAArBrgG,GAAiCnF,cAAcs/I,eAIzE,IAAI1yD,WAASkgB,QAAKlgB,MAAlB,CAQA,SAAS0yD,cAAT;EACID,SAAOp/I,IAAPo/I,CAAYr+I,IAAZq+I;EAQJC,gBAAavE,KAAbuE,GAAqB,UAAsBpiJ,CAAtB;EACjB,UAAQoiJ,eAAavE,KAAbuE,GAAqBxyC,QAAKgxC,qBAAqB5gJ,EAAvD;GADJoiJ,CAIA,IAAI2B,mBAAmBr0D,YAAUA,SAAO4Y,SAAP5Y,YAA4BvtF,UAAtCutF,IAAkF,UAA9BA,SAAO4Y,SAAP5Y,CAAiB90E,GAAjB80E,CAAqBrtF,IAAzEqtF,GACjB,UAA8BhxF,CAA9B,EAAmCqxF,CAAnC,EAAwC8vB,CAAxC;EACE9vB,IAAIn1E,GAAJm1E,CAAQrxF,CAARqxF,EAAa8vB,CAAb9vB;GAFeL,GAMjB,UAA+BhxF,CAA/B,EAAoCqxF,CAApC,EAAyC8vB,CAAzC;EACE,MAAInhH,EAAI6hD,IAAR,EACI7hD,EAAI6hD,IAAJ7hD,CAASqxF,CAATrxF,EAAcmhH,CAAdnhH,EAAmB,CAAnBA,EAAsBA,EAAIR,MAA1BQ,EADJ,KAEK,KAAK,IAAIG,IAAI,CAAb,EAAgBA,IAAIH,EAAIR,MAAxB,GACD6xF,EAAI8vB,GAAJ9vB,IAAarxF,EAAIG,GAAJH,CAAbqxF;GAVZ,CA0BA,SAASi0D,iBAAT,CAA2BtlJ,CAA3B,EAAgCqxF,CAAhC,EAAqC8vB,CAArC;EACQnhH,IAAIR,MAAJQ,GAAa,EAAbA,GACAkxG,QAAKguC,IAALhuC,CAAU/mG,KAAV+mG,CAAgBlxG,CAAhBkxG,EAAqB7f,CAArB6f,EAA0BiQ,CAA1BjQ,CADAlxG,GAGAqxF,EAAI2wD,SAAJ3wD,CAAcrxF,CAAdqxF,EAAmB8vB,CAAnB9vB,CAHArxF;EAXR0jJ,gBAAa95C,SAAb85C,CAAuBx7F,KAAvBw7F,GAA+B,UAA4Bv6I,CAA5B;EACvB+nG,UAAK0uC,QAAL1uC,CAAc/nG,CAAd+nG,MACA/nG,IAAQ+nG,QAAK+wC,YAAL/wC,CAAkB/nG,CAAlB+nG,EAAyB,QAAzBA,CADRA,EAEJ,IAAIhiF,IAAM/lB,EAAM3J,MAAN2J,KAAiB,CAA3B,CAIA,OAHA/D,KAAKm/I,MAALn/I,CAAY8pB,CAAZ9pB,GACI8pB,KACA9pB,KAAKk/I,KAALl/I,CAAWigJ,gBAAXjgJ,EAA6B8pB,CAA7B9pB,EAAkC+D,CAAlC/D,CAFJA,EAGOA,IAAP;GAPJs+I,EAoBAA,eAAa95C,SAAb85C,CAAuBhI,MAAvBgI,GAAgC,UAA6Bv6I,CAA7B;EAC5B,MAAI+lB,IAAM8hE,SAAOF,UAAPE,CAAkB7nF,CAAlB6nF,CAAV,CAIA,OAHA5rF,KAAKm/I,MAALn/I,CAAY8pB,CAAZ9pB,GAC
I8pB,KACA9pB,KAAKk/I,KAALl/I,CAAWkgJ,iBAAXlgJ,EAA8B8pB,CAA9B9pB,EAAmC+D,CAAnC/D,CAFJA,EAGOA,IAAP;GAzBJs+I,CC7CA,aAAiB6B,MAAjB;EAAA,IAIIC,YAJJ;EAAA,IAMIlG,aAAYpuC,QAAKouC,QANrB;EAAA,IAOIJ,SAAYhuC,QAAKguC,IAPrB,CAUA,SAASuG,eAAT,CAAyBC,CAAzB,EAAiCC,CAAjC;EACI,SAAOC,WAAW,yBAAyBF,EAAOvkC,GAAhC,GAAsC,KAAtC,IAA+CwkC,KAAe,CAA9D,IAAmE,KAAnE,GAA2ED,EAAOx2H,GAA7F02H,CAAP;EASJ,UAASL,MAAT,CAAgB36I,CAAhB;EAMIxF,OAAKisF,GAALjsF,GAAWwF,CAAXxF,EAMAA,KAAK+7G,GAAL/7G,GAAW,CANXA,EAYAA,KAAK8pB,GAAL9pB,GAAWwF,EAAOpL,MAZlB4F;EAeJ,KAAIygJ,eAAqC,sBAAfpiJ,UAAe,GACnC,UAA4BmH,CAA5B;EACE,MAAIA,aAAkBnH,UAAlBmH,IAAgC3J,MAAMC,OAAND,CAAc2J,CAAd3J,CAApC,EACI,OAAO,IAAIskJ,MAAJ,CAAW36I,CAAX,CAAP,CACJ,MAAMjM,MAAM,gBAANA,CAAN;GAJiC,GAOnC,UAAsBiM,CAAtB;EACE,MAAI3J,MAAMC,OAAND,CAAc2J,CAAd3J,CAAJ,EACI,OAAO,IAAIskJ,MAAJ,CAAW36I,CAAX,CAAP,CACJ,MAAMjM,MAAM,gBAANA,CAAN;GAVR,CA4EA,SAASmnJ,cAAT;EAEI,MAAInD,IAAO,IAAIrD,UAAJ,CAAa,CAAb,EAAgB,CAAhB,CAAX;EAAA,MACIn/I,IAAI,CADR,CAEA,MAAIiF,KAAK8pB,GAAL9pB,GAAWA,KAAK+7G,GAAhB/7G,GAAsB,EAA1B,EAaO;EACH,WAAOjF,IAAI,CAAX,IAAgBA,CAAhB,EAAmB;EAEf,UAAIiF,KAAK+7G,GAAL/7G,IAAYA,KAAK8pB,GAArB,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,CAAN,CAGJ,IADA9C,EAAK9D,EAAL8D,IAAWA,EAAK9D,EAAL8D,IAAgC,MAArBv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAAiC,IAAJjF,OAAW,CAA9DwiJ,EACIv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EACI,OAAOu9I,CAAP;EAIR,YADAA,EAAK9D,EAAL8D,IAAWA,EAAK9D,EAAL8D,IAAkC,MAAvBv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,MAAmC,IAAJjF,OAAW,CAAhEwiJ,EACOA,CAAP;EAxBA,UAAOxiJ,IAAI,CAAX,IAAgBA,CAAhB,EAGI,IADAwiJ,EAAK9D,EAAL8D,IAAWA,EAAK9D,EAAL8D,IAAgC,MAArBv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAAiC,IAAJjF,OAAW,CAA9DwiJ,EACIv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EACI,OAAOu9I,CAAP,CAKR,IAFAA,EAAK9D,EAAL8D,IAAWA,EAAK9D,EAAL8D,IAAgC,MAArBv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAA6B,QAAQ,CAA3Du9I,EACAA,EAAK7D,EAAL6D,IAAWA,EAAK7D,EAAL6D,IAAgC,MAArBv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAA8B,OAAO,CAD3Du9I,EAEIv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EACI,OAAOu9I,CAAP,CAgBR,IAfIxiJ,IAAI,CAAJA,EAeAiF,KAAK8pB,GAAL9pB,GAAWA,KAAK+7G,GAAhB/7G,GAAsB,CAA1B;EACI,WAAOjF,IAAI,CAAX,IAAgBA,CAAhB,EAGI,IADAwiJ,EAAK7D,EAAL6D,IAAWA,EAAK7D,EAAL6D,IAAgC,MAArBv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAAiC,IAAJjF,CAAI,GAAI,OAAO,CAAlEwiJ,EACIv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EACI,OAAOu9I,CAAP;KALZ,MAQI,OAAOxiJ,IAAI,CAAX,IAAgBA,CAAhB,EAAmB;EAEf,QAAIiF,KAAK+7G,GAAL/7G,IAAYA,KAAK8pB,GAArB,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,CAAN,CAGJ,IADA9C,EAAK7D,EAAL6D,IAAWA,EAAK7D,EAAL6D,IAAgC,MAArBv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAAiC,IAAJjF,CAAI,GAAI,OAAO,CAAlEwiJ,EACIv9I,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EACI,OAAOu9I,CAAP;EAIZ,SAAMhkJ,MAAM,yBAANA,CAAN;EAkCJ,UAASonJ,eAAT,CAAyB10D,CAAzB,EAA8B1oF,CAA9B;EACI,UAAQ0oF,EAAI1oF,IAAM,CAAV0oF,IACAA,EAAI1oF,IAAM,CAAV0oF,KAAgB,CADhBA,GAEAA,EAAI1oF,IAAM,CAAV0oF,KAAgB,EAFhBA,GAGAA,EAAI1oF,IAAM,CAAV0oF,KAAgB,QAAQ,CAHhC;EAkCJ,UAAS20D,WAAT;EAGI,MAAI5gJ,KAAK+7G,GAAL/7G,GAAW,CAAXA,GAAeA,KAAK8pB,GAAxB,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,EAAsB,CAAtBA,CAAN,CAEJ,OAAO,IAAInG,UAAJ,CAAayG,gBAAgB3gJ,KAAKisF,GAArB00D,EAA0B3gJ,KAAK+7G,GAAL/7G,IAAY,CAAtC2gJ,CAAb,EAAuDA,gBAAgB3gJ,KAAKisF,GAArB00D,EAA0B3gJ,KAAK+7G,GAAL/7G,IAAY,CAAtC2gJ,CAAvD,CAAP;EArLJR,QAAOzE,MAAPyE,GAAgBr0C,QAAKlgB,MAALkgB,GACV,UAA6BtmG,CAA7B;EACE,UAAQ26I,OAAOzE,MAAPyE,GAAgB,UAAuB36I,CAAvB;EACpB,WAAOsmG,QAAKlgB,MAALkgB,CAAY+0C,QAAZ/0C,CAAqBtmG,CAArBsmG,IACD,IAAIs0C,YAAJ,CAAiB56I,CAAjB,CADCsmG,GAGD20C,aAAaj7I,CAAbi7I,CAHN;OAIDj7I,EALH;GAFQsmG,GAUV20
C,YAVNN,EAYAA,OAAO37C,SAAP27C,CAAiBW,MAAjBX,GAA0Br0C,QAAKjwG,KAALiwG,CAAWtH,SAAXsH,CAAqB5oG,QAArB4oG,IAA4DA,QAAKjwG,KAALiwG,CAAWtH,SAAXsH,CAAqBjuG,KAZ3GsiJ,EAmBAA,OAAO37C,SAAP27C,CAAiBhB,MAAjBgB,GAA0B;EACtB,MAAIp8I,IAAQ,UAAZ,CACA,OAAO;EACuD,QAA1DA,KAAuC,MAArB/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,OAAqC,CAAvD+D,EAA8D/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EAAgC,OAAO+D,CAAP,CAChC,IAA1DA,KAASA,KAA8B,MAArB/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAA8B,OAAO,CAAvD+D,EAA8D/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EAAgC,OAAO+D,CAAP,CAChC,IAA1DA,KAASA,KAA8B,MAArB/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAA6B,QAAQ,CAAvD+D,EAA8D/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EAAgC,OAAO+D,CAAP,CAChC,IAA1DA,KAASA,KAA8B,MAArB/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAA6B,QAAQ,CAAvD+D,EAA8D/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EAAgC,OAAO+D,CAAP,CAChC,IAA1DA,KAASA,KAA+B,KAAtB/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAd/7G,MAA6B,QAAQ,CAAvD+D,EAA8D/D,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,IAAuB,GAA3B,EAAgC,OAAO+D,CAAP,CAG1F,KAAK/D,KAAK+7G,GAAL/7G,IAAY,KAAKA,KAAK8pB,GAA3B,EAEI,MADA9pB,KAAK+7G,GAAL/7G,GAAWA,KAAK8pB,GAAhB9pB,EACMqgJ,gBAAgBrgJ,IAAhBqgJ,EAAsB,EAAtBA,CAAN,CAEJ,OAAOt8I,CAAP;KAZJ;EAFsB,CAAA,EAnB1Bo8I,EAyCAA,OAAO37C,SAAP27C,CAAiB74H,KAAjB64H,GAAyB;EACrB,SAAuB,IAAhBngJ,KAAKm/I,MAALn/I,EAAP;GA1CJmgJ,EAiDAA,OAAO37C,SAAP27C,CAAiBf,MAAjBe,GAA0B;EACtB,MAAIp8I,IAAQ/D,KAAKm/I,MAALn/I,EAAZ,CACA,OAAO+D,MAAU,CAAVA,KAAwB,IAARA,EAAhBA,GAA6B,CAApC;GAnDJo8I,EAwIAA,OAAO37C,SAAP27C,CAAiBthJ,IAAjBshJ,GAAwB;EACpB,SAAyB,MAAlBngJ,KAAKm/I,MAALn/I,EAAP;GAzIJmgJ,EAuJAA,OAAO37C,SAAP27C,CAAiBX,OAAjBW,GAA2B;EAGvB,MAAIngJ,KAAK+7G,GAAL/7G,GAAW,CAAXA,GAAeA,KAAK8pB,GAAxB,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,EAAsB,CAAtBA,CAAN,CAEJ,OAAOM,gBAAgB3gJ,KAAKisF,GAArB00D,EAA0B3gJ,KAAK+7G,GAAL/7G,IAAY,CAAtC2gJ,CAAP;GA7JJR,EAoKAA,OAAO37C,SAAP27C,CAAiBV,QAAjBU,GAA4B;EAGxB,MAAIngJ,KAAK+7G,GAAL/7G,GAAW,CAAXA,GAAeA,KAAK8pB,GAAxB,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,EAAsB,CAAtBA,CAAN,CAEJ,OAAkD,IAA3CM,gBAAgB3gJ,KAAKisF,GAArB00D,EAA0B3gJ,KAAK+7G,GAAL/7G,IAAY,CAAtC2gJ,CAAP;GA1KJR,EA6MAA,OAAO37C,SAAP27C,CAAiBrE,KAAjBqE,GAAyB;EAGrB,MAAIngJ,KAAK+7G,GAAL/7G,GAAW,CAAXA,GAAeA,KAAK8pB,GAAxB,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,EAAsB,CAAtBA,CAAN,CAEJ,IAAIt8I,IAAQ+nG,QAAKgwC,KAALhwC,CAAW+rC,WAAX/rC,CAAuB9rG,KAAKisF,GAA5B6f,EAAiC9rG,KAAK+7G,GAAtCjQ,CAAZ,CAEA,OADA9rG,KAAK+7G,GAAL/7G,IAAY,CAAZA,EACO+D,CAAP;GArNJo8I,EA6NAA,OAAO37C,SAAP27C,CAAiB7iG,MAAjB6iG,GAA0B;EAGtB,MAAIngJ,KAAK+7G,GAAL/7G,GAAW,CAAXA,GAAeA,KAAK8pB,GAAxB,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,EAAsB,CAAtBA,CAAN,CAEJ,IAAIt8I,IAAQ+nG,QAAKgwC,KAALhwC,CAAWqtC,YAAXrtC,CAAwB9rG,KAAKisF,GAA7B6f,EAAkC9rG,KAAK+7G,GAAvCjQ,CAAZ,CAEA,OADA9rG,KAAK+7G,GAAL/7G,IAAY,CAAZA,EACO+D,CAAP;GArOJo8I,EA4OAA,OAAO37C,SAAP27C,CAAiBr9F,KAAjBq9F,GAAyB;EACrB,MAAI/lJ,IAAS4F,KAAKm/I,MAALn/I,EAAb;EAAA,MACIb,IAASa,KAAK+7G,GADlB;EAAA,MAEIx4G,IAASvD,KAAK+7G,GAAL/7G,GAAW5F,CAFxB,CAKA,IAAImJ,IAAMvD,KAAK8pB,GAAf,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,EAAsBjmJ,CAAtBimJ,CAAN,CAGJ,OADArgJ,KAAK+7G,GAAL/7G,IAAY5F,CAAZ4F,EACInE,MAAMC,OAAND,CAAcmE,KAAKisF,GAAnBpwF,IACOmE,KAAKisF,GAALjsF,CAASnC,KAATmC,CAAeb,CAAfa,EAAsBuD,CAAtBvD,CADPnE,GAEGsD,MAAUoE,CAAVpE,GACD,IAAIa,KAAKisF,GAALjsF,CAAShB,WAAb,CAAyB,CAAzB,CADCG,GAEDa,KAAK8gJ,MAAL9gJ,CAAYf,IAAZe,CAAiBA,KAAKisF,GAAtBjsF,EAA2Bb,CAA3Ba,EAAkCuD,CAAlCvD,CAJN;GAtPJmgJ,EAiQAA,OAAO37C,SAAP27C,CAAiB7J,MAAjB6J,GAA0B;EACtB,MAAIr9F,IAAQ9iD,KAAK8iD,KAAL9iD,EAAZ,CACA,OAAO85I,OAAKr0I,IAALq0I,CAAUh3F,CAAVg3F,EAAiB,CAAjBA,EAAoBh3F,EAAM1oD,MAA1B0/I,CAAP;GAnQJqG,EA2QAA,O
AAO37C,SAAP27C,CAAiBY,IAAjBZ,GAAwB,UAAc/lJ,CAAd;EACpB,MAAsB,mBAAXA,CAAX,EAAgC;EAE5B,QAAI4F,KAAK+7G,GAAL/7G,GAAW5F,CAAX4F,GAAoBA,KAAK8pB,GAA7B,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,EAAsBjmJ,CAAtBimJ,CAAN,CACJrgJ,KAAK+7G,GAAL/7G,IAAY5F,CAAZ4F;KAJJ,MAMI;EAEI,QAAIA,KAAK+7G,GAAL/7G,IAAYA,KAAK8pB,GAArB,EACI,MAAMu2H,gBAAgBrgJ,IAAhBqgJ,CAAN;KAHR,QAIgC,MAAvBrgJ,KAAKisF,GAALjsF,CAASA,KAAK+7G,GAAL/7G,EAATA,CAJT,EAMJ,OAAOA,IAAP;GAxRJmgJ,EAgSAA,OAAO37C,SAAP27C,CAAiBa,QAAjBb,GAA4B,UAASc,CAAT;EACxB,UAAQA,CAAR,GACI,KAAK,CAAL;EACIjhJ,WAAK+gJ,IAAL/gJ,GACA,MACJ,KAAK,CAAL;EACIA,WAAK+gJ,IAAL/gJ,CAAU,CAAVA,EACA,MACJ,KAAK,CAAL;EACIA,WAAK+gJ,IAAL/gJ,CAAUA,KAAKm/I,MAALn/I,EAAVA,EACA,MACJ,KAAK,CAAL;EACI,eAAG;EACC,YAAuC,MAAlCihJ,IAA2B,IAAhBjhJ,KAAKm/I,MAALn/I,EAAuB,CAAvC,EACI,MACJA,KAAKghJ,QAALhhJ,CAAcihJ,CAAdjhJ;EAEJ,aACJ,KAAK,CAAL;EACIA,WAAK+gJ,IAAL/gJ,CAAU,CAAVA,EACA,MAGJ;EACI,YAAMzG,MAAM,uBAAuB0nJ,CAAvB,GAAkC,aAAlC,GAAkDjhJ,KAAK+7G,GAA7DxiH,CAAN,CAvBR,CAyBA,OAAOyG,IAAP;GA1TJmgJ,EA6TAA,OAAOjC,UAAPiC,GAAoB,UAASe,CAAT;EAChBd,iBAAec,CAAfd,CAEA,IAAI57I,IAAKsnG,QAAK2uC,IAAL3uC,GAAY,QAAZA,GAAkD,UAA3D,CACAA,QAAKsvC,KAALtvC,CAAWq0C,OAAO37C,SAAlBsH,IAEIwzC,OAAO;EACH,aAAOoB,eAAezhJ,IAAfyhJ,CAAoB1gJ,IAApB0gJ,EAA0Bl8I,CAA1Bk8I,GAA8B,CAA9BA,CAAP;SAGJrB,QAAQ;EACJ,aAAOqB,eAAezhJ,IAAfyhJ,CAAoB1gJ,IAApB0gJ,EAA0Bl8I,CAA1Bk8I,GAA8B,CAA9BA,CAAP;SAGJnB,QAAQ;EACJ,aAAOmB,eAAezhJ,IAAfyhJ,CAAoB1gJ,IAApB0gJ,EAA0BrG,QAA1BqG,GAAqCl8I,CAArCk8I,GAAyC,CAAzCA,CAAP;SAGJhB,SAAS;EACL,aAAOkB,YAAY3hJ,IAAZ2hJ,CAAiB5gJ,IAAjB4gJ,EAAuBp8I,CAAvBo8I,GAA2B,CAA3BA,CAAP;SAGJjB,UAAU;EACN,aAAOiB,YAAY3hJ,IAAZ2hJ,CAAiB5gJ,IAAjB4gJ,EAAuBp8I,CAAvBo8I,GAA2B,CAA3BA,CAAP;SAnBR90C;GAjUJq0C,CC7DA,oBAAiBC,cAAjB,CAeA,SAASA,cAAT,CAAsB56I,CAAtB;EACI26I,SAAOlhJ,IAAPkhJ,CAAYngJ,IAAZmgJ,EAAkB36I,CAAlB26I;IAZHC,eAAa57C,SAAb47C,GAAyBj8I,OAAOu3I,MAAPv3I,CAAcg8I,OAAO37C,SAArBrgG,GAAiCnF,cAAcohJ,gBAsBrEt0C,QAAKlgB,MAALkgB,KACAs0C,eAAa57C,SAAb47C,CAAuBU,MAAvBV,GAAgCt0C,QAAKlgB,MAALkgB,CAAYtH,SAAZsH,CAAsBjuG,KADtDiuG,GAMJs0C,eAAa57C,SAAb47C,CAAuB9J,MAAvB8J,GAAgC;EAC5B,MAAIt2H,IAAM9pB,KAAKm/I,MAALn/I,EAAV,CACA,OAAOA,KAAKisF,GAALjsF,CAASmhJ,SAATnhJ,CAAmBA,KAAK+7G,GAAxB/7G,EAA6BA,KAAK+7G,GAAL/7G,GAAWzF,KAAKE,GAALF,CAASyF,KAAK+7G,GAAL/7G,GAAW8pB,CAApBvvB,EAAyByF,KAAK8pB,GAA9BvvB,CAAxCyF,CAAP;IClCJ,cAAiBohJ,OAAjB,CAwCA,SAASA,OAAT,CAAiBC,CAAjB,EAA0BC,CAA1B,EAA4CC,CAA5C;EAEI,MAAuB,qBAAZF,CAAX,EACI,MAAM/rC,UAAU,4BAAVA,CAAN,CAEJxJ,QAAK+qC,YAAL/qC,CAAkB7sG,IAAlB6sG,CAAuB9rG,IAAvB8rG,GAMA9rG,KAAKqhJ,OAALrhJ,GAAeqhJ,CANfv1C,EAYA9rG,KAAKshJ,gBAALthJ,GAAwB66I,QAAQyG,CAARzG,CAZxB/uC,EAkBA9rG,KAAKuhJ,iBAALvhJ,GAAyB66I,QAAQ0G,CAAR1G,CAlBzB/uC;IAxCHs1C,QAAQ58C,SAAR48C,GAAoBj9I,OAAOu3I,MAAPv3I,CAAc2nG,QAAK+qC,YAAL/qC,CAAkBtH,SAAhCrgG,GAA4CnF,cAAcoiJ,SAwE/EA,QAAQ58C,SAAR48C,CAAkBI,OAAlBJ,GAA4B,SAASI,CAAT,CAAiBt4H,CAAjB,EAAyBu4H,CAAzB,EAAsCC,CAAtC,EAAoDC,CAApD,EAA6D3iG,CAA7D;EAExB,OAAK2iG,CAAL,EACI,MAAMrsC,UAAU,2BAAVA,CAAN,CAEJ,IAAIssC,IAAO5hJ,IAAX,CACA,KAAKg/C,CAAL,EACI,OAAO8sD,QAAKoqC,SAALpqC,CAAe01C,CAAf11C,EAAwB81C,CAAxB91C,EAA8B5iF,CAA9B4iF,EAAsC21C,CAAtC31C,EAAmD41C,CAAnD51C,EAAiE61C,CAAjE71C,CAAP,CAEJ,IAAK81C,EAAKP,OAAV,EAKA;EACI,WAAOO,EAAKP,OAALO,CACH14H,CADG04H,EAEHH,EAAYG,EAAKN,gBAALM,GAAwB,iBAAxBA,GAA4C,QAAxDH,EAAkEE,CAAlEF,EAA2Er6G,MAA3Eq6G,EAFGG,EAGH,UAAqB3iI,CAArB,EAA0B44E,CAA1B;EAEI,UAAI54E,CAAJ,EAEI,OADA2iI,EAAKzK,IAALyK,CAAU,OAAVA,EAAmB3iI,CAAnB2iI,EAAwB14H,CAAxB04H,GACO5iG,EAAS//B,CAAT+/B,CAAP,CAGJ,IAAiB,SAAb64C,CAAJ,EAAA;EAKA,cAAMA,aAAoB6pD,EAA1B,EACI;EACI7pD,cAAW6pD,EAAaE,EAAKL,iBAALK,GAAyB,iBAAzBA,GAA6C,QAA1DF,EAAoE7pD,CAApE6pD,CAAX7pD;EACF,SAFF,CAEE,OAAO54E,CAAP;EAEE,iBADA2iI,EAAKzK,IAALy
K,CAAU,OAAVA,EAAmB3iI,CAAnB2iI,EAAwB14H,CAAxB04H,GACO5iG,EAAS//B,CAAT+/B,CAAP;EAKR,gBADA4iG,EAAKzK,IAALyK,CAAU,MAAVA,EAAkB/pD,CAAlB+pD,EAA4B14H,CAA5B04H,GACO5iG,EAAS,IAATA,EAAe64C,CAAf74C,CAAP;EAdI4iG,SAAKr+I,GAALq+I,EAA0B,CAA1BA;OAXLA,CAAP;EA4BF,GA7BF,CA6BE,OAAO3iI,CAAP;EAGE,WAFA2iI,EAAKzK,IAALyK,CAAU,OAAVA,EAAmB3iI,CAAnB2iI,EAAwB14H,CAAxB04H,QACAnkJ,WAAW;EAAauhD,QAAS//B,CAAT+/B;OAAxBvhD,EAA0C,CAA1CA,CACA;KArCJ,MACIA,WAAW;EAAauhD,MAASzlD,MAAM,eAANA,CAATylD;KAAxBvhD,EAA6D,CAA7DA;KA6CR2jJ,QAAQ58C,SAAR48C,CAAkB79I,GAAlB69I,GAAwB,UAAaS,CAAb;EAOpB,SANI7hJ,KAAKqhJ,OAALrhJ,KACK6hJ,KACD7hJ,KAAKqhJ,OAALrhJ,CAAa,IAAbA,EAAmB,IAAnBA,EAAyB,IAAzBA,CADC6hJ,EAEL7hJ,KAAKqhJ,OAALrhJ,GAAe,IAFV6hJ,EAGL7hJ,KAAKm3I,IAALn3I,CAAU,KAAVA,EAAiBi3I,GAAjBj3I,EAJAA,GAMGA,IAAP;;ECtIMw9C,IA6BN4jG,OA7BM5jG,GA6BIm+F,OA7BJn+F;;;;ECNV,MACIskG,IAAWtkG,CADf,CA4BA,SAASukG,CAAT;EACID,MAAS3B,MAAT2B,CAAgB5D,UAAhB4D,CAA2BA,EAAS1B,YAApC0B,GACAA,EAASh2C,IAATg2C,CAAc5D,UAAd4D,EADAA;EApBJA,KAAShuC,KAATguC,GAAiB,SAAjBA,EAGAA,EAASzD,MAATyD,GAAwBnG,MAHxBmG,EAIAA,EAASxD,YAATwD,GAAwBlG,aAJxBkG,EAKAA,EAAS3B,MAAT2B,GAAwBjG,MALxBiG,EAMAA,EAAS1B,YAAT0B,GAAwB/F,aANxB+F,EASAA,EAASh2C,IAATg2C,GAAwB9F,OATxB8F,EAUAA,EAASE,GAATF,GAAwB7F,KAVxB6F,EAWAA,EAASG,KAATH,GAAwB5F,KAXxB4F,EAYAA,EAASC,SAATD,GAAwBC,CAZxBD,EAyBAA,EAASzD,MAATyD,CAAgB5D,UAAhB4D,CAA2BA,EAASxD,YAApCwD,CAzBAA,EA0BAC,GA1BAD;;kBCNiBnG;;;;MCEbuG,UAAUC,UAAUhC;MAAQiC,QAAQD,UAAUr2C;MAE9Cu2C,QAAQF,UAAUF,KAAVE,CAAyBG,OAAzBH,KAA+BA,UAAUF,KAAVE,CAAyBG,OAAzBH,KAA/BA,EAEZE,MAAME,UAANF,GAAmB;EAEf,MAuCQG,CAvCR;EAAA,MAuCyBtkJ,CAvCzB;EAAA,MAAIqkJ,MAAJ,CA4qDA,OA1qDAA,EAAWE,GAAXF,GAAiB;EAEb,aAASE,CAAT,CAAahxG,CAAb;EACI,UAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA4B5B,YAxBAgxG,EAAIj+C,SAAJi+C,CAAcE,OAAdF,GAAwB,EAAxBA,EACAA,EAAIj+C,SAAJi+C,CAAc1+I,KAAd0+I,GAAsBL,MAAMrF,SAANqF,GAAAA,CADtBK,EAGAA,EAAI7L,MAAJ6L,GAAa,UAAgBxnJ,CAAhB,EAAmB8G,CAAnB;EACH9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBI,GAArB,EACjD,EAAOxnJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEivE,OAAFjvE,GAAYz4E,EAAEq7I,MAAFr7I,EAAZy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE3vE,KAAF2vE,GAAUz4E,EAAE6nD,KAAF7nD,EAAVy4E,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EARJ;EAYJ,cAAOy4E,CAAP;OArBJ+uE,EAwBOA,CAAP;EAjCa,GAAA,EAAjBF,EAoCAA,EAAWK,QAAXL,IACQC,MAAAA,GAAiBtkJ,IAASiG,OAAOu3I,MAAPv3I,CAAcq+I,CAAdr+I,GACvBq+I,EAAW,CAAXA,IAAgB,gBAAgB,CADnCA,EAEJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,UAAvBtkJ,IAAqC,CAFjCskJ,EAGJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,WAAvBtkJ,IAAsC,CAHlCskJ,EAIJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,UAAvBtkJ,IAAqC,CAJjCskJ,EAKJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,UAAvBtkJ,IAAqC,CALjCskJ,EAMJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,UAAvBtkJ,IAAqC,CANjCskJ,EAOJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,SAAvBtkJ,IAAoC,CAPhCskJ,EAQJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,WAAvBtkJ,IAAsC,CARlCskJ,EASJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,cAAvBtkJ,IAAyC,CATrCskJ,EAUJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,UAAvBtkJ,IAAqC,CAVjCskJ,EAWJtkJ,EAAOskJ,EAAW,EAAXA,IAAiB,SAAxBtkJ,IAAqC,EAXjCskJ,EAYJtkJ,EAAOskJ,EAAW,EAAXA,IAAiB,UAAxBtkJ,IAAsC,EAZlCskJ,EAaJtkJ,EAAOskJ,EAAW,EAAXA,IAAiB,WAAxBtkJ,IAAuC,EAbnCskJ,EAcJtkJ,EAAOskJ,EAAW,EAAXA,IAAiB,WAAxBtkJ,IAAuC,EAdnCskJ,EAeJtkJ,EAAOskJ,EAAW,EAAXA,I
AAiB,aAAxBtkJ,IAAyC,EAfrCskJ,EAgBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,cAAzBtkJ,IAA2C,GAhBvCskJ,EAiBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,eAAzBtkJ,IAA4C,GAjBxCskJ,EAkBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,cAAzBtkJ,IAA2C,GAlBvCskJ,EAmBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,cAAzBtkJ,IAA2C,GAnBvCskJ,EAoBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,cAAzBtkJ,IAA2C,GApBvCskJ,EAqBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,aAAzBtkJ,IAA0C,GArBtCskJ,EAsBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,eAAzBtkJ,IAA4C,GAtBxCskJ,EAuBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,kBAAzBtkJ,IAA+C,GAvB3CskJ,EAwBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,cAAzBtkJ,IAA2C,GAxBvCskJ,EAyBJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,aAAzBtkJ,IAA0C,GAzBtCskJ,EA0BJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,cAAzBtkJ,IAA2C,GA1BvCskJ,EA2BJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,eAAzBtkJ,IAA4C,GA3BxCskJ,EA4BJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,eAAzBtkJ,IAA4C,GA5BxCskJ,EA6BJtkJ,EAAOskJ,EAAW,GAAXA,IAAkB,iBAAzBtkJ,IAA8C,GA7B1CskJ,EA8BGtkJ,CA/BXqkJ,CApCAA,EAsEAA,EAAWM,WAAXN,GAAyB;EAErB,aAASM,CAAT,CAAqBpxG,CAArB;EAEI,UADAzxC,KAAK8M,GAAL9M,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAkE5B,YA9DAoxG,EAAYr+C,SAAZq+C,CAAsB/1I,GAAtB+1I,GAA4BT,MAAMhG,UAAlCyG,EACAA,EAAYr+C,SAAZq+C,CAAsBC,WAAtBD,IAAoC,CADpCA,EAGAA,EAAYjM,MAAZiM,GAAqB,UAAgB5nJ,CAAhB,EAAmB8G,CAAnB;EACX9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBQ,WAArB,EACjD,EAAO5nJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACUw6D,cAAE5mE,GAAF4mE,IAASA,EAAE5mE,GAAF4mE,CAAMt5E,MAAfs5E,KACFA,EAAE5mE,GAAF4mE,KADEA,GAENA,EAAE5mE,GAAF4mE,CAAM13E,IAAN03E,CAAW2uE,MAAME,UAANF,CAAiBQ,WAAjBR,CAA6BU,GAA7BV,CAAiCzL,MAAjCyL,CAAwCpnJ,CAAxConJ,EAA2CpnJ,EAAEkkJ,MAAFlkJ,EAA3ConJ,CAAX3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACIA,cAAEovE,WAAFpvE,GAAgBz4E,EAAE4D,IAAF5D,EAAhBy4E,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAVJ;EAcJ,cAAOy4E,CAAP;OAvBJmvE,EA0BAA,EAAYE,GAAZF,GAAkB;EAEd,eAASE,CAAT,CAAatxG,CAAb;EACI,YAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA4B5B,cAxBAsxG,EAAIv+C,SAAJu+C,CAAc7mJ,IAAd6mJ,GAAqBX,MAAM3H,IAAN2H,GAAaA,MAAM3H,IAAN2H,CAAW5E,QAAX4E,CAAoB,CAApBA,EAAsB,CAAtBA,GAAwB,CAAxBA,CAAbA,GAA8C,CAAnEW,EACAA,EAAIv+C,SAAJu+C,CAAcxkJ,IAAdwkJ,GAAqB,EADrBA,EAGAA,EAAInM,MAAJmM,GAAa,UAAgB9nJ,CAAhB,EAAmB8G,CAAnB;EACH9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBQ,WAAjBR,CAA6BU,GAAjC,EACjD,EAAO9nJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,gBAAEx3E,IAAFw3E,GAASz4E,EAAEqkJ,KAAFrkJ,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAEn1E,IAAFm1E,GAASz4E,EAAEq7I,MAAFr7I,EAATy4E,CACA,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EARJ;EAYJ,gBAAOy4E,CAAP;SArBJqvE,EAwBOA,CAAP;EAjCc,KAAA,EA1BlBF,EA8DOA,CAAP;EAxEqB,GAAA,EAtEzBN,EAiJAA,EAAWn+I,MAAXm+I,GAAoB;EAEhB,aAASn+I,CAAT,CAAgBqtC,CAAhB;EAUI,UATAzxC,KAAKgjJ,QAALhjJ,KAAAA,EACAA,KAAKijJ,SAALjjJ,KADAA,EAEAA,KAAKkjJ,MAALljJ,KAFAA,EAG
AA,KAAKmjJ,SAALnjJ,KAHAA,EAIAA,KAAKojJ,WAALpjJ,KAJAA,EAKAA,KAAKqjJ,QAALrjJ,KALAA,EAMAA,KAAKsjJ,OAALtjJ,KANAA,EAOAA,KAAKujJ,SAALvjJ,KAPAA,EAQAA,KAAKwjJ,SAALxjJ,KARAA,EASIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAkI5B,YA9HArtC,EAAOogG,SAAPpgG,CAAiBnG,KAAjBmG,GAAyB,CAAzBA,EACAA,EAAOogG,SAAPpgG,CAAiBq/I,WAAjBr/I,GAA+B,IAD/BA,EAEAA,EAAOogG,SAAPpgG,CAAiBs/I,aAAjBt/I,GAAiC,CAFjCA,EAGAA,EAAOogG,SAAPpgG,CAAiBu/I,aAAjBv/I,GAAiCg+I,MAAMrF,SAANqF,GAAAA,CAHjCh+I,EAIAA,EAAOogG,SAAPpgG,CAAiB4+I,QAAjB5+I,GAA4Bg+I,MAAMhG,UAJlCh4I,EAKAA,EAAOogG,SAAPpgG,CAAiB6+I,SAAjB7+I,GAA6Bg+I,MAAMhG,UALnCh4I,EAMAA,EAAOogG,SAAPpgG,CAAiB8+I,MAAjB9+I,GAA0Bg+I,MAAMhG,UANhCh4I,EAOAA,EAAOogG,SAAPpgG,CAAiB++I,SAAjB/+I,GAA6Bg+I,MAAMhG,UAPnCh4I,EAQAA,EAAOogG,SAAPpgG,CAAiBg/I,WAAjBh/I,GAA+Bg+I,MAAMhG,UARrCh4I,EASAA,EAAOogG,SAAPpgG,CAAiBi/I,QAAjBj/I,GAA4Bg+I,MAAMhG,UATlCh4I,EAUAA,EAAOogG,SAAPpgG,CAAiBk/I,OAAjBl/I,GAA2Bg+I,MAAMhG,UAVjCh4I,EAWAA,EAAOogG,SAAPpgG,CAAiBm/I,SAAjBn/I,GAA6Bg+I,MAAMhG,UAXnCh4I,EAYAA,EAAOogG,SAAPpgG,CAAiBo/I,SAAjBp/I,GAA6Bg+I,MAAMhG,UAZnCh4I,EAcAA,EAAOwyI,MAAPxyI,GAAgB,UAAgBnJ,CAAhB,EAAmB8G,CAAnB;EACN9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBj+I,MAArB,EACjD,EAAOnJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEz1E,KAAFy1E,GAAUz4E,EAAEqsB,KAAFrsB,EAAVy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE+vE,WAAF/vE,GAAgB2uE,MAAME,UAANF,CAAiBQ,WAAjBR,CAA6BzL,MAA7ByL,CAAoCpnJ,CAApConJ,EAAuCpnJ,EAAEkkJ,MAAFlkJ,EAAvConJ,CAAhB3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEgwE,aAAFhwE,GAAkBz4E,EAAEqsB,KAAFrsB,EAAlBy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEiwE,aAAFjwE,GAAkBz4E,EAAE6nD,KAAF7nD,EAAlBy4E,CACA,MACJ,KAAK,CAAL;EAGI,gBAFMA,EAAEsvE,QAAFtvE,IAAcA,EAAEsvE,QAAFtvE,CAAWt5E,MAAzBs5E,KACFA,EAAEsvE,QAAFtvE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADA,IAAImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAEsvE,QAAFtvE,CAAW13E,IAAX03E,CAAgBz4E,EAAE6gJ,KAAF7gJ,EAAhBy4E,EAHR,KAKIA,EAAEsvE,QAAFtvE,CAAW13E,IAAX03E,CAAgBz4E,EAAE6gJ,KAAF7gJ,EAAhBy4E,EACJ,MACJ,KAAK,CAAL;EAGI,gBAFMA,EAAEuvE,SAAFvvE,IAAeA,EAAEuvE,SAAFvvE,CAAYt5E,MAA3Bs5E,KACFA,EAAEuvE,SAAFvvE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAEuvE,SAAFvvE,CAAY13E,IAAZ03E,CAAiBz4E,EAAEqiD,MAAFriD,EAAjBy4E,EAHR,KAKIA,EAAEuvE,SAAFvvE,CAAY13E,IAAZ03E,CAAiBz4E,EAAEqiD,MAAFriD,EAAjBy4E,EACJ,MACJ,KAAK,CAAL;EAGI,gBAFMA,EAAEwvE,MAAFxvE,IAAYA,EAAEwvE,MAAFxvE,CAASt5E,MAArBs5E,KACFA,EAAEwvE,MAAFxvE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAEwvE,MAAFxvE,CAAS13E,IAAT03E,CAAcz4E,EAAEqsB,KAAFrsB,EAAdy4E,EAHR,KAKIA,EAAEwvE,MAAFxvE,CAAS13E,IAAT03E,CAAcz4E,EAAEqsB,KAAFrsB,EAAdy4E,EACJ,MACJ,KAAK,CAAL;EACUA,cAAEyvE,SAAFzvE,IAAeA,EAAEyvE,SAAFzvE,CAAYt5E,MAA3Bs5E,KACFA,EAAEyvE,SAAFzvE,KADEA,GAENA,EAAEyvE,SAAFzvE,CAAY13E,IAAZ03E,CAAiBz4E,EAAE6nD,KAAF7nD,EAAjBy4E,CAFMA,CAGN,MACJ,KAAK,CAAL;EAGI,gBAFMA,EAAE0vE,WAAF1vE,IAAiBA,EAAE0vE,WAAF1vE,CAAct5E,MAA/Bs5E,KACFA,EAAE0vE,WAAF1vE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,K
AAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE0vE,WAAF1vE,CAAc13E,IAAd03E,CAAmBz4E,EAAE6gJ,KAAF7gJ,EAAnBy4E,EAHR,KAKIA,EAAE0vE,WAAF1vE,CAAc13E,IAAd03E,CAAmBz4E,EAAE6gJ,KAAF7gJ,EAAnBy4E,EACJ,MACJ,KAAK,EAAL;EAGI,gBAFMA,EAAE2vE,QAAF3vE,IAAcA,EAAE2vE,QAAF3vE,CAAWt5E,MAAzBs5E,KACFA,EAAE2vE,QAAF3vE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE2vE,QAAF3vE,CAAW13E,IAAX03E,CAAgBz4E,EAAEqkJ,KAAFrkJ,EAAhBy4E,EAHR,KAKIA,EAAE2vE,QAAF3vE,CAAW13E,IAAX03E,CAAgBz4E,EAAEqkJ,KAAFrkJ,EAAhBy4E,EACJ,MACJ,KAAK,EAAL;EAGI,gBAFMA,EAAE4vE,OAAF5vE,IAAaA,EAAE4vE,OAAF5vE,CAAUt5E,MAAvBs5E,KACFA,EAAE4vE,OAAF5vE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE4vE,OAAF5vE,CAAU13E,IAAV03E,CAAez4E,EAAE4D,IAAF5D,EAAfy4E,EAHR,KAKIA,EAAE4vE,OAAF5vE,CAAU13E,IAAV03E,CAAez4E,EAAE4D,IAAF5D,EAAfy4E,EACJ,MACJ,KAAK,EAAL;EAGI,gBAFMA,EAAE6vE,SAAF7vE,IAAeA,EAAE6vE,SAAF7vE,CAAYt5E,MAA3Bs5E,KACFA,EAAE6vE,SAAF7vE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE6vE,SAAF7vE,CAAY13E,IAAZ03E,CAAiBz4E,EAAEkkJ,MAAFlkJ,EAAjBy4E,EAHR,KAKIA,EAAE6vE,SAAF7vE,CAAY13E,IAAZ03E,CAAiBz4E,EAAEkkJ,MAAFlkJ,EAAjBy4E,EACJ,MACJ,KAAK,EAAL;EAGI,gBAFMA,EAAE8vE,SAAF9vE,IAAeA,EAAE8vE,SAAF9vE,CAAYt5E,MAA3Bs5E,KACFA,EAAE8vE,SAAF9vE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE8vE,SAAF9vE,CAAY13E,IAAZ03E,CAAiBz4E,EAAEokJ,MAAFpkJ,EAAjBy4E,EAHR,KAKIA,EAAE8vE,SAAF9vE,CAAY13E,IAAZ03E,CAAiBz4E,EAAEokJ,MAAFpkJ,EAAjBy4E,EACJ,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAnGJ;EAuGJ,cAAOy4E,CAAP;OA3HJtvE,EA8HOA,CAAP;EAhJgB,GAAA,EAjJpBm+I,EAoSAA,EAAWqB,SAAXrB,GAAuB;EAEnB,aAASqB,CAAT,CAAmBnyG,CAAnB;EACI,UAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAe5B,SAAIoyG,CAAJ,CA4JA,OAvKAD,EAAUp/C,SAAVo/C,CAAoBzwI,IAApBywI,GAA2B,IAA3BA,EACAA,EAAUp/C,SAAVo/C,CAAoBlnI,CAApBknI,GAAwBxB,MAAMrF,SAANqF,GAAAA,CADxBwB,EAEAA,EAAUp/C,SAAVo/C,CAAoB7oJ,CAApB6oJ,GAAwBxB,MAAM3H,IAAN2H,GAAaA,MAAM3H,IAAN2H,CAAW5E,QAAX4E,CAAoB,CAApBA,EAAsB,CAAtBA,GAAwB,CAAxBA,CAAbA,GAA8C,CAFtEwB,EAGAA,EAAUp/C,SAAVo/C,CAAoB7kJ,CAApB6kJ,GAAwB,CAHxBA,EAIAA,EAAUp/C,SAAVo/C,CAAoB5oJ,CAApB4oJ,IAAwB,CAJxBA,EAKAA,EAAUp/C,SAAVo/C,CAAoB9pI,IAApB8pI,GAA2B,CAL3BA,EAMAA,EAAUp/C,SAAVo/C,CAAoB3nJ,KAApB2nJ,GAA4B,IAN5BA,EAOAA,EAAUp/C,SAAVo/C,CAAoB3wI,MAApB2wI,GAA6B,IAP7BA,EAQAA,EAAUp/C,SAAVo/C,CAAoBE,WAApBF,GAAkC,EARlCA,EASAA,EAAUp/C,SAAVo/C,CAAoB/mH,IAApB+mH,GAA2B,IAT3BA,EAaAz/I,OAAOiM,cAAPjM,CAAsBy/I,EAAUp/C,SAAhCrgG,EAA2C,OAA3CA,IACIwS,KAAKyrI,MAAMzE,WAANyE,CAAkByB,KAAgB,QAAQ,KAAK,KAAK,KAAK,KAAK,QAAQ,SAAS,UAAU,eAAe,OAAxGzB,GACLtrI,KAAKsrI,MAAMtE,WAANsE,CAAkByB,CAAlBzB,GAFTj+I,CAbAy/I,EAkBAA,EAAUhN,MAAVgN,GAAmB,UAAgB3oJ,CAAhB,EAAmB8G,CAAnB;EACT9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBuB,SAArB,EACjD,EAAO3oJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEvgE,IAAFugE,GAAS2uE,MAAME,UAANF,CAAiBuB,SAAjBvB,
CAA2B0B,SAA3B1B,CAAqCzL,MAArCyL,CAA4CpnJ,CAA5ConJ,EAA+CpnJ,EAAEkkJ,MAAFlkJ,EAA/ConJ,CAAT3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEh3D,CAAFg3D,GAAMz4E,EAAE6nD,KAAF7nD,EAANy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE34E,CAAF24E,GAAMz4E,EAAEqkJ,KAAFrkJ,EAANy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE30E,CAAF20E,GAAMz4E,EAAE6gJ,KAAF7gJ,EAANy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE14E,CAAF04E,GAAMz4E,EAAE4D,IAAF5D,EAANy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE55D,IAAF45D,GAASz4E,EAAEqsB,KAAFrsB,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEz3E,KAAFy3E,GAAU2uE,MAAME,UAANF,CAAiBQ,WAAjBR,CAA6BzL,MAA7ByL,CAAoCpnJ,CAApConJ,EAAuCpnJ,EAAEkkJ,MAAFlkJ,EAAvConJ,CAAV3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEzgE,MAAFygE,GAAW2uE,MAAME,UAANF,CAAiBj+I,MAAjBi+I,CAAwBzL,MAAxByL,CAA+BpnJ,CAA/BonJ,EAAkCpnJ,EAAEkkJ,MAAFlkJ,EAAlConJ,CAAX3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEowE,WAAFpwE,GAAgBz4E,EAAEq7I,MAAFr7I,EAAhBy4E,CACA,MACJ,KAAK,EAAL;EACIA,cAAE72C,IAAF62C,GAAS2uE,MAAME,UAANF,CAAiB2B,YAAjB3B,CAA8BzL,MAA9ByL,CAAqCpnJ,CAArConJ,EAAwCpnJ,EAAEkkJ,MAAFlkJ,EAAxConJ,CAAT3uE,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAhCJ;EAoCJ,cAAOy4E,CAAP;OA5DJkwE,EA+DAA,EAAUG,SAAVH,GAAsB;EAElB,eAASG,CAAT,CAAmBtyG,CAAnB;EASI,YARAzxC,KAAK0c,CAAL1c,KAAAA,EACAA,KAAKjF,CAALiF,KADAA,EAEAA,KAAKjB,CAALiB,KAFAA,EAGAA,KAAKhF,CAALgF,KAHAA,EAIAA,KAAK8Z,IAAL9Z,KAJAA,EAKAA,KAAK/D,KAAL+D,KALAA,EAMAA,KAAKiT,MAALjT,KANAA,EAOAA,KAAK68B,IAAL78B,KAPAA,EAQIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAwF5B,cApFAsyG,EAAUv/C,SAAVu/C,CAAoBrnI,CAApBqnI,GAAwB3B,MAAMhG,UAA9B2H,EACAA,EAAUv/C,SAAVu/C,CAAoBhpJ,CAApBgpJ,GAAwB3B,MAAMhG,UAD9B2H,EAEAA,EAAUv/C,SAAVu/C,CAAoBhlJ,CAApBglJ,GAAwB3B,MAAMhG,UAF9B2H,EAGAA,EAAUv/C,SAAVu/C,CAAoB/oJ,CAApB+oJ,GAAwB3B,MAAMhG,UAH9B2H,EAIAA,EAAUv/C,SAAVu/C,CAAoBjqI,IAApBiqI,GAA2B3B,MAAMhG,UAJjC2H,EAKAA,EAAUv/C,SAAVu/C,CAAoB9nJ,KAApB8nJ,GAA4B3B,MAAMhG,UALlC2H,EAMAA,EAAUv/C,SAAVu/C,CAAoB9wI,MAApB8wI,GAA6B3B,MAAMhG,UANnC2H,EAOAA,EAAUv/C,SAAVu/C,CAAoBlnH,IAApBknH,GAA2B3B,MAAMhG,UAPjC2H,EASAA,EAAUnN,MAAVmN,GAAmB,UAAgB9oJ,CAAhB,EAAmB8G,CAAnB;EACT9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBuB,SAAjBvB,CAA2B0B,SAA/B,EACjD,EAAO9oJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACUw6D,gBAAEh3D,CAAFg3D,IAAOA,EAAEh3D,CAAFg3D,CAAIt5E,MAAXs5E,KACFA,EAAEh3D,CAAFg3D,KADEA,GAENA,EAAEh3D,CAAFg3D,CAAI13E,IAAJ03E,CAASz4E,EAAE6nD,KAAF7nD,EAATy4E,CAFMA,CAGN,MACJ,KAAK,CAAL;EAGI,kBAFMA,EAAE34E,CAAF24E,IAAOA,EAAE34E,CAAF24E,CAAIt5E,MAAXs5E,KACFA,EAAE34E,CAAF24E,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADA,IAAImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE34E,CAAF24E,CAAI13E,IAAJ03E,CAASz4E,EAAEqkJ,KAAFrkJ,EAATy4E,EAHR,KAKIA,EAAE34E,CAAF24E,CAAI13E,IAAJ03E,CAASz4E,EAAEqkJ,KAAFrkJ,EAATy4E,EACJ,MACJ,KAAK,CAAL;EAGI,kBAFMA,EAAE30E,CAAF20E,IAAOA,EAAE30E,CAAF20E,CAAIt5E,MAAXs5E,KACFA,EAAE30E,CAAF20E,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE30E,CAAF20E,CAAI13E,IAAJ03E,CAASz4E,EAAE6gJ,KAAF7gJ,EAATy4E,EAHR,KAKIA,EAAE30E,CAAF20E,CAAI13E,IAAJ03E,CAASz4E,EAAE6gJ,KAAF7gJ,EAATy4E,EACJ,MACJ,KAAK,CAAL;EAGI,kBAFMA,EAAE14E,CAAF04E,IAAOA,EAAE14E,CAA
F04E,CAAIt5E,MAAXs5E,KACFA,EAAE14E,CAAF04E,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE14E,CAAF04E,CAAI13E,IAAJ03E,CAASz4E,EAAE4D,IAAF5D,EAATy4E,EAHR,KAKIA,EAAE14E,CAAF04E,CAAI13E,IAAJ03E,CAASz4E,EAAE4D,IAAF5D,EAATy4E,EACJ,MACJ,KAAK,CAAL;EAGI,kBAFMA,EAAE55D,IAAF45D,IAAUA,EAAE55D,IAAF45D,CAAOt5E,MAAjBs5E,KACFA,EAAE55D,IAAF45D,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE55D,IAAF45D,CAAO13E,IAAP03E,CAAYz4E,EAAEqsB,KAAFrsB,EAAZy4E,EAHR,KAKIA,EAAE55D,IAAF45D,CAAO13E,IAAP03E,CAAYz4E,EAAEqsB,KAAFrsB,EAAZy4E,EACJ,MACJ,KAAK,CAAL;EACUA,gBAAEz3E,KAAFy3E,IAAWA,EAAEz3E,KAAFy3E,CAAQt5E,MAAnBs5E,KACFA,EAAEz3E,KAAFy3E,KADEA,GAENA,EAAEz3E,KAAFy3E,CAAQ13E,IAAR03E,CAAa2uE,MAAME,UAANF,CAAiBQ,WAAjBR,CAA6BzL,MAA7ByL,CAAoCpnJ,CAApConJ,EAAuCpnJ,EAAEkkJ,MAAFlkJ,EAAvConJ,CAAb3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACUA,gBAAEzgE,MAAFygE,IAAYA,EAAEzgE,MAAFygE,CAASt5E,MAArBs5E,KACFA,EAAEzgE,MAAFygE,KADEA,GAENA,EAAEzgE,MAAFygE,CAAS13E,IAAT03E,CAAc2uE,MAAME,UAANF,CAAiBj+I,MAAjBi+I,CAAwBzL,MAAxByL,CAA+BpnJ,CAA/BonJ,EAAkCpnJ,EAAEkkJ,MAAFlkJ,EAAlConJ,CAAd3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACUA,gBAAE72C,IAAF62C,IAAUA,EAAE72C,IAAF62C,CAAOt5E,MAAjBs5E,KACFA,EAAE72C,IAAF62C,KADEA,GAENA,EAAE72C,IAAF62C,CAAO13E,IAAP03E,CAAY2uE,MAAME,UAANF,CAAiB2B,YAAjB3B,CAA8BzL,MAA9ByL,CAAqCpnJ,CAArConJ,EAAwCpnJ,EAAEkkJ,MAAFlkJ,EAAxConJ,CAAZ3uE,CAFMA,CAGN,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EA9DJ;EAkEJ,gBAAOy4E,CAAP;SAjFJqwE,EAoFOA,CAAP;EArGkB,KAAA,EA/DtBH,EAuKOA,CAAP;EAhLmB,GAAA,EApSvBrB,EAudAA,EAAWyB,YAAXzB,GAA0B;EAEtB,aAASyB,CAAT,CAAsBvyG,CAAtB;EAEI,UADAzxC,KAAKikJ,IAALjkJ,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAiC5B,YA7BAuyG,EAAax/C,SAAbw/C,CAAuBzlJ,IAAvBylJ,GAA8B,EAA9BA,EACAA,EAAax/C,SAAbw/C,CAAuBC,IAAvBD,GAA8B5B,MAAM9F,WADpC0H,EAGAA,EAAapN,MAAboN,GAAsB,UAAgB/oJ,CAAhB,EAAmB8G,CAAnB;EACZ9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAsFwU,CAAtF,EAAI0a,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiB2B,YAArB,EACjD,EAAO/oJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEn1E,IAAFm1E,GAASz4E,EAAEq7I,MAAFr7I,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIz4E,cAAE8lJ,IAAF9lJ,GAAS8gH,GAAT9gH,IACIy4E,EAAEuwE,IAAFvwE,KAAW0uE,MAAM9F,WAAjB5oE,KACAA,EAAEuwE,IAAFvwE,KADAA,CADJz4E,EAGAwU,IAAIxU,EAAEq7I,MAAFr7I,EAHJA,EAIAA,EAAE8gH,GAAF9gH,EAJAA,EAKAy4E,EAAEuwE,IAAFvwE,CAAOjkE,CAAPikE,IAAY2uE,MAAME,UAANF,CAAiBuB,SAAjBvB,CAA2BzL,MAA3ByL,CAAkCpnJ,CAAlConJ,EAAqCpnJ,EAAEkkJ,MAAFlkJ,EAArConJ,CALZpnJ,CAMA,MACJ;EACIA,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAbJ;EAiBJ,cAAOy4E,CAAP;OA1BJswE,EA6BOA,CAAP;EAvCsB,GAAA,EAvd1BzB,EAigBAA,EAAW2B,OAAX3B,GAAqB;EAEjB,aAAS2B,CAAT,CAAiBzyG,CAAjB;EAGI,UAFAzxC,KAAK2R,KAAL3R,KAAAA,EACAA,KAAKikJ,IAALjkJ,KADAA,EAEIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA+C5B,YA3CAyyG,EAAQ1/C,SAAR0/C,CAAkB3lJ,IAAlB2lJ,GAAyB,EAAzBA,EACAA,EAAQ1/C,SAAR0/C,C
AAkB1+H,EAAlB0+H,GAAuB,EADvBA,EAEAA,EAAQ1/C,SAAR0/C,CAAkBvyI,KAAlBuyI,GAA0B9B,MAAMhG,UAFhC8H,EAGAA,EAAQ1/C,SAAR0/C,CAAkBC,MAAlBD,GAA2B,EAH3BA,EAIAA,EAAQ1/C,SAAR0/C,CAAkBD,IAAlBC,GAAyB9B,MAAM9F,WAJ/B4H,EAMAA,EAAQtN,MAARsN,GAAiB,UAAgBjpJ,CAAhB,EAAmB8G,CAAnB;EACP9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAiFwU,CAAjF,EAAI0a,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiB6B,OAArB,EACjD,EAAOjpJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEn1E,IAAFm1E,GAASz4E,EAAEq7I,MAAFr7I,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEluD,EAAFkuD,GAAOz4E,EAAEq7I,MAAFr7I,EAAPy4E,CACA,MACJ,KAAK,CAAL;EACUA,cAAE/hE,KAAF+hE,IAAWA,EAAE/hE,KAAF+hE,CAAQt5E,MAAnBs5E,KACFA,EAAE/hE,KAAF+hE,KADEA,GAENA,EAAE/hE,KAAF+hE,CAAQ13E,IAAR03E,CAAaz4E,EAAEq7I,MAAFr7I,EAAby4E,CAFMA,CAGN,MACJ,KAAK,CAAL;EACIA,cAAEywE,MAAFzwE,GAAWz4E,EAAEq7I,MAAFr7I,EAAXy4E,CACA,MACJ,KAAK,CAAL;EACIz4E,cAAE8lJ,IAAF9lJ,GAAS8gH,GAAT9gH,IACIy4E,EAAEuwE,IAAFvwE,KAAW0uE,MAAM9F,WAAjB5oE,KACAA,EAAEuwE,IAAFvwE,KADAA,CADJz4E,EAGAwU,IAAIxU,EAAEq7I,MAAFr7I,EAHJA,EAIAA,EAAE8gH,GAAF9gH,EAJAA,EAKAy4E,EAAEuwE,IAAFvwE,CAAOjkE,CAAPikE,IAAY2uE,MAAME,UAANF,CAAiBuB,SAAjBvB,CAA2BzL,MAA3ByL,CAAkCpnJ,CAAlConJ,EAAqCpnJ,EAAEkkJ,MAAFlkJ,EAArConJ,CALZpnJ,CAMA,MACJ;EACIA,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAxBJ;EA4BJ,cAAOy4E,CAAP;OAxCJwwE,EA2COA,CAAP;EAtDiB,GAAA,EAjgBrB3B,EA0jBAA,EAAW6B,UAAX7B,GAAwB;EAEpB,aAAS6B,CAAT,CAAoB3yG,CAApB;EAEI,UADAzxC,KAAKqkJ,YAALrkJ,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAuC5B,YAnCA2yG,EAAW5/C,SAAX4/C,CAAqBE,QAArBF,GAAgC,CAAhCA,EACAA,EAAW5/C,SAAX4/C,CAAqBG,WAArBH,GAAmC,CADnCA,EAEAA,EAAW5/C,SAAX4/C,CAAqBC,YAArBD,GAAoChC,MAAMhG,UAF1CgI,EAIAA,EAAWxN,MAAXwN,GAAoB,UAAgBnpJ,CAAhB,EAAmB8G,CAAnB;EACV9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiB+B,UAArB,EACjD,EAAOnpJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAE4wE,QAAF5wE,GAAaz4E,EAAEqsB,KAAFrsB,EAAby4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE6wE,WAAF7wE,GAAgBz4E,EAAEqsB,KAAFrsB,EAAhBy4E,CACA,MACJ,KAAK,CAAL;EAGI,gBAFMA,EAAE2wE,YAAF3wE,IAAkBA,EAAE2wE,YAAF3wE,CAAet5E,MAAjCs5E,KACFA,EAAE2wE,YAAF3wE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADA,IAAImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE2wE,YAAF3wE,CAAe13E,IAAf03E,CAAoBz4E,EAAEqsB,KAAFrsB,EAApBy4E,EAHR,KAKIA,EAAE2wE,YAAF3wE,CAAe13E,IAAf03E,CAAoBz4E,EAAEqsB,KAAFrsB,EAApBy4E,EACJ,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAlBJ;EAsBJ,cAAOy4E,CAAP;OAhCJ0wE,EAmCOA,CAAP;EA7CoB,GAAA,EA1jBxB7B,EA0mBAA,EAAWiC,QAAXjC,GAAsB;EAElB,aAASiC,CAAT,CAAkB/yG,CAAlB;EAEI,UADAzxC,KAAKwR,IAALxR,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAkC5B,YA9BA+yG,EAAShgD,SAATggD,CAAmBhzI,IAAnBgzI,GAA0BpC,MAAMhG,UAAhCoI,EACAA,EAAShgD,SAATggD,CAAmBxmI,QAAnBwmI,GAA8B,IAD9BA,EAEAA,EAAShgD,SAA
TggD,CAAmBC,OAAnBD,GAA6B,IAF7BA,EAIAA,EAAS5N,MAAT4N,GAAkB,UAAgBvpJ,CAAhB,EAAmB8G,CAAnB;EACR9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBmC,QAArB,EACjD,EAAOvpJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACUw6D,cAAEliE,IAAFkiE,IAAUA,EAAEliE,IAAFkiE,CAAOt5E,MAAjBs5E,KACFA,EAAEliE,IAAFkiE,KADEA,GAENA,EAAEliE,IAAFkiE,CAAO13E,IAAP03E,CAAY2uE,MAAME,UAANF,CAAiB6B,OAAjB7B,CAAyBzL,MAAzByL,CAAgCpnJ,CAAhConJ,EAAmCpnJ,EAAEkkJ,MAAFlkJ,EAAnConJ,CAAZ3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACIA,cAAE11D,QAAF01D,GAAa2uE,MAAME,UAANF,CAAiB+B,UAAjB/B,CAA4BzL,MAA5ByL,CAAmCpnJ,CAAnConJ,EAAsCpnJ,EAAEkkJ,MAAFlkJ,EAAtConJ,CAAb3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAE+wE,OAAF/wE,GAAY2uE,MAAME,UAANF,CAAiBqC,kBAAjBrC,CAAoCzL,MAApCyL,CAA2CpnJ,CAA3ConJ,EAA8CpnJ,EAAEkkJ,MAAFlkJ,EAA9ConJ,CAAZ3uE,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAbJ;EAiBJ,cAAOy4E,CAAP;OA3BJ8wE,EA8BOA,CAAP;EAxCkB,GAAA,EA1mBtBjC,EAqpBAA,EAAWoC,aAAXpC,GAA2B;EAEvB,aAASoC,CAAT,CAAuBlzG,CAAvB;EACI,UAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAU5B,SAAIoyG,CAAJ,CA8NA,OApOAc,EAAcngD,SAAdmgD,CAAwBC,QAAxBD,GAAmC,IAAnCA,EACAA,EAAcngD,SAAdmgD,CAAwBE,SAAxBF,GAAoC,IADpCA,EAEAA,EAAcngD,SAAdmgD,CAAwBG,SAAxBH,GAAoC,IAFpCA,EAGAA,EAAcngD,SAAdmgD,CAAwBI,SAAxBJ,GAAoC,IAHpCA,EAIAA,EAAcngD,SAAdmgD,CAAwBK,OAAxBL,GAAkC,IAJlCA,EAQAxgJ,OAAOiM,cAAPjM,CAAsBwgJ,EAAcngD,SAApCrgG,EAA+C,MAA/CA,IACIwS,KAAKyrI,MAAMzE,WAANyE,CAAkByB,KAAgB,YAAY,aAAa,aAAa,aAAa,UAArFzB,GACLtrI,KAAKsrI,MAAMtE,WAANsE,CAAkByB,CAAlBzB,GAFTj+I,CARAwgJ,EAaAA,EAAc/N,MAAd+N,GAAuB,UAAgB1pJ,CAAhB,EAAmB8G,CAAnB;EACb9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBsC,aAArB,EACjD,EAAO1pJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEkxE,QAAFlxE,GAAa2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+B4C,QAA/B5C,CAAwCzL,MAAxCyL,CAA+CpnJ,CAA/ConJ,EAAkDpnJ,EAAEkkJ,MAAFlkJ,EAAlDonJ,CAAb3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEmxE,SAAFnxE,GAAc2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+B6C,SAA/B7C,CAAyCzL,MAAzCyL,CAAgDpnJ,CAAhDonJ,EAAmDpnJ,EAAEkkJ,MAAFlkJ,EAAnDonJ,CAAd3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEoxE,SAAFpxE,GAAc2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+B8C,SAA/B9C,CAAyCzL,MAAzCyL,CAAgDpnJ,CAAhDonJ,EAAmDpnJ,EAAEkkJ,MAAFlkJ,EAAnDonJ,CAAd3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEqxE,SAAFrxE,GAAc2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+B+C,SAA/B/C,CAAyCzL,MAAzCyL,CAAgDpnJ,CAAhDonJ,EAAmDpnJ,EAAEkkJ,MAAFlkJ,EAAnDonJ,CAAd3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEsxE,OAAFtxE,GAAY2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+BgD,OAA/BhD,CAAuCzL,MAAvCyL,CAA8CpnJ,CAA9ConJ,EAAiDpnJ,EAAEkkJ,MAAFlkJ,EAAjDonJ,CAAZ3uE,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAjBJ;EAqBJ,cAAOy4E,CAAP;OAxCJixE,EA2CAA,EAAcM,QAAdN,GAAyB;EAErB,eAASM,CAAT,CAAkBxzG,CAAlB;EAEI,YADAzxC,KAAK+D,KAAL/D,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH
2nJ,CAAFjxG,CADF;EA0B5B,cAtBAwzG,EAASzgD,SAATygD,CAAmBlhJ,KAAnBkhJ,GAA2B7C,MAAMhG,UAAjC6I,EAEAA,EAASrO,MAATqO,GAAkB,UAAgBhqJ,CAAhB,EAAmB8G,CAAnB;EACR9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+B4C,QAAnC,EACjD,EAAOhqJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACUw6D,gBAAE3vE,KAAF2vE,IAAWA,EAAE3vE,KAAF2vE,CAAQt5E,MAAnBs5E,KACFA,EAAE3vE,KAAF2vE,KADEA,GAENA,EAAE3vE,KAAF2vE,CAAQ13E,IAAR03E,CAAaz4E,EAAEq7I,MAAFr7I,EAAby4E,CAFMA,CAGN,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAPJ;EAWJ,gBAAOy4E,CAAP;SAnBJuxE,EAsBOA,CAAP;EAhCqB,KAAA,EA3CzBN,EA8EAA,EAAcO,SAAdP,GAA0B;EAEtB,eAASO,CAAT,CAAmBzzG,CAAnB;EAEI,YADAzxC,KAAK+D,KAAL/D,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA0B5B,cAtBAyzG,EAAU1gD,SAAV0gD,CAAoBnhJ,KAApBmhJ,GAA4B9C,MAAMhG,UAAlC8I,EAEAA,EAAUtO,MAAVsO,GAAmB,UAAgBjqJ,CAAhB,EAAmB8G,CAAnB;EACT9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+B6C,SAAnC,EACjD,EAAOjqJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACUw6D,gBAAE3vE,KAAF2vE,IAAWA,EAAE3vE,KAAF2vE,CAAQt5E,MAAnBs5E,KACFA,EAAE3vE,KAAF2vE,KADEA,GAENA,EAAE3vE,KAAF2vE,CAAQ13E,IAAR03E,CAAaz4E,EAAE6nD,KAAF7nD,EAAby4E,CAFMA,CAGN,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAPJ;EAWJ,gBAAOy4E,CAAP;SAnBJwxE,EAsBOA,CAAP;EAhCsB,KAAA,EA9E1BP,EAiHAA,EAAcQ,SAAdR,GAA0B;EAEtB,eAASQ,CAAT,CAAmB1zG,CAAnB;EAEI,YADAzxC,KAAK+D,KAAL/D,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA+B5B,cA3BA0zG,EAAU3gD,SAAV2gD,CAAoBphJ,KAApBohJ,GAA4B/C,MAAMhG,UAAlC+I,EAEAA,EAAUvO,MAAVuO,GAAmB,UAAgBlqJ,CAAhB,EAAmB8G,CAAnB;EACT9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+B8C,SAAnC,EACjD,EAAOlqJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EAGI,kBAFMw6D,EAAE3vE,KAAF2vE,IAAWA,EAAE3vE,KAAF2vE,CAAQt5E,MAAnBs5E,KACFA,EAAE3vE,KAAF2vE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADA,IAAImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE3vE,KAAF2vE,CAAQ13E,IAAR03E,CAAaz4E,EAAEqkJ,KAAFrkJ,EAAby4E,EAHR,KAKIA,EAAE3vE,KAAF2vE,CAAQ13E,IAAR03E,CAAaz4E,EAAEqkJ,KAAFrkJ,EAAby4E,EACJ,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAZJ;EAgBJ,gBAAOy4E,CAAP;SAxBJyxE,EA2BOA,CAAP;EArCsB,KAAA,EAjH1BR,EAyJAA,EAAcS,SAAdT,GAA0B;EAEtB,eAASS,CAAT,CAAmB3zG,CAAnB;EAEI,YADAzxC,KAAK+D,KAAL/D,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,
CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA+B5B,cA3BA2zG,EAAU5gD,SAAV4gD,CAAoBrhJ,KAApBqhJ,GAA4BhD,MAAMhG,UAAlCgJ,EAEAA,EAAUxO,MAAVwO,GAAmB,UAAgBnqJ,CAAhB,EAAmB8G,CAAnB;EACT9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+B+C,SAAnC,EACjD,EAAOnqJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EAGI,kBAFMw6D,EAAE3vE,KAAF2vE,IAAWA,EAAE3vE,KAAF2vE,CAAQt5E,MAAnBs5E,KACFA,EAAE3vE,KAAF2vE,KADEA,GAEU,MAAP,IAAJx6D,CAAW,CAAhB,EAEI,KADA,IAAImR,IAAKpvB,EAAEkkJ,MAAFlkJ,KAAaA,EAAE8gH,GACxB,EAAO9gH,EAAE8gH,GAAF9gH,GAAQovB,CAAf,GACIqpD,EAAE3vE,KAAF2vE,CAAQ13E,IAAR03E,CAAaz4E,EAAE6gJ,KAAF7gJ,EAAby4E,EAHR,KAKIA,EAAE3vE,KAAF2vE,CAAQ13E,IAAR03E,CAAaz4E,EAAE6gJ,KAAF7gJ,EAAby4E,EACJ,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAZJ;EAgBJ,gBAAOy4E,CAAP;SAxBJ0xE,EA2BOA,CAAP;EArCsB,KAAA,EAzJ1BT,EAiMAA,EAAcU,OAAdV,GAAwB;EAEpB,eAASU,CAAT,CAAiB5zG,CAAjB;EAEI,YADAzxC,KAAK+D,KAAL/D,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA0B5B,cAtBA4zG,EAAQ7gD,SAAR6gD,CAAkBthJ,KAAlBshJ,GAA0BjD,MAAMhG,UAAhCiJ,EAEAA,EAAQzO,MAARyO,GAAiB,UAAgBpqJ,CAAhB,EAAmB8G,CAAnB;EACP9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+BgD,OAAnC,EACjD,EAAOpqJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACUw6D,gBAAE3vE,KAAF2vE,IAAWA,EAAE3vE,KAAF2vE,CAAQt5E,MAAnBs5E,KACFA,EAAE3vE,KAAF2vE,KADEA,GAENA,EAAE3vE,KAAF2vE,CAAQ13E,IAAR03E,CAAa2uE,MAAME,UAANF,CAAiBI,GAAjBJ,CAAqBzL,MAArByL,CAA4BpnJ,CAA5BonJ,EAA+BpnJ,EAAEkkJ,MAAFlkJ,EAA/BonJ,CAAb3uE,CAFMA,CAGN,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAPJ;EAWJ,gBAAOy4E,CAAP;SAnBJ2xE,EAsBOA,CAAP;EAhCoB,KAAA,EAjMxBV,EAoOOA,CAAP;EA7OuB,GAAA,EArpB3BpC,EAq4BAA,EAAW+C,QAAX/C,GAAsB;EAElB,aAAS+C,CAAT,CAAkB7zG,CAAlB;EACI,UAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAgDO,SAC3B+wG,CAD2B,EACVtkJ,CADU,CAQnC,OApDAonJ,EAAS9gD,SAAT8gD,CAAmBC,kBAAnBD,GAAwC,EAAxCA,EACAA,EAAS9gD,SAAT8gD,CAAmBE,cAAnBF,GAAoC,EADpCA,EAEAA,EAAS9gD,SAAT8gD,CAAmBG,aAAnBH,GAAmC,EAFnCA,EAGAA,EAAS9gD,SAAT8gD,CAAmBI,SAAnBJ,GAA+B,CAH/BA,EAIAA,EAAS9gD,SAAT8gD,CAAmBK,OAAnBL,IAA6B,CAJ7BA,EAKAA,EAAS9gD,SAAT8gD,CAAmBM,yBAAnBN,GAA+C,CAL/CA,EAMAA,EAAS9gD,SAAT8gD,CAAmBvmD,OAAnBumD,GAA6B,CAN7BA,EAQAA,EAAS1O,MAAT0O,GAAkB,UAAgBrqJ,CAAhB,EAAmB8G,CAAnB;EACR9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBiD,QAArB,EACjD,EAAOrqJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAE6xE,kBAAF7xE,GAAuBz4E,EAAEq7I,MAAFr7I,EAAvBy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE8xE,cAAF9xE,GAAmBz4E,EAAEq7I,MAAFr7I,EAAnBy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE+xE,a
AAF/xE,GAAkBz4E,EAAEq7I,MAAFr7I,EAAlBy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEgyE,SAAFhyE,GAAcz4E,EAAEqsB,KAAFrsB,EAAdy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEiyE,OAAFjyE,GAAYz4E,EAAE4D,IAAF5D,EAAZy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEkyE,yBAAFlyE,GAA8Bz4E,EAAE6gJ,KAAF7gJ,EAA9By4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEqrB,OAAFrrB,GAAYz4E,EAAEqsB,KAAFrsB,EAAZy4E,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAvBJ;EA2BJ,cAAOy4E,CAAP;OAzCJ4xE,EA4CAA,EAASO,uBAATP,IACQ9C,MAAAA,GAAiBtkJ,IAASiG,OAAOu3I,MAAPv3I,CAAcq+I,CAAdr+I,GACvBq+I,EAAW,CAAXA,IAAgB,YAAY,CAD/BA,EAEJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,IAAvBtkJ,IAA+B,CAF3BskJ,EAGJtkJ,EAAOskJ,EAAW,CAAXA,IAAgB,IAAvBtkJ,IAA+B,CAH3BskJ,EAIGtkJ,CALXonJ,CA5CAA,EAoDOA,CAAP;EA7DkB,GAAA,EAr4BtB/C,EAq8BAA,EAAWuD,UAAXvD,GAAwB;EAEpB,aAASuD,CAAT,CAAoBr0G,CAApB;EACI,UAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAS5B,SAAIoyG,CAAJ,CA0EA,OA/EAiC,EAAWthD,SAAXshD,CAAqBvnJ,IAArBunJ,GAA4B,EAA5BA,EACAA,EAAWthD,SAAXshD,CAAqBC,SAArBD,GAAiC,IADjCA,EAEAA,EAAWthD,SAAXshD,CAAqB7nJ,KAArB6nJ,GAA6B,CAF7BA,EAGAA,EAAWthD,SAAXshD,CAAqBrC,WAArBqC,GAAmC,IAHnCA,EAOA3hJ,OAAOiM,cAAPjM,CAAsB2hJ,EAAWthD,SAAjCrgG,EAA4C,UAA5CA,IACIwS,KAAKyrI,MAAMzE,WAANyE,CAAkByB,KAAgB,QAAQ,YAA1CzB,GACLtrI,KAAKsrI,MAAMtE,WAANsE,CAAkByB,CAAlBzB,GAFTj+I,CAPA2hJ,EAYAA,EAAWlP,MAAXkP,GAAoB,UAAgB7qJ,CAAhB,EAAmB8G,CAAnB;EACV9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiByD,UAArB,EACjD,EAAO7qJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEn1E,IAAFm1E,GAASz4E,EAAEq7I,MAAFr7I,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEqyE,SAAFryE,GAAc2uE,MAAME,UAANF,CAAiByD,UAAjBzD,CAA4B2D,SAA5B3D,CAAsCzL,MAAtCyL,CAA6CpnJ,CAA7ConJ,EAAgDpnJ,EAAEkkJ,MAAFlkJ,EAAhDonJ,CAAd3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEz1E,KAAFy1E,GAAUz4E,EAAEqsB,KAAFrsB,EAAVy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAE+vE,WAAF/vE,GAAgB2uE,MAAME,UAANF,CAAiBQ,WAAjBR,CAA6BzL,MAA7ByL,CAAoCpnJ,CAApConJ,EAAuCpnJ,EAAEkkJ,MAAFlkJ,EAAvConJ,CAAhB3uE,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAdJ;EAkBJ,cAAOy4E,CAAP;OApCJoyE,EAuCAA,EAAWE,SAAXF,GAAuB;EAEnB,eAASE,CAAT,CAAmBv0G,CAAnB;EACI,YAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAgC5B,cA5BAu0G,EAAUxhD,SAAVwhD,CAAoBC,gBAApBD,GAAuC,EAAvCA,EACAA,EAAUxhD,SAAVwhD,CAAoBE,iBAApBF,GAAwC,EADxCA,EAEAA,EAAUxhD,SAAVwhD,CAAoBG,oBAApBH,GAA2C,EAF3CA,EAIAA,EAAUpP,MAAVoP,GAAmB,UAAgB/qJ,CAAhB,EAAmB8G,CAAnB;EACT9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiByD,UAAjBzD,CAA4B2D,SAAhC,EACjD,EAAO/qJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,gBAAEuyE,gBAAFvyE,GAAqBz4E,EAAEq7I,MAAFr7I,EAArBy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAEwyE,iBAAFxyE,GAAsBz4E,EAAEq7I,MAAFr7I,EAAtBy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAEyyE,oBAAFzyE,GAAyBz4E,EAAEq7I,MAAFr7I,EAAzBy4E,CACA,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie
,CAAXje,EAXJ;EAeJ,gBAAOy4E,CAAP;SAzBJsyE,EA4BOA,CAAP;EArCmB,KAAA,EAvCvBF,EA+EOA,CAAP;EAxFoB,GAAA,EAr8BxBvD,EAgiCAA,EAAW6D,YAAX7D,GAA0B;EAEtB,aAAS6D,CAAT,CAAsB30G,CAAtB;EAGI,UAFAzxC,KAAKyR,MAALzR,KAAAA,EACAA,KAAK6R,OAAL7R,KADAA,EAEIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA0C5B,YAtCA20G,EAAa5hD,SAAb4hD,CAAuB30I,MAAvB20I,GAAgChE,MAAM9F,WAAtC8J,EACAA,EAAa5hD,SAAb4hD,CAAuBv0I,OAAvBu0I,GAAiChE,MAAM9F,WADvC8J,EAEAA,EAAa5hD,SAAb4hD,CAAuBC,UAAvBD,GAAoC,EAFpCA,EAIAA,EAAaxP,MAAbwP,GAAsB,UAAgBnrJ,CAAhB,EAAmB8G,CAAnB;EACZ9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAsFwU,CAAtF,EAAI0a,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiB+D,YAArB,EACjD,EAAOnrJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIje,cAAE8lJ,IAAF9lJ,GAAS8gH,GAAT9gH,IACIy4E,EAAEjiE,MAAFiiE,KAAa0uE,MAAM9F,WAAnB5oE,KACAA,EAAEjiE,MAAFiiE,KADAA,CADJz4E,EAGAwU,IAAIxU,EAAEq7I,MAAFr7I,EAHJA,EAIAA,EAAE8gH,GAAF9gH,EAJAA,EAKAy4E,EAAEjiE,MAAFiiE,CAASjkE,CAATikE,IAAc2uE,MAAME,UAANF,CAAiByD,UAAjBzD,CAA4BzL,MAA5ByL,CAAmCpnJ,CAAnConJ,EAAsCpnJ,EAAEkkJ,MAAFlkJ,EAAtConJ,CALdpnJ,CAMA,MACJ,KAAK,CAAL;EACIA,cAAE8lJ,IAAF9lJ,GAAS8gH,GAAT9gH,IACIy4E,EAAE7hE,OAAF6hE,KAAc0uE,MAAM9F,WAApB5oE,KACAA,EAAE7hE,OAAF6hE,KADAA,CADJz4E,EAGAwU,IAAIxU,EAAEq7I,MAAFr7I,EAHJA,EAIAA,EAAE8gH,GAAF9gH,EAJAA,EAKAy4E,EAAE7hE,OAAF6hE,CAAUjkE,CAAVikE,IAAe2uE,MAAME,UAANF,CAAiByD,UAAjBzD,CAA4BzL,MAA5ByL,CAAmCpnJ,CAAnConJ,EAAsCpnJ,EAAEkkJ,MAAFlkJ,EAAtConJ,CALfpnJ,CAMA,MACJ,KAAK,CAAL;EACIy4E,cAAE2yE,UAAF3yE,GAAez4E,EAAEq7I,MAAFr7I,EAAfy4E,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EArBJ;EAyBJ,cAAOy4E,CAAP;OAnCJ0yE,EAsCOA,CAAP;EAjDsB,GAAA,EAhiC1B7D,EAolCAA,EAAW+D,YAAX/D,GAA0B;EAEtB,aAAS+D,CAAT,CAAsB70G,CAAtB;EACI,UAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA4B5B,YAxBA60G,EAAa9hD,SAAb8hD,CAAuB5vI,UAAvB4vI,GAAoC,IAApCA,EACAA,EAAa9hD,SAAb8hD,CAAuBC,QAAvBD,GAAkC,EADlCA,EAGAA,EAAa1P,MAAb0P,GAAsB,UAAgBrrJ,CAAhB,EAAmB8G,CAAnB;EACZ9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBiE,YAArB,EACjD,EAAOrrJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEh9D,UAAFg9D,GAAe2uE,MAAME,UAANF,CAAiByD,UAAjBzD,CAA4BzL,MAA5ByL,CAAmCpnJ,CAAnConJ,EAAsCpnJ,EAAEkkJ,MAAFlkJ,EAAtConJ,CAAf3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAE6yE,QAAF7yE,GAAaz4E,EAAEq7I,MAAFr7I,EAAby4E,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EARJ;EAYJ,cAAOy4E,CAAP;OArBJ4yE,EAwBOA,CAAP;EAjCsB,GAAA,EAplC1B/D,EAwnCAA,EAAWiE,KAAXjE,GAAmB;EAEf,aAASiE,CAAT,CAAe/0G,CAAf;EAII,UAHAzxC,KAAKymJ,QAALzmJ,KAAAA,EACAA,KAAK0mJ,SAAL1mJ,KADAA,EAEAA,KAAKikJ,IAALjkJ,KAFAA,EAGIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2n
J,CAAFjxG,CADF;EA0N5B,YAtNA+0G,EAAMhiD,SAANgiD,CAAgBjoJ,IAAhBioJ,GAAuB,EAAvBA,EACAA,EAAMhiD,SAANgiD,CAAgBC,QAAhBD,GAA2BpE,MAAMhG,UADjCoK,EAEAA,EAAMhiD,SAANgiD,CAAgBE,SAAhBF,GAA4BpE,MAAMhG,UAFlCoK,EAGAA,EAAMhiD,SAANgiD,CAAgBvC,IAAhBuC,GAAuBpE,MAAMhG,UAH7BoK,EAIAA,EAAMhiD,SAANgiD,CAAgBG,WAAhBH,GAA8B,IAJ9BA,EAKAA,EAAMhiD,SAANgiD,CAAgB1rB,OAAhB0rB,GAA0B,EAL1BA,EAMAA,EAAMhiD,SAANgiD,CAAgBI,WAAhBJ,GAA8B,EAN9BA,EAOAA,EAAMhiD,SAANgiD,CAAgBK,aAAhBL,IAAgC,CAPhCA,EAQAA,EAAMhiD,SAANgiD,CAAgBM,WAAhBN,IAA8B,CAR9BA,EASAA,EAAMhiD,SAANgiD,CAAgBO,UAAhBP,IAA6B,CAT7BA,EAUAA,EAAMhiD,SAANgiD,CAAgBQ,wBAAhBR,IAA2C,CAV3CA,EAYAA,EAAM5P,MAAN4P,GAAe,UAAgBvrJ,CAAhB,EAAmB8G,CAAnB;EACL9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBmE,KAArB,EACjD,EAAOvrJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEn1E,IAAFm1E,GAASz4E,EAAEq7I,MAAFr7I,EAATy4E,CACA,MACJ,KAAK,CAAL;EACUA,cAAE+yE,QAAF/yE,IAAcA,EAAE+yE,QAAF/yE,CAAWt5E,MAAzBs5E,KACFA,EAAE+yE,QAAF/yE,KADEA,GAENA,EAAE+yE,QAAF/yE,CAAW13E,IAAX03E,CAAgB2uE,MAAME,UAANF,CAAiBmE,KAAjBnE,CAAuB4E,MAAvB5E,CAA8BzL,MAA9ByL,CAAqCpnJ,CAArConJ,EAAwCpnJ,EAAEkkJ,MAAFlkJ,EAAxConJ,CAAhB3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACUA,cAAEgzE,SAAFhzE,IAAeA,EAAEgzE,SAAFhzE,CAAYt5E,MAA3Bs5E,KACFA,EAAEgzE,SAAFhzE,KADEA,GAENA,EAAEgzE,SAAFhzE,CAAY13E,IAAZ03E,CAAiB2uE,MAAME,UAANF,CAAiBmE,KAAjBnE,CAAuB4E,MAAvB5E,CAA8BzL,MAA9ByL,CAAqCpnJ,CAArConJ,EAAwCpnJ,EAAEkkJ,MAAFlkJ,EAAxConJ,CAAjB3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACUA,cAAEuwE,IAAFvwE,IAAUA,EAAEuwE,IAAFvwE,CAAOt5E,MAAjBs5E,KACFA,EAAEuwE,IAAFvwE,KADEA,GAENA,EAAEuwE,IAAFvwE,CAAO13E,IAAP03E,CAAY2uE,MAAME,UAANF,CAAiBmE,KAAjBnE,CAAuB6E,OAAvB7E,CAA+BzL,MAA/ByL,CAAsCpnJ,CAAtConJ,EAAyCpnJ,EAAEkkJ,MAAFlkJ,EAAzConJ,CAAZ3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACIA,cAAEizE,WAAFjzE,GAAgB2uE,MAAME,UAANF,CAAiBmE,KAAjBnE,CAAuB8E,aAAvB9E,CAAqCzL,MAArCyL,CAA4CpnJ,CAA5ConJ,EAA+CpnJ,EAAEkkJ,MAAFlkJ,EAA/ConJ,CAAhB3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEonD,OAAFpnD,GAAYz4E,EAAEq7I,MAAFr7I,EAAZy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEkzE,WAAFlzE,GAAgBz4E,EAAEq7I,MAAFr7I,EAAhBy4E,CACA,MACJ,KAAK,EAAL;EACIA,cAAEmzE,aAAFnzE,GAAkBz4E,EAAE4D,IAAF5D,EAAlBy4E,CACA,MACJ,KAAK,EAAL;EACIA,cAAEozE,WAAFpzE,GAAgBz4E,EAAE4D,IAAF5D,EAAhBy4E,CACA,MACJ,KAAK,EAAL;EACIA,cAAEqzE,UAAFrzE,GAAez4E,EAAE4D,IAAF5D,EAAfy4E,CACA,MACJ,KAAK,EAAL;EACIA,cAAEszE,wBAAFtzE,GAA6Bz4E,EAAE4D,IAAF5D,EAA7By4E,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAzCJ;EA6CJ,cAAOy4E,CAAP;OA/DJ8yE,EAkEAA,EAAMS,MAANT,GAAe;EAEX,eAASS,CAAT,CAAgBx1G,CAAhB;EACI,YAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAgD5B,cA5CAw1G,EAAOziD,SAAPyiD,CAAiB1oJ,IAAjB0oJ,GAAwB,EAAxBA,EACAA,EAAOziD,SAAPyiD,CAAiBL,WAAjBK,GAA+B,EAD/BA,EAEAA,EAAOziD,SAAPyiD,CAAiBntI,IAAjBmtI,GAAwB,CAFxBA,EAGAA,EAAOziD,SAAPyiD,CAAiBG,QAAjBH,GAA4B,EAH5BA,EAIAA,EAAOziD,SAAPyiD,CAAiBI,UAAjBJ,GAA8B,EAJ9BA,EAKAA,EAAOziD,SAAPyiD,CAAiBK,YAAjBL,GAAgC,EALhCA,EAMAA,EAAOziD,SAAPyiD,CAAiBM,KAAjBN,IAAyB,CANzBA,EAQAA,EAAOrQ,MAAPqQ,GAAgB,UAAgBhsJ,CAAhB,EAAmB8G,CAAnB;EACN9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBmE,KA
AjBnE,CAAuB4E,MAA3B,EACjD,EAAOhsJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,gBAAEn1E,IAAFm1E,GAASz4E,EAAEq7I,MAAFr7I,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAEkzE,WAAFlzE,GAAgBz4E,EAAEq7I,MAAFr7I,EAAhBy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE55D,IAAF45D,GAASz4E,EAAEqsB,KAAFrsB,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE0zE,QAAF1zE,GAAaz4E,EAAEq7I,MAAFr7I,EAAby4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE2zE,UAAF3zE,GAAez4E,EAAEq7I,MAAFr7I,EAAfy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE4zE,YAAF5zE,GAAiBz4E,EAAEq7I,MAAFr7I,EAAjBy4E,CACA,MACJ,KAAK,EAAL;EACIA,gBAAE6zE,KAAF7zE,GAAUz4E,EAAE4D,IAAF5D,EAAVy4E,CACA,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAvBJ;EA2BJ,gBAAOy4E,CAAP;SAzCJuzE,EA4COA,CAAP;EArDW,KAAA,EAlEfT,EA0HAA,EAAMU,OAANV,GAAgB;EAEZ,eAASU,CAAT,CAAiBz1G,CAAjB;EACI,YAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAgD5B,cA5CAy1G,EAAQ1iD,SAAR0iD,CAAkB3oJ,IAAlB2oJ,GAAyB,EAAzBA,EACAA,EAAQ1iD,SAAR0iD,CAAkBptI,IAAlBotI,GAAyB,EADzBA,EAEAA,EAAQ1iD,SAAR0iD,CAAkB39H,YAAlB29H,GAAiC,IAFjCA,EAGAA,EAAQ1iD,SAAR0iD,CAAkBN,WAAlBM,GAAgC,EAHhCA,EAIAA,EAAQ1iD,SAAR0iD,CAAkBM,UAAlBN,IAA+B,CAJ/BA,EAKAA,EAAQ1iD,SAAR0iD,CAAkBj+I,OAAlBi+I,GAA4B9E,MAAM3H,IAAN2H,GAAaA,MAAM3H,IAAN2H,CAAW5E,QAAX4E,CAAoB,CAApBA,EAAsB,CAAtBA,GAAwB,CAAxBA,CAAbA,GAA8C,CAL1E8E,EAMAA,EAAQ1iD,SAAR0iD,CAAkBO,aAAlBP,GAAkC,IANlCA,EAQAA,EAAQtQ,MAARsQ,GAAiB,UAAgBjsJ,CAAhB,EAAmB8G,CAAnB;EACP9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBmE,KAAjBnE,CAAuB6E,OAA3B,EACjD,EAAOjsJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,gBAAEn1E,IAAFm1E,GAASz4E,EAAEq7I,MAAFr7I,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE55D,IAAF45D,GAASz4E,EAAEq7I,MAAFr7I,EAATy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAEnqD,YAAFmqD,GAAiB2uE,MAAME,UAANF,CAAiBuB,SAAjBvB,CAA2BzL,MAA3ByL,CAAkCpnJ,CAAlConJ,EAAqCpnJ,EAAEkkJ,MAAFlkJ,EAArConJ,CAAjB3uE,CACA,MACJ,KAAK,CAAL;EACIA,gBAAEkzE,WAAFlzE,GAAgBz4E,EAAEq7I,MAAFr7I,EAAhBy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE8zE,UAAF9zE,GAAez4E,EAAE4D,IAAF5D,EAAfy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAEzqE,OAAFyqE,GAAYz4E,EAAEqkJ,KAAFrkJ,EAAZy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE+zE,aAAF/zE,GAAkB2uE,MAAME,UAANF,CAAiBuB,SAAjBvB,CAA2BzL,MAA3ByL,CAAkCpnJ,CAAlConJ,EAAqCpnJ,EAAEkkJ,MAAFlkJ,EAArConJ,CAAlB3uE,CACA,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAvBJ;EA2BJ,gBAAOy4E,CAAP;SAzCJwzE,EA4COA,CAAP;EArDY,KAAA,EA1HhBV,EAkLAA,EAAMW,aAANX,GAAsB;EAElB,eAASW,CAAT,CAAuB11G,CAAvB;EACI,YAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA4B5B,cAxBA01G,EAAc3iD,SAAd2iD,CAAwBpoD,OAAxBooD,GAAkC,CAAlCA,EACAA,EAAc3iD,SAAd2iD,CAAwBO,WAAxBP,GAAsC,EADtCA,EAGAA,EAAcvQ,MAAduQ,GAAuB,UAAgBlsJ,CAAhB,EAAmB8G,CAAnB;EACb9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBmE,KAAjBnE,CAAuB8E,aAA3B,EACjD,EAAOlsJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cA
AIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,gBAAEqrB,OAAFrrB,GAAYz4E,EAAEqsB,KAAFrsB,EAAZy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAEg0E,WAAFh0E,GAAgBz4E,EAAEq7I,MAAFr7I,EAAhBy4E,CACA,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EARJ;EAYJ,gBAAOy4E,CAAP;SArBJyzE,EAwBOA,CAAP;EAjCkB,KAAA,EAlLtBX,EAsNOA,CAAP;EAlOe,GAAA,EAxnCnBjE,EA61CAA,EAAWoF,MAAXpF,GAAoB;EAEhB,aAASoF,CAAT,CAAgBl2G,CAAhB;EAEI,UADAzxC,KAAKwlB,EAALxlB,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA0B5B,YAtBAk2G,EAAOnjD,SAAPmjD,CAAiBniI,EAAjBmiI,GAAsBvF,MAAMhG,UAA5BuL,EAEAA,EAAO/Q,MAAP+Q,GAAgB,UAAgB1sJ,CAAhB,EAAmB8G,CAAnB;EACN9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBsF,MAArB,EACjD,EAAO1sJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACUw6D,cAAEluD,EAAFkuD,IAAQA,EAAEluD,EAAFkuD,CAAKt5E,MAAbs5E,KACFA,EAAEluD,EAAFkuD,KADEA,GAENA,EAAEluD,EAAFkuD,CAAK13E,IAAL03E,CAAU2uE,MAAME,UAANF,CAAiBmE,KAAjBnE,CAAuBzL,MAAvByL,CAA8BpnJ,CAA9BonJ,EAAiCpnJ,EAAEkkJ,MAAFlkJ,EAAjConJ,CAAV3uE,CAFMA,CAGN,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAPJ;EAWJ,cAAOy4E,CAAP;OAnBJi0E,EAsBOA,CAAP;EAhCgB,GAAA,EA71CpBpF,EAg4CAA,EAAWqF,YAAXrF,GAA0B;EAEtB,aAASqF,CAAT,CAAsBn2G,CAAtB;EAII,UAHAzxC,KAAK6nJ,aAAL7nJ,KAAAA,EACAA,KAAK8nJ,YAAL9nJ,KADAA,EAEAA,KAAK+nJ,YAAL/nJ,KAFAA,EAGIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA+G5B,YA3GAm2G,EAAapjD,SAAbojD,CAAuBI,WAAvBJ,GAAqC,IAArCA,EACAA,EAAapjD,SAAbojD,CAAuBK,QAAvBL,GAAkC,IADlCA,EAEAA,EAAapjD,SAAbojD,CAAuBM,QAAvBN,GAAkC,IAFlCA,EAGAA,EAAapjD,SAAbojD,CAAuBC,aAAvBD,GAAuCxF,MAAM9F,WAH7CsL,EAIAA,EAAapjD,SAAbojD,CAAuBE,YAAvBF,GAAsCxF,MAAM9F,WAJ5CsL,EAKAA,EAAapjD,SAAbojD,CAAuBG,YAAvBH,GAAsCxF,MAAMhG,UAL5CwL,EAOAA,EAAahR,MAAbgR,GAAsB,UAAgB3sJ,CAAhB,EAAmB8G,CAAnB;EACZ9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAsFwU,CAAtF,EAAI0a,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBuF,YAArB,EACjD,EAAO3sJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEs0E,WAAFt0E,GAAgB2uE,MAAME,UAANF,CAAiBuF,YAAjBvF,CAA8B8F,WAA9B9F,CAA0CzL,MAA1CyL,CAAiDpnJ,CAAjDonJ,EAAoDpnJ,EAAEkkJ,MAAFlkJ,EAApDonJ,CAAhB3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEu0E,QAAFv0E,GAAa2uE,MAAME,UAANF,CAAiBmC,QAAjBnC,CAA0BzL,MAA1ByL,CAAiCpnJ,CAAjConJ,EAAoCpnJ,EAAEkkJ,MAAFlkJ,EAApConJ,CAAb3uE,CACA,MACJ,KAAK,CAAL;EACIA,cAAEw0E,QAAFx0E,GAAa2uE,MAAME,UAANF,CAAiBiD,QAAjBjD,CAA0BzL,MAA1ByL,CAAiCpnJ,CAAjConJ,EAAoCpnJ,EAAEkkJ,MAAFlkJ,EAApConJ,CAAb3uE,CACA,MACJ,KAAK,CAAL;EACIz4E,cAAE8lJ,IAAF9lJ,GAAS8gH,GAAT9gH,IACIy4E,EAAEm0E,aAAFn0E,KAAoB0uE,MAAM9F,WAA1B5oE,KACAA,EAAEm0E,aAAFn0E,KADAA,CADJz4E,EAGAwU,IAAIxU,EAAEq7I,MAAFr7I,EAHJA,EAIAA,EAAE8gH,GAAF9gH,EAJAA,EAKAy4E,EAAEm0E,aAAFn0E,CAAgBjkE,CAAhBikE,IAAqB2uE,MAAME,UAANF,CAAiBsC,aAAjBtC,CAA+BzL,MAA/ByL,CAAsCpnJ,CAAtConJ,EAAyCpnJ,EAAEkkJ,MAAFlkJ,EAAzConJ,CALrBpnJ,CAMA,MACJ,KAA
K,CAAL;EACIA,cAAE8lJ,IAAF9lJ,GAAS8gH,GAAT9gH,IACIy4E,EAAEo0E,YAAFp0E,KAAmB0uE,MAAM9F,WAAzB5oE,KACAA,EAAEo0E,YAAFp0E,KADAA,CADJz4E,EAGAwU,IAAIxU,EAAEq7I,MAAFr7I,EAHJA,EAIAA,EAAE8gH,GAAF9gH,EAJAA,EAKAy4E,EAAEo0E,YAAFp0E,CAAejkE,CAAfikE,IAAoB2uE,MAAME,UAANF,CAAiB+D,YAAjB/D,CAA8BzL,MAA9ByL,CAAqCpnJ,CAArConJ,EAAwCpnJ,EAAEkkJ,MAAFlkJ,EAAxConJ,CALpBpnJ,CAMA,MACJ,KAAK,CAAL;EACUy4E,cAAEq0E,YAAFr0E,IAAkBA,EAAEq0E,YAAFr0E,CAAet5E,MAAjCs5E,KACFA,EAAEq0E,YAAFr0E,KADEA,GAENA,EAAEq0E,YAAFr0E,CAAe13E,IAAf03E,CAAoB2uE,MAAME,UAANF,CAAiBiE,YAAjBjE,CAA8BzL,MAA9ByL,CAAqCpnJ,CAArConJ,EAAwCpnJ,EAAEkkJ,MAAFlkJ,EAAxConJ,CAApB3uE,CAFMA,CAGN,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAhCJ;EAoCJ,cAAOy4E,CAAP;OAjDJk0E,EAoDAA,EAAaO,WAAbP,GAA2B;EAEvB,eAASO,CAAT,CAAqB12G,CAArB;EAEI,YADAzxC,KAAKooJ,IAALpoJ,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA8C5B,cA1CA02G,EAAY3jD,SAAZ2jD,CAAsBE,gBAAtBF,GAAyC,EAAzCA,EACAA,EAAY3jD,SAAZ2jD,CAAsBG,cAAtBH,GAAuC,IADvCA,EAEAA,EAAY3jD,SAAZ2jD,CAAsBI,OAAtBJ,GAAgC,IAFhCA,EAGAA,EAAY3jD,SAAZ2jD,CAAsBC,IAAtBD,GAA6B/F,MAAMhG,UAHnC+L,EAIAA,EAAY3jD,SAAZ2jD,CAAsBK,iBAAtBL,GAA0C,EAJ1CA,EAKAA,EAAY3jD,SAAZ2jD,CAAsBM,oBAAtBN,GAA6C,EAL7CA,EAOAA,EAAYvR,MAAZuR,GAAqB,UAAgBltJ,CAAhB,EAAmB8G,CAAnB;EACX9G,qBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBuF,YAAjBvF,CAA8B8F,WAAlC,EACjD,EAAOltJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,cAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,gBAAE20E,gBAAF30E,GAAqBz4E,EAAEq7I,MAAFr7I,EAArBy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE40E,cAAF50E,GAAmB2uE,MAAME,UAANF,CAAiBsF,MAAjBtF,CAAwBzL,MAAxByL,CAA+BpnJ,CAA/BonJ,EAAkCpnJ,EAAEkkJ,MAAFlkJ,EAAlConJ,CAAnB3uE,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE60E,OAAF70E,GAAY2uE,MAAME,UAANF,CAAiBI,GAAjBJ,CAAqBzL,MAArByL,CAA4BpnJ,CAA5BonJ,EAA+BpnJ,EAAEkkJ,MAAFlkJ,EAA/BonJ,CAAZ3uE,CACA,MACJ,KAAK,CAAL;EACUA,gBAAE00E,IAAF10E,IAAUA,EAAE00E,IAAF10E,CAAOt5E,MAAjBs5E,KACFA,EAAE00E,IAAF10E,KADEA,GAENA,EAAE00E,IAAF10E,CAAO13E,IAAP03E,CAAYz4E,EAAEq7I,MAAFr7I,EAAZy4E,CAFMA,CAGN,MACJ,KAAK,CAAL;EACIA,gBAAE80E,iBAAF90E,GAAsBz4E,EAAEq7I,MAAFr7I,EAAtBy4E,CACA,MACJ,KAAK,CAAL;EACIA,gBAAE+0E,oBAAF/0E,GAAyBz4E,EAAEq7I,MAAFr7I,EAAzBy4E,CACA,MACJ;EACIz4E,gBAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAtBJ;EA0BJ,gBAAOy4E,CAAP;SAvCJy0E,EA0COA,CAAP;EApDuB,KAAA,EApD3BP,EA2GOA,CAAP;EAvHsB,GAAA,EAh4C1BrF,EA0/CAA,EAAWmG,UAAXnG,GAAwB;EAEpB,aAASmG,CAAT,CAAoBj3G,CAApB;EAEI,UADAzxC,KAAK2oJ,UAAL3oJ,KAAAA,EACIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA8B5B,YA1BAi3G,EAAWlkD,SAAXkkD,CAAqBE,uBAArBF,GAA+CtG,MAAM3H,IAAN2H,GAAaA,MAAM3H,IAAN2H,CAAW5E,QAAX4E,CAAoB,CAApBA,EAAsB,CAAtBA,GAAwB,CAAxBA,CAAbA,GAA8C,CAA7FsG,EACAA,EAAWlkD,SAAXkkD,CAAqBC,UAArBD,GAAkCtG,MAAMhG,UADxCsM,EAGAA,EAAW9R,MAAX8R,GAAoB,UAAgBztJ,CAAhB,EAAmB8G,CAAnB;EACV9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBqG,UAArB,EACjD,EAAOztJ,EAAE8gH,GAAF9gH,GAAQkvB,CAA
f,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEk1E,uBAAFl1E,GAA4Bz4E,EAAEqkJ,KAAFrkJ,EAA5By4E,CACA,MACJ,KAAK,CAAL;EACUA,cAAEi1E,UAAFj1E,IAAgBA,EAAEi1E,UAAFj1E,CAAat5E,MAA7Bs5E,KACFA,EAAEi1E,UAAFj1E,KADEA,GAENA,EAAEi1E,UAAFj1E,CAAa13E,IAAb03E,CAAkB2uE,MAAME,UAANF,CAAiBuF,YAAjBvF,CAA8BzL,MAA9ByL,CAAqCpnJ,CAArConJ,EAAwCpnJ,EAAEkkJ,MAAFlkJ,EAAxConJ,CAAlB3uE,CAFMA,CAGN,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAVJ;EAcJ,cAAOy4E,CAAP;OAvBJg1E,EA0BOA,CAAP;EApCoB,GAAA,EA1/CxBnG,EAiiDAA,EAAWmC,kBAAXnC,GAAgC;EAE5B,aAASmC,CAAT,CAA4BjzG,CAA5B;EAGI,UAFAzxC,KAAe6oJ,QAAf7oJ,KAAAA,EACAA,KAAK2S,QAAL3S,KADAA,EAEIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAgC5B,YA5BAizG,EAAmBlgD,SAAnBkgD,CAAuCmE,QAAvCnE,GAA2CtC,MAAMhG,UAAjDsI,EACAA,EAAmBlgD,SAAnBkgD,CAA6B/xI,QAA7B+xI,GAAwCtC,MAAMhG,UAD9CsI,EAGAA,EAAmB9N,MAAnB8N,GAA4B,UAAgBzpJ,CAAhB,EAAmB8G,CAAnB;EAClB9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiBqC,kBAArB,EACjD,EAAOzpJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACUw6D,cAAYm1E,QAAZn1E,IAAiBA,EAAYm1E,QAAZn1E,CAAct5E,MAA/Bs5E,KACFA,EAAYm1E,QAAZn1E,KADEA,GAENA,EAAYm1E,QAAZn1E,CAAc13E,IAAd03E,CAAmB2uE,MAAME,UAANF,CAAiByG,WAAjBzG,CAA6BzL,MAA7ByL,CAAoCpnJ,CAApConJ,EAAuCpnJ,EAAEkkJ,MAAFlkJ,EAAvConJ,CAAnB3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACUA,cAAE/gE,QAAF+gE,IAAcA,EAAE/gE,QAAF+gE,CAAWt5E,MAAzBs5E,KACFA,EAAE/gE,QAAF+gE,KADEA,GAENA,EAAE/gE,QAAF+gE,CAAW13E,IAAX03E,CAAgB2uE,MAAME,UAANF,CAAiB0G,WAAjB1G,CAA6BzL,MAA7ByL,CAAoCpnJ,CAApConJ,EAAuCpnJ,EAAEkkJ,MAAFlkJ,EAAvConJ,CAAhB3uE,CAFMA,CAGN,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EAZJ;EAgBJ,cAAOy4E,CAAP;OAzBJgxE,EA4BOA,CAAP;EAvC4B,GAAA,EAjiDhCnC,EA2kDAA,EAAWuG,WAAXvG,GAAyB;EAErB,aAASuG,CAAT,CAAqBr3G,CAArB;EAII,UAHAzxC,KAAKikJ,IAALjkJ,KAAAA,EACAA,KAAKgpJ,OAALhpJ,KADAA,EAEAA,KAAKpE,GAALoE,KAFAA,EAGIyxC,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EAgD5B,YA5CAq3G,EAAYtkD,SAAZskD,CAAsBG,SAAtBH,GAAkC,IAAlCA,EACAA,EAAYtkD,SAAZskD,CAAsB7E,IAAtB6E,GAA6B1G,MAAM9F,WADnCwM,EAEAA,EAAYtkD,SAAZskD,CAAsBE,OAAtBF,GAAgC1G,MAAMhG,UAFtC0M,EAGAA,EAAYtkD,SAAZskD,CAAsBltJ,GAAtBktJ,GAA4B1G,MAAM9F,WAHlCwM,EAKAA,EAAYlS,MAAZkS,GAAqB,UAAgB7tJ,CAAhB,EAAmB8G,CAAnB;EACX9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAqFwU,CAArF,EAAI0a,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiByG,WAArB,EACjD,EAAO7tJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEu1E,SAAFv1E,GAAc2uE,MAAME,UAANF,CAAiBmE,KAAjBnE,CAAuBzL,MAAvByL,CAA8BpnJ,CAA9BonJ,EAAiCpnJ,EAAEkkJ,MAAFlkJ,EAAjConJ,CAAd3uE,CACA,MACJ,KAAK,CAAL;EACIz4E,cAAE8lJ,IAAF9lJ,GAAS8gH,GAAT9gH,IACIy4E,EAAEuwE,IAAFvwE,KAAW0uE,MAAM9F,WAAjB5oE,KACAA,EAAEuwE,IAAFvwE,KADAA,CADJz4E,EAGAwU,IAAIxU,EAAEq7I,MAAFr7I,EAHJA,EAIAA,EAAE8gH,GAAF9gH,EAJAA,EAKAy4E,EAAEuwE,IAAFvwE,CAAOjkE,CAAPikE,IAAY2uE
,MAAME,UAANF,CAAiBuB,SAAjBvB,CAA2BzL,MAA3ByL,CAAkCpnJ,CAAlConJ,EAAqCpnJ,EAAEkkJ,MAAFlkJ,EAArConJ,CALZpnJ,CAMA,MACJ,KAAK,CAAL;EACUy4E,cAAEs1E,OAAFt1E,IAAaA,EAAEs1E,OAAFt1E,CAAUt5E,MAAvBs5E,KACFA,EAAEs1E,OAAFt1E,KADEA,GAENA,EAAEs1E,OAAFt1E,CAAU13E,IAAV03E,CAAe2uE,MAAME,UAANF,CAAiB6B,OAAjB7B,CAAyBzL,MAAzByL,CAAgCpnJ,CAAhConJ,EAAmCpnJ,EAAEkkJ,MAAFlkJ,EAAnConJ,CAAf3uE,CAFMA,CAGN,MACJ,KAAK,CAAL;EACIz4E,cAAE8lJ,IAAF9lJ,GAAS8gH,GAAT9gH,IACIy4E,EAAE93E,GAAF83E,KAAU0uE,MAAM9F,WAAhB5oE,KACAA,EAAE93E,GAAF83E,KADAA,CADJz4E,EAGAwU,IAAIxU,EAAEq7I,MAAFr7I,EAHJA,EAIAA,EAAE8gH,GAAF9gH,EAJAA,EAKAy4E,EAAE93E,GAAF83E,CAAMjkE,CAANikE,IAAWz4E,EAAEq7I,MAAFr7I,EALXA,CAMA,MACJ;EACIA,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EA1BJ;EA8BJ,cAAOy4E,CAAP;OAzCJo1E,EA4COA,CAAP;EAxDqB,GAAA,EA3kDzBvG,EAsoDAA,EAAWwG,WAAXxG,GAAyB;EAErB,aAASwG,CAAT,CAAqBt3G,CAArB;EACI,UAAIA,CAAJ,EACI,KAAK,IAAIixG,IAAKv+I,OAAO0O,IAAP1O,CAAYstC,CAAZttC,CAAT,EAAyBpJ,IAAI,CAAlC,EAAqCA,IAAI2nJ,EAAGtoJ,MAA5C,IAAsDW,CAAtD,EACoB,QAAZ02C,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CAAY,KACZzxC,KAAK0iJ,EAAG3nJ,CAAH2nJ,CAAL1iJ,IAAcyxC,EAAEixG,EAAG3nJ,CAAH2nJ,CAAFjxG,CADF;EA4B5B,YAxBAs3G,EAAYvkD,SAAZukD,CAAsBxkI,YAAtBwkI,GAAqC,EAArCA,EACAA,EAAYvkD,SAAZukD,CAAsBG,YAAtBH,GAAqC,EADrCA,EAGAA,EAAYnS,MAAZmS,GAAqB,UAAgB9tJ,CAAhB,EAAmB8G,CAAnB;EACX9G,mBAAainJ,OAAbjnJ,KACFA,IAAIinJ,QAAQxG,MAARwG,CAAejnJ,CAAfinJ,CADFjnJ,EAGN,KADA,IAAIkvB,SAAUk6E,MAANtiG,IAAkB9G,EAAE6uB,MAAM7uB,EAAE8gH,GAAF9gH,GAAQ8G,CAA1C,EAA6C2xE,IAAI,IAAI2uE,MAAME,UAANF,CAAiB0G,WAArB,EACjD,EAAO9tJ,EAAE8gH,GAAF9gH,GAAQkvB,CAAf,GAAkB;EACd,YAAIjR,IAAIje,EAAEkkJ,MAAFlkJ,EAAR,CACA,QAAQie,MAAM,CAAd,GACA,KAAK,CAAL;EACIw6D,cAAEnvD,YAAFmvD,GAAiBz4E,EAAEq7I,MAAFr7I,EAAjBy4E,CACA,MACJ,KAAK,CAAL;EACIA,cAAEw1E,YAAFx1E,GAAiBz4E,EAAEq7I,MAAFr7I,EAAjBy4E,CACA,MACJ;EACIz4E,cAAE+lJ,QAAF/lJ,CAAe,IAAJie,CAAXje,EARJ;EAYJ,cAAOy4E,CAAP;OArBJq1E,EAwBOA,CAAP;EAjCqB,GAAA,EAtoDzBxG,EA0qDOA,CAAP;EA9qDe,CAAA,EAAnBF,CAirDA,mBAAiBA,KAAjB;EAAA,4CAAA,wBClqDI8G,GAAmB33I,GAAYg4G,GAC/B4/B;EACF,MAAMrsF,IAAQvrD,EAAKgL,MAALhL,CAAY23I,CAAZ33I,CAAd,CACA,IAAIurD,UAA8BsnC,MAArBtnC,EAAMg2C,UAAnB,EAA6C;EAC3C,QAAmB,aAAfh2C,EAAMjjD,IAAV,EACE,OAAOuvI,UAAU73I,EAAKyzG,UAALzzG,CAAgBurD,EAAMg2C,UAAtBvhG,CAAV63I,EAA6C7/B,CAA7C6/B,EAAwDD,CAAxDC,CAAP,CAEF,IAAmB,cAAftsF,EAAMjjD,IAAV,EAQE,QAPoC,MAArBijD,EAAMg2C,UAAe,GACJ,MAA3Bh2C,EAAMusF,gBAAqB,GACvB93I,EAAKyzG,UADkB,GAEvBzzG,EAAKyzG,UAALzzG,CAAgB3T,KAAhB2T,CACIurD,EAAMg2C,UADVvhG,GACuBurD,EAAMusF,gBAD7B93I,CAH2B,GAKhCA,EAAKyzG,UAALzzG,CAAgB2lC,MAAhB3lC,CAAuBurD,EAAMg2C,UAA7BvhG,GAEU1P,IAAI,UAAAvD,CAAA;EAAQ,aAAA8qJ,UAAU9qJ,CAAV8qJ,EAAgB7/B,CAAhB6/B,EAA2BD,CAA3BC,CAAA;QAA1B,CAEF,IAAMrkJ,IAAOnJ,MAAM2oG,SAAN3oG,CAAgBgC,KAAhBhC,CAAsBoD,IAAtBpD,CACTwtJ,UACI73I,EAAKyzG,UAALzzG,CAAgB3T,KAAhB2T,CAAsBurD,EAAMg2C,UAA5BvhG,EAAwC,CAAxCA,CADJ63I,EACgD7/B,CADhD6/B,EAC2DD,CAD3DC,EAEKhpJ,QAFLgpJ,EADSxtJ,CAAb,CAIA,OAAsB,aAAfkhE,EAAMjjD,IAAS,GAAW9U,EAAK,CAALA,CAAX,GAAqBA,CAA3C;EAEF,UAAO+3D,KAASA,EAAMh5D,KAAtB;EASF,mBAAA,CACIxF,CADJ,EACkBgrJ,CADlB,EAEIH,CAFJ;EAGQ,MAAAjwI,oBAAA;EAAA,MAACqwI,QAAD;EAAA,MAAWlvJ,QAAX;EAAA,MACAmvJ,IAAYL,EAAQM,iBAARN,CAA0BO,IAA1BP,CAA+B,UAAAK,CAAA;EAC/C,aAASF,EAAWK,yBAAyBJ,CAAzBI,EAAmCH,CAAnCG,CAAXL,CAAT;KADgBH,CADZ,CAKN,YAAqB/kD,MAAdolD,IACHF,EAAWK,yBAAyBJ,CAAzBI,EAAmCH,CAAnCG,CAAXL,EAA0DjvJ,CAA1DivJ,SACAllD,CAFJ;EAUF,sCAAA,CACI9lG,CADJ,EACkBgrJ,CADlB,EAEIH,CAFJ;EAGE,SAAOG,EAAWK,yBAAyBrrJ,CAAzBqrJ,EAA+BR,EAAQS,gBAAvCD,CAAXL,CAAP;EASF,6BAAA,CACI73I,CADJ,EACuB03I,CADvB;EAEQ,MAAAjwI,oBAAA;EAAA,MAACqwI,QAAD;EAAA,MAAWlvJ,QAAX,CAEN,QACEsvJ,yBAAyBJ,CAAzBI,EAAmCR,KAAWA,EAAQS,gBAAtDD,GACAtvJ,EAFF;EAMF,kCAAA,CAAkCiE,CAAlC,E
AAgDkrJ,CAAhD;EACE,SAASA,IAAelrJ,OAAAA,GAAQkrJ,CAAvBA,GAAqClrJ,CAA9C;EAGF,uBAAA,CAA8BA,CAA9B;EACE,MAAMjE,IAAQiE,EAAKu9F,WAALv9F,CAAiB,GAAjBA,CAAd,CACA,QAAe,MAAXjE,KAAsBiE,GAAM,MAEfA,EAAKumB,SAALvmB,CAAe,CAAfA,EAAkBjE,CAAlBiE,GACCnD,OAAOmD,EAAKumB,SAALvmB,CAAejE,IAAQ,CAAvBiE,CAAPnD,EAHlB;EAMF,mBAAA,CAAsBP,CAAtB,EAAqCqB,CAArC;EAEE,OADA,IAAM+Y,MAAN,EACSla,IAAI,CAAb,EAAgBA,IAAIF,EAAIT,MAAxB,EAAgCW,KAAKmB,CAArC,EACE+Y,EAAIjZ,IAAJiZ,CAASpa,EAAIgD,KAAJhD,CAAUE,CAAVF,EAAaE,IAAImB,CAAjBrB,CAAToa,EAEF,OAAOA,CAAP;EChGK,KAAMkmF,UAET2uD,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACEytI,cAAgB,GAChBI,oBAAsB,GACtBH,aAAe,WACfpwI,MAAQ,kBAIVgwI,UAAY,WACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,WACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,YACZC,UAAY,YACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,WACZC,UAAY,WACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAIlDgwI,UAAY,WACZC,UAAY,WACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAIlDgwI,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,qBACZC,UAAY,qBACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,YACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OApLjB;EAAA,8CAAA;EAAA,ICAMjvD,YAET2uD,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MA
AQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,eACZC,UAAY,eACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,kBACfD,aAAe,gBACfpwI,MAAQ,cAGRqwI,aAAe,kBACfD,aAAe,gBACfpwI,MAAQ,iBAKZgwI,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,eACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OAEjBF,aAAe,gBAAgBpwI,MAAQ,UAAUyP,cAAgB,OACjE2gI,aAAe,gBAAgBpwI,MAAQ,UAAUyP,cAAgB,UAIpEugI,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,WACZC,UAAY,WACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,UACZC,UAAY,UACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,cACZC,UAAY,cACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,cACZC,UAAY,cACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,YACZC,UAAY,YACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9Cqw
I,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,OACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBACjDqwI,aAAe,aACfD,aAAe,YACfpwI,MAAQ,QACRswI,eAAgB,OAGhBD,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,aACZC,UAAY,aACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,SACfD,aAAe,SACfpwI,MAAQ,UACRyP,cAAgB,QAGhB4gI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OD/fjB;EAAA,+CAAA;EAAA,IEAMjvD,YAET2uD,UAAY,YACZC,UAAY,YACZC,UAAY,WACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,iBAG9DgwI,UAAY,UACZC,UAAY,UACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,cAClDmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,iBAIrDgwI,UAAY,SACZC,UAAY,SACZC,UAAY,WACZxtI,WACEytI,cAAgB,GAChBI,oBAAsB,GACtBH,aAAe,WACfpwI,MAAQ,kBAIVgwI,UAAY,SACZC,UAAY,SACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACnDqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OAGhBD,aAAe,cACfD,aAAe,aACfpwI,MAAQ,cAGRqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,eAKZgwI,UAAY,QACZC,UAAY,QACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACnDqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,iBACZC,UAAY,iBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACnDqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,iBACZC,UAAY,eACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,cAClDqwI,aAAe,SAASD,aAAe,SAASpwI,MAAQ,aACvDqwI,aAAe,iBACfD,aAAe,gBACfpwI,MAAQ,aAGRqwI,aAAe,gBACfD,aAAe,eACfpwI,MAAQ,YAGRqwI,aAAe,oBACfD,aAAe,kBACfpwI,MAAQ,YAGRqwI,aAAe,4BACfD,aAAe,0BACfpwI,MAAQ,YAGRqwI,aAAe,qBACfD,aAAe,QACfpwI,MAAQ,iBAKZgwI,UAAY,sBACZC,UAAY,oBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACnDqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,qBACZC,UAAY,mBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACnDqwI,aAAe,SACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,uBACZC,UAAY,qBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,gBACrDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDqwI,aAAe,SAASD,aAAe,SAASpwI,MAAQ,aACvDqwI,aAAe,iBACfD,aAAe,gBACfpwI,MAAQ,gBAKZgwI,UAAY,wBACZC,UAAY,sBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,gBACrDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDqwI,aAAe,KAAKD,aAAe,SAASpwI,MAAQ,gBAIvDgwI,UAAY,uBACZC,UAAY,qBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDqwI,aAAe,SAASD,aAAe,SAASpwI,MAAQ,aACvDqwI,aAAe,yBACfD,aAAe,uBACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,sBACZC,UAAY,oBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,gBACrDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDqwI,aAAe,KAAKD,aAAe,SAASpwI,MAAQ,gBAIvDgwI,UAAY,qBACZC,UAAY,mBACZC,UAAY,WACZxtI,W
ACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,iBAIvDgwI,UAAY,sBACZC,UAAY,oBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cFhN3D;EAAA,6CAAA;EAAA,IGAMqhF,YAET2uD,UAAY,WACZC,UAAY,WACZC,UAAY,eACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CqwI,aAAe,WAAWD,aAAe,WAAWpwI,MAAQ,gBAC5DqwI,aAAe,WAAWD,aAAe,OAAOpwI,MAAQ,cACvDqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRswI,eAAgB,OAEjBD,aAAe,SAASD,aAAe,cAAcpwI,MAAQ,gBAE5DqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,WACZC,UAAY,WACZC,UAAY,eACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CqwI,aAAe,WAAWD,aAAe,WAAWpwI,MAAQ,gBAC5DqwI,aAAe,WAAWD,aAAe,OAAOpwI,MAAQ,cACvDqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRswI,eAAgB,OAEjBD,aAAe,SAASD,aAAe,cAAcpwI,MAAQ,gBAE5DqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,UACZC,UAAY,UACZC,UAAY,eACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDqwI,aAAe,UAAUD,aAAe,UAAUpwI,MAAQ,cAC1DqwI,aAAe,WAAWD,aAAe,OAAOpwI,MAAQ,cACvDqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRyP,cAAgB,WAGhB4gI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OAGhBD,aAAe,YACfD,aAAe,YACfpwI,MAAQ,UACRyP,cAAgB,UAKpBugI,UAAY,UACZC,UAAY,UACZC,UAAY,eACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACnDqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OAEjBD,aAAe,WAAWD,aAAe,WAAWpwI,MAAQ,gBAC5DqwI,aAAe,WAAWD,aAAe,OAAOpwI,MAAQ,cACvDqwI,aAAe,iBACfD,aAAe,iBACfpwI,MAAQ,YAGRqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRyP,cAAgB,YAGhB4gI,aAAe,aACfD,aAAe,aACfpwI,MAAQ,mBAKZgwI,UAAY,uBACZC,UAAY,mBACZC,UAAY,eACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,eAAepwI,MAAQ,gBACzDqwI,aAAe,WAAWD,aAAe,WAAWpwI,MAAQ,gBAC5DqwI,aAAe,WAAWD,aAAe,OAAOpwI,MAAQ,cACvDqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRswI,eAAgB,UAKpBN,UAAY,mBACZC,UAAY,mBACZC,UAAY,eACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDqwI,aAAe,WAAWD,aAAe,WAAWpwI,MAAQ,gBAC5DqwI,aAAe,WAAWD,aAAe,OAAOpwI,MAAQ,cACvDqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRyP,cAAgB,YAGhB4gI,aAAe,aACfD,aAAe,aACfpwI,MAAQ,mBAKZgwI,UAAY,yBACZC,UAAY,mBACZC,UAAY,eACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDqwI,aAAe,WAAWD,aAAe,WAAWpwI,MAAQ,gBAC5DqwI,aAAe,WAAWD,aAAe,OAAOpwI,MAAQ,cACvDqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRyP,cAAgB,YAGhB4gI,aAAe,aACfD,aAAe,aACfpwI,MAAQ,gBH/JT;EAAA,iDAAA;EAAA,IIAMqhF,YAET2uD,UAAY,QACZC,UAAY,QACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,gBACnDmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDqwI,aAAe,KAAKD,aAAe,SAASpwI,MAAQ,gBAIvDgwI,UAAY,YACZC,UAAY,YACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,cAClDmwI,cAAgB,GAAGC,aAAe,OAAOpwI,MAAQ,cAChDqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,UACZC,UAAY,UACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,cACrDmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cAClDmwI,cAAgB,GAChBC,aAAe,WACfpwI,MAAQ,UACRyP,cAAgB,OAGhB0gI,cAAgB,GAChBC,aAAe,YACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,QACfD,aAAe,QACfpwI,MAAQ,UACRswI,eAAgB,OAGhBD,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,gBACnDqwI,aAAe,KAAKD,aAAe,SAASpwI,MAAQ,gBAIvDgwI,UAAY,YACZC,UAAY,YACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CqwI,aAAe,SAASD,aAAe,SAASpwI,MAAQ,gBAI3DgwI,UAAY,iBACZC,UAAY,iBACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,gBAClDqwI,aAAe,UACfD,aAAe,UACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,UACfD,aAAe,UACfpwI,MAAQ,UACRyP,cAAgB,OAEjB4gI,aAAe
,SAASD,aAAe,SAASpwI,MAAQ,aACvDqwI,aAAe,QACfD,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,SACfD,aAAe,SACfpwI,MAAQ,UACRyP,cAAgB,GAChB6gI,eAAgB,OAGhBD,aAAe,KACfD,aAAe,KACfpwI,MAAQ,UACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,cACjDmwI,cAAgB,GAChBC,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,OAEjB4gI,aAAe,QAAQD,aAAe,SAASpwI,MAAQ,gBAI1DgwI,UAAY,mBACZC,UAAY,mBACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,gBAClDqwI,aAAe,SACfD,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,UACfD,aAAe,UACfpwI,MAAQ,UACRyP,cAAgB,OAEjB4gI,aAAe,QAAQD,aAAe,QAAQpwI,MAAQ,cACrDqwI,aAAe,SACfD,aAAe,SACfpwI,MAAQ,UACRyP,cAAgB,GAChB6gI,eAAgB,OAEjBD,aAAe,SAASD,aAAe,SAASpwI,MAAQ,aACvDqwI,aAAe,KACfD,aAAe,KACfpwI,MAAQ,UACRswI,eAAgB,UAKpBN,UAAY,SACZC,UAAY,SACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,gBACnDqwI,aAAe,KAAKD,aAAe,SAASpwI,MAAQ,gBAIvDgwI,UAAY,aACZC,UAAY,aACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CqwI,aAAe,KAAKD,aAAe,SAASpwI,MAAQ,aJhLpD;EAAA,8CAAA;EAAA,IKAMqhF,YAET2uD,UAAY,uBACZC,UAAY,qBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,gBAAgBpwI,MAAQ,iBAI7DgwI,UAAY,uBACZC,UAAY,qBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,gBAAgBpwI,MAAQ,cAC1DmwI,cAAgB,GAAGC,aAAe,kBAAkBpwI,MAAQ,iBAI/DgwI,UAAY,SACZC,UAAY,cACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,aAAapwI,MAAQ,cACtDqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,YACZC,UAAY,kBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OL/CjB;EAAA,6CAAA;EAAA,IMAMjvD,YACX2uD,UAAY,UACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CqwI,aAAe,UAAUD,aAAe,UAAUpwI,MAAQ,YNPxD;EAAA,gDAAA;EAAA,IOAMqhF,YAET2uD,UAAY,0BACZC,UAAY,eACZC,UAAY,SACZxtI,WACGytI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,cACrDqwI,aAAe,SAASD,aAAe,SAASpwI,MAAQ,aACxDqwI,aAAe,SAASD,aAAe,SAASpwI,MAAQ,gBAI3DgwI,UAAY,eACZC,UAAY,eACZC,UAAY,SACZxtI,WACG2tI,aAAe,SAASD,aAAe,SAASpwI,MAAQ,aACxDqwI,aAAe,SAASD,aAAe,SAASpwI,MAAQ,gBAG5DgwI,UAAY,SAASC,UAAY,SAASC,UAAY,aACrDF,UAAY,YACZC,UAAY,YACZC,UAAY,SACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAG3DgwI,UAAY,YACZC,UAAY,YACZC,UAAY,SACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAG3DgwI,UAAY,QACZC,UAAY,QACZC,UAAY,SACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAG3DgwI,UAAY,QACZC,UAAY,QACZC,UAAY,SACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAG3DgwI,UAAY,SACZC,UAAY,SACZC,UAAY,SACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAG3DgwI,UAAY,UACZC,UAAY,UACZC,UAAY,SACZxtI,WACEytI,cAAgB,GAChBI,oBAAsB,GACtBH,aAAe,KACfpwI,MAAQ,kBAIVgwI,UAAY,SACZC,UAAY,SACZC,UAAY,SACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CmwI,cAAgB,GAChBI,oBAAsB,GACtBH,aAAe,QACfpwI,MAAQ,eAETqwI,aAAe,WAAWD,aAAe,WAAWpwI,MAAQ,cAC3DqwI,aAAe,WACfD,aAAe,UACfpwI,MAAQ,UACRwwI,cAAe,OAGfH,aAAe,aACfD,aAAe,aACfpwI,MAAQ,UACRyP,cAAgB,UAIrBugI,UAAY,QAAQC,UAAY,QAAQC,UAAY,SAASxtI,gBAC5DstI,UAAY,gBACZC,UAAY,gBACZC,UAAY,SACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAG3DgwI,UAAY,2BACZC,UAAY,2BACZC,UAAY,SACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CqwI,aAAe,OAAOD,aAAe,OAAOpwI,MAAQ,cACpDqwI,aAAe,OAAOD,aAAe,OAAOpwI,MAAQ,cPnGpD;EAAA,2CAAA;EAAA,IQAMqhF,YAET2uD,UAAY,kBACZC,UAAY,kBACZC,UAAY,SACZxtI,WACGytI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAA
e,QAAQpwI,MAAQ,gBACjDqwI,aAAe,iBACfD,aAAe,gBACfpwI,MAAQ,YAGRqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,yBACZC,UAAY,yBACZC,UAAY,SACZxtI,WACGytI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBACjDqwI,aAAe,iBACfD,aAAe,gBACfpwI,MAAQ,YAGRqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,iBACZC,UAAY,iBACZC,UAAY,SACZxtI,WACGytI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,YAAYpwI,MAAQ,gBACtDqwI,aAAe,UAAUD,aAAe,UAAUpwI,MAAQ,cACzDqwI,aAAe,uBACfD,aAAe,sBACfpwI,MAAQ,cRnDT;EAAA,6CAAA;EAAA,ISAMqhF,YAET2uD,UAAY,SACZC,UAAY,SACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,YACZC,UAAY,YACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,WACZC,UAAY,WACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,gBACZC,UAAY,gBACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,aACZC,UAAY,aACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,cACZC,UAAY,cACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,cACZC,UAAY,cACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,aACZC,UAAY,aACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,UACZC,UAAY,SACZC,UAAY,WACZxtI,WACGytI,cAAgB,GAAGC,aAAe,aAAapwI,MAAQ,cACvDmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OTzIjB;EAAA,6CAAA;EAAA,IUAMjvD,aAET2uD,UAAY,UACZC,UAAY,UACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,QACRyP,eAAgB,OAGhB4gI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,QACRyP,eAAgB,OAGhB4gI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,eACZC,UAAY,UACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,SACfD,aAAe,cACfpwI,MAAQ,QACRyP,eAAgB,OAGhB4gI,aAAe,SACfD,aAAe,cACfpwI,MAAQ,QACRyP,eAAgB,OAGhB4gI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,UAKpBN,UAAY,aACZC,UAAY,aACZC,UAAY,YACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBACjDqwI,aAAe,KACfD,aAAe,SACfpwI,MAAQ,SACRswI,eAAgB,OV/DjB;EAAA,+CAAA;EAAA,IWAMjvD,aAET2uD,UAAY,kBACZC,UAAY,sBACZC,UAAY,iBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,cAClDmwI,cAAgB,GAAGC,aAAe,YAAYpwI,MAAQ,cACrDqwI,aAAe,WACfD,aAAe,WACfpwI,MAAQ,UACRyP,cAAgB,UAGhB4gI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRswI,eAAgB,UAKpBN,UAAY,oBACZC,UAAY,sBACZC,UAAY,iBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/Cmw
I,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,cACnDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,cAClDmwI,cAAgB,GAAGC,aAAe,YAAYpwI,MAAQ,cACrDqwI,aAAe,WACfD,aAAe,WACfpwI,MAAQ,UACRyP,cAAgB,UAGhB4gI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,UACRswI,eAAgB,UAKpBN,UAAY,OACZC,UAAY,8BACZC,UAAY,iBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,gBACfD,aAAe,UACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,QACfD,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,SACfD,aAAe,SACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,QACfD,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,WAKpBugI,UAAY,WACZC,UAAY,WACZC,UAAY,iBACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAG3DgwI,UAAY,cACZC,UAAY,cACZC,UAAY,iBACZxtI,WAAYytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,iBAG3DgwI,UAAY,iBACZC,UAAY,iBACZC,UAAY,iBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,eAAepwI,MAAQ,gBACzDmwI,cAAgB,GAAGC,aAAe,gBAAgBpwI,MAAQ,cAC1DmwI,cAAgB,GAAGC,aAAe,gBAAgBpwI,MAAQ,cACzDqwI,aAAe,oBACfD,aAAe,mBACfpwI,MAAQ,QACRyP,eAAgB,GAChB6gI,eAAgB,OXvGjB;EAAA,oDAAA;EAAA,IYAMjvD,aAET2uD,UAAY,OACZC,UAAY,OACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBAClDqwI,aAAe,aAAaD,aAAe,YAAYpwI,MAAQ,eAIlEgwI,UAAY,QACZC,UAAY,QACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBAClDqwI,aAAe,aAAaD,aAAe,YAAYpwI,MAAQ,eAIlEgwI,UAAY,OACZC,UAAY,OACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBAClDqwI,aAAe,aAAaD,aAAe,YAAYpwI,MAAQ,eAIlEgwI,UAAY,OACZC,UAAY,OACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBAClDqwI,aAAe,aAAaD,aAAe,YAAYpwI,MAAQ,eAIlEgwI,UAAY,OACZC,UAAY,OACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBAClDqwI,aAAe,aAAaD,aAAe,YAAYpwI,MAAQ,eAIlEgwI,UAAY,OACZC,UAAY,OACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBAClDqwI,aAAe,aAAaD,aAAe,YAAYpwI,MAAQ,eAIlEgwI,UAAY,UACZC,UAAY,UACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,iBAIrDgwI,UAAY,UACZC,UAAY,UACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,iBAIrDgwI,UAAY,QACZC,UAAY,QACZC,UAAY,aACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,gBACjDqwI,aAAe,aACfD,aAAe,YACfpwI,MAAQ,YZxFT;EAAA,gDAAA;EAAA,IaAMqhF,aAET2uD,UAAY,YACZC,UAAY,UACZC,UAAY,cACZxtI,WAEIytI,cAAgB,GAChBI,oBAAsB,GACtBH,aAAe,WACfpwI,MAAQ,eAETmwI,eAAiB,GAAGC,aAAe,QAAQpwI,MAAQ,iBAItDgwI,UAAY,UACZC,UAAY,UACZC,UAAY,cACZxtI,WAEIytI,cAAgB,GAChBI,oBAAsB,GACtBH,aAAe,WACfpwI,MAAQ,eAETmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,iBAIrDgwI,UAAY,YACZC,UAAY,UACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,cACpDmwI,cAAgB,GAChBC,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,UAKpBugI,UAAY,UACZC,UAAY,UACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,cACpDqwI,aAAe,QACfD,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,oBACfD,aAAe,mBACfpwI,MAAQ,QACRswI,eAAgB,UAKpBN,UAAY,WACZC,UAAY,WACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CmwI,cAAgB,GAChBC,aAAe,QACfpwI,MAAQ,QACRswI,eAAgB,UAKpBN,UAAY,aACZC,UAAY,WACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,mBAIrDgwI,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,gBACnDmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,mBAIrDgwI,UAAY,gBACZC,UAAY,gBACZC,UAAY,cACZxtI,WACGytI,cAAgB,
GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,gBACnDmwI,cAAgB,GAAGC,aAAe,OAAOpwI,MAAQ,gBACjDmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,gBACpDqwI,aAAe,cACfD,aAAe,aACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,YACfD,aAAe,WACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,iBACfD,aAAe,eACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,iBACfD,aAAe,gBACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,oBACfD,aAAe,kBACfpwI,MAAQ,UACRyP,cAAgB,UAKpBugI,UAAY,QACZC,UAAY,SACZC,UAAY,cACZxtI,WAEIytI,cAAgB,GAChBI,oBAAsB,GACtBH,aAAe,WACfpwI,MAAQ,eAGRqwI,aAAe,QACfD,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,UAKpBugI,UAAY,UACZC,UAAY,WACZC,UAAY,cACZxtI,WAEIytI,cAAgB,GAChBI,oBAAsB,GACtBH,aAAe,UACfpwI,MAAQ,cAGRqwI,aAAe,QACfD,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,OAGhB4gI,aAAe,OACfD,aAAe,OACfpwI,MAAQ,UACRyP,cAAgB,GAChB6gI,eAAgB,UAKpBN,UAAY,QACZC,UAAY,QACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,QAAQpwI,MAAQ,mBAIrDgwI,UAAY,SACZC,UAAY,SACZC,UAAY,cACZxtI,WAEIytI,cAAgB,GAChBC,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,OAEjB0gI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,aACfD,aAAe,mBACfpwI,MAAQ,UACRyP,cAAgB,UAKpBugI,UAAY,UACZC,UAAY,SACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,mBAAmBpwI,MAAQ,gBAE5DmwI,cAAgB,GAChBC,aAAe,QACfpwI,MAAQ,UACRyP,cAAgB,UAKpBugI,UAAY,aACZC,UAAY,aACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,cACrDmwI,cAAgB,GAAGC,aAAe,UAAUpwI,MAAQ,cACpDmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,mBAItDgwI,UAAY,YACZC,UAAY,YACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,iBAIxDgwI,UAAY,iBACZC,UAAY,iBACZC,UAAY,cACZxtI,WACGytI,cAAgB,GAAGC,aAAe,iBAAiBpwI,MAAQ,cAC3DmwI,cAAgB,GAAGC,aAAe,eAAepwI,MAAQ,gBACzDmwI,cAAgB,GAAGC,aAAe,gBAAgBpwI,MAAQ,cAC1DmwI,cAAgB,GAAGC,aAAe,gBAAgBpwI,MAAQ,cACzDqwI,aAAe,oBACfD,aAAe,mBACfpwI,MAAQ,QACRyP,eAAgB,GAChB6gI,eAAgB,Ob/PjB;EAAA,gDAAA;EAAA,IcAMjvD,aAET2uD,UAAY,QACZC,UAAY,QACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,QACfD,aAAe,UACfpwI,MAAQ,SACRswI,eAAgB,OAEjBD,aAAe,QAAQD,aAAe,SAASpwI,MAAQ,gBAI1DgwI,UAAY,cACZC,UAAY,cACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CmwI,cAAgB,GAChBM,uBAAyB,OACzBL,aAAe,QACfpwI,MAAQ,iBAKZgwI,UAAY,OACZC,UAAY,OACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,gBACpDqwI,aAAe,kBACfD,aAAe,iBACfpwI,MAAQ,UACRyP,cAAgB,UAKpBugI,UAAY,SACZC,UAAY,OACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,WAAWpwI,MAAQ,gBACpDmwI,cAAgB,GAChBC,aAAe,iBACfpwI,MAAQ,UACRyP,cAAgB,UAKpBugI,UAAY,WACZC,UAAY,WACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,mBAItDgwI,UAAY,WACZC,UAAY,WACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,QACfI,uBAAyB,gBACzBL,aAAe,QACfpwI,MAAQ,mBAKZgwI,UAAY,kBACZC,UAAY,kBACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,cAAcpwI,MAAQ,gBACxDmwI,cAAgB,GAAGC,aAAe,YAAYpwI,MAAQ,mBAIzDgwI,UAAY,kBACZC,UAAY,kBACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC/CmwI,cAAgB,GAAGC,aAAe,cAAcpwI,MAAQ,gBACxDmwI,cAAgB,GAAGC,aAAe,SAASpwI,MAAQ,mBAItDgwI,UAAY,gBACZC,UAAY,gBACZC,UAAY,kBACZxtI,WACGytI,cAAgB,GAAGC,aAAe,KAAKpwI,MAAQ,cAC9CqwI,aAAe,cACfD,aAAe,aACfpwI,MAAQ,cAGRqwI,aAAe,eACfD,aAAe,cACfpwI,MAAQ,cd/GT;EAAA,qDAAA;EAAA,IeqBD0wI,oBAAoB,UAAU,SAAS,SAAS,QAAQ,gBfrBvD;EAAA,IesBDC,qBACD,uBAAuB,uBAAuB,QfvB5C;EAAA;EeoCL,YAAA;EACE,QAAM1mD,KACJ2mD,YAAYC,WAAWC,SAASC,aAAaC,UAAUC,SACvDC,YAAYC,SAAS99I,SAAO+9I,OAAOC,UAAUC,eAAe57E,WAC5D67E,WAAWC,eAHb;EAAA,QAKMC,OAA6BpkJ,OAAOjI,UAAU6kG,EAAIjiG,GAAJiiG,CAAQ,UAAAv+E,CA
AA;EAAM,aAAAA,EAAG21E,IAAH;OAAd4I,EALpD,CAOA/jG,KAAKwrJ,SAALxrJ,GAAiBurJ,EAAY5vG,MAAZ4vG,CACb,UAACzpJ,CAAD,EAAM2pJ,CAAN;EAEE,aADA3pJ,EAAI2pJ,EAAO3B,QAAXhoJ,IAAuB2pJ,CAAvB3pJ,EACOA,CAAP;OAHWypJ,IAAAA,CAAjBvrJ;EA+NJ,UA5OEmE,qBAAAA,CAAkBunJ,CAAlBvnJ,YAAAA,SAAA;EACE,aAAOnE,KAAK2rJ,SAAL3rJ,KAAmBA,KAAK2rJ,SAAL3rJ,GAAiB,IAAIA,IAAJ,EAApCA,CAAP;2CADFmE,GAqBQunJ,WAAAA,cAAAA,GAAR,UAAsBl6I,CAAtB;EACE,WAAOg5I,iBAAiBzqI,IAAjByqI,CAAsB,UAAAhlI,CAAA;EAAM,aAAAA,MAAOhU,EAAKgU,EAAZ;OAA5BglI,CAAP;KAtBFrmJ,EAyBQunJ,WAAAA,eAAAA,GAAR,UAAuBl6I,CAAvB;EACE,WAAOi5I,kBAAkB1qI,IAAlB0qI,CAAuB,UAAAjlI,CAAA;EAAM,aAAAA,MAAOhU,EAAKgU,EAAZ;OAA7BilI,CAAP;KA1BFtmJ,EA8BAunJ,WAAAA,eAAAA,GAAA,UAAeR,CAAf;EAAA,gBAAA;EAAA,QAEMU,KAAkB,CAFxB;EAAA,QAGMC,KAAmB,CAHzB;EAAA,QAIQC,MAJR;EAAA,QAKQv8E,MALR;EAAA,QAMQiyC,IALU0pC,EAAM15I,IAAN05I,CAKMvvG,MALNuvG,CAKoC,UAACppJ,CAAD,EAAM0P,CAAN;EAMlD,aALA1P,EAAI0P,EAAKjT,IAATuD,IAAiBnB,EAAKorJ,OAALprJ,CAAa6Q,CAAb7Q,CAAjBmB,EACInB,EAAKqrJ,aAALrrJ,CAAmB6Q,CAAnB7Q,MAA0BirJ,KAAkB,CAA5CjrJ,CADJmB,EAEInB,EAAKsrJ,cAALtrJ,CAAoB6Q,CAApB7Q,MAA2BkrJ,KAAmB,CAA9ClrJ,CAFJmB,EAGgB,kBAAZ0P,EAAKgU,EAAO,IAAesmI,EAAa9vJ,IAAb8vJ,CAAkBhqJ,EAAI0P,EAAKjT,IAATuD,CAAlBgqJ,CAH/BhqJ,EAIgB,YAAZ0P,EAAKgU,EAAO,IAAS+pD,EAAQvzE,IAARuzE,CAAaztE,EAAI0P,EAAKjT,IAATuD,CAAbytE,CAJzBztE,EAKOA,CAAP;OAXcopJ,IAAAA,CADlB;EAAA,QAeQz5I,MAfR;EAAA,QAgBQI,MAhBR,CAgCE,OAfA1N,OAAO0O,IAAP1O,CAAYq9G,CAAZr9G,EAAmB/D,OAAnB+D,CAA2B,UAAAoS,CAAA;EACzB,UAAM/E,IAAOgwG,EAAMjrG,CAANirG,CAAb,CACAhwG,EAAKyzG,UAALzzG,CAAgBpR,OAAhBoR,CAAwB,UAAAjT,CAAA;EACf,YAAAirJ,6BAAA,CACPh4I,EAAKC,MAALD,CAAYxV,IAAZwV,CAAiBgwG,EAAMgoC,CAANhoC,CAAjBhwG,GACAgwG,EAAMgoC,CAANhoC,EAAgB0qC,QAAhB1qC,CAAyBxlH,IAAzBwlH,CAA8BhwG,CAA9BgwG,CADAhwG;SAFFA,GAK2B,MAAvBA,EAAKC,MAALD,CAAYpX,MAAW,IAAGqX,EAAOzV,IAAPyV,CAAYD,CAAZC,CAL9BD;OAFFrN,GAUAA,OAAO0O,IAAP1O,CAAYq9G,CAAZr9G,EAAmB/D,OAAnB+D,CAA2B,UAAAoS,CAAA;EACzB,UAAM/E,IAAOgwG,EAAMjrG,CAANirG,CAAb,CAC6B,MAAzBhwG,EAAK06I,QAAL16I,CAAcpX,MAAW,IAAGyX,EAAQ7V,IAAR6V,CAAaL,CAAbK,CAAH;OAF/B1N,CAVAA,IAgBEq9G,UACA/vG,WACAI,YACA09D,YACAu8E,iBACAF,oBACAC,qBAPF;KA9DF1nJ,EAyEQunJ,WAAAA,QAAAA,GAAR,UAAgBl6I,CAAhB;EAAA,gBAAA;EAAA,QACQi6I,IAASzrJ,KAAKwrJ,SAALxrJ,CAAewR,EAAKgU,EAApBxlB,CADjB,CAEE,SAAeqkG,MAAXonD,CAAJ,EACE,MAAM,IAAIlyJ,KAAJ,CAAU,qCAAqCiY,EAAKgU,EAApD,CAAN,CAEF,IAAM2mI,MACJ5tJ,MAAMiT,EAAKjT,MACXinB,IAAIimI,EAAO1B,UACXC,UAAUyB,EAAOzB,UACjB/kC,aACKzzG,EAAKG,KAALH,QACI1P,IAAI,UAAA6P,CAAA;EAAS,eAAAA,EAAMugF,UAANvgF,CAAiB,GAAjBA,IAAwBA,EAAM1X,MAAN0X,CAAa,CAAbA,CAAxBA,GAA0CA,CAA1C;YACtBF,YACAy6I,cACA1vI,YATF,CAwFA,OA5EMivI,EAAOjvI,MAAPivI,KACJU,EAAQ3vI,MAAR2vI,GAAiBV,EAAOjvI,MAAPivI,CAAc9vG,MAAd8vG,CACuC,UAAC3pJ,CAAD,EAAMi7D,CAAN;EACtD,UAAMg2C,IAAah2C,EAAMktF,YAAzB;EAAA,UACMX,IAAmBvsF,EAAMstF,kBAD/B;EAAA,UAEMvwI,IAAOijD,EAAMjjD,IAFnB;EAAA,UAGI/V,SAAQsgG,CAHZ,CAIA,SAAmBA,MAAf0O,CAAJ,EACE,QAAQh2C,EAAMjjD,IAAd,GACE,KAAK,QAAL;iBAIgBuqF,OAHdtgG,IAAQpD,EAAKyrJ,cAALzrJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMotF,WADbxpJ,EAC0Bo8D,EAAMxzC,YADhC5oB,MAGqBo8D,EAAMwtF,0BACjCxmJ,IAAQpD,EAAKyrJ,cAALzrJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMwtF,qBADb5pJ,EAEJo8D,EAAMxzC,YAFF5oB,GAIV,MACF,KAAK,QAAL;iBAGgB0jG,OAFdtgG,IAAQpD,EAAK0rJ,cAAL1rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMotF,WADbxpJ,EAC0Bo8D,EAAMxzC,YADhC5oB,MAEqBo8D,EAAMwtF,0BACjCxmJ,IAAQpD,EAAK0rJ,cAAL1rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMwtF,qBADb5pJ,EAEJo8D,EAAMxzC,YAFF5oB,GAIV,MACF,KAAK,UAAL;iBAGgB0jG,OAFdtgG,IAAQpD,EAAK2rJ,oBAAL3rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMotF,WADbxpJ,EAC0Bo8D,EAAMxzC,YADhC5oB,MAEqBo8D,EAAMwtF,0BACjCxmJ,IAAQpD,EAAK2rJ,oBAAL3rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMwtF,qBADb5pJ,EAEJo8D,EAAMxzC,
YAFF5oB,GAIV,MACF,KAAK,MAAL;iBAGgB0jG,OAFdtgG,IAAQpD,EAAK4rJ,YAAL5rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMotF,WADbxpJ,EAC0Bo8D,EAAMxzC,YADhC5oB,MAEqBo8D,EAAMwtF,0BACjCxmJ,IAAQpD,EAAK4rJ,YAAL5rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMwtF,qBADb5pJ,EAEJo8D,EAAMxzC,YAFF5oB,GAIV,MACF,KAAK,OAAL;iBAGgB0jG,OAFdtgG,IAAQpD,EAAK6rJ,mBAAL7rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMotF,WADbxpJ,EAC0Bo8D,EAAMxzC,YADhC5oB,MAEqBo8D,EAAMwtF,0BACjCxmJ,IAAQpD,EAAK6rJ,mBAAL7rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMwtF,qBADb5pJ,EAEJo8D,EAAMxzC,YAFF5oB,GAIV,MACF,KAAK,OAAL;iBAGgB0jG,OAFdtgG,IAAQpD,EAAK8rJ,aAAL9rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMotF,WADbxpJ,EAC0Bo8D,EAAMxzC,YADhC5oB,MAEqBo8D,EAAMwtF,0BACjCxmJ,IAAQpD,EAAK8rJ,aAAL9rJ,CACJ6Q,EAAKyyI,IADDtjJ,EACOo8D,EAAMwtF,qBADb5pJ,EAEJo8D,EAAMxzC,YAFF5oB,GAIV,MACF,KAAK,QAAL,CACA,KAAK,SAAL;EACE,gBACF;EACE,gBAAM,IAAIpH,KAAJ,CACF,6BAA2BwjE,EAAMjjD,IAAjC,cAAA,GAAiDtI,EAAKgU,EADpD,CAAN,CA5DJ,CAiEF,OADA1jB,EAAIi7D,EAAMmtF,WAAVpoJ,MAA0BiC,UAAOgvG,eAAYj5F,SAAMwvI,qBAAnDxnJ,EACOA,CAAP;OAxEe2pJ,IAAAA,CADbA,GA4ECU,CAAP;KAtKFhoJ,EAyKQunJ,WAAAA,eAAAA,GAAR,UACIgB,CADJ,EACmDnuJ,CADnD,EACiEouJ,CADjE,EAEIC,CAFJ;uBAEIA,QACF,IAAM7vF,IAAQ2vF,EAAMnuJ,CAANmuJ,CAAd,CACA,SAAcroD,MAAVtnC,CAAJ,EAAyB;EACvB,UAAMh5D,IAAQ6Q,OAAO2rC,YAAP3rC,CAAoB1V,KAApB0V,CAA0B,IAA1BA,EAAgCmoD,EAAMrgD,CAAtC9H,CAAd,CACA,OAAOg4I,IAAW7oJ,CAAX6oJ,GAAmB7oJ,EAAMmhG,WAANnhG,EAA1B;EAEF,YAAO4oJ,CAAP;KAjLFxoJ,EAoLQunJ,WAAAA,aAAAA,GAAR,UACIgB,CADJ,EACmDnuJ,CADnD,EAEIouJ,CAFJ;EAGE,QAAM5vF,IAAQ2vF,EAAMnuJ,CAANmuJ,CAAd,CACA,OAAO3vF,IAAQA,EAAM/hE,CAAd+hE,GAAkB4vF,CAAzB;KAxLFxoJ,EA2LQunJ,WAAAA,eAAAA,GAAR,UACIgB,CADJ,EACmDnuJ,CADnD,EAEIouJ,CAFJ;EAGE,QAAM5vF,IAAQ2vF,EAAMnuJ,CAANmuJ,CAAd;EAAA,QACM3oJ,IAASg5D,IAAQA,EAAMA,EAAMh5D,KAAZg5D,CAARA,GAA6B4vF,CAD5C,CAEA,OAAyB,mBAAV5oJ,CAAU,GAAYA,CAAZ,GAAoBA,EAAakgE,KAAblgE,EAA7C;KAhMFI,EAkMQunJ,WAAAA,cAAAA,GAAR,UACIgB,CADJ,EACmDnuJ,CADnD,EAEIouJ,CAFJ;EAGE,QAAM5vF,IAAQ2vF,EAAMnuJ,CAANmuJ,CAAd,CACA,IAAI3vF,KAASA,EAAMjjD,IAAnB,EACE,QAAQijD,EAAMjjD,IAAd,GACE,KAAKyoI,eAAWK,QAAXL,CAAoBsK,QAAzB;EACE,eAAO,SAAP,CACF,KAAKtK,eAAWK,QAAXL,CAAoBuK,QAAzB;EACE,eAAO,OAAP,CACF,KAAKvK,eAAWK,QAAXL,CAAoBwK,OAAzB;EACE,eAAO,MAAP,CACF;EACE,eAAOJ,CAAP,CARJ,CAWF,OAAOA,CAAP;KAlNFxoJ,EAoNQunJ,WAAAA,oBAAAA,GAAR,UACIgB,CADJ,EACmDnuJ,CADnD,EAEIouJ,CAFJ;EAGE,QAAM5vF,IAAQ2vF,EAAMnuJ,CAANmuJ,CAAd,CACA,OAAI3vF,KAASA,EAAM9gE,KAAf8gE,GACKA,EAAM9gE,KAAN8gE,CAAYjwD,GAAZiwD,CAAgBj7D,GAAhBi7D,CACH,UAAAjwD,CAAA;EACI,aAAqB,mBAAbA,EAAI5Q,IAAS,GAAY4Q,EAAI5Q,IAAhB,GAAuB4Q,EAAI5Q,IAAJ4Q,CAAgBm3D,KAAhBn3D,EAA5C;OAFDiwD,CADLA,GAKG4vF,CALP;KAxNFxoJ,EAgOQunJ,WAAAA,qBAAAA,GAAR,UACIgB,CADJ,EACmDnuJ,CADnD,EAEIouJ,CAFJ;EAGE,QAAM5vF,IAAQ2vF,EAAMnuJ,CAANmuJ,CAAd,CACA,OAAI3vF,KACOA,EAAM5pD,IAAN4pD,CAAWh+D,CAAXg+D,IAAgBA,EAAM5pD,IAAN4pD,CAAWh+D,CAAXg+D,CAAa3iE,MAA7B2iE,GAAsCA,EAAM5pD,IAAN4pD,CAAWh+D,CAAjDg+D,GACsCA,EAAM5pD,IAAN4pD,CAAWhiE,GAC9C+G,IAAI,UAAAkV,CAAA;EAAK,aAAc,mBAANA,CAAM,GAAYA,CAAZ,GAAgBA,EAASitD,KAATjtD,EAA9B;QAHnB+lD,GAMG4vF,CANP;KApOFxoJ,GA4OF;Kf3QO;EAAA,IgBSI6oJ,YAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,KAAL;EACE,cAAQiiF,IACHwlD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADGxlD,EAEJwlD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIxlD,EAAR,CAIF,KAAK,MAAL;EACE,cAAQylD,KACJD,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADIC,EAAR,CAGF,KAAK,KAAL;EACE,cAAQC,IACJF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIE,EAEJF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIE,EAAR,CAGF,KAAK,KAAL;EACE,cAAQ5lD,IACJ0lD,cAA
c,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI1lD,EAEJ0lD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFI1lD,EAAR,CAGF,KAAK,KAAL;EACE,cAAQC,IACJylD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIzlD,EAEJylD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIzlD,EAAR,CAIF,KAAK,UAAL;EACE,cAAQ4lD,SACJH,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIG,EAEJH,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIG,EAAR,CAIF,KAAK,KAAL;EACE,cAAQlhD,IACJ+gD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI/gD,EAEJ+gD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFI/gD,EAAR,CAIF,KAAK,SAAL;EACE,cAAQ8vB,QACJixB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIjxB,EAEJixB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIjxB,EAAR,CAIF,KAAK,SAAL;EACE,cAAQ9gB,QACJ+xC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI/xC,EAEJ+xC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFI/xC,EAAR,CAIF,KAAK,KAAL;EACE,cAAQmyC,IACJJ,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADII,EAEJJ,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFII,EAAR,CAIF,KAAK,mBAAL;EACE,cAAQC,kBACJL,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIK,EAEJL,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIK,EAAR,CAIF;EACE,YAAMh4C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CAtDJ;GhBZK;EAAA,IiBSI03C,cAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,KAAL;EACE,cAAQmmF,IACJshD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIthD,EAAR,CAEF,KAAK,MAAL;EACE,cAAQ4hD,KACJN,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIM,EAAR,CAEF,KAAK,OAAL;EACE,cAAQC,MACJP,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIO,EAAR,CAEF,KAAK,MAAL;EACE,cAAQC,KACJR,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIQ,EAAR,CAEF,KAAK,OAAL;EACE,cAAQC,MACJT,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIS,EAAR,CAEF,KAAK,MAAL;EACE,cAAQC,KACJV,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIU,EAAR,CAEF,KAAK,OAAL;EACE,cAAQC,MACJX,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIW,EAEJX,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIW,EAAR,CAGF,KAAK,OAAL;EACE,cAAQC,MACJZ,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIY,EAAR,CAEF,KAAK,MAAL;EACE,cAAQC,KACJb,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIa,EAAR,CAEF,KAAK,KAAL;EACE,cAAQC,IACJd,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIc,EAAR,CAEF,KAAK,MAAL;EACE,cAAQC,KACJf,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIe,EAAR,CAEF,KAAK,KAAL;EACE,cAAQtiD,IACJuhD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIvhD,EAAR,CAEF,KAAK,KAAL;EACE,cAAQuiD,IACJhB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIgB,EAAR,CAEF,KAAK,KAAL;EACE,cAAQpxC,IACJowC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIpwC,EAAR,CAEF,KAAK,OAAL;EACE,cAAQqxC,MACJjB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIiB,EAAR,CAGF,KAAK,OAAL;EACE,cAAQ1xC,MACJywC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIzwC,EAAR,CAEF,KAAK,KAAL;EACE,cAAQb,MACJsxC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADItxC,EAAR,CAEF,KAAK,OAAL;EACE,cA
AQwyC,MACJlB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIkB,EAAR,CAGF,KAAK,KAAL;EACE,cAAQniD,IACJihD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIjhD,EAAR,CAEF,KAAK,YAAL;EACE,cAAQoiD,WACJnB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADImB,EAAR,CAGF,KAAK,MAAL;EACE,cAAQtmD,KACJmlD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADInlD,EAAR,CAEF,KAAK,OAAL;EACE,cAAQumD,MACJpB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIoB,EAAR,CAGF,KAAK,MAAL;EACE,cAAQxyB,KACJoxB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIpxB,EAAR,CAEF,KAAK,SAAL;EACE,cAAQM,QACJ8wB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI9wB,EAAR,CAEF,KAAK,KAAL;EACE,cAAQmyB,IACJrB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIqB,EAAR,CAEF,KAAK,MAAL;EACE,cAAQC,KACJtB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIsB,EAAR,CAGF,KAAK,MAAL;EACE,cAAQC,KACJvB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIuB,EAAR,CAGF,KAAK,UAAL;EACE,cAAQryC,SACJ8wC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI9wC,EAAR,CAGF,KAAK,MAAL;EACE,cAAQxV,KACJsmD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADItmD,EAAR,CAGF,KAAK,QAAL;EACE,cAAQ8nD,OACJxB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIwB,EAAR,CAGF,KAAK,MAAL;EACE,cAAQ/xB,OACJuwB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIvwB,EAAR,CAGF,KAAK,KAAL;EACE,cAAQgyB,IACJzB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIyB,EAAR,CAEF,KAAK,aAAL;EACE,cAAQpnD,YACJ2lD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI3lD,EAEJ2lD,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CAFI3lD,EAGJ2lD,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CAHI3lD,EAAR,CAIF,KAAK,OAAL;EACE,cAAQE,IACJmnD,OAAW,CAAXA,EAAgB,SAAhBA,CADInnD,EAEJb,KAAS0iD,UAAU73I,EAAKyzG,UAALzzG,CAAgB,CAAhBA,CAAV63I,EAA8B7/B,CAA9B6/B,EAAyCD,CAAzCC,CAAT1iD,CAFIa,EAAR,CAGF,KAAK,MAAL;EACE,cAAQonD,KACJ3B,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI2B,EAEJ3B,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFI2B,EAAR,CAGF,KAAK,WAAL;EACE,cAAQC,UACJ5B,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI4B,EAEJ5B,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFI4B,EAAR,CAGF;EACE,YAAMv5C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CA9HJ;GjBZK;EAAA;EkBiBL,YAAA,CACoB/2G,CADpB,EACkDN,CADlD,EAEY6wJ,CAFZ,EAEqCC,CAFrC,EAGoBC,CAHpB,EAIoBC,CAJpB,EAKoBC,CALpB;EACoBlvJ,aAAAA,GAAAzB,CAAAyB,EAA8BA,UAAAA,GAAA/B,CAA9B+B,EACRA,YAAAA,GAAA8uJ,CADQ9uJ,EACiBA,iBAAAA,GAAA+uJ,CADjB/uJ,EAEAA,2BAAAA,GAAAgvJ,CAFAhvJ,EAGAA,gBAAAA,GAAAivJ,CAHAjvJ,EAIAA,mBAAAA,GAAAkvJ,CAJAlvJ,EAJZA,YAAAA,KAIYA,EAHZA,YAAAA,IAAU,CAGEA,EAKlBA,KAAK2E,EAAL3E,GAAUmvJ,EAAYC,MAAZD,EALQnvJ;EA8RtB,UAtREmE,qBAAAA,CAAIgrJ,WAAJhrJ,UAAAA,SAAA;EACE,aAAOnE,KAAKqvJ,OAAZ;2CADFlrJ,GAOAgrJ,WAAAA,cAAAA,GAAA;EACEnvJ,SAAK8nB,OAAL9nB,CAAaI,OAAbJ,CAAqB,UAAAiT,CAAA;EAAU,aAAAA,EAAOA,MAAPA,CAAcD,OAAdC,EAAA;OAA/BjT,GACAA,KAAK8nB,OAAL9nB,KADAA,EAEAA,KAAKqvJ,OAALrvJ,IAAe,CAFfA;KARFmE,EAaAgrJ,WAAAA,KAAAA,GAAA;EACE,WAAOnvJ,KAAK8nB,OAAL9nB,CAAa5F,MAApB;KAdF+J,EAqBAgrJ,WAAAA,KAAAA,GAAA,UAAK70J,CAAL;EACE,QAAI0F,KAAKqvJ,OAAT,EACE,MAAM,IAAI91J,KAAJ,CAAU,iBAAeyG,KAAKzB,IAApB,8BAAV,CAAN,CAGF,IAAIjE,IAAQ,CAARA,IAAaA,KAAS0F,KAAK8nB,OAAL9nB,CAAa5F,MAAvC,EACE,MAAM,IAAIb,KAAJ,CAAU,8BAA4Be,CAA5B,0BAAA,GACZ0F,KAAK8nB,OAAL9nB,CAAa5F,MADX,CAAN,CA
IF,IAAMk1J,IAAkBtvJ,KAAK8nB,OAAL9nB,CAAa1F,CAAb0F,CAAxB,CACA,IAAIsvJ,EAAgBC,OAApB,EACE,MAAM,IAAIh2J,KAAJ,CACF,iBAAeyG,KAAKzB,IAApB,4BAAA,GACIjE,CADJ,yGADE,CAAN,CAWF,OALI0F,KAAKkvJ,cAALlvJ,KACFsvJ,EAAgBC,OAAhBD,IAA0B,CADxBtvJ,GAIJsvJ,EAAgB7pJ,IAAhB6pJ,IAAuB,CAJnBtvJ,EAKGsvJ,EAAgBr8I,MAAvB;KA5CF9O,EAkDAgrJ,WAAAA,SAAAA,GAAA,UAAS1oJ,CAAT;EAAA,gBAAA,CACE,OAAOA,EAAQ3E,GAAR2E,CAAY,UAAAnM,CAAA;EAAS,aAAAqG,EAAK8E,IAAL9E,CAAUrG,CAAVqG,CAAA;OAArB8F,CAAP;KAnDFtC,EA2DAgrJ,WAAAA,MAAAA,GAAA,UAAM70J,CAAN,EAAqB2Y,CAArB;EACE,QAAIjT,KAAKqvJ,OAAT,EACE,MAAM,IAAI91J,KAAJ,CAAU,iBAAeyG,KAAKzB,IAApB,8BAAV,CAAN,CAGF,IAAIjE,IAAQ,CAARA,KAAc0F,KAAKivJ,eAAe30J,KAAS0F,KAAK8uJ,OAApD,EACE,MAAM,IAAIv1J,KAAJ,CAAU,6BACZe,CADY,gDAAA,GACuC0F,KAAK8uJ,OADtD,CAAN,CAIF,IAAM51I,IAAIlZ,KAAK8nB,OAAL9nB,CAAa1F,CAAb0F,OAAV,CAEA,IAAIiT,EAAOhV,KAAPgV,KAAiBjT,KAAK/B,KAA1B,EACE,MAAM,IAAI1E,KAAJ,CAAU,iBACZyG,KAAKzB,IADO,4CAAA,GACuCjE,CADvC,6CAAA,GAGZ2Y,EAAOhV,KAHK,gCAAA,GAG8B+B,KAAK/B,KAHnC,MAAV,CAAN,CAgBF,IAToB,MAAhB+B,KAAK9D,IAAL8D,EAAgB,IAAkC,MAA7BA,KAAK+uJ,YAAL/uJ,CAAkB5F,MAAvB,KAClB4F,KAAK+uJ,YAAL/uJ,GAAoBiT,EAAOhX,KADT,GAIpB+D,KAAKqmB,iBAALrmB,CACIA,KAAK+uJ,YADT/uJ,EACuBiT,EAAOhX,KAD9B+D,EAEI,iBAAeA,KAAKzB,IAApB,4CAAA,GACIjE,CADJ,MAFJ0F,CAJoB,EAShBkZ,KAAKA,EAAEzT,IAAX,EACE,MAAM,IAAIlM,KAAJ,CACF,iBAAeyG,KAAKzB,IAApB,4CAAA,GACIjE,CADJ,wCADE,CAAN,CAKF,IAAI4e,KAAKA,EAAEs2I,OAAX,EACE,MAAM,IAAIj2J,KAAJ,CACF,iBAAeyG,KAAKzB,IAApB,4CAAA,GACIjE,CADJ,2CADE,CAAN,CAKF4e,EAAEjG,MAAFiG,GAAWjG,CAAXiG,EACAA,EAAEs2I,OAAFt2I,IAAY,CADZA,EAGAlZ,KAAK8nB,OAAL9nB,CAAa1F,CAAb0F,IAAsBkZ,CAHtBA;KApGF/U,EA6GAgrJ,WAAAA,UAAAA,GAAA,UAAU1oJ,CAAV,EAA6BqhB,CAA7B;EAAA,gBAAA,CACE,IAAIrhB,EAAQrM,MAARqM,KAAmBqhB,EAAQ1tB,MAA/B,EACE,MAAM,IAAIb,KAAJ,CACF,iBAAeyG,KAAKzB,IAApB,gEAAA,GAEIkI,EAAQrM,MAFZ,uCAAA,GAGI0tB,EAAQ1tB,MAHZ,MADE,CAAN,CAOFqM,EAAQrG,OAARqG,CAAgB,UAAC1L,CAAD,EAAIT,CAAJ;EAAc,aAAAqG,EAAKoE,KAALpE,CAAW5F,CAAX4F,EAAcmnB,EAAQxtB,CAARwtB,CAAdnnB,CAAA;OAA9B8F;KAtHFtC,EAiIAgrJ,WAAAA,OAAAA,GAAA,UAAO1oJ,CAAP,EAA2BxI,CAA3B;EACE,QAAMA,KAASA,MAAU+B,KAAK/B,KAA9B,EACE,MAAM,IAAI1E,KAAJ,CAAU,0BACZyG,KAAK/B,KADO,iCAAA,GAC6BA,CADvC,CAAN,CAIF,KAAKwI,CAAL,EAAc;EACZA,YAAAA,CACA,KAAK,IAAI1L,IAAI,CAAb,EAAgBA,IAAIiF,KAAK9D,IAAL8D,EAApB,EAAiCjF,GAAjC,EACE0L,EAAQzK,IAARyK,CAAa1L,CAAb0L;EAIJ,SAAuB,MAAnBA,EAAQrM,MAAZ,EACE,OAAO6Y,SAAAA,GAAY,GAAG9L,OAAOnH,KAAK+uJ,aAA3B97I,CAAP,CAKF,IAAM6U,IAAU9nB,KAAKyvJ,QAALzvJ,CAAcyG,CAAdzG,CAAhB,CAKA,OAHAA,KAAKqmB,iBAALrmB,CACIA,KAAK+uJ,YADT/uJ,EACuB8nB,EAAQ,CAARA,EAAW7rB,KADlC+D,EACyC,8BADzCA,GAGOsH,MAAMwgB,CAANxgB,EAAe,CAAfA,CAAP;KAzJFnD,EA+JAgrJ,WAAAA,OAAAA,GAAA,UAAOlxJ,CAAP;EACE,QAAMA,KAASA,MAAU+B,KAAK/B,KAA9B,EACE,MAAM,IAAI1E,KAAJ,CAAU,0BACZyG,KAAK/B,KADO,iCAAA,GAC6BA,CADvC,CAAN,CAIF,IAAoB,MAAhB+B,KAAK9D,IAAL8D,EAAJ,EACE,OAAOiT,SAAAA,GAAY,GAAG9L,OAAOnH,KAAK+uJ,aAA3B97I,CAAP,CAIF,KADA,IAAMxM,MAAN,EACS1L,IAAI,CAAb,EAAgBA,IAAIiF,KAAK9D,IAAL8D,EAApB,EAAiCjF,GAAjC,EACE0L,EAAQzK,IAARyK,CAAa1L,CAAb0L,EAGF,IAAMqhB,IAAU9nB,KAAKyvJ,QAALzvJ,CAAcyG,CAAdzG,CAAhB,CAOA,OALAA,KAAKqmB,iBAALrmB,CACIA,KAAK+uJ,YADT/uJ,EACuB8nB,EAAQ,CAARA,EAAW7rB,KADlC+D,EAEI,qDACIA,KAAK+uJ,YADT,8BAAA,GACiDjnI,EAAQ,CAARA,EAAW7rB,KAD5D,MAFJ+D,GAKOmH,OAAO2gB,CAAP3gB,EAAgB,CAAhBA,CAAP;KArLFhD,EA8LAgrJ,WAAAA,QAAAA,GAAA,UAAQ1oJ,CAAR,EAA2BwM,CAA3B;EACE,QAAIA,EAAOhV,KAAPgV,KAAiBjT,KAAK/B,KAA1B,EACE,MAAM,IAAI1E,KAAJ,CAAU,0BACZyG,KAAK/B,KADO,2BAAA,GACuBgV,EAAOhV,KADxC,CAAN,CAIF,IAAIwI,EAAQrM,MAARqM,KAAmBwM,EAAOhX,KAAPgX,CAAa,CAAbA,CAAvB,EACE,MAAM,IAAI1Z,KAAJ,CAAU,wDACZkN,EAAQrM,MADI,UAAA,GACU6Y,EAAOhX,KAAPgX,CAAa,CAAbA,CADpB,CAAN,CAIF,IAAMsmE,IAAWh/E,KAAKI,GAALJ,MAAAA,CAAAA,IAAAA,EAAYkM,CAAZlM,C
AAjB,CAEA,KAAKyF,KAAKivJ,eAAe11E,KAAYv5E,KAAK8uJ,OAA1C,EACE,MAAM,IAAIv1J,KAAJ,CACF,qCAAmCggF,CAAnC,WAAA,GAAoDv5E,KAAK8uJ,OAAzD,MADE,CAAN,CAIF9uJ,KAAK0vJ,SAAL1vJ,CAAeyG,CAAfzG,EAAwBuH,QAAQ0L,CAAR1L,EAAgB,CAAhBA,CAAxBvH;KAhNFmE,EAyNAgrJ,WAAAA,MAAAA,GAAA,UAAM/0J,CAAN,EAAwB6Y,CAAxB;EAAA,gBAAA,CACE,IAAIA,EAAOhV,KAAPgV,KAAiBjT,KAAK/B,KAA1B,EACE,MAAM,IAAI1E,KAAJ,CAAU,0BACZyG,KAAK/B,KADO,2BAAA,GACuBgV,EAAOhV,KADxC,CAAN,CAGF,IAAI0xJ,IAAc,CAAlB;EAAA,QACMC,IAAoBx1J,EAAO0H,GAAP1H,CAAW,UAAA0vB,CAAA;EAEnC,aADA6lI,KAAe7lI,CACf;OAFwB1vB,CAD1B,CAMA,IAAIu1J,MAAgB18I,EAAOhX,KAAPgX,CAAa,CAAbA,CAApB,EACE,MAAM,IAAI1Z,KAAJ,CAAU,uGAEZo2J,CAFY,8BAAA,GAE2B18I,EAAOhX,KAF5C,CAAN,CAKF,KAAK+D,KAAKivJ,eAAe70J,EAAOA,MAAPA,KAAkB4F,KAAK8uJ,OAAhD,EACE,MAAM,IAAIv1J,KAAJ,CACF,6DACIyG,KAAK8uJ,OADT,UAAA,GACwB10J,EAAOA,MAD/B,mEADE,CAAN,CAMF,IAAMy1J,IAAgC,MAAhBF,CAAgB,GAAI,CAAJ,GAAQ18I,EAAO/W,IAAP+W,GAAc08I,CAA5D;EAAA,QACM7nI,MADN,CAEApP,KAAK;EACHzF,UAASA,EAAO9N,OAAP8N,EAAgB,GAAG08I,GAAaE,EAAhC58I,CAATA,CACA,KAAK,IAAIlY,IAAI,CAAb,EAAgBA,IAAIX,EAAOA,MAA3B,IAAqCW,CAArC,EAAwC;EACtC,YACM+0J,KAAW,GADa,MAAN/0J,CAAM,GAAK,CAAL,GAAS60J,EAAkB70J,IAAI,CAAtB60J,GACH,EADpC;EAAA,YAEMG,KAAS,GAAG31J,EAAOW,CAAPX,GAAWy1J,EAF7B,CAGA/nI,EAAQ/sB,CAAR+sB,IAAajqB,MAAMoV,CAANpV,EAAciyJ,CAAdjyJ,EAAuBkyJ,CAAvBlyJ,EAA8BsH,OAA9BtH,CAAsC8C,EAAKouJ,YAA3ClxJ,CAAbiqB;EAEF,cAAOA,CAAP;OARFpP,EAWA,KADA,IAAMjS,MAAN,EACS1L,IAAI,CAAb,EAAgBA,IAAIX,EAAOA,MAA3B,EAAmCW,GAAnC,EACE0L,EAAQ1L,CAAR0L,IAAa1L,CAAb0L,CAEFzG,KAAK0vJ,SAAL1vJ,CAAeyG,CAAfzG,EAAwB8nB,CAAxB9nB;KAjQFmE,EAoQQgrJ,WAAAA,kBAAAA,GAAR,UACI5zJ,CADJ,EACsBC,CADtB,EACwCC,CADxC;uBACwCA,SACtCqwG,KAAKpwG,MAALowG,CACI9rG,KAAKrE,WAALqE,CAAiBzE,CAAjByE,EAAyBxE,CAAzBwE,CADJ8rG,EAEIrwG,IAAqB,UAArBA,GAAgCF,CAAhCE,UAAAA,GAA8CD,CAA9CC,gBAFJqwG;KAtQF3nG,EA2QQgrJ,WAAAA,YAAAA,GAAR,UAAoBhzJ,CAApB,EAAkCC,CAAlC;EACE,QAAID,EAAG/B,MAAH+B,KAAcC,EAAGhC,MAArB,EACE,QAAO,CAAP,CAEF,KAAK,IAAIW,IAAI,CAAb,EAAgBA,IAAIoB,EAAG/B,MAAvB,EAA+BW,GAA/B,EACE,KAAe,MAAXoB,EAAGpB,CAAHoB,MAA2B,MAAXC,EAAGrB,CAAHqB,KAAgBD,EAAGpB,CAAHoB,MAAUC,EAAGrB,CAAHqB,CAA9C,EACE,QAAO,CAAP,CAGJ,QAAO,CAAP;KApRF+H,EAbegrJ,QAAAA,GAAS,CAaxBhrJ,GAsRF;KlBhTO,sBmBWHqN,GAAYg4G,GACZ4/B;;;;oBACM53I,EAAKgU,UACN;EAAA,2BAAA,MAIA;EAAA,2BAAA,MASA;EAAA,2BAAA,MAMA;EAAA,2BAAA,MAQA;EAAA,2BAAA,MAMA;EAAA,2BAAA,MAMA;EAAA,2BAAA,MAoBA;EAAA,2BAAA,MAUA;EAAA,4BAAA,MAQA;EAAA,4BAAA,MAUA;EAAA,4BAAA,MAWA;EAAA,4BAAA,MAQA;EAAA,4BAAA,MAWA;EAAA,4BAAA,MAMA;EAAA,4BAAA;EA1HH,sBACGynI,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,EAA+D5mJ,KAA/D4mJ,IADH;EASQ,iBALF+C,IACF/C,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CADE+C,EAEAC,IACFhD,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAHE+C,MAKQA,EAAKhrJ,IAALgrJ,GAAN;EAAR,qBAAQz9H,MAAAA,GAAmB,CAAnBA,UAAyB8xE,GAAW4rD,EAAK5pJ,KAAL4pJ,GAApC19H,IACyB09H,EAAK5pJ,KAAL4pJ,SAAc5rD,GAD/C;EAMA,sBAFM3yF,IAAYF,EAAKyzG,UAALzzG,CAAgBm4I,IAAhBn4I,CACd,UAAAjT,CAAA;EAAQ,wBAAwC8lG,MAAxCglD,UAAU9qJ,CAAV8qJ,EAAgB7/B,CAAhB6/B,EAA2BD,CAA3BC,CAAA;aADM73I,MAEE63I,UAAU33I,CAAV23I,EAAqB7/B,CAArB6/B,EAAgCD,CAAhCC,EAAyChjJ,KAAzCgjJ,WACDhlD,EADnB;EASA,iBALM6rD,IACFjD,cAAc,WAAdA,EAA2Bz7I,CAA3By7I,EAAiCzjC,CAAjCyjC,EAA4C7D,CAA5C6D,CADEiD,EAEAlrJ,IACFioJ,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHEiD,EAIN9G,EAAQ+G,UAAR/G,CAAmB8G,CAAnB9G,CAJM8G,OAKElrJ,EAAKqB,KAALrB,IAAR;EAMA,iBAHMiO,IACFg6I,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CADEh6I,EAENm2I,EAAQgH,SAARhH,EAFMn2I,OAGEA,EAAO5M,KAAP4M,IAAR;EAMA,iBAHMtB,IACFs7I,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CADEt7I,EAENy3I,EAAQi
H,aAARjH,EAFMz3I,OAGEA,EAAMtL,KAANsL,IAAR;EAoBA,iBAjBMzV,IAAO+wJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAP/wJ,EACA+B,IACFgvJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFE/wJ,EAGA6yJ,IACF9B,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CAJE/wJ,EAKA+yJ,IACFhC,cAAc,aAAdA,EAA6Bz7I,CAA7By7I,EAAmCzjC,CAAnCyjC,EAA8C7D,CAA9C6D,CANE/wJ,EAOAgzJ,IACFjC,cAAc,gBAAdA,EAAgCz7I,CAAhCy7I,EAAsCzjC,CAAtCyjC,EAAiD7D,CAAjD6D,CARE/wJ,EASA8yJ,IACF/B,cAAc,wBAAdA,EAAwCz7I,CAAxCy7I,EAA8CzjC,CAA9CyjC,EAAyD7D,CAAzD6D,CAVE/wJ,EAYAwuF,IAAOuiE,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAZP/wJ,EAaAo0J,IAAc,IAAInB,WAAJ,CAChBzkE,CADgB,EACVzsF,CADU,EACH/B,CADG,EACG6yJ,CADH,EACiBC,CADjB,EACyCC,CADzC,EAEhBC,CAFgB,CAbdhzJ,EAgBNktJ,EAAQmH,cAARnH,CAAuBkH,CAAvBlH,CAhBMltJ,OAiBE2tB,OAAOymI,EAAY3rJ,EAAnBklB,GAAwBA,OAAO,CAAPA,GAAhC;EAUA,iBAPMllB,IACFsoJ,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADEtoJ,EAEArK,IAAQ2yJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFRtoJ,EAGA6rJ,IACFvD,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAJEtoJ,EAKmBykJ,EAAQqH,cAARrH,CAAuBzkJ,CAAvBykJ,EACRrkJ,KADQqkJ,CACF9uJ,CADE8uJ,EACKoH,CADLpH,CALnBzkJ,OAOEklB,OAAO,CAAPA,GAAR;EAQA,iBALM6mI,IACFzD,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADEyD,EAEAC,IACF1D,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAHEyD,OAIkBtH,EAAQqH,cAARrH,CAAuBsH,CAAvBtH,EACA3jJ,IADA2jJ,CACKuH,CADLvH,GACxB;EAUA,iBAPMwH,IACF3D,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADE2D,EAEAC,IACF5D,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAHE2D,EAIAE,IACF7D,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CALE2D,OAMoBxH,EAAQqH,cAARrH,CAAuBwH,CAAvBxH,EACA1iJ,MADA0iJ,CACOyH,CADPzH,EACsB0H,CADtB1H,GAC1B;EAWA,iBARM2H,IACF9D,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADE8D,EAEAC,IACF/D,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAHE8D,EAIAE,IACFhE,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CALE8D,EAMqB3H,EAAQqH,cAARrH,CAAuB2H,CAAvB3H,EACRr/D,OADQq/D,CACA4H,CADA5H,EACgB6H,CADhB7H,CANrB2H,OAQElnI,OAAO,CAAPA,GAAR;EAQA,iBALMqnI,IACFjE,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADEiE,EAEAC,IAAoB/H,EAAQqH,cAARrH,CAAuB8H,CAAvB9H,CAFpB8H,EAGAE,IACFnE,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAJEiE,OAKEC,EAAkBhqJ,MAAlBgqJ,CAAyBC,CAAzBD,GAAR;EAWA,iBARME,IACFpE,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADEoE,EAEAC,IACFrE,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHEoE,EAIAE,IACFtE,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CALEoE,EAMmBjI,EAAQqH,cAARrH,CAAuBiI,CAAvBjI,EACR/hJ,KADQ+hJ,CACFmI,CADEnI,EACOkI,CADPlI,CANnBiI,OAQExnI,OAAO,CAAPA,GAAR;EAMA,iBAHM2nI,IACFvE,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADEuE,EAEAC,IAAkBrI,EAAQqH,cAARrH,CAAuBoI,CAAvBpI,CAFlBoI,OAGE3nI,OAAO4nI,EAAgBv1J,IAAhBu1J,EAAP5nI,EAA+B,OAA/BA,GAAR;EAOA,iBAJM6nI,IACFzE,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADEyE,EAEmBtI,EAAQqH,cAARrH,CAAuBsI,CAAvBtI,EACRuI,aADQvI,EAFnBsI,SAIN;EAEA,gBAAMp8C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN;;;ECvIC,KAAI03C,cACP,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAEE,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,QAAL;EACE,UAAMliB,IACF2pJ,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CADJ;EAAA,UAEMxqJ,IAAMwqJ,cAAc,KAAdA,EAAqBz7I,CAArBy7I,EAA2BzjC,CAA3ByjC,EAAsC7D,CA
AtC6D,CAFZ;EAAA,UAGM1/I,IACD0/I,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,EACIh+H,WADJg+H,EAJL;EAAA,UAMMz/I,IACFy/I,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAPJ,CAQA,QAAQtuB,OACJsuB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADItuB,EAEJsuB,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAFItuB,EAGJr7H,CAHIq7H,EAGIl8H,CAHJk8H,EAG6BpxH,CAH7BoxH,EAIJnxH,CAJImxH,EAAR,CAMF,KAAK,QAAL;EACQr7H,UACF2pJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADE3pJ,EAEAb,IAAMwqJ,cAAc,KAAdA,EAAqBz7I,CAArBy7I,EAA2BzjC,CAA3ByjC,EAAsC7D,CAAtC6D,CAFN3pJ,EAGAiK,IACD0/I,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,EACIh+H,WADJg+H,EAJC3pJ,CAAN,IAMMqK,IACFs/I,cAAc,WAAdA,EAA2Bz7I,CAA3By7I,EAAiCzjC,CAAjCyjC,EAA4C7D,CAA5C6D,CAPJ,CAQA,QAAQnuB,OACJmuB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADInuB,EAGJmuB,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHInuB,GAIHx7H,EAAO,CAAPA,GAAWA,EAAO,CAAPA,EAJRw7H,EAIoBr8H,CAJpBq8H,EAKJvxH,CALIuxH,GAK4BnxH,EAAU,CAAVA,GAAcA,EAAU,CAAVA,EAL1CmxH,EAAR,CAOF,KAAK,iBAAL;EACE,UAAM7iI,IAAQgxJ,cACI,aADJA,EACmBz7I,CADnBy7I,EACyBzjC,CADzByjC,EAEI7D,CAFJ6D,CAAd,CAIM3pJ,IACF2pJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADE3pJ,EAEAb,IAAMwqJ,cAAc,KAAdA,EAAqBz7I,CAArBy7I,EAA2BzjC,CAA3ByjC,EAAsC7D,CAAtC6D,CAFN3pJ,CAGN,QAAQq+H,gBACJsrB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADItrB,EAGJsrB,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHItrB,EAIJ1lI,CAJI0lI,GAIIr+H,EAAO,CAAPA,GAAWA,EAAO,CAAPA,EAJfq+H,EAI2Bl/H,CAJ3Bk/H,EAAR,CAMF,KAAK,iBAAL;EACQr+H,UACF2pJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADE3pJ,EAEAb,IAAMwqJ,cAAc,KAAdA,EAAqBz7I,CAArBy7I,EAA2BzjC,CAA3ByjC,EAAsC7D,CAAtC6D,CAFN3pJ,EAGAqK,IACFs/I,cAAc,WAAdA,EAA2Bz7I,CAA3By7I,EAAiCzjC,CAAjCyjC,EAA4C7D,CAA5C6D,CAJE3pJ,EAKAiK,IACD0/I,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,EACIh+H,WADJg+H,EANC3pJ,CASN,QAAQ+/H,gBACJ4pB,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADI5pB,EAGJ4pB,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHI5pB,GAIH//H,EAAO,CAAPA,GAAWA,EAAO,CAAPA,EAJR+/H,EAIoB5gI,CAJpB4gI,EAKJ91H,CALI81H,GAK4B11H,EAAU,CAAVA,GAAcA,EAAU,CAAVA,EAL1C01H,EAAR,CAQF,KAAK,SAAL;EACQ//H,UACF2pJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADE3pJ,EAEAb,IAAMwqJ,cAAc,KAAdA,EAAqBz7I,CAArBy7I,EAA2BzjC,CAA3ByjC,EAAsC7D,CAAtC6D,CAFN3pJ,CAAN,IAGMk7H,IACFyuB,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,CAJJ,CAMA,QAAQvhB,QACJuhB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIvhB,GAGHlN,EAAW,CAAXA,GAAeA,EAAW,CAAXA,EAHZkN,GAG6BpoI,EAAO,CAAPA,GAAWA,EAAO,CAAPA,EAHxCooI,EAIJjpI,CAJIipI,EAAR,CAOF,KAAK,SAAL;EACQpoI,UACF2pJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADE3pJ,EAEAb,IAAMwqJ,cAAc,KAAdA,EAAqBz7I,CAArBy7I,EAA2BzjC,CAA3ByjC,EAAsC7D,CAAtC6D,CAFN3pJ,EAGAk7H,IACFyuB,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,CAJE3pJ,CAMN,QAAQmoI,QACJwhB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIxhB,GAGHjN,EAAW,CAAXA,GAAeA,EAAW,CAAXA,EAHZiN,GAG6BnoI,EAAO,CAAPA,GAAWA,EAAO,CAAPA,EAHxCmoI,EAIJhpI,CAJIgpI,EAAR,CAMF;EACE,YAAMn2B,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CA5FJ;GAHC;EAAA,ICAI03C,cAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,MAAL;EACE,UAAMvpB,IACFgxJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADJ;EAAA,UAEMhvJ,IA
CFgvJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAHJ;EAAA,UAIMlpJ,IAAQkpJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAJd,CAKA,QAAQ2E,KAAS31J,CAAT21J,EAAgB7tJ,CAAhB6tJ,EAAuB3zJ,CAAvB2zJ,EAAR,CAEF,KAAK,UAAL;EACE,UAAMzyJ,IAAQ8tJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAAd;EAAA,UACM4E,IAAO5E,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CADb;EAAA,UAEMvmI,IAAMumI,cAAc,KAAdA,EAAqBz7I,CAArBy7I,EAA2BzjC,CAA3ByjC,EAAsC7D,CAAtC6D,CAFZ,CAGA,QAAQ6E,SAAa3yJ,CAAb2yJ,EAAoBD,CAApBC,EAA0BprI,CAA1BorI,EAAR,CAEF,KAAK,QAAL;EACE,UAAMrrJ,IACFwmJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADJ;EAAA,UAEMn1J,IAAQm1J,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFd;EAAA,UAGMxkI,IACFwkI,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAJJ;EAAA,UAKMvkI,IACFukI,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CANJ,CAOA,QAAQtwC,OAAWl2G,CAAXk2G,EAAoB7kH,CAApB6kH,EAA2Bl0F,CAA3Bk0F,EAAoCj0F,CAApCi0F,EAAR,CAEF,KAAK,MAAL;EACE,cAAQo1C,OACJ9E,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADI8E,EAEJ9E,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFI8E,EAAR,CAIF,KAAK,UAAL;EACE,cAAQ92C,SACJgyC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIhyC,EAAR,CAGF,KAAK,eAAL;EACE,cAAQhP,cAEJghD,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFIhhD,EAGJghD,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHIhhD,EAIJghD,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAJIhhD,EAKJghD,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CALIhhD,EAAR,CAOF,KAAK,OAAL;EACQ9sG,UAAQ8tJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAAR9tJ,CAAN,IACM6yJ,IAAO/E,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CADb;EAAA,UAEMpgJ,IAAOogJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFb,CAGA,QAAQgF,MACJ9yJ,CADI8yJ,EACGD,CADHC,EACSplJ,CADTolJ,EAEJhF,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFIgF,EAAR,CAKF,KAAK,iBAAL;EACQh2J,UACFgxJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADEhxJ,CAAN,IAEMyL,IAAOulJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFb;EAAA,UAGMrsG,IACFqsG,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAJJ;EAAA,UAKMzkI,IAAOykI,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CALb,CAMA,QAAQiF,gBACJj2J,CADIi2J,EACGxqJ,CADHwqJ,EACStxG,CADTsxG,EAEJjF,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFIiF,EAIJ1pI,CAJI0pI,EAAR,CAMF,KAAK,OAAL;EACE,cAAQ3jB,MACJ0e,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADI1e,EAEJ0e,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFI1e,EAAR,CAIF,KAAK,WAAL;EACE,cAAQ3xB,UACJqwC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIrwC,EAAR,CAGF;EACE,YAAMtH,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CA1EJ;GDHK,sBEDH9jG,GAAYg4G,GACZ4/B;;;;oBACM53I,EAAKgU,UACN;EAAA,2BAAA,MAeA;EAAA,2BAAA,MAIA;EAAA,2BAAA;EARK,iBAVFmD,IACFskI,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADEtkI,EAEAC,IACFqkI,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHEtkI,EAIAE,IACFokI,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CALEtkI,EAMAG,IACFmkI,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CAPEtkI,EAQAI,IACFkkI,cAAc,gBAAdA,EAAgCz7I,CAAhCy7I,EAAsCzjC,CAAtCyjC,EAAiD7D,CAAjD6D,CATEtkI,MAUQwpI,UAAU18E,sBAAV08E,CACVxpI,CA
DUwpI,EACavpI,CADbupI,EACqCtpI,CADrCspI,EAEVrpI,CAFUqpI,EAEIppI,CAFJopI,EAAN;EAAR,sBAAQ5/H,MAAAA,IAAR;EAKQ,qBAAM6/H,WACVnF,cAAc,WAAdA,EAA2Bz7I,CAA3By7I,EAAiCzjC,CAAjCyjC,EAA4C7D,CAA5C6D,CADUmF,EAAN;EAAR,sBAAQ7/H,MAAAA,IAAR;EAIO,qBAAM8/H,eACTpF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADSoF,EAETpF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFSoF,EAAN;EAAP,qBAAO9/H,MAAAA,GAAP;EAKA,gBAAM+iF,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN;;;EC3BC,KAAI03C,cACP,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAEE,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,MAAL;EACE,UAAM9qB,IAAIuyJ,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAAV;EAAA,UACMx9I,IAAIw9I,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADV;EAAA,UAEMv9I,IACFu9I,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHJ;EAAA,UAIM/xJ,IAASo3J,KAAS53J,CAAT43J,EAAY7iJ,CAAZ6iJ,EAAe5iJ,CAAf4iJ,CAJf,CAKA,QAAQp3J,EAAOgD,QAAQhD,EAAOuL,QAA9B,CAEF;EACE,YAAM6uG,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CAVJ;GAHC;EAAA,ICAI03C,cAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,OAAL;EACE,aAAOgkG,EAAUh4G,EAAKjT,IAAfirH,CAAP,CAEF,KAAK,aAAL;EACE,UAAMmjC,IACFM,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADJ,CAEA,QAAQ5D,UAAU73I,EAAKjT,IAAf8qJ,EAAqB7/B,CAArB6/B,EAAgCD,CAAhCC,KAA4CsD,EAApD,CACF,KAAK,UAAL,CACA,KAAK,cAAL,CACA,KAAK,yBAAL;EACE,cACGM,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,EAA4D5mJ,KAA5D4mJ,GADH,CAGF,KAAK,UAAL;EAGE,cADKA,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,EACY5mJ,KADZ4mJ,GACL,CACF,KAAK,OAAL;EACE,cAAQsF,SACHtF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,EAA4DhxJ,KADzDs2J,EAEJ,OAFIA,EAAR,CAGF,KAAK,QAAL;EACE,aAAQtF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,EACHnrJ,GADGmrJ,CACC,UAAC/zI,CAAD;EAAmB,eAAAq5I,SAAar5I,EAAEjd,KAAfs2J,CAAA;SADpBtF,CAAR,CAEF,KAAK,MAAL;EACE,cAAQ0B,OACH1B,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,EAA4D/wJ,IADzDyyJ,EAEJ,OAFIA,EAAR,CAGF,KAAK,MAAL;EACE,cAAQA,OACH1B,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,EAA4D7tJ,IADzDuvJ,EAEJ,OAFIA,EAAR,CAGF,KAAK,MAAL;EACE,eAAA,CACF,KAAK,OAAL;EACE,UAAMh9I,IAAQs7I,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAAd;EAAA,UACMjoJ,IACFioJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFJ;EAAA,UAGM/tI,IACF+tI,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAJJ;EAAA,UAKMuF,IACFvF,cAAc,WAAdA,EAA2Bz7I,CAA3By7I,EAAiCzjC,CAAjCyjC,EAA4C7D,CAA5C6D,CANJ,CAOA/rJ,QAAQ8Z,IAAR9Z,CACI,gGADJA,GAGAA,QAAQC,GAARD,CAAYge,CAAZhe,CAHAA,CAIA,KAAK,IAAInG,IAAI,CAAb,EAAgBA,IAAIiK,EAAK5K,MAAzB,EAAiCW,GAAjC,EACEmG,QAAQC,GAARD,CACIrF,MAAM2oG,SAAN3oG,CAAgBgC,KAAhBhC,CAAsBoD,IAAtBpD,CAA2BmJ,EAAK,CAALA,EAAQ3E,QAAR2E,EAA3BnJ,EAA+CgC,KAA/ChC,CAAqD,CAArDA,EAAwD22J,CAAxD32J,CADJqF,EAGF,QAAQyQ,EAAR,CAEF;EACE,YAAM2jG,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CAtDJ;GDHK;EAAA,IEAI03C,cAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,gBAAL;EACE,UAAM8uD,IACF24E,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CADJ;EAAA,UAEM/wJ,IAAO+wJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFb;EAAA,UAGM//I,IACF+/I,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CAJJ,CAKA,QAAQkF,UAAU/kJ,cAAV+kJ,CACJ79E,CADI69E,GACoCj2J,EAAK,CAALA,GAASA,EAAK,CAALA,EAD7Ci2J,EAEJjlJ,CAFIilJ,EAAR,CAIF,KAAK,uBAAL;EACQ79E,UACF24E,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CADE34E,EAEAp4E,IAAO+wJ
,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFP34E,EAGApnE,IACF+/I,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CAJE34E,CAKN,QAAQ69E,UAAU9kJ,qBAAV8kJ,CACJ79E,CADI69E,GACoCj2J,EAAK,CAALA,GAASA,EAAK,CAALA,EAD7Ci2J,EAEJjlJ,CAFIilJ,EAAR,CAIF,KAAK,eAAL;EACE,UAAMhlJ,IACF8/I,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADJ;EAAA,UAEMtkI,IACFskI,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAHJ;EAAA,UAIM/3E,IACF+3E,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CALJ;EAAA,UAMMhkI,IACFgkI,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAPJ;EAAA,UAQM/jI,IACF+jI,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CATJ;EAAA,UAUM9jI,IACF8jI,cAAc,oBAAdA,EAAoCz7I,CAApCy7I,EAA0CzjC,CAA1CyjC,EAAqD7D,CAArD6D,CAXJ,CAaA,QAAQkF,UAAU98E,aAAV88E,CACJhlJ,CADIglJ,EACmBxpI,CADnBwpI,EAC0Cj9E,CAD1Ci9E,EAEJlpI,CAFIkpI,EAE0BjpI,CAF1BipI,EAGJhpI,CAHIgpI,EAAR,CAKF;EACE,YAAM78C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CAzCJ;GFHK;EAAA,IGAI03C,cAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,OAAL;EACE,cAAQ+4F,MACJ0uC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI1uC,EAEJ0uC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFI1uC,EAAR,CAIF,KAAK,UAAL;EACE,cAAQk0C,SACJxF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIwF,EAEJxF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIwF,EAAR,CAIF,KAAK,SAAL;EACE,cAAQn0C,QACJ2uC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI3uC,EAEJ2uC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFI3uC,EAAR,CAIF,KAAK,cAAL;EACE,cAAQo0C,aACJzF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIyF,EAEJzF,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIyF,EAAR,CAIF,KAAK,MAAL;EACE,cAAQC,KACJ1F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI0F,EAEJ1F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFI0F,EAAR,CAIF,KAAK,WAAL;EACE,cAAQC,UACJ3F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI2F,EAEJ3F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFI2F,EAAR,CAIF,KAAK,YAAL;EACE,cAAQn0C,WACJwuC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIxuC,EAEJwuC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIxuC,EAAR,CAIF,KAAK,YAAL;EACE,cAAQo0C,WACJ5F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI4F,EAAR,CAGF,KAAK,WAAL;EACE,cAAQC,UACJ7F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI6F,EAEJ7F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFI6F,EAAR,CAIF,KAAK,OAAL;EACE,cAAQ/zC,MACJkuC,cAAc,WAAdA,EAA2Bz7I,CAA3By7I,EAAiCzjC,CAAjCyjC,EAA4C7D,CAA5C6D,CADIluC,EAEJkuC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIluC,EAGJkuC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAHIluC,EAAR,CAKF;EACE,YAAMzJ,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CApDJ;GHHK;EAAA,IIAI03C,eAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,QAAL;EACE,cAAQwlF,OACJiiD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIjiD,EAEJiiD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAFIjiD,EAGJiiD,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,CAHIjiD,EAIJiiD,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,CAJIjiD,EAAR,CAKF,KAAK,WAAL;EACE,cAAQyzB,UACJwuB,cAAc,GAAdA,EAAmBz7I,CAAnBy
7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIxuB,EAEJwuB,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFIxuB,EAAR,CAIF;EACE,YAAMnpB,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CAbJ;GJHK;EAAA,IKAI03C,eAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,oBAAL;EACE,cAAQutI,mBACJ9F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI8F,EAEJ9F,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFI8F,EAGJ9F,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAHI8F,EAIJ9F,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAJI8F,EAKJ9F,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CALI8F,EAMJ9F,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CANI8F,EAAR,CAQF,KAAK,4BAAL;EACE,cAAQC,2BACJ/F,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI+F,EAGJ/F,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHI+F,EAIJ/F,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAJI+F,EAKJ/F,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CALI+F,EAMJ/F,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CANI+F,EAAR,CAQF,KAAK,SAAL;EACE,cAAQ32C,QACJ4wC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI5wC,EAAR,CAGF,KAAK,YAAL;EACE,cAAQ42C,WACJhG,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIgG,EAAR,CAGF,KAAK,eAAL;EACE,cAAQC,cACJjG,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADIiG,EAGJjG,cAAc,aAAdA,EAA6Bz7I,CAA7By7I,EAAmCzjC,CAAnCyjC,EAA8C7D,CAA9C6D,CAHIiG,EAIJjG,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CAJIiG,EAKJjG,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CALIiG,EAAR,CAQF;EACE,YAAM59C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CArCJ;GLHK;EAAA,IMAI03C,eAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,KAAL;EACE,UAAM1nB,IAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAb;EAAA,UACMjmJ,IACFimJ,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAFJ,CAGA,QAAQnjD,IACJmjD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADInjD,EACwDhsG,CADxDgsG,EAEJ9iG,CAFI8iG,EAAR,CAIF,KAAK,MAAL;EACQhsG,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,EACAkJ,IACFimJ,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAFEnvJ,CAGN,QAAQu9G,KACJ4xC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI5xC,EACwDv9G,CADxDu9G,EAEJr0G,CAFIq0G,EAAR,CAIF,KAAK,KAAL;EACQv9G,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,EACAkJ,IACFimJ,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAFEnvJ,CAGN,QAAQ8rG,IACJqjD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIrjD,EACwD9rG,CADxD8rG,EAEJ5iG,CAFI4iG,EAAR,CAIF,KAAK,KAAL;EACQ9rG,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,EACAkJ,IACFimJ,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAFEnvJ,CAGN,QAAQ8oG,MACJqmD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIrmD,EACwD9oG,CADxD8oG,EAEJ5/F,CAFI4/F,EAAR,CAIF,KAAK,KAAL;EACQ9oG,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,EACAkJ,IACFimJ,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAFEnvJ,CAGN,QAAQq1J,IACJlG,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIkG,EACwDr1J,CADxDq1J,EAEJnsJ,CAFImsJ,EAAR,CAIF,KAAK,KAAL;EACQr1J,UAAOmvJ,cAAc,MAAdA,EAAsB
z7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,EACAkJ,IACFimJ,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAFEnvJ,CAGN,QAAQs1J,IACJnG,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADImG,EACwDt1J,CADxDs1J,EAEJpsJ,CAFIosJ,EAAR,CAIF,KAAK,QAAL;EACQt1J,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,CACN,QAAQ0gH,OACJyuC,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIzuC,EACwD1gH,CADxD0gH,EAAR,CAGF,KAAK,QAAL;EACQ1gH,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,CACN,QAAQu1J,OACJpG,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIoG,EACwDv1J,CADxDu1J,EAAR,CAGF,KAAK,MAAL;EACQv1J,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,EACAkJ,IACFimJ,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CAFEnvJ,CAGN,QAAQ8wJ,KACJ3B,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI2B,EACwD9wJ,CADxD8wJ,EAEJ5nJ,CAFI4nJ,EAAR,CAIF;EACE,YAAMt5C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CApEJ;GNHK;EAAA,IOAI03C,eAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,QAAL;EACE,UAAM1nB,IAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAb;EAAA,UACMx7I,IACFw7I,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAFJ,CAGA,QAAQ3iD,OAAW74F,CAAX64F,EAAmBxsG,CAAnBwsG,EAAR,CAEF,KAAK,QAAL;EACQxsG,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,CAAN,IACM6T,IAAQs7I,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADd;EAAA,UAEMxmJ,IACFwmJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAHJ,CAIA,QAAQ3hD,OAAW35F,CAAX25F,EAAkB7kG,CAAlB6kG,EAA2BxtG,CAA3BwtG,EAAR,CAEF,KAAK,SAAL;EACQxtG,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,EACA6T,IAAQs7I,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADRnvJ,CAEN,QAAQsvI,QAAYz7H,CAAZy7H,EAAmBtvI,CAAnBsvI,EAAR,CAEF,KAAK,OAAL;EAEE,UAAMlmI,IAAQ+lJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAAd;EAAA,UAEM/wJ,IAAO+wJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAFb,CAGA,QAAQqG,MACJrG,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIqG,EACwDpsJ,CADxDosJ,EAEJp3J,CAFIo3J,EAAR,CAIF,KAAK,cAAL;EACQpsJ,UACF+lJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADE/lJ,CAAN,IAEM3D,IAAM0pJ,cAAc,KAAdA,EAAqBz7I,CAArBy7I,EAA2BzjC,CAA3ByjC,EAAsC7D,CAAtC6D,CAFZ;EAAA,UAGM5tJ,IACF4tJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAJJ;EAAA,UAKMr9I,IACFq9I,cAAc,WAAdA,EAA2Bz7I,CAA3By7I,EAAiCzjC,CAAjCyjC,EAA4C7D,CAA5C6D,CANJ;EAAA,UAOMp9I,IACFo9I,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CARJ;EAAA,UASM/pI,IACF+pI,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CAVJ;EAAA,UAWM9pI,IACF8pI,cAAc,aAAdA,EAA6Bz7I,CAA7By7I,EAAmCzjC,CAAnCyjC,EAA8C7D,CAA9C6D,CAZJ;EAAA,UAaM7pI,IACF6pI,cAAc,gBAAdA,EAAgCz7I,CAAhCy7I,EAAsCzjC,CAAtCyjC,EAAiD7D,CAAjD6D,CAdJ;EAAA,UAeMh6I,IAASg6I,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAff,CAgBA,IAAqB,MAAjB/lJ,EAAM9M,MAAW,IAAK6Y,EAAOhX,KAAPgX,CAAa7Y,MAAb6Y,GAAsB,CAAhD,EACE,KAAK,IAAIlY,IAAI,CAAb,EAAgBA,IAAIkY,EAAOhX,KAAPgX,CAAa7Y,MAAjC,EAAyCW,GAAzC,EACEmM,EAAMlL,IAANkL,CAAW,CAAXA,GACA3D,EAAIvH,IAAJuH,CAAS0P,EAAOhX,KAAPgX,CAAalY,CAAbkY,CAAT1P,CADA2D,EAEA7H,EAAQrD,IAARqD,CAAaA,EAAQ,CAARA,CAAbA,CAFA6H,CAKJ,QAAQqsJ,aACJtgJ,CADIsgJ,EACIrsJ,CADJqsJ,EACWhwJ,CADXgwJ,EACgBl0J,CADhBk0J,EACyB3jJ,CADzB2jJ,EAC
oC1jJ,CADpC0jJ,EAC6CrwI,CAD7CqwI,EAEJpwI,CAFIowI,EAESnwI,CAFTmwI,EAAR,CAIF,KAAK,OAAL;EACE,aAAOpiC,KAAS;EACd,YAAMrzH,IAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAb;EAAA,YACMnlI,IACFmlI,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CAFJ;EAAA,YAIMhxJ,IAAQ6rB,EAAQ,CAARA,EAAW7rB,KAJzB;EAAA,YAKMy7B,IAAgB5P,EAAQ,CAARA,EAAW1hB,OAAX0hB,GAAqB7rB,KAL3C;EAAA,YAMMu3J,IAAS1rI,EAAQhmB,GAARgmB,CAAY,UAAA7U,CAAA;EACzB,cAAMwgJ,IAAYtlC,KAASxyH,WAATwyH,CAAqBl7G,EAAOhX,KAA5BkyH,EAAmClyH,CAAnCkyH,CAAlB,CACA,KAAKslC,MACAtlC,KAASxyH,WAATwyH,CAAqBl7G,EAAO7M,OAAP6M,GAAiBhX,KAAtCkyH,EAA6Cz2F,CAA7Cy2F,CADL,EAEE,MAAM,IAAI50H,KAAJ,CAAU,wCAAV,CAAN,CAEF,OAAOk6J,IAAYxgJ,CAAZwgJ,GAAqBxgJ,EAAO9N,OAAP8N,CAAehX,CAAfgX,CAA5B;WANa6U,CANf,CAcA,QAAQ4rI,MAAUF,CAAVE,EAAkB51J,CAAlB41J,EAAR;SAfKviC,CAAP,CAkBF,KAAK,SAAL;EACE,aAAOA,KAAS;EACd,YAAMrzH,IAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAb;EAAA,YACMh6I,IACFg6I,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAFJ,CAGA,OAAO0G,QAAY1gJ,CAAZ0gJ,EAAoB71J,CAApB61J,CAAP;SAJKxiC,CAAP,CAOF,KAAK,MAAL;EACE,UAAM5qH,IAAO0mJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAb,CACA,QAAQtiD,KACJsiD,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADItiD,EACwDpkG,CADxDokG,EAAR,CAGF,KAAK,OAAL;EACQ7sG,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,CAAN,IACMsJ,IACF6lJ,cAAc,iBAAdA,EAAiCz7I,CAAjCy7I,EAAuCzjC,CAAvCyjC,EAAkD7D,CAAlD6D,CAFJ,CAIA,OAAO2G,QACH3G,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADG2G,EAEHxsJ,CAFGwsJ,EAEc91J,CAFd81J,CAAP,CAIF,KAAK,WAAL;EACQntJ,UACFwmJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADExmJ,CAAN,IAEMvI,IACF+uJ,cAAc,QAAdA,EAAwBz7I,CAAxBy7I,EAA8BzjC,CAA9ByjC,EAAyC7D,CAAzC6D,CAHJ;EAAA,UAIMhxJ,IACFgxJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CALJ,CAMA,QAAQ4G,UAAcptJ,CAAdotJ,EAAuB31J,CAAvB21J,EAA+B53J,CAA/B43J,EAAR,CAEF,KAAK,UAAL;EACE,UAAMn5J,IAAIuyJ,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CAAV,CACMxmJ,IACFwmJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADExmJ,CAEN,QAAQqtJ,SAAap5J,CAAbo5J,EAAgBrtJ,CAAhBqtJ,EAAR,CAEF,KAAK,eAAL;EACQrtJ,UACFwmJ,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CADExmJ,EAGAxK,IACFgxJ,cAAc,aAAdA,EAA6Bz7I,CAA7By7I,EAAmCzjC,CAAnCyjC,EAA8C7D,CAA9C6D,CAJExmJ,CAAN,IAKM6iB,IACF2jI,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CANJ;EAAA,UAOM1jI,IACF0jI,cAAc,cAAdA,EAA8Bz7I,CAA9By7I,EAAoCzjC,CAApCyjC,EAA+C7D,CAA/C6D,CARJ,CASA,QAAQiG,cAAkBzsJ,CAAlBysJ,EAA2B5pI,CAA3B4pI,EAAyCj3J,CAAzCi3J,EAAgD3pI,CAAhD2pI,EAAR,CAEF;EACE,YAAM59C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CA7HJ;GPHK;EAAA,IQAI03C,eAAwB,UAACx7I,CAAD,EAAag4G,CAAb,EACC4/B,CADD;EAGjC,UAAQ53I,EAAKgU,EAAb,GACE,KAAK,MAAL;EACE,cAAQuuI,KACJ9G,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI8G,EAEJ9G,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFI8G,EAAR,CAKF,KAAK,YAAL;EACE,UAAMj2J,IAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAb,CACA,QAAQ+G,WACJ/G,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADI+G,EACwDl2J,CADxDk2J,EAAR,CAGF,KAAK,SAAL;EACQl2J,UAAOmvJ,cAAc,MAAdA,EAAsBz7I,CAAtBy7I,EAA4BzjC,CAA5ByjC,EAAuC7D,CAAvC6D,CAAPnvJ,CACN,QAAQ+tI,QACJohB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIphB,EACwD/tI,CADxD+tI,EAAR,CAIF,KAAK,SAAL;EACE,cAAQooB,QACJhH,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIgH,EAEJhH
,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CAFIgH,EAAR,CAIF,KAAK,KAAL;EACE,cAAQjpB,IACJiiB,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIjiB,EAEJ3jI,UACI4lJ,cAAc,SAAdA,EAAyBz7I,CAAzBy7I,EAA+BzjC,CAA/ByjC,EAA0C7D,CAA1C6D,CADJ5lJ,EAEI,CAFJA,CAFI2jI,EAKJiiB,cAAc,eAAdA,EAA+Bz7I,CAA/By7I,EAAqCzjC,CAArCyjC,EAAgD7D,CAAhD6D,CALIjiB,EAAR,CAOF,KAAK,gBAAL;EACE,UAAM37H,IACF49I,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,CADJ;EAAA,UAEMzlJ,IAAWH,UACb4lJ,cAAc,UAAdA,EAA0Bz7I,CAA1By7I,EAAgCzjC,CAAhCyjC,EAA2C7D,CAA3C6D,CADa5lJ,EACoD,CADpDA,CAFjB,CAIA,QAAQ6sJ,eACJjH,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIiH,EAEJ7kJ,CAFI6kJ,EAEQ1sJ,CAFR0sJ,EAAR,CAIF,KAAK,gBAAL;EACQ7kJ,UACF49I,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,CADE59I,CAAN,IAEMC,IAAQjI,UACV4lJ,cAAc,OAAdA,EAAuBz7I,CAAvBy7I,EAA6BzjC,CAA7ByjC,EAAwC7D,CAAxC6D,CADU5lJ,EACoD,CADpDA,CAFd,CAIA,QAAQ8sJ,eACJlH,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADIkH,EAEJ9kJ,CAFI8kJ,EAEQ7kJ,CAFR6kJ,EAAR,CAIF,KAAK,cAAL;EACE,UAAMpkJ,IACFk9I,cAAc,WAAdA,EAA2Bz7I,CAA3By7I,EAAiCzjC,CAAjCyjC,EAA4C7D,CAA5C6D,CADJ;EAAA,UAEM1/I,IACF0/I,cAAc,YAAdA,EAA4Bz7I,CAA5By7I,EAAkCzjC,CAAlCyjC,EAA6C7D,CAA7C6D,CAHJ,CAKA,QAAQmH,aACJnH,cAAc,GAAdA,EAAmBz7I,CAAnBy7I,EAAyBzjC,CAAzByjC,EAAoC7D,CAApC6D,CADImH,EAEJrkJ,CAFIqkJ,EAEO7mJ,CAFP6mJ,EAAR,CAIF;EACE,YAAM9+C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CA5DJ;GRHK,uBSmBH9jG,GAAYg4G,GACZ4/B;EACF,UAAQ53I,EAAKw4I,QAAb,GACE,KAAK,YAAL;EACE,aAAOqK,UAAqB7iJ,CAArB6iJ,EAA2B7qC,CAA3B6qC,EAAsCjL,CAAtCiL,CAAP,CACF,KAAK,YAAL;EACE,aAAOC,YAAoB9iJ,CAApB8iJ,EAA0B9qC,CAA1B8qC,EAAqClL,CAArCkL,CAAP,CACF,KAAK,SAAL;EACE,aAAOC,YAAkB/iJ,CAAlB+iJ,EAAwB/qC,CAAxB+qC,EAAmCnL,CAAnCmL,CAAP,CACF,KAAK,aAAL;EACE,aAAOC,YAAsBhjJ,CAAtBgjJ,EAA4BhrC,CAA5BgrC,EAAuCpL,CAAvCoL,CAAP,CACF,KAAK,UAAL;EACE,aAAOC,YAAmBjjJ,CAAnBijJ,EAAyBjrC,CAAzBirC,EAAoCrL,CAApCqL,CAAP,CACF,KAAK,SAAL;EACE,aAAOC,YAAkBljJ,CAAlBkjJ,EAAwBlrC,CAAxBkrC,EAAmCtL,CAAnCsL,CAAP,CACF,KAAK,YAAL;EACE,aAAOC,YAAqBnjJ,CAArBmjJ,EAA2BnrC,CAA3BmrC,EAAsCvL,CAAtCuL,CAAP,CACF,KAAK,OAAL;EACE,aAAOC,YAAgBpjJ,CAAhBojJ,EAAsBprC,CAAtBorC,EAAiCxL,CAAjCwL,CAAP,CACF,KAAK,OAAL;EACE,aAAOC,YAAgBrjJ,CAAhBqjJ,EAAsBrrC,CAAtBqrC,EAAiCzL,CAAjCyL,CAAP,CACF,KAAK,SAAL;EACE,aAAOC,YAAkBtjJ,CAAlBsjJ,EAAwBtrC,CAAxBsrC,EAAmC1L,CAAnC0L,CAAP,CACF,KAAK,UAAL;EACE,aAAOC,aAAmBvjJ,CAAnBujJ,EAAyBvrC,CAAzBurC,EAAoC3L,CAApC2L,CAAP,CACF,KAAK,eAAL;EACE,aAAOC,aAAwBxjJ,CAAxBwjJ,EAA8BxrC,CAA9BwrC,EAAyC5L,CAAzC4L,CAAP,CACF,KAAK,WAAL;EACE,aAAOC,aAAoBzjJ,CAApByjJ,EAA0BzrC,CAA1ByrC,EAAqC7L,CAArC6L,CAAP,CACF,KAAK,YAAL;EACE,aAAOC,aAAoB1jJ,CAApB0jJ,EAA0B1rC,CAA1B0rC,EAAqC9L,CAArC8L,CAAP,CACF,KAAK,gBAAL;EACE,aAAOC,aAAyB3jJ,CAAzB2jJ,EAA+B3rC,CAA/B2rC,EAA0C/L,CAA1C+L,CAAP,CACF;EACE,YAAM7/C,UAAU,eAAa9jG,EAAKgU,EAAlB,wBAAV8vF,CAAN,CAhCJ;ECTF;EAME,YAAA,CACoB8/C,CADpB,EAEoBC,CAFpB;EACoBr1J,kBAAAA,GAAAo1J,CAAAp1J,EACAA,mBAAAA,GAAAq1J,CADAr1J,EANZA,gBAAAA,KAAe2E,IAAI,GAAG2wJ,WAAW,IAAIC,aAAa,GAMtCv1J,EALZA,aAAAA,IAAoCA,KAAKw1J,YAK7Bx1J,EAJZA,WAAAA,GAAS,CAIGA,EAElBA,KAAKy1J,yBAALz1J,EAFkBA;EAwHtB,UAnHU01J,WAAAA,SAAAA,GAAR,UAAiB/wJ,CAAjB,EAA6B2wJ,CAA7B;EACE,aAAQ3wJ,OAAI2wJ,cAAWC,aAAa,GAApC;KADMG,EASRvxJ,qBAAAA,CAAIuxJ,WAAJvxJ,kBAAAA,SAOA;EACE,aAAOnE,KAAKxI,QAAZ;cARF,UAAmBA,CAAnB;EACMwI,WAAKxI,QAALwI,KAAkBxI,CAAlBwI,KACFA,KAAKxI,QAALwI,GAAgBxI,CAAhBwI,EACAA,KAAKy1J,yBAALz1J,EAFEA;2CADNmE,CATQuxJ,EAuBRvxJ,qBAAAA,CAAIuxJ,WAAJvxJ,oBAAAA,SAAA;EACE,aAAOnE,KAAK21J,kBAAL31J,CAAwB,CAAxBA,CAAP;2CADFmE,CAvBQuxJ,EA+BRvxJ,qBAAAA,CAAIuxJ,WAAJvxJ,qBAAAA,SAAA;EACE,aAAOnE,KAAK21J,kBA
AZ;2CADFxxJ,CA/BQuxJ,EAmCAA,WAAAA,0BAAAA,GAAR;EAEE,SADA,IAAM9hC,MAAN,EACS74H,IAAI,CAAb,EAAgBA,IAAIiF,KAAKxI,QAALwI,CAAc5F,MAAd4F,GAAuB,CAA3C,EAA8CjF,GAA9C,EAAmD;EACjD,UAAMvD,IAAWwI,KAAKxI,QAALwI,CAAcnC,KAAdmC,CAAoB,CAApBA,EAAuBA,KAAKxI,QAALwI,CAAc5F,MAAd4F,GAAuBjF,CAA9CiF,CAAjB,CACA4zH,EAAM53H,IAAN43H,CAAW5zH,KAAK41J,oBAAL51J,CAA0BxI,CAA1BwI,CAAX4zH;EAEFA,OAAM53H,IAAN43H,CAAW,EAAXA,GACA5zH,KAAK21J,kBAAL31J,GAA0B4zH,CAD1BA;KAzCM8hC,EA6CAA,WAAAA,qBAAAA,GAAR,UAA6Bl+J,CAA7B;EACE,WAAOA,IACHA,EACKsK,GADLtK,CAEQ,UAAA4xJ,CAAA;EAAW,aAAgB,MAAfA,EAAQzkJ,EAAO,IAA6B,MAAxBykJ,EAAQmM,WAAb,GACvB,EADuB,GAEpBnM,EAAQkM,SAARlM,MAAAA,GAAqBA,EAAQmM,WAFzB;OAFnB/9J,EAKKwK,IALLxK,CAKU,GALVA,CADGA,GAOH,EAPJ;KA9CMk+J,EA4DRA,WAAAA,WAAAA,GAAA,UAAWxF,CAAX;EACMlwJ,SAAKxI,QAALwI,KACFA,KAAK61J,MAAL71J,IACAA,KAAKxI,QAALwI,GAAgBA,KAAKxI,QAALwI,CAAcnC,KAAdmC,EADhBA,EAEAA,KAAKxI,QAALwI,CAAchE,IAAdgE,CAAmBA,KAAK81J,QAAL91J,CAAcA,KAAK61J,MAAnB71J,EAA2BkwJ,CAA3BlwJ,CAAnBA,CAFAA,EAGAA,KAAK21J,kBAAL31J,CAAwBguB,OAAxBhuB,CAAgCA,KAAK41J,oBAAL51J,CAA0BA,KAAKxI,QAA/BwI,CAAhCA,CAJEA;KA7DE01J,EAyERA,WAAAA,UAAAA,GAAA;EACE,UAAI11J,KAAKxI,QAALwI,IAAiBA,KAAKxI,QAALwI,CAAc5F,MAAd4F,GAAuB,EAA5C,EAKE,MAAM,IAAIzG,KAAJ,CAAU,yCAAV,CAAN,CAJAyG,KAAKxI,QAALwI,GAAgBA,KAAKxI,QAALwI,CAAcnC,KAAdmC,EAAhBA,EACAA,KAAKxI,QAALwI,CAAcm3C,MAAdn3C,EAAsB,CAAtBA,CADAA,EAEAA,KAAK0pJ,iBAAL1pJ,CAAuBq2C,KAAvBr2C,EAFAA;KA3EI01J,EAuFRA,WAAAA,cAAAA,GAAA;EACE,UAAI11J,KAAKxI,QAALwI,IAAiBA,KAAKxI,QAALwI,CAAc5F,MAAd4F,GAAuB,EAA5C,EAYE,MAAM,IAAIzG,KAAJ,CAAU,uDAAV,CAAN,CAXAyG,KAAKxI,QAALwI,GAAgBA,KAAKxI,QAALwI,CAAcnC,KAAdmC,EAAhBA,EACAA,KAAK61J,MAAL71J,EADAA,CAEA,IAAMopJ,IACFjlJ,OAAO6M,MAAP7M,GAAAA,EAAkBnE,KAAKxI,QAALwI,CAAcA,KAAKxI,QAALwI,CAAc5F,MAAd4F,GAAuB,CAArCA,CAAlBmE,CADJ,CAGAilJ,EAAQmM,WAARnM,IAAuB,CAAvBA,EACAA,EAAQzkJ,EAARykJ,GAAappJ,KAAK61J,MADlBzM,EAEAppJ,KAAKxI,QAALwI,CAAcm3C,MAAdn3C,EAAsB,CAAtBA,EAAyB,CAAzBA,EAA4BopJ,CAA5BppJ,CAFAopJ,EAGAppJ,KAAK21J,kBAAL31J,CAAwBm3C,MAAxBn3C,CACI,CADJA,EACO,CADPA,EACUA,KAAK41J,oBAAL51J,CAA0BA,KAAKxI,QAA/BwI,CADVA,CAHAopJ;KA9FIsM,EAwGRA,WAAAA,UAAAA,GAAA,UAAUn3J,CAAV;EACE,WAAOyB,KAAKo1J,SAALp1J,CAAezB,CAAfyB,CAAP;KAzGM01J,EA4GRA,WAAAA,eAAAA,GAAA,UAAepF,CAAf;EACEtwJ,SAAKq1J,cAALr1J,CAAoBswJ,EAAY3rJ,EAAhC3E,IAAsCswJ,CAAtCtwJ;KA7GM01J,EAgHRA,WAAAA,eAAAA,GAAA,UAAe/wJ,CAAf;EACE,WAAO3E,KAAKq1J,cAALr1J,CAAoB2E,CAApB3E,CAAP;KAjHM01J,GAmHV;KA/HA;EAAA;EC4CE,YAAA,CAAoBxK,CAApB;EAAoBlrJ,cAAAA,GAAAkrJ,CAAAlrJ,EAhDZA,gBAAAA,GAAmC,IAAI8oG,GAAJ,EAgDvB9oG,EA/CZA,eAAAA,KA+CYA,EA3CZA,cAAAA,GAAY,GA2CAA,EAClBA,KAAK8rJ,YAAL9rJ,GAAoBkrJ,EAAMY,YADR9rJ,EAElBA,KAAK+1J,QAAL/1J,GAAgBkrJ,EAAMr5I,OAFJ7R,EAGlBA,KAAKi7H,OAALj7H,EAHkBA;EA6YtB,UAvbEmE,qBAAAA,CAAI6xJ,WAAJ7xJ,aAAAA,SAAA;EACE,aAAOnE,KAAKi2J,UAAZ;cAEF,UAAcb,CAAd;EACE,UAAMc,IAAY/xJ,OAAO0O,IAAP1O,CAAYixJ,CAAZjxJ,EAAuBrC,GAAvBqC,CACd,UAAAoS,CAAA;EAAO,eAAA6+I,EAAU7+I,CAAV6+I,EAAetzJ,GAAfszJ,CAAmB,UAAAniJ,CAAA;EAAU,iBAAAA,EAAOtO,EAAP;WAA7BywJ,CAAA;SADOjxJ,CAAlB,CAEAnE,KAAKk2J,SAALl2J,MAAoBmH,OAAOjI,UAAUg3J,EAArCl2J,EACAA,KAAKi2J,UAALj2J,GAAkBo1J,CADlBp1J;2CANFmE,GAUAA,qBAAAA,CAAI6xJ,WAAJ7xJ,UAAAA,SAAA;EACE,aAAOnE,KAAK8rJ,YAAL9rJ,CAAkB8B,GAAlB9B,CAAsB,UAAAwR,CAAA;EAC3B,iBACEjT,MAAMiT,EAAKjT,MACXtC,OAAOuV,EAAKgL,MAALhL,CAAmBvV,KAAnBuV,GAAuBA,EAAKgL,MAALhL,CAAmBvV,KAAnBuV,CAAqBzN,KAA5CyN,QACuB6yF,GAC9BpmG,OAAOuT,EAAKgL,MAALhL,CAAmBvT,KAAnBuT,GAAuBA,EAAKgL,MAALhL,CAAmBvT,KAAnBuT,CAAqBzN,KAA5CyN,QACuB6yF,GALhC;SADKrkG,CAAP;2CADFmE,CAVAA,EAsBAA,qBAAAA,CAAI6xJ,WAAJ7xJ,WAAAA,SAAA;EACE,aAAOnE,KAAK+1J,QAAL/1J,CAAc8B,GAAd9B,CAAkB,UAAAwR,CAAA;EACvB,iBACEjT,MAAMiT,EAAKjT,MACXtC,OAAOuV,EAAKgL,MAALhL,CAAmBvV,KAAnBuV,GAAuB
A,EAAKgL,MAALhL,CAAmBvV,KAAnBuV,CAAqBzN,KAA5CyN,QACuB6yF,GAC9BpmG,OAAOuT,EAAKgL,MAALhL,CAAmBvT,KAAnBuT,GAAuBA,EAAKgL,MAALhL,CAAmBvT,KAAnBuT,CAAqBzN,KAA5CyN,QACuB6yF,GALhC;SADKrkG,CAAP;2CADFmE,CAtBAA,EAkCAA,qBAAAA,CAAI6xJ,WAAJ7xJ,cAAAA,SAAA;EACE,aAAOnE,KAAK8rJ,YAAL9rJ,CAAkB8B,GAAlB9B,CAAsB,UAAAwR,CAAA;EAAQ,eAAAA,EAAKjT,IAAL;SAA9ByB,CAAP;2CADFmE,CAlCAA,EAsCAA,qBAAAA,CAAI6xJ,WAAJ7xJ,eAAAA,SAAA;EACE,aAAOnE,KAAK6R,OAAL7R,CAAa8B,GAAb9B,CAAiB,UAAAwR,CAAA;EAAQ,eAAAA,EAAKjT,IAAL;SAAzByB,CAAP;2CADFmE,CAtCAA,EAgDAA,qBAAAA,CAAI6xJ,WAAJ7xJ,sBAAAA,SAAA;EACE,aAAOnE,KAAKkrJ,KAALlrJ,CAAW4rJ,eAAlB;2CADFznJ,CAhDAA,EAoDAA,qBAAAA,CAAI6xJ,WAAJ7xJ,uBAAAA,SAAA;EACE,aAAOnE,KAAKkrJ,KAALlrJ,CAAW6rJ,gBAAlB;2CADF1nJ,CApDAA,EA4DQ6xJ,WAAAA,QAAAA,GAAR,UAAgBG,CAAhB;EAGE,SAAIn2J,KAAKkrJ,KAALlrJ,CAAW4rJ,oBAAmB5rJ,KAAKkrJ,KAALlrJ,CAAW6rJ,gBAA7C,EAAA;EAGA,UAAMuK,MAAN;EAAA,UACM3kJ,IAAS0kJ,KAAcn2J,KAAKkrJ,KAALlrJ,CAAW8rJ,YADxC;EAAA,UAGMuK,IADkB5kJ,EAAO3P,GAAP2P,CAAW,UAAAD,CAAA;EAAQ,eAAAA,EAAKjT,IAAL;SAAnBkT,EAA8BqM,IAA9BrM,GACQzP,IADRyP,CACazR,KAAKs2J,SADlB7kJ,CAFxB,CAMA,KAAIzR,KAAKu2J,WAALv2J,CAAiB2W,GAAjB3W,CAAqBq2J,CAArBr2J,CAAJ,EAAA;EAMA,aAFA,IAAMsH,IAAYmK,QAAAA,CAAWzR,KAAKkrJ,KAALlrJ,CAAWuvE,OAAtB99D,CAAlB,EACM+kJ,MACN,EAAOlvJ,EAAMlN,MAANkN,GAAe,CAAtB,GAAyB;EACvB,cAAMkK,IAAOlK,EAAMiR,GAANjR,EAAb,CACAkvJ,EAAQhlJ,EAAKjT,IAAbi4J,KAAqB,CAArBA,EACAJ,EAAcp6J,IAAdo6J,CAAmB5kJ,CAAnB4kJ,CADAI,EAEAhlJ,EAAK06I,QAAL16I,CAAcpR,OAAdoR,CAAsB,UAACilJ,CAAD;eACfD,EAAQC,EAAUl4J,IAAlBi4J,KAA2BC,EAAUxxC,UAAVwxC,CAAqBx9I,KAArBw9I,CAA2B,UAAAl4J,CAAA;EAC9C,kBAAAirJ,6BAAA,CACP,OAAOgN,EAAQhN,CAARgN,CAAP;eAF0BC,KAI9BnvJ,EAAMtL,IAANsL,CAAWmvJ,CAAXnvJ;aALJkK,CAFAglJ;EAWFx2J,cAAKu2J,WAALv2J,CAAiB8W,GAAjB9W,CAAqBq2J,CAArBr2J,EAA8Bo2J,CAA9Bp2J;;;KA3FFmE,EAuGA6xJ,WAAAA,QAAAA,GAAA,UACIvkJ,CADJ,EAC6BilJ,CAD7B,EAEI7kJ,CAFJ;EAAA,gBAAA,kBAC6B6kJ,QAE3B,IAAM9iC,IAAQzvH,OAAO0O,IAAP1O,CAAYsN,CAAZtN,EAAoB2Z,IAApB3Z,EAAd,CACAnE,KAAK22J,UAAL32J,CAAgByR,CAAhBzR,EAAwB02J,CAAxB12J,GACAA,KAAK42J,sBAAL52J,CAA4ByR,CAA5BzR,EAAoC02J,CAApC12J,CADAA,EAGAA,KAAKi7H,OAALj7H,CAAa4zH,EAAM9xH,GAAN8xH,CAAU,UAAAr1H,CAAA;EAAQ,aAAAoC,EAAKuqJ,KAALvqJ,CAAW6gH,KAAX7gH,CAAiBpC,CAAjBoC,CAAA;OAAlBizH,CAAb5zH,CAHAA,CAIA,IAAMklH,IAAcllH,KAAK62J,gBAAL72J,CAAsB6R,CAAtB7R,CAApB,CACAA,KAAK82J,WAAL92J,CACIA,KAAKu2J,WAALv2J,CAAiB2W,GAAjB3W,CAAqB4zH,EAAM5xH,IAAN4xH,CAAW5zH,KAAKs2J,SAAhB1iC,CAArB5zH,CADJA,EACsDklH,CADtDllH,EAGA,IAAMq1J,MAAN,CAwBA,OAvBe38I,KAAK;EAOlB,WANA,IAAM0wI,IAAU,IAAIsM,gBAAJ,CAAqB/0J,EAAKs1J,UAA1B,EAAsCZ,CAAtC,CAAhB,EACM7rC,mBAAgB7oH,EAAKy0J,WAAc3jJ,EADzC,EAEM0G,IAAgBxX,EAAKo2J,kBAALp2J,CAAwB6oH,CAAxB7oH,CAFtB,EAGMq2J,MAHN,EAKMC,IAAgBt2J,EAAK41J,WAAL51J,CAAiBgW,GAAjBhW,CAAqBizH,EAAM5xH,IAAN4xH,CAAWjzH,EAAK21J,SAAhB1iC,CAArBjzH,CALtB,EAMS5F,IAAI,CAAb,EAAgBA,IAAIk8J,EAAc78J,MAAlC,EAA0CW,GAA1C,EAA+C;EAC7C,YAAMyW,IAAOylJ,EAAcl8J,CAAdk8J,CAAb,CASA,IARKztC,EAAUh4G,EAAKjT,IAAfirH,MACHA,EAAUh4G,EAAKjT,IAAfirH,IACIwjC,aAAUx7I,CAAVw7I,EAAgBxjC,CAAhBwjC,EAA2B5D,CAA3B4D,CADJxjC,EAEA7oH,EAAKu2J,sBAALv2J,CACI6Q,EAAKjT,IADToC,EACe6Q,CADf7Q,EACqB6oH,CADrB7oH,EACgCyoJ,CADhCzoJ,EACyCwX,CADzCxX,EAEIq2J,CAFJr2J,CAHG6oH,GAQDtE,EAAYjsG,KAAZisG,CAAkB,UAAA3mH,CAAA;EAAQ,mBAAEirH,EAAUjrH,CAAVirH,CAAF;WAA1BtE,CAAJ,EACE;EAGJ,cAAOvkH,EAAKw2J,WAALx2J,CAAiB6oH,CAAjB7oH,EAA4ByoJ,CAA5BzoJ,EAAqCukH,CAArCvkH,CAAP;OArBa+X,CAuBf;KA3IFvU,EA8IQ6xJ,WAAAA,mBAAAA,GAAR,UAA2BxsC,CAA3B;EACE,QAAM4tC,OAASjwJ,OAAOjI,UAElBiF,OAAO0O,IAAP1O,CAAYqlH,CAAZrlH,EACKrC,GADLqC,CACS,UAAAoS,CAAA;EAAO,aAAAizG,EAAUjzG,CAAVizG,CAAA;OADhBrlH,EAEKrC,GAFLqC,CAES,UAAA2jB,CAAA;EAAW,aAAAA,EAAQhmB,GAARgmB,CAAY,UAAA7U,CAAA;EAAU,eAAAA,EAAOtO,EAAP;SAAtB
mjB,CAAA;OAFpB3jB,EAFJ,CAKA,OAAO,IAAIkP,GAAJ,CAAQ+jJ,CAAR,CAAP;KApJFjzJ,EAsJQ6xJ,WAAAA,uBAAAA,GAAR,UACIxM,CADJ,EACsBh4I,CADtB,EACkCg4G,CADlC,EAEI4/B,CAFJ,EAE+BjxI,CAF/B,EAGI6+I,CAHJ;EAMwB,kBAAlBxlJ,EAAKw4I,QAAa,KAItBxgC,EAAUggC,CAAVhgC,EAAoBppH,OAApBopH,CAA4B,UAAAv2G,CAAA;EACZ,cAAVA,CAAU,KACZ+jJ,EAAgC/jJ,EAAOtO,EAAvCqyJ,KACKA,EAAgC/jJ,EAAOtO,EAAvCqyJ,KAA8C,KAC/CxlJ,EAAK06I,QAAL16I,CAAcpX,MAHN;OADhBovH,GAOAh4G,EAAKC,MAALD,CAAYpR,OAAZoR,CAAoB,UAAAG,CAAA;EAGlB,UAAuB,cAAnBA,EAAMq4I,QAAV,EAAkC;EAChC,YAAMliI,IACFuvI,6BAA6B1lJ,EAAMpT,IAAnC84J,EAAyC7tC,CAAzC6tC,EAAoDjO,CAApDiO,CADJ,CAEe,QAAXvvI,CAAW,IACbA,EAAQ1nB,OAAR0nB,CAAgB,UAAA7U,CAAA;EACd,cAAIA,MAAWkF,EAAczE,GAAdyE,CAAkBlF,EAAOtO,EAAzBwT,CAAf,EAA6C;EAC3C,gBAAMuL,IAAQszI,EAAgC/jJ,EAAOtO,EAAvCqyJ,CAAd,CACc,MAAVtzI,CAAU,IACZzQ,EAAOD,OAAPC,WACO+jJ,EAAgC/jJ,EAAOtO,EAAvCqyJ,CAFK,IAGM,QAATtzI,CAAS,IAGlBszI,EAAgC/jJ,EAAOtO,EAAvCqyJ,GANY;;WAHlBlvI,CADa;;OANnBtW,CAXsB;KA5JxBrN,EAwMM6xJ,WAAAA,aAAAA,GAAN,UAAmBvkJ,CAAnB,EAA4CI,CAA5C;;;;;;;;;;;EASkB,mBAPhB7R,KAAK22J,UAAL32J,CAAgByR,CAAhBzR,GAAwB,CAAxBA,GACAA,KAAK42J,sBAAL52J,CAA4ByR,CAA5BzR,GAAoC,CAApCA,CADAA,EAEMq1J,MAFNr1J,EAGMopJ,IAAU,IAAIsM,gBAAJ,CAAqB11J,KAAKi2J,UAA1B,EAAsCZ,CAAtC,CAHhBr1J,MAOsBA,KAAKs3J,sBAALt3J,CAA4ByR,CAA5BzR,EAAoCopJ,CAApCppJ,EAAN;EAkBhB,mBAlBM8nB,IAAU3O,MAAAA,EAAV2O,EACAozE,IAAUl7F,KAAKm3J,WAALn3J,CAAiB8nB,CAAjB9nB,EAA0BopJ,CAA1BppJ,EAAmC6R,CAAnC7R,CADV8nB,EAIAyvI,IAAYpzJ,OAAO0O,IAAP1O,CAAY+2F,CAAZ/2F,EAAqBrC,GAArBqC,CAAyB,UAAAoS,CAAA;EAAO,qBAAA2kF,EAAQ3kF,CAAR2kF,EAAav2F,EAAb;eAAhCR,CAJZ2jB,EAKA0vI,IACFrzJ,OAAO0O,IAAP1O,CAAYsN,CAAZtN,EAAoBrC,GAApBqC,CAAwB,UAAAoS,CAAA;EAAO,qBAAA9E,EAAO8E,CAAP9E,EAAY3P,GAAZ2P,CAAgB,UAAAE,CAAA;EAAS,uBAAAA,EAAMhN,EAAN;iBAAzB8M,CAAA;eAA/BtN,CANE2jB,EAOA2vI,OAActwJ,OAAOjI,UAAUs4J,EAP/B1vI,EAQN3jB,OAAO0O,IAAP1O,CAAY2jB,CAAZ3jB,EAAqB/D,OAArB+D,CAA6B,UAAAoS,CAAA;EACPuR,gBAAQvR,CAARuR,EACR1nB,OADQ0nB,CACA,UAAA7U,CAAA;EACdA,sBAA4C,MAAlCskJ,EAAUj2I,OAAVi2I,CAAkBtkJ,EAAOtO,EAAzB4yJ,CAAVtkJ,KACiC,MAAjCwkJ,EAASn2I,OAATm2I,CAAiBxkJ,EAAOtO,EAAxB8yJ,CADAxkJ,KAEuC,MAAvCtS,EAAKu1J,SAALv1J,CAAe2gB,OAAf3gB,CAAuBsS,EAAOtO,EAA9BhE,CAFAsS,IAGFA,EAAOD,OAAPC,EAHEA;iBAFc6U;eADtB3jB,CARM2jB,MAkBCozE,EAAP;;;KAnOF/2F,EA4Oc6xJ,WAAAA,uBAAAA,GAAd,UACIvkJ,CADJ,EAEI23I,CAFJ;;;;;;;;;;;;EAGQx1B,gBAAQzvH,OAAO0O,IAAP1O,CAAYsN,CAAZtN,CAARyvH,EACA8jC,IAAa9jC,EAAM9xH,GAAN8xH,CAAU,UAAAr1H,CAAA;EAAQ,qBAAAoC,EAAKuqJ,KAALvqJ,CAAW6gH,KAAX7gH,CAAiBpC,CAAjBoC,CAAA;eAAlBizH,CADbA,EAEAtsH,IACEowJ,QAAAA,CAAe13J,KAAKkrJ,KAALlrJ,CAAWuvE,OAA1BmoF,EAAmC51J,GAAnC41J,CAAuC,UAAAlmJ,CAAA;EACzC,uBAAQA,SAAMha,UAAU4xJ,EAAQuO,gBAAhC;eADED,CAHF9jC,EAMApK,mBAAgBxpH,KAAKo1J,WAAc3jJ,EANnCmiH,EAOAojC,MAPApjC,EAQAz7G,IAAgBnY,KAAK+2J,kBAAL/2J,CAAwBwpH,CAAxBxpH,CARhB4zH,EASAgkC,MATAhkC,aAAAA;qBAUCtsH,EAAMlN,MAANkN,GAAe,CAAfA,IACCmvG,IAAWz2G,KAAK63J,YAAL73J,CACb03J,CADa13J,EACDsH,CADCtH,EACMopJ,CADNppJ,EACewpH,CADfxpH,EAC0B43J,CAD1B53J,EACiCmY,CADjCnY,EAEbg3J,CAFah3J,CAAXy2G,MAGAt5G,QAAQ4K,GAAR5K,CAAYs5G,CAAZt5G,EAJDmK;qBAIL6R,MAAAA;EAGF,uBAAOqwG,EAAP;;;KAhQFrlH,EAmQQ6xJ,WAAAA,aAAAA,GAAR,UACI0B,CADJ,EACwBpwJ,CADxB,EACmD8hJ,CADnD,EAEI5/B,CAFJ,EAEgCouC,CAFhC,EAGIz/I,CAHJ,EAII6+I,CAJJ;EAME,SANF,YAAA,EAKQvgD,MALR;EAOI,UAAMjgG,IAAOlP,EAAMiR,GAANjR,EAAb,CACA8hJ,EAAQuO,cAARvO,GAAyB5yI,EAAKhf,QAA9B4xJ,CACA,IAAII,IAAW,EAAf,CAUA,IANqB,YAAjBhzI,EAAKhF,IAALgF,CAAUgP,EAAO,IACjBynI,cAAc,YAAdA,EAA4Bz2I,EAAKhF,IAAjCy7I,EAAuCzjC,CAAvCyjC,EAAkD7D,CAAlD6D,CADiB,KAElBzD,0CAFkB,IAMkB,MAAnCkO,EAAWp2I,OAAXo2I,CAAmBlhJ,EAAKhF,IAAxBkmJ,CAAJ,EAA0C;EACxC,YAAM5vI,IAAUklI,aAAUx2I,EAAKhF,IAAfw7I,EAAqBxjC,CAArBwjC,EAAgC5D,CAAhC4D,CAAhB,CACKxD,MACFA,
0CADEA,EAIL,IAAMsO,IAAiB1O,EAAQuO,cAA/B,CACI7vI,aAAmB3qB,OAAnB2qB,GACF2uF,EAASz6G,IAATy6G,CAAc3uF,EAAQvnB,IAARunB,CAAa,UAAA5O,CAAA;EAOzB,iBANAswG,EAAUggC,CAAVhgC,IAAsBtwG,CAAtBswG,EACA4/B,EAAQuO,cAARvO,GAAyB0O,CADzBtuC,EAEA7oH,EAAKu2J,sBAALv2J,CACI6oJ,CADJ7oJ,EACc6V,EAAKhF,IADnB7Q,EACyB6oH,CADzB7oH,EACoCyoJ,CADpCzoJ,EAC6CwX,CAD7CxX,EAEIq2J,CAFJr2J,CAFA6oH,EAKA7oH,EAAKo3J,iBAALp3J,CAAuB6V,EAAKhF,IAA5B7Q,EAAkC2G,CAAlC3G,EAAyCyoJ,CAAzCzoJ,EAAkD6oH,CAAlD7oH,EAA6Di3J,CAA7Dj3J,CALA6oH,EAMOtwG,CAAP;WAPY4O,CAAd2uF,CADE3uF,IAWF0hG,EAAUggC,CAAVhgC,IAAsB1hG,CAAtB0hG,EACA9pB,EAAKw3D,sBAALx3D,CACI8pD,CADJ9pD,EACclpF,EAAKhF,IADnBkuF,EACyB8pB,CADzB9pB,EACoC0pD,CADpC1pD,EAC6CvnF,CAD7CunF,EAEIs3D,CAFJt3D,CADA8pB,EAIA9pB,EAAKq4D,iBAALr4D,CAAuBlpF,EAAKhF,IAA5BkuF,EAAkCp4F,CAAlCo4F,EAAyC0pD,CAAzC1pD,EAAkD8pB,CAAlD9pB,EAA6Dk4D,CAA7Dl4D,CAfE53E;SAPN,MAyBE43E,EAAKq4D,iBAALr4D,CAAuBlpF,EAAKhF,IAA5BkuF,EAAkCp4F,CAAlCo4F,EAAyC0pD,CAAzC1pD,EAAkD8pB,CAAlD9pB,EAA6Dk4D,CAA7Dl4D;OA5CN,UAME,EAAOp4F,EAAMlN,MAANkN,GAAe,CAAtB,OAyCA,OAAOmvG,CAAP;KAlTFtyG,EAqTQ6xJ,WAAAA,kBAAAA,GAAR,UACIxkJ,CADJ,EACgBlK,CADhB,EAC2C8hJ,CAD3C,EAEI5/B,CAFJ,EAEgCouC,CAFhC;EAGEpmJ,MAAK06I,QAAL16I,CAAcpR,OAAdoR,CAAsB,UAACilJ,CAAD;EACb,UAAAjN,qCAAA,CACFoO,EAAMpO,CAANoO,MAEkB,YAAjBnB,EAAUjxI,EAAO,GACfixI,EAAUxxC,UAAVwxC,CAAqB12I,IAArB02I,CAA0B,UAAAl4J,CAAA;EACxB,iBAAS8qJ,UAAU9qJ,CAAV8qJ,EAAgB7/B,CAAhB6/B,EAA2BD,CAA3BC,CAAT;SADFoN,MAGFmB,EAAMpO,CAANoO,KAAkB,CAAlBA,EACAtwJ,EAAMtL,IAANsL,GAAY9P,UAAU4xJ,EAAQuO,gBAAgBnmJ,MAAMilJ,GAApDnvJ,CAJEmvJ,CADe,GAQbA,EAAUxxC,UAAVwxC,CAAqBx9I,KAArBw9I,CAA2B,UAAAl4J,CAAA;EACzB,iBAAS8qJ,UAAU9qJ,CAAV8qJ,EAAgB7/B,CAAhB6/B,EAA2BD,CAA3BC,CAAT;SADFoN,MAGNmB,EAAMpO,CAANoO,KAAkB,CAAlBA,EACAtwJ,EAAMtL,IAANsL,GAAY9P,UAAU4xJ,EAAQuO,gBAAgBnmJ,MAAMilJ,GAApDnvJ,CAJMmvJ,CAVLmB;OAFPpmJ;KAxTFrN,EA8UQ6xJ,WAAAA,iBAAAA,GAAR,UAAyBnkJ,CAAzB;EAIE,YAHIA,KAAaA,aAAmBhW,UAClCgW,KAAWA,KAELA,KAAW7R,KAAKkrJ,KAALlrJ,CAAW6R,OAAX7R,CAAmB8B,GAAnB9B,CAAuB,UAAAwR,CAAA;EAAQ,aAAAA,EAAKjT,IAAL;OAA/ByB,CAAnB;KAlVFmE,EAqVQ6xJ,WAAAA,YAAAA,GAAR,UACIxsC,CADJ,EACgC4/B,CADhC,EAEIv3I,CAFJ;EAIE,WADyB7R,KAAK62J,gBAAL72J,CAAsB6R,CAAtB7R,EACD27C,MADC37C,CACsB,UAAC8B,CAAD,EAAMvD,CAAN;EAE7C,aADAuD,EAAIvD,CAAJuD,IAAYunJ,UAAU9qJ,CAAV8qJ,EAAgB7/B,CAAhB6/B,EAA2BD,CAA3BC,CAAZvnJ,EACOA,CAAP;OAHuB9B,IAAAA,CACzB;KAzVFmE,EAiWA6xJ,WAAAA,QAAAA,GAAA;EAAA,gBAAA,CACE7xJ,OAAO0O,IAAP1O,CAAYnE,KAAKo1J,SAAjBjxJ,EACK/D,OADL+D,CAEQ,UAAAoS,CAAA;EAAO,aAAA5V,EAAKy0J,SAALz0J,CAAe4V,CAAf5V,EAAoBP,OAApBO,CAA4B,UAAAsS,CAAA;EAAU,eAAAA,EAAOD,OAAPC,EAAA;SAAtCtS,CAAA;OAFfwD;KAlWFA,EAuWQ6xJ,WAAAA,uBAAAA,GAAR,UACIvkJ,CADJ,EAC6BilJ,CAD7B;uBAC6BA,SAC3B12J,KAAK8rJ,YAAL9rJ,CAAkBI,OAAlBJ,CAA0B,UAAAwR,CAAA;EACxB,UAAMi/F,IAAeh/F,EAAOD,EAAKjT,IAAZkT,CAArB,CAGA,IAAKilJ,KAAqBjmD,CAA1B,EAAA;EAIA,YAAM9+F,IAAQ8+F,EAAa,CAAbA,CAAd,CACA,IAAIj/F,EAAKgL,MAALhL,CAAmBvV,KAAnBuV,IAAwBA,EAAKgL,MAALhL,CAAmBvV,KAAnBuV,CAAqBzN,KAAjD,EAAwD;EACtD,cAAMi0J,IAAQxmJ,EAAKgL,MAALhL,CAAmBvV,KAAnBuV,CAAqBzN,KAAnC;EAAA,cACMi4F,IAAQg8D,EAAM59J,MAAN49J,KAAiBrmJ,EAAM1V,KAAN0V,CAAYvX,MAA7B49J,IACVrmJ,EAAM1V,KAAN0V,CAAYsH,KAAZtH,CACI,UAAC7E,CAAD,EAAMxS,CAAN;EAAgB,oBAAkB,MAAlB09J,EAAM19J,CAAN09J,KAAuBA,EAAM19J,CAAN09J,MAAiBlrJ,CAAxC;aADpB6E,CAFJ,CAIAm6F,KAAKpwG,MAALowG,CACI9P,CADJ8P,EAEI,wBACIt6F,EAAKjT,IADT,iDAAA,GAEIy5J,CAFJ,iBAAA,GAEwBrmJ,EAAM1V,KAF9B,MAFJ6vG;EAMEt6F,WAAKgL,MAALhL,CAAmBvT,KAAnBuT,IAAwBA,EAAKgL,MAALhL,CAAmBvT,KAAnBuT,CAAqBzN,KAA7CyN,IACFs6F,KAAKpwG,MAALowG,CACIn6F,EAAM1T,KAAN0T,KAAgBH,EAAKgL,MAALhL,CAAmBvT,KAAnBuT,CAAqBzN,KADzC+nG,EAEI,wBACIt6F,EAAKjT,IADT,gDAAA,GAEIiT,EAAKgL,MAALhL,CAAmBvT,KAAnBuT,CAAqBzN,KAFzB,eAAA,GAE2C4N,E
AAM1T,KAJrD6tG,CADEt6F;;OApBNxR;KAzWFmE,EAuYQ6xJ,WAAAA,WAAAA,GAAR,UAAmBvkJ,CAAnB,EAA4CilJ,CAA5C;EAAA,gBAAA,kBAA4CA,QAC1C,IAAMuB,IAAY9zJ,OAAO0O,IAAP1O,CAAYsN,CAAZtN,CAAlB;EAAA,QACM+zJ,MADN;EAAA,QAEMC,MAFN,CAIAn4J,KAAK03J,UAAL13J,CAAgBI,OAAhBJ,CAAwB,UAAAzB,CAAA;SACW,MAA7B05J,EAAU32I,OAAV22I,CAAkB15J,CAAlB05J,KAAgCC,EAAQl8J,IAARk8J,CAAa35J,CAAb25J;OADtCl4J,GAIAi4J,EAAU73J,OAAV63J,CAAkB,UAAA15J,CAAA;SACuB,MAAnCoC,EAAK+2J,UAAL/2J,CAAgB2gB,OAAhB3gB,CAAwBpC,CAAxBoC,KAAsCw3J,EAAMn8J,IAANm8J,CAAW55J,CAAX45J;OAD5CF,CAJAj4J,CAQA,IAAMo4J,IAAaD,EAAM7qJ,MAAN6qJ,CAAa,UAAA55J,CAAA;EAAQ,cAACoC,EAAKuqJ,KAALvqJ,CAAW6gH,KAAX7gH,CAAiBpC,CAAjBoC,CAAD;OAArBw3J,CAAnB,CAEA,IAAID,EAAQ99J,MAAR89J,GAAiB,CAAjBA,IAAsBxB,CAA1B,EACE,MAAM,IAAIn9J,KAAJ,CACF,4DACI0+J,CADJ,2CAAA,GACsDC,CADtD,OADE,CAAN,CAKF,IAAIC,EAAM/9J,MAAN+9J,GAAe,CAAfA,IAAoBzB,CAAxB,EACE,MAAM,IAAIn9J,KAAJ,CACF,gEACiB4+J,CADjB,iDAAA,GAEIn4J,KAAK03J,UAFT,OADE,CAAN,CAMF,IAAIU,EAAWh+J,MAAXg+J,GAAoB,CAAxB,EACE,MAAM,IAAI7+J,KAAJ,CACF,yDACU6+J,CADV,+BADE,CAAN;KApaJj0J,EA0aQ6xJ,WAAAA,YAAAA,GAAR,UAAoBiB,CAApB,EAA2CplJ,CAA3C;EACE,QAAMwmJ,IAAoBpB,EAAcn1J,GAAdm1J,CAAkB,UAAAzlJ,CAAA;EAAQ,aAAAA,EAAKjT,IAAL;OAA1B04J,CAA1B;EAAA,QACMkB,MADN,CAMA,IAJAtmJ,EAAQzR,OAARyR,CAAgB,UAAAtT,CAAA;SAC2B,MAArC85J,EAAkB/2I,OAAlB+2I,CAA0B95J,CAA1B85J,KAAwCF,EAAMn8J,IAANm8J,CAAW55J,CAAX45J;OAD9CtmJ,GAIIsmJ,EAAM/9J,MAAN+9J,GAAe,CAAnB,EACE,MAAM,IAAI5+J,KAAJ,CACF,mEACI4+J,CADJ,OADE,CAAN;KAlbJh0J,GAubF;KDzbA;EAAA;EE8BE,YAAA,CACYm0J,CADZ,EACsCC,CADtC,EAEYC,CAFZ;EACYx4J,iBAAAA,GAAAs4J,CAAAt4J,EAA0BA,sBAAAA,GAAAu4J,CAA1Bv4J,EACAA,kBAAAA,GAAAw4J,CADAx4J,EApCJA,YAAAA,GAAU,KAoCNA;EA+Ld,UAhOEmE,qBAAAA,CAAIs0J,WAAJt0J,gBAAAA,SAAA;EACE,aAAOnE,KAAK++F,OAAZ;2CADF56F,GAIAA,qBAAAA,CAAIs0J,WAAJt0J,cAAAA,SAAA;EACE,aAAOnE,KAAK04J,QAAL14J,CAAc03J,UAArB;2CADFvzJ,CAJAA,EAQAA,qBAAAA,CAAIs0J,WAAJt0J,eAAAA,SAAA;EACE,aAAOnE,KAAK04J,QAAL14J,CAAc24J,WAArB;2CADFx0J,CARAA,EAYAA,qBAAAA,CAAIs0J,WAAJt0J,UAAAA,SAAA;EACE,aAAOnE,KAAK04J,QAAL14J,CAAcyR,MAArB;2CADFtN,CAZAA,EAgBAA,qBAAAA,CAAIs0J,WAAJt0J,WAAAA,SAAA;EACE,aAAOnE,KAAK04J,QAAL14J,CAAc6R,OAArB;2CADF1N,CAhBAA,EAoBAA,qBAAAA,CAAIs0J,WAAJt0J,WAAAA,SAAA;EACE,aAAOnE,KAAK04J,QAAL14J,CAAco1J,SAArB;2CADFjxJ,CApBAA,EAoCQs0J,WAAAA,cAAAA,GAAR;EACE,QAAMrsE,KAAQpsF,KAAKs4J,UAAUt4J,KAAKu4J,kBAAlC,CACA,IAAIv4J,KAAKw4J,aAAT,EACEx4J,KAAKyE,OAALzE,GAAe44J,GAAOv8D,kBAAPu8D,CAA0BxsE,CAA1BwsE,EAAgC54J,KAAKw4J,aAArCI,CAAf54J,CADF,KAEO;EACL,UAAMs5H,IAAWs/B,GAAOrqE,eAAPqqE,CAAuBxsE,CAAvBwsE,CAAjB,CACA,IAAwB,MAApBt/B,EAASl/H,MAAb,EAGEk/H,EAASt9H,IAATs9H,CAAcs/B,GAAOv8D,kBAAPu8D,CAA0BxsE,CAA1BwsE,EAAgC54J,KAAKw4J,aAArCI,CAAdt/B,EAHF,KAIO,IAAIA,EAASl/H,MAATk/H,GAAkB,CAAtB,EACL,MAAM,IAAI//H,KAAJ,CACF,0BAAwB+/H,EAASl/H,MAAjC,8BAAA,IACSgyF,EADT,MADE,CAAN,CAIFpsF,KAAKyE,OAALzE,GAAes5H,EAAS,CAATA,CAAft5H;;KAnDJmE,EA2DMs0J,WAAAA,KAAAA,GAAN;;;;EAEE,gBADAz4J,KAAK64J,aAAL74J,IACyB,QAArBA,KAAKyE,OAALzE,CAAagvF,IAAjB,EACE,MAAM,IAAIz1F,KAAJ,CACF,+GADE,CAAN,CAIgB,WAAMyG,KAAKyE,OAALzE,CAAagvF,IAAbhvF,GAAN;EAUlB,mBAVMo6H,IAAYjhH,MAAAA,EAAZihH,EACA8wB,IAAQ3I,eAAWiC,QAAXjC,CAAoB3L,MAApB2L,CACV,IAAIlkJ,UAAJ,CAAe+7H,EAAU5tC,aAAzB,CADU+1D,CADRnoB,EAINp6H,KAAK++F,OAAL/+F,GAAkBkrJ,EAAMltI,QAANktI,CAAe5G,QAAf4G,MAAAA,GAA2BA,EAAMltI,QAANktI,CAAe3G,WAJtDnqB,EAKAg7B,IACFwD,GAAO1+D,aAAP0+D,CAAqBx+B,EAAUntC,UAA/B2rE,EAA2Cx+B,EAAUrtC,WAArD6rE,CANEx+B,EAONp6H,KAAK04J,QAAL14J,GACI,IAAIg2J,aAAJ,CAAkBtK,gBAAgBoN,QAAhBpN,CAAyBqN,cAAzBrN,CAAwCR,CAAxCQ,CAAlB,CAREtxB,EASNp6H,KAAK04J,QAAL14J,CAAco1J,SAAdp1J,GAA0BA,KAAKg5J,4BAALh5J,CAAkCo1J,CAAlCp1J,CATpBo6H,OAUC,EAAP;;;KA5EFj2H,EAmHAs0J,WAAAA,QAAAA,GAAA,UACIhnJ,CADJ,EAEIurF,CAFJ;EAI
E,WAAOh9F,KAAKi5J,QAALj5J,CAAcyR,CAAdzR,GAAsB,CAAtBA,EAA4BA,KAAK24J,WAAjC34J,CAAP;KAvHFmE,EA0HQs0J,WAAAA,mBAAAA,GAAR,UAA2BhnJ,CAA3B;EACE,QAAMynJ,IAAaznJ,aAAkB28G,MAAlB38G,IAAgCA,EAAhCA,GAA0CA,CAA7D,CACA,IAAIynJ,EAAW9+J,MAAX8+J,KAAsBl5J,KAAK03J,UAAL13J,CAAgB5F,MAA1C,EACE,MAAM,IAAIb,KAAJ,CACF,sDACwByG,KAAK03J,UAAL13J,CAAgB5F,MADxC,oCAAA,GAEmB8+J,EAAW9+J,MAF9B,oBADE,CAAN,CAKF,OAAO4F,KAAK03J,UAAL13J,CAAgB27C,MAAhB37C,CAAuB,UAAC8B,CAAD,EAAM4P,CAAN,EAAiB3W,CAAjB;EAE5B,aADA+G,EAAI4P,CAAJ5P,IAAiBo3J,EAAWn+J,CAAXm+J,CAAjBp3J,EACOA,CAAP;OAFK9B,IAAAA,CAAP;KAlIFmE,EAqJAs0J,WAAAA,QAAAA,GAAA,UACIhnJ,CADJ,EAEII,CAFJ;EAGE,WAAO7R,KAAKi5J,QAALj5J,CAAcyR,CAAdzR,GAAsB,CAAtBA,EAA6B6R,CAA7B7R,CAAP;KAxJFmE,EA2JQs0J,WAAAA,SAAAA,GAAR,UACIhnJ,CADJ,EAEIilJ,CAFJ,EAE6B7kJ,CAF7B;EAQE,yBANE6kJ,SAEF7kJ,IAAUA,KAAW7R,KAAK24J,cACtBlnJ,aAAkB28G,MAAlB38G,IAAgC5V,MAAMC,OAAND,CAAc4V,CAAd5V,OAClC4V,IAASzR,KAAKm5J,kBAALn5J,CAAwByR,CAAxBzR,IAEPA,KAAK04J,QAAL14J,CAAco5J,kBAAdp5J,IAAoCA,KAAK04J,QAAL14J,CAAcq5J,mBAAtD,EACE,MAAM,IAAI9/J,KAAJ,CACF,sFADE,CAAN,CAIF,IAAM2B,IAAS8E,KAAK04J,QAAL14J,CAAcw3H,OAAdx3H,CACXA,KAAKg5J,4BAALh5J,CAAkCyR,CAAlCzR,CADWA,EACgC02J,CADhC12J,EACkD6R,CADlD7R,CAAf;EAAA,QAEM6S,IAAO1O,OAAO0O,IAAP1O,CAAYjJ,CAAZiJ,CAFb,CAGA,OAAQtI,MAAMC,OAAND,CAAcgW,CAAdhW,KAA0BgW,EAAQzX,MAARyX,GAAiB,CAA3ChW,GACJgW,EAAQ/P,GAAR+P,CAAY,UAAAL,CAAA;EAAQ,aAAAtW,EAAOsW,CAAPtW,CAAA;OAApB2W,CADIhW,GAEJX,EAAO2X,EAAK,CAALA,CAAP3X,CAFJ;KA3KFiJ,EA6LMs0J,WAAAA,aAAAA,GAAN,UACIhnJ,CADJ,EAEII,CAFJ;;;;EAGE,iBAAM7R,KAAK04J,QAAL14J,CAAco5J,uBACdp5J,KAAK04J,QAAL14J,CAAcq5J,mBADpB,EAEE,MAAM,IAAI9/J,KAAJ,CACF,iHADE,CAAN,CASa,OALfsY,IAAUA,KAAW7R,KAAK24J,WAA1B9mJ,GACIJ,aAAkB28G,MAAlB38G,IAAgC5V,MAAMC,OAAND,CAAc4V,CAAd5V,OAClC4V,IAASzR,KAAKm5J,kBAALn5J,CAAwByR,CAAxBzR,EAFX6R,MAKqB7R,KAAK04J,QAAL14J,CAAcs5J,YAAdt5J,CACjBA,KAAKg5J,4BAALh5J,CAAkCyR,CAAlCzR,CADiBA,EAC0B6R,CAD1B7R,EAAN;EAGf,mBAHM9E,IAASie,MAAAA,EAATje,EAEA2X,IAAO1O,OAAO0O,IAAP1O,CAAYjJ,CAAZiJ,CAFPjJ,MAGCW,MAAMC,OAAND,CAAcgW,CAAdhW,KAA0BgW,EAAQzX,MAARyX,GAAiB,CAA3ChW,GACHgW,EAAQ/P,GAAR+P,CAAY,UAAAL,CAAA;EAAQ,qBAAAtW,EAAOsW,CAAPtW,CAAA;eAApB2W,CADGhW,GAEHX,EAAO2X,EAAK,CAALA,CAAP3X,EAFJ;;;KA9MFiJ,EAmNQs0J,WAAAA,6BAAAA,GAAR,UAAqC32J,CAArC;EAEE,WAAOqC,OAAO0O,IAAP1O,CAAYrC,CAAZqC,EAAiBw3C,MAAjBx3C,CAAwB,UAACo1J,CAAD,EAA0BhjJ,CAA1B;EAE7B,aADAgjJ,EAAOhjJ,CAAPgjJ,KAAez3J,EAAIyU,CAAJzU,EAAfy3J,EACOA,CAAP;OAFKp1J,IAAAA,CAAP;KArNFA,EA6NAs0J,WAAAA,QAAAA,GAAA;EACEz4J,SAAK04J,QAAL14J,CAAcgT,OAAdhT;KA9NFmE,GAgOF;KF9NA,0BE2PIm0J,GAAkBkB,GAClBhB;;;;EAEF,sBADM1hD,IAAQ,IAAI2hD,WAAJ,CAAgBH,CAAhB,EAA0BkB,CAA1B,EAA8ChB,CAA9C,GACFxpE,OAAZ;EACA,iBADA71E,MAAAA,QACO29F,EAAP;;;OClSI/X,YAAU;;ECAhB,IAAMA,YAAU,QAAhB;;MCwBaA,YAAU;EACrB,iBAAa06D,OADQ;EAErB,mBAAeC,SAFM;EAGrB,sBAAkBC,SAHG;EAIrB,YAAQC;EAJa,CAAhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}