/**
 * @license
 * Copyright 2018 Google LLC
 *
 * Use of this source code is governed by an MIT-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/MIT.
 * =============================================================================
 */
/// <amd-module name="@tensorflow/tfjs-layers/dist/layers/core" />
/**
 * TensorFlow.js Layers: Basic Layers.
 */
import { serialization, Tensor } from '@tensorflow/tfjs-core';
import { Activation as ActivationFn } from '../activations';
import { Constraint, ConstraintIdentifier } from '../constraints';
import { DisposeResult, Layer, LayerArgs } from '../engine/topology';
import { Initializer, InitializerIdentifier } from '../initializers';
import { ActivationIdentifier } from '../keras_format/activation_config';
import { DataFormat, Shape } from '../keras_format/common';
import { LayerConfig } from '../keras_format/topology_config';
import { Regularizer, RegularizerIdentifier } from '../regularizers';
import { Kwargs } from '../types';
export declare interface DropoutLayerArgs extends LayerArgs {
    /** Float between 0 and 1. Fraction of the input units to drop. */
    rate: number;
    /**
     * Integer array representing the shape of the binary dropout mask that will
     * be multiplied with the input.
     *
     * For instance, if your inputs have shape `[batchSize, timesteps, features]`
     * and you want the dropout mask to be the same for all timesteps, you can
     * use `noiseShape: [batchSize, 1, features]`.
     */
    noiseShape?: number[];
    /** An integer to use as random seed. */
    seed?: number;
}
export declare class Dropout extends Layer {
    /** @nocollapse */
    static className: string;
    private readonly rate;
    private readonly noiseShape;
    private readonly seed;
    constructor(args: DropoutLayerArgs);
    protected getNoiseShape(input: Tensor): Shape;
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    getConfig(): serialization.ConfigDict;
    dispose(): DisposeResult;
}
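/*
 * Illustrative usage sketch (not part of the generated declarations): assuming
 * the standard `tf.layers.dropout()` factory exported by @tensorflow/tfjs-layers,
 * a Dropout layer whose mask is shared across all timesteps of a
 * `[batchSize, timesteps, features]` input could be configured as follows:
 *
 *     import * as tf from '@tensorflow/tfjs';
 *
 *     // noiseShape [32, 1, 8] broadcasts one mask per (example, feature) over timesteps.
 *     const drop = tf.layers.dropout({rate: 0.25, noiseShape: [32, 1, 8], seed: 42});
 *     const y = drop.apply(tf.ones([32, 10, 8]), {training: true}) as tf.Tensor;
 */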
export declare interface DenseLayerArgs extends LayerArgs {
    /** Positive integer, dimensionality of the output space. */
    units: number;
    /**
     * Activation function to use.
     *
     * If unspecified, no activation is applied.
     */
    activation?: ActivationIdentifier;
    /** Whether to apply a bias. */
    useBias?: boolean;
    /**
     * Initializer for the dense kernel weights matrix.
     */
    kernelInitializer?: InitializerIdentifier | Initializer;
    /**
     * Initializer for the bias vector.
     */
    biasInitializer?: InitializerIdentifier | Initializer;
    /**
     * If specified, defines inputShape as `[inputDim]`.
     */
    inputDim?: number;
    /**
     * Constraint for the kernel weights.
     */
    kernelConstraint?: ConstraintIdentifier | Constraint;
    /**
     * Constraint for the bias vector.
     */
    biasConstraint?: ConstraintIdentifier | Constraint;
    /**
     * Regularizer function applied to the dense kernel weights matrix.
     */
    kernelRegularizer?: RegularizerIdentifier | Regularizer;
    /**
     * Regularizer function applied to the bias vector.
     */
    biasRegularizer?: RegularizerIdentifier | Regularizer;
    /**
     * Regularizer function applied to the activation.
     */
    activityRegularizer?: RegularizerIdentifier | Regularizer;
}
export interface SpatialDropout1DLayerConfig extends LayerConfig {
    /** Float between 0 and 1. Fraction of the input units to drop. */
    rate: number;
    /** An integer to use as random seed. */
    seed?: number;
}
export declare class SpatialDropout1D extends Dropout {
    /** @nocollapse */
    static className: string;
    constructor(args: SpatialDropout1DLayerConfig);
    protected getNoiseShape(input: Tensor): Shape;
}
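/*
 * Illustrative usage sketch (with `tf` as in the Dropout sketch above): assuming
 * the standard `tf.layers.spatialDropout1d()` factory, spatial dropout zeroes out
 * whole feature channels of a 3D `[batch, timesteps, channels]` input instead of
 * individual elements:
 *
 *     const sd = tf.layers.spatialDropout1d({rate: 0.3, seed: 1});
 *     const y = sd.apply(tf.ones([4, 20, 16]), {training: true}) as tf.Tensor;
 */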
export declare class Dense extends Layer {
    /** @nocollapse */
    static className: string;
    private units;
    private activation;
    private useBias;
    private kernelInitializer;
    private biasInitializer;
    private kernel;
    private bias;
    readonly DEFAULT_KERNEL_INITIALIZER: InitializerIdentifier;
    readonly DEFAULT_BIAS_INITIALIZER: InitializerIdentifier;
    private readonly kernelConstraint?;
    private readonly biasConstraint?;
    private readonly kernelRegularizer?;
    private readonly biasRegularizer?;
    constructor(args: DenseLayerArgs);
    build(inputShape: Shape | Shape[]): void;
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    getConfig(): serialization.ConfigDict;
}
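/*
 * Illustrative usage sketch (with `tf` as in the Dropout sketch above): assuming
 * the standard `tf.layers.dense()` factory, a fully connected layer computes
 * `activation(dot(input, kernel) + bias)`; `inputDim: 784` is shorthand for
 * `inputShape: [784]`:
 *
 *     const dense = tf.layers.dense({
 *         units: 10,
 *         activation: 'relu',
 *         kernelInitializer: 'glorotUniform',
 *         inputDim: 784,
 *     });
 *     const out = dense.apply(tf.zeros([1, 784])) as tf.Tensor;  // shape [1, 10]
 */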
export declare interface FlattenLayerArgs extends LayerArgs {
    /** Image data format: channelsLast (default) or channelsFirst. */
    dataFormat?: DataFormat;
}
export declare class Flatten extends Layer {
    private dataFormat;
    /** @nocollapse */
    static className: string;
    constructor(args?: FlattenLayerArgs);
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    getConfig(): serialization.ConfigDict;
}
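/*
 * Illustrative usage sketch (with `tf` as in the Dropout sketch above): assuming
 * the standard `tf.layers.flatten()` factory, the layer collapses all non-batch
 * dimensions into one:
 *
 *     const flat = tf.layers.flatten();
 *     const y = flat.apply(tf.zeros([2, 28, 28, 3])) as tf.Tensor;  // shape [2, 2352]
 */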
export declare interface ActivationLayerArgs extends LayerArgs {
    /**
     * Name of the activation function to use.
     */
    activation: ActivationIdentifier;
}
export declare class Activation extends Layer {
    /** @nocollapse */
    static className: string;
    activation: ActivationFn;
    constructor(args: ActivationLayerArgs);
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    getConfig(): serialization.ConfigDict;
}
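/*
 * Illustrative usage sketch (with `tf` as in the Dropout sketch above): assuming
 * the standard `tf.layers.activation()` factory, the layer applies the named
 * element-wise activation to its input:
 *
 *     const act = tf.layers.activation({activation: 'softmax'});
 *     const probs = act.apply(tf.tensor2d([[1, 2, 3]])) as tf.Tensor;  // rows sum to 1
 */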
export declare interface ReshapeLayerArgs extends LayerArgs {
    /** The target shape. Does not include the batch axis. */
    targetShape: Shape;
}
export declare interface RepeatVectorLayerArgs extends LayerArgs {
    /**
     * The integer number of times to repeat the input.
     */
    n: number;
}
export declare class RepeatVector extends Layer {
    /** @nocollapse */
    static className: string;
    readonly n: number;
    constructor(args: RepeatVectorLayerArgs);
    computeOutputShape(inputShape: Shape): Shape;
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    getConfig(): serialization.ConfigDict;
}
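/*
 * Illustrative usage sketch (with `tf` as in the Dropout sketch above): assuming
 * the standard `tf.layers.repeatVector()` factory, a 2D input `[batch, features]`
 * becomes a 3D output `[batch, n, features]`:
 *
 *     const rep = tf.layers.repeatVector({n: 3});
 *     const y = rep.apply(tf.tensor2d([[1, 2]])) as tf.Tensor;  // shape [1, 3, 2]
 */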
export declare class Reshape extends Layer {
    /** @nocollapse */
    static className: string;
    private targetShape;
    constructor(args: ReshapeLayerArgs);
    private isUnknown;
    /**
     * Finds and replaces a missing dimension in the output shape.
     *
     * This is a near-direct port of the internal NumPy function
     * `_fix_unknown_dimension` in `numpy/core/src/multiarray/shape.c`.
     *
     * @param inputShape: Original shape of the array being reshaped.
     * @param outputShape: Target shape of the array, with at most a single
     *   `null` or negative number, which indicates an underdetermined dimension
     *   that should be derived from `inputShape` and the known dimensions of
     *   `outputShape`.
     * @returns: The output shape with `null` replaced with its computed value.
     * @throws: ValueError: If `inputShape` and `outputShape` do not match.
     */
    private fixUnknownDimension;
    computeOutputShape(inputShape: Shape): Shape;
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    getConfig(): serialization.ConfigDict;
}
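/*
 * Illustrative usage sketch (with `tf` as in the Dropout sketch above): assuming
 * the standard `tf.layers.reshape()` factory, `targetShape` may contain a single
 * `null` (unknown) dimension, which `fixUnknownDimension` resolves so that the
 * total number of elements is preserved:
 *
 *     const rs = tf.layers.reshape({targetShape: [2, null]});
 *     const y = rs.apply(tf.zeros([1, 6])) as tf.Tensor;  // null inferred as 3 -> shape [1, 2, 3]
 */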
export declare interface PermuteLayerArgs extends LayerArgs {
    /**
     * Array of integers. Permutation pattern. Does not include the
     * sample (batch) dimension. Index starts at 1.
     * For instance, `[2, 1]` permutes the first and second dimensions
     * of the input.
     */
    dims: number[];
}
export declare class Permute extends Layer {
    /** @nocollapse */
    static className: string;
    readonly dims: number[];
    private readonly dimsIncludingBatch;
    constructor(args: PermuteLayerArgs);
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    getConfig(): serialization.ConfigDict;
}
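/*
 * Illustrative usage sketch (with `tf` as in the Dropout sketch above): assuming
 * the standard `tf.layers.permute()` factory, `dims` is 1-indexed over the
 * non-batch axes, so `[2, 1]` swaps the two non-batch dimensions:
 *
 *     const perm = tf.layers.permute({dims: [2, 1]});
 *     const y = perm.apply(tf.zeros([4, 10, 64])) as tf.Tensor;  // shape [4, 64, 10]
 */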
export declare interface MaskingArgs extends LayerArgs {
    /**
     * Masking value. Defaults to `0.0`.
     */
    maskValue?: number;
}
export declare class Masking extends Layer {
    /** @nocollapse */
    static className: string;
    maskValue: number;
    constructor(args?: MaskingArgs);
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    getConfig(): {
        maskValue: number;
    };
    computeMask(inputs: Tensor | Tensor[], mask?: Tensor | Tensor[]): Tensor;
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
}
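/*
 * Illustrative usage sketch (with `tf` as in the Dropout sketch above): assuming
 * the standard `tf.layers.masking()` factory, timesteps whose features all equal
 * `maskValue` are masked for downstream mask-consuming layers such as RNNs:
 *
 *     const masking = tf.layers.masking({maskValue: 0});
 *     const y = masking.apply(tf.tensor3d([[[0, 0], [1, 2]]])) as tf.Tensor;  // first timestep masked
 */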