/**
 * @license
 * Copyright 2018 Google LLC
 *
 * Use of this source code is governed by an MIT-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/MIT.
 * =============================================================================
 */
/// <amd-module name="@tensorflow/tfjs-layers/dist/layers/advanced_activations" />
/**
 * Advanced activation layers.
 */
import { serialization, Tensor } from '@tensorflow/tfjs-core';
import { Constraint } from '../constraints';
import { Layer, LayerArgs } from '../engine/topology';
import { Initializer, InitializerIdentifier } from '../initializers';
import { Shape } from '../keras_format/common';
import { Regularizer } from '../regularizers';
import { Kwargs } from '../types';
export declare interface ReLULayerArgs extends LayerArgs {
    /**
     * Float, the maximum output value.
     */
    maxValue?: number;
}
export declare class ReLU extends Layer {
    /** @nocollapse */
    static className: string;
    maxValue: number;
    constructor(args?: ReLULayerArgs);
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    getConfig(): serialization.ConfigDict;
}
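/**
 * Usage sketch (not part of the shipped declarations): a `ReLU` layer is
 * normally created through the `tf.layers.reLU()` factory of the top-level
 * `@tensorflow/tfjs` package, which accepts the `ReLULayerArgs` above.
 *
 * ```js
 * // Standard rectifier, clipped at maxValue: outputs lie in [0, 6].
 * const layer = tf.layers.reLU({maxValue: 6});
 * layer.apply(tf.tensor1d([-2, 0, 3, 10])).print();  // [0, 0, 3, 6]
 * ```
 */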
export declare interface LeakyReLULayerArgs extends LayerArgs {
    /**
     * Float `>= 0`. Negative slope coefficient. Defaults to `0.3`.
     */
    alpha?: number;
}
export declare class LeakyReLU extends Layer {
    /** @nocollapse */
    static className: string;
    readonly alpha: number;
    readonly DEFAULT_ALPHA = 0.3;
    constructor(args?: LeakyReLULayerArgs);
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    getConfig(): serialization.ConfigDict;
}
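/**
 * Usage sketch (illustrative only, assuming the `tf.layers.leakyReLU()`
 * factory of the top-level `@tensorflow/tfjs` package):
 *
 * ```js
 * // Negative inputs are scaled by alpha instead of being zeroed out.
 * const layer = tf.layers.leakyReLU({alpha: 0.2});
 * layer.apply(tf.tensor1d([-5, 0, 3])).print();  // [-1, 0, 3]
 * ```
 */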
export declare interface PReLULayerArgs extends LayerArgs {
    /**
     * Initializer for the learnable alpha.
     */
    alphaInitializer?: Initializer | InitializerIdentifier;
    /**
     * Regularizer for the learnable alpha.
     */
    alphaRegularizer?: Regularizer;
    /**
     * Constraint for the learnable alpha.
     */
    alphaConstraint?: Constraint;
    /**
     * The axes along which to share learnable parameters for the activation
     * function. For example, if the incoming feature maps are from a 2D
     * convolution with output shape `[numExamples, height, width, channels]`,
     * and you wish to share parameters across space (height and width) so that
     * each filter has only one set of parameters, set
     * `sharedAxes: [1, 2]`.
     */
    sharedAxes?: number | number[];
}
export declare class PReLU extends Layer {
    /** @nocollapse */
    static className: string;
    private readonly alphaInitializer;
    private readonly alphaRegularizer;
    private readonly alphaConstraint;
    private readonly sharedAxes;
    private alpha;
    readonly DEFAULT_ALPHA_INITIALIZER: InitializerIdentifier;
    constructor(args?: PReLULayerArgs);
    build(inputShape: Shape | Shape[]): void;
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    getConfig(): serialization.ConfigDict;
}
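/**
 * Usage sketch (illustrative only, assuming the `tf.layers.prelu()` and
 * `tf.layers.conv2d()` factories of the top-level `@tensorflow/tfjs` package):
 *
 * ```js
 * // PReLU learns its alpha during training. Sharing axes 1 and 2 (height
 * // and width) leaves one learnable alpha per channel.
 * const model = tf.sequential();
 * model.add(tf.layers.conv2d(
 *     {inputShape: [28, 28, 3], filters: 8, kernelSize: 3}));
 * model.add(tf.layers.prelu({sharedAxes: [1, 2]}));
 * model.summary();
 * ```
 */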
export declare interface ELULayerArgs extends LayerArgs {
    /**
     * Float `>= 0`. Scale for the negative factor. Defaults to `1.0`.
     */
    alpha?: number;
}
export declare class ELU extends Layer {
    /** @nocollapse */
    static className: string;
    readonly alpha: number;
    readonly DEFAULT_ALPHA = 1;
    constructor(args?: ELULayerArgs);
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    getConfig(): serialization.ConfigDict;
}
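/**
 * Usage sketch (illustrative only, assuming the `tf.layers.elu()` factory of
 * the top-level `@tensorflow/tfjs` package):
 *
 * ```js
 * // ELU: identity for x >= 0, alpha * (exp(x) - 1) for x < 0.
 * const layer = tf.layers.elu({alpha: 1.0});
 * layer.apply(tf.tensor1d([-1, 0, 2])).print();  // [-0.6321, 0, 2]
 * ```
 */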
export declare interface ThresholdedReLULayerArgs extends LayerArgs {
    /**
     * Float `>= 0`. Threshold location of activation.
     */
    theta?: number;
}
export declare class ThresholdedReLU extends Layer {
    /** @nocollapse */
    static className: string;
    readonly theta: number;
    readonly DEFAULT_THETA = 1;
    constructor(args?: ThresholdedReLULayerArgs);
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    getConfig(): serialization.ConfigDict;
}
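/**
 * Usage sketch (illustrative only, assuming the `tf.layers.thresholdedReLU()`
 * factory of the top-level `@tensorflow/tfjs` package):
 *
 * ```js
 * // Passes x through only where x > theta; everything else becomes 0.
 * const layer = tf.layers.thresholdedReLU({theta: 1});
 * layer.apply(tf.tensor1d([0.5, 1, 2])).print();  // [0, 0, 2]
 * ```
 */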
export declare interface SoftmaxLayerArgs extends LayerArgs {
    /**
     * Integer, axis along which the softmax normalization is applied.
     * Defaults to `-1` (i.e., the last axis).
     */
    axis?: number;
}
export declare class Softmax extends Layer {
    /** @nocollapse */
    static className: string;
    readonly axis: number;
    readonly softmax: (t: Tensor, a?: number) => Tensor;
    readonly DEFAULT_AXIS = 1;
    constructor(args?: SoftmaxLayerArgs);
    call(inputs: Tensor | Tensor[], kwargs: Kwargs): Tensor | Tensor[];
    computeOutputShape(inputShape: Shape | Shape[]): Shape | Shape[];
    getConfig(): serialization.ConfigDict;
}
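/**
 * Usage sketch (illustrative only, assuming the `tf.layers.softmax()` factory
 * of the top-level `@tensorflow/tfjs` package):
 *
 * ```js
 * // Normalizes along the chosen axis so each row sums to 1.
 * const layer = tf.layers.softmax({axis: -1});
 * layer.apply(tf.tensor2d([[1, 2, 3]])).print();
 * // [[0.0900, 0.2447, 0.6652]]
 * ```
 */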