/**
 * Created by joonkukang on 2014. 1. 13.
 */
var math = require('./utils').math;
var HiddenLayer = require('./HiddenLayer');
var RBM = require('./RBM');
var MLP = require('./MLP');

var DBN = module.exports = function (settings) {
    var self = this;
    self.x = settings['input'];
    self.y = settings['label'];
    self.sigmoidLayers = [];
    self.rbmLayers = [];
    self.nLayers = settings['hidden_layer_sizes'].length;
    self.hiddenLayerSizes = settings['hidden_layer_sizes'];
    self.nIns = settings['n_ins'];
    self.nOuts = settings['n_outs'];
    self.settings = {
        'log level' : 1 // 0 : nothing, 1 : info, 2 : warn
    };

    // Constructing Deep Neural Network
    var i;
    for(i = 0; i < self.nLayers; i++) {
        var inputSize, layerInput;
        if(i == 0)
            inputSize = settings['n_ins'];
        else
            inputSize = settings['hidden_layer_sizes'][i-1];

        if(i == 0)
            layerInput = self.x;
        else
            layerInput = self.sigmoidLayers[self.sigmoidLayers.length-1].sampleHgivenV();

        var sigmoidLayer = new HiddenLayer({
            'input' : layerInput,
            'n_in' : inputSize,
            'n_out' : settings['hidden_layer_sizes'][i],
            'activation' : math.sigmoid
        });
        self.sigmoidLayers.push(sigmoidLayer);

        var rbmLayer = new RBM({
            'input' : layerInput,
            'n_visible' : inputSize,
            'n_hidden' : settings['hidden_layer_sizes'][i]
        });
        self.rbmLayers.push(rbmLayer);
    }
    self.outputLayer = new HiddenLayer({
        'input' : self.sigmoidLayers[self.sigmoidLayers.length-1].sampleHgivenV(),
        'n_in' : settings['hidden_layer_sizes'][settings['hidden_layer_sizes'].length - 1],
        'n_out' : settings['n_outs'],
        'activation' : math.sigmoid
    });
};
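// Usage sketch (illustrative, not executed here). The settings keys below are
// exactly the ones the constructor reads; the require path assumes this file
// is exposed as ./DBN, and the 6-dimensional toy data is made up.
//
//   var DBN = require('./DBN');
//   var dbn = new DBN({
//       'input' : [[1,1,1,0,0,0],
//                  [1,0,1,0,0,0],
//                  [0,0,1,1,1,0],
//                  [0,0,1,1,0,0]],
//       'label' : [[1,0],
//                  [1,0],
//                  [0,1],
//                  [0,1]],
//       'n_ins' : 6,
//       'n_outs' : 2,
//       'hidden_layer_sizes' : [4, 3]
//   });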

DBN.prototype.pretrain = function (settings) {
    var self = this;
    var lr = 0.6, k = 1, epochs = 2000;
    if(typeof settings['lr'] !== 'undefined')
        lr = settings['lr'];
    if(typeof settings['k'] !== 'undefined')
        k = settings['k'];
    if(typeof settings['epochs'] !== 'undefined')
        epochs = settings['epochs'];

    var i;
    for(i = 0; i < self.nLayers; i++) {
        var layerInput, rbm;
        if(i == 0)
            layerInput = self.x;
        else
            layerInput = self.sigmoidLayers[i-1].sampleHgivenV(layerInput);
        rbm = self.rbmLayers[i];
        rbm.set('log level', 0);
        rbm.train({
            'lr' : lr,
            'k' : k,
            'input' : layerInput,
            'epochs' : epochs
        });

        if(self.settings['log level'] > 0) {
            console.log("DBN RBM", i, "th Layer Final Cross Entropy: ", rbm.getReconstructionCrossEntropy());
            console.log("DBN RBM", i, "th Layer Pre-Training Completed.");
        }

        // Synchronize the pretrained RBM weights with the corresponding sigmoid layer
        self.sigmoidLayers[i].W = rbm.W;
        self.sigmoidLayers[i].b = rbm.hbias;
    }
    if(self.settings['log level'] > 0)
        console.log("DBN Pre-Training Completed.");
};
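// Usage sketch (illustrative): greedy layer-wise pre-training. Each RBM is
// trained on the sampled output of the layer below; 'k' is passed through to
// RBM.train (presumably the number of contrastive-divergence steps). All keys
// are optional and fall back to the defaults above.
//
//   dbn.pretrain({ 'lr' : 0.6, 'k' : 1, 'epochs' : 1000 });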

DBN.prototype.finetune = function (settings) {
    var self = this;
    var lr = 0.2, epochs = 1000;
    if(typeof settings['lr'] !== 'undefined')
        lr = settings['lr'];
    if(typeof settings['epochs'] !== 'undefined')
        epochs = settings['epochs'];

    // Fine-tuning using an MLP (back propagation)
    var i;
    var pretrainedWArray = [], pretrainedBArray = []; // HiddenLayer W, b values already pretrained by the RBMs
    for(i = 0; i < self.nLayers; i++) {
        pretrainedWArray.push(self.sigmoidLayers[i].W);
        pretrainedBArray.push(self.sigmoidLayers[i].b);
    }
    // W and b of the final output layer are not included in pretrainedWArray / pretrainedBArray,
    // so the MLP constructor receives them as undefined.
    var mlp = new MLP({
        'input' : self.x,
        'label' : self.y,
        'n_ins' : self.nIns,
        'n_outs' : self.nOuts,
        'hidden_layer_sizes' : self.hiddenLayerSizes,
        'w_array' : pretrainedWArray,
        'b_array' : pretrainedBArray
    });
    mlp.set('log level', self.settings['log level']);
    mlp.train({
        'lr' : lr,
        'epochs' : epochs
    });
    // Copy the fine-tuned weights back into this DBN's layers
    for(i = 0; i < self.nLayers; i++) {
        self.sigmoidLayers[i].W = mlp.sigmoidLayers[i].W;
        self.sigmoidLayers[i].b = mlp.sigmoidLayers[i].b;
    }
    self.outputLayer.W = mlp.sigmoidLayers[self.nLayers].W;
    self.outputLayer.b = mlp.sigmoidLayers[self.nLayers].b;
};
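// Usage sketch (illustrative): supervised fine-tuning. The pre-trained W and b
// of every hidden layer seed the MLP via 'w_array' / 'b_array', back
// propagation runs for 'epochs' iterations, and the updated weights are copied
// back into this DBN.
//
//   dbn.finetune({ 'lr' : 0.2, 'epochs' : 500 });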

DBN.prototype.getReconstructionCrossEntropy = function () {
    var self = this;
    var reconstructedOutput = self.predict(self.x);
    var a = math.activateTwoMat(self.y, reconstructedOutput, function (x, y) {
        return x * Math.log(y);
    });

    var b = math.activateTwoMat(self.y, reconstructedOutput, function (x, y) {
        return (1 - x) * Math.log(1 - y);
    });

    var crossEntropy = -math.meanVec(math.sumMatAxis(math.addMat(a, b), 1));
    return crossEntropy;
};
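// For reference, the value returned above is the mean (over the N rows of
// self.y) of the per-row binary cross entropy between label y and prediction p:
//   -(1/N) * sum_n sum_j ( y[n][j]*log(p[n][j]) + (1 - y[n][j])*log(1 - p[n][j]) )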

DBN.prototype.predict = function (x) {
    var self = this;
    var layerInput = x, i;
    for(i = 0; i < self.nLayers; i++) {
        layerInput = self.sigmoidLayers[i].output(layerInput);
    }
    var output = self.outputLayer.output(layerInput);
    return output;
};
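// Usage sketch (illustrative): forward-propagate new rows through the trained
// stack; the return value is the output layer's activation for each row. The
// example inputs are hypothetical 6-dimensional vectors matching the toy
// constructor settings sketched above.
//
//   var probs = dbn.predict([[1,1,0,0,0,0],
//                            [0,0,0,1,1,0]]);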

DBN.prototype.set = function (property, value) {
    var self = this;
    self.settings[property] = value;
};
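// Usage sketch (illustrative): the only setting currently read by this module
// is 'log level' (0 : nothing, 1 : info, 2 : warn), e.g. to silence training
// output:
//
//   dbn.set('log level', 0);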