DaNNet
dnn_layer_act.h
Go to the documentation of this file.
1 // Copyright 2019 Claes Rolen (www.rolensystems.com)
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 #pragma once
16 namespace dnn
17 {
21 
28 class layer_act: public layer
29 {
30 protected:
31 public:
32  layer_act(void):layer() {};
33  virtual void prop(void) =0;
34  virtual void prop_mb(void) =0;
35  virtual void backprop(void) =0;
36 
42  virtual void init(void)
43  {
44  // Get data from left
45  layer::init();
46 
47  // Set right data to same as previous layer
51  N_right = N_left;
52  }
53 
59  virtual void disp(void)
60  {
61  layer::disp();
62  std::cout << "Nr of outputs: " << get_nrof_outputs() << " ["<< N_rows_right << ","<<N_cols_right<<","<< N_channels_right<<"]"<< std::endl;
63  }
64 
65 }; // End class layer_act
66 
67 
74 class act_sigmoid: public layer_act
75 {
76 private:
77 public:
78 
83  {
84  type = "Sigmoid";
85  id = type;
86  }
87 
93  void prop(void)
94  {
95  arma::Mat<DNN_Dtype> X1 = left->get_Y1();
96  Y1 = sigmoid(X1);
97  }
98 
104  void prop_mb(void)
105  {
106  Y = sigmoid(*(left->get_Y_ptr()));
107  }
108 
114  void backprop(void)
115  {
116  Dleft = Y%(1-Y)%(right->get_Dleft());
117  }
118 }; // End class act_sigmoid
119 
120 
127 class act_softplus: public layer_act
128 {
129 private:
130 public:
135  {
136  type = "Softplus";
137  id = type;
138  }
139 
145  void prop(void)
146  {
147  const arma::Mat<DNN_Dtype> I(N_right,1,arma::fill::ones);
148  *(this->get_Y1_ptr()) = arma::trunc_log(I+arma::trunc_exp(*(left->get_Y1_ptr())));
149  }
150 
156  void prop_mb(void)
157  {
158  *(this->get_Y_ptr()) = arma::trunc_log(1+arma::trunc_exp(*(left->get_Y_ptr())));
159  }
160 
166  void backprop(void)
167  {
168  *(this->get_Dleft_ptr()) = sigmoid(*(this->get_Y_ptr()))%(*(right->get_Dleft_ptr()));
169  }
170 }; // End class act_softplus
171 
172 
173 
180 class act_ReLU: public layer_act
181 {
182 private:
183 public:
188  {
189  type = "ReLU";
190  id = type;
191  }
192 
198  void prop(void)
199  {
200  DNN_Dtype* x_ptr = left->get_Y1_memptr();
201  DNN_Dtype* y_ptr = this->get_Y1_memptr();
202  for(arma::uword n=0; n<N_left; n++)
203  {
204  *y_ptr++ = (*x_ptr <0) ? (DNN_Dtype)0.0 : *x_ptr;
205  x_ptr++;
206  }
207  }
208 
214  void prop_mb(void)
215  {
216  DNN_Dtype* x_ptr = left->get_Y_memptr();
217  DNN_Dtype* y_ptr = this->get_Y_memptr();
218  const arma::uword N_ = N_left*N_batch;
219  for(arma::uword n=0; n<N_; n++)
220  {
221  *y_ptr++ = (*x_ptr <0) ? (DNN_Dtype)0.0 : *x_ptr;
222  x_ptr++;
223  }
224  }
225 
231  void backprop(void)
232  {
233  DNN_Dtype* x_ptr = left->get_Y_memptr();
234  DNN_Dtype* y_ptr = this->get_Dleft_memptr();
235  DNN_Dtype* d_ptr = right->get_Dleft_memptr();
236  const arma::uword N_ = N_left*N_batch;
237  for(arma::uword n=0; n<N_; n++)
238  {
239  *y_ptr++ = (*x_ptr<0 ) ? (DNN_Dtype)0.0: *d_ptr;
240  d_ptr++;
241  x_ptr++;
242  }
243  }
244 }; // End class act_ReLU
245 
246 
253 class act_LReLU: public layer_act
254 {
255 private:
256  DNN_Dtype alpha=(DNN_Dtype)0.01;
257 public:
262  {
263  type = "Leaky ReLU";
264  id = type;
265  }
266 
272  void prop(void)
273  {
274  arma::Mat<DNN_Dtype> X1 = left->get_Y1();
275  for(arma::uword n=0; n<X1.n_elem; n++)
276  {
277  Y1(n) = X1(n)<0 ? X1(n)*alpha: X1(n);
278  }
279  }
280 
286  void prop_mb(void)
287  {
288  DNN_Dtype* x_ptr = left->get_Y_memptr();
289  DNN_Dtype* y_ptr = this->get_Y_memptr();
290  const arma::uword N_ = N_left*N_batch;
291  for(arma::uword n=0; n<N_; n++)
292  {
293  *y_ptr++ = (*x_ptr <0) ? *x_ptr*alpha: *x_ptr;
294  x_ptr++;
295  }
296  }
297 
303  void backprop(void)
304  {
305  DNN_Dtype* x_ptr = this->get_Y_memptr();
306  DNN_Dtype* y_ptr = this->get_Dleft_memptr();
307  DNN_Dtype* d_ptr = right->get_Dleft_memptr();
308  const arma::uword N_ = N_left*N_batch;
309  for(arma::uword n=0; n<N_; n++)
310  {
311  *y_ptr++ = (*x_ptr<0 ) ? *d_ptr*alpha: *d_ptr;
312  d_ptr++;
313  x_ptr++;
314  }
315  }
316 
321  {
322  alpha = a;
323  }
324 
325 }; // End class act_LReLU
326 
327 
334 class act_tanh: public layer_act
335 {
336 private:
337 public:
342  {
343  type = "tanh";
344  id = type;
345  }
346 
352  void prop(void)
353  {
354  Y1 = arma::tanh(*(left->get_Y1_ptr()));
355  }
356 
362  void prop_mb(void)
363  {
364  Y = arma::tanh(*(left->get_Y_ptr()));
365  }
366 
372  void backprop(void)
373  {
374  Dleft = (1-arma::square(Y))%(right->get_Dleft());
375  }
376 }; // End class act_tanh
377 
378 
385 class act_softmax: public layer_act
386 {
387 private:
388 public:
393  {
394  type = "Softmax";
395  id = type;
396  }
397 
403  void prop(void)
404  {
405  Y1 = softmax(*(left->get_Y1_ptr()));
406  }
407 
413  void prop_mb(void)
414  {
415  Y = softmax(*(left->get_Y_ptr()));
416  }
417 
423  void backprop(void)
424  {
425  arma::Mat<DNN_Dtype> dO=*(right->get_Dleft_ptr());
426  for(arma::uword k=0; k<N_batch; k++ )
427  {
428  arma::Mat<DNN_Dtype> jac = -Y.col(k)*(Y.col(k).t()); // Jacobian
429  jac.diag() += Y.col(k);
430  Dleft.col(k) = jac*dO.col(k);
431  }
432  }
433 }; // End class act_softmax
435 } // End namespace dnn
void prop(void)
Forward propagation through layer.
void prop_mb(void)
Forward propagation through layer - mini batch.
std::string type
Layer type string.
void prop(void)
Forward propagation through layer.
Definition: dnn_layer_act.h:93
arma::Mat< DNN_Dtype > Y
Output buffer mini batch [N_right,N_batch].
void prop_mb(void)
Forward propagation through layer - mini batch.
void backprop(void)
Back propagation through layer - mini batch.
virtual void prop_mb(void)=0
Forward propagation - mini batch.
arma::uword N_rows_left
Input rows.
void prop(void)
Forward propagation through layer.
void prop_mb(void)
Forward propagation through layer - mini batch.
void prop_mb(void)
Forward propagation through layer - mini batch.
layer * right
Pointer to next layer.
void backprop(void)
Back propagation through layer - mini batch.
void prop(void)
Forward propagation through layer.
virtual void disp(void)
Display info about activation layer.
Definition: dnn_layer_act.h:59
Leaky ReLU activation class.
virtual arma::Mat< DNN_Dtype > get_Dleft()
Get error buffer - mini batch.
virtual arma::Mat< DNN_Dtype > * get_Y1_ptr(void)
Get output buffer pointer.
void backprop(void)
Back propagation through layer - mini batch.
virtual void backprop(void)=0
Back propagation.
arma::uword N_channels_right
Output channels, number of filters.
virtual void prop(void)=0
Forward propagation.
Softmax activation class.
arma::Mat< DNN_Dtype > Dleft
Error buffer [N_left,N_batch].
act_LReLU(void)
LReLU layer constructor.
Layer base class.
arma::uword N_cols_left
Input cols.
arma::Mat< DNN_Dtype > Y1
Output buffer [N_right,1].
Softplus activation class.
act_tanh(void)
tanh layer constructor
virtual arma::uword get_nrof_outputs(void)
Get total number of layer outputs.
void backprop(void)
Back propagation through layer - mini batch.
virtual arma::Mat< DNN_Dtype > get_Y1(void)
Get output buffer.
virtual void init(void)
Initialize layer.
virtual arma::Mat< DNN_Dtype > * get_Dleft_ptr(void)
Get error buffer pointer - mini batch.
void prop_mb(void)
Forward propagation through layer - mini batch.
DNN_Dtype sigmoid(const DNN_Dtype x)
Sigmoid function - scalar.
void prop_mb(void)
Forward propagation through layer - mini batch.
virtual void disp(void)
Display info about layer.
virtual void init(void)
Initialization method.
Definition: dnn_layer_act.h:42
float DNN_Dtype
Data type used in the network (float or double)
Definition: dnn.h:28
void prop(void)
Forward propagation through layer.
arma::uword N_left
Total size left.
arma::uword N_rows_right
Output rows.
Activation layer base class.
Definition: dnn_layer_act.h:28
void backprop(void)
Back propagation through layer - mini batch.
void prop(void)
Forward propagation through layer.
act_softmax(void)
Softmax layer constructor.
Sigmoid activation class.
Definition: dnn_layer_act.h:74
void backprop(void)
Back propagation through layer - mini batch.
arma::uword N_batch
Mini batch size.
act_softplus(void)
Softplus layer constructor.
ReLU activation class.
Definition: dnn.h:22
arma::uword N_channels_left
Input channels, number of filters.
act_sigmoid(void)
Sigmoid layer constructor.
Definition: dnn_layer_act.h:82
void set_leak(DNN_Dtype a)
Set the LReLU leak parameter.
tanh activation class
virtual DNN_Dtype * get_Dleft_memptr(void)
Get error buffer memory pointer - mini batch.
arma::uword N_right
Total size right.
virtual DNN_Dtype * get_Y1_memptr(void)
Get output buffer memory pointer.
layer * left
Pointer to previous layer.
arma::uword N_cols_right
Output cols.
virtual DNN_Dtype * get_Y_memptr(void)
Get output buffer memory pointer - mini batch.
virtual arma::Mat< DNN_Dtype > * get_Y_ptr(void)
Get output buffer pointer - mini batch.
arma::Mat< DNN_Dtype > softmax(const arma::Mat< DNN_Dtype > &x)
Softmax function - matrix.
act_ReLU(void)
ReLU layer constructor.