activations.h
#ifndef ACTIVATIONS_H
#define ACTIVATIONS_H
#include "darknet.h"
#include "cuda.h"
#include "math.h"

ACTIVATION get_activation(char *s);

char *get_activation_string(ACTIVATION a);
float activate(float x, ACTIVATION a);
float gradient(float x, ACTIVATION a);
void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta);
void activate_array(float *x, const int n, const ACTIVATION a);
#ifdef GPU
void activate_array_gpu(float *x, int n, ACTIVATION a);
void gradient_array_gpu(float *x, int n, ACTIVATION a, float *delta);
#endif

static inline float stair_activate(float x)
{
    int n = floor(x);
    if (n%2 == 0) return floor(x/2.);
    else return (x - n) + floor(x/2.);
}
static inline float hardtan_activate(float x)
{
    if (x < -1) return -1;
    if (x > 1) return 1;
    return x;
}
static inline float linear_activate(float x){return x;}
static inline float logistic_activate(float x){return 1./(1. + exp(-x));}
static inline float loggy_activate(float x){return 2./(1. + exp(-x)) - 1;}
static inline float relu_activate(float x){return x*(x>0);}
static inline float elu_activate(float x){return (x >= 0)*x + (x < 0)*(exp(x)-1);}
static inline float selu_activate(float x){return (x >= 0)*1.0507*x + (x < 0)*1.0507*1.6732*(exp(x)-1);}
static inline float relie_activate(float x){return (x>0) ? x : .01*x;}
static inline float ramp_activate(float x){return x*(x>0)+.1*x;}
static inline float leaky_activate(float x){return (x>0) ? x : .1*x;}
static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
static inline float plse_activate(float x)
{
    if(x < -4) return .01 * (x + 4);
    if(x > 4) return .01 * (x - 4) + 1;
    return .125*x + .5;
}

static inline float lhtan_activate(float x)
{
    if(x < 0) return .001*x;
    if(x > 1) return .001*(x-1) + 1;
    return x;
}
static inline float lhtan_gradient(float x)
{
    if(x > 0 && x < 1) return 1;
    return .001;
}

/* Note: the *_gradient helpers in this file are evaluated at the
 * *activated output* y, not at the pre-activation input, which is why
 * e.g. logistic_gradient returns (1-x)*x and tanh_gradient returns
 * 1-x*x. */
static inline float hardtan_gradient(float x)
{
    if (x > -1 && x < 1) return 1;
    return 0;
}
static inline float linear_gradient(float x){return 1;}
static inline float logistic_gradient(float x){return (1-x)*x;}
static inline float loggy_gradient(float x)
{
    float y = (x+1.)/2.;
    return 2*(1-y)*y;
}
static inline float stair_gradient(float x)
{
    if (floor(x) == x) return 0;
    return 1;
}
static inline float relu_gradient(float x){return (x>0);}
static inline float elu_gradient(float x){return (x >= 0) + (x < 0)*(x + 1);}
static inline float selu_gradient(float x){return (x >= 0)*1.0507 + (x < 0)*(x + 1.0507*1.6732);}
static inline float relie_gradient(float x){return (x>0) ? 1 : .01;}
static inline float ramp_gradient(float x){return (x>0)+.1;}
static inline float leaky_gradient(float x){return (x>0) ? 1 : .1;}
static inline float tanh_gradient(float x){return 1-x*x;}
static inline float plse_gradient(float x){return (x < 0 || x > 1) ? .01 : .125;}

#endif
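
For reference, a minimal usage sketch of this header's array API (not part of the file itself). It assumes a build inside the darknet source tree, linkage against activations.c, and that LEAKY is one of the ACTIVATION enum values declared in darknet.h: activate_array applies the chosen activation in place, and gradient_array scales the incoming deltas by the local gradient.

#include <stdio.h>
#include "activations.h"

int main(void)
{
    /* Four pre-activation values and their incoming gradients. */
    float x[4]     = {-2.0f, -0.5f, 0.5f, 2.0f};
    float delta[4] = { 1.0f,  1.0f, 1.0f, 1.0f};

    /* Apply the activation in place: x[i] becomes leaky_activate(x[i]). */
    activate_array(x, 4, LEAKY);

    /* Scale each delta by the local gradient, evaluated at the
     * activated output now stored in x (darknet's convention). */
    gradient_array(x, 4, LEAKY, delta);

    for (int i = 0; i < 4; ++i)
        printf("y = %f, dy = %f\n", x[i], delta[i]);
    return 0;
}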
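
Because the gradient helpers take the activated output rather than the raw input, a quick way to sanity-check one of them is a central finite difference through the matching activate function. A sketch, again assuming the darknet tree for the includes:

#include <stdio.h>
#include "activations.h"

int main(void)
{
    float x = 0.3f, eps = 1e-3f;

    /* Forward: y = sigmoid(x). */
    float y = logistic_activate(x);

    /* Analytic gradient, evaluated at the output y: y*(1-y). */
    float analytic = logistic_gradient(y);

    /* Central difference taken on the input x. */
    float numeric = (logistic_activate(x + eps) - logistic_activate(x - eps)) / (2*eps);

    /* The two values should agree closely (within float precision). */
    printf("analytic = %f, numeric = %f\n", analytic, numeric);
    return 0;
}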