activations.h
#ifndef ACTIVATIONS_H
#define ACTIVATIONS_H
#include "cuda.h"
#include "math.h"

/* Activation functions supported by the framework. */
typedef enum{
    LOGISTIC, RELU, RELIE, LINEAR, RAMP, TANH, PLSE, LEAKY, ELU, LOGGY, STAIR, HARDTAN, LHTAN
}ACTIVATION;

/* Map between an activation's name (as written in .cfg files) and its enum value. */
ACTIVATION get_activation(char *s);
char *get_activation_string(ACTIVATION a);

/* Scalar and array forms of the activation and its derivative. */
float activate(float x, ACTIVATION a);
float gradient(float x, ACTIVATION a);
void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta);
void activate_array(float *x, const int n, const ACTIVATION a);
#ifdef GPU
void activate_array_ongpu(float *x, int n, ACTIVATION a);
void gradient_array_ongpu(float *x, int n, ACTIVATION a, float *delta);
#endif

/* Per-element activation functions. */
static inline float stair_activate(float x)
{
    int n = floor(x);
    if (n%2 == 0) return floor(x/2.);
    else return (x - n) + floor(x/2.);
}
static inline float hardtan_activate(float x)
{
    if (x < -1) return -1;
    if (x > 1) return 1;
    return x;
}
static inline float linear_activate(float x){return x;}
static inline float logistic_activate(float x){return 1./(1. + exp(-x));}
static inline float loggy_activate(float x){return 2./(1. + exp(-x)) - 1;}
static inline float relu_activate(float x){return x*(x>0);}
static inline float elu_activate(float x){return (x >= 0)*x + (x < 0)*(exp(x)-1);}
static inline float relie_activate(float x){return (x>0) ? x : .01*x;}
static inline float ramp_activate(float x){return x*(x>0)+.1*x;}
static inline float leaky_activate(float x){return (x>0) ? x : .1*x;}
static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
static inline float plse_activate(float x)
{
    if(x < -4) return .01 * (x + 4);
    if(x > 4)  return .01 * (x - 4) + 1;
    return .125*x + .5;
}
static inline float lhtan_activate(float x)
{
    if(x < 0) return .001*x;
    if(x > 1) return .001*(x-1) + 1;
    return x;
}

/* Per-element gradients. For LOGISTIC, LOGGY and TANH the derivative is
 * written in terms of the activation's output y = f(x), not the raw input
 * (e.g. logistic_gradient(y) = y*(1-y)). */
static inline float lhtan_gradient(float x)
{
    if(x > 0 && x < 1) return 1;
    return .001;
}
static inline float hardtan_gradient(float x)
{
    if (x > -1 && x < 1) return 1;
    return 0;
}
static inline float linear_gradient(float x){return 1;}
static inline float logistic_gradient(float x){return (1-x)*x;}
static inline float loggy_gradient(float x)
{
    float y = (x+1.)/2.;
    return 2*(1-y)*y;
}
static inline float stair_gradient(float x)
{
    if (floor(x) == x) return 0;
    return 1;
}
static inline float relu_gradient(float x){return (x>0);}
static inline float elu_gradient(float x){return (x >= 0) + (x < 0)*(x + 1);}
static inline float relie_gradient(float x){return (x>0) ? 1 : .01;}
static inline float ramp_gradient(float x){return (x>0)+.1;}
static inline float leaky_gradient(float x){return (x>0) ? 1 : .1;}
static inline float tanh_gradient(float x){return 1-x*x;}
static inline float plse_gradient(float x){return (x < 0 || x > 1) ? .01 : .125;}

#endif
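The scalar entry points declared above, activate() and gradient(), are implemented in the accompanying source file (not shown on this page). A plausible sketch of that dispatch, assuming it simply switches on the ACTIVATION enum and forwards to the inline helpers defined in this header:

/* Sketch only: the real dispatch lives in the library's activations.c,
 * which is not part of this header. */
#include "activations.h"

float activate(float x, ACTIVATION a)
{
    switch(a){
        case LINEAR:   return linear_activate(x);
        case LOGISTIC: return logistic_activate(x);
        case LOGGY:    return loggy_activate(x);
        case RELU:     return relu_activate(x);
        case ELU:      return elu_activate(x);
        case RELIE:    return relie_activate(x);
        case RAMP:     return ramp_activate(x);
        case LEAKY:    return leaky_activate(x);
        case TANH:     return tanh_activate(x);
        case PLSE:     return plse_activate(x);
        case STAIR:    return stair_activate(x);
        case HARDTAN:  return hardtan_activate(x);
        case LHTAN:    return lhtan_activate(x);
    }
    return 0;
}

/* activate_array() can then be a simple in-place loop over the buffer. */
void activate_array(float *x, const int n, const ACTIVATION a)
{
    int i;
    for(i = 0; i < n; ++i){
        x[i] = activate(x[i], a);
    }
}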

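For callers, the usual flow is to resolve the activation once with get_activation() (the string names follow the .cfg convention, e.g. "leaky"), apply activate_array() to a layer's output during the forward pass, and pass that same output to gradient_array() during the backward pass so the stored deltas are scaled by the derivative. A minimal usage sketch, assuming the library implementation is linked in:

/* Minimal usage sketch; assumes the accompanying activations.c is linked. */
#include <stdio.h>
#include "activations.h"

int main(void)
{
    ACTIVATION a = get_activation("leaky");   /* parse the name used in .cfg files */

    float output[4] = {-2.0f, -0.5f, 0.5f, 2.0f};
    float delta[4]  = { 1.0f,  1.0f, 1.0f, 1.0f};
    int n = 4;

    /* Forward pass: apply the activation in place. */
    activate_array(output, n, a);

    /* Backward pass: scale the incoming deltas by the derivative,
     * evaluated on the activated outputs. */
    gradient_array(output, n, a, delta);

    int i;
    for(i = 0; i < n; ++i){
        printf("y=%f dy=%f\n", output[i], delta[i]);
    }
    return 0;
}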

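One detail worth calling out: for the smooth activations the gradient helpers expect the activation's output rather than its input. For LOGISTIC, y = 1/(1 + exp(-x)) and dy/dx = y*(1-y), which is exactly logistic_gradient(y); likewise tanh_gradient(y) = 1 - y*y. An illustrative finite-difference check using only the inline helpers from this header:

/* Quick numerical check that logistic_gradient expects the activation's
 * output: compare (f(x+h)-f(x-h))/(2h) against logistic_gradient(f(x)). */
#include <stdio.h>
#include "activations.h"

int main(void)
{
    float x = 0.7f, h = 1e-3f;
    float y = logistic_activate(x);
    float numeric  = (logistic_activate(x + h) - logistic_activate(x - h)) / (2*h);
    float analytic = logistic_gradient(y);   /* y*(1-y): takes the output, not x */
    printf("numeric=%f analytic=%f\n", numeric, analytic);
    return 0;
}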