From f047cfff99e00e28c02eb59b6d32386c122f9af6 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sun, 8 Mar 2015 11:31:12 -0700
Subject: [PATCH] rename sigmoid to logistic

---
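
Reviewer notes (everything between the "---" above and the first diff
header is commentary that `git am` discards, so the patch still applies
cleanly): this is a pure rename of "sigmoid" to "logistic" in the CUDA
kernels, the inline CPU helpers, the ACTIVATION enum, and the parser
strings; no arithmetic changes. Because LOGISTIC takes SIGMOID's slot
at the head of the enum, the integer values of RELU, LINEAR, RAMP, and
TANH are unchanged, so anything that stored the enum as an int stays
valid.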
 src/activation_kernels.cu | 12 ++++++------
 src/activations.c         | 14 +++++++-------
 src/activations.h         |  6 +++---
 src/detection_layer.c     |  4 ++--
 src/parser.c              |  6 +++---
 5 files changed, 21 insertions(+), 21 deletions(-)
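
Config compatibility: get_activation() no longer matches the string
"sigmoid", so .cfg files that set activation=sigmoid explicitly will
fall through to whatever default handling get_activation() has (not
shown in this hunk) and should be updated to the new name. A
hypothetical layer stanza after this patch, using only keys that
parse_connected() reads:

    [connected]
    output=256
    activation=logistic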

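One convention worth noting for reviewers: logistic_gradient() (like
the old sigmoid_gradient() it replaces) takes the *activated output*,
not the raw input, since for y = 1/(1+exp(-x)) the derivative is
dy/dx = y*(1-y); the detection_layer.c hunks below follow this by
passing layer.output to gradient_array(). A minimal standalone sketch
of the call pattern (the two helpers are copied from this patch; the
main() driver is illustrative only, not part of darknet):

    #include <math.h>
    #include <stdio.h>

    static inline float logistic_activate(float x){return 1./(1. + exp(-x));}
    static inline float logistic_gradient(float x){return (1-x)*x;}

    int main()
    {
        float x = 0.5f;
        float y = logistic_activate(x);   /* forward pass: y ~= 0.6225 */
        float g = logistic_gradient(y);   /* pass y, not x: g = y*(1-y) */
        printf("y = %f  dy/dx = %f\n", y, g);
        return 0;
    }
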
diff --git a/src/activation_kernels.cu b/src/activation_kernels.cu
index a15d64b..5ee1524 100644
--- a/src/activation_kernels.cu
+++ b/src/activation_kernels.cu
@@ -4,13 +4,13 @@ extern "C" {
 }
 
 __device__ float linear_activate_kernel(float x){return x;}
-__device__ float sigmoid_activate_kernel(float x){return 1./(1. + exp(-x));}
+__device__ float logistic_activate_kernel(float x){return 1./(1. + exp(-x));}
 __device__ float relu_activate_kernel(float x){return x*(x>0);}
 __device__ float ramp_activate_kernel(float x){return x*(x>0)+.1*x;}
 __device__ float tanh_activate_kernel(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
  
 __device__ float linear_gradient_kernel(float x){return 1;}
-__device__ float sigmoid_gradient_kernel(float x){return (1-x)*x;}
+__device__ float logistic_gradient_kernel(float x){return (1-x)*x;}
 __device__ float relu_gradient_kernel(float x){return (x>0);}
 __device__ float ramp_gradient_kernel(float x){return (x>0)+.1;}
 __device__ float tanh_gradient_kernel(float x){return 1-x*x;}
@@ -20,8 +20,8 @@ __device__ float activate_kernel(float x, ACTIVATION a)
     switch(a){
         case LINEAR:
             return linear_activate_kernel(x);
-        case SIGMOID:
-            return sigmoid_activate_kernel(x);
+        case LOGISTIC:
+            return logistic_activate_kernel(x);
         case RELU:
             return relu_activate_kernel(x);
         case RAMP:
@@ -37,8 +37,8 @@ __device__ float gradient_kernel(float x, ACTIVATION a)
     switch(a){
         case LINEAR:
             return linear_gradient_kernel(x);
-        case SIGMOID:
-            return sigmoid_gradient_kernel(x);
+        case LOGISTIC:
+            return logistic_gradient_kernel(x);
         case RELU:
             return relu_gradient_kernel(x);
         case RAMP:
diff --git a/src/activations.c b/src/activations.c
index 4689046..7da5ce2 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -8,8 +8,8 @@
 char *get_activation_string(ACTIVATION a)
 {
     switch(a){
-        case SIGMOID:
-            return "sigmoid";
+        case LOGISTIC:
+            return "logistic";
         case RELU:
             return "relu";
         case RAMP:
@@ -26,7 +26,7 @@ char *get_activation_string(ACTIVATION a)
 
 ACTIVATION get_activation(char *s)
 {
-    if (strcmp(s, "sigmoid")==0) return SIGMOID;
+    if (strcmp(s, "logistic")==0) return LOGISTIC;
     if (strcmp(s, "relu")==0) return RELU;
     if (strcmp(s, "linear")==0) return LINEAR;
     if (strcmp(s, "ramp")==0) return RAMP;
@@ -40,8 +40,8 @@ float activate(float x, ACTIVATION a)
     switch(a){
         case LINEAR:
             return linear_activate(x);
-        case SIGMOID:
-            return sigmoid_activate(x);
+        case LOGISTIC:
+            return logistic_activate(x);
         case RELU:
             return relu_activate(x);
         case RAMP:
@@ -65,8 +65,8 @@ float gradient(float x, ACTIVATION a)
     switch(a){
         case LINEAR:
             return linear_gradient(x);
-        case SIGMOID:
-            return sigmoid_gradient(x);
+        case LOGISTIC:
+            return logistic_gradient(x);
         case RELU:
             return relu_gradient(x);
         case RAMP:
diff --git a/src/activations.h b/src/activations.h
index 337e5f1..0cb81af 100644
--- a/src/activations.h
+++ b/src/activations.h
@@ -3,7 +3,7 @@
 #define ACTIVATIONS_H
 
 typedef enum{
-    SIGMOID, RELU, LINEAR, RAMP, TANH
+    LOGISTIC, RELU, LINEAR, RAMP, TANH
 }ACTIVATION;
 
 ACTIVATION get_activation(char *s);
@@ -19,13 +19,13 @@ void gradient_array_ongpu(float *x, int n, ACTIVATION a, float *delta);
 #endif
 
 static inline float linear_activate(float x){return x;}
-static inline float sigmoid_activate(float x){return 1./(1. + exp(-x));}
+static inline float logistic_activate(float x){return 1./(1. + exp(-x));}
 static inline float relu_activate(float x){return x*(x>0);}
 static inline float ramp_activate(float x){return x*(x>0)+.1*x;}
 static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
 
 static inline float linear_gradient(float x){return 1;}
-static inline float sigmoid_gradient(float x){return (1-x)*x;}
+static inline float logistic_gradient(float x){return (1-x)*x;}
 static inline float relu_gradient(float x){return (x>0);}
 static inline float ramp_gradient(float x){return (x>0)+.1;}
 static inline float tanh_gradient(float x){return 1-x*x;}
diff --git a/src/detection_layer.c b/src/detection_layer.c
index d3cc1bd..68d151a 100644
--- a/src/detection_layer.c
+++ b/src/detection_layer.c
@@ -53,7 +53,7 @@ void forward_detection_layer(const detection_layer layer, float *in, float *trut
             layer.output[out_i++] = scale*in[in_i++];
         }
         softmax_array(layer.output + out_i - layer.classes, layer.classes, layer.output + out_i - layer.classes);
-        activate_array(in+in_i, layer.coords, SIGMOID);
+        activate_array(in+in_i, layer.coords, LOGISTIC);
         for(j = 0; j < layer.coords; ++j){
             layer.output[out_i++] = mask*in[in_i++];
         }
@@ -75,7 +75,7 @@ void backward_detection_layer(const detection_layer layer, float *in, float *del
             delta[in_i++] = scale*layer.delta[out_i++];
         }
         
-        gradient_array(layer.output + out_i, layer.coords, SIGMOID, layer.delta + out_i);
+        gradient_array(layer.output + out_i, layer.coords, LOGISTIC, layer.delta + out_i);
         for(j = 0; j < layer.coords; ++j){
             delta[in_i++] = layer.delta[out_i++];
         }
diff --git a/src/parser.c b/src/parser.c
index 0ee73a1..7b1057e 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -76,7 +76,7 @@ deconvolutional_layer *parse_deconvolutional(list *options, network *net, int co
     int n = option_find_int(options, "filters",1);
     int size = option_find_int(options, "size",1);
     int stride = option_find_int(options, "stride",1);
-    char *activation_s = option_find_str(options, "activation", "sigmoid");
+    char *activation_s = option_find_str(options, "activation", "logistic");
     ACTIVATION activation = get_activation(activation_s);
     if(count == 0){
         learning_rate = option_find_float(options, "learning_rate", .001);
@@ -120,7 +120,7 @@ convolutional_layer *parse_convolutional(list *options, network *net, int count)
     int size = option_find_int(options, "size",1);
     int stride = option_find_int(options, "stride",1);
     int pad = option_find_int(options, "pad",0);
-    char *activation_s = option_find_str(options, "activation", "sigmoid");
+    char *activation_s = option_find_str(options, "activation", "logistic");
     ACTIVATION activation = get_activation(activation_s);
     if(count == 0){
         learning_rate = option_find_float(options, "learning_rate", .001);
@@ -161,7 +161,7 @@ connected_layer *parse_connected(list *options, network *net, int count)
     int input;
     float learning_rate, momentum, decay;
     int output = option_find_int(options, "output",1);
-    char *activation_s = option_find_str(options, "activation", "sigmoid");
+    char *activation_s = option_find_str(options, "activation", "logistic");
     ACTIVATION activation = get_activation(activation_s);
     if(count == 0){
         input = option_find_int(options, "input",1);
-- 
GitLab