Added exponential, ELU, and softplus activation functions #18

Open · wants to merge 1 commit into master
21 changes: 15 additions & 6 deletions KerasWeightsProcessing/convert_weights.py
@@ -14,7 +14,7 @@
 from keras import optimizers
 
 INPUT = ['input']
-ACTIVATIONS = ['relu', 'linear', 'leakyrelu', 'sigmoid']
+ACTIVATIONS = ['elu', 'exponential', 'relu', 'linear', 'leakyrelu', 'softplus', 'sigmoid', 'tanh']
 SUPPORTED_LAYERS = ['dense', 'dropout', 'batchnormalization'] + ACTIVATIONS + INPUT
 
 def txt_to_h5(weights_file_name, output_file_name=''):
@@ -242,12 +242,21 @@ def h5_to_txt(weights_file_name, output_file_name=''):
                 )
             )
             # add information about the activation
-            layer_info.append(
-                info_str.format(
-                    name = activation,
-                    info = 0
+            if (activation == 'elu'):
+                layer_info.append(
+                    info_str.format(
+                        name = activation,
+                        info = 1
+                    )
+                )
-                )
+            else:
+                layer_info.append(
+                    info_str.format(
+                        name = activation,
+                        info = 0
+                    )
+                )
 
         elif class_name == 'batchnormalization':
             # get beta, gamma, moving_mean, moving_variance from dictionary
             for key in sorted(model_weights[name][name].keys()):
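
Note on the branch above: only ELU writes a non-zero value into the activation line of the exported text file, and the 1 presumably stands in for the Keras default alpha of 1.0 that the Fortran side consumes; every other activation keeps 0. The sketch below is not the repository's code, and the '{name}\t{info}\n' template is an assumption standing in for the real info_str defined elsewhere in convert_weights.py.

# Sketch of the emitted activation lines; the info_str template here is an
# assumption, the real one is defined elsewhere in convert_weights.py.
ACTIVATIONS = ['elu', 'exponential', 'relu', 'linear',
               'leakyrelu', 'softplus', 'sigmoid', 'tanh']

def activation_line(activation, info_str='{name}\t{info}\n'):
    # ELU carries one extra value (1 here, presumably the default alpha of 1.0);
    # every other activation carries 0.
    info = 1 if activation == 'elu' else 0
    return info_str.format(name=activation, info=info)

for act in ACTIVATIONS:
    print(activation_line(act), end='')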
59 changes: 59 additions & 0 deletions src/lib/mod_activation.F90
@@ -16,6 +16,9 @@ module mod_activation
   public :: tanhf, tanh_prime
   public :: linear, linear_prime
   public :: leaky_relu, leaky_relu_prime
+  public :: elu, elu_prime
+  public :: exponential, exponential_prime
+  public :: softplus, softplus_prime
 
   interface
     pure function activation_function(x, alpha)
@@ -28,6 +31,46 @@ end function activation_function
 
 contains
 
+  pure function elu(x, alpha) result(res)
+    !! Exponential Linear Unit (ELU) activation function.
+    real(rk), intent(in) :: x(:)
+    real(rk), intent(in) :: alpha
+    real(rk) :: res(size(x))
+    where (x > 0)
+      res = x
+    elsewhere
+      res = alpha * (exp(x) - 1)
+    end where
+  end function elu
+
+  pure function elu_prime(x, alpha) result(res)
+    ! First derivative of the Exponential Linear Unit (ELU) activation function.
+    real(rk), intent(in) :: x(:)
+    real(rk), intent(in) :: alpha
+    real(rk) :: res(size(x))
+    where (x > 0)
+      res = 1
+    elsewhere
+      res = alpha * exp(x)
+    end where
+  end function elu_prime
+
+  pure function exponential(x, alpha) result(res)
+    !! Exponential activation function.
+    real(rk), intent(in) :: x(:)
+    real(rk), intent(in) :: alpha
+    real(rk) :: res(size(x))
+    res = exp(x)
+  end function exponential
+
+  pure function exponential_prime(x, alpha) result(res)
+    !! First derivative of the exponential activation function.
+    real(rk), intent(in) :: x(:)
+    real(rk), intent(in) :: alpha
+    real(rk) :: res(size(x))
+    res = exp(x)
+  end function exponential_prime
+
   pure function gaussian(x, alpha) result(res)
     ! Gaussian activation function.
     real(rk), intent(in) :: x(:)
@@ -122,6 +165,22 @@ pure function sigmoid_prime(x, alpha) result(res)
     res = sigmoid(x, tmp_alpha) * (1 - sigmoid(x, tmp_alpha))
   end function sigmoid_prime
 
+  pure function softplus(x, alpha) result(res)
+    !! Softplus activation function.
+    real(rk), intent(in) :: x(:)
+    real(rk), intent(in) :: alpha
+    real(rk) :: res(size(x))
+    res = log(exp(x) + 1)
+  end function softplus
+
+  pure function softplus_prime(x, alpha) result(res)
+    ! First derivative of the Softplus activation function.
+    real(rk), intent(in) :: x(:)
+    real(rk), intent(in) :: alpha
+    real(rk) :: res(size(x))
+    res = 1 / (1 + exp(-x))
+  end function softplus_prime
+
   pure function step(x, alpha) result(res)
     ! Step activation function.
     real(rk), intent(in) :: x(:)
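
All activations in mod_activation.F90 share the activation_function interface taking (x, alpha); exponential and softplus simply ignore alpha, while elu uses it to scale the negative branch. The NumPy mirror below, checked against a central finite difference, is a verification sketch only and is not part of this pull request.

# NumPy mirror of the new Fortran activations, cross-checked against a
# central finite difference; a verification sketch, not code from this PR.
import numpy as np

def elu(x, alpha=1.0):
    return np.where(x > 0, x, alpha * (np.exp(x) - 1))

def elu_prime(x, alpha=1.0):
    return np.where(x > 0, 1.0, alpha * np.exp(x))

def exponential(x):
    return np.exp(x)

def softplus(x):
    return np.log(np.exp(x) + 1)

def softplus_prime(x):
    return 1 / (1 + np.exp(-x))

x = np.linspace(-3.0, 3.0, 101)
h = 1e-6
for f, fprime in [(elu, elu_prime), (exponential, exponential),
                  (softplus, softplus_prime)]:
    numeric = (f(x + h) - f(x - h)) / (2 * h)
    assert np.allclose(fprime(x), numeric, atol=1e-5)
print('derivatives match finite differences')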
9 changes: 9 additions & 0 deletions src/lib/mod_dense_layer.F90
@@ -59,6 +59,12 @@ type(Dense) function constructor(this_size, next_size, activation, alpha) result
 
     ! assign activation function
     select case(trim(activation))
+      case('elu')
+        layer % activation => elu
+        layer % activation_prime => elu_prime
+      case('exponential')
+        layer % activation => exponential
+        layer % activation_prime => exponential_prime
       case('gaussian')
         layer % activation => gaussian
         layer % activation_prime => gaussian_prime
@@ -71,6 +77,9 @@ type(Dense) function constructor(this_size, next_size, activation, alpha) result
       case('sigmoid')
         layer % activation => sigmoid
         layer % activation_prime => sigmoid_prime
+      case('softplus')
+        layer % activation => softplus
+        layer % activation_prime => softplus_prime
       case('step')
         layer % activation => step
         layer % activation_prime => step_prime
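
The constructor above dispatches on the activation name with a plain select case, binding a function and its derivative to the layer's procedure pointers. The Python sketch below illustrates the same name-to-pair mapping with a dictionary; it is an illustration only (alpha is fixed at 1 for brevity), not code from this repository.

# Illustration of the select case dispatch: map an activation name to a
# (function, derivative) pair and fail loudly on unknown names. A sketch only.
import numpy as np

ACTIVATION_TABLE = {
    'elu':         (lambda x: np.where(x > 0, x, np.exp(x) - 1),   # alpha fixed at 1
                    lambda x: np.where(x > 0, 1.0, np.exp(x))),
    'exponential': (np.exp, np.exp),
    'softplus':    (lambda x: np.log(np.exp(x) + 1),
                    lambda x: 1 / (1 + np.exp(-x))),
}

def get_activation(name):
    try:
        return ACTIVATION_TABLE[name.strip().lower()]
    except KeyError:
        raise ValueError('unsupported activation: ' + name)

activation, activation_prime = get_activation('softplus')
print(activation(np.array([-1.0, 0.0, 1.0])))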