Exponential linear unit (ELU)

Description

In the context of artificial neural networks, the Exponential linear unit (ELU) is an activation function defined as [1]:

f(x) = \begin{cases} x, & x > 0 \\ \alpha \, (e^{x} - 1), & x \le 0 \end{cases}
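For example, with α = 1 positive inputs pass through unchanged while negative inputs saturate toward −α: f(2) = 2 and f(−1) = e^{−1} − 1 ≈ −0.632. The Python snippet below plots the function: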
from matplotlib import pyplot as plt
import numpy as np

def elu_forward(x, alpha=1.0):
    # ELU: identity for x >= 0, alpha * (exp(x) - 1) for x < 0
    return (x >= 0.0) * x + (x < 0.0) * alpha * (np.exp(x) - 1.0)

x = np.arange(-7, 7, 0.01)
y = elu_forward(x)

plt.style.use('fivethirtyeight')
fig, ax = plt.subplots()
ax.plot(x, y)
ax.set_title("Plot of the ELU")
plt.show()
TensorFlow form of the Exponential linear unit (ELU):
tf.nn.elu(
    features,
    name=None
)
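A minimal usage sketch of tf.nn.elu (assuming TensorFlow 2.x with eager execution; the input values are illustrative):

import tensorflow as tf

x = tf.constant([-2.0, -0.5, 0.0, 1.5])
y = tf.nn.elu(x)            # element-wise ELU with alpha = 1
print(y.numpy())            # negative entries map to exp(x) - 1, non-negative entries pass through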
PyTorch form of the Exponential linear unit (ELU):
class torch.nn.ELU(alpha=1.0, inplace=False)
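A minimal usage sketch of torch.nn.ELU (assuming a recent PyTorch release; the input values are illustrative):

import torch

m = torch.nn.ELU(alpha=1.0)                 # module form of the ELU
x = torch.tensor([-2.0, -0.5, 0.0, 1.5])
print(m(x))                                 # equivalent to torch.nn.functional.elu(x)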
Forward propagation EXAMPLE
/* ANSI C89, C99, C11 compliance */
/* The following example shows the usage of the Exponential linear unit (ELU) function in forward propagation. */
#include <stdio.h>
#include <math.h>

/* ELU forward pass: x for x > 0, alpha * (exp(x) - 1) for x <= 0 */
float elu_forward(float x, float alpha) {
    float r_elu = (x > 0.0f) * x + (x <= 0.0f) * alpha * ((float)exp(x) - 1.0f);
    return r_elu;
}

int main() {
    float r_x, r_y;
    r_x = 0.1f;
    r_y = elu_forward(r_x, 1.0f);
    printf("ELU forward propagation for value x: %f\n", r_y);
    return 0;
}
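Since r_x = 0.1 is positive, the ELU passes it through unchanged and the program prints 0.100000.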
Backward propagation EXAMPLE
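The backward pass uses the derivative of the ELU, which for x <= 0 can be rewritten in terms of the forward output:

f'(x) = \begin{cases} 1, & x > 0 \\ \alpha \, e^{x} = f(x) + \alpha, & x \le 0 \end{cases}

This identity is what lets the code below reuse r_elu when computing the gradient.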
/* ANSI C89, C99, C11 compliance */
/* The following example shows the usage of the Exponential linear unit (ELU) function in backward propagation. */
#include <stdio.h>
#include <math.h>
/* ELU backward pass: the derivative is 1 for x > 0 and alpha * exp(x) = ELU(x) + alpha for x <= 0 */
float elu_backward(float x, float alpha) {
    float r_elu = (x > 0.0f) * x + (x <= 0.0f) * alpha * ((float)exp(x) - 1.0f);
    return (x > 0.0f) + (x <= 0.0f) * (r_elu + alpha);
}

int main() {
    float r_x, r_y;
    r_x = 0.1f;
    r_y = elu_backward(r_x, 1.0f);
    printf("ELU backward propagation for value x: %f\n", r_y);
    return 0;
}
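As a sanity check (a sketch in Python/NumPy, not part of the original C example), the analytic derivative can be compared against a central finite difference:

import numpy as np

def elu(x, alpha=1.0):
    return x if x > 0.0 else alpha * (np.exp(x) - 1.0)

def elu_grad(x, alpha=1.0):
    # 1 for x > 0, alpha * exp(x) = elu(x) + alpha for x <= 0
    return 1.0 if x > 0.0 else elu(x, alpha) + alpha

eps = 1e-5
for x in (-2.0, -0.5, 0.1, 3.0):
    numeric = (elu(x + eps) - elu(x - eps)) / (2.0 * eps)
    print(x, elu_grad(x), numeric)   # the two values should agree closely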
REFERENCES:
[1] Djork-Arné Clevert, Thomas Unterthiner, Sepp Hochreiter. "Fast and Accurate Deep Network Learning by Exponential Linear Units (ELUs)". arXiv:1511.07289, 2015.