Commit 84ca977: first commit

12 files changed, +133 additions, 0 deletions

HyperbolicTangentFunction.py

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
import numpy as np
import matplotlib.pyplot as plt

# Define the Tanh Activation Function
def tanh(x):
    return np.tanh(x)

# Generate input values
x = np.linspace(-10, 10, 1000)
y = tanh(x)

# Plot the Tanh Activation Function
plt.figure(figsize=(8, 6))
plt.plot(x, y, label='f(x) = tanh(x)', color='purple')
plt.title('Tanh Activation Function')
plt.xlabel('Input (x)')
plt.ylabel('Output (f(x))')
plt.axhline(0, color='grey', lw=0.5)
plt.axvline(0, color='grey', lw=0.5)
plt.grid(True, linestyle='--', alpha=0.5)
plt.legend()
plt.show()
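
A quick numerical check (illustrative, not part of the commit) can confirm the identity tanh(x) = 2*sigmoid(2x) - 1, reusing the x and y arrays defined above:

# Optional sanity check (assumption: added for illustration, not in the original file).
sigmoid_2x = 1 / (1 + np.exp(-2 * x))      # logistic sigmoid evaluated at 2x
assert np.allclose(y, 2 * sigmoid_2x - 1)  # tanh(x) == 2*sigmoid(2x) - 1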

Leaky ReLU.png (binary image, 40.6 KB)

Leaky ReLU.py

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
import numpy as np
import matplotlib.pyplot as plt

# Define the Leaky ReLU Activation Function
def leaky_relu(x, alpha=0.01):
    return np.where(x > 0, x, alpha * x)

# Generate input values
x = np.linspace(-10, 10, 1000)
y = leaky_relu(x, alpha=0.01)

# Plot the Leaky ReLU Activation Function
plt.figure(figsize=(8, 6))
plt.plot(x, y, label='f(x) = Leaky ReLU (α=0.01)', color='red')
plt.title('Leaky ReLU Activation Function')
plt.xlabel('Input (x)')
plt.ylabel('Output (f(x))')
plt.axhline(0, color='grey', lw=0.5)
plt.axvline(0, color='grey', lw=0.5)
plt.grid(True, linestyle='--', alpha=0.5)
plt.legend()
plt.show()
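
For reference, a small usage check (illustrative, not part of the commit) showing how the alpha slope scales negative inputs while positives pass through unchanged:

# Illustrative check (assumption: added for illustration, not in the original file).
sample = np.array([-5.0, -1.0, 0.0, 2.0])
print(leaky_relu(sample, alpha=0.01))  # expected: [-0.05 -0.01  0.    2.  ]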

Linear.png (binary image, 44 KB)

ReLU Function Chart.png (binary image, 38.3 KB)

RectifiedLinearUnit.py

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
import numpy as np
import matplotlib.pyplot as plt

# Define the ReLU Activation Function
def relu(x):
    return np.maximum(0, x)

# Generate input values
x = np.linspace(-10, 10, 1000)
y = relu(x)

# Plot the ReLU Activation Function
plt.figure(figsize=(8, 6))
plt.plot(x, y, label='f(x) = max(0, x)', color='orange')
plt.title('ReLU Activation Function')
plt.xlabel('Input (x)')
plt.ylabel('Output (f(x))')
plt.axhline(0, color='grey', lw=0.5)
plt.axvline(0, color='grey', lw=0.5)
plt.grid(True, linestyle='--', alpha=0.5)
plt.legend()
plt.show()
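
A short sanity check (illustrative, not part of the commit) showing that negative inputs are clamped to zero:

# Illustrative check (assumption: added for illustration, not in the original file).
sample = np.array([-3.0, -0.5, 0.0, 4.0])
print(relu(sample))  # expected: [0.  0.  0.  4.]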

Sigmoid Function Chart.png (binary image, 41.8 KB)

Sigmoid.py

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
import numpy as np
import matplotlib.pyplot as plt

# Define the Sigmoid Activation Function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

# Generate input values
x = np.linspace(-10, 10, 1000)
y = sigmoid(x)

# Plot the Sigmoid Activation Function
plt.figure(figsize=(8, 6))
plt.plot(x, y, label='f(x) = 1 / (1 + e^(-x))', color='g')
plt.title('Sigmoid Activation Function')
plt.xlabel('Input (x)')
plt.ylabel('Output (f(x))')
plt.axhline(0, color='grey', lw=0.5)
plt.axvline(0, color='grey', lw=0.5)
plt.grid(True, linestyle='--', alpha=0.5)
plt.legend()
plt.show()
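
Note that np.exp(-x) can overflow (with a RuntimeWarning) for inputs far more negative than the range used here; a hedged alternative, if SciPy is available, is scipy.special.expit, which evaluates the same logistic function in a numerically stable way:

# Optional stable alternative (assumption: requires SciPy, not used in the original commit).
from scipy.special import expit
assert np.allclose(y, expit(x))  # expit(x) == 1 / (1 + exp(-x))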

Softmax.png (binary image, 36.1 KB)

Softmax.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
import numpy as np
2+
import matplotlib.pyplot as plt
3+
4+
# Define the Softmax Activation Function
5+
def softmax(x):
6+
exp_x = np.exp(x - np.max(x)) # For numerical stability
7+
return exp_x / np.sum(exp_x)
8+
9+
# Generate input values (example logits)
10+
x = np.linspace(-5, 5, 100)
11+
y = softmax(x)
12+
13+
# Plot the Softmax Activation Function
14+
plt.figure(figsize=(8, 6))
15+
plt.plot(x, y, label='Softmax Function', color='gray')
16+
plt.title('Softmax Activation Function')
17+
plt.xlabel('Input (x)')
18+
plt.ylabel('Probability (f(x))')
19+
plt.axhline(0, color='grey', lw=0.5)
20+
plt.axvline(0, color='grey', lw=0.5)
21+
plt.grid(True, linestyle='--', alpha=0.5)
22+
plt.legend()
23+
plt.show()
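
Because softmax normalizes across the whole input vector, the plotted curve treats the 100 linspace points as one logit vector, so the individual probabilities are small and jointly sum to 1. A more typical usage (illustrative, not part of the commit) applies it to a short vector of class logits:

# Illustrative usage (assumption: added for illustration, not in the original file).
logits = np.array([2.0, 1.0, 0.1])
probs = softmax(logits)
print(probs)        # approximately [0.659 0.242 0.099]
print(probs.sum())  # sums to 1 (up to floating-point rounding)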

Tanh Function Chart.png (binary image, 43.6 KB)

linear.py

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
import numpy as np
import matplotlib.pyplot as plt

# Define the Linear Activation Function
def linear_activation(x):
    return x

# Generate input values
x = np.linspace(-10, 10, 100)
y = linear_activation(x)

# Plot the Linear Activation Function
plt.figure(figsize=(8, 6))
plt.plot(x, y, label='f(x) = x', color='b')
plt.title('Linear Activation Function')
plt.xlabel('Input (x)')
plt.ylabel('Output (f(x))')
plt.axhline(0, color='grey', lw=0.5)
plt.axvline(0, color='grey', lw=0.5)
plt.grid(True, linestyle='--', alpha=0.5)
plt.legend()
plt.show()
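
As a small follow-up (illustrative, not part of the commit), the identity activation has a constant slope of 1, which a numerical gradient of the arrays above confirms:

# Illustrative check (assumption: added for illustration, not in the original file).
slope = np.gradient(y, x)        # numerical derivative of f(x) = x
assert np.allclose(slope, 1.0)   # constant slope of 1 everywhere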
