|
"""Compare MLP decision boundaries across activation functions.

Trains three small MLP classifiers — identical architecture, differing
only in activation (relu / tanh / logistic) — on the two-moons toy
dataset and plots each model's decision boundary side by side.
"""

import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import make_moons
from sklearn.neural_network import MLPClassifier

# Generate a 2-D synthetic dataset: two interleaving half-circles.
X, y = make_moons(n_samples=200, noise=0.2, random_state=42)

# One classifier per activation function. Architecture, iteration budget,
# and seed are held fixed so the activation is the only varying factor.
classifiers = {
    "relu": MLPClassifier(hidden_layer_sizes=(10, 10), activation="relu", max_iter=2000, random_state=42),
    "tanh": MLPClassifier(hidden_layer_sizes=(10, 10), activation="tanh", max_iter=2000, random_state=42),
    "logistic": MLPClassifier(hidden_layer_sizes=(10, 10), activation="logistic", max_iter=2000, random_state=42),
}

# The evaluation mesh depends only on the data, not on any classifier,
# so build it once instead of rebuilding it on every loop iteration.
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.linspace(x_min, x_max, 200),
                     np.linspace(y_min, y_max, 200))
grid = np.c_[xx.ravel(), yy.ravel()]  # (200*200, 2) points to classify

# One subplot per activation, sharing the same data and mesh.
fig, axes = plt.subplots(1, 3, figsize=(15, 5))

for ax, (name, clf) in zip(axes, classifiers.items()):
    clf.fit(X, y)

    # Predict class labels over the mesh; reshape back to the grid
    # shape that contourf expects.
    Z = clf.predict(grid).reshape(xx.shape)

    # Filled contour = decision regions; scatter = training points.
    ax.contourf(xx, yy, Z, alpha=0.3)
    ax.scatter(X[:, 0], X[:, 1], c=y, edgecolor="k", s=20)
    ax.set_title(f"Activation: {name}")

plt.tight_layout()
plt.show()