# Jacobians, Hessians, auto-differentiation — the math powering backpropagation
# Topic: ML-Core
import numpy as np
# --- Gradient of the MSE loss in 5 dimensions ---
print("=" * 55)
print("GRADIENT VECTORS IN HIGH DIMENSIONS")
print("=" * 55)

# Synthetic linear-regression data: y = X @ true_w + Gaussian noise.
np.random.seed(42)
n_samples, n_features = 100, 5
X = np.random.randn(n_samples, n_features)
true_w = np.array([2, -1, 0.5, 3, -0.5])
y = X @ true_w + np.random.randn(n_samples) * 0.1

# Evaluate the analytical MSE gradient (2/N) * Xᵀ(Xw - y) at the origin.
w = np.zeros(n_features)
residual = X @ w - y
gradient = (2 / n_samples) * X.T @ residual
grad_norm = np.linalg.norm(gradient)

print(f"\nGradient at w=0 (5D):")
print(f" ∇L = {np.round(gradient, 4)}")
print(f" ‖∇L‖ = {grad_norm:.4f}")
print(f" Direction: {np.round(gradient / grad_norm, 4)}")

# --- Gradient checking: section header ---
print("\n" + "=" * 55)
print("GRADIENT CHECKING")
print("=" * 55)
def mse_loss(w, X, y):
    """Mean-squared-error loss of the linear model X @ w against targets y."""
    errors = X @ w - y
    return np.mean(errors ** 2)
def analytical_gradient(w, X, y):
    """Closed-form gradient of the MSE loss: (2/N) * Xᵀ(Xw - y)."""
    n = len(y)
    return (2 / n) * X.T @ (X @ w - y)
def numerical_gradient(w, X, y, h=1e-5, loss_fn=None):
    """Estimate the gradient of a loss at w via central finite differences.

    Parameters
    ----------
    w : array-like
        Weight vector. Any numeric dtype; cast to float internally.
    X, y :
        Data forwarded unchanged to the loss function.
    h : float
        Half-width of the finite-difference step.
    loss_fn : callable, optional
        Loss with signature (w, X, y) -> scalar. Defaults to mse_loss.

    Returns
    -------
    np.ndarray
        Float gradient estimate, same length as w.
    """
    if loss_fn is None:
        loss_fn = mse_loss
    # Cast to float: with an integer-dtype w, the in-place +h / -h
    # perturbations would truncate to 0 and every difference would vanish.
    w = np.asarray(w, dtype=float)
    grad = np.zeros_like(w)
    for i in range(w.size):
        w_plus = w.copy()
        w_plus[i] += h
        w_minus = w.copy()
        w_minus[i] -= h
        # Central difference: (L(w+h e_i) - L(w-h e_i)) / (2h), O(h^2) error.
        grad[i] = (loss_fn(w_plus, X, y) - loss_fn(w_minus, X, y)) / (2 * h)
    return grad
# Compare the closed-form gradient against finite differences at a random point.
w_test = np.random.randn(n_features)
ana_grad = analytical_gradient(w_test, X, y)
num_grad = numerical_gradient(w_test, X, y)

print(f"\nAt random w:")
for i, (ana, num) in enumerate(zip(ana_grad, num_grad)):
    match = "✓" if abs(ana - num) < 1e-4 else "✗"
    print(f" w{i}: analytical={ana:.6f}, numerical={num:.6f} {match}")

# Symmetric relative error — the standard gradient-check metric.
denom = np.linalg.norm(ana_grad) + np.linalg.norm(num_grad)
rel_error = np.linalg.norm(ana_grad - num_grad) / denom
print(f"\n Relative error: {rel_error:.2e} {'✓ PASS' if rel_error < 1e-5 else '✗ FAIL'}")
# --- Plain gradient descent on the 5-D least-squares problem ---
print("\n" + "=" * 55)
print("5D GRADIENT DESCENT")
print("=" * 55)

w = np.zeros(n_features)
lr = 0.01
for step in range(100):
    w = w - lr * analytical_gradient(w, X, y)
    # Log only the first three and last three steps to keep output short.
    if step < 3 or step >= 97:
        print(f" Step {step+1:3d}: loss={mse_loss(w, X, y):.6f}")

print(f"\n Learned w: {np.round(w, 3)}")
print(f" True w: {true_w}")