Loading W Code...
📚 Prerequisite
Complete Linear Algebra and Calculus modules first!
ML-Core
import numpy as np
# --- Element-wise vs. matrix operations -------------------------------
print("=" * 55)
print("BROADCASTING & ELEMENT-WISE OPS")
print("=" * 55)

# Two small 2x2 integer matrices to contrast the three products.
A = np.array([[1, 2], [3, 4]])
B = np.array([[5, 6], [7, 8]])
print(f"A =\n{A}")
print(f"B =\n{B}")
print(f"\nA + B (element-wise) =\n{A + B}")
print(f"A * B (Hadamard) =\n{A * B}")          # element-wise product
print(f"A @ B (matrix mult) =\n{A @ B}")       # true matrix product
# Fixed mojibake: the original emitted garbled characters here.
print(f"\n💡 A*B ≠ A@B !")
# --- Broadcasting: arithmetic between arrays of different shapes ------
print("\n" + "=" * 55)
print("BROADCASTING")
print("=" * 55)

# 3x3 matrix holding 1..9 (identical values to spelling the literal out).
X = np.arange(1, 10).reshape(3, 3)

# Row centering: keepdims=True keeps shape (3, 1) so the column of row
# means stretches across the 3 columns during the subtraction.
row_mean = X.mean(axis=1, keepdims=True)
X_centered = X - row_mean
print(f"\nX =\n{X}")
print(f"Row means = {row_mean.flatten()}")
print(f"X centered =\n{X_centered}")

# Column (per-feature) centering: the (3,) vector broadcasts down rows.
col_mean = X.mean(axis=0)
X_feature_centered = X - col_mean
print(f"\nColumn means = {col_mean}")
print(f"Feature centered =\n{X_feature_centered}")
# --- StandardScaler by hand: just broadcasting ------------------------
print("\n" + "=" * 55)
print("STANDARDSCALER = BROADCASTING")
print("=" * 55)

# Simulated 5-sample x 3-feature dataset. The seed and the ORDER of the
# three normal() draws are fixed so the RNG stream (and output) is stable.
np.random.seed(42)
data = np.column_stack([
    np.random.normal(170, 10, 5),  # height (cm)
    np.random.normal(65, 15, 5),   # weight (kg)
    np.random.normal(25, 5, 5),    # age
])
features = ['Height', 'Weight', 'Age']

print(f"\nOriginal data:")
for name, col in zip(features, data.T):
    print(f" {name}: mean={col.mean():.1f}, std={col.std():.1f}")

# (5,3) minus (3,) per-feature mean, divided by (3,) per-feature std —
# both vectors broadcast across the 5 rows.
mean = data.mean(axis=0)
std = data.std(axis=0)
standardized = (data - mean) / std

print(f"\nStandardized data:")
for name, col in zip(features, standardized.T):
    print(f" {name}: mean={col.mean():.4f}, std={col.std():.4f}")
# --- Neural-network layer: Z = XW + b ---------------------------------
# NOTE: no seed is set here; X and W continue whatever RNG state the
# script is in, so only the shapes (not values) are reproducible.
print("\n" + "=" * 55)
print("NEURAL NETWORK: Z = XW + b")
print("=" * 55)
X = np.random.randn(4, 3)   # 4 samples, 3 features
W = np.random.randn(3, 2)   # 3 features → 2 neurons  (fixed mojibake)
b = np.array([0.5, -0.3])   # one bias per neuron
Z = X @ W + b               # b broadcasts from (2,) to (4, 2)
print(f"X shape: {X.shape}")
print(f"W shape: {W.shape}")
print(f"b shape: {b.shape}")
print(f"Z = XW + b shape: {Z.shape}")
# Fixed mojibake: the original emitted garbled characters here.
print(f"\n💡 b (2,) broadcasts to (4, 2) → adds bias to each sample!")