## Clone the Repository
git clone https://github.com/yourusername/micrograd-plus.git
cd micrograd-plus
pip install numpy
## Build Your First Model
from micrograd import Tensor
from micrograd.nn import Linear, Sequential, ReLU
from micrograd.optim import Adam
from micrograd.nn.losses import MSELoss
# Create a simple neural network
model = Sequential(
Linear(2, 16),
ReLU(),
Linear(16, 1)
)
# Setup training
optimizer = Adam(model.parameters(), lr=0.01)
criterion = MSELoss()
## Train Your Model
# Training loop
for epoch in range(100):
# Forward pass
predictions = model(X)
loss = criterion(predictions, y)
# Backward pass
optimizer.zero_grad()
loss.backward()
optimizer.step()
print(f"Epoch {epoch}, Loss: {loss.item():.4f}")