Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions .github/workflows/run-tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
name: Run Tests

# Run the test suite on every push and pull request that targets the
# develop or main branch.
on:
  push:
    branches:
      - develop
      - main
  pull_request:
    branches:
      - develop
      - main

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      # Poetry manages the project's virtualenv and dependencies;
      # pipx keeps Poetry itself isolated from the project env.
      - name: Install dependencies
        run: |
          pipx install poetry
          poetry install

      - name: Run tests
        run: |
          poetry run pytest
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
# Changelog

## v0.1.4 (05/14/2026)

- Added a `copy` method to the `Sequential` class for making deep copies of a model instance.
- Added tests for the `Sequential` class and its builder, and added a test runner to CI/CD.

## v0.1.3 (05/07/2026)

- Removed datasets and pillow as package dependencies and moved to dev dependencies
Expand Down
52 changes: 51 additions & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 3 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "phitodeep"
version = "0.1.3"
version = "0.1.4"
description = "Deep learning framework built from scratch with numpy!"
authors = ["Ralph Dugue"]
license = "Apache License 2.0"
Expand All @@ -23,5 +23,6 @@ dev = [
"sphinx-autoapi (>=3.8.0,<4.0.0)",
"sphinx-rtd-theme (>=3.1.0,<4.0.0)",
"pillow (>=12.2.0,<13.0.0)",
"datasets (>=4.8.4,<5.0.0)"
"datasets (>=4.8.4,<5.0.0)",
"pytest (>=9.0.3,<10.0.0)"
]
25 changes: 25 additions & 0 deletions src/phitodeep/layers/activation.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,11 @@ def backward(self, dL_dZ):
dL_dX = dL_dZ * (X > 0).astype(float)
return dL_dX

def copy(self):
    """Return an independent copy of this layer.

    Copies each cached value as well (consistent with ``Dense.copy``), so
    the clone shares no mutable arrays with the original; a plain
    ``dict.copy()`` would leave the cached arrays shared.
    NOTE(review): assumes cache values are numpy arrays — confirm.
    """
    new_layer = ReLu()
    new_layer.cache = {k: v.copy() for k, v in self.cache.items()}
    return new_layer


class Sigmoid(Layer):
def __init__(self) -> None:
Expand All @@ -39,6 +44,11 @@ def backward(self, dL_dZ):
dL_dX = dL_dZ * Z * (1 - Z)
return dL_dX

def copy(self):
    """Return an independent copy of this layer.

    Per-value copy keeps the clone's cached arrays independent of the
    original's, matching ``Dense.copy``.
    NOTE(review): assumes cache values are numpy arrays — confirm.
    """
    new_layer = Sigmoid()
    new_layer.cache = {k: v.copy() for k, v in self.cache.items()}
    return new_layer


class Tanh(Layer):
def __init__(self) -> None:
Expand All @@ -60,6 +70,11 @@ def backward(self, dL_dZ):
dL_dX = dL_dZ * (1 - Z**2)
return dL_dX

def copy(self):
    """Return an independent copy of this layer.

    Per-value copy keeps the clone's cached arrays independent of the
    original's, matching ``Dense.copy``.
    NOTE(review): assumes cache values are numpy arrays — confirm.
    """
    new_layer = Tanh()
    new_layer.cache = {k: v.copy() for k, v in self.cache.items()}
    return new_layer


class Softmax(Layer):
def __init__(self) -> None:
Expand All @@ -85,6 +100,11 @@ def backward(self, dL_dZ):
"""
return dL_dZ

def copy(self):
    """Return an independent copy of this layer.

    Per-value copy keeps the clone's cached arrays independent of the
    original's, matching ``Dense.copy``.
    NOTE(review): assumes cache values are numpy arrays — confirm.
    """
    new_layer = Softmax()
    new_layer.cache = {k: v.copy() for k, v in self.cache.items()}
    return new_layer


class ELU(Layer):
def __init__(self, alpha=1.0) -> None:
Expand All @@ -104,3 +124,8 @@ def backward(self, dL_dZ):
X = self.cache["X"]
dL_dX = dL_dZ * np.where(X > 0, 1.0, self.alpha_activation * np.exp(X))
return dL_dX

def copy(self):
    """Return an independent copy of this layer, preserving ``alpha``.

    Per-value copy keeps the clone's cached arrays independent of the
    original's, matching ``Dense.copy``.
    NOTE(review): assumes cache values are numpy arrays — confirm.
    """
    new_layer = ELU(self.alpha_activation)
    new_layer.cache = {k: v.copy() for k, v in self.cache.items()}
    return new_layer
28 changes: 28 additions & 0 deletions src/phitodeep/layers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,10 @@


class Layer:
"""
Base class for all layers in the network.
"""

def __init__(self, name) -> None:
    """Store the layer's name and initialize an empty forward-pass cache."""
    self.name = name
    # Populated during forward() with whatever backward() needs (e.g. inputs).
    self.cache = {}
Expand All @@ -22,8 +26,15 @@ def backward(self, dL_dZ):
"""
raise NotImplementedError(f"Block '{self.name}' must implement backward method")

def copy(self):
    """Return an independent copy of this layer; every subclass must override."""
    raise NotImplementedError(f"Block '{self.name}' must implement copy method")


class Flatten(Layer):
"""
Flattens the input tensor into a 2D tensor.
"""

def __init__(self):
super().__init__("flatten")

Expand All @@ -41,8 +52,17 @@ def backward(self, dL_dZ):
X = self.cache["X"]
return dL_dZ.reshape(X.shape)

def copy(self):
    """Return an independent copy of this layer.

    Per-value copy keeps the clone's cached arrays independent of the
    original's, matching ``Dense.copy``.
    NOTE(review): assumes cache values are numpy arrays — confirm.
    """
    new_layer = Flatten()
    new_layer.cache = {k: v.copy() for k, v in self.cache.items()}
    return new_layer


class Dense(Layer):
"""
Fully connected layer.
"""

def __init__(self, input_size, output_size):
super().__init__("dense")
self.grads = {}
Expand Down Expand Up @@ -83,3 +103,11 @@ def backward(self, dL_dZ):
dL_dX = np.dot(dL_dZ, self.W.T)

return dL_dX

def copy(self):
    """Return a deep copy of this layer: weights, biases, grads, and cache.

    The clone shares no arrays with the original, so training one model
    does not mutate the other.
    """
    clone = Dense(self.input_size, self.output_size)
    clone.W = self.W.copy()
    clone.b = self.b.copy()
    clone.grads = {name: arr.copy() for name, arr in self.grads.items()}
    clone.cache = {name: arr.copy() for name, arr in self.cache.items()}
    return clone
37 changes: 33 additions & 4 deletions src/phitodeep/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,18 @@ def setloss(self, loss_class):
self.loss_class = loss_class

def train(self, X, y, X_test, y_test):
"""
Train the model using the specified optimizer and loss function.

Args:
X (np.ndarray): Training data.
y (np.ndarray): Training labels.
X_test (np.ndarray): Test data.
y_test (np.ndarray): Test labels.

Returns:
list: A list of tuples containing the training and test losses for each epoch.
"""
match self.optimizer:
case "sgd":
optimizer = optimization.SGD(alpha=self.alpha)
Expand All @@ -68,9 +80,15 @@ def train(self, X, y, X_test, y_test):

print("Training complete.")
print("-" * 60)
print(f"Starting Training Loss: {losses[0][0]:.4f} | Starting Test Loss: {losses[0][1]:.4f}")
print(f"Final Training Loss: {losses[-1][0]:.4f} | Final Test Loss: {losses[-1][1]:.4f}")
print(f"Training Loss Improvement: {losses[0][0] - losses[-1][0]:.4f} | Test Loss Improvement: {losses[0][1] - losses[-1][1]:.4f}")
print(
f"Starting Training Loss: {losses[0][0]:.4f} | Starting Test Loss: {losses[0][1]:.4f}"
)
print(
f"Final Training Loss: {losses[-1][0]:.4f} | Final Test Loss: {losses[-1][1]:.4f}"
)
print(
f"Training Loss Improvement: {losses[0][0] - losses[-1][0]:.4f} | Test Loss Improvement: {losses[0][1] - losses[-1][1]:.4f}"
)
print("-" * 60)
return losses

Expand Down Expand Up @@ -129,6 +147,17 @@ def summary(self):
print(f"Layer {i}: {layer.name.upper():<10}")
print("-" * 60)

def copy(self):
    """Return a copy of the model: per-layer copies plus the same hyperparameters."""
    copied_layers = [layer.copy() for layer in self.layers]
    return Sequential(
        *copied_layers,
        alpha=self.alpha,
        optimizer=self.optimizer,
        batch_size=self.batch_size,
        epochs=self.epochs,
        loss_class=self.loss_class,
    )


class SequentialBuilder:
"""Fluent API for building Sequential models."""
Expand Down Expand Up @@ -204,7 +233,7 @@ def loss(self, loss_class):
def build(self):
"""Build and return the Sequential model."""
return Sequential(
*self.layers,
*[layer.copy() for layer in self.layers],
alpha=self.alpha_value,
optimizer=self.optimizer_name,
batch_size=self.batch_size,
Expand Down
4 changes: 3 additions & 1 deletion src/phitodeep/optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,11 +51,13 @@ def step(self, layers):
def train_loop(
model, X, y, X_test, y_test, loss_class, optimizer, epochs=1000, batch_size=1
):

losses = []
rng = np.random.default_rng()

for epoch in range(epochs):
for _ in range(len(X) // batch_size):
indices = np.random.randint(0, len(X), batch_size)
indices = rng.integers(0, len(X), batch_size)
X_batch = X[indices]
y_batch = y[indices]

Expand Down
Loading
Loading