Skip to content

Commit b2c3419

Browse files
committed
Fix additional ruff compliance issues
- Fixed abstract method issue in BaseOptimizer.reset method - Fixed line length violations in test files and demo code - Reduced from 18 to 10 remaining ruff violations - All core functionality preserved and tested
1 parent 63b074f commit b2c3419

File tree

4 files changed

+17
-8
lines changed

4 files changed

+17
-8
lines changed

neural_network/optimizers/base_optimizer.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,6 @@ def reset(self) -> None:
7777
or when you want to clear any accumulated state (like momentum).
7878
Default implementation does nothing, but optimizers with state should override.
7979
"""
80-
...
8180

8281
def __str__(self) -> str:
8382
"""String representation of the optimizer."""

neural_network/optimizers/momentum_sgd.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,9 @@ def _check_shapes_and_get_velocity(
118118
parameters: float | list[float | list[float]],
119119
gradients: float | list[float | list[float]],
120120
velocity_values: float | list[float | list[float]]
121-
) -> tuple[float | list[float | list[float]], float | list[float | list[float]]]:
121+
) -> tuple[
122+
float | list[float | list[float]], float | list[float | list[float]]
123+
]:
122124
# Handle scalar case
123125
if isinstance(parameters, (int, float)):
124126
if not isinstance(gradients, (int, float)):
@@ -130,7 +132,9 @@ def _check_shapes_and_get_velocity(
130132
velocity_values = 0.0
131133

132134
# Update velocity: v = β * v + (1-β) * g
133-
new_velocity = self.momentum * velocity_values + (1 - self.momentum) * gradients
135+
new_velocity = (
136+
self.momentum * velocity_values + (1 - self.momentum) * gradients
137+
)
134138
# Update parameter: θ = θ - alpha * v
135139
new_param = parameters - self.learning_rate * new_velocity
136140

@@ -271,7 +275,8 @@ def __str__(self) -> str:
271275
f" SGD: f = {f_sgd:.6f}, x = ({x_sgd[0]:6.3f}, {x_sgd[1]:6.3f})"
272276
)
273277
print(
274-
f" Momentum: f = {f_momentum:.6f}, x = ({x_momentum[0]:6.3f}, {x_momentum[1]:6.3f})"
278+
f" Momentum: f = {f_momentum:.6f}, x = "
279+
f"({x_momentum[0]:6.3f}, {x_momentum[1]:6.3f})"
275280
)
276281

277282
print("\\nFinal comparison:")
@@ -280,5 +285,6 @@ def __str__(self) -> str:
280285
print(f"SGD final loss: {f_final_sgd:.6f}")
281286
print(f"Momentum final loss: {f_final_momentum:.6f}")
282287
print(
283-
f"Improvement with momentum: {((f_final_sgd - f_final_momentum) / f_final_sgd * 100):.1f}%"
288+
f"Improvement with momentum: "
289+
f"{((f_final_sgd - f_final_momentum) / f_final_sgd * 100):.1f}%"
284290
)

neural_network/optimizers/nag.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,9 @@ def _nag_update_recursive(
132132
velocity = 0.0
133133

134134
# Update velocity: v = β * v + (1-β) * g
135-
new_velocity = self.momentum * velocity + (1 - self.momentum) * gradients
135+
new_velocity = (
136+
self.momentum * velocity + (1 - self.momentum) * gradients
137+
)
136138

137139
# NAG update: θ = θ - alpha * (β * v + (1-β) * g)
138140
nesterov_update = (

neural_network/optimizers/test_optimizers.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,8 @@ def rosenbrock_grad(x: float, y: float) -> list[float]:
201201
loss = rosenbrock(x, y)
202202
distance_to_optimum = math.sqrt((x - 1) ** 2 + (y - 1) ** 2)
203203
print(
204-
f"{name:12s}: loss = {loss:8.3f}, pos = ({x:6.3f}, {y:6.3f}), dist = {distance_to_optimum:.4f}"
204+
f"{name:12s}: loss = {loss:8.3f}, pos = ({x:6.3f}, {y:6.3f}), "
205+
f"dist = {distance_to_optimum:.4f}"
205206
)
206207

207208
if loss < best_loss:
@@ -255,7 +256,8 @@ def convergence_analysis() -> None:
255256
final_x = positions[name][0]
256257
if steps is not None:
257258
print(
258-
f"{name:12s}: converged in {steps:2d} steps (final |x| = {abs(final_x):.6f})"
259+
f"{name:12s}: converged in {steps:2d} steps "
260+
f"(final |x| = {abs(final_x):.6f})"
259261
)
260262
else:
261263
print(f"{name:12s}: did not converge (final |x| = {abs(final_x):.6f})")

0 commit comments

Comments
 (0)