Skip to content

Commit 30c9882

Browse files
committed
Adding doctest
1 parent 788d95b commit 30c9882

File tree

2 files changed

+47
-0
lines changed

2 files changed

+47
-0
lines changed

data_compression/huffman.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,18 @@ def build_tree(letters: list[Letter]) -> Letter | TreeNode:
3939
"""
4040
Run through the list of Letters and build the min heap
4141
for the Huffman Tree.
42+
43+
>>> letters = [
44+
... Letter('a', 5),
45+
... Letter('b', 9),
46+
... Letter('c', 12),
47+
... Letter('d', 13)
48+
... ]
49+
>>> root = build_tree(letters)
50+
>>> isinstance(root, TreeNode)
51+
True
52+
>>> root.freq
53+
39
4254
"""
4355
response: list[Letter | TreeNode] = list(letters)
4456
while len(response) > 1:
@@ -55,6 +67,16 @@ def traverse_tree(root: Letter | TreeNode, bitstring: str) -> list[Letter]:
5567
"""
5668
Recursively traverse the Huffman Tree to set each
5769
Letter's bitstring dictionary, and return the list of Letters
70+
71+
>>> letters = [Letter('a', 2), Letter('b', 3), Letter('c', 4)]
72+
>>> root = build_tree(letters)
73+
>>> result = traverse_tree(root, "")
74+
>>> sorted([l.letter for l in result])
75+
['a', 'b', 'c']
76+
>>> all(l.bitstring[l.letter] for l in result)
77+
True
78+
>>> sum(l.freq for l in result)
79+
9
5880
"""
5981
if isinstance(root, Letter):
6082
root.bitstring[root.letter] = bitstring

neural_network/back_propagation_neural_network.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,18 @@
2323

2424

2525
def sigmoid(x: np.ndarray) -> np.ndarray:
26+
"""
27+
Compute the sigmoid activation function
28+
29+
>>> import numpy as np
30+
>>> np.allclose(sigmoid(np.array([0])), np.array([0.5]))
31+
True
32+
>>> np.allclose(
33+
... sigmoid(np.array([-1, 0, 1])),
34+
... np.array([0.26894142, 0.5, 0.73105858])
35+
... )
36+
True
37+
"""
2638
return 1 / (1 + np.exp(-x))
2739

2840

@@ -158,6 +170,19 @@ def train(self, xdata, ydata, train_round, accuracy):
158170
return None
159171

160172
def cal_loss(self, ydata, ydata_):
173+
"""
174+
Calculate Mean Squared Error (MSE) loss and its gradient.
175+
176+
>>> import numpy as np
177+
>>> bp = BPNN()
178+
>>> y_true = np.asmatrix([[1.0], [0.5]])
179+
>>> y_pred = np.asmatrix([[0.8], [0.3]])
180+
>>> loss, grad = bp.cal_loss(y_true, y_pred)
181+
>>> float(round(loss, 2))
182+
0.08
183+
>>> np.allclose(grad, np.array([[-0.4], [-0.4]]))
184+
True
185+
"""
161186
self.loss = np.sum(np.power((ydata - ydata_), 2))
162187
self.loss_gradient = 2 * (ydata_ - ydata)
163188
# vector (shape is the same as _ydata.shape)

0 commit comments

Comments (0)