
Commit 195b58b (parent: 8b55a8f)

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci

1 file changed: +4 −6

neural_network/real_time_encoder_transformer.py

@@ -39,13 +39,11 @@ def forward(self, time_steps: np.ndarray) -> np.ndarray:
         (1, 3, 4)
         """
 
-
         linear = self.w0 * time_steps + self.b0
         periodic = np.sin(time_steps * self.w[None, None, :] + self.b[None, None, :])
         return np.concatenate([linear, periodic], axis=-1)
 
 
-
 # -------------------------------
 # 🔹 LayerNorm
 # -------------------------------
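For context, a minimal runnable sketch of the Time2Vec layer this hunk touches. Only the three lines of forward appear in the diff; the class name, constructor, and parameter shapes (scalar w0/b0 for the linear term, length k - 1 vectors w/b for the periodic terms, inferred from the broadcasting in forward) are assumptions:

    import numpy as np


    class Time2Vec:
        """Minimal Time2Vec sketch: one linear feature plus k - 1 sinusoidal features."""

        def __init__(self, k: int) -> None:
            rng = np.random.default_rng(0)
            self.w0 = rng.standard_normal()      # scalar weight of the linear term
            self.b0 = rng.standard_normal()      # scalar bias of the linear term
            self.w = rng.standard_normal(k - 1)  # frequencies of the periodic terms
            self.b = rng.standard_normal(k - 1)  # phases of the periodic terms

        def forward(self, time_steps: np.ndarray) -> np.ndarray:
            # time_steps: (batch, seq_len, 1) -> output: (batch, seq_len, k)
            linear = self.w0 * time_steps + self.b0
            periodic = np.sin(time_steps * self.w[None, None, :] + self.b[None, None, :])
            return np.concatenate([linear, periodic], axis=-1)


    t2v = Time2Vec(k=4)
    print(t2v.forward(np.zeros((1, 3, 1))).shape)  # (1, 3, 4), as in the doctest above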
@@ -278,16 +276,16 @@ def forward(self, eeg_data: np.ndarray) -> np.ndarray:
         if eeg_data.shape[-1] != 1:
             eeg_data = eeg_data[..., :1]
 
-        # Time2Vec positional encoding
+        # Time2Vec positional encoding
         x = self.time2vec.forward(eeg_data)
 
-        # Transformer encoder
+        # Transformer encoder
         x = self.encoder.forward(x)
 
-        # Attention pooling
+        # Attention pooling
         x = self.pooling.forward(x)
 
-        # Final linear layer
+        # Final linear layer
         out = np.dot(x, self.w_out) + self.b_out  # shape (batch, output_dim)
         return out
 
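This hunk appears to change only whitespace around the stage comments, but it also documents the model's full forward pipeline: Time2Vec positional encoding, transformer encoder, attention pooling, then a final linear layer. Below is a shape-level sketch of that pipeline under stated assumptions: it reuses the hypothetical Time2Vec class from the sketch above, and IdentityEncoder / MeanPooling are stand-ins that only reproduce the shape contract of the real encoder and pooling classes defined elsewhere in the file:

    rng = np.random.default_rng(0)
    batch, seq_len, d_model, output_dim = 2, 16, 4, 3


    class IdentityEncoder:
        def forward(self, x: np.ndarray) -> np.ndarray:
            # transformer-encoder stand-in: (batch, seq_len, d_model) -> same shape
            return x


    class MeanPooling:
        def forward(self, x: np.ndarray) -> np.ndarray:
            # attention-pooling stand-in: (batch, seq_len, d_model) -> (batch, d_model)
            return x.mean(axis=1)


    time2vec = Time2Vec(k=d_model)  # hypothetical Time2Vec sketch from above
    encoder, pooling = IdentityEncoder(), MeanPooling()
    w_out = rng.standard_normal((d_model, output_dim))
    b_out = np.zeros(output_dim)

    eeg_data = rng.standard_normal((batch, seq_len, 1))
    x = time2vec.forward(eeg_data)  # (2, 16, 4) - Time2Vec positional encoding
    x = encoder.forward(x)          # (2, 16, 4) - transformer encoder
    x = pooling.forward(x)          # (2, 4)     - attention pooling
    out = np.dot(x, w_out) + b_out  # (2, 3)     - final linear layer
    print(out.shape)                # (2, 3), i.e. (batch, output_dim)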