From bfd84f3ff6711a491d1336a19c35c773e54188f7 Mon Sep 17 00:00:00 2001
From: pswietojanski
Date: Sun, 15 Nov 2015 21:53:47 +0000
Subject: [PATCH] fix incorrect tabulation
---
mlp/layers.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/mlp/layers.py b/mlp/layers.py
index fd6bcdb..9d23ee5 100644
--- a/mlp/layers.py
+++ b/mlp/layers.py
@@ -527,7 +527,7 @@ class Maxout(Linear):
return h[:, :, 0] #get rid of the last reduced dimension (of size 1)
def bprop(self, h, igrads):
- #hack for dropout backprop (ignore dropped neurons), note, this is not
+ #hack for dropout backprop (ignore dropped neurons), note, this is not
#entirely correct when h fires at exactly 0 (but is not dropped), in
#which case the derivative should be 1. However, this is rather unlikely
#to happen and can probably be ignored for now
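
The masking hack the comment describes can be illustrated with a minimal
NumPy sketch. Note this is an illustration, not the repository's actual
Maxout.bprop; bprop_sketch is a hypothetical stand-in, and it assumes the
hack gates the incoming gradients with (h != 0):

import numpy as np

def bprop_sketch(h, igrads):
    # h: activations after dropout; igrads: gradients from the layer above.
    # Units zeroed by dropout pass no gradient. A unit whose activation is
    # exactly 0 but was NOT dropped gets masked too, which is the slight
    # inaccuracy the comment above acknowledges.
    mask = (h != 0).astype(h.dtype)
    return igrads * mask

# usage: the gradient at the exactly-zero activation is (incorrectly) zeroed
h = np.array([[0.5, 0.0, -1.2]])
igrads = np.array([[1.0, 1.0, 1.0]])
print(bprop_sketch(h, igrads))  # -> [[1. 0. 1.]]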