[activation] add sigmoid and relu, in-place
author: Paul Brossier <piem@piem.org>
Thu, 10 Jan 2019 18:12:52 +0000 (19:12 +0100)
committer: Paul Brossier <piem@piem.org>
Wed, 29 Dec 2021 16:51:58 +0000 (11:51 -0500)
src/ai/activation.c [new file with mode: 0644]

diff --git a/src/ai/activation.c b/src/ai/activation.c
new file mode 100644 (file)
index 0000000..b27648c
--- /dev/null
@@ -0,0 +1,21 @@
+#include "aubio_priv.h"
+#include "fmat.h"
+#include "ai/tensor.h"
+
+void aubio_activation_relu(aubio_tensor_t *t)
+{
+  uint_t i;
+  AUBIO_ASSERT(t);
+  for (i = 0; i < t->size; i++) {
+    t->buffer[i] = MAX(0, t->buffer[i]);
+  }
+}
+
+void aubio_activation_sigmoid(aubio_tensor_t *t)
+{
+  uint_t i;
+  AUBIO_ASSERT(t);
+  for (i = 0; i < t->size; i++) {
+    t->buffer[i] = 1. / (1. + EXP( - t->buffer[i] ));
+  }
+}