piktomir / pocket-tensor · Commits

Commit 85957905
authored Aug 16, 2018 by Gustavo Valiente
SeLU and LeakyReLU activations support
parent 67f36c58
Showing 10 changed files with 174 additions and 3 deletions (+174 −3).
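For reference, the two activations added by this commit compute the standard definitions; the SELU constants below are the ones hard-coded in pt_selu_activation_layer.h, while LeakyReLU's alpha is the user-supplied slope that kerasify.py serializes:

\[
\mathrm{LeakyReLU}(x) =
\begin{cases}
x & x \ge 0 \\
\alpha x & x < 0
\end{cases}
\qquad
\mathrm{SELU}(x) = \lambda
\begin{cases}
x & x \ge 0 \\
\alpha\,(e^{x} - 1) & x < 0
\end{cases}
\]

with \(\lambda \approx 1.05070\) and \(\alpha \approx 1.67326\) for SELU.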
README.md                           +1  −1
kerasify.py                         +8  −0
lib/CMakeLists.txt                  +1  −0
lib/src/pt_activation_layer.cpp     +7  −1
lib/src/pt_layer.cpp                +6  −0
lib/src/pt_leaky_relu_layer.cpp     +49 −0
lib/src/pt_leaky_relu_layer.h       +33 −0
lib/src/pt_selu_activation_layer.h  +44 −0
make_tests.py                       +23 −1
tests/CMakeLists.txt                +2  −0
README.md
@@ -120,7 +120,7 @@ The most common layer types used in image recognition and sequences prediction a
 * Convolutions: `Conv1D`, `Conv2D`, `LocallyConnected1D`.
 * Sequences related: `LSTM`, `Embedding`.
-* Activations: `Linear`, `ReLU`, `ELU`, `Softplus`, `Softsign`, `Tanh`, `Sigmoid`, `HardSigmoid`, `Softmax`.
+* Activations: `Linear`, `ReLU`, `ELU`, `SeLU`, `LeakyReLU`, `Softplus`, `Softsign`, `Tanh`, `Sigmoid`, `HardSigmoid`, `Softmax`.
 * Other: `Dense`, `Flatten`, `MaxPooling2D`, `BatchNormalization`, `ELU`.

 ## Performance
kerasify.py
@@ -12,6 +12,7 @@ LAYER_MAXPOOLING_2D = 9
 LAYER_LSTM = 10
 LAYER_EMBEDDING = 11
 LAYER_BATCH_NORMALIZATION = 12
+LAYER_LEAKY_RELU = 13

 ACTIVATION_LINEAR = 1
 ACTIVATION_RELU = 2
@@ -22,6 +23,7 @@ ACTIVATION_SIGMOID = 6
 ACTIVATION_TANH = 7
 ACTIVATION_HARD_SIGMOID = 8
 ACTIVATION_SOFTMAX = 9
+ACTIVATION_SELU = 10

 def write_tensor(f, data, dims=1):
@@ -64,6 +66,8 @@ def export_activation(f, activation):
         f.write(struct.pack('I', ACTIVATION_HARD_SIGMOID))
     elif activation == 'softmax':
         f.write(struct.pack('I', ACTIVATION_SOFTMAX))
+    elif activation == 'selu':
+        f.write(struct.pack('I', ACTIVATION_SELU))
     else:
         assert False, "Unsupported activation type: %s" % activation
@@ -255,5 +259,9 @@ def export_model(model, filename):
     elif layer_type == 'BatchNormalization':
         export_layer_normalization(f, layer)
+    elif layer_type == 'LeakyReLU':
+        f.write(struct.pack('I', LAYER_LEAKY_RELU))
+        f.write(struct.pack('f', layer.alpha))
     else:
         assert False, "Unsupported layer type: %s" % layer_type
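To illustrate the new exporter path, here is a minimal usage sketch (hypothetical: it assumes kerasify.py is importable as kerasify and reuses the Keras 2.x API seen in make_tests.py below; the model and file name are made up):

# Sketch: exporting a Keras model that contains the newly supported
# LeakyReLU layer. export_model() now writes LAYER_LEAKY_RELU (13)
# followed by the layer's alpha as a 4-byte float.
from keras.models import Sequential
from keras.layers import Dense
from keras.layers.advanced_activations import LeakyReLU
from kerasify import export_model

model = Sequential([
    Dense(10, input_dim=10),
    LeakyReLU(alpha=0.5),
    Dense(1)
])
export_model(model, 'leaky_relu_10.model')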
lib/CMakeLists.txt
@@ -16,6 +16,7 @@ set(SOURCES
     src/pt_lstm_layer.cpp
     src/pt_embedding_layer.cpp
     src/pt_batch_normalization_layer.cpp
+    src/pt_leaky_relu_layer.cpp
     src/pt_model.cpp
 )
lib/src/pt_activation_layer.cpp
@@ -18,6 +18,7 @@
 #include "pt_tanh_activation_layer.h"
 #include "pt_hard_sigmoid_activation_layer.h"
 #include "pt_soft_max_activation_layer.h"
+#include "pt_selu_activation_layer.h"

 namespace pt
 {
@@ -34,7 +35,8 @@ namespace
     Sigmoid = 6,
     Tanh = 7,
     HardSigmoid = 8,
-    SoftMax = 9
+    SoftMax = 9,
+    Selu = 10
 };
 }
@@ -89,6 +91,10 @@ std::unique_ptr<ActivationLayer> ActivationLayer::create(std::istream& stream)
         activationLayer.reset(new SoftMaxActivationLayer());
         break;

+    case Selu:
+        activationLayer.reset(new SeluActivationLayer());
+        break;
+
     default:
         PT_LOG_ERROR << "Unknown activation layer ID: " << activationLayerID << std::endl;
     }
lib/src/pt_layer.cpp
@@ -19,6 +19,7 @@
 #include "pt_lstm_layer.h"
 #include "pt_embedding_layer.h"
 #include "pt_batch_normalization_layer.h"
+#include "pt_leaky_relu_layer.h"

 namespace pt
 {
@@ -38,6 +39,7 @@ namespace
     Lstm = 10,
     Embedding = 11,
     BatchNormalization = 12,
+    LeakyRelu = 13
 };
 }
@@ -100,6 +102,10 @@ std::unique_ptr<Layer> Layer::create(std::istream& stream)
         layer = BatchNormalizationLayer::create(stream);
         break;

+    case LeakyRelu:
+        layer = LeakyReluLayer::create(stream);
+        break;
+
     default:
         PT_LOG_ERROR << "Unknown layer ID: " << layerID << std::endl;
     }
lib/src/pt_leaky_relu_layer.cpp · new file mode 100644

/*
 * pocket-tensor (c) 2018 Gustavo Valiente gustavo.valiente.m@gmail.com
 * Kerasify (c) 2016 Robert W. Rose
 *
 * MIT License, see LICENSE file.
 */

#include "pt_leaky_relu_layer.h"

#include "pt_parser.h"
#include "pt_layer_data.h"

namespace pt
{

std::unique_ptr<LeakyReluLayer> LeakyReluLayer::create(std::istream& stream)
{
    float alpha = 0;

    if(!Parser::parse(stream, alpha))
    {
        PT_LOG_ERROR << "Alpha parse failed" << std::endl;
        return std::unique_ptr<LeakyReluLayer>();
    }

    return std::unique_ptr<LeakyReluLayer>(new LeakyReluLayer(FloatType(alpha)));
}

bool LeakyReluLayer::apply(LayerData& layerData) const
{
    layerData.out = std::move(layerData.in);

    for(FloatType& value : layerData.out)
    {
        if(value < 0)
        {
            value *= _alpha;
        }
    }

    return true;
}

LeakyReluLayer::LeakyReluLayer(FloatType alpha) noexcept :
    _alpha(alpha)
{
}

}
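For context, the bytes that LeakyReluLayer::create() parses (after Layer::create() has consumed the layer ID) are exactly what kerasify.py writes above. A hypothetical standalone Python reader for the full record, assuming struct's native byte order as used by the writer:

import struct

def read_leaky_relu_record(f):
    # 4-byte unsigned layer ID; 13 is LAYER_LEAKY_RELU in kerasify.py.
    (layer_id,) = struct.unpack('I', f.read(4))
    assert layer_id == 13, "not a LeakyReLU record"
    # 4-byte float alpha, as written by struct.pack('f', layer.alpha).
    (alpha,) = struct.unpack('f', f.read(4))
    return alpha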
lib/src/pt_leaky_relu_layer.h · new file mode 100644

/*
 * pocket-tensor (c) 2018 Gustavo Valiente gustavo.valiente.m@gmail.com
 * Kerasify (c) 2016 Robert W. Rose
 *
 * MIT License, see LICENSE file.
 */

#ifndef PT_LEAKY_RELU_LAYER_H
#define PT_LEAKY_RELU_LAYER_H

#include "pt_libsimdpp.h"
#include "pt_layer.h"

namespace pt
{

class LeakyReluLayer : public Layer
{

public:
    static std::unique_ptr<LeakyReluLayer> create(std::istream& stream);

    bool apply(LayerData& layerData) const final;

protected:
    FloatType _alpha;

    explicit LeakyReluLayer(FloatType alpha) noexcept;
};

}

#endif
lib/src/pt_selu_activation_layer.h · new file mode 100644

/*
 * pocket-tensor (c) 2018 Gustavo Valiente gustavo.valiente.m@gmail.com
 * Kerasify (c) 2016 Robert W. Rose
 *
 * MIT License, see LICENSE file.
 */

#ifndef PT_SELU_ACTIVATION_LAYER_H
#define PT_SELU_ACTIVATION_LAYER_H

#include "pt_tensor.h"
#include "pt_activation_layer.h"

namespace pt
{

class SeluActivationLayer : public ActivationLayer
{

public:
    using ActivationLayer::apply;

    SeluActivationLayer() = default;

    void apply(Tensor& out) const final
    {
        constexpr auto alpha = FloatType(1.6732632423543772848170429916717);
        constexpr auto scale = FloatType(1.0507009873554804934193349852946);

        for(FloatType& value : out)
        {
            if(value < 0)
            {
                value = alpha * std::expm1(value);
            }

            value *= scale;
        }
    }
};

}

#endif
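The alpha and scale values above are the standard self-normalizing SELU constants. A quick NumPy sketch that mirrors apply() for sanity checking (not part of the commit):

import numpy as np

ALPHA = 1.6732632423543772848170429916717
SCALE = 1.0507009873554804934193349852946

def selu(x):
    # Same branch as the C++ loop: negatives become alpha * expm1(x),
    # then every element is multiplied by scale.
    return SCALE * np.where(x < 0, ALPHA * np.expm1(x), x)

print(selu(np.array([-2.0, -0.5, 0.0, 1.0], dtype='f')))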
make_tests.py
@@ -13,7 +13,7 @@ from keras.layers import (
     MaxPooling2D, Dropout, BatchNormalization)
 from keras.layers.recurrent import LSTM
-from keras.layers.advanced_activations import ELU
+from keras.layers.advanced_activations import ELU, LeakyReLU
 from keras.layers.embeddings import Embedding
 from tensorflow import ConfigProto, Session
@@ -270,6 +270,17 @@ model = Sequential([
 output_testcase(model, test_x, test_y, 'relu_10', '1e-6')

+''' Activation LeakyReLU '''
+test_x = np.random.rand(1, 10).astype('f')
+test_y = np.random.rand(1, 1).astype('f')
+model = Sequential([
+    Dense(10, input_dim=10),
+    LeakyReLU(alpha=0.5),
+    Dense(1)
+])
+output_testcase(model, test_x, test_y, 'leaky_relu_10', '1e-6')
+
 ''' Dense relu '''
 test_x = np.random.rand(1, 10).astype('f')
 test_y = np.random.rand(1, 10).astype('f')
@@ -325,6 +336,17 @@ model = Sequential([
 output_testcase(model, test_x, test_y, 'dense_tanh_10', '1e-6')

+''' Dense selu '''
+test_x = np.random.rand(1, 10).astype('f')
+test_y = np.random.rand(1, 10).astype('f')
+model = Sequential([
+    Dense(10, input_dim=10, activation='selu'),
+    Dense(10, input_dim=10, activation='selu'),
+    Dense(10, input_dim=10, activation='selu')
+])
+output_testcase(model, test_x, test_y, 'dense_selu_10', '1e-6')
+
 ''' Conv softplus '''
 test_x = np.random.rand(10, 2, 2, 1).astype('f')
 test_y = np.random.rand(10, 1).astype('f')
tests/CMakeLists.txt
@@ -26,7 +26,9 @@ set(SOURCES
     src/dense_softmax_10_test.cpp
     src/dense_softsign_10_test.cpp
     src/dense_tanh_10_test.cpp
+    src/dense_selu_10_test.cpp
     src/elu_10_test.cpp
+    src/leaky_relu_10_test.cpp
     src/maxpool2d_1x1_test.cpp
     src/maxpool2d_2x2_test.cpp
     src/maxpool2d_3x2x2_test.cpp