Showing 14 changed files with 968 additions and 289 deletions.
@@ -0,0 +1,119 @@
# encoding: utf-8
"""
@author: BrikerMan
@contact: eliyar917@gmail.com
@blog: https://eliyar.biz
@version: 1.0
@license: Apache Licence
@file: layers
@time: 2019-02-23
"""
from __future__ import absolute_import, division

import tensorflow as tf
from keras.layers import Flatten
from keras.layers import GRU, LSTM
from keras.layers import CuDNNGRU, CuDNNLSTM
from keras import initializers
from keras.engine import InputSpec, Layer
from keras import backend as K

from kashgari.macros import config

# Use the faster CuDNN-backed cells when the config requests them (GPU only).
if config.use_CuDNN_cell:
    GRULayer = CuDNNGRU
    LSTMLayer = CuDNNLSTM
else:
    GRULayer = GRU
    LSTMLayer = LSTM


class AttentionWeightedAverage(Layer):
    """
    Computes a weighted average of the different channels across timesteps.
    Uses one parameter per channel to compute the attention value for a single timestep.
    """

    def __init__(self, return_attention=False, **kwargs):
        self.init = initializers.get('uniform')
        self.supports_masking = True
        self.return_attention = return_attention
        super(AttentionWeightedAverage, self).__init__(**kwargs)

    def build(self, input_shape):
        self.input_spec = [InputSpec(ndim=3)]
        assert len(input_shape) == 3

        self.W = self.add_weight(shape=(input_shape[2], 1),
                                 name='{}_w'.format(self.name),
                                 initializer=self.init)
        self.trainable_weights = [self.W]
        super(AttentionWeightedAverage, self).build(input_shape)

    def call(self, x, mask=None):
        # Computes a probability distribution over the timesteps.
        # Uses the 'max trick' for numerical stability.
        # The reshape avoids an issue with TensorFlow and 1-dimensional weights.
        logits = K.dot(x, self.W)
        x_shape = K.shape(x)
        logits = K.reshape(logits, (x_shape[0], x_shape[1]))
        ai = K.exp(logits - K.max(logits, axis=-1, keepdims=True))

        # Masked timesteps have zero weight.
        if mask is not None:
            mask = K.cast(mask, K.floatx())
            ai = ai * mask
        att_weights = ai / (K.sum(ai, axis=1, keepdims=True) + K.epsilon())
        weighted_input = x * K.expand_dims(att_weights)
        result = K.sum(weighted_input, axis=1)
        if self.return_attention:
            return [result, att_weights]
        return result

    def get_output_shape_for(self, input_shape):
        return self.compute_output_shape(input_shape)

    def compute_output_shape(self, input_shape):
        output_len = input_shape[2]
        if self.return_attention:
            return [(input_shape[0], output_len), (input_shape[0], input_shape[1])]
        return (input_shape[0], output_len)

    def compute_mask(self, inputs, input_mask=None):
        if isinstance(input_mask, list):
            return [None] * len(input_mask)
        else:
            return None


class KMaxPooling(Layer):
    """
    K-max pooling layer that extracts the k highest activations from a sequence (2nd dimension).
    TensorFlow backend.
    """

    def __init__(self, k=1, **kwargs):
        super().__init__(**kwargs)
        self.input_spec = InputSpec(ndim=3)
        self.k = k

    def compute_output_shape(self, input_shape):
        return (input_shape[0], (input_shape[2] * self.k))

    def call(self, inputs):
        # Swap the last two dimensions since top_k is applied along the last dimension.
        shifted_input = tf.transpose(inputs, [0, 2, 1])

        # Extract top_k; tf.nn.top_k returns a (values, indices) pair.
        top_k = tf.nn.top_k(shifted_input, k=self.k, sorted=True, name=None)[0]

        # Return the flattened output.
        return Flatten()(top_k)


if __name__ == '__main__':
    print("hello, world")