keras_attention_layer.py
import tensorflow as tf
from tensorflow.keras.layers import Layer
from tensorflow.keras import backend as K


class AttentionLayer(Layer):
    """Attention pooling over the time axis.

    Takes a (batch, input_length, input_dim) tensor, plus an optional mask,
    and returns a (batch, input_dim) weighted sum together with the
    attention weights used to compute it.
    """

    def __init__(self, **kwargs):
        super(AttentionLayer, self).__init__(**kwargs)

    def build(self, inputs_shape):
        inputs_shape = inputs_shape if isinstance(inputs_shape, list) else [inputs_shape]
        if len(inputs_shape) < 1 or len(inputs_shape) > 2:
            raise ValueError("AttentionLayer expects one or two inputs.")

        # The first (and required) input is the actual input to the layer
        input_shape = inputs_shape[0]

        # Expected input shape consists of a triplet: (batch, input_length, input_dim)
        if len(input_shape) != 3:
            raise ValueError("Input shape for AttentionLayer should have 3 dimensions.")

        self.input_length = int(input_shape[1])
        self.input_dim = int(input_shape[2])

        # A single trainable scoring vector: each input_dim-sized timestep
        # is mapped to one scalar attention score.
        attention_param_shape = (self.input_dim, 1)
        self.attention_param = self.add_weight(
            name='attention_param',
            shape=attention_param_shape,
            initializer='uniform',
            trainable=True,
            dtype=tf.float32)
        super(AttentionLayer, self).build(input_shape)

    def call(self, inputs, **kwargs):
        inputs = inputs if isinstance(inputs, list) else [inputs]
        if len(inputs) < 1 or len(inputs) > 2:
            raise ValueError("AttentionLayer expects one or two inputs.")

        actual_input = inputs[0]
        mask = inputs[1] if len(inputs) > 1 else None
        if mask is not None and not (((len(mask.shape) == 3 and mask.shape[2] == 1) or len(mask.shape) == 2)
                                     and mask.shape[1] == self.input_length):
            raise ValueError("`mask` should be of shape (batch, input_length) or (batch, input_length, 1) "
                             "when calling an AttentionLayer.")
        assert actual_input.shape[-1] == self.attention_param.shape[0]

        # (batch, input_length, input_dim) * (input_dim, 1) ==> (batch, input_length, 1)
        attention_weights = K.dot(actual_input, self.attention_param)

        if mask is not None:
            if len(mask.shape) == 2:
                mask = K.expand_dims(mask, axis=2)  # (batch, input_length, 1)
            # log(1) == 0 for valid steps and log(0) == -inf for padded steps,
            # so adding the log-mask drives masked positions to zero weight
            # after the softmax.
            mask = K.log(mask)
            attention_weights += mask

        attention_weights = K.softmax(attention_weights, axis=1)  # (batch, input_length, 1)
        result = K.sum(actual_input * attention_weights, axis=1)  # (batch, input_dim) [multiplication uses broadcast]
        return result, attention_weights

    def compute_output_shape(self, input_shape):
        input_shape = input_shape[0] if isinstance(input_shape, list) else input_shape
        return input_shape[0], input_shape[2]  # (batch, input_dim) for the pooled output
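
A minimal usage sketch, not part of the original file: it assumes TensorFlow 2.x eager execution and that the AttentionLayer above is in scope; the batch size, sequence length, feature width, and padding split are invented purely for illustration.

    # Hypothetical shapes, chosen only for illustration.
    batch, input_length, input_dim = 4, 10, 32

    layer = AttentionLayer()
    sequence = tf.random.normal((batch, input_length, input_dim))

    # Float mask: 1.0 for real timesteps, 0.0 for padding
    # (here the last 3 of 10 steps are treated as padding).
    mask = tf.concat([tf.ones((batch, 7)), tf.zeros((batch, 3))], axis=1)

    pooled, weights = layer([sequence, mask])
    print(pooled.shape)   # (4, 32)    one pooled vector per sequence
    print(weights.shape)  # (4, 10, 1) softmax weights, ~0 on masked steps

Because the mask enters as K.log(mask) before the softmax, it must be a float tensor of ones and zeros; an integer or boolean mask would need casting first.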