
Commit

Add some annotations
PiperOrigin-RevId: 679776357
lingvo-bot authored and copybara-github committed Sep 28, 2024
1 parent 74a0b6a commit 529aa84
Showing 1 changed file with 13 additions and 11 deletions.

lingvo/core/layers_with_attention.py
@@ -243,15 +243,17 @@ def _InitAttention(self, atten_tpl):
       params.packed_input = p.packed_input
       return params
 
-  def FProp(self,
-            theta,
-            query_vec,
-            source_paddings,
-            source_vecs=None,
-            query_segment_id=None,
-            source_segment_id=None,
-            context_vecs=None,
-            **kwargs):
+  def FProp(
+      self,
+      theta: py_utils.NestedMap,
+      query_vec: tf.Tensor,
+      source_paddings: Optional[tf.Tensor],
+      source_vecs: Optional[tf.Tensor] = None,
+      query_segment_id: Optional[tf.Tensor] = None,
+      source_segment_id: Optional[tf.Tensor] = None,
+      context_vecs: Optional[tf.Tensor] = None,
+      **kwargs
+  ) -> Tuple[tf.Tensor, tf.Tensor]:
     """Transformer attention, residual and normalization layer.
 
     Args:
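
The new annotations reference Optional, Tuple, tf.Tensor, and py_utils.NestedMap, so the module needs the corresponding imports. A minimal sketch of what those presumably look like at the top of layers_with_attention.py (they are not shown in this diff, and the exact TF import style is an assumption; lingvo commonly aliases TensorFlow through lingvo.compat):

    from typing import Optional, Tuple

    from lingvo import compat as tf  # assumed; lingvo's TensorFlow alias
    from lingvo.core import py_utils
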
@@ -1431,8 +1433,8 @@ def FProp(
       theta: py_utils.NestedMap,
       source_vecs: tf.Tensor,
       source_paddings: tf.Tensor,
-      aux_vecs=None,
-      aux_paddings=None,
+      aux_vecs: Optional[tf.Tensor] = None,
+      aux_paddings: Optional[tf.Tensor] = None,
       source_segment_id: Optional[tf.Tensor] = None,
       aux_segment_id=None,
       **kwargs
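
As a rough illustration of the annotated contract in the first hunk, a caller now gets a pair of tensors back and may omit the optional source tensors. A hypothetical usage sketch; the layer and theta objects, the time-major shapes, the self-attention fallback, and the reading of the returned pair as output vectors plus attention probabilities are all assumptions, not stated in this diff:

    # Hypothetical: `layer` is a built attention layer, `theta` its weights.
    time, batch, dim = 8, 2, 16  # illustrative sizes
    query_vec = tf.zeros([time, batch, dim])  # assumed time-major layout
    source_paddings = tf.zeros([time, batch])
    # Assumption: leaving source_vecs=None means self-attention over query_vec.
    out_vec, atten_probs = layer.FProp(theta, query_vec, source_paddings)
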
