Commit 6f1da56

hyunwoongko authored and committed

Revert changes of embedding

1 parent 243672c · commit 6f1da56

File tree

9 files changed: +51 -7 lines changed

.idea/.gitignore  +8
.idea/inspectionProfiles/profiles_settings.xml  +6
.idea/misc.xml  +4
.idea/modules.xml  +8
.idea/transformer.iml  +12
.idea/vcs.xml  +6

(The generated .idea files above are not rendered by default.)

models/blocks/decoder_layer.py  +3 -3

@@ -26,10 +26,10 @@ def __init__(self, d_model, ffn_hidden, n_head, drop_prob):
         self.norm3 = LayerNorm(d_model=d_model)
         self.dropout3 = nn.Dropout(p=drop_prob)
 
-    def forward(self, dec, enc, t_mask, s_mask):
+    def forward(self, dec, enc, trg_mask, src_mask):
         # 1. compute self attention
         _x = dec
-        x = self.self_attention(q=dec, k=dec, v=dec, mask=t_mask)
+        x = self.self_attention(q=dec, k=dec, v=dec, mask=trg_mask)
 
         # 2. add and norm
         x = self.dropout1(x)
@@ -38,7 +38,7 @@ def forward(self, dec, enc, t_mask, s_mask):
         if enc is not None:
             # 3. compute encoder - decoder attention
             _x = x
-            x = self.enc_dec_attention(q=x, k=enc, v=enc, mask=s_mask)
+            x = self.enc_dec_attention(q=x, k=enc, v=enc, mask=src_mask)
 
             # 4. add and norm
             x = self.dropout2(x)
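For reference, the renamed trg_mask is the decoder-side mask that combines a padding mask with a causal (no-peek) mask. A minimal sketch of how such a tensor might be built before it reaches DecoderLayer.forward is shown below; the make_trg_mask name, the pad_idx default, and the assumption that the attention module treats True as "may attend" are illustrative, not part of this commit.

import torch

# Hypothetical helper (not in this commit): builds the tensor passed as trg_mask.
def make_trg_mask(trg, pad_idx=1):
    # padding mask: True where the token is not <pad>
    # shape: [batch_size, 1, 1, trg_len]
    pad_mask = (trg != pad_idx).unsqueeze(1).unsqueeze(2)

    # causal ("no peek") mask: position i may only attend to positions <= i
    # shape: [trg_len, trg_len]
    trg_len = trg.size(1)
    causal_mask = torch.tril(torch.ones(trg_len, trg_len, device=trg.device)).bool()

    # broadcast-combine into shape [batch_size, 1, trg_len, trg_len]
    return pad_mask & causal_mask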

models/blocks/encoder_layer.py  +2 -2

@@ -22,10 +22,10 @@ def __init__(self, d_model, ffn_hidden, n_head, drop_prob):
         self.norm2 = LayerNorm(d_model=d_model)
         self.dropout2 = nn.Dropout(p=drop_prob)
 
-    def forward(self, x, s_mask):
+    def forward(self, x, src_mask):
         # 1. compute self attention
         _x = x
-        x = self.attention(q=x, k=x, v=x, mask=s_mask)
+        x = self.attention(q=x, k=x, v=x, mask=src_mask)
 
         # 2. add and norm
         x = self.dropout1(x)
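The encoder side only needs to hide padding tokens, so the renamed src_mask carries no causal component. A minimal sketch under the same convention as above (True means the position may be attended to); make_src_mask and pad_idx are illustrative names, not taken from this diff.

import torch

# Hypothetical helper (not in this commit): builds the tensor passed as src_mask.
def make_src_mask(src, pad_idx=1):
    # shape: [batch_size, 1, 1, src_len]; True everywhere except <pad> positions
    return (src != pad_idx).unsqueeze(1).unsqueeze(2)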

models/model/encoder.py  +2 -2

@@ -25,10 +25,10 @@ def __init__(self, enc_voc_size, max_len, d_model, ffn_hidden, n_head, n_layers,
                                                   drop_prob=drop_prob)
                                      for _ in range(n_layers)])
 
-    def forward(self, x, s_mask):
+    def forward(self, x, src_mask):
         x = self.emb(x)
 
         for layer in self.layers:
-            x = layer(x, s_mask)
+            x = layer(x, src_mask)
 
         return x
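Putting the pieces together, the renamed masks would typically be produced once at the top level and threaded through Encoder.forward and the decoder. The wiring below is a hypothetical sketch, not code from this repository: TransformerSketch, the mask helpers from the sketches above, and the assumed Decoder.forward(trg, enc_out, trg_mask, src_mask) signature are all illustrative.

import torch.nn as nn

# Hypothetical wiring (not in this commit): shows where src_mask / trg_mask
# originate before Encoder.forward(x, src_mask) is called.
class TransformerSketch(nn.Module):
    def __init__(self, encoder, decoder, src_pad_idx=1, trg_pad_idx=1):
        super().__init__()
        self.encoder = encoder
        self.decoder = decoder
        self.src_pad_idx = src_pad_idx
        self.trg_pad_idx = trg_pad_idx

    def forward(self, src, trg):
        src_mask = make_src_mask(src, self.src_pad_idx)   # padding-only mask
        trg_mask = make_trg_mask(trg, self.trg_pad_idx)   # padding + causal mask
        enc_out = self.encoder(src, src_mask)             # matches Encoder.forward(x, src_mask)
        return self.decoder(trg, enc_out, trg_mask, src_mask)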

0 commit comments