1 parent 7f74c31 commit 69b372a
src/transformers/models/layoutlmv3/modeling_layoutlmv3.py
@@ -254,6 +254,8 @@ def layoutlmv3_eager_attention_forward(
         attention_scores = attention_scores + (rel_pos + rel_2d_pos) / math.sqrt(query.size(-1))
     elif module.has_relative_attention_bias and rel_pos is not None:
         attention_scores = attention_scores + rel_pos / math.sqrt(query.size(-1))
+    elif module.has_spatial_attention_bias and rel_2d_pos is not None:
+        attention_scores = attention_scores + rel_2d_pos / math.sqrt(query.size(-1))
 
     if attention_mask is not None:
         # Apply the attention mask
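For context, a minimal sketch of the bias-addition logic as it stands after this commit. The guard of the first `if` branch sits above line 254 and is not part of this hunk, so the condition shown below is an assumption inferred from the two `elif` branches; the helper function name is hypothetical and does not exist in modeling_layoutlmv3.py.

import math

def _apply_relative_position_bias(module, attention_scores, query, rel_pos=None, rel_2d_pos=None):
    # Hypothetical helper wrapping the logic from the hunk above, for illustration only.
    # Assumed guard for the branch whose body appears at (new) line 254:
    # both the 1D and the 2D (spatial) bias are configured on the module.
    if module.has_relative_attention_bias and module.has_spatial_attention_bias:
        attention_scores = attention_scores + (rel_pos + rel_2d_pos) / math.sqrt(query.size(-1))
    elif module.has_relative_attention_bias and rel_pos is not None:
        attention_scores = attention_scores + rel_pos / math.sqrt(query.size(-1))
    # Branch added by this commit: a module configured with only the spatial (2D)
    # bias previously fell through both branches, so rel_2d_pos was never added.
    elif module.has_spatial_attention_bias and rel_2d_pos is not None:
        attention_scores = attention_scores + rel_2d_pos / math.sqrt(query.size(-1))
    return attention_scores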