
Commit fe89b3b: lint fixes
dianaml0 committed Dec 23, 2022
1 parent: 3d709db
Showing 1 changed file with 1 addition and 3 deletions.
@@ -30,7 +30,6 @@
 )
 from megatron.mpu.utils import split_tensor_along_last_dim
 from megatron.model.fused_softmax import scaled_upper_triang_masked_softmax_cuda
-from megatron.mpu import get_tensor_model_parallel_world_size
 
 has_megatron_submodule = True
 except (ImportError, ModuleNotFoundError):
@@ -434,7 +433,7 @@ def backward(ctx, grad_output):
 if xf_eff_attn:
     grad_out_proj_input = grad_out_proj_input.reshape(
         seq_len, bsz, -1, head_dim
-    ).transpose(0,1)
+    ).transpose(0, 1)
     d_q, d_k, d_v = xops.memory_efficient_attention_backward(
         grad=grad_out_proj_input,
         output=out,
@@ -457,7 +456,6 @@ def backward(ctx, grad_output):
     grad_out_proj_input, q, k, v, attn_probs, seq_len, bsz, head_dim
 )
 
-
 (
     mha_layer_norm_output,
     mha_layer_norm_mean,
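For context, a short sketch (not part of the commit; plain PyTorch with made-up sizes) of what the reformatted lines do: the backward pass receives a gradient laid out as (seq_len, bsz, embed_dim), and the reshape plus transpose(0, 1) moves it into the batch-first (bsz, seq_len, num_heads, head_dim) layout that xFormers' memory-efficient attention kernels operate on. Variable names mirror the diff; the sizes and the standalone setup are assumptions.

# Sketch only: hypothetical sizes, names taken from the diff above.
import torch

seq_len, bsz, num_heads, head_dim = 16, 2, 4, 8
grad_out_proj_input = torch.randn(seq_len, bsz, num_heads * head_dim)

# Same reshape/transpose as the touched lines: split heads, then swap
# the sequence and batch dimensions to get a batch-first layout.
grad_out_proj_input = grad_out_proj_input.reshape(
    seq_len, bsz, -1, head_dim
).transpose(0, 1)  # now (bsz, seq_len, num_heads, head_dim)

assert grad_out_proj_input.shape == (bsz, seq_len, num_heads, head_dim)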
