File tree: 1 file changed, +2 −2 lines.
nemo/collections/llm/gpt/model: 1 file changed, +2 −2 lines.
@@ -176,7 +176,7 @@ def gpt_forward_step(model, batch) -> torch.Tensor:
176176def get_batch_on_this_context_parallel_rank (batch ):
177177 from megatron .core import parallel_state
178178
179- if cp_size := parallel_state .get_context_parallel_world_size () > 1 :
179+ if ( cp_size := parallel_state .get_context_parallel_world_size () ) > 1 :
180180 num_valid_tokens_in_ub = None
181181 if 'loss_mask' in batch and batch ['loss_mask' ] is not None :
182182 num_valid_tokens_in_ub = batch ['loss_mask' ].sum ()
@@ -206,7 +206,7 @@ def get_packed_seq_params(batch):
206206
207207 cu_seqlens = batch ['cu_seqlens' ].squeeze () # remove batch size dimension (mbs=1)
208208 # remove -1 "paddings" added in collate_fn
209- if cu_seqlens_argmin := batch .get ('cu_seqlens_argmin' , None ) is not None :
209+ if ( cu_seqlens_argmin := batch .get ('cu_seqlens_argmin' , None ) ) is not None :
210210 # pre-compute cu_seqlens_argmin in dataset class for perf
211211 cu_seqlens = cu_seqlens [: cu_seqlens_argmin .item ()]
212212 else :
You can’t perform that action at this time.
0 commit comments