diff options
| author | YurenHao0426 <blackhao0426@gmail.com> | 2026-02-09 12:28:55 -0600 |
|---|---|---|
| committer | YurenHao0426 <blackhao0426@gmail.com> | 2026-02-09 12:28:55 -0600 |
| commit | ef678d2e1ba70b1a9dadb78c73ed372f986aea13 (patch) | |
| tree | b90b5c53960b22a6a5498ca69fbfffad7e1832f8 /src/model/pipeline.py | |
| parent | 93d77b197d457b1fdfa7341ecd59fc460b20d6b1 (diff) | |
Fix NLL double-shift bug and head weight init
- NLL loss was shifting labels twice (olmo_labels already shifted,
then code did logits[:,:-1] vs labels[:,1:]). Fixed in 9 locations:
trainer, pipeline, olmo_graph, sanity_check, eval.
- Head U/V weights init with std=0.01 (was Kaiming ~5.7 std) so
UV^T≈0 at init, ensuring Z≈logit_bias=15 and A≈0.953.
- Updated SVD rank test to subtract logit_bias before checking.
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Diffstat (limited to 'src/model/pipeline.py')
| -rw-r--r-- | src/model/pipeline.py | 11 |
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/src/model/pipeline.py b/src/model/pipeline.py
index bbfcabf..d5ceec0 100644
--- a/src/model/pipeline.py
+++ b/src/model/pipeline.py
@@ -100,10 +100,10 @@ class DAGFormerPipeline(nn.Module):
         # logits: [batch, seq_len, vocab_size]

         # Step 3: Compute NLL (next-token prediction)
-        # Shift: logits[:, :-1] predicts labels[:, 1:]
+        # olmo_labels is already shifted (chunk[1:seq_len+1]), no additional shift needed
         nll = F.cross_entropy(
-            logits[:, :-1].contiguous().view(-1, self.vocab_size),
-            olmo_labels[:, 1:].contiguous().view(-1),
+            logits.contiguous().view(-1, self.vocab_size),
+            olmo_labels.contiguous().view(-1),
         )

         # Step 4: Sparsity regularization
@@ -130,9 +130,10 @@ class DAGFormerPipeline(nn.Module):
         A = create_all_ones_A(batch).to(olmo_ids.device)
         with torch.no_grad():
             logits = self.olmo_wrapper(olmo_ids, A)
+            # olmo_labels is already shifted, no additional shift needed
             nll = F.cross_entropy(
-                logits[:, :-1].contiguous().view(-1, self.vocab_size),
-                olmo_labels[:, 1:].contiguous().view(-1),
+                logits.contiguous().view(-1, self.vocab_size),
+                olmo_labels.contiguous().view(-1),
             )
         return nll
