
Commit f88b7b3: Minor fix

ParishadBehnam authored Apr 8, 2024
1 parent: beb25ed
Showing 1 changed file (Tutorial.md) with 1 addition and 1 deletion.

@@ -103,7 +103,7 @@ class BiLlamaForMNTP(LlamaForCausalLM):
         if attention_dropout > 0.0:  # Augmenting Llama model with attention dropout as there is no such parameter in the initialized LlamaConfig
             config.attention_dropout = attention_dropout
         LlamaPreTrainedModel.__init__(self, config)
-        self.model = LlamaBiModel(config)  # Initially, MistralModel
+        self.model = LlamaBiModel(config)  # Initially, LlamaModel
         self.vocab_size = config.vocab_size
         self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
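
For context, here is a minimal, self-contained sketch of the patched `__init__`, with assumptions labeled: the import paths are the standard Hugging Face `transformers` ones, the `attention_dropout` keyword and its `0.0` default are inferred from the snippet above, and `LlamaBiModel` (the tutorial's bidirectional variant of `LlamaModel`, defined elsewhere in Tutorial.md) is stubbed with a plain `LlamaModel` alias so the sketch runs standalone.

```python
import torch.nn as nn
from transformers import LlamaConfig
from transformers.models.llama.modeling_llama import (
    LlamaForCausalLM,
    LlamaModel,
    LlamaPreTrainedModel,
)

# Placeholder alias so this sketch runs on its own; the tutorial's real
# LlamaBiModel is a LlamaModel variant with the causal mask removed.
LlamaBiModel = LlamaModel


class BiLlamaForMNTP(LlamaForCausalLM):
    def __init__(self, config: LlamaConfig, attention_dropout: float = 0.0):
        if attention_dropout > 0.0:
            # Attach attention dropout to the config, which (per the
            # tutorial's comment) the initialized LlamaConfig does not carry.
            config.attention_dropout = attention_dropout
        # Call the grandparent initializer directly: this skips
        # LlamaForCausalLM.__init__, which would build a causal LlamaModel,
        # so the bidirectional backbone can be installed instead.
        LlamaPreTrainedModel.__init__(self, config)
        self.model = LlamaBiModel(config)  # Initially, LlamaModel
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)


if __name__ == "__main__":
    # Tiny random-weight config for a smoke test; no checkpoint is loaded.
    cfg = LlamaConfig(hidden_size=64, intermediate_size=128,
                      num_hidden_layers=2, num_attention_heads=4,
                      vocab_size=1000)
    model = BiLlamaForMNTP(cfg, attention_dropout=0.1)
    print(type(model.model).__name__)  # the swapped-in backbone
```

Bypassing `LlamaForCausalLM.__init__` in favor of `LlamaPreTrainedModel.__init__` is what makes the corrected comment meaningful: the backbone assigned to `self.model` replaces the causal `LlamaModel`, not `MistralModel`, and the old comment was presumably a leftover from a Mistral version of the same class.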

