Add base model class and expose weight decay as parameter #6

Merged (3 commits) on Aug 30, 2023
2 changes: 1 addition & 1 deletion setup.cfg
@@ -29,7 +29,7 @@ install_requires =
scipy>=1.0.0
sentencepiece>=0.1.95
tensorboard>=2.2.0
-torch<=1.13.1
+torch>=1.13.1
torchmetrics>=0.7.0
transformers>=4.22.0
typing_extensions>=3.7.4.3
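The pin flip above turns 1.13.1 from a ceiling into a floor. A minimal sketch, not part of the PR, for verifying that the installed torch satisfies the new requirement (the `packaging` package is an assumed helper here):

```python
# Hypothetical sanity check, not part of this PR: confirm the installed torch
# meets the new floor declared in setup.cfg (torch>=1.13.1).
import torch
from packaging.version import Version

installed = Version(torch.__version__.split("+")[0])  # strip local tags like "+cu118"
assert installed >= Version("1.13.1"), (
    f"torch {torch.__version__} is older than the 1.13.1 minimum required here"
)
```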
8 changes: 5 additions & 3 deletions src/gt4sd_trainer/hf_pl/core.py
@@ -213,11 +213,13 @@ class LanguageModelingModelArguments:
default=0.5,
metadata={"help": "The learning rate decay."},
)
+weight_decay: float = field(
+default=0.01,
+metadata={"help": "Weight decay (L2)."},
+)
cache_dir: Union[str, None] = field(
default=None,
-metadata={
-"help": "Where do you want to store the pretrained models downloaded from huggingface.co."
-},
+metadata={"help": "Cache directory for HF models."},
)


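A hedged usage sketch of the new field: only `weight_decay` and `cache_dir` are taken from the hunk above, and it assumes the remaining fields of `LanguageModelingModelArguments` all carry defaults, so the dataclass can be built with keyword overrides alone:

```python
# Sketch only: override the new weight_decay field (default 0.01) when
# building the model arguments; every other field keeps its default.
from gt4sd_trainer.hf_pl.core import LanguageModelingModelArguments

model_args = LanguageModelingModelArguments(
    weight_decay=0.05,          # L2 penalty later handed to AdamW
    cache_dir="/tmp/hf_cache",  # where pretrained HF models are cached
)
print(model_args.weight_decay)  # 0.05
```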
85 changes: 53 additions & 32 deletions src/gt4sd_trainer/hf_pl/models/core.py
@@ -24,10 +24,11 @@
"""Model for Language Modeling."""

import logging
-from typing import Dict, Type, Union
+from typing import Any, Dict, Type, Union

import sentencepiece as _sentencepiece
import pytorch_lightning as pl
+import torch
import torch.optim as optim
from torch import Tensor
from transformers import (
@@ -47,49 +48,25 @@
logger.addHandler(logging.NullHandler())


-class LMModule(pl.LightningModule):
-"""Pytorch lightning model for LM training."""
+class BaseLightningModule(pl.LightningModule):
+"""Pytorch lightning base model."""

def __init__(
self,
-model_args: Dict[str, Union[float, int, str]],
+model_args: Dict[str, Any],
) -> None:
"""Construct an LM lightning module.
"""Construct a Pytorch lightning base model.
Args:
model_args: model's arguments.
"""
super().__init__()

self.model_args = model_args

-self.model: AutoModel
-self.tokenizer: AutoTokenizer
-
-self.cache_dir = None
-if "cache_dir" in model_args:
-self.cache_dir = model_args["cache_dir"]
-
-self.init_model()
-
-def init_model(self) -> None:
-"""Initialize an AutoModel."""
-
-if self.model_args["model_name_or_path"] is not None:
-self.model = AutoModel.from_pretrained(
-self.model_args["model_name_or_path"],
-cache_dir=self.cache_dir,
-)
-else:
-config = AutoConfig.from_pretrained(
-self.model_args["model_config_name"], cache_dir=self.cache_dir
-)
-
-self.model = AutoModel.from_config(config)
-
-logger.info("Training from scratch")
+self.model: torch.nn.Module

def forward(self, x: Tensor) -> Tensor: # type: ignore
"""Forward pass on Transformer model.
"""Forward pass.
Args:
x: tensor of shape (batch_size, seq_length) containing the input_ids.
Returns:
Expand All @@ -114,7 +91,11 @@ def configure_optimizers(
if not isinstance(self.model_args["lr_decay"], float):
raise ValueError("Learning rate decay rate should be float")

-optimizer = optim.AdamW(self.parameters(), lr=self.model_args["lr"])
+optimizer = optim.AdamW(
+self.parameters(),
+lr=self.model_args["lr"],
+weight_decay=self.model_args["weight_decay"],
+)

scheduler = optim.lr_scheduler.StepLR(optimizer, 1, self.model_args["lr_decay"])

@@ -152,6 +133,46 @@ def validation_step(self, batch: Dict[str, Tensor], batch_idx: int) -> Tensor:
return loss


+class LMModule(BaseLightningModule):
+"""Pytorch lightning model for LM training."""
+
+def __init__(
+self,
+model_args: Dict[str, Union[float, int, str]],
+) -> None:
+"""Construct an LM lightning module.
+Args:
+model_args: model's arguments.
+"""
+super().__init__(model_args)
+
+self.model: AutoModel
+self.tokenizer: AutoTokenizer
+
+self.cache_dir = None
+if "cache_dir" in model_args:
+self.cache_dir = model_args["cache_dir"]
+
+self.init_model()
+
+def init_model(self) -> None:
+"""Initialize an AutoModel."""
+
+if self.model_args["model_name_or_path"] is not None:
+self.model = AutoModel.from_pretrained(
+self.model_args["model_name_or_path"],
+cache_dir=self.cache_dir,
+)
+else:
+config = AutoConfig.from_pretrained(
+self.model_args["model_config_name"], cache_dir=self.cache_dir
+)
+
+self.model = AutoModel.from_config(config)
+
+logger.info("Training from scratch")
+
+
class MLMModule(LMModule):
"""Pytorch lightning model for MLM training."""

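To summarize the refactor: `BaseLightningModule` now owns the optimizer setup, while `LMModule` (and `MLMModule`) only add the Hugging Face model and tokenizer handling, so the new `weight_decay` entry flows from `model_args` into `AdamW` via the shared `configure_optimizers`. A minimal sketch of how the module might be instantiated; the checkpoint name and hyperparameter values are illustrative, not taken from the PR:

```python
# Hedged sketch (illustrative values): weight_decay in model_args is read by
# BaseLightningModule.configure_optimizers and forwarded to torch.optim.AdamW.
from gt4sd_trainer.hf_pl.models.core import LMModule

module = LMModule(
    model_args={
        "model_name_or_path": "bert-base-uncased",  # any pretrained HF checkpoint
        "lr": 5e-5,
        "lr_decay": 0.5,
        "weight_decay": 0.01,  # the newly exposed parameter
    }
)
# Builds the AdamW optimizer and StepLR scheduler shown in the diff above.
optimizers = module.configure_optimizers()
```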