Commit
Update `get_unpad_data` patching for multipack (#2013)
* Update `get_unpad_data` patching for multipack
* Update src/axolotl/utils/models.py
* Update src/axolotl/utils/models.py
* Add test case

---------

Co-authored-by: Wing Lian <[email protected]>
Co-authored-by: Wing Lian <[email protected]>
1 parent fd70eec · commit 0c8b1d8
Showing 3 changed files with 89 additions and 58 deletions.
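For context on the change itself: with sample packing (multipack), several short examples are packed into a single row, so the flash-attention unpadding helper must recover per-sample boundaries instead of treating each row as one sequence. The sketch below is a minimal illustration of that idea, not the code from this commit; it assumes the packing collator writes per-sample sequence IDs (1, 2, 3, ...) into attention_mask, with 0 for padding, and mirrors the (indices, cu_seqlens, max_seqlen_in_batch) return contract of transformers' `_get_unpad_data`.

    import torch
    import torch.nn.functional as F

    def get_unpad_data(attention_mask: torch.Tensor):
        # attention_mask holds sequence IDs per token: 0 = padding,
        # 1..k = the k packed sub-sequences in that row.
        max_num = int(attention_mask.max().item())
        # Token count for each (row, sequence-ID) pair.
        counts = torch.stack(
            [(attention_mask == i).sum(dim=-1) for i in range(1, max_num + 1)],
            dim=-1,
        ).to(torch.int32)
        seqlens_in_batch = counts.flatten()
        seqlens_in_batch = seqlens_in_batch[seqlens_in_batch > 0]
        # Flat indices of non-padding tokens, cumulative lengths, and max
        # length -- the triple flash attention's varlen kernels expect.
        indices = torch.nonzero(attention_mask.flatten()).flatten()
        cu_seqlens = F.pad(
            torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.int32), (1, 0)
        )
        return indices, cu_seqlens, int(seqlens_in_batch.max().item())

Patching then amounts to pointing transformers' flash-attention unpadding helper at a multipack-aware version like this; which attribute to override varies by transformers release, which is part of what this commit adjusts in src/axolotl/utils/models.py.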
@@ -0,0 +1,66 @@
"""
E2E tests for llama
"""

import logging
import os
import unittest
from pathlib import Path

from axolotl.cli import load_datasets
from axolotl.common.cli import TrainerCliArgs
from axolotl.train import train
from axolotl.utils.config import normalize_config
from axolotl.utils.dict import DictDefault

from .utils import with_temp_dir

LOG = logging.getLogger("axolotl.tests.e2e")
os.environ["WANDB_DISABLED"] = "true"


class TestLlama(unittest.TestCase):
    """
    Test case for Llama models
    """

    @with_temp_dir
    def test_fft_trust_remote_code(self, temp_dir):
        # pylint: disable=duplicate-code
        cfg = DictDefault(
            {
                "base_model": "JackFram/llama-68m",
                "tokenizer_type": "LlamaTokenizer",
                "trust_remote_code": True,
                "sequence_len": 512,
                "val_set_size": 0.1,
                "special_tokens": {
                    "unk_token": "<unk>",
                    "bos_token": "<s>",
                    "eos_token": "</s>",
                },
                "datasets": [
                    {
                        "path": "mhenrichsen/alpaca_2k_test",
                        "type": "alpaca",
                    },
                ],
                "num_epochs": 1,
                "micro_batch_size": 8,
                "gradient_accumulation_steps": 1,
                "output_dir": temp_dir,
                "learning_rate": 0.00001,
                "optimizer": "adamw_bnb_8bit",
                "lr_scheduler": "cosine",
                "flash_attention": True,
                "sample_packing": True,
                "bf16": True,
                "save_safetensors": True,
            }
        )
        normalize_config(cfg)
        cli_args = TrainerCliArgs()
        dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

        train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
        assert (Path(temp_dir) / "model.safetensors").exists()
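Assuming the new file lands alongside the repository's other e2e tests, it can be run directly with pytest, for example:

    pytest tests/e2e/ -k test_fft_trust_remote_code

(the path here is illustrative; the commit message only states that a test case was added). The final assertion verifies that `save_safetensors: True` actually wrote model.safetensors into the temporary output directory, which exercises the flash-attention + sample-packing path this commit patches.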