command: |
  # Install PEFT, then launch training with Composer.
  pip install peft
  composer train.py /mnt/config/parameters.yaml
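# Official LLM Foundry image; the tag pins PyTorch 2.3.0, CUDA 12.1, and FlashAttention 2.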
image: mosaicml/llm-foundry:2.3.0_cu121_flash2-latest
compute:
parameters:
  tokenizer_name: meta-llama/Meta-Llama-3-70B-Instruct
  max_seq_len: 8000
  global_seed: 17
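  # Assumption: the train script forwards this value to the CUDA caching allocator
  # (PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:512) to reduce memory fragmentation.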
  max_split_size_mb: 512
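  # hf_causal_lm wraps a Hugging Face causal LM; with "mixed" init, rank 0 loads the
  # pretrained weights and the other ranks use meta tensors (intended for FSDP runs).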
  model:
    name: hf_causal_lm
    init_device: mixed
    pretrained_model_name_or_path: "meta-llama/Meta-Llama-3-70B-Instruct"
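  # Tokenizer options: kwargs is left empty here; model_max_length is often set to ${max_seq_len}.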
  tokenizer:
    kwargs: {}
  loggers: