diff --git a/wit/train.py b/wit/train.py
index 3109c33..54eb5f6 100644
--- a/wit/train.py
+++ b/wit/train.py
@@ -17,32 +17,49 @@ pretrain_model_name = None  # "qwen/Qwen-1_8B-Chat"
 learning_rate = 0.0001
 use_tril_attention_mask = None
 precision = "32-true"  # "precision:bf16-mixed,16-mixed,32-true"
-train_batch_size = 16
+train_batch_size = 32
 val_batch_size = 32
 num_proc = 8
 max_epochs = 1000
 strategy = "auto"
 resume_from_ckpt_path = None
 seed = 42
-vocab_size = 256
+vocab_size = 1024
+level_ratio = 4
+
+hidden_size = 256  # 128 1024 2048 32
+num_attention_heads = 8  # 8 8 16
+num_hidden_layers = 1  # 6 12 24 3
+
+name = "vocab_level_hidden_head_layer"
+version = (
+    str(vocab_size)
+    + "_"
+    + str(level_ratio)
+    + "_"
+    + str(hidden_size)
+    + "_"
+    + str(num_attention_heads)
+    + "_"
+    + str(num_hidden_layers)
+)
 
 if __name__ == "__main__":
     torch.manual_seed(seed)
 
     config = ModelConfig()
     config.vocab_size = vocab_size
-    config.hidden_size = 1024  # 128 1024 2048 32
-    config.num_hidden_layers = 12  # 6 12 24 3
-    config.num_attention_heads = 16  # 8 8 16
+    config.hidden_size = hidden_size
+    config.num_hidden_layers = num_hidden_layers
+    config.num_attention_heads = num_attention_heads
     lit_module = LitModule(
         pretrain_model_name, learning_rate, config, use_tril_attention_mask
     )
     tokenizer = QWenTokenizer("./wit_b64.tiktoken", "./wit_char.tiktoken")
 
-    level_ratio = 6
-    start = vocab_size * level_ratio * level_ratio
+    start = vocab_size * level_ratio * level_ratio * level_ratio * level_ratio
     end = start * level_ratio
-    size = end * level_ratio
+    size = start + start
     raw_dataset = MeaningDataset(start, end, size, vocab_size, level_ratio)
     train_dataset, val_dataset = raw_dataset.Split(0.95)
     train_dataloader = BatchGroupMeaningDataloader(train_dataset, train_batch_size)
@@ -57,7 +74,7 @@ if __name__ == "__main__":
         accelerator="cuda",
         devices=[0, 1],
         precision=precision,
-        logger=TBLogger("./", default_hp_metric=False),
+        logger=TBLogger("./log/", name=name, version=version, default_hp_metric=False),
         strategy=strategy,
         max_epochs=max_epochs,
     )
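
Reviewer note (not part of the diff): a minimal sanity-check sketch of what the new constants evaluate to. It assumes `MeaningDataset` interprets `start`/`end` as the meaning-id range and `size` as a sample count, and that `TBLogger` wraps Lightning's `TensorBoardLogger`; neither is visible in this diff.

```python
# Hypothetical sanity check mirroring the new constants above; not project code.
vocab_size = 1024
level_ratio = 4
hidden_size, num_attention_heads, num_hidden_layers = 256, 8, 1

# New dataset range: start = vocab_size * level_ratio**4, end = start * level_ratio,
# size = 2 * start (assumption: `size` is the number of generated samples).
start = vocab_size * level_ratio * level_ratio * level_ratio * level_ratio
end = start * level_ratio
size = start + start
assert (start, end, size) == (262_144, 1_048_576, 524_288)

# With name/version now passed to the logger, each configuration gets its own
# TensorBoard run directory instead of everything landing in "./".
version = "_".join(
    str(v)
    for v in (vocab_size, level_ratio, hidden_size, num_attention_heads, num_hidden_layers)
)
assert version == "1024_4_256_8_1"
# -> logs under ./log/vocab_level_hidden_head_layer/1024_4_256_8_1 (assuming
#    TensorBoardLogger's save_dir/name/version layout)
```

Encoding the hyperparameters into the version string makes each run directory self-describing, so sweeps over vocab size, level ratio, and model shape no longer overwrite one another's logs.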