import os
import time
import argparse
import random
import shutil

import numpy as np
import mindspore
from mindspore import context
from mindspore.common import set_seed
from mindspore.train.serialization import load_checkpoint, load_param_into_net
import mindspore.train.summary as summary

# NOTE: save_checkpoint is assumed to be the project's own helper in utils.py;
# the call at the bottom of this script passes the optimizer and metrics,
# which mindspore.save_checkpoint does not accept.
from utils import load_config, save_checkpoint
from dataset import get_crohme_dataset
from models.CAN import CAN
from training import train, eval  # `eval` shadows the builtin; kept to match the repo


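# CLI: --dataset selects the dataset (only CROHME is supported for now);
# --check exercises the pipeline without writing summaries or checkpoints.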
parser = argparse.ArgumentParser(description='model training')
parser.add_argument('--dataset', default='CROHME', type=str, help='dataset name')
parser.add_argument('--check', action='store_true', help='dry run: skip summary logging and checkpoint saving')
args = parser.parse_args()

if not args.dataset:
    print('Please provide the dataset name')
    exit(-1)

if args.dataset == 'CROHME':
    config_file = 'config.yaml'
else:
    print(f'Unsupported dataset: {args.dataset}')
    exit(-1)

- """Load config file"""
- params = load_config(config_file)
-
- """Set random seed"""
- set_seed(params['seed'])
- random.seed(params['seed'])
- np.random.seed(params['seed'])
-
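# GRAPH_MODE compiles the network into a static graph before execution;
# "Ascend" targets Huawei NPUs.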
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
params['device'] = 'Ascend'

if args.dataset == 'CROHME':
    train_loader, eval_loader = get_crohme_dataset(params)

model = CAN(params)
now = time.strftime("%Y-%m-%d-%H-%M", time.localtime())
model.name = f'{params["experiment"]}_{now}_decoder-{params["decoder"]["net"]}'

print(model.name)

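# With --check, nothing is logged; otherwise SummaryRecord writes
# MindInsight-readable summaries under log_dir.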
if args.check:
    writer = None
else:
    writer = summary.SummaryRecord(f'{params["log_dir"]}/{model.name}')

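# The optimizer class is resolved by name from the config, so the named class must
# accept an `epsilon` keyword (e.g. mindspore.nn.Adadelta). Hypothetical config values:
#   optimizer: Adadelta
#   lr: 1.0
#   eps: 1e-6
#   weight_decay: 1e-4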
optimizer = getattr(mindspore.nn, params['optimizer'])(model.trainable_params(), float(params['lr']),
                                                       epsilon=float(params['eps']),
                                                       weight_decay=float(params['weight_decay']))

if params['finetune']:
    print('Loading pre-trained model weights')
    print(f'Pre-trained weights path: {params["checkpoint"]}')
    param_dict = load_checkpoint(params['checkpoint'])
    load_param_into_net(model, param_dict)

if not args.check:
    os.makedirs(os.path.join(params['checkpoint_dir'], model.name), exist_ok=True)
    # Keep a copy of the exact config used for this run next to its checkpoints.
    shutil.copyfile(config_file, os.path.join(params['checkpoint_dir'], model.name, model.name) + '.yaml')

- """Training on CROHME dataset"""
- if args.dataset == 'CROHME':
- min_score, init_epoch = 0, 0
-
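    # Train every epoch; evaluate from valid_start on, and checkpoint whenever
    # ExpRate improves (once save_start is reached and --check is not set).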
    for epoch in range(init_epoch, params['epochs']):
        train_loss, train_word_score, train_exprate = train(params, model, optimizer, epoch, train_loader, writer=writer)

        if epoch >= params['valid_start']:
            eval_loss, eval_word_score, eval_exprate = eval(params, model, epoch, eval_loader, writer=writer)
            print(f'Epoch: {epoch+1} loss: {eval_loss:.4f} word score: {eval_word_score:.4f} ExpRate: {eval_exprate:.4f}')
            if eval_exprate > best_score and not args.check and epoch >= params['save_start']:
                best_score = eval_exprate
                save_checkpoint(model, optimizer, eval_word_score, eval_exprate, epoch + 1,
                                optimizer_save=params['optimizer_save'], path=params['checkpoint_dir'])