|
- import unittest
- import numpy as np
- import mindspore
- from mindspore import Tensor, context
- from src.layers_mindspore import MemoryLayer
-
-
class Testlayer(unittest.TestCase):
    """Smoke tests for MemoryLayer under both MindSpore execution modes.

    Each test configures the execution context, runs one forward pass of a
    ``mem_method='raw'`` MemoryLayer on random inputs, and checks that an
    output is produced.
    """

    # BERT-large style configuration shared by every test case.
    _BERT_CONFIG = {
        "attention_probs_dropout_prob": 0.1,
        "directionality": "bidi",
        "hidden_act": "gelu",
        "hidden_dropout_prob": 0.1,
        "hidden_size": 1024,
        "initializer_range": 0.02,
        "intermediate_size": 4096,
        "max_position_embeddings": 512,
        "num_attention_heads": 16,
        "num_hidden_layers": 24,
        "pooler_fc_size": 768,
        "pooler_num_attention_heads": 12,
        "pooler_num_fc_layers": 3,
        "pooler_size_per_head": 128,
        "pooler_type": "first_token_transform",
        "type_vocab_size": 2,
        "vocab_size": 28996,
    }

    def _run_memory_raw(self):
        """Build a ``mem_method='raw'`` MemoryLayer and run one forward pass.

        Returns:
            The layer output tensor, so callers can inspect its shape.
        """
        memory_raw = MemoryLayer(bert_size=1024, bert_config=self._BERT_CONFIG,
                                 concept_size=49, mem_emb_size=100,
                                 mem_method='raw')
        # Cast to float32 explicitly: np.random.randn produces float64, and
        # the two original tests disagreed on whether to cast (the graph-mode
        # test did not). float32 is what the dynamic test used — presumably
        # required on the Ascend target; TODO confirm against MemoryLayer.
        bert_output = Tensor(np.random.randn(4, 384, 1024), mindspore.float32)
        memory_embs = Tensor(np.random.randn(4, 384, 49, 100), mindspore.float32)
        mem_length = Tensor(np.random.randn(4, 384, 1), mindspore.float32)
        return memory_raw(bert_output, memory_embs, mem_length)

    def test_memory_static_mode(self):
        """Forward pass in static-graph (GRAPH_MODE) execution on Ascend."""
        context.set_context(mode=context.GRAPH_MODE, device_target='Ascend')
        output_raw = self._run_memory_raw()
        self.assertIsNotNone(output_raw)
        print(output_raw.shape)

    def test_memory_dynamic_mode(self):
        """Forward pass in eager (PYNATIVE_MODE) execution on Ascend."""
        context.set_context(mode=context.PYNATIVE_MODE, device_target='Ascend')
        output_raw = self._run_memory_raw()
        self.assertIsNotNone(output_raw)
        print(output_raw.shape)
|