import os
import time

import numpy as np

import mindspore
import mindspore.nn as nn
import mindspore.ops as ops
from mindspore import Tensor, context
from mindspore.ops import operations as P

# Select the Ascend device from the environment, falling back to device 7.
device_id = int(os.getenv('DEVICE_ID', '7'))

context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend",
                    save_graphs=False, device_id=device_id)
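
# Compare the latency of ops.TopK on the device with a NumPy argsort on the host
# when selecting the `idx` largest entries of a flattened random tensor.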
def max_val(idx):
    uniformreal = ops.UniformReal(seed=2)
    a = uniformreal((160, 160))
    print("a.shape:", a.shape)

    # Device path: top-k over the flattened tensor.
    t1 = time.time()
    topk = ops.TopK()
    topk(a.view(-1), idx)
    t2 = time.time()
    print("topk time:", t2 - t1)

    # Host path: copy to NumPy and take the indices of the largest `idx` values.
    raw_loss = a.reshape(-1).asnumpy()
    top_idx_np = raw_loss.argsort()[::-1][0:idx]
    top_idx = np.ascontiguousarray(top_idx_np).astype(np.int32)
    print("np time", time.time() - t2)

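
# Small network that computes the loss two ways: BinaryCrossEntropy applied to
# sigmoid(logits), and the fused SigmoidCrossEntropyWithLogits on raw logits.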
class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.sigmoid = P.Sigmoid()
        self.cross_entropy = P.SigmoidCrossEntropyWithLogits()
        self.binary_cross_entropy = ops.BinaryCrossEntropy()

    def construct(self, logits, labels, weight):
        result1 = self.binary_cross_entropy(self.sigmoid(logits), labels, weight)
        result2 = self.cross_entropy(logits, labels)
        return result1, result2

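# Runs Net on a small hand-written example and prints both loss values for comparison.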
def test_SigmoidCrossEntropyWithLogits(net):
    input1 = Tensor([-0.1933, -0.6950, 0.3137], mindspore.float32)
    target = Tensor([1., 1., 1.], mindspore.float32)
    weight = Tensor(np.array([1, 1, 1]), mindspore.float32)
    loss1, loss2 = net(input1, target, weight)
    print(loss1, loss2)

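# Run the TopK benchmark three times with k=23440 on the 160x160 (25600-element)
# tensor; the first call typically includes operator compilation/warm-up, so the
# later runs are more representative.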
if __name__ == '__main__':
    # net = Net()
    # test_SigmoidCrossEntropyWithLogits(net)
    # max_val(5)
    # print("===================success=============================")
    max_val(23440)
    max_val(23440)
    max_val(23440)
    # print("=========end=============")