|
- import numpy as np
-
- import mindspore
- import mindspore.nn as nn
- from mindspore import Tensor
- from mindspore.ops import operations as P
-
-
class FeaturesLinear(nn.Cell):
    """First-order (linear) term of a factorization-machine style model.

    A single embedding table of size ``sum(field_dims)`` holds one weight
    per categorical value across all fields; per-field index offsets map
    each field's local index into that shared table.

    Args:
        field_dims: iterable of per-field vocabulary sizes.
        output_dim: width of the linear term (default 1, a scalar score).
    """

    def __init__(self, field_dims, output_dim=1):
        super().__init__()
        self.fc = nn.Embedding(int(sum(field_dims)), output_dim)
        # Fix: the bias was a plain Tensor, which MindSpore treats as a
        # frozen constant — it was never registered with the optimizer and
        # never trained. Parameter makes it a learnable weight while keeping
        # the same zero initialization and float32 dtype.
        self.bias = mindspore.Parameter(
            Tensor(np.zeros((output_dim,)), dtype=mindspore.float32),
            name='bias')
        # offsets[0, i] = start index of field i inside the shared table
        # (cumulative sum of the preceding fields' vocabulary sizes).
        self.offsets = Tensor(
            np.array((0, *np.cumsum(field_dims)[:-1]), dtype=np.int32)[np.newaxis, :])
        self.sum = P.ReduceSum()

    def construct(self, x):
        """Compute the linear term.

        Args:
            x: integer tensor of local field indices — assumed shape
               (batch, num_fields); TODO confirm against callers.

        Returns:
            Tensor of shape (batch, output_dim): per-field weights summed
            over the field axis, plus the learnable bias.
        """
        x = x + self.offsets
        return self.sum(self.fc(x), 1) + self.bias
-
-
class FeaturesEmbedding(nn.Cell):
    """Shared dense embedding over all categorical fields.

    Every field's values live in one table of size ``sum(field_dims)``;
    a per-field offset translates each field's local index into its slot
    range within that table.

    Args:
        field_dims: iterable of per-field vocabulary sizes.
        embed_dim: dimensionality of each embedding vector.
    """

    def __init__(self, field_dims, embed_dim):
        super().__init__()
        vocab_size = int(sum(field_dims))
        self.embedding = nn.Embedding(vocab_size, embed_dim)
        # Start index of each field = cumulative size of all earlier fields.
        starts = np.concatenate(([0], np.cumsum(field_dims)[:-1])).astype(np.int32)
        self.offsets = Tensor(starts[np.newaxis, :])

    def construct(self, x):
        """Look up embeddings for globally-offset indices.

        Args:
            x: integer tensor of local field indices — assumed shape
               (batch, num_fields); TODO confirm against callers.

        Returns:
            Tensor of shape x.shape + (embed_dim,).
        """
        return self.embedding(x + self.offsets)
-
-
class FactorizationMachine(nn.Cell):
    """Second-order pairwise interaction term of a factorization machine.

    Uses the standard FM identity
        sum_{i<j} <v_i, v_j> = 0.5 * ((sum_i v_i)^2 - sum_i v_i^2),
    computed along axis 1 (the field axis) of the input.

    Args:
        reduce_sum: when True, additionally sum over the embedding axis
            (with the dim kept), yielding a single interaction score.
    """

    def __init__(self, reduce_sum=True):
        super().__init__()
        self.reduce_sum = reduce_sum
        self.sum = P.ReduceSum()
        self.sum_keep = P.ReduceSum(keep_dims=True)

    def construct(self, x):
        """Compute pairwise interactions.

        Args:
            x: embedding tensor — assumed shape
               (batch, num_fields, embed_dim); TODO confirm.

        Returns:
            (batch, 1) when ``reduce_sum`` is True, else (batch, embed_dim).
        """
        field_total = self.sum(x, 1)
        # (sum of vectors)^2 minus sum of squared vectors, per the identity.
        interaction = field_total * field_total - self.sum(x * x, 1)
        if self.reduce_sum:
            interaction = self.sum_keep(interaction, 1)
        return 0.5 * interaction
|