# New EM — compresses better.
class EmAdd(paddle.nn.Layer):
    """Compressed embedding layer.

    Instead of one big ``(voc_size, hidden_size)`` table, each integer id is
    decomposed into base-``hidden_size`` digits, and every digit position gets
    its own small ``(hidden_size, hidden_size)`` embedding table.  The number
    of tables therefore grows logarithmically with the vocabulary size, which
    is the "compression" referred to in the original note.

    Args:
        voc_size: Size of the vocabulary the ids are drawn from (used only to
            decide how many digit tables are needed).  Default 9999.
        hidden_size: Embedding dimension, and also the base of the digit
            decomposition.  Default 256.
    """

    def __init__(self, voc_size=9999, hidden_size=256):
        super().__init__()
        self.hidden_size = hidden_size
        # Count the base-`hidden_size` digit tables needed.  This reproduces
        # the original while/break loop exactly: one table always exists, and
        # one more is added for every division of voc_size by hidden_size
        # that leaves a non-zero quotient (so an exact power such as
        # voc_size == hidden_size still yields the same table count as the
        # original code — the parameter layout is unchanged).
        num_tables = 1
        voc_size //= hidden_size
        while voc_size:
            num_tables += 1
            voc_size //= hidden_size
        self.em = paddle.nn.LayerList(
            [paddle.nn.Embedding(hidden_size, hidden_size) for _ in range(num_tables)]
        )

    def forward(self, em_add_x):
        """Embed the integer ids in ``em_add_x``.

        Each base-``hidden_size`` digit of the id is looked up in its own
        table; stages are chained through ``sin(stage_output + previous)``
        and the final stage's activation is returned.

        Args:
            em_add_x: tensor of integer token ids
                (assumes an integer dtype accepted by paddle.nn.Embedding —
                TODO confirm against callers).

        Returns:
            Tensor of shape ``em_add_x.shape + [hidden_size]`` — presumably;
            verify against paddle.nn.Embedding's output convention.
        """
        add = 0
        for position, table in enumerate(self.em, start=1):
            # Extract the `position`-th least-significant digit of the id in
            # base hidden_size: (x % base**position) // base**(position - 1).
            digit = em_add_x % self.hidden_size ** position // self.hidden_size ** (position - 1)
            add = paddle.sin(table(digit) + add)
        return add