Coverage for hierarchicalsoftmax/layers.py: 100.00%
15 statements
« prev ^ index » next coverage.py v7.8.0, created at 2025-07-02 01:49 +0000
1from torch import nn
3from .nodes import SoftmaxNode
4from .tensors import LazyLinearTensor
class HierarchicalSoftmaxLayerError(RuntimeError):
    """Raised when a hierarchical softmax layer is constructed with invalid arguments."""
class HierarchicalSoftmaxLayerMixin:
    """
    Mixin that sizes a (lazy) linear layer from a SoftmaxNode hierarchy.

    The number of output features is always derived from ``root.layer_size``;
    callers must never pass ``out_features`` themselves.
    """
    def __init__(self, root: SoftmaxNode, out_features=None, **kwargs):
        """
        Args:
            root: Root of the classification hierarchy; its ``layer_size``
                determines this layer's ``out_features``.
            out_features: Must be left as ``None``. The parameter exists only
                so an explicit value can be detected and rejected clearly.
            **kwargs: Forwarded to the underlying ``nn.Linear``/``nn.LazyLinear``
                constructor via ``super().__init__``.

        Raises:
            HierarchicalSoftmaxLayerError: If ``out_features`` is given explicitly.
        """
        self.root = root

        if out_features is not None:
            # Fix: original message read "and not `out_features` argument should
            # be given", which was ungrammatical; the intent is that no explicit
            # out_features may be supplied.
            raise HierarchicalSoftmaxLayerError(
                "Trying to create a HierarchicalSoftmaxLinearLayer by explicitly setting `out_features`. "
                "This value should be determined from the hierarchy tree and no `out_features` argument should be given to HierarchicalSoftmaxLinearLayer."
            )

        # The hierarchy dictates the output width of the linear layer.
        super().__init__(out_features=self.root.layer_size, **kwargs)

    def forward(self, x) -> LazyLinearTensor:
        """Defer the projection: wrap the input with this layer's weight and bias
        in a LazyLinearTensor rather than computing the full matmul eagerly."""
        return LazyLinearTensor(x, weight=self.weight, bias=self.bias)
class HierarchicalSoftmaxLinear(HierarchicalSoftmaxLayerMixin, nn.Linear):
    """
    A linear layer intended as a model's final layer, producing unnormalized scores for HierarchicalSoftmaxLoss.

    The `out_features` value is set internally from root.layer_size and cannot be given as an argument.
    """
class HierarchicalSoftmaxLazyLinear(HierarchicalSoftmaxLayerMixin, nn.LazyLinear):
    """
    A lazy linear layer intended as a model's final layer, producing unnormalized scores for HierarchicalSoftmaxLoss.

    The `out_features` value is set internally from root.layer_size and cannot be given as an argument.
    The `in_features` will be inferred from the previous layer at runtime.
    """