from akida.core import Layer, LayerType, LayerParams
"""This represents a Dense or Linear neural layer.
The FullyConnected layer accepts 1-bit, 2-bit or 4-bit input tensors.
The FullyConnected can be configured with 1-bit, 2-bit or 4-bit weights.
It multiplies the inputs by its internal unit weights, returning a 4D
tensor of values whose first dimension is the number of samples and the
last dimension represents the number of units.
It can optionally apply a step-wise ReLU activation to its outputs.
units (int): number of units.
name (str, optional): name of the layer.
weights_bits (int, optional): number of bits used to quantize weights.
activation (bool, optional): enable or disable activation
act_bits (int, optional): number of bits used to
quantize the neuron response.

    def __init__(self, units, name="", weights_bits=1, activation=True,
                 act_bits=1):
        try:
            params = LayerParams(
                LayerType.FullyConnected, {
                    "units": units,
                    "weights_bits": weights_bits,
                    "activation": activation,
                    "act_bits": act_bits
                })
            # Call parent constructor to initialize C++ bindings
            # Note that we invoke directly __init__ instead of using super, as
            # specified in pybind documentation
            Layer.__init__(self, params, name)
        except BaseException:
            self = None
            raise
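

# Illustrative usage (a minimal sketch): builds a small model ending in a
# FullyConnected layer with 2-bit weights and no output activation. The
# `Model` and `InputData` imports, their argument names and the chosen input
# shape are assumptions about the surrounding akida API, not something
# defined in this file.
if __name__ == "__main__":
    from akida import Model, InputData

    model = Model()
    # 4-bit input tensor with a flat spatial shape, as expected by a
    # FullyConnected layer (assumed InputData signature).
    model.add(InputData(input_shape=(1, 1, 64), input_bits=4))
    # 10 units, 2-bit weights, activation disabled: the layer returns the
    # raw (unactivated) potentials.
    model.add(FullyConnected(units=10, name="fc", weights_bits=2,
                             activation=False))
    model.summary()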