chop.nn.quantized
- chop.nn.quantized.functional
- chop.nn.quantized.functional.add
- chop.nn.quantized.functional.gelu
- chop.nn.quantized.functional.matmul
- chop.nn.quantized.functional.mult
- chop.nn.quantized.functional.relu
- chop.nn.quantized.functional.selu
- chop.nn.quantized.functional.softermax
- chop.nn.quantized.functional.softplus
- chop.nn.quantized.functional.softsign
- chop.nn.quantized.functional.sub
- chop.nn.quantized.functional.tanh
- chop.nn.quantized.modules
- chop.nn.quantized.modules.attention
- chop.nn.quantized.modules.attention_head
- chop.nn.quantized.modules.batch_norm1d
- chop.nn.quantized.modules.batch_norm2d
- chop.nn.quantized.modules.conv1d
- chop.nn.quantized.modules.conv2d
- chop.nn.quantized.modules.gelu
- chop.nn.quantized.modules.group_norm
- chop.nn.quantized.modules.instance_norm2d
- chop.nn.quantized.modules.layer_norm
- chop.nn.quantized.modules.linear
- chop.nn.quantized.modules.max_pool2d
- chop.nn.quantized.modules.relu
- chop.nn.quantized.modules.rms_norm
- chop.nn.quantized.modules.selu
- chop.nn.quantized.modules.silu
- chop.nn.quantized.modules.softplus
- chop.nn.quantized.modules.softsign
- chop.nn.quantized.modules.tanh
- chop.nn.quantized.utils