1 parent 69df6d8 commit b5e9edb
nncf/torch/quantization/layers.py
@@ -1078,8 +1078,8 @@ def init_lora(self, lspec: PTLoraSpec):
         if rank > out_features or rank > in_features:
             msg = f"Specified LoRA rank={rank} cannot exceed any dimension of the weight tensor"
             raise nncf.ValidationError(msg)
-        self._lora_A = torch.nn.Parameter(torch.ones((rank, in_features), dtype=default_lora_dtype))
-        self._lora_B = torch.nn.Parameter(torch.zeros((out_features, rank), dtype=default_lora_dtype))
+        self.lora_A = torch.nn.Parameter(torch.ones((rank, in_features), dtype=default_lora_dtype))
+        self.lora_B = torch.nn.Parameter(torch.zeros((out_features, rank), dtype=default_lora_dtype))

     def enable_gradients(self):
         self.lora_A.requires_grad = True
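
The change renames the adapter parameters from _lora_A/_lora_B to lora_A/lora_B, presumably so the attribute names match what the unchanged enable_gradients method already references (self.lora_A); with the private names, that method would raise an AttributeError. Below is a minimal, self-contained sketch of the same pattern for context. It is an illustration, not NNCF's actual implementation: the class name MinimalLoRALinear, the frozen base weight, and the forward method are assumptions added here; only the rank check, the ones/zeros initialization, and the gradient toggle mirror the diff.

    import torch

    class MinimalLoRALinear(torch.nn.Module):
        """Hypothetical sketch of the LoRA pattern in the diff; not NNCF's class."""

        def __init__(self, in_features: int, out_features: int, rank: int,
                     lora_dtype: torch.dtype = torch.float32):
            super().__init__()
            if rank > out_features or rank > in_features:
                msg = f"Specified LoRA rank={rank} cannot exceed any dimension of the weight tensor"
                raise ValueError(msg)
            # Frozen base weight (assumption for this sketch); only the adapters train.
            self.weight = torch.nn.Parameter(
                torch.randn(out_features, in_features), requires_grad=False
            )
            # Public names match what enable_gradients() references.
            self.lora_A = torch.nn.Parameter(
                torch.ones((rank, in_features), dtype=lora_dtype)
            )
            # B starts at zero, so lora_B @ lora_A is zero and the adapter is a
            # no-op before any training step.
            self.lora_B = torch.nn.Parameter(
                torch.zeros((out_features, rank), dtype=lora_dtype)
            )

        def enable_gradients(self):
            self.lora_A.requires_grad = True
            self.lora_B.requires_grad = True

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            # Effective weight is W + B @ A, a rank-`rank` update of the base.
            return x @ (self.weight + self.lora_B @ self.lora_A).T

A quick smoke test of the sketch:

    layer = MinimalLoRALinear(in_features=16, out_features=8, rank=4)
    layer.enable_gradients()
    out = layer(torch.randn(2, 16))  # shape (2, 8)

Zero-initializing lora_B means the adapted layer initially behaves exactly like the frozen base layer, which is the standard LoRA design choice the diff's initialization reflects.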