Add bias to LoRA sidecar layer unit tests.

This commit is contained in:
Ryan Dick 2024-09-13 18:15:04 +00:00 committed by Kent Keirsey
parent 78efed4499
commit d51f2c5e00
3 changed files with 6 additions and 4 deletions

View File

@@ -19,7 +19,6 @@ class LoRALayer(LoRALayerBase):
self.up = up
self.mid = mid
self.down = down
self.bias = bias
@classmethod
def from_state_dict_values(

View File

@@ -26,7 +26,8 @@ def test_concatenated_lora_linear_sidecar_layer():
for out_features in sub_layer_out_features:
down = torch.randn(rank, in_features)
up = torch.randn(out_features, rank)
sub_layers.append(LoRALayer(up=up, mid=None, down=down, alpha=1.0, bias=None))
bias = torch.randn(out_features)
sub_layers.append(LoRALayer(up=up, mid=None, down=down, alpha=1.0, bias=bias))
concatenated_lora_layer = ConcatenatedLoRALayer(sub_layers, concat_axis=0)
# Patch the ConcatenatedLoRA layer into the linear layer.
@@ -34,6 +35,7 @@ def test_concatenated_lora_linear_sidecar_layer():
linear_patched.weight.data += (
concatenated_lora_layer.get_weight(linear_patched.weight) * concatenated_lora_layer.scale()
)
linear_patched.bias.data += concatenated_lora_layer.get_bias(linear_patched.bias) * concatenated_lora_layer.scale()
# Create a ConcatenatedLoRALinearSidecarLayer.
concatenated_lora_linear_sidecar_layer = ConcatenatedLoRALinearSidecarLayer(concatenated_lora_layer, weight=1.0)

View File

@@ -20,12 +20,13 @@ def test_lora_linear_sidecar_layer():
rank = 4
down = torch.randn(rank, in_features)
up = torch.randn(out_features, rank)
lora_layer = LoRALayer(up=up, mid=None, down=down, alpha=1.0, bias=None)
bias = torch.randn(out_features)
lora_layer = LoRALayer(up=up, mid=None, down=down, alpha=1.0, bias=bias)
# Patch the LoRA layer into the linear layer.
linear_patched = copy.deepcopy(linear)
linear_patched.weight.data += lora_layer.get_weight(linear_patched.weight) * lora_layer.scale()
linear_patched.bias.data += lora_layer.get_bias(linear_patched.bias) * lora_layer.scale()
# Create a LoRALinearSidecarLayer.
lora_linear_sidecar_layer = LoRALinearSidecarLayer(lora_layer, weight=1.0)
linear_with_sidecar = LoRASidecarModule(linear, [lora_linear_sidecar_layer])