We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent c949e75 commit c2b1993 — Copy full SHA for c2b1993
keras/src/layers/core/einsum_dense.py
@@ -209,7 +209,7 @@ def compute_output_shape(self, _):
209
def call(self, inputs, training=None):
210
x = ops.einsum(self.equation, inputs, self.kernel)
211
if self.bias is not None:
212
- x += self.bias
+ x = ops.add(x, self.bias)
213
if self.activation is not None:
214
x = self.activation(x)
215
return x
@@ -518,7 +518,7 @@ def grad_fn(*args, upstream=None):
518
lora_x = ops.matmul(lora_x, self.lora_kernel_b)
519
x = ops.add(x, (self.lora_alpha / self.lora_rank) * lora_x)
520
521
522
523
524
0 commit comments