Skip to content

Commit c2b1993

Browse files
authored
Replace the bias addition in EinsumDense with ops.add(x, bias) instead of using the + operator. (#21299)
1 parent c949e75 commit c2b1993

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

keras/src/layers/core/einsum_dense.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -209,7 +209,7 @@ def compute_output_shape(self, _):
209209
def call(self, inputs, training=None):
210210
x = ops.einsum(self.equation, inputs, self.kernel)
211211
if self.bias is not None:
212-
x += self.bias
212+
x = ops.add(x, self.bias)
213213
if self.activation is not None:
214214
x = self.activation(x)
215215
return x
@@ -518,7 +518,7 @@ def grad_fn(*args, upstream=None):
518518
lora_x = ops.matmul(lora_x, self.lora_kernel_b)
519519
x = ops.add(x, (self.lora_alpha / self.lora_rank) * lora_x)
520520
if self.bias is not None:
521-
x += self.bias
521+
x = ops.add(x, self.bias)
522522
if self.activation is not None:
523523
x = self.activation(x)
524524
return x

0 commit comments

Comments (0)