
Commit 18c6544

Merge branch 'master' of github.com:keras-team/keras-io
fchollet committed Apr 5, 2024
2 parents 9f6d2ce + 2e67fa6 commit 18c6544
Showing 22 changed files with 66 additions and 13,910 deletions.
4 changes: 2 additions & 2 deletions examples/structured_data/ipynb/tabtransformer.ipynb
@@ -409,7 +409,7 @@
" )\n",
" else:\n",
" inputs[feature_name] = layers.Input(\n",
" name=feature_name, shape=(), dtype=\"float32\"\n",
" name=feature_name, shape=(), dtype=\"int32\"\n",
" )\n",
" return inputs\n",
""
@@ -489,7 +489,7 @@
"def create_mlp(hidden_units, dropout_rate, activation, normalization_layer, name=None):\n",
" mlp_layers = []\n",
" for units in hidden_units:\n",
" mlp_layers.append(normalization_layer()),\n",
" mlp_layers.append(normalization_layer())\n",
" mlp_layers.append(layers.Dense(units, activation=activation))\n",
" mlp_layers.append(layers.Dropout(dropout_rate))\n",
"\n",
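Why the dtype fix matters: in this example the categorical features reach the model as integer vocabulary indices (the strings are index-encoded earlier in the tutorial) and are consumed by embedding layers, which expect integer inputs, so declaring the symbolic input as float32 mislabeled the data. Below is a minimal sketch of the corrected create_model_inputs. The feature lists are hypothetical stand-ins for the ones defined in the example, and the float32 branch for numeric features is assumed from the surrounding code.

import keras
from keras import layers

# Hypothetical feature lists; the real ones come from the dataset in the example.
NUMERIC_FEATURE_NAMES = ["age", "hours_per_week"]
CATEGORICAL_FEATURE_NAMES = ["workclass", "education"]
FEATURE_NAMES = NUMERIC_FEATURE_NAMES + CATEGORICAL_FEATURE_NAMES


def create_model_inputs():
    inputs = {}
    for feature_name in FEATURE_NAMES:
        if feature_name in NUMERIC_FEATURE_NAMES:
            # Continuous features are genuinely floating point.
            inputs[feature_name] = layers.Input(
                name=feature_name, shape=(), dtype="float32"
            )
        else:
            # Categorical features are integer indices destined for an
            # Embedding layer, hence int32 (the fix in this commit).
            inputs[feature_name] = layers.Input(
                name=feature_name, shape=(), dtype="int32"
            )
    return inputs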
4 changes: 2 additions & 2 deletions examples/structured_data/md/tabtransformer.md
@@ -303,7 +303,7 @@ def create_model_inputs():
)
else:
inputs[feature_name] = layers.Input(
-            name=feature_name, shape=(), dtype="float32"
+            name=feature_name, shape=(), dtype="int32"
)
return inputs

@@ -359,7 +359,7 @@ def encode_inputs(inputs, embedding_dims):
def create_mlp(hidden_units, dropout_rate, activation, normalization_layer, name=None):
mlp_layers = []
for units in hidden_units:
-        mlp_layers.append(normalization_layer()),
+        mlp_layers.append(normalization_layer())
mlp_layers.append(layers.Dense(units, activation=activation))
mlp_layers.append(layers.Dropout(dropout_rate))

4 changes: 2 additions & 2 deletions examples/structured_data/tabtransformer.py
@@ -276,7 +276,7 @@ def create_model_inputs():
)
else:
inputs[feature_name] = layers.Input(
-            name=feature_name, shape=(), dtype="float32"
+            name=feature_name, shape=(), dtype="int32"
)
return inputs

@@ -328,7 +328,7 @@ def encode_inputs(inputs, embedding_dims):
def create_mlp(hidden_units, dropout_rate, activation, normalization_layer, name=None):
mlp_layers = []
for units in hidden_units:
-        mlp_layers.append(normalization_layer()),
+        mlp_layers.append(normalization_layer())
mlp_layers.append(layers.Dense(units, activation=activation))
mlp_layers.append(layers.Dropout(dropout_rate))

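The second change is cosmetic but worth noting: mlp_layers.append(normalization_layer()), parses as a one-element tuple whose value is immediately discarded, so the list was still built correctly, but the stray comma suggested an unfinished expression. A short sketch of the corrected helper follows; the final keras.Sequential line is assumed from the full example, and the usage values are illustrative only.

import keras
from keras import layers


def create_mlp(hidden_units, dropout_rate, activation, normalization_layer, name=None):
    mlp_layers = []
    for units in hidden_units:
        mlp_layers.append(normalization_layer())  # no trailing comma
        mlp_layers.append(layers.Dense(units, activation=activation))
        mlp_layers.append(layers.Dropout(dropout_rate))
    # Assumed from the surrounding example: stack the layers into one model.
    return keras.Sequential(mlp_layers, name=name)


# Illustrative usage: a two-block MLP with layer normalization.
mlp = create_mlp(
    hidden_units=[64, 32],
    dropout_rate=0.1,
    activation="gelu",
    normalization_layer=layers.LayerNormalization,
    name="mlp_example",
)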
(The remaining 19 changed files are not shown here.)
