Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Tightened up logic, established consistency with master
Browse files Browse the repository at this point in the history
  • Loading branch information
connorgoggins committed Feb 5, 2020
1 parent f5279be commit 48d0667
Show file tree
Hide file tree
Showing 2 changed files with 22 additions and 23 deletions.
20 changes: 18 additions & 2 deletions benchmark/opperf/rules/default_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,9 @@
DEFAULT_KERNEL = [(1, 1, 1), (1, 1, 1)]
DEFAULT_STRIDE = [(2, 2, 2), (1, 1, 1)]

# BatchNorm
DEFAULT_AXIS_BN = [1]

# LayerNorm
DEFAULT_GAMMA_LN = [(32,), (32,)]
DEFAULT_BETA_LN = [(32,), (32,)]
Expand Down Expand Up @@ -298,26 +301,36 @@
"p_dropout": DEFAULT_P,
"data_nn_basic": DEFAULT_DATA_NN_BASIC,
"num_hidden": DEFAULT_NUM_HIDDEN,
"data_fullyconnected": DEFAULT_DATA_NN_BASIC,
"weight_fullyconnected": DEFAULT_WEIGHT_FC,
"weight_embedding": DEFAULT_WEIGHT_EMBEDDING,
"bias": DEFAULT_BIAS,
"flatten": DEFAULT_FLATTEN,
"data_batchnorm": DEFAULT_DATA_NN_BASIC,
"gamma_batchnorm": DEFAULT_GAMMA,
"beta_batchnorm": DEFAULT_BETA,
"moving_mean": DEFAULT_MOVING_MEAN,
"moving_var": DEFAULT_MOVING_VAR,
"moving_mean_batchnorm": DEFAULT_MOVING_MEAN,
"moving_var_batchnorm": DEFAULT_MOVING_VAR,
"axis_batchnorm": DEFAULT_AXIS_BN,
"data_softmaxoutput": DEFAULT_DATA_NN_BASIC,
"label_softmaxoutput": DEFAULT_LABEL_SM,
"data_maeregressionoutput": DEFAULT_DATA_NN_BASIC,
"label_maeregressionoutput": DEFAULT_LABEL_REG,
"data_logisticregressionoutput": DEFAULT_DATA_NN_BASIC,
"label_logisticregressionoutput": DEFAULT_LABEL_REG,
"data_linearregressionoutput": DEFAULT_DATA_NN_BASIC,
"label_linearregressionoutput": DEFAULT_LABEL_REG,
"data_svmoutput": DEFAULT_DATA_NN_BASIC,
"label_svmoutput": DEFAULT_LABEL_SVM,
"grad_scale": DEFAULT_GRAD_SCALE,
"normalization": DEFAULT_NORMALIZATION,
"margin": DEFAULT_MARGIN,
"regularization_coefficient": DEFAULT_REG_COEFF,
"data_l2normalization": DEFAULT_DATA_NN_BASIC,
"mode_l2normalization": DEFAULT_MODE_L2,
"gamma_layernorm": DEFAULT_GAMMA_LN,
"beta_layernorm": DEFAULT_BETA_LN,
"data_instancenorm": DEFAULT_DATA_NN_BASIC,
"gamma_instancenorm": DEFAULT_GAMMA,
"beta_instancenorm": DEFAULT_BETA,
"input_dim": DEFAULT_INPUT_DIM,
Expand All @@ -329,13 +342,16 @@
"max_displacement": DEFAULT_MAX_DISPLACEMENT,
"stride1": DEFAULT_STRIDE_1,
"stride2": DEFAULT_STRIDE_2,
"data_im2col": DEFAULT_DATA_NN_BASIC,
"kernel_im2col": DEFAULT_KERNEL_I2C,
"stride_im2col": DEFAULT_STRIDE_I2C,
"dilate_im2col": DEFAULT_DILATE,
"pad_im2col": DEFAULT_PAD,
"data_lrn": DEFAULT_DATA_NN_BASIC,
"alpha_lrn": DEFAULT_ALPHA,
"beta_lrn": DEFAULT_BETA_LRN,
"nsize": DEFAULT_NSIZE,
"data_layernorm": DEFAULT_DATA_NN_BASIC,
"axis_layernorm": DEFAULT_AXIS}


Expand Down
25 changes: 4 additions & 21 deletions benchmark/opperf/utils/op_registry_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,39 +121,22 @@ def prepare_op_inputs(op, arg_params):

# 3d tensor is needed by following ops
ops_3d = ['CTCLoss', 'ctc_loss']
# Each of these NN basic ops requires a differently shaped data tensor
ops_nn_alternate_data = ['SpatialTransformer', 'col2im', 'RNN', 'GroupNorm', 'Dropout']

# Each of these NN basic ops uses the standard data tensor shape
ops_nn_standard_data = ['FullyConnected', 'SoftmaxOutput', 'LinearRegressionOutput', 'BatchNorm',
custom_data = ['SpatialTransformer', 'col2im', 'RNN', 'GroupNorm', 'Dropout', 'FullyConnected', 'SoftmaxOutput', 'LinearRegressionOutput', 'BatchNorm',
'LogisticRegressionOutput', 'MAERegressionOutput', 'SVMOutput', 'L2Normalization',
'LayerNorm', 'InstanceNorm', 'Embedding', 'Correlation', 'im2col', 'LRN']

# Args for NN basic ops which vary across different ops
ops_nn_variable_args = ['weight', 'label', 'mode', 'gamma', 'beta', 'kernel', 'stride', 'dilate', 'pad', 'p', 'data', 'axis', 'loc']
ops_nn_variable_args = ['weight', 'label', 'mode', 'gamma', 'beta', 'kernel', 'stride', 'dilate', 'pad', 'p', 'axis', 'loc', "moving_mean","moving_var"]

# Prepare op to default input mapping
arg_values = {}
for arg_name, arg_type in zip(arg_params["params"]["arg_names"],
arg_params["params"]["arg_types"]):
if "NDArray" in arg_type and op == "ravel_multi_index":
arg_values[arg_name] = DEFAULTS_INPUTS["ravel_data"]
elif op in ops_nn_alternate_data:
if arg_name == 'data' or (op == 'Dropout' and arg_name == 'mode') or arg_name == "kernel" or arg_name == "stride":
arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_" + op.lower()]
elif op == 'RNN' and arg_name == 'p':
pass
elif arg_name in DEFAULTS_INPUTS:
arg_values[arg_name] = DEFAULTS_INPUTS[arg_name]
elif op in ops_nn_standard_data:
if arg_name == "weight" or arg_name == "label" or arg_name == 'mode' or arg_name == "gamma" or arg_name == "beta" or arg_name == "kernel" or arg_name == "stride" or arg_name == "dilate" or arg_name == "pad":
arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_" + op.lower()]
# general nn basic params category
elif arg_name == "data":
arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_nn_basic"]
# Op-specific input
elif arg_name + "_" + op.lower() in DEFAULTS_INPUTS:
arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_" + op.lower()]
elif op in custom_data and arg_name + "_" + op.lower() in DEFAULTS_INPUTS:
arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_" + op.lower()]
elif "NDArray" in arg_type and arg_name + "_nd" in DEFAULTS_INPUTS:
arg_values[arg_name] = DEFAULTS_INPUTS[arg_name + "_nd"]
elif "NDArray" in arg_type and op in ops_4d and arg_name + "_4d" in DEFAULTS_INPUTS:
Expand Down

0 comments on commit 48d0667

Please sign in to comment.