|
| 1 | +# Licensed to the Apache Software Foundation (ASF) under one |
| 2 | +# or more contributor license agreements. See the NOTICE file |
| 3 | +# distributed with this work for additional information |
| 4 | +# regarding copyright ownership. The ASF licenses this file |
| 5 | +# to you under the Apache License, Version 2.0 (the |
| 6 | +# "License"); you may not use this file except in compliance |
| 7 | +# with the License. You may obtain a copy of the License at |
| 8 | +# |
| 9 | +# http://www.apache.org/licenses/LICENSE-2.0 |
| 10 | +# |
| 11 | +# Unless required by applicable law or agreed to in writing, |
| 12 | +# software distributed under the License is distributed on an |
| 13 | +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
| 14 | +# KIND, either express or implied. See the License for the |
| 15 | +# specific language governing permissions and limitations |
| 16 | +# under the License. |
| 17 | +# pylint: disable=invalid-name, import-self, len-as-condition, unused-argument, too-many-lines |
| 18 | +# pylint: disable=import-outside-toplevel |
| 19 | + |
| 20 | +import numpy as np |
| 21 | +import tvm |
| 22 | +from tvm.ir import IRModule |
| 23 | + |
| 24 | +from ... import nd as _nd |
| 25 | +from .. import analysis |
| 26 | +from .. import expr as _expr |
| 27 | +from .. import function as _function |
| 28 | +from .. import op as _op |
| 29 | +from .. import vision as _vision |
| 30 | + |
| 31 | +from ..function import Function |
| 32 | +from ..expr import Call, Let |
| 33 | +from ..expr import If, Tuple, TupleGetItem |
| 34 | +from ..expr import RefCreate, RefRead, RefWrite |
| 35 | +from ..expr_functor import ExprFunctor |
| 36 | +from ..adt import Match, Clause |
| 37 | + |
| 38 | +from .common import AttrCvt, Renamer, ExprTable |
| 39 | +from .common import get_relay_op, new_var, infer_shape, infer_channels |
| 40 | +from .common import infer_type, get_name |
| 41 | +from .common import infer_value as _infer_value |
| 42 | +from .common import infer_value_simulated as _infer_value_simulated |
| 43 | + |
| 44 | + |
def _SimpleImputer(op, inexpr, dshape, dtype, columns=None):
    """
    Scikit-Learn Transformer:
    Imputation transformer for completing missing values.

    Replaces NaN entries of the 2-D input with the per-column fill
    values learned during fit (``op.statistics_``).

    NOTE(review): ``columns`` is accepted for signature uniformity with the
    other converters but no column selection is applied here; callers such
    as _RobustImputer select columns before delegating to this function.
    """
    nan_mask = _op.isnan(inexpr)

    # Per-column fill values learned during fit.
    fill_row = _op.const(np.array(op.statistics_, dtype=dtype))

    # Build reps = [num_rows, 1] so the single fill row can be tiled over
    # every row of the (possibly dynamically shaped) input.
    num_rows = _op.take(_op.shape_of(inexpr), _op.const([0]))
    tile_reps = _op.concatenate([num_rows, _op.const([1])], axis=0)
    tiled_fill = _op.tile(fill_row, reps=tile_reps)

    # Take all columns in order. NOTE(review): this looks like an identity
    # selection; presumably it is kept so the result regains a static column
    # count after the dynamic tile — confirm before removing.
    col_indices = _op.const(np.arange(len(op.statistics_)))
    fill_val = _op.take(tiled_fill, indices=col_indices, axis=1)

    # Where the input is NaN use the fill value, otherwise keep the input.
    return _op.where(nan_mask, fill_val, inexpr)
| 65 | + |
def _RobustImputer(op, inexpr, dshape, dtype, columns=None):
    """
    Sagemaker-Scikit-Learn-Extension Transformer:
    Imputation transformer for completing missing values with multi-column support.

    Optionally restricts the input to ``columns``, converts infinite values
    to NaN when the transformer defines a mask function, then delegates the
    actual imputation to the fitted internal SimpleImputer.
    """
    if columns:
        # Keep only the requested columns.
        inexpr = _op.take(inexpr, indices=_op.const(columns), axis=1)

    if op.mask_function is not None:
        # Turn +/-inf entries into NaN so the imputer treats them as missing.
        is_inf = _op.isinf(inexpr)
        nan_fill = _op.full_like(inexpr, _op.const(np.array(np.nan, dtype=dtype)))
        inexpr = _op.where(is_inf, nan_fill, inexpr)

    return _SimpleImputer(op.simple_imputer_, inexpr, dshape, dtype, columns)
| 82 | + |
def _ThresholdOneHotEncoder(op, inexpr, dshape, dtype, columns=None):
    """
    Sagemaker-Scikit-Learn-Extension Transformer:
    Encode categorical integer features as a one-hot numeric array, with
    optional restrictions on feature encoding.

    Each input column i is expanded into ``len(op.categories_[i])`` one-hot
    columns; the expansions are concatenated feature-wise.
    """
    if columns:
        # Keep only the requested columns.
        inexpr = _op.take(inexpr, indices=_op.const(columns), axis=1)

    num_cat = len(op.categories_)
    # One split per categorical feature (assumes the input has num_cat columns).
    feature_cols = _op.split(inexpr, num_cat, axis=1)

    encoded = []
    for idx, category in enumerate(op.categories_):
        cat_tensor = _op.const(np.array(category, dtype=dtype))
        # Repeat the feature column once per category value, then compare
        # against the category constants to form the one-hot mask.
        tiled_col = _op.tile(feature_cols[idx], (1, len(category)))
        one_hot_mask = _op.equal(tiled_col, cat_tensor)
        encoded.append(_op.cast(one_hot_mask, dtype))

    return _op.concatenate(encoded, axis=1)
| 107 | + |
def _RobustStandardScaler(op, inexpr, dshape, dtype, columns=None):
    """
    Sagemaker-Scikit-Learn-Extension Transformer:
    Standardize features by removing the mean and scaling to unit variance.

    When ``columns`` is given, only those columns are selected before
    scaling — the fitted ``scaler_.mean_``/``scale_`` vectors are sized per
    fitted (i.e. selected) column, so scaling the full input would misalign
    or fail to broadcast. With ``columns=None`` behavior is unchanged.
    """
    if columns:
        # Consistency fix: select the requested columns first, exactly as
        # _RobustImputer and _ThresholdOneHotEncoder do.
        inexpr = _op.take(inexpr, indices=_op.const(columns), axis=1)

    scaler = op.scaler_
    # Standardize: (x - mean) / scale, elementwise per column.
    ret = _op.subtract(inexpr, _op.const(np.array(scaler.mean_, dtype), dtype))
    ret = _op.divide(ret, _op.const(np.array(scaler.scale_, dtype), dtype))
    return ret
| 117 | + |
def _ColumnTransformer(op, inexpr, dshape, dtype, columns=None):
    """
    Scikit-Learn Compose:
    Applies transformers to columns of an array.

    Each fitted pipeline is applied to its column subset and the results are
    concatenated feature-wise.

    Fix: the original converted only the FIRST step of each pipeline
    (``pipe.steps[0][1]``), silently dropping any later fitted steps. All
    steps are now chained in order; single-step pipelines behave as before.
    """
    out = []
    for _, pipe, cols in op.transformers_:
        pipe_expr = inexpr
        pipe_cols = cols
        for _, step_op in pipe.steps:
            pipe_expr = sklearn_op_to_relay(step_op, pipe_expr, dshape, dtype, pipe_cols)
            # Column selection applies only to the first step; later steps
            # consume the full output of the previous step.
            pipe_cols = None
        out.append(pipe_expr)

    return _op.concatenate(out, axis=1)
| 129 | + |
# Dispatch table: scikit-learn / sagemaker-sklearn-extension class name
# -> Relay converter function.
_convert_map = {
    'ColumnTransformer': _ColumnTransformer,
    'SimpleImputer': _SimpleImputer,
    'RobustImputer': _RobustImputer,
    'RobustStandardScaler': _RobustStandardScaler,
    'ThresholdOneHotEncoder': _ThresholdOneHotEncoder,
}
| 137 | + |
def sklearn_op_to_relay(op, inexpr, dshape, dtype, columns=None):
    """
    Convert a single fitted scikit-learn transformer to a Relay expression.

    Dispatches on the transformer's class name via ``_convert_map``.

    Raises
    ------
    NotImplementedError
        If the transformer class has no registered converter (previously a
        bare ``KeyError`` with no guidance).
    """
    classname = type(op).__name__
    if classname not in _convert_map:
        raise NotImplementedError(
            "scikit-learn operator {} is not supported for frontend conversion."
            .format(classname))
    return _convert_map[classname](op, inexpr, dshape, dtype, columns)
| 141 | + |
def from_sklearn(model,
                 shape=None,
                 dtype="float32",
                 columns=None):
    """
    Import a single fitted scikit-learn transformer into Relay.

    Builds an input variable of the given ``shape``/``dtype``, converts the
    model, and wraps the result in a function over its free variables.

    Returns a tuple of (tvm.IRModule, params); params is always empty.
    """
    try:
        import sklearn  # noqa: F401 -- presence check only
    except ImportError as e:
        raise ImportError(
            "Unable to import scikit-learn which is required {}".format(e))

    input_var = _expr.var('input', shape=shape, dtype=dtype)
    output = sklearn_op_to_relay(model, input_var, shape, dtype, columns)

    func = _function.Function(analysis.free_vars(output), output)
    return IRModule.from_expr(func), []
| 158 | + |
def from_auto_ml(model,
                 shape=None,
                 dtype="float32"):
    """
    Import a fitted AutoML model's feature transformer pipeline into Relay.

    Chains every step of ``model.feature_transformer.steps`` in order,
    feeding each step's output into the next, then wraps the result in a
    function over its free variables.

    Returns a tuple of (tvm.IRModule, params); params is always empty.
    """
    try:
        import sklearn  # noqa: F401 -- presence check only
    except ImportError as e:
        raise ImportError(
            "Unable to import scikit-learn which is required {}".format(e))

    expr = _expr.var('input', shape=shape, dtype=dtype)
    for _, transformer in model.feature_transformer.steps:
        expr = sklearn_op_to_relay(transformer, expr, shape, dtype, None)

    func = _function.Function(analysis.free_vars(expr), expr)
    return IRModule.from_expr(func), []
0 commit comments