diff --git a/examples/few_shot/pet/export_model.py b/examples/few_shot/pet/export_model.py
new file mode 100644
index 000000000000..a4e84b0e18b4
--- /dev/null
+++ b/examples/few_shot/pet/export_model.py
@@ -0,0 +1,55 @@
+# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import os
+import paddle
+from model import ErnieForPretraining
+
+# yapf: disable
+parser = argparse.ArgumentParser()
+parser.add_argument("--params_path", type=str, required=True, default='./checkpoint/model_160/model_state.pdparams',
+                    help="The path to model parameters to be loaded.")
+parser.add_argument("--output_path", type=str, default='./output',
+                    help="The path of model parameter in static graph to be saved.")
+args = parser.parse_args()
+# yapf: enable
+
+if __name__ == "__main__":
+    model = ErnieForPretraining.from_pretrained('ernie-1.0')
+    if args.params_path and os.path.isfile(args.params_path):
+        state_dict = paddle.load(args.params_path)
+        model.set_dict(state_dict)
+        print("Loaded parameters from %s" % args.params_path)
+    model.eval()
+
+    # Convert to static graph with specific input description
+    model = paddle.jit.to_static(
+        model,
+        input_spec=[
+            paddle.static.InputSpec(shape=[None, None],
+                                    dtype="int64",
+                                    name='input_ids'),  # input_ids
+            paddle.static.InputSpec(shape=[None, None],
+                                    dtype="int64",
+                                    name='token_type_ids'),  # segment_ids
+            None,  # position_ids
+            None,  # attention_mask
+            paddle.static.InputSpec(
+                shape=[None], dtype="int64",
+                name='masked_positions'),  # masked_positions
+        ])
+    # Save in static graph model.
+    save_path = os.path.join(args.output_path, "inference")
+    paddle.jit.save(model, save_path)
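
Usage note (not part of the diff): a minimal sketch of how the exported static-graph model could be exercised. The export command reuses the script's own --params_path/--output_path flags; the load snippet assumes the two None entries in input_spec are pruned during tracing, so the loaded layer takes input_ids, token_type_ids and masked_positions positionally. Tensor values below are placeholders, not real PET inputs.

# Export (example paths; adjust to your checkpoint):
#   python export_model.py --params_path ./checkpoint/model_160/model_state.pdparams --output_path ./output

import paddle

# Load the static-graph program saved by paddle.jit.save under ./output/inference.*
model = paddle.jit.load("./output/inference")
model.eval()

# Placeholder inputs; real token ids come from the PET tokenizer/data pipeline.
input_ids = paddle.to_tensor([[1, 100, 200, 2]], dtype="int64")
token_type_ids = paddle.zeros_like(input_ids)
masked_positions = paddle.to_tensor([2], dtype="int64")

logits = model(input_ids, token_type_ids, masked_positions)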