This repository was archived by the owner on Nov 22, 2022. It is now read-only.

Back out "[PyText]Export PyText model to ONNX in ZIP then load to caffe2" #838

Closed · wants to merge 1 commit
11 changes: 6 additions & 5 deletions pytext/utils/onnx.py
```diff
@@ -3,10 +3,10 @@

 import caffe2.python.predictor.predictor_exporter as pe
 import numpy as np
+import onnx
 import torch
 from caffe2.python import core, workspace
 from caffe2.python.onnx import backend as caffe2_backend
-from torch.onnx import ExportTypes


 CAFFE2_DB_TYPE = "minidb"
@@ -40,17 +40,18 @@ def pytorch_to_caffe2(
     export_onnx_path = export_path
     model.eval()
     with torch.no_grad():
-        torch.onnx._export(
+        torch.onnx.export(
             model,
             export_input,
             export_onnx_path,
             input_names=all_input_names,
             output_names=output_names,
             export_params=True,
-            export_type=ExportTypes.ZIP_ARCHIVE,
         )
-    # Convert the ONNX model(zip archive file) to a caffe2 net
-    c2_prepared = caffe2_backend.prepare_zip_archive(export_onnx_path)
+    onnx_model = onnx.load(export_onnx_path)
+    onnx.checker.check_model(onnx_model)
+    # Convert the ONNX model to a caffe2 net
+    c2_prepared = caffe2_backend.prepare(onnx_model)
     return c2_prepared
```
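For reference, here is a minimal, self-contained sketch of the export path this backout restores: export with `torch.onnx.export`, load and validate the resulting protobuf, then hand the in-memory model to the caffe2 ONNX backend. The toy `TinyModel`, the `/tmp/tiny.onnx` path, and the tensor shapes are illustrative assumptions, not part of PyText or this PR.

```python
# Minimal sketch of the restored flow (toy model and paths are assumptions).
import onnx
import torch
from caffe2.python.onnx import backend as caffe2_backend


class TinyModel(torch.nn.Module):
    """Stand-in for a PyText model; a single linear layer."""

    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(4, 2)

    def forward(self, x):
        return self.linear(x)


model = TinyModel()
model.eval()
dummy_input = torch.randn(1, 4)

# Export a plain ONNX protobuf file; no ZIP archive, no export_type flag.
with torch.no_grad():
    torch.onnx.export(
        model,
        dummy_input,
        "/tmp/tiny.onnx",
        input_names=["x"],
        output_names=["y"],
        export_params=True,
    )

# Load the file back, validate the graph, then convert to a caffe2 net.
onnx_model = onnx.load("/tmp/tiny.onnx")
onnx.checker.check_model(onnx_model)
c2_prepared = caffe2_backend.prepare(onnx_model)

# Sanity-check the round trip by running the caffe2 net on the same input.
(caffe2_out,) = c2_prepared.run([dummy_input.numpy()])
print(caffe2_out)
```

One observable difference in the restored path: the exported artifact is a single protobuf that gets run through `onnx.checker.check_model` before conversion, whereas the ZIP-archive path being backed out passed the archive straight to `prepare_zip_archive` without a validation step.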