Re-Enable ATen in C2 in integration builds to test ONNX ATen conversions
Summary: Pull Request resolved: pytorch#10060

Differential Revision: D9081387

Pulled By: bddppq

fbshipit-source-id: 13cbff63df5241e013d4ebacfcd6da082e7196f6
bddppq authored and facebook-github-bot committed Jul 31, 2018
1 parent e04f8bb commit ba5d33b
Showing 5 changed files with 25 additions and 2 deletions.
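
Background for the change: PyTorch operators that have no native ONNX representation (torch.cumsum was one of them at the time) can be exported as generic ATen fallback nodes, and the Caffe2 backend can only execute such nodes when Caffe2 is built with ATen support. The sketch below is not part of this commit; it assumes a torch.onnx build that exposes OperatorExportTypes.ONNX_ATEN_FALLBACK and shows roughly what such an export looks like.

# Illustrative sketch only (not part of this commit). cumsum is used because
# the new test below exercises it; assumes torch and onnx are installed.
import io

import onnx
import torch


class CumSum(torch.nn.Module):
    def forward(self, x):
        return torch.cumsum(x, dim=0)


f = io.BytesIO()
torch.onnx.export(
    CumSum(),
    torch.randn(3, 4, 5),
    f,
    operator_export_type=torch.onnx.OperatorExportTypes.ONNX_ATEN_FALLBACK,
)

# Operators without an ONNX symbolic are emitted as generic ATen nodes; these
# are what Caffe2's ATen operator (enabled by this commit's build changes) runs.
model = onnx.ModelProto()
model.ParseFromString(f.getvalue())
print([node.op_type for node in model.graph.node])
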
2 changes: 1 addition & 1 deletion .jenkins/caffe2/build.sh
@@ -124,7 +124,7 @@ CMAKE_ARGS+=("-DUSE_OBSERVERS=ON")
 CMAKE_ARGS+=("-DUSE_ZSTD=ON")
 CMAKE_ARGS+=("-DCMAKE_INSTALL_PREFIX=${INSTALL_PREFIX}")
-if [[ $BUILD_ENVIRONMENT == *-aten-* ]]; then
+if [[ $BUILD_ENVIRONMENT == *-aten-* || -n "$INTEGRATED" ]]; then
   if [[ CMAKE_ARGS != *USE_ATEN* ]] && [[ CMAKE_ARGS != *BUILD_ATEN* ]]; then
     CMAKE_ARGS+=("-DBUILD_ATEN=ON")
   fi
3 changes: 2 additions & 1 deletion CMakeLists.txt
@@ -216,7 +216,7 @@ if(NOT MSVC)
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=deprecated-declarations")
   # These flags are not available in GCC-4.8.5. Set only when using clang.
   # Compared against https://gcc.gnu.org/onlinedocs/gcc-4.8.5/gcc/Option-Summary.html
-  if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
+  if ("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-invalid-partial-specialization")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-typedef-redefinition")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unknown-warning-option")
@@ -226,6 +226,7 @@ if(NOT MSVC)
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-c++14-extensions")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-constexpr-not-const")
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-missing-braces")
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Qunused-arguments")
   endif()
   if ((APPLE AND (NOT ("${CLANG_VERSION_STRING}" VERSION_LESS "9.0")))
       OR (CMAKE_COMPILER_IS_GNUCXX
9 changes: 9 additions & 0 deletions cmake/MiscCheck.cmake
@@ -161,6 +161,15 @@ if (${COMPILER_SUPPORTS_HIDDEN_INLINE_VISIBILITY})
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${CAFFE2_VISIBILITY_FLAG}")
 endif()
 
+# ---[ Checks if linker supports -rdynamic. `-rdynamic` tells linker
+# -to add all (including unused) symbols into the dynamic symbol
+# -table. We need this to get symbols when generating backtrace at
+# -runtime.
+check_cxx_compiler_flag("-rdynamic" COMPILER_SUPPORTS_RDYNAMIC)
+if (${COMPILER_SUPPORTS_RDYNAMIC})
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -rdynamic")
+endif()
+
 # ---[ If we are using msvc, set no warning flags
 # Note(jiayq): if you are going to add a warning flag, check if this is
 # totally necessary, and only add when you see fit. If it is needed due to
1 change: 1 addition & 0 deletions setup_caffe2.py
@@ -131,6 +131,7 @@ def run(self):
         # configure
         cmake_args = [
             find_executable('cmake'),
+            '-DUSE_ATEN=ON',
             '-DBUILD_SHARED_LIBS=OFF',
             '-DPYTHON_EXECUTABLE:FILEPATH={}'.format(sys.executable),
             '-DPYTHON_INCLUDE_DIR={}'.format(sysconfig.get_python_inc()),
12 changes: 12 additions & 0 deletions test/onnx/test_pytorch_onnx_caffe2.py
@@ -676,6 +676,18 @@ def forward(self, x):
         x = Variable(torch.randn(*shape))
         self.run_model_test(MyModel(), train=False, input=(x), batch_size=BATCH_SIZE, use_gpu=False)
 
+    def test_cumsum(self):
+        shape = (3, 4, 5)
+        for params in [{'dim': i} for i in range(len(shape))]:
+            class MyModel(torch.nn.Module):
+                def __init__(self):
+                    super(MyModel, self).__init__()
+
+                def forward(self, x):
+                    return torch.cumsum(x, **params)
+            x = Variable(torch.randn(*shape))
+            self.run_model_test(MyModel(), train=False, input=(x), batch_size=BATCH_SIZE, use_gpu=False)
+
     def test_repeat(self):
         class MyModel(torch.nn.Module):
             def __init__(self):
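
For context on what the new test checks: run_model_test (defined earlier in the same file) exports the module to ONNX and compares the Caffe2 backend's output against PyTorch's. Below is a rough, self-contained approximation of that round trip, not the actual helper; it assumes caffe2.python.onnx.backend is importable from the integrated build and that torch.onnx exposes OperatorExportTypes.ONNX_ATEN_FALLBACK.

# Rough approximation of the round trip performed by run_model_test; not the
# actual helper. Assumes a Caffe2 build with ATen enabled, as set up above.
import io

import numpy as np
import onnx
import torch
import caffe2.python.onnx.backend as c2_backend


class CumSum(torch.nn.Module):
    def forward(self, x):
        return torch.cumsum(x, dim=1)


x = torch.randn(3, 4, 5)
f = io.BytesIO()
torch.onnx.export(
    CumSum(), x, f,
    operator_export_type=torch.onnx.OperatorExportTypes.ONNX_ATEN_FALLBACK,
)

# Run the exported graph on the Caffe2 backend and compare against PyTorch.
# Without ATen compiled into Caffe2, an exported ATen node has no operator to
# dispatch to and prepare()/run() would fail.
prepared = c2_backend.prepare(onnx.load_model_from_string(f.getvalue()))
(c2_out,) = prepared.run((x.numpy(),))
np.testing.assert_allclose(CumSum()(x).numpy(), c2_out, rtol=1e-3, atol=1e-5)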
