Make ATen core HIP compatible (pytorch#13343)
Summary:
So that Caffe2 can include ATen core files without hipifying ATen.

cc xw285cornell
Pull Request resolved: pytorch#13343

Reviewed By: xw285cornell

Differential Revision: D12853162

Pulled By: bddppq

fbshipit-source-id: f9402691292180dde110a58ea3b1cedc62aab0ba
bddppq authored and facebook-github-bot committed Nov 1, 2018
1 parent 10a6a3e commit 86e1009
Showing 5 changed files with 12 additions and 7 deletions.
4 changes: 2 additions & 2 deletions aten/src/ATen/core/Half-inl.h
@@ -8,7 +8,7 @@
 #include <cuda_fp16.h>
 #endif
 
-#if defined(__HIP_DEVICE_COMPILE__)
+#ifdef __HIPCC__
 #include <hip/hip_fp16.h>
 #endif
 
@@ -34,7 +34,7 @@ inline AT_HOST_DEVICE Half::operator float() const {
 #endif
 }
 
-#ifdef __CUDACC__
+#if defined(__CUDACC__) || defined(__HIPCC__)
 inline AT_HOST_DEVICE Half::Half(const __half& value) {
   x = *reinterpret_cast<const unsigned short*>(&value);
 }
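
Why the guard change matters: as I understand the HIP toolchain, __HIP_DEVICE_COMPILE__ is defined only while hipcc compiles device code, whereas __HIPCC__ (the analogue of __CUDACC__) is defined for the entire hipcc translation unit, host pass included. Keying the include on __HIPCC__ therefore makes the fp16 header and the __half interop below visible to every hipcc pass, while plain CPU builds of ATen core still skip it. A side-by-side sketch of the two guards:

// Old guard: the header (and anything keyed on it) vanishes during hipcc's
// host pass, so host-side declarations cannot mention __half.
#if defined(__HIP_DEVICE_COMPILE__)
#include <hip/hip_fp16.h>
#endif

// New guard: mirrors __CUDACC__ on the CUDA side; visible to every hipcc pass,
// still skipped entirely by ordinary CPU compilers.
#ifdef __HIPCC__
#include <hip/hip_fp16.h>
#endif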
4 changes: 2 additions & 2 deletions aten/src/ATen/core/Half.h
@@ -26,7 +26,7 @@
 #include <cuda_fp16.h>
 #endif
 
-#if defined(__HIP_DEVICE_COMPILE__)
+#ifdef __HIPCC__
 #include <hip/hip_fp16.h>
 #endif
 
@@ -56,7 +56,7 @@ struct alignas(2) Half {
   inline AT_HOST_DEVICE Half(float value);
   inline AT_HOST_DEVICE operator float() const;
 
-#ifdef __CUDACC__
+#if defined(__CUDACC__) || defined(__HIPCC__)
   inline AT_HOST_DEVICE Half(const __half& value);
   inline AT_HOST_DEVICE operator __half() const;
 #endif
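
With these declarations now compiled under hipcc too, HIP device code can move between at::Half and the toolchain's __half type the same way CUDA code does. A minimal sketch, assuming the ATen headers are on the include path (the kernel and its name are hypothetical):

#include <hip/hip_runtime.h>
#include <ATen/core/Half.h>

// Hypothetical kernel: scale fp16 values in place, converting through __half.
__global__ void scale_fp16(at::Half* data, float factor, int n) {
  int i = blockIdx.x * blockDim.x + threadIdx.x;
  if (i < n) {
    __half h = data[i];                        // at::Half -> __half (bit cast)
    float scaled = __half2float(h) * factor;   // do the arithmetic in float
    data[i] = at::Half(__float2half(scaled));  // __half -> at::Half (bit cast)
  }
}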
2 changes: 1 addition & 1 deletion aten/src/ATen/core/Macros.h
@@ -5,7 +5,7 @@
 
 #include "c10/macros/Macros.h"
 
-#ifdef __CUDACC__
+#if defined(__CUDACC__) || defined(__HIPCC__)
 // Designates functions callable from the host (CPU) and the device (GPU)
 #define AT_HOST_DEVICE __host__ __device__
 #define AT_DEVICE __device__
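
With the widened guard, AT_HOST_DEVICE and AT_DEVICE pick up the __host__/__device__ qualifiers under hipcc as well as nvcc, so a single definition serves host code, CUDA device code, and HIP device code. A rough self-contained sketch of how the macro is consumed; the #else branch is an assumption (the diff only shows the GPU side), and clamp_index is purely illustrative:

#if defined(__CUDACC__) || defined(__HIPCC__)
#define AT_HOST_DEVICE __host__ __device__
#else
#define AT_HOST_DEVICE  // assumed: expands to nothing for plain CPU builds
#endif

// One definition, callable from host code everywhere and from device code
// whenever nvcc or hipcc is doing the compiling.
inline AT_HOST_DEVICE int clamp_index(int i, int n) {
  return i < 0 ? 0 : (i >= n ? n - 1 : i);
}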
2 changes: 1 addition & 1 deletion aten/src/ATen/core/TensorAccessor.h
@@ -14,7 +14,7 @@ struct DefaultPtrTraits {
   typedef T* PtrType;
 };
 
-#ifdef __CUDACC__
+#if defined(__CUDACC__) || defined(__HIPCC__)
 template <typename T>
 struct RestrictPtrTraits {
   typedef T* __restrict__ PtrType;
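
RestrictPtrTraits swaps the accessor's pointer type for a __restrict__-qualified one so the GPU compiler may assume the pointers do not alias; widening the guard makes that option available in HIP device code too. A simplified stand-in (not the real TensorAccessor interface) showing how such a traits parameter is consumed:

#include <cstdint>
#include <ATen/core/Macros.h>

template <typename T>
struct DefaultPtrTraits { typedef T* PtrType; };

#if defined(__CUDACC__) || defined(__HIPCC__)
template <typename T>
struct RestrictPtrTraits { typedef T* __restrict__ PtrType; };
#endif

// Simplified accessor: the traits class decides what "pointer to T" means.
template <typename T, template <typename> class PtrTraits = DefaultPtrTraits>
struct SimpleAccessor {
  typename PtrTraits<T>::PtrType data;
  AT_HOST_DEVICE T& operator[](std::int64_t i) { return data[i]; }
};

// Host code:                    SimpleAccessor<float> acc{ptr};
// Device code (nvcc or hipcc):  SimpleAccessor<float, RestrictPtrTraits> acc{ptr};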
7 changes: 6 additions & 1 deletion tools/amd_build/build_pytorch_amd.py
@@ -12,7 +12,11 @@
 
 includes = [
     "aten/*",
-    "torch/*"
+    "torch/*",
+]
+
+ignores = [
+    "aten/src/ATen/core/*",
 ]
 
 # List of operators currently disabled
@@ -66,6 +70,7 @@
     project_directory=proj_dir,
     output_directory=proj_dir,
     includes=includes,
+    ignores=ignores,
     yaml_settings=yaml_file,
     add_static_casts_option=True,
     show_progress=False)
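
The new ignores entry keeps the hipify step from rewriting anything under aten/src/ATen/core/, which is safe precisely because those headers now guard on __HIPCC__ themselves. The payoff, per the summary, is that HIP-side Caffe2 code can include the unmodified ATen core headers directly; a hypothetical translation unit (the file name and kernel are made up):

// caffe2/operators/hip/example_fp16_op.hip  (hypothetical)
#include <hip/hip_runtime.h>
#include <ATen/core/Half.h>  // consumed as-is; no hipify pass over ATen core

__global__ void half_to_float(const at::Half* in, float* out, int n) {
  int i = blockIdx.x * blockDim.x + threadIdx.x;
  if (i < n) {
    out[i] = static_cast<float>(in[i]);  // AT_HOST_DEVICE operator float()
  }
}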
