Skip to content

Commit

Permalink
delete SupportNPU(), SupportMLU() (#52911)
Browse files Browse the repository at this point in the history
* delete SupportNPU(), SupportMLU()

* delete npu branch
  • Loading branch information
jjyaoao authored Apr 14, 2023
1 parent 468869e commit 8601859
Show file tree
Hide file tree
Showing 3 changed files with 0 additions and 56 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -385,21 +385,6 @@ void ApplyDeviceGuard(const OperatorBase* op_base,
}
VLOG(3) << "Switch into " << expected_kernel_key->place_
<< " by device_guard.";
} else if (op_device.find("npu") != std::string::npos &&
platform::is_npu_place(place)) {
// when the Op that does not have NPUKernel is assigned to NPU, the
// CPUKernel will be executed and a warning will be given at the same
// time.
if (op_base->SupportNPU()) {
expected_kernel_key->place_ = place;
} else {
expected_kernel_key->place_ = platform::CPUPlace();
LOG_FIRST_N(WARNING, 1)
<< "Op(" << op_base->Type()
<< ") has no NPU implementation. It will be assigned to CPUPlace.";
}
VLOG(3) << "Switch into " << expected_kernel_key->place_
<< " by device_guard.";
} else if (op_device.find("xpu") != std::string::npos &&
platform::is_xpu_place(place)) {
// when the Op that does not have XPUKernel is assigned to XPU, the
Expand Down
27 changes: 0 additions & 27 deletions paddle/fluid/framework/operator.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1326,33 +1326,6 @@ bool OperatorWithKernel::SupportGPU() const {
}
}

bool OperatorWithKernel::SupportNPU() const {
  // An op supports NPU if either registry exposes an NPU kernel for it:
  // first consult the phi kernel registry, then fall back to the fluid
  // op-kernel registry.
  auto kernel_map = phi::KernelFactory::Instance().SelectKernelMap(
      phi::TransToPhiKernelName(type_));
  for (const auto& kernel_entry : kernel_map) {
    if (kernel_entry.first.backend() == phi::Backend::NPU) {
      return true;
    }
  }

  // No phi NPU kernel; check whether any fluid kernel registered for this
  // op type is bound to an NPU place.
  auto registry_iter = OperatorWithKernel::AllOpKernels().find(type_);
  if (registry_iter == OperatorWithKernel::AllOpKernels().end()) {
    return false;
  }
  const auto& op_kernels = registry_iter->second;
  return std::any_of(op_kernels.begin(),
                     op_kernels.end(),
                     [](OpKernelMap::const_reference kern_pair) {
                       return platform::is_npu_place(kern_pair.first.place_);
                     });
}

bool OperatorWithKernel::SupportXPU() const {
#ifdef PADDLE_WITH_XPU
auto phi_kernels = phi::KernelFactory::Instance().SelectKernelMap(
Expand Down
14 changes: 0 additions & 14 deletions paddle/fluid/framework/operator.h
Original file line number Diff line number Diff line change
Expand Up @@ -285,8 +285,6 @@ class OperatorBase {
std::string DebugString() const { return DebugStringEx(nullptr); }

virtual bool SupportGPU() const { return false; }
virtual bool SupportNPU() const { return false; }
virtual bool SupportMLU() const { return false; }
virtual bool SupportXPU() const { return false; }

const std::string& Type() const { return type_; }
Expand Down Expand Up @@ -746,18 +744,6 @@ class OperatorWithKernel : public OperatorBase {

bool SupportGPU() const override;

bool SupportNPU() const override;

bool SupportMLU() const override {
  // TODO(zhiqiu): support phi if needed?
  // True iff any fluid kernel registered for this op type is bound to an
  // MLU place. NOTE(review): at() presumably throws when no kernels are
  // registered for type_ — confirm against the registry's population.
  const auto& registered_kernels = OperatorWithKernel::AllOpKernels().at(type_);
  for (const auto& kernel_entry : registered_kernels) {
    if (platform::is_mlu_place(kernel_entry.first.place_)) {
      return true;
    }
  }
  return false;
}

bool SupportXPU() const override;

bool SupportsMKLDNN(phi::DataType data_type) const;
Expand Down

0 comments on commit 8601859

Please sign in to comment.