5 changes: 0 additions & 5 deletions paddle/memory/.clang-format

This file was deleted.

1 change: 1 addition & 0 deletions paddle/memory/.clang-format
5 changes: 0 additions & 5 deletions paddle/operators/.clang-format

This file was deleted.

1 change: 1 addition & 0 deletions paddle/operators/.clang-format
1 change: 1 addition & 0 deletions paddle/pybind/.clang-format
59 changes: 25 additions & 34 deletions paddle/pybind/protobuf.cc
@@ -46,8 +46,7 @@ struct variant_caster<V<Ts...>> {

template <typename T>
typename std::enable_if<
!std::is_same<T, boost::detail::variant::void_>::value,
bool>::type
!std::is_same<T, boost::detail::variant::void_>::value, bool>::type
try_load(handle src, bool convert) {
auto caster = make_caster<T>();
if (!load_success_ && caster.load(src, convert)) {
@@ -71,8 +70,7 @@ struct variant_caster<V<Ts...>> {
return load_success_;
}

static handle cast(Type const &src,
return_value_policy policy,
static handle cast(Type const &src, return_value_policy policy,
handle parent) {
variant_caster_visitor visitor(policy, parent);
return boost::apply_visitor(visitor, src);
@@ -101,8 +99,8 @@ inline std::vector<T> RepeatedToVector(
const google::protobuf::RepeatedField<T> &repeated_field) {
std::vector<T> ret;
ret.reserve(repeated_field.size());
std::copy(
repeated_field.begin(), repeated_field.end(), std::back_inserter(ret));
std::copy(repeated_field.begin(), repeated_field.end(),
std::back_inserter(ret));
return ret;
}

@@ -134,7 +132,7 @@ class VarDescBind;
// read/write speed. Only when we want the protobuf message, the local changes
// will be synchronized (by `Sync` method).
class VarDescBind {
public:
public:
explicit VarDescBind(const std::string &name) { desc_.set_name(name); }

VarDesc *Proto() { return &desc_; }
@@ -157,12 +155,12 @@ class VarDescBind {
return desc_.lod_tensor().data_type();
}

private:
private:
VarDesc desc_;
};

class OpDescBind {
public:
public:
OpDesc *Proto() {
Sync();
return &op_desc_;
@@ -174,8 +172,8 @@ class OpDescBind {

const std::vector<std::string> &Input(const std::string &name) const {
auto it = inputs_.find(name);
PADDLE_ENFORCE(
it != inputs_.end(), "Input %s cannot be found in Op %s", name, Type());
PADDLE_ENFORCE(it != inputs_.end(), "Input %s cannot be found in Op %s",
name, Type());
return it->second;
}

@@ -196,10 +194,8 @@ class OpDescBind {

const std::vector<std::string> &Output(const std::string &name) const {
auto it = outputs_.find(name);
PADDLE_ENFORCE(it != outputs_.end(),
"Output %s cannot be found in Op %s",
name,
Type());
PADDLE_ENFORCE(it != outputs_.end(), "Output %s cannot be found in Op %s",
name, Type());
return it->second;
}

@@ -258,7 +254,7 @@ class OpDescBind {
return boost::get<BlockDesc *>(it->second)->idx();
}

private:
private:
struct SetAttrDescVisitor : public boost::static_visitor<void> {
explicit SetAttrDescVisitor(OpDesc::Attr *attr) : attr_(attr) {}
mutable OpDesc::Attr *attr_;
@@ -325,7 +321,7 @@ class OpDescBind {
};

class BlockDescBind {
public:
public:
BlockDescBind(ProgramDescBind *prog, BlockDesc *desc)
: prog_(prog), desc_(desc), need_update_(false) {}

@@ -349,8 +345,8 @@ class BlockDescBind {
VarDescBind *Var(py::bytes name_bytes) const {
std::string name = name_bytes;
auto it = vars_.find(name);
PADDLE_ENFORCE(
it != vars_.end(), "Can not find variable %s in current block.", name);
PADDLE_ENFORCE(it != vars_.end(),
"Can not find variable %s in current block.", name);
return it->second.get();
}

@@ -398,7 +394,7 @@ class BlockDescBind {

BlockDesc *RawPtr() { return desc_; }

private:
private:
ProgramDescBind *prog_; // not_own
BlockDesc *desc_; // not_own
bool need_update_;
@@ -412,7 +408,7 @@ using ProgDescMap =
static ProgDescMap *g_bind_map = nullptr;

class ProgramDescBind {
public:
public:
static ProgramDescBind &Instance(ProgramDesc *prog) {
if (g_bind_map == nullptr) {
g_bind_map = new ProgDescMap();
@@ -449,7 +445,7 @@ class ProgramDescBind {
return prog_;
}

private:
private:
explicit ProgramDescBind(ProgramDesc *prog) : prog_(prog) {
for (auto &block : *prog->mutable_blocks()) {
blocks_.emplace_back(new BlockDescBind(this, &block));
@@ -492,8 +488,7 @@ void BindProgramDesc(py::module &m) {
return &ProgramDescBind::Instance(prog_desc);
},
py::return_value_policy::reference)
.def("append_block",
&ProgramDescBind::AppendBlock,
.def("append_block", &ProgramDescBind::AppendBlock,
py::return_value_policy::reference)
.def("block", &ProgramDescBind::Block, py::return_value_policy::reference)
.def("__str__", &ProgramDescBind::DebugString)
@@ -504,20 +499,16 @@ void BindBlockDesc(py::module &m) {
py::class_<BlockDescBind>(m, "BlockDesc", "")
.def_property_readonly("id", &BlockDescBind::ID)
.def_property_readonly("parent", &BlockDescBind::Parent)
.def("append_op",
&BlockDescBind::AppendOp,
.def("append_op", &BlockDescBind::AppendOp,
py::return_value_policy::reference)
.def("prepend_op",
&BlockDescBind::PrependOp,
.def("prepend_op", &BlockDescBind::PrependOp,
py::return_value_policy::reference)
.def("new_var", &BlockDescBind::NewVar,
py::return_value_policy::reference)
.def(
"new_var", &BlockDescBind::NewVar, py::return_value_policy::reference)
.def("var", &BlockDescBind::Var, py::return_value_policy::reference)
.def("all_vars",
&BlockDescBind::AllVars,
.def("all_vars", &BlockDescBind::AllVars,
py::return_value_policy::reference)
.def("all_ops",
&BlockDescBind::AllOps,
.def("all_ops", &BlockDescBind::AllOps,
py::return_value_policy::reference);
}

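Aside on the protobuf.cc changes above: the `*Bind` classes cache reads and writes in plain C++ containers and only copy them into the protobuf message when the proto is actually requested, which is what the `Sync` comment in `VarDescBind` refers to. The following is only a toy sketch of that cache-then-sync idea, with invented names and no protobuf dependency; it is not Paddle's implementation.

```cpp
// Toy illustration of the "cache locally, sync to the underlying message"
// idea used by OpDescBind/VarDescBind above. All names here are invented.
#include <map>
#include <string>
#include <vector>

// Stand-in for a generated protobuf OpDesc message.
struct FakeOpDesc {
  std::map<std::string, std::vector<std::string>> inputs;
};

class OpDescBindSketch {
 public:
  void SetInput(const std::string &name, std::vector<std::string> args) {
    inputs_[name] = std::move(args);  // fast local write, proto untouched
    need_update_ = true;
  }

  // Only when the message is actually needed do we pay the copy cost.
  FakeOpDesc *Proto() {
    if (need_update_) {
      desc_.inputs = inputs_;
      need_update_ = false;
    }
    return &desc_;
  }

 private:
  std::map<std::string, std::vector<std::string>> inputs_;
  FakeOpDesc desc_;
  bool need_update_ = false;
};
```

The payoff of this design is that frequent reads and writes stay in cheap STL containers, and the relatively expensive protobuf mutation happens once per `Proto()` call.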
19 changes: 8 additions & 11 deletions paddle/pybind/pybind.cc
@@ -161,8 +161,7 @@ All parameter, weight, gradient are variables in Paddle.
py::return_value_policy::reference)
.def("find_var", &Scope::FindVar, py::return_value_policy::reference)
.def(py::init<>())
.def("new_scope",
[](Scope &self) -> Scope * { return &self.NewScope(); },
.def("new_scope", [](Scope &self) -> Scope * { return &self.NewScope(); },
py::return_value_policy::reference)
.def("drop_kids", &Scope::DropKids);

@@ -230,8 +229,7 @@ All parameter, weight, gradient are variables in Paddle.
})
.def("infer_shape", &OperatorBase::InferShape)
.def("run",
[](OperatorBase &self,
const Scope &scope,
[](OperatorBase &self, const Scope &scope,
const platform::DeviceContext &dev_ctx) {
self.Run(scope, dev_ctx);
dev_ctx.Wait();
@@ -259,10 +257,8 @@ All parameter, weight, gradient are variables in Paddle.
retv->SetType("plain_net");
return retv;
})
.def("append_op",
[](operators::NetOp &self, const OperatorBase &op) {
self.AppendOp(op);
})
.def("append_op", [](operators::NetOp &self,
const OperatorBase &op) { self.AppendOp(op); })
.def("complete_add_op", &operators::NetOp::CompleteAddOp)
.def("complete_add_op", [](std::shared_ptr<operators::NetOp> &self) {
self->CompleteAddOp();
@@ -282,9 +278,10 @@ All parameter, weight, gradient are variables in Paddle.
auto rnn_op = OpRegistry::CreateOp(desc);
return static_cast<operators::RecurrentOp *>(rnn_op.release());
})
.def("set_stepnet",
[](operators::RecurrentOp &self, const operators::NetOp &net)
-> void { self.set_stepnet(net.Clone()); });
.def("set_stepnet", [](operators::RecurrentOp &self,
const operators::NetOp &net) -> void {
self.set_stepnet(net.Clone());
});

// cond_op
py::class_<operators::CondOp, OperatorBase>(m, "CondOp")
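The bindings re-wrapped in pybind.cc and protobuf.cc all follow one pybind11 pattern: `.def` with a lambda plus `py::return_value_policy::reference`, so Python borrows the returned pointer instead of taking ownership. A minimal, self-contained sketch of that pattern follows; the `Scope` class and module name are illustrative stand-ins, not Paddle's actual types.

```cpp
// Minimal pybind11 sketch (illustrative only; "Scope" here is a toy class,
// not paddle::framework::Scope).
#include <pybind11/pybind11.h>

#include <memory>
#include <vector>

namespace py = pybind11;

class Scope {
 public:
  Scope &NewScope() {
    kids_.emplace_back(new Scope());
    return *kids_.back();
  }

 private:
  std::vector<std::unique_ptr<Scope>> kids_;
};

PYBIND11_MODULE(example, m) {
  py::class_<Scope>(m, "Scope")
      .def(py::init<>())
      // Return a raw pointer but keep ownership on the C++ side, mirroring
      // the `new_scope` binding in pybind.cc above.
      .def("new_scope",
           [](Scope &self) -> Scope * { return &self.NewScope(); },
           py::return_value_policy::reference);
}
```

With `reference`, the child `Scope` stays owned by its parent on the C++ side; pybind11 will not delete it when the Python wrapper is garbage-collected.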
11 changes: 4 additions & 7 deletions paddle/pybind/tensor_py.h
@@ -63,11 +63,8 @@ struct CastToPyBufferImpl<true, I, ARGS...> {
}
return py::buffer_info(
dst_tensor.mutable_data<CUR_TYPE>(dst_tensor.holder_->place()),
sizeof(CUR_TYPE),
py::format_descriptor<CUR_TYPE>::format(),
(size_t)framework::arity(dst_tensor.dims()),
dims_outside,
strides);
sizeof(CUR_TYPE), py::format_descriptor<CUR_TYPE>::format(),
(size_t)framework::arity(dst_tensor.dims()), dims_outside, strides);
} else {
constexpr bool less = I + 1 < std::tuple_size<std::tuple<ARGS...>>::value;
return CastToPyBufferImpl<less, I + 1, ARGS...>()(tensor);
@@ -110,8 +107,8 @@ void PyCUDATensorSetFromArray(

self.Resize(framework::make_ddim(dims));
auto *dst = self.mutable_data<T>(place);
paddle::platform::GpuMemcpySync(
dst, array.data(), sizeof(T) * array.size(), cudaMemcpyHostToDevice);
paddle::platform::GpuMemcpySync(dst, array.data(), sizeof(T) * array.size(),
cudaMemcpyHostToDevice);
}
#endif

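The `py::buffer_info` call being re-wrapped in tensor_py.h takes, in order: data pointer, item size, format string, rank, dims, and strides. A small sketch of how that constructor is typically used to expose a 2-D float buffer to NumPy is below; the `Tensor` struct and module name are simplified assumptions, not Paddle's tensor_py.h.

```cpp
// Toy example of the py::buffer_info pattern (simplified; not the actual
// Paddle Tensor).
#include <pybind11/pybind11.h>

#include <vector>

namespace py = pybind11;

struct Tensor {
  std::vector<float> data;
  size_t rows = 0, cols = 0;
};

PYBIND11_MODULE(example, m) {
  py::class_<Tensor>(m, "Tensor", py::buffer_protocol())
      .def(py::init([](size_t rows, size_t cols) {
        Tensor t;
        t.rows = rows;
        t.cols = cols;
        t.data.assign(rows * cols, 0.f);
        return t;
      }))
      .def_buffer([](Tensor &t) -> py::buffer_info {
        // Same argument order as the call in tensor_py.h:
        // ptr, item size, format, rank, dims, strides (strides in bytes).
        return py::buffer_info(
            t.data.data(), sizeof(float),
            py::format_descriptor<float>::format(), 2,
            {t.rows, t.cols},
            {sizeof(float) * t.cols, sizeof(float)});
      });
}
```

Declaring the class with `py::buffer_protocol()` and providing `def_buffer` is what lets `numpy.array(tensor, copy=False)` view the underlying memory directly.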
1 change: 1 addition & 0 deletions paddle/string/.clang-format
4 changes: 2 additions & 2 deletions paddle/string/piece.h
@@ -30,7 +30,7 @@ namespace string {
// its syntax is simple as it doesn't own/manage the string, it is
// cheap to construct Pieces and pass them around.
class Piece {
public:
public:
static const size_t npos = static_cast<size_t>(-1);

// We provide non-explicit singleton constructors so users can
@@ -57,7 +57,7 @@ class Piece {
// Return a string that contains the copy of the referenced data.
std::string ToString() const { return std::string(data_, size_); }

private:
private:
const char* data_;
size_t size_;

4 changes: 2 additions & 2 deletions paddle/string/printf_test.cc
@@ -11,6 +11,6 @@ TEST(StringPrintf, StringPrintf) {
long hour = 14;
int min = 44;
EXPECT_EQ(std::string("Wednesday, July 27, 14:44"),
paddle::string::Sprintf(
"%s, %s %d, %.2d:%.2d", weekday, month, day, hour, min));
paddle::string::Sprintf("%s, %s %d, %.2d:%.2d", weekday, month, day,
hour, min));
}