Skip to content

Commit

Permalink
Cleanup std::move (pytorch#91987)
Browse files Browse the repository at this point in the history
Fix a use-after-move and remove unnecessary lint suppressions (NOLINT performance-move-const-arg).
Pull Request resolved: pytorch#91987
Approved by: https://github.com/Skylion007
  • Loading branch information
cyyever authored and pytorchmergebot committed Jan 14, 2023
1 parent 1490dc6 commit a0626c3
Show file tree
Hide file tree
Showing 16 changed files with 27 additions and 72 deletions.
2 changes: 1 addition & 1 deletion aten/src/ATen/core/List_inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ ListElementReference<T, Iterator>& ListElementReference<T, Iterator>::operator=(

template<class T, class Iterator>
ListElementReference<T, Iterator>& ListElementReference<T, Iterator>::operator=(const T& new_value) && {
*iterator_ = c10::detail::ListElementFrom<T>::from(std::move(new_value));
*iterator_ = c10::detail::ListElementFrom<T>::from(new_value);
return *this;
}

Expand Down
15 changes: 4 additions & 11 deletions aten/src/ATen/native/RNN.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -196,10 +196,10 @@ struct QuantizedCellParams : public CellParamsBase {
Tensor _packed_hh,
Tensor _col_offsets_ih,
Tensor _col_offsets_hh,
const Scalar& _scale_ih,
const Scalar& _scale_hh,
const Scalar& _zero_point_ih,
const Scalar& _zero_point_hh)
Scalar _scale_ih,
Scalar _scale_hh,
Scalar _zero_point_ih,
Scalar _zero_point_hh)
: w_ih(std::move(_w_ih)),
w_hh(std::move(_w_hh)),
b_ih_(std::move(_b_ih)),
Expand All @@ -208,13 +208,9 @@ struct QuantizedCellParams : public CellParamsBase {
packed_hh(std::move(_packed_hh)),
col_offsets_ih(std::move(_col_offsets_ih)),
col_offsets_hh(std::move(_col_offsets_hh)),
// NOLINTNEXTLINE(performance-move-const-arg)
scale_ih(std::move(_scale_ih)),
// NOLINTNEXTLINE(performance-move-const-arg)
scale_hh(std::move(_scale_hh)),
// NOLINTNEXTLINE(performance-move-const-arg)
zero_point_ih(std::move(_zero_point_ih)),
// NOLINTNEXTLINE(performance-move-const-arg)
zero_point_hh(std::move(_zero_point_hh)) {}

const Tensor w_ih;
Expand Down Expand Up @@ -259,11 +255,8 @@ struct QuantizedCellParams : public CellParamsBase {
zero_point_hh.toLong()};
return CellParamsSerializationType(
"quantized",
// NOLINTNEXTLINE(performance-move-const-arg)
std::move(tensors_to_serialize),
// NOLINTNEXTLINE(performance-move-const-arg)
std::move(doubles_to_serialize),
// NOLINTNEXTLINE(performance-move-const-arg)
std::move(longs_to_serialize),
{});
}
Expand Down
3 changes: 1 addition & 2 deletions c10/core/TensorImpl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -127,8 +127,7 @@ TensorImpl::TensorImpl(
DispatchKeySet key_set,
const caffe2::TypeMeta data_type,
c10::optional<c10::Device> device_opt)
// NOLINTNEXTLINE(performance-move-const-arg)
: TensorImpl({}, key_set, data_type, std::move(device_opt)) {}
: TensorImpl({}, key_set, data_type, device_opt) {}

// NOLINTNEXTLINE(cppcoreguidelines-pro-type-member-init)
TensorImpl::TensorImpl(
Expand Down
14 changes: 3 additions & 11 deletions torch/csrc/api/include/torch/data/dataloader.h
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,7 @@ torch::disable_if_t<
std::unique_ptr<StatelessDataLoader<Dataset, Sampler>>>
make_data_loader(Dataset dataset, Sampler sampler, DataLoaderOptions options) {
return torch::make_unique<StatelessDataLoader<Dataset, Sampler>>(
// NOLINTNEXTLINE(performance-move-const-arg)
std::move(dataset),
std::move(sampler),
std::move(options));
std::move(dataset), std::move(sampler), std::move(options));
}

/// Creates a `DataLoader` instance for a stateless `dataset` and some
Expand All @@ -46,10 +43,7 @@ make_data_loader(
"Expected the dataset to be sized in "
"order to construct the Sampler");
return make_data_loader(
// NOLINTNEXTLINE(performance-move-const-arg)
std::move(dataset),
Sampler(*size),
std::move(options));
std::move(dataset), Sampler(*size), std::move(options));
}

/// Creates a `DataLoader` for a stateful `dataset` and some `options`.
Expand All @@ -58,9 +52,7 @@ std::unique_ptr<StatefulDataLoader<Dataset>> make_data_loader(
Dataset dataset,
DataLoaderOptions options = DataLoaderOptions()) {
return torch::make_unique<StatefulDataLoader<Dataset>>(
// NOLINTNEXTLINE(performance-move-const-arg)
std::move(dataset),
std::move(options));
std::move(dataset), std::move(options));
}
} // namespace data
} // namespace torch
1 change: 0 additions & 1 deletion torch/csrc/api/include/torch/data/datasets/chunk.h
Original file line number Diff line number Diff line change
Expand Up @@ -335,7 +335,6 @@ class ChunkDataset final
: chunk_reader_(std::move(chunk_reader)),
chunk_sampler_(std::move(chunk_sampler)),
example_sampler_(std::move(example_sampler)),
// NOLINTNEXTLINE(performance-move-const-arg)
options_(std::move(options)),
preprocessing_policy_(preprocessing_policy),
quit_worker_(false),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -386,7 +386,6 @@ class Sequential : public torch::nn::ModuleHolder<SequentialImpl> {
/// Constructs the `Sequential` from a braced-init-list of named `AnyModule`s.
/// It enables the following use case:
/// `Sequential sequential({{"m1", M(1)}, {"m2", M(2)}})`
// NOLINTNEXTLINE(performance-move-const-arg)
Sequential(std::initializer_list<NamedAnyModule> named_modules)
: ModuleHolder(
std::make_shared<SequentialImpl>(std::move(named_modules))) {}
Expand Down
2 changes: 0 additions & 2 deletions torch/csrc/api/include/torch/nn/options/fold.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,7 @@ namespace nn {
/// ```
struct TORCH_API FoldOptions {
FoldOptions(ExpandingArray<2> output_size, ExpandingArray<2> kernel_size)
// NOLINTNEXTLINE(performance-move-const-arg)
: output_size_(std::move(output_size)),
// NOLINTNEXTLINE(performance-move-const-arg)
kernel_size_(std::move(kernel_size)) {}

/// describes the spatial shape of the large containing tensor of the sliding
Expand Down
7 changes: 2 additions & 5 deletions torch/csrc/api/include/torch/optim/adagrad.h
Original file line number Diff line number Diff line change
Expand Up @@ -90,11 +90,8 @@ class TORCH_API Adagrad : public Optimizer {
}
}

explicit Adagrad(
std::vector<Tensor> params,
// NOLINTNEXTLINE(performance-move-const-arg)
AdagradOptions defaults = {})
: Adagrad({std::move(OptimizerParamGroup(params))}, defaults) {}
explicit Adagrad(std::vector<Tensor> params, AdagradOptions defaults = {})
: Adagrad({OptimizerParamGroup(std::move(params))}, defaults) {}

torch::Tensor step(LossClosure closure = nullptr) override;
void save(serialize::OutputArchive& archive) const override;
Expand Down
7 changes: 2 additions & 5 deletions torch/csrc/api/include/torch/optim/adam.h
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,8 @@ class TORCH_API Adam : public Optimizer {
"Invalid weight_decay value: ",
defaults.weight_decay());
}
explicit Adam(
std::vector<Tensor> params,
// NOLINTNEXTLINE(performance-move-const-arg)
AdamOptions defaults = {})
: Adam({std::move(OptimizerParamGroup(params))}, defaults) {}
explicit Adam(std::vector<Tensor> params, AdamOptions defaults = {})
: Adam({OptimizerParamGroup(std::move(params))}, defaults) {}

torch::Tensor step(LossClosure closure = nullptr) override;
void save(serialize::OutputArchive& archive) const override;
Expand Down
7 changes: 2 additions & 5 deletions torch/csrc/api/include/torch/optim/adamw.h
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,8 @@ class TORCH_API AdamW : public Optimizer {
"Invalid weight_decay value: ",
defaults.weight_decay());
}
explicit AdamW(
std::vector<Tensor> params,
// NOLINTNEXTLINE(performance-move-const-arg)
AdamWOptions defaults = {})
: AdamW({std::move(OptimizerParamGroup(params))}, defaults) {}
explicit AdamW(std::vector<Tensor> params, AdamWOptions defaults = {})
: AdamW({OptimizerParamGroup(std::move(params))}, defaults) {}

torch::Tensor step(LossClosure closure = nullptr) override;
void save(serialize::OutputArchive& archive) const override;
Expand Down
7 changes: 2 additions & 5 deletions torch/csrc/api/include/torch/optim/lbfgs.h
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,8 @@ class TORCH_API LBFGS : public Optimizer {
}
_numel_cache = c10::nullopt;
}
explicit LBFGS(
std::vector<Tensor> params,
// NOLINTNEXTLINE(performance-move-const-arg)
LBFGSOptions defaults = {})
: LBFGS({std::move(OptimizerParamGroup(params))}, defaults) {}
explicit LBFGS(std::vector<Tensor> params, LBFGSOptions defaults = {})
: LBFGS({OptimizerParamGroup(std::move(params))}, defaults) {}

Tensor step(LossClosure closure) override;
void save(serialize::OutputArchive& archive) const override;
Expand Down
3 changes: 1 addition & 2 deletions torch/csrc/api/include/torch/optim/optimizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -114,12 +114,11 @@ class TORCH_API Optimizer {
}

/// Constructs the `Optimizer` from a vector of parameters.
// NOLINTNEXTLINE(performance-move-const-arg)
explicit Optimizer(
std::vector<Tensor> parameters,
std::unique_ptr<OptimizerOptions> defaults)
: Optimizer(
{std::move(OptimizerParamGroup(parameters))},
{OptimizerParamGroup(std::move(parameters))},
std::move(defaults)){};

/// Adds the given param_group to the optimizer's param_group list.
Expand Down
7 changes: 2 additions & 5 deletions torch/csrc/api/include/torch/optim/rmsprop.h
Original file line number Diff line number Diff line change
Expand Up @@ -80,11 +80,8 @@ class TORCH_API RMSprop : public Optimizer {
defaults.alpha() >= 0, "Invalid alpha value: ", defaults.alpha());
}

explicit RMSprop(
std::vector<Tensor> params,
// NOLINTNEXTLINE(performance-move-const-arg)
RMSpropOptions defaults = {})
: RMSprop({std::move(OptimizerParamGroup(params))}, defaults) {}
explicit RMSprop(std::vector<Tensor> params, RMSpropOptions defaults = {})
: RMSprop({OptimizerParamGroup(std::move(params))}, defaults) {}

torch::Tensor step(LossClosure closure = nullptr) override;
void save(serialize::OutputArchive& archive) const override;
Expand Down
7 changes: 2 additions & 5 deletions torch/csrc/api/include/torch/optim/sgd.h
Original file line number Diff line number Diff line change
Expand Up @@ -75,11 +75,8 @@ class TORCH_API SGD : public Optimizer {
"Nesterov momentum requires a momentum and zero dampening");
}

explicit SGD(
std::vector<Tensor> params,
// NOLINTNEXTLINE(performance-move-const-arg)
SGDOptions defaults)
: SGD({std::move(OptimizerParamGroup(params))}, defaults) {}
explicit SGD(std::vector<Tensor> params, SGDOptions defaults)
: SGD({OptimizerParamGroup(std::move(params))}, defaults) {}

torch::Tensor step(LossClosure closure = nullptr) override;

Expand Down
12 changes: 4 additions & 8 deletions torch/csrc/api/src/serialize/input-archive.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -94,14 +94,12 @@ void InputArchive::read(const std::string& key, InputArchive& archive) {
void InputArchive::load_from(
const std::string& filename,
c10::optional<torch::Device> device /*= c10::nullopt*/) {
// NOLINTNEXTLINE(performance-move-const-arg)
module_ = torch::jit::load(filename, std::move(device));
}

void InputArchive::load_from(
std::istream& stream,
c10::optional<torch::Device> device /*= c10::nullopt*/) {
// NOLINTNEXTLINE(performance-move-const-arg)
module_ = torch::jit::load(stream, std::move(device));
}

Expand Down Expand Up @@ -131,9 +129,8 @@ void InputArchive::load_from(
const char* data_;
size_t size_;
};
std::unique_ptr<OurAdapter> adapter(new OurAdapter(data, size));
// NOLINTNEXTLINE(performance-move-const-arg)
module_ = torch::jit::load(std::move(adapter), std::move(device));
module_ = torch::jit::load(
std::make_unique<OurAdapter>(data, size), std::move(device));
}

void InputArchive::load_from(
Expand All @@ -160,9 +157,8 @@ void InputArchive::load_from(
const std::function<size_t(uint64_t, void*, size_t)>& read_func_;
const std::function<size_t(void)>& size_func_;
};
std::unique_ptr<OurAdapter> adapter(new OurAdapter(read_func, size_func));
// NOLINTNEXTLINE(performance-move-const-arg)
module_ = torch::jit::load(std::move(adapter), std::move(device));
module_ = torch::jit::load(
std::make_unique<OurAdapter>(read_func, size_func), std::move(device));
}

std::vector<std::string> InputArchive::keys() {
Expand Down
4 changes: 1 addition & 3 deletions torch/csrc/autograd/VariableTypeUtils.h
Original file line number Diff line number Diff line change
Expand Up @@ -120,10 +120,8 @@ inline void rebase_history(
if (grad_fn) {
for (auto& var : vars) {
if (var.defined()) {
// TODO: eliminate const_cast
// NOLINTNEXTLINE(bugprone-use-after-move)
auto output_nr = grad_fn->add_input_metadata(var);
impl::rebase_history(var, {std::move(grad_fn), output_nr});
impl::rebase_history(var, {grad_fn, output_nr});
} else {
grad_fn->add_input_metadata(Node::undefined_input());
}
Expand Down

0 comments on commit a0626c3

Please sign in to comment.