
Commit 7ddc6f8

NULL -> nullptr (pytorch#11047)
Summary:
How did we get so many uses of `NULL` again?

ezyang
Pull Request resolved: pytorch#11047

Differential Revision: D9566799

Pulled By: goldsborough

fbshipit-source-id: 83469f352ac69aa65bdaf1a1a21f922d892e0db3
goldsborough authored and facebook-github-bot committed Aug 30, 2018
1 parent 302e9cb commit 7ddc6f8
Showing 33 changed files with 332 additions and 332 deletions.
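For context on the change itself: `NULL` in C++ is an integer constant (typically `0` or `0L`), so it participates in integer overload resolution, while `nullptr` has its own type, `std::nullptr_t`, which converts only to pointer types. A minimal sketch of the difference — hypothetical code, not part of this diff:

```cpp
#include <iostream>

void take(int)   { std::cout << "int overload\n"; }
void take(char*) { std::cout << "pointer overload\n"; }

int main() {
  // take(NULL);   // NULL expands to 0 or 0L: picks take(int), or is
  //               // rejected as ambiguous, depending on the platform
  take(nullptr);   // always the pointer overload: std::nullptr_t
                   // converts to any pointer type, never to int
}
```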
1 change: 1 addition & 0 deletions .clang-tidy
@@ -37,6 +37,7 @@ Checks: '
 ,-performance-unnecessary-value-param
 ,-readability-braces-around-statements
 ,-readability-else-after-return
+,-readability-implicit-bool-conversion
 ,-readability-named-parameter
 '
 WarningsAsErrors: ''
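The newly excluded `readability-implicit-bool-conversion` check flags pointers and integers used directly as booleans, a pattern that is pervasive in this codebase and presumably would fire throughout the files touched here. A sketch of what the check complains about — hypothetical code, not from this commit:

```cpp
#include <cstdio>

void demo(char* p, int n) {
  if (p) {      // the check would suggest: if (p != nullptr)
    std::puts(p);
  }
  if (!n) {     // the check would suggest: if (n == 0)
    std::puts("zero");
  }
}
```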
20 changes: 10 additions & 10 deletions torch/csrc/DataLoader.cpp
@@ -39,7 +39,7 @@ static void HANDLER_NAME(int sig, siginfo_t *info, void *ctx) \
   struct sigaction sa; \
   sa.sa_handler = SIG_DFL; \
   sa.sa_flags = 0; \
-  if (sigemptyset(&sa.sa_mask) != 0 || sigaction(SIGNAL, &sa, NULL) != 0) { \
+  if (sigemptyset(&sa.sa_mask) != 0 || sigaction(SIGNAL, &sa, nullptr) != 0) { \
     _exit(EXIT_FAILURE); \
   } else { \
     raise(SIGNAL); \
@@ -80,7 +80,7 @@ static void handler_SIGTERM(int sig, siginfo_t *info, void *ctx)
   struct sigaction sa;
   sa.sa_handler = SIG_DFL;
   sa.sa_flags = 0;
-  if (sigemptyset(&sa.sa_mask) != 0 || sigaction(SIGTERM, &sa, NULL) != 0) {
+  if (sigemptyset(&sa.sa_mask) != 0 || sigaction(SIGTERM, &sa, nullptr) != 0) {
     _exit(EXIT_FAILURE);
   } else {
     raise(SIGTERM);
@@ -89,9 +89,9 @@ static void handler_SIGTERM(int sig, siginfo_t *info, void *ctx)
 
 static PyObject *THPModule_setWorkerSignalHandlers(PyObject *module, PyObject *arg) {
   HANDLE_TH_ERRORS
-  setSignalHandler(SIGBUS, &handler_SIGBUS, NULL);
-  setSignalHandler(SIGSEGV, &handler_SIGSEGV, NULL);
-  setSignalHandler(SIGTERM, &handler_SIGTERM, NULL);
+  setSignalHandler(SIGBUS, &handler_SIGBUS, nullptr);
+  setSignalHandler(SIGSEGV, &handler_SIGSEGV, nullptr);
+  setSignalHandler(SIGTERM, &handler_SIGTERM, nullptr);
   Py_RETURN_NONE;
   END_HANDLE_TH_ERRORS
 }
@@ -212,9 +212,9 @@ static PyObject *THPModule_errorIfAnyWorkerFails(PyObject *module, PyObject *_ig
 #endif
 
 PyMethodDef DataLoaderMethods[] = {
-  {"_set_worker_signal_handlers", (PyCFunction)THPModule_setWorkerSignalHandlers, METH_NOARGS, NULL},
-  {"_update_worker_pids", (PyCFunction)THPModule_updateWorkerPIDs, METH_VARARGS, NULL},
-  {"_remove_worker_pids", (PyCFunction)THPModule_removeWorkerPIDs, METH_O, NULL},
-  {"_error_if_any_worker_fails", (PyCFunction)THPModule_errorIfAnyWorkerFails, METH_NOARGS, NULL},
-  {NULL, NULL, 0, NULL}
+  {"_set_worker_signal_handlers", (PyCFunction)THPModule_setWorkerSignalHandlers, METH_NOARGS, nullptr},
+  {"_update_worker_pids", (PyCFunction)THPModule_updateWorkerPIDs, METH_VARARGS, nullptr},
+  {"_remove_worker_pids", (PyCFunction)THPModule_removeWorkerPIDs, METH_O, nullptr},
+  {"_error_if_any_worker_fails", (PyCFunction)THPModule_errorIfAnyWorkerFails, METH_NOARGS, nullptr},
+  {nullptr, nullptr, 0, nullptr}
 };
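Throughout these tables, the final `{nullptr, nullptr, 0, nullptr}` entry is the sentinel CPython uses to find the end of a `PyMethodDef` array, and the last field of each real entry is the optional docstring. A self-contained sketch of the same pattern for a hypothetical module:

```cpp
#include <Python.h>

static PyObject* ping(PyObject* self, PyObject* args) {
  Py_RETURN_NONE;  // trivial method body, for illustration only
}

static PyMethodDef ExampleMethods[] = {
  // {name, function, calling convention, docstring (may be nullptr)}
  {"ping", (PyCFunction)ping, METH_NOARGS, nullptr},
  {nullptr, nullptr, 0, nullptr}  // sentinel: CPython stops scanning here
};
```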
2 changes: 1 addition & 1 deletion torch/csrc/Device.cpp
@@ -173,7 +173,7 @@ static struct PyGetSetDef THPDevice_properties[] = {
 
 static PyMethodDef THPDevice_methods[] = {
   {"__reduce__", (PyCFunction)THPDevice_reduce, METH_NOARGS, nullptr},
-  {NULL} /* Sentinel */
+  {nullptr} /* Sentinel */
 };
 
 PyTypeObject THPDeviceType = {
2 changes: 1 addition & 1 deletion torch/csrc/Dtype.cpp
@@ -47,7 +47,7 @@ static struct PyGetSetDef THPDtype_properties[] = {
 
 static PyMethodDef THPDtype_methods[] = {
   {"__reduce__", (PyCFunction)THPDtype_reduce, METH_NOARGS, nullptr},
-  {NULL} /* Sentinel */
+  {nullptr} /* Sentinel */
 };
 
 PyObject *THPDtype_repr(THPDtype *self)
2 changes: 1 addition & 1 deletion torch/csrc/Exceptions.cpp
@@ -12,7 +12,7 @@ PyObject *THPException_FatalError;
 #define ASSERT_TRUE(cond) if (!(cond)) return false
 bool THPException_init(PyObject *module)
 {
-  ASSERT_TRUE(THPException_FatalError = PyErr_NewException("torch.FatalError", NULL, NULL));
+  ASSERT_TRUE(THPException_FatalError = PyErr_NewException("torch.FatalError", nullptr, nullptr));
   ASSERT_TRUE(PyModule_AddObject(module, "FatalError", THPException_FatalError) == 0);
   return true;
 }
2 changes: 1 addition & 1 deletion torch/csrc/Exceptions.h
@@ -29,7 +29,7 @@
     return retval; \
   }
 
-#define END_HANDLE_TH_ERRORS END_HANDLE_TH_ERRORS_RET(NULL)
+#define END_HANDLE_TH_ERRORS END_HANDLE_TH_ERRORS_RET(nullptr)
 
 extern PyObject *THPException_FatalError;
 
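These macros bracket every binding in this file set: `HANDLE_TH_ERRORS` opens a `try`, and `END_HANDLE_TH_ERRORS` catches C++ exceptions, sets a Python exception, and returns `nullptr` — the C-API convention for "an error occurred". A sketch of typical usage, mirroring functions like `THPModule_setWorkerSignalHandlers` above (the body is hypothetical):

```cpp
static PyObject* example_binding(PyObject* self, PyObject* args) {
  HANDLE_TH_ERRORS      // begins: try {
  // ... work that may throw a C++ exception ...
  Py_RETURN_NONE;
  END_HANDLE_TH_ERRORS  // ends: } catch (...) { set Python error; return nullptr; }
}
```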
28 changes: 14 additions & 14 deletions torch/csrc/Generator.cpp
@@ -14,17 +14,17 @@
 using namespace at;
 using namespace torch;
 
-PyObject *THPGeneratorClass = NULL;
+PyObject *THPGeneratorClass = nullptr;
 
 PyObject * THPGenerator_New()
 {
   PyObject *args = PyTuple_New(0);
   if (!args) {
     PyErr_SetString(PyExc_RuntimeError, "Could not create a new generator object - "
                     "failed to allocate argument tuple");
-    return NULL;
+    return nullptr;
   }
-  PyObject *result = PyObject_Call((PyObject*)THPGeneratorClass, args, NULL);
+  PyObject *result = PyObject_Call((PyObject*)THPGeneratorClass, args, nullptr);
   Py_DECREF(args);
   return result;
 }
@@ -52,7 +52,7 @@ static PyObject * THPGenerator_pynew(PyTypeObject *type, PyObject *args, PyObjec
   HANDLE_TH_ERRORS
   if ((args && PyTuple_Size(args) != 0) || kwargs) {
     THPUtils_setError("torch.Generator constructor doesn't accept any arguments");
-    return NULL;
+    return nullptr;
   }
   THPGeneratorPtr self((THPGenerator *)type->tp_alloc(type, 0));
   // having to pick a specific type rather than just a backend here is strange,
@@ -120,21 +120,21 @@ static PyObject * THPGenerator_initialSeed(THPGenerator *self)
 }
 
 static PyMethodDef THPGenerator_methods[] = {
-  {"get_state", (PyCFunction)THPGenerator_getState, METH_NOARGS, NULL},
-  {"set_state", (PyCFunction)THPGenerator_setState, METH_O, NULL},
-  {"manual_seed", (PyCFunction)THPGenerator_manualSeed, METH_O, NULL},
-  {"seed", (PyCFunction)THPGenerator_seed, METH_NOARGS, NULL},
-  {"initial_seed", (PyCFunction)THPGenerator_initialSeed, METH_NOARGS, NULL},
-  {NULL}
+  {"get_state", (PyCFunction)THPGenerator_getState, METH_NOARGS, nullptr},
+  {"set_state", (PyCFunction)THPGenerator_setState, METH_O, nullptr},
+  {"manual_seed", (PyCFunction)THPGenerator_manualSeed, METH_O, nullptr},
+  {"seed", (PyCFunction)THPGenerator_seed, METH_NOARGS, nullptr},
+  {"initial_seed", (PyCFunction)THPGenerator_initialSeed, METH_NOARGS, nullptr},
+  {nullptr}
 };
 
 static struct PyMemberDef THPGenerator_members[] = {
-  {(char*)"_cdata", T_ULONGLONG, offsetof(THPGenerator, cdata), READONLY, NULL},
-  {NULL}
+  {(char*)"_cdata", T_ULONGLONG, offsetof(THPGenerator, cdata), READONLY, nullptr},
+  {nullptr}
 };
 
 PyTypeObject THPGeneratorType = {
-  PyVarObject_HEAD_INIT(NULL, 0)
+  PyVarObject_HEAD_INIT(nullptr, 0)
   "torch._C.Generator", /* tp_name */
   sizeof(THPGenerator), /* tp_basicsize */
   0, /* tp_itemsize */
@@ -154,7 +154,7 @@ PyTypeObject THPGeneratorType = {
   0, /* tp_setattro */
   0, /* tp_as_buffer */
   Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
-  NULL, /* tp_doc */
+  nullptr, /* tp_doc */
   0, /* tp_traverse */
   0, /* tp_clear */
   0, /* tp_richcompare */
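The `PyVarObject_HEAD_INIT(nullptr, 0)` pattern deserves a note: its first argument is the object's own type (the metatype), which cannot portably be written as `&PyType_Type` in a static initializer on some toolchains, so it is left null and filled in later by `PyType_Ready()`. A minimal sketch with a hypothetical type, not from this diff:

```cpp
#include <Python.h>

// Statically allocated type object; unlisted slots default to zero.
static PyTypeObject ExampleType = {
  PyVarObject_HEAD_INIT(nullptr, 0)  // ob_type is set later by PyType_Ready
  "example.Example",                 /* tp_name */
  sizeof(PyObject),                  /* tp_basicsize */
  0,                                 /* tp_itemsize */
};

bool init_example_type() {
  // Fills in ob_type, inherits slots from the base, validates the struct.
  return PyType_Ready(&ExampleType) >= 0;
}
```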
84 changes: 42 additions & 42 deletions torch/csrc/Module.cpp
@@ -53,7 +53,7 @@ namespace py = pybind11;
 
 PyObject* module;
 
-THPGenerator *THPDefaultGenerator = NULL;
+THPGenerator *THPDefaultGenerator = nullptr;
 
 ////////////////////////////////////////////////////////////////////////////////
 ////////////////////////////////////////////////////////////////////////////////
@@ -63,7 +63,7 @@ static PyObject * THPModule_initNames(PyObject *self, PyObject *arg)
   static std::vector<std::string> names;
 
   THPObjectPtr types(PySequence_Fast(arg, "expected a sequence"));
-  if (!types) return NULL;
+  if (!types) return nullptr;
 
   int num_classes = PySequence_Fast_GET_SIZE(types.get());
   names.reserve(names.size() + num_classes);
@@ -73,7 +73,7 @@ static PyObject * THPModule_initNames(PyObject *self, PyObject *arg)
     PyTypeObject* type = (PyTypeObject*)obj;
 
     THPObjectPtr module_name(PyObject_GetAttrString(obj, "__module__"));
-    if (!module_name) return NULL;
+    if (!module_name) return nullptr;
     THPUtils_assert(THPUtils_checkString(module_name.get()),
         "expected __module__ to be a string");
     std::string name = THPUtils_unpackString(module_name.get());
@@ -89,7 +89,7 @@ static PyObject * THPModule_initExtension(PyObject *_unused, PyObject *shm_manag
   HANDLE_TH_ERRORS
   if (!THPUtils_checkString(shm_manager_path)) {
     THPUtils_setError("initialization error - expected bytes/string object as shm_manager_path!");
-    return NULL;
+    return nullptr;
   }
   torch::utils::initializeLayouts();
   torch::utils::initializeDtypes();
@@ -172,8 +172,8 @@ PyObject * THPModule_setDefaultDtype(PyObject *_unused, PyObject *dtype)
 
 PyObject *THPModule_safeCall(PyObject *_unused, PyObject *args, PyObject *kwargs)
 {
-  PyObject *result = NULL;
-  PyObject *args_slice = NULL;
+  PyObject *result = nullptr;
+  PyObject *args_slice = nullptr;
   PyThreadState *thread_state = PyThreadState_Get();
   Py_ssize_t num_args = args ? PyTuple_Size(args) : 0;
   THPUtils_assert(num_args > 0, "expected at least one argument");
@@ -197,7 +197,7 @@ PyObject *THPModule_addDocStr(PyObject *_unused, PyObject *args)
   PyObject *obj;
   PyObject *doc_obj;
   if (!PyArg_ParseTuple(args, "OO", &obj, &doc_obj)) {
-    return NULL;
+    return nullptr;
   }
 
   const char* doc_str = "<invalid string>";
@@ -403,36 +403,36 @@ PyObject *THPModule_isDefaultTypeCuda(PyObject *_unused, PyObject *arg) {
 }
 
 static PyMethodDef TorchMethods[] = {
-  {"_initExtension", (PyCFunction)THPModule_initExtension, METH_O, NULL},
-  {"_autograd_init", (PyCFunction)THPAutograd_initExtension, METH_NOARGS, NULL},
-  {"_add_docstr", (PyCFunction)THPModule_addDocStr, METH_VARARGS, NULL},
-  {"_init_names", (PyCFunction)THPModule_initNames, METH_O, NULL},
-  {"_has_distributed",(PyCFunction)THPModule_hasDistributed, METH_NOARGS, NULL},
-  {"_safe_call", (PyCFunction)THPModule_safeCall, METH_VARARGS | METH_KEYWORDS, NULL},
-  {"_set_default_tensor_type", (PyCFunction)THPModule_setDefaultTensorType, METH_O, NULL},
-  {"_set_default_dtype", (PyCFunction)THPModule_setDefaultDtype, METH_O, NULL},
-  {"_infer_size", (PyCFunction)THPModule_inferSize, METH_VARARGS, NULL},
-  {"_crash_if_csrc_asan", (PyCFunction)THPModule_crashIfCsrcASAN, METH_O, NULL},
-  {"_crash_if_csrc_ubsan", (PyCFunction)THPModule_crashIfCsrcUBSAN, METH_O, NULL},
-  {"_crash_if_aten_asan", (PyCFunction)THPModule_crashIfATenASAN, METH_O, NULL},
-  {"_set_backcompat_broadcast_warn", (PyCFunction)THPModule_setBackcompatBroadcastWarn, METH_O, NULL},
-  {"_get_backcompat_broadcast_warn", (PyCFunction)THPModule_getBackcompatBroadcastWarn, METH_NOARGS, NULL},
-  {"_set_backcompat_keepdim_warn", (PyCFunction)THPModule_setBackcompatKeepdimWarn, METH_O, NULL},
-  {"_get_backcompat_keepdim_warn", (PyCFunction)THPModule_getBackcompatKeepdimWarn, METH_NOARGS, NULL},
-  {"get_num_threads", (PyCFunction)THPModule_getNumThreads, METH_NOARGS, NULL},
-  {"set_num_threads", (PyCFunction)THPModule_setNumThreads, METH_O, NULL},
-  {"_get_cudnn_enabled", (PyCFunction)THPModule_userEnabledCuDNN, METH_NOARGS, NULL},
-  {"_set_cudnn_enabled", (PyCFunction)THPModule_setUserEnabledCuDNN, METH_O, NULL},
-  {"_get_cudnn_benchmark", (PyCFunction)THPModule_benchmarkCuDNN, METH_NOARGS, NULL},
-  {"_set_cudnn_benchmark", (PyCFunction)THPModule_setBenchmarkCuDNN, METH_O, NULL},
-  {"_get_cudnn_deterministic", (PyCFunction)THPModule_deterministicCuDNN, METH_NOARGS, NULL},
-  {"_set_cudnn_deterministic", (PyCFunction)THPModule_setDeterministicCuDNN, METH_O, NULL},
-  {"_to_dlpack", (PyCFunction)THPModule_toDLPack, METH_O, NULL},
-  {"_from_dlpack", (PyCFunction)THPModule_fromDLPack, METH_O, NULL},
-  {"set_flush_denormal", (PyCFunction)THPModule_setFlushDenormal, METH_O, NULL},
-  {"get_default_dtype", (PyCFunction)THPModule_getDefaultDtype, METH_NOARGS, NULL},
-  {"_is_default_type_cuda", (PyCFunction)THPModule_isDefaultTypeCuda, METH_NOARGS, NULL},
-  {NULL, NULL, 0, NULL}
+  {"_initExtension", (PyCFunction)THPModule_initExtension, METH_O, nullptr},
+  {"_autograd_init", (PyCFunction)THPAutograd_initExtension, METH_NOARGS, nullptr},
+  {"_add_docstr", (PyCFunction)THPModule_addDocStr, METH_VARARGS, nullptr},
+  {"_init_names", (PyCFunction)THPModule_initNames, METH_O, nullptr},
+  {"_has_distributed",(PyCFunction)THPModule_hasDistributed, METH_NOARGS, nullptr},
+  {"_safe_call", (PyCFunction)THPModule_safeCall, METH_VARARGS | METH_KEYWORDS, nullptr},
+  {"_set_default_tensor_type", (PyCFunction)THPModule_setDefaultTensorType, METH_O, nullptr},
+  {"_set_default_dtype", (PyCFunction)THPModule_setDefaultDtype, METH_O, nullptr},
+  {"_infer_size", (PyCFunction)THPModule_inferSize, METH_VARARGS, nullptr},
+  {"_crash_if_csrc_asan", (PyCFunction)THPModule_crashIfCsrcASAN, METH_O, nullptr},
+  {"_crash_if_csrc_ubsan", (PyCFunction)THPModule_crashIfCsrcUBSAN, METH_O, nullptr},
+  {"_crash_if_aten_asan", (PyCFunction)THPModule_crashIfATenASAN, METH_O, nullptr},
+  {"_set_backcompat_broadcast_warn", (PyCFunction)THPModule_setBackcompatBroadcastWarn, METH_O, nullptr},
+  {"_get_backcompat_broadcast_warn", (PyCFunction)THPModule_getBackcompatBroadcastWarn, METH_NOARGS, nullptr},
+  {"_set_backcompat_keepdim_warn", (PyCFunction)THPModule_setBackcompatKeepdimWarn, METH_O, nullptr},
+  {"_get_backcompat_keepdim_warn", (PyCFunction)THPModule_getBackcompatKeepdimWarn, METH_NOARGS, nullptr},
+  {"get_num_threads", (PyCFunction)THPModule_getNumThreads, METH_NOARGS, nullptr},
+  {"set_num_threads", (PyCFunction)THPModule_setNumThreads, METH_O, nullptr},
+  {"_get_cudnn_enabled", (PyCFunction)THPModule_userEnabledCuDNN, METH_NOARGS, nullptr},
+  {"_set_cudnn_enabled", (PyCFunction)THPModule_setUserEnabledCuDNN, METH_O, nullptr},
+  {"_get_cudnn_benchmark", (PyCFunction)THPModule_benchmarkCuDNN, METH_NOARGS, nullptr},
+  {"_set_cudnn_benchmark", (PyCFunction)THPModule_setBenchmarkCuDNN, METH_O, nullptr},
+  {"_get_cudnn_deterministic", (PyCFunction)THPModule_deterministicCuDNN, METH_NOARGS, nullptr},
+  {"_set_cudnn_deterministic", (PyCFunction)THPModule_setDeterministicCuDNN, METH_O, nullptr},
+  {"_to_dlpack", (PyCFunction)THPModule_toDLPack, METH_O, nullptr},
+  {"_from_dlpack", (PyCFunction)THPModule_fromDLPack, METH_O, nullptr},
+  {"set_flush_denormal", (PyCFunction)THPModule_setFlushDenormal, METH_O, nullptr},
+  {"get_default_dtype", (PyCFunction)THPModule_getDefaultDtype, METH_NOARGS, nullptr},
+  {"_is_default_type_cuda", (PyCFunction)THPModule_isDefaultTypeCuda, METH_NOARGS, nullptr},
+  {nullptr, nullptr, 0, nullptr}
 };
 
 bool THCPDoubleStorage_init(PyObject *module);
@@ -487,8 +487,8 @@ static PyObject * THCUDNN_cudnn_version(PyObject *self, PyObject *args)
 }
 
 static PyMethodDef _THCUDNN_methods[] = {
-  {"_cudnn_version", (PyCFunction)THCUDNN_cudnn_version, METH_VARARGS, NULL},
-  {NULL}
+  {"_cudnn_version", (PyCFunction)THCUDNN_cudnn_version, METH_VARARGS, nullptr},
+  {nullptr}
 };
 
 PyMethodDef* THCUDNN_methods() {
@@ -508,7 +508,7 @@ static PyObject* initModule() {
   HANDLE_TH_ERRORS
   THInferNumThreads();
 
-#define ASSERT_TRUE(cmd) if (!(cmd)) return NULL
+#define ASSERT_TRUE(cmd) if (!(cmd)) return nullptr
 
   THPUtils_addPyMethodDefs(methods, TorchMethods);
   THPUtils_addPyMethodDefs(methods, DataLoaderMethods);
@@ -532,7 +532,7 @@ static PyObject* initModule() {
   static struct PyModuleDef torchmodule = {
     PyModuleDef_HEAD_INIT,
    "torch._C",
-    NULL,
+    nullptr,
     -1,
     methods.data()
   };
@@ -634,7 +634,7 @@ static PyObject* initModule() {
   ASSERT_TRUE(set_module_attr("default_generator", (PyObject*)THPDefaultGenerator, /* incref= */ false));
 
 #ifdef USE_NUMPY
-  if (_import_array() < 0) return NULL;
+  if (_import_array() < 0) return nullptr;
 #endif
 
   torch::nn::init__THNN(module);
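The `torchmodule` definition above follows the standard `PyModuleDef` layout; the `nullptr` slot is `m_doc` (the module docstring) and `-1` is `m_size`, meaning the module keeps no per-interpreter state. The same shape for a hypothetical module:

```cpp
#include <Python.h>

static PyMethodDef ExampleMethods[] = {
  {nullptr, nullptr, 0, nullptr}  // sentinel only: no methods
};

static struct PyModuleDef examplemodule = {
  PyModuleDef_HEAD_INIT,
  "example",       // m_name
  nullptr,         // m_doc: no docstring
  -1,              // m_size: per-module state not supported
  ExampleMethods   // m_methods
};

PyMODINIT_FUNC PyInit_example(void) {
  return PyModule_Create(&examplemodule);
}
```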
14 changes: 7 additions & 7 deletions torch/csrc/PtrWrapper.cpp
@@ -2,7 +2,7 @@
 #include "ATen/Utils.h"
 #include <functional>
 
-static PyObject* THPWrapperClass = NULL;
+static PyObject* THPWrapperClass = nullptr;
 
 struct THPWrapper {
   PyObject_HEAD
@@ -14,9 +14,9 @@ PyObject * THPWrapper_New(void *data, void (*destructor)(void*))
 {
   PyObject *args = PyTuple_New(0);
   if (!args) {
-    return NULL;
+    return nullptr;
   }
-  PyObject *result = PyObject_Call(THPWrapperClass, args, NULL);
+  PyObject *result = PyObject_Call(THPWrapperClass, args, nullptr);
   if (result) {
     THPWrapper* wrapper = (THPWrapper*) result;
     wrapper->data = data;
@@ -40,8 +40,8 @@ static PyObject * THPWrapper_pynew(PyTypeObject *type, PyObject *args, PyObject
 {
   PyObject* self = type->tp_alloc(type, 0);
   THPWrapper* wrapper = (THPWrapper*) self;
-  wrapper->data = NULL;
-  wrapper->destructor = NULL;
+  wrapper->data = nullptr;
+  wrapper->destructor = nullptr;
   return self;
 }
 
@@ -52,7 +52,7 @@ static void THPWrapper_dealloc(THPWrapper* self)
 }
 
 PyTypeObject THPWrapperType = {
-  PyVarObject_HEAD_INIT(NULL, 0)
+  PyVarObject_HEAD_INIT(nullptr, 0)
   "torch._C._PtrWrapper", /* tp_name */
   sizeof(THPWrapper), /* tp_basicsize */
   0, /* tp_itemsize */
@@ -72,7 +72,7 @@ PyTypeObject THPWrapperType = {
   0, /* tp_setattro */
   0, /* tp_as_buffer */
   Py_TPFLAGS_DEFAULT, /* tp_flags */
-  NULL, /* tp_doc */
+  nullptr, /* tp_doc */
   0, /* tp_traverse */
   0, /* tp_clear */
   0, /* tp_richcompare */
10 changes: 5 additions & 5 deletions torch/csrc/README.md
@@ -19,18 +19,18 @@ important gotchas:
 
 ## Notes
 
-### Note [Storage is not NULL]
+### Note [Storage is not nullptr]
 
-Historically, Torch supported NULL storage, as a minor optimization to
+Historically, Torch supported nullptr storage, as a minor optimization to
 avoid having to allocate a storage object when it would be empty.
 However, this is actually a confusing special case to deal with, so
-by-in-large, PyTorch assumes that, in fact, storage is never NULL.
+by-in-large, PyTorch assumes that, in fact, storage is never nullptr.
 
 One important case where this assumption is important is when tracking
 the CUDA device a tensor is stored in: this information is stored
-solely in the storage, so if a storage is NULL, we lose this information.
+solely in the storage, so if a storage is nullptr, we lose this information.
 
-Although storage is never NULL, the data field of THStorage may be NULL. This
+Although storage is never nullptr, the data field of THStorage may be nullptr. This
 mostly occurs when we want to pre-allocate an output tensor struct, but then
 have it be resized and filled with data by some operator: there's no point in
 allocating data for it in this case!
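A schematic of the invariant this note describes — simplified, hypothetical types, not the actual `THStorage`/tensor structs:

```cpp
#include <cstddef>

// The storage object itself always exists...
struct Storage {
  float* data = nullptr;   // ...but data may be nullptr for a pre-allocated,
  std::size_t size = 0;    // not-yet-filled output tensor
  int cuda_device = -1;    // device info lives here, which is why the
};                         // storage pointer itself must never be nullptr

struct Tensor {
  Storage* storage;        // invariant: never nullptr
};
```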