
Commit 592e80b

Minor nativeops changes for compiler (deeplearning4j#10177)

* Build changes: update the C++ standard to 17; add more flags for functrace for easier debugging.
* Legacy changes: adds missing linker methods for NativeOpExecutioner; updates some methods to avoid compiler warnings about comparing elements of different sizes.

1 parent 1925771 commit 592e80b
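Most of the warning fixes in the diffs below follow one pattern: a loop counter declared as int compared against a container size of type size_t, which mixes signed and unsigned operands and triggers -Wsign-compare. A minimal before/after illustration of that pattern (generic code, not taken from the libnd4j sources):

#include <cstddef>
#include <vector>

// Generic example of the sign-compare fix pattern used throughout this commit.
int countPositive(const std::vector<int> &values) {
  int count = 0;
  // Before: 'int i' compared against values.size() (std::size_t) mixes signed
  // and unsigned widths and warns under -Wall/-Wextra:
  //   for (int i = 0; i < values.size(); i++) { ... }
  // After: give the counter the container's unsigned size type, or cast the
  // size once when a signed index is genuinely needed (as executeStoredGraphT does below).
  for (std::size_t i = 0; i < values.size(); i++) {
    if (values[i] > 0) count++;
  }
  return count;
}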

File tree

4 files changed (+21, -70 lines)


libnd4j/include/legacy/NativeOpExecutioner.h

Lines changed: 1 addition & 10 deletions

@@ -454,17 +454,8 @@ class SD_LIB_EXPORT NativeOpExecutioner {
                       const sd::LongType *dZShapeBuffer, void *extraArguments);
 
   static void execSort(sd::NDArray *x, bool descending);
-  static void execSortCooIndices(sd::LongType *indices, void *x, sd::LongType length,
-                                 const sd::LongType *xShapeInfo);
 
-  static void execRavelMultiIndex(sd::LongType *indices, sd::LongType *flatIndices, sd::LongType length,
-                                  sd::LongType *shapeInfo, int mode);
-
-  static void execUnravelIndex(sd::LongType *indices, sd::LongType *flatIndices, sd::LongType length,
-                               sd::LongType *shapeInfo);
-  static sd::LongType encodeBitmap(sd::NDArray *x, sd::LongType N, long long int *dz,
-                                   float threshold);
-  static void execSort(sd::NDArray *x, sd::LongType *dimension, sd::LongType dimensionLength,
+  static void execSort(sd::NDArray *x, sd::LongType *dimension, sd::LongType dimensionLength,
                        bool descending) ;
 
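A declaration that never gets an out-of-line definition compiles at every call site but fails at link time with an undefined reference, which is presumably the class of problem the commit message's "missing linker methods" note is about; pruning such declarations keeps the header honest about what the library actually provides. A generic illustration of that failure mode (hypothetical names, not libnd4j code):

// sorter.h (hypothetical header, for illustration only)
struct Sorter {
  static void sortAsc(long long *data, long long n);            // defined in some .cpp
  static void sortCooIndices(long long *indices, long long n);  // declared but never defined
};

// main.cpp
int main() {
  long long idx[2] = {1, 0};
  // Compiles fine, but the link step reports something like:
  //   undefined reference to `Sorter::sortCooIndices(long long*, long long)'
  Sorter::sortCooIndices(idx, 2);
  return 0;
}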

libnd4j/include/legacy/NativeOps.h

Lines changed: 1 addition & 12 deletions

@@ -88,16 +88,8 @@ SD_LIB_EXPORT void shuffle(sd::Pointer *extras,
                            OpaqueNDArray dimension,
                            OpaqueNDArray shuffleMap);
 
-SD_LIB_EXPORT void average(sd::Pointer *extras,
-                           OpaqueNDArrayArr x,
-                           OpaqueNDArray z,int n,
-                           sd::LongType length, bool propagate);
 
-SD_LIB_EXPORT void accumulate(sd::Pointer *extras,
-                              OpaqueNDArrayArr x,
-                              OpaqueNDArray z,
-                              int n,
-                              sd::LongType length);
+
 
 
 SD_LIB_EXPORT void pullRows(sd::Pointer *extraPointers,
@@ -255,8 +247,6 @@ SD_LIB_EXPORT int getDeviceMinor(int device) ;
 SD_LIB_EXPORT int getShapeInfoLength(OpaqueTadPack *pack) ;
 SD_LIB_EXPORT int memcpyConstantAsync(sd::LongType dst, sd::Pointer src, sd::LongType size, int flags, sd::Pointer reserved) ;
 SD_LIB_EXPORT sd::Pointer getConstantSpace() ;
-SD_LIB_EXPORT void average(sd::Pointer *extras, OpaqueNDArray *x, OpaqueNDArray z,int n, sd::LongType length, bool propagate) ;
-SD_LIB_EXPORT void accumulate(sd::Pointer *extras, OpaqueNDArray *x, OpaqueNDArray z, int n, sd::LongType length) ;
 SD_LIB_EXPORT bool isExperimentalEnabled() ;
 SD_LIB_EXPORT void setOmpMinThreads(int threads) ;
 SD_LIB_EXPORT int getDevice() ;
@@ -305,7 +295,6 @@ SD_LIB_EXPORT void sortTadByValue(sd::Pointer *extraPointers,
                                   OpaqueNDArray y,
                                   OpaqueNDArray dimension,
                                   bool descending);
-SD_LIB_EXPORT void sortCooIndices(sd::Pointer *extraPointers, OpaqueNDArray indices, OpaqueNDArray values);
 SD_LIB_EXPORT void munmapFile(sd::Pointer *extraPointers, sd::LongType *ptrMap, sd::LongType length) ;
 SD_LIB_EXPORT sd::LongType* mmapFile(sd::Pointer* extraPointers, const char* fileName, sd::LongType length);

libnd4j/include/legacy/impl/NativeOpsHelpers.cpp

Lines changed: 13 additions & 31 deletions

@@ -261,7 +261,7 @@ static sd::Pointer _numpyHeaderForNd4j(sd::Pointer data, const sd::Pointer shape
   auto npHeader = cnpy::createNpyHeader<T>(npShape, rank, wordSize);
   char* ret = new char[npHeader.size() + 1];
   int count = 0;
-  for (int i = 0; i < npHeader.size(); i++) {
+  for (size_t i = 0; i < npHeader.size(); i++) {
     ret[count] = npHeader[i];
     count++;
   }
@@ -292,10 +292,6 @@ sd::Pointer loadNpyFromHeader(sd::Pointer data) {
 
   cnpy::NpyArray arr = cnpy::loadNpyFromHeader(header);
   cnpy::NpyArray* ret = new cnpy::NpyArray();
-  int totalLengthOfShape = 1;
-  for (int i = 0; i < arr.shape.size(); i++) {
-    totalLengthOfShape *= arr.shape[i];
-  }
 
   ret->data = arr.data;
   ret->wordSize = arr.wordSize;
@@ -539,8 +535,8 @@ void setGraphContextOutputArraysArr(OpaqueContext* ptr, int numArrays,OpaqueNDAr
       errorMessage += " was null!";
       THROW_EXCEPTION(errorMessage.c_str());
     }
-    for (int i = 0; i < numArrays; i++) {
-      ptr->setOutputArray(i, *arr[i], false);
+    for (int j = 0; j < numArrays; j++) {
+      ptr->setOutputArray(j, *arr[j], false);
     }
   }
 }
@@ -625,12 +621,12 @@ sd::LongType *mmapFile(sd::Pointer *extraPointers, const char *fileName, sd::Lon
     THROW_EXCEPTION("Failed to open file for MMAP");
   }
 
-  void *ptr = mmap(nullptr, length, PROT_READ | PROT_WRITE, MAP_FILE | MAP_SHARED, fd, 0);
-  if (ptr == MAP_FAILED) {
+  void *ptr2 = mmap(nullptr, length, PROT_READ | PROT_WRITE, MAP_FILE | MAP_SHARED, fd, 0);
+  if (ptr2 == MAP_FAILED) {
     sd_printf("Errno: %i\n", errno);
     THROW_EXCEPTION("Failed to mmap file");
   }
-  hZ[0] = (sd::LongType)ptr;
+  hZ[0] = (sd::LongType)ptr2;
   hZ[1] = fd;
 
 #endif
@@ -666,7 +662,7 @@ OpaqueShapeList *calculateOutputShapes2(sd::Pointer *extraPointers, sd::LongType
   fflush(stdout);
   sd::ShapeList inShapes;
 
-  for (int e = 0; e < context->width(); e++) {
+  for (size_t e = 0; e < context->width(); e++) {
     if (context->array(e) == nullptr) {
       std::string errorMessage = "Input array at index " + std::to_string(e) + " was null!";
       THROW_EXCEPTION(errorMessage.c_str());
@@ -738,7 +734,7 @@ void purgeOpTrace() { sd::ops::OpRegistrator::getInstance().purgeOpExecs();
 
 void printOpTrace() {
   auto execTrace = *sd::ops::OpRegistrator::getInstance().execTrace();
-  for(int i = 0; i < execTrace.size(); i++) {
+  for(size_t i = 0; i < execTrace.size(); i++) {
     auto curr = execTrace[i];
     if(curr->opName != nullptr) {
       sd_printf("Op name: %s\n", curr->opName->c_str());
@@ -749,7 +745,7 @@ void printOpTrace() {
       continue;
     } else {
       auto currInputShapeBuffers = *(curr->inputShapeBuffers);
-      for(int j = 0; j < currInputShapeBuffers.size(); j++) {
+      for(size_t j = 0; j < currInputShapeBuffers.size(); j++) {
        auto buff = currInputShapeBuffers[j];
        shape::printShapeInfo(buff);
        sd_printf("\n",0);
@@ -761,7 +757,7 @@ void printOpTrace() {
       continue;
     } else {
       auto currOutputShapeBuffers = *(curr->outputShapeBuffers);
-      for(int j = 0; j < curr->outputShapeBuffers->size(); j++) {
+      for(size_t j = 0; j < curr->outputShapeBuffers->size(); j++) {
        shape::printShapeInfo(currOutputShapeBuffers[j]);
        sd_printf("\n",0);
       }
@@ -887,7 +883,7 @@ std::vector<double> * tArgs(void *execTrace) {
 std::vector<int> * dArgs(void *execTrace) {
   ExecTrace *trace = (ExecTrace *) execTrace;
   std::vector<int> *dArgs = new std::vector<int>();
-  for (int e = 0; e < trace->dArgs.size(); e++) {
+  for (size_t e = 0; e < trace->dArgs.size(); e++) {
     dArgs->push_back(trace->dArgs[e]);
   }
   return dArgs;
@@ -965,7 +961,8 @@ static VariablesSet *executeStoredGraphT(sd::Pointer *extraPointers, sd::LongTyp
   if (hZ == sd::Status::OK) {
     // pull back results, and provide them
     auto outputs = graph->fetchOutputs();
-    for (int e = 0; e < outputs->size(); e++) {
+    int size = static_cast<int>(outputs->size());
+    for (int e = 0; e < size; e++) {
       // we're only getting variable ID/Index from original grap. values will be taken from cloned workspace
       std::pair<int, int> varId(outputs->at(e)->id(), outputs->at(e)->index());
 
@@ -1121,24 +1118,9 @@ sd::Status execCustomOpWithScope_(sd::Pointer *extraPointers, sd::graph::GraphSt
     varSpace->dropVariable(0, e);
   }
 
-  // after some bla-bla-bla we should have Graph and Node for current op
   return sd::Status::OK;
 }
 
-sd::Status execCustomOpWithScope(sd::Pointer *extraPointers, sd::Pointer state, sd::LongType opHash, sd::LongType *scopes, int numScopes,
-                                 sd::Pointer *inputBuffers, sd::Pointer *inputShapes, int numInputs, sd::Pointer *outputBuffers,
-                                 sd::Pointer *outputShapes, int numOutputs) {
-  try {
-    return execCustomOpWithScope(extraPointers, reinterpret_cast<GraphState *>(state), opHash, scopes,
-                                 numScopes, inputBuffers, inputShapes, numInputs, outputBuffers, outputShapes,
-                                 numOutputs);
-  } catch (std::exception &e) {
-    sd::LaunchContext::defaultContext()->errorReference()->setErrorCode(1);
-    sd::LaunchContext::defaultContext()->errorReference()->setErrorMessage(e.what());
-    return sd::Status::BAD_INPUT;
-  }
-}
-
 void deleteResultWrapper(sd::Pointer ptr) {
   auto p = reinterpret_cast<ResultWrapper *>(ptr);
   delete p;
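The mmapFile hunk above only renames the mapping pointer (ptr to ptr2) so it no longer reuses the name of an earlier local, but the surrounding idiom (map the file, then stash both the mapping address and the descriptor so they can be released later) is easy to miss in diff form. A standalone sketch of the same POSIX pattern, with illustrative names that are not part of the libnd4j API:

#include <cstddef>
#include <cstdio>
#include <fcntl.h>     // open
#include <sys/mman.h>  // mmap, MAP_FAILED
#include <unistd.h>    // close

// Map 'fileName' read/write and hand back both the mapping and the descriptor,
// mirroring how mmapFile stores the pointer in hZ[0] and the fd in hZ[1] so a
// later munmap/close pair can undo the whole thing.
bool mapFile(const char *fileName, std::size_t length, void **outPtr, int *outFd) {
  int fd = open(fileName, O_RDWR);
  if (fd < 0) {
    std::perror("open");
    return false;
  }
  void *mapping = mmap(nullptr, length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
  if (mapping == MAP_FAILED) {
    std::perror("mmap");
    close(fd);
    return false;
  }
  *outPtr = mapping;
  *outFd = fd;
  return true;  // caller eventually calls munmap(mapping, length) and close(fd)
}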

libnd4j/include/legacy/impl/cnpy.cpp

Lines changed: 6 additions & 17 deletions

@@ -172,7 +172,8 @@ sd::DataType cnpy::dataTypeFromHeader(char *data) {
 template <typename T>
 std::vector<char> &operator+=(std::vector<char> &lhs, const T rhs) {
   // write in little endian
-  for (char byte = 0; byte < sizeof(T); byte++) {
+  char size = sizeof(T);
+  for (char byte = 0; byte < size; byte++) {
     char val = *((char *)&rhs + byte);
     lhs.push_back(val);
   }
@@ -225,13 +226,6 @@ char *cnpy::loadFile(const char *path) {
     fseek(f, 0, SEEK_SET);
     buffer = (char *)malloc((length + 1) * sizeof(char));
 
-    // just getting rid of compiler warning
-    sd::LongType fps = 0;
-
-    if (buffer) {
-      fps += fread(buffer, sizeof(char), length, f);
-    }
-
     fclose(f);
   }
 
@@ -388,7 +382,7 @@ cnpy::NpyArray cnpy::loadNpyFromHeader(char *data) {
   std::string firstError;
   firstError += std::string("Pointer doesn't look like a NumPy header. Missing expected characters in middle.");
   std::string header;
-  for(int i = 0; i < hdr.size(); i++) {
+  for(size_t i = 0; i < hdr.size(); i++) {
     header+= hdr[i];
   }
 
@@ -406,19 +400,14 @@ cnpy::NpyArray cnpy::loadNpyFromHeader(char *data) {
   parseNpyHeaderStr(std::string(data), wordSize, shape, ndims, fortranOrder);
   // the "real" data starts after the \n
   char currChar = data[0];
-  int count = 0;
   while (currChar != '\n') {
     data++;
     currChar = data[0];
-    count++;
   }
 
   // move pass the \n
   data++;
-  count++;
 
-  unsigned long long size = 1;  // long long so no overflow when multiplying by word_size
-  for (unsigned int i = 0; i < ndims; i++) size *= shape[i];
   char *cursor = data;
   NpyArray arr;
   arr.wordSize = wordSize;
@@ -615,7 +604,7 @@ void cnpy::npy_save(std::string fname, const void *data, const unsigned int *sha
       assert(tmp_dims == ndims);
     }
 
-    for (int i = 1; i < ndims; i++) {
+    for (size_t i = 1; i < ndims; i++) {
       if (shape[i] != tmp_shape[i]) {
         std::cout << "libnpy error: npy_save attempting to append misshaped data to " << fname << "\n";
         assert(shape[i] == tmp_shape[i]);
@@ -637,7 +626,7 @@ void cnpy::npy_save(std::string fname, const void *data, const unsigned int *sha
   }
 
   unsigned long long nels = 1;
-  for (int i = 0; i < ndims; i++) nels *= shape[i];
+  for (unsigned int i = 0; i < ndims; i++) nels *= shape[i];
 
   fwrite(data, sizeof(T), nels, fp);
   fclose(fp);
@@ -663,7 +652,7 @@ std::vector<char> cnpy::createNpyHeader( const unsigned int *shape, const unsign
   dict += "', 'fortran_order': False, 'shape': (";
   if (ndims > 0) {
     dict += tostring(shape[0]);
-    for (int i = 1; i < ndims; i++) {
+    for (size_t i = 1; i < ndims; i++) {
       dict += ", ";
       dict += tostring(shape[i]);
     }
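The first cnpy.cpp hunk only hoists sizeof(T) into a local so the char loop counter is compared against a value of matching width, but the underlying operator+= trick (appending the raw bytes of a value to a std::vector<char>) is the part worth knowing when reading createNpyHeader. A generic sketch of the same idea, assuming a little-endian host as the original comment does; the helper name is illustrative and not part of cnpy:

#include <cstddef>
#include <cstring>
#include <vector>

// Append the raw bytes of 'value' to 'out' in memory order. On a little-endian
// host the least significant byte lands first, which is what the NumPy header
// format expects for its little-endian length fields.
template <typename T>
void appendBytes(std::vector<char> &out, const T &value) {
  char bytes[sizeof(T)];
  std::memcpy(bytes, &value, sizeof(T));  // memcpy sidesteps aliasing concerns
  for (std::size_t i = 0; i < sizeof(T); ++i) {
    out.push_back(bytes[i]);
  }
}

// Usage sketch: appendBytes(header, static_cast<unsigned short>(dict.size()));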
