Skip to content

Commit

Permalink
fix: mute few warning print (#373)
Browse files Browse the repository at this point in the history
* feat: taosdump support multiple directories

initial work to reduce buffer overflow

* fix: tune taosadapter log level to error

* fix: temporary disable double type case

* feat: refactor dump filename generation

* feat: add db dir and check if it exist

* feat: taosdump dump out to multiple directories

* feat: support dump in from multiple directories

* fix: build with websocket enabled

* test: enable error output

* fix: inpath buffer overflow

* fix: create db clause for db stb

* test: fix taosdump/native/taosdumpTestInspect.py

* fix: 3.0-coveralls.yml coveralls.yml

* feat: re-write dump subtable desc logic for cluster

* feat: add percent indicator for tbname fetch

* feat: fix debug print row

* feat: print to stderr

* feat: reuse stb desc for sub-tables

* feat: use directories tmp branch

* refactor: again

* feat: native dump out refactor

* refactor: ws impl

* refactor: ws done

* feat: fix unknown codec

* fix: unknown codec dump in

* test: check result in 3.0-coveralls.yml

* fix: unknown codec to dump normal table

* fix: add progress indicator 0

* fix: improve tag fetch

* fix: ntb get table des

* fix: tag value is null

* fix: dump db tb with unknown codec

* fix: unknown codec ntb/stb

* fix: revert codec specified

* fix: code cleanup

* chore: add info print macro

* feat: add errno check for thread_join

* fix: mute few warning print

* fix: process value for v3

* test: add float and double to coverage test

* fix: process tag value for v3

* test: fix ws3 in ../.github/workflows/3.0-coveralls.yml

* fix: duration buffer

* fix: make 3.0.0.0 ver compatible first

* fix: reuse v2 way get tag value first to improve performance

* test: update workflow for taos-tools commit
  • Loading branch information
sangshuduo authored Sep 16, 2022
1 parent e7270c9 commit 9e75540
Show file tree
Hide file tree
Showing 8 changed files with 205 additions and 29 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/3.0-coveralls.yml
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ jobs:
with:
repository: 'taosdata/TDengine'
path: 'TDengine'
ref: '3.0'
ref: 'feat/sangshuduo/TD-14141-update-taostools-for3.0'

- name: Change time zone
if:
Expand Down Expand Up @@ -230,7 +230,7 @@ jobs:
rm -rf *.sql *.avro* taosdump.*
echo "TEST: complex columns"
./build/bin/taosBenchmark -t 2 -n 10 -b bool,tinyint,smallint,int,bigint,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -A bool,tinyint,smallint,int,bigint,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -y > /dev/null
./build/bin/taosBenchmark -t 2 -n 10 -b bool,tinyint,smallint,int,bigint,float,double,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -A bool,tinyint,smallint,int,bigint,float,double,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -y > /dev/null
echo "TEST: whole db with null codec"
./build/bin/taosdump -D test -d null -gg > /dev/null
Expand Down Expand Up @@ -343,7 +343,7 @@ jobs:
rm -rf *.sql *.avro* taosdump.*
echo "TEST: complex with larger binary"
./build/bin/taosBenchmark -t 2 -n 10 -w 40 -b bool,tinyint,smallint,int,bigint,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -A bool,tinyint,smallint,int,bigint,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -y > /dev/null
./build/bin/taosBenchmark -t 2 -n 10 -w 40 -b bool,tinyint,smallint,int,bigint,float,double,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -A bool,tinyint,smallint,int,bigint,float,double,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -y > /dev/null
./build/bin/taosdump -D test -gg > /dev/null
./build/bin/taos -s "drop database test"
./build/bin/taosdump -i . -gg > /dev/null
Expand Down Expand Up @@ -408,7 +408,7 @@ jobs:
if find taosdump/ws3 -name "*.py"|grep -q .;
then
for i in `find taosdump/ws -name "*.py"`; do python3 ./test.py -f $i > /dev/null 2>&1 && echo -e "\033[32m develop-test/$i success! \033[0m"|| echo -e "\033[31m develop-test/$i failed! \033[0m" | tee -a ~/taosdump-failed.txt ;done
for i in `find taosdump/ws3 -name "*.py"`; do python3 ./test.py -f $i > /dev/null 2>&1 && echo -e "\033[32m develop-test/$i success! \033[0m"|| echo -e "\033[31m develop-test/$i failed! \033[0m" | tee -a ~/taosdump-failed.txt ;done
fi
- name: System Test cases
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/3.0-taosdump-release-ws.yml
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ jobs:
with:
repository: 'taosdata/TDengine'
path: 'TDengine'
ref: '3.0'
ref: 'feat/sangshuduo/TD-14141-update-taostools-for3.0'

- name: Change time zone
if:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/3.0-taosdump-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ jobs:
with:
repository: 'taosdata/TDengine'
path: 'TDengine'
ref: '3.0'
ref: 'feat/sangshuduo/TD-14141-update-taostools-for3.0'

- name: Change time zone
if:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/coveralls.yml
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ jobs:
rm -rf *.sql *.avro* taosdump.*
echo "TEST: complex columns"
./build/bin/taosBenchmark -t 2 -n 10 -b bool,tinyint,smallint,int,bigint,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -A bool,tinyint,smallint,int,bigint,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -y > /dev/null
./build/bin/taosBenchmark -t 2 -n 10 -b bool,tinyint,smallint,int,bigint,float,double,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -A bool,tinyint,smallint,int,bigint,float,double,utinyint,usmallint,uint,ubigint,binary,nchar,timestamp -y > /dev/null
echo "TEST: whole db with null codec"
./build/bin/taosdump -D test -d null -gg > /dev/null
Expand Down
2 changes: 1 addition & 1 deletion src/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ INCLUDE_DIRECTORIES(${CMAKE_CURRENT_LIST_DIR}/../deps/avro/lang/c/src)
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_LIST_DIR}/../deps/toolscJson/src)

if(NOT DEFINED TD_VER_COMPATIBLE)
SET(TD_VER_COMPATIBLE "2.0.0.0")
SET(TD_VER_COMPATIBLE "3.0.0.0")
ENDIF()

IF (${TD_VER_COMPATIBLE} STRGREATER_EQUAL "3.0.0.0")
Expand Down
212 changes: 193 additions & 19 deletions src/taosdump.c
Original file line number Diff line number Diff line change
Expand Up @@ -1987,7 +1987,163 @@ static int64_t getNtbCountOfStbNative(
return count;
}

static int processFieldsValue(
/**
 * Convert one tag/field value fetched from a TDengine 3.x server into the
 * textual form cached in tableDes->cols[index].
 *
 * The server hands values back as text (e.g. "true"/"false" for BOOL,
 * decimal strings for integer types).  Short results are stored in the
 * fixed cols[index].value buffer (COL_VALUEBUF_LEN bytes); longer results
 * go into a freshly allocated cols[index].var_value.
 *
 * @param index     column/tag index into tableDes->cols
 * @param tableDes  destination table description (owns value/var_value)
 * @param value     raw text value from the server; must not be NULL
 * @param len       length in bytes of value (not NUL-terminated)
 * @return 0 on success, -1 on allocation failure
 */
static int processFieldsValueV3(
        int index,
        TableDes *tableDes,
        const void *value,
        int32_t len) {
    switch (tableDes->cols[index].type) {
        case TSDB_DATA_TYPE_BOOL:
            // v3 returns the literal words "true"/"false"; normalize to "1"/"0"
            if (0 == strncmp(value, "true", len)) {
                strcpy(tableDes->cols[index].value, "1");
            } else {
                strcpy(tableDes->cols[index].value, "0");
            }
            break;

        case TSDB_DATA_TYPE_TINYINT:
        case TSDB_DATA_TYPE_SMALLINT:
        case TSDB_DATA_TYPE_INT:
        case TSDB_DATA_TYPE_BIGINT:
        case TSDB_DATA_TYPE_UTINYINT:
        case TSDB_DATA_TYPE_USMALLINT:
        case TSDB_DATA_TYPE_UINT:
        case TSDB_DATA_TYPE_UBIGINT:
        case TSDB_DATA_TYPE_TIMESTAMP:
            // Numeric values arrive as decimal text.  Zero the destination
            // first and bound the copy so the stored string is always
            // NUL-terminated: a bare strncpy(..., len) leaves no terminator
            // and would overrun value[] if len >= COL_VALUEBUF_LEN.
            memset(tableDes->cols[index].value, 0,
                    sizeof(tableDes->cols[index].value));
            if (len < (COL_VALUEBUF_LEN - 1)) {
                strncpy(tableDes->cols[index].value, (char*)value, len);
            } else {
                strncpy(tableDes->cols[index].value, (char*)value,
                        COL_VALUEBUF_LEN - 1);
            }
            break;

        case TSDB_DATA_TYPE_FLOAT:
        case TSDB_DATA_TYPE_DOUBLE:
            memset(tableDes->cols[index].value, 0,
                    sizeof(tableDes->cols[index].value));

            if (len < (COL_VALUEBUF_LEN - 1)) {
                strncpy(tableDes->cols[index].value, (char*)value, len);
            } else {
                // too long for the inline buffer; spill to var_value
                if (tableDes->cols[index].var_value) {
                    free(tableDes->cols[index].var_value);
                    tableDes->cols[index].var_value = NULL;
                }
                tableDes->cols[index].var_value =
                    calloc(1, len + 1);

                if (NULL == tableDes->cols[index].var_value) {
                    errorPrint("%s() LN%d, memory allocation failed!\n",
                            __func__, __LINE__);
                    return -1;
                }
                // calloc zeroed the buffer, so copying exactly len bytes
                // leaves the string NUL-terminated.
                strncpy(tableDes->cols[index].var_value, (char*)value, len);
            }
            break;

        case TSDB_DATA_TYPE_BINARY:
            memset(tableDes->cols[index].value, 0,
                    sizeof(tableDes->cols[index].value));

            if (g_args.avro) {
                // avro output stores the raw bytes as-is
                if (len < (COL_VALUEBUF_LEN - 1)) {
                    strncpy(tableDes->cols[index].value, (char *)value, len);
                } else {
                    if (tableDes->cols[index].var_value) {
                        free(tableDes->cols[index].var_value);
                        tableDes->cols[index].var_value = NULL;
                    }
                    tableDes->cols[index].var_value = calloc(1,
                            1 + len);

                    if (NULL == tableDes->cols[index].var_value) {
                        errorPrint("%s() LN%d, memory allocation failed!\n",
                                __func__, __LINE__);
                        return -1;
                    }
                    strncpy(tableDes->cols[index].var_value, (char *)value, len);
                }
            } else {
                // SQL output needs characters escaped into readable form
                if (len < (COL_VALUEBUF_LEN - 2)) {
                    convertStringToReadable(
                            (char *)value,
                            len,
                            tableDes->cols[index].value,
                            len);
                } else {
                    if (tableDes->cols[index].var_value) {
                        free(tableDes->cols[index].var_value);
                        tableDes->cols[index].var_value = NULL;
                    }
                    // NOTE(review): assumes readable conversion expands at
                    // most 2x the input length — confirm against
                    // convertStringToReadable's worst case.
                    tableDes->cols[index].var_value = calloc(1,
                            len * 2);

                    if (NULL == tableDes->cols[index].var_value) {
                        errorPrint("%s() LN%d, memory allocation failed!\n",
                                __func__, __LINE__);
                        return -1;
                    }
                    convertStringToReadable((char *)value,
                            len,
                            (char *)(tableDes->cols[index].var_value),
                            len);
                }
            }
            break;

        case TSDB_DATA_TYPE_NCHAR:
        case TSDB_DATA_TYPE_JSON:
            {
                if (g_args.avro) {
                    if (len < (COL_VALUEBUF_LEN - 1)) {
                        strncpy(tableDes->cols[index].value,
                                (char *)value,
                                len);
                    } else {
                        // free any previous spill buffer before replacing it,
                        // matching the other branches (avoids a leak when the
                        // same column slot is processed more than once)
                        if (tableDes->cols[index].var_value) {
                            free(tableDes->cols[index].var_value);
                            tableDes->cols[index].var_value = NULL;
                        }
                        tableDes->cols[index].var_value = calloc(1, len + 1);

                        if (NULL == tableDes->cols[index].var_value) {
                            errorPrint("%s() LN%d, memory allocation failed!\n",
                                    __func__, __LINE__);
                            return -1;
                        }
                        strncpy(tableDes->cols[index].var_value,
                                (char *)value, len);
                    }
                } else {
                    if (len < (COL_VALUEBUF_LEN-2)) {
                        char tbuf[COL_VALUEBUF_LEN-2]; // need reserve 2 bytes for ' '
                        convertNCharToReadable(
                                (char *)value,
                                len, tbuf, COL_VALUEBUF_LEN-2);
                        sprintf(tableDes->cols[index].value, "%s", tbuf);
                    } else {
                        if (tableDes->cols[index].var_value) {
                            free(tableDes->cols[index].var_value);
                            tableDes->cols[index].var_value = NULL;
                        }
                        // len * 5 matches nchar worst-case expansion
                        tableDes->cols[index].var_value = calloc(1, len * 5);

                        if (NULL == tableDes->cols[index].var_value) {
                            errorPrint("%s() LN%d, memory allocation failed!\n",
                                    __func__, __LINE__);
                            return -1;
                        }
                        // NOTE(review): the short-value branch above uses
                        // convertNCharToReadable but this one uses
                        // convertStringToReadable — looks like a copy-paste
                        // slip; confirm intended converter for long nchar.
                        convertStringToReadable(
                                (char *)value,
                                len,
                                (char *)(tableDes->cols[index].var_value), len);
                    }
                }
            }
            break;

        default:
            // unexpected column type: report and keep going; caller still
            // gets 0 so one bad column does not abort the whole table
            errorPrint("%s() LN%d, unknown type: %d\n",
                    __func__, __LINE__, tableDes->cols[index].type);
            break;
    }

    return 0;
}

static int processFieldsValueV2(
int index,
TableDes *tableDes,
const void *value,
Expand Down Expand Up @@ -2113,11 +2269,8 @@ static int processFieldsValue(
__func__, __LINE__);
return -1;
}
strncpy(
(char *)(tableDes->cols[index].var_value),
(char *)value,
min(TSDB_TABLE_NAME_LEN,
len));
strncpy(tableDes->cols[index].var_value,
(char *)value, len);
}
} else {
if (len < (COL_VALUEBUF_LEN - 2)) {
Expand Down Expand Up @@ -2284,8 +2437,15 @@ static int getTableTagValueWSV3(
if (NULL == value1) {
strcpy(tableDes->cols[index].value, "NULL");
strcpy(tableDes->cols[index].note , "NULL");
} else {
strncpy(tableDes->cols[index].value, value1, len);
} else if (0 != processFieldsValueV3(
index,
tableDes,
value1,
len)) {
errorPrint("%s() LN%d, processFieldsValueV3 tag_value: %p\n",
__func__, __LINE__, value1);
ws_free_result(ws_res);
return -1;
}
index ++;
}
Expand Down Expand Up @@ -2363,12 +2523,12 @@ static int getTableTagValueWSV2(
if (NULL == value) {
strcpy(tableDes->cols[j].value, "NULL");
strcpy(tableDes->cols[j].note , "NULL");
} else if (0 != processFieldsValue(
} else if (0 != processFieldsValueV2(
j,
tableDes,
value,
len)) {
errorPrint("%s() LN%d, processFieldsValue value0: %p\n",
errorPrint("%s() LN%d, processFieldsValueV2 value0: %p\n",
__func__, __LINE__, value);
ws_free_result(ws_res);
return -1;
Expand All @@ -2390,7 +2550,10 @@ static int getTableTagValueWS(
int ret = -1;
if (3 == g_majorVersionOfClient) {
// if child-table have tag, V3 using select tag_value from information_schema.ins_tag where table to get tagValue
ret = getTableTagValueWSV3(ws_taos, dbName, table, ppTableDes);
ret = getTableTagValueWSV2(ws_taos, dbName, table, ppTableDes);
if (ret < 0) {
ret = getTableTagValueWSV3(ws_taos, dbName, table, ppTableDes);
}
} else if (2 == g_majorVersionOfClient) {
// if child-table have tag, using select tagName from table to get tagValue
ret = getTableTagValueWSV2(ws_taos, dbName, table, ppTableDes);
Expand Down Expand Up @@ -2568,8 +2731,15 @@ static int getTableTagValueNativeV3(
if (NULL == row[1]) {
strcpy(tableDes->cols[index].value, "NULL");
strcpy(tableDes->cols[index].note , "NULL");
} else {
strncpy(tableDes->cols[index].value, row[1], length[1]);
} else if (0 != processFieldsValueV3(
index,
tableDes,
row[1],
length[1])) {
errorPrint("%s() LN%d, processFieldsValueV3 tag_value: %p\n",
__func__, __LINE__, row[1]);
taos_free_result(res);
return -1;
}

index ++;
Expand Down Expand Up @@ -2642,7 +2812,7 @@ static int getTableTagValueNativeV2(
if (NULL == row[j - tableDes->columns]) {
strcpy(tableDes->cols[j].value, "NULL");
strcpy(tableDes->cols[j].note , "NULL");
} else if (0 != processFieldsValue(
} else if (0 != processFieldsValueV2(
j, tableDes,
row[j- tableDes->columns],
length[j- tableDes->columns])) {
Expand All @@ -2663,7 +2833,10 @@ static int getTableTagValueNative(
int ret = -1;
if (3 == g_majorVersionOfClient) {
// if child-table have tag, V3 using select tag_value from information_schema.ins_tag where table to get tagValue
ret = getTableTagValueNativeV3(taos, dbName, table, ppTableDes);
ret = getTableTagValueNativeV2(taos, dbName, table, ppTableDes);
if (ret < 0) {
ret = getTableTagValueNativeV3(taos, dbName, table, ppTableDes);
}
} else if (2 == g_majorVersionOfClient) {
// if child-table have tag, using select tagName from table to get tagValue
ret = getTableTagValueNativeV2(taos, dbName, table, ppTableDes);
Expand Down Expand Up @@ -4168,7 +4341,7 @@ int64_t queryDbForDumpOutCountWS(
if (NULL == value0) {
if (0 == ws_errno(ws_res)) {
count = 0;
warnPrint("%s fetch row, count: %" PRId64 "\n",
debugPrint("%s fetch row, count: %" PRId64 "\n",
sqlstr, count);
} else {
count = -1;
Expand Down Expand Up @@ -4216,7 +4389,7 @@ int64_t queryDbForDumpOutCountNative(
if (NULL == row) {
if (0 == taos_errno(res)) {
count = 0;
warnPrint("%s fetch row, count: %" PRId64 "\n",
debugPrint("%s fetch row, count: %" PRId64 "\n",
sqlstr, count);
} else {
count = -1;
Expand Down Expand Up @@ -8355,7 +8528,7 @@ static int createMTableAvroHeadImp(
} else {
avro_value_set_branch(&value, 1, &branch);
int tmp = atoi((const char *)
subTableDes->cols[subTableDes->columns+tag].value);
subTableDes->cols[subTableDes->columns+tag].value);
verbosePrint("%s() LN%d, before set_bool() tmp=%d\n",
__func__, __LINE__, (int)tmp);
avro_value_set_boolean(&branch, (tmp)?1:0);
Expand Down Expand Up @@ -11190,7 +11363,8 @@ static bool fillDBInfoWithFieldsNative(const int index,
min(DURATION_LEN, lengths[f] + 1));
debugPrint("%s() LN%d: field: %d, duration: %s, length:%d\n",
__func__, __LINE__, f,
(char*)row[f], lengths[f]);
g_dbInfos[index]->duration,
lengths[f]);
} else if ((0 == strcmp(fields[f].name, "cache"))
|| (0 == strcmp(fields[f].name, "cache(MB)"))) {
g_dbInfos[index]->cache = *((int32_t *)row[f]);
Expand Down
3 changes: 2 additions & 1 deletion tests/pytest/util/dnodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -268,7 +268,8 @@ def getPath(self, tool="taosd"):
elif ("/tools/" in selfPath):
projPath = selfPath[:selfPath.find("/tools/")]
else:
tdLog.exit("path %s is not support" % selfPath)
tdLog.info("cannot found %s in path: %s, use system's" % (tool, selfPath))
projPath = "/usr/local/taos/bin"

paths = []
for root, dirs, files in os.walk(projPath):
Expand Down
3 changes: 2 additions & 1 deletion tests/taosdump/native/taosdumpTestInspect.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,8 @@ def getPath(self, tool="taosdump"):
elif "/tools/" in selfPath:
projPath = selfPath[: selfPath.find("/tools/")]
else:
tdLog.exit("path: %s is not supported" % selfPath)
tdLog.info("cannot found %s in path: %s, use system's" % (tool, selfPath))
projPath = "/usr/local/taos/bin"

paths = []
for root, dirs, files in os.walk(projPath):
Expand Down

0 comments on commit 9e75540

Please sign in to comment.