Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add node.js 10 to CI matrices and update tests to account for changes in node.js 10 #279

Merged
merged 5 commits into from
May 16, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
language: node_js
node_js:
- "10"
- "8"
- "6"
- "4"
Expand Down
9 changes: 5 additions & 4 deletions build.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ schedules:
exclude:
# Exclude all builds for 6.x.
- nodejs: ['6']
# Only build with latest for 4.x
- nodejs: '4'
# Only build with latest for 4.x and 10.x
- nodejs: ['4','10']
cassandra: ['2.1', '2.2', '3.0']
# Only build with 2.1 and latest for 8.x
- nodejs: '8'
Expand All @@ -21,14 +21,15 @@ schedules:
adhoc:
# adhoc job for non-primary branches that don't have a schedule but may be used to run all configs.
schedule: adhoc
branches:
branches:
exclude: ["/((\\d+(\\.[\\dx]+)+)|master)/"]
nodejs:
- '4'
- '6'
- '8'
- '10'
os:
- ubuntu/trusty64
- ubuntu/bionic64/nodejs-driver
cassandra:
- '2.1'
- '2.2'
Expand Down
5 changes: 3 additions & 2 deletions ci/appveyor.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,11 @@ environment:
appveyor_build_worker_cloud: gce
ci_type: ci_unit
matrix:
- nodejs_version: 6
- nodejs_version: 8
TEST_CASSANDRA_VERSION: 3.11.2
ci_type: ci
- nodejs_version: 8
- nodejs_version: 10
- nodejs_version: 6
- nodejs_version: 4
image: Visual Studio 2017
platform:
Expand Down
6 changes: 3 additions & 3 deletions test/unit/address-resolution-tests.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ describe('EC2MultiRegionTranslator', function () {
describe('#translate()', function () {
it('should return the same address when it could not be resolved', function (done) {
const t = new EC2MultiRegionTranslator();
t.translate('127.100.100.1', 9042, function (endPoint) {
assert.strictEqual(endPoint, '127.100.100.1:9042');
t.translate('192.0.2.1', 9042, function (endPoint) {
assert.strictEqual(endPoint, '192.0.2.1:9042');
done();
});
});
Expand All @@ -27,4 +27,4 @@ describe('EC2MultiRegionTranslator', function () {
});
});
});
});
});
5 changes: 4 additions & 1 deletion test/unit/duration-type-tests.js
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,10 @@ describe('Duration', function () {
'-PW',
'-PTW',
].forEach(function (testInfo) {
assert.throws(function() { Duration.fromString(testInfo); }, 'Unable to convert \'' + testInfo + '\' to a duration');
const expectedError = testInfo.startsWith('-') ? testInfo.substr(1) : testInfo;
assert.throws(function() { Duration.fromString(testInfo); }, (err) =>
err instanceof TypeError && err.message === 'Unable to convert \'' + expectedError + '\' to a duration'
);
});
});
});
Expand Down
108 changes: 66 additions & 42 deletions test/unit/parser-tests.js
Original file line number Diff line number Diff line change
Expand Up @@ -95,17 +95,22 @@ describe('Parser', function () {
header: getFrameHeader(4, types.opcodes.result, 2, true),
chunk: body
}, null, doneIfError(done));
assert.strictEqual(responseCounter, 1);
parser.setOptions(88, { byRow: true });
for (let i = 0; i < body.length; i++) {
parser._transform({
header: getFrameHeader(4, types.opcodes.result, 2, true, 88),
chunk: body.slice(i, i + 1),
offset: 0
}, null, doneIfError(done));
}
assert.strictEqual(responseCounter, 2);
done();

process.nextTick(() => {
assert.strictEqual(responseCounter, 1);
parser.setOptions(88, { byRow: true });
for (let i = 0; i < body.length; i++) {
parser._transform({
header: getFrameHeader(4, types.opcodes.result, 2, true, 88),
chunk: body.slice(i, i + 1),
offset: 0
}, null, doneIfError(done));
}
process.nextTick(() => {
assert.strictEqual(responseCounter, 2);
done();
});
});
});
it('should read a RESULT result with trace id chunked', function (done) {
const parser = newInstance();
Expand All @@ -125,17 +130,21 @@ describe('Parser', function () {
chunk: body,
offset: 0
}, null, doneIfError(done));
assert.strictEqual(responseCounter, 1);
parser.setOptions(88, { byRow: true });
for (let i = 0; i < body.length; i++) {
parser._transform({
header: getFrameHeader(4, types.opcodes.result, 2, true, 88),
chunk: body.slice(i, i + 1),
offset: 0
}, null, doneIfError(done));
}
assert.strictEqual(responseCounter, 2);
done();
process.nextTick(() => {
assert.strictEqual(responseCounter, 1);
parser.setOptions(88, { byRow: true });
for (let i = 0; i < body.length; i++) {
parser._transform({
header: getFrameHeader(4, types.opcodes.result, 2, true, 88),
chunk: body.slice(i, i + 1),
offset: 0
}, null, doneIfError(done));
}
process.nextTick(() => {
assert.strictEqual(responseCounter, 2);
done();
});
});
});
it('should read a VOID result with warnings and custom payload', function (done) {
const parser = newInstance();
Expand Down Expand Up @@ -743,11 +752,16 @@ describe('Parser', function () {
const rowLength = 2;
let rowCounter = 0;
parser.on('readable', function () {
const item = parser.read();
assert.strictEqual(item.header.opcode, types.opcodes.result);
assert.ok(item.row);
if ((++rowCounter) === rowLength) {
done();
let item;
while ((item = parser.read())) {
if (!item.row && item.frameEnded) {
continue;
}
assert.strictEqual(item.header.opcode, types.opcodes.result);
assert.ok(item.row);
if ((++rowCounter) === rowLength) {
done();
}
}
});
parser.setOptions(33, { byRow: true });
Expand All @@ -773,7 +787,7 @@ describe('Parser', function () {
}
});
[1, 3, 5, 13].forEach(function (chunkLength) {
it('should emit rows chunked with chunk length of ' + chunkLength, function () {
it('should emit rows chunked with chunk length of ' + chunkLength, function (done) {
result = {};
const expected = [
{ columnLength: 3, rowLength: 10 },
Expand Down Expand Up @@ -814,10 +828,13 @@ describe('Parser', function () {
for (let i = 0; i < items.length; i++) {
transformChunkedItem(i);
}
//assert result
expected.forEach(function (expectedItem, index) {
assert.ok(result[index], 'Result not found for index ' + index);
assert.strictEqual(result[index].length, expectedItem.rowLength);
process.nextTick(() => {
//assert result
expected.forEach(function (expectedItem, index) {
assert.ok(result[index], 'Result not found for index ' + index);
assert.strictEqual(result[index].length, expectedItem.rowLength);
});
done();
});
});
});
Expand Down Expand Up @@ -847,7 +864,7 @@ describe('Parser', function () {
{ columnLength: 1, rowLength: 20 }
];
[1, 2, 7, 11].forEach(function (chunkLength) {
it('should emit rows chunked with chunk length of ' + chunkLength, function () {
it('should emit rows chunked with chunk length of ' + chunkLength, function (done) {
result = {};
const buffer = Buffer.concat(expected.map(function (expectedItem, index) {
parser.setOptions(index, { byRow: true });
Expand All @@ -862,11 +879,14 @@ describe('Parser', function () {
}
protocol._transform(buffer.slice(j, end), null, helper.throwop);
}
//assert result
expected.forEach(function (expectedItem, index) {
assert.ok(result[index], 'Result not found for index ' + index);
assert.strictEqual(result[index].length, expectedItem.rowLength);
assert.strictEqual(result[index][0].keys().length, expectedItem.columnLength);
process.nextTick(() => {
//assert result
expected.forEach(function (expectedItem, index) {
assert.ok(result[index], 'Result not found for index ' + index);
assert.strictEqual(result[index].length, expectedItem.rowLength);
assert.strictEqual(result[index][0].keys().length, expectedItem.columnLength);
});
done();
});
});
});
Expand Down Expand Up @@ -1021,10 +1041,14 @@ describe('Parser', function () {
parser._transform(getBodyChunks(3, rowLength, 0, 10), null, doneIfError(done));
parser._transform(getBodyChunks(3, rowLength, 10, 32), null, doneIfError(done));
parser._transform(getBodyChunks(3, rowLength, 32, 55), null, doneIfError(done));
assert.strictEqual(rowCounter, 1);
parser._transform(getBodyChunks(3, rowLength, 55, null), null, doneIfError(done));
assert.strictEqual(rowCounter, 2);
done();
process.nextTick(() => {
assert.strictEqual(rowCounter, 1);
parser._transform(getBodyChunks(3, rowLength, 55, null), null, doneIfError(done));
process.nextTick(() => {
assert.strictEqual(rowCounter, 2);
done();
});
});
});
});
});
Expand Down
71 changes: 43 additions & 28 deletions test/unit/protocol-stream-tests.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ const types = require('../../lib/types');
const utils = require('../../lib/utils');

describe('Protocol', function () {
it('should emit a single frame with 0-length body', function () {
it('should emit a single frame with 0-length body', function (done) {
const p = newInstance();
const items = [];
p.on('readable', function () {
Expand All @@ -17,10 +17,13 @@ describe('Protocol', function () {
});
const buffer = generateBuffer(4, [ 0 ]);
p.readItems(buffer);
assert.strictEqual(items.length, 1);
assert.strictEqual(items[0].header.bodyLength, 0);
process.nextTick(() => {
assert.strictEqual(items.length, 1);
assert.strictEqual(items[0].header.bodyLength, 0);
done();
});
});
it('should emit a single frame with 0-length body chunked', function () {
it('should emit a single frame with 0-length body chunked', function (done) {
const p = newInstance();
const items = [];
p.on('readable', function () {
Expand All @@ -32,10 +35,13 @@ describe('Protocol', function () {
const buffer = generateBuffer(4, [ 0 ]);
p.readItems(buffer.slice(0, 2));
p.readItems(buffer.slice(2));
assert.strictEqual(items.length, 1);
assert.strictEqual(items[0].header.bodyLength, 0);
process.nextTick(() => {
assert.strictEqual(items.length, 1);
assert.strictEqual(items[0].header.bodyLength, 0);
done();
});
});
it('should emit multiple frames from a single chunk', function () {
it('should emit multiple frames from a single chunk', function (done) {
const p = newInstance();
const items = [];
p.on('readable', function () {
Expand All @@ -47,12 +53,15 @@ describe('Protocol', function () {
const bodyLengths = [ 0, 10, 0, 20, 30, 0];
const buffer = generateBuffer(4, bodyLengths);
p.readItems(buffer);
assert.strictEqual(items.length, bodyLengths.length);
bodyLengths.forEach(function (length, index) {
assert.strictEqual(items[index].header.bodyLength, length);
process.nextTick(() => {
assert.strictEqual(items.length, bodyLengths.length);
bodyLengths.forEach(function (length, index) {
assert.strictEqual(items[index].header.bodyLength, length);
});
done();
});
});
it('should emit multiple frames from multiples chunks', function () {
it('should emit multiple frames from multiples chunks', function (done) {
const p = newInstance();
const items = {};
p.on('readable', function () {
Expand All @@ -70,17 +79,20 @@ describe('Protocol', function () {
p.readItems(buffer.slice(33, 45));
p.readItems(buffer.slice(45, 65));
p.readItems(buffer.slice(65));
bodyLengths.forEach(function (length, index) {
const item = items[index];
assert.ok(item);
assert.ok(item.length);
const sumLength = item.reduce(function (previousValue, subItem) {
return previousValue + subItem.chunk.length - subItem.offset;
}, 0);
assert.ok(sumLength >= length, sumLength + ' >= ' + length + ' failed');
process.nextTick(() => {
bodyLengths.forEach(function (length, index) {
const item = items[index];
assert.ok(item);
assert.ok(item.length);
const sumLength = item.reduce(function (previousValue, subItem) {
return previousValue + subItem.chunk.length - subItem.offset;
}, 0);
assert.ok(sumLength >= length, sumLength + ' >= ' + length + ' failed');
});
done();
});
});
it('should emit multiple frames from multiples small chunks', function () {
it('should emit multiple frames from multiples small chunks', function (done) {
const p = newInstance();
const items = {};
p.on('readable', function () {
Expand All @@ -99,14 +111,17 @@ describe('Protocol', function () {
}
p.readItems(buffer.slice(i, i + 2));
}
bodyLengths.forEach(function (length, index) {
const item = items[index];
assert.ok(item);
assert.ok(item.length);
const sumLength = item.reduce(function (previousValue, subItem) {
return previousValue + subItem.chunk.length - subItem.offset;
}, 0);
assert.ok(sumLength >= length, sumLength + ' >= ' + length + ' failed');
process.nextTick(() => {
bodyLengths.forEach(function (length, index) {
const item = items[index];
assert.ok(item);
assert.ok(item.length);
const sumLength = item.reduce(function (previousValue, subItem) {
return previousValue + subItem.chunk.length - subItem.offset;
}, 0);
assert.ok(sumLength >= length, sumLength + ' >= ' + length + ' failed');
});
done();
});
});
});
Expand Down
4 changes: 3 additions & 1 deletion test/unit/token-tests.js
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,9 @@ describe('TokenRange', () => {
]));
it('should not allow split on minToken', () => {
const range = new token.TokenRange(_tokenizer.minToken(), _tokenizer.minToken(), _tokenizer);
assert.throws(() => range.splitEvenly(2), 'Cannot split whole ring with ordered partitioner');
assert.throws(() => range.splitEvenly(2), (err) =>
err instanceof Error && err.message === 'Cannot split whole ring with ordered partitioner'
);
});
});
describe('#compare()', () => {
Expand Down