From f89fc1e7efd573696268f5cc231a9e2fa5021890 Mon Sep 17 00:00:00 2001 From: Tommy Brunn Date: Wed, 3 Jun 2020 14:06:18 +0200 Subject: [PATCH 01/35] Make Lock timeout mandatory --- src/utils/lock.js | 6 +++++- src/utils/lock.spec.js | 9 ++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/utils/lock.js b/src/utils/lock.js index ffb24bc65..07de4bcc5 100644 --- a/src/utils/lock.js +++ b/src/utils/lock.js @@ -11,7 +11,11 @@ const PRIVATE = { const TIMEOUT_MESSAGE = 'Timeout while acquiring lock (%d waiting locks)' module.exports = class Lock { - constructor({ timeout = 1000, description = null } = {}) { + constructor({ timeout, description = null } = {}) { + if (typeof timeout !== 'number') { + throw new TypeError(`'timeout' is not a number, received '${typeof timeout}'`) + } + this[PRIVATE.LOCKED] = false this[PRIVATE.TIMEOUT] = timeout this[PRIVATE.WAITING] = new Set() diff --git a/src/utils/lock.spec.js b/src/utils/lock.spec.js index dc47002c1..4fc43fb5d 100644 --- a/src/utils/lock.spec.js +++ b/src/utils/lock.spec.js @@ -6,7 +6,7 @@ const sleep = value => waitFor(delay => delay >= value) describe('Utils > Lock', () => { it('allows only one resource at a time', async () => { - const lock = new Lock() + const lock = new Lock({ timeout: 1000 }) const resource = jest.fn() const callResource = async () => { try { @@ -40,6 +40,13 @@ describe('Utils > Lock', () => { ).rejects.toHaveProperty('message', 'Timeout while acquiring lock (2 waiting locks)') }) + it('throws if the lock is initiated with an undefined timeout', async () => { + expect(() => new Lock()).toThrowWithMessage( + TypeError, + `'timeout' is not a number, received 'undefined'` + ) + }) + it('allows lock to be acquired after timeout', async () => { const lock = new Lock({ timeout: 60 }) const resource = jest.fn() From 82c157ddff4c87b46d87260b115e025acd0bee03 Mon Sep 17 00:00:00 2001 From: Tommy Brunn Date: Wed, 3 Jun 2020 14:25:23 +0200 Subject: [PATCH 02/35] Move default request timeout up to cluster level The cluster needs to know the request timeout in order to determine how long it should hold the target topic mutation lock. Fixes #695.
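As an illustrative sketch (the lock name and description below are made up, not the actual identifiers in the cluster code), moving the default up means the lock timeout can be derived from the same value that bounds a request:

    const requestTimeout = 30000 // now defaulted in the Cluster constructor
    const targetTopicsLock = new Lock({
      timeout: requestTimeout, // hold the mutation lock at least as long as a request may run
      description: 'updating target topics',
    })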
--- src/cluster/index.js | 4 ++-- src/network/connection.js | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/cluster/index.js b/src/cluster/index.js index 44d9f49a5..4ea2519db 100644 --- a/src/cluster/index.js +++ b/src/cluster/index.js @@ -28,7 +28,7 @@ const mergeTopics = (obj, { topic, partitions }) => ({ * @param {number} connectionTimeout - in milliseconds * @param {number} authenticationTimeout - in milliseconds * @param {number} reauthenticationThreshold - in milliseconds - * @param {number} requestTimeout - in milliseconds + * @param {number} [requestTimeout=30000] - in milliseconds * @param {number} metadataMaxAge - in milliseconds * @param {boolean} allowAutoTopicCreation * @param {number} maxInFlightRequests @@ -49,7 +49,7 @@ module.exports = class Cluster { connectionTimeout, authenticationTimeout, reauthenticationThreshold, - requestTimeout, + requestTimeout = 30000, enforceRequestTimeout, metadataMaxAge, retry, diff --git a/src/network/connection.js b/src/network/connection.js index 5e5e48df6..fd93c30c3 100644 --- a/src/network/connection.js +++ b/src/network/connection.js @@ -15,6 +15,8 @@ const requestInfo = ({ apiName, apiKey, apiVersion }) => * @param {number} port * @param {Object} logger * @param {string} clientId='kafkajs' + * @param {number} requestTimeout The maximum amount of time the client will wait for the response of a request, + * in milliseconds * @param {string} [rack=null] * @param {Object} [ssl=null] Options for the TLS Secure Context. It accepts all options, * usually "cert", "key" and "ca". More information at @@ -23,8 +25,6 @@ const requestInfo = ({ apiName, apiKey, apiVersion }) => * key "mechanism". Connection is not actively using the SASL attributes * but acting as a data object for this information * @param {number} [connectionTimeout=1000] The connection timeout, in milliseconds - * @param {number} [requestTimeout=30000] The maximum amount of time the client will wait for the response of a request, - * in milliseconds * @param {Object} [retry=null] Configurations for the built-in retry mechanism. More information at the * retry module inside network * @param {number} [maxInFlightRequests=null] The maximum number of unacknowledged requests on a connection before @@ -37,12 +37,12 @@ module.exports = class Connection { port, logger, socketFactory, + requestTimeout, rack = null, ssl = null, sasl = null, clientId = 'kafkajs', connectionTimeout = 1000, - requestTimeout = 30000, enforceRequestTimeout = false, maxInFlightRequests = null, instrumentationEmitter = null, From 9c886fdd354e504bcfe0342eb7b546a70c2d740e Mon Sep 17 00:00:00 2001 From: Tommy Brunn Date: Wed, 3 Jun 2020 14:33:03 +0200 Subject: [PATCH 03/35] Mark flaky test as such The intention is to make it easier to target flaky tests for fixing, by running `yarn test:local:watch --testNamePattern=flaky`. 
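For illustration, marking a test with the helper introduced below (the test name is made up):

    // flakyTest prefixes the description with "[flaky] ", so Jest's
    // --testNamePattern=flaky matches it
    flakyTest('reassigns partitions after a broker restart', async () => {
      // ...test body...
    })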
--- .../__tests__/assignmentForUnsubscribedTopic.spec.js | 3 ++- testHelpers/index.js | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/consumer/__tests__/assignmentForUnsubscribedTopic.spec.js b/src/consumer/__tests__/assignmentForUnsubscribedTopic.spec.js index 0cb5d48e6..fcc0006c0 100644 --- a/src/consumer/__tests__/assignmentForUnsubscribedTopic.spec.js +++ b/src/consumer/__tests__/assignmentForUnsubscribedTopic.spec.js @@ -6,6 +6,7 @@ const { createTopic, newLogger, waitForConsumerToJoinGroup, + flakyTest, } = require('testHelpers') describe('Consumer', () => { @@ -61,7 +62,7 @@ describe('Consumer', () => { expect(event.payload.memberAssignment[topicNames[1]]).toBeUndefined() }) - it('starts consuming from new topics after already having assignments', async () => { + flakyTest('starts consuming from new topics after already having assignments', async () => { consumer2 = createConsumer({ cluster: createCluster({ metadataMaxAge: 50 }), groupId, diff --git a/testHelpers/index.js b/testHelpers/index.js index 56f8efef3..1b3d2ae5b 100644 --- a/testHelpers/index.js +++ b/testHelpers/index.js @@ -206,6 +206,11 @@ testIfKafka_1_1_0.only = (description, callback) => { return testIfKafka_1_1_0(description, callback, test.only) } +const flakyTest = (description, callback, testFn = test) => + testFn(`[flaky] ${description}`, callback) +flakyTest.skip = (description, callback) => flakyTest(description, callback, test.skip) +flakyTest.only = (description, callback) => flakyTest(description, callback, test.only) + const unsupportedVersionResponse = () => Buffer.from({ type: 'Buffer', data: [0, 35, 0, 0, 0, 0] }) const unsupportedVersionResponseWithTimeout = () => Buffer.from({ type: 'Buffer', data: [0, 0, 0, 0, 0, 35] }) @@ -248,6 +253,7 @@ module.exports = { waitForConsumerToJoinGroup, testIfKafka_0_11, testIfKafka_1_1_0, + flakyTest, addPartitions, unsupportedVersionResponse, generateMessages, From a0069300ab081409ad7bcdcc5f14af68aba76197 Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Fri, 26 Jun 2020 10:26:38 +0200 Subject: [PATCH 04/35] Consistently check for objects to be valid in `afterEach` before calling methods on them --- src/admin/__tests__/alterConfigs.spec.js | 2 +- src/admin/__tests__/connection.spec.js | 2 +- src/admin/__tests__/createPartitions.spec.js | 2 +- src/admin/__tests__/createTopics.spec.js | 2 +- src/admin/__tests__/deleteTopics.spec.js | 6 ++---- src/admin/__tests__/describeConfigs.spec.js | 2 +- src/admin/__tests__/fetchOffsets.spec.js | 2 +- .../__tests__/fetchTopicMetadata.spec.js | 2 +- src/admin/__tests__/fetchTopicOffsets.spec.js | 2 +- src/admin/__tests__/getTopicMetadata.spec.js | 2 +- src/admin/__tests__/listTopics.spec.js | 6 ++++-- src/admin/__tests__/resetOffsets.spec.js | 2 +- src/admin/__tests__/setOffsets.spec.js | 2 +- src/broker/__tests__/addOffsetsToTxn.spec.js | 4 ++-- .../__tests__/addPartitionsToTxn.spec.js | 9 +++----- src/broker/__tests__/describeGroups.spec.js | 6 +++--- src/broker/__tests__/endTxn.spec.js | 4 ++-- src/broker/__tests__/fetch.spec.js | 4 ++-- .../__tests__/findGroupCoordinator.spec.js | 2 +- src/broker/__tests__/heartbeat.spec.js | 6 +++--- src/broker/__tests__/initProducerId.spec.js | 4 ++-- src/broker/__tests__/joinGroup.spec.js | 4 ++-- src/broker/__tests__/leaveGroup.spec.js | 4 ++-- src/broker/__tests__/listOffsets.spec.js | 4 ++-- src/broker/__tests__/offsetCommit.spec.js | 6 +++--- src/broker/__tests__/offsetFetch.spec.js | 6 +++--- src/broker/__tests__/produce.spec.js | 2 +- 
src/broker/__tests__/syncGroup.spec.js | 4 ++-- src/broker/__tests__/txnOffsetCommit.spec.js | 21 ++++++------------- src/cluster/__tests__/brokerPool.spec.js | 2 +- src/network/connection.spec.js | 4 +--- 31 files changed, 58 insertions(+), 72 deletions(-) diff --git a/src/admin/__tests__/alterConfigs.spec.js b/src/admin/__tests__/alterConfigs.spec.js index 6b206bc17..32fe40928 100644 --- a/src/admin/__tests__/alterConfigs.spec.js +++ b/src/admin/__tests__/alterConfigs.spec.js @@ -20,7 +20,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) }) describe('alterConfigs', () => { diff --git a/src/admin/__tests__/connection.spec.js b/src/admin/__tests__/connection.spec.js index b48c53c0e..662e6819e 100644 --- a/src/admin/__tests__/connection.spec.js +++ b/src/admin/__tests__/connection.spec.js @@ -15,7 +15,7 @@ describe('Admin', () => { let admin afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) }) test('support SSL connections', async () => { diff --git a/src/admin/__tests__/createPartitions.spec.js b/src/admin/__tests__/createPartitions.spec.js index 995882cdc..1978a4844 100644 --- a/src/admin/__tests__/createPartitions.spec.js +++ b/src/admin/__tests__/createPartitions.spec.js @@ -14,7 +14,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) }) describe('createPartitions', () => { diff --git a/src/admin/__tests__/createTopics.spec.js b/src/admin/__tests__/createTopics.spec.js index afb83bf25..51d0cca3d 100644 --- a/src/admin/__tests__/createTopics.spec.js +++ b/src/admin/__tests__/createTopics.spec.js @@ -15,7 +15,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) }) describe('createTopics', () => { diff --git a/src/admin/__tests__/deleteTopics.spec.js b/src/admin/__tests__/deleteTopics.spec.js index 83c494a8a..d390412d6 100644 --- a/src/admin/__tests__/deleteTopics.spec.js +++ b/src/admin/__tests__/deleteTopics.spec.js @@ -15,7 +15,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) }) describe('deleteTopics', () => { @@ -102,9 +102,7 @@ describe('Admin', () => { ) expect(broker.deleteTopics).toHaveBeenCalledTimes(1) - expect( - loggerInstance.error - ).toHaveBeenCalledWith( + expect(loggerInstance.error).toHaveBeenCalledWith( 'Could not delete topics, check if "delete.topic.enable" is set to "true" (the default value is "false") or increase the timeout', { error: 'The request timed out', retryCount: 0, retryTime: expect.any(Number) } ) diff --git a/src/admin/__tests__/describeConfigs.spec.js b/src/admin/__tests__/describeConfigs.spec.js index f919ffed9..0260eb4f8 100644 --- a/src/admin/__tests__/describeConfigs.spec.js +++ b/src/admin/__tests__/describeConfigs.spec.js @@ -20,7 +20,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) }) describe('describeConfigs', () => { diff --git a/src/admin/__tests__/fetchOffsets.spec.js b/src/admin/__tests__/fetchOffsets.spec.js index f9b885466..bfef38fbb 100644 --- a/src/admin/__tests__/fetchOffsets.spec.js +++ b/src/admin/__tests__/fetchOffsets.spec.js @@ -12,7 +12,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) consumer && (await consumer.disconnect()) }) diff --git 
a/src/admin/__tests__/fetchTopicMetadata.spec.js b/src/admin/__tests__/fetchTopicMetadata.spec.js index 39bf0aeef..3391bdcf8 100644 --- a/src/admin/__tests__/fetchTopicMetadata.spec.js +++ b/src/admin/__tests__/fetchTopicMetadata.spec.js @@ -13,7 +13,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) consumer && (await consumer.disconnect()) }) diff --git a/src/admin/__tests__/fetchTopicOffsets.spec.js b/src/admin/__tests__/fetchTopicOffsets.spec.js index 96ce39591..bf252983b 100644 --- a/src/admin/__tests__/fetchTopicOffsets.spec.js +++ b/src/admin/__tests__/fetchTopicOffsets.spec.js @@ -25,7 +25,7 @@ describe('Admin', () => { }) }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) producer && (await producer.disconnect()) }) diff --git a/src/admin/__tests__/getTopicMetadata.spec.js b/src/admin/__tests__/getTopicMetadata.spec.js index 4a09c50dd..3dded2f2d 100644 --- a/src/admin/__tests__/getTopicMetadata.spec.js +++ b/src/admin/__tests__/getTopicMetadata.spec.js @@ -13,7 +13,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) consumer && (await consumer.disconnect()) }) diff --git a/src/admin/__tests__/listTopics.spec.js b/src/admin/__tests__/listTopics.spec.js index f825b85c2..b5fb6d854 100644 --- a/src/admin/__tests__/listTopics.spec.js +++ b/src/admin/__tests__/listTopics.spec.js @@ -20,8 +20,10 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.deleteTopics({ topics: existingTopicNames }) - await admin.disconnect() + if (admin) { + await admin.deleteTopics({ topics: existingTopicNames }) + await admin.disconnect() + } }) describe('listTopics', () => { diff --git a/src/admin/__tests__/resetOffsets.spec.js b/src/admin/__tests__/resetOffsets.spec.js index 09d57e590..ba323320b 100644 --- a/src/admin/__tests__/resetOffsets.spec.js +++ b/src/admin/__tests__/resetOffsets.spec.js @@ -14,7 +14,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) consumer && (await consumer.disconnect()) }) diff --git a/src/admin/__tests__/setOffsets.spec.js b/src/admin/__tests__/setOffsets.spec.js index a5e84bc3b..27ef61482 100644 --- a/src/admin/__tests__/setOffsets.spec.js +++ b/src/admin/__tests__/setOffsets.spec.js @@ -14,7 +14,7 @@ describe('Admin', () => { }) afterEach(async () => { - await admin.disconnect() + admin && (await admin.disconnect()) consumer && (await consumer.disconnect()) }) diff --git a/src/broker/__tests__/addOffsetsToTxn.spec.js b/src/broker/__tests__/addOffsetsToTxn.spec.js index a4505e4a3..716cd7a45 100644 --- a/src/broker/__tests__/addOffsetsToTxn.spec.js +++ b/src/broker/__tests__/addOffsetsToTxn.spec.js @@ -44,8 +44,8 @@ describe('Broker > AddOffsetsToTxn', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/addPartitionsToTxn.spec.js b/src/broker/__tests__/addPartitionsToTxn.spec.js index 9f4b6e0cb..9f39f9e84 100644 --- a/src/broker/__tests__/addPartitionsToTxn.spec.js +++ b/src/broker/__tests__/addPartitionsToTxn.spec.js @@ -51,8 +51,8 @@ describe('Broker > AddPartitionsToTxn', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() + seedBroker && (await seedBroker.disconnect()) 
+ broker && (await broker.disconnect()) }) test('request', async () => { @@ -73,10 +73,7 @@ describe('Broker > AddPartitionsToTxn', () => { errors: [ { topic: topicName, - partitionErrors: [ - { errorCode: 0, partition: 1 }, - { errorCode: 0, partition: 2 }, - ], + partitionErrors: [{ errorCode: 0, partition: 1 }, { errorCode: 0, partition: 2 }], }, ], }) diff --git a/src/broker/__tests__/describeGroups.spec.js b/src/broker/__tests__/describeGroups.spec.js index a1c508c74..adaaa05f1 100644 --- a/src/broker/__tests__/describeGroups.spec.js +++ b/src/broker/__tests__/describeGroups.spec.js @@ -43,9 +43,9 @@ describe('Broker > DescribeGroups', () => { }) afterEach(async () => { - await consumer.disconnect() - await seedBroker.disconnect() - await broker.disconnect() + consumer && (await consumer.disconnect()) + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/endTxn.spec.js b/src/broker/__tests__/endTxn.spec.js index 2579a3c92..a4bd98a35 100644 --- a/src/broker/__tests__/endTxn.spec.js +++ b/src/broker/__tests__/endTxn.spec.js @@ -61,8 +61,8 @@ describe('Broker > EndTxn', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await transactionBroker.disconnect() + seedBroker && (await seedBroker.disconnect()) + transactionBroker && (await transactionBroker.disconnect()) }) test('commit transaction', async () => { diff --git a/src/broker/__tests__/fetch.spec.js b/src/broker/__tests__/fetch.spec.js index 935cf7950..5c58a3267 100644 --- a/src/broker/__tests__/fetch.spec.js +++ b/src/broker/__tests__/fetch.spec.js @@ -104,8 +104,8 @@ describe('Broker > Fetch', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) }) test('rejects the Promise if lookupRequest is not defined', async () => { diff --git a/src/broker/__tests__/findGroupCoordinator.spec.js b/src/broker/__tests__/findGroupCoordinator.spec.js index 1575721c9..7be84ad96 100644 --- a/src/broker/__tests__/findGroupCoordinator.spec.js +++ b/src/broker/__tests__/findGroupCoordinator.spec.js @@ -14,7 +14,7 @@ describe('Broker > FindGroupCoordinator', () => { }) afterEach(async () => { - await seedBroker.disconnect() + seedBroker && (await seedBroker.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/heartbeat.spec.js b/src/broker/__tests__/heartbeat.spec.js index 33b4bf08f..e7ff4bda4 100644 --- a/src/broker/__tests__/heartbeat.spec.js +++ b/src/broker/__tests__/heartbeat.spec.js @@ -54,9 +54,9 @@ describe('Broker > Heartbeat', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() - await groupCoordinator.disconnect() + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) + groupCoordinator && (await groupCoordinator.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/initProducerId.spec.js b/src/broker/__tests__/initProducerId.spec.js index d83f338ab..8afd241a0 100644 --- a/src/broker/__tests__/initProducerId.spec.js +++ b/src/broker/__tests__/initProducerId.spec.js @@ -33,8 +33,8 @@ describe('Broker > InitProducerId', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) }) test('request with transaction id', async () => { diff --git 
a/src/broker/__tests__/joinGroup.spec.js b/src/broker/__tests__/joinGroup.spec.js index 2c84fd4f8..ed564e6b1 100644 --- a/src/broker/__tests__/joinGroup.spec.js +++ b/src/broker/__tests__/joinGroup.spec.js @@ -29,8 +29,8 @@ describe('Broker > JoinGroup', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/leaveGroup.spec.js b/src/broker/__tests__/leaveGroup.spec.js index bab093bd8..06e345468 100644 --- a/src/broker/__tests__/leaveGroup.spec.js +++ b/src/broker/__tests__/leaveGroup.spec.js @@ -30,8 +30,8 @@ describe('Broker > LeaveGroup', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await groupCoordinator.disconnect() + seedBroker && (await seedBroker.disconnect()) + groupCoordinator && (await groupCoordinator.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/listOffsets.spec.js b/src/broker/__tests__/listOffsets.spec.js index 31e9b0887..01388ba8c 100644 --- a/src/broker/__tests__/listOffsets.spec.js +++ b/src/broker/__tests__/listOffsets.spec.js @@ -41,8 +41,8 @@ describe('Broker > ListOffsets', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/offsetCommit.spec.js b/src/broker/__tests__/offsetCommit.spec.js index ace912749..637738bd4 100644 --- a/src/broker/__tests__/offsetCommit.spec.js +++ b/src/broker/__tests__/offsetCommit.spec.js @@ -54,9 +54,9 @@ describe('Broker > OffsetCommit', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() - await groupCoordinator.disconnect() + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) + groupCoordinator && (await groupCoordinator.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/offsetFetch.spec.js b/src/broker/__tests__/offsetFetch.spec.js index 076500221..86a1090bb 100644 --- a/src/broker/__tests__/offsetFetch.spec.js +++ b/src/broker/__tests__/offsetFetch.spec.js @@ -53,9 +53,9 @@ describe('Broker > OffsetFetch', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await broker.disconnect() - await groupCoordinator.disconnect() + seedBroker && (await seedBroker.disconnect()) + broker && (await broker.disconnect()) + groupCoordinator && (await groupCoordinator.disconnect()) }) test('request', async () => { diff --git a/src/broker/__tests__/produce.spec.js b/src/broker/__tests__/produce.spec.js index 280459b90..b9345de8b 100644 --- a/src/broker/__tests__/produce.spec.js +++ b/src/broker/__tests__/produce.spec.js @@ -62,7 +62,7 @@ describe('Broker > Produce', () => { }) afterEach(async () => { - await broker.disconnect() + broker && (await broker.disconnect()) broker2 && (await broker2.disconnect()) }) diff --git a/src/broker/__tests__/syncGroup.spec.js b/src/broker/__tests__/syncGroup.spec.js index d25567f6e..12ad05154 100644 --- a/src/broker/__tests__/syncGroup.spec.js +++ b/src/broker/__tests__/syncGroup.spec.js @@ -30,8 +30,8 @@ describe('Broker > SyncGroup', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await groupCoordinator.disconnect() + seedBroker && (await seedBroker.disconnect()) + groupCoordinator && (await groupCoordinator.disconnect()) }) 
test('request', async () => { diff --git a/src/broker/__tests__/txnOffsetCommit.spec.js b/src/broker/__tests__/txnOffsetCommit.spec.js index 531ef474c..e1f78b2f1 100644 --- a/src/broker/__tests__/txnOffsetCommit.spec.js +++ b/src/broker/__tests__/txnOffsetCommit.spec.js @@ -78,9 +78,9 @@ describe('Broker > TxnOffsetCommit', () => { }) afterEach(async () => { - await seedBroker.disconnect() - await transactionBroker.disconnect() - await consumerBroker.disconnect() + seedBroker && (await seedBroker.disconnect()) + transactionBroker && (await transactionBroker.disconnect()) + consumerBroker && (await consumerBroker.disconnect()) }) test('request', async () => { @@ -92,10 +92,7 @@ describe('Broker > TxnOffsetCommit', () => { topics: [ { topic: topicName, - partitions: [ - { partition: 0, offset: 0 }, - { partition: 1, offset: 0 }, - ], + partitions: [{ partition: 0, offset: 0 }, { partition: 1, offset: 0 }], }, ], }) @@ -106,10 +103,7 @@ describe('Broker > TxnOffsetCommit', () => { topics: [ { topic: topicName, - partitions: [ - { errorCode: 0, partition: 0 }, - { errorCode: 0, partition: 1 }, - ], + partitions: [{ errorCode: 0, partition: 0 }, { errorCode: 0, partition: 1 }], }, ], }) @@ -124,10 +118,7 @@ describe('Broker > TxnOffsetCommit', () => { topics: [ { topic: topicName, - partitions: [ - { partition: 0, offset: 0 }, - { partition: 1, offset: 0 }, - ], + partitions: [{ partition: 0, offset: 0 }, { partition: 1, offset: 0 }], }, ], }) diff --git a/src/cluster/__tests__/brokerPool.spec.js b/src/cluster/__tests__/brokerPool.spec.js index 693a2a4cf..03ff3f84f 100644 --- a/src/cluster/__tests__/brokerPool.spec.js +++ b/src/cluster/__tests__/brokerPool.spec.js @@ -20,7 +20,7 @@ describe('Cluster > BrokerPool', () => { }) afterEach(async () => { - await brokerPool.disconnect() + brokerPool && (await brokerPool.disconnect()) }) it('defaults metadataMaxAge to 0', () => { diff --git a/src/network/connection.spec.js b/src/network/connection.spec.js index 337b40018..3f0bb649f 100644 --- a/src/network/connection.spec.js +++ b/src/network/connection.spec.js @@ -180,9 +180,7 @@ describe('Network > Connection', () => { }) afterEach(async () => { - if (connection) { - await connection.disconnect() - } + connection && (await connection.disconnect()) }) test('logs the full payload in case of non-retriable error when "KAFKAJS_DEBUG_PROTOCOL_BUFFERS" runtime flag is set', async () => { From 507d08171459cfffa6f22702949b5ae5e1451cd7 Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Fri, 26 Jun 2020 10:39:07 +0200 Subject: [PATCH 05/35] Fix `yarn lint` issues --- src/admin/__tests__/deleteTopics.spec.js | 4 +++- src/broker/__tests__/addPartitionsToTxn.spec.js | 5 ++++- src/broker/__tests__/txnOffsetCommit.spec.js | 15 ++++++++++++--- 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/src/admin/__tests__/deleteTopics.spec.js b/src/admin/__tests__/deleteTopics.spec.js index d390412d6..639add9de 100644 --- a/src/admin/__tests__/deleteTopics.spec.js +++ b/src/admin/__tests__/deleteTopics.spec.js @@ -102,7 +102,9 @@ describe('Admin', () => { ) expect(broker.deleteTopics).toHaveBeenCalledTimes(1) - expect(loggerInstance.error).toHaveBeenCalledWith( + expect( + loggerInstance.error + ).toHaveBeenCalledWith( 'Could not delete topics, check if "delete.topic.enable" is set to "true" (the default value is "false") or increase the timeout', { error: 'The request timed out', retryCount: 0, retryTime: expect.any(Number) } ) diff --git a/src/broker/__tests__/addPartitionsToTxn.spec.js 
b/src/broker/__tests__/addPartitionsToTxn.spec.js index 9f39f9e84..ca8b821a0 100644 --- a/src/broker/__tests__/addPartitionsToTxn.spec.js +++ b/src/broker/__tests__/addPartitionsToTxn.spec.js @@ -73,7 +73,10 @@ describe('Broker > AddPartitionsToTxn', () => { errors: [ { topic: topicName, - partitionErrors: [{ errorCode: 0, partition: 1 }, { errorCode: 0, partition: 2 }], + partitionErrors: [ + { errorCode: 0, partition: 1 }, + { errorCode: 0, partition: 2 }, + ], }, ], }) diff --git a/src/broker/__tests__/txnOffsetCommit.spec.js b/src/broker/__tests__/txnOffsetCommit.spec.js index e1f78b2f1..ee8416ce5 100644 --- a/src/broker/__tests__/txnOffsetCommit.spec.js +++ b/src/broker/__tests__/txnOffsetCommit.spec.js @@ -92,7 +92,10 @@ describe('Broker > TxnOffsetCommit', () => { topics: [ { topic: topicName, - partitions: [{ partition: 0, offset: 0 }, { partition: 1, offset: 0 }], + partitions: [ + { partition: 0, offset: 0 }, + { partition: 1, offset: 0 }, + ], }, ], }) @@ -103,7 +106,10 @@ topics: [ { topic: topicName, - partitions: [{ errorCode: 0, partition: 0 }, { errorCode: 0, partition: 1 }], + partitions: [ + { errorCode: 0, partition: 0 }, + { errorCode: 0, partition: 1 }, + ], }, ], }) @@ -118,7 +124,10 @@ topics: [ { topic: topicName, - partitions: [{ partition: 0, offset: 0 }, { partition: 1, offset: 0 }], + partitions: [ + { partition: 0, offset: 0 }, + { partition: 1, offset: 0 }, + ], }, ], }) From 08874083d2ef657f2c5502d970f1b4eef773aaec Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Fri, 26 Jun 2020 12:01:22 +0200 Subject: [PATCH 06/35] Wait for consumer to join the group before testing behavior When `consumer.run()` returns it doesn't mean that the consumer is now part of the group, but rather that we at least once tried to connect and sync. Due to errors, delays, etc., the actual joining might only happen later. This likely fixes one of the cases where the tests "import after the jest environment is torn down". Note that this pattern of "don't await run, and then await waitForConsumerToJoinGroup" is used in other tests, but there are also plenty of tests using the simpler "await run" approach that is also documented as the canonical way of handling things.
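The two patterns, side by side (handler bodies are placeholders):

    // documented, simpler pattern: only guarantees that run() has started
    await consumer.run({ eachMessage: () => true })

    // pattern applied in this patch: wait for the actual group join
    consumer.run({ eachMessage: () => true })
    await waitForConsumerToJoinGroup(consumer)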
--- src/admin/__tests__/resetOffsets.spec.js | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/admin/__tests__/resetOffsets.spec.js b/src/admin/__tests__/resetOffsets.spec.js index 09d57e590..684145e41 100644 --- a/src/admin/__tests__/resetOffsets.spec.js +++ b/src/admin/__tests__/resetOffsets.spec.js @@ -1,7 +1,13 @@ const createAdmin = require('../index') const createConsumer = require('../../consumer') -const { secureRandom, createCluster, newLogger, createTopic } = require('testHelpers') +const { + secureRandom, + createCluster, + newLogger, + createTopic, + waitForConsumerToJoinGroup, +} = require('testHelpers') describe('Admin', () => { let topicName, groupId, admin, consumer @@ -88,7 +94,8 @@ describe('Admin', () => { consumer = createConsumer({ groupId, cluster: createCluster(), logger: newLogger() }) await consumer.connect() await consumer.subscribe({ topic: topicName }) - await consumer.run({ eachMessage: () => true }) + consumer.run({ eachMessage: () => true }) + await waitForConsumerToJoinGroup(consumer) admin = createAdmin({ cluster: createCluster(), logger: newLogger() }) From d12378653b72dce7d9ee7611a9026c7a85b8e7ec Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Fri, 26 Jun 2020 14:55:58 +0200 Subject: [PATCH 07/35] Declare the `offlineReplicas` property on the PartitionMetadata type --- types/index.d.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/types/index.d.ts b/types/index.d.ts index e0deb9200..0905e10c2 100644 --- a/types/index.d.ts +++ b/types/index.d.ts @@ -82,6 +82,7 @@ export type PartitionMetadata = { leader: number replicas: number[] isr: number[] + offlineReplicas?: number[] } export interface IHeaders { From 78fefe2fee8c0a22aa1297539c2d9771d5d910c6 Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Fri, 26 Jun 2020 14:14:16 +0200 Subject: [PATCH 08/35] Remove an unneeded `await` Cluster.findTopicPartitionMetadata() returns the metadata directly; any waiting would have happened before when refreshing the metadata. While there: Fix the JSDoc `returns` annotation to actually be correct --- src/admin/index.js | 2 +- src/cluster/index.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/admin/index.js b/src/admin/index.js index c1d65db3f..899847cc0 100644 --- a/src/admin/index.js +++ b/src/admin/index.js @@ -602,7 +602,7 @@ module.exports = ({ topics: await Promise.all( targetTopics.map(async topic => ({ name: topic, - partitions: await cluster.findTopicPartitionMetadata(topic), + partitions: cluster.findTopicPartitionMetadata(topic), })) ), } diff --git a/src/cluster/index.js b/src/cluster/index.js index 44d9f49a5..d31a4806b 100644 --- a/src/cluster/index.js +++ b/src/cluster/index.js @@ -241,7 +241,7 @@ module.exports = class Cluster { /** * @public * @param {string} topic - * @returns {Object} Example: + * @returns {import("../../types").PartitionMetadata[]} Example: * [{ * isr: [2], * leader: 2, From 8569a489eaef0e1dafcc1402f63beb7de6fd7f4f Mon Sep 17 00:00:00 2001 From: likai Date: Fri, 26 Jun 2020 22:26:19 -0400 Subject: [PATCH 09/35] fix admin.createTopics timeout
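Presumably the leader retry loop could otherwise keep retrying past the caller-supplied `timeout`; capping it with `maxWait` bounds the whole wait. A sketch of the resulting call, matching the hunk below:

    await retryOnLeaderNotAvailable(async () => await broker.metadata(topicNamesArray), {
      delay: 100, // pause between retries
      maxWait: timeout, // never retry past the createTopics timeout
      timeoutMessage: 'Timed out while waiting for topic leaders',
    })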
--- src/admin/index.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/admin/index.js b/src/admin/index.js index c1d65db3f..ad0d34fac 100644 --- a/src/admin/index.js +++ b/src/admin/index.js @@ -115,6 +115,7 @@ module.exports = ({ const topicNamesArray = Array.from(topicNames.values()) await retryOnLeaderNotAvailable(async () => await broker.metadata(topicNamesArray), { delay: 100, + maxWait: timeout, timeoutMessage: 'Timed out while waiting for topic leaders', }) } From 48408b37f371d8ca83d659dac2981c30c457c770 Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Thu, 25 Jun 2020 15:02:02 +0200 Subject: [PATCH 10/35] Fix the regular expression to match on `message.format.version` `[2,3]` would be a character class allowing '2', '3' ... and ','. This was introduced with 7c2f69ce, and this fix is mostly "cosmetic" as we wouldn't ever see such a Kafka version in the wild (nor do we particularly care about it anywhere). --- src/broker/__tests__/describeConfigs.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/broker/__tests__/describeConfigs.spec.js b/src/broker/__tests__/describeConfigs.spec.js index df2db655a..da90e5694 100644 --- a/src/broker/__tests__/describeConfigs.spec.js +++ b/src/broker/__tests__/describeConfigs.spec.js @@ -116,7 +116,7 @@ describe('Broker > describeConfigs', () => { }, { configName: 'message.format.version', - configValue: expect.stringMatching(/^(0\.11\.0-IV2|1\.1-IV0|2\.[2,3]-IV1)$/), + configValue: expect.stringMatching(/^(0\.11\.0-IV2|1\.1-IV0|2\.[23]-IV1)$/), isDefault: false, isSensitive: false, readOnly: false, From a9ad9ccab5a3f29f8da81ff4a3ae8bbdd603c9f2 Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Fri, 12 Jun 2020 09:58:00 +0200 Subject: [PATCH 11/35] Add a docker configuration for running with Kafka 2.4.2 This is cherry-picked from https://github.com/figma/kafkajs/commit/f1a090999f078fcb16f24a40d589e39aebd112d8, and then I adjusted for the recent changes in master as well as updated to Confluent Kafka 5.4.2.
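A hedged usage sketch (the environment variables match scripts/dockerComposeUp.sh and the package.json "jest" script changed below):

    # bring up the Kafka 2.4 brokers and run the suite against them
    COMPOSE_FILE=docker-compose.2_4.yml ./scripts/dockerComposeUp.sh
    KAFKA_VERSION=2.4 yarn test:local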
--- docker-compose.2_4.yml | 143 +++++++++++++++++++ package.json | 2 +- scripts/dockerComposeUp.sh | 2 +- src/broker/__tests__/describeConfigs.spec.js | 2 +- 4 files changed, 146 insertions(+), 3 deletions(-) create mode 100644 docker-compose.2_4.yml diff --git a/docker-compose.2_4.yml b/docker-compose.2_4.yml new file mode 100644 index 000000000..80bec117f --- /dev/null +++ b/docker-compose.2_4.yml @@ -0,0 +1,143 @@ +version: '2' +services: + zookeeper: + image: confluentinc/cp-zookeeper:latest + hostname: zookeeper + container_name: zookeeper + ports: + - '2181:2181' + environment: + ZOOKEEPER_CLIENT_PORT: '2181' + ZOOKEEPER_TICK_TIME: '2000' + KAFKA_OPTS: '-Djava.security.auth.login.config=/etc/kafka/server-jaas.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider' + volumes: + - ./testHelpers/kafka/server-jaas.conf:/etc/kafka/server-jaas.conf:ro,z + + kafka1: + image: confluentinc/cp-kafka:5.4.2 + hostname: kafka1 + container_name: kafka1 + labels: + - 'custom.project=kafkajs' + - 'custom.service=kafka1' + depends_on: + - zookeeper + ports: + - '29092:29092' + - '9092:9092' + - '29093:29093' + - '9093:9093' + - '29094:29094' + - '9094:9094' + environment: + KAFKA_BROKER_ID: '0' + KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9092,SSL://kafka1:29093,SSL_HOST://localhost:9093,SASL_SSL://kafka1:29094,SASL_SSL_HOST://localhost:9094 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_DELETE_TOPIC_ENABLE: 'true' + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: '0' + KAFKA_SSL_KEYSTORE_FILENAME: 'kafka.server.keystore.jks' + KAFKA_SSL_KEYSTORE_CREDENTIALS: 'keystore_creds' + KAFKA_SSL_KEY_CREDENTIALS: 'sslkey_creds' + KAFKA_SSL_TRUSTSTORE_FILENAME: 'kafka.server.truststore.jks' + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: 'truststore_creds' + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN' + KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_OPTS: '-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf' + # suppress verbosity + # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template + KAFKA_LOG4J_LOGGERS: 'kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO' + volumes: + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks:ro,z + - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks:ro,z + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds:ro,z + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds:ro,z + - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds:ro,z + - ./testHelpers/kafka/server-jaas.conf:/opt/kafka/config/server-jaas.conf:ro,z + + kafka2: + image: confluentinc/cp-kafka:5.4.2 + hostname: kafka2 + container_name: kafka2 + labels: + - 'custom.project=kafkajs' + - 'custom.service=kafka2' + depends_on: + - zookeeper + ports: + - '29095:29095' + - '9095:9095' + - '29096:29096' + - '9096:9096' + - '29097:29097' + - '9097:9097' + environment: + KAFKA_BROKER_ID: '1' + KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL 
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka2:29095,PLAINTEXT_HOST://localhost:9095,SSL://kafka2:29096,SSL_HOST://localhost:9096,SASL_SSL://kafka2:29097,SASL_SSL_HOST://localhost:9097 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_DELETE_TOPIC_ENABLE: 'true' + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: '0' + KAFKA_SSL_KEYSTORE_FILENAME: 'kafka.server.keystore.jks' + KAFKA_SSL_KEYSTORE_CREDENTIALS: 'keystore_creds' + KAFKA_SSL_KEY_CREDENTIALS: 'sslkey_creds' + KAFKA_SSL_TRUSTSTORE_FILENAME: 'kafka.server.truststore.jks' + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: 'truststore_creds' + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN' + KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_OPTS: '-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf' + # suppress verbosity + # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template + KAFKA_LOG4J_LOGGERS: 'kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO' + volumes: + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks:ro,z + - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks:ro,z + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds:ro,z + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds:ro,z + - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds:ro,z + - ./testHelpers/kafka/server-jaas.conf:/opt/kafka/config/server-jaas.conf:ro,z + + kafka3: + image: confluentinc/cp-kafka:5.4.2 + hostname: kafka3 + container_name: kafka3 + labels: + - 'custom.project=kafkajs' + - 'custom.service=kafka3' + depends_on: + - zookeeper + ports: + - '29098:29098' + - '9098:9098' + - '29099:29099' + - '9099:9099' + - '29100:29100' + - '9100:9100' + environment: + KAFKA_BROKER_ID: '2' + KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka3:29098,PLAINTEXT_HOST://localhost:9098,SSL://kafka3:29099,SSL_HOST://localhost:9099,SASL_SSL://kafka3:29100,SASL_SSL_HOST://localhost:9100 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_DELETE_TOPIC_ENABLE: 'true' + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: '0' + KAFKA_SSL_KEYSTORE_FILENAME: 'kafka.server.keystore.jks' + KAFKA_SSL_KEYSTORE_CREDENTIALS: 'keystore_creds' + KAFKA_SSL_KEY_CREDENTIALS: 'sslkey_creds' + KAFKA_SSL_TRUSTSTORE_FILENAME: 'kafka.server.truststore.jks' + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: 'truststore_creds' + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN' + KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN,SCRAM-SHA-256,SCRAM-SHA-512' + KAFKA_OPTS: '-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf' + # suppress verbosity + # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template + KAFKA_LOG4J_LOGGERS: 'kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO' + volumes: + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks:ro,z + - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks:ro,z + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds:ro,z + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds:ro,z 
- ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds:ro,z + - ./testHelpers/kafka/server-jaas.conf:/opt/kafka/config/server-jaas.conf:ro,z diff --git a/package.json b/package.json index d9cd0e101..ebd521f9c 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,7 @@ }, "homepage": "https://kafka.js.org", "scripts": { - "jest": "export KAFKA_VERSION=${KAFKA_VERSION:='2.3'} && NODE_ENV=test echo \"KAFKA_VERSION: ${KAFKA_VERSION}\" && KAFKAJS_DEBUG_PROTOCOL_BUFFERS=1 jest", + "jest": "export KAFKA_VERSION=${KAFKA_VERSION:='2.4'} && NODE_ENV=test echo \"KAFKA_VERSION: ${KAFKA_VERSION}\" && KAFKAJS_DEBUG_PROTOCOL_BUFFERS=1 jest", "test:local": "yarn jest --detectOpenHandles", "test:debug": "NODE_ENV=test KAFKAJS_DEBUG_PROTOCOL_BUFFERS=1 node --inspect-brk $(yarn bin 2>/dev/null)/jest --detectOpenHandles --runInBand --watch", "test:local:watch": "yarn test:local --watch", diff --git a/scripts/dockerComposeUp.sh b/scripts/dockerComposeUp.sh index 9ce743b86..6407b22c0 100755 --- a/scripts/dockerComposeUp.sh +++ b/scripts/dockerComposeUp.sh @@ -1,6 +1,6 @@ #!/bin/bash -e -COMPOSE_FILE=${COMPOSE_FILE:="docker-compose.2_3.yml"} +COMPOSE_FILE=${COMPOSE_FILE:="docker-compose.2_4.yml"} echo "Running compose file: ${COMPOSE_FILE}:" docker-compose -f "${COMPOSE_FILE}" up --force-recreate -d diff --git a/src/broker/__tests__/describeConfigs.spec.js b/src/broker/__tests__/describeConfigs.spec.js index da90e5694..bc648d3c3 100644 --- a/src/broker/__tests__/describeConfigs.spec.js +++ b/src/broker/__tests__/describeConfigs.spec.js @@ -116,7 +116,7 @@ describe('Broker > describeConfigs', () => { }, { configName: 'message.format.version', - configValue: expect.stringMatching(/^(0\.11\.0-IV2|1\.1-IV0|2\.[23]-IV1)$/), + configValue: expect.stringMatching(/^(0\.11\.0-IV2|1\.1-IV0|2\.[234]-IV1)$/), isDefault: false, isSensitive: false, readOnly: false, From 5b2ab6b15239a679d3227cbb61e6ee257c184e22 Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Mon, 15 Jun 2020 12:59:22 +0200 Subject: [PATCH 12/35] Point to docker-compose.2_4.yml --- docs/DockerLocal.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/DockerLocal.md b/docs/DockerLocal.md index c1f5ce932..74a50c0de 100644 --- a/docs/DockerLocal.md +++ b/docs/DockerLocal.md @@ -45,4 +45,4 @@ You will now be able to connect to your Kafka broker at `$(HOST_IP):9092`. See t ## SSL & authentication methods -To configure Kafka to use SSL and/or authentication methods such as SASL, see [docker-compose.yml](https://github.com/tulios/kafkajs/blob/master/docker-compose.2_3.yml). This configuration is used while developing KafkaJS, and is more complicated to set up, but may give you a more production-like development environment. \ No newline at end of file +To configure Kafka to use SSL and/or authentication methods such as SASL, see [docker-compose.yml](https://github.com/tulios/kafkajs/blob/master/docker-compose.2_4.yml). This configuration is used while developing KafkaJS, and is more complicated to set up, but may give you a more production-like development environment.
\ No newline at end of file From dd3dcfb94a27b1a7d6d6893bc8ce323c50441270 Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Mon, 15 Jun 2020 12:59:39 +0200 Subject: [PATCH 13/35] Switch Azure to run Kafka 2.4 for checks --- azure-pipelines.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 2c8c06625..9dccd0a6a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -17,9 +17,9 @@ pr: variables: - group: website_secrets - name: KAFKA_VERSION - value: 2.3 + value: 2.4 - name: COMPOSE_FILE - value: docker-compose.2_3.yml + value: docker-compose.2_4.yml ####### Linter jobs: From 4261a62cf30582e21ce76c7902d09569bdffb143 Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Thu, 25 Jun 2020 20:53:55 +0200 Subject: [PATCH 14/35] Remove the DeleteGroups v2 and ListGroups v3 APIs These require the support for tagged fields, which KafkaJS doesn't have. The definition here would lead to errors with Kafka versions that support these versions. See https://cwiki.apache.org/confluence/display/KAFKA/KIP-482%3A+The+Kafka+Protocol+should+Support+Optional+Tagged+Fields --- src/protocol/requests/deleteGroups/index.js | 5 --- .../requests/deleteGroups/v2/request.js | 22 ----------- .../requests/deleteGroups/v2/response.js | 39 ------------------- src/protocol/requests/listGroups/index.js | 5 --- .../requests/listGroups/v3/request.js | 7 ---- .../requests/listGroups/v3/response.js | 34 ---------------- 6 files changed, 112 deletions(-) delete mode 100644 src/protocol/requests/deleteGroups/v2/request.js delete mode 100644 src/protocol/requests/deleteGroups/v2/response.js delete mode 100644 src/protocol/requests/listGroups/v3/request.js delete mode 100644 src/protocol/requests/listGroups/v3/response.js diff --git a/src/protocol/requests/deleteGroups/index.js b/src/protocol/requests/deleteGroups/index.js index b1cf7d9a9..32fda47bf 100644 --- a/src/protocol/requests/deleteGroups/index.js +++ b/src/protocol/requests/deleteGroups/index.js @@ -9,11 +9,6 @@ const versions = { const response = require('./v1/response') return { request: request(groupIds), response } }, - 2: groupIds => { - const request = require('./v2/request') - const response = require('./v2/response') - return { request: request(groupIds), response } - }, } module.exports = { diff --git a/src/protocol/requests/deleteGroups/v2/request.js b/src/protocol/requests/deleteGroups/v2/request.js deleted file mode 100644 index 60a1eeafa..000000000 --- a/src/protocol/requests/deleteGroups/v2/request.js +++ /dev/null @@ -1,22 +0,0 @@ -const Encoder = require('../../../encoder') -const { DeleteGroups: apiKey } = require('../../apiKeys') - -/** - * DeleteGroups Request (Version: 2) => [groups_names] TAG_BUFFER - * groups_names => COMPACT_STRING - */ - -/** - */ -module.exports = groupIds => ({ - apiKey, - apiVersion: 2, - apiName: 'DeleteGroups', - encode: async () => { - return new Encoder().writeArray(groupIds.map(encodeGroups)) - }, -}) - -const encodeGroups = group => { - return new Encoder().writeVarIntString(group) -} diff --git a/src/protocol/requests/deleteGroups/v2/response.js b/src/protocol/requests/deleteGroups/v2/response.js deleted file mode 100644 index 60d8652bb..000000000 --- a/src/protocol/requests/deleteGroups/v2/response.js +++ /dev/null @@ -1,39 +0,0 @@ -const Decoder = require('../../../decoder') -const { failure, createErrorFromCode } = require('../../../error') -/** - * DeleteGroups Response (Version: 2) => throttle_time_ms [results] TAG_BUFFER - * throttle_time_ms => 
INT32 - * results => group_id error_code TAG_BUFFER - * group_id => COMPACT_STRING - * error_code => INT16 - */ - -const decodeGroup = decoder => ({ - groupId: decoder.readVarIntString(), - errorCode: decoder.readInt16(), -}) - -const decode = async rawData => { - const decoder = new Decoder(rawData) - const throttleTimeMs = decoder.readInt32() - const results = decoder.readArray(decodeGroup) - - for (const result of results) { - if (failure(result.errorCode)) { - result.error = createErrorFromCode(result.errorCode) - } - } - return { - throttleTimeMs, - results, - } -} - -const parse = async data => { - return data -} - -module.exports = { - decode, - parse, -} diff --git a/src/protocol/requests/listGroups/index.js b/src/protocol/requests/listGroups/index.js index 4d61c2c64..4ddefbbdd 100644 --- a/src/protocol/requests/listGroups/index.js +++ b/src/protocol/requests/listGroups/index.js @@ -14,11 +14,6 @@ const versions = { const response = require('./v2/response') return { request: request(), response } }, - 3: () => { - const request = require('./v3/request') - const response = require('./v3/response') - return { request: request(), response } - }, } module.exports = { diff --git a/src/protocol/requests/listGroups/v3/request.js b/src/protocol/requests/listGroups/v3/request.js deleted file mode 100644 index 5001f492b..000000000 --- a/src/protocol/requests/listGroups/v3/request.js +++ /dev/null @@ -1,7 +0,0 @@ -const requestV2 = require('../v2/request') - -/** - * ListGroups Request (Version: 3) - */ - -module.exports = () => Object.assign(requestV2(), { apiVersion: 3 }) diff --git a/src/protocol/requests/listGroups/v3/response.js b/src/protocol/requests/listGroups/v3/response.js deleted file mode 100644 index f950381b3..000000000 --- a/src/protocol/requests/listGroups/v3/response.js +++ /dev/null @@ -1,34 +0,0 @@ -const responseV2 = require('../v2/response') -const Decoder = require('../../../decoder') - -/** - * ListGroups Response (Version: 3) => throttle_time_ms error_code [groups] TAG_BUFFER - * throttle_time_ms => INT32 - * error_code => INT16 - * groups => group_id protocol_type TAG_BUFFER - * group_id => COMPACT_STRING - * protocol_type => COMPACT_STRING - */ - -const decodeGroup = decoder => ({ - groupId: decoder.readVarIntString(), - protocolType: decoder.readVarIntString(), -}) - -const decode = async rawData => { - const decoder = new Decoder(rawData) - const throttleTime = decoder.readInt32() - const errorCode = decoder.readInt16() - const groups = decoder.readArray(decodeGroup) - - return { - throttleTime, - errorCode, - groups, - } -} - -module.exports = { - decode, - parse: responseV2.parse, -} From 3176e0736895a7e8523bae7f90fb8f2b399e4bf6 Mon Sep 17 00:00:00 2001 From: Kristjan Tammekivi Date: Mon, 6 Jul 2020 10:59:54 +0300 Subject: [PATCH 15/35] feat: add error property for log entries that happen in eachMessage/eachBatch callbacks --- src/consumer/runner.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/consumer/runner.js b/src/consumer/runner.js index 3ed8caf8b..4f95cdd3f 100644 --- a/src/consumer/runner.js +++ b/src/consumer/runner.js @@ -178,6 +178,7 @@ module.exports = class Runner extends EventEmitter { partition, offset: message.offset, stack: e.stack, + error: e, }) } @@ -245,6 +246,7 @@ module.exports = class Runner extends EventEmitter { partition, offset: batch.firstOffset(), stack: e.stack, + error: e, }) } From deb69716051ebfc03d5f00631b50eefcc506205e Mon Sep 17 00:00:00 2001 From: Andreas Kohn Date: Fri, 3 Jul 2020 16:31:26 +0200 Subject: [PATCH 
16/35] Avoid repeated costly copies of buffers when working with encoders Instead of using `Buffer.concat` whenever writing into the encoder, and thereby copying the byte contents of earlier added content potentially many times, this commit changes the encoder to use a bigger pre-allocated buffer to write into directly. If that buffer fills up, a new one will be created and the old contents copied into that. --- src/protocol/encoder.js | 121 ++++++++++++++++++++++------------- src/protocol/encoder.spec.js | 17 +++++ 2 files changed, 95 insertions(+), 43 deletions(-) diff --git a/src/protocol/encoder.js b/src/protocol/encoder.js index 62758792f..33744cf27 100644 --- a/src/protocol/encoder.js +++ b/src/protocol/encoder.js @@ -54,44 +54,77 @@ module.exports = class Encoder { return Encoder.sizeOfVarInt(size) + size } - constructor() { - this.buffer = Buffer.alloc(0) + static nextPowerOfTwo(value) { + return 1 << (31 - Math.clz32(value) + 1) + } + + /** + * Construct a new encoder with the given initial size + * + * @param {number} [initialSize] initial size + */ + constructor(initialSize = 511) { + this.buf = Buffer.alloc(Encoder.nextPowerOfTwo(initialSize)) + this.offset = 0 + } + + /** + * @param {Buffer} buffer + */ + writeBufferInternal(buffer) { + const bufferLength = buffer.length + this.ensureAvailable(bufferLength) + buffer.copy(this.buf, this.offset, 0) + this.offset += bufferLength + } + + ensureAvailable(length) { + if (this.offset + length > this.buf.length) { + const newLength = Encoder.nextPowerOfTwo(this.offset + length) + const newBuffer = Buffer.alloc(newLength) + this.buf.copy(newBuffer, 0, 0, this.offset) + this.buf = newBuffer + } + } + + get buffer() { + return this.buf.slice(0, this.offset) } writeInt8(value) { - const tempBuffer = Buffer.alloc(INT8_SIZE) - tempBuffer.writeInt8(value) - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.ensureAvailable(INT8_SIZE) + this.buf.writeInt8(value, this.offset) + this.offset += INT8_SIZE return this } writeInt16(value) { - const tempBuffer = Buffer.alloc(INT16_SIZE) - tempBuffer.writeInt16BE(value) - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.ensureAvailable(INT16_SIZE) + this.buf.writeInt16BE(value, this.offset) + this.offset += INT16_SIZE return this } writeInt32(value) { - const tempBuffer = Buffer.alloc(INT32_SIZE) - tempBuffer.writeInt32BE(value) - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.ensureAvailable(INT32_SIZE) + this.buf.writeInt32BE(value, this.offset) + this.offset += INT32_SIZE return this } writeUInt32(value) { - const tempBuffer = Buffer.alloc(INT32_SIZE) - tempBuffer.writeUInt32BE(value) - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.ensureAvailable(INT32_SIZE) + this.buf.writeUInt32BE(value, this.offset) + this.offset += INT32_SIZE return this } writeInt64(value) { - const tempBuffer = Buffer.alloc(INT64_SIZE) + this.ensureAvailable(INT64_SIZE) const longValue = Long.fromValue(value) - tempBuffer.writeInt32BE(longValue.getHighBits(), 0) - tempBuffer.writeInt32BE(longValue.getLowBits(), 4) - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.buf.writeInt32BE(longValue.getHighBits(), this.offset) + this.buf.writeInt32BE(longValue.getLowBits(), this.offset + INT32_SIZE) + this.offset += INT64_SIZE return this } @@ -107,10 +140,10 @@ } const byteLength = Buffer.byteLength(value, 'utf8') + this.ensureAvailable(INT16_SIZE + byteLength) this.writeInt16(byteLength) - const tempBuffer =
Buffer.alloc(byteLength) - tempBuffer.write(value, 0, byteLength, 'utf8') - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.buf.write(value, this.offset, byteLength, 'utf8') + this.offset += byteLength return this } @@ -122,9 +155,9 @@ module.exports = class Encoder { const byteLength = Buffer.byteLength(value, 'utf8') this.writeVarInt(byteLength) - const tempBuffer = Buffer.alloc(byteLength) - tempBuffer.write(value, 0, byteLength, 'utf8') - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.ensureAvailable(byteLength) + this.buf.write(value, this.offset, byteLength, 'utf8') + this.offset += byteLength return this } @@ -136,15 +169,16 @@ module.exports = class Encoder { if (Buffer.isBuffer(value)) { // raw bytes + this.ensureAvailable(INT32_SIZE + value.length) this.writeInt32(value.length) - this.buffer = Buffer.concat([this.buffer, value]) + this.writeBufferInternal(value) } else { const valueToWrite = String(value) const byteLength = Buffer.byteLength(valueToWrite, 'utf8') + this.ensureAvailable(INT32_SIZE + byteLength) this.writeInt32(byteLength) - const tempBuffer = Buffer.alloc(byteLength) - tempBuffer.write(valueToWrite, 0, byteLength, 'utf8') - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.buf.write(valueToWrite, this.offset, byteLength, 'utf8') + this.offset += byteLength } return this @@ -159,37 +193,36 @@ module.exports = class Encoder { if (Buffer.isBuffer(value)) { // raw bytes this.writeVarInt(value.length) - this.buffer = Buffer.concat([this.buffer, value]) + this.writeBufferInternal(value) } else { const valueToWrite = String(value) const byteLength = Buffer.byteLength(valueToWrite, 'utf8') this.writeVarInt(byteLength) - const tempBuffer = Buffer.alloc(byteLength) - tempBuffer.write(valueToWrite, 0, byteLength, 'utf8') - this.buffer = Buffer.concat([this.buffer, tempBuffer]) + this.ensureAvailable(byteLength) + this.buf.write(valueToWrite, this.offset, byteLength, 'utf8') + this.offset += byteLength } return this } writeEncoder(value) { - if (value == null || value.buffer == null) { + if (value == null || !Buffer.isBuffer(value.buf)) { throw new Error('value should be an instance of Encoder') } - return this.writeBuffer(value.buffer) + this.writeBufferInternal(value.buffer) + return this } writeEncoderArray(value) { - if (!Array.isArray(value) || value.some(v => v == null || !Buffer.isBuffer(v.buffer))) { + if (!Array.isArray(value) || value.some(v => v == null || !Buffer.isBuffer(v.buf))) { throw new Error('all values should be an instance of Encoder[]') } - const newBuffer = [this.buffer] value.forEach(v => { - newBuffer.push(v.buffer) + this.writeBufferInternal(v.buffer) }) - this.buffer = Buffer.concat(newBuffer) return this } @@ -198,7 +231,7 @@ module.exports = class Encoder { throw new Error('value should be an instance of Buffer') } - this.buffer = Buffer.concat([this.buffer, value]) + this.writeBufferInternal(value) return this } @@ -210,7 +243,8 @@ module.exports = class Encoder { // A null value is encoded with length of -1 and there are no following bytes // On the context of this library, empty array and null are the same thing const length = array.length !== 0 ? 
array.length : -1 - return this.writeArray(array, type, length) + this.writeArray(array, type, length) + return this } /** @@ -276,7 +310,7 @@ } byteArray.push(encodedValue & OTHER_BITS) - this.buffer = Buffer.concat([this.buffer, Buffer.from(byteArray)]) + this.writeBufferInternal(Buffer.from(byteArray)) return this } @@ -296,12 +330,13 @@ byteArray.push(longValue.toInt()) - this.buffer = Buffer.concat([this.buffer, Buffer.from(byteArray)]) + this.writeBufferInternal(Buffer.from(byteArray)) return this } size() { - return Buffer.byteLength(this.buffer) + // The offset doubles as the size: it is exactly the number of bytes written so far + return this.offset } toJSON() { diff --git a/src/protocol/encoder.spec.js b/src/protocol/encoder.spec.js index 6a1755b71..576f50998 100644 --- a/src/protocol/encoder.spec.js +++ b/src/protocol/encoder.spec.js @@ -280,4 +280,21 @@ describe('Protocol > Encoder', () => { expect(Encoder.sizeOfVarLong(Long.MAX_VALUE)).toEqual(signed64(Long.MAX_VALUE).length) }) }) + + describe('resizing', () => { + it('copies existing content when resizing', () => { + const encoder = new Encoder(4) + encoder.writeBuffer(B(1, 2, 3, 4)) + encoder.writeBuffer(B(5, 6, 7, 8)) + expect(encoder.buffer).toEqual(B(1, 2, 3, 4, 5, 6, 7, 8)) + }) + it('obeys offset when resizing', () => { + const encoder = new Encoder(4) + // Only two bytes in, ... + encoder.writeBuffer(B(1, 2)) + // ... but this write will require resizing + encoder.writeBuffer(B(5, 6, 7, 8)) + expect(encoder.buffer).toEqual(B(1, 2, 5, 6, 7, 8)) + }) + }) })
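To make the growth strategy above concrete, here is a small standalone sketch (editorial illustration, not code from the patch series) of how `nextPowerOfTwo` drives the buffer capacity; the helper is copied from the diff above, the sample values are assumptions:

```javascript
// Mirrors Encoder.nextPowerOfTwo from the diff above.
// Math.clz32(n) counts the leading zero bits of n as a 32-bit integer, so
// 31 - Math.clz32(n) is the index of the highest set bit, and shifting one
// position past it yields the next power of two above that bit.
const nextPowerOfTwo = value => 1 << (31 - Math.clz32(value) + 1)

nextPowerOfTwo(511) // => 512, the default initial capacity
nextPowerOfTwo(512) // => 1024, an exact power of two is still doubled
nextPowerOfTwo(700) // => 1024

// Because the capacity grows geometrically, appending n bytes one write at
// a time costs O(n) amortized copying, instead of the O(n^2) total copying
// of the old Buffer.concat-per-write approach.
```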
From b0226af89b5c1fdec4cdd02efab8c69337ab4ac8 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Sat, 28 Mar 2020 11:21:47 +0100 Subject: [PATCH 17/35] Add support for OAUTHBEARER request/response --- src/broker/saslAuthenticator/index.js | 2 + src/broker/saslAuthenticator/oauthBearer.js | 50 +++++++++++++++++++++ src/protocol/sasl/oauthBearer/index.js | 4 ++ src/protocol/sasl/oauthBearer/request.js | 49 ++++++++++++++++++++ src/protocol/sasl/oauthBearer/response.js | 4 ++ 5 files changed, 109 insertions(+) create mode 100644 src/broker/saslAuthenticator/oauthBearer.js create mode 100644 src/protocol/sasl/oauthBearer/index.js create mode 100644 src/protocol/sasl/oauthBearer/request.js create mode 100644 src/protocol/sasl/oauthBearer/response.js diff --git a/src/broker/saslAuthenticator/index.js b/src/broker/saslAuthenticator/index.js index f558fbe2f..97813ceb8 100644 --- a/src/broker/saslAuthenticator/index.js +++ b/src/broker/saslAuthenticator/index.js @@ -4,6 +4,7 @@ const PlainAuthenticator = require('./plain') const SCRAM256Authenticator = require('./scram256') const SCRAM512Authenticator = require('./scram512') const AWSIAMAuthenticator = require('./awsIam') +const OAuthBearerAuthenticator = require('./oauthBearer') const { KafkaJSSASLAuthenticationError } = require('../../errors') const AUTHENTICATORS = { @@ -11,6 +12,7 @@ const AUTHENTICATORS = { 'SCRAM-SHA-256': SCRAM256Authenticator, 'SCRAM-SHA-512': SCRAM512Authenticator, AWS: AWSIAMAuthenticator, + OAUTHBEARER: OAuthBearerAuthenticator, } const SUPPORTED_MECHANISMS = Object.keys(AUTHENTICATORS) diff --git a/src/broker/saslAuthenticator/oauthBearer.js b/src/broker/saslAuthenticator/oauthBearer.js new file mode 100644 index 000000000..61543509f --- /dev/null +++ b/src/broker/saslAuthenticator/oauthBearer.js @@ -0,0 +1,50 @@ +const oauthBearer = require('../../protocol/sasl/oauthBearer') +const { KafkaJSSASLAuthenticationError } = require('../../errors') + +module.exports = class OAuthBearerAuthenticator { + constructor(connection, logger, saslAuthenticate) { + this.connection = connection + this.logger = logger.namespace('SASLOAuthBearerAuthenticator') + this.saslAuthenticate = saslAuthenticate + } + + async authenticate() { + const { sasl } = this.connection + if (sasl.oauthBearerResolver == null) { + throw new KafkaJSSASLAuthenticationError( + 'SASL OAUTHBEARER: Missing OAuth Bearer Token resolver' + ) + } + + const { oauthBearerResolver } = sasl + + const oauthBearerToken = await oauthBearerResolver() + + if ( + oauthBearerToken.value == null || + oauthBearerToken.scope == null || + oauthBearerToken.lifetimeMs == null || + oauthBearerToken.principalName == null || + oauthBearerToken.startTimeMs == null + ) { + throw new KafkaJSSASLAuthenticationError('SASL OAUTHBEARER: Invalid OAuth Bearer Token') + } + + const request = await oauthBearer.request(sasl, oauthBearerToken) + const response = oauthBearer.response + const { host, port } = this.connection + const broker = `${host}:${port}` + + try { + this.logger.debug('Authenticate with SASL OAUTHBEARER', { broker }) + await this.saslAuthenticate({ request, response }) + this.logger.debug('SASL OAUTHBEARER authentication successful', { broker }) + } catch (e) { + const error = new KafkaJSSASLAuthenticationError( + `SASL OAUTHBEARER authentication failed: ${e.message}` + ) + this.logger.error(error.message, { broker }) + throw error + } + } +} diff --git a/src/protocol/sasl/oauthBearer/index.js b/src/protocol/sasl/oauthBearer/index.js new file mode 100644 index 000000000..ecb3c9ab8 --- /dev/null +++ b/src/protocol/sasl/oauthBearer/index.js @@ -0,0 +1,4 @@ +module.exports = { + request: require('./request'), + response: require('./response'), +} diff --git a/src/protocol/sasl/oauthBearer/request.js b/src/protocol/sasl/oauthBearer/request.js new file mode 100644 index 000000000..c30ea926b --- /dev/null +++ b/src/protocol/sasl/oauthBearer/request.js @@ -0,0 +1,49 @@ +/** + * http://www.ietf.org/rfc/rfc5801.txt + * + * See org.apache.kafka.common.security.oauthbearer.internals.OAuthBearerClientInitialResponse + * for the official Java client implementation. + * + * The mechanism consists of a message from the client to the server. + * The client sends the "n," GS header, followed by the authorizationIdentity + * prefixed by "a=" (if present), followed by ",", followed by a US-ASCII SOH + * character, followed by "auth=Bearer ", followed by the token value and then + * closed by two additional US-ASCII SOH characters. + * The client may leave the authorization identity empty to + * indicate that it is the same as the authentication identity. + * + * The server will verify the authentication token and verify that the + * authentication credentials permit the client to login as the authorization + * identity. If both steps succeed, the user is logged in. + * + * KafkaJS does not otherwise expose SASL extensions, but as the + * Java implementation of OAUTHBEARER includes them, they were also introduced here, + * as it's just a matter of key-value concatenation. + * Between the token and the closing SOH characters, the SASL extension map can be + * listed in OAuth "friendly" format: for each extension, concatenate the + * extension entry key, followed by "=", followed by the extension entry value, + * followed by a US-ASCII SOH character. + */ + +const Encoder = require('../../encoder') + +const SEPARATOR = '\u0001' // SOH - Start Of Header ASCII + +module.exports = async ({ authorizationIdentity = null, extensions = {} }, oauthBearerToken) => { + const authzid = authorizationIdentity == null ? '' : `"a=${authorizationIdentity}` + let ext = '' + for (const k in extensions) { + ext += `${k}=${extensions[k]}${SEPARATOR}` + } + if (ext.length > 0) { + ext = `${SEPARATOR}${ext}` + } + + const oauthMsg = `n,${authzid},${SEPARATOR}auth=Bearer ${oauthBearerToken.value}${ext}${SEPARATOR}${SEPARATOR}` + + return { + encode: async () => { + return new Encoder().writeBytes(Buffer.from(oauthMsg)) + }, + } +} diff --git a/src/protocol/sasl/oauthBearer/response.js b/src/protocol/sasl/oauthBearer/response.js new file mode 100644 index 000000000..234bb8515 --- /dev/null +++ b/src/protocol/sasl/oauthBearer/response.js @@ -0,0 +1,4 @@ +module.exports = { + decode: async () => true, + parse: async () => true, +}
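As a worked illustration of the message layout implemented by `request.js` above (the token value is hypothetical, and this snippet is not part of the patch series), the client initial response for token `abc`, no authorization identity, and no extensions comes out as:

```javascript
// Built exactly like oauthMsg in request.js above, for a sample token.
const SEPARATOR = '\u0001' // US-ASCII SOH

// The authorization identity is absent, so the GS2 header is just 'n,,';
// one SOH precedes 'auth=Bearer <token>' and two SOHs close the message.
const oauthMsg = `n,,${SEPARATOR}auth=Bearer abc${SEPARATOR}${SEPARATOR}`
// => 'n,,\u0001auth=Bearer abc\u0001\u0001'
```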
From 4972a0fedb73fbb704064eeebef638ee2a6a7b8a Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Sat, 28 Mar 2020 15:51:16 +0100 Subject: [PATCH 18/35] Remove checks over unneeded token props --- src/broker/saslAuthenticator/oauthBearer.js | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/src/broker/saslAuthenticator/oauthBearer.js b/src/broker/saslAuthenticator/oauthBearer.js index 61543509f..48f7dd320 100644 --- a/src/broker/saslAuthenticator/oauthBearer.js +++ b/src/broker/saslAuthenticator/oauthBearer.js @@ -20,13 +20,7 @@ module.exports = class OAuthBearerAuthenticator { const oauthBearerToken = await oauthBearerResolver() - if ( - oauthBearerToken.value == null || - oauthBearerToken.scope == null || - oauthBearerToken.lifetimeMs == null || - oauthBearerToken.principalName == null || - oauthBearerToken.startTimeMs == null - ) { + if (oauthBearerToken.value == null) { throw new KafkaJSSASLAuthenticationError('SASL OAUTHBEARER: Invalid OAuth Bearer Token') } From 72f1e8c365f001ac8c7c4018d8473ab9ec0f9fc1 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Sat, 28 Mar 2020 15:51:44 +0100 Subject: [PATCH 19/35] Rename oauthBearerResolver to oauthBearerProvider --- src/broker/saslAuthenticator/oauthBearer.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/broker/saslAuthenticator/oauthBearer.js b/src/broker/saslAuthenticator/oauthBearer.js index 48f7dd320..3a6452bcf 100644 --- a/src/broker/saslAuthenticator/oauthBearer.js +++ b/src/broker/saslAuthenticator/oauthBearer.js @@ -10,15 +10,15 @@ module.exports = class OAuthBearerAuthenticator { async authenticate() { const { sasl } = this.connection - if (sasl.oauthBearerResolver == null) { + if (sasl.oauthBearerProvider == null) { throw new KafkaJSSASLAuthenticationError( - 'SASL OAUTHBEARER: Missing OAuth Bearer Token resolver' + 'SASL OAUTHBEARER: Missing OAuth Bearer Token provider' ) } - const { oauthBearerResolver } = sasl + const { oauthBearerProvider } = sasl - const oauthBearerToken = await oauthBearerResolver() + const oauthBearerToken = await oauthBearerProvider() if (oauthBearerToken.value == null) { throw new KafkaJSSASLAuthenticationError('SASL OAUTHBEARER: Invalid OAuth Bearer Token') }
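The comment added in the next patch spells out the `oauthBearerProvider` contract, including that implementations should reuse and refresh tokens. A minimal sketch of such a provider (editorial illustration only: the `jsonwebtoken` usage mirrors the unsecured-token examples later in this series, and the lifetime and refresh margin are assumed values):

```javascript
const jwt = require('jsonwebtoken')

// Sketch of a provider that caches its token and refreshes it shortly
// before expiry. The authenticator calls the provider on every SASL
// authentication, so caching avoids minting a new token per reconnect.
const createTokenProvider = ({ lifetimeMs = 60 * 1000, refreshMarginMs = 5 * 1000 } = {}) => {
  let cached = null // { value, expiresAt }

  return async function oauthBearerProvider() {
    const now = Date.now()

    if (cached == null || now >= cached.expiresAt - refreshMarginMs) {
      // Unsecured token, as in the test helpers; a real provider would
      // obtain this from an OAuth endpoint instead.
      const value = jwt.sign({ sub: 'test' }, 'abc', { algorithm: 'none' })
      cached = { value, expiresAt: now + lifetimeMs }
    }

    return { value: cached.value }
  }
}
```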
From b2df4f84d61791e50f970f19758eb07ce223fb89 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Sat, 28 Mar 2020 15:52:07 +0100 Subject: [PATCH 20/35] Add comments --- src/broker/saslAuthenticator/oauthBearer.js | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/broker/saslAuthenticator/oauthBearer.js b/src/broker/saslAuthenticator/oauthBearer.js index 3a6452bcf..0dd27a0ea 100644 --- a/src/broker/saslAuthenticator/oauthBearer.js +++ b/src/broker/saslAuthenticator/oauthBearer.js @@ -1,3 +1,15 @@ +/** + * The sasl object must include a property named oauthBearerProvider, an + * async function that is used to return the OAuth bearer token. + * + * The OAuth bearer token must be an object with properties value and + * (optionally) extensions, that will be sent during the SASL/OAUTHBEARER + * request. + * + * The implementation of the oauthBearerProvider must take care that tokens are + * reused and refreshed when appropriate. + */ + const oauthBearer = require('../../protocol/sasl/oauthBearer') const { KafkaJSSASLAuthenticationError } = require('../../errors') From da8d00e0cb9107726338c7264c2cc98e5bb6fb6d Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Sat, 28 Mar 2020 15:52:45 +0100 Subject: [PATCH 21/35] Correct extensions handling --- src/protocol/sasl/oauthBearer/request.js | 41 ++++++++++++++++-------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/src/protocol/sasl/oauthBearer/request.js b/src/protocol/sasl/oauthBearer/request.js index c30ea926b..fac9b86e8 100644 --- a/src/protocol/sasl/oauthBearer/request.js +++ b/src/protocol/sasl/oauthBearer/request.js @@ -7,34 +7,47 @@ * The mechanism consists of a message from the client to the server. * The client sends the "n," GS header, followed by the authorizationIdentity * prefixed by "a=" (if present), followed by ",", followed by a US-ASCII SOH - * character, followed by "auth=Bearer ", followed by the token value and then - * closed by two additional US-ASCII SOH characters. + * character, followed by "auth=Bearer ", followed by the token value, followed + * by a US-ASCII SOH character, followed by SASL extensions in OAuth "friendly" + * format and then closed by two additional US-ASCII SOH characters. + * + * SASL extensions are optional and must be expressed as key-value pairs in an + * object. Each entry is rendered as the extension key, followed + * by "=", followed by the extension value. Extensions are separated by a + * US-ASCII SOH character. If no extensions are present, their part of the + * message, including the leading US-ASCII SOH character, is omitted. + * * The client may leave the authorization identity empty to * indicate that it is the same as the authentication identity. * * The server will verify the authentication token and verify that the * authentication credentials permit the client to login as the authorization * identity. If both steps succeed, the user is logged in. - * - * KafkaJS does not otherwise expose SASL extensions, but as the - * Java implementation of OAUTHBEARER includes them, they were also introduced here, - * as it's just a matter of key-value concatenation. - * Between the token and the closing SOH characters, the SASL extension map can be - * listed in OAuth "friendly" format: for each extension, concatenate the - * extension entry key, followed by "=", followed by the extension entry value, - * followed by a US-ASCII SOH character. */ const Encoder = require('../../encoder') const SEPARATOR = '\u0001' // SOH - Start Of Header ASCII -module.exports = async ({ authorizationIdentity = null, extensions = {} }, oauthBearerToken) => { - const authzid = authorizationIdentity == null ? '' : `"a=${authorizationIdentity}` - let ext = '' +function formatExtensions(extensions) { + let msg = '' + + if (extensions == null) { + return msg + } + + let prefix = '' for (const k in extensions) { - ext += `${k}=${extensions[k]}${SEPARATOR}` + msg += `${prefix}${k}=${extensions[k]}` + prefix = SEPARATOR } + + return msg +} + +module.exports = async ({ authorizationIdentity = null }, oauthBearerToken) => { + const authzid = authorizationIdentity == null ? '' : `"a=${authorizationIdentity}` + let ext = formatExtensions(oauthBearerToken.extensions) if (ext.length > 0) { ext = `${SEPARATOR}${ext}` }
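With the corrected handling above, extensions are joined with single separators and no trailing one, so exactly two SOH characters close the message. An editorial example (hypothetical extension values, not part of the patch):

```javascript
// What the corrected request.js produces for token 't' and
// extensions { traceId: 'abc', spanId: 'def' }.
const SEPARATOR = '\u0001' // US-ASCII SOH

const ext = `traceId=abc${SEPARATOR}spanId=def` // formatExtensions output
const oauthMsg = `n,,${SEPARATOR}auth=Bearer t${SEPARATOR}${ext}${SEPARATOR}${SEPARATOR}`
// => 'n,,\u0001auth=Bearer t\u0001traceId=abc\u0001spanId=def\u0001\u0001'
```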
'' : `"a=${authorizationIdentity}` - let ext = '' +function formatExtensions(extensions) { + let msg = '' + + if (extensions == null) { + return msg + } + + let prefix = '' for (const k in extensions) { - ext += `${k}=${extensions[k]}${SEPARATOR}` + msg += `${prefix}${k}=${extensions[k]}` + prefix = SEPARATOR } + + return msg +} + +module.exports = async ({ authorizationIdentity = null }, oauthBearerToken) => { + const authzid = authorizationIdentity == null ? '' : `"a=${authorizationIdentity}` + let ext = formatExtensions(oauthBearerToken.extensions) if (ext.length > 0) { ext = `${SEPARATOR}${ext}` } From d06acb0c26d970c6ea2d1fcf9aa11bc7401fbfbf Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Sat, 28 Mar 2020 15:59:41 +0100 Subject: [PATCH 22/35] Add OAUTHBEARER spec --- .../saslAuthenticator/oauthBearer.spec.js | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 src/broker/saslAuthenticator/oauthBearer.spec.js diff --git a/src/broker/saslAuthenticator/oauthBearer.spec.js b/src/broker/saslAuthenticator/oauthBearer.spec.js new file mode 100644 index 000000000..7bf1b08a0 --- /dev/null +++ b/src/broker/saslAuthenticator/oauthBearer.spec.js @@ -0,0 +1,18 @@ +const { newLogger } = require('testHelpers') +const OAuthBearer = require('./oauthBearer') + +describe('Broker > SASL Authenticator > OAUTHBEARER', () => { + it('throws KafkaJSSASLAuthenticationError for missing oauthBearerProvider', async () => { + const oauthBearer = new OAuthBearer({ sasl: {} }, newLogger()) + await expect(oauthBearer.authenticate()).rejects.toThrow('Missing OAuth bearer token provider') + }) + + it('throws KafkaJSSASLAuthenticationError for invalid OAuth bearer token', async () => { + async function oauthBearerProvider() { + return {} + } + + const oauthBearer = new OAuthBearer({ sasl: { oauthBearerProvider } }, newLogger()) + await expect(oauthBearer.authenticate()).rejects.toThrow('Invalid OAuth bearer token') + }) +}) From 707b719e5e88f9d0a2a3b7f6eb1f768043662569 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Sat, 28 Mar 2020 16:00:09 +0100 Subject: [PATCH 23/35] Correct exception message case --- src/broker/saslAuthenticator/oauthBearer.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/broker/saslAuthenticator/oauthBearer.js b/src/broker/saslAuthenticator/oauthBearer.js index 0dd27a0ea..2ede35674 100644 --- a/src/broker/saslAuthenticator/oauthBearer.js +++ b/src/broker/saslAuthenticator/oauthBearer.js @@ -24,7 +24,7 @@ module.exports = class OAuthBearerAuthenticator { const { sasl } = this.connection if (sasl.oauthBearerProvider == null) { throw new KafkaJSSASLAuthenticationError( - 'SASL OAUTHBEARER: Missing OAuth Bearer Token provider' + 'SASL OAUTHBEARER: Missing OAuth bearer token provider' ) } @@ -33,7 +33,7 @@ module.exports = class OAuthBearerAuthenticator { const oauthBearerToken = await oauthBearerProvider() if (oauthBearerToken.value == null) { - throw new KafkaJSSASLAuthenticationError('SASL OAUTHBEARER: Invalid OAuth Bearer Token') + throw new KafkaJSSASLAuthenticationError('SASL OAUTHBEARER: Invalid OAuth bearer token') } const request = await oauthBearer.request(sasl, oauthBearerToken) From 8ad335b6cc8472bc2117847ef9c5bc1da28ca24d Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Mon, 30 Mar 2020 21:29:54 +0200 Subject: [PATCH 24/35] Add OAUTHBEARER example --- docs/Configuration.md | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/docs/Configuration.md b/docs/Configuration.md index 
c5ea75196..869e5adc4 100644 --- a/docs/Configuration.md +++ b/docs/Configuration.md @@ -66,6 +66,41 @@ new Kafka({ }) ``` +### OAUTHBEARER Example + +```javascript +new Kafka({ + clientId: 'my-app', + brokers: ['kafka1:9092', 'kafka2:9092'], + // authenticationTimeout: 1000, + // reauthenticationThreshold: 10000, + ssl: true, + sasl: { + mechanism: 'oauthbearer', + oauthBearerProvider: async () => { + // Use an unsecured token... + const token = jwt.sign({ sub: 'test' }, 'abc', { algorithm: 'none' }) + + // ...or, more realistically, grab the token from some OAuth endpoint + + return { + value: token + } + } + }, +}) +``` + +The sasl object must include a property named oauthBearerProvider, an +async function that is used to return the OAuth bearer token. + +The OAuth bearer token must be an object with properties value and +(optionally) extensions, that will be sent during the SASL/OAUTHBEARER +request. + +The implementation of the oauthBearerProvider must take care that tokens are +reused and refreshed when appropriate. + ### AWS IAM Example ```javascript From 89f8ea59ecf9106c03b105a897d260110e86b062 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Mon, 30 Mar 2020 19:03:11 +0200 Subject: [PATCH 25/35] Add test for SASL/OAUTHBEARER --- src/consumer/__tests__/connection.spec.js | 14 ++++++++++++++ testHelpers/index.js | 22 ++++++++++++++++++++++ testHelpers/kafka/server-jaas.conf | 5 +++++ 3 files changed, 41 insertions(+) diff --git a/src/consumer/__tests__/connection.spec.js b/src/consumer/__tests__/connection.spec.js index 15d3ed52c..59fbc4d94 100644 --- a/src/consumer/__tests__/connection.spec.js +++ b/src/consumer/__tests__/connection.spec.js @@ -10,6 +10,7 @@ const { sslConnectionOpts, saslSCRAM256ConnectionOpts, saslSCRAM512ConnectionOpts, + saslOAuthBearerConnectionOpts, sslBrokers, saslBrokers, waitFor, @@ -92,6 +93,19 @@ describe('Consumer', () => { await consumer.connect() }) + test('support SASL OAUTHBEARER connections', async () => { + cluster = createCluster(saslOAuthBearerConnectionOpts(), saslBrokers()) + + consumer = createConsumer({ + cluster, + groupId, + maxWaitTimeInMs: 1, + logger: newLogger(), + }) + + await consumer.connect() + }) + test('reconnects the cluster if disconnected', async () => { consumer = createConsumer({ cluster, diff --git a/testHelpers/index.js b/testHelpers/index.js index 1b3d2ae5b..6e1626d08 100644 --- a/testHelpers/index.js +++ b/testHelpers/index.js @@ -79,6 +79,27 @@ const saslSCRAM512ConnectionOpts = () => }, }) +const saslOAuthBearerConnectionOpts = () => + Object.assign(sslConnectionOpts(), { + port: 9094, + sasl: { + mechanism: 'oauthbearer', + oauthBearerProvider: () => { + let sig = { alg: 'none' } + sig = Buffer.from(JSON.stringify(sig)).toString('base64') + + let payload = { sub: 'test', scope: 'KAFKAJS' } + payload = Buffer.from(JSON.stringify(payload)).toString('base64') + + const token = `${sig}.${payload}` + + return { + value: token, + } + }, + }, + }) + const createConnection = (opts = {}) => new Connection(Object.assign(connectionOpts(), opts)) const createConnectionBuilder = (opts = {}, brokers = plainTextBrokers()) => { @@ -237,6 +258,7 @@ module.exports = { saslConnectionOpts, saslSCRAM256ConnectionOpts, saslSCRAM512ConnectionOpts, + saslOAuthBearerConnectionOpts, createConnection, createConnectionBuilder, createCluster, diff --git a/testHelpers/kafka/server-jaas.conf b/testHelpers/kafka/server-jaas.conf index d3fcdf782..f30f68e54 100644 --- a/testHelpers/kafka/server-jaas.conf +++ b/testHelpers/kafka/server-jaas.conf @@ 
-7,6 +7,11 @@ KafkaServer { org.apache.kafka.common.security.scram.ScramLoginModule required username="admin" password="adminadmin"; + + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required + unsecuredLoginStringClaim_sub="admin" + unsecuredLoginListClaim_scope=",KAFKA_BROKER,KAFKAJS" + unsecuredValidatorRequiredScope="KAFKAJS"; }; KafkaClient { org.apache.kafka.common.security.plain.PlainLoginModule required From 2dd0ce65125afc948b43069eaf65238100e2e48c Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Mon, 30 Mar 2020 21:17:21 +0200 Subject: [PATCH 26/35] Fix OAuth bearer token creation --- package.json | 1 + testHelpers/index.js | 9 ++------- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/package.json b/package.json index ebd521f9c..bfca5f2aa 100644 --- a/package.json +++ b/package.json @@ -62,6 +62,7 @@ "jest-circus": "^25.1.0", "jest-extended": "^0.11.2", "jest-junit": "^10.0.0", + "jsonwebtoken": "^8.5.1", "lint-staged": "^9.2.0", "mockdate": "^2.0.5", "prettier": "^1.18.2", diff --git a/testHelpers/index.js b/testHelpers/index.js index 6e1626d08..ce04c457b 100644 --- a/testHelpers/index.js +++ b/testHelpers/index.js @@ -3,6 +3,7 @@ const execa = require('execa') const uuid = require('uuid/v4') const semver = require('semver') const crypto = require('crypto') +const jwt = require('jsonwebtoken') const Cluster = require('../src/cluster') const waitFor = require('../src/utils/waitFor') const connectionBuilder = require('../src/cluster/connectionBuilder') @@ -85,13 +86,7 @@ const saslOAuthBearerConnectionOpts = () => sasl: { mechanism: 'oauthbearer', oauthBearerProvider: () => { - let sig = { alg: 'none' } - sig = Buffer.from(JSON.stringify(sig)).toString('base64') - - let payload = { sub: 'test', scope: 'KAFKAJS' } - payload = Buffer.from(JSON.stringify(payload)).toString('base64') - - const token = `${sig}.${payload}` + const token = jwt.sign({ sub: 'test' }, 'abc', { algorithm: 'none' }) return { value: token, From d710839efaa415f40752f1218f6c7435537f260d Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Mon, 30 Mar 2020 21:19:05 +0200 Subject: [PATCH 27/35] Remove required scope --- testHelpers/kafka/server-jaas.conf | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/testHelpers/kafka/server-jaas.conf b/testHelpers/kafka/server-jaas.conf index f30f68e54..eebcca32f 100644 --- a/testHelpers/kafka/server-jaas.conf +++ b/testHelpers/kafka/server-jaas.conf @@ -1,5 +1,5 @@ KafkaServer { - org.apache.kafka.common.security.plain.PlainLoginModule required + org.apache.kafka.common.security.plain.PlainLoginModule required username="test" password="testtest" user_test="testtest"; @@ -9,9 +9,7 @@ KafkaServer { password="adminadmin"; org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required - unsecuredLoginStringClaim_sub="admin" - unsecuredLoginListClaim_scope=",KAFKA_BROKER,KAFKAJS" - unsecuredValidatorRequiredScope="KAFKAJS"; + unsecuredLoginStringClaim_sub="admin"; }; KafkaClient { org.apache.kafka.common.security.plain.PlainLoginModule required From 33ade4b1d76535ec357a67ef41bb3656bcbebcf0 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Tue, 2 Jun 2020 07:56:42 +0200 Subject: [PATCH 28/35] Update docs/Configuration.md Co-authored-by: Tommy Brunn --- docs/Configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Configuration.md b/docs/Configuration.md index 869e5adc4..7f59eb235 100644 --- a/docs/Configuration.md +++ b/docs/Configuration.md @@ -91,7 +91,7 @@ new Kafka({ }) 
``` -The sasl object must include a property named oauthBearerProvider, an +The `sasl` object must include a property named `oauthBearerProvider`, an async function that is used to return the OAuth bearer token. The OAuth bearer token must be an object with properties value and From 73ccec1b705132fed9eae9d81b9f5d310df6f7e9 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Tue, 2 Jun 2020 23:47:13 +0200 Subject: [PATCH 29/35] Make OAUTHBEARER tests conditional --- docker-compose.2_3_oauthbearer.yml | 146 +++++++++++++ src/admin/__tests__/connection.spec.js | 41 ++-- src/broker/__tests__/connect.spec.js | 255 ++++++++++++++-------- src/broker/__tests__/disconnect.spec.js | 68 +++--- src/consumer/__tests__/connection.spec.js | 96 ++++---- src/producer/index.spec.js | 133 +++++------ testHelpers/index.js | 16 ++ testHelpers/kafka/server-jaas.conf | 5 +- testHelpers/kafka/server-jaas_oauth.conf | 22 ++ 9 files changed, 544 insertions(+), 238 deletions(-) create mode 100644 docker-compose.2_3_oauthbearer.yml create mode 100644 testHelpers/kafka/server-jaas_oauth.conf diff --git a/docker-compose.2_3_oauthbearer.yml b/docker-compose.2_3_oauthbearer.yml new file mode 100644 index 000000000..8dec95330 --- /dev/null +++ b/docker-compose.2_3_oauthbearer.yml @@ -0,0 +1,146 @@ +version: '2' +services: + zookeeper: + image: confluentinc/cp-zookeeper:latest + hostname: zookeeper + container_name: zookeeper + ports: + - "2181:2181" + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + ZOOKEEPER_TICK_TIME: 2000 + KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/server-jaas.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider" + volumes: + - ./testHelpers/kafka/server-jaas_oauth.conf:/etc/kafka/server-jaas.conf + + kafka1: + image: confluentinc/cp-kafka:5.3.1 + hostname: kafka1 + container_name: kafka1 + labels: + - "custom.project=kafkajs" + - "custom.service=kafka1" + depends_on: + - zookeeper + ports: + - "29092:29092" + - "9092:9092" + - "29093:29093" + - "9093:9093" + - "29094:29094" + - "9094:9094" + environment: + KAFKA_BROKER_ID: 0 + KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9092,SSL://kafka1:29093,SSL_HOST://localhost:9093,SASL_SSL://kafka1:29094,SASL_SSL_HOST://localhost:9094 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_DELETE_TOPIC_ENABLE: 'true' + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_SSL_KEYSTORE_FILENAME: "kafka.server.keystore.jks" + KAFKA_SSL_KEYSTORE_CREDENTIALS: "keystore_creds" + KAFKA_SSL_KEY_CREDENTIALS: "sslkey_creds" + KAFKA_SSL_TRUSTSTORE_FILENAME: "kafka.server.truststore.jks" + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: "truststore_creds" + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: "PLAIN" + KAFKA_SASL_ENABLED_MECHANISMS: "OAUTHBEARER" + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf" + # suppress verbosity + # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template + KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks + - 
./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds + - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds + - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf + + kafka2: + image: confluentinc/cp-kafka:5.3.1 + hostname: kafka2 + container_name: kafka2 + labels: + - "custom.project=kafkajs" + - "custom.service=kafka2" + depends_on: + - zookeeper + ports: + - "29095:29095" + - "9095:9095" + - "29096:29096" + - "9096:9096" + - "29097:29097" + - "9097:9097" + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka2:29095,PLAINTEXT_HOST://localhost:9095,SSL://kafka2:29096,SSL_HOST://localhost:9096,SASL_SSL://kafka2:29097,SASL_SSL_HOST://localhost:9097 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_DELETE_TOPIC_ENABLE: 'true' + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_SSL_KEYSTORE_FILENAME: "kafka.server.keystore.jks" + KAFKA_SSL_KEYSTORE_CREDENTIALS: "keystore_creds" + KAFKA_SSL_KEY_CREDENTIALS: "sslkey_creds" + KAFKA_SSL_TRUSTSTORE_FILENAME: "kafka.server.truststore.jks" + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: "truststore_creds" + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: "PLAIN" + KAFKA_SASL_ENABLED_MECHANISMS: "OAUTHBEARER" + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf" + # suppress verbosity + # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template + KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks + - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds + - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds + - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf + + kafka3: + image: confluentinc/cp-kafka:5.3.1 + hostname: kafka3 + container_name: kafka3 + labels: + - "custom.project=kafkajs" + - "custom.service=kafka3" + depends_on: + - zookeeper + ports: + - "29098:29098" + - "9098:9098" + - "29099:29099" + - "9099:9099" + - "29100:29100" + - "9100:9100" + environment: + KAFKA_BROKER_ID: 2 + KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka3:29098,PLAINTEXT_HOST://localhost:9098,SSL://kafka3:29099,SSL_HOST://localhost:9099,SASL_SSL://kafka3:29100,SASL_SSL_HOST://localhost:9100 + KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' + KAFKA_DELETE_TOPIC_ENABLE: 'true' + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_SSL_KEYSTORE_FILENAME: "kafka.server.keystore.jks" + KAFKA_SSL_KEYSTORE_CREDENTIALS: "keystore_creds" + KAFKA_SSL_KEY_CREDENTIALS: "sslkey_creds" + KAFKA_SSL_TRUSTSTORE_FILENAME: 
"kafka.server.truststore.jks" + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: "truststore_creds" + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: "PLAIN" + KAFKA_SASL_ENABLED_MECHANISMS: "OAUTHBEARER" + KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf" + # suppress verbosity + # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template + KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO" + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks + - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds + - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds + - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf \ No newline at end of file diff --git a/src/admin/__tests__/connection.spec.js b/src/admin/__tests__/connection.spec.js index 662e6819e..fab6670de 100644 --- a/src/admin/__tests__/connection.spec.js +++ b/src/admin/__tests__/connection.spec.js @@ -5,10 +5,13 @@ const { saslConnectionOpts, saslSCRAM256ConnectionOpts, saslSCRAM512ConnectionOpts, + saslOAuthBearerConnectionOpts, createCluster, sslBrokers, saslBrokers, newLogger, + describeIfOauthbearerEnabled, + describeIfOauthbearerDisabled, } = require('testHelpers') describe('Admin', () => { @@ -25,21 +28,31 @@ describe('Admin', () => { await admin.connect() }) - test('support SASL PLAIN connections', async () => { - const cluster = createCluster(saslConnectionOpts(), saslBrokers()) - admin = createAdmin({ cluster, logger: newLogger() }) - await admin.connect() - }) - - test('support SASL SCRAM 256 connections', async () => { - const cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) - admin = createAdmin({ cluster, logger: newLogger() }) - await admin.connect() + describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { + test('support SASL PLAIN connections', async () => { + const cluster = createCluster(saslConnectionOpts(), saslBrokers()) + admin = createAdmin({ cluster, logger: newLogger() }) + await admin.connect() + }) + + test('support SASL SCRAM 256 connections', async () => { + const cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) + admin = createAdmin({ cluster, logger: newLogger() }) + await admin.connect() + }) + + test('support SASL SCRAM 512 connections', async () => { + const cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) + admin = createAdmin({ cluster, logger: newLogger() }) + await admin.connect() + }) }) - test('support SASL SCRAM 512 connections', async () => { - const cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) - admin = createAdmin({ cluster, logger: newLogger() }) - await admin.connect() + describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { + test('support SASL OAUTHBEARER connections', async () => { + const cluster = createCluster(saslOAuthBearerConnectionOpts(), saslBrokers()) + admin = createAdmin({ cluster, logger: newLogger() }) + await admin.connect() + }) }) }) diff --git a/src/broker/__tests__/connect.spec.js b/src/broker/__tests__/connect.spec.js index b13b6ea30..ece8aae9f 100644 --- 
a/src/broker/__tests__/connect.spec.js +++ b/src/broker/__tests__/connect.spec.js @@ -4,8 +4,11 @@ const { saslConnectionOpts, saslSCRAM256ConnectionOpts, saslSCRAM512ConnectionOpts, + saslOAuthBearerConnectionOpts, newLogger, testIfKafka_1_1_0, + describeIfOauthbearerEnabled, + describeIfOauthbearerDisabled, } = require('testHelpers') const Long = require('long') @@ -36,53 +39,69 @@ describe('Broker > connect', () => { expect(broker.versions).toBeTruthy() }) - test('authenticate with SASL PLAIN if configured', async () => { - broker = new Broker({ - connection: createConnection(saslConnectionOpts()), - logger: newLogger(), + describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { + test('authenticate with SASL PLAIN if configured', async () => { + broker = new Broker({ + connection: createConnection(saslConnectionOpts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + expect(broker.isConnected()).toEqual(true) }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - test('authenticate with SASL SCRAM 256 if configured', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM256ConnectionOpts()), - logger: newLogger(), + test('authenticate with SASL SCRAM 256 if configured', async () => { + broker = new Broker({ + connection: createConnection(saslSCRAM256ConnectionOpts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + expect(broker.isConnected()).toEqual(true) }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - test('authenticate with SASL SCRAM 512 if configured', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM512ConnectionOpts()), - logger: newLogger(), + test('authenticate with SASL SCRAM 512 if configured', async () => { + broker = new Broker({ + connection: createConnection(saslSCRAM512ConnectionOpts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + expect(broker.isConnected()).toEqual(true) }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - test('parallel calls to connect using SCRAM', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM256ConnectionOpts()), - logger: newLogger(), - }) + test('parallel calls to connect using SCRAM', async () => { + broker = new Broker({ + connection: createConnection(saslSCRAM256ConnectionOpts()), + logger: newLogger(), + }) + + expect(broker.isConnected()).toEqual(false) + + await Promise.all([ + broker.connect(), + broker.connect(), + broker.connect(), + broker.connect(), + broker.connect(), + ]) - expect(broker.isConnected()).toEqual(false) + expect(broker.isConnected()).toEqual(true) + }) + }) - await Promise.all([ - broker.connect(), - broker.connect(), - broker.connect(), - broker.connect(), - broker.connect(), - ]) + describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { + console.log(saslOAuthBearerConnectionOpts) - expect(broker.isConnected()).toEqual(true) + test('authenticate with SASL OAUTHBEARER if configured', async () => { + broker = new Broker({ + connection: createConnection(saslOAuthBearerConnectionOpts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + 
expect(broker.isConnected()).toEqual(true) + }) }) test('sets the authenticatedAt timer', async () => { @@ -118,9 +137,41 @@ describe('Broker > connect', () => { expect(broker.isConnected()).toEqual(true) }) + describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { + test('returns true when connected and authenticated on connections with SASL', async () => { + broker = new Broker({ + connection: createConnection(saslConnectionOpts()), + logger: newLogger(), + }) + await broker.connect() + expect(broker.isConnected()).toEqual(true) + }) + + test('returns false when the session lifetime has expired', async () => { + const sessionLifetime = 15000 + const reauthenticationThreshold = 10000 + broker = new Broker({ + connection: createConnection(saslConnectionOpts()), + logger: newLogger(), + reauthenticationThreshold, + }) + + await broker.connect() + expect(broker.isConnected()).toEqual(true) + + broker.sessionLifetime = Long.fromValue(sessionLifetime) + const [seconds] = broker.authenticatedAt + broker.authenticatedAt = [seconds - sessionLifetime / 1000, 0] + + expect(broker.isConnected()).toEqual(false) + }) + }) + }) + + describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { test('returns true when connected and authenticated on connections with SASL', async () => { broker = new Broker({ - connection: createConnection(saslConnectionOpts()), + connection: createConnection(saslOAuthBearerConnectionOpts()), logger: newLogger(), }) await broker.connect() @@ -131,7 +182,7 @@ describe('Broker > connect', () => { const sessionLifetime = 15000 const reauthenticationThreshold = 10000 broker = new Broker({ - connection: createConnection(saslConnectionOpts()), + connection: createConnection(saslOAuthBearerConnectionOpts()), logger: newLogger(), reauthenticationThreshold, }) @@ -147,72 +198,106 @@ describe('Broker > connect', () => { }) }) - test('returns true when the session lifetime is 0', async () => { - broker = new Broker({ - connection: createConnection(saslConnectionOpts()), - logger: newLogger(), - }) - - await broker.connect() - expect(broker.isConnected()).toEqual(true) - - broker.sessionLifetime = Long.ZERO - broker.authenticatedAt = [0, 0] - - expect(broker.isConnected()).toEqual(true) - }) - - describe('when SaslAuthenticate protocol is available', () => { - testIfKafka_1_1_0('authenticate with SASL PLAIN if configured', async () => { + describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { + test('returns true when the session lifetime is 0', async () => { broker = new Broker({ connection: createConnection(saslConnectionOpts()), logger: newLogger(), }) - expect(broker.isConnected()).toEqual(false) + await broker.connect() expect(broker.isConnected()).toEqual(true) - expect(broker.supportAuthenticationProtocol).toEqual(true) - }) - testIfKafka_1_1_0('authenticate with SASL SCRAM 256 if configured', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM256ConnectionOpts()), - logger: newLogger(), - }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() + broker.sessionLifetime = Long.ZERO + broker.authenticatedAt = [0, 0] + expect(broker.isConnected()).toEqual(true) - expect(broker.supportAuthenticationProtocol).toEqual(true) }) + }) - testIfKafka_1_1_0('authenticate with SASL SCRAM 512 if configured', async () => { + describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { + test('returns true when the session lifetime is 0', async () => { broker = new 
Broker({ - connection: createConnection(saslSCRAM512ConnectionOpts()), + connection: createConnection(saslOAuthBearerConnectionOpts()), logger: newLogger(), }) - expect(broker.isConnected()).toEqual(false) + await broker.connect() expect(broker.isConnected()).toEqual(true) - expect(broker.supportAuthenticationProtocol).toEqual(true) + + broker.sessionLifetime = Long.ZERO + broker.authenticatedAt = [0, 0] + + expect(broker.isConnected()).toEqual(true) }) + }) - testIfKafka_1_1_0('parallel calls to connect using SCRAM', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM256ConnectionOpts()), - logger: newLogger(), + describe('when SaslAuthenticate protocol is available', () => { + describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { + testIfKafka_1_1_0('authenticate with SASL PLAIN if configured', async () => { + broker = new Broker({ + connection: createConnection(saslConnectionOpts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + expect(broker.isConnected()).toEqual(true) + expect(broker.supportAuthenticationProtocol).toEqual(true) }) - expect(broker.isConnected()).toEqual(false) + testIfKafka_1_1_0('authenticate with SASL SCRAM 256 if configured', async () => { + broker = new Broker({ + connection: createConnection(saslSCRAM256ConnectionOpts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + expect(broker.isConnected()).toEqual(true) + expect(broker.supportAuthenticationProtocol).toEqual(true) + }) - await Promise.all([ - broker.connect(), - broker.connect(), - broker.connect(), - broker.connect(), - broker.connect(), - ]) + testIfKafka_1_1_0('authenticate with SASL SCRAM 512 if configured', async () => { + broker = new Broker({ + connection: createConnection(saslSCRAM512ConnectionOpts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + expect(broker.isConnected()).toEqual(true) + expect(broker.supportAuthenticationProtocol).toEqual(true) + }) - expect(broker.isConnected()).toEqual(true) + testIfKafka_1_1_0('parallel calls to connect using SCRAM', async () => { + broker = new Broker({ + connection: createConnection(saslSCRAM256ConnectionOpts()), + logger: newLogger(), + }) + + expect(broker.isConnected()).toEqual(false) + + await Promise.all([ + broker.connect(), + broker.connect(), + broker.connect(), + broker.connect(), + broker.connect(), + ]) + + expect(broker.isConnected()).toEqual(true) + }) + }) + + describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { + testIfKafka_1_1_0('authenticate with SASL PLAIN if configured', async () => { + broker = new Broker({ + connection: createConnection(saslOAuthBearerConnectionOpts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + expect(broker.isConnected()).toEqual(true) + expect(broker.supportAuthenticationProtocol).toEqual(true) + }) }) }) }) diff --git a/src/broker/__tests__/disconnect.spec.js b/src/broker/__tests__/disconnect.spec.js index bbbfe0a55..16e58b1fc 100644 --- a/src/broker/__tests__/disconnect.spec.js +++ b/src/broker/__tests__/disconnect.spec.js @@ -4,7 +4,10 @@ const { saslConnectionOpts, saslSCRAM256ConnectionOpts, saslSCRAM512ConnectionOpts, + saslOAuthBearerConnectionOpts, newLogger, + describeIfOauthbearerEnabled, + describeIfOauthbearerDisabled, } = require('testHelpers') const Broker = require('../index') @@ -30,36 +33,51 @@ 
describe('Broker > disconnect', () => { expect(broker.connection.connected).toEqual(false) }) - test('when authenticated with SASL set authenticated to false', async () => { - broker = new Broker({ - connection: createConnection(saslConnectionOpts()), - logger: newLogger(), + describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { + test('when authenticated with SASL set authenticated to false', async () => { + broker = new Broker({ + connection: createConnection(saslConnectionOpts()), + logger: newLogger(), + }) + await broker.connect() + expect(broker.authenticatedAt).not.toBe(null) + await broker.disconnect() + expect(broker.authenticatedAt).toBe(null) }) - await broker.connect() - expect(broker.authenticatedAt).not.toBe(null) - await broker.disconnect() - expect(broker.authenticatedAt).toBe(null) - }) - test('when authenticated with SASL SCRAM 256 set authenticated to false', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM256ConnectionOpts()), - logger: newLogger(), + test('when authenticated with SASL SCRAM 256 set authenticated to false', async () => { + broker = new Broker({ + connection: createConnection(saslSCRAM256ConnectionOpts()), + logger: newLogger(), + }) + await broker.connect() + expect(broker.authenticatedAt).not.toBe(null) + await broker.disconnect() + expect(broker.authenticatedAt).toBe(null) + }) + + test('when authenticated with SASL SCRAM 512 set authenticated to false', async () => { + broker = new Broker({ + connection: createConnection(saslSCRAM512ConnectionOpts()), + logger: newLogger(), + }) + await broker.connect() + expect(broker.authenticatedAt).not.toBe(null) + await broker.disconnect() + expect(broker.authenticatedAt).toBe(null) }) - await broker.connect() - expect(broker.authenticatedAt).not.toBe(null) - await broker.disconnect() - expect(broker.authenticatedAt).toBe(null) }) - test('when authenticated with SASL SCRAM 512 set authenticated to false', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM512ConnectionOpts()), - logger: newLogger(), + describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { + test('when authenticated with SASL set authenticated to false', async () => { + broker = new Broker({ + connection: createConnection(saslOAuthBearerConnectionOpts()), + logger: newLogger(), + }) + await broker.connect() + expect(broker.authenticatedAt).not.toBe(null) + await broker.disconnect() + expect(broker.authenticatedAt).toBe(null) }) - await broker.connect() - expect(broker.authenticatedAt).not.toBe(null) - await broker.disconnect() - expect(broker.authenticatedAt).toBe(null) }) }) diff --git a/src/consumer/__tests__/connection.spec.js b/src/consumer/__tests__/connection.spec.js index 59fbc4d94..9b0ab9500 100644 --- a/src/consumer/__tests__/connection.spec.js +++ b/src/consumer/__tests__/connection.spec.js @@ -15,6 +15,8 @@ const { saslBrokers, waitFor, waitForConsumerToJoinGroup, + describeIfOauthbearerEnabled, + describeIfOauthbearerDisabled, } = require('testHelpers') describe('Consumer', () => { @@ -45,65 +47,69 @@ describe('Consumer', () => { await consumer.connect() }) - test('support SASL PLAIN connections', async () => { - cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'plain', - username: 'test', - password: 'testtest', - }, - }), - saslBrokers() - ) + describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { + test('support SASL PLAIN connections', async () => { + cluster = 
createCluster( + Object.assign(sslConnectionOpts(), { + sasl: { + mechanism: 'plain', + username: 'test', + password: 'testtest', + }, + }), + saslBrokers() + ) + + consumer = createConsumer({ + cluster, + groupId, + maxWaitTimeInMs: 1, + logger: newLogger(), + }) - consumer = createConsumer({ - cluster, - groupId, - maxWaitTimeInMs: 1, - logger: newLogger(), + await consumer.connect() }) - await consumer.connect() - }) + test('support SASL SCRAM 256 connections', async () => { + cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) - test('support SASL SCRAM 256 connections', async () => { - cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) + consumer = createConsumer({ + cluster, + groupId, + maxWaitTimeInMs: 1, + logger: newLogger(), + }) - consumer = createConsumer({ - cluster, - groupId, - maxWaitTimeInMs: 1, - logger: newLogger(), + await consumer.connect() }) - await consumer.connect() - }) + test('support SASL SCRAM 512 connections', async () => { + cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) - test('support SASL SCRAM 512 connections', async () => { - cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) + consumer = createConsumer({ + cluster, + groupId, + maxWaitTimeInMs: 1, + logger: newLogger(), + }) - consumer = createConsumer({ - cluster, - groupId, - maxWaitTimeInMs: 1, - logger: newLogger(), + await consumer.connect() }) - - await consumer.connect() }) - test('support SASL OAUTHBEARER connections', async () => { - cluster = createCluster(saslOAuthBearerConnectionOpts(), saslBrokers()) + describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { + test('support SASL OAUTHBEARER connections', async () => { + cluster = createCluster(saslOAuthBearerConnectionOpts(), saslBrokers()) - consumer = createConsumer({ - cluster, - groupId, - maxWaitTimeInMs: 1, - logger: newLogger(), - }) + consumer = createConsumer({ + cluster, + groupId, + maxWaitTimeInMs: 1, + logger: newLogger(), + }) - await consumer.connect() + await consumer.connect() + }) }) test('reconnects the cluster if disconnected', async () => { diff --git a/src/producer/index.spec.js b/src/producer/index.spec.js index 0e969dd7b..10be867f0 100644 --- a/src/producer/index.spec.js +++ b/src/producer/index.spec.js @@ -37,6 +37,7 @@ const { testIfKafka_0_11, createTopic, waitForMessages, + describeIfOauthbearerDisabled, } = require('testHelpers') const createRetrier = require('../retry') @@ -91,79 +92,81 @@ describe('Producer', () => { await producer.connect() }) - test('support SASL PLAIN connections', async () => { - const cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'plain', - username: 'test', - password: 'testtest', - }, - }), - saslBrokers() - ) - producer = createProducer({ cluster, logger: newLogger() }) - await producer.connect() - }) + describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { + test('support SASL PLAIN connections', async () => { + const cluster = createCluster( + Object.assign(sslConnectionOpts(), { + sasl: { + mechanism: 'plain', + username: 'test', + password: 'testtest', + }, + }), + saslBrokers() + ) + producer = createProducer({ cluster, logger: newLogger() }) + await producer.connect() + }) - test('support SASL SCRAM 256 connections', async () => { - const cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) - producer = createProducer({ cluster, logger: newLogger() }) - await producer.connect() - }) + test('support SASL 
SCRAM 256 connections', async () => { + const cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) + producer = createProducer({ cluster, logger: newLogger() }) + await producer.connect() + }) - test('support SASL SCRAM 512 connections', async () => { - const cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) - producer = createProducer({ cluster, logger: newLogger() }) - await producer.connect() - }) + test('support SASL SCRAM 512 connections', async () => { + const cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) + producer = createProducer({ cluster, logger: newLogger() }) + await producer.connect() + }) - test('throws an error if SASL PLAIN fails to authenticate', async () => { - const cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'plain', - username: 'wrong', - password: 'wrong', - }, - }), - saslBrokers() - ) + test('throws an error if SASL PLAIN fails to authenticate', async () => { + const cluster = createCluster( + Object.assign(sslConnectionOpts(), { + sasl: { + mechanism: 'plain', + username: 'wrong', + password: 'wrong', + }, + }), + saslBrokers() + ) - producer = createProducer({ cluster, logger: newLogger() }) - await expect(producer.connect()).rejects.toThrow(/SASL PLAIN authentication failed/) - }) + producer = createProducer({ cluster, logger: newLogger() }) + await expect(producer.connect()).rejects.toThrow(/SASL PLAIN authentication failed/) + }) - test('throws an error if SASL SCRAM 256 fails to authenticate', async () => { - const cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'SCRAM-SHA-256', - username: 'wrong', - password: 'wrong', - }, - }), - saslBrokers() - ) + test('throws an error if SASL SCRAM 256 fails to authenticate', async () => { + const cluster = createCluster( + Object.assign(sslConnectionOpts(), { + sasl: { + mechanism: 'SCRAM-SHA-256', + username: 'wrong', + password: 'wrong', + }, + }), + saslBrokers() + ) - producer = createProducer({ cluster, logger: newLogger() }) - await expect(producer.connect()).rejects.toThrow(/SASL SCRAM SHA256 authentication failed/) - }) + producer = createProducer({ cluster, logger: newLogger() }) + await expect(producer.connect()).rejects.toThrow(/SASL SCRAM SHA256 authentication failed/) + }) - test('throws an error if SASL SCRAM 512 fails to authenticate', async () => { - const cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'SCRAM-SHA-512', - username: 'wrong', - password: 'wrong', - }, - }), - saslBrokers() - ) + test('throws an error if SASL SCRAM 512 fails to authenticate', async () => { + const cluster = createCluster( + Object.assign(sslConnectionOpts(), { + sasl: { + mechanism: 'SCRAM-SHA-512', + username: 'wrong', + password: 'wrong', + }, + }), + saslBrokers() + ) - producer = createProducer({ cluster, logger: newLogger() }) - await expect(producer.connect()).rejects.toThrow(/SASL SCRAM SHA512 authentication failed/) + producer = createProducer({ cluster, logger: newLogger() }) + await expect(producer.connect()).rejects.toThrow(/SASL SCRAM SHA512 authentication failed/) + }) }) test('reconnects the cluster if disconnected', async () => { diff --git a/testHelpers/index.js b/testHelpers/index.js index ce04c457b..011daecce 100644 --- a/testHelpers/index.js +++ b/testHelpers/index.js @@ -226,6 +226,20 @@ const flakyTest = (description, callback, testFn = test) => testFn(`[flaky] ${description}`, callback) flakyTest.skip = (description, 
callback) => flakyTest(description, callback, test.skip) flakyTest.only = (description, callback) => flakyTest(description, callback, test.only) +const describeIfEnv = (key, value) => (description, callback, describeFn = describe) => { + return value === process.env[key] + ? describeFn(description, callback) + : describe.skip(description, callback) +} + +const describeIfNotEnv = (key, value) => (description, callback, describeFn = describe) => { + return value !== process.env[key] + ? describeFn(description, callback) + : describe.skip(description, callback) +} + +const describeIfOauthbearerEnabled = describeIfEnv('OAUTHBEARER_ENABLED', '1') +const describeIfOauthbearerDisabled = describeIfNotEnv('OAUTHBEARER_ENABLED', '1') const unsupportedVersionResponse = () => Buffer.from({ type: 'Buffer', data: [0, 35, 0, 0, 0, 0] }) const unsupportedVersionResponseWithTimeout = () => @@ -271,6 +285,8 @@ module.exports = { testIfKafka_0_11, testIfKafka_1_1_0, flakyTest, + describeIfOauthbearerEnabled, + describeIfOauthbearerDisabled, addPartitions, unsupportedVersionResponse, generateMessages, diff --git a/testHelpers/kafka/server-jaas.conf b/testHelpers/kafka/server-jaas.conf index eebcca32f..d3fcdf782 100644 --- a/testHelpers/kafka/server-jaas.conf +++ b/testHelpers/kafka/server-jaas.conf @@ -1,5 +1,5 @@ KafkaServer { - org.apache.kafka.common.security.plain.PlainLoginModule required + org.apache.kafka.common.security.plain.PlainLoginModule required username="test" password="testtest" user_test="testtest"; @@ -7,9 +7,6 @@ KafkaServer { org.apache.kafka.common.security.scram.ScramLoginModule required username="admin" password="adminadmin"; - - org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required - unsecuredLoginStringClaim_sub="admin"; }; KafkaClient { org.apache.kafka.common.security.plain.PlainLoginModule required diff --git a/testHelpers/kafka/server-jaas_oauth.conf b/testHelpers/kafka/server-jaas_oauth.conf new file mode 100644 index 000000000..1f840c9a4 --- /dev/null +++ b/testHelpers/kafka/server-jaas_oauth.conf @@ -0,0 +1,22 @@ +KafkaServer { + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required + unsecuredLoginStringClaim_sub="admin"; +}; +KafkaClient { + org.apache.kafka.common.security.plain.PlainLoginModule required + username="test" + password="testtest" + user_test="testtest"; +}; +Server { + org.apache.kafka.common.security.plain.PlainLoginModule required + username="test" + password="testtest" + user_test="testtest"; +}; +Client { + org.apache.kafka.common.security.plain.PlainLoginModule required + username="test" + password="testtest" + user_test="testtest"; +}; \ No newline at end of file From c83c8b58e6d7d248d16ebccb2d9bc15306628ad6 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Wed, 3 Jun 2020 09:01:47 +0200 Subject: [PATCH 30/35] Reduce SASL tests code duplications --- src/admin/__tests__/connection.spec.js | 35 +--- src/broker/__tests__/connect.spec.js | 237 ++++++---------------- src/broker/__tests__/disconnect.spec.js | 55 +---- src/consumer/__tests__/connection.spec.js | 64 +----- src/producer/index.spec.js | 86 ++------ testHelpers/index.js | 69 +++++++ 6 files changed, 156 insertions(+), 390 deletions(-) diff --git a/src/admin/__tests__/connection.spec.js b/src/admin/__tests__/connection.spec.js index fab6670de..33ee3531b 100644 --- a/src/admin/__tests__/connection.spec.js +++ b/src/admin/__tests__/connection.spec.js @@ -2,16 +2,11 @@ const createAdmin = require('../index') const { sslConnectionOpts, - 
saslConnectionOpts, - saslSCRAM256ConnectionOpts, - saslSCRAM512ConnectionOpts, - saslOAuthBearerConnectionOpts, + saslEntries, createCluster, sslBrokers, saslBrokers, newLogger, - describeIfOauthbearerEnabled, - describeIfOauthbearerDisabled, } = require('testHelpers') describe('Admin', () => { @@ -28,31 +23,11 @@ describe('Admin', () => { await admin.connect() }) - describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { - test('support SASL PLAIN connections', async () => { - const cluster = createCluster(saslConnectionOpts(), saslBrokers()) + for (const e of saslEntries) { + test(`support SASL ${e.name} connections`, async () => { + const cluster = createCluster(e.opts(), saslBrokers()) admin = createAdmin({ cluster, logger: newLogger() }) await admin.connect() }) - - test('support SASL SCRAM 256 connections', async () => { - const cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) - admin = createAdmin({ cluster, logger: newLogger() }) - await admin.connect() - }) - - test('support SASL SCRAM 512 connections', async () => { - const cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) - admin = createAdmin({ cluster, logger: newLogger() }) - await admin.connect() - }) - }) - - describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { - test('support SASL OAUTHBEARER connections', async () => { - const cluster = createCluster(saslOAuthBearerConnectionOpts(), saslBrokers()) - admin = createAdmin({ cluster, logger: newLogger() }) - await admin.connect() - }) - }) + } }) diff --git a/src/broker/__tests__/connect.spec.js b/src/broker/__tests__/connect.spec.js index ece8aae9f..02e2431b6 100644 --- a/src/broker/__tests__/connect.spec.js +++ b/src/broker/__tests__/connect.spec.js @@ -1,14 +1,11 @@ const { createConnection, connectionOpts, - saslConnectionOpts, saslSCRAM256ConnectionOpts, - saslSCRAM512ConnectionOpts, - saslOAuthBearerConnectionOpts, newLogger, testIfKafka_1_1_0, - describeIfOauthbearerEnabled, describeIfOauthbearerDisabled, + saslEntries, } = require('testHelpers') const Long = require('long') @@ -39,37 +36,19 @@ describe('Broker > connect', () => { expect(broker.versions).toBeTruthy() }) - describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { - test('authenticate with SASL PLAIN if configured', async () => { + for (const e of saslEntries) { + test(`authenticate with SASL ${e.name} if configured`, async () => { broker = new Broker({ - connection: createConnection(saslConnectionOpts()), - logger: newLogger(), - }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - - test('authenticate with SASL SCRAM 256 if configured', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM256ConnectionOpts()), - logger: newLogger(), - }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - - test('authenticate with SASL SCRAM 512 if configured', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM512ConnectionOpts()), + connection: createConnection(e.opts()), logger: newLogger(), }) expect(broker.isConnected()).toEqual(false) await broker.connect() expect(broker.isConnected()).toEqual(true) }) + } + describeIfOauthbearerDisabled('when SASL SCRAM is configured', () => { test('parallel calls to connect using SCRAM', async () => { broker = new Broker({ connection: 
createConnection(saslSCRAM256ConnectionOpts()), @@ -90,20 +69,6 @@ describe('Broker > connect', () => { }) }) - describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { - console.log(saslOAuthBearerConnectionOpts) - - test('authenticate with SASL OAUTHBEARER if configured', async () => { - broker = new Broker({ - connection: createConnection(saslOAuthBearerConnectionOpts()), - logger: newLogger(), - }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - }) - test('sets the authenticatedAt timer', async () => { const error = new Error('not connected') const timer = process.hrtime() @@ -122,151 +87,82 @@ describe('Broker > connect', () => { expect(broker.isConnected()).toEqual(false) }) - test('returns false when connected but not authenticated on connections with SASL', async () => { - broker = new Broker({ - connection: createConnection(saslConnectionOpts()), - logger: newLogger(), - }) - expect(broker.isConnected()).toEqual(false) - await broker.connection.connect() - expect(broker.isConnected()).toEqual(false) - }) - - test('returns true when connected', async () => { - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - - describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { - test('returns true when connected and authenticated on connections with SASL', async () => { - broker = new Broker({ - connection: createConnection(saslConnectionOpts()), - logger: newLogger(), - }) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - - test('returns false when the session lifetime has expired', async () => { - const sessionLifetime = 15000 - const reauthenticationThreshold = 10000 + for (const e of saslEntries) { + test(`returns false when connected but not authenticated on connections with SASL ${e.name}`, async () => { broker = new Broker({ - connection: createConnection(saslConnectionOpts()), + connection: createConnection(e.opts()), logger: newLogger(), - reauthenticationThreshold, }) - - await broker.connect() - expect(broker.isConnected()).toEqual(true) - - broker.sessionLifetime = Long.fromValue(sessionLifetime) - const [seconds] = broker.authenticatedAt - broker.authenticatedAt = [seconds - sessionLifetime / 1000, 0] - + expect(broker.isConnected()).toEqual(false) + await broker.connection.connect() expect(broker.isConnected()).toEqual(false) }) - }) - }) - - describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { - test('returns true when connected and authenticated on connections with SASL', async () => { - broker = new Broker({ - connection: createConnection(saslOAuthBearerConnectionOpts()), - logger: newLogger(), - }) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - }) - - test('returns false when the session lifetime has expired', async () => { - const sessionLifetime = 15000 - const reauthenticationThreshold = 10000 - broker = new Broker({ - connection: createConnection(saslOAuthBearerConnectionOpts()), - logger: newLogger(), - reauthenticationThreshold, - }) + } + test('returns true when connected', async () => { await broker.connect() expect(broker.isConnected()).toEqual(true) - - broker.sessionLifetime = Long.fromValue(sessionLifetime) - const [seconds] = broker.authenticatedAt - broker.authenticatedAt = [seconds - sessionLifetime / 1000, 0] - - expect(broker.isConnected()).toEqual(false) }) - }) - - describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () 
=> { - test('returns true when the session lifetime is 0', async () => { - broker = new Broker({ - connection: createConnection(saslConnectionOpts()), - logger: newLogger(), - }) - await broker.connect() - expect(broker.isConnected()).toEqual(true) + describe('when SaslAuthenticate protocol is available', () => { + for (const e of saslEntries) { + test(`returns true when connected and authenticated on connections with SASL ${e.name}`, async () => { + broker = new Broker({ + connection: createConnection(e.opts()), + logger: newLogger(), + }) + await broker.connect() + expect(broker.isConnected()).toEqual(true) + }) - broker.sessionLifetime = Long.ZERO - broker.authenticatedAt = [0, 0] + test('returns false when the session lifetime has expired', async () => { + const sessionLifetime = 15000 + const reauthenticationThreshold = 10000 + broker = new Broker({ + connection: createConnection(e.opts()), + logger: newLogger(), + reauthenticationThreshold, + }) - expect(broker.isConnected()).toEqual(true) - }) - }) + await broker.connect() + expect(broker.isConnected()).toEqual(true) - describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { - test('returns true when the session lifetime is 0', async () => { - broker = new Broker({ - connection: createConnection(saslOAuthBearerConnectionOpts()), - logger: newLogger(), - }) + broker.sessionLifetime = Long.fromValue(sessionLifetime) + const [seconds] = broker.authenticatedAt + broker.authenticatedAt = [seconds - sessionLifetime / 1000, 0] - await broker.connect() - expect(broker.isConnected()).toEqual(true) + expect(broker.isConnected()).toEqual(false) + }) - broker.sessionLifetime = Long.ZERO - broker.authenticatedAt = [0, 0] + test('returns true when the session lifetime is 0', async () => { + broker = new Broker({ + connection: createConnection(e.opts()), + logger: newLogger(), + }) - expect(broker.isConnected()).toEqual(true) - }) - }) + await broker.connect() + expect(broker.isConnected()).toEqual(true) - describe('when SaslAuthenticate protocol is available', () => { - describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { - testIfKafka_1_1_0('authenticate with SASL PLAIN if configured', async () => { - broker = new Broker({ - connection: createConnection(saslConnectionOpts()), - logger: newLogger(), - }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - expect(broker.supportAuthenticationProtocol).toEqual(true) - }) + broker.sessionLifetime = Long.ZERO + broker.authenticatedAt = [0, 0] - testIfKafka_1_1_0('authenticate with SASL SCRAM 256 if configured', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM256ConnectionOpts()), - logger: newLogger(), + expect(broker.isConnected()).toEqual(true) }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - expect(broker.supportAuthenticationProtocol).toEqual(true) - }) - testIfKafka_1_1_0('authenticate with SASL SCRAM 512 if configured', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM512ConnectionOpts()), - logger: newLogger(), + testIfKafka_1_1_0(`authenticate with SASL ${e.name} if configured`, async () => { + broker = new Broker({ + connection: createConnection(e.opts()), + logger: newLogger(), + }) + expect(broker.isConnected()).toEqual(false) + await broker.connect() + expect(broker.isConnected()).toEqual(true) + 
expect(broker.supportAuthenticationProtocol).toEqual(true) }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - expect(broker.supportAuthenticationProtocol).toEqual(true) - }) + } + }) + describeIfOauthbearerDisabled('when SASL SCRAM is configured', () => { testIfKafka_1_1_0('parallel calls to connect using SCRAM', async () => { broker = new Broker({ connection: createConnection(saslSCRAM256ConnectionOpts()), @@ -286,18 +182,5 @@ describe('Broker > connect', () => { expect(broker.isConnected()).toEqual(true) }) }) - - describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { - testIfKafka_1_1_0('authenticate with SASL PLAIN if configured', async () => { - broker = new Broker({ - connection: createConnection(saslOAuthBearerConnectionOpts()), - logger: newLogger(), - }) - expect(broker.isConnected()).toEqual(false) - await broker.connect() - expect(broker.isConnected()).toEqual(true) - expect(broker.supportAuthenticationProtocol).toEqual(true) - }) - }) }) }) diff --git a/src/broker/__tests__/disconnect.spec.js b/src/broker/__tests__/disconnect.spec.js index 16e58b1fc..bc38cf13c 100644 --- a/src/broker/__tests__/disconnect.spec.js +++ b/src/broker/__tests__/disconnect.spec.js @@ -1,14 +1,4 @@ -const { - createConnection, - connectionOpts, - saslConnectionOpts, - saslSCRAM256ConnectionOpts, - saslSCRAM512ConnectionOpts, - saslOAuthBearerConnectionOpts, - newLogger, - describeIfOauthbearerEnabled, - describeIfOauthbearerDisabled, -} = require('testHelpers') +const { createConnection, connectionOpts, saslEntries, newLogger } = require('testHelpers') const Broker = require('../index') @@ -33,10 +23,10 @@ describe('Broker > disconnect', () => { expect(broker.connection.connected).toEqual(false) }) - describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { - test('when authenticated with SASL set authenticated to false', async () => { + for (const e of saslEntries) { + test(`when authenticated with SASL ${e.name} set authenticated to false`, async () => { broker = new Broker({ - connection: createConnection(saslConnectionOpts()), + connection: createConnection(e.opts()), logger: newLogger(), }) await broker.connect() @@ -44,40 +34,5 @@ describe('Broker > disconnect', () => { await broker.disconnect() expect(broker.authenticatedAt).toBe(null) }) - - test('when authenticated with SASL SCRAM 256 set authenticated to false', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM256ConnectionOpts()), - logger: newLogger(), - }) - await broker.connect() - expect(broker.authenticatedAt).not.toBe(null) - await broker.disconnect() - expect(broker.authenticatedAt).toBe(null) - }) - - test('when authenticated with SASL SCRAM 512 set authenticated to false', async () => { - broker = new Broker({ - connection: createConnection(saslSCRAM512ConnectionOpts()), - logger: newLogger(), - }) - await broker.connect() - expect(broker.authenticatedAt).not.toBe(null) - await broker.disconnect() - expect(broker.authenticatedAt).toBe(null) - }) - }) - - describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { - test('when authenticated with SASL set authenticated to false', async () => { - broker = new Broker({ - connection: createConnection(saslOAuthBearerConnectionOpts()), - logger: newLogger(), - }) - await broker.connect() - expect(broker.authenticatedAt).not.toBe(null) - await broker.disconnect() - expect(broker.authenticatedAt).toBe(null) - }) - }) + } }) diff --git 
a/src/consumer/__tests__/connection.spec.js b/src/consumer/__tests__/connection.spec.js index 9b0ab9500..6e33a54b5 100644 --- a/src/consumer/__tests__/connection.spec.js +++ b/src/consumer/__tests__/connection.spec.js @@ -8,15 +8,11 @@ const { createModPartitioner, newLogger, sslConnectionOpts, - saslSCRAM256ConnectionOpts, - saslSCRAM512ConnectionOpts, - saslOAuthBearerConnectionOpts, + saslEntries, sslBrokers, saslBrokers, waitFor, waitForConsumerToJoinGroup, - describeIfOauthbearerEnabled, - describeIfOauthbearerDisabled, } = require('testHelpers') describe('Consumer', () => { @@ -47,18 +43,9 @@ describe('Consumer', () => { await consumer.connect() }) - describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { - test('support SASL PLAIN connections', async () => { - cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'plain', - username: 'test', - password: 'testtest', - }, - }), - saslBrokers() - ) + for (const e of saslEntries) { + test(`support SASL ${e.name} connections`, async () => { + cluster = createCluster(e.opts(), saslBrokers()) consumer = createConsumer({ cluster, @@ -69,48 +56,7 @@ describe('Consumer', () => { await consumer.connect() }) - - test('support SASL SCRAM 256 connections', async () => { - cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) - - consumer = createConsumer({ - cluster, - groupId, - maxWaitTimeInMs: 1, - logger: newLogger(), - }) - - await consumer.connect() - }) - - test('support SASL SCRAM 512 connections', async () => { - cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) - - consumer = createConsumer({ - cluster, - groupId, - maxWaitTimeInMs: 1, - logger: newLogger(), - }) - - await consumer.connect() - }) - }) - - describeIfOauthbearerEnabled('when SASL OAUTHBEARER is configured', () => { - test('support SASL OAUTHBEARER connections', async () => { - cluster = createCluster(saslOAuthBearerConnectionOpts(), saslBrokers()) - - consumer = createConsumer({ - cluster, - groupId, - maxWaitTimeInMs: 1, - logger: newLogger(), - }) - - await consumer.connect() - }) - }) + } test('reconnects the cluster if disconnected', async () => { consumer = createConsumer({ diff --git a/src/producer/index.spec.js b/src/producer/index.spec.js index 10be867f0..5dc7ed629 100644 --- a/src/producer/index.spec.js +++ b/src/producer/index.spec.js @@ -27,8 +27,7 @@ const { secureRandom, connectionOpts, sslConnectionOpts, - saslSCRAM256ConnectionOpts, - saslSCRAM512ConnectionOpts, + saslEntries, createCluster, createModPartitioner, sslBrokers, @@ -37,7 +36,6 @@ const { testIfKafka_0_11, createTopic, waitForMessages, - describeIfOauthbearerDisabled, } = require('testHelpers') const createRetrier = require('../retry') @@ -92,82 +90,22 @@ describe('Producer', () => { await producer.connect() }) - describeIfOauthbearerDisabled('when SASL PLAIN and SCRAM are configured', () => { - test('support SASL PLAIN connections', async () => { - const cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'plain', - username: 'test', - password: 'testtest', - }, - }), - saslBrokers() - ) - producer = createProducer({ cluster, logger: newLogger() }) - await producer.connect() - }) - - test('support SASL SCRAM 256 connections', async () => { - const cluster = createCluster(saslSCRAM256ConnectionOpts(), saslBrokers()) + for (const e of saslEntries) { + test(`support SASL ${e.name} connections`, async () => { + const cluster = createCluster(e.opts(), saslBrokers()) 
producer = createProducer({ cluster, logger: newLogger() }) await producer.connect() }) - test('support SASL SCRAM 512 connections', async () => { - const cluster = createCluster(saslSCRAM512ConnectionOpts(), saslBrokers()) - producer = createProducer({ cluster, logger: newLogger() }) - await producer.connect() - }) - - test('throws an error if SASL PLAIN fails to authenticate', async () => { - const cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'plain', - username: 'wrong', - password: 'wrong', - }, - }), - saslBrokers() - ) - - producer = createProducer({ cluster, logger: newLogger() }) - await expect(producer.connect()).rejects.toThrow(/SASL PLAIN authentication failed/) - }) - - test('throws an error if SASL SCRAM 256 fails to authenticate', async () => { - const cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'SCRAM-SHA-256', - username: 'wrong', - password: 'wrong', - }, - }), - saslBrokers() - ) - - producer = createProducer({ cluster, logger: newLogger() }) - await expect(producer.connect()).rejects.toThrow(/SASL SCRAM SHA256 authentication failed/) - }) + if (e.wrongOpts) { + test(`throws an error if SASL ${e.name} fails to authenticate`, async () => { + const cluster = createCluster(e.wrongOpts(), saslBrokers()) - test('throws an error if SASL SCRAM 512 fails to authenticate', async () => { - const cluster = createCluster( - Object.assign(sslConnectionOpts(), { - sasl: { - mechanism: 'SCRAM-SHA-512', - username: 'wrong', - password: 'wrong', - }, - }), - saslBrokers() - ) - - producer = createProducer({ cluster, logger: newLogger() }) - await expect(producer.connect()).rejects.toThrow(/SASL SCRAM SHA512 authentication failed/) - }) - }) + producer = createProducer({ cluster, logger: newLogger() }) + await expect(producer.connect()).rejects.toThrow(e.expectedErr) + }) + } + } test('reconnects the cluster if disconnected', async () => { const cluster = createCluster( diff --git a/testHelpers/index.js b/testHelpers/index.js index 011daecce..aa4e44ea8 100644 --- a/testHelpers/index.js +++ b/testHelpers/index.js @@ -60,6 +60,16 @@ const saslConnectionOpts = () => }, }) +const saslWrongConnectionOpts = () => + Object.assign(sslConnectionOpts(), { + port: 9094, + sasl: { + mechanism: 'plain', + username: 'wrong', + password: 'wrong', + }, + }) + const saslSCRAM256ConnectionOpts = () => Object.assign(sslConnectionOpts(), { port: 9094, @@ -70,6 +80,16 @@ const saslSCRAM256ConnectionOpts = () => }, }) +const saslSCRAM256WrongConnectionOpts = () => + Object.assign(sslConnectionOpts(), { + port: 9094, + sasl: { + mechanism: 'scram-sha-256', + username: 'wrong', + password: 'wrong', + }, + }) + const saslSCRAM512ConnectionOpts = () => Object.assign(sslConnectionOpts(), { port: 9094, @@ -80,6 +100,16 @@ const saslSCRAM512ConnectionOpts = () => }, }) +const saslSCRAM512WrongConnectionOpts = () => + Object.assign(sslConnectionOpts(), { + port: 9094, + sasl: { + mechanism: 'scram-sha-512', + username: 'wrong', + password: 'wrong', + }, + }) + const saslOAuthBearerConnectionOpts = () => Object.assign(sslConnectionOpts(), { port: 9094, @@ -95,6 +125,39 @@ const saslOAuthBearerConnectionOpts = () => }, }) +/** + * List of the possible SASL setups. + * OAUTHBEARER must be enabled as a special case. 
+ */
+const saslEntries = []
+if (process.env['OAUTHBEARER_ENABLED'] !== '1') {
+  saslEntries.push({
+    name: 'PLAIN',
+    opts: saslConnectionOpts,
+    wrongOpts: saslWrongConnectionOpts,
+    expectedErr: /SASL PLAIN authentication failed/,
+  })
+
+  saslEntries.push({
+    name: 'SCRAM 256',
+    opts: saslSCRAM256ConnectionOpts,
+    wrongOpts: saslSCRAM256WrongConnectionOpts,
+    expectedErr: /SASL SCRAM SHA256 authentication failed/,
+  })
+
+  saslEntries.push({
+    name: 'SCRAM 512',
+    opts: saslSCRAM512ConnectionOpts,
+    wrongOpts: saslSCRAM512WrongConnectionOpts,
+    expectedErr: /SASL SCRAM SHA512 authentication failed/,
+  })
+} else {
+  saslEntries.push({
+    name: 'OAUTHBEARER',
+    opts: saslOAuthBearerConnectionOpts,
+  })
+}
+
 const createConnection = (opts = {}) => new Connection(Object.assign(connectionOpts(), opts))
 
 const createConnectionBuilder = (opts = {}, brokers = plainTextBrokers()) => {
@@ -238,6 +301,11 @@ const describeIfNotEnv = (key, value) => (description, callback, describeFn = de
     : describe.skip(description, callback)
 }
 
+/**
+ * Conditional describes for SASL OAUTHBEARER.
+ * OAUTHBEARER must be enabled as a special case, as the current Kafka impl
+ * doesn't allow it to be enabled alongside other SASL mechanisms.
+ */
 const describeIfOauthbearerEnabled = describeIfEnv('OAUTHBEARER_ENABLED', '1')
 const describeIfOauthbearerDisabled = describeIfNotEnv('OAUTHBEARER_ENABLED', '1')
 
@@ -268,6 +336,8 @@ module.exports = {
   saslSCRAM256ConnectionOpts,
   saslSCRAM512ConnectionOpts,
   saslOAuthBearerConnectionOpts,
+  saslEntries,
   createConnection,
   createConnectionBuilder,
   createCluster,
From b187c40ebd2595bc77fca3df028d32ee052dc7a1 Mon Sep 17 00:00:00 2001
From: Michele Tibaldi
Date: Mon, 13 Jul 2020 22:28:01 +0200
Subject: [PATCH 31/35] Correct YML quoting style

---
 docker-compose.2_3_oauthbearer.yml | 163 ++++++++++++++----------
 1 file changed, 80 insertions(+), 83 deletions(-)

diff --git a/docker-compose.2_3_oauthbearer.yml b/docker-compose.2_3_oauthbearer.yml
index 8dec95330..1f00bb3ac 100644
--- a/docker-compose.2_3_oauthbearer.yml
+++ b/docker-compose.2_3_oauthbearer.yml
@@ -5,142 +5,139 @@ services:
     hostname: zookeeper
     container_name: zookeeper
     ports:
-      - "2181:2181"
+      - '2181:2181'
     environment:
-      ZOOKEEPER_CLIENT_PORT: 2181
-      ZOOKEEPER_TICK_TIME: 2000
-      KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/server-jaas.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider"
+      ZOOKEEPER_CLIENT_PORT: '2181'
+      ZOOKEEPER_TICK_TIME: '2000'
+      KAFKA_OPTS: '-Djava.security.auth.login.config=/etc/kafka/server-jaas.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider'
     volumes:
-      - ./testHelpers/kafka/server-jaas_oauth.conf:/etc/kafka/server-jaas.conf
+      - ./testHelpers/kafka/server-jaas_oauth.conf:/etc/kafka/server-jaas.conf:ro,z
 
   kafka1:
     image: confluentinc/cp-kafka:5.3.1
     hostname: kafka1
     container_name: kafka1
     labels:
-      - "custom.project=kafkajs"
-      - "custom.service=kafka1"
+      - 'custom.project=kafkajs'
+      - 'custom.service=kafka1'
     depends_on:
       - zookeeper
     ports:
-      - "29092:29092"
-      - "9092:9092"
-      - "29093:29093"
-      - "9093:9093"
-      - "29094:29094"
-      - "9094:9094"
+      - '29092:29092'
+      - '9092:9092'
+      - '29093:29093'
+      - '9093:9093'
+      - '29094:29094'
+      - '9094:9094'
     environment:
-      KAFKA_BROKER_ID: 0
+      KAFKA_BROKER_ID: '0'
       KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL
       KAFKA_ADVERTISED_LISTENERS:
PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9092,SSL://kafka1:29093,SSL_HOST://localhost:9093,SASL_SSL://kafka1:29094,SASL_SSL_HOST://localhost:9094 KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' KAFKA_DELETE_TOPIC_ENABLE: 'true' - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 - KAFKA_SSL_KEYSTORE_FILENAME: "kafka.server.keystore.jks" - KAFKA_SSL_KEYSTORE_CREDENTIALS: "keystore_creds" - KAFKA_SSL_KEY_CREDENTIALS: "sslkey_creds" - KAFKA_SSL_TRUSTSTORE_FILENAME: "kafka.server.truststore.jks" - KAFKA_SSL_TRUSTSTORE_CREDENTIALS: "truststore_creds" - KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: "PLAIN" - KAFKA_SASL_ENABLED_MECHANISMS: "OAUTHBEARER" - KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf" + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: '0' + KAFKA_SSL_KEYSTORE_FILENAME: 'kafka.server.keystore.jks' + KAFKA_SSL_KEYSTORE_CREDENTIALS: 'keystore_creds' + KAFKA_SSL_KEY_CREDENTIALS: 'sslkey_creds' + KAFKA_SSL_TRUSTSTORE_FILENAME: 'kafka.server.truststore.jks' + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: 'truststore_creds' + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN' + KAFKA_SASL_ENABLED_MECHANISMS: 'OAUTHBEARER' + KAFKA_OPTS: '-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf' # suppress verbosity # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template - KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO" + KAFKA_LOG4J_LOGGERS: 'kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO' volumes: - - /var/run/docker.sock:/var/run/docker.sock - - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks - - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks - - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds - - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds - - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds - - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks:ro,z + - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks:ro,z + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds:ro,z + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds:ro,z + - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds:ro,z + - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf:ro,z kafka2: image: confluentinc/cp-kafka:5.3.1 hostname: kafka2 container_name: kafka2 labels: - - "custom.project=kafkajs" - - "custom.service=kafka2" + - 'custom.project=kafkajs' + - 'custom.service=kafka2' depends_on: - zookeeper ports: - - "29095:29095" - - "9095:9095" - - "29096:29096" - - "9096:9096" - - "29097:29097" - - "9097:9097" + - '29095:29095' + - '9095:9095' + - '29096:29096' + - '9096:9096' + - '29097:29097' + - '9097:9097' environment: - KAFKA_BROKER_ID: 1 + KAFKA_BROKER_ID: '1' KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL KAFKA_ADVERTISED_LISTENERS: 
PLAINTEXT://kafka2:29095,PLAINTEXT_HOST://localhost:9095,SSL://kafka2:29096,SSL_HOST://localhost:9096,SASL_SSL://kafka2:29097,SASL_SSL_HOST://localhost:9097 KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' KAFKA_DELETE_TOPIC_ENABLE: 'true' - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 - KAFKA_SSL_KEYSTORE_FILENAME: "kafka.server.keystore.jks" - KAFKA_SSL_KEYSTORE_CREDENTIALS: "keystore_creds" - KAFKA_SSL_KEY_CREDENTIALS: "sslkey_creds" - KAFKA_SSL_TRUSTSTORE_FILENAME: "kafka.server.truststore.jks" - KAFKA_SSL_TRUSTSTORE_CREDENTIALS: "truststore_creds" - KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: "PLAIN" - KAFKA_SASL_ENABLED_MECHANISMS: "OAUTHBEARER" - KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf" + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: '0' + KAFKA_SSL_KEYSTORE_FILENAME: 'kafka.server.keystore.jks' + KAFKA_SSL_KEYSTORE_CREDENTIALS: 'keystore_creds' + KAFKA_SSL_KEY_CREDENTIALS: 'sslkey_creds' + KAFKA_SSL_TRUSTSTORE_FILENAME: 'kafka.server.truststore.jks' + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: 'truststore_creds' + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN' + KAFKA_SASL_ENABLED_MECHANISMS: 'OAUTHBEARER' + KAFKA_OPTS: '-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf' # suppress verbosity # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template - KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO" + KAFKA_LOG4J_LOGGERS: 'kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO' volumes: - - /var/run/docker.sock:/var/run/docker.sock - - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks - - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks - - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds - - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds - - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds - - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks:ro,z + - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks:ro,z + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds:ro,z + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds:ro,z + - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds:ro,z + - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf:ro,z kafka3: image: confluentinc/cp-kafka:5.3.1 hostname: kafka3 container_name: kafka3 labels: - - "custom.project=kafkajs" - - "custom.service=kafka3" + - 'custom.project=kafkajs' + - 'custom.service=kafka3' depends_on: - zookeeper ports: - - "29098:29098" - - "9098:9098" - - "29099:29099" - - "9099:9099" - - "29100:29100" - - "9100:9100" + - '29098:29098' + - '9098:9098' + - '29099:29099' + - '9099:9099' + - '29100:29100' + - '9100:9100' environment: - KAFKA_BROKER_ID: 2 + KAFKA_BROKER_ID: '2' KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,SSL:SSL,SSL_HOST:SSL,SASL_SSL:SASL_SSL,SASL_SSL_HOST:SASL_SSL KAFKA_ADVERTISED_LISTENERS: 
PLAINTEXT://kafka3:29098,PLAINTEXT_HOST://localhost:9098,SSL://kafka3:29099,SSL_HOST://localhost:9099,SASL_SSL://kafka3:29100,SASL_SSL_HOST://localhost:9100 KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true' KAFKA_DELETE_TOPIC_ENABLE: 'true' - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 - KAFKA_SSL_KEYSTORE_FILENAME: "kafka.server.keystore.jks" - KAFKA_SSL_KEYSTORE_CREDENTIALS: "keystore_creds" - KAFKA_SSL_KEY_CREDENTIALS: "sslkey_creds" - KAFKA_SSL_TRUSTSTORE_FILENAME: "kafka.server.truststore.jks" - KAFKA_SSL_TRUSTSTORE_CREDENTIALS: "truststore_creds" - KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: "PLAIN" - KAFKA_SASL_ENABLED_MECHANISMS: "OAUTHBEARER" - KAFKA_OPTS: "-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf" + KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: '0' + KAFKA_SSL_KEYSTORE_FILENAME: 'kafka.server.keystore.jks' + KAFKA_SSL_KEYSTORE_CREDENTIALS: 'keystore_creds' + KAFKA_SSL_KEY_CREDENTIALS: 'sslkey_creds' + KAFKA_SSL_TRUSTSTORE_FILENAME: 'kafka.server.truststore.jks' + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: 'truststore_creds' + KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN' + KAFKA_SASL_ENABLED_MECHANISMS: 'OAUTHBEARER' + KAFKA_OPTS: '-Djava.security.auth.login.config=/opt/kafka/config/server-jaas.conf' # suppress verbosity # https://github.com/confluentinc/cp-docker-images/blob/master/debian/kafka/include/etc/confluent/docker/log4j.properties.template - KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO" + KAFKA_LOG4J_LOGGERS: 'kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO' volumes: - - /var/run/docker.sock:/var/run/docker.sock - - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks - - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks - - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds - - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds - - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds - - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf \ No newline at end of file + - ./testHelpers/certs/kafka.server.keystore.jks:/etc/kafka/secrets/kafka.server.keystore.jks:ro,z + - ./testHelpers/certs/kafka.server.truststore.jks:/etc/kafka/secrets/kafka.server.truststore.jks:ro,z + - ./testHelpers/certs/keystore_creds:/etc/kafka/secrets/keystore_creds:ro,z + - ./testHelpers/certs/sslkey_creds:/etc/kafka/secrets/sslkey_creds:ro,z + - ./testHelpers/certs/truststore_creds:/etc/kafka/secrets/truststore_creds:ro,z + - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf:ro,z \ No newline at end of file From 0226942156bc525b4402b3aea0b8fd4af9f581e1 Mon Sep 17 00:00:00 2001 From: Michele Tibaldi Date: Mon, 13 Jul 2020 22:28:31 +0200 Subject: [PATCH 32/35] Upgraded OAUTHBEARER tests to 2.4 --- ..._3_oauthbearer.yml => docker-compose.2_4_oauthbearer.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename docker-compose.2_3_oauthbearer.yml => docker-compose.2_4_oauthbearer.yml (98%) diff --git a/docker-compose.2_3_oauthbearer.yml b/docker-compose.2_4_oauthbearer.yml similarity index 98% rename from docker-compose.2_3_oauthbearer.yml rename to docker-compose.2_4_oauthbearer.yml index 1f00bb3ac..1cfc18f6f 100644 --- a/docker-compose.2_3_oauthbearer.yml +++ b/docker-compose.2_4_oauthbearer.yml @@ -14,7 +14,7 @@ services: - 
./testHelpers/kafka/server-jaas_oauth.conf:/etc/kafka/server-jaas.conf:ro,z kafka1: - image: confluentinc/cp-kafka:5.3.1 + image: confluentinc/cp-kafka:5.4.2 hostname: kafka1 container_name: kafka1 labels: @@ -57,7 +57,7 @@ services: - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf:ro,z kafka2: - image: confluentinc/cp-kafka:5.3.1 + image: confluentinc/cp-kafka:5.4.2 hostname: kafka2 container_name: kafka2 labels: @@ -100,7 +100,7 @@ services: - ./testHelpers/kafka/server-jaas_oauth.conf:/opt/kafka/config/server-jaas.conf:ro,z kafka3: - image: confluentinc/cp-kafka:5.3.1 + image: confluentinc/cp-kafka:5.4.2 hostname: kafka3 container_name: kafka3 labels: From 54a37e3e3569f70ebe5bd48aee2644407efa65f8 Mon Sep 17 00:00:00 2001 From: Tommy Brunn Date: Mon, 20 Jul 2020 15:51:21 +0200 Subject: [PATCH 33/35] Add pipeline job for running Oauthbearer tests --- azure-pipelines.yml | 31 ++++++++++++++++++++++++++++++- package.json | 2 ++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 9dccd0a6a..180eb6a29 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -177,7 +177,36 @@ jobs: condition: succeededOrFailed() inputs: testRunner: JUnit - testResultsFiles: '**/test-report.xml' + testResultsFiles: "**/test-report.xml" + + - job: test_oauthbearer + displayName: 'OauthBearer' + dependsOn: lint + variables: + test_retries: + - name: test_retries + value: 2 + - name: COMPOSE_FILE + value: docker-compose.2_4_oauthbearer.yml + pool: + vmImage: 'Ubuntu 16.04' + steps: + - task: NodeTool@0 + inputs: + versionSpec: '10.x' + - bash: ./scripts/pipeline/shouldRunTests.sh && SKIP_TESTS=true && echo "Only non-code has changed!" || test true + displayName: should skip tests + - bash: test $SKIP_TESTS && echo "Skipped!" || yarn install + displayName: yarn install + - bash: test $SKIP_TESTS && echo "Skipped!" || docker-compose -f ${COMPOSE_FILE} pull + displayName: docker-compose pull + - bash: test $SKIP_TESTS && echo "Skipped!" 
|| yarn test:group:oauthbearer:ci + displayName: test + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testRunner: JUnit + testResultsFiles: "**/test-report.xml" ####### Deploy - job: npm_release diff --git a/package.json b/package.json index bfca5f2aa..7561cfdc6 100644 --- a/package.json +++ b/package.json @@ -36,11 +36,13 @@ "test:group:producer": "yarn jest --forceExit --testPathPattern 'src/producer/.*'", "test:group:consumer": "yarn jest --forceExit --testPathPattern 'src/consumer/.*.spec.js'", "test:group:others": "yarn jest --forceExit --testPathPattern 'src/(?!(broker|admin|producer|consumer)/).*'", + "test:group:oauthbearer": "OAUTHBEARER_ENABLED=1 yarn jest --forceExit --testNamePattern 'SASL OAUTHBEARER'", "test:group:broker:ci": "JEST_JUNIT_OUTPUT_NAME=test-report.xml ./scripts/testWithKafka.sh \"yarn test:group:broker --ci --maxWorkers=4 --no-watchman\"", "test:group:admin:ci": "JEST_JUNIT_OUTPUT_NAME=test-report.xml ./scripts/testWithKafka.sh \"yarn test:group:admin --ci --maxWorkers=4 --no-watchman\"", "test:group:producer:ci": "JEST_JUNIT_OUTPUT_NAME=test-report.xml ./scripts/testWithKafka.sh \"yarn test:group:producer --ci --maxWorkers=4 --no-watchman\"", "test:group:consumer:ci": "JEST_JUNIT_OUTPUT_NAME=test-report.xml ./scripts/testWithKafka.sh \"yarn test:group:consumer --ci --maxWorkers=4 --no-watchman\"", "test:group:others:ci": "JEST_JUNIT_OUTPUT_NAME=test-report.xml ./scripts/testWithKafka.sh \"yarn test:group:others --ci --maxWorkers=4 --no-watchman\"", + "test:group:oauthbearer:ci": "JEST_JUNIT_OUTPUT_NAME=test-report.xml COMPOSE_FILE='docker-compose.2_4_oauthbearer.yml' ./scripts/testWithKafka.sh \"yarn test:group:oauthbearer --ci --maxWorkers=4 --no-watchman\"", "test:types": "tsc -p types/" }, "devDependencies": { From ceea6506242ca81d85f16e32341a39398cd17e11 Mon Sep 17 00:00:00 2001 From: Tommy Brunn Date: Mon, 20 Jul 2020 15:57:35 +0200 Subject: [PATCH 34/35] Fix pipeline job variable definition --- azure-pipelines.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 180eb6a29..ba659e159 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -183,11 +183,10 @@ jobs: displayName: 'OauthBearer' dependsOn: lint variables: - test_retries: - - name: test_retries - value: 2 - - name: COMPOSE_FILE - value: docker-compose.2_4_oauthbearer.yml + - name: test_retries + value: 2 + - name: COMPOSE_FILE + value: docker-compose.2_4_oauthbearer.yml pool: vmImage: 'Ubuntu 16.04' steps: From bd5d1721938617b25f8dd25f0e3b2b05283f5986 Mon Sep 17 00:00:00 2001 From: Tommy Brunn Date: Mon, 20 Jul 2020 16:06:23 +0200 Subject: [PATCH 35/35] Only run oauthbearer tests in CI on master Not ideal, but because we have to scan for matching test names, the job ends up taking 5+ minutes in CI. The tests could be reorganized to instead move all OauthBearer tests into a single spec (or specs with a specific name) to be able to use `testPathPattern` instead of `testNamePattern`, at which point we could probably enable this again. --- azure-pipelines.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ba659e159..cd3951a67 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -181,6 +181,7 @@ jobs: - job: test_oauthbearer displayName: 'OauthBearer' + condition: contains(variables['Build.SourceBranch'], 'refs/heads/master') dependsOn: lint variables: - name: test_retries