
Do not require Confluent libraries for Kafka connector if Protobuf features are not being used #17858


Merged · 7 commits · Jun 15, 2023
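The change, visible in the diffs below, makes the Kafka connector reference the Confluent Protobuf classes only from inside memoized suppliers, so those classes are loaded the first time a Protobuf schema is actually parsed rather than at connector startup. A minimal, self-contained sketch of that lazy-initialization pattern (all names below are illustrative stand-ins, not code from this PR):

import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;

public final class LazyLoadingSketch
{
    // Lightweight interface that is always on the classpath (plays the role of SchemaParser).
    interface Parser
    {
        String parse(String input);
    }

    // Stand-in for a class shipped in an optional jar (think kafka-protobuf-provider).
    static class HeavyParser
            implements Parser
    {
        static {
            System.out.println("HeavyParser initialized");
        }

        @Override
        public String parse(String input)
        {
            return "parsed: " + input;
        }
    }

    // HeavyParser is mentioned only inside the lambda body, so the JVM does not initialize it
    // (or fail because its jar is absent) until get() runs for the first time;
    // Suppliers.memoize then caches that single instance.
    private final Supplier<Parser> delegate = Suppliers.memoize(() -> new HeavyParser());

    public Parser parser()
    {
        return delegate.get();
    }

    public static void main(String[] args)
    {
        LazyLoadingSketch sketch = new LazyLoadingSketch();
        System.out.println("sketch created, HeavyParser untouched");
        System.out.println(sketch.parser().parse("hello")); // static initializer runs only here
    }
}

LazyLoadedProtobufSchemaProvider and LazyLoadedProtobufSchemaParser below follow this shape, so the connector only needs the Confluent jars when a Protobuf schema is actually used.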
----------------------------------------------------------------------
@@ -57,7 +57,7 @@ public Connector create(String catalogName, Map<String, String> config, Connecto
new CatalogNameModule(catalogName),
new JsonModule(),
new TypeDeserializerModule(context.getTypeManager()),
new KafkaConnectorModule(),
new KafkaConnectorModule(context.getTypeManager()),
extension,
binder -> {
binder.bind(ClassLoader.class).toInstance(KafkaConnectorFactory.class.getClassLoader());
----------------------------------------------------------------------
@@ -31,15 +31,24 @@
import io.trino.spi.connector.ConnectorPageSinkProvider;
import io.trino.spi.connector.ConnectorRecordSetProvider;
import io.trino.spi.connector.ConnectorSplitManager;
import io.trino.spi.type.TypeManager;

import static com.google.inject.multibindings.Multibinder.newSetBinder;
import static io.airlift.configuration.ConditionalModule.conditionalModule;
import static io.airlift.configuration.ConfigBinder.configBinder;
import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;
import static java.util.Objects.requireNonNull;

public class KafkaConnectorModule
extends AbstractConfigurationAwareModule
{
private final TypeManager typeManager;

public KafkaConnectorModule(TypeManager typeManager)
{
this.typeManager = requireNonNull(typeManager, "typeManager is null");
}

@Override
public void setup(Binder binder)
{
@@ -58,7 +67,7 @@ public void setup(Binder binder)

configBinder(binder).bindConfig(KafkaConfig.class);
bindTopicSchemaProviderModule(FileTableDescriptionSupplier.NAME, new FileTableDescriptionSupplierModule());
bindTopicSchemaProviderModule(ConfluentSchemaRegistryTableDescriptionSupplier.NAME, new ConfluentModule());
bindTopicSchemaProviderModule(ConfluentSchemaRegistryTableDescriptionSupplier.NAME, new ConfluentModule(typeManager));
newSetBinder(binder, SessionPropertiesProvider.class).addBinding().to(KafkaSessionProperties.class).in(Scopes.SINGLETON);
jsonCodecBinder(binder).bindJsonCodec(KafkaTopicDescription.class);
}
----------------------------------------------------------------------
@@ -75,7 +75,7 @@ public KafkaTopicFieldGroup parse(ConnectorSession session, String subject, Pars
Optional.empty(),
Optional.of(subject),
Streams.concat(getFields(protobufSchema.toDescriptor()),
getOneofs(protobufSchema))
getOneofs(protobufSchema.toDescriptor()))
.collect(toImmutableList()));
}

@@ -101,9 +101,9 @@ private Stream<KafkaTopicFieldDescription> getFields(Descriptor descriptor)
false));
}

private Stream<KafkaTopicFieldDescription> getOneofs(ProtobufSchema protobufSchema)
private Stream<KafkaTopicFieldDescription> getOneofs(Descriptor descriptor)
{
return protobufSchema.toDescriptor()
return descriptor
.getOneofs()
.stream()
.map(oneofDescriptor ->
----------------------------------------------------------------------
@@ -17,6 +17,7 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Binder;
import com.google.inject.Inject;
import com.google.inject.Module;
import com.google.inject.Provides;
import com.google.inject.Scopes;
@@ -52,6 +53,7 @@
import io.trino.plugin.kafka.schema.TableDescriptionSupplier;
import io.trino.spi.HostAddress;
import io.trino.spi.TrinoException;
import io.trino.spi.type.TypeManager;

import java.util.List;
import java.util.Map;
@@ -74,9 +76,18 @@
public class ConfluentModule
extends AbstractConfigurationAwareModule
{
private final TypeManager typeManager;

public ConfluentModule(TypeManager typeManager)
{
this.typeManager = requireNonNull(typeManager, "typeManager is null");
}

@Override
protected void setup(Binder binder)
{
binder.bind(TypeManager.class).toInstance(typeManager);

configBinder(binder).bindConfig(ConfluentSchemaRegistryConfig.class);
install(new ConfluentDecoderModule());
install(new ConfluentEncoderModule());
@@ -89,7 +100,7 @@ protected void setup(Binder binder)
newSetBinder(binder, SessionPropertiesProvider.class).addBinding().to(ConfluentSessionProperties.class).in(Scopes.SINGLETON);
binder.bind(TableDescriptionSupplier.class).toProvider(ConfluentSchemaRegistryTableDescriptionSupplier.Factory.class).in(Scopes.SINGLETON);
newMapBinder(binder, String.class, SchemaParser.class).addBinding("AVRO").to(AvroSchemaParser.class).in(Scopes.SINGLETON);
newMapBinder(binder, String.class, SchemaParser.class).addBinding("PROTOBUF").to(ProtobufSchemaParser.class).in(Scopes.SINGLETON);
newMapBinder(binder, String.class, SchemaParser.class).addBinding("PROTOBUF").to(LazyLoadedProtobufSchemaParser.class).in(Scopes.SINGLETON);
}

@Provides
@@ -157,7 +168,7 @@ private static class LazyLoadedProtobufSchemaProvider
implements SchemaProvider
{
// Make JVM to load lazily ProtobufSchemaProvider, so Kafka connector can be used
// with protobuf dependency for non protobuf based topics
// without protobuf dependency for non protobuf based topics
private final Supplier<SchemaProvider> delegate = Suppliers.memoize(this::create);
private final AtomicReference<Map<String, ?>> configuration = new AtomicReference<>();

@@ -195,4 +206,24 @@ private SchemaProvider create()
return schemaProvider;
}
}

public static class LazyLoadedProtobufSchemaParser
extends ForwardingSchemaParser
{
// Make JVM to load lazily ProtobufSchemaParser, so Kafka connector can be used
// without protobuf dependency for non protobuf based topics
private final Supplier<SchemaParser> delegate;

@Inject
public LazyLoadedProtobufSchemaParser(TypeManager typeManager)
{
this.delegate = Suppliers.memoize(() -> new ProtobufSchemaParser(requireNonNull(typeManager, "typeManager is null")));
}

@Override
protected SchemaParser delegate()
{
return delegate.get();
}
}
}
----------------------------------------------------------------------
@@ -0,0 +1,30 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.kafka.schema.confluent;

import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.trino.plugin.kafka.KafkaTopicFieldGroup;
import io.trino.spi.connector.ConnectorSession;

public abstract class ForwardingSchemaParser
implements SchemaParser
{
protected abstract SchemaParser delegate();

@Override
public KafkaTopicFieldGroup parse(ConnectorSession session, String subject, ParsedSchema parsedSchema)
{
return delegate().parse(session, subject, parsedSchema);
}
}
----------------------------------------------------------------------
@@ -0,0 +1,27 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.kafka.schema.confluent;

import org.testng.annotations.Test;

import static io.trino.spi.testing.InterfaceTestUtils.assertAllMethodsOverridden;

public class TestForwardingSchemaParser
{
@Test
public void testAllMethodsOverridden()
{
assertAllMethodsOverridden(SchemaParser.class, ForwardingSchemaParser.class);
}
}
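
The test above relies on InterfaceTestUtils.assertAllMethodsOverridden to ensure that ForwardingSchemaParser re-declares every SchemaParser method, so a method added to the interface later cannot silently bypass the lazy delegate. A simplified sketch of what such a check does (an illustration only, not Trino's actual utility):

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

final class OverrideChecker
{
    private OverrideChecker() {}

    // Fails unless the forwarding class itself declares every non-static, non-default
    // method of the interface; inherited implementations do not count.
    static void assertAllMethodsOverridden(Class<?> iface, Class<?> forwarding)
    {
        for (Method method : iface.getMethods()) {
            if (method.isDefault() || Modifier.isStatic(method.getModifiers())) {
                continue; // this sketch only requires forwarding of abstract instance methods
            }
            try {
                forwarding.getDeclaredMethod(method.getName(), method.getParameterTypes());
            }
            catch (NoSuchMethodException e) {
                throw new AssertionError(forwarding.getName() + " does not override " + method, e);
            }
        }
    }
}

Calling OverrideChecker.assertAllMethodsOverridden(SchemaParser.class, ForwardingSchemaParser.class) would then start failing as soon as SchemaParser grows a method that the forwarding class forgets to delegate.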
----------------------------------------------------------------------
@@ -23,25 +23,20 @@
import org.testcontainers.containers.startupcheck.IsRunningStartupCheckStrategy;
import org.testcontainers.utility.MountableFile;

import java.io.File;
import java.time.Duration;

import static io.trino.testing.TestingProperties.getConfluentVersion;
import static io.trino.tests.product.launcher.docker.ContainerUtil.forSelectedPorts;
import static io.trino.tests.product.launcher.env.EnvironmentContainers.isTrinoContainer;
import static io.trino.tests.product.launcher.env.common.Standard.CONTAINER_TRINO_ETC;
import static java.util.Objects.requireNonNull;
import static org.testcontainers.containers.wait.strategy.Wait.forLogMessage;
import static org.testcontainers.utility.MountableFile.forClasspathResource;
import static org.testcontainers.utility.MountableFile.forHostPath;

public class Kafka
implements EnvironmentExtender
{
private static final String CONFLUENT_VERSION = "7.3.1";
private static final int SCHEMA_REGISTRY_PORT = 8081;
private static final File KAFKA_PROTOBUF_PROVIDER = new File("testing/trino-product-tests-launcher/target/kafka-protobuf-provider-" + getConfluentVersion() + ".jar");
private static final File KAFKA_PROTOBUF_TYPES = new File("testing/trino-product-tests-launcher/target/kafka-protobuf-types-" + getConfluentVersion() + ".jar");
static final String KAFKA = "kafka";
static final String SCHEMA_REGISTRY = "schema-registry";
static final String ZOOKEEPER = "zookeeper";
@@ -69,10 +64,7 @@ public void extendEnvironment(Environment.Builder builder)
if (isTrinoContainer(container.getLogicalName())) {
MountableFile logConfigFile = forHostPath(configDir.getPath("log.properties"));
container
.withCopyFileToContainer(logConfigFile, CONTAINER_TRINO_ETC + "/log.properties")
.withCopyFileToContainer(forHostPath(KAFKA_PROTOBUF_PROVIDER.getAbsolutePath()), "/docker/kafka-protobuf-provider/kafka-protobuf-provider.jar")
.withCopyFileToContainer(forHostPath(KAFKA_PROTOBUF_TYPES.getAbsolutePath()), "/docker/kafka-protobuf-provider/kafka-protobuf-types.jar")
.withCopyFileToContainer(forClasspathResource("install-kafka-protobuf-provider.sh", 0755), "/docker/presto-init.d/install-kafka-protobuf-provider.sh");
.withCopyFileToContainer(logConfigFile, CONTAINER_TRINO_ETC + "/log.properties");
}
});

----------------------------------------------------------------------
@@ -0,0 +1,76 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package io.trino.tests.product.launcher.env.environment;

import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import io.trino.tests.product.launcher.docker.DockerFiles;
import io.trino.tests.product.launcher.docker.DockerFiles.ResourceProvider;
import io.trino.tests.product.launcher.env.Environment;
import io.trino.tests.product.launcher.env.EnvironmentProvider;
import io.trino.tests.product.launcher.env.common.Kafka;
import io.trino.tests.product.launcher.env.common.StandardMultinode;
import io.trino.tests.product.launcher.env.common.TestsEnvironment;

import java.io.File;

import static io.trino.testing.TestingProperties.getConfluentVersion;
import static io.trino.tests.product.launcher.env.EnvironmentContainers.configureTempto;
import static io.trino.tests.product.launcher.env.EnvironmentContainers.isTrinoContainer;
import static io.trino.tests.product.launcher.env.common.Standard.CONTAINER_TRINO_ETC;
import static java.util.Objects.requireNonNull;
import static org.testcontainers.utility.MountableFile.forClasspathResource;
import static org.testcontainers.utility.MountableFile.forHostPath;

/**
* {@link EnvMultinodeConfluentKafka} is intended to be the only Kafka product test environment which copies the non-free Confluent License libraries to the Kafka connector
* classpath to test functionality which requires those classes.
* The other {@link Kafka} environments MUST NOT copy these jars, otherwise it is not possible to verify the out-of-the-box Trino setup, which does not ship with the Confluent-licensed
* libraries.
*/
@TestsEnvironment
public final class EnvMultinodeConfluentKafka
extends EnvironmentProvider
{
private static final File KAFKA_PROTOBUF_PROVIDER = new File("testing/trino-product-tests-launcher/target/kafka-protobuf-provider-" + getConfluentVersion() + ".jar");
private static final File KAFKA_PROTOBUF_TYPES = new File("testing/trino-product-tests-launcher/target/kafka-protobuf-types-" + getConfluentVersion() + ".jar");

private final ResourceProvider configDir;

@Inject
public EnvMultinodeConfluentKafka(Kafka kafka, StandardMultinode standardMultinode, DockerFiles dockerFiles)
{
super(ImmutableList.of(standardMultinode, kafka));
requireNonNull(dockerFiles, "dockerFiles is null");
configDir = dockerFiles.getDockerFilesHostDirectory("conf/environment/multinode-kafka-confluent-license/");
}

@Override
public void extendEnvironment(Environment.Builder builder)
{
builder.configureContainers(container -> {
if (isTrinoContainer(container.getLogicalName())) {
builder.addConnector("kafka", forHostPath(configDir.getPath("kafka.properties")), CONTAINER_TRINO_ETC + "/catalog/kafka.properties");
builder.addConnector("kafka", forHostPath(configDir.getPath("kafka_schema_registry.properties")), CONTAINER_TRINO_ETC + "/catalog/kafka_schema_registry.properties");
container
.withCopyFileToContainer(forHostPath(KAFKA_PROTOBUF_PROVIDER.getAbsolutePath()), "/docker/kafka-protobuf-provider/kafka-protobuf-provider.jar")
.withCopyFileToContainer(forHostPath(KAFKA_PROTOBUF_TYPES.getAbsolutePath()), "/docker/kafka-protobuf-provider/kafka-protobuf-types.jar")
.withCopyFileToContainer(forClasspathResource("install-kafka-protobuf-provider.sh", 0755), "/docker/presto-init.d/install-kafka-protobuf-provider.sh");
}
});

configureTempto(builder, configDir);
}
}
----------------------------------------------------------------------
@@ -15,6 +15,7 @@

import com.google.common.collect.ImmutableList;
import io.trino.tests.product.launcher.env.EnvironmentConfig;
import io.trino.tests.product.launcher.env.environment.EnvMultinodeConfluentKafka;
import io.trino.tests.product.launcher.env.environment.EnvMultinodeKafka;
import io.trino.tests.product.launcher.env.environment.EnvMultinodeKafkaSaslPlaintext;
import io.trino.tests.product.launcher.env.environment.EnvMultinodeKafkaSsl;
@@ -35,6 +36,10 @@ public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
testOnEnvironment(EnvMultinodeKafka.class)
.withGroups("configured_features", "kafka")
.build(),
testOnEnvironment(EnvMultinodeConfluentKafka.class)
// testing kafka group with this env is slightly redundant but helps verify that copying confluent libraries doesn't break non-confluent functionality
.withGroups("configured_features", "kafka", "kafka_confluent_license")
.build(),
testOnEnvironment(EnvMultinodeKafkaSsl.class)
.withGroups("configured_features", "kafka")
.build(),
----------------------------------------------------------------------
@@ -0,0 +1,23 @@
connector.name=kafka
kafka.table-names=product_tests.read_simple_key_and_value,\
product_tests.read_all_datatypes_raw,\
product_tests.read_all_datatypes_csv,\
product_tests.read_all_datatypes_json,\
product_tests.read_all_datatypes_avro,\
product_tests.read_all_null_avro,\
product_tests.read_structural_datatype_avro,\
product_tests.write_simple_key_and_value,\
product_tests.write_all_datatypes_raw,\
product_tests.write_all_datatypes_csv,\
product_tests.write_all_datatypes_json,\
product_tests.write_all_datatypes_avro,\
product_tests.write_structural_datatype_avro,\
product_tests.pushdown_partition,\
product_tests.pushdown_offset,\
product_tests.pushdown_create_time,\
product_tests.all_datatypes_protobuf,\
product_tests.structural_datatype_protobuf,\
product_tests.read_basic_datatypes_protobuf,\
product_tests.read_basic_structural_datatypes_protobuf
kafka.nodes=kafka:9092
kafka.table-description-dir=/docker/presto-product-tests/conf/presto/etc/catalog/kafka
----------------------------------------------------------------------
@@ -0,0 +1,5 @@
connector.name=kafka
kafka.nodes=kafka:9092
kafka.table-description-supplier=confluent
kafka.confluent-schema-registry-url=http://schema-registry:8081
kafka.default-schema=product_tests
----------------------------------------------------------------------
@@ -0,0 +1,2 @@
schema-registry:
url: http://schema-registry:8081
----------------------------------------------------------------------
@@ -69,6 +69,7 @@ public final class TestGroups
public static final String CANCEL_QUERY = "cancel_query";
public static final String LARGE_QUERY = "large_query";
public static final String KAFKA = "kafka";
public static final String KAFKA_CONFLUENT_LICENSE = "kafka_confluent_license";
public static final String TWO_HIVES = "two_hives";
public static final String ICEBERG = "iceberg";
public static final String ICEBERG_FORMAT_VERSION_COMPATIBILITY = "iceberg_format_version_compatibility";