Downgrading to Java 9 (#23)
* Removing var

* Downgrade to Java 10

* Downgrade to Java 9
mfvanek authored Dec 26, 2019
1 parent 13e1c6a commit dab739f
Showing 46 changed files with 315 additions and 273 deletions.
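Aside from version numbers in the build and CI configuration, the source changes shown below follow a few recurring patterns: local variable type inference (`var`, Java 10+) replaced with explicit types, the `Set.copyOf`/`List.copyOf` factories (Java 10+) replaced with copy constructors, and `Predicate.not(String::isBlank)` (Java 11) replaced with a commons-lang3 check. A minimal sketch of those substitutions (illustrative names only, not lines taken from this diff; assumes commons-lang3 is on the classpath, as it already is in this project):

```java
import org.apache.commons.lang3.StringUtils;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class Java9DowngradePatterns {

    static void demo(final List<String> hosts) {
        // 1. `var` (Java 10+) becomes an explicit type.
        // final var names = new ArrayList<>(hosts);          // before
        final List<String> names = new ArrayList<>(hosts);    // after

        // 2. Set.copyOf / List.copyOf (Java 10+) become copy constructors.
        // final Set<String> unique = Set.copyOf(names);      // before
        final Set<String> unique = new HashSet<>(names);      // after

        // 3. Predicate.not(String::isBlank) (Java 11) becomes a
        //    commons-lang3 method reference that compiles on Java 9.
        names.stream()
                .filter(StringUtils::isNotBlank)
                .forEach(System.out::println);

        System.out.println(unique.size());
    }
}
```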
2 changes: 1 addition & 1 deletion .editorconfig
@@ -4,7 +4,7 @@ end_of_line = crlf
 indent_size = 4
 indent_style = space
 insert_final_newline = true
-max_line_length = 120
+max_line_length = 130
 tab_width = 4
 ij_continuation_indent_size = 8

4 changes: 2 additions & 2 deletions .github/workflows/gradle.yml
@@ -7,9 +7,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@v1
-    - name: Set up JDK 11
+    - name: Set up JDK 9
       uses: actions/setup-java@v1
       with:
-        java-version: 11
+        java-version: 9
     - name: Build with Gradle
       run: ./gradlew build
18 changes: 9 additions & 9 deletions README.md
@@ -43,10 +43,10 @@ public class DemoApp {
 final String readUrl = "jdbc:postgresql://host-name-1:6432,host-name-2:6432,host-name-3:6432/db_name_testing?targetServerType=preferSlave&loadBalanceHosts=true&ssl=true&prepareThreshold=0&preparedStatementCacheQueries=0&sslmode=require";
 final String userName = "user_name_testing";
 final String password = "password_testing";
-final var haPgConnectionFactory = new HighAvailabilityPgConnectionFactoryImpl(new PgConnectionFactoryImpl());
-final var haPgConnection = haPgConnectionFactory.of(writeUrl, userName, password, readUrl);
-final var indexesHealth = new IndexesHealthImpl(haPgConnection, new MaintenanceFactoryImpl());
-final var logger = new SimpleHealthLogger(indexesHealth);
+final HighAvailabilityPgConnectionFactory haPgConnectionFactory = new HighAvailabilityPgConnectionFactoryImpl(new PgConnectionFactoryImpl());
+final HighAvailabilityPgConnection haPgConnection = haPgConnectionFactory.of(writeUrl, userName, password, readUrl);
+final IndexesHealth indexesHealth = new IndexesHealthImpl(haPgConnection, new MaintenanceFactoryImpl());
+final IndexesHealthLogger logger = new SimpleHealthLogger(indexesHealth);
 logger.logAll(Exclusions.empty(), PgContext.ofPublic())
 .forEach(System.out::println);
 // Resetting current statistics
@@ -59,14 +59,14 @@ public class DemoApp {
 final String cascadeAsyncReadUrl = "jdbc:postgresql://host-name-6:6432/db_name_production?ssl=true&targetServerType=preferSlave&loadBalanceHosts=true&prepareThreshold=0&preparedStatementCacheQueries=0&connectTimeout=2&socketTimeout=50&loginTimeout=10&sslmode=require";
 final String userName = "user_name_production";
 final String password = "password_production";
-final var haPgConnectionFactory = new HighAvailabilityPgConnectionFactoryImpl(new PgConnectionFactoryImpl());
-final var haPgConnection = haPgConnectionFactory.of(writeUrl, userName, password, readUrl, cascadeAsyncReadUrl);
-final var indexesHealth = new IndexesHealthImpl(haPgConnection, new MaintenanceFactoryImpl());
-final var exclusions = Exclusions.builder()
+final HighAvailabilityPgConnectionFactory haPgConnectionFactory = new HighAvailabilityPgConnectionFactoryImpl(new PgConnectionFactoryImpl());
+final HighAvailabilityPgConnection haPgConnection = haPgConnectionFactory.of(writeUrl, userName, password, readUrl, cascadeAsyncReadUrl);
+final IndexesHealth indexesHealth = new IndexesHealthImpl(haPgConnection, new MaintenanceFactoryImpl());
+final Exclusions exclusions = Exclusions.builder()
 .withIndexSizeThreshold(10, MemoryUnit.MB)
 .withTableSizeThreshold(10, MemoryUnit.MB)
 .build();
-final var logger = new SimpleHealthLogger(indexesHealth);
+final IndexesHealthLogger logger = new SimpleHealthLogger(indexesHealth);
 logger.logAll(exclusions, PgContext.ofPublic())
 .forEach(System.out::println);
 }
6 changes: 3 additions & 3 deletions build.gradle
@@ -3,10 +3,10 @@ plugins {
 }

 group 'io.github.mfvanek'
-version '0.1.0'
+version '0.1.1'

-sourceCompatibility = 11
-targetCompatibility = 11
+sourceCompatibility = 9
+targetCompatibility = 9

 repositories {
 mavenCentral()
HighAvailabilityPgConnectionFactoryImpl.java
@@ -8,13 +8,15 @@
 package io.github.mfvanek.pg.connection;

 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.Pair;

 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;

 public class HighAvailabilityPgConnectionFactoryImpl implements HighAvailabilityPgConnectionFactory {

@@ -59,7 +61,7 @@ private HighAvailabilityPgConnection create(@Nonnull final String writeUrl,
 @Nonnull final String password,
 @Nullable final String readUrl,
 @Nullable final String cascadeAsyncReadUrl) {
-final var connectionToMaster = pgConnectionFactory.forUrl(writeUrl, userName, password);
+final PgConnection connectionToMaster = pgConnectionFactory.forUrl(writeUrl, userName, password);
 final Map<String, PgConnection> connectionsToReplicas = new HashMap<>();
 addReplicasDataSources(connectionsToReplicas, writeUrl, userName, password);
 if (StringUtils.isNotBlank(readUrl)) {
@@ -68,15 +70,15 @@ private HighAvailabilityPgConnection create(@Nonnull final String writeUrl,
 if (StringUtils.isNotBlank(cascadeAsyncReadUrl)) {
 addReplicasDataSources(connectionsToReplicas, cascadeAsyncReadUrl, userName, password);
 }
-return HighAvailabilityPgConnectionImpl.of(connectionToMaster, Set.copyOf(connectionsToReplicas.values()));
+return HighAvailabilityPgConnectionImpl.of(connectionToMaster, new HashSet<>(connectionsToReplicas.values()));
 }

 private void addReplicasDataSources(@Nonnull final Map<String, PgConnection> connectionsToReplicas,
 @Nonnull final String readUrl,
 @Nonnull final String userName,
 @Nonnull final String password) {
-final var allHosts = PgUrlParser.extractNameWithPortAndUrlForEachHost(readUrl);
-for (var host : allHosts) {
+final List<Pair<String, String>> allHosts = PgUrlParser.extractNameWithPortAndUrlForEachHost(readUrl);
+for (Pair<String, String> host : allHosts) {
 connectionsToReplicas.computeIfAbsent(
 host.getKey(), h -> pgConnectionFactory.forUrl(host.getValue(), userName, password));
 }
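`Set.copyOf` appeared in Java 10, so the replica connections are now collected with the `HashSet` copy constructor. Unlike `Set.copyOf`, the copy is mutable; if the unmodifiable behaviour should be preserved on Java 9, a small wrapper along these lines would do (a sketch under that assumption, not part of this commit):

```java
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

final class SetCopy {

    private SetCopy() {
    }

    // Defensive, unmodifiable copy that compiles on Java 9 and approximates
    // the read-only behaviour of Set.copyOf from Java 10.
    static <T> Set<T> unmodifiableCopy(final Collection<? extends T> source) {
        return Collections.unmodifiableSet(new HashSet<>(source));
    }
}
```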
PgConnectionFactoryImpl.java
@@ -11,6 +11,7 @@
 import org.slf4j.LoggerFactory;

 import javax.annotation.Nonnull;
+import javax.sql.DataSource;

 public class PgConnectionFactoryImpl implements PgConnectionFactory {

@@ -23,7 +24,7 @@ public PgConnection forUrl(@Nonnull final String pgUrl,
 @Nonnull final String password) {
 LOGGER.debug("Creating {} with pgUrl = {}, userName = {}, password = {}",
 PgConnection.class.getSimpleName(), pgUrl, userName, "*****");
-final var dataSource = PgConnectionHelper.createDataSource(pgUrl, userName, password);
+final DataSource dataSource = PgConnectionHelper.createDataSource(pgUrl, userName, password);
 return PgConnectionImpl.of(dataSource, PgHostImpl.ofUrl(pgUrl));
 }
 }
PgConnectionHelper.java
@@ -25,7 +25,7 @@ static DataSource createDataSource(@Nonnull final String pgUrl,
 PgConnectionValidators.pgUrlNotBlankAndValid(pgUrl, "pgUrl");
 PgConnectionValidators.userNameNotBlank(userName);
 PgConnectionValidators.passwordNotBlank(password);
-final var dataSource = new BasicDataSource();
+final BasicDataSource dataSource = new BasicDataSource();
 setCommonProperties(dataSource, userName, password);
 dataSource.setUrl(pgUrl);
 return dataSource;
PgUrlParser.java
@@ -7,6 +7,7 @@

 package io.github.mfvanek.pg.connection;

+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;

 import javax.annotation.Nonnull;
@@ -15,8 +16,6 @@
 import java.util.Set;
 import java.util.stream.Collectors;

-import static java.util.function.Predicate.not;
-
 final class PgUrlParser {

 static final String URL_HEADER = "jdbc:postgresql://";
@@ -54,7 +53,7 @@ static Set<String> extractHostNames(@Nonnull final String pgUrl) {
 PgConnectionValidators.pgUrlNotBlankAndValid(pgUrl, "pgUrl");
 final String allHostsWithPort = extractAllHostsWithPort(pgUrl);
 return Arrays.stream(allHostsWithPort.split(","))
-.filter(not(String::isBlank))
+.filter(StringUtils::isNotBlank)
 .map(h -> h.substring(0, h.lastIndexOf(':')))
 .collect(Collectors.toSet());
 }
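Both `String::isBlank` and `Predicate.not` are Java 11 additions, which is why the host-name filter now leans on commons-lang3 (`StringUtils` was already a dependency of this project). A dependency-free Java 9 alternative, shown only for comparison with what the commit actually does, is to negate a trimmed-emptiness check:

```java
import java.util.Collection;
import java.util.Set;
import java.util.stream.Collectors;

final class BlankFilterExample {

    private BlankFilterExample() {
    }

    // Java 9-compatible stand-in for .filter(not(String::isBlank)):
    // keep entries whose trimmed form is non-empty.
    static Set<String> nonBlank(final Collection<String> values) {
        return values.stream()
                .filter(v -> v != null && !v.trim().isEmpty())
                .collect(Collectors.toSet());
    }
}
```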
@@ -73,7 +73,7 @@ public List<DuplicatedIndexes> getIntersectedIndexes(@Nonnull final PgContext pg
 @Override
 public List<UnusedIndex> getUnusedIndexes(@Nonnull final PgContext pgContext) {
 final List<List<UnusedIndex>> potentiallyUnusedIndexesFromAllHosts = new ArrayList<>();
-for (var maintenanceForReplica : maintenanceForReplicas) {
+for (IndexMaintenance maintenanceForReplica : maintenanceForReplicas) {
 potentiallyUnusedIndexesFromAllHosts.add(
 doOnHost(maintenanceForReplica.getHost(),
 () -> maintenanceForReplica.getPotentiallyUnusedIndexes(pgContext)));
@@ -92,7 +92,7 @@ public List<ForeignKey> getForeignKeysNotCoveredWithIndex(@Nonnull final PgConte
 @Override
 public List<TableWithMissingIndex> getTablesWithMissingIndexes(@Nonnull final PgContext pgContext) {
 final List<List<TableWithMissingIndex>> tablesWithMissingIndexesFromAllHosts = new ArrayList<>();
-for (var maintenanceForReplica : maintenanceForReplicas) {
+for (IndexMaintenance maintenanceForReplica : maintenanceForReplicas) {
 tablesWithMissingIndexesFromAllHosts.add(
 doOnHost(maintenanceForReplica.getHost(),
 () -> maintenanceForReplica.getTablesWithMissingIndexes(pgContext)));
@@ -116,7 +116,7 @@ public List<IndexWithNulls> getIndexesWithNullValues(@Nonnull final PgContext pg

 @Override
 public void resetStatistics() {
-for (var statisticsMaintenance : statisticsMaintenanceForReplicas) {
+for (StatisticsMaintenance statisticsMaintenance : statisticsMaintenanceForReplicas) {
 doOnHost(statisticsMaintenance.getHost(), statisticsMaintenance::resetStatistics);
 }
 }
@@ -18,6 +18,7 @@
 import org.slf4j.LoggerFactory;

 import javax.annotation.Nonnull;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
@@ -55,13 +56,13 @@ static List<UnusedIndex> getUnusedIndexesAsIntersectionResult(
 @Nonnull final List<List<UnusedIndex>> potentiallyUnusedIndexesFromAllHosts) {
 LOGGER.debug("potentiallyUnusedIndexesFromAllHosts = {}", potentiallyUnusedIndexesFromAllHosts);
 Collection<UnusedIndex> unusedIndexes = null;
-for (var unusedIndexesFromHost : potentiallyUnusedIndexesFromAllHosts) {
+for (List<UnusedIndex> unusedIndexesFromHost : potentiallyUnusedIndexesFromAllHosts) {
 if (unusedIndexes == null) {
 unusedIndexes = unusedIndexesFromHost;
 }
 unusedIndexes = CollectionUtils.intersection(unusedIndexes, unusedIndexesFromHost);
 }
-final List<UnusedIndex> result = unusedIndexes == null ? Collections.emptyList() : List.copyOf(unusedIndexes);
+final List<UnusedIndex> result = unusedIndexes == null ? Collections.emptyList() : new ArrayList<>(unusedIndexes);
 LOGGER.debug("Intersection result {}", result);
 return result;
 }
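`List.copyOf` is likewise a Java 10 factory, hence the switch to the `ArrayList` copy constructor; as with the replica set above, the intersection result is no longer unmodifiable. If that guarantee matters, a hypothetical helper (illustrative only, not part of this commit) could wrap the copy:

```java
import io.github.mfvanek.pg.model.UnusedIndex;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

final class IntersectionResultExample {

    private IntersectionResultExample() {
    }

    // Mirrors the final step of the loop above, but keeps the Java 9-compatible
    // result unmodifiable for callers, as List.copyOf did.
    static List<UnusedIndex> toUnmodifiableResult(final Collection<UnusedIndex> unusedIndexes) {
        return unusedIndexes == null
                ? Collections.emptyList()
                : Collections.unmodifiableList(new ArrayList<>(unusedIndexes));
    }
}
```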
@@ -9,11 +9,17 @@

 import io.github.mfvanek.pg.index.health.IndexesHealth;
 import io.github.mfvanek.pg.model.DuplicatedIndexes;
+import io.github.mfvanek.pg.model.ForeignKey;
+import io.github.mfvanek.pg.model.Index;
 import io.github.mfvanek.pg.model.IndexNameAware;
 import io.github.mfvanek.pg.model.IndexSizeAware;
+import io.github.mfvanek.pg.model.IndexWithNulls;
 import io.github.mfvanek.pg.model.PgContext;
+import io.github.mfvanek.pg.model.Table;
 import io.github.mfvanek.pg.model.TableNameAware;
 import io.github.mfvanek.pg.model.TableSizeAware;
+import io.github.mfvanek.pg.model.TableWithMissingIndex;
+import io.github.mfvanek.pg.model.UnusedIndex;
 import org.apache.commons.collections4.CollectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,7 +69,7 @@ private String writeZeroToLog(@Nonnull final LoggingKey key) {

 @Nonnull
 private String logInvalidIndexes(@Nonnull final PgContext pgContext) {
-final var invalidIndexes = indexesHealth.getInvalidIndexes(pgContext);
+final List<Index> invalidIndexes = indexesHealth.getInvalidIndexes(pgContext);
 final LoggingKey key = SimpleLoggingKey.INVALID_INDEXES;
 if (CollectionUtils.isNotEmpty(invalidIndexes)) {
 LOGGER.error("There are invalid indexes in the database {}", invalidIndexes);
@@ -75,8 +81,8 @@ private String logInvalidIndexes(@Nonnull final PgContext pgContext) {
 @Nonnull
 private String logDuplicatedIndexes(@Nonnull final Exclusions exclusions,
 @Nonnull final PgContext pgContext) {
-final var rawDuplicatedIndexes = indexesHealth.getDuplicatedIndexes(pgContext);
-final var duplicatedIndexes = applyExclusions(rawDuplicatedIndexes,
+final List<DuplicatedIndexes> rawDuplicatedIndexes = indexesHealth.getDuplicatedIndexes(pgContext);
+final List<DuplicatedIndexes> duplicatedIndexes = applyExclusions(rawDuplicatedIndexes,
 exclusions.getDuplicatedIndexesExclusions());
 final LoggingKey key = SimpleLoggingKey.DUPLICATED_INDEXES;
 if (CollectionUtils.isNotEmpty(duplicatedIndexes)) {
@@ -89,8 +95,8 @@ private String logDuplicatedIndexes(@Nonnull final Exclusions exclusions,
 @Nonnull
 private String logIntersectedIndexes(@Nonnull final Exclusions exclusions,
 @Nonnull final PgContext pgContext) {
-final var rawIntersectedIndexes = indexesHealth.getIntersectedIndexes(pgContext);
-final var intersectedIndexes = applyExclusions(rawIntersectedIndexes,
+final List<DuplicatedIndexes> rawIntersectedIndexes = indexesHealth.getIntersectedIndexes(pgContext);
+final List<DuplicatedIndexes> intersectedIndexes = applyExclusions(rawIntersectedIndexes,
 exclusions.getIntersectedIndexesExclusions());
 final LoggingKey key = SimpleLoggingKey.INTERSECTED_INDEXES;
 if (CollectionUtils.isNotEmpty(intersectedIndexes)) {
@@ -103,10 +109,10 @@ private String logIntersectedIndexes(@Nonnull final Exclusions exclusions,
 @Nonnull
 private String logUnusedIndexes(@Nonnull final Exclusions exclusions,
 @Nonnull final PgContext pgContext) {
-final var rawUnusedIndexes = indexesHealth.getUnusedIndexes(pgContext);
-final var filteredUnusedIndexes = applyIndexesExclusions(
+final List<UnusedIndex> rawUnusedIndexes = indexesHealth.getUnusedIndexes(pgContext);
+final List<UnusedIndex> filteredUnusedIndexes = applyIndexesExclusions(
 rawUnusedIndexes, exclusions.getUnusedIndexesExclusions());
-final var unusedIndexes = applyIndexSizeExclusions(
+final List<UnusedIndex> unusedIndexes = applyIndexSizeExclusions(
 filteredUnusedIndexes, exclusions.getIndexSizeThresholdInBytes());
 final LoggingKey key = SimpleLoggingKey.UNUSED_INDEXES;
 if (CollectionUtils.isNotEmpty(unusedIndexes)) {
@@ -118,7 +124,7 @@ private String logUnusedIndexes(@Nonnull final Exclusions exclusions,

 @Nonnull
 private String logForeignKeysNotCoveredWithIndex(@Nonnull final PgContext pgContext) {
-final var foreignKeys = indexesHealth.getForeignKeysNotCoveredWithIndex(pgContext);
+final List<ForeignKey> foreignKeys = indexesHealth.getForeignKeysNotCoveredWithIndex(pgContext);
 final LoggingKey key = SimpleLoggingKey.FOREIGN_KEYS;
 if (CollectionUtils.isNotEmpty(foreignKeys)) {
 LOGGER.warn("There are foreign keys without index in the database {}", foreignKeys);
@@ -130,10 +136,10 @@ private String logForeignKeysNotCoveredWithIndex(@Nonnull final PgCont
 @Nonnull
 private String logTablesWithMissingIndexes(@Nonnull final Exclusions exclusions,
 @Nonnull final PgContext pgContext) {
-final var rawTablesWithMissingIndexes = indexesHealth.getTablesWithMissingIndexes(pgContext);
-final var tablesFilteredBySize = applyTableSizeExclusions(
+final List<TableWithMissingIndex> rawTablesWithMissingIndexes = indexesHealth.getTablesWithMissingIndexes(pgContext);
+final List<TableWithMissingIndex> tablesFilteredBySize = applyTableSizeExclusions(
 rawTablesWithMissingIndexes, exclusions.getTableSizeThresholdInBytes());
-final var tablesWithMissingIndexes = applyTablesExclusions(
+final List<TableWithMissingIndex> tablesWithMissingIndexes = applyTablesExclusions(
 tablesFilteredBySize, exclusions.getTablesWithMissingIndexesExclusions());
 final LoggingKey key = SimpleLoggingKey.TABLES_WITH_MISSING_INDEXES;
 if (CollectionUtils.isNotEmpty(tablesWithMissingIndexes)) {
@@ -146,10 +152,10 @@ private String logTablesWithMissingIndexes(@Nonnull final Exclusions exclusions,
 @Nonnull
 private String logTablesWithoutPrimaryKey(@Nonnull final Exclusions exclusions,
 @Nonnull final PgContext pgContext) {
-final var rawTablesWithoutPrimaryKey = indexesHealth.getTablesWithoutPrimaryKey(pgContext);
-final var tablesFilteredBySize = applyTableSizeExclusions(
+final List<Table> rawTablesWithoutPrimaryKey = indexesHealth.getTablesWithoutPrimaryKey(pgContext);
+final List<Table> tablesFilteredBySize = applyTableSizeExclusions(
 rawTablesWithoutPrimaryKey, exclusions.getTableSizeThresholdInBytes());
-final var tablesWithoutPrimaryKey = applyTablesExclusions(
+final List<Table> tablesWithoutPrimaryKey = applyTablesExclusions(
 tablesFilteredBySize, exclusions.getTablesWithoutPrimaryKeyExclusions());
 final LoggingKey key = SimpleLoggingKey.TABLES_WITHOUT_PK;
 if (CollectionUtils.isNotEmpty(tablesWithoutPrimaryKey)) {
@@ -162,8 +168,8 @@ private String logTablesWithoutPrimaryKey(@Nonnull final Exclusions exclusions,
 @Nonnull
 private String logIndexesWithNullValues(@Nonnull final Exclusions exclusions,
 @Nonnull final PgContext pgContext) {
-final var rawIndexesWithNullValues = indexesHealth.getIndexesWithNullValues(pgContext);
-final var indexesWithNullValues = applyIndexesExclusions(rawIndexesWithNullValues,
+final List<IndexWithNulls> rawIndexesWithNullValues = indexesHealth.getIndexesWithNullValues(pgContext);
+final List<IndexWithNulls> indexesWithNullValues = applyIndexesExclusions(rawIndexesWithNullValues,
 exclusions.getIndexesWithNullValuesExclusions());
 final LoggingKey key = SimpleLoggingKey.INDEXES_WITH_NULLS;
 if (CollectionUtils.isNotEmpty(indexesWithNullValues)) {
StatisticsMaintenanceImpl.java
@@ -12,6 +12,7 @@
 import io.github.mfvanek.pg.utils.QueryExecutor;

 import javax.annotation.Nonnull;
+import java.util.List;
 import java.util.Objects;

 public class StatisticsMaintenanceImpl implements StatisticsMaintenance {
@@ -24,7 +25,7 @@ public StatisticsMaintenanceImpl(@Nonnull final PgConnection pgConnection) {

 @Override
 public boolean resetStatistics() {
-final var result = QueryExecutor.executeQuery(pgConnection, "select pg_stat_reset()", rs -> true);
+final List<Boolean> result = QueryExecutor.executeQuery(pgConnection, "select pg_stat_reset()", rs -> true);
 return result.size() == 1;
 }

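For context, `select pg_stat_reset()` returns exactly one row, which is what the `result.size() == 1` check relies on. A plain-JDBC sketch of the same call (illustrative only; `QueryExecutor` above is the project's own wrapper):

```java
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

final class StatisticsResetExample {

    private StatisticsResetExample() {
    }

    // Resets PostgreSQL statistics counters for the current database and
    // reports whether exactly one row came back, mirroring resetStatistics() above.
    static boolean resetStatistics(final DataSource dataSource) throws SQLException {
        try (Connection connection = dataSource.getConnection();
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery("select pg_stat_reset()")) {
            return resultSet.next() && !resultSet.next();
        }
    }
}
```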

0 comments on commit dab739f
