
Commit

HADOOP-13614. Purge some superfluous/obsolete S3 FS tests that are slowing test runs down. Contributed by Steve Loughran.

cnauroth committed Oct 26, 2016
1 parent e90af4a commit 9cad3e2
Showing 26 changed files with 255 additions and 326 deletions.
@@ -834,6 +834,7 @@ public static void verifyReceivedData(FileSystem fs, Path path,

long totalBytesRead = 0;
int nextExpectedNumber = 0;
NanoTimer timer = new NanoTimer();
try (InputStream inputStream = fs.open(path)) {
while (true) {
final int bytesRead = inputStream.read(testBuffer);
@@ -862,6 +863,8 @@ public static void verifyReceivedData(FileSystem fs, Path path,
" bytes but only received " + totalBytesRead);
}
}
timer.end("Time to read %d bytes", expectedSize);
bandwidth(timer, expectedSize);
}

/**
@@ -925,9 +928,12 @@ public static void createAndVerifyFile(FileSystem fs, Path parent, final long fi
final Path objectPath = new Path(parent, objectName);

// Write test file in a specific pattern
NanoTimer timer = new NanoTimer();
assertEquals(fileSize,
generateTestFile(fs, objectPath, fileSize, testBufferSize, modulus));
assertPathExists(fs, "not created successful", objectPath);
timer.end("Time to write %d bytes", fileSize);
bandwidth(timer, fileSize);

// Now read the same file back and verify its content
try {
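
The two hunks above add timing and bandwidth reporting around the bulk read and write in these test helpers. As a rough illustration of the pattern — a self-contained sketch using plain System.nanoTime() instead of the NanoTimer and bandwidth(...) utilities referenced in the patch, with made-up class and method names — it amounts to:

```java
import java.io.IOException;
import java.io.InputStream;

public final class ReadTimingSketch {

  private ReadTimingSketch() {
  }

  /**
   * Drain a stream while timing it, then report throughput.
   * Mirrors the timer/bandwidth pattern added in the diff,
   * but with no Hadoop dependencies.
   */
  public static long timedDrain(InputStream in, int bufferSize)
      throws IOException {
    byte[] buffer = new byte[bufferSize];
    long totalBytesRead = 0;
    long start = System.nanoTime();
    int bytesRead;
    while ((bytesRead = in.read(buffer)) >= 0) {
      totalBytesRead += bytesRead;
    }
    long elapsedNanos = System.nanoTime() - start;
    double seconds = elapsedNanos / 1.0e9;
    double megabytesPerSecond = seconds > 0
        ? (totalBytesRead / (1024.0 * 1024.0)) / seconds
        : 0;
    System.out.printf("Read %d bytes in %.3fs (%.2f MB/s)%n",
        totalBytesRead, seconds, megabytesPerSecond);
    return totalBytesRead;
  }
}
```

With this in place, slow S3 reads and writes show up in the test logs as an elapsed time and an MB/s figure rather than only as a long-running build.
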
@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -46,7 +46,7 @@ public void setUp() throws Exception {
fcTarget = FileSystem.getLocal(conf);
super.setUp();
}

@Override
@After
public void tearDown() throws Exception {
7 changes: 0 additions & 7 deletions hadoop-tools/hadoop-aws/pom.xml
@@ -181,9 +181,6 @@
</includes>
<excludes>
<exclude>**/ITestJets3tNativeS3FileSystemContract.java</exclude>
<exclude>**/ITestS3ABlockingThreadPool.java</exclude>
<exclude>**/ITestS3AFileSystemContract.java</exclude>
<exclude>**/ITestS3AMiniYarnCluster.java</exclude>
<exclude>**/ITest*Root*.java</exclude>
<exclude>**/ITestS3AFileContextStatistics.java</exclude>
<include>**/ITestS3AHuge*.java</include>
@@ -211,10 +208,6 @@
<!-- parallel execution. -->
<includes>
<include>**/ITestJets3tNativeS3FileSystemContract.java</include>
<include>**/ITestS3ABlockingThreadPool.java</include>
<include>**/ITestS3AFastOutputStream.java</include>
<include>**/ITestS3AFileSystemContract.java</include>
<include>**/ITestS3AMiniYarnCluster.java</include>
<include>**/ITest*Root*.java</include>
<include>**/ITestS3AFileContextStatistics.java</include>
<include>**/ITestS3AHuge*.java</include>
@@ -19,6 +19,7 @@
package org.apache.hadoop.fs.contract.s3a;

import static org.apache.hadoop.fs.s3a.Constants.*;
import static org.apache.hadoop.fs.s3a.S3ATestConstants.SCALE_TEST_TIMEOUT_MILLIS;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tools.contract.AbstractContractDistCpTest;
@@ -32,6 +33,11 @@ public class ITestS3AContractDistCp extends AbstractContractDistCpTest {

private static final long MULTIPART_SETTING = MULTIPART_MIN_SIZE;

@Override
protected int getTestTimeoutMillis() {
return SCALE_TEST_TIMEOUT_MILLIS;
}

@Override
protected Configuration createConfiguration() {
Configuration newConf = super.createConfiguration();
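
The getTestTimeoutMillis() override above stretches the per-test budget of the DistCp contract test to the scale-test timeout. The base-class wiring that consumes this value is not part of the diff; a plausible sketch of how such a hook is typically turned into a JUnit rule (the class name and default value here are illustrative assumptions, not the actual Hadoop base class):

```java
import org.junit.Rule;
import org.junit.rules.Timeout;

public abstract class TimeoutAwareTestBase {

  /** Default per-test budget in milliseconds; scale tests override this. */
  protected int getTestTimeoutMillis() {
    return 10 * 60 * 1000;
  }

  /**
   * JUnit evaluates this rule for every test method, so overriding
   * getTestTimeoutMillis() in a subclass changes the whole class's timeout.
   */
  @Rule
  public Timeout testTimeout = new Timeout(getTestTimeoutMillis());
}
```

The same mechanism is presumably what lets AbstractS3ATestBase return S3A_TEST_TIMEOUT further down in this commit.
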
@@ -21,6 +21,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.AbstractBondedFSContract;
import org.apache.hadoop.fs.s3a.S3ATestUtils;

/**
* The contract of S3A: only enabled if the test bucket is provided.
@@ -29,7 +30,6 @@ public class S3AContract extends AbstractBondedFSContract {

public static final String CONTRACT_XML = "contract/s3a.xml";


public S3AContract(Configuration conf) {
super(conf);
//insert the base features
@@ -43,8 +43,6 @@ public String getScheme() {

@Override
public Path getTestPath() {
String testUniqueForkId = System.getProperty("test.unique.fork.id");
return testUniqueForkId == null ? super.getTestPath() :
new Path("/" + testUniqueForkId, "test");
return S3ATestUtils.createTestPath(super.getTestPath());
}
}
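
The fork-id logic deleted from getTestPath() is evidently what S3ATestUtils.createTestPath now centralizes. The helper's body is not shown in this commit, but based purely on the removed lines it presumably behaves like this sketch (the wrapper class name is illustrative):

```java
import org.apache.hadoop.fs.Path;

public final class TestPathSketch {

  private TestPathSketch() {
  }

  /**
   * Isolate parallel test forks: if the surefire/failsafe fork id is set,
   * give each fork its own root directory instead of the default test path.
   */
  public static Path createTestPath(Path defaultPath) {
    String testUniqueForkId = System.getProperty("test.unique.fork.id");
    return testUniqueForkId == null
        ? defaultPath
        : new Path("/" + testUniqueForkId, "test");
  }
}
```
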
@@ -26,8 +26,8 @@
import org.apache.hadoop.fs.contract.s3a.S3AContract;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

@@ -40,6 +40,9 @@
public abstract class AbstractS3ATestBase extends AbstractFSContractTestBase
implements S3ATestConstants {

protected static final Logger LOG =
LoggerFactory.getLogger(AbstractS3ATestBase.class);

@Override
protected AbstractFSContract createContract(Configuration conf) {
return new S3AContract(conf);
@@ -52,14 +55,16 @@ public void teardown() throws Exception {
IOUtils.closeStream(getFileSystem());
}

@Rule
public TestName methodName = new TestName();

@Before
public void nameThread() {
Thread.currentThread().setName("JUnit-" + methodName.getMethodName());
}

@Override
protected int getTestTimeoutMillis() {
return S3A_TEST_TIMEOUT;
}

protected Configuration getConfiguration() {
return getContract().getConf();
}
@@ -73,6 +78,17 @@ public S3AFileSystem getFileSystem() {
return (S3AFileSystem) super.getFileSystem();
}

/**
* Describe a test in the logs.
* @param text text to print
* @param args arguments to format in the printing
*/
protected void describe(String text, Object... args) {
LOG.info("\n\n{}: {}\n",
methodName.getMethodName(),
String.format(text, args));
}

/**
* Write a file, read it back, validate the dataset. Overwrites the file
* if it is present
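
With the logger, the shared TestName rule, the timeout override, and describe() now living in AbstractS3ATestBase, subclasses need very little boilerplate. A hypothetical subclass — not part of this commit, with an invented class name — showing how the inherited pieces would be used:

```java
import org.apache.hadoop.fs.Path;
import org.junit.Test;

public class ITestExampleScale extends AbstractS3ATestBase {

  @Test
  public void testRootPathProbe() throws Throwable {
    // describe() logs the running method name plus a formatted message,
    // which keeps interleaved parallel-test output readable.
    describe("probing the test filesystem at %s", getFileSystem().getUri());
    // getFileSystem() is typed as S3AFileSystem by the base class,
    // and the S3A test timeout applies without a per-class Timeout rule.
    getFileSystem().getFileStatus(new Path("/"));
  }
}
```
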

This file was deleted.

@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -18,16 +18,11 @@

package org.apache.hadoop.fs.s3a;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.AbstractFSContractTestBase;
import org.apache.hadoop.fs.contract.s3a.S3AContract;
import org.junit.Rule;

import org.junit.Test;
import org.junit.rules.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -38,19 +33,11 @@
/**
* S3A tests for configuring block size.
*/
public class ITestS3ABlocksize extends AbstractFSContractTestBase {
public class ITestS3ABlocksize extends AbstractS3ATestBase {

private static final Logger LOG =
LoggerFactory.getLogger(ITestS3ABlocksize.class);

@Override
protected AbstractFSContract createContract(Configuration conf) {
return new S3AContract(conf);
}

@Rule
public Timeout testTimeout = new Timeout(30 * 60 * 1000);

@Test
@SuppressWarnings("deprecation")
public void testBlockSize() throws Exception {
@@ -68,34 +68,37 @@ public class ITestS3AConfiguration {
private static final Logger LOG =
LoggerFactory.getLogger(ITestS3AConfiguration.class);

private static final String TEST_ENDPOINT = "test.fs.s3a.endpoint";

@Rule
public Timeout testTimeout = new Timeout(30 * 60 * 1000);
public Timeout testTimeout = new Timeout(
S3ATestConstants.S3A_TEST_TIMEOUT
);

@Rule
public final TemporaryFolder tempDir = new TemporaryFolder();

/**
* Test if custom endpoint is picked up.
* <p/>
* The test expects TEST_ENDPOINT to be defined in the Configuration
* <p>
* The test expects {@link S3ATestConstants#CONFIGURATION_TEST_ENDPOINT}
* to be defined in the Configuration
* describing the endpoint of the bucket to which TEST_FS_S3A_NAME points
* (f.i. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
* (i.e. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
* Evidently, the bucket has to be hosted in the region denoted by the
* endpoint for the test to succeed.
* <p/>
* <p>
* More info and the list of endpoint identifiers:
* http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
* @see <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region">endpoint list</a>.
*
* @throws Exception
*/
@Test
public void testEndpoint() throws Exception {
conf = new Configuration();
String endpoint = conf.getTrimmed(TEST_ENDPOINT, "");
String endpoint = conf.getTrimmed(
S3ATestConstants.CONFIGURATION_TEST_ENDPOINT, "");
if (endpoint.isEmpty()) {
LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT + "config " +
LOG.warn("Custom endpoint test skipped as " +
S3ATestConstants.CONFIGURATION_TEST_ENDPOINT + "config " +
"setting was not detected");
} else {
conf.set(Constants.ENDPOINT, endpoint);
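
As the reworked Javadoc spells out, testEndpoint() is skipped unless the test endpoint property is present in the configuration. Judging from the local TEST_ENDPOINT constant removed in this hunk, S3ATestConstants.CONFIGURATION_TEST_ENDPOINT presumably still maps to "test.fs.s3a.endpoint". A minimal illustration of supplying it programmatically (test setups would normally define it in their test XML configuration instead; the class name is invented):

```java
import org.apache.hadoop.conf.Configuration;

public final class EndpointConfigExample {

  private EndpointConfigExample() {
  }

  /** Build a configuration that lets testEndpoint() run against Ireland. */
  public static Configuration withTestEndpoint() {
    Configuration conf = new Configuration();
    // Key taken from the removed TEST_ENDPOINT constant;
    // the endpoint value is the example given in the Javadoc.
    conf.set("test.fs.s3a.endpoint", "s3-eu-west-1.amazonaws.com");
    return conf;
  }
}
```
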
@@ -22,7 +22,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.IOException;
Expand All @@ -48,15 +47,9 @@ protected Configuration createConfiguration() {
}

private static final int[] SIZES = {
0, 1, 2, 3, 4, 5, 254, 255, 256, 257, 2 ^ 10 - 3, 2 ^ 11 - 2, 2 ^ 12 - 1
0, 1, 2, 3, 4, 5, 254, 255, 256, 257, 2 ^ 12 - 1
};

@Override
public void teardown() throws Exception {
super.teardown();
IOUtils.closeStream(getFileSystem());
}

@Test
public void testEncryption() throws Throwable {
for (int size: SIZES) {
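
A side note on the trimmed SIZES array: in Java, ^ is bitwise XOR and binary minus binds more tightly, so these entries evaluate to small odd integers rather than values near powers of two. A quick standalone check (not part of the patch):

```java
public final class XorSizesCheck {

  private XorSizesCheck() {
  }

  public static void main(String[] args) {
    // '-' is evaluated before '^', and '^' is bitwise XOR.
    System.out.println(2 ^ 10 - 3);  // 2 ^ 7  -> 5   (removed entry)
    System.out.println(2 ^ 11 - 2);  // 2 ^ 9  -> 11  (removed entry)
    System.out.println(2 ^ 12 - 1);  // 2 ^ 11 -> 9   (kept entry)
  }
}
```

The commit only drops two of the entries; it does not change how they are computed.
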
@@ -20,7 +20,6 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.IOException;
Expand All @@ -43,12 +42,6 @@ protected Configuration createConfiguration() {
return conf;
}

@Override
public void teardown() throws Exception {
super.teardown();
IOUtils.closeStream(getFileSystem());
}

@Test
public void testEncrypt0() throws Throwable {
writeThenReadFileToFailure(0);
