HBASE-25888 Backup tests are categorically flakey #3279

Merged

@@ -18,15 +18,15 @@
package org.apache.hadoop.hbase.backup.mapreduce;

import static org.apache.hadoop.hbase.backup.util.BackupUtils.succeeded;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -240,13 +240,12 @@ protected void copyMetaData(FileSystem fs, Path tmpBackupDir, Path backupDirPath
* @throws IOException exception
*/
protected void copyFile(FileSystem fs, Path p, Path newPath) throws IOException {
File f = File.createTempFile("data", "meta");
Path localPath = new Path(f.getAbsolutePath());
fs.copyToLocalFile(p, localPath);
fs.copyFromLocalFile(localPath, newPath);
try (InputStream in = fs.open(p); OutputStream out = fs.create(newPath, true)) {
IOUtils.copy(in, out);
}
boolean exists = fs.exists(newPath);
if (!exists) {
throw new IOException("Failed to copy meta file to: "+ newPath);
throw new IOException("Failed to copy meta file to: " + newPath);
}
}

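The copyFile hunk above drops the local temp-file round trip (File.createTempFile, copyToLocalFile, copyFromLocalFile) in favour of streaming the bytes straight from the source path to the destination path. Below is a minimal standalone sketch of that streaming pattern, assuming Hadoop's FileSystem API and commons-io are on the classpath; the paths and class name are hypothetical, not part of the patch:

```java
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StreamCopySketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path src = new Path("/backupUT/meta-src");   // hypothetical source
    Path dst = new Path("/backupUT/meta-dst");   // hypothetical destination
    // Open both ends and copy the stream; create(dst, true) overwrites an existing file.
    try (InputStream in = fs.open(src); OutputStream out = fs.create(dst, true)) {
      IOUtils.copy(in, out);
    }
    if (!fs.exists(dst)) {
      throw new IOException("Failed to copy meta file to: " + dst);
    }
  }
}
```

Copying through the FileSystem streams avoids writing the data to the local disk of whichever node runs the job, which is presumably the point here: the temp-file detour is one more thing that can fail or leak state between test runs.
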
@@ -66,7 +66,7 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -78,9 +78,9 @@
public class TestBackupBase {
private static final Logger LOG = LoggerFactory.getLogger(TestBackupBase.class);

protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected static HBaseTestingUtility TEST_UTIL;
protected static HBaseTestingUtility TEST_UTIL2;
protected static Configuration conf1 = TEST_UTIL.getConfiguration();
protected static Configuration conf1;
protected static Configuration conf2;

protected static TableName table1 = TableName.valueOf("table1");
@@ -98,14 +98,13 @@ public class TestBackupBase {
protected static final byte[] qualName = Bytes.toBytes("q1");
protected static final byte[] famName = Bytes.toBytes("f");

protected static String BACKUP_ROOT_DIR = Path.SEPARATOR +"backupUT";
protected static String BACKUP_REMOTE_ROOT_DIR = Path.SEPARATOR + "backupUT";
protected static String BACKUP_ROOT_DIR;
protected static String BACKUP_REMOTE_ROOT_DIR;
protected static String provider = "defaultProvider";
protected static boolean secure = false;

protected static boolean autoRestoreOnFailure = true;
protected static boolean setupIsDone = false;
protected static boolean useSecondCluster = false;
protected static boolean autoRestoreOnFailure;
protected static boolean useSecondCluster;

static class IncrementalTableBackupClientForTest extends IncrementalTableBackupClient {
public IncrementalTableBackupClientForTest() {
@@ -270,14 +269,10 @@ public void execute() throws IOException {
}
}

/**
* @throws Exception if starting the mini cluster or setting up the tables fails
*/
@Before
public void setUp() throws Exception {
if (setupIsDone) {
return;
}
public static void setUpHelper() throws Exception {
BACKUP_ROOT_DIR = Path.SEPARATOR +"backupUT";
BACKUP_REMOTE_ROOT_DIR = Path.SEPARATOR + "backupUT";

if (secure) {
// set the always on security provider
UserProvider.setUserProviderForTesting(TEST_UTIL.getConfiguration(),
@@ -324,9 +319,24 @@ public void setUp() throws Exception {
}
createTables();
populateFromMasterConfig(TEST_UTIL.getHBaseCluster().getMaster().getConfiguration(), conf1);
setupIsDone = true;
}


/**
* Setup Cluster with appropriate configurations before running tests.
*
* @throws Exception if starting the mini cluster or setting up the tables fails
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
conf1 = TEST_UTIL.getConfiguration();
autoRestoreOnFailure = true;
useSecondCluster = false;
setUpHelper();
}


private static void populateFromMasterConfig(Configuration masterConf, Configuration conf) {
Iterator<Entry<String, String>> it = masterConf.iterator();
while (it.hasNext()) {
@@ -350,6 +360,8 @@ public static void tearDown() throws Exception {
}
TEST_UTIL.shutdownMiniCluster();
TEST_UTIL.shutdownMiniMapReduceCluster();
autoRestoreOnFailure = true;
useSecondCluster = false;
}

Table insertIntoTable(Connection conn, TableName table, byte[] family, int id, int numRows)
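The TestBackupBase changes above replace the per-test @Before setUp() guarded by setupIsDone with a static setUpHelper() plus a @BeforeClass setUp(): the one-time cluster setup now runs once per test class, and each subclass declares its own static @BeforeClass that tweaks the shared static configuration before delegating to setUpHelper(), which is the pattern the remaining files in this diff follow. A sketch of that JUnit 4 idiom under hypothetical class names (BackupTestBase and RemoteBackupTest are illustrations, not classes from this patch):

```java
import org.junit.BeforeClass;
import org.junit.Test;

class BackupTestBase {
  protected static String backupRootDir;
  protected static boolean useSecondCluster;

  // One-time shared setup; callers configure the statics before delegating here.
  public static void setUpHelper() throws Exception {
    if (backupRootDir == null) {
      backupRootDir = "/backupUT";
    }
    // ... start the mini cluster(s), create tables, etc.
  }

  @BeforeClass
  public static void setUp() throws Exception {
    useSecondCluster = false;
    setUpHelper();
  }
}

public class RemoteBackupTest extends BackupTestBase {
  // Same name and signature as the base method, so JUnit 4 runs only this
  // version for RemoteBackupTest; the subclass fully controls its configuration.
  @BeforeClass
  public static void setUp() throws Exception {
    useSecondCluster = true;
    setUpHelper();
  }

  @Test
  public void testBackup() {
    // runs against the cluster state prepared by this class's @BeforeClass
  }
}
```

Because the subclass method is static with an identical signature, JUnit 4 treats the base class's @BeforeClass as shadowed and skips it, which is what lets the TestRemoteBackup and TestRemoteRestore classes below swap in their own configuration without the base setup running twice.
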
@@ -27,6 +27,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
@@ -41,7 +42,7 @@
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -113,15 +114,18 @@ public void postDeleteSnapshot(ObserverContext<MasterCoprocessorEnvironment> ctx
}

/**
* Setup Cluster with appropriate configurations before running tests.
*
* @throws Exception if starting the mini cluster or setting up the tables fails
*/
@Override
@Before
public void setUp() throws Exception {
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
conf1 = TEST_UTIL.getConfiguration();
conf1.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
MasterSnapshotObserver.class.getName());
conf1.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
super.setUp();
setUpHelper();
}

private MasterSnapshotObserver getMasterSnapshotObserver() {
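The hunk above registers MasterSnapshotObserver on the master via CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY and sets HConstants.HBASE_CLIENT_RETRIES_NUMBER to 1, all before setUpHelper() starts the mini cluster, since master coprocessors named under that key are loaded at master start-up. For reference, a small hedged sketch of the same wiring in isolation; NoopMasterObserver is a hypothetical stand-in for the patch's MasterSnapshotObserver, while the configuration keys are real HBase constants:

```java
import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;

public class MiniClusterObserverSketch {

  // A do-nothing master observer standing in for MasterSnapshotObserver.
  public static class NoopMasterObserver implements MasterCoprocessor, MasterObserver {
    @Override
    public Optional<MasterObserver> getMasterObserver() {
      return Optional.of(this);
    }
  }

  public static void main(String[] args) throws Exception {
    HBaseTestingUtility util = new HBaseTestingUtility();
    Configuration conf = util.getConfiguration();
    // Register the observer before start-up; coprocessors listed under this key
    // are loaded when the master starts.
    conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, NoopMasterObserver.class.getName());
    // Keep client retries minimal so failures surface quickly instead of retrying for minutes.
    conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
    util.startMiniCluster();
    try {
      // ... exercise snapshot/backup operations that the observer should see ...
    } finally {
      util.shutdownMiniCluster();
    }
  }
}
```
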
@@ -22,6 +22,7 @@
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
@@ -38,6 +39,7 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -55,11 +57,18 @@ public class TestRemoteBackup extends TestBackupBase {

private static final Logger LOG = LoggerFactory.getLogger(TestRemoteBackup.class);

@Override
public void setUp() throws Exception {
useSecondCluster = true;
/**
* Setup Cluster with appropriate configurations before running tests.
*
* @throws Exception if starting the mini cluster or setting up the tables fails
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
conf1 = TEST_UTIL.getConfiguration();
conf1.setInt(HConstants.REGION_SERVER_HANDLER_COUNT, 10);
super.setUp();
useSecondCluster = true;
setUpHelper();
}

@@ -20,10 +20,12 @@
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -39,10 +41,17 @@ public class TestRemoteRestore extends TestBackupBase {

private static final Logger LOG = LoggerFactory.getLogger(TestRemoteRestore.class);

@Override
public void setUp() throws Exception {
/**
* Setup Cluster with appropriate configurations before running tests.
*
* @throws Exception if starting the mini cluster or setting up the tables fails
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
conf1 = TEST_UTIL.getConfiguration();
useSecondCluster = true;
super.setUp();
setUpHelper();
}
