[SPARK-4236] Cleanup removed applications' files in shuffle service #3126

Closed · wants to merge 4 commits
core/src/main/scala/org/apache/spark/util/Utils.scala (1 change: 1 addition & 0 deletions)
@@ -755,6 +755,7 @@ private[spark] object Utils extends Logging {
/**
* Delete a file or directory and its contents recursively.
* Don't follow directories if they are symlinks.
* Throws an exception if deletion is unsuccessful.
*/
def deleteRecursively(file: File) {
if (file != null) {
@@ -63,8 +63,9 @@ class ExternalShuffleServiceSuite extends ShuffleSuite with BeforeAndAfterAll {
rdd.count()
rdd.count()

- // Invalidate the registered executors, disallowing access to their shuffle blocks.
- rpcHandler.clearRegisteredExecutors()
// Invalidate the registered executors, disallowing access to their shuffle blocks (without
// deleting the actual shuffle files, so we could access them without the shuffle service).
rpcHandler.applicationRemoved(sc.conf.getAppId, false /* cleanupLocalDirs */)
Review comment (Contributor): can you just do cleanupLocalDirs = false? Or can we not, because this is a Java method even though we're using it in Scala?

Reply (Contributor Author): Right, not possible if the method is written in Java, unfortunately.

// Now Spark will receive FetchFailed, and not retry the stage due to "spark.test.noStageRetry"
// being set.
@@ -22,16 +22,22 @@
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import com.google.common.base.Preconditions;
import com.google.common.io.Closeables;
import com.google.common.base.Charsets;
import io.netty.buffer.Unpooled;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* General utilities available in the network package. Many of these are sourced from Spark's
* own Utils, just accessible within this package.
*/
public class JavaUtils {
private static final Logger logger = LoggerFactory.getLogger(JavaUtils.class);

@@ -93,4 +99,57 @@ public static ByteBuffer stringToBytes(String s) {
public static String bytesToString(ByteBuffer b) {
return Unpooled.wrappedBuffer(b).toString(Charsets.UTF_8);
}

/*
* Delete a file or directory and its contents recursively.
* Don't follow directories if they are symlinks.
* Throws an exception if deletion is unsuccessful.
*/
public static void deleteRecursively(File file) throws IOException {
if (file == null) { return; }

if (file.isDirectory() && !isSymlink(file)) {
IOException savedIOException = null;
for (File child : listFilesSafely(file)) {
try {
deleteRecursively(child);
} catch (IOException e) {
// In case of multiple exceptions, only the last one will be thrown
savedIOException = e;
}
}
if (savedIOException != null) {
throw savedIOException;
}
}

boolean deleted = file.delete();
// Delete can also fail if the file simply did not exist.
if (!deleted && file.exists()) {
throw new IOException("Failed to delete: " + file.getAbsolutePath());
}
}

private static File[] listFilesSafely(File file) throws IOException {
if (file.exists()) {
File[] files = file.listFiles();
if (files == null) {
throw new IOException("Failed to list files for dir: " + file);
}
return files;
} else {
return new File[0];
}
}

private static boolean isSymlink(File file) throws IOException {
Preconditions.checkNotNull(file);
File fileInCanonicalDir = null;
if (file.getParent() == null) {
fileInCanonicalDir = file;
} else {
fileInCanonicalDir = new File(file.getParentFile().getCanonicalFile(), file.getName());
}
return !fileInCanonicalDir.getCanonicalFile().equals(fileInCanonicalDir.getAbsoluteFile());
}
}
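The isSymlink helper above works by canonicalizing the parent directory first: once the parent is canonical, the canonical and absolute forms of the full path can only differ if the last path component itself is a symbolic link. Below is a small standalone demo of that comparison; the class name and temp-directory layout are made up for illustration, and creating symlinks may require extra privileges on some platforms.

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class SymlinkCheckDemo {
  public static void main(String[] args) throws IOException {
    Path dir = Files.createTempDirectory("symlink-demo");
    Path target = Files.createDirectory(dir.resolve("real-dir"));
    Path link = Files.createSymbolicLink(dir.resolve("link-dir"), target);

    System.out.println(isSymlink(link.toFile()));   // true  -> deleteRecursively would not descend into it
    System.out.println(isSymlink(target.toFile())); // false -> deleteRecursively would delete its contents
  }

  // Same comparison as JavaUtils.isSymlink above: canonicalize the parent, then compare
  // the canonical and absolute forms of the resulting path.
  private static boolean isSymlink(File file) throws IOException {
    File parent = file.getParentFile();
    File fileInCanonicalDir =
      (parent == null) ? file : new File(parent.getCanonicalFile(), file.getName());
    return !fileInCanonicalDir.getCanonicalFile().equals(fileInCanonicalDir.getAbsoluteFile());
  }
}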
@@ -94,9 +94,11 @@ public StreamManager getStreamManager() {
return streamManager;
}

- /** For testing, clears all executors registered with "RegisterExecutor". */
- @VisibleForTesting
- public void clearRegisteredExecutors() {
-   blockManager.clearRegisteredExecutors();
/**
* Removes an application (once it has been terminated), and optionally will clean up any
* local directories associated with the executors of that application in a separate thread.
*/
public void applicationRemoved(String appId, boolean cleanupLocalDirs) {
blockManager.applicationRemoved(appId, cleanupLocalDirs);
}
}
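For context, here is a hypothetical call-site sketch. This diff only adds the applicationRemoved entry point on the handler; how the shuffle service learns that an application has terminated is outside this change, and the method name onApplicationEnd and the ownsLocalDirs flag below are assumptions made purely for illustration (the sketch also assumes it lives where ExternalShuffleBlockHandler is visible).

public class ShuffleServiceCleanupSketch {
  // Invoked by whatever mechanism notifies the shuffle service that an application ended.
  static void onApplicationEnd(ExternalShuffleBlockHandler handler,
                               String appId,
                               boolean ownsLocalDirs) {
    // Drop all executor metadata for the app; delete its local dirs only if this
    // service is responsible for them.
    handler.applicationRemoved(appId, ownsLocalDirs /* cleanupLocalDirs */);
  }
}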
@@ -21,9 +21,15 @@
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
- import java.util.concurrent.ConcurrentHashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Objects;
import com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -43,21 +49,30 @@
public class ExternalShuffleBlockManager {
private final Logger logger = LoggerFactory.getLogger(ExternalShuffleBlockManager.class);

- // Map from "appId-execId" to the executor's configuration.
- private final ConcurrentHashMap<String, ExecutorShuffleInfo> executors =
-   new ConcurrentHashMap<String, ExecutorShuffleInfo>();
// Map containing all registered executors' metadata.
private final ConcurrentMap<AppExecId, ExecutorShuffleInfo> executors;

- // Returns an id suitable for a single executor within a single application.
- private String getAppExecId(String appId, String execId) {
-   return appId + "-" + execId;
// Single-threaded Java executor used to perform expensive recursive directory deletion.
private final Executor directoryCleaner;

public ExternalShuffleBlockManager() {
// TODO: Give this thread a name.
this(Executors.newSingleThreadExecutor());
}

// Allows tests to have more control over when directories are cleaned up.
@VisibleForTesting
ExternalShuffleBlockManager(Executor directoryCleaner) {
this.executors = Maps.newConcurrentMap();
this.directoryCleaner = directoryCleaner;
}

/** Registers a new Executor with all the configuration we need to find its shuffle files. */
public void registerExecutor(
String appId,
String execId,
ExecutorShuffleInfo executorInfo) {
- String fullId = getAppExecId(appId, execId);
AppExecId fullId = new AppExecId(appId, execId);
logger.info("Registered executor {} with {}", fullId, executorInfo);
executors.put(fullId, executorInfo);
}
@@ -78,7 +93,7 @@ public ManagedBuffer getBlockData(String appId, String execId, String blockId) {
int mapId = Integer.parseInt(blockIdParts[2]);
int reduceId = Integer.parseInt(blockIdParts[3]);

- ExecutorShuffleInfo executor = executors.get(getAppExecId(appId, execId));
ExecutorShuffleInfo executor = executors.get(new AppExecId(appId, execId));
if (executor == null) {
throw new RuntimeException(
String.format("Executor is not registered (appId=%s, execId=%s)", appId, execId));
@@ -94,6 +109,56 @@ public ManagedBuffer getBlockData(String appId, String execId, String blockId) {
}
}

Review comment (Contributor Author): The following 32 lines are the only material change in this entire PR. The rest is utility functions imported from core Utils, Java cruft around lambdas and classes, and updating/adding tests.

/**
* Removes our metadata of all executors registered for the given application, and optionally
* also deletes the local directories associated with the executors of that application in a
* separate thread.
*
* It is not valid to call registerExecutor() for an executor with this appId after invoking
* this method.
*/
public void applicationRemoved(String appId, boolean cleanupLocalDirs) {
logger.info("Application {} removed, cleanupLocalDirs = {}", appId, cleanupLocalDirs);
Iterator<Map.Entry<AppExecId, ExecutorShuffleInfo>> it = executors.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<AppExecId, ExecutorShuffleInfo> entry = it.next();
AppExecId fullId = entry.getKey();
final ExecutorShuffleInfo executor = entry.getValue();

// Only touch executors associated with the appId that was removed.
if (appId.equals(fullId.appId)) {
it.remove();

if (cleanupLocalDirs) {
logger.info("Cleaning up executor {}'s {} local dirs", fullId, executor.localDirs.length);

// Execute the actual deletion in a different thread, as it may take some time.
directoryCleaner.execute(new Runnable() {
@Override
public void run() {
deleteExecutorDirs(executor.localDirs);
}
});
}
}
}
}
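The loop above removes entries through the entry-set iterator while it walks the map; ConcurrentMap iterators are weakly consistent, so Iterator.remove() is safe during iteration and never throws ConcurrentModificationException. Here is a self-contained illustration of that pattern, simplified to string keys (the key format is made up; the real map is keyed by AppExecId).

import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class RemoveWhileIteratingDemo {
  public static void main(String[] args) {
    ConcurrentMap<String, Integer> executors = new ConcurrentHashMap<String, Integer>();
    executors.put("app-1/exec-0", 1);
    executors.put("app-1/exec-1", 2);
    executors.put("app-2/exec-0", 3);

    // Remove every entry belonging to "app-1" while iterating, as applicationRemoved does.
    Iterator<Map.Entry<String, Integer>> it = executors.entrySet().iterator();
    while (it.hasNext()) {
      Map.Entry<String, Integer> entry = it.next();
      if (entry.getKey().startsWith("app-1/")) {
        it.remove();  // safe on a ConcurrentHashMap's weakly consistent iterator
      }
    }

    System.out.println(executors.keySet());  // prints [app-2/exec-0]
  }
}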

/**
* Synchronously deletes each directory one at a time.
* Should be executed in its own thread, as this may take a long time.
*/
private void deleteExecutorDirs(String[] dirs) {
for (String localDir : dirs) {
try {
JavaUtils.deleteRecursively(new File(localDir));
logger.debug("Successfully cleaned up directory: " + localDir);
} catch (Exception e) {
logger.error("Failed to delete directory: " + localDir, e);
}
}
}

/**
* Hash-based shuffle data is simply stored as one file per block.
* This logic is from FileShuffleBlockManager.
@@ -146,9 +211,36 @@ static File getFile(String[] localDirs, int subDirsPerLocalDir, String filename)
return new File(new File(localDir, String.format("%02x", subDirId)), filename);
}

- /** For testing, clears all registered executors. */
- @VisibleForTesting
- void clearRegisteredExecutors() {
-   executors.clear();
/** Simply encodes an executor's full ID, which is appId + execId. */
private static class AppExecId {
final String appId;
final String execId;

private AppExecId(String appId, String execId) {
this.appId = appId;
this.execId = execId;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;

AppExecId appExecId = (AppExecId) o;
return Objects.equal(appId, appExecId.appId) && Objects.equal(execId, appExecId.execId);
}

@Override
public int hashCode() {
return Objects.hashCode(appId, execId);
}

@Override
public String toString() {
return Objects.toStringHelper(this)
.add("appId", appId)
.add("execId", execId)
.toString();
}
}
}
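A test-style sketch of how the package-private @VisibleForTesting constructor above can make the asynchronous cleanup deterministic: an Executor that runs tasks inline means the deletion scheduled by applicationRemoved has completed by the time the call returns. The class name, app id, and the elided registration step are assumptions for illustration, and the sketch would have to live in the same package as ExternalShuffleBlockManager to reach the package-private constructor.

import java.util.concurrent.Executor;

public class SynchronousCleanupSketch {
  public static void main(String[] args) {
    // An Executor that runs each task on the calling thread instead of a background thread.
    Executor sameThreadExecutor = new Executor() {
      @Override
      public void execute(Runnable command) {
        command.run();
      }
    };

    ExternalShuffleBlockManager manager = new ExternalShuffleBlockManager(sameThreadExecutor);

    // ... registerExecutor(...) calls for the application's executors would go here ...

    // With the inline executor, deleteExecutorDirs has already run by the time this returns,
    // so a test can immediately assert that the executors' local dirs are gone.
    manager.applicationRemoved("app-20141105-0001", true /* cleanupLocalDirs */);
  }
}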