Skip to content

Commit 409eee8

Browse files
committed
HDFS-451. Add fault injection tests, Pipeline_Fi_06,07,14,15, for DataTransferProtocol.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/trunk@802264 13f79535-47bb-0310-9956-ffa450edef68
1 parent d9a206f commit 409eee8

File tree

10 files changed

+536
-10
lines changed

10 files changed

+536
-10
lines changed

CHANGES.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -89,6 +89,9 @@ Trunk (unreleased changes)
8989
HDFS-530. Refactor TestFileAppend* to remove code duplication.
9090
(Konstantin Boudnik via szetszwo)
9191

92+
HDFS-451. Add fault injection tests, Pipeline_Fi_06,07,14,15, for
93+
DataTransferProtocol. (szetszwo)
94+
9295
BUG FIXES
9396

9497
HDFS-76. Better error message to users when commands fail because of

src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ class BlockReceiver implements java.io.Closeable, FSConstants {
7777
private String clientName;
7878
DatanodeInfo srcDataNode = null;
7979
private Checksum partialCrc = null;
80-
private DataNode datanode = null;
80+
private final DataNode datanode;
8181

8282
BlockReceiver(Block block, DataInputStream in, String inAddr,
8383
String myAddr, boolean isRecovery, String clientName,
@@ -128,6 +128,9 @@ class BlockReceiver implements java.io.Closeable, FSConstants {
128128
}
129129
}
130130

131+
/** Return the datanode object. */
132+
DataNode getDataNode() {return datanode;}
133+
131134
/**
132135
* close files.
133136
*/

src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -490,6 +490,11 @@ DataNodeMetrics getMetrics() {
490490
return myMetrics;
491491
}
492492

493+
/** Return DatanodeRegistration */
494+
public DatanodeRegistration getDatanodeRegistration() {
495+
return dnRegistration;
496+
}
497+
493498
/**
494499
* Return the namenode's identifier
495500
*/

src/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,9 @@ public DataXceiver(Socket s, DataNode datanode,
8484
}
8585
}
8686

87+
/** Return the datanode object. */
88+
DataNode getDataNode() {return datanode;}
89+
8790
/**
8891
* Read/write data from/to the DataXceiveServer.
8992
*/
Lines changed: 195 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,195 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.fi;
19+
20+
import java.util.ArrayList;
21+
import java.util.List;
22+
23+
import org.apache.hadoop.fi.FiTestUtil.Action;
24+
import org.apache.hadoop.fi.FiTestUtil.ActionContainer;
25+
import org.apache.hadoop.hdfs.protocol.DatanodeID;
26+
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
27+
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
28+
import org.apache.hadoop.hdfs.server.datanode.DataNode;
29+
import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
30+
31+
/**
32+
* Utilities for DataTransferProtocol related tests,
33+
* e.g. TestFiDataTransferProtocol.
34+
*/
35+
public class DataTransferTestUtil {
36+
private static DataTransferTest thepipelinetest;
37+
/** initialize pipeline test */
38+
public static DataTransferTest initTest() {
39+
return thepipelinetest = new DataTransferTest();
40+
}
41+
/** get the pipeline test object */
42+
public static DataTransferTest getPipelineTest() {
43+
return thepipelinetest;
44+
}
45+
46+
/**
47+
* The DataTransferTest class includes a pipeline
48+
* and some actions.
49+
*/
50+
public static class DataTransferTest {
51+
private Pipeline thepipeline;
52+
/** Simulate action for the receiverOpWriteBlock pointcut */
53+
public final ActionContainer<DataNode> fiReceiverOpWriteBlock
54+
= new ActionContainer<DataNode>();
55+
/** Simulate action for the callReceivePacket pointcut */
56+
public final ActionContainer<DataNode> fiCallReceivePacket
57+
= new ActionContainer<DataNode>();
58+
/** Simulate action for the statusRead pointcut */
59+
public final ActionContainer<DataNode> fiStatusRead
60+
= new ActionContainer<DataNode>();
61+
62+
/** Initialize the pipeline. */
63+
public Pipeline initPipeline(LocatedBlock lb) {
64+
if (thepipeline != null) {
65+
throw new IllegalStateException("thepipeline != null");
66+
}
67+
return thepipeline = new Pipeline(lb);
68+
}
69+
70+
/** Return the pipeline. */
71+
public Pipeline getPipeline() {
72+
if (thepipeline == null) {
73+
throw new IllegalStateException("thepipeline == null");
74+
}
75+
return thepipeline;
76+
}
77+
}
78+
79+
/** A pipeline contains a list of datanodes. */
80+
public static class Pipeline {
81+
private final List<String> datanodes = new ArrayList<String>();
82+
83+
private Pipeline(LocatedBlock lb) {
84+
for(DatanodeInfo d : lb.getLocations()) {
85+
datanodes.add(d.getName());
86+
}
87+
}
88+
89+
/** Does the pipeline contains d at the n th position? */
90+
public boolean contains(int n, DatanodeID d) {
91+
return d.getName().equals(datanodes.get(n));
92+
}
93+
94+
/** {@inheritDoc} */
95+
public String toString() {
96+
return getClass().getSimpleName() + datanodes;
97+
}
98+
}
99+
100+
/** Action for DataNode */
101+
public static abstract class DataNodeAction implements Action<DataNode> {
102+
/** The name of the test */
103+
final String currentTest;
104+
/** The index of the datanode */
105+
final int index;
106+
107+
/**
108+
* @param currentTest The name of the test
109+
* @param index The index of the datanode
110+
*/
111+
private DataNodeAction(String currentTest, int index) {
112+
this.currentTest = currentTest;
113+
this.index = index;
114+
}
115+
116+
/** {@inheritDoc} */
117+
public String toString() {
118+
return currentTest + ", index=" + index;
119+
}
120+
121+
/** {@inheritDoc} */
122+
String toString(DataNode datanode) {
123+
return "FI: " + this + ", datanode="
124+
+ datanode.getDatanodeRegistration().getName();
125+
}
126+
}
127+
128+
/** Throws OutOfMemoryError. */
129+
public static class OomAction extends DataNodeAction {
130+
/** Create an action for datanode i in the pipeline. */
131+
public OomAction(String currentTest, int i) {
132+
super(currentTest, i);
133+
}
134+
135+
@Override
136+
public void run(DataNode datanode) {
137+
final Pipeline p = getPipelineTest().getPipeline();
138+
if (p.contains(index, datanode.getDatanodeRegistration())) {
139+
final String s = toString(datanode);
140+
FiTestUtil.LOG.info(s);
141+
throw new OutOfMemoryError(s);
142+
}
143+
}
144+
}
145+
146+
/** Throws DiskOutOfSpaceException. */
147+
public static class DoosAction extends DataNodeAction {
148+
/** Create an action for datanode i in the pipeline. */
149+
public DoosAction(String currentTest, int i) {
150+
super(currentTest, i);
151+
}
152+
153+
@Override
154+
public void run(DataNode datanode) throws DiskOutOfSpaceException {
155+
final Pipeline p = getPipelineTest().getPipeline();
156+
if (p.contains(index, datanode.getDatanodeRegistration())) {
157+
final String s = toString(datanode);
158+
FiTestUtil.LOG.info(s);
159+
throw new DiskOutOfSpaceException(s);
160+
}
161+
}
162+
}
163+
164+
/**
165+
* Sleep some period of time so that it slows down the datanode
166+
* or sleep forever so that datanode becomes not responding.
167+
*/
168+
public static class SleepAction extends DataNodeAction {
169+
/** In milliseconds, duration <= 0 means sleeping forever.*/
170+
final long duration;
171+
172+
/**
173+
* Create an action for datanode i in the pipeline.
174+
* @param duration In milliseconds, duration <= 0 means sleeping forever.
175+
*/
176+
public SleepAction(String currentTest, int i, long duration) {
177+
super(currentTest, i);
178+
this.duration = duration;
179+
}
180+
181+
@Override
182+
public void run(DataNode datanode) {
183+
final Pipeline p = getPipelineTest().getPipeline();
184+
if (p.contains(index, datanode.getDatanodeRegistration())) {
185+
final String s = toString(datanode) + ", duration=" + duration;
186+
FiTestUtil.LOG.info(s);
187+
if (duration <= 0) {
188+
for(; true; FiTestUtil.sleep(1000)); //sleep forever
189+
} else {
190+
FiTestUtil.sleep(duration);
191+
}
192+
}
193+
}
194+
}
195+
}
Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.fi;
19+
20+
import java.io.IOException;
21+
22+
import org.apache.commons.logging.Log;
23+
import org.apache.commons.logging.LogFactory;
24+
25+
/** Test Utilities */
26+
public class FiTestUtil {
27+
/** Logging */
28+
public static final Log LOG = LogFactory.getLog(FiTestUtil.class);
29+
30+
/** Return the method name of the callee. */
31+
public static String getMethodName() {
32+
return Thread.currentThread().getStackTrace()[2].getMethodName();
33+
}
34+
35+
/**
36+
* Sleep.
37+
* If there is an InterruptedException, re-throw it as a RuntimeException.
38+
*/
39+
public static void sleep(long ms) {
40+
try {
41+
Thread.sleep(ms);
42+
} catch (InterruptedException e) {
43+
throw new RuntimeException(e);
44+
}
45+
}
46+
47+
/** Action interface */
48+
public static interface Action<T> {
49+
/** Run the action with the parameter. */
50+
public void run(T parameter) throws IOException;
51+
}
52+
53+
/** An ActionContainer contains at most one action. */
54+
public static class ActionContainer<T> {
55+
private Action<T> action;
56+
57+
/** Create an empty container. */
58+
public ActionContainer() {}
59+
60+
/** Set action. */
61+
public void set(Action<T> a) {action = a;}
62+
63+
/** Run the action if it exists. */
64+
public void run(T obj) throws IOException {
65+
if (action != null) {
66+
action.run(obj);
67+
}
68+
}
69+
}
70+
}
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.hdfs.protocol;
19+
20+
import org.apache.commons.logging.Log;
21+
import org.apache.commons.logging.LogFactory;
22+
import org.apache.hadoop.fi.DataTransferTestUtil;
23+
24+
/** Aspect for ClientProtocol */
25+
/** Aspect for injecting and observing faults around ClientProtocol calls. */
public aspect ClientProtocolAspects {
  public static final Log LOG = LogFactory.getLog(ClientProtocolAspects.class);

  /** Matches calls that allocate a new block for a file. */
  pointcut addBlock():
    call(LocatedBlock ClientProtocol.addBlock(String, String));

  after() returning(LocatedBlock lb): addBlock() {
    // Guard against NPE: this advice fires for every addBlock call once the
    // aspect is woven, even when no fault injection test has called
    // DataTransferTestUtil.initTest().
    if (DataTransferTestUtil.getPipelineTest() != null) {
      LOG.info("FI: addBlock "
          + DataTransferTestUtil.getPipelineTest().initPipeline(lb));
    }
  }
}

0 commit comments

Comments (0)