
Commit 9eb153c

Add interface DataSetSubLockStrategy
1 parent d4f9eb1 commit 9eb153c

File tree

5 files changed: +128, -54 lines changed

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java

Lines changed: 4 additions & 0 deletions

@@ -1744,6 +1744,10 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final boolean
       DFS_DATANODE_LOCKMANAGER_TRACE_DEFAULT = false;
 
+  public static final String DFS_DATANODE_DATASET_SUBLOCK_COUNT_KEY =
+      "dfs.datanode.dataset.sublock.count";
+  public static final long DFS_DATANODE_DATASET_SUBLOCK_COUNT_DEFAULT = 1000L;
+
   // dfs.client.retry confs are moved to HdfsClientConfigKeys.Retry
   @Deprecated
   public static final String DFS_CLIENT_RETRY_POLICY_ENABLED_KEY
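
For context, a minimal sketch of how the new key might be read from the DataNode configuration; the wiring into the dataset lock manager is not part of this hunk, and the class name below is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    // Hypothetical illustration: read the configured sub lock count,
    // falling back to the default of 1000 when the key is unset.
    public class SubLockCountExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        long subLockCount = conf.getLong(
            DFSConfigKeys.DFS_DATANODE_DATASET_SUBLOCK_COUNT_KEY,
            DFSConfigKeys.DFS_DATANODE_DATASET_SUBLOCK_COUNT_DEFAULT);
        System.out.println("dataset sub lock count = " + subLockCount);
      }
    }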
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetSubLockStrategy.java

Lines changed: 36 additions & 0 deletions

@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode;
+
+import java.util.List;
+
+/**
+ * This interface is used to generate sub lock name for a blockid.
+ */
+public interface DataSetSubLockStrategy {
+
+  /**
+   * Generate sub lock name for the given blockid.
+   * @param blockid the block id.
+   * @return sub lock name for the input blockid.
+   */
+  String blockIdToSubLock(long blockid);
+
+  List<String> getAllSubLockName();
+}

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeUtil.java

Lines changed: 0 additions & 27 deletions

@@ -21,8 +21,6 @@
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -129,31 +127,6 @@ public static File idToBlockDir(File root, long blockId) {
     return new File(root, path);
   }
 
-  /**
-   * Take an example. We hava a block with blockid mapping to:
-   * "/data1/hadoop/hdfs/datanode/current/BP-xxxx/current/finalized/subdir0/subdir0"
-   * We return "subdir0/subdir0"
-   * @param blockId blockId
-   * @return The two-level subdir name
-   */
-  public static String idToBlockDirSuffixName(long blockId) {
-    int d1 = (int) ((blockId >> 16) & 0x1F);
-    int d2 = (int) ((blockId >> 8) & 0x1F);
-    return DataStorage.BLOCK_SUBDIR_PREFIX + d1 + SEP +
-        DataStorage.BLOCK_SUBDIR_PREFIX + d2;
-  }
-
-  public static List<String> getAllSubDirNameForDataSetLock() {
-    List<String> res = new ArrayList<>();
-    for (int d1 = 0; d1 <= 0x1F; d1++) {
-      for (int d2 = 0; d2 <= 0x1F; d2++) {
-        res.add(DataStorage.BLOCK_SUBDIR_PREFIX + d1 + SEP +
-            DataStorage.BLOCK_SUBDIR_PREFIX + d2);
-      }
-    }
-    return res;
-  }
-
   /**
    * @return the FileInputStream for the meta data of the given block.
    * @throws FileNotFoundException
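
For reference, the removed helpers named sub locks after the two-level block subdirectory layout, which yields a fixed 32 x 32 = 1024 names; the mod-based strategy added below makes the count configurable instead. A standalone re-creation of the deleted mapping, for illustration only (assuming DataStorage.BLOCK_SUBDIR_PREFIX is "subdir" and using "/" in place of SEP):

    // Reproduces the deleted idToBlockDirSuffixName logic on a sample block id.
    long blockId = 0x00AABBCCL;
    int d1 = (int) ((blockId >> 16) & 0x1F);  // 0xAA & 0x1F = 10
    int d2 = (int) ((blockId >> 8) & 0x1F);   // 0xBB & 0x1F = 27
    String subDirName = "subdir" + d1 + "/" + "subdir" + d2;  // "subdir10/subdir27"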
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ModDataSetSubLockStrategy.java

Lines changed: 53 additions & 0 deletions

@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class ModDataSetSubLockStrategy implements DataSetSubLockStrategy {
+  public static final Logger LOG = LoggerFactory.getLogger(DataSetSubLockStrategy.class);
+
+  private static final String LOCK_NAME_PERFIX = "SubLock";
+  private long modFactor;
+
+  public ModDataSetSubLockStrategy(long mod) {
+    if (mod <= 0) {
+      mod = 1L;
+    }
+    this.modFactor = mod;
+  }
+
+  @Override
+  public String blockIdToSubLock(long blockid) {
+    return LOCK_NAME_PERFIX + String.valueOf(blockid % modFactor);
+  }
+
+  @Override
+  public List<String> getAllSubLockName() {
+    List<String> res = new ArrayList<>();
+    for (long i = 0L; i < modFactor; i++) {
+      res.add(LOCK_NAME_PERFIX + i);
+    }
+    return res;
+  }
+}
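
A short caller-side sketch of the new strategy; the values shown assume the default sub lock count of 1000 from DFSConfigKeys:

    // Hypothetical usage: map a block id to its sub lock name.
    DataSetSubLockStrategy strategy = new ModDataSetSubLockStrategy(1000L);
    String name = strategy.blockIdToSubLock(123456789L);        // "SubLock789"
    java.util.List<String> all = strategy.getAllSubLockName();  // "SubLock0" ... "SubLock999"

Note that the constructor clamps non-positive mod values to 1, so blockIdToSubLock always produces a valid name even with a misconfigured count.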
