/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.chaos.actions;

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;

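/**
 * Chaos action that restarts a random live HDFS DataNode, waiting {@code sleepTime}
 * milliseconds before it is started again.
 */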
public class RestartRandomDataNodeAction extends RestartActionBaseAction {
  public RestartRandomDataNodeAction(long sleepTime) {
    super(sleepTime);
  }

  @Override
  public void perform() throws Exception {
    LOG.info("Performing action: Restart random data node");
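    // selectRandomItem() picks one live DataNode; restartDataNode() (from the base class)
    // stops it, waits sleepTime, then starts it again.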
    ServerName server = PolicyBasedChaosMonkey.selectRandomItem(getDataNodes());
    restartDataNode(server, sleepTime);
  }

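  /**
   * Fetches a live-DataNode report from HDFS and returns the hosts as {@link ServerName}s.
   */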
  public ServerName[] getDataNodes() throws IOException {
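    // The HBase root dir is expected to live on HDFS; the cast fails for any other filesystem.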
    DistributedFileSystem fs = (DistributedFileSystem) FSUtils.getRootDir(getConf())
        .getFileSystem(getConf());
    DFSClient dfsClient = fs.getClient();
    List<ServerName> hosts = new LinkedList<ServerName>();
    for (DatanodeInfo dataNode: dfsClient.datanodeReport(HdfsConstants.DatanodeReportType.LIVE)) {
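      // Only the hostname matters for a DataNode; -1 is a placeholder for port and start code.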
      hosts.add(ServerName.valueOf(dataNode.getHostName(), -1, -1));
    }
    return hosts.toArray(new ServerName[hosts.size()]);
  }
}