public void testClusterConfigWithExternalHDFSFailure() throws Exception {
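// Four external HDFS URIs: the last three are injected as conflicting
// fs.default.name values at cluster, master-group and compute-group scope
// (the first one is not referenced in this test).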
String[] hdfsArray =
new String[] { "hdfs://168.192.0.70:8020",
"hdfs://168.192.0.71:8020", "hdfs://168.192.0.72:8020",
"hdfs://168.192.0.73:8020" };
ClusterCreate spec = new ClusterCreate();
spec.setName("my-cluster-external-hdfs-failure");
List<String> rps = new ArrayList<String>();
rps.add("myRp1");
spec.setRpNames(rps);
spec.setNetworkConfig(createNetConfigs());
spec.setDistro("bigtop");
spec.setDistroVendor(Constants.DEFAULT_VENDOR);
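// Cluster-level configuration points fs.default.name at the second external HDFS URI.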
String clusterConfigJson =
"{\"configuration\":{\"hadoop\":{\"core-site.xml\":{\"fs.default.name\":\""
+ hdfsArray[1] + "\"}}}}";
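// Parse the JSON with Gson and attach only the "configuration" sub-map to the spec;
// the node-group overrides below repeat this same pattern.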
Map clusterConfig = (new Gson()).fromJson(clusterConfigJson, Map.class);
spec.setConfiguration((Map<String, Object>) (clusterConfig
.get("configuration")));
//build a master group, a compute node group and a data node group.
NodeGroupCreate ng0 = new NodeGroupCreate();
List<String> masterRole = new ArrayList<String>();
masterRole.add("hadoop_namenode");
masterRole.add("hadoop_resourcemanager");
ng0.setRoles(masterRole);
ng0.setName("master");
ng0.setInstanceNum(1);
ng0.setInstanceType(InstanceType.LARGE);
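// The master group overrides fs.default.name again, this time with the third URI.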
String ng0ConfigJson =
"{\"configuration\":{\"hadoop\":{\"core-site.xml\":{\"fs.default.name\":\""
+ hdfsArray[2] + "\"}}}}";
Map ng0Config = (new Gson()).fromJson(ng0ConfigJson, Map.class);
ng0.setConfiguration((Map<String, Object>) (ng0Config
.get("configuration")));
NodeGroupCreate ng1 = new NodeGroupCreate();
List<String> computeRoles = new ArrayList<String>();
computeRoles.add("hadoop_nodemanager");
ng1.setRoles(computeRoles);
ng1.setName("compute1");
ng1.setInstanceNum(4);
ng1.setCpuNum(2);
ng1.setMemCapacityMB(7500);
ng1.setInstanceType(InstanceType.MEDIUM);
StorageRead storage = new StorageRead();
storage.setType("LOCAL");
storage.setSizeGB(10);
ng1.setStorage(storage);
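// The compute group carries yet another conflicting fs.default.name, the fourth URI.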
String ng1ConfigJson =
"{\"configuration\":{\"hadoop\":{\"core-site.xml\":{\"fs.default.name\":\""
+ hdfsArray[3] + "\"}}}}";
Map ng1Config = (new Gson()).fromJson(ng1ConfigJson, Map.class);
ng1.setConfiguration((Map<String, Object>) (ng1Config
.get("configuration")));
NodeGroupCreate ng2 = new NodeGroupCreate();
List<String> dataRoles = new ArrayList<String>();
dataRoles.add("hadoop_datanode");
ng2.setRoles(dataRoles);
ng2.setName("data1");
ng2.setInstanceNum(2);
ng2.setInstanceType(InstanceType.MEDIUM);
StorageRead storageData = new StorageRead();
storageData.setType("LOCAL");
storageData.setSizeGB(10);
ng2.setStorage(storageData);
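// Assemble the node groups, finalize the spec via ClusterSpecFactory,
// and create the cluster configuration.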
NodeGroupCreate[] ngs = new NodeGroupCreate[] { ng0, ng1, ng2 };
spec.setNodeGroups(ngs);
spec = ClusterSpecFactory.getCustomizedSpec(spec, null);
clusterConfigMgr.createClusterConfig(spec);
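// Verify the cluster entity was persisted; the loop below only dumps all
// known cluster IDs for debugging.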
ClusterEntity cluster = clusterEntityMgr.findClusterById(1L);
List<ClusterEntity> cs = clusterEntityMgr.findAllClusters();
for (ClusterEntity c : cs) {
System.out.println(c.getId());
}
cluster = clusterEntityMgr.findByName("my-cluster-external-hdfs-failure");
Assert.assertNotNull(cluster);
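// Retrieve the generated cluster config and serialize it for inspection.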
ClusterCreate attrs =
clusterConfigMgr
.getClusterConfig("my-cluster-external-hdfs-failure");
String manifest = gson.toJson(attrs);
System.out.println(manifest);
// NOTE: the original assertion is truncated at this point. As a minimal, hedged
// reconstruction we only check that a manifest was generated; the exact expected
// condition for the external HDFS failure case is not recoverable from this excerpt.
Assert.assertTrue(manifest != null && !manifest.isEmpty());