// Verifies cluster-config creation for a compute-only cluster backed by an
// external HDFS. Four distinct namenode URIs are used: hdfsArray[0] is set as
// the spec's externalHDFS, while hdfsArray[1..3] are planted as decoy
// fs.default.name values in the cluster-level and node-group-level
// configuration maps. The test then asserts (via the persisted entity, the
// ClusterRead view, and the serialized manifest) that hdfsArray[0] is the
// value that takes effect.
// NOTE(review): this method continues past the visible region — the tail of
// the final Pattern-based assertion (and possibly more) is not shown here.
public void testClusterConfigWithExternalHDFS() throws Exception {
// [0] = the real external HDFS URI; [1]-[3] = decoys that must be overridden.
String[] hdfsArray =
new String[] { "hdfs://168.192.0.70:8020",
"hdfs://168.192.0.71:8020", "hdfs://168.192.0.72:8020",
"hdfs://168.192.0.73:8020" };
// Build the cluster spec: name, resource pool, network, distro, and the
// external HDFS endpoint (hdfsArray[0]).
ClusterCreate spec = new ClusterCreate();
spec.setName("my-cluster-external-hdfs");
List<String> rps = new ArrayList<String>();
rps.add("myRp1");
spec.setRpNames(rps);
spec.setNetworkConfig(createNetConfigs());
spec.setDistro("bigtop");
spec.setDistroVendor(Constants.DEFAULT_VENDOR);
spec.setExternalHDFS(hdfsArray[0]);
// Cluster-level configuration carrying a decoy fs.default.name (hdfsArray[1]);
// the externalHDFS setting above is expected to win over this.
String clusterConfigJson =
"{\"configuration\":{\"hadoop\":{\"core-site.xml\":{\"fs.default.name\":\""
+ hdfsArray[1] + "\"}}}}";
// NOTE(review): raw Map + unchecked cast — Gson deserializes into
// Map<String,Object> here; consider a TypeToken to avoid the raw type.
Map clusterConfig = (new Gson()).fromJson(clusterConfigJson, Map.class);
spec.setConfiguration((Map<String, Object>) (clusterConfig
.get("configuration")));
//build a jobtracker group, two compute node groups.
// ng0: single-node resourcemanager (master) group, LARGE instances, with
// another decoy fs.default.name (hdfsArray[2]) at node-group scope.
NodeGroupCreate ng0 = new NodeGroupCreate();
List<String> computerMasterRoles = new ArrayList<String>();
computerMasterRoles.add("hadoop_resourcemanager");
ng0.setRoles(computerMasterRoles);
ng0.setName("resourcemanager");
ng0.setInstanceNum(1);
ng0.setInstanceType(InstanceType.LARGE);
String ng0ConfigJson =
"{\"configuration\":{\"hadoop\":{\"core-site.xml\":{\"fs.default.name\":\""
+ hdfsArray[2] + "\"}}}}";
Map ng0Config = (new Gson()).fromJson(ng0ConfigJson, Map.class);
ng0.setConfiguration((Map<String, Object>) (ng0Config
.get("configuration")));
// ng1: 4-node nodemanager (compute worker) group with 10 GB local storage
// and the last decoy fs.default.name (hdfsArray[3]).
NodeGroupCreate ng1 = new NodeGroupCreate();
List<String> computeWorkerRoles = new ArrayList<String>();
computeWorkerRoles.add("hadoop_nodemanager");
ng1.setRoles(computeWorkerRoles);
ng1.setName("compute1");
ng1.setInstanceNum(4);
ng1.setInstanceType(InstanceType.MEDIUM);
StorageRead storage = new StorageRead();
storage.setType("LOCAL");
storage.setSizeGB(10);
ng1.setStorage(storage);
String ng1ConfigJson =
"{\"configuration\":{\"hadoop\":{\"core-site.xml\":{\"fs.default.name\":\""
+ hdfsArray[3] + "\"}}}}";
Map ng1Config = (new Gson()).fromJson(ng1ConfigJson, Map.class);
ng1.setConfiguration((Map<String, Object>) (ng1Config
.get("configuration")));
// ng2: second compute worker group (2 nodes, same roles list as ng1) with
// its own 10 GB local storage and no node-group configuration override.
NodeGroupCreate ng2 = new NodeGroupCreate();
ng2.setRoles(computeWorkerRoles);
ng2.setName("compute2");
ng2.setInstanceNum(2);
ng2.setInstanceType(InstanceType.MEDIUM);
StorageRead storageCompute = new StorageRead();
storageCompute.setType("LOCAL");
storageCompute.setSizeGB(10);
ng2.setStorage(storageCompute);
NodeGroupCreate[] ngs = new NodeGroupCreate[] { ng0, ng1, ng2 };
spec.setNodeGroups(ngs);
// Normalize the spec through the factory, then persist the cluster config.
spec = ClusterSpecFactory.getCustomizedSpec(spec, null);
clusterConfigMgr.createClusterConfig(spec);
// NOTE(review): the hard-coded id-1 lookup result is immediately overwritten
// by the findByName lookup below, and the println loop looks like leftover
// debugging — candidates for cleanup.
ClusterEntity cluster = clusterEntityMgr.findClusterById(1l);
List<ClusterEntity> cs = clusterEntityMgr.findAllClusters();
for (ClusterEntity c : cs) {
System.out.println(c.getId());
}
cluster = clusterEntityMgr.findByName("my-cluster-external-hdfs");
Assert.assertTrue(cluster != null);
// The persisted advanced properties must record the external HDFS URI
// (hdfsArray[0]), not any of the decoy fs.default.name values.
Assert.assertEquals(cluster.getAdvancedProperties(), "{\"ExternalHDFS\":\"hdfs://168.192.0.70:8020\"}");
ClusterRead clusterRead = clusterEntityMgr.toClusterRead("my-cluster-external-hdfs");
Assert.assertEquals(clusterRead.getExternalHDFS(), "hdfs://168.192.0.70:8020");
// Serialize the stored config back out and check the manifest text; the
// regex below requires at least 3 occurrences of hdfsArray[0] — presumably
// one per scope (cluster + node groups). The assertion's tail (the matcher
// call) lies beyond the visible region — confirm against the full file.
ClusterCreate attrs =
clusterConfigMgr.getClusterConfig("my-cluster-external-hdfs");
String manifest = gson.toJson(attrs);
System.out.println(manifest);
Assert.assertTrue(
Pattern.compile("([\\s\\S]*" + hdfsArray[0] + "[\\s\\S]*){3}")