}
}
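/**
 * Builds a two-host cluster ("c1") on the HDP-2.0.6 stack with HDFS and YARN
 * installed, then applies hdfs-site and yarn-site as desired configurations.
 */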
private void createConfigs() throws AmbariException {
String clusterName = "c1";
ClusterRequest r = new ClusterRequest(null, clusterName, "HDP-2.0.6", null);
controller.createCluster(r);
clusters.getCluster(clusterName).setDesiredStackVersion(new StackId("HDP-2.0.6"));
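// Add the HDFS and YARN services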
String serviceName = "HDFS";
String serviceName2 = "YARN";
createService(clusterName, serviceName, null);
createService(clusterName, serviceName2, null);
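// Add the NAMENODE, DATANODE, HDFS_CLIENT and RESOURCEMANAGER components in the INIT state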
String componentName1 = "NAMENODE";
String componentName2 = "DATANODE";
String componentName3 = "HDFS_CLIENT";
String componentName4 = "RESOURCEMANAGER";
createServiceComponent(clusterName, serviceName, componentName1,
State.INIT);
createServiceComponent(clusterName, serviceName, componentName2,
State.INIT);
createServiceComponent(clusterName, serviceName, componentName3,
State.INIT);
createServiceComponent(clusterName, serviceName2, componentName4,
State.INIT);
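// Register hosts h1 and h2 with their OS attributes and map them into the cluster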
String host1 = "h1";
clusters.addHost(host1);
Map<String, String> hostAttributes = new HashMap<String, String>();
hostAttributes.put("os_family", "redhat");
hostAttributes.put("os_release_version", "5.9");
clusters.getHost("h1").setHostAttributes(hostAttributes);
clusters.getHost("h1").persist();
String host2 = "h2";
clusters.addHost(host2);
hostAttributes = new HashMap<String, String>();
hostAttributes.put("os_family", "redhat");
hostAttributes.put("os_release_version", "6.3");
clusters.getHost("h2").setHostAttributes(hostAttributes);
clusters.getHost("h2").persist();
clusters.mapHostToCluster(host1, clusterName);
clusters.mapHostToCluster(host2, clusterName);
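// Distribute the components across the two hosts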
createServiceComponentHost(clusterName, null, componentName1,
host1, null);
createServiceComponentHost(clusterName, serviceName, componentName2,
host1, null);
createServiceComponentHost(clusterName, serviceName, componentName2,
host2, null);
createServiceComponentHost(clusterName, serviceName, componentName3,
host1, null);
createServiceComponentHost(clusterName, serviceName, componentName3,
host2, null);
createServiceComponentHost(clusterName, serviceName2, componentName4,
host2, null);
// Create the hdfs-site and yarn-site property maps
Map<String, String> configs = new HashMap<String, String>();
configs.put(NAMENODE_PORT_V1, "localhost:${ambari.dfs.datanode.http.port}");
configs.put(DATANODE_PORT, "localhost:70075");
configs.put("ambari.dfs.datanode.http.port", "70070");
Map<String, String> yarnConfigs = new HashMap<String, String>();
yarnConfigs.put(RESOURCEMANAGER_PORT, "8088");
yarnConfigs.put(NODEMANAGER_PORT, "8042");
ConfigurationRequest cr1 = new ConfigurationRequest(clusterName,
"hdfs-site", "versionN", configs);
ClusterRequest crReq = new ClusterRequest(null, clusterName, null, null);
crReq.setDesiredConfig(cr1);
controller.updateClusters(Collections.singleton(crReq), null);
Cluster cluster = clusters.getCluster(clusterName);
Assert.assertEquals("versionN", cluster.getDesiredConfigByType("hdfs-site")
.getVersionTag());
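// Apply yarn-site under the same "versionN" tag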
ConfigurationRequest cr2 = new ConfigurationRequest(clusterName,
"yarn-site", "versionN", yarnConfigs);
crReq.setDesiredConfig(cr2);
controller.updateClusters(Collections.singleton(crReq), null);
Assert.assertEquals("versionN", cluster.getDesiredConfigByType("yarn-site")
.getVersionTag());
Assert.assertEquals("localhost:${ambari.dfs.datanode.http.port}", cluster.getDesiredConfigByType