throw new IllegalStateException("Profile " + profileId + " already exists. Use --force to recreate existing profiles.");
}
}
}
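// Build the cluster profiles against the current default version, using the stock "hadoop" profile as the common parent.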
Version version = fabricService.getRequiredDefaultVersion();
Profile hadoop = version.getRequiredProfile("hadoop");
Map<String, Map<String, String>> configs;
String versionId = version.getId();
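// Base cluster profile ("hadoop-<name>"): inherits from the hadoop profile and points every member at the
// name node's ZooKeeper-registered IP for the HDFS filesystem (port 9000) and the HTTP address (port 9002).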
ProfileBuilder builder = ProfileBuilder.Factory.create(versionId, "hadoop-" + name);
builder.addParent(hadoop.getId());
configs = new HashMap<String, Map<String, String>>();
configs.put("io.fabric8.hadoop", new HashMap<String, String>());
configs.get("io.fabric8.hadoop").put("fs.default.name", "hdfs://${zk:" + nameNode + "/ip}:9000");
configs.get("io.fabric8.hadoop").put("dfs.http.address", "hdfs://${zk:" + nameNode + "/ip}:9002");
Profile cluster = profileService.createProfile(builder.setConfigurations(configs).getProfile());
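// Name node profile: extends the cluster profile and enables the nameNode flag in the io.fabric8.hadoop PID.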
builder = ProfileBuilder.Factory.create(versionId, "hadoop-" + name + "-namenode");
builder.addParent(cluster.getId());
configs = new HashMap<String, Map<String, String>>();
configs.put("io.fabric8.hadoop", new HashMap<String, String>());
configs.get("io.fabric8.hadoop").put("nameNode", "true");
Profile nameNodeProfile = profileService.createProfile(builder.setConfigurations(configs).getProfile());
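// Secondary name node profile: same pattern, enabling the secondaryNameNode flag.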
builder = ProfileBuilder.Factory.create(versionId, "hadoop-" + name + "-secondary-namenode");
builder.addParent(cluster.getId());
configs = new HashMap<String, Map<String, String>>();
configs.put("io.fabric8.hadoop", new HashMap<String, String>());
configs.get("io.fabric8.hadoop").put("secondaryNameNode", "true");
Profile secondaryNameNodeProfile = profileService.createProfile(builder.setConfigurations(configs).getProfile());
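// Data node profile: enables the dataNode flag.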
builder = ProfileBuilder.Factory.create(versionId, "hadoop-" + name + "-datanode");
builder.addParent(cluster.getId());
configs = new HashMap<String, Map<String, String>>();
configs.put("io.fabric8.hadoop", new HashMap<String, String>());
configs.get("io.fabric8.hadoop").put("dataNode", "true");
Profile dataNodeProfile = profileService.createProfile(builder.setConfigurations(configs).getProfile());
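// Job tracker profile: enables the jobTracker flag.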
builder = ProfileBuilder.Factory.create(versionId, "hadoop-" + name + "-job-tracker");
builder.addParent(cluster.getId());
configs = new HashMap<String, Map<String, String>>();
configs.put("io.fabric8.hadoop", new HashMap<String, String>());
configs.get("io.fabric8.hadoop").put("jobTracker", "true");
Profile jobTrackerProfile = profileService.createProfile(builder.setConfigurations(configs).getProfile());
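// Task tracker profile: enables the taskTracker flag.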
builder = ProfileBuilder.Factory.create(versionId, "hadoop-" + name + "-task-tracker");
builder.addParent(cluster.getId());
configs = new HashMap<String, Map<String, String>>();
configs.put("io.fabric8.hadoop", new HashMap<String, String>());
configs.get("io.fabric8.hadoop").put("taskTracker", "true");
Profile taskTrackerProfile = profileService.createProfile(builder.setConfigurations(configs).getProfile());
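// Insight profile: extends insight-hdfs and points the Elasticsearch HDFS gateway at this cluster's name node.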
builder = ProfileBuilder.Factory.create(versionId, "insight-hdfs-" + name);
builder.addParent(version.getRequiredProfile("insight-hdfs").getId());
configs = new HashMap<String, Map<String, String>>();
configs.put("io.fabric8.insight.elasticsearch-default", new HashMap<String, String>());
configs.get("io.fabric8.insight.elasticsearch-default").put("gateway.hdfs.uri", "hdfs://${zk:" + nameNode + "/ip}:9000");
Profile insightProfile = profileService.createProfile(builder.setConfigurations(configs).getProfile());