// Build the topology Config from the raw storm configuration map.
// NOTE(review): stormconf, tableName, shards, replication and topologyName are
// bound earlier in the enclosing method (not visible in this chunk).
String hadoopConfDir = (String) stormconf.get("hadoop.conf.dir");
String localWorkDirList = (String) stormconf.get("higo.workdir.list");
// Number of merge-server workers; parsed leniently via the project helper.
Integer msCount = StormUtils.parseInt(stormconf
.get("higo.mergeServer.count"));
Config conf = new Config();
// Copy the per-table mode setting ("higo.mode.<table>") into the topology
// conf for every table listed, skipping tables with no such setting.
String[] tablelist=tableName.split(",");
for(String tbl:tablelist)
{
String key="higo.mode."+tbl;
Object val=stormconf.get(key);
if(val!=null)
{
conf.put(key, val);
}
}
// One worker per shard plus one per merge server; ackers disabled.
conf.setNumWorkers(shards + msCount);
conf.setNumAckers(0);
// conf.setMaxSpoutPending(100);
// Topology-specific assignment list/ports, keyed by "<prefix>.<topologyName>".
List<String> assignment=(List<String>) stormconf.get(MdrillDefaultTaskAssignment.MDRILL_ASSIGNMENT_DEFAULT+"."+topologyName);
// NOTE(review): if the ports key is absent, String.valueOf(null) yields the
// literal string "null", not null — verify downstream treats that as "unset".
String assignmentports=String.valueOf(stormconf.get(MdrillDefaultTaskAssignment.MDRILL_ASSIGNMENT_PORTS+"."+topologyName));
if((assignment==null||assignment.size()==0)&&stormconf.containsKey("higo.fixed.shards"))
{
// Compatibility path for the old (fixed-shard) scheduler.
// NOTE(review): the containsKey ternary below is redundant — the enclosing
// if-condition already guarantees "higo.fixed.shards" is present.
Integer fixassign = StormUtils.parseInt(stormconf.containsKey("higo.fixed.shards")?stormconf.get("higo.fixed.shards"):0);
conf.put(CustomAssignment.TOPOLOGY_CUSTOM_ASSIGNMENT,MdrillTaskAssignment.class.getName());
conf.put(MdrillTaskAssignment.SHARD_REPLICATION, replication);
conf.put(MdrillTaskAssignment.HIGO_FIX_SHARDS,fixassign);
// Forward each per-index fixed-shard entry ("higo.fixed.shards.1"..N);
// indices are 1-based in the source config.
for(int i=1;i<=fixassign;i++)
{
conf.put(MdrillTaskAssignment.HIGO_FIX_SHARDS+"."+i,(String) stormconf.get("higo.fixed.shards"+"."+i));
}
conf.put(MdrillTaskAssignment.MS_PORTS, (String) stormconf.get("higo.merge.ports"));
conf.put(MdrillTaskAssignment.MS_NAME, "merge");
conf.put(MdrillTaskAssignment.SHARD_NAME, "shard");
conf.put(MdrillTaskAssignment.REALTIME_NAME, "realtime");
}else{
// New default scheduler: store the explicit assignment and ports.
// NOTE(review): keys are written WITHOUT the "."+topologyName suffix used
// when reading above — presumably intentional because this conf is already
// topology-local, but confirm against MdrillDefaultTaskAssignment.
conf.put(CustomAssignment.TOPOLOGY_CUSTOM_ASSIGNMENT, MdrillDefaultTaskAssignment.class.getName());
conf.put(MdrillDefaultTaskAssignment.MDRILL_ASSIGNMENT_DEFAULT, assignment);
conf.put(MdrillDefaultTaskAssignment.MDRILL_ASSIGNMENT_PORTS, assignmentports);
}
// Parameters for the (single, non-replicated) merge bolt instance.
// NOTE(review): population of paramsMs continues past the end of this chunk.
BoltParams paramsMs=new BoltParams();
paramsMs.compname="merge_0";
paramsMs.replication=1;
paramsMs.replicationindex=0;