Package eas.users.students.fabian.diplomarbeit.EvolvableBoxAgents3D.Agents

Examples of eas.users.students.fabian.diplomarbeit.EvolvableBoxAgents3D.Agents.EvolvableBoxAgent3D


  public void runDuringSimulation(EvolvableBoxAgent3DEnvironment env, Wink simZyk,
      ParCollection params) {

   
      env.resetEnvironment();
      EvolvableBoxAgent3D agent = EvolvableBoxAgent3DGenomeHandler
          .createAgentFromGenome(0, null, env, genome);
      env.addAgent(agent);
      // Drop agent
      env.stepPhysicalSimulationBy10Seconds();
      // Start fitness measurement
      agent.setFitnessRelevantStartingPosition();

      // 600 steps of 2 * 1/60 s each --> 600 * 1/30 s = 20 s of simulated time.
      for (int j = 0; j < numberOf60HertzSteps; j++) {

        //env.stepPhysicalSimulationAt60Hertz();
        env.step(simZyk);
        if (j % whichStepToThinkAt == 0) {
          agent.getBrain().decideAndReact(simZyk);
        }

      }
      System.out.println("Fitness: " + agent.getFitness());
     
      env.removeAgent(0);
      env.resetEnvironment();
 
  }
View Full Code Here
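
The loop above steps the physics world at a fixed rate and only lets the neural controller act every whichStepToThinkAt steps. A minimal, self-contained sketch of that timing pattern, using hypothetical Physics and Brain stand-ins rather than the project's actual classes, could look like this:

public class FixedStepEvaluationSketch {

  // Hypothetical stand-ins for the environment's physics and the agent's brain.
  interface Physics { void step(double dt); }
  interface Brain { void decideAndReact(); }

  /** Steps physics at 60 Hz and lets the brain act only on every thinkInterval-th step. */
  static void evaluate(Physics physics, Brain brain, int steps, int thinkInterval) {
    final double dt = 1.0 / 60.0;
    for (int j = 0; j < steps; j++) {
      physics.step(dt);
      if (j % thinkInterval == 0) {
        brain.decideAndReact();
      }
    }
  }

  public static void main(String[] args) {
    // In the snippet above each env.step() advances 2 * 1/60 s, so 600 steps
    // cover 20 s; with 1/60 s per step, 1200 steps cover the same span.
    evaluate(dt -> { /* advance the physics world by dt */ },
        () -> { /* read sensors, set motor values */ },
        1200, 2);
  }
}

Decoupling the physics rate from the decision rate keeps the simulation numerically stable while letting the controller run at a cheaper frequency.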


      // ground.
      agentInitialPosition.y = 20;
      agentInitialPosition.z = 0;
    }

    EvolvableBoxAgent3D agent = new EvolvableBoxAgent3D(agentId, env,
        genome.getAgentSize(), agentInitialPosition, genome);

    LinkedList<EvolvableBoxAgent3DAppendixInfo> appendixInfoList = new LinkedList<EvolvableBoxAgent3DAppendixInfo>(
        genome.getAppendixInfoList());

    Iterator<EvolvableBoxAgent3DAppendixInfo> iter = appendixInfoList
        .iterator();

    while (iter.hasNext()) {
      EvolvableBoxAgent3DAppendixInfo ainfo = iter.next();
      EvolvableBoxAgent3DConnectable predecessor = null;
      // If predecessorId == 0 then attach it directly to the core.
      if (ainfo.getPredecessorId() == 0) {
        predecessor = agent;
      }
      // If not, attach it to the appendix with the id.
      else {
        predecessor = agent.getAppendices().get(
            ainfo.getPredecessorId());
      }

      // Constructing the appendix is enough: the constructor handles the
      // attachment, so no reference needs to be kept here.
      new EvolvableBoxAgent3DAppendix(ainfo.getAppendixId(), predecessor,
          ainfo.getSize(), ainfo.getPositionOnPredecessor(),
          ainfo.getFreeAxis());
    }

    EvoBoxNeuroBrain brain = new EvoBoxNeuroBrain(agent, agent.id(),
        new Random(), genome.getSparseNet().createOffspring(agentId));
    agent.implantBrain(brain);

    return agent;
  }
View Full Code Here
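
The loop above resolves each appendix's predecessor by id: id 0 means the agent core, and any other id refers to an appendix created earlier in the list. A generic sketch of that id-based assembly, written against plain illustrative types rather than the project's classes, might look like this:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class BodyTreeAssemblySketch {

  // Illustrative record: an appendix knows its own id and its predecessor's id.
  record AppendixInfo(int id, int predecessorId) {}

  static class Node {
    final int id;
    final List<Node> children = new ArrayList<>();
    Node(int id) { this.id = id; }
  }

  /** Builds a body tree; predecessorId == 0 means "attach directly to the core". */
  static Node assemble(List<AppendixInfo> infos) {
    Node core = new Node(0);
    Map<Integer, Node> byId = new HashMap<>();
    byId.put(0, core);
    for (AppendixInfo info : infos) {
      // Works as long as every predecessor appears earlier in the list.
      Node predecessor = byId.get(info.predecessorId());
      Node appendix = new Node(info.id());
      predecessor.children.add(appendix);
      byId.put(info.id(), appendix);
    }
    return core;
  }

  public static void main(String[] args) {
    Node core = assemble(List.of(new AppendixInfo(1, 0), new AppendixInfo(2, 1)));
    System.out.println("Core has " + core.children.size() + " direct appendix(es).");
  }
}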

    int numberOfNewParents = oldPopulation.size() / 2;
    EvolvableBoxAgent3DGenome[] newParents = new EvolvableBoxAgent3DGenome[numberOfNewParents];   
   
    System.out.println("selected parents: ");
    for (int i = 0; i < numberOfNewParents; i++) {
      EvolvableBoxAgent3D bestAgent = Collections.max(oldPopulation);
      EvolvableBoxAgent3DGenome genome = EvolvableBoxAgent3DGenomeHandler
          .parseGenomeFromString(bestAgent.getGenome().toString());
      oldPopulation.remove(bestAgent);
      newParents[i] = genome;   
      System.out.println(newParents[i]);
    }
   
View Full Code Here
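
The selection above repeatedly extracts the best remaining agent with Collections.max (the agents' natural ordering evidently tracks fitness) and copies its genome through a string round-trip. A sketch of the same top-half truncation selection done with a single sort, using illustrative types, could read:

import java.util.Comparator;
import java.util.List;

public class TruncationSelectionSketch {

  // Illustrative stand-in for an evaluated individual.
  record Individual(String genome, double fitness) {}

  /** Returns the genomes of the fitter half of the population. */
  static List<String> selectParents(List<Individual> population) {
    return population.stream()
        .sorted(Comparator.comparingDouble(Individual::fitness).reversed())
        .limit(population.size() / 2)
        .map(Individual::genome)   // copy out only the genome, as in the snippet
        .toList();
  }

  public static void main(String[] args) {
    List<Individual> pop = List.of(
        new Individual("A", 1.0), new Individual("B", 3.0),
        new Individual("C", 2.0), new Individual("D", 0.5));
    System.out.println(selectParents(pop));   // prints [B, C]
  }
}

Sorting once is O(n log n) and leaves the source list untouched, whereas the max-and-remove loop is O(n²) and consumes oldPopulation; which behavior is preferable depends on how the caller reuses the list afterwards.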

        + " in generation: " + lastImprovementInGeneration + ".");
   
    for (int i = 0; i < population.length; i++) {
      env.resetEnvironment();
      System.out.println(i + ": " + population[i].toString());
      EvolvableBoxAgent3D agent = EvolvableBoxAgent3DGenomeHandler
          .createAgentFromGenome(0, null, env, population[i].toString());
      env.addAgent(agent);
      currentAgents.add(agent);
      // Drop agent
      env.stepPhysicalSimulationBy10Seconds();
      // Start fitness measurement
      agent.setFitnessRelevantStartingPosition();

      // 600 steps of 2 * 1/60 s each --> 600 * 1/30 s = 20 s of simulated time.
      for (int j = 0; j < numberOf60HertzSteps; j++) {

        //env.stepPhysicalSimulationAt60Hertz();
        env.step(simZyk);
        if (j % whichStepToThinkAt == 0) {
          agent.getBrain().decideAndReact(simZyk);
        }

      }
      System.out.println("Fitness: " + agent.getFitness());
      if (agent.getFitness() > highestFitnessSoFar) {
        elitistGenomes.add(EvolvableBoxAgent3DGenomeHandler
            .parseGenomeFromString(agent.getGenome().toString()));
        highestFitnessSoFar = agent.getFitness();
        lastImprovementInGeneration = simZyk.getLastTick();
      }

      env.removeAgent(0);
      env.resetEnvironment();
    }

    /*
    if (simZyk.getLastTick() % 20 == 0) {
      EvolvableBoxAgent3DGenomeHandler.savePopulationToFile(params.getParWertInt("sameParamsId")
          + File.separator + fileName + "_" + params.getParWertInt("sameParamsId")
          + "_pop_at_" + simZyk.getLastTick(), population, params);
    }*/
    statistics.logToFiles(currentAgents, simZyk, highestFitnessSoFar);
   
    if (simZyk.getLastTick() == params.getParValueLong("simulationlength")) {
      EvolvableBoxAgent3DGenomeHandler.savePopulationToFile(params.getParValueInt("sameParamsId")
          + File.separator + fileName + "_" + params.getParValueString("folderName").toString()
          + "_"+ params.getParValueInt("sameParamsId")
          + "_finalpopulation", population, params);
      EvolvableBoxAgent3DGenome championGenome = EvolvableBoxAgent3DGenomeHandler
          .parseGenomeFromString(Collections.max(currentAgents).getGenome().toString());
      env.resetEnvironment();
      EvolvableBoxAgent3D championAgent = EvolvableBoxAgent3DGenomeHandler
          .createAgentFromGenome(0, null, env, championGenome);
      env.addAgent(championAgent);
      // Drop agent
      env.stepPhysicalSimulationBy10Seconds();
      // Start fitness measurement
      championAgent.setFitnessRelevantStartingPosition();
      EvolvableBoxAgent3DTrack track = new EvolvableBoxAgent3DTrack(championAgent);
      // Example: 600 steps of 2 * 1/60 s each --> 600 * 1/30 s = 20 s of simulated time.
      for (int j = 0; j < numberOf60HertzSteps; j++) {
        track.addTrackPoint();
        //env.stepPhysicalSimulationAt60Hertz();
        env.step(simZyk);
        if (j % whichStepToThinkAt == 0) {
          championAgent.getBrain().decideAndReact(simZyk);
        }

      }
      EvolvableBoxAgent3DLogger.saveTrackAsPng(track, params);
      env.removeAgent(0);
View Full Code Here
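
The generation loop keeps an elitist archive: whenever an agent beats the highest fitness seen so far, a copy of its genome (via the string round-trip) is stored so later changes to the population cannot affect it. A small self-contained sketch of that bookkeeping, where copy() merely stands in for parseGenomeFromString(genome.toString()), might be:

import java.util.ArrayList;
import java.util.List;

public class ElitistArchiveSketch {

  // Illustrative genome with an explicit copy, standing in for the
  // parseGenomeFromString(genome.toString()) round-trip in the snippet above.
  record Genome(String encoding) {
    Genome copy() { return new Genome(encoding); }
  }

  private final List<Genome> elitistGenomes = new ArrayList<>();
  private double highestFitnessSoFar = Double.NEGATIVE_INFINITY;
  private long lastImprovementInGeneration = 0;

  /** Archives a copy of the genome whenever the fitness record is broken. */
  void report(Genome genome, double fitness, long generation) {
    if (fitness > highestFitnessSoFar) {
      elitistGenomes.add(genome.copy());   // archive a copy, not the live object
      highestFitnessSoFar = fitness;
      lastImprovementInGeneration = generation;
    }
  }

  public static void main(String[] args) {
    ElitistArchiveSketch archive = new ElitistArchiveSketch();
    archive.report(new Genome("g1"), 1.0, 1);
    archive.report(new Genome("g2"), 0.5, 2);   // no improvement, not archived
    archive.report(new Genome("g3"), 2.0, 3);
    System.out.println(archive.elitistGenomes.size() + " genome(s) archived; last improvement in generation "
        + archive.lastImprovementInGeneration + ".");
  }
}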

        newPopulation[i] = g;
      }
     
      // Allow the next 36 genomes (slots 8-43) into the new population.
      for (int i = 8; i < 44; i++) {
        EvolvableBoxAgent3D a = Collections.min(currentAgents);
        EvolvableBoxAgent3DGenome origGenome = a.getGenome();
        newPopulation[i] = origGenome;
        currentAgents.remove(a);
      }

      // Fill the remaining slots 44-49 with three more genomes plus a mutated
      // copy of each; the extra i++ inside the loop body steps past the slot
      // that just received the mutated copy.
      for (int i = 44; i < 50; i++) {
        EvolvableBoxAgent3D a = Collections.min(currentAgents);
        newPopulation[i++] = a.getGenome();
        newPopulation[i] = EvolvableBoxAgent3DGenomeHandler.mutate(a.getGenome(),
            params, new Random());

        currentAgents.remove(a);
View Full Code Here
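
The rebuild fragment above fills the fixed-size newPopulation array in bands, and the last band stores an original genome immediately followed by a mutated copy, using an extra i++ inside the loop body to step over the mutated slot. A compact sketch of that banded fill, with illustrative sizes and a dummy mutate(), could be:

import java.util.Arrays;

public class BandedPopulationFillSketch {

  // Illustrative mutation: a real implementation would change the genome.
  static String mutate(String genome) {
    return genome + "'";
  }

  public static void main(String[] args) {
    String[] newPopulation = new String[10];

    // Band 1: slots 0-5 are taken over unchanged.
    for (int i = 0; i < 6; i++) {
      newPopulation[i] = "survivor" + i;
    }

    // Band 2: slots 6-9 hold original/mutated pairs; the inner i++ moves
    // past the slot that just received the mutated copy.
    for (int i = 6; i < 10; i++) {
      String genome = "parent" + i;
      newPopulation[i++] = genome;          // original
      newPopulation[i] = mutate(genome);    // mutated copy right after it
    }

    System.out.println(Arrays.toString(newPopulation));
  }
}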


