Package com.greentea.relaxation.jnmf.model

Examples of com.greentea.relaxation.jnmf.model.Neuron


      {
         isFirstStep = false;
         Layer lastLayer = builder.getNetwork().getLayers().getLast();
         for (int i = 0; i < targetImage.length(); ++i)
         {
            Neuron owner = lastLayer.getNeurons().get(i);
            Double value = targetImage.charAt(i) == '0' ? -1.0 : 1.0;
            owner.getOutputSynapses().get(0).sendSignal(owner, value);
//            ThresholdFunction function
//               = (ThresholdFunction) owner.getActivationFunction();
//            function.setThresholdValue(value);
         }
      }
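The snippet above clamps the output layer to a target pattern: each character of targetImage is mapped to a bipolar signal ('0' becomes -1.0, anything else +1.0) and sent down the neuron's first output synapse. A minimal, self-contained sketch of just the mapping step, using plain Java instead of the jnmf types (class and method names here are illustrative):

// Illustrative helper, not part of the library: converts a binary string
// such as "0110" into the bipolar vector {-1.0, 1.0, 1.0, -1.0}.
public final class BipolarTargetSketch
{
   public static double[] toBipolar(String targetImage)
   {
      double[] values = new double[targetImage.length()];
      for (int i = 0; i < targetImage.length(); ++i)
      {
         values[i] = targetImage.charAt(i) == '0' ? -1.0 : 1.0;
      }
      return values;
   }

   public static void main(String[] args)
   {
      System.out.println(java.util.Arrays.toString(toBipolar("0110")));
      // prints [-1.0, 1.0, 1.0, -1.0]
   }
}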


            row.add(inputColumns.get(j).getName());

            double sum = 0;
            for (int i = 0; i < outputNeuronsCount; ++i)
            {
               Neuron neuron = network.getLayers().getLast().getNeurons().get(i);

               double w = neuron.getInputSynapses().get(j).getWeight();
               sum += w;

               row.add(JNMFMathUtils.roundDouble4(w));
            }
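The excerpt above builds one report row per input column: the column name followed by the rounded weight of that input into each output neuron, while a running sum of the column's weights is kept for use outside the excerpt. A rough sketch of the same row-building logic over a plain weight matrix (the names and the weights[i][j] layout are assumptions, not the library's API):

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch: weights[i][j] plays the role of
// output neuron i -> getInputSynapses().get(j).getWeight().
public final class WeightRowSketch
{
   public static List<Object> buildRow(String columnName, double[][] weights, int j)
   {
      List<Object> row = new ArrayList<Object>();
      row.add(columnName);

      double sum = 0;
      for (int i = 0; i < weights.length; ++i)
      {
         double w = weights[i][j];
         sum += w;                            // used after the loop in the full code
         row.add(Math.round(w * 1e4) / 1e4);  // stands in for JNMFMathUtils.roundDouble4
      }
      return row;
   }
}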

   }

   public DoubleList resolveClusterCenter(int clusterNum)
   {
      Layer lastLayer = getNetwork().getLayers().getLast();
      Neuron clusterNeuron = lastLayer.getNeurons().get(clusterNum);

      DoubleList res = new ArrayDoubleList(clusterNeuron.getInputSynapses().size());
      for (Synapse s : clusterNeuron.getInputSynapses())
      {
         res.add(s.getWeight());
      }

      return res;
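resolveClusterCenter treats the input-synapse weights of an output neuron as the coordinates of that neuron's cluster center, which is the usual reading for a competitive layer. A minimal sketch of the same idea over a plain weight matrix (hypothetical names, not the library's API):

// Illustrative sketch: weights[clusterNum][k] is the weight of input k
// into output neuron clusterNum; the row is the cluster center.
public final class ClusterCenterSketch
{
   public static double[] resolveClusterCenter(double[][] weights, int clusterNum)
   {
      return weights[clusterNum].clone();
   }
}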

      {
         addOutputNeuron();
         return;
      }

      Neuron winner = null;
      double minF = Double.MAX_VALUE;

      for (Neuron neuron : outputNeurons)
      {
         if (neuron.getActivation() < minF)
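The excerpt above picks a winner by scanning the output neurons for the smallest activation, after first growing the layer if needed. A standalone sketch of that argmin search over a plain activation array (illustrative names):

// Illustrative argmin: returns the index of the smallest activation,
// or -1 for an empty layer.
public final class MinWinnerSketch
{
   public static int findWinner(double[] activations)
   {
      int winner = -1;
      double minF = Double.MAX_VALUE;
      for (int i = 0; i < activations.length; ++i)
      {
         if (activations[i] < minF)
         {
            minF = activations[i];
            winner = i;
         }
      }
      return winner;
   }
}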

      {
         super.afterStep();

         for (int i = 0; i < outputNeurons.size(); ++i)
         {
            Neuron neuron = outputNeurons.get(i);
            double targetValue = getCurrentLearningPair().getOutput().get(i);

            casCorMath.collectInfoFromOutput(neuron, targetValue);

            if (learningOutputsAlgorithm == LearningOutputsAlgorithm.BACK_PROPAGATION)
            {
               backPropagation.learnNeuron(neuron, targetValue);
            }
         }

         if (learningOutputsAlgorithm == LearningOutputsAlgorithm.QUICK_PROPAGATION)
         {
            quickPropagation.collectInfoFromOutputs(getCurrentLearningPair().getOutput());
         }
      }
      else if (currentState == CasCorState.CANDIDATES_COLLECT_INFO)
      {
         for (int i = 0; i < outputNeurons.size(); ++i)
         {
            Neuron neuron = outputNeurons.get(i);
            double targetValue = getCurrentLearningPair().getOutput().get(i);

            casCorMath.collectInfoFromOutput(neuron, targetValue);
         }
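The excerpt above shows the per-step bookkeeping of Cascade-Correlation: while the output weights are being trained, each output neuron's error is both fed to the chosen learning rule (back-propagation per neuron here, quick-propagation after the loop) and accumulated for the candidate-correlation statistics; in the CANDIDATES_COLLECT_INFO phase only the statistics are gathered. A simplified, self-contained sketch of that dispatch, with plain callbacks standing in for casCorMath and the learning algorithms (an assumption, not the library's API):

// Illustrative sketch of the two CasCor phases above.
public final class CasCorStepSketch
{
   public enum State { OUTPUTS_LEARNING, CANDIDATES_COLLECT_INFO }

   public interface CorrelationCollector { void collect(int outputIndex, double target); }
   public interface OutputLearner { void learn(int outputIndex, double target); }

   public static void afterStep(State state, double[] targets,
                                CorrelationCollector collector, OutputLearner learner)
   {
      for (int i = 0; i < targets.length; ++i)
      {
         // correlation statistics are gathered in both phases
         collector.collect(i, targets[i]);

         // output weights are only adjusted while the outputs are being trained
         if (state == State.OUTPUTS_LEARNING)
         {
            learner.learn(i, targets[i]);
         }
      }
   }
}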

      double shrinkFactor = maxFactor / (1.0 + maxFactor);

      for (int i = 0; i < getOutputNeurons().size(); ++i)
      {
         Neuron neuron = getOutputNeurons().get(i);

         double connectionsCount = neuron.getInputSynapses().size();
         for (Synapse s : neuron.getInputSynapses())
         {
            double nextStep = 0;

            double slope = getSlope(s);
            double prevSlope = getPrevSlope(s);
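The excerpt above prepares a QuickProp weight update: shrinkFactor = maxFactor / (1 + maxFactor) is the usual guard that keeps the quadratic step from growing more than maxFactor times the previous step. Below is a sketch of one weight's step under Fahlman's standard rule, which the truncated loop appears to follow; the library's exact variant is not shown, so treat this as an assumption. Slopes are taken to be accumulated as -dE/dw, as in collectInfoFromOutputs below.

// Illustrative single-weight QuickProp step (Fahlman, 1988 formulation).
public final class QuickPropStepSketch
{
   public static double nextStep(double slope, double prevSlope, double prevStep,
                                 double maxFactor, double epsilon)
   {
      double shrinkFactor = maxFactor / (1.0 + maxFactor);
      double nextStep = 0.0;

      if (prevStep > 0.0)
      {
         if (slope > 0.0)
         {
            nextStep += epsilon * slope;                        // plain gradient term
         }
         if (slope > shrinkFactor * prevSlope)
         {
            nextStep += maxFactor * prevStep;                   // cap the quadratic jump
         }
         else
         {
            nextStep += slope / (prevSlope - slope) * prevStep; // parabola minimum
         }
      }
      else if (prevStep < 0.0)
      {
         if (slope < 0.0)
         {
            nextStep += epsilon * slope;
         }
         if (slope < shrinkFactor * prevSlope)
         {
            nextStep += maxFactor * prevStep;
         }
         else
         {
            nextStep += slope / (prevSlope - slope) * prevStep;
         }
      }
      else
      {
         nextStep += epsilon * slope;                           // first step: gradient descent
      }
      return nextStep;
   }
}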

   public void collectInfoFromOutputs(DoubleList targetValues)
   {
      for (int i = 0; i < getOutputNeurons().size(); ++i)
      {
         Neuron neuron = getOutputNeurons().get(i);

         double activation = neuron.getActivation();
         double e = calcError(targetValues.get(i), activation);
         double net = neuron.getNet();

         IFunction derivative = neuron.getActivationFunction().getDerivative();
         double derivativeValue = derivative.calc(net);

         for (Synapse s : neuron.getInputSynapses())
         {
            slopes.put(s, getSlope(s) + (e * derivativeValue * s.getSource().getActivation()));
         }
      }
   }
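collectInfoFromOutputs accumulates, for every input synapse of every output neuron, the term error * f'(net) * sourceActivation, i.e. the per-pattern slope that QuickProp later turns into a weight step. A minimal sketch of that accumulation for a single output neuron, assuming calcError(t, o) = t - o (an assumption; the helper is not shown in the excerpt):

// Illustrative slope accumulation for one output neuron with inputs x[],
// output o, target t, and activation-function derivative fPrime(net).
public final class SlopeAccumulationSketch
{
   public static void accumulate(double[] slopes, double[] x,
                                 double target, double output, double fPrimeOfNet)
   {
      double e = target - output;              // assumed form of calcError
      for (int j = 0; j < x.length; ++j)
      {
         slopes[j] += e * fPrimeOfNet * x[j];  // -dE/dw_j for squared error
      }
   }
}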


   @Override
   public void afterLayerActivation(Layer layer)
   {
      Neuron winner = null;
      // start below any possible activation; Double.MIN_VALUE is the smallest
      // positive double and would miss a winner when all activations are negative
      double maxActivation = Double.NEGATIVE_INFINITY;

      for (Neuron neuron : layer.getNeurons())
      {
         if (maxActivation < neuron.getActivation())
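afterLayerActivation implements a winner-take-all pass over the layer: the neuron with the largest activation wins. A standalone sketch of that argmax over a plain activation array; Double.NEGATIVE_INFINITY is used as the starting value so that a layer whose activations are all negative still yields a winner (Double.MIN_VALUE, the smallest positive double, would not):

// Illustrative argmax over activations; returns -1 for an empty layer.
public final class MaxWinnerSketch
{
   public static int findWinner(double[] activations)
   {
      int winner = -1;
      double maxActivation = Double.NEGATIVE_INFINITY;
      for (int i = 0; i < activations.length; ++i)
      {
         if (maxActivation < activations[i])
         {
            maxActivation = activations[i];
            winner = i;
         }
      }
      return winner;
   }

   public static void main(String[] args)
   {
      System.out.println(findWinner(new double[] { -0.7, -0.2, -0.9 })); // prints 1
   }
}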

         {
            Layer sourceLayer = network.getLayers().get(sourceLayerIndex);

            for (int i = 0; i < sourceLayer.getNeurons().size(); ++i)
            {
               Neuron sourceNeuron = sourceLayer.getNeurons().get(i);
               String layerString = (i == 0) ? "" + sourceLayerIndex : emptyString;
               String neuronString = "" + i;

               Color layerColor = (sourceLayerIndex % 2 == 0) ? CellColor.EvenClass.getColor() :
                       CellColor.OddClass.getColor();
               Color neuronColor = CellColor.DefaultCell.getColor();
               row1Colors.add(layerColor);
               row2Colors.add(neuronColor);

               layersRow1.add(layerString);
               neuronsRow2.add(neuronString);

               Vector<Object> contentRow = new Vector<Object>();
               contentRow.add(layerString);
               contentRow.add(neuronString);

               List<Color> rowColors = new ArrayList<Color>();
               rowColors.add(layerColor);
               rowColors.add(neuronColor);

               for (
                       int destLayerIndex = 0;
                       destLayerIndex < network.getLayers().size(); ++destLayerIndex)
               {
                  Layer destLayer = network.getLayers().get(destLayerIndex);
                  for (Neuron destNeuron : destLayer.getNeurons())
                  {
                     double w = 0.0;

                     Color connectionColor = CellColor.DefaultCell.getColor();
                     Collection<Synapse> synapses = CollectionUtils
                             .intersection(sourceNeuron.getOutputSynapses(),
                                     destNeuron.getInputSynapses());
                     if (synapses.size() > 0)
                     {
                        w = synapses.iterator().next().getWeight();
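The last excerpt renders the full weight matrix as a table: for every (source, destination) pair it looks for the single synapse that appears in both the source's output-synapse list and the destination's input-synapse list, and reports its weight (0.0 when the pair is not connected). A compact sketch of that lookup with java.util collections instead of CollectionUtils.intersection (the Synapse stand-in here is hypothetical):

import java.util.LinkedHashSet;
import java.util.Set;

// Illustrative lookup of the weight between two neurons: the connecting
// synapse is the one shared by the source's outputs and the destination's inputs.
public final class WeightLookupSketch
{
   // Minimal stand-in for the library's Synapse; only the weight matters here.
   public static final class Synapse
   {
      public final double weight;
      public Synapse(double weight) { this.weight = weight; }
   }

   public static double weightBetween(Set<Synapse> sourceOutputs, Set<Synapse> destInputs)
   {
      Set<Synapse> shared = new LinkedHashSet<Synapse>(sourceOutputs);
      shared.retainAll(destInputs);  // same idea as CollectionUtils.intersection
      return shared.isEmpty() ? 0.0 : shared.iterator().next().weight;
   }
}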
