[Insight-developers] itk Genetic Algorithm optimizer

Alize Scheenstra alize_scheenstra at hotmail.com
Tue Feb 7 11:25:44 EST 2006


Hi,

For a registration of 3D images, I have implemented a genetic algorithm 
optimizer, see the code below. But for some reason, the cost function 
doesn't work in itkGeneticOptimizer::GetFitness()
this->GetValue(tempValues);
The first 4 times it returns an error and all times afterwards it returns 
the same value, although the tempValues change each run.
It also happens with an identity transform with the same moving and fixed 
image. Also changing the cost function doens't work.

Can someone please tell me what went wrong???

Thank you in advance,

Greetings Alize

/////////////////////////////itkGeneticOptimizer.h////////////
#ifndef __itkGeneticOptimizer_h
#define __itkGeneticOptimizer_h

#include <itkSingleValuedNonLinearOptimizer.h>
#include <itkMersenneTwisterRandomVariateGenerator.h>
#include "itkArray2D.h"

namespace itk
{

	/** \class GeneticOptimizer
	* \brief Genetic optimizer
	*
	* This optimizer searches for the optimal parameters based on a genetic 
approach using crossover and mutation.
	* This optimizer needs a cost function
	* variate generator. Implemented from "Intelligent Optimisation 
Techniques", D.T.Pham and D. Karaboga, ISBN 1-85233-028-7
	*
	* Optional Parameters which could be set
	* The cost function								SetCostFunction();
	* If the cost function should be maximized or not: SetMaximizeOff();
	*
	* Optional Parameters which could be set:
	* Population size:								SetPopulationSize()
	* Maximal number of iterations:					SetMaximumIteration()
	* The percentage of randomly selected children: SetGenerationGap()
	* the crossover percentage:						SetCrossOverRate()
	* the mutation percentage:						SetMutationRate()
	* The variance of the N-dist to create the initial population 
SetGenerationVariance();
	*
	*
	* For now the optimizer can only be stopped when it reaches 
MaximumIteration.
	*
	*
	* \ingroup Numerics Optimizers
	*
	*/

	class ITK_EXPORT GeneticOptimizer:
		public SingleValuedNonLinearOptimizer
	{
	public:
		/** Standard "Self" typedef. */
		typedef GeneticOptimizer     Self ;
		typedef SingleValuedNonLinearOptimizer      Superclass;
		typedef SmartPointer<Self>                  Pointer;
		typedef SmartPointer<const Self>            ConstPointer;

		//! Method for creation through the object factory. */
		itkNewMacro(Self);

		//! Run-time type information (and related methods). */
		itkTypeMacro(GeneticOptimizer, SingleValuedNonLinearOptimizer );

		//! Type of the Cost Function
		typedef  SingleValuedCostFunction         CostFunctionType;
		typedef  CostFunctionType::Pointer        CostFunctionPointer;

		//! Normal random variate generator type.
		typedef Statistics::MersenneTwisterRandomVariateGenerator GeneratorType;

		//! Set if the Optimizer should Maximize the metric
		itkSetMacro( Maximize, bool );
		itkBooleanMacro( Maximize );

		//! Set/Get maximum iteration limit.
		itkSetMacro( MaximumIteration, unsigned int );
		itkGetConstReferenceMacro( MaximumIteration, unsigned int );

		//! Set/Get MutationRate, value between 0 and 1.
		itkSetMacro( MutationRate, double );
		itkGetConstReferenceMacro( MutationRate, double );

		//! Set/Get CrossOverRate .
		itkSetMacro( CrossOverRate, double );
		itkGetConstReferenceMacro( CrossOverRate, double );

		//! Set/Get NumberOfChildren (the number of children generated from each 
parent/source)
		itkSetMacro( NumberOfChildren, unsigned int ) ;
		itkGetConstReferenceMacro( NumberOfChildren, unsigned int );

		//! Set/Get PopulationSize
		itkSetMacro( PopulationSize, unsigned int ) ;
		itkGetConstReferenceMacro( PopulationSize, unsigned int );

		//! Set/Get the percentage of subjects which are put in the new generation 
(before mutation and crossover)
		//! without being selected by their fitness value.
		itkSetMacro( GenerationGap, double ) ;
		itkGetConstReferenceMacro( GenerationGap, double );

		//! Get/Set the Variance needed to create a decent first generation
		//! Generation is created by calling 
itkMersenneTwisterRandomVariateGenerator->GetNormalVariate(initialParameterValue,Variance);
		itkSetMacro( GenerationVariance, double ) ;
		itkGetConstReferenceMacro( GenerationVariance, double );

		//! Returns Current Iteration
		itkGetConstReferenceMacro( CurrentIteration, unsigned int) ;
		//! Returns Current Best Value
		itkGetConstReferenceMacro( CurrentBestCost, MeasureType) ;
		//! Returns Current Best Parameter setting
		ParametersType GetCurrentBestParameterSetting() const
		{
			return m_CurrentBestParameterSetting;
		}

		//! Start optimization.
		//! Optimization will stop when it meets either of two termination 
conditions,
		//! the maximum iteration limit or epsilon (minimal search radius)
		void StartOptimization() ;

	protected:
		GeneticOptimizer() ;
		GeneticOptimizer(const GeneticOptimizer&) ;
		virtual ~GeneticOptimizer() ;
		void PrintSelf(std::ostream& os, Indent indent) const;

	private:


		//! This routine simulates the spinning of a roulette wheel in order to 
select the seeded members of the nest generation.
		//! The width of each individual's slot on the wheel depends on its 
fitness compared to the rest of its generation.
		//! The search performed to find the correct source index(mid_index) is a 
binary one.
		void Seeded_selection();

		//! This routine uses the generation gap to select at random a portion of 
an existing population to survive intact to the next generation
		void Random_selection();

		//!
		void CrossOver();
		void Mutation();
		void GetFitness();

		//! method used to create the first generation of children
		void CreateFirstGeneration();

		/** Smart pointer to the normal random variate generator. */
		GeneratorType::Pointer		m_Generator;

		/** Maximum iteration limit. */
		unsigned int m_MaximumIteration ;

		/** Current iteration */
		unsigned int m_CurrentIteration ;

		/** Set if the Metric should be maximized: Default = False */
		bool m_Maximize;

		/** The minimal size of search radius
		* (frobenius_norm of covariance matrix). */
		double m_Epsilon ;

		/** NumberOfChildren (the number of children generated from each 
parent/source) */
		unsigned int m_NumberOfChildren ;

		/** Population size*/
		unsigned int m_PopulationSize;

		/** MutationRate. */
		double m_MutationRate ;

		/** CrossOverRate, */
		double m_CrossOverRate ;

		/*The generation gap*/
		double m_GenerationGap;

		/** The number of parameters to be optimized*/
		unsigned int m_NumberOfParameters;

		/** Internal storage for the value type / used as a cache  */
		MeasureType       m_CurrentBestCost;

		/**  The variation needed to create the children */
		double m_GenerationVariance;

		typedef  Array2D<double>	 Double2DVectorType;
		typedef  std::vector<double> DoubleVectorType;

		Double2DVectorType	m_Pool;
		Double2DVectorType	m_Source;
		DoubleVectorType	part_sum;
		DoubleVectorType    m_Rating;
		ParametersType		m_CurrentBestParameterSetting;

		int gencut1, gencut2, currentBestLocation;

	} ; // end of class

} // end of namespace itk

#endif

/////////////////////////////itkGeneticOptimizer.cxx////////////
#ifndef _itkGeneticOptimizer_cxx
#define _itkGeneticOptimizer_cxx

#include "itkGeneticOptimizer.h"
#include "vcl_cmath.h"
#include "vnl/vnl_matrix.h"
#include "itkImageFileWriter.h"

namespace itk
{

GeneticOptimizer
::GeneticOptimizer()
{
  m_Maximize = false;
  m_Generator = GeneratorType::New();
  m_Generator->Initialize(12345);
  //12345 is used for reproduction reasons. In 'real'life use Initialize();

  m_MutationRate = 0.2;
  m_CrossOverRate = m_MutationRate/2;
  m_PopulationSize = 50;   //number of individuals per iterations.
  m_MaximumIteration = 100 ; //number of generations
  m_CurrentBestCost = 0;
  m_CurrentIteration = 0;
  m_GenerationGap = 0.2; //value between 0 and 1 (percentage of subject 
copied without modification)
  m_NumberOfParameters = 0; //number of parameters to be optimized
  m_GenerationVariance = 1;

  //other variables needed
  gencut1 = (int) (m_PopulationSize * m_GenerationGap);
  gencut2 = m_PopulationSize - gencut1 ;
}


GeneticOptimizer
::~GeneticOptimizer()
{

}


void
GeneticOptimizer
::Seeded_selection()
{
	int high, low, mid;
	bool found;
	double spin;

	//second part of the
	for(int tc = gencut1; tc < m_PopulationSize; tc++)
	{
		//
		spin = 
(int)(m_Generator->GetVariateWithClosedRange()*part_sum[m_PopulationSize-1]);
		high = m_PopulationSize-1;
		low = 0;
		found = false;
		mid = (high+low)/2;
		while(!found)
		{
			if((part_sum[mid]>spin) && (part_sum[mid-1]<=spin) || ((low-high)*2 == 
1))
				found=true;
			else
			{
				if(part_sum[mid]>spin)
					high = mid-1;
				else
					low = mid+1;
				mid = (high+low)/2;
			}
		}
		for(int i=0; i<m_NumberOfParameters;i++)
			m_Pool[tc][i] = m_Source[mid][i];
	}
}

void
GeneticOptimizer
::Random_selection()
{
	int choice = 0;
	if(gencut1>0)
	{
		for(int randcount = 0; randcount<gencut1; randcount++)
		{
			//while(m_Rating[choice]<0.0)
			choice = 
(int)(m_Generator->GetVariateWithClosedRange()*(m_PopulationSize-1));

			for(int i=0; i<m_NumberOfParameters;i++)//copy all the parameters to the 
pool
				m_Pool[randcount][i] =m_Source[choice][i];
		}
	}
}



void
GeneticOptimizer
::CrossOver()
{
	int crossPoint,nominee;
	std::vector<double> lowerPart;
	std::vector<double> upperPart;
	for(int cc = 0; cc<m_PopulationSize;cc++)
	{
		if(m_Generator->GetVariateWithClosedRange() <= m_CrossOverRate)
		{
			//find another parameter configuration to mate except yourself
			nominee = 
(int)(m_Generator->GetVariateWithClosedRange()*(m_PopulationSize-1));
			while(nominee == cc)
			{
				nominee = 
(int)(m_Generator->GetVariateWithClosedRange()*(m_PopulationSize-1));
			}
			//set crosspoint
			crossPoint = 
(int)(m_Generator->GetVariateWithClosedRange()*(m_PopulationSize-1));

			//take first part from cc or take first part from nominee
			if(m_Generator->GetVariateWithClosedRange() <= 0.5)
			{
				for(int i=0;i<=m_NumberOfParameters;i++)
				{
					if(i<=crossPoint) { 
lowerPart[i]=m_Pool[cc][i];		upperPart[i]=m_Pool[nominee][i];}
					else 	{			lowerPart[i]=m_Pool[nominee][i];upperPart[i]=m_Pool[cc][i];	}
				}
			}
			else
			{
				for(int i=0;i<=m_NumberOfParameters;i++)
				{
					if(i<=crossPoint) { 
upperPart[i]=m_Pool[cc][i];		lowerPart[i]=m_Pool[nominee][i];}
					else 	{			upperPart[i]=m_Pool[nominee][i];lowerPart[i]=m_Pool[cc][i];	}
				}
			}
			//copy the elements back into the vector
			for(int i=0;i<=m_NumberOfParameters;i++)
			{
				m_Pool[cc][i]	   = lowerPart[i];
				m_Pool[nominee][i] = upperPart[i];
			}
		}
	}
}

void
GeneticOptimizer
::Mutation()
{
	for(int s=0;s< m_PopulationSize; s++)
	{
		for(int p=0;p< m_NumberOfParameters; p++)
		{
			if(m_Generator->GetVariateWithClosedRange() <= m_MutationRate)
			{
				//Add noise to this variable
				m_Pool[s][p] = (double) m_Generator->GetNormalVariate(m_Pool[s][p], 
m_GenerationVariance);
			}
		}
	}
}

void
GeneticOptimizer
::GetFitness()
{
	ParametersType tempValues(m_NumberOfParameters);
	double sumOfValues = 0;
	double bestValue;
	if(m_Maximize)
		bestValue = 2.2250738585072014E-308; //minimal value of double
	if(!m_Maximize)
		bestValue = 1.7976931348623157E+308; //maximal value of double
	currentBestLocation = 
(int)(m_Generator->GetVariateWithClosedRange()*(m_PopulationSize-1));

	double testValue;
	for(int i=0; i<m_PopulationSize;i++)
	{
		for(int p=0; p<m_NumberOfParameters;p++)
		{
			testValue = 	m_Pool[i][p];
			tempValues[i] = testValue;
		}
		try
		{
			testValue = this->GetValue(tempValues);
			//To be worked out for a more generic approach
			m_Rating[i] = testValue;
			if(m_Maximize)
				sumOfValues += m_Rating[i];
			if(!m_Maximize) //for the mutual information: it is negative. ToDo: also 
make it work for MeanSquares minimalisation
				sumOfValues += (0-m_Rating[i]);
			part_sum[i] = sumOfValues;
			if(m_Maximize && m_Rating[i] > bestValue)
			{
				bestValue = m_Rating[i];
				currentBestLocation = i;
			}
			if((!m_Maximize) && (m_Rating[i] < bestValue))
			{
				bestValue = m_Rating[i];
				currentBestLocation = i;
			}
		}
		catch( ExceptionObject & exp )
		{
			//error: no measure, so worst case scenario
			std::string errorMessage = exp.GetDescription();
			if(m_Maximize)
				m_Rating[i] = 2.2250738585072014E-308; //minimal value of double
			if(!m_Maximize)
				m_Rating[i] = 1.7976931348623157E+308; //maximal value of double
			if(i>0)
				part_sum[i] = part_sum[i-1];//set smallest chance of selection.
			else
				part_sum[i] = 0;
		}
	}
	//I have to call the m_CurrentBestCost->GetValue() again,
	//otherwise my tempValues cannot be deleted since there is a reference to 
it
	//in the m_costFunction and m_Transform
	for(int p=0; p<m_NumberOfParameters;p++)
		m_CurrentBestParameterSetting[p] =  m_Pool[currentBestLocation][p];
	m_CurrentBestCost = this->GetValue(m_CurrentBestParameterSetting);
}


void
GeneticOptimizer
::CreateFirstGeneration()
{
	double testValue;
	ParametersType initialPosition = this->GetInitialPosition();
	typedef ParametersType::ValueType SingleParameterType;
	for(int s=0; s<m_PopulationSize;s++)
	{
		for(int p=0; p<m_NumberOfParameters;p++)
		{
			testValue = (SingleParameterType) 
m_Generator->GetNormalVariate(initialPosition[p], m_GenerationVariance);
			m_Source[s][p] = testValue;
			m_Pool[s][p] = m_Source[s][p];
		}
	}
}

void
GeneticOptimizer
::StartOptimization()
{
  if( m_CostFunction.IsNull() )
    {
    return ;
    }
  this->InvokeEvent( StartEvent() );
  //Initialization
  m_NumberOfParameters = this->GetNumberOfParameters();
  //resize all the vectors to the correct size:
  m_Pool.SetSize(m_PopulationSize,m_NumberOfParameters);
  m_Source.SetSize(m_PopulationSize,m_NumberOfParameters);
  part_sum.resize(m_PopulationSize);
  m_Rating.resize(m_PopulationSize);
  m_CurrentBestParameterSetting.SetSize(m_NumberOfParameters);
  m_CurrentBestCost = 0;

  //Create first generation, so the m_Source is filled
  CreateFirstGeneration();
  //calculate the fitness
  GetFitness();

  //start iterations
  m_CurrentIteration = 0 ;
  for (unsigned int iter = 0 ; iter < m_MaximumIteration ; iter++)
  {
    //Select the new generation from the source and put them in the pool
	Random_selection();
	Seeded_selection();
	//Perform crossover and mutation on the subjects in the pool
	CrossOver();
	Mutation();

	//To keep the 'fittest' parameter in the without editting,
	//we have to random select one nominee which has to give up his place
	//and will be replaced with the m_CurrentBestParameterSetting of the 
previous round
	int nominee = 
(int)(m_Generator->GetVariateWithClosedRange()*(m_PopulationSize-1));
	for(int p=0;p<m_NumberOfParameters;p++)
	{
		m_Pool[nominee][p] = m_CurrentBestParameterSetting[p];
	}

	//Calculate the fitness values of the new created pool
	GetFitness();

	//Now the pool (children) become parents (m_Source) and the process starts 
again
	for(int p=0;p<m_NumberOfParameters;p++)
	{
		for(int s=0;s<m_PopulationSize;s++)
		{
			m_Source[s][p] = m_Pool[s][p];
		}
	}

	this->InvokeEvent( IterationEvent() );
    //itkDebugMacro(<< "Current position: " << this->GetCurrentPosition()) ;
  }
  this->InvokeEvent( EndEvent() );
}

/**
*
*/
void
GeneticOptimizer
::PrintSelf(std::ostream& os, Indent indent) const
{
  Superclass::PrintSelf(os,indent);

  if (m_Generator)
    {
		os << "Random Generator: " << m_Generator << std::endl;
    }
  else
    {
    os << indent << "Random Generator  " << "(none)" << std::endl;
    }
  os << indent << "Maximum Iteration " << m_MaximumIteration << std::endl;
  os << indent << "Epsilon           " << m_Epsilon          << std::endl;
  os << indent << "Maximal number of children    " << m_NumberOfChildren    
<< std::endl;
  os << indent << "Mutation Rate     " << m_MutationRate     << std::endl;
  os << indent << "Cross over rate   " << m_CrossOverRate    << std::endl;
  os << indent << "Current Cost      " << m_CurrentBestCost  << std::endl;
  os << indent << "Current Iteration " << m_CurrentIteration << std::endl;
  os << indent << "Maximize On/Off   " << m_Maximize         << std::endl;
  os << indent << "The variance of the N-dist: " << m_GenerationVariance << 
std::endl;
}

} // end of namespace itk
#endif




More information about the Insight-developers mailing list