<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="en">
	<id>https://www.jstacs.de/index.php?action=history&amp;feed=atom&amp;title=Train_classifiers_using_GenDisMix_%28a_hybrid_learning_principle%29</id>
	<title>Train classifiers using GenDisMix (a hybrid learning principle) - Revision history</title>
	<link rel="self" type="application/atom+xml" href="https://www.jstacs.de/index.php?action=history&amp;feed=atom&amp;title=Train_classifiers_using_GenDisMix_%28a_hybrid_learning_principle%29"/>
	<link rel="alternate" type="text/html" href="https://www.jstacs.de/index.php?title=Train_classifiers_using_GenDisMix_(a_hybrid_learning_principle)&amp;action=history"/>
	<updated>2026-04-04T14:01:46Z</updated>
	<subtitle>Revision history for this page on the wiki</subtitle>
	<generator>MediaWiki 1.38.2</generator>
	<entry>
		<id>https://www.jstacs.de/index.php?title=Train_classifiers_using_GenDisMix_(a_hybrid_learning_principle)&amp;diff=245&amp;oldid=prev</id>
		<title>Keilwagen: New page: &lt;source lang=&quot;java5&quot;&gt; //read FastA-files Sample[] data = {          new DNASample( args[0] ),          new DNASample( args[1] ) }; AlphabetContainer container = data[0].getAlphabetContaine...</title>
		<link rel="alternate" type="text/html" href="https://www.jstacs.de/index.php?title=Train_classifiers_using_GenDisMix_(a_hybrid_learning_principle)&amp;diff=245&amp;oldid=prev"/>
		<updated>2009-12-02T16:33:09Z</updated>

		<summary type="html">&lt;p&gt;New page: &amp;lt;source lang=&amp;quot;java5&amp;quot;&amp;gt; //read FastA-files Sample[] data = {          new DNASample( args[0] ),          new DNASample( args[1] ) }; AlphabetContainer container = data[0].getAlphabetContaine...&lt;/p&gt;
&lt;p&gt;&lt;b&gt;New page&lt;/b&gt;&lt;/p&gt;&lt;div&gt;&amp;lt;source lang=&amp;quot;java5&amp;quot;&amp;gt;&lt;br /&gt;
//read FastA-files&lt;br /&gt;
Sample[] data = {&lt;br /&gt;
         new DNASample( args[0] ),&lt;br /&gt;
         new DNASample( args[1] )&lt;br /&gt;
};&lt;br /&gt;
AlphabetContainer container = data[0].getAlphabetContainer();&lt;br /&gt;
int length = data[0].getElementLength();&lt;br /&gt;
&lt;br /&gt;
//equivalent sample size =^= ESS&lt;br /&gt;
double essFg = 4, essBg = 4;&lt;br /&gt;
//create ScoringFunction, here PWM&lt;br /&gt;
NormalizableScoringFunction pwmFg = new BayesianNetworkScoringFunction( container, length, essFg, true, new InhomogeneousMarkov(0) );&lt;br /&gt;
NormalizableScoringFunction pwmBg = new BayesianNetworkScoringFunction( container, length, essBg, true, new InhomogeneousMarkov(0) );&lt;br /&gt;
&lt;br /&gt;
//create parameters of the classifier&lt;br /&gt;
GenDisMixClassifierParameterSet cps = new GenDisMixClassifierParameterSet(&lt;br /&gt;
		container,//the used alphabets&lt;br /&gt;
		length,//sequence length that can be modeled/classified&lt;br /&gt;
		Optimizer.QUASI_NEWTON_BFGS, 1E-9, 1E-11, 1,//optimization parameter&lt;br /&gt;
		false,//use free parameters or all&lt;br /&gt;
		KindOfParameter.PLUGIN,//how to start the numerical optimization&lt;br /&gt;
		true,//use a normalized objective function&lt;br /&gt;
		AbstractMultiThreadedOptimizableFunction.getNumberOfAvailableProcessors()//number of compute threads		&lt;br /&gt;
);&lt;br /&gt;
&lt;br /&gt;
//create classifiers&lt;br /&gt;
LearningPrinciple[] lp = LearningPrinciple.values();&lt;br /&gt;
GenDisMixClassifier[] cl = new GenDisMixClassifier[lp.length+1];&lt;br /&gt;
//elementary learning principles&lt;br /&gt;
int i = 0;&lt;br /&gt;
for( ; i &amp;lt; cl.length-1; i++ ){&lt;br /&gt;
	System.out.println( &amp;quot;classifier &amp;quot; + i + &amp;quot; uses &amp;quot; + lp[i] );&lt;br /&gt;
	cl[i] = new GenDisMixClassifier( cps, new CompositeLogPrior(), lp[i], pwmFg, pwmBg );&lt;br /&gt;
}&lt;br /&gt;
&lt;br /&gt;
//use some weighted version of log conditional likelihood, log likelihood, and log prior&lt;br /&gt;
double[] beta = {0.3,0.3,0.4};&lt;br /&gt;
System.out.println( &amp;quot;classifier &amp;quot; + i + &amp;quot; uses the weights &amp;quot; + Arrays.toString( beta ) );&lt;br /&gt;
cl[i] = new GenDisMixClassifier( cps, new CompositeLogPrior(), beta, pwmFg, pwmBg );&lt;br /&gt;
&lt;br /&gt;
//do what ever you like&lt;br /&gt;
&lt;br /&gt;
//e.g., train&lt;br /&gt;
for( i = 0; i &amp;lt; cl.length; i++ ){&lt;br /&gt;
	cl[i].train( data );&lt;br /&gt;
}&lt;br /&gt;
&lt;br /&gt;
//e.g., evaluate (normally done on a test data set)&lt;br /&gt;
MeasureParameters mp = new MeasureParameters( false, 0.95, 0.999, 0.999 );&lt;br /&gt;
for( i = 0; i &amp;lt; cl.length; i++ ){&lt;br /&gt;
	System.out.println( cl[i].evaluate( mp, true, data ) );&lt;br /&gt;
}&lt;br /&gt;
&amp;lt;/source&amp;gt;&lt;/div&gt;</summary>
		<author><name>Keilwagen</name></author>
	</entry>
</feed>