// File: JSAT-master/JSAT/test/jsat/classifiers/linear/SCWTest.java
package jsat.classifiers.linear;
import java.util.Random;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPointPair;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class SCWTest
{
public SCWTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_Full()
{
System.out.println("TrainC_Full");
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for (SCW.Mode mode : SCW.Mode.values())
{
SCW scwFull = new SCW(0.9, mode, false);
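// eta = 0.9 is the required confidence level; 'false' keeps the full
// covariance matrix (the Mode enum appears to select the SCW-I or SCW-II update rule)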
scwFull.train(train);
for (DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), scwFull.classify(dpp.getDataPoint()).mostLikely());
}
}
@Test
public void testTrainC_Diag()
{
System.out.println("TrainC_Diag");
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for (SCW.Mode mode : SCW.Mode.values())
{
SCW scwDiag = new SCW(0.9, mode, true);
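// 'true' restricts SCW to a diagonal covariance approximation, which is
// cheaper per update but less expressive than the full matrix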
scwDiag.train(train);
for (DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), scwDiag.classify(dpp.getDataPoint()).mostLikely());
}
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/SDCATest.java
package jsat.classifiers.linear;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.datatransform.LinearTransform;
import jsat.datatransform.PNormNormalization;
import jsat.linear.*;
import jsat.lossfunctions.*;
import jsat.math.OnLineStatistics;
import jsat.regression.RegressionDataSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class SDCATest
{
/*
* This test case is based on the grouping example in the Elastic Net
* paper: Zou, H., & Hastie, T. (2005). Regularization and variable selection
* via the elastic net. Journal of the Royal Statistical Society, Series B,
* 67(2), 301–320. doi:10.1111/j.1467-9868.2005.00503.x
*/
public SDCATest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class SDCA.
*/
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("trainR");
for(double alpha : new double[]{0.0, 0.5, 1.0})
for(LossR loss : new LossR[]{new SquaredLoss(), new AbsoluteLoss(), new HuberLoss(), new EpsilonInsensitiveLoss(1.0)})
{
RegressionDataSet train = FixedProblems.getLinearRegression(400, RandomUtil.getRandom());
SDCA sdca = new SDCA();
sdca.setLoss(loss);
sdca.setTolerance(1e-10);
sdca.setLambda(1.0/train.size());
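// lambda = 1/n makes the regularization penalty shrink with the data size,
// weak enough that the model can fit the synthetic linear target closely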
sdca.setAlpha(alpha);
sdca.train(train);
RegressionDataSet test = FixedProblems.getLinearRegression(100, RandomUtil.getRandom());
OnLineStatistics avgRelError = new OnLineStatistics();
for(DataPointPair<Double> dpp : test.getAsDPPList())
{
double truth = dpp.getPair();
double pred = sdca.regress(dpp.getDataPoint());
double relErr = (truth-pred)/truth;
avgRelError.add(relErr);
}
if(loss instanceof AbsoluteLoss || loss instanceof EpsilonInsensitiveLoss)//sensitive to small errors, so it may be a little off at times
assertEquals("Loss: " + loss.toString() + " alpha: " + alpha, 0.0, avgRelError.getMean(), 0.2);
else
assertEquals("Loss: " + loss.toString() + " alpha: " + alpha, 0.0, avgRelError.getMean(), 0.01);
}
}
/**
* Test of train method, of class SDCA.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(double alpha : new double[]{0.0, 0.5, 1.0})
for(LossC loss : new LossC[]{new LogisticLoss(), new HingeLoss()})
{
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
SDCA sdca = new SDCA();
sdca.setLoss(loss);
sdca.setLambda(1.0/train.size());
sdca.setAlpha(alpha);
sdca.train(train, true);
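// the boolean flag requests parallel training; the single-threaded path is
// exercised by testTrainC_ClassificationDataSet below and should agree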
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), sdca.classify(dpp.getDataPoint()).mostLikely());
}
}
/**
* Test of train method, of class SDCA.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(double alpha : new double[]{0.0, 0.5, 1.0})
for(LossC loss : new LossC[]{new LogisticLoss(), new HingeLoss()})
{
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
SDCA sdca = new SDCA();
sdca.setLoss(loss);
sdca.setLambda(1.0/train.size());
sdca.setAlpha(alpha);
sdca.train(train);
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), sdca.classify(dpp.getDataPoint()).mostLikely());
}
}
@Test
public void testScale()
{
System.out.println("testScale");
ClassificationDataSet train = FixedProblems.get2ClassLinear(1000, RandomUtil.getRandom());
Vec base = null;
for(double max : new double[]{1.0, 2.0, 4.0, 5.0, 6.0, 10.0, 20.0, 50.0})
{
SDCA sdca = new SDCA();
sdca.setUseBias(false);//bias term makes scaling non-trivial, so remove from this test
sdca.setLoss(new LogisticLoss());
sdca.setLambda(1.0 / train.size());
sdca.setAlpha(0.0);
ClassificationDataSet t = train.shallowClone();
t.applyTransform(new LinearTransform(t, 0, max));
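// rescaling the features to [0, max] should scale the optimal weights by
// 1/max (no bias term, pure L2-regularized logistic loss), so multiplying
// the learned weights back by max should recover the base solution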
sdca.train(t);
if(base == null)
base = sdca.getRawWeight(0).clone();
else
assertTrue("Failed on scale " + max, base.equals(sdca.getRawWeight(0).multiply(max), 0.1));
// System.out.println(sdca.getRawWeight(0).multiply(max));
// System.out.println(sdca.getBias(0));
}
}
/**
* Test of setLambda method, of class SDCA.
*/
@Test
public void testSetC()
{
System.out.println("train");
// for (int round = 0; round < 100; round++)
{
for (int attempts = 5; attempts >= 0; attempts--)
{
Random rand = RandomUtil.getRandom();
ClassificationDataSet data = new ClassificationDataSet(6, new CategoricalData[0], new CategoricalData(2));
/**
* Because of the way SDCA works, it has trouble picking just 1 of
* several perfectly correlated features. So we will make a 2nd version
* of the dataset which has 1 pure strong feature, 2 weak
* features with noise, and 3 weak features.
*/
ClassificationDataSet dataN = new ClassificationDataSet(6, new CategoricalData[0], new CategoricalData(2));
for (int i = 0; i < 500; i++)
{
double Z1 = rand.nextDouble() * 20 - 10;
double Z2 = rand.nextDouble() * 20 - 10;
Vec v = DenseVector.toDenseVec(Z1, -Z1, Z1, Z2, -Z2, Z2);
data.addDataPoint(v, (int) (Math.signum(Z1 + 0.1 * Z2) + 1) / 2);
double eps_1 = rand.nextGaussian()*10;
double eps_2 = rand.nextGaussian()*10;
v = DenseVector.toDenseVec(Z1, -Z1/10 + eps_1, Z1/10+ eps_2, Z2, -Z2, Z2);
dataN.addDataPoint(v, (int) (Math.signum(Z1 + 0.1 * Z2) + 1) / 2);
}
data.applyTransform(new PNormNormalization());
dataN.applyTransform(new PNormNormalization());
for (LossC loss : new LossC[]{new LogisticLoss(), new HingeLoss()})
{
Vec w = new ConstantVector(1.0, 6);
SDCA sdca = new SDCA();
sdca.setLoss(loss);
double maxLam = LinearTools.maxLambdaLogisticL1(data);
sdca.setMaxIters(100);
sdca.setUseBias(false);
sdca.setAlpha(1.0);
sdca.setLambda(maxLam);
double search_const = 0.025;
while(w.nnz() != 1)// I should be able to find a value of lambda that results in only 1 feature
{//SDCA requires a bit more searching because it behaves differently than normal coordinate descent solvers when selecting features
do
{
sdca.setLambda(sdca.getLambda() * (1+search_const));
sdca.train(dataN);
w = sdca.getRawWeight(0);
}
while (w.nnz() > 1);
//did we go too far?
while (w.nnz() == 0)
{
sdca.setLambda(sdca.getLambda()/ (1+search_const/3));
sdca.train(dataN);
w = sdca.getRawWeight(0);
}
search_const *= 0.95;
}
assertEquals(1, w.nnz());
int nonZeroIndex = w.getNonZeroIterator().next().getIndex();
assertTrue(nonZeroIndex == 0);//index 0 is the strongest feature, so it should be the one selected
assertEquals(1, (int)Math.signum(w.get(nonZeroIndex)));
//elastic case
sdca.setLambda(maxLam / 10);
sdca.setAlpha(0.5);//now we should get the top 3 on
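// with alpha = 0.5 the elastic-net penalty shows the grouping effect from
// Zou & Hastie (2005): the three correlated features should enter together
// with similar magnitudes instead of one being picked arbitrarily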
do
{
sdca.setLambda(sdca.getLambda() * 1.05);
sdca.train(data, sdca);
w = sdca.getRawWeight(0);
}
while (w.nnz() > 3);//we should be able to find this pretty easily
assertEquals(3, w.nnz());
assertEquals(1, (int) Math.signum(w.get(0)));
assertEquals(-1, (int) Math.signum(w.get(1)));
assertEquals(1, (int) Math.signum(w.get(2)));
//also want to make sure that they are all about equal in size
assertTrue(Math.abs((w.get(0) + w.get(1) * 2 + w.get(2)) / 3) < 0.4);
//Let's increase reg but switch to L2; we should see all features turn on!
sdca.setLambda(sdca.getLambda() * 3);
sdca.setAlpha(0.0);//now everyone should turn on
sdca.train(data);
w = sdca.getRawWeight(0);
if ((int) Math.signum(w.get(3)) != 1 && attempts > 0)//model probably still right, but got a bad epsilon solution... try again
{
continue;
}
assertEquals(6, w.nnz());
assertEquals(1, (int) Math.signum(w.get(0)));
assertEquals(-1, (int) Math.signum(w.get(1)));
assertEquals(1, (int) Math.signum(w.get(2)));
assertEquals(1, (int) Math.signum(w.get(3)));
assertEquals(-1, (int) Math.signum(w.get(4)));
assertEquals(1, (int) Math.signum(w.get(5)));
}
break;//made it through the test with no problems
}
}
}
@Test
public void testWarmOther()
{
System.out.println("testWarm");
Random rand = RandomUtil.getRandom();
ClassificationDataSet train = new ClassificationDataSet(600, new CategoricalData[0], new CategoricalData(2));
for(int i = 0; i < 200; i++)
{
double Z1 = rand.nextDouble()*20-10;
double Z2 = rand.nextDouble()*20-10;
Vec v = new DenseVector(train.getNumNumericalVars());
for(int j = 0; j < v.length(); j++)
{
if (j > 500)
{
if (j % 2 == 0)
v.set(j, Z2 * ((j + 1) / 600.0) + rand.nextGaussian() / (j + 1));
else
v.set(j, Z1 * ((j + 1) / 600.0) + rand.nextGaussian() / (j + 1));
}
else
v.set(j, rand.nextGaussian()*20);
}
train.addDataPoint(v, (int) (Math.signum(Z1+0.1*Z2)+1)/2);
}
train.applyTransform(new LinearTransform(train));
SDCA truth = new SDCA();
truth.setMaxIters(1000);
truth.setAlpha(0.5);
truth.setLoss(new LogisticLoss());
truth.setTolerance(1e-10);
truth.setLambda(1.0/train.size());
truth.train(train);
SDCA warm = new SDCA();
warm.setMaxIters(100);
warm.setLoss(new LogisticLoss());
warm.setAlpha(0.5);
warm.setTolerance(1e-7);
warm.setLambda(1.0/train.size());
warm.train(train, truth);
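// warm-starting from the converged 'truth' solution should reach essentially
// the same weights while taking fewer epochs, which the two asserts verify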
assertEquals(0, warm.getRawWeight(0).subtract(truth.getRawWeight(0)).pNorm(2), 1e-4);
assertTrue(warm.epochs_taken + " ?< " + truth.epochs_taken, warm.epochs_taken < truth.epochs_taken);
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/SMIDASTest.java
package jsat.classifiers.linear;
import java.util.Random;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPointPair;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class SMIDASTest
{
public SMIDASTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class SMIDAS.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(400, RandomUtil.getRandom());
SMIDAS smidas = new SMIDAS(0.1);
smidas.setLoss(StochasticSTLinearL1.Loss.LOG);
smidas.train(train);
ClassificationDataSet test = FixedProblems.get2ClassLinear(400, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), smidas.classify(dpp.getDataPoint()).mostLikely());
}
/**
* Test of train method, of class SMIDAS.
*/
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("train");
Random rand = new Random(123);
SMIDAS smidas = new SMIDAS(0.02);
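// setMinScaled(-1) rescales the inputs into [-1, 1]; SMIDAS's update assumes
// bounded feature values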
smidas.setMinScaled(-1);
smidas.setLoss(StochasticSTLinearL1.Loss.SQUARED);
smidas.train(FixedProblems.getLinearRegression(500, rand));
for(DataPointPair<Double> dpp : FixedProblems.getLinearRegression(100, rand).getAsDPPList())
{
double truth = dpp.getPair();
double pred = smidas.regress(dpp.getDataPoint());
double relErr = (truth-pred)/truth;
assertEquals(0.0, relErr, 0.1);//give it decent wiggle room because of the regularization
}
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/STGDTest.java
package jsat.classifiers.linear;
import java.util.Random;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class STGDTest
{
public STGDTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class STGD.
*/
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("train");
Random rand = new Random(123);
STGD scd = new STGD(5, 0.1, Double.POSITIVE_INFINITY, 0.1);
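// the arguments appear to be (K, learning rate, threshold, gravity) per the
// truncated-gradient scheme: every K = 5 updates the weights are shrunk toward
// zero, and the infinite threshold makes every coordinate eligible for truncation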
scd.train(FixedProblems.getLinearRegression(400, rand));
for(DataPointPair<Double> dpp : FixedProblems.getLinearRegression(400, rand).getAsDPPList())
{
double truth = dpp.getPair();
double pred = scd.regress(dpp.getDataPoint());
double relErr = (truth-pred)/truth;
assertEquals(0.0, relErr, 0.1);//give it decent wiggle room because of the regularization
}
}
/**
* Test of train method, of class STGD.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(400, RandomUtil.getRandom());
STGD scd = new STGD(5, 0.5, Double.POSITIVE_INFINITY, 0.1);
scd.train(train);
ClassificationDataSet test = FixedProblems.get2ClassLinear(400, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), scd.classify(dpp.getDataPoint()).mostLikely());
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/StochasticMultinomialLogisticRegressionTest.java
package jsat.classifiers.linear;
import java.util.Random;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.exceptions.UntrainedModelException;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class StochasticMultinomialLogisticRegressionTest
{
public StochasticMultinomialLogisticRegressionTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class StochasticMultinomialLogisticRegression.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(400, RandomUtil.getRandom());
for(StochasticMultinomialLogisticRegression.Prior prior : StochasticMultinomialLogisticRegression.Prior.values())
{
StochasticMultinomialLogisticRegression smlgr = new StochasticMultinomialLogisticRegression();
smlgr.setPrior(prior);
smlgr.train(train);
ClassificationDataSet test = FixedProblems.get2ClassLinear(400, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), smlgr.classify(dpp.getDataPoint()).mostLikely());
}
}
/**
* Test of clone method, of class StochasticMultinomialLogisticRegression.
*/
@Test
public void testClone()
{
System.out.println("clone");
StochasticMultinomialLogisticRegression smlgr = new StochasticMultinomialLogisticRegression();
Classifier cloned = smlgr.clone();
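// only the clone is trained; the original must remain untrained and should
// therefore throw an UntrainedModelException when asked to classify below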
ClassificationDataSet train = FixedProblems.get2ClassLinear(400, RandomUtil.getRandom());
cloned.train(train);
try
{
smlgr.classify(train.getDataPoint(0));
fail("Exception should have occured");
}
catch(UntrainedModelException ex)
{
}
train.classSampleCount(train.getDataPointCategory(0));
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/ALMA2KTest.java
package jsat.classifiers.linear.kernelized;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ALMA2KTest
{
public ALMA2KTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ALMA2K instance = new ALMA2K(new RBFKernel(0.5), 0.8);
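// alpha = 0.8 controls how aggressively ALMA approximates the maximal-margin
// solution; values below 1 trade margin accuracy for fewer updates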
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ALMA2K instance = new ALMA2K(new RBFKernel(0.5), 0.8);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
@Test
public void testClone()
{
System.out.println("clone");
ALMA2K instance = new ALMA2K(new RBFKernel(0.5), 0.8);
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
instance.setAveraged(true);
ALMA2K result = instance.clone();
assertTrue(result.isAveraged());
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/BOGDTest.java
package jsat.classifiers.linear.kernelized;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.distributions.kernels.RBFKernel;
import jsat.lossfunctions.HingeLoss;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class BOGDTest
{
public BOGDTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(boolean sampling : new boolean[]{true, false})
{
BOGD instance = new BOGD(new RBFKernel(0.5), 50, 0.5, 1e-3, 10, new HingeLoss());
instance.setUniformSampling(sampling);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom(), 1, 4);
ClassificationDataSet test = FixedProblems.getCircles(100, 0.0, RandomUtil.getRandom(), 1, 4);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(boolean sampling : new boolean[]{true, false})
{
BOGD instance = new BOGD(new RBFKernel(0.5), 50, 0.5, 1e-3, 10, new HingeLoss());
instance.setUniformSampling(sampling);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom(), 1, 4);
ClassificationDataSet test = FixedProblems.getCircles(100, 0.0, RandomUtil.getRandom(), 1, 4);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testClone()
{
System.out.println("clone");
BOGD instance = new BOGD(new RBFKernel(0.5), 50, 0.5, 1e-3, 10, new HingeLoss());
ClassificationDataSet t1 = FixedProblems.getCircles(500, 0.0, RandomUtil.getRandom(), 1, 4);
ClassificationDataSet t2 = FixedProblems.getCircles(500, 0.0, RandomUtil.getRandom(), 0.5, 3.0);
instance.setUniformSampling(true);
instance = instance.clone();
instance.train(t1);
instance.setUniformSampling(false);
BOGD result = instance.clone();
assertFalse(result.isUniformSampling());
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/CSKLRBatchTest.java
package jsat.classifiers.linear.kernelized;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.CategoricalResults;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.classifiers.svm.SupportVectorLearner;
import jsat.distributions.kernels.PukKernel;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class CSKLRBatchTest
{
public CSKLRBatchTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(CSKLR.UpdateMode mode : CSKLR.UpdateMode.values())
{
CSKLRBatch instance = new CSKLRBatch(0.5, new RBFKernel(0.5), 10, mode, SupportVectorLearner.CacheMode.NONE);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(CSKLR.UpdateMode mode : CSKLR.UpdateMode.values())
{
CSKLRBatch instance = new CSKLRBatch(0.5, new RBFKernel(0.5), 10, mode, SupportVectorLearner.CacheMode.NONE);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testClone()
{
System.out.println("clone");
for(CSKLR.UpdateMode mode : CSKLR.UpdateMode.values())
{
CSKLRBatch instance = new CSKLRBatch(0.5, new RBFKernel(0.5), 10, mode, SupportVectorLearner.CacheMode.NONE);
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
CSKLRBatch result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
@Test
public void testSerializable_WithTrainedModel() throws Exception {
System.out.println("Serializable");
for(CSKLR.UpdateMode mode : CSKLR.UpdateMode.values()) {
CSKLRBatch instance = new CSKLRBatch(0.5, new RBFKernel(0.5), 10, mode, SupportVectorLearner.CacheMode.NONE);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
instance.train(train);
CSKLRBatch serializedBatch = serializeAndDeserialize(instance);
for (int i = 0; i < test.size(); i++)
assertEquals(test.getDataPointCategory(i), serializedBatch.classify(test.getDataPoint(i)).mostLikely());
}
}
private CSKLRBatch serializeAndDeserialize(CSKLRBatch batch) throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
oos.writeObject(batch);
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
ObjectInputStream ois = new ObjectInputStream(bais);
return (CSKLRBatch) ois.readObject();
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/CSKLRTest.java
package jsat.classifiers.linear.kernelized;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class CSKLRTest
{
public CSKLRTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(CSKLR.UpdateMode mode : CSKLR.UpdateMode.values())
{
CSKLR instance = new CSKLR(0.5, new RBFKernel(0.5), 10, mode);
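// the arguments appear to be (eta, kernel, R, mode), with R = 10 bounding the
// norm of the solution; each UpdateMode is a different rule for when updates occur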
instance.setMode(mode);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(CSKLR.UpdateMode mode : CSKLR.UpdateMode.values())
{
CSKLR instance = new CSKLR(0.5, new RBFKernel(0.5), 10, mode);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testClone()
{
System.out.println("clone");
CSKLR instance = new CSKLR(0.5, new RBFKernel(0.5), 10, CSKLR.UpdateMode.MARGIN);
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
CSKLR result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/DUOLTest.java
package jsat.classifiers.linear.kernelized;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class DUOLTest
{
public DUOLTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
DUOL instance = new DUOL(new RBFKernel(0.5));
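// DUOL performs "double updates": when a new support vector conflicts with an
// existing one, the coefficients of both are adjusted at the same time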
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
DUOL instance = new DUOL(new RBFKernel(0.5));
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
@Test
public void testClone()
{
System.out.println("clone");
DUOL instance = new DUOL(new RBFKernel(0.5));
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
DUOL result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/ForgetronTest.java
package jsat.classifiers.linear.kernelized;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ForgetronTest
{
public ForgetronTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(boolean selfTuned : new boolean[]{true, false})
{
ClassificationDataSet train = FixedProblems.getCircles(1000, 0.0, RandomUtil.getRandom(), 1.0, 4.0);
Forgetron instance = new Forgetron(new RBFKernel(0.5), 40);
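// a budget of 40 support vectors forces the Forgetron to discard (forget)
// old support vectors once the budget is exceeded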
instance.setSelfTurned(selfTuned);
instance.setEpochs(30);
//add some mislabeled data to get the error-handling code to kick in and get exercised
for(int i = 0; i < 500; i+=20)
{
DataPoint dp = train.getDataPoint(i);
int y = train.getDataPointCategory(i);
int badY = (y == 0) ? 1 : 0;
train.addDataPoint(dp, badY);
}
ClassificationDataSet test = FixedProblems.getCircles(100, 0.0, RandomUtil.getRandom(), 1, 4);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.3);//given some leeway due to label noise
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(boolean selfTuned : new boolean[]{true, false})
{
ClassificationDataSet train = FixedProblems.getCircles(1000, 0.0, RandomUtil.getRandom(), 1.0, 4.0);
Forgetron instance = new Forgetron(new RBFKernel(0.5), 40);
instance.setSelfTurned(selfTuned);
instance.setEpochs(30);
//add some mislabeled data to get the error-handling code to kick in and get exercised
for(int i = 0; i < 500; i+=20)
{
DataPoint dp = train.getDataPoint(i);
int y = train.getDataPointCategory(i);
int badY = (y == 0) ? 1 : 0;
train.addDataPoint(dp, badY);
}
ClassificationDataSet test = FixedProblems.getCircles(100, 0.0, RandomUtil.getRandom(), 1, 4);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.3);//given some leeway due to label noise
}
}
@Test
public void testClone()
{
System.out.println("clone");
Forgetron instance = new Forgetron(new RBFKernel(0.5), 100);
instance.setEpochs(30);
ClassificationDataSet t1 = FixedProblems.getCircles(500, 0.0, RandomUtil.getRandom(), 1, 4);
ClassificationDataSet t2 = FixedProblems.getCircles(500, 0.0, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
Forgetron result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/KernelPointTest.java
package jsat.classifiers.linear.kernelized;
import jsat.distributions.kernels.KernelPoint;
import static java.lang.Math.*;
import java.util.List;
import java.util.Random;
import jsat.distributions.kernels.LinearKernel;
import jsat.distributions.multivariate.NormalM;
import jsat.linear.*;
import jsat.linear.distancemetrics.EuclideanDistance;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class KernelPointTest
{
List<Vec> toAdd;
List<Vec> toTest;
double[] coeff;
public KernelPointTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
Vec mean = new DenseVector(new double[]{2.0, -1.0, 3.0});
Matrix cov = new DenseMatrix(new double[][]
{
{1.07142, 1.15924, 0.38842},
{1.15924, 1.33071, 0.51373},
{0.38842, 0.51373, 0.92986},
});
NormalM normal = new NormalM(mean, cov);
Random rand = new Random(42);
toAdd = normal.sample(10, rand);
toTest = normal.sample(10, rand);
coeff = new double[toAdd.size()];
for(int i = 0; i < coeff.length; i++)
coeff[i] = Math.round(rand.nextDouble()*9+0.5);
for(int i = 0; i < coeff.length; i++)
if(i % 2 != 0)
coeff[i] *= -1;
}
@After
public void tearDown()
{
}
/**
* Test of getSqrdNorm method, of class KernelPoint.
*/
@Test
public void testGetSqrdNorm()
{
System.out.println("getSqrdNorm");
KernelPoint kpSimple = new KernelPoint(new LinearKernel(0), 1e-2);
KernelPoint kpCoeff = new KernelPoint(new LinearKernel(0), 1e-2);
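// with a linear kernel k(x,y) = x.y the kernel point is implicitly the vector
// w = sum_i c_i x_i, so getSqrdNorm() should approximate
// ||w||^2 = sum_i sum_j c_i c_j k(x_i, x_j); the 1e-2 passed above is the
// projection error tolerance, hence the 1e-2*4 slack in the asserts below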
for(int i = 0; i < toAdd.size(); i++)
{
Vec sumSimple = toAdd.get(0).clone();
Vec sumCoeff = toAdd.get(0).multiply(coeff[0]);
for(int ii = 1; ii < i+1; ii++ )
{
sumSimple.mutableAdd(toAdd.get(ii));
sumCoeff.mutableAdd(coeff[ii], toAdd.get(ii));
}
kpSimple.mutableAdd(toAdd.get(i));
kpCoeff.mutableAdd(coeff[i], toAdd.get(i));
double expectedSimple = Math.pow(sumSimple.pNorm(2), 2);
double expectedCoeff = Math.pow(sumCoeff.pNorm(2), 2);
assertEquals(expectedSimple, kpSimple.getSqrdNorm(), 1e-2*4);
assertEquals(expectedCoeff, kpCoeff.getSqrdNorm(), 1e-2*4);
KernelPoint kp0 = kpSimple.clone();
KernelPoint kp1 = kpCoeff.clone();
for(int j = i+1; j < coeff.length; j++ )
{
kp0.mutableAdd(toAdd.get(j));
kp1.mutableAdd(coeff[j], toAdd.get(j));
}
for(int j = i+1; j < coeff.length; j++ )
{
kp0.mutableAdd(-1, toAdd.get(j));
kp1.mutableAdd(-coeff[j], toAdd.get(j));
}
assertEquals(expectedSimple, kp0.getSqrdNorm(), 1e-2*4);
assertEquals(expectedCoeff, kp1.getSqrdNorm(), 1e-2*4);
kp0.mutableMultiply(1.0/(i+1));
kp1.mutableMultiply(1.0/(i+1));
assertEquals(expectedSimple/pow(i+1,2), kp0.getSqrdNorm(), 1e-2*4);
assertEquals(expectedCoeff/pow(i+1,2), kp1.getSqrdNorm(), 1e-2*4);
}
}
@Test
public void testDot_KernelPoint()
{
System.out.println("dot_KernelPoint");
KernelPoint kpSimple = new KernelPoint(new LinearKernel(0), 1e-2);
KernelPoint kpCoeff = new KernelPoint(new LinearKernel(0), 1e-2);
for(int i = 0; i < toAdd.size(); i++)
{
Vec sumSimple = toAdd.get(0).clone();
Vec sumCoeff = toAdd.get(0).multiply(coeff[0]);
for(int ii = 1; ii < i+1; ii++ )
{
sumSimple.mutableAdd(toAdd.get(ii));
sumCoeff.mutableAdd(coeff[ii], toAdd.get(ii));
}
kpSimple.mutableAdd(toAdd.get(i));
kpCoeff.mutableAdd(coeff[i], toAdd.get(i));
double expectedSimple = sumSimple.dot(sumSimple);
double expectedCoeff = sumCoeff.dot(sumCoeff);
double expectedSC = sumSimple.dot(sumCoeff);
assertEquals(expectedSimple, kpSimple.dot(kpSimple), 1e-2*4);
assertEquals(expectedCoeff, kpCoeff.dot(kpCoeff), 1e-2*4);
assertEquals(expectedSC, kpSimple.dot(kpCoeff), 1e-2*4);
KernelPoint kp0 = kpSimple.clone();
KernelPoint kp1 = kpCoeff.clone();
for(int j = i+1; j < coeff.length; j++ )
{
kp0.mutableAdd(toAdd.get(j));
kp1.mutableAdd(coeff[j], toAdd.get(j));
}
for(int j = i+1; j < coeff.length; j++ )
{
kp0.mutableAdd(-1, toAdd.get(j));
kp1.mutableAdd(-coeff[j], toAdd.get(j));
}
assertEquals(expectedSimple, kp0.dot(kpSimple), 1e-2*4);
assertEquals(expectedCoeff, kp1.dot(kpCoeff), 1e-2*4);
assertEquals(expectedSC, kp0.dot(kp1), 1e-2*4);
assertEquals(expectedSC, kp1.dot(kp0), 1e-2*4);
assertEquals(expectedSC, kp0.dot(kpCoeff), 1e-2*4);
assertEquals(expectedSC, kpSimple.dot(kp1), 1e-2*4);
kp0.mutableMultiply(1.0/(i+1));
kp1.mutableMultiply(1.0/(i+1));
assertEquals(expectedSimple/(i+1), kp0.dot(kpSimple), 1e-2*4);
assertEquals(expectedCoeff/(i+1), kp1.dot(kpCoeff), 1e-2*4);
assertEquals(expectedSC/pow(i+1, 2), kp0.dot(kp1), 1e-2*4);
assertEquals(expectedSC/pow(i+1, 2), kp1.dot(kp0), 1e-2*4);
assertEquals(expectedSC/(i+1), kp0.dot(kpCoeff), 1e-2*4);
assertEquals(expectedSC/(i+1), kpSimple.dot(kp1), 1e-2*4);
}
}
/**
* Test of dot method, of class KernelPoint.
*/
@Test
public void testDot_Vec()
{
System.out.println("dot_Vec");
KernelPoint kpSimple = new KernelPoint(new LinearKernel(0), 1e-2);
KernelPoint kpCoeff = new KernelPoint(new LinearKernel(0), 1e-2);
for(int i = 0; i < toAdd.size(); i++)
{
Vec sumSimple = toAdd.get(0).clone();
Vec sumCoeff = toAdd.get(0).multiply(coeff[0]);
for(int ii = 1; ii < i+1; ii++ )
{
sumSimple.mutableAdd(toAdd.get(ii));
sumCoeff.mutableAdd(coeff[ii], toAdd.get(ii));
}
kpSimple.mutableAdd(toAdd.get(i));
kpCoeff.mutableAdd(coeff[i], toAdd.get(i));
for(Vec v : toTest)
{
double expectedSimple = sumSimple.dot(v);
double expectedCoeff = sumCoeff.dot(v);
assertEquals(expectedSimple, kpSimple.dot(v), 1e-2*4);
assertEquals(expectedCoeff, kpCoeff.dot(v), 1e-2*4);
KernelPoint kp0 = kpSimple.clone();
KernelPoint kp1 = kpCoeff.clone();
for(int j = i+1; j < coeff.length; j++ )
{
kp0.mutableAdd(toAdd.get(j));
kp1.mutableAdd(coeff[j], toAdd.get(j));
}
for(int j = i+1; j < coeff.length; j++ )
{
kp0.mutableAdd(-1, toAdd.get(j));
kp1.mutableAdd(-coeff[j], toAdd.get(j));
}
assertEquals(expectedSimple, kp0.dot(v), 1e-2*4);
assertEquals(expectedCoeff, kp1.dot(v), 1e-2*4);
kp0.mutableMultiply(1.0/(i+1));
kp1.mutableMultiply(1.0/(i+1));
assertEquals(expectedSimple/(i+1), kp0.dot(v), 1e-2*4);
assertEquals(expectedCoeff/(i+1), kp1.dot(v), 1e-2*4);
}
}
}
/**
* Test of dist method, of class KernelPoint.
*/
@Test
public void testDistance_Vec()
{
System.out.println("distance_Vec");
KernelPoint kpSimple = new KernelPoint(new LinearKernel(0), 1e-2);
KernelPoint kpCoeff = new KernelPoint(new LinearKernel(0), 1e-2);
EuclideanDistance d = new EuclideanDistance();
for(int i = 0; i < toAdd.size(); i++)
{
Vec sumSimple = toAdd.get(0).clone();
Vec sumCoeff = toAdd.get(0).multiply(coeff[0]);
for(int ii = 1; ii < i+1; ii++ )
{
sumSimple.mutableAdd(toAdd.get(ii));
sumCoeff.mutableAdd(coeff[ii], toAdd.get(ii));
}
kpSimple.mutableAdd(toAdd.get(i));
kpCoeff.mutableAdd(coeff[i], toAdd.get(i));
for(Vec v : toTest)
{
double expectedSimple = d.dist(sumSimple, v);
double expectedCoeff = d.dist(sumCoeff, v);
assertEquals(expectedSimple, kpSimple.dist(v), 1e-2*4);
assertEquals(expectedCoeff, kpCoeff.dist(v), 1e-2*4);
KernelPoint kp0 = kpSimple.clone();
KernelPoint kp1 = kpCoeff.clone();
for(int j = i+1; j < coeff.length; j++ )
{
kp0.mutableAdd(toAdd.get(j));
kp1.mutableAdd(coeff[j], toAdd.get(j));
}
for(int j = i+1; j < coeff.length; j++ )
{
kp0.mutableAdd(-1, toAdd.get(j));
kp1.mutableAdd(-coeff[j], toAdd.get(j));
}
assertEquals(expectedSimple, kp0.dist(v), 1e-2*4);
assertEquals(expectedCoeff, kp1.dist(v), 1e-2*4);
kp0.mutableMultiply(1.0/(i+1));
kp1.mutableMultiply(1.0/(i+1));
assertEquals(d.dist(sumSimple.divide(i+1), v), kp0.dist(v), 1e-2*4);
assertEquals(d.dist(sumCoeff.divide(i+1), v), kp1.dist(v), 1e-2*4);
}
}
}
@Test
public void testDistance_KernelPoint()
{
System.out.println("distance_KernelPoint");
KernelPoint kpSimpleA = new KernelPoint(new LinearKernel(0), 1e-2);
KernelPoint kpCoeffA = new KernelPoint(new LinearKernel(0), 1e-2);
KernelPoint kpSimpleB = new KernelPoint(new LinearKernel(0), 1e-2);
KernelPoint kpCoeffB = new KernelPoint(new LinearKernel(0), 1e-2);
EuclideanDistance d = new EuclideanDistance();
for(int i = 0; i < toAdd.size(); i++)
{
Vec sumSimpleA = toAdd.get(0).clone();
Vec sumCoeffA = toAdd.get(0).multiply(coeff[0]);
for(int ii = 1; ii < i+1; ii++ )
{
sumSimpleA.mutableAdd(toAdd.get(ii));
sumCoeffA.mutableAdd(coeff[ii], toAdd.get(ii));
}
Vec sumSimpleB = toTest.get(0).clone();
Vec sumCoeffB = toTest.get(0).multiply(coeff[0]);
for(int ii = 1; ii < i+1; ii++ )
{
sumSimpleB.mutableAdd(toTest.get(ii));
sumCoeffB.mutableAdd(coeff[ii], toTest.get(ii));
}
kpSimpleA.mutableAdd(toAdd.get(i));
kpCoeffA.mutableAdd(coeff[i], toAdd.get(i));
kpSimpleB.mutableAdd(toTest.get(i));
kpCoeffB.mutableAdd(coeff[i], toTest.get(i));
assertEquals(0.0, kpSimpleA.dist(kpSimpleA), 1e-2*4);
assertEquals(0.0, kpSimpleB.dist(kpSimpleB), 1e-2*4);
assertEquals(0.0, kpCoeffA.dist(kpCoeffA), 1e-2*4);
assertEquals(0.0, kpCoeffB.dist(kpCoeffB), 1e-2*4);
assertEquals(d.dist(sumSimpleA, sumSimpleB), kpSimpleA.dist(kpSimpleB), 1e-2*4);
assertEquals(d.dist(sumSimpleA, sumCoeffA), kpSimpleA.dist(kpCoeffA), 1e-2*4);
assertEquals(d.dist(sumSimpleA, sumCoeffB), kpSimpleA.dist(kpCoeffB), 1e-2*4);
assertEquals(d.dist(sumCoeffA, sumSimpleB), kpCoeffA.dist(kpSimpleB), 1e-2*4);
assertEquals(d.dist(sumCoeffB, sumSimpleB), kpCoeffB.dist(kpSimpleB), 1e-2*4);
KernelPoint kpSimpleAClone = kpSimpleA.clone();
KernelPoint kpSimpleBClone = kpSimpleB.clone();
kpSimpleAClone.mutableMultiply(1.0/(i+1));
kpSimpleBClone.mutableMultiply(1.0/(i+1));
assertEquals(d.dist(sumSimpleA.divide(i+1), sumSimpleB.divide(i+1)), kpSimpleAClone.dist(kpSimpleBClone), 1e-2*4);
assertEquals(d.dist(sumSimpleA.divide(i+1), sumCoeffA), kpSimpleAClone.dist(kpCoeffA), 1e-2*4);
assertEquals(d.dist(sumSimpleA.divide(i+1), sumCoeffB), kpSimpleAClone.dist(kpCoeffB), 1e-2*4);
assertEquals(d.dist(sumCoeffA, sumSimpleB.divide(i+1)), kpCoeffA.dist(kpSimpleBClone), 1e-2*4);
assertEquals(d.dist(sumCoeffB, sumSimpleB.divide(i+1)), kpCoeffB.dist(kpSimpleBClone), 1e-2*4);
}
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/KernelSGDTest.java
package jsat.classifiers.linear.kernelized;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.distributions.kernels.KernelPoint;
import jsat.distributions.kernels.RBFKernel;
import jsat.lossfunctions.*;
import jsat.regression.RegressionDataSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class KernelSGDTest
{
public KernelSGDTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
KernelSGD classifier = new KernelSGD(new HingeLoss(), new RBFKernel(0.5), 1e-5, KernelPoint.BudgetStrategy.STOP, 100);
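// BudgetStrategy.STOP caps the model at 100 support vectors and simply stops
// adding new ones once the budget is full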
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
KernelSGD classifier = new KernelSGD(new HingeLoss(), new RBFKernel(0.5), 1e-5, KernelPoint.BudgetStrategy.STOP, 100);
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
@Test
public void testTrainC_ClassificationDataSet_Multi_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getCircles(150, new Random(2), 1.0, 2.0, 4.0);
ClassificationDataSet testSet = FixedProblems.getCircles(50, new Random(3), 1.0, 2.0, 4.0);
KernelSGD classifier = new KernelSGD(new HingeLoss(), new RBFKernel(0.5), 1e-5, KernelPoint.BudgetStrategy.STOP, 100);
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
@Test
public void testTrainC_ClassificationDataSet_Multi()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getCircles(150, new Random(2), 1.0, 2.0, 4.0);
ClassificationDataSet testSet = FixedProblems.getCircles(50, new Random(3), 1.0, 2.0, 4.0);
KernelSGD classifier = new KernelSGD(new HingeLoss(), new RBFKernel(0.5), 1e-5, KernelPoint.BudgetStrategy.STOP, 100);
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
/**
* Test of train method, of class PlatSMO.
*/
@Test
public void testTrain_RegressionDataSet_ExecutorService()
{
System.out.println("train");
RegressionDataSet trainSet = FixedProblems.getSimpleRegression1(150, new Random(2));
RegressionDataSet testSet = FixedProblems.getSimpleRegression1(50, new Random(3));
KernelSGD classifier = new KernelSGD(new EpsilonInsensitiveLoss(0.1), new RBFKernel(0.5), 1e-5, KernelPoint.BudgetStrategy.MERGE_RBF, 50);
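// BudgetStrategy.MERGE_RBF stays within the budget of 50 by merging nearby
// support vectors rather than dropping them, an option suited to RBF kernels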
classifier.setEpochs(10);
classifier.train(trainSet, true);
double errors = 0;
for (int i = 0; i < testSet.size(); i++)
errors += Math.pow(testSet.getTargetValue(i) - classifier.regress(testSet.getDataPoint(i)), 2);
assertTrue(errors / testSet.size() < 1);
}
/**
* Test of train method, of class PlatSMO.
*/
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("train");
RegressionDataSet trainSet = FixedProblems.getSimpleRegression1(150, new Random(2));
RegressionDataSet testSet = FixedProblems.getSimpleRegression1(50, new Random(3));
KernelSGD classifier = new KernelSGD(new EpsilonInsensitiveLoss(0.1), new RBFKernel(0.5), 1e-5, KernelPoint.BudgetStrategy.MERGE_RBF, 50);
classifier.setEpochs(10);
classifier.train(trainSet);
double errors = 0;
for (int i = 0; i < testSet.size(); i++)
errors += Math.pow(testSet.getTargetValue(i) - classifier.regress(testSet.getDataPoint(i)), 2);
assertTrue(errors / testSet.size() < 1);
}
@Test
public void testClone()
{
System.out.println("clone");
KernelSGD instance = new KernelSGD(new LogisticLoss(), new RBFKernel(0.5), 1e-4, KernelPoint.BudgetStrategy.MERGE_RBF, 100);
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
KernelSGD result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/OSKLTest.java
package jsat.classifiers.linear.kernelized;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class OSKLTest
{
public OSKLTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(boolean useAverageModel : new boolean[]{true, false})
for(int burnin : new int[]{0, 50, 100, 250})
{
OSKL instance = new OSKL(new RBFKernel(0.5), 1.5);
instance.setBurnIn(burnin);
instance.setUseAverageModel(useAverageModel);
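// the averaged model is only accumulated after the burn-in period, so both
// options are swept together to exercise their interaction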
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(boolean useAverageModel : new boolean[]{true, false})
for(int burnin : new int[]{0, 50, 100, 250})
{
OSKL instance = new OSKL(new RBFKernel(0.5), 1.5);
instance.setBurnIn(burnin);
instance.setUseAverageModel(useAverageModel);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testClone()
{
System.out.println("clone");
OSKL instance = new OSKL(new RBFKernel(0.5), 10);
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
instance.setUseAverageModel(true);
OSKL result = instance.clone();
assertTrue(result.isUseAverageModel());
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
// File: JSAT-master/JSAT/test/jsat/classifiers/linear/kernelized/ProjectronTest.java
package jsat.classifiers.linear.kernelized;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ProjectronTest
{
static private ExecutorService ex;
public ProjectronTest()
{
}
@BeforeClass
public static void setUpClass()
{
ex = Executors.newFixedThreadPool(SystemInfo.LogicalCores);
}
@AfterClass
public static void tearDownClass()
{
ex.shutdown();
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(boolean useMargin : new boolean[]{true, false})
{
Projectron instance = new Projectron(new RBFKernel(0.5));
instance.setUseMarginUpdates(useMargin);
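// with margin updates enabled the Projectron++ variant is used, which also
// updates on correctly classified points that land inside the margin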
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(1000, RandomUtil.getRandom());
            //add some mislabeled data to get the error-handling code to kick in and get exercised
for(int i = 0; i < 500; i+=20)
{
DataPoint dp = train.getDataPoint(i);
int y = train.getDataPointCategory(i);
int badY = (y == 0) ? 1 : 0;
train.addDataPoint(dp, badY);
}
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
            assertEquals(0, cme.getErrorRate(), 0.3);//given some leeway due to label noise
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(boolean useMargin : new boolean[]{true, false})
{
Projectron instance = new Projectron(new RBFKernel(0.5));
instance.setUseMarginUpdates(useMargin);
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(1000, RandomUtil.getRandom());
            //add some mislabeled data to get the error-handling code to kick in and get exercised
for(int i = 0; i < 500; i+=20)
{
DataPoint dp = train.getDataPoint(i);
int y = train.getDataPointCategory(i);
int badY = (y == 0) ? 1 : 0;
train.addDataPoint(dp, badY);
}
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
            assertEquals(0, cme.getErrorRate(), 0.3);//given some leeway due to label noise
}
}
@Test
public void testClone()
{
System.out.println("clone");
Projectron instance = new Projectron(new RBFKernel(0.5));
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
Projectron result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 4,254 | 29.833333 | 109 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/neuralnetwork/BackPropagationNetTest.java | package jsat.classifiers.neuralnetwork;
import java.util.EnumSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.TestTools;
import jsat.classifiers.ClassificationDataSet;
import jsat.regression.RegressionDataSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class BackPropagationNetTest
{
/*
     * Network training is a bit heuristic and works best with more data - so the training set size is enlarged
*/
public BackPropagationNetTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
     * Test of train method, of class BackPropagationNet.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(2000, RandomUtil.getRandom());
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
BackPropagationNet net = new BackPropagationNet(500).clone();
net.setEpochs(20);
net.train(trainSet, true);
net = net.clone();
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), net.classify(testSet.getDataPoint(i)).mostLikely());
}
/**
     * Test of train method, of class BackPropagationNet.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(2000, RandomUtil.getRandom());
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
BackPropagationNet net = new BackPropagationNet(500).clone();
net.setEpochs(20);
//serialization check
net = TestTools.deepCopy(net);
net.train(trainSet);
net = net.clone();
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), net.classify(testSet.getDataPoint(i)).mostLikely());
//serialization check
net = TestTools.deepCopy(net);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), net.classify(testSet.getDataPoint(i)).mostLikely());
}
}
| 2,883 | 26.207547 | 110 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/neuralnetwork/DReDNetSimpleTest.java | package jsat.classifiers.neuralnetwork;
import java.util.EnumSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.TestTools;
import jsat.classifiers.ClassificationDataSet;
import jsat.regression.RegressionDataSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class DReDNetSimpleTest
{
/*
     * Network training is a bit heuristic and works best with more data - so the training set size is enlarged
*/
public DReDNetSimpleTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class DReDNetSimple.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(2000, RandomUtil.getRandom());
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
DReDNetSimple net = new DReDNetSimple(500).clone();
net.setEpochs(20);
net.train(trainSet, true);
net = net.clone();
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), net.classify(testSet.getDataPoint(i)).mostLikely());
}
/**
* Test of train method, of class DReDNetSimple.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(2000, RandomUtil.getRandom());
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
DReDNetSimple net = new DReDNetSimple(500).clone();
net.setEpochs(20);
//serialization check
net = TestTools.deepCopy(net);
net.train(trainSet);
net = net.clone();
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), net.classify(testSet.getDataPoint(i)).mostLikely());
//serialization check
net = TestTools.deepCopy(net);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), net.classify(testSet.getDataPoint(i)).mostLikely());
}
}
| 2,845 | 25.849057 | 110 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/neuralnetwork/LVQLLCTest.java | /*
* Copyright (C) 2015 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.neuralnetwork;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.SystemInfo;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class LVQLLCTest
{
public LVQLLCTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(LVQ.LVQVersion method : LVQ.LVQVersion.values())
{
LVQLLC instance = new LVQLLC(new EuclideanDistance(), 5);
instance.setRepresentativesPerClass(20);
instance.setLVQMethod(method);
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(100, 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertTrue(cme.getErrorRate() <= 0.001);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(LVQ.LVQVersion method : LVQ.LVQVersion.values())
{
LVQLLC instance = new LVQLLC(new EuclideanDistance(), 5);
instance.setRepresentativesPerClass(20);
instance.setLVQMethod(method);
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(100, 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertTrue(cme.getErrorRate() <= 0.001);
}
}
@Test
public void testClone()
{
System.out.println("clone");
LVQLLC instance = new LVQLLC(new EuclideanDistance(), 5);
ClassificationDataSet t1 = FixedProblems.getSimpleKClassLinear(100, 3);
ClassificationDataSet t2 = FixedProblems.getSimpleKClassLinear(100, 6);
instance = instance.clone();
instance.train(t1);
LVQLLC result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 3,932 | 28.133333 | 105 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/neuralnetwork/LVQTest.java | /*
* Copyright (C) 2015 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.neuralnetwork;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.SystemInfo;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class LVQTest
{
public LVQTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
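    //LVQ training is stochastic, so a bad run is retried up to this many times before the test fails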
static int max_trials = 3;
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(LVQ.LVQVersion method : LVQ.LVQVersion.values())
{
LVQ instance = new LVQ(new EuclideanDistance(), 5);
instance.setRepresentativesPerClass(20);
instance.setLVQMethod(method);
for(int trials = 0; trials < max_trials; trials++)
{
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(100, 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
                if(cme.getErrorRate() <= 0.001)
                    break;//did well
                else if(trials == max_trials-1)//wrong too many times, something is broken
                    assertEquals(cme.getErrorRate(), 0.0, 0.001);
}
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(LVQ.LVQVersion method : LVQ.LVQVersion.values())
{
LVQ instance = new LVQ(new EuclideanDistance(), 5);
instance.setRepresentativesPerClass(20);
instance.setLVQMethod(method);
for(int trials = 0; trials < max_trials; trials++)
{
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(100, 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
                if (cme.getErrorRate() <= 0.001)
                    break;//did well
                else if (trials == max_trials-1)//wrong too many times, something is broken
                    assertEquals(cme.getErrorRate(), 0.0, 0.001);
}
}
}
@Test
public void testClone()
{
System.out.println("clone");
LVQ instance = new LVQ(new EuclideanDistance(), 5);
ClassificationDataSet t1 = FixedProblems.getSimpleKClassLinear(100, 3);
ClassificationDataSet t2 = FixedProblems.getSimpleKClassLinear(100, 6);
instance = instance.clone();
instance.train(t1);
LVQ result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 4,567 | 29.453333 | 114 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/neuralnetwork/PerceptronTest.java | package jsat.classifiers.neuralnetwork;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPointPair;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class PerceptronTest
{
public PerceptronTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class Perceptron.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
Perceptron instance = new Perceptron();
instance = instance.clone();
instance.train(train, true);
instance = instance.clone();
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), instance.classify(dpp.getDataPoint()).mostLikely());
}
/**
* Test of train method, of class Perceptron.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
Perceptron instance = new Perceptron();
instance = instance.clone();
instance.train(train);
instance = instance.clone();
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for (DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), instance.classify(dpp.getDataPoint()).mostLikely());
}
}
| 2,397 | 24.510638 | 104 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/neuralnetwork/RBFNetTest.java | package jsat.classifiers.neuralnetwork;
import java.util.EnumSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.regression.RegressionDataSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class RBFNetTest
{
/*
* RBF is a bit heuristic and works best with more data - so the training set size is enlarged
*/
public RBFNetTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class RBFNet.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(2000, RandomUtil.getRandom());
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
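        //sweep every combination of phase-1 (center selection) and phase-2 (bandwidth estimation) learner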
for(RBFNet.Phase1Learner p1l : RBFNet.Phase1Learner.values())
for(RBFNet.Phase2Learner p2l : RBFNet.Phase2Learner.values())
{
RBFNet net = new RBFNet(25).clone();
                net.setAlpha(1);//CLOSEST_OPPOSITE_CENTROID needs a smaller value, should be fine for others on this data set
net.setPhase1Learner(p1l);
net.setPhase2Learner(p2l);
net.train(trainSet, true);
net = net.clone();
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), net.classify(testSet.getDataPoint(i)).mostLikely());
}
}
/**
* Test of train method, of class RBFNet.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(2000, RandomUtil.getRandom());
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
for(RBFNet.Phase1Learner p1l : RBFNet.Phase1Learner.values())
for(RBFNet.Phase2Learner p2l : RBFNet.Phase2Learner.values())
{
RBFNet net = new RBFNet(25);
                net.setAlpha(1);//CLOSEST_OPPOSITE_CENTROID needs a smaller value, should be fine for others on this data set
net.setPhase1Learner(p1l);
net.setPhase2Learner(p2l);
net = net.clone();
net.train(trainSet);
net = net.clone();
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), net.classify(testSet.getDataPoint(i)).mostLikely());
}
}
/**
* Test of train method, of class RBFNet.
*/
@Test
public void testTrain_RegressionDataSet_ExecutorService()
{
System.out.println("train");
RegressionDataSet trainSet = FixedProblems.getSimpleRegression1(2000, RandomUtil.getRandom());
RegressionDataSet testSet = FixedProblems.getSimpleRegression1(200, RandomUtil.getRandom());
for(RBFNet.Phase1Learner p1l : RBFNet.Phase1Learner.values())
for(RBFNet.Phase2Learner p2l : EnumSet.complementOf(EnumSet.of(RBFNet.Phase2Learner.CLOSEST_OPPOSITE_CENTROID)))
{
RBFNet net = new RBFNet(25);
                net.setAlpha(1);//CLOSEST_OPPOSITE_CENTROID needs a smaller value, should be fine for others on this data set
net.setPhase1Learner(p1l);
net.setPhase2Learner(p2l);
net = net.clone();
net.train(trainSet, true);
net = net.clone();
double errors = 0;
for (int i = 0; i < testSet.size(); i++)
errors += Math.pow(testSet.getTargetValue(i) - net.regress(testSet.getDataPoint(i)), 2);
assertTrue(errors/testSet.size() < 1);
}
}
/**
* Test of train method, of class RBFNet.
*/
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("train");
RegressionDataSet trainSet = FixedProblems.getSimpleRegression1(2000, RandomUtil.getRandom());
RegressionDataSet testSet = FixedProblems.getSimpleRegression1(200, RandomUtil.getRandom());
for(RBFNet.Phase1Learner p1l : RBFNet.Phase1Learner.values())
for(RBFNet.Phase2Learner p2l : EnumSet.complementOf(EnumSet.of(RBFNet.Phase2Learner.CLOSEST_OPPOSITE_CENTROID)))
{
RBFNet net = new RBFNet(25);
                net.setAlpha(1);//CLOSEST_OPPOSITE_CENTROID needs a smaller value, should be fine for others on this data set
net.setPhase1Learner(p1l);
net.setPhase2Learner(p2l);
net = net.clone();
net.train(trainSet);
net = net.clone();
double errors = 0;
for (int i = 0; i < testSet.size(); i++)
errors += Math.pow(testSet.getTargetValue(i) - net.regress(testSet.getDataPoint(i)), 2);
assertTrue(errors/testSet.size() < 1);
}
}
}
| 5,826 | 33.892216 | 127 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/neuralnetwork/SOMTest.java | /*
* Copyright (C) 2015 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.neuralnetwork;
import jsat.FixedProblems;
import jsat.classifiers.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class SOMTest
{
public SOMTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
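        //SOM(5, 5) presumably builds a 5x5 lattice of neurons; small, but enough for concentric circles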
SOM instance = new SOM(5, 5);
instance.setMaxIterations(200);
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(100, 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.1);
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
SOM instance = new SOM(5, 5);
instance.setMaxIterations(50);
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(100, 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.1);
}
@Test
public void testClone()
{
System.out.println("clone");
SOM instance = new SOM(5, 5);
instance.setMaxIterations(50);
ClassificationDataSet t1 = FixedProblems.getSimpleKClassLinear(5000, 3);
ClassificationDataSet t2 = FixedProblems.getSimpleKClassLinear(5000, 6);
instance = instance.clone();
instance.train(t1);
SOM result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 3,409 | 26.063492 | 105 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/DCDTest.java | package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPointPair;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class DCDTest
{
public DCDTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
/**
* Test of train method, of class DCD.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
DCD instance = new DCD();
instance.train(train, true);
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), instance.classify(dpp.getDataPoint()).mostLikely());
}
/**
* Test of train method, of class DCD.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
DCD instance = new DCD();
instance.train(train);
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for (DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), instance.classify(dpp.getDataPoint()).mostLikely());
}
@Test
public void testTrain_RegressionDataSet_ExecutorService()
{
System.out.println("train");
Random rand = RandomUtil.getRandom();
DCD dcd = new DCD();
dcd.train(FixedProblems.getLinearRegression(400, rand), true);
for (DataPointPair<Double> dpp : FixedProblems.getLinearRegression(100, rand).getAsDPPList())
{
double truth = dpp.getPair();
double pred = dcd.regress(dpp.getDataPoint());
double relErr = (truth - pred) / truth;
            assertEquals(0.0, relErr, 0.1);//give it decent wiggle room b/c of regularization
}
}
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("train");
Random rand = RandomUtil.getRandom();
DCD dcd = new DCD();
dcd.train(FixedProblems.getLinearRegression(400, rand));
for (DataPointPair<Double> dpp : FixedProblems.getLinearRegression(100, rand).getAsDPPList())
{
double truth = dpp.getPair();
double pred = dcd.regress(dpp.getDataPoint());
double relErr = (truth - pred) / truth;
            assertEquals(0.0, relErr, 0.1);//give it decent wiggle room b/c of regularization
}
}
}
| 3,195 | 27.792793 | 104 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/DCDsTest.java | package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPointPair;
import jsat.regression.RegressionDataSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class DCDsTest
{
public DCDsTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class DCDs.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
DCDs instance = new DCDs();
instance.train(train, true);
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), instance.classify(dpp.getDataPoint()).mostLikely());
}
/**
* Test of train method, of class DCDs.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
DCDs instance = new DCDs();
instance.train(train);
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for (DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), instance.classify(dpp.getDataPoint()).mostLikely());
}
@Test
public void testTrain_RegressionDataSet_ExecutorService()
{
System.out.println("train");
Random rand = RandomUtil.getRandom();
DCDs dcds = new DCDs();
dcds.train(FixedProblems.getLinearRegression(400, rand), true);
for (DataPointPair<Double> dpp : FixedProblems.getLinearRegression(100, rand).getAsDPPList())
{
double truth = dpp.getPair();
double pred = dcds.regress(dpp.getDataPoint());
double relErr = (truth - pred) / truth;
            assertEquals(0.0, relErr, 0.1);//give it decent wiggle room b/c of regularization
}
}
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("train");
Random rand = RandomUtil.getRandom();
DCDs dcds = new DCDs();
dcds.train(FixedProblems.getLinearRegression(400, rand));
for (DataPointPair<Double> dpp : FixedProblems.getLinearRegression(100, rand).getAsDPPList())
{
double truth = dpp.getPair();
double pred = dcds.regress(dpp.getDataPoint());
double relErr = (truth - pred) / truth;
            assertEquals(0.0, relErr, 0.1);//give it decent wiggle room b/c of regularization
}
}
@Test()
public void testTrainWarmC()
{
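        //Warm-start check: fit a model at the default C, reuse its solution when training
        //at a larger C, and require a real wall-clock speedup over training cold.
        //Minimal usage sketch (only APIs already exercised in this test):
        //    DCDs base = new DCDs();  base.train(data);
        //    DCDs next = new DCDs();  next.setC(1e1);
        //    next.train(data, base);  //base's solution seeds the optimization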
ClassificationDataSet train = FixedProblems.getHalfCircles(10000, RandomUtil.getRandom(), 0.1, 0.5);
DCDs warmModel = new DCDs();
warmModel.train(train);
warmModel.setC(1);
long start, end;
DCDs notWarm = new DCDs();
notWarm.setC(1e1);
start = System.currentTimeMillis();
notWarm.train(train);
end = System.currentTimeMillis();
long normTime = (end-start);
DCDs warm = new DCDs();
warm.setC(1e1);
start = System.currentTimeMillis();
warm.train(train, warmModel);
end = System.currentTimeMillis();
long warmTime = (end-start);
assertTrue(warmTime < normTime*0.80);
}
@Test()
public void testTrainWarR()
{
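        //same warm-start speedup check as above, but through the regression training path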
RegressionDataSet train = FixedProblems.getSimpleRegression1(4000, RandomUtil.getRandom());
double eps = train.getTargetValues().mean()/0.9;
DCDs warmModel = new DCDs();
warmModel.setEps(eps);
warmModel.train(train);
DCDs warm = new DCDs();
warm.setEps(eps);
        warm.setC(1e1);//too large to train efficiently the normal way
long start, end;
start = System.currentTimeMillis();
warm.train(train, warmModel);
end = System.currentTimeMillis();
long warmTime = (end-start);
DCDs notWarm = new DCDs();
notWarm.setEps(eps);
        notWarm.setC(1e1);//too large to train efficiently the normal way
start = System.currentTimeMillis();
notWarm.train(train);
end = System.currentTimeMillis();
long normTime = (end-start);
assertTrue(warmTime < normTime*0.80);
}
}
| 5,363 | 26.649485 | 108 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/DCSVMTest.java | package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class DCSVMTest
{
public DCSVMTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(600, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
DCSVM classifier = new DCSVM(new RBFKernel(0.5));
classifier.setCacheMode(cacheMode);
classifier.setC(10);
classifier.setClusterSampleSize(200);//make smaller to test sub-sampling
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
DCSVM classifier = new DCSVM(new RBFKernel(0.5));
classifier.setCacheMode(cacheMode);
classifier.setC(10);
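            //end level 0 should push the divide-and-conquer recursion all the way down to one exact final solve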
classifier.setEndLevel(0);
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(600, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
DCSVM classifier = new DCSVM(new RBFKernel(0.5));
classifier.setCacheMode(cacheMode);
classifier.setC(10);
classifier.setClusterSampleSize(200);//make smaller to test sub-sampling
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
DCSVM classifier = new DCSVM(new RBFKernel(0.5));
classifier.setCacheMode(cacheMode);
classifier.setC(10);
classifier.setEndLevel(0);
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
}
| 3,915 | 31.363636 | 121 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/LSSVMTest.java | package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.distributions.kernels.RBFKernel;
import jsat.regression.RegressionDataSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class LSSVMTest
{
public LSSVMTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class LSSVM.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
LSSVM classifier = new LSSVM(new RBFKernel(0.5), cacheMode);
classifier.setCacheMode(cacheMode);
classifier.setC(1);
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
/**
* Test of train method, of class LSSVM.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
LSSVM classifier = new LSSVM(new RBFKernel(0.5), cacheMode);
classifier.setCacheMode(cacheMode);
classifier.setC(1);
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
/**
* Test of train method, of class LSSVM.
*/
@Test
public void testTrain_RegressionDataSet_ExecutorService()
{
System.out.println("train");
RegressionDataSet trainSet = FixedProblems.getSimpleRegression1(150, new Random(2));
RegressionDataSet testSet = FixedProblems.getSimpleRegression1(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
LSSVM lssvm = new LSSVM(new RBFKernel(0.5), cacheMode);
lssvm.setCacheMode(cacheMode);
lssvm.setC(1);
lssvm.train(trainSet, true);
double errors = 0;
for (int i = 0; i < testSet.size(); i++)
errors += Math.pow(testSet.getTargetValue(i) - lssvm.regress(testSet.getDataPoint(i)), 2);
assertTrue(errors / testSet.size() < 1);
}
}
/**
* Test of train method, of class LSSVM.
*/
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("train");
RegressionDataSet trainSet = FixedProblems.getSimpleRegression1(150, new Random(2));
RegressionDataSet testSet = FixedProblems.getSimpleRegression1(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
LSSVM lssvm = new LSSVM(new RBFKernel(0.5), cacheMode);
lssvm.setCacheMode(cacheMode);
lssvm.setC(1);
lssvm.train(trainSet);
double errors = 0;
for (int i = 0; i < testSet.size(); i++)
errors += Math.pow(testSet.getTargetValue(i) - lssvm.regress(testSet.getDataPoint(i)), 2);
assertTrue(errors / testSet.size() < 1);
}
}
@Test()
public void testTrainWarmC()
{
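        //here the bar is looser: the warm-started run just must not be much slower than a cold start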
ClassificationDataSet train = FixedProblems.getHalfCircles(100, RandomUtil.getRandom(), 0.1, 0.2);
LSSVM warmModel = new LSSVM();
warmModel.setC(1);
warmModel.setCacheMode(SupportVectorLearner.CacheMode.FULL);
warmModel.train(train);
LSSVM warm = new LSSVM();
warm.setC(2e1);
warm.setCacheMode(SupportVectorLearner.CacheMode.FULL);
long start, end;
start = System.currentTimeMillis();
warm.train(train, warmModel);
end = System.currentTimeMillis();
long warmTime = (end-start);
LSSVM notWarm = new LSSVM();
notWarm.setC(2e1);
notWarm.setCacheMode(SupportVectorLearner.CacheMode.FULL);
start = System.currentTimeMillis();
notWarm.train(train);
end = System.currentTimeMillis();
long normTime = (end-start);
assertTrue("Warm start was slower? "+warmTime + " vs " + normTime, warmTime < normTime*1.35);
}
@Test()
public void testTrainWarmR()
{
RegressionDataSet train = FixedProblems.getSimpleRegression1(75, RandomUtil.getRandom());
LSSVM warmModel = new LSSVM();
warmModel.setC(1);
warmModel.setCacheMode(SupportVectorLearner.CacheMode.FULL);
warmModel.train(train);
LSSVM warm = new LSSVM();
warm.setC(1e1);
warm.setCacheMode(SupportVectorLearner.CacheMode.FULL);
long start, end;
start = System.currentTimeMillis();
warm.train(train, warmModel);
end = System.currentTimeMillis();
long warmTime = (end-start);
LSSVM notWarm = new LSSVM();
notWarm.setC(1e1);
notWarm.setCacheMode(SupportVectorLearner.CacheMode.FULL);
start = System.currentTimeMillis();
notWarm.train(train);
end = System.currentTimeMillis();
long normTime = (end-start);
assertTrue("Warm start was slower? "+warmTime + " vs " + normTime, warmTime < normTime*1.35);
}
}
| 6,663 | 29.429224 | 121 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/PegasosKTest.java |
package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class PegasosKTest
{
public PegasosKTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for(SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
PegasosK classifier = new PegasosK(1e-6, trainSet.size(), new RBFKernel(0.5), cacheMode);
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for(SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
PegasosK classifier = new PegasosK(1e-6, trainSet.size(), new RBFKernel(0.5), cacheMode);
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
}
| 2,314 | 26.235294 | 121 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/PegasosTest.java | package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.Classifier;
import jsat.classifiers.DataPointPair;
import jsat.datatransform.DataModelPipeline;
import jsat.datatransform.PNormNormalization;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class PegasosTest
{
public PegasosTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of train method, of class Pegasos.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet train = FixedProblems.get2ClassLinear(2000, RandomUtil.getRandom());
for(boolean parallel : new boolean[]{true, false})
{
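            //the pipeline fits the p-norm normalization on the training data and re-applies it at classify time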
Classifier instance = new DataModelPipeline(new Pegasos(), new PNormNormalization());
instance.train(train, parallel);
ClassificationDataSet test = FixedProblems.get2ClassLinear(200, RandomUtil.getRandom());
for(DataPointPair<Integer> dpp : test.getAsDPPList())
assertEquals(dpp.getPair().longValue(), instance.classify(dpp.getDataPoint()).mostLikely());
}
}
}
| 1,869 | 22.974359 | 108 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/PlattSMOTest.java | package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPoint;
import jsat.datatransform.DataTransform;
import jsat.distributions.kernels.LinearKernel;
import jsat.distributions.kernels.RBFKernel;
import jsat.regression.RegressionDataSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class PlattSMOTest
{
public PlattSMOTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (boolean modification1 : new boolean[] {true, false})
for(SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
PlattSMO classifier = new PlattSMO(new RBFKernel(0.5));
classifier.setCacheMode(cacheMode);
classifier.setC(10);
classifier.setModificationOne(modification1);
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (boolean modification1 : new boolean[] {true, false})
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
PlattSMO classifier = new PlattSMO(new RBFKernel(0.5));
classifier.setCacheMode(cacheMode);
classifier.setC(10);
classifier.setModificationOne(modification1);
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
/**
* Test of train method, of class PlattSMO.
*/
@Test
public void testTrain_RegressionDataSet_ExecutorService()
{
System.out.println("train");
RegressionDataSet trainSet = FixedProblems.getSimpleRegression1(150, new Random(2));
RegressionDataSet testSet = FixedProblems.getSimpleRegression1(50, new Random(3));
for (boolean modification1 : new boolean[] {true, false})
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
PlattSMO smo = new PlattSMO(new RBFKernel(0.5));
smo.setCacheMode(cacheMode);
smo.setC(1);
smo.setEpsilon(0.1);
smo.setModificationOne(modification1);
smo.train(trainSet, true);
double errors = 0;
for (int i = 0; i < testSet.size(); i++)
errors += Math.pow(testSet.getTargetValue(i) - smo.regress(testSet.getDataPoint(i)), 2);
assertTrue(errors/testSet.size() < 1);
}
}
/**
* Test of train method, of class PlattSMO.
*/
@Test
public void testTrain_RegressionDataSet()
{
System.out.println("train");
RegressionDataSet trainSet = FixedProblems.getSimpleRegression1(150, new Random(2));
RegressionDataSet testSet = FixedProblems.getSimpleRegression1(50, new Random(3));
for (boolean modification1 : new boolean[] {true, false})
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
PlattSMO smo = new PlattSMO(new RBFKernel(0.5));
smo.setCacheMode(cacheMode);
smo.setC(1);
smo.setEpsilon(0.1);
smo.setModificationOne(modification1);
smo.train(trainSet);
double errors = 0;
for (int i = 0; i < testSet.size(); i++)
errors += Math.pow(testSet.getTargetValue(i) - smo.regress(testSet.getDataPoint(i)), 2);
assertTrue(errors/testSet.size() < 1);
}
}
@Test()
public void testTrainWarmCFastSMO()
{
//problem needs to be non-linear to make SMO work harder
ClassificationDataSet train = FixedProblems.getHalfCircles(250, RandomUtil.getRandom(), 0.1, 0.2);
PlattSMO warmModel = new PlattSMO(new LinearKernel(1));
warmModel.setC(1);
warmModel.train(train);
PlattSMO warm = new PlattSMO(new LinearKernel(1));
        warm.setC(1e4);//too large to train efficiently the normal way
long start, end;
start = System.currentTimeMillis();
warm.train(train, warmModel);
end = System.currentTimeMillis();
long warmTime = (end-start);
PlattSMO notWarm = new PlattSMO(new LinearKernel(1));
        notWarm.setC(1e4);//too large to train efficiently the normal way
start = System.currentTimeMillis();
notWarm.train(train);
end = System.currentTimeMillis();
long normTime = (end-start);
assertTrue(warmTime < normTime*0.75);
}
@Test()
public void testTrainWarmCFastOther()
{
ClassificationDataSet train = FixedProblems.getHalfCircles(250, RandomUtil.getRandom(), 0.1, 0.2);
DCDs warmModel = new DCDs();
warmModel.setUseL1(true);
warmModel.setUseBias(true);
warmModel.train(train);
PlattSMO warm = new PlattSMO(new LinearKernel(1));
        warm.setC(1e4);//too large to train efficiently the normal way
long start, end;
start = System.currentTimeMillis();
warm.train(train, warmModel);
end = System.currentTimeMillis();
long warmTime = (end-start);
PlattSMO notWarm = new PlattSMO(new LinearKernel(1));
        notWarm.setC(1e4);//too large to train efficiently the normal way
start = System.currentTimeMillis();
notWarm.train(train);
end = System.currentTimeMillis();
long normTime = (end-start);
assertTrue(warmTime < normTime*0.75);
}
@Test()
public void testTrainWarmRFastOther()
{
RegressionDataSet train = FixedProblems.getLinearRegression(1000, RandomUtil.getRandom());
double eps = train.getTargetValues().mean()/20;
DCDs warmModel = new DCDs();
warmModel.setEps(eps);
warmModel.setUseL1(true);
warmModel.setUseBias(true);
warmModel.train(train);
long start, end;
PlattSMO notWarm = new PlattSMO(new LinearKernel(1));
notWarm.setEpsilon(eps);
notWarm.setC(1e2);
start = System.currentTimeMillis();
notWarm.train(train);
end = System.currentTimeMillis();
long normTime = (end-start);
PlattSMO warm = new PlattSMO(new LinearKernel(1));
warm.setEpsilon(eps);
warm.setC(1e2);
start = System.currentTimeMillis();
warm.train(train, warmModel);
end = System.currentTimeMillis();
long warmTime = (end-start);
assertTrue(warmTime < normTime*0.75);
}
}
| 8,492 | 31.665385 | 125 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/SBPTest.java |
package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class SBPTest
{
public SBPTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for(SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
SBP classifier = new SBP(new RBFKernel(0.5), cacheMode, trainSet.size(), 0.01);
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for(SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
SBP classifier = new SBP(new RBFKernel(0.5), cacheMode, trainSet.size(), 0.01);
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
}
| 2,280 | 26.154762 | 121 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/SVMnoBiasTest.java | package jsat.classifiers.svm;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import static java.lang.Math.*;
import java.util.Arrays;
import jsat.utils.random.RandomUtil;
/**
*
* @author Edward Raff
*/
public class SVMnoBiasTest
{
public SVMnoBiasTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for(SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
SVMnoBias classifier = new SVMnoBias(new RBFKernel(0.5));
classifier.setCacheMode(cacheMode);
classifier.setC(10);
classifier.train(trainSet, true);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
ClassificationDataSet trainSet = FixedProblems.getInnerOuterCircle(150, new Random(2));
ClassificationDataSet testSet = FixedProblems.getInnerOuterCircle(50, new Random(3));
for (SupportVectorLearner.CacheMode cacheMode : SupportVectorLearner.CacheMode.values())
{
SVMnoBias classifier = new SVMnoBias(new RBFKernel(0.5));
classifier.setCacheMode(cacheMode);
classifier.setC(10);
classifier.train(trainSet);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier.classify(testSet.getDataPoint(i)).mostLikely());
//test warm start off corrupted solution
double[] a = classifier.alphas;
Random rand = RandomUtil.getRandom();
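            //nudge each alpha by up to +/-1, then clamp back into the feasible box [0, C] with C = 10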
for(int i = 0; i < a.length; i++)
a[i] = min(max(a[i]+rand.nextDouble()*2-1, 0), 10);
SVMnoBias classifier2 = new SVMnoBias(new RBFKernel(0.5));
classifier2.setCacheMode(cacheMode);
classifier2.setC(10);
classifier2.train(trainSet, a);
for (int i = 0; i < testSet.size(); i++)
assertEquals(testSet.getDataPointCategory(i), classifier2.classify(testSet.getDataPoint(i)).mostLikely());
}
}
}
| 3,372 | 30.820755 | 126 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/extended/AMMTest.java | /*
* Copyright (C) 2015 Edward Raff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.svm.extended;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class AMMTest
{
public AMMTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of getSubEpochs method, of class AMM.
*/
@Test
public void testSubEpochs()
{
System.out.println("getSubEpochs");
AMM instance = new AMM();
instance.setSubEpochs(10);
assertEquals(10, instance.getSubEpochs());
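        //sub-epochs must be a positive count, so zero and negative values should all be rejected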
for (int i = -3; i < 1; i++)
try
{
instance.setSubEpochs(i);
fail("Invalid value should have thrown an error");
}
catch (Exception ex)
{
}
}
/**
* Test of train method, of class AMM.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
        //Hard to come up with a good test problem for AMM, since it works better on higher dimensional problems
System.out.println("trainC");
AMM instance = new AMM();
ClassificationDataSet train = FixedProblems.getSimpleKClassLinear(10000, 3, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getSimpleKClassLinear(1000, 3, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertTrue(cme.getErrorRate() <= 0.001);
}
/**
* Test of clone method, of class AMM.
*/
@Test
public void testClone()
{
System.out.println("clone");
ClassificationDataSet t1 = FixedProblems.getSimpleKClassLinear(10000, 3, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getSimpleKClassLinear(10000, 6, RandomUtil.getRandom());
AMM instance = new AMM();
instance = instance.clone();
instance.train(t1);
AMM result = instance.clone();
result.train(t2);
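        //training the clone on a different problem must not alter the original model, and vice versa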
for(int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for(int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 3,568 | 26.037879 | 108 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/svm/extended/CPMTest.java | /*
* Copyright (C) 2015 Edward Raff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.svm.extended;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class CPMTest
{
public CPMTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
     * Test of train method, of class CPM.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
        //Hard to come up with a good test problem for CPM, since it works better on higher dimensional problems
System.out.println("trainC");
CPM instance = new CPM(2, 20, 1.5, 50);
ClassificationDataSet train = FixedProblems.getSimpleKClassLinear(10000, 2, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getSimpleKClassLinear(1000, 2, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertTrue(cme.getErrorRate() <= 0.001);
}
/**
     * Test of clone method, of class CPM.
*/
@Test
public void testClone()
{
System.out.println("clone");
ClassificationDataSet t1 = FixedProblems.getSimpleKClassLinear(10000, 2, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getSimpleKClassLinear(10000, 2, RandomUtil.getRandom());
CPM instance = new CPM(2, 20, 1.5, 50);
instance = instance.clone();
instance.train(t1);
CPM result = instance.clone();
result.train(t2);
for(int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for(int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 3,049 | 27.240741 | 108 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/trees/DecisionStumpTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.classifiers.trees;
import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.Executors;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import java.util.concurrent.ExecutorService;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.Classifier;
import jsat.classifiers.DataPointPair;
import jsat.datatransform.InsertMissingValuesTransform;
import jsat.datatransform.NumericalToHistogram;
import jsat.distributions.Uniform;
import jsat.regression.RegressionDataSet;
import org.junit.*;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class DecisionStumpTest
{
static private ClassificationDataSet easyNumAtTrain;
static private ClassificationDataSet easyNumAtTest;
static private ClassificationDataSet easyCatAtTrain;
static private ClassificationDataSet easyCatAtTest;
static private RegressionDataSet easyNumAtTrain_R;
static private RegressionDataSet easyNumAtTest_R;
static private RegressionDataSet easyCatAtTrain_R;
static private RegressionDataSet easyCatAtTest_R;
static private boolean parallel = true;
static private DecisionStump stump;
public DecisionStumpTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
stump = new DecisionStump();
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new Random(12), 2);
easyNumAtTrain = new ClassificationDataSet(gdg.generateData(40).getList(), 0);
easyNumAtTest = new ClassificationDataSet(gdg.generateData(40).getList(), 0);
easyCatAtTrain = new ClassificationDataSet(gdg.generateData(40).getList(), 0);
easyCatAtTest = new ClassificationDataSet(gdg.generateData(40).getList(), 0);
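        //fit the histogram bins on the categorical training copy only, then apply the same
        //binning to both train and test so their categorical encodings line up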
NumericalToHistogram nth = new NumericalToHistogram(easyCatAtTrain, 2);
easyCatAtTrain.applyTransform(nth);
easyCatAtTest.applyTransform(nth);
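        //regression versions of the same problems: the class label (0 or 1) becomes the numeric target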
easyNumAtTrain_R = new RegressionDataSet(easyNumAtTrain.getAsFloatDPPList());
easyNumAtTest_R = new RegressionDataSet(easyNumAtTest.getAsFloatDPPList());
easyCatAtTrain_R = new RegressionDataSet(easyCatAtTrain.getAsFloatDPPList());
easyCatAtTest_R = new RegressionDataSet(easyCatAtTest.getAsFloatDPPList());
}
@After
public void tearDown() throws Exception
{
}
/**
* Test of train method, of class DecisionStump.
*/
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC(ClassificationDataSet, ExecutorService)");
stump.train(easyNumAtTrain, parallel);
for(int i = 0; i < easyNumAtTest.size(); i++)
assertEquals(easyNumAtTest.getDataPointCategory(i), stump.classify(easyNumAtTest.getDataPoint(i)).mostLikely());
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService_missing()
{
System.out.println("trainC(ClassificationDataSet, ExecutorService)");
ClassificationDataSet toTrain = easyNumAtTrain.shallowClone();
toTrain.applyTransform(new InsertMissingValuesTransform(0.25));
stump.train(toTrain, parallel);
for(int i = 0; i < easyNumAtTest.size(); i++)
assertEquals(easyNumAtTest.getDataPointCategory(i), stump.classify(easyNumAtTest.getDataPoint(i)).mostLikely());
        //test applying missing values, just make sure there is no error, since we can't predict if the only feature is missing
easyNumAtTest.applyTransform(new InsertMissingValuesTransform(0.5));
for(int i = 0; i < easyNumAtTest.size(); i++)
stump.classify(easyNumAtTest.getDataPoint(i));
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService_missing_cat()
{
System.out.println("trainC(ClassificationDataSet, ExecutorService)");
ClassificationDataSet toTrain = easyCatAtTrain.shallowClone();
toTrain.applyTransform(new InsertMissingValuesTransform(0.25));
stump.train(toTrain, parallel);
for(int i = 0; i < easyCatAtTest.size(); i++)
assertEquals(easyCatAtTest.getDataPointCategory(i), stump.classify(easyCatAtTest.getDataPoint(i)).mostLikely());
        //test applying missing values, just make sure there is no error, since we can't predict if the only feature is missing
easyCatAtTest.applyTransform(new InsertMissingValuesTransform(0.5));
for(int i = 0; i < easyCatAtTest.size(); i++)
stump.classify(easyCatAtTest.getDataPoint(i));
}
/**
* Test of train method, of class DecisionStump.
*/
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC(ClassificationDataSet)");
stump.train(easyNumAtTrain);
for(int i = 0; i < easyNumAtTest.size(); i++)
assertEquals(easyNumAtTest.getDataPointCategory(i), stump.classify(easyNumAtTest.getDataPoint(i)).mostLikely());
}
@Test
public void testTrainC_RegressionDataSet_ExecutorService_missing()
{
System.out.println("trainR(RegressionDataSet, ExecutorService)");
RegressionDataSet toTrain = easyNumAtTrain_R.shallowClone();
toTrain.applyTransform(new InsertMissingValuesTransform(0.25));
stump.train(toTrain, parallel);
for(int i = 0; i < easyNumAtTest_R.size(); i++)
assertEquals(easyNumAtTest_R.getTargetValue(i), stump.regress(easyNumAtTest_R.getDataPoint(i)), 0.2);
        //test applying missing values, just make sure there is no error, since we can't predict if the only feature is missing
easyNumAtTest_R.applyTransform(new InsertMissingValuesTransform(0.5));
for(int i = 0; i < easyNumAtTest_R.size(); i++)
stump.regress(easyNumAtTest_R.getDataPoint(i));
}
@Test
public void testTrainC_RegressionDataSet_ExecutorService_missing_cat()
{
System.out.println("trainR(RegressionDataSet, ExecutorService)");
RegressionDataSet toTrain = easyCatAtTrain_R.shallowClone();
toTrain.applyTransform(new InsertMissingValuesTransform(0.25));
stump.train(toTrain, parallel);
for(int i = 0; i < easyCatAtTest_R.size(); i++)
assertEquals(easyCatAtTest_R.getTargetValue(i), stump.regress(easyCatAtTest_R.getDataPoint(i)), 0.2);
        //test applying missing values, just make sure there is no error, since we can't predict if the only feature is missing
easyCatAtTest_R.applyTransform(new InsertMissingValuesTransform(0.5));
for(int i = 0; i < easyCatAtTest_R.size(); i++)
stump.regress(easyCatAtTest_R.getDataPoint(i));
}
/**
* Test of train method, of class DecisionStump.
*/
@Test
public void testTrainC_List_Set()
{
System.out.println("trainC(List<DataPointPair>, Set<integer>)");
stump.setPredicting(easyNumAtTrain.getPredicting());
stump.trainC(easyNumAtTrain, new IntSet(Arrays.asList(0)));
for(int i = 0; i < easyNumAtTest.size(); i++)
assertEquals(easyNumAtTest.getDataPointCategory(i), stump.classify(easyNumAtTest.getDataPoint(i)).mostLikely());
}
/**
* Test of supportsWeightedData method, of class DecisionStump.
*/
@Test
public void testSupportsWeightedData()
{
System.out.println("supportsWeightedData");
assertTrue(stump.supportsWeightedData());
}
/**
* Test of clone method, of class DecisionStump.
*/
@Test
public void testClone()
{
System.out.println("clone");
Classifier clone = stump.clone();
clone.train(easyNumAtTrain);
for(int i = 0; i < easyNumAtTest.size(); i++)
assertEquals(easyNumAtTest.getDataPointCategory(i), clone.classify(easyNumAtTest.getDataPoint(i)).mostLikely());
try
{
stump.classify(easyNumAtTest.getDataPoint(0));
fail("Stump should not have been trained");
}
catch(Exception ex )
{
}
clone = null;
stump.train(easyNumAtTrain);
clone = stump.clone();
for(int i = 0; i < easyNumAtTest.size(); i++)
assertEquals(easyNumAtTest.getDataPointCategory(i), clone.classify(easyNumAtTest.getDataPoint(i)).mostLikely());
}
@Test
public void testInfoGainSplit()
{
System.out.println("testInfoGainSplit");
DecisionStump instance = new DecisionStump();
instance.setGainMethod(ImpurityScore.ImpurityMeasure.INFORMATION_GAIN);
instance.train(easyCatAtTrain);
for(DataPointPair<Integer> dpp : easyCatAtTest.getAsDPPList())
assertEquals(dpp.getPair().longValue(),
instance.classify(dpp.getDataPoint()).mostLikely());
instance = new DecisionStump();
instance.setGainMethod(ImpurityScore.ImpurityMeasure.INFORMATION_GAIN);
instance.train(easyNumAtTrain);
for(DataPointPair<Integer> dpp : easyNumAtTest.getAsDPPList())
assertEquals(dpp.getPair().longValue(),
instance.classify(dpp.getDataPoint()).mostLikely());
}
@Test
public void testInfoGainRatioSplit()
{
System.out.println("testInfoGainRatioSplit");
DecisionStump instance = new DecisionStump();
instance.setGainMethod(ImpurityScore.ImpurityMeasure.INFORMATION_GAIN_RATIO);
instance.train(easyCatAtTrain);
for(DataPointPair<Integer> dpp : easyCatAtTest.getAsDPPList())
assertEquals(dpp.getPair().longValue(),
instance.classify(dpp.getDataPoint()).mostLikely());
instance = new DecisionStump();
instance.setGainMethod(ImpurityScore.ImpurityMeasure.INFORMATION_GAIN_RATIO);
instance.train(easyNumAtTrain);
for(DataPointPair<Integer> dpp : easyNumAtTest.getAsDPPList())
assertEquals(dpp.getPair().longValue(),
instance.classify(dpp.getDataPoint()).mostLikely());
}
@Test
public void testGiniSplit()
{
System.out.println("testGiniSplit");
DecisionStump instance = new DecisionStump();
instance.setGainMethod(ImpurityScore.ImpurityMeasure.GINI);
instance.train(easyCatAtTrain);
for(DataPointPair<Integer> dpp : easyCatAtTest.getAsDPPList())
assertEquals(dpp.getPair().longValue(),
instance.classify(dpp.getDataPoint()).mostLikely());
instance = new DecisionStump();
instance.setGainMethod(ImpurityScore.ImpurityMeasure.GINI);
instance.train(easyNumAtTrain);
for(DataPointPair<Integer> dpp : easyNumAtTest.getAsDPPList())
assertEquals(dpp.getPair().longValue(),
instance.classify(dpp.getDataPoint()).mostLikely());
}
@Test
public void testNumericCBinary()
{
System.out.println("testNumericCBinary");
DecisionStump instance = new DecisionStump();
instance.train(easyNumAtTrain);
for(DataPointPair<Integer> dpp : easyNumAtTest.getAsDPPList())
assertEquals(dpp.getPair().longValue(),
instance.classify(dpp.getDataPoint()).mostLikely());
}
}
| 11,647 | 36.695793 | 124 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/trees/DecisionTreeTest.java | /*
* Copyright (C) 2015 Edward Raff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.trees;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.TestTools;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.datatransform.DataTransformProcess;
import jsat.datatransform.InsertMissingValuesTransform;
import jsat.datatransform.NumericalToHistogram;
import jsat.regression.RegressionDataSet;
import jsat.regression.RegressionModelEvaluation;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class DecisionTreeTest
{
public DecisionTreeTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_RegressionDataSet()
{
System.out.println("train");
for (TreePruner.PruningMethod pruneMethod : TreePruner.PruningMethod.values())
for (ImpurityScore.ImpurityMeasure gainMethod : ImpurityScore.ImpurityMeasure.values())
for(boolean useCatFeatures : new boolean[]{true, false})
{
DecisionTree instance = new DecisionTree();
instance.setGainMethod(gainMethod);
instance.setTestProportion(0.3);
instance.setPruningMethod(pruneMethod);
RegressionDataSet train = FixedProblems.getLinearRegression(3000, RandomUtil.getRandom());
RegressionDataSet test = FixedProblems.getLinearRegression(100, RandomUtil.getRandom());
RegressionModelEvaluation rme = new RegressionModelEvaluation(instance, train);
if(useCatFeatures)
rme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram(10)));
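                    //with discretized features the tree loses resolution, so (presumably as a
                    //weaker sanity check) evaluation is done against the training data instead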
if(useCatFeatures)
rme.evaluateTestSet(train);
else
rme.evaluateTestSet(test);
assertTrue(rme.getMeanError() <= test.getTargetValues().mean()*3);
}
}
@Test
public void testTrainC_RegressionDataSet_ExecutorService()
{
System.out.println("train");
for (TreePruner.PruningMethod pruneMethod : TreePruner.PruningMethod.values())
for (ImpurityScore.ImpurityMeasure gainMethod : ImpurityScore.ImpurityMeasure.values())
for (boolean useCatFeatures : new boolean[]{true, false})
{
DecisionTree instance = new DecisionTree();
instance.setGainMethod(gainMethod);
instance.setTestProportion(0.3);
instance.setPruningMethod(pruneMethod);
RegressionDataSet train = FixedProblems.getLinearRegression(3000, RandomUtil.getRandom());
RegressionDataSet test = FixedProblems.getLinearRegression(100, RandomUtil.getRandom());
RegressionModelEvaluation rme = new RegressionModelEvaluation(instance, train, true);
if (useCatFeatures)
rme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram(10)));
if(useCatFeatures)
rme.evaluateTestSet(train);
else
rme.evaluateTestSet(test);
assertTrue(rme.getMeanError() <= test.getTargetValues().mean() * 3);
}
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for (TreePruner.PruningMethod pruneMethod : TreePruner.PruningMethod.values())
for (ImpurityScore.ImpurityMeasure gainMethod : ImpurityScore.ImpurityMeasure.values())
for(boolean useCatFeatures : new boolean[]{true, false})
{
DecisionTree instance = new DecisionTree();
instance.setGainMethod(gainMethod);
instance.setTestProportion(0.3);
instance.setPruningMethod(pruneMethod);
int attempts = 3;
do
{
ClassificationDataSet train = FixedProblems.getCircles(5000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(200, 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram(50)));
cme.evaluateTestSet(test);
if(cme.getErrorRate() < 0.075)
break;
}
while(attempts-- > 0);
assertTrue(attempts > 0);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for (TreePruner.PruningMethod pruneMethod : TreePruner.PruningMethod.values())
for (ImpurityScore.ImpurityMeasure gainMethod : ImpurityScore.ImpurityMeasure.values())
for(boolean useCatFeatures : new boolean[]{true, false})
{
DecisionTree instance = new DecisionTree();
instance.setGainMethod(gainMethod);
instance.setTestProportion(0.3);
instance.setPruningMethod(pruneMethod);
int attempts = 3;
do
{
ClassificationDataSet train = FixedProblems.getCircles(5000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(200, 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram(50)));
cme.evaluateTestSet(test);
if(cme.getErrorRate() < 0.075)
break;
}
while(attempts-- > 0);
assertTrue(attempts > 0);
}
}
@Test
public void testTrainC_ClassificationDataSet_missing()
{
System.out.println("trainC");
for (TreePruner.PruningMethod pruneMethod : TreePruner.PruningMethod.values())
for (ImpurityScore.ImpurityMeasure gainMethod : ImpurityScore.ImpurityMeasure.values())
for(boolean useCatFeatures : new boolean[]{true, false})
{
DecisionTree instance = new DecisionTree();
instance.setGainMethod(gainMethod);
instance.setTestProportion(0.3);
instance.setPruningMethod(pruneMethod);
int attempts = 3;
do
{
ClassificationDataSet train = FixedProblems.getCircles(5000, 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(200, 1.0, 10.0, 100.0);
train.applyTransform(new InsertMissingValuesTransform(0.01));
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram(50)));
cme.evaluateTestSet(test);
if(cme.getErrorRate() < 0.25)
break;
instance.train(train);
test.applyTransform(new InsertMissingValuesTransform(0.5));
for(int i = 0; i < test.size(); i++)
instance.classify(test.getDataPoint(i));
}
while(attempts-- > 0);
assertTrue(attempts > 0);
}
}
@Test
public void testTrain_RegressionDataSet_missing()
{
System.out.println("train");
for (TreePruner.PruningMethod pruneMethod : TreePruner.PruningMethod.values())
for (ImpurityScore.ImpurityMeasure gainMethod : ImpurityScore.ImpurityMeasure.values())
for(boolean useCatFeatures : new boolean[]{true, false})
{
DecisionTree instance = new DecisionTree();
instance.setGainMethod(gainMethod);
instance.setTestProportion(0.3);
instance.setPruningMethod(pruneMethod);
RegressionDataSet train = FixedProblems.getLinearRegression(3000, RandomUtil.getRandom());
RegressionDataSet test = FixedProblems.getLinearRegression(100, RandomUtil.getRandom());
train.applyTransform(new InsertMissingValuesTransform(0.01));
RegressionModelEvaluation rme = new RegressionModelEvaluation(instance, train);
if(useCatFeatures)
rme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram(10)));
if(useCatFeatures)
rme.evaluateTestSet(train);
else
rme.evaluateTestSet(test);
assertTrue(rme.getMeanError() <= test.getTargetValues().mean()*3);
instance.train(train);
test.applyTransform(new InsertMissingValuesTransform(0.5));
for(int i = 0; i < test.size(); i++)
instance.regress(test.getDataPoint(i));
}
}
@Test
public void testClone()
{
System.out.println("clone");
for(boolean useCatFeatures : new boolean[]{true, false})
{
DecisionTree instance = new DecisionTree();
ClassificationDataSet t1 = FixedProblems.getSimpleKClassLinear(1000, 3);
ClassificationDataSet t2 = FixedProblems.getSimpleKClassLinear(1000, 2);
if(useCatFeatures)
{
t1.applyTransform(new NumericalToHistogram(t1));
t2.applyTransform(new NumericalToHistogram(t2));
}
instance = instance.clone();
instance = TestTools.deepCopy(instance);
instance.train(t1);
DecisionTree result = instance.clone();
int errors = 0;
for(int i = 0; i < t1.size(); i++)
errors += Math.abs(t1.getDataPointCategory(i) - result.classify(t1.getDataPoint(i)).mostLikely());
assertTrue(errors < 100);
result.train(t2);
errors = 0;
for(int i = 0; i < t1.size(); i++)
errors += Math.abs(t1.getDataPointCategory(i) - instance.classify(t1.getDataPoint(i)).mostLikely());
assertTrue(errors < 100);
errors = 0;
for(int i = 0; i < t2.size(); i++)
errors += Math.abs(t2.getDataPointCategory(i) - result.classify(t2.getDataPoint(i)).mostLikely());
assertTrue(errors < 100);
}
}
}
| 12,672 | 38.603125 | 117 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/trees/ERTreesTest.java | /*
* Copyright (C) 2015 Edward Raff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.trees;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.datatransform.DataTransformProcess;
import jsat.datatransform.NumericalToHistogram;
import jsat.regression.RegressionDataSet;
import jsat.regression.RegressionModelEvaluation;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ERTreesTest
{
public ERTreesTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(int i = 0; i < 3; i++)
{
boolean useCatFeatures = i < 2;
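            //i == 0: categorical features with multi-way splits, i == 1: categorical
            //features with binary splits, i == 2: plain numeric features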
ERTrees instance = new ERTrees();
instance.setBinaryCategoricalSplitting(i == 1);
ClassificationDataSet train = FixedProblems.getCircles(10000, RandomUtil.getRandom(), 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(1000, RandomUtil.getRandom(), 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
cme.evaluateTestSet(test);
assertTrue(cme.getErrorRate() <= 0.001);
}
}
@Test
public void testTrainC_RegressionDataSet()
{
System.out.println("train");
for(int i = 0; i < 3; i++)
{
boolean useCatFeatures = i < 2;
ERTrees instance = new ERTrees();
instance.setBinaryCategoricalSplitting(i == 1);
RegressionDataSet train = FixedProblems.getLinearRegression(1000, RandomUtil.getRandom());
RegressionDataSet test = FixedProblems.getLinearRegression(100, RandomUtil.getRandom());
RegressionModelEvaluation cme = new RegressionModelEvaluation(instance, train);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
cme.evaluateTestSet(test);
assertTrue(cme.getMeanError() <= test.getTargetValues().mean()*2.5);
}
}
@Test
public void testTrainC_RegressionDataSet_ExecutorService()
{
System.out.println("train");
for(int i = 0; i < 3; i++)
{
boolean useCatFeatures = i < 2;
ERTrees instance = new ERTrees();
instance.setBinaryCategoricalSplitting(i == 1);
RegressionDataSet train = FixedProblems.getLinearRegression(1000, RandomUtil.getRandom());
RegressionDataSet test = FixedProblems.getLinearRegression(100, RandomUtil.getRandom());
RegressionModelEvaluation cme = new RegressionModelEvaluation(instance, train, true);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
cme.evaluateTestSet(test);
assertTrue(cme.getMeanError() <= test.getTargetValues().mean()*2.5);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(int i = 0; i < 3; i++)
{
boolean useCatFeatures = i < 2;
ERTrees instance = new ERTrees();
instance.setBinaryCategoricalSplitting(i == 1);
ClassificationDataSet train = FixedProblems.getCircles(10000, RandomUtil.getRandom(), 1.0, 10.0, 100.0);
ClassificationDataSet test = FixedProblems.getCircles(1000, RandomUtil.getRandom(), 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
cme.evaluateTestSet(test);
assertTrue(cme.getErrorRate() <= 0.001);
}
}
@Test
public void testClone()
{
System.out.println("clone");
for(int k = 0; k < 3; k++)
{
boolean useCatFeatures = k < 2;
ERTrees instance = new ERTrees();
instance.setBinaryCategoricalSplitting(k == 1);
ClassificationDataSet t1 = FixedProblems.getSimpleKClassLinear(1000, 3, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getSimpleKClassLinear(1000, 6, RandomUtil.getRandom());
if(useCatFeatures)
{
t1.applyTransform(new NumericalToHistogram(t1));
t2.applyTransform(new NumericalToHistogram(t2));
}
instance = instance.clone();
instance.train(t1);
ERTrees result = instance.clone();
for(int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for(int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for(int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
}
| 6,619 | 32.434343 | 117 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/trees/ImportanceByUsesTest.java | /*
* Copyright (C) 2016 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.trees;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPoint;
import jsat.datatransform.NumericalToHistogram;
import jsat.linear.ConcatenatedVec;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class ImportanceByUsesTest
{
public ImportanceByUsesTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of getImportanceStats method, of class ImportanceByUses.
*/
@Test
public void testGetImportanceStats()
{
System.out.println("getImportanceStats");
for(boolean weightByDepth : new boolean[]{true, false})
{
ImportanceByUses instance = new ImportanceByUses(weightByDepth);
int randomFeatures = 30;
//make the circles close to force tree to do lots of splits / make it harder
ClassificationDataSet train = FixedProblems.getCircles(10000, RandomUtil.getRandom(), 1.0, 1.35);
int good_featres = train.getNumNumericalVars();
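            //append 30 random noise dimensions to the informative ones; a sane importance
            //measure must rank every real feature above every noise feature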
ClassificationDataSet train_noise = new ClassificationDataSet(train.getNumNumericalVars()+randomFeatures, train.getCategories(), train.getPredicting());
for(int i = 0; i < train.size(); i++)
{
DataPoint dp = train.getDataPoint(i);
Vec n = dp.getNumericalValues();
train_noise.addDataPoint(new ConcatenatedVec(n, DenseVector.random(randomFeatures)), train.getDataPointCategory(i));
}
DecisionTree tree = new DecisionTree();
tree.setPruningMethod(TreePruner.PruningMethod.NONE);
tree.train(train_noise);
double[] importances = instance.getImportanceStats(tree, train_noise);
            //make sure the first 2 features were inferred as more important than the others!
for(int i = good_featres; i < importances.length; i++)
{
for(int j = 0; j < good_featres; j++)
assertTrue(importances[j] > importances[i]);
}
//categorical features, make space wider b/c we lose resolution
train = FixedProblems.getCircles(10000, RandomUtil.getRandom(), 1.0, 1.5);
// train.applyTransformMutate(new PCA(train, 2, 0));
good_featres = train.getNumNumericalVars();
train_noise = new ClassificationDataSet(train.getNumNumericalVars()+randomFeatures, train.getCategories(), train.getPredicting());
for(int i = 0; i < train.size(); i++)
{
DataPoint dp = train.getDataPoint(i);
Vec n = dp.getNumericalValues();
train_noise.addDataPoint(new ConcatenatedVec(n, DenseVector.random(randomFeatures)), train.getDataPointCategory(i));
}
train_noise.applyTransform(new NumericalToHistogram(train_noise));
tree = new DecisionTree();
tree.setPruningMethod(TreePruner.PruningMethod.NONE);
tree.train(train_noise);
importances = instance.getImportanceStats(tree, train_noise);
            //make sure the first 2 features were inferred as more important than the others!
for(int i = good_featres; i < importances.length; i++)
{
for(int j = 0; j < good_featres; j++)
                    if(importances[j] == 0)//sometimes it happens b/c we can separate on just the first var when discretized
assertTrue(importances[j] >= importances[i]);
else
assertTrue(importances[j] > importances[i]);
}
}
}
}
| 4,903 | 34.028571 | 164 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/trees/MDATest.java | /*
* Copyright (C) 2016 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.trees;
import java.util.Random;
import jsat.FixedProblems;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPoint;
import jsat.datatransform.NumericalToHistogram;
import jsat.linear.ConcatenatedVec;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.regression.RegressionDataSet;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class MDATest
{
public MDATest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
public static ClassificationDataSet getHarderC(int datums, Random rand)
{
ClassificationDataSet cds = new ClassificationDataSet(20, new CategoricalData[0], new CategoricalData(2));
for(int i = 0; i < datums; i++)
{
double x = (rand.nextDouble()-0.5)*10;
double y = (rand.nextDouble()-0.5)*10;
double score = 10*x*y + x*x-y*y;
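            //the label depends only on x and y through this noisy quadratic surface;
            //the other 18 dimensions are pure noise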
Vec n = DenseVector.random(20);
n.set(0, x);
n.set(1, y);
cds.addDataPoint(n, score + (rand.nextDouble()-0.5)*20 > 0 ? 0 : 1);
}
return cds;
}
public static RegressionDataSet getHarderR(int datums, Random rand)
{
RegressionDataSet rds = new RegressionDataSet(20, new CategoricalData[0]);
for(int i = 0; i < datums; i++)
{
double x = (rand.nextDouble()-0.5)*10;
double y = (rand.nextDouble()-0.5)*10;
double score = 10*x*y + x*x-y*y;
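            //same quadratic surface as getHarderC, kept as a real-valued target for regression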
Vec n = DenseVector.random(20);
n.set(0, x);
n.set(1, y);
rds.addDataPoint(n, score + (rand.nextDouble()-0.5)*20);
}
return rds;
}
/**
* Test of getImportanceStats method, of class MDA.
*/
@Test
public void testGetImportanceStats()
{
System.out.println("getImportanceStats");
MDA instance = new MDA();
        //use the harder quadratic problem to force the tree to make lots of splits
ClassificationDataSet train = getHarderC(10000, RandomUtil.getRandom());
int good_featres = 2;
DecisionTree tree = new DecisionTree();
tree.setPruningMethod(TreePruner.PruningMethod.NONE);
tree.train(train);
double[] importances = instance.getImportanceStats(tree, train);
        //make sure the first 2 features were inferred as more important than the others!
for(int i = good_featres; i < importances.length; i++)
{
for(int j = 0; j < good_featres; j++)
assertTrue(importances[j] > importances[i]);
}
//categorical features, make space wider b/c we lose resolution
train = getHarderC(10000, RandomUtil.getRandom());
train.applyTransform(new NumericalToHistogram(train, 7));
tree = new DecisionTree();
tree.setPruningMethod(TreePruner.PruningMethod.NONE);
tree.train(train);
importances = instance.getImportanceStats(tree, train);
        //make sure the first 2 features were inferred as more important than the others!
for(int i = good_featres; i < importances.length; i++)
{
for(int j = 0; j < good_featres; j++)
assertTrue(importances[j] > importances[i]);
}
}
@Test
public void testGetImportanceStatsR()
{
System.out.println("getImportanceStatsR");
MDA instance = new MDA();
        //use the harder quadratic problem to force the tree to make lots of splits
RegressionDataSet train = getHarderR(10000, RandomUtil.getRandom());
int good_featres = 2;
DecisionTree tree = new DecisionTree();
tree.setPruningMethod(TreePruner.PruningMethod.NONE);
tree.train(train);
double[] importances = instance.getImportanceStats(tree, train);
        //make sure the first 2 features were inferred as more important than the others!
for(int i = good_featres; i < importances.length; i++)
{
for(int j = 0; j < good_featres; j++)
assertTrue(importances[j] > importances[i]);
}
        //categorical feature version: discretizing to histogram bins costs some resolution
train = getHarderR(10000, RandomUtil.getRandom());
train.applyTransform(new NumericalToHistogram(train, 7));
tree = new DecisionTree();
tree.setPruningMethod(TreePruner.PruningMethod.NONE);
tree.train(train);
importances = instance.getImportanceStats(tree, train);
        //make sure the first 2 features were inferred as more important than the others!
for(int i = good_featres; i < importances.length; i++)
{
for(int j = 0; j < good_featres; j++)
assertTrue(importances[j] > importances[i]);
}
}
}
| 6,271 | 29.745098 | 114 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/trees/MDITest.java | /*
* Copyright (C) 2016 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.trees;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.DataPoint;
import jsat.datatransform.NumericalToHistogram;
import jsat.linear.ConcatenatedVec;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class MDITest
{
public MDITest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of getImportanceStats method, of class ImportanceByUses.
*/
@Test
public void testGetImportanceStats()
{
System.out.println("getImportanceStats");
for(ImpurityScore.ImpurityMeasure im : ImpurityScore.ImpurityMeasure.values())
{
MDI instance = new MDI(im);
int randomFeatures = 30;
//make the circles close to force tree to do lots of splits / make it harder
ClassificationDataSet train = FixedProblems.getCircles(10000, RandomUtil.getRandom(), 1.0, 1.35);
int good_featres = train.getNumNumericalVars();
ClassificationDataSet train_noise = new ClassificationDataSet(train.getNumNumericalVars()+randomFeatures, train.getCategories(), train.getPredicting());
for(int i = 0; i < train.size(); i++)
{
DataPoint dp = train.getDataPoint(i);
Vec n = dp.getNumericalValues();
train_noise.addDataPoint(new ConcatenatedVec(n, DenseVector.random(randomFeatures)), train.getDataPointCategory(i));
}
DecisionTree tree = new DecisionTree();
tree.setPruningMethod(TreePruner.PruningMethod.NONE);
tree.train(train_noise);
double[] importances = instance.getImportanceStats(tree, train_noise);
            //make sure the first 2 features were inferred as more important than the others!
for(int i = good_featres; i < importances.length; i++)
{
for(int j = 0; j < good_featres; j++)
assertTrue(importances[j] > importances[i]);
}
//categorical features, make space wider b/c we lose resolution
train = FixedProblems.getCircles(10000, RandomUtil.getRandom(), 1.0, 1.5);
good_featres = train.getNumNumericalVars();
train_noise = new ClassificationDataSet(train.getNumNumericalVars()+randomFeatures, train.getCategories(), train.getPredicting());
for(int i = 0; i < train.size(); i++)
{
DataPoint dp = train.getDataPoint(i);
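                //jitter the informative features slightly before binning, presumably so the
                //discretized problem is not perfectly separable on a single feature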
Vec n = dp.getNumericalValues().add(DenseVector.random(good_featres).multiply(0.3));
train_noise.addDataPoint(new ConcatenatedVec(n, DenseVector.random(randomFeatures)), train.getDataPointCategory(i));
}
train_noise.applyTransform(new NumericalToHistogram(train_noise));
tree = new DecisionTree();
tree.setPruningMethod(TreePruner.PruningMethod.NONE);
tree.train(train_noise);
importances = instance.getImportanceStats(tree, train_noise);
            //make sure the first 2 features were inferred as more important than the others!
for(int i = good_featres; i < importances.length; i++)
{
for(int j = 0; j < good_featres; j++)
assertTrue(importances[j] >= importances[i]);
}
}
}
}
| 4,628 | 33.036765 | 164 | java |
JSAT | JSAT-master/JSAT/test/jsat/classifiers/trees/RandomForestTest.java | /*
* Copyright (C) 2015 Edward Raff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.classifiers.trees;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.TestTools;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.datatransform.*;
import jsat.linear.DenseVector;
import jsat.regression.RegressionDataSet;
import jsat.regression.RegressionModelEvaluation;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class RandomForestTest
{
static DenseVector coefs = new DenseVector(new double[]{0.1, 0.9, -0.2, 0.4, -0.5});
public RandomForestTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
static int max_trials = 3;
@Test
public void testTrainC_RegressionDataSet()
{
System.out.println("train");
for(boolean useCatFeatures : new boolean[]{true, false})
{
RandomForest instance = new RandomForest();
RegressionDataSet train = FixedProblems.getLinearRegression(1000, RandomUtil.getRandom(), coefs);
RegressionDataSet test = FixedProblems.getLinearRegression(100, RandomUtil.getRandom(), coefs);
RegressionModelEvaluation rme = new RegressionModelEvaluation(instance, train);
if(useCatFeatures)
rme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
rme.evaluateTestSet(test);
assertTrue(rme.getMeanError() <= test.getTargetValues().mean()*2.5);
}
}
@Test
public void testTrainC_RegressionDataSetMiingValue()
{
System.out.println("train");
for(boolean useCatFeatures : new boolean[]{true, false})
{
RandomForest instance = new RandomForest();
RegressionDataSet train = FixedProblems.getLinearRegression(1000, RandomUtil.getRandom(), coefs);
RegressionDataSet test = FixedProblems.getLinearRegression(1000, RandomUtil.getRandom(), coefs);
train.applyTransform(new InsertMissingValuesTransform(0.1));
test.applyTransform(new InsertMissingValuesTransform(0.01));
RegressionModelEvaluation rme = new RegressionModelEvaluation(instance, train);
if(useCatFeatures)
rme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram(10)));
rme.evaluateTestSet(test);
assertTrue(rme.getMeanError() <= test.getTargetValues().mean()*3.5);
}
}
@Test
public void testTrainC_RegressionDataSet_ExecutorService()
{
System.out.println("train");
for(boolean useCatFeatures : new boolean[]{true, false})
{
RandomForest instance = new RandomForest();
RegressionDataSet train = FixedProblems.getLinearRegression(1000, RandomUtil.getRandom(), coefs);
RegressionDataSet test = FixedProblems.getLinearRegression(100, RandomUtil.getRandom(), coefs);
RegressionModelEvaluation rme = new RegressionModelEvaluation(instance, train, true);
if(useCatFeatures)
rme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
rme.evaluateTestSet(test);
assertTrue(rme.getMeanError() <= test.getTargetValues().mean()*2.5);
}
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(boolean useCatFeatures : new boolean[]{true, false})
{
RandomForest instance = new RandomForest();
for(int trials = 0; trials < max_trials; trials++)
{
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
                //RF may not get the boundary perfect, so use noiseless data for testing
ClassificationDataSet test = FixedProblems.getCircles(100, 0.0, RandomUtil.getRandom(), 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
cme.evaluateTestSet(test);
                if(cme.getErrorRate() > 0.001)
                {
                    if(trials == max_trials-1)//wrong too many times, something is broken
                        assertEquals(cme.getErrorRate(), 0.0, 0.001);
                }
                else
                    break;//did good
}
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(boolean useCatFeatures : new boolean[]{true, false})
{
RandomForest instance = new RandomForest();
for(int trials = 0; trials < max_trials; trials++)
{
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
                //RF may not get the boundary perfect, so use noiseless data for testing
ClassificationDataSet test = FixedProblems.getCircles(100, 0.0, RandomUtil.getRandom(), 1.0, 10.0, 100.0);
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
cme.evaluateTestSet(test);
                if(cme.getErrorRate() > 0.001)
                {
                    if(trials == max_trials-1)//wrong too many times, something is broken
                        assertEquals(cme.getErrorRate(), 0.0, 0.001);
                }
                else
                    break;//did good
}
}
}
@Test
public void testTrainC_ClassificationDataSetMissingFeat()
{
System.out.println("trainC");
for(boolean useCatFeatures : new boolean[]{true, false})
{
RandomForest instance = new RandomForest();
for(int trials = 0; trials < max_trials; trials++)
{
ClassificationDataSet train = FixedProblems.getCircles(1000, 1.0, 10.0, 100.0);
                //RF may not get the boundary perfect, so use noiseless data for testing
ClassificationDataSet test = FixedProblems.getCircles(1000, 0.0, RandomUtil.getRandom(), 1.0, 10.0, 100.0);
train.applyTransform(new InsertMissingValuesTransform(0.1));
test.applyTransform(new InsertMissingValuesTransform(0.01));
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
if(useCatFeatures)
cme.setDataTransformProcess(new DataTransformProcess(new NumericalToHistogram()));
cme.evaluateTestSet(test);
double target = 0.1;
if(useCatFeatures)//hard to get right with only 2 features like this
target = 0.17;
                if(cme.getErrorRate() > target)
                {
                    if(trials == max_trials-1)//wrong too many times, something is broken
                        assertEquals(cme.getErrorRate(), 0.0, target);
                }
                else
                    break;//did good
}
}
}
@Test
public void testClone()
{
System.out.println("clone");
for(boolean useCatFeatures : new boolean[]{true, false})
{
RandomForest instance = new RandomForest();
ClassificationDataSet t1 = FixedProblems.getSimpleKClassLinear(1000, 2);
ClassificationDataSet t2 = FixedProblems.getSimpleKClassLinear(1000, 3);
if(useCatFeatures)
{
t1.applyTransform(new NumericalToHistogram(t1));
t2.applyTransform(new NumericalToHistogram(t2));
}
instance = instance.clone();
instance = TestTools.deepCopy(instance);
instance.train(t1);
        RandomForest result = instance.clone();
        double errors = 0;
for(int i = 0; i < t1.size(); i++)
if(t1.getDataPointCategory(i) != result.classify(t1.getDataPoint(i)).mostLikely())
errors++;
assertEquals(0.0, errors/t1.size(), 0.02);
result = TestTools.deepCopy(instance);
errors = 0;
for(int i = 0; i < t1.size(); i++)
if(t1.getDataPointCategory(i) != result.classify(t1.getDataPoint(i)).mostLikely())
errors++;
assertEquals(0.0, errors/t1.size(), 0.02);
result.train(t2);
errors = 0;
for(int i = 0; i < t1.size(); i++)
if(t1.getDataPointCategory(i) != instance.classify(t1.getDataPoint(i)).mostLikely())
errors++;
assertEquals(0.0, errors/t1.size(), 0.02);
errors = 0;
for(int i = 0; i < t2.size(); i++)
if(t2.getDataPointCategory(i) != result.classify(t2.getDataPoint(i)).mostLikely())
errors++;
assertEquals(0.0, errors/t2.size(), 0.02);
}
}
}
| 10,334 | 36.310469 | 123 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/BayesianHACTest.java | /*
* This code was contributed under the Public Domain
*/
package jsat.clustering;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import jsat.DataSet;
import jsat.NormalClampedSample;
import jsat.SimpleDataSet;
import jsat.TestTools;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.distributions.multivariate.MultivariateDistribution;
import jsat.distributions.multivariate.NormalM;
import jsat.linear.ConstantVector;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.utils.GridDataGenerator;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class BayesianHACTest {
public BayesianHACTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of log_exp_sum method, of class BayesianHAC.
*/
@Test
public void testLog_exp_sum()
{
System.out.println("log_exp_sum");
double log_a = 1.0;
double log_b = 3.0;
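        //for inputs this small the naive formula log(exp(a)+exp(b)) cannot overflow,
        //so it serves as a direct reference value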
double expResult = Math.log(Math.exp(log_a)+Math.exp(log_b));
double result = BayesianHAC.log_exp_sum(log_a, log_b);
assertEquals(expResult, result, 1e-10);
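        //log(exp(a)+exp(b)) is symmetric in its arguments, so swapping them must give the same result
        assertEquals(result, BayesianHAC.log_exp_sum(log_b, log_a), 1e-10);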
}
// @Test
public void testBinaryClustering()
{
System.out.println("cluster_BernoulliBeta");
Random rand = RandomUtil.getRandom();
int d = 5;
SimpleDataSet sds = new SimpleDataSet(d, new CategoricalData[0]);
        //Hard-coded test: the clustering should correctly identify that there are two clusters
for(int i = 0; i < 20; i++)
{
Vec x = DenseVector.random(d, rand).multiply(0.05);
sds.add(new DataPoint(x));
}
for(int i = 0; i < 20; i++)
{
Vec x = DenseVector.random(d, rand).multiply(0.05).add(0.9);
sds.add(new DataPoint(x));
}
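        //values near 0 vs. near 1 act like noisy binary features, which the Bernoulli-Beta model should separate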
BayesianHAC bhac = new BayesianHAC(BayesianHAC.Distributions.BERNOULLI_BETA);
int[] designations = new int[sds.size()];
bhac.cluster(sds, false, designations);
        //check the first cluster is homogeneous
for(int i = 1; i < 20; i++)
assertEquals(designations[0], designations[i]);
        //check the second cluster is homogeneous
for(int i = 21; i < sds.size(); i++)
assertEquals(designations[20], designations[i]);
//Both classes have different values
assertEquals(1, Math.abs(designations[0]-designations[20]));
// for(int i = 0; i < designations.length; i++)
// System.out.println(designations[i]);
}
@Test
    public void testClusterGauss()
    {
        System.out.println("cluster_gauss");
Random rand = RandomUtil.getRandom();
GridDataGenerator gdg = new GridDataGenerator(new NormalClampedSample(0, 0.05), rand, 2, 2);
SimpleDataSet sds = gdg.generateData(10);
for(BayesianHAC.Distributions cov_type : EnumSet.of(BayesianHAC.Distributions.GAUSSIAN_FULL, BayesianHAC.Distributions.GAUSSIAN_DIAG))
for (boolean parallel : new boolean[]{ false})
{
BayesianHAC em = new BayesianHAC(cov_type);
int[] designations = new int[sds.size()];
                em.cluster(sds, parallel, designations);
List<List<DataPoint>> grouped = ClustererBase.createClusterListFromAssignmentArray(designations, sds);
em = em.clone();
TestTools.checkClusteringByCat(grouped);
//Check that the found means correspond to expected quadrants
List<Vec> found_means = em.getClusterDistributions().stream()
.map(x->((NormalM)x).getMean())
.collect(Collectors.toList());
List<Vec> expectedMeans = new ArrayList<>();
expectedMeans.add(DenseVector.toDenseVec(0,0));
expectedMeans.add(DenseVector.toDenseVec(0,1));
expectedMeans.add(DenseVector.toDenseVec(1,0));
expectedMeans.add(DenseVector.toDenseVec(1,1));
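                //the 2x2 grid generator centers one cluster at each corner of the unit square,
                //so every fitted Gaussian mean should land within 0.05 of exactly one corner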
for(Vec expected : expectedMeans)
assertEquals(1, found_means.stream().filter(f->f.subtract(expected).pNorm(2) < 0.05).count());
// for(int i = 0; i < designations.length; i++)
// System.out.println(designations[i]);
}
}
}
| 4,964 | 29.460123 | 142 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/CLARATest.java | package jsat.clustering;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.distributions.Uniform;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.XORWOW;
import org.junit.*;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class CLARATest
{
public CLARATest()
{
}
static private CLARA algo;
static private SimpleDataSet easyData10;
static private SimpleDataSet easyData2;
@BeforeClass
public static void setUpClass() throws Exception
{
algo = new CLARA();
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.005, 0.005), new XORWOW(12), 2, 3);
easyData10 = gdg.generateData(40);
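        //despite the "easyData10" name, the 2x3 grid above yields 6 true clusters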
gdg = new GridDataGenerator(new Uniform(-0.005, 0.005), new XORWOW(12), 2, 1);
easyData2 = gdg.generateData(40);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
CLARA toUse = algo.clone();
toUse.setSampleSize(easyData10.size()/2);
List<List<DataPoint>> clusters = toUse.cluster(easyData10, 6);
assertEquals(6, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_ExecutorService()
{
System.out.println("cluster(dataset, int, ExecutorService)");
CLARA toUse = algo.clone();
toUse.setSampleCount(6);
toUse.setSampleSize(easyData10.size()/2);
List<List<DataPoint>> clusters = toUse.cluster(easyData10, 6, true);
assertEquals(6, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, int, ExecutorService)");
CLARA toUse = algo.clone();
List<List<DataPoint>> clusters = toUse.cluster(easyData2, true);
assertEquals(2, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 3,252 | 28.572727 | 104 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/DBSCANTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering;
import java.util.Set;
import jsat.classifiers.DataPoint;
import java.util.Random;
import java.util.concurrent.Executors;
import jsat.distributions.Uniform;
import jsat.utils.GridDataGenerator;
import jsat.SimpleDataSet;
import java.util.List;
import java.util.concurrent.ExecutorService;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.linear.vectorcollection.VectorArray;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class DBSCANTest
{
static private DBSCAN dbscan;
static private SimpleDataSet easyData10;
static private ExecutorService ex;
public DBSCANTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
dbscan = new DBSCAN(new EuclideanDistance(), new VectorArray<>());
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new Random(12), 2, 5);
easyData10 = gdg.generateData(40);
ex = Executors.newFixedThreadPool(SystemInfo.LogicalCores);
}
@AfterClass
public static void tearDownClass() throws Exception
{
ex.shutdown();
}
@Before
public void setUp()
{
}
/**
* Test of cluster method, of class DBSCAN.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
List<List<DataPoint>> clusters = dbscan.cluster(easyData10, 5);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class DBSCAN.
*/
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
List<List<DataPoint>> clusters = dbscan.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class DBSCAN.
*/
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, executorService)");
List<List<DataPoint>> clusters = dbscan.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class DBSCAN.
*/
@Test
public void testCluster_3args_1()
{
System.out.println("cluster(dataset, double, int)");
//We know the range is [-.15, .15]
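        //A rough sketch of why these parameters work: each blob is uniform over
        //a ~0.3-wide square while the grid spacing between blobs is much larger,
        //so eps=0.15 links points within a blob but never bridges two blobs,
        //and minPts=5 is easily met with dozens of points per blob.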
List<List<DataPoint>> clusters = dbscan.cluster(easyData10, 0.15, 5);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class DBSCAN.
*/
@Test
public void testCluster_3args_2()
{
System.out.println("cluster(dataset, int, executorService)");
List<List<DataPoint>> clusters = dbscan.cluster(easyData10, 3, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class DBSCAN.
*/
@Test
public void testCluster_4args()
{
System.out.println("cluster(dataset, double, int, executorService)");
//We know the range is [-.15, .15]
List<List<DataPoint>> clusters = dbscan.cluster(easyData10, 0.15, 5, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 5,508 | 29.269231 | 102 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/EMGaussianMixtureTest.java | package jsat.clustering;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.NormalClampedSample;
import jsat.SimpleDataSet;
import static jsat.TestTools.checkClusteringByCat;
import jsat.classifiers.DataPoint;
import jsat.clustering.kmeans.HamerlyKMeans;
import jsat.distributions.Normal;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class EMGaussianMixtureTest
{
static private SimpleDataSet easyData;
public EMGaussianMixtureTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testCluster_3args_2()
{
System.out.println("cluster(dataset, int, threadpool)");
boolean good = false;
int count = 0;
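        //EM only converges to a local optimum and is sensitive to its seeds, so
        //this test follows the retry convention used elsewhere in the suite:
        //re-draw the data and re-run up to 3 times before declaring failure.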
do
{
GridDataGenerator gdg = new GridDataGenerator(new NormalClampedSample(0, 0.05), RandomUtil.getRandom(), 2, 2);
easyData = gdg.generateData(50);
good = true;
for (boolean parallel : new boolean[]{true, false})
{
EMGaussianMixture em = new EMGaussianMixture(SeedSelectionMethods.SeedSelection.FARTHEST_FIRST);
List<List<DataPoint>> clusters = em.cluster(easyData, 4, parallel);
assertEquals(4, clusters.size());
good = good & checkClusteringByCat(clusters);
}
}
while (!good && count++ < 3);
assertTrue(good);
}
}
| 2,071 | 22.022222 | 122 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/FLAMETest.java | package jsat.clustering;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.distributions.Normal;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import org.junit.*;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class FLAMETest
{
public FLAMETest()
{
}
static private FLAME algo;
static private SimpleDataSet easyData10;
@BeforeClass
public static void setUpClass() throws Exception
{
algo = new FLAME(new EuclideanDistance(), 30, 800);
GridDataGenerator gdg = new GridDataGenerator(new Normal(0, 0.05), new Random(12), 2, 5);
easyData10 = gdg.generateData(100);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
Clusterer toUse = algo.clone();
List<List<DataPoint>> clusters = toUse.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, ExecutorService)");
Clusterer toUse = algo.clone();
List<List<DataPoint>> clusters = toUse.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,352 | 25.738636 | 97 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/GapStatisticTest.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.*;
import static jsat.TestTools.checkClusteringByCat;
import jsat.classifiers.DataPoint;
import jsat.clustering.kmeans.HamerlyKMeans;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class GapStatisticTest
{
static private SimpleDataSet easyData10;
static private int K = 2*2;
public GapStatisticTest()
{
}
@BeforeClass
public static void setUpClass()
{
GridDataGenerator gdg = new GridDataGenerator(new NormalClampedSample(0.0, 0.05), RandomUtil.getRandom(1), 2, 2);
easyData10 = gdg.generateData(200);
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testCluster_4args_1_findK()
{
System.out.println("cluster findK");
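        //For context: the gap statistic chooses the k maximizing
        //Gap(k) = E*[log(W_k)] - log(W_k), where W_k is the within-cluster
        //dispersion and E* is taken over a null reference distribution; the
        //PCSampling flag toggles between sampling the reference uniformly over
        //the bounding box and over the principal components of the data.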
boolean good = false;
int count = 0;
do
{
GridDataGenerator gdg = new GridDataGenerator(new NormalClampedSample(0.0, 0.05), RandomUtil.getRandom(), 2, 2);
easyData10 = gdg.generateData(200);
good = true;
for(boolean parallel: new boolean[]{true, false})
for(boolean PCSample: new boolean[]{true, false})
{
GapStatistic gap = new GapStatistic(new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST));
gap.setPCSampling(PCSample);
List<List<DataPoint>> clusters = gap.cluster(easyData10, 1, 20, parallel);
assertEquals(K, clusters.size());
good = good & checkClusteringByCat(clusters);
}
}
while(!good && count++ < 3);
assertTrue(good);
}
}
| 2,478 | 25.655914 | 151 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/HDBSCANTest.java | package jsat.clustering;
import java.util.Set;
import jsat.classifiers.DataPoint;
import jsat.distributions.Uniform;
import jsat.utils.GridDataGenerator;
import jsat.SimpleDataSet;
import java.util.List;
import jsat.utils.IntSet;
import jsat.utils.random.RandomUtil;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class HDBSCANTest
{
static private HDBSCAN hdbscan;
static private SimpleDataSet easyData10;
public HDBSCANTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
hdbscan = new HDBSCAN();
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), RandomUtil.getRandom(), 2, 5);
easyData10 = gdg.generateData(40);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
     * Test of cluster method, of class HDBSCAN.
*/
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
List<List<DataPoint>> clusters = hdbscan.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
     * Test of cluster method, of class HDBSCAN.
*/
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, executorService)");
List<List<DataPoint>> clusters = hdbscan.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,321 | 23.702128 | 110 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/LSDBCTest.java | package jsat.clustering;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.distributions.Normal;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import org.junit.*;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class LSDBCTest
{
public LSDBCTest()
{
}
static private LSDBC algo;
static private SimpleDataSet easyData10;
@BeforeClass
public static void setUpClass() throws Exception
{
algo = new LSDBC();
GridDataGenerator gdg = new GridDataGenerator(new Normal(0, 0.10), new Random(12), 2, 5);
easyData10 = gdg.generateData(40);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
Clusterer toUse = algo.clone();
List<List<DataPoint>> clusters = toUse.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, ExecutorService)");
Clusterer toUse = algo.clone();
List<List<DataPoint>> clusters = toUse.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,265 | 25.045977 | 97 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/MEDDITTest.java | package jsat.clustering;
import java.util.Set;
import jsat.classifiers.DataPoint;
import jsat.distributions.Uniform;
import jsat.utils.GridDataGenerator;
import jsat.SimpleDataSet;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static jsat.TestTools.checkClusteringByCat;
import jsat.clustering.SeedSelectionMethods.SeedSelection;
import jsat.distributions.Normal;
import jsat.linear.Vec;
import jsat.linear.distancemetrics.DistanceCounter;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.random.RandomUtil;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class MEDDITTest
{
    //Like KMeans, the cluster number detection isn't stable enough yet that we can test that it gets the right result.
static private MEDDIT pam;
static private SimpleDataSet easyData10;
public MEDDITTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
pam = new MEDDIT(new EuclideanDistance(), RandomUtil.getRandom(), SeedSelection.FARTHEST_FIRST);
pam.setMaxIterations(500);
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.05, 0.05), RandomUtil.getRandom(), 2, 5);
easyData10 = gdg.generateData(200);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
* Test of cluster method, of class PAM.
*/
@Test
public void testCluster_3args_1()
{
System.out.println("cluster(dataSet, int, ExecutorService)");
boolean good = false;
int count = 0;
do
{
List<List<DataPoint>> clusters = pam.cluster(easyData10, 10, true);
assertEquals(10, clusters.size());
good = checkClusteringByCat(clusters);
}
while(!good && count++ < 3);
assertTrue(good);
}
/**
* Test of cluster method, of class PAM.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
boolean good = false;
int count = 0;
do
{
List<List<DataPoint>> clusters = pam.cluster(easyData10, 10);
assertEquals(10, clusters.size());
good = checkClusteringByCat(clusters);
}
while(!good && count++ < 3);
assertTrue(good);
}
    //This test works but takes a while... so just commenting it out but leaving it in case I need it for debugging later
// @Test
public void testCluster_AvoidingCalcs()
{
System.out.println("cluster(dataset, int)");
        //Use a deterministic seed initialization. Let's see that the new method does LESS distance computations
DistanceCounter dm = new DistanceCounter(new EuclideanDistance());
MEDDIT newMethod = new MEDDIT(dm, RandomUtil.getRandom(), SeedSelection.MEAN_QUANTILES);
PAM oldMethod = new PAM(dm, RandomUtil.getRandom(), SeedSelection.MEAN_QUANTILES);
        //MEDDIT works best when the dimension is higher, and poorly when the dimension is low. So let's put it in the happy area
GridDataGenerator gdg = new GridDataGenerator(new Normal(0, 0.1), RandomUtil.getRandom(), 2, 2, 2, 2);
SimpleDataSet data = gdg.generateData(500);
long N = data.size();
newMethod.setStoreMedoids(true);
oldMethod.setStoreMedoids(true);
        //To make this test run faster, let's just do a few iterations. Both runs should reach the same result
newMethod.setMaxIterations(5);
oldMethod.setMaxIterations(5);
newMethod.cluster(data, 10);
long newDistanceCalcs = dm.getCallCount();
dm.resetCounter();
oldMethod.cluster(data, 10);
long oldDistanceCalcs = dm.getCallCount();
dm.resetCounter();
assertTrue(newDistanceCalcs < oldDistanceCalcs);
//We did less calculations. Did we get the same centroids?
        Set<Integer> newMedoids = IntStream.of(newMethod.getMedoids()).boxed().collect(Collectors.toSet());
        Set<Integer> oldMedoids = IntStream.of(oldMethod.getMedoids()).boxed().collect(Collectors.toSet());
        for(int i : newMedoids)
            assertTrue(oldMedoids.contains(i));
}
@Test
public void test_medoid()
{
System.out.println("cluster(dataset, int)");
//Use a deterministic seed initialization. Lets see that the new method does LESS distance computations
DistanceCounter dm = new DistanceCounter(new EuclideanDistance());
        //MEDDIT works best when the dimension is higher, and poorly when the dimension is low. So let's put it in the happy area
GridDataGenerator gdg = new GridDataGenerator(new Normal(0, 0.1), RandomUtil.getRandom(), 2, 2, 2, 2);
List<Vec> X = gdg.generateData(500).getDataVectors();
double tol = 0.01;
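        //Rough idea of the difference being measured: PAM.medoid does the exact
        //O(n^2) scan for the point minimizing total distance to all others,
        //while MEDDIT.medoid treats candidates as bandit arms, sampling
        //distances and keeping confidence bounds so it can stop once a winner
        //is identified to within tol - hence the lower call count asserted below.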
        int trueMed = PAM.medoid(true, X, dm);
long pamD = dm.getCallCount();
dm.resetCounter();
for(boolean parallel : new boolean[]{false, true})
{
dm.resetCounter();
            int approxMed = MEDDIT.medoid(parallel, X, tol, dm);
            assertEquals(trueMed, approxMed);
assertTrue(pamD > dm.getCallCount());
}
}
}
| 5,570 | 31.202312 | 119 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/MeanShiftTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering;
import java.util.List;
import java.util.Random;
import java.util.Set;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.distributions.Normal;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import org.junit.*;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class MeanShiftTest
{
public MeanShiftTest()
{
}
static private MeanShift meanShift;
static private SimpleDataSet easyData10;
@BeforeClass
public static void setUpClass() throws Exception
{
meanShift = new MeanShift();
GridDataGenerator gdg = new GridDataGenerator(new Normal(0, 0.10), new Random(12), 2, 5);
easyData10 = gdg.generateData(40);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
List<List<DataPoint>> clusters = meanShift.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
assertFalse(seenBefore.contains(thisClass));
seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, ExecutorService)");
List<List<DataPoint>> clusters = meanShift.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
assertFalse(seenBefore.contains(thisClass));
seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,283 | 24.954545 | 97 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/OPTICSTest.java | package jsat.clustering;
import java.util.EnumSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.distributions.Normal;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import org.junit.*;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class OPTICSTest
{
public OPTICSTest()
{
}
static private OPTICS optics;
    static private EnumSet<OPTICS.ExtractionMethod> toTest = EnumSet.of(OPTICS.ExtractionMethod.THRESHHOLD);
static private SimpleDataSet easyData10;
@BeforeClass
public static void setUpClass() throws Exception
{
optics = new OPTICS();
GridDataGenerator gdg = new GridDataGenerator(new Normal(0, 0.05), new Random(12), 2, 5);
easyData10 = gdg.generateData(100);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
for(OPTICS.ExtractionMethod method : toTest)
{
optics.setExtractionMethod(method);
List<List<DataPoint>> clusters = optics.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
                assertFalse(seenBefore.contains(thisClass));
                seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
for(OPTICS.ExtractionMethod method : toTest)
{
optics.setExtractionMethod(method);
System.out.println("cluster(dataset, ExecutorService)");
List<List<DataPoint>> clusters = optics.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
                assertFalse(seenBefore.contains(thisClass));
                seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
}
| 2,694 | 27.072917 | 144 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/PAMTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering;
import java.util.Set;
import jsat.classifiers.DataPoint;
import java.util.concurrent.Executors;
import jsat.distributions.Uniform;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.SimpleDataSet;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import static jsat.TestTools.checkClusteringByCat;
import jsat.clustering.SeedSelectionMethods.SeedSelection;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class PAMTest
{
    //Like KMeans, the cluster number detection isn't stable enough yet that we can test that it gets the right result.
static private PAM pam;
static private SimpleDataSet easyData10;
public PAMTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
pam = new PAM(new EuclideanDistance(), RandomUtil.getRandom(), SeedSelection.FARTHEST_FIRST);
pam.setMaxIterations(1000);
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.05, 0.05), RandomUtil.getRandom(), 2, 5);
easyData10 = gdg.generateData(100);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
* Test of cluster method, of class PAM.
*/
@Test
public void testCluster_3args_1()
{
System.out.println("cluster(dataSet, int, ExecutorService)");
boolean good = false;
int count = 0;
do
{
List<List<DataPoint>> clusters = pam.cluster(easyData10, 10, true);
assertEquals(10, clusters.size());
good = checkClusteringByCat(clusters);
}
while(!good && count++ < 3);
assertTrue(good);
}
/**
* Test of cluster method, of class PAM.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
boolean good = false;
int count = 0;
do
{
List<List<DataPoint>> clusters = pam.cluster(easyData10, 10);
assertEquals(10, clusters.size());
good = checkClusteringByCat(clusters);
}
while(!good && count++ < 3);
assertTrue(good);
}
}
| 2,676 | 23.787037 | 118 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/TRIKMEDSTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering;
import java.util.Set;
import jsat.classifiers.DataPoint;
import jsat.distributions.Uniform;
import jsat.utils.GridDataGenerator;
import jsat.SimpleDataSet;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static jsat.TestTools.checkClusteringByCat;
import jsat.clustering.SeedSelectionMethods.SeedSelection;
import jsat.linear.Vec;
import jsat.linear.distancemetrics.DistanceCounter;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.random.RandomUtil;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class TRIKMEDSTest
{
    //Like KMeans, the cluster number detection isn't stable enough yet that we can test that it gets the right result.
static private TRIKMEDS pam;
static private SimpleDataSet easyData10;
public TRIKMEDSTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
pam = new TRIKMEDS(new EuclideanDistance(), RandomUtil.getRandom(), SeedSelection.FARTHEST_FIRST);
pam.setMaxIterations(1000);
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.05, 0.05), RandomUtil.getRandom(), 2, 5);
easyData10 = gdg.generateData(100);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
* Test of cluster method, of class PAM.
*/
@Test
public void testCluster_3args_1()
{
System.out.println("cluster(dataSet, int, ExecutorService)");
boolean good = false;
int count = 0;
do
{
List<List<DataPoint>> clusters = pam.cluster(easyData10, 10, true);
assertEquals(10, clusters.size());
good = checkClusteringByCat(clusters);
}
while(!good && count++ < 3);
assertTrue(good);
}
/**
* Test of cluster method, of class PAM.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
boolean good = false;
int count = 0;
do
{
List<List<DataPoint>> clusters = pam.cluster(easyData10, 10);
assertEquals(10, clusters.size());
good = checkClusteringByCat(clusters);
}
while(!good && count++ < 3);
assertTrue(good);
}
@Test
public void testCluster_AvoidingCalcs()
{
System.out.println("cluster(dataset, int)");
        //Use a deterministic seed initialization. Let's see that the new method does LESS distance computations
DistanceCounter dm = new DistanceCounter(new EuclideanDistance());
TRIKMEDS newMethod = new TRIKMEDS(dm, RandomUtil.getRandom(), SeedSelection.MEAN_QUANTILES);
PAM oldMethod = new PAM(dm, RandomUtil.getRandom(), SeedSelection.MEAN_QUANTILES);
newMethod.setStoreMedoids(true);
oldMethod.setStoreMedoids(true);
newMethod.cluster(easyData10, 10);
long newDistanceCalcs = dm.getCallCount();
dm.resetCounter();
oldMethod.cluster(easyData10, 10);
long oldDistanceCalcs = dm.getCallCount();
dm.resetCounter();
assertTrue(newDistanceCalcs < oldDistanceCalcs);
//We did less calculations. Did we get the same centroids?
        Set<Integer> newMedoids = IntStream.of(newMethod.getMedoids()).boxed().collect(Collectors.toSet());
        Set<Integer> oldMedoids = IntStream.of(oldMethod.getMedoids()).boxed().collect(Collectors.toSet());
        for(int i : newMedoids)
            assertTrue(oldMedoids.contains(i));
}
@Test
public void test_medoid()
{
System.out.println("cluster(dataset, int)");
//Use a deterministic seed initialization. Lets see that the new method does LESS distance computations
DistanceCounter dm = new DistanceCounter(new EuclideanDistance());
List<Vec> X = easyData10.getDataVectors();
for(boolean parallel : new boolean[]{true, false})
{
assertEquals(PAM.medoid(parallel, X, dm), TRIKMEDS.medoid(parallel, X, dm));
}
}
}
| 4,462 | 28.95302 | 118 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/VBGMMTest.java | /*
* This code contributed under the Public Domain
*/
package jsat.clustering;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import jsat.NormalClampedSample;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.utils.GridDataGenerator;
import jsat.utils.random.RandomUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author edwardraff
*/
public class VBGMMTest {
public VBGMMTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of cluster method, of class VBGMM.
*/
@Test
public void testCluster()
{
System.out.println("cluster");
GridDataGenerator gdg = new GridDataGenerator(new NormalClampedSample(0, 0.05), RandomUtil.getRandom(), 2, 2);
SimpleDataSet easyData = gdg.generateData(500);
for(VBGMM.COV_FIT_TYPE cov_type : VBGMM.COV_FIT_TYPE.values())
// for(VBGMM.COV_FIT_TYPE cov_type : Arrays.asList(VBGMM.COV_FIT_TYPE.DIAG))//if I want to test a specific cov
for (boolean parallel : new boolean[]{true, false})
{
VBGMM em = new VBGMM(cov_type);
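            //Note why no k is passed to cluster(): the variational updates
            //shrink the weights of redundant mixture components toward zero,
            //so VBGMM infers the number of live components (4 here) itself.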
List<List<DataPoint>> clusters = em.cluster(easyData, parallel);
assertEquals(4, clusters.size());
em = em.clone();
List<Vec> means = Arrays.stream(em.normals).map(n->n.getMean()).collect(Collectors.toList());
//we should have 1 mean at each of the coordinates of our 2x2 grid
//(0,0), (0,1), (1,0), (1,1)
List<Vec> expectedMeans = new ArrayList<>();
expectedMeans.add(DenseVector.toDenseVec(0,0));
expectedMeans.add(DenseVector.toDenseVec(0,1));
expectedMeans.add(DenseVector.toDenseVec(1,0));
expectedMeans.add(DenseVector.toDenseVec(1,1));
for(Vec expected : expectedMeans)
assertEquals(1, means.stream().filter(f->f.subtract(expected).pNorm(2) < 0.05).count());
}
}
}
| 2,483 | 27.227273 | 118 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/biclustering/SpectralCoClusteringTest.java | /*
* This code was contributed under the public domain.
*/
package jsat.clustering.biclustering;
import java.util.ArrayList;
import java.util.List;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.ClassificationDataSet;
import jsat.clustering.HDBSCAN;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.utils.IntList;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author edwardraff
*/
public class SpectralCoClusteringTest {
public SpectralCoClusteringTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of cluster method, of class SpectralCoClustering.
*/
@Test
public void testCluster_4args()
{
System.out.println("cluster");
IntList labels = new IntList();
List<Vec> data = new ArrayList<>();
int true_k = 4;
int features_per = 3;
int n_c = 5;
int noisy_features = 0;
int d = features_per*true_k + noisy_features;
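        //The loop below builds a block-diagonal matrix: bicluster y owns rows
        //[y*n_c, (y+1)*n_c) and columns [y*features_per, (y+1)*features_per),
        //plus small random noise, so the singular vectors of the normalized
        //matrix should cleanly separate the four blocks.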
List<List<Integer>> true_row_assingments = new ArrayList<>();
List<List<Integer>> true_col_assingments = new ArrayList<>();
for(int y = 0; y < true_k; y++)
{
IntList row_asign = new IntList();
IntList col_asign = new IntList();
for(int j = y*features_per; j < (y+1)*features_per; j++)
col_asign.add(j);
for(int i = 0; i < n_c; i++)
{
row_asign.add(data.size());
labels.add(y);
// DenseVector x = new DenseVector(d);
Vec x = DenseVector.random(d).multiply(0.01);
for(int j = y*features_per; j < (y+1)*features_per; j++)
x.increment(j, 1.0);
data.add(x);
}
true_row_assingments.add(row_asign);
true_col_assingments.add(col_asign);
}
ClassificationDataSet dataSet = new ClassificationDataSet(d, new CategoricalData[0], new CategoricalData(true_k));
for(int i = 0; i < labels.size(); i++)
dataSet.addDataPoint(data.get(i), labels.get(i));
// boolean parallel = false;
for(boolean parallel : new boolean[]{false, true})
for(SpectralCoClustering.InputNormalization in : SpectralCoClustering.InputNormalization.values())
{
System.out.println(in + " " + parallel);
SpectralCoClustering instance = new SpectralCoClustering(in);
List<List<Integer>> row_assignments = new ArrayList<>();
List<List<Integer>> col_assignments = new ArrayList<>();
instance.bicluster(dataSet, true_k, parallel, row_assignments, col_assignments);
assertEquals(true_k, row_assignments.size());
assertEquals(true_k, col_assignments.size());
double score = ConsensusScore.score(parallel,
true_row_assingments, true_col_assingments,
row_assignments, col_assignments);
// for(int c = 0; c < true_k; c++)
// {
// System.out.println(c);
// System.out.println(row_assignments.get(c));
// System.out.println(col_assignments.get(c));
// System.out.println("\n\n");
// }
//
// System.out.println("Score: " + score);
//Should be able to get a perfect score
assertEquals(1.0, score, 0.0);
}
}
@Test
public void testCluster_UnkK()
{
System.out.println("cluster");
IntList labels = new IntList();
List<Vec> data = new ArrayList<>();
int true_k = 4;
int features_per = 3;
int n_c = 5;
int noisy_features = 0;
int d = features_per*true_k + noisy_features;
List<List<Integer>> true_row_assingments = new ArrayList<>();
List<List<Integer>> true_col_assingments = new ArrayList<>();
for(int y = 0; y < true_k; y++)
{
IntList row_asign = new IntList();
IntList col_asign = new IntList();
for(int j = y*features_per; j < (y+1)*features_per; j++)
col_asign.add(j);
for(int i = 0; i < n_c; i++)
{
row_asign.add(data.size());
labels.add(y);
// DenseVector x = new DenseVector(d);
Vec x = DenseVector.random(d).multiply(0.01);
for(int j = y*features_per; j < (y+1)*features_per; j++)
x.increment(j, 1.0);
data.add(x);
}
true_row_assingments.add(row_asign);
true_col_assingments.add(col_asign);
}
ClassificationDataSet dataSet = new ClassificationDataSet(d, new CategoricalData[0], new CategoricalData(true_k));
for(int i = 0; i < labels.size(); i++)
dataSet.addDataPoint(data.get(i), labels.get(i));
for(boolean parallel : new boolean[]{false, true})
for(SpectralCoClustering.InputNormalization in : SpectralCoClustering.InputNormalization.values())
{
System.out.println(in + " " + parallel);
SpectralCoClustering instance = new SpectralCoClustering(in);
instance.setBaseClusterAlgo(new HDBSCAN(5));
List<List<Integer>> row_assignments = new ArrayList<>();
List<List<Integer>> col_assignments = new ArrayList<>();
instance.bicluster(dataSet, parallel, row_assignments, col_assignments);
assertEquals(true_k, row_assignments.size());
assertEquals(true_k, col_assignments.size());
double score = ConsensusScore.score(parallel,
true_row_assingments, true_col_assingments,
row_assignments, col_assignments);
// for(int c = 0; c < row_assignments.size(); c++)
// {
// System.out.println(c);
// System.out.println(row_assignments.get(c));
// System.out.println(col_assignments.get(c));
// System.out.println("\n\n");
// }
//
// System.out.println("Score: " + score);
//Should be able to do pretty well
assertEquals(1.0, score, 0.1);
}
}
}
| 7,013 | 32.721154 | 122 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/evaluation/AdjustedRandIndexTest.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering.evaluation;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.ClassificationDataSet;
import jsat.linear.Vec;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class AdjustedRandIndexTest
{
public AdjustedRandIndexTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of evaluate method, of class AdjustedRandIndex.
*/
@Test
public void testEvaluate_intArr_DataSet()
{
System.out.println("evaluate");
//using example from http://www.otlet-institute.org/wikics/Clustering_Problems.html
ClassificationDataSet cds = new ClassificationDataSet(1, new CategoricalData[0], new CategoricalData(3));
for(int i = 0; i < 3; i++)
for(int j = 0; j < 3; j++)
cds.addDataPoint(Vec.random(1), new int[0], i);
int[] d = new int[9];
d[0] = d[1] = 0;
d[2] = d[3] = d[4] = d[5] = 1;
d[6] = d[7] = 2;
d[8] = 3;
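        //For reference, ARI = (RI - E[RI]) / (max(RI) - E[RI]), the Rand index
        //corrected for chance; 0.46 is the value worked out in the cited example.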
AdjustedRandIndex ari = new AdjustedRandIndex();
double score = ari.evaluate(d, cds);
        //convert to ARI
score = 1.0-score;
assertEquals(0.46, score, 0.005);
}
}
| 1,752 | 22.065789 | 113 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/evaluation/CompletenessTest.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering.evaluation;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.ClassificationDataSet;
import jsat.linear.Vec;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class CompletenessTest
{
public CompletenessTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of evaluate method, of class AdjustedRandIndex.
*/
@Test
public void testEvaluate_intArr_DataSet()
{
System.out.println("evaluate");
ClassificationDataSet cds = new ClassificationDataSet(1, new CategoricalData[0], new CategoricalData(2));
for(int i = 0; i < 2; i++)
cds.addDataPoint(Vec.random(1), new int[0], 0);
for(int i = 0; i < 2; i++)
cds.addDataPoint(Vec.random(1), new int[0], 1);
//class labels are now [0, 0, 1, 1]
int[] d = new int[4];
d[0] = d[1] = 1;
d[2] = d[3] = 0;
Completeness eval = new Completeness();
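        //Completeness = 1 - H(K|C)/H(K): perfect (1.0) when all members of each
        //class share a cluster. The four cases below score 1.0 (exact match),
        //0.5 (each class split in two), 0.0 (clusters mix the classes), and
        //1.0 (the trivial single cluster is complete by definition).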
double score;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(1.0, score, 0.005);
d[1] = 2;
d[3] = 3;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(0.5, score, 0.005);
d[0] = d[2] = 0;
d[1] = d[3] = 1;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(0.0, score, 0.005);
d[0] = d[1] = d[2] = d[3] = 0;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(1.0, score, 0.005);
}
}
| 2,026 | 20.56383 | 113 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/evaluation/HomogeneityTest.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering.evaluation;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.ClassificationDataSet;
import jsat.linear.Vec;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class HomogeneityTest
{
public HomogeneityTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of evaluate method, of class AdjustedRandIndex.
*/
@Test
public void testEvaluate_intArr_DataSet()
{
System.out.println("evaluate");
ClassificationDataSet cds = new ClassificationDataSet(1, new CategoricalData[0], new CategoricalData(2));
for(int i = 0; i < 2; i++)
cds.addDataPoint(Vec.random(1), new int[0], 0);
for(int i = 0; i < 2; i++)
cds.addDataPoint(Vec.random(1), new int[0], 1);
//class labels are now [0, 0, 1, 1]
int[] d = new int[4];
d[0] = d[1] = 1;
d[2] = d[3] = 0;
Homogeneity eval = new Homogeneity();
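        //Homogeneity = 1 - H(C|K)/H(C): perfect (1.0) when every cluster holds
        //a single class. The same four cases as the Completeness test score
        //1.0, 1.0 (singleton clusters are pure), 0.0, and 0.0 (the single
        //all-in-one cluster is maximally impure).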
double score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(1.0, score, 0.005);
d[1] = 2;
d[3] = 3;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(1.0, score, 0.005);
d[0] = d[2] = 0;
d[1] = d[3] = 1;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(0.0, score, 0.005);
d[0] = d[1] = d[2] = d[3] = 0;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(0.0, score, 0.005);
}
}
| 2,005 | 20.804348 | 113 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/evaluation/NormalizedMutualInformationTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering.evaluation;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.ClassificationDataSet;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class NormalizedMutualInformationTest
{
public NormalizedMutualInformationTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of evaluate method, of class NormalizedMutualInformation.
*/
@Test
public void testEvaluate_intArr_DataSet()
{
System.out.println("evaluate");
ClassificationDataSet cds = new ClassificationDataSet(0,
new CategoricalData[]{},
new CategoricalData(3));
//Using example case from Manning's book http://nlp.stanford.edu/IR-book/html/htmledition/evaluation-of-clustering-1.html
Vec emptyVec = new DenseVector(0);
int[] clusterAssign = new int[17];
int X = 0, O = 1, D = 2;
clusterAssign[0] = 0; cds.addDataPoint(emptyVec, X);
clusterAssign[1] = 0; cds.addDataPoint(emptyVec, X);
clusterAssign[2] = 0; cds.addDataPoint(emptyVec, X);
clusterAssign[3] = 0; cds.addDataPoint(emptyVec, X);
clusterAssign[4] = 0; cds.addDataPoint(emptyVec, X);
clusterAssign[5] = 0; cds.addDataPoint(emptyVec, O);
clusterAssign[6] = 1; cds.addDataPoint(emptyVec, X);
clusterAssign[7] = 1; cds.addDataPoint(emptyVec, D);
clusterAssign[8] = 1; cds.addDataPoint(emptyVec, O);
clusterAssign[9] = 1; cds.addDataPoint(emptyVec, O);
clusterAssign[10] = 1; cds.addDataPoint(emptyVec, O);
clusterAssign[11] = 1; cds.addDataPoint(emptyVec, O);
clusterAssign[12] = 2; cds.addDataPoint(emptyVec, X);
clusterAssign[13] = 2; cds.addDataPoint(emptyVec, X);
clusterAssign[14] = 2; cds.addDataPoint(emptyVec, D);
clusterAssign[15] = 2; cds.addDataPoint(emptyVec, D);
clusterAssign[16] = 2; cds.addDataPoint(emptyVec, D);
//True NMI for this should be 0.36
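        //Following the cited chapter, NMI = I(Omega;C) / ((H(Omega) + H(C))/2)
        //where Omega is the clustering and C the classes; plugging in the
        //17-point contingency table built above gives roughly 0.36.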
NormalizedMutualInformation nmi = new NormalizedMutualInformation();
assertEquals(0.36, 1.0-nmi.evaluate(clusterAssign, cds), 1e-2);
}
}
| 2,691 | 29.247191 | 129 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/evaluation/VMeasureTest.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering.evaluation;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.ClassificationDataSet;
import jsat.linear.Vec;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class VMeasureTest
{
public VMeasureTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of evaluate method, of class AdjustedRandIndex.
*/
@Test
public void testEvaluate_intArr_DataSet()
{
System.out.println("evaluate");
ClassificationDataSet cds = new ClassificationDataSet(1, new CategoricalData[0], new CategoricalData(2));
for(int i = 0; i < 2; i++)
cds.addDataPoint(Vec.random(1), new int[0], 0);
for(int i = 0; i < 2; i++)
cds.addDataPoint(Vec.random(1), new int[0], 1);
//class labels are now [0, 0, 1, 1]
int[] d = new int[4];
d[0] = d[1] = 1;
d[2] = d[3] = 0;
VMeasure eval = new VMeasure();
double score;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(1.0, score, 0.005);
d[1] = 2;
d[3] = 3;
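        //V-measure is the harmonic mean 2*h*c/(h+c) of homogeneity h and
        //completeness c; for this case h = 1 (every cluster is pure) and
        //c = 0.5 (each class is split in two), so V = 2*0.5/1.5 = 2/3.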
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(2.0/3.0, score, 0.005);
d[0] = d[2] = 0;
d[1] = d[3] = 1;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(0.0, score, 0.005);
d[0] = d[1] = d[2] = d[3] = 0;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(0.0, score, 0.005);
d[0] = d[2] = d[3] = 0;
d[1] = 1;
score = eval.naturalScore(eval.evaluate(d, cds));
assertEquals(0.34371101848545077, score, 0.005);
}
}
| 2,163 | 20.64 | 113 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/evaluation/intra/SumOfSqrdPairwiseDistancesTest.java | package jsat.clustering.evaluation.intra;
import java.util.List;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.linear.DenseVector;
import jsat.linear.distancemetrics.MinkowskiDistance;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class SumOfSqrdPairwiseDistancesTest
{
public SumOfSqrdPairwiseDistancesTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of evaluate method, of class SumOfSqrdPairwiseDistances.
*/
@Test
public void testEvaluate_3args()
{
System.out.println("evaluate");
int[] designations = new int[10];
SimpleDataSet dataSet = new SimpleDataSet(1, new CategoricalData[0]);
int clusterID = 2;
for(int i = 0; i < 10; i++)
dataSet.add(new DataPoint(new DenseVector(new double[]{i})));
designations[1] = designations[3] = designations[5] = designations[9] = clusterID;
SumOfSqrdPairwiseDistances instance = new SumOfSqrdPairwiseDistances();
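        //Worked by hand: cluster 2 holds the points {1, 3, 5, 9}; the squared
        //distances over unordered pairs sum to 4+16+64+4+36+16 = 140, doubled
        //to 280 over ordered pairs, then normalized by 2*n = 8 to give 35.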
double expResult = 280/(2*4);
double result = instance.evaluate(designations, dataSet, clusterID);
assertEquals(expResult, result, 1e-14);
        //minkowski p=2 is equivalent to euclidean, but the implementation won't check for that
        //just to make sure in the future, make it not quite 2 - but numerically close enough
instance = new SumOfSqrdPairwiseDistances(new MinkowskiDistance(Math.nextUp(2)));
result = instance.evaluate(designations, dataSet, clusterID);
assertEquals(expResult, result, 1e-14);
}
/**
* Test of evaluate method, of class SumOfSqrdPairwiseDistances.
*/
@Test
public void testEvaluate_List()
{
System.out.println("evaluate");
SimpleDataSet dataSet = new SimpleDataSet(1, new CategoricalData[0]);
for(int i = 0; i < 10; i++)
dataSet.add(new DataPoint(new DenseVector(new double[]{i})));
List<DataPoint> dataPoints = dataSet.getList();
SumOfSqrdPairwiseDistances instance = new SumOfSqrdPairwiseDistances();
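        //Using the identity sum_{i<j}(x_i - x_j)^2 = n*sum(x^2) - (sum(x))^2 on
        //the points 0..9: 10*285 - 45^2 = 825 over unordered pairs, so 1650
        //over ordered pairs, and 1650/(2*10) = 82.5 after normalization. A
        //brute-force cross-check of that count (a sketch; the points are just
        //0..9 in one dimension):
        double bruteForce = 0;
        for(int i = 0; i < 10; i++)
            for(int j = 0; j < 10; j++)
                bruteForce += (i-j)*(i-j);
        assertEquals(1650.0, bruteForce, 1e-14);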
double expResult = 1650.0/(2*10);
double result = instance.evaluate(dataPoints);
assertEquals(expResult, result, 1e-14);
instance = new SumOfSqrdPairwiseDistances(new MinkowskiDistance(Math.nextUp(2)));
result = instance.evaluate(dataPoints);
assertEquals(expResult, result, 1e-14);
}
}
| 2,886 | 28.161616 | 90 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/hierarchical/DivisiveGlobalClustererTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.clustering.hierarchical;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.kmeans.ElkanKMeans;
import jsat.clustering.evaluation.DaviesBouldinIndex;
import jsat.clustering.kmeans.HamerlyKMeans;
import jsat.clustering.kmeans.NaiveKMeans;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.DistanceMetric;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import org.junit.*;
/**
*
* @author Edward Raff
*/
public class DivisiveGlobalClustererTest
{
static private DivisiveGlobalClusterer dgc;
static private SimpleDataSet easyData;
public DivisiveGlobalClustererTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.05, 0.05), RandomUtil.getRandom(), 2, 2);
easyData = gdg.generateData(60);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
DistanceMetric dm = new EuclideanDistance();
dgc = new DivisiveGlobalClusterer(new NaiveKMeans(), new DaviesBouldinIndex(dm));
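        //Rough sketch of the algorithm: start with one cluster and repeatedly
        //let the base k-means split whichever cluster most improves the global
        //evaluation score (Davies-Bouldin here), which also lets the clusterer
        //choose the number of clusters on its own.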
}
@After
public void tearDown()
{
}
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
List<List<DataPoint>> clusters = dgc.cluster(easyData, 4);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
List<List<DataPoint>> clusters = dgc.cluster(easyData);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, ExecutorService)");
List<List<DataPoint>> clusters = dgc.cluster(easyData, true);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_int()
{
System.out.println("cluster(dataset, int, int)");
List<List<DataPoint>> clusters = dgc.cluster(easyData, 2, 20);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_int_ExecutorService()
{
System.out.println("cluster(dataset, int, int, ExecutorService)");
List<List<DataPoint>> clusters = dgc.cluster(easyData, 2, 20, true);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_ExecutorService()
{
System.out.println("cluster(dataset, int, ExecutorService)");
List<List<DataPoint>> clusters = dgc.cluster(easyData, 4, true);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 5,260 | 31.475309 | 110 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/hierarchical/DivisiveLocalClustererTest.java | package jsat.clustering.hierarchical;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.kmeans.ElkanKMeans;
import jsat.clustering.evaluation.DaviesBouldinIndex;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.DistanceMetric;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import org.junit.*;
/**
*
* @author Edward Raff
*/
public class DivisiveLocalClustererTest
{
static private DivisiveLocalClusterer dlc;
static private SimpleDataSet easyData;
public DivisiveLocalClustererTest() {
}
@BeforeClass
public static void setUpClass() throws Exception
{
DistanceMetric dm = new EuclideanDistance();
dlc = new DivisiveLocalClusterer(new ElkanKMeans(dm), new DaviesBouldinIndex(dm));
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new Random(12), 2, 2);
easyData = gdg.generateData(100);
}
@AfterClass
public static void tearDownClass() throws Exception {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
     * Test of cluster method, of class DivisiveLocalClusterer.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
List<List<DataPoint>> clusters = dlc.cluster(easyData, 10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_ExecutorService()
{
System.out.println("cluster(dataset, int, ExecutorService)");
List<List<DataPoint>> clusters = dlc.cluster(easyData, 10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,643 | 28.377778 | 102 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/hierarchical/NNChainHACTest.java | package jsat.clustering.hierarchical;
import java.util.Set;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.dissimilarity.SingleLinkDissimilarity;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class NNChainHACTest
{
/*
* README:
     * Clustering is very heuristic, so it's not easy to make a test where we
     * are very sure it will get the correct answer. These tests use a dense,
     * well-separated grid of points so the expected clustering is unambiguous.
*
*/
static private NNChainHAC hac;
static private SimpleDataSet easyData10;
public NNChainHACTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
hac = new NNChainHAC(new SingleLinkDissimilarity());
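        //NN-chain HAC follows chains of nearest neighbors until it finds a
        //reciprocal pair, which can be merged immediately; for a reducible
        //linkage like single-link this yields the same merges as classic
        //agglomerative clustering in O(n^2) time.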
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new Random(12), 2, 5);
easyData10 = gdg.generateData(50);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
     * Test of cluster method, of class NNChainHAC.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
List<List<DataPoint>> clusters = hac.cluster(easyData10, 10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
List<List<DataPoint>> clusters = hac.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, ExecutorService)");
List<List<DataPoint>> clusters = hac.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_int()
{
System.out.println("cluster(dataset, int, int)");
List<List<DataPoint>> clusters = hac.cluster(easyData10, 2, 20);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_int_ExecutorService()
{
System.out.println("cluster(dataset, int, int, ExecutorService)");
List<List<DataPoint>> clusters = hac.cluster(easyData10, 2, 20, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
     * Test of cluster method, of class NNChainHAC.
*/
@Test
public void testCluster_DataSet_int_ExecutorService()
{
System.out.println("cluster(dataset, int, ExecutorService)");
List<List<DataPoint>> clusters = hac.cluster(easyData10, 10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 5,281 | 30.628743 | 102 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/hierarchical/PriorityHACTest.java | package jsat.clustering.hierarchical;
import java.util.Set;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.dissimilarity.SingleLinkDissimilarity;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class PriorityHACTest
{
/*
* README:
     * Clustering is very heuristic, so it's not easy to make a test where we
     * are very sure it will get the correct answer. These tests use a dense,
     * well-separated grid of points so the expected clustering is unambiguous.
*
*/
static private PriorityHAC priorityHAC;
static private SimpleDataSet easyData10;
public PriorityHACTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
priorityHAC = new PriorityHAC(new SingleLinkDissimilarity(new EuclideanDistance()));
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new Random(12), 2, 5);
easyData10 = gdg.generateData(50);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
     * Test of cluster method, of class PriorityHAC.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
List<List<DataPoint>> clusters = priorityHAC.cluster(easyData10, 10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
List<List<DataPoint>> clusters = priorityHAC.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, ExecutorService)");
List<List<DataPoint>> clusters = priorityHAC.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_int()
{
System.out.println("cluster(dataset, int, int)");
List<List<DataPoint>> clusters = priorityHAC.cluster(easyData10, 2, 20);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_int_ExecutorService()
{
System.out.println("cluster(dataset, int, int, ExecutorService)");
List<List<DataPoint>> clusters = priorityHAC.cluster(easyData10, 2, 20, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
     * Test of cluster method, of class PriorityHAC.
*/
@Test
public void testCluster_DataSet_int_ExecutorService()
{
System.out.println("cluster(dataset, int, ExecutorService)");
List<List<DataPoint>> clusters = priorityHAC.cluster(easyData10, 10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 5,476 | 31.02924 | 102 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/hierarchical/SimpleHACTest.java | package jsat.clustering.hierarchical;
import java.util.Set;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.dissimilarity.SingleLinkDissimilarity;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class SimpleHACTest
{
/*
* README:
     * Clustering is very heuristic, so it's not easy to make a test where we
     * are very sure it will get the correct answer. These tests use a dense,
     * well-separated grid of points so the expected clustering is unambiguous.
*
*/
static private SimpleHAC simpleHAC;
static private SimpleDataSet easyData10;
public SimpleHACTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
simpleHAC = new SimpleHAC(new SingleLinkDissimilarity(new EuclideanDistance()));
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new Random(12), 2, 5);
easyData10 = gdg.generateData(30);//HAC is O(n^3), so we make the data set a good deal smaller
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
     * Test of cluster method, of class SimpleHAC.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
List<List<DataPoint>> clusters = simpleHAC.cluster(easyData10, 10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet()
{
System.out.println("cluster(dataset)");
List<List<DataPoint>> clusters = simpleHAC.cluster(easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_ExecutorService()
{
System.out.println("cluster(dataset, ExecutorService)");
List<List<DataPoint>> clusters = simpleHAC.cluster(easyData10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_int()
{
System.out.println("cluster(dataset, int, int)");
List<List<DataPoint>> clusters = simpleHAC.cluster(easyData10, 2, 20);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_DataSet_int_int_ExecutorService()
{
System.out.println("cluster(dataset, int, int, ExecutorService)");
List<List<DataPoint>> clusters = simpleHAC.cluster(easyData10, 2, 20, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
     * Test of cluster method, of class SimpleHAC.
*/
@Test
public void testCluster_DataSet_int_ExecutorService()
{
System.out.println("cluster(dataset, int, ExecutorService)");
List<List<DataPoint>> clusters = simpleHAC.cluster(easyData10, 10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for (List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for (DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 5,559 | 31.325581 | 102 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/ElkanKMeansTest.java | package jsat.clustering.kmeans;
import java.util.ArrayList;
import java.util.Set;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.KClustererBase;
import jsat.distributions.Uniform;
import jsat.linear.ConstantVector;
import jsat.linear.Vec;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.XORWOW;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ElkanKMeansTest
{
static private SimpleDataSet easyData10;
/**
* Used as the starting seeds for k-means clustering to get consistent desired behavior
*/
static private List<Vec> seeds;
public ElkanKMeansTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new XORWOW(1238962356), 2, 5);
easyData10 = gdg.generateData(110);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
//generate seeds that should lead to exact solution
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-1e-10, 1e-10), new XORWOW(5638973498234L), 2, 5);
SimpleDataSet seedData = gdg.generateData(1);
seeds = seedData.getDataVectors();
for(Vec v : seeds)
v.mutableAdd(0.1);//shift off center so we aren't starting at the expected solution
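        /*
         * With +/-1e-10 noise and one point per cell, these seeds sit almost
         * exactly on the 10 grid-cell centers; the 0.1 shift keeps k-means
         * from starting at the converged answer while leaving each seed well
         * inside its own cluster's basin, so the run is deterministic.
         */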
}
/**
* Test of cluster method, of class ElkanKMeans.
*/
@Test
public void testCluster_DataSet_int()
{
System.out.println("cluster(dataset, int)");
ElkanKMeans kMeans = new ElkanKMeans(new EuclideanDistance());
int[] assignment = new int[easyData10.size()];
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, false, true, null);
List<List<DataPoint>> clusters = KClustererBase.createClusterListFromAssignmentArray(assignment, easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class ElkanKMeans.
*/
@Test
public void testCluster_3args_2()
{
System.out.println("cluster(dataset, int, threadpool)");
ElkanKMeans kMeans = new ElkanKMeans(new EuclideanDistance());
int[] assignment = new int[easyData10.size()];
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
List<List<DataPoint>> clusters = KClustererBase.createClusterListFromAssignmentArray(assignment, easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_Weighted()
{
System.out.println("cluster(dataset, int, threadpool)");
ElkanKMeans kMeans = new ElkanKMeans();
kMeans.setStoreMeans(true);
ElkanKMeans kMeans2 = new ElkanKMeans();
kMeans2.setStoreMeans(true);
SimpleDataSet data2 = easyData10.getTwiceShallowClone();
for(int i = 0; i < data2.size(); i++)
data2.setWeight(i, 15.0);
int[] assignment = new int[easyData10.size()];
List<Vec> orig_seeds = new ArrayList<Vec>();
List<Vec> seeds2 = new ArrayList<Vec>();
for(Vec v : seeds)
{
orig_seeds.add(v.clone());
seeds2.add(v.clone());
}
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
kMeans2.cluster(data2, null, 10, seeds2, assignment, true, true, true, null);
//multiplied weights by a constant, should get same solutions
for(int i = 0; i < 10; i++)
{
double diff = seeds.get(i).subtract(seeds2.get(i)).sum();
assertEquals(0.0, diff, 1e-10);
}
//restore means and try again with randomish weights, should end up with something close
for(int i = 0; i < orig_seeds.size(); i++)
{
orig_seeds.get(i).copyTo(seeds.get(i));
orig_seeds.get(i).copyTo(seeds2.get(i));
}
Random rand = new XORWOW(897654);
for(int i = 0; i < data2.size(); i++)
data2.setWeight(i, 0.5+5*rand.nextDouble());
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
kMeans2.cluster(data2, null, 10, seeds2, assignment, true, true, true, null);
//multiplied weights by a constant, should get similar solutions, but slightly different
for(int i = 0; i < 10; i++)
{
double diff = seeds.get(i).subtract(seeds2.get(i)).sum();
assertEquals(0.0, diff, 0.1);
assertTrue(Math.abs(diff) > 1e-10 );
}
}
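    /*
     * Why multiplying every weight by one constant can't change the answer:
     * the weighted k-means objective is sum_i w_i * ||x_i - mu_c(i)||^2, so a
     * shared factor scales the whole objective and cancels in the
     * weighted-mean update. A minimal sketch of that update (a hypothetical
     * helper for illustration only, not part of the JSAT API):
     */
    private static Vec weightedMean(List<Vec> xs, double[] w)
    {
        Vec mean = new jsat.linear.DenseVector(xs.get(0).length());
        double wSum = 0;
        for (int i = 0; i < xs.size(); i++)
        {
            mean.mutableAdd(w[i], xs.get(i));//accumulate w_i * x_i
            wSum += w[i];
        }
        mean.mutableDivide(wSum);//a shared constant factor on w cancels here
        return mean;
    }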
}
| 5,901 | 32.157303 | 117 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/ElkanKernelKMeansTest.java | package jsat.clustering.kmeans;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.distributions.Uniform;
import jsat.distributions.kernels.LinearKernel;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ElkanKernelKMeansTest
{
public ElkanKernelKMeansTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of cluster method, of class ElkanKernelKMeans.
*/
@Test
public void testCluster_4args()
{
System.out.println("cluster");
ElkanKernelKMeans kmeans = new ElkanKernelKMeans(new RBFKernel(0.1));
ClassificationDataSet toCluster = FixedProblems.getCircles(1000, RandomUtil.getRandom(), 1e-3, 1.0);
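        //A linear method cannot separate concentric circles (both rings share
        //the same mean), but under an RBF kernel the implied feature-space
        //distances make the rings separable, so kernel k-means should recover
        //them exactly.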
int[] result = kmeans.cluster(toCluster, 2, true, (int[])null);
        //make sure each cluster has points from only 1 class. If true then everything is good
Map<Integer, Set<Integer>> tmp = new HashMap<>();
for(int c = 0; c< toCluster.getClassSize(); c++)
tmp.put(c, new IntSet());
for(int i = 0; i < result.length; i++)
tmp.get(toCluster.getDataPointCategory(i)).add(result[i]);
for(Set<Integer> set : tmp.values())
assertEquals(1, set.size());
}
/**
* Test of cluster method, of class ElkanKernelKMeans.
*/
@Test
public void testCluster_3args()
{
System.out.println("cluster");
ElkanKernelKMeans kmeans = new ElkanKernelKMeans(new RBFKernel(0.1));
ClassificationDataSet toCluster = FixedProblems.getCircles(1000, RandomUtil.getRandom(), 1e-3, 1.0);
int[] result = kmeans.cluster(toCluster, 2, (int[])null);
        //make sure each cluster has points from only 1 class. If true then everything is good
Map<Integer, Set<Integer>> tmp = new HashMap<>();
for(int c = 0; c< toCluster.getClassSize(); c++)
tmp.put(c, new IntSet());
for(int i = 0; i < result.length; i++)
tmp.get(toCluster.getDataPointCategory(i)).add(result[i]);
for(Set<Integer> set : tmp.values())
assertEquals(1, set.size());
}
@Test
public void testCluster_Weighted()
{
System.out.println("cluster(dataset, int, threadpool)");
LloydKernelKMeans kmeans = new LloydKernelKMeans(new LinearKernel());
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new XORWOW(1238962356), 2);
ClassificationDataSet toCluster = gdg.generateData(200).asClassificationDataSet(0);
        //make the LAST data point so far out it will screw everything up, UNLESS you understand that it has a tiny weight
toCluster.getDataPoint(toCluster.size()-1).getNumericalValues().set(0, 1.9e100);
Random rand = new XORWOW(897654);
for(int i = 0; i < toCluster.size(); i++)
toCluster.setWeight(i, 0.5+5*rand.nextDouble());
toCluster.setWeight(toCluster.size()-1, 1e-200);
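        /*
         * Why the tiny weight saves us: the outlier's contribution to any
         * weighted mean is about w*x = 1e-200 * 1.9e100 = 1.9e-100, which is
         * negligible next to the roughly-unit-weight inliers. An
         * implementation that ignored the weights would have its means dragged
         * out to ~1e100, so this test fails loudly if weighting is broken.
         */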
int[] result = kmeans.cluster(toCluster, 2, (int[])null);
        //make sure each cluster has points from only 1 class. If true then everything is good
Map<Integer, Set<Integer>> tmp = new HashMap<>();
IntSet allSeen = new IntSet();
for(int c = 0; c< toCluster.getClassSize(); c++)
tmp.put(c, new IntSet());
for(int i = 0; i < result.length-1; i++)
{
tmp.get(toCluster.getDataPointCategory(i)).add(result[i]);
allSeen.add(result[i]);
}
for(Set<Integer> set : tmp.values())
assertEquals(1, set.size());
assertEquals(2, allSeen.size());//make sure we saw both clusters!
result = kmeans.cluster(toCluster, 2, true, (int[])null);
        //make sure each cluster has points from only 1 class. If true then everything is good
tmp = new HashMap<>();
allSeen = new IntSet();
for(int c = 0; c< toCluster.getClassSize(); c++)
tmp.put(c, new IntSet());
for(int i = 0; i < result.length-1; i++)
{
tmp.get(toCluster.getDataPointCategory(i)).add(result[i]);
allSeen.add(result[i]);
}
for(Set<Integer> set : tmp.values())
assertEquals(1, set.size());
assertEquals(2, allSeen.size());//make sure we saw both clusters!
}
}
| 5,249 | 34.234899 | 123 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/GMeansTest.java | package jsat.clustering.kmeans;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.SeedSelectionMethods;
import jsat.distributions.Normal;
import jsat.distributions.TruncatedDistribution;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class GMeansTest
{
static private SimpleDataSet easyData10;
public GMeansTest()
{
}
@BeforeClass
public static void setUpClass()
{
GridDataGenerator gdg = new GridDataGenerator(new TruncatedDistribution(new Normal(0, 0.01), -0.15, 0.15), RandomUtil.getRandom(), 2, 2);
easyData10 = gdg.generateData(50);
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testCluster_4args_1_findK()
{
System.out.println("cluster findK");
GMeans kMeans = new GMeans(new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST));
List<List<DataPoint>> clusters = kMeans.cluster(easyData10, 1, 20, true);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_3args_1_findK()
{
System.out.println("cluster findK");
GMeans kMeans = new GMeans(new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST));
List<List<DataPoint>> clusters = kMeans.cluster(easyData10, 1, 20);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,799 | 27.571429 | 145 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/HamerlyKMeansTest.java |
package jsat.clustering.kmeans;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.KClustererBase;
import jsat.clustering.SeedSelectionMethods;
import jsat.distributions.Uniform;
import jsat.linear.Vec;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class HamerlyKMeansTest
{
static private SimpleDataSet easyData10;
static private ExecutorService ex;
/**
* Used as the starting seeds for k-means clustering to get consistent desired behavior
*/
static private List<Vec> seeds;
public HamerlyKMeansTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), RandomUtil.getRandom(), 2, 5);
easyData10 = gdg.generateData(110);
ex = Executors.newFixedThreadPool(SystemInfo.LogicalCores);
}
@AfterClass
public static void tearDownClass() throws Exception
{
ex.shutdown();
}
@Before
public void setUp()
{
//generate seeds that should lead to exact solution
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-1e-10, 1e-10), RandomUtil.getRandom(), 2, 5);
SimpleDataSet seedData = gdg.generateData(1);
seeds = seedData.getDataVectors();
for(Vec v : seeds)
v.mutableAdd(0.1);//shift off center so we aren't starting at the expected solution
}
@After
public void tearDown()
{
}
/**
* Test of cluster method, of class HamerlyKMeans.
*/
@Test
public void testCluster_3args_1()
{
System.out.println("cluster");
HamerlyKMeans kMeans = new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST);
int[] assignment = new int[easyData10.size()];
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
List<List<DataPoint>> clusters = KClustererBase.createClusterListFromAssignmentArray(assignment, easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class HamerlyKMeans.
*/
@Test
public void testCluster_DataSet_intArr()
{
System.out.println("cluster");
HamerlyKMeans kMeans = new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST);
int[] assignment = new int[easyData10.size()];
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
List<List<DataPoint>> clusters = KClustererBase.createClusterListFromAssignmentArray(assignment, easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_Weighted()
{
System.out.println("cluster(dataset, int, threadpool)");
HamerlyKMeans kMeans = new HamerlyKMeans();
kMeans.setStoreMeans(true);
HamerlyKMeans kMeans2 = new HamerlyKMeans();
kMeans2.setStoreMeans(true);
SimpleDataSet data2 = easyData10.getTwiceShallowClone();
for(int i = 0; i < data2.size(); i++)
data2.setWeight(i, 15.0);
int[] assignment = new int[easyData10.size()];
List<Vec> orig_seeds = new ArrayList<Vec>();
List<Vec> seeds2 = new ArrayList<Vec>();
for(Vec v : seeds)
{
orig_seeds.add(v.clone());
seeds2.add(v.clone());
}
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
kMeans2.cluster(data2, null, 10, seeds2, assignment, true, true, true, null);
//multiplied weights by a constant, should get same solutions
for(int i = 0; i < 10; i++)
{
double diff = seeds.get(i).subtract(seeds2.get(i)).sum();
assertEquals(0.0, diff, 1e-10);
}
//restore means and try again with randomish weights, should end up with something close
for(int i = 0; i < orig_seeds.size(); i++)
{
orig_seeds.get(i).copyTo(seeds.get(i));
orig_seeds.get(i).copyTo(seeds2.get(i));
}
Random rand = new XORWOW(897654);
for(int i = 0; i < data2.size(); i++)
data2.setWeight(i, 0.5+5*rand.nextDouble());
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
kMeans2.cluster(data2, null, 10, seeds2, assignment, true, true, true, null);
//multiplied weights by a constant, should get similar solutions, but slightly different
for(int i = 0; i < 10; i++)
{
double diff = seeds.get(i).subtract(seeds2.get(i)).sum();
assertEquals(0.0, diff, 0.1);
assertTrue(Math.abs(diff) > 1e-10 );
}
}
}
| 6,061 | 32.865922 | 125 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/KMeansPDNTest.java | package jsat.clustering.kmeans;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.SeedSelectionMethods;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class KMeansPDNTest
{
static private SimpleDataSet easyData10;
public KMeansPDNTest()
{
}
@BeforeClass
public static void setUpClass()
{
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new Random(12), 2, 2);
easyData10 = gdg.generateData(110);
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testCluster_4args_1_findK()
{
System.out.println("cluster findK");
KMeansPDN kMeans = new KMeansPDN(new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST));
List<List<DataPoint>> clusters = kMeans.cluster(easyData10, 1, 20, true);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_3args_1_findK()
{
System.out.println("cluster findK");
KMeansPDN kMeans = new KMeansPDN(new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST));
List<List<DataPoint>> clusters = kMeans.cluster(easyData10, 1, 20);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,647 | 26.873684 | 136 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/LloydKernelKMeansTest.java | package jsat.clustering.kmeans;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import jsat.FixedProblems;
import jsat.classifiers.ClassificationDataSet;
import jsat.distributions.Uniform;
import jsat.distributions.kernels.LinearKernel;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.*;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class LloydKernelKMeansTest
{
public LloydKernelKMeansTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of cluster method, of class LloydKernelKMeans.
*/
@Test
public void testCluster_4args()
{
System.out.println("cluster");
LloydKernelKMeans kmeans = new LloydKernelKMeans(new RBFKernel(0.1));
ClassificationDataSet toCluster = FixedProblems.getCircles(1000, RandomUtil.getRandom(), 1e-3, 1.0);
int[] result = kmeans.cluster(toCluster, 2, true, (int[])null);
        //make sure each cluster has points from only 1 class. If true then everything is good
Map<Integer, Set<Integer>> tmp = new HashMap<>();
for(int c = 0; c< toCluster.getClassSize(); c++)
tmp.put(c, new IntSet());
for(int i = 0; i < result.length; i++)
tmp.get(toCluster.getDataPointCategory(i)).add(result[i]);
for(Set<Integer> set : tmp.values())
assertEquals(1, set.size());
}
/**
* Test of cluster method, of class LloydKernelKMeans.
*/
@Test
public void testCluster_3args()
{
System.out.println("cluster");
LloydKernelKMeans kmeans = new LloydKernelKMeans(new RBFKernel(0.1));
ClassificationDataSet toCluster = FixedProblems.getCircles(1000, RandomUtil.getRandom(), 1e-3, 1.0);
int[] result = kmeans.cluster(toCluster, 2, (int[])null);
        //make sure each cluster has points from only 1 class. If true then everything is good
Map<Integer, Set<Integer>> tmp = new HashMap<>();
for(int c = 0; c< toCluster.getClassSize(); c++)
tmp.put(c, new IntSet());
for(int i = 0; i < result.length; i++)
tmp.get(toCluster.getDataPointCategory(i)).add(result[i]);
for(Set<Integer> set : tmp.values())
assertEquals(1, set.size());
}
@Test
public void testCluster_Weighted()
{
System.out.println("cluster(dataset, int, threadpool)");
LloydKernelKMeans kmeans = new LloydKernelKMeans(new LinearKernel());
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new XORWOW(1238962356), 2);
ClassificationDataSet toCluster = gdg.generateData(200).asClassificationDataSet(0);
        //make the LAST data point so far out it will screw everything up, UNLESS you understand that it has a tiny weight
toCluster.getDataPoint(toCluster.size()-1).getNumericalValues().set(0, 1.9e100);
Random rand = new XORWOW(897654);
for(int i = 0; i < toCluster.size(); i++)
toCluster.setWeight(i, 0.5+5*rand.nextDouble());
toCluster.setWeight(toCluster.size()-1, 1e-200);
int[] result = kmeans.cluster(toCluster, 2, (int[])null);
        //make sure each cluster has points from only 1 class. If true then everything is good
Map<Integer, Set<Integer>> tmp = new HashMap<>();
IntSet allSeen = new IntSet();
for(int c = 0; c< toCluster.getClassSize(); c++)
tmp.put(c, new IntSet());
for(int i = 0; i < result.length-1; i++)
{
tmp.get(toCluster.getDataPointCategory(i)).add(result[i]);
allSeen.add(result[i]);
}
for(Set<Integer> set : tmp.values())
assertEquals(1, set.size());
assertEquals(2, allSeen.size());//make sure we saw both clusters!
result = kmeans.cluster(toCluster, 2, true, (int[])null);
        //make sure each cluster has points from only 1 class. If true then everything is good
tmp = new HashMap<>();
allSeen = new IntSet();
for(int c = 0; c< toCluster.getClassSize(); c++)
tmp.put(c, new IntSet());
for(int i = 0; i < result.length-1; i++)
{
tmp.get(toCluster.getDataPointCategory(i)).add(result[i]);
allSeen.add(result[i]);
}
for(Set<Integer> set : tmp.values())
assertEquals(1, set.size());
assertEquals(2, allSeen.size());//make sure we saw both clusters!
}
}
| 5,035 | 33.731034 | 123 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/MiniBatchKMeansTest.java | package jsat.clustering.kmeans;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.SeedSelectionMethods;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class MiniBatchKMeansTest
{
    //NOTE: FARTHEST_FIRST seeding + a 2 x 2 grid of 4 classes results in a deterministic result given a high density
static private SimpleDataSet easyData10;
public MiniBatchKMeansTest()
{
}
@BeforeClass
public static void setUpClass()
{
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), new Random(12), 2, 2);
easyData10 = gdg.generateData(110);
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of cluster method, of class MiniBatchKMeans.
*/
@Test
public void testCluster_DataSet_intArr()
{
System.out.println("cluster");
MiniBatchKMeans kMeans = new MiniBatchKMeans(new EuclideanDistance(), 50, 50, SeedSelectionMethods.SeedSelection.FARTHEST_FIRST);
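        //Mini-batch k-means updates the means from random 50-point batches
        //(50 iterations here) rather than full passes, trading exactness for
        //speed; FARTHEST_FIRST seeding keeps the result stable enough to
        //assert exact recovery of the 10 grid cells.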
List<List<DataPoint>> clusters = kMeans.cluster(easyData10, 10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class MiniBatchKMeans.
*/
@Test
public void testCluster_3args_1()
{
System.out.println("cluster");
MiniBatchKMeans kMeans = new MiniBatchKMeans(new EuclideanDistance(), 50, 50, SeedSelectionMethods.SeedSelection.FARTHEST_FIRST);
List<List<DataPoint>> clusters = kMeans.cluster(easyData10, 10, true);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,840 | 27.128713 | 137 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/NaiveKMeansTest.java |
package jsat.clustering.kmeans;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.KClustererBase;
import jsat.clustering.SeedSelectionMethods;
import jsat.distributions.Uniform;
import jsat.linear.Vec;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class NaiveKMeansTest
{
static private SimpleDataSet easyData10;
/**
* Used as the starting seeds for k-means clustering to get consistent desired behavior
*/
static private List<Vec> seeds;
public NaiveKMeansTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-0.15, 0.15), RandomUtil.getRandom(), 2, 5);
easyData10 = gdg.generateData(110);
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
//generate seeds that should lead to exact solution
GridDataGenerator gdg = new GridDataGenerator(new Uniform(-1e-10, 1e-10), RandomUtil.getRandom(), 2, 5);
SimpleDataSet seedData = gdg.generateData(1);
seeds = seedData.getDataVectors();
for(Vec v : seeds)
v.mutableAdd(0.1);//shift off center so we aren't starting at the expected solution
}
@After
public void tearDown()
{
}
/**
* Test of cluster method, of class NaiveKMeans.
*/
@Test
public void testCluster_DataSet_intArr()
{
System.out.println("cluster");
NaiveKMeans kMeans = new NaiveKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST);
int[] assignment = new int[easyData10.size()];
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
List<List<DataPoint>> clusters = KClustererBase.createClusterListFromAssignmentArray(assignment, easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
/**
* Test of cluster method, of class NaiveKMeans.
*/
@Test
public void testCluster_3args_1()
{
System.out.println("cluster");
NaiveKMeans kMeans = new NaiveKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST);
int[] assignment = new int[easyData10.size()];
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
List<List<DataPoint>> clusters = KClustererBase.createClusterListFromAssignmentArray(assignment, easyData10);
assertEquals(10, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_Weighted()
{
System.out.println("cluster(dataset, int, threadpool)");
NaiveKMeans kMeans = new NaiveKMeans();
kMeans.setStoreMeans(true);
NaiveKMeans kMeans2 = new NaiveKMeans();
kMeans2.setStoreMeans(true);
SimpleDataSet data2 = easyData10.getTwiceShallowClone();
for(int i = 0; i < data2.size(); i++)
data2.setWeight(i, 15.0);
int[] assignment = new int[easyData10.size()];
List<Vec> orig_seeds = new ArrayList<Vec>();
List<Vec> seeds2 = new ArrayList<Vec>();
for(Vec v : seeds)
{
orig_seeds.add(v.clone());
seeds2.add(v.clone());
}
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
kMeans2.cluster(data2, null, 10, seeds2, assignment, true, true, true, null);
//multiplied weights by a constant, should get same solutions
for(int i = 0; i < 10; i++)
{
double diff = seeds.get(i).subtract(seeds2.get(i)).sum();
assertEquals(0.0, diff, 1e-10);
}
//restore means and try again with randomish weights, should end up with something close
for(int i = 0; i < orig_seeds.size(); i++)
{
orig_seeds.get(i).copyTo(seeds.get(i));
orig_seeds.get(i).copyTo(seeds2.get(i));
}
Random rand = new XORWOW(897654);
for(int i = 0; i < data2.size(); i++)
data2.setWeight(i, 0.5+5*rand.nextDouble());
kMeans.cluster(easyData10, null, 10, seeds, assignment, true, true, true, null);
kMeans2.cluster(data2, null, 10, seeds2, assignment, true, true, true, null);
//multiplied weights by a constant, should get similar solutions, but slightly different
for(int i = 0; i < 10; i++)
{
double diff = seeds.get(i).subtract(seeds2.get(i)).sum();
assertEquals(0.0, diff, 0.1);
assertTrue(Math.abs(diff) > 1e-10 );
}
}
}
| 5,906 | 32.754286 | 121 | java |
JSAT | JSAT-master/JSAT/test/jsat/clustering/kmeans/XMeansTest.java | package jsat.clustering.kmeans;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.DataPoint;
import jsat.clustering.SeedSelectionMethods;
import jsat.distributions.Normal;
import jsat.distributions.TruncatedDistribution;
import jsat.distributions.Uniform;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.GridDataGenerator;
import jsat.utils.IntSet;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class XMeansTest
{
static private SimpleDataSet easyData10;
public XMeansTest()
{
}
@BeforeClass
public static void setUpClass()
{
GridDataGenerator gdg = new GridDataGenerator(new TruncatedDistribution(new Normal(0, 0.05), -.15, .15), RandomUtil.getRandom(), 2, 2);
easyData10 = gdg.generateData(100);
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testCluster_4args_1_findK()
{
System.out.println("cluster findK");
XMeans kMeans = new XMeans(new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST));
List<List<DataPoint>> clusters = kMeans.cluster(easyData10, 2, 40, true);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
@Test
public void testCluster_3args_1_findK()
{
System.out.println("cluster findK");
XMeans kMeans = new XMeans(new HamerlyKMeans(new EuclideanDistance(), SeedSelectionMethods.SeedSelection.FARTHEST_FIRST));
List<List<DataPoint>> clusters = kMeans.cluster(easyData10, 2, 40);
assertEquals(4, clusters.size());
Set<Integer> seenBefore = new IntSet();
for(List<DataPoint> cluster : clusters)
{
int thisClass = cluster.get(0).getCategoricalValue(0);
            assertFalse(seenBefore.contains(thisClass));
            seenBefore.add(thisClass);
for(DataPoint dp : cluster)
assertEquals(thisClass, dp.getCategoricalValue(0));
}
}
}
| 2,798 | 27.561224 | 143 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/FastICATest.java |
package jsat.datatransform;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import static java.lang.Math.abs;
import jsat.linear.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class FastICATest
{
public FastICATest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test the transform method with data of the same dimension
*/
@Test
public void testTransform2_2()
{
System.out.println("transform");
SimpleDataSet source = new SimpleDataSet(2, new CategoricalData[0]);
SimpleDataSet X = new SimpleDataSet(2, new CategoricalData[0]);
Matrix mixing_true = new DenseMatrix(new double[][]
{
{2, -1.5},
{0.5, 0}
});
DenseVector time = new DenseVector(200);
for(int i = 0; i < time.length(); i++)
{
double t = i/(time.length()+0.0);
time.set(i, t);
Vec s = DenseVector.toDenseVec(Math.cos(4*t*3.14) , Math.sin(12*t*3.14));
source.add(new DataPoint(s));
X.add(new DataPoint(s.multiply(mixing_true.transpose())));
}
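        /*
         * Generative model for this test: each observation is x = A*s, with
         * mixing matrix A = mixing_true and sources
         * s(t) = (cos(4*pi*t), sin(12*pi*t)). ICA can only recover the sources
         * up to permutation and sign, which is why the checks below compare
         * absolute values and search over all true components for a match.
         */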
SimpleDataSet origX = X.shallowClone();
FastICA ica = new FastICA(X, 2);
X.applyTransform(ica);
        //make sure scales match up. Keep 0 as the axis around which the sign changes so comparisons work when ICA accidentally gets the wrong sign
LinearTransform linearX = new LinearTransform(X, -1, 1);
LinearTransform linearS = new LinearTransform(source, -1, 1);
X.applyTransform(linearX);
source.applyTransform(linearS);
        //Let's go through and compare our found components to the truth. Check differences in absolute value, because the independent components may have the wrong sign!
for(int found_c= 0; found_c < X.getNumNumericalVars(); found_c++)
{
Vec x_c = X.getNumericColumn(found_c);
boolean found_match = false;
//It has to match up to ONE of the true components
SearchLoop:
for(int true_c = 0; true_c < source.getNumNumericalVars(); true_c++)
{
Vec t_c = source.getNumericColumn(true_c);
for(int i = 0; i < x_c.length(); i++)
{
double cmp = abs(x_c.get(i))-abs(t_c.get(i));
if(abs(cmp) > 1e-3)
continue SearchLoop;
}
//we made it!
found_match = true;
}
if(!found_match)
fail("The " + found_c + " component didn't match any of the true components");
}
X.applyTransform(new InverseOfTransform(linearX));
source.applyTransform(new InverseOfTransform(linearS));
X.applyTransform(new InverseOfTransform(ica));
//make sure inverse maps back up to original data
for(int inverted_c= 0; inverted_c < X.getNumNumericalVars(); inverted_c++)
{
Vec x_c = X.getNumericColumn(inverted_c);
boolean found_match = false;
//It has to match up to ONE of the true components
SearchLoop:
for(int true_x = 0; true_x < origX.getNumNumericalVars(); true_x++)
{
Vec t_c = origX.getNumericColumn(true_x);
for(int i = 0; i < x_c.length(); i++)
{
double cmp = abs(x_c.get(i))-abs(t_c.get(i));
if(abs(cmp) > 1e-3)
continue SearchLoop;
}
//we made it!
found_match = true;
}
if(!found_match)
fail("The " + inverted_c + " component didn't match any of the true components");
}
}
/**
* Tests the transform method with data pre-whitened
*/
@Test
public void testTransform2_2_prewhite()
{
System.out.println("transform");
SimpleDataSet source = new SimpleDataSet(2, new CategoricalData[0]);
SimpleDataSet X = new SimpleDataSet(2, new CategoricalData[0]);
Matrix mixing_true = new DenseMatrix(new double[][]
{
{2, -1.5},
{0.5, 1}
});
DenseVector time = new DenseVector(200);
for(int i = 0; i < time.length(); i++)
{
double t = i/(time.length()+0.0);
time.set(i, t);
Vec s = DenseVector.toDenseVec(Math.cos(4*t*3.14) , Math.sin(12*t*3.14));
source.add(new DataPoint(s));
X.add(new DataPoint(s.multiply(mixing_true.transpose())));
}
ZeroMeanTransform zeroMean = new ZeroMeanTransform(X);
X.applyTransform(zeroMean);
WhitenedPCA whiten = new WhitenedPCA(X);
X.applyTransform(whiten);
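        //After zero-meaning and whitening, E[x x^T] = I, so the effective
        //mixing matrix is orthogonal and FastICA only needs to find a
        //rotation; the final constructor argument below indicates the data
        //has already been whitened.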
SimpleDataSet origX = X.shallowClone();
FastICA ica = new FastICA(X, 2, FastICA.DefaultNegEntropyFunc.LOG_COSH, true);
X.applyTransform(ica);
        //make sure scales match up. Keep 0 as the axis around which the sign changes so comparisons work when ICA accidentally gets the wrong sign
LinearTransform linearX = new LinearTransform(X, -1, 1);
LinearTransform linearS = new LinearTransform(source, -1, 1);
X.applyTransform(linearX);
source.applyTransform(linearS);
        //Let's go through and compare our found components to the truth. Check differences in absolute value, because the independent components may have the wrong sign!
for(int found_c= 0; found_c < X.getNumNumericalVars(); found_c++)
{
Vec x_c = X.getNumericColumn(found_c);
boolean found_match = false;
//It has to match up to ONE of the true components
SearchLoop:
for(int true_c = 0; true_c < source.getNumNumericalVars(); true_c++)
{
Vec t_c = source.getNumericColumn(true_c);
for(int i = 0; i < x_c.length(); i++)
{
double cmp = abs(x_c.get(i))-abs(t_c.get(i));
if(abs(cmp) > 1e-3)
continue SearchLoop;
}
//we made it!
found_match = true;
}
if(!found_match)
fail("The " + found_c + " component didn't match any of the true components");
}
X.applyTransform(new InverseOfTransform(linearX));
source.applyTransform(new InverseOfTransform(linearS));
X.applyTransform(new InverseOfTransform(ica));
//make sure inverse maps back up to original data
for(int inverted_c= 0; inverted_c < X.getNumNumericalVars(); inverted_c++)
{
Vec x_c = X.getNumericColumn(inverted_c);
boolean found_match = false;
//It has to match up to ONE of the true components
SearchLoop:
for(int true_x = 0; true_x < origX.getNumNumericalVars(); true_x++)
{
Vec t_c = origX.getNumericColumn(true_x);
for(int i = 0; i < x_c.length(); i++)
{
double cmp = abs(x_c.get(i))-abs(t_c.get(i));
if(abs(cmp) > 1e-3)
continue SearchLoop;
}
//we made it!
found_match = true;
}
if(!found_match)
fail("The " + inverted_c + " component didn't match any of the true components");
}
}
/**
* Tests the transform method with data of a higher dimension
*/
@Test
public void testTransform2_3()
{
System.out.println("transform");
SimpleDataSet source = new SimpleDataSet(2, new CategoricalData[0]);
SimpleDataSet X = new SimpleDataSet(3, new CategoricalData[0]);
Matrix mixing_true = new DenseMatrix(new double[][]
{
{2, 1.5, -1},
{-0.5, 1, 2},
});
DenseVector time = new DenseVector(200);
for(int i = 0; i < time.length(); i++)
{
double t = i/(time.length()+0.0);
time.set(i, t);
Vec s = DenseVector.toDenseVec(Math.cos(4*t*3.14) , Math.sin(12*t*3.14));
source.add(new DataPoint(s));
X.add(new DataPoint(mixing_true.transpose().multiply(s)));
}
SimpleDataSet origX = X.shallowClone();
FastICA ica = new FastICA(X, 2);
X.applyTransform(ica);
        //make sure scales match up. Keep 0 as the axis around which the sign changes so comparisons work when ICA accidentally gets the wrong sign
LinearTransform linearX = new LinearTransform(X, -1, 1);
LinearTransform linearS = new LinearTransform(source, -1, 1);
X.applyTransform(linearX);
source.applyTransform(linearS);
        //Let's go through and compare our found components to the truth. Check differences in absolute value, because the independent components may have the wrong sign!
for(int found_c= 0; found_c < X.getNumNumericalVars(); found_c++)
{
Vec x_c = X.getNumericColumn(found_c);
boolean found_match = false;
//It has to match up to ONE of the true components
SearchLoop:
for(int true_c = 0; true_c < source.getNumNumericalVars(); true_c++)
{
Vec t_c = source.getNumericColumn(true_c);
for(int i = 0; i < x_c.length(); i++)
{
double cmp = abs(x_c.get(i))-abs(t_c.get(i));
if(abs(cmp) > 1e-3)
continue SearchLoop;
}
//we made it!
found_match = true;
}
if(!found_match)
fail("The " + found_c + " component didn't match any of the true components");
}
X.applyTransform(new InverseOfTransform(linearX));
source.applyTransform(new InverseOfTransform(linearS));
X.applyTransform(new InverseOfTransform(ica));
//make sure inverse maps back up to original data
for(int inverted_c= 0; inverted_c < X.getNumNumericalVars(); inverted_c++)
{
Vec x_c = X.getNumericColumn(inverted_c);
boolean found_match = false;
//It has to match up to ONE of the true components
SearchLoop:
for(int true_x = 0; true_x < origX.getNumNumericalVars(); true_x++)
{
Vec t_c = origX.getNumericColumn(true_x);
for(int i = 0; i < x_c.length(); i++)
{
double cmp = abs(x_c.get(i))-abs(t_c.get(i));
if(abs(cmp) > 1e-3)
continue SearchLoop;
}
//we made it!
found_match = true;
}
if(!found_match)
fail("The " + inverted_c + " component didn't match any of the true components");
}
}
}
| 11,946 | 33.134286 | 166 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/ImputerTest.java | /*
* Copyright (C) 2016 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import jsat.DataSet;
import jsat.SimpleDataSet;
import jsat.distributions.Normal;
import jsat.utils.GridDataGenerator;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class ImputerTest
{
public ImputerTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of fit method, of class Imputer.
*/
@Test
public void testFit()
{
System.out.println("fit");
GridDataGenerator gdg = new GridDataGenerator(new Normal(0.0, 0.5), RandomUtil.getRandom(), 1, 1, 1, 1);
SimpleDataSet data = gdg.generateData(10000);
//remove class label feature
data.applyTransform(new RemoveAttributeTransform(data, new HashSet<Integer>(Arrays.asList(0)), Collections.EMPTY_SET));
//true mean and median should be 0
data.applyTransform(new InsertMissingValuesTransform(0.1));
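        /*
         * Sketch of the expectation, assuming the single grid cell is
         * centered at the origin: the surviving values are drawn from
         * Normal(0, 0.5), whose mean and median are both 0. Deleting ~10% of
         * the entries at random biases neither estimate, so the loose 0.25
         * tolerance below suffices.
         */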
Imputer imputer = new Imputer(data, Imputer.NumericImputionMode.MEAN);
for(int i = 0; i < data.getNumNumericalVars(); i++)
assertEquals(0.0, imputer.numeric_imputs[i], 0.25);
imputer = new Imputer(data, Imputer.NumericImputionMode.MEDIAN);
for(int i = 0; i < data.getNumNumericalVars(); i++)
assertEquals(0.0, imputer.numeric_imputs[i], 0.25);
imputer = imputer.clone();
for(int i = 0; i < data.getNumNumericalVars(); i++)
assertEquals(0.0, imputer.numeric_imputs[i], 0.25);
data.applyTransform(imputer);
assertEquals(0, data.countMissingValues());
//test categorical features
data = gdg.generateData(10000);
//remove class label feature
data.applyTransform(new RemoveAttributeTransform(data, new HashSet<Integer>(Arrays.asList(0)), Collections.EMPTY_SET));
        //breaking into 3 evenly sized bins, so the middle bin, index 1, should be the mode
data.applyTransform(new NumericalToHistogram(data, 3));
data.applyTransform(new InsertMissingValuesTransform(0.1));
imputer.fit(data);
for(int i = 0; i < data.getNumCategoricalVars(); i++)
assertEquals(1, imputer.cat_imputs[i]);
imputer = imputer.clone();
for(int i = 0; i < data.getNumCategoricalVars(); i++)
assertEquals(1, imputer.cat_imputs[i]);
}
}
| 3,656 | 30.25641 | 127 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/JLTransformTest.java |
package jsat.datatransform;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import jsat.DataSet;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Tests for JL are inherently probabilistic, so occasional failures can be
* tolerated.
*
* @author Edward Raff
*/
public class JLTransformTest
{
static DataSet ds;
static double eps = 0.2;
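    /*
     * Rough sanity check on these parameters, assuming the usual
     * Johnson-Lindenstrauss bound: k on the order of 8*ln(n)/eps^2 dimensions
     * preserves all pairwise squared distances within a (1 +/- eps) factor
     * with high probability. With n = 100 and eps = 0.2 that suggests
     * ~8*ln(100)/0.04 ~= 921 dimensions, so the k = 550 used below trades
     * some failure probability for speed; hence the tests tolerate a small
     * number of violations.
     */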
public JLTransformTest()
{
}
@BeforeClass
public static void setUpClass()
{
List<DataPoint> dps = new ArrayList<DataPoint>(100);
Random rand = RandomUtil.getRandom();
for(int i = 0; i < 100; i++)
{
Vec v = DenseVector.random(2000, rand);
dps.add(new DataPoint(v, new int[0], new CategoricalData[0]));
}
ds = new SimpleDataSet(dps);
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class JLTransform.
*/
@Test
public void testTransform()
{
System.out.println("transform");
Random rand = new XORWOW(124);
int k = 550;
List<Vec> transformed = new ArrayList<Vec>(ds.size());
for( JLTransform.TransformMode mode : JLTransform.TransformMode.values())
{
JLTransform jl = new JLTransform(k, mode, true);
jl.fit(ds);
jl = jl.clone();
transformed.clear();
for(int i = 0; i < ds.size(); i++)
transformed.add(jl.transform(ds.getDataPoint(i)).getNumericalValues());
int violations = 0;
int count = 0;
EuclideanDistance d = new EuclideanDistance();
for(int i = 0; i < ds.size(); i++)
{
DataPoint dpi = ds.getDataPoint(i);
Vec vi = dpi.getNumericalValues();
Vec vti = transformed.get(i);
for(int j = i+1; j < ds.size(); j++)
{
count++;
DataPoint dpj = ds.getDataPoint(j);
Vec vj = dpj.getNumericalValues();
Vec vtj = transformed.get(j);
double trueDist = Math.pow(d.dist(vi, vj), 2);
double embDist = Math.pow(d.dist(vti, vtj), 2);
double err = (embDist-trueDist)/trueDist;
if( Math.abs(err) > eps)
violations++;
}
}
assertTrue("Too many violations occured", violations < 150);
}
}
}
| 3,147 | 24.387097 | 87 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/PCATest.java | /*
* Copyright (C) 2016 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform;
import java.util.Random;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.ClassificationModelEvaluation;
import jsat.classifiers.Classifier;
import jsat.classifiers.knn.NearestNeighbour;
import jsat.distributions.Normal;
import jsat.utils.GridDataGenerator;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class PCATest
{
public PCATest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class PCA.
*/
@Test
public void testTransform()
{
System.out.println("transform");
GridDataGenerator gdg = new GridDataGenerator(new Normal(0, 0.05), new Random(12), 1, 1, 1);
ClassificationDataSet easyTrain = new ClassificationDataSet(gdg.generateData(80).getList(), 0);
ClassificationDataSet easyTest = new ClassificationDataSet(gdg.generateData(10).getList(), 0);
        //let's project the data into a higher dimension
JLTransform jl = new JLTransform(30, JLTransform.TransformMode.GAUSS);
jl.fit(easyTrain);
easyTrain.applyTransform(jl);
easyTest.applyTransform(jl);
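        //The data is intrinsically 2-dimensional, so after the random 30-d
        //embedding a PCA back down to 10 (or even 3) components should retain
        //essentially all of the class structure; the checks below only allow
        //the pipeline's error rate to drift slightly past the baseline.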
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(new NearestNeighbour(3), easyTrain);
cme.evaluateTestSet(easyTest);
double errorRate = cme.getErrorRate();
PCA pca = new PCA(10);
pca.fit(easyTrain);
assertEquals(10, pca.transform(easyTrain.getDataPoint(0)).getNumericalValues().length());
cme = new ClassificationModelEvaluation(new DataModelPipeline((Classifier)new NearestNeighbour(3), new PCA(10)), easyTrain);
cme.evaluateTestSet(easyTest);
assertTrue(cme.getErrorRate() < (errorRate+0.01)*1.05);
cme = new ClassificationModelEvaluation(new DataModelPipeline((Classifier)new NearestNeighbour(3), new PCA(3)), easyTrain);
cme.evaluateTestSet(easyTest);
assertTrue(cme.getErrorRate() < (errorRate+0.01)*1.05);
}
}
| 3,187 | 30.254902 | 132 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/RemoveAttributeTransformTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.datatransform;
import java.util.*;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.utils.IntSet;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class RemoveAttributeTransformTest
{
public RemoveAttributeTransformTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of consolidate method, of class RemoveAttributeTransform.
*/
@Test
public void testConsolidate()
{
System.out.println("consolidate");
CategoricalData[] catIndo = new CategoricalData[]
{
new CategoricalData(2), new CategoricalData(3), new CategoricalData(4)
};
int[] catVals = new int[] {0, 1, 2};
Vec numVals = DenseVector.toDenseVec(0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0);
DataPoint dp = new DataPoint(numVals, catVals, catIndo);
SimpleDataSet dataSet =new SimpleDataSet(Arrays.asList(dp));
Set<Integer> catToRemove = new IntSet();
catToRemove.add(1);
Set<Integer> numToRemove = new IntSet();
numToRemove.addAll(Arrays.asList(0, 2, 3));
RemoveAttributeTransform transform = new RemoveAttributeTransform(dataSet, catToRemove, numToRemove);
DataPoint transformed = transform.transform(dp);
catToRemove.clear();
catToRemove.add(0);
numToRemove.clear();
numToRemove.addAll(Arrays.asList(0, 3));
dataSet = new SimpleDataSet(Arrays.asList(transformed));
RemoveAttributeTransform transform2 = new RemoveAttributeTransform(dataSet, catToRemove, numToRemove);
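        /*
         * Tracing the expected composition: the first transform keeps numeric
         * columns (1, 4, 5, 6) and categorical values (0, 2). The second then
         * drops positions 0 and 3 of the numeric survivors and position 0 of
         * the categorical survivors, leaving the numerics (4.0, 5.0) and the
         * single categorical value 2, which is exactly what is asserted below.
         */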
//Consolidate and make sure it is right
transform2.consolidate(transform);
transformed = transform2.transform(dp);
int[] tranCatVals = transformed.getCategoricalValues();
assertEquals(1, tranCatVals.length);
assertEquals(2, tranCatVals[0]);
Vec tranNumVals = transformed.getNumericalValues();
assertEquals(2, tranNumVals.length());
assertEquals(4.0, tranNumVals.get(0), 0.0);
assertEquals(5.0, tranNumVals.get(1), 0.0);
}
/**
* Test of transform method, of class RemoveAttributeTransform.
*/
@Test
public void testTransform()
{
System.out.println("transform");
CategoricalData[] catIndo = new CategoricalData[]
{
new CategoricalData(2), new CategoricalData(3), new CategoricalData(4)
};
int[] catVals = new int[] {0, 1, 2};
Vec numVals = DenseVector.toDenseVec(0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0);
DataPoint dp = new DataPoint(numVals, catVals, catIndo);
SimpleDataSet dataSet =new SimpleDataSet(Arrays.asList(dp));
Set<Integer> catToRemove = new IntSet();
catToRemove.add(1);
Set<Integer> numToRemove = new IntSet();
numToRemove.addAll(Arrays.asList(0, 2, 3));
RemoveAttributeTransform transform = new RemoveAttributeTransform(dataSet, catToRemove, numToRemove);
DataPoint transFormed = transform.transform(dp);
int[] tranCatVals = transFormed.getCategoricalValues();
assertEquals(2, tranCatVals.length);
assertEquals(0, tranCatVals[0]);
assertEquals(2, tranCatVals[1]);
Vec tranNumVals = transFormed.getNumericalValues();
assertEquals(4, tranNumVals.length());
assertEquals(1.0, tranNumVals.get(0), 0.0);
assertEquals(4.0, tranNumVals.get(1), 0.0);
assertEquals(5.0, tranNumVals.get(2), 0.0);
assertEquals(6.0, tranNumVals.get(3), 0.0);
}
/**
* Test of clone method, of class RemoveAttributeTransform.
*/
@Test
public void testClone()
{
System.out.println("clone");
CategoricalData[] catIndo = new CategoricalData[]
{
new CategoricalData(2), new CategoricalData(3), new CategoricalData(4)
};
int[] catVals = new int[] {0, 1, 2};
Vec numVals = DenseVector.toDenseVec(0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0);
DataPoint dp = new DataPoint(numVals, catVals, catIndo);
SimpleDataSet dataSet =new SimpleDataSet(Arrays.asList(dp));
Set<Integer> catToRemove = new IntSet();
catToRemove.add(1);
Set<Integer> numToRemove = new IntSet();
numToRemove.addAll(Arrays.asList(0, 2, 3));
RemoveAttributeTransform transform = new RemoveAttributeTransform(dataSet, catToRemove, numToRemove);
transform = transform.clone();
DataPoint transFormed = transform.transform(dp);
int[] tranCatVals = transFormed.getCategoricalValues();
assertEquals(2, tranCatVals.length);
assertEquals(0, tranCatVals[0]);
assertEquals(2, tranCatVals[1]);
Vec tranNumVals = transFormed.getNumericalValues();
assertEquals(4, tranNumVals.length());
assertEquals(1.0, tranNumVals.get(0), 0.0);
assertEquals(4.0, tranNumVals.get(1), 0.0);
assertEquals(5.0, tranNumVals.get(2), 0.0);
assertEquals(6.0, tranNumVals.get(3), 0.0);
}
}
| 5,859 | 29.680628 | 110 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/WhitenedPCATest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.datatransform;
import java.util.*;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.distributions.multivariate.NormalM;
import jsat.linear.*;
import static org.junit.Assert.*;
import org.junit.*;
/**
*
* @author Edward Raff
*/
public class WhitenedPCATest
{
public WhitenedPCATest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of setUpTransform method, of class WhitenedPCA.
*/
@Test
public void testTransform()
{
System.out.println("testTransform");
NormalM normal = new NormalM(new DenseVector(3), new DenseMatrix(new double[][]
{
{133.138, -57.278, 40.250},
{-57.278, 25.056, -17.500},
{ 40.250, -17.500, 12.250},
}));
List<Vec> sample = normal.sample(500, new Random(17));
List<DataPoint> dataPoints = new ArrayList<DataPoint>(sample.size());
for( Vec v : sample)
dataPoints.add(new DataPoint(v, new int[0], new CategoricalData[0]));
SimpleDataSet data = new SimpleDataSet(dataPoints);
DataTransform transform = new WhitenedPCA(data, 0.0);
data.applyTransform(transform);
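        //In the usual formulation, with the covariance eigendecomposition
        //Sigma = V Lambda V^T, PCA whitening applies W = Lambda^(-1/2) V^T,
        //so the transformed data has covariance W Sigma W^T = I. The identity
        //check below verifies exactly that.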
Matrix whiteCov = MatrixStatistics.covarianceMatrix(MatrixStatistics.meanVector(data), data);
assertTrue(Matrix.eye(3).equals(whiteCov, 1e-8));
}
}
| 1,823 | 22.088608 | 101 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/WhitenedZCATest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.datatransform;
import java.util.*;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.distributions.multivariate.NormalM;
import jsat.linear.*;
import static org.junit.Assert.assertTrue;
import org.junit.*;
/**
*
* @author Edward Raff
*/
public class WhitenedZCATest
{
public WhitenedZCATest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of setUpTransform method, of class WhitenedZCA.
*/
@Test
public void testTransform()
{
System.out.println("testTransform");
NormalM normal = new NormalM(new DenseVector(3), new DenseMatrix(new double[][]
{
{133.138, -57.278, 40.250},
{-57.278, 25.056, -17.500},
{ 40.250, -17.500, 12.250},
}));
List<Vec> sample = normal.sample(500, new Random(17));
List<DataPoint> dataPoints = new ArrayList<DataPoint>(sample.size());
for( Vec v : sample)
dataPoints.add(new DataPoint(v, new int[0], new CategoricalData[0]));
SimpleDataSet data = new SimpleDataSet(dataPoints);
DataTransform transform = new WhitenedZCA(data, 0);
data.applyTransform(transform);
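        //ZCA instead uses the symmetric whitening matrix
        //W = V Lambda^(-1/2) V^T, which also yields identity covariance while
        //keeping the result as close as possible (in the least-squares sense)
        //to the original coordinates.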
Matrix whiteCov = MatrixStatistics.covarianceMatrix(MatrixStatistics.meanVector(data), data);
assertTrue(Matrix.eye(3).equals(whiteCov, 1e-8));
}
}
| 1,834 | 22.227848 | 101 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/featureselection/BDSTest.java |
package jsat.datatransform.featureselection;
import java.util.*;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.Classifier;
import jsat.classifiers.knn.NearestNeighbour;
import jsat.regression.MultipleLinearRegression;
import jsat.regression.RegressionDataSet;
import jsat.utils.IntSet;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class BDSTest
{
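    //BDS here stands for bidirectional search: a forward selection and a
    //backward elimination run in tandem toward the target of 3 features,
    //each barred from picking features the other has already discarded or
    //locked in. This description assumes the textbook scheme; see the BDS
    //class itself for the exact rules JSAT applies.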
public BDSTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class BDS.
*/
@Test
public void testTransformC()
{
System.out.println("transformC");
Random rand = new XORWOW(13);
int t0 = 1, t1 = 5, t2 = 8;
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
ClassificationDataSet cds = SFSTest.
generate3DimIn10(rand, t0, t1, t2);
BDS bds = new BDS(3, (Classifier)new NearestNeighbour(7), 5).clone();
bds.fit(cds);
Set<Integer> found = bds.getSelectedNumerical();
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
cds.applyTransform(bds);
assertEquals(shouldHave.size(), cds.getNumFeatures());
}
@Test
public void testTransformR()
{
System.out.println("transformR");
Random rand = new XORWOW(13);
int t0 = 1, t1 = 5, t2 = 8;
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
RegressionDataSet rds = SFSTest.
generate3DimIn10R(rand, t0, t1, t2);
BDS bds = new BDS(3, new MultipleLinearRegression(), 5).clone().clone();
bds.fit(rds);
Set<Integer> found = bds.getSelectedNumerical();
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
rds.applyTransform(bds);
assertEquals(shouldHave.size(), rds.getNumFeatures());
}
}
| 2,444 | 23.207921 | 80 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/featureselection/LRSTest.java |
package jsat.datatransform.featureselection;
import java.util.*;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.Classifier;
import jsat.classifiers.knn.NearestNeighbour;
import jsat.regression.MultipleLinearRegression;
import jsat.regression.RegressionDataSet;
import jsat.utils.IntSet;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class LRSTest
{
public LRSTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class LRS.
*/
@Test
public void testTransformC()
{
System.out.println("transformC");
Random rand = new XORWOW(13);
int t0 = 1, t1 = 5, t2 = 8;
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
ClassificationDataSet cds = SFSTest.
generate3DimIn10(rand, t0, t1, t2);
        //L > R: add L=6 features forward, then prune R=3 backward
LRS lrs = new LRS(6, 3, (Classifier) new NearestNeighbour(3), 5).clone();
lrs.fit(cds);
Set<Integer> found = lrs.getSelectedNumerical();
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
ClassificationDataSet copyData = cds.getTwiceShallowClone();
copyData.applyTransform(lrs);
assertEquals(shouldHave.size(), copyData.getNumFeatures());
        //L < R (remove down to 1 feature left, then add 2 back)
lrs = new LRS( 2, 10-1, (Classifier)new NearestNeighbour(3),5).clone();
lrs.fit(cds);
found = lrs.getSelectedNumerical();
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
cds.applyTransform(lrs);
assertEquals(shouldHave.size(), cds.getNumFeatures());
}
@Test
public void testTransformR()
{
System.out.println("transformR");
Random rand = new XORWOW(13);
int t0 = 1, t1 = 5, t2 = 8;
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
RegressionDataSet cds = SFSTest.
generate3DimIn10R(rand, t0, t1, t2);
        //L > R: add L=6 features forward, then prune R=3 backward
LRS lrs = new LRS(6, 3, (Classifier) new NearestNeighbour(3), 5).clone();
lrs.fit(cds);
Set<Integer> found = lrs.getSelectedNumerical();
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
RegressionDataSet copyData = cds.getTwiceShallowClone();
copyData.applyTransform(lrs);
assertEquals(shouldHave.size(), copyData.getNumFeatures());
        //L < R (remove down to 1 feature left, then add 2 back)
lrs = new LRS( 2, 10-1, (Classifier)new NearestNeighbour(3),5).clone();
lrs.fit(cds);
found = lrs.getSelectedNumerical();
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
cds.applyTransform(lrs);
assertEquals(shouldHave.size(), cds.getNumFeatures());
}
}
| 3,451 | 26.616 | 81 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/featureselection/MutualInfoFSTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.datatransform.featureselection;
import jsat.classifiers.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static jsat.linear.DenseVector.*;
import jsat.linear.Vec;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class MutualInfoFSTest
{
public MutualInfoFSTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testSomeMethod()
{
// List<DataPoint> dps = new ArrayList<DataPoint>();
CategoricalData predicting = new CategoricalData(3);
CategoricalData[] catAtrs = new CategoricalData[]
{
new CategoricalData(3),
new CategoricalData(3),
new CategoricalData(2),//Info
new CategoricalData(3)//Info
};
        //Making numeric attributes at indices 1 and 3 informative
ClassificationDataSet cds = new ClassificationDataSet(4, catAtrs, predicting);
cds.addDataPoint(toDenseVec(0.0, 0.0, 1.0, 1.0), new int[]{0, 1, 0, 0}, 0);
cds.addDataPoint(toDenseVec(1.0, 0.0, 0.0, 1.0), new int[]{1, 2, 0, 0}, 0);
cds.addDataPoint(toDenseVec(0.0, 0.0, 1.0, 1.0), new int[]{2, 0, 0, 0}, 0);
cds.addDataPoint(toDenseVec(1.0, 0.0, 0.0, 1.0), new int[]{0, 1, 0, 0}, 0);
cds.addDataPoint(toDenseVec(1.0, 1.0, 0.0, 1.0), new int[]{1, 2, 0, 1}, 1);
cds.addDataPoint(toDenseVec(0.0, 1.0, 1.0, 1.0), new int[]{2, 0, 0, 1}, 1);
cds.addDataPoint(toDenseVec(1.0, 1.0, 0.0, 1.0), new int[]{0, 1, 0, 1}, 1);
cds.addDataPoint(toDenseVec(0.0, 1.0, 1.0, 1.0), new int[]{1, 2, 0, 1}, 1);
cds.addDataPoint(toDenseVec(0.0, 1.0, 1.0, 0.0), new int[]{2, 0, 1, 2}, 2);
cds.addDataPoint(toDenseVec(1.0, 1.0, 0.0, 0.0), new int[]{0, 1, 1, 2}, 2);
cds.addDataPoint(toDenseVec(0.0, 1.0, 1.0, 0.0), new int[]{1, 2, 1, 2}, 2);
cds.addDataPoint(toDenseVec(1.0, 1.0, 0.0, 0.0), new int[]{2, 0, 1, 2}, 2);
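        /*
         * Expected outcome: categorical attributes 2 and 3 and numeric
         * attributes 1 and 3 carry information about the class (categorical 3
         * even determines it outright), while the remaining attributes just
         * cycle independently of the label. Under BINARY handling the numeric
         * columns are scored by their zero/non-zero pattern, so the top-4
         * features by mutual information should be exactly those four, as the
         * loop below checks value-by-value.
         */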
MutualInfoFS minFS = new MutualInfoFS(4, MutualInfoFS.NumericalHandeling.BINARY).clone();
minFS.fit(cds);
for(int i = 0; i < cds.size(); i++)
{
DataPoint dp = cds.getDataPoint(i);
DataPoint trDp = minFS.transform(dp);
int[] origCat = dp.getCategoricalValues();
int[] tranCat = trDp.getCategoricalValues();
Vec origVals = dp.getNumericalValues();
Vec tranVals = trDp.getNumericalValues();
assertEquals(origCat[2], tranCat[0]);
assertEquals(origCat[3], tranCat[1]);
assertEquals(origVals.get(1), tranVals.get(0), 0.0);
assertEquals(origVals.get(3), tranVals.get(1), 0.0);
}
}
}
| 3,231 | 29.490566 | 97 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/featureselection/ReliefFTest.java |
package jsat.datatransform.featureselection;
import java.util.*;
import jsat.classifiers.ClassificationDataSet;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.IntSet;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ReliefFTest
{
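    //ReliefF scores features by repeatedly sampling an instance and comparing
    //it to its nearest hits (same class) and misses (other classes): features
    //that differ on misses but agree on hits have their weights increased.
    //The informative dimensions t0, t1, t2 should therefore dominate the 7
    //noise dimensions generated below.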
public ReliefFTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class ReliefF.
*/
@Test
public void testTransformC()
{
System.out.println("transformC");
Random rand = new XORWOW(13);
int t0 = 1, t1 = 5, t2 = 8;
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
ClassificationDataSet cds = SFSTest.
generate3DimIn10(rand, t0, t1, t2);
ReliefF relieff = new ReliefF(3, 50, 7, new EuclideanDistance()).clone();
relieff.fit(cds);
Set<Integer> found = new IntSet(relieff.getKeptNumeric());
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
cds.applyTransform(relieff);
assertEquals(shouldHave.size(), cds.getNumFeatures());
}
}
| 1,598 | 20.039474 | 81 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/featureselection/SBSTest.java |
package jsat.datatransform.featureselection;
import java.util.*;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.Classifier;
import jsat.classifiers.knn.NearestNeighbour;
import jsat.regression.MultipleLinearRegression;
import jsat.regression.RegressionDataSet;
import jsat.utils.IntSet;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class SBSTest
{
public SBSTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTransformC()
{
System.out.println("transformC");
Random rand = new XORWOW(13);
int t0 = 1, t1 = 5, t2 = 8;
ClassificationDataSet cds = SFSTest.
generate3DimIn10(rand, t0, t1, t2);
SBS sbs = new SBS(1, 7, (Classifier)new NearestNeighbour(7), 1e-3).clone();
sbs.setFolds(5);
sbs.fit(cds);
Set<Integer> found = sbs.getSelectedNumerical();
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
cds.applyTransform(sbs);
assertEquals(3, cds.getNumFeatures());
}
@Test
public void testTransformR()
{
System.out.println("transformR");
Random rand = new XORWOW(13);
int t0 = 1, t1 = 5, t2 = 8;
RegressionDataSet cds = SFSTest.
generate3DimIn10R(rand, t0, t1, t2);
SBS sbs = new SBS(1, 7, new MultipleLinearRegression(), 1.0).clone();
sbs.setFolds(5);
sbs.fit(cds);
Set<Integer> found = sbs.getSelectedNumerical();
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
cds.applyTransform(sbs);
assertEquals(3, cds.getNumFeatures());
}
}
| 2,417 | 22.940594 | 83 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/featureselection/SFSTest.java |
package jsat.datatransform.featureselection;
import java.util.*;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.ClassificationDataSet;
import jsat.classifiers.Classifier;
import jsat.classifiers.knn.NearestNeighbour;
import jsat.linear.DenseVector;
import jsat.linear.Vec;
import jsat.regression.MultipleLinearRegression;
import jsat.regression.RegressionDataSet;
import jsat.utils.IntSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.*;
/**
*
* @author Edward Raff
*/
public class SFSTest
{
public SFSTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class SequentialForwardSelection.
*/
@Test
public void testTransform()
{
System.out.println("transform");
Random rand = new Random(12343);
int t0 = 1, t1 = 5, t2 = 8;
ClassificationDataSet cds = generate3DimIn10(rand, t0, t1, t2);
SFS sfs = new SFS(3, 7, (Classifier)new NearestNeighbour(7), 1e-3).clone();
sfs.fit(cds);
Set<Integer> found = sfs.getSelectedNumerical();
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
cds.applyTransform(sfs);
assertEquals(3, cds.getNumFeatures());
}
@Test
public void testTransformR()
{
System.out.println("transformR");
Random rand = new Random(12343);
int t0 = 1, t1 = 5, t2 = 8;
RegressionDataSet rds = generate3DimIn10R(rand, t0, t1, t2);
SFS sfs = new SFS(3, 7, new MultipleLinearRegression(), 10.0).clone();
sfs.fit(rds);
Set<Integer> found = sfs.getSelectedNumerical();
Set<Integer> shouldHave = new IntSet();
shouldHave.addAll(Arrays.asList(t0, t1, t2));
assertEquals(shouldHave.size(), found.size());
assertTrue(shouldHave.containsAll(found));
rds.applyTransform(sfs);
assertEquals(3, rds.getNumFeatures());
}
/**
* Creates a naive test case where 4 classes that can be separated with 3
* features are placed into a 10 dimensional space. The other 7 dimensions
* are all small random noise values.
*
* @param rand source of randomness
* @param t0 the true index in the 10 dimensional space to place the first value
* @param t1 the true index in the 10 dimensional space to place the second value
* @param t2 the true index in the 10 dimensional space to place the third value
*/
public static ClassificationDataSet generate3DimIn10(Random rand,
int t0, int t1, int t2)
{
ClassificationDataSet cds = new ClassificationDataSet(10,
new CategoricalData[0], new CategoricalData(4));
int cSize = 40;
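        /*
         * The four classes sit at the corners (5,5,0), (5,5,5), (5,0,5) and
         * (0,5,5) of a cube spanned by coordinates (t0, t1, t2); every other
         * coordinate is uniform noise in [0, 1/3). Any reasonable selector
         * should therefore find exactly {t0, t1, t2}.
         */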
for(int i = 0; i < cSize; i++)
{
Vec dv = DenseVector.random(10, rand);
dv.mutableDivide(3);
dv.set(t0, 5.0);
dv.set(t1, 5.0);
dv.set(t2, 0.0);
cds.addDataPoint(dv, new int[0], 0);
}
for(int i = 0; i < cSize; i++)
{
Vec dv = DenseVector.random(10, rand);
dv.mutableDivide(3);
dv.set(t0, 5.0);
dv.set(t1, 5.0);
dv.set(t2, 5.0);
cds.addDataPoint(dv, new int[0], 1);
}
for(int i = 0; i < cSize; i++)
{
Vec dv = DenseVector.random(10, rand);
dv.mutableDivide(3);
dv.set(t0, 5.0);
dv.set(t1, 0.0);
dv.set(t2, 5.0);
cds.addDataPoint(dv, new int[0], 2);
}
for(int i = 0; i < cSize; i++)
{
Vec dv = DenseVector.random(10, rand);
dv.mutableDivide(3);
dv.set(t0, 0.0);
dv.set(t1, 5.0);
dv.set(t2, 5.0);
cds.addDataPoint(dv, new int[0], 3);
}
return cds;
}
public static RegressionDataSet generate3DimIn10R(Random rand,
int t0, int t1, int t2)
{
RegressionDataSet cds = new RegressionDataSet(10, new CategoricalData[0]);
int cSize = 80;
for(int i = 0; i < cSize; i++)
{
Vec dv = DenseVector.random(10, rand);
cds.addDataPoint(dv, new int[0], dv.get(t0)*6 + dv.get(t1)*4 + dv.get(t2)*8);
}
return cds;
}
}
| 4,932 | 26.558659 | 89 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/kernel/KernelPCATest.java | /*
* Copyright (C) 2015 Edward Raff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform.kernel;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.classifiers.svm.DCDs;
import jsat.datatransform.DataModelPipeline;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class KernelPCATest
{
    //Test uses the transform to solve a problem that is not linearly separable in the original space
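    //Under an RBF kernel, distance from the circles' shared center becomes
    //(approximately) a single direction in feature space, so the leading
    //kernel principal components make the inner/outer classes linearly
    //separable for the linear DCDs SVM that follows in the pipeline.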
public KernelPCATest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(Nystrom.SamplingMethod sampMethod : Nystrom.SamplingMethod.values())
{
DataModelPipeline instance = new DataModelPipeline((Classifier)new DCDs(), new KernelPCA(new RBFKernel(0.5), 20, 100, sampMethod));
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(Nystrom.SamplingMethod sampMethod : Nystrom.SamplingMethod.values())
{
DataModelPipeline instance = new DataModelPipeline((Classifier)new DCDs(), new KernelPCA(new RBFKernel(0.5), 20, 100, sampMethod));
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testClone()
{
System.out.println("clone");
DataModelPipeline instance = new DataModelPipeline((Classifier)new DCDs(), new KernelPCA(new RBFKernel(0.5), 20, 100, Nystrom.SamplingMethod.KMEANS));
ClassificationDataSet t1 = FixedProblems.getCircles(500, 0.0, RandomUtil.getRandom(), 1.0, 4.0);
ClassificationDataSet t2 = FixedProblems.getCircles(500, 0.0, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
DataModelPipeline result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 4,365 | 30.868613 | 159 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/kernel/NystromTest.java | /*
* Copyright (C) 2015 Edward Raff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform.kernel;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.classifiers.svm.DCDs;
import jsat.datatransform.DataModelPipeline;
import jsat.distributions.kernels.RBFKernel;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class NystromTest
{
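    //Nystrom approximates the full n x n kernel matrix from m sampled
    //landmark points as K ~= K_nm K_mm^{-1} K_nm^T, yielding an explicit
    //m-dimensional feature map. The tests sweep every landmark-sampling
    //strategy JSAT offers and expect each to separate the circles exactly.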
public NystromTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
for(Nystrom.SamplingMethod sampMethod : Nystrom.SamplingMethod.values())
{
DataModelPipeline instance = new DataModelPipeline((Classifier)new DCDs(), new Nystrom(new RBFKernel(0.5), 250, sampMethod, 1e-5, false));
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(400, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
for(Nystrom.SamplingMethod sampMethod : Nystrom.SamplingMethod.values())
{
DataModelPipeline instance = new DataModelPipeline((Classifier)new DCDs(), new Nystrom(new RBFKernel(0.5), 250, sampMethod, 1e-5, false));
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(400, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
}
@Test
public void testClone()
{
System.out.println("clone");
DataModelPipeline instance = new DataModelPipeline((Classifier)new DCDs(), new Nystrom(new RBFKernel(0.5), 250, Nystrom.SamplingMethod.NORM, 1e-5, true));
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
DataModelPipeline result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 4,285 | 30.284672 | 163 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/kernel/RFF_RBFTest.java | /*
* Copyright (C) 2015 Edward Raff
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform.kernel;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.FixedProblems;
import jsat.classifiers.*;
import jsat.classifiers.svm.DCDs;
import jsat.datatransform.DataModelPipeline;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class RFF_RBFTest
{
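    //Random Fourier Features rest on Bochner's theorem: with w drawn from a
    //Gaussian and b uniform, suitably scaled cos(w . x + b) features form an
    //unbiased Monte Carlo estimate of the RBF kernel. The 100 features used
    //here are plenty for the easy circles problem.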
public RFF_RBFTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
@Test
public void testTrainC_ClassificationDataSet_ExecutorService()
{
System.out.println("trainC");
DataModelPipeline instance = new DataModelPipeline((Classifier) new DCDs(), new RFF_RBF(0.5, 100, true));
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train, true);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
@Test
public void testTrainC_ClassificationDataSet()
{
System.out.println("trainC");
DataModelPipeline instance = new DataModelPipeline((Classifier) new DCDs(), new RFF_RBF(0.5, 100, true));
ClassificationDataSet train = FixedProblems.getInnerOuterCircle(200, RandomUtil.getRandom());
ClassificationDataSet test = FixedProblems.getInnerOuterCircle(100, RandomUtil.getRandom());
ClassificationModelEvaluation cme = new ClassificationModelEvaluation(instance, train);
cme.evaluateTestSet(test);
assertEquals(0, cme.getErrorRate(), 0.0);
}
@Test
public void testClone()
{
System.out.println("clone");
DataModelPipeline instance = new DataModelPipeline((Classifier) new DCDs(), new RFF_RBF(0.5, 100, false));
ClassificationDataSet t1 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom());
ClassificationDataSet t2 = FixedProblems.getInnerOuterCircle(500, RandomUtil.getRandom(), 2.0, 10.0);
instance = instance.clone();
instance.train(t1);
DataModelPipeline result = instance.clone();
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), result.classify(t1.getDataPoint(i)).mostLikely());
result.train(t2);
for (int i = 0; i < t1.size(); i++)
assertEquals(t1.getDataPointCategory(i), instance.classify(t1.getDataPoint(i)).mostLikely());
for (int i = 0; i < t2.size(); i++)
assertEquals(t2.getDataPointCategory(i), result.classify(t2.getDataPoint(i)).mostLikely());
}
}
| 3,865 | 28.738462 | 114 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/visualization/IsomapTest.java | /*
* Copyright (C) 2015 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform.visualization;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.linear.DenseMatrix;
import jsat.linear.Matrix;
import jsat.linear.Vec;
import jsat.linear.VecPaired;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.linear.vectorcollection.VectorArray;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class IsomapTest
{
public IsomapTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class MDS.
*/
@Test
public void testTransform_DataSet_ExecutorService()
{
System.out.println("transform");
ExecutorService ex = Executors.newFixedThreadPool(SystemInfo.LogicalCores);
Random rand = RandomUtil.getRandom();
Isomap instance = new Isomap();
//create a small data set, and apply a random projection to a higher dimension
//should still get similar nns when projected back down
final int K = 5;//num neighbors we want to see stay the same
instance.setNeighbors(K);
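        /*
         * Isomap builds a K-nearest-neighbor graph, takes shortest-path
         * lengths through it as estimates of geodesic distance, and embeds
         * those with MDS. C-Isomap additionally rescales each edge by the
         * mean neighbor distance of its endpoints, roughly normalizing for
         * local density.
         */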
Matrix orig_dim = new DenseMatrix(200, 2);
for (int i = 0; i < orig_dim.rows(); i++)
{
int offset = i % 2 == 0 ? -5 : 5;
for (int j = 0; j < orig_dim.cols(); j++)
{
orig_dim.set(i, j, rand.nextGaussian()+offset);
}
}
Matrix s = Matrix.random(2, 10, rand);
Matrix proj_data = orig_dim.multiply(s);
SimpleDataSet proj = new SimpleDataSet(proj_data.cols(), new CategoricalData[0]);
for(int i = 0; i < proj_data.rows(); i++)
proj.add(new DataPoint(proj_data.getRow(i)));
List<Set<Integer>> origNNs = new ArrayList<Set<Integer>>();
VectorArray<VecPaired<Vec, Integer>> proj_vc = new VectorArray<VecPaired<Vec, Integer>>(new EuclideanDistance());
for(int i = 0; i < proj.size(); i++)
proj_vc.add(new VecPaired<Vec, Integer>(proj.getDataPoint(i).getNumericalValues(), i));
for(int i = 0; i < proj.size(); i++)
{
Set<Integer> nns = new HashSet<Integer>();
for(VecPaired<VecPaired<Vec, Integer>, Double> neighbor : proj_vc.search(proj_vc.get(i), K))
nns.add(neighbor.getVector().getPair());
origNNs.add(nns);
}
for(boolean cIsomap : new boolean[]{true, false})
{
instance.setCIsomap(cIsomap);
SimpleDataSet transformed_0 = instance.transform(proj, true);
SimpleDataSet transformed_1 = instance.transform(proj);
for(SimpleDataSet transformed : new SimpleDataSet[]{transformed_0, transformed_1})
{
double sameNN = 0;
VectorArray<VecPaired<Vec, Integer>> trans_vc = new VectorArray<VecPaired<Vec, Integer>>(new EuclideanDistance());
for (int i = 0; i < transformed.size(); i++)
trans_vc.add(new VecPaired<Vec, Integer>(transformed.getDataPoint(i).getNumericalValues(), i));
for(int i = 0; i < orig_dim.rows(); i++)
{
for(VecPaired<VecPaired<Vec, Integer>, Double> neighbor : trans_vc.search(trans_vc.get(i), K*3))
if(origNNs.get(i).contains(neighbor.getVector().getPair()))
sameNN++;
}
double score = sameNN/(transformed.size()*K);
                if(cIsomap)//gets more leniency, as exaggerating higher density means errors at the edge of the normal samples
assertTrue("was " + score, score >= 0.40);
else
assertTrue("was " + score, score >= 0.50);
}
}
ex.shutdown();
}
}
| 5,242 | 32.183544 | 130 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/visualization/LargeVizTest.java | /*
* Copyright (C) 2015 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform.visualization;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.DataSet;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.linear.DenseMatrix;
import jsat.linear.Matrix;
import jsat.linear.Vec;
import jsat.linear.VecPaired;
import jsat.linear.distancemetrics.ChebyshevDistance;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.linear.distancemetrics.ManhattanDistance;
import jsat.linear.vectorcollection.VectorArray;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class LargeVizTest
{
public LargeVizTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class LargeViz.
*/
@Test
public void testTransform_DataSet_ExecutorService()
{
System.out.println("transform");
ExecutorService ex = Executors.newFixedThreadPool(SystemInfo.LogicalCores);
Random rand = RandomUtil.getRandom();
LargeViz instance = new LargeViz();
//create a small data set, and apply a random projection to a higher dimension
//should still get similar nns when projected back down
final int K = 5;//num neighbors we want to see stay the same
instance.setPerplexity(K*3);
Matrix orig_dim = new DenseMatrix(200, 2);
for (int i = 0; i < orig_dim.rows(); i++)
{
int offset = i % 2 == 0 ? -5 : 5;
for (int j = 0; j < orig_dim.cols(); j++)
{
orig_dim.set(i, j, rand.nextGaussian()+offset);
}
}
Matrix s = Matrix.random(2, 10, rand);
Matrix proj_data = orig_dim.multiply(s);
SimpleDataSet proj = new SimpleDataSet(proj_data.cols(), new CategoricalData[0]);
for(int i = 0; i < proj_data.rows(); i++)
proj.add(new DataPoint(proj_data.getRow(i)));
List<Set<Integer>> origNNs = new ArrayList<>();
VectorArray<VecPaired<Vec, Integer>> proj_vc = new VectorArray<>(new EuclideanDistance());
for(int i = 0; i < proj.size(); i++)
proj_vc.add(new VecPaired<>(proj.getDataPoint(i).getNumericalValues(), i));
for(int i = 0; i < proj.size(); i++)
{
Set<Integer> nns = new HashSet<>();
for(VecPaired<VecPaired<Vec, Integer>, Double> neighbor : proj_vc.search(proj_vc.get(i), K))
nns.add(neighbor.getVector().getPair());
origNNs.add(nns);
}
SimpleDataSet transformed_0 = instance.transform(proj, true);
SimpleDataSet transformed_1 = instance.transform(proj);
for(SimpleDataSet transformed : new SimpleDataSet[]{transformed_0, transformed_1})
{
double sameNN = 0;
VectorArray<VecPaired<Vec, Integer>> trans_vc = new VectorArray<VecPaired<Vec, Integer>>(new EuclideanDistance());
for (int i = 0; i < transformed.size(); i++)
trans_vc.add(new VecPaired<Vec, Integer>(transformed.getDataPoint(i).getNumericalValues(), i));
for(int i = 0; i < orig_dim.rows(); i++)
{
for(VecPaired<VecPaired<Vec, Integer>, Double> neighbor : trans_vc.search(trans_vc.get(i), K*3))
if(origNNs.get(i).contains(neighbor.getVector().getPair()))
sameNN++;
}
double score = sameNN/(transformed.size()*K);
assertTrue("was " + score, score >= 0.50);
}
ex.shutdown();
}
@Test
public void testTransform_DiffDists()
{
System.out.println("transform");
ExecutorService ex = Executors.newFixedThreadPool(SystemInfo.LogicalCores);
Random rand = RandomUtil.getRandom();
LargeViz instance = new LargeViz();
//create a small data set, and apply a random projection to a higher dimension
//should still get similar nns when projected back down
final int K = 5;//num neighbors we want to see stay the same
instance.setPerplexity(K*3);
instance.setDistanceMetricSource(new ChebyshevDistance());
instance.setDistanceMetricEmbedding(new ManhattanDistance());
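        //LargeViz lets the metric used to find neighbors in the source space
        //differ from the metric the low-dimensional layout is optimized
        //under; this test pairs Chebyshev with Manhattan, while the
        //evaluation below still measures Euclidean recall of the original
        //nearest neighbors.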
Matrix orig_dim = new DenseMatrix(200, 2);
for (int i = 0; i < orig_dim.rows(); i++)
{
int offset = i % 2 == 0 ? -5 : 5;
for (int j = 0; j < orig_dim.cols(); j++)
{
orig_dim.set(i, j, rand.nextGaussian()+offset);
}
}
Matrix s = Matrix.random(2, 10, rand);
Matrix proj_data = orig_dim.multiply(s);
SimpleDataSet proj = new SimpleDataSet(proj_data.cols(), new CategoricalData[0]);
for(int i = 0; i < proj_data.rows(); i++)
proj.add(new DataPoint(proj_data.getRow(i)));
List<Set<Integer>> origNNs = new ArrayList<>();
VectorArray<VecPaired<Vec, Integer>> proj_vc = new VectorArray<>(new EuclideanDistance());
for(int i = 0; i < proj.size(); i++)
proj_vc.add(new VecPaired<>(proj.getDataPoint(i).getNumericalValues(), i));
for(int i = 0; i < proj.size(); i++)
{
Set<Integer> nns = new HashSet<>();
for(VecPaired<VecPaired<Vec, Integer>, Double> neighbor : proj_vc.search(proj_vc.get(i), K))
nns.add(neighbor.getVector().getPair());
origNNs.add(nns);
}
SimpleDataSet transformed_0 = instance.transform(proj, true);
SimpleDataSet transformed_1 = instance.transform(proj);
for(SimpleDataSet transformed : new SimpleDataSet[]{transformed_0, transformed_1})
{
double sameNN = 0;
VectorArray<VecPaired<Vec, Integer>> trans_vc = new VectorArray<VecPaired<Vec, Integer>>(new EuclideanDistance());
for (int i = 0; i < transformed.size(); i++)
trans_vc.add(new VecPaired<Vec, Integer>(transformed.getDataPoint(i).getNumericalValues(), i));
for(int i = 0; i < orig_dim.rows(); i++)
{
for(VecPaired<VecPaired<Vec, Integer>, Double> neighbor : trans_vc.search(trans_vc.get(i), K*3))
if(origNNs.get(i).contains(neighbor.getVector().getPair()))
sameNN++;
}
double score = sameNN/(transformed.size()*K);
assertTrue("was " + score, score >= 0.50);
}
ex.shutdown();
}
}
| 7,984 | 34.176211 | 126 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/visualization/MDSTest.java | /*
* Copyright (C) 2015 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform.visualization;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.DataSet;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.linear.DenseMatrix;
import jsat.linear.Matrix;
import jsat.linear.Vec;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class MDSTest
{
public MDSTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of transform method, of class MDS.
*/
@Test
public void testTransform_DataSet_ExecutorService()
{
System.out.println("transform");
ExecutorService ex = Executors.newFixedThreadPool(SystemInfo.LogicalCores);
Random rand = RandomUtil.getRandom();
MDS instance = new MDS();
//create a small data set, and apply a random projection to a higher dimension
        //should still preserve pairwise distances on roughly the same scale
Matrix orig_dim = new DenseMatrix(20, 2);
for (int i = 0; i < orig_dim.rows(); i++)
{
int offset = i % 2 == 0 ? -5 : 5;
for (int j = 0; j < orig_dim.cols(); j++)
{
orig_dim.set(i, j, rand.nextDouble() + offset);
}
}
Matrix s = Matrix.random(2, 4, rand);
Matrix proj_data = orig_dim.multiply(s);
SimpleDataSet proj = new SimpleDataSet(proj_data.cols(), new CategoricalData[0]);
for(int i = 0; i < proj_data.rows(); i++)
proj.add(new DataPoint(proj_data.getRow(i)));
SimpleDataSet transformed_0 = instance.transform(proj, true);
SimpleDataSet transformed_1 = instance.transform(proj);
for(SimpleDataSet transformed : new SimpleDataSet[]{transformed_0, transformed_1})
{
EuclideanDistance dist = new EuclideanDistance();
for(int i = 0; i < orig_dim.rows(); i++)
for(int j = 0; j < orig_dim.rows(); j++)
{
Vec orig_i = orig_dim.getRowView(i);
Vec orig_j = orig_dim.getRowView(j);
Vec new_i = transformed.getDataPoint(i).getNumericalValues();
Vec new_j = transformed.getDataPoint(j).getNumericalValues();
double d_o = dist.dist(orig_i, orig_j);
double d_n = dist.dist(new_i, new_j);
                    //distances in this data are bimodal: pairs within a
                    //cluster are close, pairs across clusters are far apart
                    if(d_o > 6)//far apart originally -> must stay far apart
                        assertTrue(d_n > 6);
                    else//otherwise the original pair must be a close, within-cluster one
                        assertTrue(d_o < 2);
}
}
ex.shutdown();
}
}
| 4,097 | 28.695652 | 90 | java |
JSAT | JSAT-master/JSAT/test/jsat/datatransform/visualization/TSNETest.java | /*
* Copyright (C) 2015 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.datatransform.visualization;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import jsat.DataSet;
import jsat.SimpleDataSet;
import jsat.classifiers.CategoricalData;
import jsat.classifiers.DataPoint;
import jsat.linear.DenseMatrix;
import jsat.linear.Matrix;
import jsat.linear.Vec;
import jsat.linear.VecPaired;
import jsat.linear.distancemetrics.EuclideanDistance;
import jsat.linear.vectorcollection.VectorArray;
import jsat.utils.SystemInfo;
import jsat.utils.random.RandomUtil;
import jsat.utils.random.XORWOW;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class TSNETest
{
public TSNETest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
     * Test of transform method, of class TSNE.
*/
@Test
public void testTransform_DataSet_ExecutorService()
{
System.out.println("transform");
ExecutorService ex = Executors.newFixedThreadPool(SystemInfo.LogicalCores);
Random rand = RandomUtil.getRandom();
TSNE instance = new TSNE();
//create a small data set, and apply a random projection to a higher dimension
//should still get similar nns when projected back down
final int K = 5;//num neighbors we want to see stay the same
instance.setPerplexity(K*3);
Matrix orig_dim = new DenseMatrix(200, 2);
for (int i = 0; i < orig_dim.rows(); i++)
{
int offset = i % 2 == 0 ? -5 : 5;
for (int j = 0; j < orig_dim.cols(); j++)
{
orig_dim.set(i, j, rand.nextGaussian()+offset);
}
}
Matrix s = Matrix.random(2, 10, rand);
Matrix proj_data = orig_dim.multiply(s);
SimpleDataSet proj = new SimpleDataSet(proj_data.cols(), new CategoricalData[0]);
for(int i = 0; i < proj_data.rows(); i++)
proj.add(new DataPoint(proj_data.getRow(i)));
List<Set<Integer>> origNNs = new ArrayList<>();
VectorArray<VecPaired<Vec, Integer>> proj_vc = new VectorArray<>(new EuclideanDistance());
for(int i = 0; i < proj.size(); i++)
proj_vc.add(new VecPaired<>(proj.getDataPoint(i).getNumericalValues(), i));
for(int i = 0; i < proj.size(); i++)
{
Set<Integer> nns = new HashSet<>();
for(VecPaired<VecPaired<Vec, Integer>, Double> neighbor : proj_vc.search(proj_vc.get(i), K))
nns.add(neighbor.getVector().getPair());
origNNs.add(nns);
}
SimpleDataSet transformed_0 = instance.transform(proj, true);
SimpleDataSet transformed_1 = instance.transform(proj);
for(SimpleDataSet transformed : new SimpleDataSet[]{transformed_0, transformed_1})
{
double sameNN = 0;
            VectorArray<VecPaired<Vec, Integer>> trans_vc = new VectorArray<>(new EuclideanDistance());
            for (int i = 0; i < transformed.size(); i++)
                trans_vc.add(new VecPaired<>(transformed.getDataPoint(i).getNumericalValues(), i));
for(int i = 0; i < orig_dim.rows(); i++)
{
for(VecPaired<VecPaired<Vec, Integer>, Double> neighbor : trans_vc.search(trans_vc.get(i), K*3))
if(origNNs.get(i).contains(neighbor.getVector().getPair()))
sameNN++;
}
double score = sameNN/(transformed.size()*K);
assertTrue("was " + score, score >= 0.50);
}
ex.shutdown();
}
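    /**
     * A minimal shape-check sketch added for illustration (not part of the
     * original suite): the embedding keeps one output point per input point.
     * The perplexity of 5 for 50 points, and the assumption that the default
     * target dimension is below the 10 input dimensions, are choices made
     * only for this sketch.
     */
    @Test
    public void testTransformShapeSketch()
    {
        Random rand = RandomUtil.getRandom();
        TSNE instance = new TSNE();
        instance.setPerplexity(5);//keep the neighborhood small, we only use 50 points
        Matrix raw = Matrix.random(50, 10, rand);
        SimpleDataSet data = new SimpleDataSet(raw.cols(), new CategoricalData[0]);
        for (int i = 0; i < raw.rows(); i++)
            data.add(new DataPoint(raw.getRow(i)));
        SimpleDataSet embedded = instance.transform(data);
        assertEquals(data.size(), embedded.size());
        assertTrue(embedded.getDataPoint(0).getNumericalValues().length() < raw.cols());
    }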
}
| 4,817 | 30.907285 | 126 | java |
JSAT | JSAT-master/JSAT/test/jsat/distributions/BetaTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.distributions;
import java.util.Arrays;
import java.util.Random;
import jsat.linear.Vec;
import jsat.utils.random.RandomUtil;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class BetaTest
{
double[] range = new double[]
{
-3., -2.75, -2.5, -2.25, -2., -1.75,
-1.5, -1.25, -1., -0.75, -0.5, -0.25,
0., 0.25, 0.5, 0.75, 1., 1.25, 1.5,
1.75, 2., 2.25, 2.5, 2.75, 3.
};
public BetaTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
* Test of pdf method, of class Beta.
*/
@Test
public void testPdf()
{
System.out.println("pdf");
ContinuousDistribution instance = null;
double[] parmTwo0 = new double[]{0,1.7589346862069402,1.1992165982748448,0.9843543098421845,0.8660395722419083,0.7907925823252256,0.7393296651378021,0.7028344377314711,0.6766845883748794,0.6582485884902135,0.6459658355538042,0.63891821212204,0.6366197723675814,0.63891821212204,0.6459658355538042,0.6582485884902135,0.6766845883748794,0.7028344377314711,0.7393296651378021,0.7907925823252256,0.8660395722419083,0.9843543098421845,1.1992165982748448,1.7589346862069402,0};
double[] paramTwo1 = new double[]{0,4.752563983397751,2.8965431767081506,2.1142236480652308,1.6445302034980571,1.3191626662009392,1.0757944013125078,0.8850491290558684,0.7309205031500751,0.6037892363480897,0.49744882216410985,0.407672421967636,0.33145630368119416,0.2665911929745277,0.21140530443767036,0.1646030046584622,0.12515953295020701,0.09225001734633169,0.06520024954176369,0.04345170496780734,0.026536147348631647,0.014056838886003111,0.005674401156432602,0.0010960132952870466,0};
double[] paramTwo2 = new double[]{0,0.0010960132952870466,0.005674401156432602,0.014056838886003111,0.026536147348631647,0.04345170496780734,0.06520024954176369,0.09225001734633169,0.12515953295020701,0.1646030046584622,0.21140530443767036,0.2665911929745277,0.33145630368119416,0.407672421967636,0.49744882216410985,0.6037892363480897,0.7309205031500751,0.8850491290558684,1.0757944013125078,1.3191626662009392,1.6445302034980571,2.1142236480652308,2.8965431767081506,4.752563983397751,0};
double[] paramTwo3 = new double[]{0,0.03217532266307823,0.14891252930402912,0.32803182145023146,0.5474832246889396,0.7875378145455926,1.0307877164838113,1.2621461059054015,1.4688472081503543,1.6404462984968413,1.7688197021612204,1.8481647942980308,1.8749999999999987,1.8481647942980308,1.7688197021612204,1.6404462984968413,1.4688472081503543,1.2621461059054015,1.0307877164838113,0.7875378145455926,0.5474832246889396,0.32803182145023146,0.14891252930402912,0.03217532266307823,0};
instance = new Beta(0.5, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(parmTwo0[i], instance.pdf(range[i]/5.9+0.5), 1e-10);
instance = new Beta(0.5, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo1[i], instance.pdf(range[i]/5.9+0.5), 1e-10);
instance = new Beta(3, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo2[i], instance.pdf(range[i]/5.9+0.5), 1e-10);
instance = new Beta(3, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo3[i], instance.pdf(range[i]/5.9+0.5), 1e-10);
}
/**
* Test of cdf method, of class Beta.
*/
@Test
public void testCdf()
{
System.out.println("cdf");
ContinuousDistribution instance = null;
double[] parmTwo0 = new double[]{0,0.11788372107011924,0.17813214287521345,0.22386746294904356,0.2628616068083092,0.29785628990636376,0.3302095813792621,0.3607209558369871,0.3899170658416282,0.4181755799950677,0.44578746335898967,0.4729921922455666,0.5000000000000001,0.5270078077544333,0.5542125366410103,0.5818244200049323,0.6100829341583718,0.639279044163013,0.669790418620738,0.7021437100936363,0.7371383931916908,0.7761325370509564,0.8218678571247866,0.8821162789298808,1.00000000000000};
double[] paramTwo1 = new double[]{0,0.33749333898201467,0.49209569417277615,0.5965739352074145,0.6755170848657505,0.7379399861678736,0.788455195917326,0.8298465182750109,0.863973724796828,0.8921687214544558,0.9154350625005302,0.9345587974142613,0.9501747372194232,0.962808391664012,0.9729037492836445,0.9808423820196484,0.9869570017772313,0.9915413394571007,0.9948575107152673,0.9971416182210584,0.998608087714689,0.9994530763322207,0.999857188867766,0.9999876694226759,1.00000000000000};
double[] paramTwo2 = new double[]{0,0.000012330577324056844,0.00014281113223401983,0.0005469236677792935,0.0013919122853110215,0.0028583817789416396,0.005142489284732719,0.008458660542899338,0.01304299822276872,0.019157617980351684,0.02709625071635551,0.037191608335987954,0.04982526278057676,0.0654412025857387,0.08456493749946976,0.10783127854554417,0.13602627520317195,0.1701534817249891,0.21154480408267395,0.2620600138321264,0.32448291513424954,0.40342606479258547,0.5079043058272239,0.6625066610179853,1.00000000000000};
double[] paramTwo3 = new double[]{0,0.00036998602561136404,0.003944791957616764,0.013869702028410147,0.03231266867319054,0.06055649816429026,0.09909720018482686,0.14774233740235485,0.2057093750425177,0.27172403046269944,0.34411862272567684,0.42093042217327137,0.5000000000000001,0.5790695778267286,0.6558813772743232,0.7282759695373006,0.7942906249574823,0.8522576625976451,0.9009027998151732,0.9394435018357097,0.9676873313268095,0.9861302979715898,0.9960552080423832,0.9996300139743887,1.00000000000000};
instance = new Beta(0.5, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(parmTwo0[i], instance.cdf(range[i]/5.9+0.5), 1e-10);
instance = new Beta(0.5, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo1[i], instance.cdf(range[i]/5.9+0.5), 1e-10);
instance = new Beta(3, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo2[i], instance.cdf(range[i]/5.9+0.5), 1e-10);
instance = new Beta(3, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo3[i], instance.cdf(range[i]/5.9+0.5), 1e-10);
}
/**
* Test of invCdf method, of class Beta.
*/
@Test
public void testInvCdf()
{
System.out.println("invCdf");
ContinuousDistribution instance = null;
double[] parmTwo0 = new double[]{0.00016576624284345113,0.005956051954461413,0.019925063164199123,0.04184154775649763,0.07134268596183385,0.10794009671164481,0.1510279226168282,0.19989285972581594,0.2537259660230678,0.31163605318240706,0.3726644398793855,0.4358008224267242,0.5000000000000002,0.5641991775732754,0.6273355601206145,0.6883639468175929,0.746274033976932,0.8001071402741841,0.8489720773831718,0.8920599032883552,0.9286573140381662,0.9581584522435024,0.9800749368358008,0.9940439480455386,0.9998342337571565};
double[] paramTwo1 = new double[]{0.000019111239764582307,0.0006886190862176469,0.0023195643945001826,0.0049245874336406245,0.008524203064237706,0.013147431007302195,0.01883272167757733,0.025629245127778394,0.03359864352684645,0.04281739498704826,0.053380007394563556,0.06540337027538033,0.079032767076173,0.0944503376606125,0.11188727260931457,0.13164189883735045,0.15410746275722342,0.17981669319696458,0.20951722718400426,0.24430834148810293,0.2859123406197187,0.33728555405656346,0.40428132830521096,0.5019552958188651,0.7147352141973307};
double[] paramTwo2 = new double[]{0.28526478580267,0.4980447041811349,0.5957186716947891,0.6627144459434365,0.7140876593802813,0.7556916585118971,0.7904827728159958,0.8201833068030354,0.8458925372427766,0.8683581011626496,0.8881127273906855,0.9055496623393875,0.920967232923827,0.9345966297246198,0.9466199926054364,0.9571826050129517,0.9664013564731535,0.9743707548722216,0.9811672783224227,0.9868525689926978,0.9914757969357623,0.9950754125663593,0.9976804356054998,0.9993113809137824,0.9999808887602354};
double[] paramTwo3 = new double[]{0.09848468152143303,0.18808934769778385,0.23687906456075017,0.2746049499190648,0.30675508011185254,0.3355120289556864,0.36200518342617627,0.3869129426302727,0.41068664797302484,0.4336514783763557,0.45605842357305015,0.4781141375560683,0.5,0.5218858624439316,0.5439415764269498,0.5663485216236444,0.5893133520269751,0.6130870573697274,0.6379948165738237,0.6644879710443137,0.6932449198881474,0.7253950500809352,0.7631209354392499,0.8119106523022162,0.9015153184785673};
instance = new Beta(0.5, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(parmTwo0[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new Beta(0.5, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo1[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new Beta(3, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo2[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new Beta(3, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo3[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
}
/**
* Test of min method, of class Beta.
*/
@Test
public void testMin()
{
System.out.println("min");
ContinuousDistribution dist = new Beta(0.5, 3);
assertTrue(0 == dist.min());
}
/**
* Test of max method, of class Beta.
*/
@Test
public void testMax()
{
System.out.println("max");
ContinuousDistribution dist = new Beta(0.5, 3);
assertTrue(1 == dist.max());
}
/**
* Test of mean method, of class Beta.
*/
@Test
public void testMean()
{
System.out.println("mean");
ContinuousDistribution dist = new Beta(0.5, 0.5);
assertEquals(0.5, dist.mean(), 1e-10);
dist = new Beta(0.5, 3);
assertEquals(0.14285714285714285, dist.mean(), 1e-10);
dist = new Beta(3, 0.5);
assertEquals(0.8571428571428571, dist.mean(), 1e-10);
dist = new Beta(3, 3);
assertEquals(0.5, dist.mean(), 1e-10);
}
/**
* Test of median method, of class Beta.
*/
@Test
public void testMedian()
{
System.out.println("median");
ContinuousDistribution dist = new Beta(0.5, 0.5);
assertEquals(0.5, dist.median(), 1e-10);
dist = new Beta(0.5, 3);
assertEquals(0.079032767076173, dist.median(), 1e-10);
dist = new Beta(3, 0.5);
assertEquals(0.920967232923827, dist.median(), 1e-10);
dist = new Beta(3, 3);
assertEquals(0.5, dist.median(), 1e-10);
}
/**
* Test of mode method, of class Beta.
*/
@Test
public void testMode()
{
System.out.println("mode");
ContinuousDistribution dist = new Beta(0.5, 0.5);
assertTrue(Double.isNaN(dist.mode()));
dist = new Beta(0.5, 3);
assertTrue(Double.isNaN(dist.mode()));
dist = new Beta(3, 0.5);
assertTrue(Double.isNaN(dist.mode()));
dist = new Beta(3, 3);
assertEquals(0.5, dist.mode(), 1e-10);
}
/**
* Test of variance method, of class Beta.
*/
@Test
public void testVariance()
{
System.out.println("variance");
ContinuousDistribution dist = new Beta(0.5, 0.5);
assertEquals(0.125, dist.variance(), 1e-10);
dist = new Beta(0.5, 3);
assertEquals(0.0272108843537415, dist.variance(), 1e-10);
dist = new Beta(3, 0.5);
assertEquals(0.0272108843537415, dist.variance(), 1e-10);
dist = new Beta(3, 3);
assertEquals(0.03571428571428571, dist.variance(), 1e-10);
}
/**
* Test of skewness method, of class Beta.
*/
@Test
public void testSkewness()
{
System.out.println("skewness");
ContinuousDistribution dist = new Beta(0.5, 0.5);
assertEquals(0, dist.skewness(), 1e-10);
dist = new Beta(0.5, 3);
assertEquals(1.5745916432444336, dist.skewness(), 1e-10);
dist = new Beta(3, 0.5);
assertEquals(-1.5745916432444336, dist.skewness(), 1e-10);
dist = new Beta(3, 3);
assertEquals(0, dist.skewness(), 1e-10);
}
@Test
public void testSample(){
System.out.println("hashCode");
Beta d1 = new Beta(0.5, 0.5);
Beta d2 = new Beta(1.6, 0.5);
Beta d3 = new Beta(0.5, 2.5);
Beta d4 = new Beta(3.5, 2.5);
Random rand = RandomUtil.getRandom();
for(Beta d : Arrays.asList(d1, d2, d3, d4))
{
Vec sample = d.sampleVec(1000000, rand);
assertEquals(d.mean(), sample.mean(), 1e-2);
assertEquals(d.standardDeviation(), sample.standardDeviation(), 1e-2);
}
}
@Test
public void testEquals(){
System.out.println("equals");
ContinuousDistribution d1 = new Beta(0.5, 0.5);
ContinuousDistribution d2 = new Beta(0.6, 0.5);
ContinuousDistribution d3 = new Beta(0.5, 0.6);
ContinuousDistribution d4 = new Beta(0.5, 0.5);
        Integer i = 1;
assertFalse(d1.equals(d2));
assertFalse(d1.equals(d3));
assertFalse(d2.equals(d3));
assertFalse(d1.equals(i));
assertFalse(d1.equals(null));
assertEquals(d1, d1);
assertEquals(d1, d4);
assertEquals(d1, d1.clone());
}
@Test
public void testHashCode(){
System.out.println("hashCode");
ContinuousDistribution d1 = new Beta(0.5, 0.5);
ContinuousDistribution d2 = new Beta(0.6, 0.5);
ContinuousDistribution d4 = new Beta(0.5, 0.5);
assertEquals(d1.hashCode(), d4.hashCode());
assertFalse(d1.hashCode()==d2.hashCode());
}
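    /**
     * A round-trip consistency sketch added for illustration (not part of
     * the original suite): invCdf should approximately invert cdf on the
     * interior of the support. The 1e-6 tolerance is a loose, assumed bound.
     */
    @Test
    public void testCdfInvCdfRoundTrip()
    {
        ContinuousDistribution dist = new Beta(3, 3);
        for (double p = 0.05; p < 1; p += 0.05)
            assertEquals(p, dist.cdf(dist.invCdf(p)), 1e-6);
    }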
}
| 14,250 | 48.311419 | 551 | java |
JSAT | JSAT-master/JSAT/test/jsat/distributions/CauchyTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.distributions;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class CauchyTest
{
double[] range = new double[]
{
-3., -2.75, -2.5, -2.25, -2., -1.75,
-1.5, -1.25, -1., -0.75, -0.5, -0.25,
0., 0.25, 0.5, 0.75, 1., 1.25, 1.5,
1.75, 2., 2.25, 2.5, 2.75, 3.
};
public CauchyTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
* Test of pdf method, of class Cauchy.
*/
@Test
public void testPdf()
{
System.out.println("pdf");
ContinuousDistribution instance = null;
double[] parmTwo0 = new double[]{0.012732395447351628,0.014719532309077025,0.017205939793718417,0.020371832715762605,0.02448537586029159,0.0299585775231803,0.03744822190397538,0.048046775273025005,0.06366197723675814,0.08780962377483881,0.12732395447351627,0.19588300688233273,0.3183098861837907,0.5092958178940651,0.6366197723675814,0.5092958178940651,0.3183098861837907,0.19588300688233273,0.12732395447351627,0.08780962377483881,0.06366197723675814,0.048046775273025005,0.03744822190397538,0.0299585775231803,0.02448537586029159};
double[] paramTwo1 = new double[]{0.04493786628477045,0.048814295644798576,0.05305164769729845,0.057656130327630006,0.06261833826566374,0.067906109052542,0.07345612758087477,0.07916515304052826,0.08488263631567752,0.0904075416379997,0.09549296585513718,0.09986192507726767,0.1032356387623105,0.10537154852980657,0.1061032953945969,0.10537154852980657,0.1032356387623105,0.09986192507726767,0.09549296585513718,0.0904075416379997,0.08488263631567752,0.07916515304052826,0.07345612758087477,0.067906109052542,0.06261833826566374};
double[] paramTwo2 = new double[]{0.00439048118874194,0.0047776343142032374,0.005218194855471979,0.005722424920158035,0.00630316606304536,0.006976655039644728,0.007763655760580261,0.008691054912868005,0.009794150344116638,0.011119996023887883,0.012732395447351628,0.014719532309077025,0.017205939793718417,0.020371832715762605,0.02448537586029159,0.0299585775231803,0.03744822190397538,0.048046775273025005,0.06366197723675814,0.08780962377483881,0.12732395447351627,0.19588300688233273,0.3183098861837907,0.5092958178940651,0.6366197723675814};
double[] paramTwo3 = new double[]{0.02122065907891938,0.02270263675605045,0.02432941805226426,0.026117734250977697,0.028086166427981528,0.030255197102617728,0.03264716781372212,0.035286084380651166,0.03819718634205488,0.04140616405642805,0.04493786628477045,0.048814295644798576,0.05305164769729845,0.057656130327630006,0.06261833826566374,0.067906109052542,0.07345612758087477,0.07916515304052826,0.08488263631567752,0.0904075416379997,0.09549296585513718,0.09986192507726767,0.1032356387623105,0.10537154852980657,0.1061032953945969};
instance = new Cauchy(0.5, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(parmTwo0[i], instance.pdf(range[i]), 1e-10);
instance = new Cauchy(0.5, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo1[i], instance.pdf(range[i]), 1e-10);
instance = new Cauchy(3, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo2[i], instance.pdf(range[i]), 1e-10);
instance = new Cauchy(3, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo3[i], instance.pdf(range[i]), 1e-10);
}
/**
* Test of cdf method, of class Cauchy.
*/
@Test
public void testCdf()
{
System.out.println("cdf");
ContinuousDistribution instance = null;
double[] parmTwo0 = new double[]{0.045167235300866526,0.04858979034752886,0.052568456711253375,0.0572491470487001,0.06283295818900114,0.06960448727306395,0.07797913037736925,0.08858553278290471,0.10241638234956668,0.12111894159084341,0.14758361765043326,0.1871670418109988,0.25,0.35241638234956674,0.5,0.6475836176504333,0.75,0.8128329581890013,0.8524163823495667,0.8788810584091566,0.8975836176504333,0.9114144672170953,0.9220208696226307,0.9303955127269361,0.9371670418109989};
double[] paramTwo1 = new double[]{0.22556274802780257,0.23727438865200817,0.25,0.2638308495666619,0.27885793837630446,0.2951672353008665,0.3128329581890012,0.3319086824248374,0.35241638234956674,0.3743340836219976,0.39758361765043326,0.4220208696226307,0.44743154328874657,0.47353532394041015,0.5,0.5264646760595899,0.5525684567112534,0.5779791303773694,0.6024163823495667,0.6256659163780024,0.6475836176504333,0.6680913175751626,0.6871670418109987,0.7048327646991335,0.7211420616236955};
double[] paramTwo2 = new double[]{0.026464676059589853,0.027609670711723877,0.02885793837630446,0.030224066838919483,0.03172551743055352,0.033383366430525085,0.035223287477277265,0.03727687115420514,0.03958342416056554,0.04219246315884134,0.045167235300866526,0.04858979034752886,0.052568456711253375,0.0572491470487001,0.06283295818900114,0.06960448727306395,0.07797913037736925,0.08858553278290471,0.10241638234956668,0.12111894159084341,0.14758361765043326,0.1871670418109988,0.25,0.35241638234956674,0.5};
double[] paramTwo3 = new double[]{0.14758361765043326,0.15307117542621002,0.15894699814425117,0.16524934053856788,0.17202086962263063,0.17930913508098678,0.1871670418109988,0.1956532942677373,0.20483276469913347,0.21477671252272273,0.22556274802780257,0.23727438865200817,0.25,0.2638308495666619,0.27885793837630446,0.2951672353008665,0.3128329581890012,0.3319086824248374,0.35241638234956674,0.3743340836219976,0.39758361765043326,0.4220208696226307,0.44743154328874657,0.47353532394041015,0.5};
instance = new Cauchy(0.5, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(parmTwo0[i], instance.cdf(range[i]), 1e-10);
instance = new Cauchy(0.5, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo1[i], instance.cdf(range[i]), 1e-10);
instance = new Cauchy(3, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo2[i], instance.cdf(range[i]), 1e-10);
instance = new Cauchy(3, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo3[i], instance.cdf(range[i]), 1e-10);
}
/**
* Test of invCdf method, of class Cauchy.
*/
@Test
public void testInvCdf()
{
System.out.println("invCdf");
ContinuousDistribution instance = null;
double[] parmTwo0 = new double[]{-18.912611074230398,-2.7103586757986284,-1.2177088085070724,-0.644097849410133,-0.3326793086091363,-0.13173366628867933,0.012712405159932383,0.12479067856711157,0.2170191767409292,0.29665435282523944,0.3683227410588536,0.43526498965126525,0.5,0.5647350103487349,0.6316772589411463,0.7033456471747606,0.7829808232590707,0.8752093214328884,0.9872875948400677,1.131733666288679,1.3326793086091362,1.644097849410134,2.2177088085070724,3.7103586757986284,19.912611074230565};
double[] paramTwo1 = new double[]{-115.97566644538239,-18.76215205479177,-9.806252851042434,-6.364587096460799,-4.496075851654818,-3.2904019977320758,-2.4237255690404056,-1.7512559285973306,-1.1978849395544249,-0.7200738830485633,-0.2900635536468784,0.11158993790759136,0.5,0.8884100620924091,1.2900635536468785,1.7200738830485633,2.1978849395544247,2.7512559285973306,3.4237255690404056,4.290401997732075,5.496075851654818,7.364587096460804,10.806252851042434,19.76215205479177,116.97566644538338};
double[] paramTwo2 = new double[]{-16.412611074230398,-0.21035867579862844,1.2822911914929276,1.855902150589867,2.167320691390864,2.3682663337113206,2.5127124051599323,2.624790678567112,2.717019176740929,2.7966543528252394,2.8683227410588534,2.935264989651265,3.,3.064735010348735,3.1316772589411466,3.2033456471747606,3.282980823259071,3.375209321432888,3.4872875948400677,3.631733666288679,3.832679308609136,4.144097849410134,4.717708808507073,6.210358675798629,22.412611074230565};
double[] paramTwo3 = new double[]{-113.47566644538239,-16.26215205479177,-7.306252851042434,-3.8645870964607987,-1.996075851654818,-0.7904019977320758,0.07627443095959441,0.7487440714026694,1.3021150604455751,1.7799261169514367,2.2099364463531215,2.6115899379075915,3.,3.388410062092409,3.7900635536468785,4.220073883048563,4.697884939554425,5.251255928597331,5.923725569040405,6.790401997732075,7.996075851654818,9.864587096460804,13.306252851042434,22.26215205479177,119.47566644538338};
instance = new Cauchy(0.5, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(parmTwo0[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new Cauchy(0.5, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo1[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new Cauchy(3, 0.5);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo2[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new Cauchy(3, 3);
for(int i = 0; i < range.length; i++)
assertEquals(paramTwo3[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
}
/**
* Test of min method, of class Cauchy.
*/
@Test
public void testMin()
{
System.out.println("min");
Cauchy instance = new Cauchy();
assertTrue(Double.NEGATIVE_INFINITY == instance.min());
}
/**
* Test of max method, of class Cauchy.
*/
@Test
public void testMax()
{
System.out.println("max");
Cauchy instance = new Cauchy();
assertTrue(Double.POSITIVE_INFINITY == instance.max());
}
/**
* Test of mean method, of class Cauchy.
*/
@Test
public void testMean()
{
System.out.println("mean");
Cauchy instance = new Cauchy();
assertTrue(Double.isNaN(instance.mean()));
}
/**
* Test of median method, of class Cauchy.
*/
@Test
public void testMedian()
{
System.out.println("median");
ContinuousDistribution dist = new Cauchy(0.5, 0.5);
assertEquals(0.5, dist.median(), 1e-10);
dist = new Cauchy(0.5, 3);
assertEquals(0.5, dist.median(), 1e-10);
dist = new Cauchy(3, 0.5);
assertEquals(3, dist.median(), 1e-10);
dist = new Cauchy(3, 3);
assertEquals(3, dist.median(), 1e-10);
}
/**
* Test of mode method, of class Cauchy.
*/
@Test
public void testMode()
{
System.out.println("mode");
ContinuousDistribution dist = new Cauchy(0.5, 0.5);
assertEquals(0.5, dist.mode(), 1e-10);
dist = new Cauchy(0.5, 3);
assertEquals(0.5, dist.mode(), 1e-10);
dist = new Cauchy(3, 0.5);
assertEquals(3, dist.mode(), 1e-10);
dist = new Cauchy(3, 3);
assertEquals(3, dist.mode(), 1e-10);
}
/**
* Test of variance method, of class Cauchy.
*/
@Test
public void testVariance()
{
System.out.println("variance");
Cauchy instance = new Cauchy();
assertTrue(Double.isNaN(instance.variance()));
}
/**
* Test of standardDeviation method, of class Cauchy.
*/
@Test
public void testStandardDeviation()
{
System.out.println("standardDeviation");
Cauchy instance = new Cauchy();
assertTrue(Double.isNaN(instance.standardDeviation()));
}
/**
* Test of skewness method, of class Cauchy.
*/
@Test
public void testSkewness()
{
System.out.println("skewness");
Cauchy instance = new Cauchy();
assertTrue(Double.isNaN(instance.skewness()));
}
@Test
public void testEquals(){
System.out.println("equals");
ContinuousDistribution d1 = new Cauchy(0.5, 0.5);
ContinuousDistribution d2 = new Cauchy(0.6, 0.5);
ContinuousDistribution d3 = new Cauchy(0.5, 0.6);
ContinuousDistribution d4 = new Cauchy(0.5, 0.5);
        Integer i = 1;
assertFalse(d1.equals(d2));
assertFalse(d1.equals(d3));
assertFalse(d2.equals(d3));
assertFalse(d1.equals(i));
assertFalse(d1.equals(null));
assertEquals(d1, d1);
assertEquals(d1, d4);
assertEquals(d1, d1.clone());
}
@Test
public void testHashCode(){
System.out.println("hashCode");
ContinuousDistribution d1 = new Cauchy(0.5, 0.5);
ContinuousDistribution d2 = new Cauchy(0.6, 0.5);
ContinuousDistribution d4 = new Cauchy(0.5, 0.5);
assertEquals(d1.hashCode(), d4.hashCode());
assertFalse(d1.hashCode()==d2.hashCode());
}
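    /**
     * A consistency sketch added for illustration (not part of the original
     * suite): the median of a Cauchy is its location parameter, so it should
     * agree with invCdf at p = 0.5.
     */
    @Test
    public void testMedianMatchesInvCdf()
    {
        ContinuousDistribution dist = new Cauchy(3, 0.5);
        assertEquals(dist.median(), dist.invCdf(0.5), 1e-10);
    }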
}
| 13,048 | 49.188462 | 553 | java |
JSAT | JSAT-master/JSAT/test/jsat/distributions/ChiSquaredTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.distributions;
import java.util.Arrays;
import java.util.Random;
import jsat.linear.Vec;
import jsat.utils.random.RandomUtil;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ChiSquaredTest
{
double[] range = new double[]
{
-3., -2.75, -2.5, -2.25, -2., -1.75,
-1.5, -1.25, -1., -0.75, -0.5, -0.25,
0., 0.25, 0.5, 0.75, 1., 1.25, 1.5,
1.75, 2., 2.25, 2.5, 2.75, 3.
};
public ChiSquaredTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
* Test of pdf method, of class ChiSquared.
*/
@Test
public void testPdf()
{
System.out.println("pdf");
ChiSquared instance = null;
double[] df0p5 = new double[]{0,0.57892140748046,0.303780786595025,0.19779032668795404,0.14067411288159115,0.10501343513705805,0.08082991466920406,0.06354409778550153,0.05073345595415418,0.04098672759009646,0.03342245268591643,0.027460408471784374,0.02270276544056991,0.01886776125462765,0.015750525546907305,0.013198841745077544,0.011097559156248158,0.00935823516479523,0.00791205789422622,0.006704892579220859,0.005693741133655941,0.004844165066959195,0.0041283792228703564,0.003523821549028722,0.0030120664071556255};
double[] df2 = new double[]{0,0.44124845129229767,0.38940039153570244,0.3436446393954861,0.3032653298563167,0.26763071425949514,0.23618327637050734,0.2084310098392542,0.18393972058572117,0.16232623367917487,0.14325239843009505,0.12641979790237323,0.11156508007421492,0.09845583760209702,0.08688697172522256,0.07667748342246423,0.06766764161830635,0.05971648413335981,0.052699612280932166,0.046507244605331746,0.0410424993119494,0.036219878517125735,0.031963930603353785,0.02820806975188867,0.024893534183931972};
double[] df12 = new double[]{0,1.1221528404039961e-7,3.1689484988257036e-6,0.00002123658431322813,0.00007897534631674914,0.0002126937820589504,0.0004670616549319115,0.0008908843949301007,0.0015328310048810098,0.002437642866140136,0.0036430968839033773,0.005177824623610207,0.007059977723446413,0.00929666221893194,0.011884027781460088,0.014807882683800707,0.018044704431548358,0.021562924197423262,0.025324376672987984,0.029285823023482597,0.03340047144527132,0.03761943615510857,0.041893090719406986,0.04617228502682505,0.05040940672246224};
instance = new ChiSquared(0.5);
for(int i = 0; i < range.length; i++)
assertEquals(df0p5[i], instance.pdf(range[i]+3), 1e-10);
instance = new ChiSquared(2);
for(int i = 0; i < range.length; i++)
assertEquals(df2[i], instance.pdf(range[i]+3), 1e-10);
instance = new ChiSquared(12);
for(int i = 0; i < range.length; i++)
assertEquals(df12[i], instance.pdf(range[i]+3), 1e-10);
}
/**
* Test of cdf method, of class ChiSquared.
*/
@Test
public void testCdf()
{
System.out.println("cdf");
ChiSquared instance = null;
double[] df0p5 = new double[]{0,0.640157206083084,0.7436779447314611,0.8047991126008802,0.8464864041916775,0.87688573996339,0.8999365132844983,0.9178700519520441,0.932078867989891,0.9434907075698542,0.9527532988560908,0.9603349624798424,0.9665835558410207,0.9717630228513373,0.9760771069754204,0.9796853136500356,0.9827139881404834,0.9852642025957218,0.9874174943013079,0.9892401185815257,0.9907862515204601,0.992100435436148,0.9932194688421462,0.9941738826536312,0.9949891040512912};
double[] df2 = new double[]{0,0.11750309741540454,0.22119921692859512,0.31271072120902776,0.3934693402873666,0.4647385714810097,0.5276334472589853,0.5831379803214916,0.6321205588285577,0.6753475326416503,0.7134952031398099,0.7471604041952535,0.7768698398515702,0.8030883247958059,0.8262260565495548,0.8466450331550716,0.8646647167633873,0.8805670317332803,0.8946007754381357,0.9069855107893365,0.9179150013761012,0.9275602429657486,0.9360721387932924,0.9435838604962227,0.950212931632136};
double[] df12 = new double[]{0,4.7604532844419965e-9,2.7381356338284025e-7,2.803736903342154e-6,0.00001416493732234249,0.00004859953912795168,0.00013055446292196965,0.0002962521561470425,0.0005941848175816929,0.0010845927305314122,0.0018380854505885185,0.0029336100162652462,0.0044559807752478486,0.006493173769154503,0.009133564163468133,0.01246325444683668,0.016563608480614434,0.021509074844324818,0.02736535406330157,0.03418793918529287,0.04202103819530612,0.05089686994361906,0.06083531238569402,0.0718438726220419,0.08391794203130346};
instance = new ChiSquared(0.5);
for(int i = 0; i < range.length; i++)
assertEquals(df0p5[i], instance.cdf(range[i]+3), 1e-10);
instance = new ChiSquared(2);
for(int i = 0; i < range.length; i++)
assertEquals(df2[i], instance.cdf(range[i]+3), 1e-10);
instance = new ChiSquared(12);
for(int i = 0; i < range.length; i++)
assertEquals(df12[i], instance.cdf(range[i]+3), 1e-10);
}
/**
* Test of invCdf method, of class ChiSquared.
*/
@Test
public void testInvCdf()
{
System.out.println("invCdf");
ChiSquared instance = null;
double[] df0p5 = new double[]
{
6.093614961311839e-9, 7.897349917808368e-6, 0.0000892198004181562, 0.0003994149546910942, 0.0011856544464383886,
0.002787741883826599, 0.005640285791335964, 0.010277055179909702, 0.017338752494932063, 0.027585874814306646,
0.041918981404673365, 0.061409712302900245, 0.08734760470574683, 0.1213106759556879, 0.16527293485434283,
0.22177158982679546, 0.29417538482483335, 0.38713399406906546, 0.5073739757420875, 0.6652156884578238,
0.8777617025944231, 1.1765830034154225, 1.6305260036139653, 2.4434484059336072, 5.189836396971451
};
double[] df2 = new double[]
{
0.016460998273030734, 0.10086170725378371, 0.18898168684184483, 0.28116390124237867, 0.37780105578399414,
0.4793457065308407, 0.5863230764328129, 0.6993474969594976, 0.8191437801216357, 0.9465754088938508,
1.0826823353838824, 1.2287326054136622, 1.3862943611198906, 1.5573387079962149, 1.7443908240178614,
1.9507592964883234, 2.180897956057897, 2.4410042125542932, 2.740067680496221, 3.091849013423549,
3.5189972140196675, 4.062864644986952, 4.812251543869772, 6.024523151010405, 9.608042089466538
};
double[] df12 = new double[]
{
3.4180950469151483, 5.203890364257783, 6.1214555157081225, 6.8266639318647435, 7.430985599781199,
7.97751232692583, 8.488313547594942, 8.976714493919122, 9.451780012809346, 9.920320955923309,
10.387938467434852, 10.859649089467469, 11.34032237742414, 11.835049834156907, 12.349524382578897,
12.890506293567935, 13.466478617419032, 14.08866824134716, 14.772779308969527, 15.5422029995657,
16.434605452689, 17.51743977608438, 18.932767402341046, 21.08284981490997, 26.821781360074844
};
instance = new ChiSquared(0.5);
        for(int i = 0; i < range.length-2; i++)//-2 because it enters a numerically unstable range that isn't fair
assertEquals(df0p5[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new ChiSquared(2);
for(int i = 0; i < range.length; i++)
assertEquals(df2[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new ChiSquared(12);
for(int i = 0; i < range.length; i++)
assertEquals(df12[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
}
/**
* Test of min method, of class ChiSquared.
*/
@Test
public void testMin()
{
System.out.println("min");
ChiSquared dist = new ChiSquared(0.5);
assertTrue(0 == dist.min());
}
/**
* Test of max method, of class ChiSquared.
*/
@Test
public void testMax()
{
System.out.println("max");
ChiSquared dist = new ChiSquared(0.5);
assertTrue(Double.POSITIVE_INFINITY == dist.max());
}
/**
* Test of mean method, of class ChiSquared.
*/
@Test
public void testMean()
{
System.out.println("mean");
ChiSquared dist = new ChiSquared(12);
assertEquals(12, dist.mean(), 1e-10);
dist = new ChiSquared(2);
assertEquals(2, dist.mean(), 1e-10);
dist = new ChiSquared(0.5);
assertEquals(0.5, dist.mean(), 1e-10);
}
/**
* Test of median method, of class ChiSquared.
*/
@Test
public void testMedian()
{
System.out.println("median");
ChiSquared dist = new ChiSquared(12);
assertEquals(11.34032237742414, dist.median(), 1e-10);
dist = new ChiSquared(2);
assertEquals(1.3862943611198906, dist.median(), 1e-10);
dist = new ChiSquared(0.5);
assertEquals(0.08734760470574683, dist.median(), 1e-10);
}
/**
* Test of mode method, of class ChiSquared.
*/
@Test
public void testMode()
{
System.out.println("mode");
ChiSquared dist = new ChiSquared(12);
assertEquals(10, dist.mode(), 1e-10);
dist = new ChiSquared(2);
assertEquals(0, dist.mode(), 1e-10);
dist = new ChiSquared(0.5);
assertEquals(0, dist.mode(), 1e-10);
}
/**
* Test of variance method, of class ChiSquared.
*/
@Test
public void testVariance()
{
System.out.println("variance");
ChiSquared dist = new ChiSquared(12);
assertEquals(24, dist.variance(), 1e-10);
dist = new ChiSquared(2);
assertEquals(4, dist.variance(), 1e-10);
dist = new ChiSquared(0.5);
assertEquals(1, dist.variance(), 1e-10);
}
/**
* Test of skewness method, of class ChiSquared.
*/
@Test
public void testSkewness()
{
System.out.println("skewness");
ChiSquared dist = new ChiSquared(12);
assertEquals(0.816496580927726, dist.skewness(), 1e-10);
dist = new ChiSquared(2);
assertEquals(2, dist.skewness(), 1e-10);
dist = new ChiSquared(0.5);
assertEquals(4, dist.skewness(), 1e-10);
}
@Test
public void testSample(){
System.out.println("hashCode");
ChiSquared d1 = new ChiSquared(1);
ChiSquared d2 = new ChiSquared(2);
ChiSquared d3 = new ChiSquared(3.5);
ChiSquared d4 = new ChiSquared(10.5);
Random rand = RandomUtil.getRandom();
for(ChiSquared d : Arrays.asList(d1, d2, d3, d4))
{
Vec sample = d.sampleVec(1000000, rand);
assertEquals(0, (d.mean()-sample.mean())/d.mean(), 1e-2);
assertEquals(0, (d.standardDeviation()-sample.standardDeviation())/d.standardDeviation(), 1e-2);
}
}
@Test
public void testEquals(){
System.out.println("equals");
ContinuousDistribution d1 = new ChiSquared(0.5);
ContinuousDistribution d2 = new ChiSquared(0.6);
ContinuousDistribution d4 = new ChiSquared(0.5);
        Integer i = 1;
assertFalse(d1.equals(d2));
assertFalse(d1.equals(i));
assertFalse(d1.equals(null));
assertEquals(d1, d1);
assertEquals(d1, d4);
assertEquals(d1, d1.clone());
}
@Test
public void testHashCode(){
System.out.println("hashCode");
ContinuousDistribution d1 = new ChiSquared(0.5);
ContinuousDistribution d2 = new ChiSquared(0.6);
ContinuousDistribution d4 = new ChiSquared(0.5);
assertEquals(d1.hashCode(), d4.hashCode());
assertFalse(d1.hashCode()==d2.hashCode());
}
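    /**
     * A round-trip consistency sketch added for illustration (not part of
     * the original suite): cdf(invCdf(p)) should recover p away from the
     * tails. The 1e-6 tolerance is a loose, assumed bound.
     */
    @Test
    public void testCdfInvCdfRoundTrip()
    {
        ChiSquared dist = new ChiSquared(2);
        for (double p = 0.05; p < 1; p += 0.05)
            assertEquals(p, dist.cdf(dist.invCdf(p)), 1e-6);
    }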
}
| 12,049 | 41.132867 | 550 | java |
JSAT | JSAT-master/JSAT/test/jsat/distributions/ContinuousDistributionTest.java | /*
* Copyright (C) 2015 Edward Raff <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jsat.distributions;
import jsat.linear.Vec;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff <[email protected]>
*/
public class ContinuousDistributionTest
{
static private final ContinuousDistribution dumbNormal_0_1 = new ContinuousDistribution()
{
@Override
public double pdf(double x)
{
return Normal.pdf(x, 0, 1);
}
@Override
public String getDistributionName()
{
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public String[] getVariables()
{
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public double[] getCurrentVariableValues()
{
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void setVariable(String var, double value)
{
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public ContinuousDistribution clone()
{
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void setUsingData(Vec data)
{
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public double mode()
{
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public double min()
{
return Double.NEGATIVE_INFINITY;
}
@Override
public double max()
{
return Double.POSITIVE_INFINITY;
}
};
public ContinuousDistributionTest()
{
}
@BeforeClass
public static void setUpClass()
{
}
@AfterClass
public static void tearDownClass()
{
}
@Before
public void setUp()
{
}
@After
public void tearDown()
{
}
/**
* Test of logPdf method, of class ContinuousDistribution.
*/
@Test
public void testLogPdf()
{
System.out.println("logPdf");
Normal norm = new Normal();
for(double i = -3; i < 3; i += 0.1)
assertEquals(norm.logPdf(i), dumbNormal_0_1.logPdf(i), 0.01);
}
/**
* Test of cdf method, of class ContinuousDistribution.
*/
@Test
public void testCdf()
{
System.out.println("cdf");
Normal norm = new Normal();
for(double i = -3; i < 3; i += 0.1)
{
assertEquals(norm.cdf(i), dumbNormal_0_1.cdf(i), 0.01);
}
}
@Test
public void testInvCdf()
{
System.out.println("invCdf");
Normal norm = new Normal();
for(double p = 0.01; p < 1; p += 0.1)
{
assertEquals(norm.invCdf(p), dumbNormal_0_1.invCdf(p), 0.01);
}
}
/**
* Test of mean method, of class ContinuousDistribution.
*/
@Test
public void testMean()
{
System.out.println("mean");
Normal norm = new Normal();
assertEquals(norm.mean(), dumbNormal_0_1.mean(), 0.01);
}
/**
* Test of variance method, of class ContinuousDistribution.
*/
@Test
public void testVariance()
{
System.out.println("variance");
Normal norm = new Normal();
assertEquals(norm.variance(), dumbNormal_0_1.variance(), 0.01);
}
/**
* Test of skewness method, of class ContinuousDistribution.
*/
@Test
public void testSkewness()
{
System.out.println("skewness");
Normal norm = new Normal();
assertEquals(norm.skewness(), dumbNormal_0_1.skewness(), 0.01);
}
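    /**
     * A sketch in the spirit of the tests above, added for illustration (not
     * part of the original suite): the numerically derived standard deviation
     * should match the closed-form Normal, and should be the square root of
     * the numerically derived variance.
     */
    @Test
    public void testStandardDeviationSketch()
    {
        Normal norm = new Normal();
        assertEquals(norm.standardDeviation(), dumbNormal_0_1.standardDeviation(), 0.01);
        assertEquals(Math.sqrt(dumbNormal_0_1.variance()), dumbNormal_0_1.standardDeviation(), 0.01);
    }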
}
| 5,106 | 25.46114 | 139 | java |
JSAT | JSAT-master/JSAT/test/jsat/distributions/ExponentialTest.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package jsat.distributions;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author Edward Raff
*/
public class ExponentialTest
{
double[] range = new double[]
{
-3., -2.75, -2.5, -2.25, -2., -1.75,
-1.5, -1.25, -1., -0.75, -0.5, -0.25,
0., 0.25, 0.5, 0.75, 1., 1.25, 1.5,
1.75, 2., 2.25, 2.5, 2.75, 3.
};
public ExponentialTest()
{
}
@BeforeClass
public static void setUpClass() throws Exception
{
}
@AfterClass
public static void tearDownClass() throws Exception
{
}
@Before
public void setUp()
{
}
/**
* Test of logPdf method, of class Exponential.
*/
@Test
public void testLogPdf()
{
System.out.println("logPdf");
ContinuousDistribution instance = null;
double[] param0p5 = new double[]{0,0,-0.6931471805599453,-0.8181471805599453,-0.9431471805599453,-1.0681471805599452,-1.1931471805599454,-1.3181471805599452,-1.4431471805599454,-1.5681471805599454,-1.6931471805599452,-1.8181471805599452,-1.9431471805599454,-2.0681471805599454,-2.1931471805599454,-2.3181471805599454,-2.4431471805599454,-2.5681471805599454,-2.6931471805599454,-2.8181471805599454,-2.9431471805599454,-3.0681471805599454,-3.1931471805599454,-3.3181471805599454,-3.4431471805599454};
double[] param2 = new double[]{0,0,0.6931471805599453,0.19314718055994531,-0.30685281944005466,-0.8068528194400547,-1.3068528194400546,-1.8068528194400546,-2.3068528194400546,-2.8068528194400546,-3.3068528194400546,-3.8068528194400546,-4.306852819440055,-4.806852819440055,-5.306852819440055,-5.806852819440055,-6.306852819440055,-6.806852819440055,-7.306852819440055,-7.806852819440055,-8.306852819440055,-8.806852819440055,-9.306852819440055,-9.806852819440055,-10.306852819440055};
double[] param12 = new double[]{0,0,2.4849066497880004,-0.5150933502119998,-3.5150933502119996,-6.515093350211999,-9.515093350212,-12.515093350212,-15.515093350212,-18.515093350212,-21.515093350212,-24.515093350212,-27.515093350212,-30.515093350212,-33.515093350212,-36.515093350212,-39.515093350212,-42.515093350212,-45.515093350212,-48.515093350212,-51.515093350212,-54.515093350212,-57.515093350212,-60.515093350212,-63.515093350212};
instance = new Exponential(0.5);
for(int i = 0; i < range.length; i++)
assertEquals(param0p5[i], instance.logPdf(range[i]+2.5), 1e-10);
instance = new Exponential(2);
for(int i = 0; i < range.length; i++)
assertEquals(param2[i], instance.logPdf(range[i]+2.5), 1e-10);
instance = new Exponential(12);
for(int i = 0; i < range.length; i++)
assertEquals(param12[i], instance.logPdf(range[i]+2.5), 1e-10);
}
/**
* Test of pdf method, of class Exponential.
*/
@Test
public void testPdf()
{
System.out.println("pdf");
ContinuousDistribution instance = null;
double[] param0p5 = new double[]{0,0,0.5,0.4412484512922977,0.38940039153570244,0.3436446393954861,0.3032653298563167,0.26763071425949514,0.23618327637050734,0.2084310098392542,0.18393972058572117,0.16232623367917487,0.14325239843009505,0.12641979790237323,0.11156508007421491,0.09845583760209703,0.08688697172522257,0.07667748342246423,0.06766764161830635,0.05971648413335981,0.052699612280932166,0.046507244605331746,0.0410424993119494,0.03621987851712573,0.031963930603353785};
double[] param2 = new double[]{0,0,2.,1.2130613194252668,0.7357588823428847,0.44626032029685964,0.2706705664732254,0.1641699972477976,0.09957413673572789,0.060394766844637,0.03663127777746836,0.022217993076484612,0.013475893998170934,0.008173542876928133,0.004957504353332717,0.0030068783859551447,0.0018237639311090325,0.0011061687402956673,0.0006709252558050237,0.00040693673802128834,0.0002468196081733591,0.0001497036597754012,0.00009079985952496971,0.000055072898699494316,0.00003340340158049132};
double[] param12 = new double[]{0,0,12.,0.5974448204143673,0.029745026119996302,0.0014809176490401547,0.00007373054823993851,3.6708278460219094e-6,1.8275975693655156e-7,9.099072513494288e-9,4.5301614531349173e-10,2.2554345798469e-11,1.122914756260821e-12,5.590663374124077e-14,2.7834273962922836e-15,1.3857869007618943e-16,6.899426717152272e-18,3.4350222966592726e-19,1.7101968992891223e-20,8.514568994741645e-22,4.2391542866409685e-23,2.110550642909174e-24,1.0507812915235825e-25,5.231532000075697e-27,2.6046264135643672e-28};
instance = new Exponential(0.5);
for(int i = 0; i < range.length; i++)
assertEquals(param0p5[i], instance.pdf(range[i]+2.5), 1e-10);
instance = new Exponential(2);
for(int i = 0; i < range.length; i++)
assertEquals(param2[i], instance.pdf(range[i]+2.5), 1e-10);
instance = new Exponential(12);
for(int i = 0; i < range.length; i++)
assertEquals(param12[i], instance.pdf(range[i]+2.5), 1e-10);
}
/**
* Test of cdf method, of class Exponential.
*/
@Test
public void testCdf()
{
System.out.println("cdf");
ContinuousDistribution instance = null;
double[] param0p5 = new double[]{0,0,0.,0.11750309741540454,0.22119921692859512,0.31271072120902776,0.3934693402873666,0.4647385714810097,0.5276334472589853,0.5831379803214916,0.6321205588285577,0.6753475326416503,0.7134952031398099,0.7471604041952535,0.7768698398515702,0.8030883247958059,0.8262260565495548,0.8466450331550716,0.8646647167633873,0.8805670317332803,0.8946007754381357,0.9069855107893365,0.9179150013761012,0.9275602429657486,0.9360721387932924};
double[] param2 = new double[]{0,0,0.,0.3934693402873666,0.6321205588285577,0.7768698398515702,0.8646647167633873,0.9179150013761012,0.950212931632136,0.9698026165776815,0.9816843611112658,0.9888910034617577,0.9932620530009145,0.995913228561536,0.9975212478233336,0.9984965608070224,0.9990881180344455,0.9994469156298522,0.9996645373720975,0.9997965316309894,0.9998765901959134,0.9999251481701124,0.9999546000702375,0.9999724635506503,0.9999832982992097};
double[] param12 = new double[]{0,0,0.,0.950212931632136,0.9975212478233336,0.9998765901959134,0.9999938557876467,0.9999996940976795,0.9999999847700203,0.999999999241744,0.9999999999622486,0.9999999999981205,0.9999999999999064,0.9999999999999953,0.9999999999999998,1.,1.,1.,1.,1.,1.,1.,1.,1.,1.};
instance = new Exponential(0.5);
for(int i = 0; i < range.length; i++)
assertEquals(param0p5[i], instance.cdf(range[i]+2.5), 1e-10);
instance = new Exponential(2);
for(int i = 0; i < range.length; i++)
assertEquals(param2[i], instance.cdf(range[i]+2.5), 1e-10);
instance = new Exponential(12);
for(int i = 0; i < range.length; i++)
assertEquals(param12[i], instance.cdf(range[i]+2.5), 1e-10);
}
/**
* Test of invCdf method, of class Exponential.
*/
@Test
public void testInvCdf()
{
System.out.println("invCdf");
ContinuousDistribution instance;
double[] param0p5 = new double[]
{0.016460998273030734,0.10086170725378371,0.18898168684184483,0.28116390124237867,0.37780105578399414,0.4793457065308407,0.5863230764328129,0.6993474969594976,0.8191437801216357,0.9465754088938508,1.0826823353838824,1.2287326054136622,1.3862943611198906,1.5573387079962149,1.7443908240178614,1.9507592964883234,2.180897956057897,2.4410042125542932,2.740067680496221,3.091849013423549,3.5189972140196675,4.062864644986952,4.812251543869772,6.024523151010405,9.608042089466538};
double[] param2 = new double[]
{0.004115249568257684,0.025215426813445928,0.047245421710461206,0.07029097531059467,0.09445026394599854,0.11983642663271017,0.14658076910820322,0.1748368742398744,0.20478594503040892,0.2366438522234627,0.2706705838459706,0.30718315135341556,0.34657359027997264,0.3893346769990537,0.43609770600446535,0.48768982412208084,0.5452244890144743,0.6102510531385733,0.6850169201240552,0.7729622533558872,0.8797493035049169,1.015716161246738,1.203062885967443,1.5061307877526013,2.4020105223666346};
double[] param12 = new double[]
{0.0006858749280429472,0.004202571135574321,0.007874236951743534,0.011715162551765777,0.01574171065766642,0.01997273777211836,0.024430128184700535,0.029139479039979065,0.03413099083840149,0.03944064203724378,0.04511176397432843,0.05119719189223593,0.057762265046662105,0.06488911283317561,0.07268295100074422,0.08128163735368013,0.09087074816907904,0.10170850885642888,0.11416948668734253,0.12882704222598118,0.14662488391748613,0.16928602687445632,0.20051048099457383,0.2510217979587669,0.40033508706110577};
instance = new Exponential(0.5);
        for(int i = 0; i < range.length-2; i++)//-2 because it enters a numerically unstable range that isn't fair
assertEquals(param0p5[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new Exponential(2);
for(int i = 0; i < range.length; i++)
assertEquals(param2[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
instance = new Exponential(12);
for(int i = 0; i < range.length; i++)
assertEquals(param12[i], instance.invCdf(range[i]/6.1+0.5), 1e-10);
}
/**
* Test of min method, of class Exponential.
*/
@Test
public void testMin()
{
System.out.println("min");
ContinuousDistribution dist = new Exponential(0.5);
assertTrue(0 == dist.min());
}
/**
* Test of max method, of class Exponential.
*/
@Test
public void testMax()
{
System.out.println("max");
ContinuousDistribution dist = new Exponential(0.5);
assertTrue(Double.POSITIVE_INFINITY == dist.max());
}
/**
* Test of mean method, of class Exponential.
*/
@Test
public void testMean()
{
System.out.println("mean");
ContinuousDistribution dist = new Exponential(0.5);
assertEquals(2, dist.mean(), 1e-10);
dist = new Exponential(2);
assertEquals(0.5, dist.mean(), 1e-10);
dist = new Exponential(12);
assertEquals(0.08333333333333333, dist.mean(), 1e-10);
}
/**
* Test of median method, of class Exponential.
*/
@Test
public void testMedian()
{
System.out.println("median");
ContinuousDistribution dist = new Exponential(0.5);
assertEquals(1.3862943611198906, dist.median(), 1e-10);
dist = new Exponential(2);
assertEquals(0.34657359027997264, dist.median(), 1e-10);
dist = new Exponential(12);
assertEquals(0.057762265046662105, dist.median(), 1e-10);
}
/**
* Test of mode method, of class Exponential.
*/
@Test
public void testMode()
{
System.out.println("mode");
ContinuousDistribution dist = new Exponential(0.5);
assertEquals(0, dist.mode(), 1e-10);
dist = new Exponential(2);
assertEquals(0, dist.mode(), 1e-10);
dist = new Exponential(12);
assertEquals(0, dist.mode(), 1e-10);
}
/**
* Test of variance method, of class Exponential.
*/
@Test
public void testVariance()
{
System.out.println("variance");
ContinuousDistribution dist = new Exponential(0.5);
assertEquals(4, dist.variance(), 1e-10);
dist = new Exponential(2);
assertEquals(0.25, dist.variance(), 1e-10);
dist = new Exponential(12);
assertEquals(0.006944444444444444, dist.variance(), 1e-10);
}
/**
* Test of skewness method, of class Exponential.
*/
@Test
public void testSkewness()
{
System.out.println("skewness");
ContinuousDistribution dist = new Exponential(0.5);
assertEquals(2, dist.skewness(), 1e-10);
dist = new Exponential(2);
assertEquals(2, dist.skewness(), 1e-10);
dist = new Exponential(12);
assertEquals(2, dist.skewness(), 1e-10);
}
@Test
public void testEquals(){
System.out.println("equals");
ContinuousDistribution d1 = new Exponential(0.5);
ContinuousDistribution d2 = new Exponential(0.6);
ContinuousDistribution d4 = new Exponential(0.5);
        Integer i = 1;
assertFalse(d1.equals(d2));
assertFalse(d1.equals(i));
assertFalse(d1.equals(null));
assertEquals(d1, d1);
assertEquals(d1, d4);
assertEquals(d1, d1.clone());
}
@Test
public void testHashCode(){
System.out.println("hashCode");
ContinuousDistribution d1 = new Exponential(0.5);
ContinuousDistribution d2 = new Exponential(0.6);
ContinuousDistribution d4 = new Exponential(0.5);
assertEquals(d1.hashCode(), d4.hashCode());
assertFalse(d1.hashCode()==d2.hashCode());
}
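    /**
     * A sketch of the memoryless property added for illustration (not part
     * of the original suite): for an exponential, P(X > s + t | X > s)
     * should equal P(X > t); written with cdf as
     * (1 - cdf(s + t)) / (1 - cdf(s)) = 1 - cdf(t).
     */
    @Test
    public void testMemorylessProperty()
    {
        ContinuousDistribution dist = new Exponential(2);
        double s = 0.3, t = 0.7;
        double conditionalTail = (1 - dist.cdf(s + t)) / (1 - dist.cdf(s));
        assertEquals(1 - dist.cdf(t), conditionalTail, 1e-10);
    }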
}
| 13,044 | 47.857678 | 535 | java |