I am not 100% sure if this is a bug or if I am doing something wrong, but if you give Percentile a large amount of data that consists entirely of the same value (see code below), the evaluate method takes a very long time. If you give Percentile random values, evaluate takes considerably less time.
As noted below, Median is a subclass of Percentile.
/**
 * Demonstrates the slow {@code Percentile.evaluate()} path: a large data
 * set where every element is the same value. Prints the result and the
 * elapsed wall-clock time in milliseconds.
 */
private void testOne() {
    final int size = 200000;
    final double sameValue = 100.0;
    // Fill a primitive array directly instead of boxing every element
    // into a List<Double> and unboxing via ArrayUtils.toPrimitive.
    double[] data = new double[size];
    for (int i = 0; i < size; i++) {
        data[i] = sameValue;
    }
    Median m = new Median();
    m.setData(data);
    long start = System.currentTimeMillis();
    System.out.println("Start:" + start);
    double result = m.evaluate();
    System.out.println("Result:" + result);
    System.out.println("Time:" + (System.currentTimeMillis() - start));
}
/**
 * Control case for {@code testOne()}: the same amount of data but with
 * uniformly random values, where {@code Percentile.evaluate()} is fast.
 * Prints the result and the elapsed wall-clock time in milliseconds.
 */
private void testTwo() {
    final int size = 200000;
    Random r = new Random();
    // Fill a primitive array directly instead of boxing every element
    // into a List<Double> and unboxing via ArrayUtils.toPrimitive.
    double[] data = new double[size];
    for (int i = 0; i < size; i++) {
        data[i] = r.nextDouble() * 100.0;
    }
    Median m = new Median();
    m.setData(data);
    long start = System.currentTimeMillis();
    System.out.println("Start:" + start);
    double result = m.evaluate();
    System.out.println("Result:" + result);
    System.out.println("Time:" + (System.currentTimeMillis() - start));
}