StarPU Handbook
23. StarPU Java Interface

The StarPU Java Interface provides the ability to execute Java applications on top of StarPU.

The interface allows writing StarPU-like applications, as in the following vector scaling example:

package fr.labri.hpccloud.starpu.examples;

import fr.labri.hpccloud.starpu.Codelet;
import fr.labri.hpccloud.starpu.StarPU;
import fr.labri.hpccloud.starpu.data.DataHandle;
import fr.labri.hpccloud.starpu.data.IntegerVariableHandle;
import fr.labri.hpccloud.starpu.data.VectorHandle;

import java.util.Random;

import static fr.labri.hpccloud.starpu.data.DataHandle.AccessMode.*;

public class VectorScal
{
    public static final int NX = 10;
    public static final Float factor = 3.14f;

    static final Codelet scal = new Codelet()
    {
        @Override
        public void run(DataHandle[] buffers)
        {
            VectorHandle<Float> array = (VectorHandle<Float>)buffers[0];
            int n = array.getSize();
            System.out.println(String.format("scaling array %s with %d elements", array, n));
            for (int i = 0; i < n; i++)
            {
                array.setValueAt(i, factor * array.getValueAt(i));
            }
        }

        @Override
        public DataHandle.AccessMode[] getAccessModes()
        {
            // The single buffer is accessed in read-write mode.
            return new DataHandle.AccessMode[]
            {
                STARPU_RW
            };
        }
    };

    public static void main(String[] args) throws Exception
    {
        int nx = (args.length == 0) ? NX : Integer.valueOf(args[0]);
        compute(nx);
    }

    public static void compute(int nx) throws Exception
    {
        StarPU.init();
        System.out.println(String.format("VECTOR[#nx=%d]", nx));
        VectorHandle<Float> arrayHandle = VectorHandle.register(nx);
        System.out.println(String.format("scaling array %s", arrayHandle));
        for (int i = 0; i < nx; i++)
        {
            arrayHandle.setValueAt(i, i + 1.0f);
        }
        StarPU.submitTask(scal, false, arrayHandle);
        // Acquire the handle before reading the results, release it afterwards.
        arrayHandle.acquire();
        for (int i = 0; i < nx; i++)
        {
            System.out.println(String.format("v[%d] = %f", i, arrayHandle.getValueAt(i)));
        }
        arrayHandle.release();
        arrayHandle.unregister();
        StarPU.shutdown();
    }
}
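
The getAccessModes() method declares how each buffer handed to run() is accessed, one mode per data handle passed to StarPU.submitTask(). As an illustration, here is a sketch of a codelet reading one vector and writing a second one. It is not one of the distributed examples: the class name CopyScal is made up, and the STARPU_R and STARPU_W values are assumed to mirror the C access modes (only STARPU_RW appears in the example above).

package fr.labri.hpccloud.starpu.examples;

import fr.labri.hpccloud.starpu.Codelet;
import fr.labri.hpccloud.starpu.data.DataHandle;
import fr.labri.hpccloud.starpu.data.VectorHandle;

import static fr.labri.hpccloud.starpu.data.DataHandle.AccessMode.*;

public class CopyScal
{
    // Hypothetical sketch: a codelet reading one vector (buffers[0]) and
    // writing another (buffers[1]). STARPU_R and STARPU_W are assumed to
    // exist in DataHandle.AccessMode, mirroring the C access modes.
    static final Codelet copyScal = new Codelet()
    {
        @Override
        public void run(DataHandle[] buffers)
        {
            VectorHandle<Float> in  = (VectorHandle<Float>)buffers[0];
            VectorHandle<Float> out = (VectorHandle<Float>)buffers[1];
            for (int i = 0; i < in.getSize(); i++)
            {
                out.setValueAt(i, 2.0f * in.getValueAt(i));
            }
        }

        @Override
        public DataHandle.AccessMode[] getAccessModes()
        {
            // One access mode per buffer, in the order the handles are
            // passed to StarPU.submitTask().
            return new DataHandle.AccessMode[] { STARPU_R, STARPU_W };
        }
    };
}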

The interface also allows writing Spark-like applications, as in the following word-count example:

package fr.labri.hpccloud.starpu.examples;

import fr.labri.hpccloud.starpu.StarPU;
import fr.labri.hpccloud.starpu.data.DataPairSet;
import fr.labri.hpccloud.starpu.data.DataSet;
import fr.labri.hpccloud.starpu.data.Tuple2;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Arrays;
import java.util.regex.Pattern;

public class WordCount
{
    static InputStream openFile(String filename) throws Exception
    {
        return WordCount.class.getResourceAsStream(filename);
    }

    private static final Pattern SPACE = Pattern.compile(" ");

    public static void main(String[] args) throws Exception
    {
        InputStream input = new FileInputStream(args[0]);
        StarPU.init();
        compute(input);
        input.close();
        StarPU.shutdown();
    }

    private static void compute(InputStream input) throws Exception
    {
        // Read the input and split the resulting lines into 10 blocks.
        DataSet<String> lines = DataSet.readFile(input, s -> s).splitByBlocks(10);
        // Split each line into words.
        DataSet<String> words = lines.flatMap(s -> Arrays.asList(SPACE.split(s)).iterator()).splitByBlocks(10);
        // Pair each word with an initial count of 1.
        DataPairSet<String,Integer> ones = (DataPairSet<String,Integer>)words.mapToPair(w -> new Tuple2<>(w, 1));
        // Sum the counts of identical words.
        DataPairSet<String,Integer> counts = ones.reduceByKey((c1, c2) -> c1 + c2);
        // Gather the results and print each (word, count) pair.
        for (Tuple2<String,Integer> p : counts.collect())
        {
            System.out.println("(" + p._1() + "," + p._2() + ")");
        }
    }
}
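
Both styles of application share the same lifecycle: StarPU.init() is called before any data registration or task submission, and StarPU.shutdown() after the last StarPU operation. A minimal skeleton of that lifecycle follows; the class name and the try/finally arrangement are illustrative, and only the init() and shutdown() calls come from the examples above.

package fr.labri.hpccloud.starpu.examples;

import fr.labri.hpccloud.starpu.StarPU;

public class Skeleton
{
    public static void main(String[] args) throws Exception
    {
        // Initialize the StarPU runtime before any registration or submission.
        StarPU.init();
        try
        {
            // Register handles, submit codelets, or build data sets here.
        }
        finally
        {
            // Always release StarPU resources, even on error.
            StarPU.shutdown();
        }
    }
}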

The installation process is not yet integrated into the StarPU autotools mechanism. However, the file INSTALL.org in the starpujni directory explains how to proceed with the installation and shows how to run some basic examples.

Hadoop needs to be installed before running the installation process.