/*
 * Matrix-matrix multiplication on Hadoop: A x B = C.
 *
 * Constraint: A, B, and C must all be square matrices of the same size.
 * I use this program to evaluate the efficiency of Hadoop for matrix
 * multiplication, so I do not handle non-square matrices.
 *
 * === Data preparation ===
 * Matrix data must be stored in a file on HDFS, and the line number must be
 * prepended to each line. For example, the following represents a 4x4 matrix:
 *
 *   0 18 20 16 14
 *   1 17 12 11 19
 *   2 10 17 11 19
 *   3 14 17 20 10
 *
 * The left matrix (A in this example) should be stored in a file named "left";
 * the right matrix (B) in a file named "right". I use the filenames to
 * distinguish the two inputs. Place "left" and "right" in the same folder
 * (let's call it "input").
 *
 * === Run the program ===
 *   > hadoop jar matrixmul.jar MatrixMul input output 8 2
 *
 * Results will be placed in the "output" folder on HDFS.
 *   8: all matrices are 8x8
 *   2: every partitioned block is of size 2x2
 * The total size must be an exact multiple of the block size.
 *
 * === Read the results ===
 * Given the above sample command, we multiply two 8x8 matrices in 2x2 blocks,
 * so the resulting C matrix has 16 blocks. The output folder will contain 16
 * separate files: part-r-00000, part-r-00001, ..., part-r-00015. Every file
 * stores one block of C; in this example, every block has 2 rows and 2
 * columns. The files are organized in row-major order.
 *
 * === Algorithm ===
 * Mappers read the input data and route each row slice to the reducers that
 * need it. Every reducer computes one block of the resulting matrix.
 */
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MatrixMul {

  public static class MyMapper extends Mapper<LongWritable, Text, IntWritable, Text> {

    private boolean isLeftMatrix = false;
    private int totalSize, partSize, npart;

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
      // Use the input file's name to tell the left matrix from the right one.
      FileSplit fileSplit = (FileSplit) context.getInputSplit();
      String filename = fileSplit.getPath().getName();
      isLeftMatrix = "left".equalsIgnoreCase(filename);

      // Read the size and partition information from the job configuration.
      Configuration conf = context.getConfiguration();
      totalSize = conf.getInt("matrix-mul-totalsize", -1);
      partSize = conf.getInt("matrix-mul-partsize", -1);
      npart = conf.getInt("matrix-mul-npart", -1);
      if (totalSize < 0 || partSize < 0 || npart < 0)
        throw new IOException("matrix-mul size/partition configuration is missing");
    }

    @Override
    public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      String line = value.toString();
      String[] strs = line.split(" ");
      if (strs.length != totalSize + 1)
        throw new IOException("bad input line (expected " + (totalSize + 1)
            + " tokens, got " + strs.length + "): " + line);

      int linenum = Integer.parseInt(strs[0]);
      int part_hor = linenum / partSize;               // block-row id of this line
      int rowInBlock = linenum - part_hor * partSize;  // row offset within the block

      // Cut the row into npart slices of partSize values and route each slice
      // to every reducer (C block) that needs it.
      for (int part_ver = 0; part_ver < npart; part_ver++) {
        StringBuilder msg = new StringBuilder();
        for (int i = part_ver * partSize; i < (part_ver + 1) * partSize; i++) {
          if (msg.length() > 0) msg.append(' ');
          msg.append(strs[i + 1]);
        }
        if (isLeftMatrix) {
          // A row slice of A is needed by every block in block-row part_hor of C.
          Text toSend = new Text("l:" + rowInBlock + ":" + part_ver + "#" + msg);
          for (int k = 0; k < npart; k++)
            context.write(new IntWritable(part_hor * npart + k), toSend);
        } else {
          // A row slice of B is needed by every block in block-column part_ver of C.
          Text toSend = new Text("r:" + rowInBlock + ":" + part_hor + "#" + msg);
          for (int k = 0; k < npart; k++)
            context.write(new IntWritable(k * npart + part_ver), toSend);
        }
      }
    }
  }
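  /*
   * Worked example of the routing above (illustration only), using the 4x4
   * sample matrix from the header with partSize=2, so npart=2: row 1 of
   * "left" ("1 17 12 11 19") has part_hor=0 and rowInBlock=1, and produces
   * "l:1:0#17 12" and "l:1:1#11 19" for reducers 0 and 1 (block-row 0 of C).
   * The same row in "right" produces "r:1:0#17 12" for reducers 0 and 2
   * (block-column 0 of C) and "r:1:0#11 19" for reducers 1 and 3
   * (block-column 1).
   */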
  public static class MyReducer extends Reducer<IntWritable, Text, Text, Text> {

    private int totalSize, partSize, npart;
    private int[][] left = null;   // partSize x totalSize strip of A
    private int[][] right = null;  // totalSize x partSize strip of B

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
      // Read the size and partition information from the job configuration.
      Configuration conf = context.getConfiguration();
      totalSize = conf.getInt("matrix-mul-totalsize", -1);
      partSize = conf.getInt("matrix-mul-partsize", -1);
      npart = conf.getInt("matrix-mul-npart", -1);
      if (totalSize < 0 || partSize < 0 || npart < 0)
        throw new IOException("matrix-mul size/partition configuration is missing");
      left = new int[partSize][totalSize];
      right = new int[totalSize][partSize];
    }

    @Override
    public void reduce(IntWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
      // Buffer every incoming slice; the multiplication happens in cleanup().
      for (Text val : values) {
        String[] metaAndData = val.toString().split("#");
        String[] metas = metaAndData[0].split(":");
        String[] numbers = metaAndData[1].split(" ");
        int rowInBlock = Integer.parseInt(metas[1]);
        int blkIndex = Integer.parseInt(metas[2]);
        if ("l".equalsIgnoreCase(metas[0])) {
          // Slice of A: metas[2] is its block-column, i.e. the column offset.
          int start = blkIndex * partSize;
          for (int i = 0; i < partSize; i++)
            left[rowInBlock][start + i] = Integer.parseInt(numbers[i]);
        } else {
          // Slice of B: metas[2] is its block-row; recover the absolute row.
          int rowIndex = blkIndex * partSize + rowInBlock;
          for (int i = 0; i < partSize; i++)
            right[rowIndex][i] = Integer.parseInt(numbers[i]);
        }
      }
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
      // Multiply the buffered partSize x totalSize strip of A with the
      // totalSize x partSize strip of B to get this reducer's C block.
      int[][] res = new int[partSize][partSize]; // Java zero-initializes int arrays
      for (int i = 0; i < partSize; i++)
        for (int k = 0; k < totalSize; k++)
          for (int j = 0; j < partSize; j++)
            res[i][j] += left[i][k] * right[k][j];

      // Emit the block one row per line, values separated by single spaces.
      for (int i = 0; i < partSize; i++) {
        StringBuilder output = new StringBuilder();
        for (int j = 0; j < partSize; j++) {
          if (j > 0) output.append(' ');
          output.append(res[i][j]);
        }
        context.write(new Text(output.toString()), null);
      }
    }
  }
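  /*
   * Design note: reduce() only buffers data and cleanup() does the actual
   * multiplication. This relies on each reducer receiving exactly one key:
   * the job runs npart*npart reduce tasks and the key is a block id in
   * 0..npart*npart-1, which Hadoop's default HashPartitioner maps to
   * reducer (id % numReduceTasks) = id for non-negative IntWritable keys.
   */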
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    if (args.length != 4) {
      System.err.println("Usage: MatrixMul input-dir output-dir total-size part-size");
      System.exit(2);
    }
    int totalsize = Integer.parseInt(args[2]);
    int partsize = Integer.parseInt(args[3]);
    // The block layout in the mapper and reducer assumes total-size is an
    // exact multiple of part-size, so reject anything else up front.
    if (totalsize <= 0 || partsize <= 0 || partsize > totalsize
        || totalsize % partsize != 0) {
      System.err.println("Invalid total-size or part-size");
      System.exit(1);
    }
    conf.setInt("matrix-mul-totalsize", totalsize); // the matrix is totalsize x totalsize
    conf.setInt("matrix-mul-partsize", partsize);   // every block is partsize x partsize
    int npart = totalsize / partsize;
    conf.setInt("matrix-mul-npart", npart);         // number of blocks along one dimension

    Job job = new Job(conf, "matrix-mul");
    job.setJarByClass(MatrixMul.class);
    job.setMapperClass(MyMapper.class);
    job.setReducerClass(MyReducer.class);
    job.setNumReduceTasks(npart * npart);           // one reducer per block of C
    job.setMapOutputKeyClass(IntWritable.class);    // mapper emits (block id, slice)
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);              // reducer emits text rows of C
    job.setOutputValueClass(Text.class);
    job.setInputFormatClass(TextInputFormat.class); // every record is one complete line
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
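/*
 * A minimal data-preparation sketch (not part of the original program): it
 * writes a random square matrix in the input format described in the header,
 * one line per row with the line number prepended. The class name MatrixGen,
 * the value range 10..20 (chosen to match the sample data), and the argument
 * layout are all illustrative assumptions. Copy the generated "left" and
 * "right" files into the job's input folder with "hadoop fs -put".
 */
class MatrixGen {
  public static void main(String[] args) throws Exception {
    if (args.length != 2) {
      System.err.println("Usage: MatrixGen file-name total-size");
      System.exit(2);
    }
    int n = Integer.parseInt(args[1]);
    java.util.Random rand = new java.util.Random();
    try (java.io.PrintWriter out = new java.io.PrintWriter(args[0])) {
      for (int i = 0; i < n; i++) {
        StringBuilder sb = new StringBuilder();
        sb.append(i); // the line number comes first
        for (int j = 0; j < n; j++)
          sb.append(' ').append(10 + rand.nextInt(11)); // random value in 10..20
        out.println(sb);
      }
    }
  }
}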