Question

I have two files that need to be accessed by the Hadoop cluster: good.txt and bad.txt. Since both files need to be accessible from different nodes, I place them in the distributed cache in the driver class as follows:

Configuration conf = new Configuration();
DistributedCache.addCacheFile(new URI("/user/training/Rakshith/good.txt"),conf);
DistributedCache.addCacheFile(new URI("/user/training/Rakshith/bad.txt"),conf);
Job job = new Job(conf);

Now both the good and the bad file are placed in the distributed cache. I access them in the mapper class as follows:

public class LetterMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
    private Path[] files;

    @Override
    protected void setup(Context context)
            throws IOException, InterruptedException {
        files = DistributedCache.getLocalCacheFiles(new Configuration(context.getConfiguration()));
    }

I need to check whether a word is present in good.txt or bad.txt, so I use something like this:

File file = new File(files[0].toString()); // to access good.txt
BufferedReader br = new BufferedReader(new FileReader(file));
StringBuilder sb = new StringBuilder();
String input = null;
while ((input = br.readLine()) != null) {
    sb.append(input);
}
input = sb.toString();

I am supposed to get the content of the good file in my input variable, but I don't. Have I missed anything?


Solution

Does the job finish successfully? The map task may fail because you are using JobConf in this line:

files=DistributedCache.getLocalCacheFiles(new JobConf(context.getConfiguration()));

If you change it as shown below it should work; I don't see any problem with the remaining code you posted in the question.

files=DistributedCache.getLocalCacheFiles(context.getConfiguration());

or

files=DistributedCache.getLocalCacheFiles(new Configuration(context.getConfiguration()));
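Put together, a minimal corrected setup() would look something like the sketch below. It keeps the same deprecated DistributedCache API the question uses; the null check is an extra defensive addition, not part of the original code:

@Override
protected void setup(Context context) throws IOException, InterruptedException {
    // Pass the context's Configuration straight through; per the diagnosis
    // above, wrapping it in a JobConf is the likely cause of the failure.
    files = DistributedCache.getLocalCacheFiles(context.getConfiguration());
    if (files == null) {
        throw new IOException("No files found in the distributed cache");
    }
}

For what it's worth, newer Hadoop releases deprecate DistributedCache in favor of Job.addCacheFile() in the driver and context.getCacheFiles() in the task, but the snippet above matches the API used in the question.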

OTHER TIPS

@rVr, this is my driver class:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class AvgWordLength {

  public static void main(String[] args) throws Exception {

    if (args.length != 2) {
      System.out.printf("Usage: AvgWordLength <input dir> <output dir>\n");
      System.exit(-1);
    }

    Configuration conf = new Configuration();
    // Cache files must be added to the Configuration before the Job copies it
    DistributedCache.addCacheFile(new URI("/user/training/Rakshith/good.txt"), conf);
    DistributedCache.addCacheFile(new URI("/user/training/Rakshith/bad.txt"), conf);
    Job job = new Job(conf);

    job.setJarByClass(AvgWordLength.class);
    job.setJobName("Average Word Length");

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    job.setMapperClass(LetterMapper.class);

    job.setMapOutputKeyClass(LongWritable.class);
    job.setMapOutputValueClass(Text.class); // the mapper emits Text values

    boolean success = job.waitForCompletion(true);
    System.exit(success ? 0 : 1);
  }
}

And my mapper class is:

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;



public class LetterMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
    private Path[] files;

    @Override
    protected void setup(Context context)
            throws IOException, InterruptedException {
        files = DistributedCache.getLocalCacheFiles(new Configuration(context.getConfiguration()));
        System.out.println("in setup()" + files.toString());
    }

    @Override
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        System.out.println("in map----->>" + files.toString()); // added just to view logs

        HashMap<String, String> h = new HashMap<String, String>();
        String input = value.toString();
        if (isPresent(input, files[0])) {        // files[0] is good.txt
            h.put(input, "good");
        } else if (isPresent(input, files[1])) { // files[1] is bad.txt
            h.put(input, "bad");
        }
    }

    public static boolean isPresent(String n, Path files2) throws IOException {
        File file = new File(files2.toString());
        BufferedReader br = new BufferedReader(new FileReader(file));
        StringBuilder sb = new StringBuilder();
        String input = null;
        while ((input = br.readLine()) != null) {
            sb.append(input);
        }
        br.close();
        input = sb.toString();
        Pattern pattern = Pattern.compile(n);
        Matcher matcher = pattern.matcher(input);
        return matcher.find();
    }
}
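As a side note, isPresent() reopens and rereads an entire cached file for every input record, and Pattern.compile(n) treats each word as a regular expression, which will misbehave on words containing regex metacharacters. If good.txt and bad.txt hold one word per line (an assumption; the question does not say), a sketch like the following loads both lists into in-memory sets once per task so that map() does constant-time lookups. The output written by map() here is purely illustrative, since the original map() never emits anything:

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class LetterMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
    private final Set<String> goodWords = new HashSet<String>();
    private final Set<String> badWords = new HashSet<String>();

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        Path[] files = DistributedCache.getLocalCacheFiles(context.getConfiguration());
        loadWords(files[0], goodWords); // good.txt was added to the cache first
        loadWords(files[1], badWords);  // bad.txt was added second
    }

    // Read one word per line from a local cache file into the given set.
    private static void loadWords(Path p, Set<String> target) throws IOException {
        BufferedReader br = new BufferedReader(new FileReader(p.toString()));
        String line;
        while ((line = br.readLine()) != null) {
            target.add(line.trim());
        }
        br.close();
    }

    @Override
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String word = value.toString().trim();
        if (goodWords.contains(word)) {
            context.write(key, new Text(word + "\tgood"));
        } else if (badWords.contains(word)) {
            context.write(key, new Text(word + "\tbad"));
        }
    }
}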
Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow