I don't know if it a problem with Vertica or Hadoop but even though my code successfully compiles, it does not run when triggered and does not giv ANY output at all. I cannot understand what the problem is and am stumped. Any help would be really appreciated. The code is:
Code: Select all
import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import com.vertica.hadoop.VerticaConfiguration;
import com.vertica.hadoop.VerticaInputFormat;
import com.vertica.hadoop.VerticaRecord;
public class MapRVertica {
public static class VerticaMapper extends
Mapper<LongWritable, VerticaRecord, LongWritable, Text> {
@Override
public void map(LongWritable key, VerticaRecord value, Context context)
throws IOException, InterruptedException {
context.write(key, new Text((String) value.get(0)));
}
}
public static void main(String[] args) throws IOException {
if (args.length != 1) {
System.err.println("Usage: MapRVertica <output_path>");
System.exit(-1);
}
Job job = new Job();
job.setJarByClass(MapRVertica.class);
job.setJobName("MapReduce Vertica");
job.setInputFormatClass(VerticaInputFormat.class);
VerticaConfiguration.configureVertica(job.getConfiguration(),
new String[]{"hostname"},
"dbname", "port", "username", "password");
VerticaInputFormat.setInput(job, "select now()");
FileOutputFormat.setOutputPath(job, new Path(args[0]));
job.setMapperClass(VerticaMapper.class);
job.setNumReduceTasks(0);
job.setOutputKeyClass(LongWritable.class);
job.setOutputValueClass(Text.class);
}
}