hadoop null pointer exception - java

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.Raster;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import javax.imageio.ImageIO;
import javax.xml.soap.Text;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
public class blur {
public static class BlurMapper extends MapReduceBase implements Mapper<Text, BytesWritable, LongWritable, BytesWritable>
{
OutputCollector<LongWritable, BytesWritable> goutput;
int IMAGE_HEIGHT = 240;
int IMAGE_WIDTH = 320;
public BytesWritable Gmiu;
public BytesWritable Gsigma;
public BytesWritable w;
byte[] bytes = new byte[IMAGE_HEIGHT*IMAGE_WIDTH*3];
public BytesWritable emit = new BytesWritable(bytes);
int count = 0;
int initVar = 125;
public LongWritable l = new LongWritable(1);
public void map(Text key, BytesWritable file,OutputCollector<LongWritable, BytesWritable> output, Reporter reporter) throws IOException {
//Read Current Image from File.
goutput = output;
//System.out.println("akhil langer");
BufferedImage img = ImageIO.read(new ByteArrayInputStream(file.getBytes()));
// BufferedImage dest = null;
//Apply Blur on Filter Operation - External JAR
// BoxBlurFilter BlurOp = new BoxBlurFilter(10,10,2);
Raster ras=img.getData();
DataBufferByte db= (DataBufferByte)ras.getDataBuffer();
byte[] data = db.getData();
byte[] byte1 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH];
byte[] byte2 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH];
for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++)
{
byte1[i]=20;
byte2[i]=125;
}
byte [] oldmiu;
oldmiu = new byte[IMAGE_HEIGHT*IMAGE_WIDTH] ;
byte [] oldsigma;
oldsigma = new byte[IMAGE_HEIGHT*IMAGE_WIDTH] ;
if(count==0){
Gmiu = new BytesWritable(data);
Gsigma = new BytesWritable(byte1);
w = new BytesWritable(byte2);
count++;
oldmiu= Gmiu.getBytes();
oldsigma= Gmiu.getBytes();
}
else{
for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++)
{
byte pixel = data[i];
Double tempmiu=new Double(0.0);
Double tempsig=new Double(0.0);
Double weight = new Double(0.0);
double temp1=0; double alpha = 0.05;
tempmiu = (1-alpha)*oldmiu[i] + alpha*pixel;
temp1=temp1+(pixel-oldmiu[i])*(pixel-oldmiu[i]);
tempsig=(1-alpha)*oldsigma[i]+ alpha*temp1;
byte1[i] = tempmiu.byteValue();
byte2[i]= tempsig.byteValue();
Gmiu.set(byte1,i,1);
Gsigma.set(byte2,i,1);
byte1 = w.getBytes();
Double w1=new Double((1-alpha)*byte1[i]+alpha*100);
byte2[i] = w1.byteValue();
w.set(byte2,i,1);
}
}
byte1 = Gsigma.getBytes();
emit.set(byte1,0,IMAGE_HEIGHT*IMAGE_WIDTH);
byte1 = Gsigma.getBytes();
emit.set(byte1,IMAGE_HEIGHT*IMAGE_WIDTH,IMAGE_HEIGHT*IMAGE_WIDTH);
byte1 = w.getBytes();
emit.set(byte1,2*IMAGE_HEIGHT*IMAGE_WIDTH,IMAGE_HEIGHT*IMAGE_WIDTH);
}
@Override
public void close(){
try{
goutput.collect(l, emit);
}
catch(Exception e){
e.printStackTrace();
System.exit(-1);
}
}
}
public static void main(String[] args) {
if(args.length!=2) {
System.err.println("Usage: blurvideo input output");
System.exit(-1);
}
JobClient client = new JobClient();
JobConf conf = new JobConf(blur.class);
conf.setOutputValueClass(BytesWritable.class);
conf.setInputFormat(SequenceFileInputFormat.class);
//conf.setNumMapTasks(n)
SequenceFileInputFormat.addInputPath(conf, new Path(args[0]));
SequenceFileOutputFormat.setOutputPath(conf, new Path(args[1]));
conf.setMapperClass(BlurMapper.class);
conf.setNumReduceTasks(0);
//conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);
client.setConf(conf);
try {
JobClient.runJob(conf);
} catch (Exception e) {
e.printStackTrace();
}
}
}
Error:
java.lang.NullPointerException
at blur$BlurMapper.close(blur.java:99)
at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:57)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:342)
at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:138)
Please reply!

Damn ... my resistance has crumbled.
Evidently, goutput.collect(l, emit); is throwing an NPE. And that means that goutput is null when close() is called.
That means either map was never called, or it was called with an output argument that was null.
I don't know Hadoop, but I suspect that the root problem is that your map method is not overriding the map method in the base class because the signature is different. The net result is that your map method never gets called, and ... NPE.
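For illustration, here is a minimal sketch of a mapper signature the framework would actually call, assuming the old org.apache.hadoop.mapred API and org.apache.hadoop.io.Text as the key type (the question imports javax.xml.soap.Text instead); the @Override annotation makes the compiler flag any mismatch:
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text; // Hadoop's Text, not javax.xml.soap.Text
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
public class BlurMapperSketch extends MapReduceBase
        implements Mapper<Text, BytesWritable, LongWritable, BytesWritable> {
    @Override // fails to compile if this does not implement Mapper.map
    public void map(Text key, BytesWritable file,
                    OutputCollector<LongWritable, BytesWritable> output,
                    Reporter reporter) throws IOException {
        // ... the image processing from the question would go here ...
    }
}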

Related

Add custom metadata to tiff

I want to add some custom metadata to a multipage tiff for further processing steps, like
identifier1 = XYZ1
identifier2 = XYZ2
...
My idea was to update (see code/TODO below)
IIOMetadata streamMetadata [option 1]
IIOMetadata imageMetadata [option 2]
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.stream.ImageInputStream;
import javax.imageio.stream.ImageOutputStream;
public class TiffMetadataExample {
public static void addMetadata(File tiff, File out, Object metadata2Add)
throws FileNotFoundException, IOException {
try (FileInputStream fis = new FileInputStream(tiff);
FileOutputStream fos = new FileOutputStream(out)) {
addMetadata(fis, fos, metadata2Add);
}
}
public static void addMetadata(InputStream inputImage, OutputStream out, Object metadata2Add)
throws IOException {
List<IIOMetadata> metadata = new ArrayList<>();
List<BufferedImage> images = getImages(inputImage, metadata);
if (metadata.size() != images.size()) {
throw new IllegalStateException();
}
// Obtain a TIFF writer
ImageWriter writer = ImageIO.getImageWritersByFormatName("TIFF").next();
try (ImageOutputStream output = ImageIO.createImageOutputStream(out)) {
writer.setOutput(output);
ImageWriteParam params = writer.getDefaultWriteParam();
params.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
// Compression: None, PackBits, ZLib, Deflate, LZW, JPEG and CCITT variants allowed
// (different plugins may use a different set of compression type names)
params.setCompressionType("Deflate");
// streamMetadata is null here
IIOMetadata streamMetadata = writer.getDefaultStreamMetadata(params);
// TODO: add custom metadata fields [option 1]
writer.prepareWriteSequence(streamMetadata);
for (int i = 0; i < images.size(); i++) {
BufferedImage image = images.get(i);
IIOMetadata imageMetadata = metadata.get(i);
// TODO: add custom metadata fields [option 2]
writer.writeToSequence(new IIOImage(image, null, imageMetadata), params);
}
writer.endWriteSequence();
} finally {
writer.dispose();
}
}
private static List<BufferedImage> getImages(final InputStream inputImage,
final List<IIOMetadata> metadata) throws IOException {
List<BufferedImage> images = new ArrayList<>();
ImageReader reader = null;
try (ImageInputStream is = ImageIO.createImageInputStream(inputImage)) {
Iterator<ImageReader> iterator = ImageIO.getImageReaders(is);
reader = iterator.next();
reader.setInput(is);
int numPages = reader.getNumImages(true);
for (int numPage = 0; numPage < numPages; numPage++) {
BufferedImage pageImage = reader.read(numPage);
IIOMetadata imageMetadata = reader.getImageMetadata(numPage);
metadata.add(imageMetadata);
images.add(pageImage);
}
return images;
} finally {
if (reader != null) {
reader.dispose();
}
}
}
}
Trying to update imageMetadata [option 2] with the following code does not work. What is wrong here?
IIOMetadataNode textEntry = new IIOMetadataNode("tEXtEntry");
textEntry.setAttribute("keyword", "aaaaaa");
textEntry.setAttribute("value", "bbb");
IIOMetadataNode text = new IIOMetadataNode("tEXt");
text.appendChild(textEntry);
Node root = meta.getAsTree(formatName);
root.appendChild(text);
//e.g. formatName = "javax_imageio_1.0"
imageMetadata.setFromTree(imageMetadata.getNativeMetadataFormatName(), root);
Or is there a nicer/other way to store some further processing information within the tiff?
This is my working solution.
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.attribute.UserDefinedFileAttributeView;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.metadata.IIOMetadataNode;
import javax.imageio.stream.ImageInputStream;
import javax.imageio.stream.ImageOutputStream;
import org.apache.commons.imaging.common.RationalNumber;
import org.apache.commons.imaging.formats.tiff.constants.TiffTagConstants;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfo;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoAscii;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoBytes;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoDouble;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoFloat;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoLong;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoRational;
import org.apache.commons.imaging.formats.tiff.taginfos.TagInfoShort;
import com.twelvemonkeys.imageio.metadata.tiff.Rational;
public class TiffMetadataExample {
public static final int TIFF_TAG_XMP = 0x2BC;
public static final String TIFF_TAG_XMP_NAME = "XMP";
private static final String SUN_TIFF_FORMAT = "com_sun_media_imageio_plugins_tiff_image_1.0";
private static final String SUN_TIFF_STREAM_FORMAT =
"com_sun_media_imageio_plugins_tiff_stream_1.0";
private static final String TAG_SET_CLASS_NAME =
"com.sun.media.imageio.plugins.tiff.BaselineTIFFTagSet";
public static void setMetaData(File in, File out, Metadata metaData) throws IOException {
try (FileInputStream fis = new FileInputStream(in);
FileOutputStream fos = new FileOutputStream(out)) {
setMetaData(fis, fos, metaData);
}
UserDefinedFileAttributeView userDefView =
Files.getFileAttributeView(out.toPath(), UserDefinedFileAttributeView.class);
for (Entry<String, String> fileAttEntry : metaData.getfileAtt().entrySet()) {
userDefView.write(fileAttEntry.getKey(),
Charset.defaultCharset().encode(fileAttEntry.getValue()));
}
}
public static void setMetaData(InputStream inputImage, OutputStream out, Metadata metdaData2Add)
throws IOException {
List<IIOMetadata> metadataList = new ArrayList<>();
List<BufferedImage> images = getImages(inputImage, metadataList);
// Obtain a TIFF writer
ImageWriter writer = ImageIO.getImageWritersByFormatName("TIFF").next();
try (ImageOutputStream output = ImageIO.createImageOutputStream(out)) {
writer.setOutput(output);
ImageWriteParam params = writer.getDefaultWriteParam();
params.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
// Compression: None, PackBits, ZLib, Deflate, LZW, JPEG and CCITT variants allowed
// (different plugins may use a different set of compression type names)
params.setCompressionType("Deflate");
IIOMetadata streamMetadata = writer.getDefaultStreamMetadata(params);
writer.prepareWriteSequence(streamMetadata);
for (int i = 0; i < images.size(); i++) {
BufferedImage image = images.get(i);
IIOMetadata imageMetadata = metadataList.get(i);
updateMetadata(imageMetadata, metdaData2Add.get());
writer.writeToSequence(new IIOImage(image, null, imageMetadata), params);
}
writer.endWriteSequence();
} finally {
writer.dispose();
}
}
private static void updateMetadata(IIOMetadata metadata, List<IIOMetadataNode> metdaData2AddList)
throws IOException {
if (SUN_TIFF_FORMAT.equals(metadata.getNativeMetadataFormatName())
|| SUN_TIFF_STREAM_FORMAT.equals(metadata.getNativeMetadataFormatName())) {
// wanted format
} else {
throw new IllegalArgumentException(
"Could not write tiff metadata, wrong format: " + metadata.getNativeMetadataFormatName());
}
IIOMetadataNode root = new IIOMetadataNode(metadata.getNativeMetadataFormatName());
IIOMetadataNode ifd;
if (root.getElementsByTagName("TIFFIFD").getLength() == 0) {
ifd = new IIOMetadataNode("TIFFIFD");
ifd.setAttribute("tagSets", TAG_SET_CLASS_NAME);
root.appendChild(ifd);
} else {
ifd = (IIOMetadataNode) root.getElementsByTagName("TIFFIFD").item(0);
}
for (IIOMetadataNode metdaData2Add : metdaData2AddList) {
ifd.appendChild(metdaData2Add);
}
metadata.mergeTree(metadata.getNativeMetadataFormatName(), root);
}
private static List<BufferedImage> getImages(final InputStream inputImage,
final List<IIOMetadata> metadata) throws IOException {
List<BufferedImage> images = new ArrayList<>();
ImageReader reader = null;
try (ImageInputStream is = ImageIO.createImageInputStream(inputImage)) {
Iterator<ImageReader> iterator = ImageIO.getImageReaders(is);
reader = iterator.next();
reader.setInput(is);
int numPages = reader.getNumImages(true);
for (int numPage = 0; numPage < numPages; numPage++) {
BufferedImage pageImage = reader.read(numPage);
IIOMetadata meta = reader.getImageMetadata(numPage);
metadata.add(meta);
images.add(pageImage);
}
return images;
} finally {
if (reader != null) {
reader.dispose();
}
}
}
public static class Metadata {
private final List<IIOMetadataNode> addList = new ArrayList<>();
private final Map<String, String> fileAtt = new TreeMap<>();
public Metadata() {}
private List<IIOMetadataNode> get() {
return addList;
}
private Map<String, String> getfileAtt() {
return fileAtt;
}
public void add(int exifTag, String exifTagName, Object val) {
IIOMetadataNode md;
if (val instanceof byte[]) {
md = createBytesField(exifTag, exifTagName, (byte[]) val);
} else if (val instanceof String) {
md = createAsciiField(exifTag, exifTagName, (String) val);
fileAtt.put(exifTagName, String.valueOf(val));
} else if (val instanceof Short) {
md = createShortField(exifTag, exifTagName, ((Short) val).intValue());
fileAtt.put(exifTagName, String.valueOf(val));
} else if (val instanceof Integer) {
md = createShortField(exifTag, exifTagName, ((Integer) val).intValue());
fileAtt.put(exifTagName, String.valueOf(val));
} else if (val instanceof Long) {
md = createLongField(exifTag, exifTagName, ((Long) val).longValue());
fileAtt.put(exifTagName, String.valueOf(val));
} else if (val instanceof Float) {
md = createFloatField(exifTag, exifTagName, ((Float) val).floatValue());
fileAtt.put(exifTagName, String.valueOf(val));
} else if (val instanceof Double) {
md = createDoubleField(exifTag, exifTagName, ((Double) val).doubleValue());
fileAtt.put(exifTagName, String.valueOf(val));
} else if (val instanceof Rational) {
md = createRationalField(exifTag, exifTagName, ((Rational) val));
fileAtt.put(exifTagName, String.valueOf(val));
} else if (val instanceof RationalNumber) {
md = createRationalField(exifTag, exifTagName, ((RationalNumber) val));
fileAtt.put(exifTagName, String.valueOf(val));
} else {
throw new IllegalArgumentException("unsupported value class: " + val.getClass().getName());
}
addList.add(md);
}
/**
 * @param tagInfo {@link TiffTagConstants} like {@link TiffTagConstants#TIFF_TAG_XMP}
 * @param val String, byte[],
 */
public void add(TagInfo tagInfo, Object val) {
if (tagInfo instanceof TagInfoBytes) {
if (!(val instanceof byte[])) {
throw new IllegalArgumentException("expecting byte[] value");
}
} else if (tagInfo instanceof TagInfoAscii) {
if (!(val instanceof String)) {
throw new IllegalArgumentException("expecting String value");
}
} else if (tagInfo instanceof TagInfoShort) {
if (val instanceof Short || val instanceof Integer) {
// ok
} else {
throw new IllegalArgumentException("expecting Short/Integer value");
}
} else if (tagInfo instanceof TagInfoLong) {
if (!(val instanceof Long)) {
throw new IllegalArgumentException("expecting Long value");
}
} else if (tagInfo instanceof TagInfoDouble) {
if (!(val instanceof Double)) {
throw new IllegalArgumentException("expecting double value");
}
} else if (tagInfo instanceof TagInfoFloat) {
if (!(val instanceof Float)) {
throw new IllegalArgumentException("expecting float value");
}
} else if (tagInfo instanceof TagInfoRational) {
if (val instanceof RationalNumber || val instanceof Rational) {
// ok
} else {
throw new IllegalArgumentException("expecting rational value");
}
}
add(tagInfo.tag, tagInfo.name, val);
}
private static IIOMetadataNode createBytesField(int number, String name, byte[] bytes) {
IIOMetadataNode field = new IIOMetadataNode("TIFFField");
field.setAttribute("number", Integer.toString(number));
field.setAttribute("name", name);
IIOMetadataNode arrayNode = new IIOMetadataNode("TIFFBytes");
field.appendChild(arrayNode);
for (byte b : bytes) {
IIOMetadataNode valueNode = new IIOMetadataNode("TIFFByte");
valueNode.setAttribute("value", Integer.toString(b));
arrayNode.appendChild(valueNode);
}
return field;
}
private static IIOMetadataNode createShortField(int number, String name, int val) {
IIOMetadataNode field, arrayNode, valueNode;
field = new IIOMetadataNode("TIFFField");
field.setAttribute("number", Integer.toString(number));
field.setAttribute("name", name);
arrayNode = new IIOMetadataNode("TIFFShorts");
field.appendChild(arrayNode);
valueNode = new IIOMetadataNode("TIFFShort");
arrayNode.appendChild(valueNode);
valueNode.setAttribute("value", Integer.toString(val));
return field;
}
private static IIOMetadataNode createAsciiField(int number, String name, String val) {
IIOMetadataNode field, arrayNode, valueNode;
field = new IIOMetadataNode("TIFFField");
field.setAttribute("number", Integer.toString(number));
field.setAttribute("name", name);
arrayNode = new IIOMetadataNode("TIFFAsciis");
field.appendChild(arrayNode);
valueNode = new IIOMetadataNode("TIFFAscii");
arrayNode.appendChild(valueNode);
valueNode.setAttribute("value", val);
return field;
}
private static IIOMetadataNode createLongField(int number, String name, long val) {
IIOMetadataNode field, arrayNode, valueNode;
field = new IIOMetadataNode("TIFFField");
field.setAttribute("number", Integer.toString(number));
field.setAttribute("name", name);
arrayNode = new IIOMetadataNode("TIFFLongs");
field.appendChild(arrayNode);
valueNode = new IIOMetadataNode("TIFFLong");
arrayNode.appendChild(valueNode);
valueNode.setAttribute("value", Long.toString(val));
return field;
}
private static IIOMetadataNode createFloatField(int number, String name, float val) {
IIOMetadataNode field, arrayNode, valueNode;
field = new IIOMetadataNode("TIFFField");
field.setAttribute("number", Integer.toString(number));
field.setAttribute("name", name);
arrayNode = new IIOMetadataNode("TIFFFloats");
field.appendChild(arrayNode);
valueNode = new IIOMetadataNode("TIFFFloat");
arrayNode.appendChild(valueNode);
valueNode.setAttribute("value", Float.toString(val));
return field;
}
private static IIOMetadataNode createDoubleField(int number, String name, double val) {
IIOMetadataNode field, arrayNode, valueNode;
field = new IIOMetadataNode("TIFFField");
field.setAttribute("number", Integer.toString(number));
field.setAttribute("name", name);
arrayNode = new IIOMetadataNode("TIFFDoubles");
field.appendChild(arrayNode);
valueNode = new IIOMetadataNode("TIFFDouble");
arrayNode.appendChild(valueNode);
valueNode.setAttribute("value", Double.toString(val));
return field;
}
private static IIOMetadataNode createRationalField(int number, String name, Rational rational) {
return createRationalField(number, name, rational.numerator(), rational.denominator());
}
private static IIOMetadataNode createRationalField(int number, String name,
RationalNumber rational) {
return createRationalField(number, name, rational.numerator, rational.divisor);
}
private static IIOMetadataNode createRationalField(int number, String name, long numerator,
long denominator) {
IIOMetadataNode field, arrayNode, valueNode;
field = new IIOMetadataNode("TIFFField");
field.setAttribute("number", Integer.toString(number));
field.setAttribute("name", name);
arrayNode = new IIOMetadataNode("TIFFRationals");
field.appendChild(arrayNode);
valueNode = new IIOMetadataNode("TIFFRational");
arrayNode.appendChild(valueNode);
valueNode.setAttribute("value", numerator + "/" + denominator);
return field;
}
}
}
Usage
byte[] bytes = create();
TiffMetadataExample.Metadata metaData = new TiffMetadataExample.Metadata();
metaData.add(TiffTagConstants.TIFF_TAG_SOFTWARE, "FUBAR");
// metaData.add(TiffMetadataExample.TIFF_TAG_XMP, TiffMetadataExample.TIFF_TAG_XMP_NAME, bytes);
metaData.add(TiffTagConstants.TIFF_TAG_XMP, bytes);
TiffMetadataExample.setMetaData(tiffIn, tiffOut, metaData);

error while deserializing object of size greater than 95Kb, working fine for less than 95Kb objects [duplicate]

This question already has answers here:
java.io.StreamCorruptedException: invalid type code: 00
(6 answers)
Closed 4 years ago.
I am getting a StreamCorruptedException when I deserialize objects that are greater than 95KB, but the code works fine for objects that are less than 95KB. Here is my code:
package hadoop;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class HadoopClient {
static ArrayList<Long> list = new ArrayList<Long>();
public long readFileFromHDFS(String source) throws IOException {
Configuration obj = new Configuration();
obj.set("fs.default.name", "hdfs://127.0.0.1:9000");
FileSystem fs = FileSystem.get(obj);
Path sourcePath = new Path(fs.getHomeDirectory() + source + ".txt");
FSDataInputStream in = fs.open(sourcePath);
byte[] b = new byte[in.available()];
final long startTime = System.nanoTime();
in.read(b, 0, in.available());
final long endTime = System.nanoTime();
in.close();
fs.close();
TestObject objj = null;
try {
ByteArrayInputStream bi = new ByteArrayInputStream(b);
ObjectInputStream si = new ObjectInputStream(bi);
objj = (TestObject) si.readObject();
objj.printHello();
} catch (Exception e) {
System.out.println(e);
}
return endTime - startTime;
}
public long copyBuffertoHDFS(byte[] array, String destFileName, boolean replace, short replicationFacotr, long blockSize, int bufferSize) throws IOException {
Configuration obj = new Configuration();
obj.set("fs.default.name", "hdfs://127.0.0.1:9000");
FileSystem fs = FileSystem.get(obj);
String s = fs.getHomeDirectory() + destFileName + ".txt";
Path outFile = new Path(s);
final long startTime = System.nanoTime();
FSDataOutputStream out = fs.create(outFile, replace, bufferSize, replicationFacotr, blockSize);
final long endTime = System.nanoTime();
out.write(array);
out.close();
return endTime - startTime;
}
public static void main(String[] args) throws Exception {
HadoopClient hadoopJava = new HadoopClient();
short replicationFactor;
long blockSize;
int bufferSize;
int noOfBytes;
int noOfEntries;
boolean replacement;
String str = "";
for (int testCases = 0; testCases < args.length; testCases += 6) {
blockSize = Integer.parseInt(args[0 + testCases]);
replicationFactor = Short.parseShort(args[1 + testCases]);
bufferSize = Integer.parseInt(args[2 + testCases]);
noOfBytes = Integer.parseInt(args[3 + testCases]);
noOfEntries = Integer.parseInt(args[4 + testCases]);
replacement = Boolean.parseBoolean(args[5 + testCases]);
TestObject testObject = new TestObject();
testObject.setString(noOfBytes);
str = hadoopJava.toStringMethod(testObject);
hadoopJava.publishByteArrayTimer(str.getBytes("windows-1252"), noOfEntries, replacement, replicationFactor, blockSize, bufferSize);
hadoopJava.retrieveByteArrayTimer(noOfEntries);
Collections.sort(list);
for (Long ll : list) {
System.out.println(ll);
}
System.out.println("");
}
}
public String toStringMethod(TestObject test) {
String serializedObject = "";
try {
ByteArrayOutputStream bo = new ByteArrayOutputStream();
ObjectOutputStream so = new ObjectOutputStream(bo);
so.writeObject(test);
so.flush();
so.close();
serializedObject = bo.toString("windows-1252");
bo.flush();
bo.close();
} catch (Exception e) {
System.out.println(e);
}
return serializedObject;
}
public void publishByteArrayTimer(byte[] array, int numberOfInsertions, boolean replace, short replicationFactor, long blockSize, int bufferSize) throws IOException, InterruptedException {
long timeTaken = 0;
for (int fileName = 0; fileName < numberOfInsertions; fileName++) {
timeTaken = copyBuffertoHDFS(array, String.valueOf(fileName), replace, replicationFactor, blockSize, bufferSize);
list.add(timeTaken / 1000);
TimeUnit.MICROSECONDS.sleep(10000);
}
}
public void retrieveByteArrayTimer(Integer numberOfInsertions) throws IOException {
long timeTaken = 0;
for (int fileName = 0; fileName < numberOfInsertions; fileName++) {
timeTaken += readFileFromHDFS(String.valueOf(fileName));
}
}
}
class TestObject implements Serializable {
char chars[];
String str;
public String setString(int numberOfBytes) {
numberOfBytes = numberOfBytes / 2;
chars = new char[numberOfBytes];
Arrays.fill(chars, 'a');
str = new String(chars);
return str;
}
public String getString() {
return str;
}
public void printHello() {
System.out.println("hello tester");
}
}
This is the error trace:
java.io.StreamCorruptedException: invalid type code: 00
Please help. Here is the full stack trace:
java.io.StreamCorruptedException: invalid type code: 00
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1946)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1870)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1752)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1328)
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:350)
at hadoop.HadoopClient.readFileFromHDFS(HadoopClient.java:47)
at hadoop.HadoopClient.retrieveByteArrayTimer(HadoopClient.java:159)
at hadoop.HadoopClient.main(HadoopClient.java:114)
Please help
Thanks...
You can pass your FSDataInputStream directly to ObjectInputStream without using a byte array.
public long readFileFromHDFS(String source) throws IOException {
Configuration obj = new Configuration();
obj.set("fs.default.name", "hdfs://127.0.0.1:9000");
FileSystem fs = FileSystem.get(obj);
Path sourcePath = new Path(fs.getHomeDirectory() + source + ".txt");
FSDataInputStream in = fs.open(sourcePath);
TestObject objj = null;
final long startTime = System.nanoTime();
try {
ObjectInputStream si = new ObjectInputStream(in);
objj = (TestObject) si.readObject();
objj.printHello();
si.close();
} catch (Exception e) {
System.out.println(e);
} finally {
in.close();
fs.close();
}
return System.nanoTime() - startTime;
}
Most likely your problem is here:
byte[] b = new byte[in.available()];
in.read(b, 0, in.available());
In general, it is a wrong assumption that this code reads all of the data.
You can use the helper method from Apache commons-io, org.apache.commons.io.IOUtils#toByteArray(java.io.InputStream), or:
ByteArrayOutputStream bos = new ByteArrayOutputStream(in.available());
byte[] buf = new byte[4096*16];
int c;
while((c=in.read(buf))!=-1){
bos.write(buf, 0, c);
}
byte[] data = bos.toByteArray();
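For reference, the commons-io helper mentioned above reduces that loop to a one-liner (assuming commons-io is on the classpath):
// Read the whole stream into a byte array with Apache commons-io
byte[] data = org.apache.commons.io.IOUtils.toByteArray(in);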

How to convert a dicom file to a jpg image

How can we convert a dicom file (.dcm) to a jpeg image using Java?
Here is my code:
import java.io.File;
import java.io.IOException;
import org.dcm4che2.tool.dcm2jpg.Dcm2Jpg;
public class MainClass {
public static void main(String[] args) throws IOException{
Dcm2Jpg conv = new Dcm2Jpg();
conv.convert(new File("C:\\Users\\lijo.joseph\\Desktop\\Dicom\\IM-0001-0001.dcm"), new File("C:\\Users\\lijo.joseph\\Desktop\\Dicom\\IM-0001-0001.jpg"));
}
}
and I am getting the following error while running the project:
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/commons/cli/ParseException
at MainClass.main(MainClass.java:7)
Caused by: java.lang.ClassNotFoundException: org.apache.commons.cli.ParseException
at java.net.URLClassLoader$1.run(URLClassLoader.java:372)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:360)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 1 more
Please help, and thanks in advance.
Here is the link: Converting DICOM to JPEG using dcm4che 2
Following is my code, which works perfectly. I have included the imports so it might be useful.
import java.awt.image.BufferedImage;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;
import org.dcm4che2.imageio.plugins.dcm.DicomImageReadParam;
import com.sun.image.codec.jpeg.JPEGCodec;
import com.sun.image.codec.jpeg.JPEGImageEncoder;
public class Examplke1 {
static BufferedImage myJpegImage=null;
public static void main(String[] args) {
File file = new File("test5/12840.dcm");
Iterator<ImageReader> iterator =ImageIO.getImageReadersByFormatName("DICOM");
while (iterator.hasNext()) {
ImageReader imageReader = (ImageReader) iterator.next();
DicomImageReadParam dicomImageReadParam = (DicomImageReadParam) imageReader.getDefaultReadParam();
try {
ImageInputStream iis = ImageIO.createImageInputStream(file);
imageReader.setInput(iis,false);
myJpegImage = imageReader.read(0, dicomImageReadParam);
iis.close();
if(myJpegImage == null){
System.out.println("Could not read image!!");
}
} catch (IOException e) {
e.printStackTrace();
}
File file2 = new File("/test.jpg");
try {
OutputStream outputStream = new BufferedOutputStream(new FileOutputStream(file2));
JPEGImageEncoder encoder = JPEGCodec.createJPEGEncoder(outputStream);
encoder.encode(myJpegImage);
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
System.out.println("Completed");
}
}
}
Jars Used to Run it
dcm4che-imageio-2.0.28.jar
dcm4che-image-2.0.28.jar
jai_imageio-1.1.jar
dcm4che-core-2.0.28.jar
slf4j-api-1.7.7.jar
slf4j-log4j12-1.7.7.jar
apache-logging-log4j.jar
Hope it helps.
This code is used for converting a DICOM image to a JPG image:
import java.io.File;
import java.io.IOException;
public class Dcm2JpgTest {
public static void main(String[] args) throws IOException {
try{
File src = new File("d:\\Test.dcm");
File dest = new File("d:\\Test.jpg");
Dcm2Jpeg dcm2jpg= new Dcm2Jpeg();
dcm2jpg.convert(src, dest);
System.out.println("Completed");
} catch(IOException e){
e.printStackTrace();
} catch(Exception e){
e.printStackTrace();
}
}
}
Dcm2Jpeg.java File
import java.awt.image.BufferedImage;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.List;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.dcm4che2.data.DicomObject;
import org.dcm4che2.imageio.plugins.dcm.DicomImageReadParam;
import org.dcm4che2.io.DicomInputStream;
import org.dcm4che2.util.CloseUtils;
import com.sun.image.codec.jpeg.JPEGCodec;
import com.sun.image.codec.jpeg.JPEGImageEncoder;
public class Dcm2Jpeg {
private static final String USAGE =
"dcm2jpg [Options] <dcmfile> <jpegfile>\n" +
"or dcm2jpg [Options] <dcmfile>... <outdir>\n" +
"or dcm2jpg [Options] <indir>... <outdir>";
private static final String DESCRIPTION =
"Convert DICOM image(s) to JPEG(s)\nOptions:";
private static final String EXAMPLE = null;
private int frame = 1;
private float center;
private float width;
private String vlutFct;
private boolean autoWindowing;
private DicomObject prState;
private short[] pval2gray;
private String fileExt = ".jpg";
private void setFrameNumber(int frame) {
this.frame = frame;
}
private void setWindowCenter(float center) {
this.center = center;
}
private void setWindowWidth(float width) {
this.width = width;
}
public final void setVoiLutFunction(String vlutFct) {
this.vlutFct = vlutFct;
}
private final void setAutoWindowing(boolean autoWindowing) {
this.autoWindowing = autoWindowing;
}
private final void setPresentationState(DicomObject prState) {
this.prState = prState;
}
private final void setPValue2Gray(short[] pval2gray) {
this.pval2gray = pval2gray;
}
public final void setFileExt(String fileExt) {
this.fileExt = fileExt;
}
public void convert(File src, File dest) throws IOException {
Iterator<ImageReader> iter = ImageIO.getImageReadersByFormatName("DICOM");
ImageReader reader = iter.next();
DicomImageReadParam param =
(DicomImageReadParam) reader.getDefaultReadParam();
param.setWindowCenter(center);
param.setWindowWidth(width);
param.setVoiLutFunction(vlutFct);
param.setPresentationState(prState);
param.setPValue2Gray(pval2gray);
param.setAutoWindowing(autoWindowing);
ImageInputStream iis = ImageIO.createImageInputStream(src);
BufferedImage bi;
OutputStream out = null;
try {
reader.setInput(iis, false);
bi = reader.read(frame - 1, param);
if (bi == null) {
System.out.println("\nError: " + src + " - couldn't read!");
return;
}
out = new BufferedOutputStream(new FileOutputStream(dest));
JPEGImageEncoder enc = JPEGCodec.createJPEGEncoder(out);
enc.encode(bi);
} finally {
CloseUtils.safeClose(iis);
CloseUtils.safeClose(out);
}
//System.out.print('.');
}
public int mconvert(List<String> args, int optind, File destDir)
throws IOException {
int count = 0;
for (int i = optind, n = args.size() - 1; i < n; ++i) {
File src = new File(args.get(i));
count += mconvert(src, new File(destDir, src2dest(src)));
}
return count;
}
private String src2dest(File src) {
String srcname = src.getName();
return src.isFile() ? srcname + this.fileExt : srcname;
}
public int mconvert(File src, File dest) throws IOException {
if (!src.exists()) {
System.err.println("WARNING: No such file or directory: " + src
+ " - skipped.");
return 0;
}
if (src.isFile()) {
try {
convert(src, dest);
} catch (Exception e) {
System.err.println("WARNING: Failed to convert " + src + ":");
e.printStackTrace(System.err);
System.out.print('F');
return 0;
}
System.out.print('.');
return 1;
}
File[] files = src.listFiles();
if (files.length > 0 && !dest.exists()) {
dest.mkdirs();
}
int count = 0;
for (int i = 0; i < files.length; ++i) {
count += mconvert(files[i], new File(dest, src2dest(files[i])));
}
return count;
}
@SuppressWarnings("unchecked")
public static void main(String args[]) throws Exception {
CommandLine cl = parse(args);
Dcm2Jpeg dcm2jpg = new Dcm2Jpeg();
if (cl.hasOption("f")) {
dcm2jpg.setFrameNumber(
parseInt(cl.getOptionValue("f"),
"illegal argument of option -f",
1, Integer.MAX_VALUE));
}
if (cl.hasOption("p")) {
dcm2jpg.setPresentationState(loadDicomObject(
new File(cl.getOptionValue("p"))));
}
if (cl.hasOption("pv2gray")) {
dcm2jpg.setPValue2Gray(loadPVal2Gray(
new File(cl.getOptionValue("pv2gray"))));
}
if (cl.hasOption("c")) {
dcm2jpg.setWindowCenter(
parseFloat(cl.getOptionValue("c"),
"illegal argument of option -c"));
}
if (cl.hasOption("w")) {
dcm2jpg.setWindowWidth(
parseFloat(cl.getOptionValue("w"),
"illegal argument of option -w"));
}
if (cl.hasOption("sigmoid")) {
dcm2jpg.setVoiLutFunction(DicomImageReadParam.SIGMOID);
}
dcm2jpg.setAutoWindowing(!cl.hasOption("noauto"));
if (cl.hasOption("jpgext")) {
dcm2jpg.setFileExt(cl.getOptionValue("jpgext"));
}
final List<String> argList = cl.getArgList();
int argc = argList.size();
File dest = new File(argList.get(argc-1));
long t1 = System.currentTimeMillis();
int count = 1;
if (dest.isDirectory()) {
count = dcm2jpg.mconvert(argList, 0, dest);
} else {
File src = new File(argList.get(0));
if (argc > 2 || src.isDirectory()) {
exit("dcm2jpg: when converting several files, "
+ "last argument must be a directory\n");
}
dcm2jpg.convert(src, dest);
}
long t2 = System.currentTimeMillis();
System.out.println("\nconverted " + count + " files in " + (t2 - t1)
/ 1000f + " s.");
}
private static DicomObject loadDicomObject(File file) {
DicomInputStream in = null;
try {
in = new DicomInputStream(file);
return in.readDicomObject();
} catch (IOException e) {
exit(e.getMessage());
throw new RuntimeException();
} finally {
CloseUtils.safeClose(in);
}
}
private static short[] loadPVal2Gray(File file) {
BufferedReader r = null;
try {
r = new BufferedReader(new InputStreamReader(new FileInputStream(
file)));
short[] pval2gray = new short[256];
int n = 0;
String line;
while ((line = r.readLine()) != null) {
try {
int val = Integer.parseInt(line.trim());
if (n == pval2gray.length) {
if (n == 0x10000) {
exit("Number of entries in " + file + " > 2^16");
}
short[] tmp = pval2gray;
pval2gray = new short[n << 1];
System.arraycopy(tmp, 0, pval2gray, 0, n);
}
pval2gray[n++] = (short) val;
} catch (NumberFormatException nfe) {
// ignore lines where Integer.parseInt fails
}
}
if (n != pval2gray.length) {
exit("Number of entries in " + file + ": " + n
+ " != 2^[8..16]");
}
return pval2gray;
} catch (IOException e) {
exit(e.getMessage());
throw new RuntimeException();
} finally {
CloseUtils.safeClose(r);
}
}
private static CommandLine parse(String[] args) {
Options opts = new Options();
OptionBuilder.withArgName("frame");
OptionBuilder.hasArg();
OptionBuilder.withDescription(
"frame to convert, 1 (= first frame) by default");
opts.addOption(OptionBuilder.create("f"));
OptionBuilder.withArgName("prfile");
OptionBuilder.hasArg();
OptionBuilder.withDescription(
"file path of presentation state to apply");
opts.addOption(OptionBuilder.create("p"));
OptionBuilder.withArgName("center");
OptionBuilder.hasArg();
OptionBuilder.withDescription("Window Center");
opts.addOption(OptionBuilder.create("c"));
OptionBuilder.withArgName("width");
OptionBuilder.hasArg();
OptionBuilder.withDescription("Window Width");
opts.addOption(OptionBuilder.create("w"));
opts.addOption("sigmoid", false,
"apply sigmoid VOI LUT function with given Window Center/Width");
opts.addOption("noauto", false,
"disable auto-windowing for images w/o VOI attributes");
OptionBuilder.withArgName("file");
OptionBuilder.hasArg();
OptionBuilder.withDescription(
"file path of P-Value to gray value map");
opts.addOption(OptionBuilder.create("pv2gray"));
OptionBuilder.withArgName(".xxx");
OptionBuilder.hasArg();
OptionBuilder.withDescription(
"jpeg file extension used with destination directory argument,"
+ " default: '.jpg'.");
opts.addOption(OptionBuilder.create("jpgext"));
opts.addOption("h", "help", false, "print this message");
opts.addOption("V", "version", false,
"print the version information and exit");
CommandLine cl = null;
try {
cl = new GnuParser().parse(opts, args);
} catch (ParseException e) {
exit("dcm2jpg: " + e.getMessage());
throw new RuntimeException("unreachable");
}
if (cl.hasOption('V')) {
Package p = Dcm2Jpeg.class.getPackage();
System.out.println("dcm2jpg v" + p.getImplementationVersion());
System.exit(0);
}
if (cl.hasOption('h') || cl.getArgList().size() < 2) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp(USAGE, DESCRIPTION, opts, EXAMPLE);
System.exit(0);
}
return cl;
}
private static int parseInt(String s, String errPrompt, int min, int max) {
try {
int i = Integer.parseInt(s);
if (i >= min && i <= max)
return i;
} catch (NumberFormatException e) {
// parameter is not a valid integer; fall through to exit
}
exit(errPrompt);
throw new RuntimeException();
}
private static float parseFloat(String s, String errPrompt) {
try {
return Float.parseFloat(s);
} catch (NumberFormatException e) {
exit(errPrompt);
throw new RuntimeException();
}
}
private static void exit(String msg) {
System.err.println(msg);
System.err.println("Try 'dcm2jpg -h' for more information.");
System.exit(1);
}
}
Jar files used to run this code:
dcm4che-core-2.0.23.jar
dcm4che-image-2.0.23.jar
dcm4che-imageio-2.0.23.jar
dcm4che-imageio-rle-2.0.23.jar
slf4j-log4j12-1.5.0.jar
slf4j-api-1.5.0.jar
log4j-1.2.13.jar
commons-cli-1.2.jar
If you don't want to use the Dcm2Jpg.java file directly, you can include the jar file below instead:
dcm4che-tool-dcm2jpg-2.0.23.jar
From this jar you can import org.dcm4che2.tool.dcm2jpg.Dcm2Jpg.

problems in reading writing SHA-1

I'm trying to generate the SHA-1 of a file's content in Java and then read it back. One hex digit gets changed, and I don't know why.
Code for writing to the file:
//writer:
import java.util.*;
import java.io.*;
import java.security.*;
public class Writer{
public static void main(String []args)throws Exception{
InputStream is = new FileInputStream("input.txt");
PrintStream os=new PrintStream(new File("out.txt"));
byte[] buffer = new byte[1024];
String str;
MessageDigest complete = MessageDigest.getInstance("SHA-1");
MessageDigest partial = MessageDigest.getInstance("SHA-1");
int numRead;
do {
numRead = is.read(buffer);
if (numRead > 0) {
complete.update(buffer, 0, numRead);
partial.update(buffer,0,numRead);
byte []digest=partial.digest();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < digest.length; i++) {
sb.append(String.format("%x", digest[i]));
}
System.out.println(sb.toString());
str=new String(digest);
os.println(str);
partial.reset();
}
} while (numRead != -1);
byte []digest=complete.digest();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < digest.length; i++) {
sb.append(String.format("%x", digest[i]));
}
System.out.println(sb.toString());
str=new String(digest);
os.println(str);
is.close();
os.close();
}
}
Code for reading the file:
//Reader:
import java.util.*;
import java.io.*;
import java.security.*;
public class Reader{
public static void main(String []args)throws Exception{
BufferedReader is = new BufferedReader(new FileReader("out.txt"));
String str;
while((str=is.readLine())!=null)
{
//System.out.println(str);
byte []digest=str.getBytes();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < digest.length; i++) {
sb.append(String.format("%x", digest[i]));
}
System.out.println(sb.toString());
}
is.close();
}
}
I used another source file as input.txt. It generates 5 output lines. Reading and converting the first 4 lines is okay; the last line comes back with one changed hex digit.
Input file (input.txt):
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author biborno
*/
import java.io.*;
import java.security.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;
public class Torrent {
private static void encodeObject(Object o, OutputStream out) throws IOException {
if (o instanceof String)
encodeString((String)o, out);
else if (o instanceof Map)
encodeMap((Map)o, out);
else if (o instanceof byte[])
encodeBytes((byte[])o, out);
else if (o instanceof Number)
encodeLong(((Number) o).longValue(), out);
else
throw new Error("Unencodable type");
}
private static void encodeLong(long value, OutputStream out) throws IOException {
out.write('i');
out.write(Long.toString(value).getBytes("US-ASCII"));
out.write('e');
}
private static void encodeBytes(byte[] bytes, OutputStream out) throws IOException {
out.write(Integer.toString(bytes.length).getBytes("US-ASCII"));
out.write(':');
out.write(bytes);
}
private static void encodeString(String str, OutputStream out) throws IOException {
encodeBytes(str.getBytes("UTF-8"), out);
}
private static void encodeMap(Map<String,Object> map, OutputStream out) throws IOException{
// Sort the map. A generic encoder should sort by key bytes
SortedMap<String,Object> sortedMap = new TreeMap<String, Object>(map);
out.write('d');
for(Entry<String, Object> e : sortedMap.entrySet()) {
encodeString(e.getKey(), out);
encodeObject(e.getValue(), out);
}
out.write('e');
}
private static byte[] hashPieces(File file, int pieceLength) throws IOException {
MessageDigest sha1;
try {
sha1 = MessageDigest.getInstance("SHA");
} catch (NoSuchAlgorithmException e) {
throw new Error("SHA1 not supported");
}
InputStream in = new FileInputStream(file);
ByteArrayOutputStream pieces = new ByteArrayOutputStream();
byte[] bytes = new byte[pieceLength];
int pieceByteCount = 0, readCount = in.read(bytes, 0, pieceLength);
while (readCount != -1) {
pieceByteCount += readCount;
sha1.update(bytes, 0, readCount);
if (pieceByteCount == pieceLength) {
pieceByteCount = 0;
pieces.write(sha1.digest());
}
readCount = in.read(bytes, 0, pieceLength-pieceByteCount);
}
in.close();
if (pieceByteCount > 0)
pieces.write(sha1.digest());
return pieces.toByteArray();
}
public static void createTorrent(File file, File sharedFile, String announceURL) throws IOException {
final int pieceLength = 512*1024;
Map<String,Object> info = new HashMap<String,Object>();
info.put("name", sharedFile.getName());
info.put("length", sharedFile.length());
info.put("piece length", pieceLength);
info.put("pieces", hashPieces(sharedFile, pieceLength));
Map<String,Object> metainfo = new HashMap<String,Object>();
metainfo.put("announce", announceURL);
metainfo.put("info", info);
OutputStream out = new FileOutputStream(file);
encodeMap(metainfo, out);
out.close();
}
public static void main(String[] args) throws Exception {
createTorrent(new File("C:\\Documents and Settings\\biborno\\Desktop\\Test.text.torrent"), new File("C:\\Documents and Settings\\biborno\\Desktop\\Test.text"), "http://example.com/announce");
}
}
Output of the writer:
f1556b25a651c3dc4831871ab3fce2d239c7f0d4
4ed5c741dbf78059fef3733340efcf9b16b4594f
e2b66127e6f25648e9e2d21cb18e26d4a541a17
26c213a35b3f2ccae28ad5e3b98b5a02386c368
e7d152cd8f5f22949abbc11eb049451c511df07b
out.txt:
ñUk%¦QÃÜH1‡³üâÒ9ÇðÔ
NÕÇAÛ÷€Yþós3#ïÏ›´YO
â¶a'æòVHéâÒ±Ž&Ô¥
&£[òÌ®(­^;˜µ #†Ãh
çÑRÍ?_"”š»Á°IEQð{
Reader's Output
f1556b25a651c3dc4831871ab3fce2d239c7f0d4
4ed5c741dbf78059fef3733340efcf9b16b4594f
e2b66127e6f25648e9e2d21cb18e26d4a541a17
26c213a35b3f2ccae28ad5e3b98b5a02386c368
e7d152cd3f5f22949abbc11eb049451c511df07b
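The digests are written as raw bytes through a PrintStream and read back through a Reader, so the platform charset can change any byte that has no valid mapping in that encoding; the %x format also drops the leading zero of single-digit bytes. Below is a minimal sketch, not from the original post, of a charset-safe round trip that stores each digest as two-digit hex text:
import java.io.*;
import java.security.MessageDigest;
public class HexDigestRoundTrip {
    // Encode digest bytes as two-digit hex so they survive a text-file round trip.
    static String toHex(byte[] digest) {
        StringBuilder sb = new StringBuilder();
        for (byte b : digest) {
            sb.append(String.format("%02x", b)); // %02x keeps leading zeros
        }
        return sb.toString();
    }
    public static void main(String[] args) throws Exception {
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        byte[] digest = sha1.digest("hello".getBytes("UTF-8"));
        try (PrintStream os = new PrintStream(new File("out.txt"))) {
            os.println(toHex(digest)); // write hex text, not raw digest bytes
        }
        try (BufferedReader is = new BufferedReader(new FileReader("out.txt"))) {
            System.out.println(is.readLine()); // reads back the identical hex string
        }
    }
}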

FileNotFoundException when using Hadoop distributed cache

This time someone should please reply.
I am struggling to run my code using the distributed cache. I already have the files on HDFS, but when I run this code:
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.awt.image.Raster;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.imageio.ImageIO;
import org.apache.hadoop.filecache.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import java.lang.String;
import java.lang.Runtime;
import java.net.URI;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
public class blur2 {
public static class BlurMapper extends MapReduceBase implements Mapper<Text, BytesWritable, LongWritable, BytesWritable>
{
OutputCollector<LongWritable, BytesWritable> goutput;
int IMAGE_HEIGHT = 240;
int IMAGE_WIDTH = 320;
public BytesWritable Gmiu;
public BytesWritable Gsigma;
public BytesWritable w;
byte[] bytes = new byte[IMAGE_HEIGHT*IMAGE_WIDTH*3];
public BytesWritable emit = new BytesWritable(bytes);
int count = 0;
int initVar = 125;
public LongWritable l = new LongWritable(1);
byte[] byte1 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH];
byte[] byte2 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH];
byte[] byte3 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH];
public void map(Text key, BytesWritable file,OutputCollector<LongWritable, BytesWritable> output, Reporter reporter) throws IOException {
goutput = output;
BufferedImage img = ImageIO.read(new ByteArrayInputStream(file.getBytes()));
Raster ras=img.getData();
DataBufferByte db= (DataBufferByte)ras.getDataBuffer();
byte[] data = db.getData();
if(count==0){
for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++)
{
byte1[i]=20;
byte2[i]=125;
}
Gmiu = new BytesWritable(data);
Gsigma = new BytesWritable(byte1);
w = new BytesWritable(byte2);
count++;
}
else{
byte1 = Gmiu.getBytes();
byte2 = Gsigma.getBytes();
byte3 = w.getBytes();
for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++)
{
byte pixel = data[i];
Double tempmiu=new Double(0.0);
Double tempsig=new Double(0.0);
double temp1=0.0; double alpha = 0.05;
tempmiu = (1-alpha)*byte1[i] + alpha*pixel;
temp1=temp1+(pixel-byte1[i])*(pixel-byte1[i]);
tempsig=(1-alpha)*byte2[i]+ alpha*temp1;
byte1[i] = tempmiu.byteValue();
byte2[i]= tempsig.byteValue();
Double w1=new Double((1-alpha)*byte3[i]+alpha*100);
byte3[i] = w1.byteValue();
}
Gmiu.set(byte1,0,IMAGE_HEIGHT*IMAGE_WIDTH);
Gsigma.set(byte2,0,IMAGE_HEIGHT*IMAGE_WIDTH);
w.set(byte3,0,IMAGE_HEIGHT*IMAGE_WIDTH);
}
byte1 = Gsigma.getBytes();
for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++)
{
bytes[i]=byte1[i];
}
byte1 = Gsigma.getBytes();
for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++)
{
bytes[IMAGE_HEIGHT*IMAGE_WIDTH+i]=byte1[i];
}
byte1 = w.getBytes();
for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++)
{
bytes[2*IMAGE_HEIGHT*IMAGE_WIDTH+i]=byte1[i];
}
emit.set(bytes,0,3*IMAGE_HEIGHT*IMAGE_WIDTH);
}
@Override
public void close(){
try{
goutput.collect(l, emit);
}
catch(Exception e){
e.printStackTrace();
System.exit(-1);
}
}
}
// end of first job, this is running perfectly
public static void main(String[] args) throws URISyntaxException {
if(args.length!=3) {
System.err.println("Usage: blurvideo input output");
System.exit(-1);
}
JobClient client = new JobClient();
JobConf conf = new JobConf(blur2.class);
conf.setOutputValueClass(BytesWritable.class);
conf.setInputFormat(SequenceFileInputFormat.class);
//conf.setNumMapTasks(n)
SequenceFileInputFormat.addInputPath(conf, new Path(args[0]));
TextOutputFormat.setOutputPath(conf, new Path(args[1]));
conf.setMapperClass(BlurMapper.class);
conf.setNumReduceTasks(0);
//conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);
client.setConf(conf);
try {
JobClient.runJob(conf);
} catch (Exception e) {
e.printStackTrace();
}
// exec("jar cf /home/hmobile/hadoop-0.19.2/imag /home/hmobile/hadoop-0.19.2/output");
JobClient client2 = new JobClient();
JobConf conf2 = new JobConf(blur2.class);
conf2.setOutputValueClass(BytesWritable.class);
conf2.setInputFormat(SequenceFileInputFormat.class);
//conf.setNumMapTasks(n)
SequenceFileInputFormat.addInputPath(conf2, new Path(args[0]));
SequenceFileOutputFormat.setOutputPath(conf2, new Path(args[2]));
conf2.setMapperClass(BlurMapper2.class);
conf2.setNumReduceTasks(0);
DistributedCache.addCacheFile(new URI("~/ayush/output/part-00000"), conf2);// these files are already on the hdfs
DistributedCache.addCacheFile(new URI("~/ayush/output/part-00001"), conf2);
client2.setConf(conf2);
try {
JobClient.runJob(conf2);
} catch (Exception e) {
e.printStackTrace();
}
}
public static class BlurMapper2 extends MapReduceBase implements Mapper<Text, BytesWritable, LongWritable, BytesWritable>
{
int IMAGE_HEIGHT = 240;
int T =60;
int IMAGE_WIDTH = 320;
public BytesWritable Gmiu;
public BytesWritable Gsigma;
public BytesWritable w;
byte[] bytes = new byte[IMAGE_HEIGHT*IMAGE_WIDTH];
public BytesWritable emit = new BytesWritable(bytes);
int initVar = 125;int gg=0;
int K=64;int k=0,k1=0,k2=0;
public LongWritable l = new LongWritable(1);
byte[] Gmiu1 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH*K];
byte[] Gsigma1 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH*K];
byte[] w1 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH*K];
public Path[] localFiles=new Path[2];
private FileSystem fs;
@Override
public void configure(JobConf conf2)
{
try {
fs = FileSystem.getLocal(new Configuration());
localFiles = DistributedCache.getLocalCacheFiles(conf2);
//System.out.println(localFiles[0].getName());
} catch (IOException ex) {
Logger.getLogger(blur2.class.getName()).log(Level.SEVERE, null, ex);
}
}
public void map(Text key, BytesWritable file,OutputCollector<LongWritable, BytesWritable> output, Reporter reporter) throws IOException
{
if(gg==0){
//System.out.println(localFiles[0].getName());
String wrd; String line;
for(Path f:localFiles)
{
if(!f.getName().endsWith("crc"))
{
// FSDataInputStream localFile = fs.open(f);
BufferedReader br = null;
try {
br = new BufferedReader(new InputStreamReader(fs.open(f)));
int c = 0;
try {
while ((line = br.readLine()) != null) {
StringTokenizer itr = new StringTokenizer(line, " ");
while (itr.hasMoreTokens()) {
wrd = itr.nextToken();
c++;
int i = Integer.parseInt(wrd, 16);
Integer I = new Integer(i);
byte b = I.byteValue();
if (c < IMAGE_HEIGHT * IMAGE_WIDTH) {
Gmiu1[k] = b;k++;
} else {
if ((c >= IMAGE_HEIGHT * IMAGE_WIDTH) && (c < 2 * IMAGE_HEIGHT * IMAGE_WIDTH)) {
Gsigma1[k] = b;k1++;
} else {
w1[k] = b;k2++;
}
}
}
}
} catch (IOException ex) {
Logger.getLogger(blur2.class.getName()).log(Level.SEVERE, null, ex);
}
} catch (FileNotFoundException ex) {
Logger.getLogger(blur2.class.getName()).log(Level.SEVERE, null, ex);
} finally {
try {
br.close();
} catch (IOException ex) {
Logger.getLogger(blur2.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
}
gg++;
}
}
}
}
I have struggled a lot with this. Can anyone please tell me why I am getting this error:
java.io.FileNotFoundException: File does not exist: ~/ayush/output/part-00000
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:394)
at org.apache.hadoop.filecache.DistributedCache.getTimestamp(DistributedCache.java:475)
at org.apache.hadoop.mapred.JobClient.configureCommandLineOptions(JobClient.java:676)
at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:774)
at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:1127)
at blur2.main(blur2.java:175)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.util.RunJar.main(RunJar.java:165)
at org.apache.hadoop.mapred.JobShell.run(JobShell.java:54)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:79)
at org.apache.hadoop.mapred.JobShell.main(JobShell.java:68)
The problem is with the filename you are using: "~/ayush/output/part-00000" relies on Unix shell (sh, bash, ksh) tilde expansion to replace the "~" with the pathname of your home directory.
Java (like C, C++, and most other programming languages) does not do tilde expansion. You need to provide the pathname as "/home/ayush/output/part-00000" ... or whatever absolute pathname the tilded form expands to.
Strictly speaking, the URI should be created as follows:
new File("/home/ayush/output/part-00000").toURI()
not as
new URI("/home/ayush/output/part-00000")
The latter creates a URI without a "protocol", and that could be problematic.
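For example, the cache registration from the question would become something like the following; the absolute path is an assumption here, so substitute whatever "~" actually expands to for your files:
// Assumed absolute paths; adjust to where the files really live on HDFS
DistributedCache.addCacheFile(new URI("/home/ayush/output/part-00000"), conf2);
DistributedCache.addCacheFile(new URI("/home/ayush/output/part-00001"), conf2);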
