My LZW compression works when I use a symbol table (dictionary) of length 256; the encoder and decoder both work with 256 and everything is fine. But when I increase this number to, for example, 512, 1024, or 4096, the decoded file is not the same as the original input file. Any hints?
Source Code:
LZWEncoder.java:
import java.io.*;
public class LZWEncoder
{
public static void main ( String [] args )
throws FileNotFoundException , IOException
{
File file = new File ("calgary/book1");
File fileOut = new File ( "calgary/book1_enc");
FileInputStream reader = new FileInputStream ( file );
FileOutputStream writer = new FileOutputStream ( fileOut );
int size_st;
long file_size;
file_size = file.length();
size_st = (int) file_size/1024;
System.out.println("File size " + file_size + " Sysmbol tree" + size_st);
if (size_st < 256)
size_st = 256;
else if (size_st < 512)
size_st = 512;
else if (size_st < 1024)
size_st = 1024;
else if (size_st < 2048)
size_st = 2048;
else
size_st = 4096;
byte[] size_stInBytes = (Integer.toString(size_st)+"\n").getBytes();
// writer.write(size_stInBytes);
System.out.println("File size " + file_size + " Sysmbol tree " + size_st);
// input stream with lookahead
LookAheadIn in = new LookAheadIn(file);
LZWst st = new LZWst(4096); // specialised ST
while (!in.isEmpty())
{
int codeword = st.getput(in);
writer.write(codeword);
}
writer.close();
reader.close();
}
}
LZWDecoder.java:
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Scanner;
public class LZWDecoder
{
public static void main ( String [] args )
throws FileNotFoundException , IOException
{
File file = new File ("calgary/book1_enc");
Scanner first_line = new Scanner("calgary/book1_enc");
File fileOut = new File ( "calgary/book1_dec2");
FileInputStream reader = new FileInputStream ( file );
FileOutputStream writer = new FileOutputStream ( fileOut );
String size_st;
size_st =first_line.nextLine();
System.out.println(" Sysmbol tree " + size_st);
String [] st = new String [4096];
int i;
for (i=0; i <128; i++)
st[i] = Character.toString (( char ) i);
String prev = "";
int codeword ;
while (( codeword = reader.read())!= -1)
{
String s;
if ( codeword == i) // Tricky situation !
s = prev + prev.charAt(0);
else
s = st[codeword ];
for (int j = 0; j<s.length(); j++)
writer.write(s.charAt (j));
if ( prev.length() > 0 && i < 4096 )
st[i++] = prev + s.charAt(0);
prev = s;
}
writer.close();
reader.close();
}
}
LZWst.java:
import java.io.FileNotFoundException;
import java.io.IOException;
public class LZWst
{
private int i; // next codeword to assign
private int codeword ; // codeword to return
private Node [] roots ; // array of TSTs
private int st_size;
public LZWst (int st_sz)
{
st_size = st_sz;
roots = new Node [128];
for (i=0; i <128; i++) // init with ASCII
roots [i] = new Node (( char ) i,i);
}
private class Node
{ // standard node code
Node (int c, int codeword )
{
this .c = c;
this . codeword = codeword ;
}
int c;
Node left , mid , right ;
int codeword ;
}
public int getput ( LookAheadIn in)
throws FileNotFoundException , IOException
{
int c = in.readChar();
if (c == -1) return -1; // EOF
roots[c] = getput(c, roots[c], in);
in.backup();
return codeword; // longest prefix
}
public Node getput ( int c, Node x, LookAheadIn in)
throws FileNotFoundException , IOException
{ // recursive search *and* insert
if (x== null ) {
if (i<st_size){
x = new Node (c,i++);
System.out.println("Value of i: " + i);
}
return x;
}
if (c<x.c) x. left = getput (c,x.left ,in );
else if (c>x.c) x. right = getput (c,x.right ,in );
else {
int next = in.readChar();
codeword = x. codeword ;
x.mid = getput (next ,x.mid ,in );
}
return x;
}
}
LookAheadIn.java:
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
public class LookAheadIn
{
private FileInputStream in = null ;
private int last ;
private boolean backup = true ;
public LookAheadIn ( File file )
throws FileNotFoundException , IOException
{
in = new FileInputStream ( file );
last = in. read ();
backup = true ;
}
public void backup () { backup = true ; }
public int readChar() throws FileNotFoundException, IOException
{
if (!backup) last = in.read();
backup = false;
return last;
}
public boolean isEmpty(){ return last == -1; }
}
You're reading and writing single bytes, and a byte can't store values larger than 255. If you want to do LZW with codes larger than 255, you'll need to either encode a bit stream or, for testing (as a temporary hack), write two-byte words (an unsigned 16-bit int).
Replace
writer.write(codeword);
with
writer.write(codeword >> 8);
writer.write(codeword & 0xff);
and check if it works. If it does, you can spend some time implementing a bit-based stream. You'll need to update the reader and decoder too.
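For reference, here is a rough, untested sketch of the matching change in LZWDecoder's read loop, assuming the encoder now writes the high byte first as suggested above:

int hi, lo;
while ((hi = reader.read()) != -1) {
    lo = reader.read();
    if (lo == -1) break;            // truncated input, stop
    int codeword = (hi << 8) | lo;  // reassemble the 16-bit codeword
    // ... existing decoding logic, using this codeword ...
}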
Related
The code I've created so far reads a Java source file and writes its text into a text file. Now what I need to do is identify the comments (// and /* ... */) and output them to the screen, together with the lines inside block comments, numbered like this:
1: /*****************************************
2: ** This is my program // Weird comment! **
3: *****************************************/
7: /* Amount available */
14: /* This is my code */
I think you need something like this:
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.Scanner;
public class Main {
public static void main(String[] args) {
Scanner kb = new Scanner(System.in);
System.out.println("Please enter the file name: ");
String sourceFile = kb.nextLine();
try {
Scanner in = new Scanner(new File(sourceFile));
String txtFile = sourceFile + ".txt";
PrintWriter fileOut = new PrintWriter(txtFile);
while(in.hasNextLine()) {
String intoTxt = in.nextLine();
fileOut.print(intoTxt);
fileOut.println("");
}
fileOut.close();
File file = new File(txtFile);
Scanner text = new Scanner(file);
CommentParser cp = new CommentParser();
cp.extractComments(text);
} catch( FileNotFoundException e ) {
System.out.println("Sorry that file does not exist or is not accessible");
}
}
public static class CommentParser {
private boolean isCommentBlock = false;
public static final String START_COMMENT_BLOCK = "/*";
public static final String END_COMMENT_BLOCK = "*/";
public static final String START_COMMENT_LINE = "//";
public void extractComments(Scanner text) {
int lineNum = 1; // start at 1 so the numbering matches the expected output
while(text.hasNextLine()) {
String line = text.nextLine();
if( line.contains(START_COMMENT_BLOCK) && !this.isCommentBlock) {
// if it is a block comment
int startIdx = line.indexOf(START_COMMENT_BLOCK);
int endIdx = line.indexOf(END_COMMENT_BLOCK);
if( endIdx == -1 ) {
isCommentBlock = true;
this.printLine(lineNum, line.substring(startIdx, line.length()));
} else {
this.printLine(lineNum, line.substring(startIdx, endIdx + END_COMMENT_BLOCK.length()));
}
} else if( this.isCommentBlock ) {
// handle end of block comment, if it is in a following line
int endIdx = line.indexOf(END_COMMENT_BLOCK);
if( endIdx == -1 ){
this.printLine(lineNum, line);
} else {
this.printLine(lineNum, line.substring(0, endIdx + END_COMMENT_BLOCK.length()));
this.isCommentBlock = false;
}
} else if( line.contains(START_COMMENT_LINE) && !this.isCommentBlock ){
// if it is just a line comment
int startIdx = line.indexOf(START_COMMENT_LINE);
this.printLine(lineNum, line.substring(startIdx, line.length()));
}
lineNum++;
}
}
private void printLine(int lineNum, String comment) {
System.out.println(String.format("%s: %s", lineNum, comment));
}
}
}
You would still have to add some code to recognize several block comments on the same line; that is only a small addition to this snippet.
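For example, a rough sketch of how that first block-comment branch could be rewritten so that several /* ... */ pairs on one line are each reported (same variable names as in the snippet above; untested):

// Hypothetical rewrite of the first branch: keep scanning the same line
// for further "/* ... */" pairs before moving on to the next line.
int startIdx = line.indexOf(START_COMMENT_BLOCK);
while (startIdx != -1) {
    int endIdx = line.indexOf(END_COMMENT_BLOCK, startIdx);
    if (endIdx == -1) {
        // the block comment continues on the next line
        isCommentBlock = true;
        printLine(lineNum, line.substring(startIdx));
        break;
    }
    printLine(lineNum, line.substring(startIdx, endIdx + END_COMMENT_BLOCK.length()));
    startIdx = line.indexOf(START_COMMENT_BLOCK, endIdx + END_COMMENT_BLOCK.length());
}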
The split method takes two arguments: the name of the file to split and the size of each split. Could you check whether I'm on the right track, and suggest pseudocode for what to put in the for loop?
import java.io.*;
public class SplitFile {
public static void main(String[] args) throws IOException {
Split("testfile.pdf", 256);
}
public static void Split(String filename, int splitSize) throws IOException {
int numberOfFiles = 0;
File file = new File(filename);
numberOfFiles = ((int) file.length() / splitSize) + 1;
for (; numberOfFiles >= 0; numberOfFiles--) {
DataInputStream in = new DataInputStream(new BufferedInputStream(
new FileInputStream(filename)));
DataOutputStream out = new DataOutputStream(
new BufferedOutputStream(new FileOutputStream(file))); //What do I put here?
}
}
}
Required changes:
- A File object per output part, e.g. as in the code below.
- Initialize the data input stream outside the loop, not inside.
Code:
File original = new File(filename);
int numberOfFiles = ((int) original.length() / splitSize) + 1;
DataInputStream in =
new DataInputStream(new BufferedInputStream(new FileInputStream(filename)));
// <== just count through parts.
for (int i = 0; i < numberOfFiles; i++) {
File output = new File(String.format("%s-%d", filename, i));
// <== Part of file being output e.g. testfile.pdf-1, testfile.pdf-2
DataOutputStream out = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(output)));
}
For the actual writing:
- Read bytes from the input stream using read().
- Write bytes to the output stream using write().
There are two approaches: one byte at a time (easiest, but less efficient) or using a buffer (harder to code, but more efficient).
Buffered approach
long length = original.length();
DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(filename)));
int pos = 0;
byte[] buffer = new byte[splitSize];
for (...) {
...
// make sure you deal with file not being exactly divisible,
// last chunk might be smaller
long remaining = length - pos;
int chunk = (int) Math.min(splitSize, remaining);
in.readFully(buffer, 0, chunk); // buffer offset must be 0, not pos; readFully guards against short reads
out.write(buffer, 0, chunk);
pos += splitSize;
}
1 byte at a time.
for (...) {
...
for (int i = 0; i < splitSize && pos < length; i++) {
out.write(in.read());
pos++;
}
}
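Putting the pieces together, here is one way the whole thing might look as a complete method (an untested sketch meant to drop into the SplitFile class above, relying on its import java.io.*; readFully is used so short reads are not an issue):

public static void split(String filename, int splitSize) throws IOException {
    File original = new File(filename);
    long length = original.length();
    int numberOfFiles = (int) (length / splitSize) + 1;

    DataInputStream in = new DataInputStream(
            new BufferedInputStream(new FileInputStream(filename)));
    byte[] buffer = new byte[splitSize];
    long pos = 0;

    for (int i = 0; i < numberOfFiles && pos < length; i++) {
        // one output file per part, e.g. testfile.pdf-0, testfile.pdf-1, ...
        File output = new File(String.format("%s-%d", filename, i));
        DataOutputStream out = new DataOutputStream(
                new BufferedOutputStream(new FileOutputStream(output)));
        int chunk = (int) Math.min(splitSize, length - pos); // last chunk may be smaller
        in.readFully(buffer, 0, chunk);
        out.write(buffer, 0, chunk);
        out.close();
        pos += chunk;
    }
    in.close();
}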
You can do it using the Java NIO API in the following way.
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
public final class SplitFile {
public static void main(String[] args) throws IOException {
split("testfile.pdf", 256);
}
private static void split(String filename, int splitSize) throws IOException {
int i = filename.lastIndexOf('.');
String basename = filename.substring(0, i);
String ext = filename.substring(i + 1);
Path inputPath = Paths.get(filename);
int numberOfFiles = (int) (Files.size(inputPath) / splitSize) + 1;
try (FileChannel inputChannel = FileChannel.open(inputPath, StandardOpenOption.READ)) {
for (int j = 0; j < numberOfFiles; j++) {
String outputFilename = String.format("%s-%04d.%s", basename, j + 1, ext);
Path outputPath = inputPath.getParent().resolve(outputFilename);
try (FileChannel outputChannel = FileChannel.open(outputPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
inputChannel.transferTo(j * splitSize, splitSize, outputChannel);
}
}
}
}
}
I am trying to compare two files block by block, but I have a problem with my nested loops.
public void compare() {
File file1 = arrayOfFiles.get(i);
File file2 = arrayOfFiles.get(y);
if (file1.length() != file2.length()) {
break;
}
else
{
for (int z = 0; ; z++) {
byte[] b1 = getParts(file1, z);
byte[] b2 = getParts(file2, z);
if (b1.length != b2.length) {
break;
}
else
{
for (int x = 0; ; x++) {
if (b1[x] != b2[x]) {
break;
}
else
{
//how can I find the end of file? and compare last [x] of b1 and b2?
}
}
}
}
}
}
private static byte[] getParts(File file, int z) throws IOException {
byte [] bytes = new byte[1024];
int point = z * 1024;
RandomAccessFile raf = new RandomAccessFile(file, "r");
FileChannel fc = raf.getChannel();
MappedByteBuffer buffer = fc.map(FileChannel.MapMode.READ_ONLY, point, 1024);
buffer.get(bytes);
buffer.clear();
return bytes;
}
Is there another way to compare two files byte by byte, and can it be done with blocks of a different size?
To compare the last byte block of the files, your program only needs a minor modification: start iterating from the last block. The for clauses below have been changed to iterate backwards.
import java.lang.Math;
import java.nio.channels.FileChannel;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
public class Compare
{
public static final int BLOCK_SIZE = 1024;
public boolean compare(File file1, File file2) throws IOException
{
//File file1 = arrayOfFiles.get(i);
//File file2 = arrayOfFiles.get(y);
boolean equal = file1.length() == file2.length(); // equal lengths are a precondition
for (int z = getSizeInBlocks(file1) - 1; equal && 0 <= z ; z--)
{
MappedByteBuffer b1 = getParts(file1, z);
MappedByteBuffer b2 = getParts(file2, z);
if (b1.remaining() != b2.remaining())
{
equal = false;
break;
}
else
{
for (int x = b1.remaining() - 1; equal && 0 <= x; x--)
{
if (b1.get(x) != b2.get(x)) // ByteBuffer uses get(int), not [] indexing
{
equal = false;
break;
}
}
}
}
return equal;
}
private static int getSizeInBlocks(File file)
{
return (int) Math.ceil((double) file.length() / getBlockSize());
}
private static int getBlockSize()
{
return BLOCK_SIZE;
}
private static MappedByteBuffer getParts(File file, int z)
throws IOException
{
int point = z * getBlockSize();
RandomAccessFile raf = new RandomAccessFile(file, "r");
FileChannel fc = raf.getChannel();
MappedByteBuffer buffer = fc.map(
FileChannel.MapMode.READ_ONLY,
point,
Math.min(getBlockSize(), file.length() - point)); // the last block may be shorter than a full block
return buffer;
}
}
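A minimal, hypothetical driver for the class above (the file names are placeholders, and it assumes compare declares throws IOException as shown); it could live as a main method inside Compare:

// Hypothetical driver; the file names are placeholders.
public static void main(String[] args) throws IOException {
    File a = new File("fileA.bin");
    File b = new File("fileB.bin");
    boolean same = new Compare().compare(a, b);
    System.out.println(same ? "Files are identical" : "Files differ");
}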
public static void main(String[] args) {
File inFile = null;
if (0 < args.length) {
inFile = new File(args[0]);
}
BufferedInputStream bStream = null;
try {
int read;
bStream = new BufferedInputStream(new FileInputStream(inFile));
while ((read = bStream.read()) > 0) {
getMarker(read, bStream);
System.out.println(read);
}
}
catch (IOException e) {
e.printStackTrace();
}
finally {
try {
if (bStream != null)bStream.close();
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
private static void getMarker(int read, BufferedInputStream bStream) {
}
I want to find the long 1234567890 in the BufferedInputStream. Am I able to search the BufferedInputStream for a long value? (I'm not sure whether I need 'read' as a parameter; I doubt it, and I may remove it.) How do I search a BufferedInputStream? The data is big endian and 8-byte aligned.
The initial marker that I'm searching for contains the value 1234567890. Once I have found that value, I want to put the value of 2 bytes into a variable. These 2 bytes are located 11 bytes after the marker.
With java.io.DataInputStream.readLong() it is possible to read the data eight bytes at a time. But the question is: does the file contain only longs, or other data as well?
If the marker may start at any byte offset, we have to scan the file up to eight times, starting at offsets 0, 1, 2 and so on.
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
class FuzzyReaderHelper {
public static final long MAGIC_NUMBER = 1234567890L;
public static DataInputStream getStream( File source ) throws IOException {
boolean magicNumberFound = false;
for( int offset = 0; !magicNumberFound && offset < 8; ++offset ) {
DataInputStream dis = new DataInputStream( new FileInputStream( source ));
for( int i = 0; i < offset; ++i ) {
dis.read();
}
try {
long l;
while(( l = dis.readLong()) != MAGIC_NUMBER ) {
/* Nothing to do... */
}
magicNumberFound = true;
for( int i = 0; i < 11; ++i ) {
dis.read();
}
return dis;
}
catch( EOFException eof ){}
dis.close();
}
// choose one of the following:
throw new IllegalStateException( "Incompatible file: " + source );
// or: return null;
}
}
The next steps are up to you:
DataInputStream dis = FuzzyReaderHelper.getStream( new File( root, "toto.dat" ));
if( dis != null ) {
byte[] bytes = new byte[2];
bytes[0] = (byte) dis.read(); // read() returns an int, so cast to byte
bytes[1] = (byte) dis.read();
...
}
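Since the data is big endian, combining those two bytes into a single value could then look like this (a sketch; masking with 0xFF prevents sign extension). Alternatively, DataInputStream.readUnsignedShort() performs the same big-endian two-byte read in one call:

// Big endian: the first byte is the high-order byte.
int value = ((bytes[0] & 0xFF) << 8) | (bytes[1] & 0xFF);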
I want to read a file in the opposite direction, from the end to the start. My file looks like this:
[1322110800] LOG ROTATION: DAILY
[1322110800] LOG VERSION: 2.0
[1322110800] CURRENT HOST STATE:arsalan.hussain;DOWN;HARD;1;CRITICAL - Host Unreachable (192.168.1.107)
[1322110800] CURRENT HOST STATE: localhost;UP;HARD;1;PING OK - Packet loss = 0%, RTA = 0.06 ms
[1322110800] CURRENT HOST STATE: musewerx-72c7b0;UP;HARD;1;PING OK - Packet loss = 0%, RTA = 0.27 ms
I use this code to read it:
String strpath="/var/nagios.log";
FileReader fr = new FileReader(strpath);
BufferedReader br = new BufferedReader(fr);
String ch;
int time=0;
String Conversion="";
do {
ch = br.readLine();
out.print(ch+"<br/>");
} while (ch != null);
fr.close();
I would prefer to read it in reverse order using a buffered reader.
I had the same problem as described here. I want to look at lines in file in reverse order, from the end back to the start (The unix tac command will do it).
However my input files are fairly large so reading the whole file into memory, as in the other examples was not really a workable option for me.
Below is the class I came up with, it does use RandomAccessFile, but does not need any buffers, since it just retains pointers to the file itself, and works with the standard InputStream methods.
It works for my cases, and for empty files and a few other things I've tried. I don't have Unicode characters or anything fancy, but as long as the lines are delimited by LF it should work, even if they end with CR + LF.
Basic usage is:
BufferedReader in = new BufferedReader(new InputStreamReader(new ReverseLineInputStream(file)));
while(true) {
String line = in.readLine();
if (line == null) {
break;
}
System.out.println("X:" + line);
}
Here is the main source:
package www.kosoft.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.RandomAccessFile;
public class ReverseLineInputStream extends InputStream {
RandomAccessFile in;
long currentLineStart = -1;
long currentLineEnd = -1;
long currentPos = -1;
long lastPosInFile = -1;
public ReverseLineInputStream(File file) throws FileNotFoundException {
in = new RandomAccessFile(file, "r");
currentLineStart = file.length();
currentLineEnd = file.length();
lastPosInFile = file.length() -1;
currentPos = currentLineEnd;
}
public void findPrevLine() throws IOException {
currentLineEnd = currentLineStart;
// There are no more lines, since we are at the beginning of the file and no lines.
if (currentLineEnd == 0) {
currentLineEnd = -1;
currentLineStart = -1;
currentPos = -1;
return;
}
long filePointer = currentLineStart -1;
while ( true) {
filePointer--;
// we are at start of file so this is the first line in the file.
if (filePointer < 0) {
break;
}
in.seek(filePointer);
int readByte = in.readByte();
// We ignore last LF in file. search back to find the previous LF.
if (readByte == 0xA && filePointer != lastPosInFile ) {
break;
}
}
// we want to start at pointer +1 so we are after the LF we found or at 0 the start of the file.
currentLineStart = filePointer + 1;
currentPos = currentLineStart;
}
public int read() throws IOException {
if (currentPos < currentLineEnd ) {
in.seek(currentPos++);
int readByte = in.readByte();
return readByte;
}
else if (currentPos < 0) {
return -1;
}
else {
findPrevLine();
return read();
}
}
}
Apache Commons IO has the ReversedLinesFileReader class for this now (well, since version 2.2).
So your code could be:
String strpath="/var/nagios.log";
ReversedLinesFileReader fr = new ReversedLinesFileReader(new File(strpath));
String ch;
int time=0;
String Conversion="";
while ((ch = fr.readLine()) != null) {
out.print(ch + "<br/>");
}
fr.close();
The ReverseLineInputStream posted above is exactly what I was looking for. The files I am reading are large and cannot be buffered.
There are a couple of bugs:
- The file is not closed.
- If the last line is not terminated, the last two lines are returned on the first read.
Here is the corrected code:
package www.kosoft.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
public class ReverseLineInputStream extends InputStream {
RandomAccessFile in;
long currentLineStart = -1;
long currentLineEnd = -1;
long currentPos = -1;
long lastPosInFile = -1;
int lastChar = -1;
public ReverseLineInputStream(File file) throws FileNotFoundException {
in = new RandomAccessFile(file, "r");
currentLineStart = file.length();
currentLineEnd = file.length();
lastPosInFile = file.length() -1;
currentPos = currentLineEnd;
}
private void findPrevLine() throws IOException {
if (lastChar == -1) {
in.seek(lastPosInFile);
lastChar = in.readByte();
}
currentLineEnd = currentLineStart;
// There are no more lines, since we are at the beginning of the file and no lines.
if (currentLineEnd == 0) {
currentLineEnd = -1;
currentLineStart = -1;
currentPos = -1;
return;
}
long filePointer = currentLineStart -1;
while ( true) {
filePointer--;
// we are at start of file so this is the first line in the file.
if (filePointer < 0) {
break;
}
in.seek(filePointer);
int readByte = in.readByte();
// We ignore last LF in file. search back to find the previous LF.
if (readByte == 0xA && filePointer != lastPosInFile ) {
break;
}
}
// we want to start at pointer +1 so we are after the LF we found or at 0 the start of the file.
currentLineStart = filePointer + 1;
currentPos = currentLineStart;
}
public int read() throws IOException {
if (currentPos < currentLineEnd ) {
in.seek(currentPos++);
int readByte = in.readByte();
return readByte;
} else if (currentPos > lastPosInFile && currentLineStart < currentLineEnd) {
// last line in file (first returned)
findPrevLine();
if (lastChar != '\n' && lastChar != '\r') {
// last line is not terminated
return '\n';
} else {
return read();
}
} else if (currentPos < 0) {
return -1;
} else {
findPrevLine();
return read();
}
}
@Override
public void close() throws IOException {
if (in != null) {
in.close();
in = null;
}
}
}
The proposed ReverseLineInputStream works really slowly when you try to read thousands of lines. On my PC with an Intel Core i7 and an SSD drive it took about 80 seconds for 60k lines. Here is an optimized version inspired by it, with buffered reading (as opposed to the one-byte-at-a-time reading in ReverseLineInputStream); the same 60k-line log file is read in 400 milliseconds:
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
public class FastReverseLineInputStream extends InputStream {
private static final int MAX_LINE_BYTES = 1024 * 1024;
private static final int DEFAULT_BUFFER_SIZE = 1024 * 1024;
private RandomAccessFile in;
private long currentFilePos;
private int bufferSize;
private byte[] buffer;
private int currentBufferPos;
private int maxLineBytes;
private byte[] currentLine;
private int currentLineWritePos = 0;
private int currentLineReadPos = 0;
private boolean lineBuffered = false;
public FastReverseLineInputStream(File file) throws IOException {
this(file, DEFAULT_BUFFER_SIZE, MAX_LINE_BYTES);
}
public FastReverseLineInputStream(File file, int bufferSize, int maxLineBytes) throws IOException {
this.maxLineBytes = maxLineBytes;
in = new RandomAccessFile(file, "r");
currentFilePos = file.length() - 1;
in.seek(currentFilePos);
if (in.readByte() == 0xA) {
currentFilePos--;
}
currentLine = new byte[maxLineBytes];
currentLine[0] = 0xA;
this.bufferSize = bufferSize;
buffer = new byte[bufferSize];
fillBuffer();
fillLineBuffer();
}
@Override
public int read() throws IOException {
if (currentFilePos <= 0 && currentBufferPos < 0 && currentLineReadPos < 0) {
return -1;
}
if (!lineBuffered) {
fillLineBuffer();
}
if (lineBuffered) {
if (currentLineReadPos == 0) {
lineBuffered = false;
}
return currentLine[currentLineReadPos--];
}
return 0;
}
private void fillBuffer() throws IOException {
if (currentFilePos < 0) {
return;
}
if (currentFilePos < bufferSize) {
in.seek(0);
in.read(buffer);
currentBufferPos = (int) currentFilePos;
currentFilePos = -1;
} else {
in.seek(currentFilePos);
in.read(buffer);
currentBufferPos = bufferSize - 1;
currentFilePos = currentFilePos - bufferSize;
}
}
private void fillLineBuffer() throws IOException {
currentLineWritePos = 1;
while (true) {
// we've read all the buffer - need to fill it again
if (currentBufferPos < 0) {
fillBuffer();
// nothing was buffered - we reached the beginning of a file
if (currentBufferPos < 0) {
currentLineReadPos = currentLineWritePos - 1;
lineBuffered = true;
return;
}
}
byte b = buffer[currentBufferPos--];
// \n is found - line fully buffered
if (b == 0xA) {
currentLineReadPos = currentLineWritePos - 1;
lineBuffered = true;
break;
// just ignore \r for now
} else if (b == 0xD) {
continue;
} else {
if (currentLineWritePos == maxLineBytes) {
throw new IOException("file has a line exceeding " + maxLineBytes
+ " bytes; use constructor to pickup bigger line buffer");
}
// write the current line bytes in reverse order - reading from
// the end will produce the correct line
currentLine[currentLineWritePos++] = b;
}
}
}
}
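Usage stays the same as for the original class, for example (untested sketch, where file is a java.io.File pointing at the log):

BufferedReader in = new BufferedReader(
        new InputStreamReader(new FastReverseLineInputStream(file)));
String line;
while ((line = in.readLine()) != null) {
    System.out.println(line);
}
in.close();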
@Test
public void readAndPrintInReverseOrder() throws IOException {
String path = "src/misctests/test.txt";
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(path));
Stack<String> lines = new Stack<String>();
String line = br.readLine();
while(line != null) {
lines.push(line);
line = br.readLine();
}
while(! lines.empty()) {
System.out.println(lines.pop());
}
} finally {
if(br != null) {
try {
br.close();
} catch(IOException e) {
// can't help it
}
}
}
}
Note that this code reads the whole file into memory and only then starts printing it. This is the only way you can do it with a buffered reader or any other reader that does not support seeking. Keep this in mind: in your case you want to read a log file, and log files can be very big!
If you want to read line by line and print on the fly, then you have no alternative but to use a reader that supports seeking, such as java.io.RandomAccessFile, and that is anything but trivial.
As far as I understand, you try to read backwards line by line.
Suppose this is the file you try to read:
line1
line2
line3
And you want to write it to the output stream of the servlet as follows:
line3
line2
line1
The following code might be helpful in this case:
List<String> tmp = new ArrayList<String>();
while ((ch = br.readLine()) != null) {
tmp.add(ch);
}
for (int i = tmp.size() - 1; i >= 0; i--) {
out.print(tmp.get(i) + "<br/>");
}
I had a problem with your solution, @dpetruha, because of this:
Does RandomAccessFile.read() from local file guarantee that exact number of bytes will be read?
Here is my solution (only fillBuffer has changed):
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
public class ReverseLineInputStream extends InputStream {
private static final int MAX_LINE_BYTES = 1024 * 1024;
private static final int DEFAULT_BUFFER_SIZE = 1024 * 1024;
private RandomAccessFile in;
private long currentFilePos;
private int bufferSize;
private byte[] buffer;
private int currentBufferPos;
private int maxLineBytes;
private byte[] currentLine;
private int currentLineWritePos = 0;
private int currentLineReadPos = 0;
private boolean lineBuffered = false;
public ReverseLineInputStream(File file) throws IOException {
this(file, DEFAULT_BUFFER_SIZE, MAX_LINE_BYTES);
}
public ReverseLineInputStream(File file, int bufferSize, int maxLineBytes) throws IOException {
this.maxLineBytes = maxLineBytes;
in = new RandomAccessFile(file, "r");
currentFilePos = file.length() - 1;
in.seek(currentFilePos);
if (in.readByte() == 0xA) {
currentFilePos--;
}
currentLine = new byte[maxLineBytes];
currentLine[0] = 0xA;
this.bufferSize = bufferSize;
buffer = new byte[bufferSize];
fillBuffer();
fillLineBuffer();
}
@Override
public int read() throws IOException {
if (currentFilePos <= 0 && currentBufferPos < 0 && currentLineReadPos < 0) {
return -1;
}
if (!lineBuffered) {
fillLineBuffer();
}
if (lineBuffered) {
if (currentLineReadPos == 0) {
lineBuffered = false;
}
return currentLine[currentLineReadPos--];
}
return 0;
}
private void fillBuffer() throws IOException {
if (currentFilePos < 0) {
return;
}
if (currentFilePos < bufferSize) {
in.seek(0);
buffer = new byte[(int) currentFilePos + 1];
in.readFully(buffer);
currentBufferPos = (int) currentFilePos;
currentFilePos = -1;
} else {
in.seek(currentFilePos - buffer.length);
in.readFully(buffer);
currentBufferPos = bufferSize - 1;
currentFilePos = currentFilePos - bufferSize;
}
}
private void fillLineBuffer() throws IOException {
currentLineWritePos = 1;
while (true) {
// we've read all the buffer - need to fill it again
if (currentBufferPos < 0) {
fillBuffer();
// nothing was buffered - we reached the beginning of a file
if (currentBufferPos < 0) {
currentLineReadPos = currentLineWritePos - 1;
lineBuffered = true;
return;
}
}
byte b = buffer[currentBufferPos--];
// \n is found - line fully buffered
if (b == 0xA) {
currentLineReadPos = currentLineWritePos - 1;
lineBuffered = true;
break;
// just ignore \r for now
} else if (b == 0xD) {
continue;
} else {
if (currentLineWritePos == maxLineBytes) {
throw new IOException("file has a line exceeding " + maxLineBytes
+ " bytes; use constructor to pickup bigger line buffer");
}
// write the current line bytes in reverse order - reading from
// the end will produce the correct line
currentLine[currentLineWritePos++] = b;
}
}
}
}