Invalid header signature when reading any xls file - Java

When I try to read an xls file using the code below, I always get this error:
org.apache.poi.poifs.filesystem.NotOLE2FileException: Invalid header signature; read 0x65572D2D2D2D2D2D, expected 0xE11AB1A1E011CFD0 - Your file appears not to be a valid OLE2 document.
This is my code:
public Result<List<IDto>> ReadExcelClassInfo2003(File file,
        Timestamp createTime, Timestamp updateTime, BigDecimal createBy,
        BigDecimal updateBy) {
    Result<List<IDto>> resultData = new Result<List<IDto>>();
    Integer numInsertSuccess = 0;
    if (file == null) {
        resultData.setErrorCode(ErrorCode.ERROR_FORMAT);
        return resultData;
    }
    try {
        InputStream is = new FileInputStream(file);
        POIFSFileSystem fs = new POIFSFileSystem(is);
        Integer classType = ClassTypeEnum.CLASSROOM.getValue();
        Integer maxCol = ExcelConstant.MAX_COLUMN_CLASSROOM_INFO;
        workbook = new HSSFWorkbook(fs);
        HSSFSheet sheetClassInfo = workbook.getSheetAt(0);
        if (sheetClassInfo == null) {
            resultData.setErrorCode(ErrorCode.ERROR_FORMAT);
            return resultData;
        }
        // Some code to get data from the Excel file here.
        is.close();
        workbook.close();
    } catch (FileNotFoundException e) {
        resultData.setErrorCode(ErrorCode.ERROR_FORMAT);
        return resultData;
    } catch (IOException e) {
        resultData.setErrorCode(ErrorCode.ERROR_FORMAT);
        return resultData;
    } catch (Exception e) {
        resultData.setErrorCode(ErrorCode.ERROR_FORMAT);
        return resultData;
    }
    if (numInsertSuccess == 0) {
        resultData.setErrorCode(ErrorCode.CLASS_DATA_INVALID);
        return resultData;
    }
    resultData.setErrorCode(ErrorCode.IMPORT_SUCCESS);
    resultData.setMessage(numInsertSuccess.toString());
    return resultData;
}
My controller code:
@POST
@Path("class/import")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
@RolesAllowed(Role.TRAINING_ADMIN)
// public Response importClass(@FormParam("file") File file) {
public Response importClass(@Multipart("file") File file) {
    LOGGER.info("Received PUT import class: file=" + file.length());
    if (checkTokenAndRole(new int[] {1, 11}).getStatus() != Response.Status.OK.getStatusCode()) {
        return LoginError(checkToken().getStatus());
    } else {
        String token = request.getHeader(HttpHeaders.AUTHORIZATION);
        String fileExtension = request.getHeader("FileExtension");
        return ClassService.getInstance().importClass(file, fileExtension, token);
    }
}
And the methods called by the controller:
public Response importClass(File file, String fileExtension, String token) {
    Result<List<IDto>> result = new Result<List<IDto>>();
    try {
        ErrorDTO errorDto = new ErrorDTO();
        String data = "";
        double bytes = file.length();
        double kilobytes = (bytes / 1024);
        double megabytes = (kilobytes / 1024);
        if (megabytes > ExcelConstant.MAX_FILE_SIZE) {
            errorDto.setErrorCode(ErrorCode.ERROR_FORMAT);
            data = Utility.toJSONString(errorDto);
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                    .entity(data).build();
        }
        Timestamp createTime = new Timestamp(System.currentTimeMillis());
        Timestamp updateTime = new Timestamp(System.currentTimeMillis());
        BigDecimal createBy = null;
        BigDecimal updateBy = null;
        Result<UserInfoDTO> userInfo = DaoManager.getUserInfoDao()
                .getUserInfoByToken(token);
        if (userInfo.getData() != null) {
            createBy = userInfo.getData().getId();
            updateBy = userInfo.getData().getId();
        }
        result = DaoManager.getClassDao().importClass(file, fileExtension,
                createTime, updateTime, createBy, updateBy);
        int errorCode = result.getErrorCode();
        String message = result.getMessage();
        errorDto = new ErrorDTO();
        errorDto.setErrorCode(errorCode);
        errorDto.setMessage(message);
        data = Utility.toJSONString(errorDto);
        // release memory
        userInfo = null;
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                .entity(data).build();
    } catch (Exception e) {
        e.printStackTrace();
        LOGGER.error(e.getMessage());
        result.setStatus(Constant.INTERNAL_SERVER_ERROR);
        result.setErrorCode(ErrorCode.IMPORT_ERROR);
        return super.responseData(result);
    }
}

public Result<List<IDto>> importClass(File file, String fileExtension,
        Timestamp createTime, Timestamp updateTime, BigDecimal createBy,
        BigDecimal updateBy) throws IOException {
    return ExportExcelService.getInstance().ReadExcelClassInfo2003(file,
            fileExtension, createTime, updateTime, createBy, updateBy);
}
I debugged and found that the process always fails at new POIFSFileSystem(is) and throws the exception above. I tested with every xls file I have and got the same error.
Can anyone help me resolve this problem? And what is the header 0x65572D2D2D2D2D2D?
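For reference, POI reports the first eight bytes of the stream as a little-endian long, so 0x65572D2D2D2D2D2D corresponds to the leading bytes 2D 2D 2D 2D 2D 2D 57 65, i.e. the ASCII text "------We" rather than the OLE2 magic D0 CF 11 E0 A1 B1 1A E1 that a real xls file starts with. A minimal sketch (the path is a placeholder) to dump what the received file actually begins with:
import java.io.FileInputStream;
import java.io.IOException;

public class HeaderCheck {
    public static void main(String[] args) throws IOException {
        // Placeholder path: point this at the File your controller passes to the service.
        try (FileInputStream in = new FileInputStream("/tmp/uploaded-file")) {
            byte[] header = new byte[8];
            int read = in.read(header);
            StringBuilder hex = new StringBuilder();
            StringBuilder ascii = new StringBuilder();
            for (int i = 0; i < read; i++) {
                int b = header[i] & 0xFF;
                hex.append(String.format("%02X ", b));
                ascii.append(b >= 32 && b < 127 ? (char) b : '.');
            }
            // A valid OLE2 (.xls) file starts with: D0 CF 11 E0 A1 B1 1A E1
            System.out.println("hex:   " + hex);
            System.out.println("ascii: " + ascii);
        }
    }
}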
Thanks.

Related

Android - Zip Path Traversal in Play Store

I am uploading my app to the Play Store but get the error below:
Zip Path Traversal: Your app contains an unsafe unzipping pattern that may lead to a Path Traversal vulnerability. Please see this Google Help Center article to learn how to fix the issue.
org.apache.cordova.Zip.unzipSync
I edited my source code as in this LINK, but I still get the error.
Here is my changed source code:
public class Zip extends CordovaPlugin {
private static final String LOG_TAG = "Zip";
// Can't use DataInputStream because it has the wrong endian-ness.
private static int readInt(InputStream is) throws IOException {
int a = is.read();
int b = is.read();
int c = is.read();
int d = is.read();
return a | b << 8 | c << 16 | d << 24;
}
@Override
public boolean execute(String action, CordovaArgs args, final CallbackContext callbackContext) throws JSONException {
if ("unzip".equals(action)) {
unzip(args, callbackContext);
return true;
}
return false;
}
private void unzip(final CordovaArgs args, final CallbackContext callbackContext) {
this.cordova.getThreadPool().execute(new Runnable() {
public void run() {
unzipSync(args, callbackContext);
}
});
}
private void unzipSync(CordovaArgs args, CallbackContext callbackContext) {
InputStream inputStream = null;
try {
String zipFileName = args.getString(0);
String outputDirectory = args.getString(1);
// Since Cordova 3.3.0 and release of File plugins, files are accessed via cdvfile://
// Accept a path or a URI for the source zip.
Uri zipUri = getUriForArg(zipFileName);
Uri outputUri = getUriForArg(outputDirectory);
CordovaResourceApi resourceApi = webView.getResourceApi();
File tempFile = resourceApi.mapUriToFile(zipUri);
if (tempFile == null || !tempFile.exists()) {
String errorMessage = "Zip file does not exist";
callbackContext.error(errorMessage);
Log.e(LOG_TAG, errorMessage);
return;
}
File outputDir = resourceApi.mapUriToFile(outputUri);
outputDirectory = outputDir.getAbsolutePath();
outputDirectory += outputDirectory.endsWith(File.separator) ? "" : File.separator;
if (outputDir == null || (!outputDir.exists() && !outputDir.mkdirs())) {
String errorMessage = "Could not create output directory";
callbackContext.error(errorMessage);
Log.e(LOG_TAG, errorMessage);
return;
}
OpenForReadResult zipFile = resourceApi.openForRead(zipUri);
ProgressEvent progress = new ProgressEvent();
progress.setTotal(zipFile.length);
inputStream = new BufferedInputStream(zipFile.inputStream);
inputStream.mark(10);
int magic = readInt(inputStream);
if (magic != 875721283) { // CRX identifier
inputStream.reset();
} else {
// CRX files contain a header. This header consists of:
// * 4 bytes of magic number
// * 4 bytes of CRX format version,
// * 4 bytes of public key length
// * 4 bytes of signature length
// * the public key
// * the signature
// and then the ordinary zip data follows. We skip over the header before creating the ZipInputStream.
readInt(inputStream); // version == 2.
int pubkeyLength = readInt(inputStream);
int signatureLength = readInt(inputStream);
inputStream.skip(pubkeyLength + signatureLength);
progress.setLoaded(16 + pubkeyLength + signatureLength);
}
// The inputstream is now pointing at the start of the actual zip file content.
ZipInputStream zis = new ZipInputStream(inputStream);
inputStream = zis;
ZipEntry ze;
byte[] buffer = new byte[32 * 1024];
boolean anyEntries = false;
while ((ze = zis.getNextEntry()) != null) {
try {
anyEntries = true;
String compressedName = ze.getName();
if (ze.isDirectory()) {
try {
File dir = new File(outputDirectory + compressedName);
File f = new File(dir, ze.getName());
String canonicalPath = f.getCanonicalPath();
if (!canonicalPath.startsWith(dir.toString())){
dir.mkdirs();
}else {
if (inputStream != null) {
try {
inputStream.close();
} catch (IOException e) {
}
}
}
} catch (Exception e) {
String errorMessage = "An error occurred while unzipping.";
callbackContext.error(errorMessage);
Log.e(LOG_TAG, errorMessage, e);
}
} else {
File file = new File(outputDirectory + compressedName);
File f = new File(file, ze.getName());
String canonicalPath = f.getCanonicalPath();
if (!canonicalPath.startsWith(file.toString())) {
file.getParentFile().mkdirs();
if (file.exists() || file.createNewFile()) {
try {
Log.w("Zip", "extracting: " + file.getPath());
FileOutputStream fout = new FileOutputStream(file);
int count;
while ((count = zis.read(buffer)) != -1) {
fout.write(buffer, 0, count);
}
fout.close();
} catch (Exception e) {
String errorMessage = "An error occurred while unzipping.";
callbackContext.error(errorMessage);
Log.e(LOG_TAG, errorMessage, e);
}
}
}else {
if (inputStream != null) {
try {
inputStream.close();
} catch (IOException e) {
}
}
}
}
progress.addLoaded(ze.getCompressedSize());
updateProgress(callbackContext, progress);
zis.closeEntry();
} catch (Exception e) {
String errorMessage = "An error occurred while unzipping.";
callbackContext.error(errorMessage);
Log.e(LOG_TAG, errorMessage, e);
}
}
// final progress = 100%
progress.setLoaded(progress.getTotal());
updateProgress(callbackContext, progress);
if (anyEntries)
callbackContext.success();
else
callbackContext.error("Bad zip file");
} catch (Exception e) {
String errorMessage = "An error occurred while unzipping.";
callbackContext.error(errorMessage);
Log.e(LOG_TAG, errorMessage, e);
} finally {
if (inputStream != null) {
try {
inputStream.close();
} catch (IOException e) {
}
}
}
}
private void updateProgress(CallbackContext callbackContext, ProgressEvent progress) throws JSONException {
PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, progress.toJSONObject());
pluginResult.setKeepCallback(true);
callbackContext.sendPluginResult(pluginResult);
}
private Uri getUriForArg(String arg) {
CordovaResourceApi resourceApi = webView.getResourceApi();
Uri tmpTarget = Uri.parse(arg);
return resourceApi.remapUri(
tmpTarget.getScheme() != null ? tmpTarget : Uri.fromFile(new File(arg)));
}
private static class ProgressEvent {
private long loaded;
private long total;
public long getLoaded() {
return loaded;
}
public void setLoaded(long loaded) {
this.loaded = loaded;
}
public void addLoaded(long add) {
this.loaded += add;
}
public long getTotal() {
return total;
}
public void setTotal(long total) {
this.total = total;
}
public JSONObject toJSONObject() throws JSONException {
return new JSONObject(
"{loaded:" + loaded +
",total:" + total + "}");
}
}
}
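For comparison, the fix the Play Store warning asks for boils down to rejecting any entry whose canonical path escapes the canonical output directory before extracting it. A minimal sketch of that check (the helper name is my own choice, not from the plugin):
import java.io.File;
import java.io.IOException;

final class ZipPathGuard {
    // Returns the destination file for a zip entry, refusing entries that would
    // escape the output directory (e.g. names containing "../").
    static File safeDestination(File outputDir, String entryName) throws IOException {
        File destination = new File(outputDir, entryName);
        String canonicalDir = outputDir.getCanonicalPath() + File.separator;
        String canonicalDest = destination.getCanonicalPath();
        if (!canonicalDest.startsWith(canonicalDir)) {
            throw new SecurityException("Zip entry is outside of the target dir: " + entryName);
        }
        return destination;
    }
}
In unzipSync the extraction (mkdirs / FileOutputStream) would then only run when this check passes; note that the startsWith tests in the code above appear inverted, since they extract exactly when the canonical path does not start with the reference path.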

How to restore a file from GDrive?

I am making an app which stores its SQLite database backup on GDrive. I succeeded in signing in and uploading the file to the Drive, but failed to restore it. Following is the code.
I use an SQLiteDatabase to store the fileID so that it can be used later when updating and restoring. I am looking for a method which will make use of the FileID to restore.
The error occurs at file.getDownloadUrl() and file.getContent().
class DriveClassHelper
{
private final Executor mExecutor = Executors.newSingleThreadExecutor();
private static Drive mDriveService;
private String FileID = null;
private static String filePath = "/data/data/com.example.gdrivebackup/databases/Data.db";
DriveClassHelper(Drive mDriveService)
{
DriveClassHelper.mDriveService = mDriveService;
}
// ---------------------------------- TO BackUp on Drive -------------------------------------------
public Task<String> createFile()
{
return Tasks.call(mExecutor, () ->
{
File fileMetaData = new File();
fileMetaData.setName("Backup");
java.io.File file = new java.io.File(filePath);
String mimeType = MimeTypeMap.getSingleton().getExtensionFromMimeType("application/x-sqlite-3");
FileContent mediaContent = new FileContent(mimeType, file);
File myFile = null;
FileID = getFileIDFromDatabase();
try {
if (FileID != null) {
Log.i("CALLED : ", FileID);
//mDriveService.files().delete().execute();
myFile = mDriveService.files().update(FileID, fileMetaData, mediaContent).execute();
} else {
myFile = mDriveService.files().create(fileMetaData, mediaContent).execute();
MainActivity.demoSQLite.insertData(myFile.getId());
}
} catch (Exception e) {
e.printStackTrace();
}
if (myFile == null) {
throw new IOException("Null Result when requesting file creation");
}
Log.i("ID:", myFile.getId());
return myFile.getId();
}
);
}
// -------------------------------------------------------------------------------------------------
// ---------------------------------- TO get File ID -------------------------------------------
private static String getFileIDFromDatabase()
{
String FileIDFromMethod = null;
Cursor result = MainActivity.demoSQLite.getData();
if (result.getCount() == 0) {
Log.i("CURSOR :", "NO ENTRY");
return null;
} else {
while (result.moveToNext()) {
FileIDFromMethod = result.getString(0);
}
return FileIDFromMethod;
}
}
// -------------------------------------------------------------------------------------------------
// ---------------------------------- TO Restore -------------------------------------------
public static class Restore extends AsyncTask<Void, Void, String>
{
@Override
protected String doInBackground(Void... params) {
String fileId = null;
try
{
fileId = getFileIDFromDatabase();
if (fileId != null)
{
File file = mDriveService.files().get(fileId).execute();
downloadFile(file);
}
else
{
return null;
}
}
catch (Exception e)
{
e.printStackTrace();
}
return fileId;
}
private void downloadFile(File file)
{
InputStream mInput = null;
FileOutputStream mOutput = null;
if (file.getDownloadUrl() != null && file.getDownloadUrl().length() > 0) //Error occurs at file.getDownloadUrl()
{
try
{
HttpResponse resp = mDriveService.getRequestFactory().buildGetRequest(new GenericUrl(file.getDownloadUrl())).execute();
mInput = resp.getContent();
String outFileName = "file://" + Environment.getDataDirectory().getPath() + filePath;
// Log.e("com.example.myapp", "getDatabasePath="+ getDatabasePath(""));
//Log.e("com.example.myapp", "outFileName="+outFileName);
// String outFileName = "../databases/" + "Quickpay.db";
mOutput = new FileOutputStream(outFileName);
byte[] mBuffer = new byte[1024];
int mLength;
while ((mLength = mInput.read(mBuffer)) > 0)
{
mOutput.write(mBuffer, 0, mLength);
}
mOutput.flush();
}
catch (IOException e)
{
// An error occurred.
e.printStackTrace();
// return null;
}
finally
{
try
{
//Close the streams
if (mOutput != null)
{
mOutput.close();
}
if (mInput != null)
{
mInput.close();
}
}
catch (IOException e)
{
Log.e("com.example.myapp", "failed to close databases");
}
}
}
else
{
// The file doesn't have any content stored on Drive.
// return null;
Log.e("com.example.myapp", "No content on Drive");
}
}
}
}
The Gradle dependencies look like this:
implementation 'com.google.android.gms:play-services-auth:16.0.1'
implementation('com.google.apis:google-api-services-drive:v3-rev136-1.25.0')
{
exclude group: 'org.apache.httpcomponents'
}
implementation('com.google.api-client:google-api-client-android:1.26.0')
{
exclude group: 'org.apache.httpcomponents'
}
implementation 'com.google.http-client:google-http-client-gson:1.26.0'
As far as I know, the download URL is only available in the Google Drive API v2 and not in v3.
Short lived download URL for the file. This field is only populated for files with content stored in Google Drive; it is not populated for Google Docs or shortcut files.
It was not very stable in my opinion, as not all file types would return a download URL.
Using Google Drive v3 you should download the file using a stream:
String fileId = "0BwwA4oUTeiV1UVNwOHItT0xfa2M";
OutputStream outputStream = new ByteArrayOutputStream();
driveService.files().get(fileId)
.executeMediaAndDownloadTo(outputStream);
This should work for the restore. Let me know if it doesn't and I will have a look; it's been a while since I last tried a restore.
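To restore straight into the local database file rather than into memory, the same v3 call can stream into a FileOutputStream. This is only a sketch, reusing mDriveService, filePath and getFileIDFromDatabase() from the DriveClassHelper above:
// Sketch: download the backed-up file from Drive straight over the local Data.db.
String fileId = getFileIDFromDatabase();
if (fileId != null) {
    try (OutputStream out = new FileOutputStream(filePath)) {
        mDriveService.files().get(fileId).executeMediaAndDownloadTo(out);
    } catch (IOException e) {
        Log.e("DriveClassHelper", "Restore failed", e);
    }
}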

Flatbuffer writing to a binary file in Android gives only a single response

I am new to FlatBuffers. I created a schema file outside the project and add users (Monster, as in the FlatBuffers documentation) to the binary via Java code in Android. Everything works fine, but when reading the binary file back it only gives me a length of 1. I added 3 people, but the monsters length is 1 at read time. Can anyone help me figure this out?
This is the full code:
builder = new FlatBufferBuilder(1024);

public void addNewData(String emailOffset, String nameOffset, String contactNoOffset,
        String DOJOffset, String departmentOffset, String empIdOffset,
        float[] embeddingOffset) {
    int storeembedd = SingleJson.createEmbeddingVector(builder, embeddingOffset);
    int hereEmailOffset = builder.createString(emailOffset);
    int hereNameOffset = builder.createString(nameOffset);
    int hereContactOffset = builder.createString(contactNoOffset);
    int hereDOJOffset = builder.createString(DOJOffset);
    int hereDepartmentOffset = builder.createString(departmentOffset);
    int hereImpIdOffset = builder.createString(empIdOffset);
    int test = SingleJson.createSingleJson(builder, hereEmailOffset, hereNameOffset,
            hereContactOffset, hereDOJOffset, hereDepartmentOffset, hereImpIdOffset,
            storeembedd);
    int[] offsetOfMonster = new int[1];
    offsetOfMonster[0] = test;
    int temp = Monsters.createMonstersVector(builder, offsetOfMonster);
    Monsters.startMonsters(builder);
    Monsters.addMonsters(builder, temp);
    int orc = Monsters.endMonsters(builder);
    builder.finish(orc);
    byte[] buf1 = builder.sizedByteArray();
    openAndAppenedInBinary(buf1);
}
private void openAndAppenedInBinary(byte[] buf1) {
FileOutputStream output = null;
try {
InputStream inputStream = new FileInputStream(newFile());
byte[] buffer = new byte[inputStream.available()];
if (inputStream.available() == 0) {
output = new FileOutputStream(newFile(), true);
output.write(buf1);
} else {
while (inputStream.read(buffer) != -1) {
output = new FileOutputStream(newFile(), true);
output.write(buffer);
output.write(buf1);
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
output.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private String newFile() {
File file = new File(Environment.getExternalStorageDirectory()
+ "/Android/data/"
+ context.getPackageName()
+ "/binFile");
if (!file.exists()) {
if (!file.mkdir()) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
try {
Path dir = Paths.get(file.getAbsolutePath());
Files.createDirectory(dir);
} catch (IOException e) {
Path parentDir = Paths.get(file.getParent());
if (!Files.exists(parentDir)) {
try {
Files.createDirectories(parentDir);
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
}
}
}
File binFile = new File(file, "renamed.bin");
try {
FileWriter writer = new FileWriter(binFile);
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
return binFile.getAbsolutePath();
}
Here is my FlatBuffers reading code:
new FlatParsingForPerticularId().execute(readRawResource(R.raw.renamed));//renamed is my bin file
private class FlatParsingForPerticularId extends AsyncTask {
@Override
protected String doInBackground(Object... params) {
byte[] buffer = (byte[]) params[0];
long startTime = System.currentTimeMillis();
ByteBuffer bb = ByteBuffer.wrap(buffer);
Monsters monsterList = Monsters.getRootAsMonsters(bb);
int length = monsterList.monstersLength();
SingleJson monster = null;
for (int i = 0; i < length; i++) { // here I am getting length 1 instead of 3
monster = monsterList.monsters(i);
if (i == outputval[0]) {
outputval[0] = (int) monster.EmpNo();
break;
}
}
long endTime = System.currentTimeMillis() - startTime;
String textToShow = "Elements: " + monsterList.monstersLength() + ": load time: " + endTime + "ms";
String[] monsterArr = monster.Name().split(" ");
return monsterArr[0];
}
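For what it's worth, getRootAsMonsters only ever sees the single root written by one builder.finish() call, so appending several finished buffers to the same file still reads back as one vector of length 1. A sketch (reusing the generated Monsters/SingleJson API from the schema above; createOnePerson is a placeholder helper, not from the original code) of building one buffer that holds all entries before writing it out once:
// Sketch: collect all SingleJson offsets first, then build a single Monsters root.
FlatBufferBuilder builder = new FlatBufferBuilder(1024);
int[] people = new int[3];
for (int i = 0; i < people.length; i++) {
    // createSingleJson(...) as in addNewData above, one offset per person.
    people[i] = createOnePerson(builder, i); // placeholder helper
}
int monstersVector = Monsters.createMonstersVector(builder, people);
Monsters.startMonsters(builder);
Monsters.addMonsters(builder, monstersVector);
builder.finish(Monsters.endMonsters(builder));
byte[] buf = builder.sizedByteArray(); // write this once; monstersLength() is then 3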

Best practices to upload large files in chunks in Spring Boot

I have a big file and I want to upload it to the server. It is very important that if any problem occurs (like an internet interruption or a power cut ...), retrying the upload resumes from where it stopped and does not need to send the file from the beginning.
I tried this approach of sending file chunks, but it does not seem like a good way, because I send the chunks (byte arrays) directly in the request entity and that isn't a good idea.
In any case, if anybody can develop this approach further and make this code better, with better performance, I would appreciate it. Does anybody know the best-practice way of doing this?
And if you like my code, vote for it.
Thanks :)
RestController
@RestController
@RequestMapping("/files")
public class Controller {
@Autowired
private MyService service;
@PutMapping("/upload/resume")
public Mono<ResponseEntity> uploadWithResume(@RequestPart("chunk") byte[] chunk,
@RequestPart("fileName") String fileName,
@RequestParam("length") Long length
) throws ParseException {
try {
return service.fileResumeUpload(chunk, fileName, length);
} catch (IOException e) {
e.printStackTrace();
return Mono.just(ResponseEntity.status(HttpStatus.PERMANENT_REDIRECT).build());
}
}
@RequestMapping(value = "/get/uploaded/size", method = RequestMethod.HEAD)
public Mono<ResponseEntity> getUploadedSize(@RequestParam("fileName") String fileName) throws IOException {
if (Files.exists(Paths.get("src/main/resources/" + fileName))) {
String size = String.valueOf(Files.size(Paths.get("src/main/resources/" + fileName)));
return Mono.just(ResponseEntity.ok()
.header("upload-offset", size)
.build());
} else{
return Mono.just(ResponseEntity.notFound()
.header("upload-offset" , "0").build());
}
}
}
Service
public Mono<ResponseEntity> fileResumeUpload(byte[] chunk , String fileName,long length) throws IOException, ParseException {
BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream("src/main/resources/" + fileName, true));
boolean uploaded = true;
try {
out.write(chunk);
} catch (IOException e) {
uploaded = false;
System.err.println("io exception");
} finally {
if (uploaded) {
out.close();
return Mono.just(ResponseEntity.ok()
.header("expiration-date", getExpirationDate())
.build());
} else {
out.close();
return Mono.just(ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build());
}
}
}
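One detail worth sketching here (this is my own addition, not part of the service above): if the client also sends the offset it is resuming from, each chunk can be written at an explicit position instead of blindly appended, which makes a retried chunk harmless.
// Sketch: idempotent chunk write. The offset would come from the client, e.g. the value
// previously returned in the upload-offset header by /get/uploaded/size.
public void writeChunkAt(String fileName, byte[] chunk, long offset) throws IOException {
    try (RandomAccessFile out = new RandomAccessFile("src/main/resources/" + fileName, "rw")) {
        out.seek(offset);   // position at the byte the client claims to resume from
        out.write(chunk);   // re-sending the same chunk just rewrites the same bytes
    }
}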
Sending chunks with webTestClient
@Test
public void test1_upload_Expected_200StatusCode(){
try {
String fileName = "film.mkv";
RandomAccessFile raf = new RandomAccessFile(new File("src/test/resources/" + fileName), "rw");
long realSize = raf.length();
List<String> strings = webTestClient.head().uri("/files/get/uploaded/size?fileName=" + fileName)
.exchange().expectBody().returnResult().getResponseHeaders().get("upload-offset");
long uploadedSize = Long.valueOf(strings.get(0));
boolean f = false;
int sizeBuffer = 256 * 1024;
byte[] buffer = new byte[sizeBuffer];
MultiValueMap<String, Object> formData;
WebTestClient.ResponseSpec exchange = null;
System.out.println("first uploaded Size ; " + uploadedSize);
raf.seek(uploadedSize);
while (raf.read(buffer) != -1) {
formData = new LinkedMultiValueMap<>();
formData.add("fileName", fileName);
formData.add("chunk", buffer);
formData.add("length", realSize);
exchange = webTestClient.put().uri("/files/upload/resume")
.contentType(MediaType.MULTIPART_FORM_DATA)
.body(BodyInserters.fromMultipartData(formData))
.exchange();
exchange.expectStatus().isOk();
if (exchange.expectBody().returnResult().getStatus().is5xxServerError()) {
return;
}
if (uploadedSize + 256 * 1024 > realSize) {
sizeBuffer = ((int) (realSize - uploadedSize));
System.out.println(sizeBuffer);
uploadedSize = uploadedSize + sizeBuffer;
System.out.println(uploadedSize);
buffer = new byte[sizeBuffer];
f=true;
} else uploadedSize = uploadedSize + sizeBuffer;
if (f) System.out.println(uploadedSize);
//System.out.println(uploadedSize);
float percent = ((float) uploadedSize / realSize * 100);
System.out.format("%.2f\n", percent);
}
if (exchange!=null)
exchange.expectStatus().isOk();
}
catch (Exception e){
e.printStackTrace();
System.err.println("channel closed!!!");
}
}

FileBackedOutputStream on App Engine

My application on App Engine creates a CSV file with more than 65535 rows.
But I get an OutOfMemoryError when writing:
java.lang.OutOfMemoryError: Java heap space
at java.util.Arrays.copyOf(Arrays.java:2271)
at java.io.ByteArrayOutputStream.grow(ByteArrayOutputStream.java:118)
at java.io.ByteArrayOutputStream.ensureCapacity(ByteArrayOutputStream.java:93)
at java.io.ByteArrayOutputStream.write(ByteArrayOutputStream.java:153)
With this code:
public static byte[] joinLines(Collection<String> lines) {
final ByteArrayOutputStream stream = new ByteArrayOutputStream();
boolean firstElement = true;
for (final String part : lines) {
String value = part + LINE_SEPARATOR;
if (firstElement) {
value = addExcelPrefix(value);
firstElement = false;
}
final int currentSize = value.length();
try {
stream.write(value.getBytes(ENCODING), 0, currentSize); // OutOfMemoryError HERE
} catch (UnsupportedEncodingException e) {
LOGGER.info(e.getMessage());
}
}
return stream.toByteArray();
}
So I used Guava's FileBackedOutputStream to solve the OutOfMemoryError:
public static byte[] joinLines(Collection<String> lines) throws IOException {
final FileBackedOutputStream stream = new FileBackedOutputStream(THRESHOLD, true);
boolean firstElement = true;
for (final String part : lines) {
String value = part + LINE_SEPARATOR;
if (firstElement) {
value = addExcelPrefix(value);
firstElement = false;
}
final int currentSize = value.length();
try {
stream.write(value.getBytes(ENCODING), 0, currentSize);
} catch (IOException e) {
LOGGER.error(e.getMessage());
}
}
return stream.asByteSource().read();
}
But on App Engine I now get a SecurityException when the temporary file is created:
java.lang.SecurityException: Unable to create temporary file
at java.io.File.checkAndCreate(File.java:2083)
at java.io.File.createTempFile(File.java:2198)
at java.io.File.createTempFile(File.java:2244)
at com.google.common.io.FileBackedOutputStream.update(FileBackedOutputStream.java:196)
at com.google.common.io.FileBackedOutputStream.write(FileBackedOutputStream.java:178)
How can I create a temporary file on App Engine with FileBackedOutputStream? Or in a bucket, how?
Thanks
I used GcsService, which solves my problem:
protected String uploadBytesForCsv(Map<Integer, Map<Integer, Object>> rows) throws IOException {
LOGGER.info("Get Bytes For Csv");
final Collection<String> lines = cellsToCsv(rows);
LOGGER.info("number line : " + lines.size());
boolean firstElement = true;
final String fileName = getFileName();
final GcsFilename gcsFilename = new GcsFilename(config.getBucketName(), fileName);
final GcsService gcsService = GcsServiceFactory.createGcsService();
final GcsOutputChannel outputChannel = gcsService.createOrReplace(gcsFilename, GcsFileOptions.getDefaultInstance());
for (final String part : lines) {
final ByteArrayOutputStream stream = new ByteArrayOutputStream();
String value = part + LINE_SEPARATOR;
if (firstElement) {
value = addExcelPrefix(value);
firstElement = false;
}
final int currentSize = value.length();
try {
stream.write(value.getBytes(ENCODING), 0, currentSize);
outputChannel.write(ByteBuffer.wrap(stream.toByteArray()));
} catch (UnsupportedEncodingException e) {
LOGGER.info(e.getMessage());
}
stream.flush();
stream.close();
}
outputChannel.close();
return new UrlBuilder(config.getStorageUrlForExport())
.setBucketName(config.getBucketName())
.setFilename(fileName).build();
}
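A slightly leaner variant of the same idea is possible; this is only a sketch, assuming the same gcsService, gcsFilename, LINE_SEPARATOR, ENCODING and addExcelPrefix helpers, that writes each encoded line straight to the channel instead of going through a per-line ByteArrayOutputStream:
// Sketch: stream each CSV line straight to Cloud Storage; nothing accumulates on the heap.
final GcsOutputChannel outputChannel =
        gcsService.createOrReplace(gcsFilename, GcsFileOptions.getDefaultInstance());
boolean firstElement = true;
for (final String part : lines) {
    String value = part + LINE_SEPARATOR;
    if (firstElement) {
        value = addExcelPrefix(value);
        firstElement = false;
    }
    outputChannel.write(ByteBuffer.wrap(value.getBytes(ENCODING)));
}
outputChannel.close();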
