I am able to call AWS Textract to read an image from my local path. How can I integrate this Textract code with the S3 bucket code below, so that it reads an image uploaded to an S3 bucket I have created?
Working Textract code that extracts text from an image on a local path:
package aws.cloud.work;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.io.InputStream;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.textract.AmazonTextract;
import com.amazonaws.services.textract.AmazonTextractClientBuilder;
import com.amazonaws.services.textract.model.DetectDocumentTextRequest;
import com.amazonaws.services.textract.model.DetectDocumentTextResult;
import com.amazonaws.services.textract.model.Document;
import com.amazonaws.util.IOUtils;
public class TextractDemo {
static AmazonTextractClientBuilder clientBuilder = AmazonTextractClientBuilder.standard()
.withRegion(Regions.US_EAST_1);
private static FileWriter file;
public static void main(String[] args) throws IOException {
//AWS Credentials to access AWS Textract services
clientBuilder.setCredentials(new AWSStaticCredentialsProvider(
new BasicAWSCredentials("Access Key", "Secret key")));
//Path of the image to run Textract on. Can be configured to read from S3 instead.
String document="C:\\Users\\image-local-path\\sampleTT.jpg";
ByteBuffer imageBytes;
//Code to use AWS Textract services
try (InputStream inputStream = new FileInputStream(new File(document))) {
imageBytes = ByteBuffer.wrap(IOUtils.toByteArray(inputStream));
}
AmazonTextract client = clientBuilder.build();
DetectDocumentTextRequest request = new DetectDocumentTextRequest()
.withDocument(new Document().withBytes(imageBytes));
DetectDocumentTextResult result = client.detectDocumentText(request);
System.out.println(result);
JSONObject obj = new JSONObject();
result.getBlocks().forEach(block -> {
if (block.getBlockType().equals("LINE"))
System.out.println("text is " + block.getText() + " confidence is " + block.getConfidence());
JSONArray fields = new JSONArray();
fields.add(block.getText() + " , " + block.getConfidence());
obj.put(block.getText(), fields);
});
//Write the JSON results to sample.txt
try {
file = new FileWriter("/Users/output-path/sample.txt");
file.write(obj.toJSONString());
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
file.flush();
file.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
This is an example of the console output, where the "text" and the corresponding "confidence scores" are returned.
S3 bucket code integration I managed to find from the docs:
String document = "sampleTT.jpg";
String bucket = "textract-images";
AmazonS3 s3client = AmazonS3ClientBuilder.standard()
.withEndpointConfiguration(
new EndpointConfiguration("https://s3.amazonaws.com","us-east-1"))
.build();
// Get the document from S3
com.amazonaws.services.s3.model.S3Object s3object = s3client.getObject(bucket, document);
S3ObjectInputStream inputStream = s3object.getObjectContent();
BufferedImage image = ImageIO.read(inputStream);
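A minimal sketch of one way the two snippets could be merged (not verified against the original setup): Textract's Document can reference the S3 object directly, so the image does not need to be downloaded into a ByteBuffer first. The bucket and key names are the ones from the snippet above.
// Sketch: let Textract read the image straight from S3, reusing the client setup from the Textract code
AmazonTextract textractClient = clientBuilder.build();
DetectDocumentTextRequest request = new DetectDocumentTextRequest()
.withDocument(new Document()
.withS3Object(new com.amazonaws.services.textract.model.S3Object()
.withBucket(bucket)    // "textract-images"
.withName(document))); // "sampleTT.jpg"
DetectDocumentTextResult result = textractClient.detectDocumentText(request);
result.getBlocks().forEach(block -> {
if (block.getBlockType().equals("LINE"))
System.out.println("text is " + block.getText() + " confidence is " + block.getConfidence());
});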
(Edited) - Thanks @smac2020. I currently have working Rekognition code that reads from the S3 bucket in my AWS console and runs the Rekognition service I am referencing. However, I am unable to modify it and merge it with the Textract source code.
package com.amazonaws.samples;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.rekognition.AmazonRekognition;
import com.amazonaws.services.rekognition.AmazonRekognitionClientBuilder;
import com.amazonaws.services.rekognition.model.AmazonRekognitionException;
import com.amazonaws.services.rekognition.model.DetectLabelsRequest;
import com.amazonaws.services.rekognition.model.DetectLabelsResult;
import com.amazonaws.services.rekognition.model.Image;
import com.amazonaws.services.rekognition.model.Label;
import com.amazonaws.services.rekognition.model.S3Object;
import java.util.List;
public class DetectLabels {
public static void main(String[] args) throws Exception {
String photo = "sampleTT.jpg";
String bucket = "Textract-bucket";
// AmazonRekognition rekognitionClient = AmazonRekognitionClientBuilder.standard().withRegion("ap-southeast-1").build();
AWSCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider (new BasicAWSCredentials("Access Key", "Secret Key"));
AmazonRekognition rekognitionClient = AmazonRekognitionClientBuilder.standard().withCredentials(credentialsProvider).withRegion("ap-southeast-1").build();
DetectLabelsRequest request = new DetectLabelsRequest()
.withImage(new Image()
.withS3Object(new S3Object()
.withName(photo).withBucket(bucket)))
.withMaxLabels(10)
.withMinConfidence(75F);
try {
DetectLabelsResult result = rekognitionClient.detectLabels(request);
List <Label> labels = result.getLabels();
System.out.println("Detected labels for " + photo);
for (Label label: labels) {
System.out.println(label.getName() + ": " + label.getConfidence().toString());
}
} catch(AmazonRekognitionException e) {
e.printStackTrace();
}
}
}
Looks like you are trying to read an Amazon S3 object from a Spring Boot app and then pass that byte array to DetectDocumentTextRequest.
There is a tutorial that shows a very similar use case, where a Spring Boot app reads the bytes from an Amazon S3 object and passes them to the Amazon Rekognition service (instead of Textract).
The Java code is:
// Get the byte[] from this AWS S3 object.
public byte[] getObjectBytes (String bucketName, String keyName) {
s3 = getClient(); // s3 is the S3Client field that getClient() initializes elsewhere in the tutorial's service class
try {
GetObjectRequest objectRequest = GetObjectRequest
.builder()
.key(keyName)
.bucket(bucketName)
.build();
ResponseBytes<GetObjectResponse> objectBytes = s3.getObjectAsBytes(objectRequest);
byte[] data = objectBytes.asByteArray();
return data;
} catch (S3Exception e) {
System.err.println(e.awsErrorDetails().errorMessage());
System.exit(1);
}
return null;
}
See this AWS development article to learn how to build a Spring Boot app that has this functionality:
Creating an example AWS photo analyzer application using the AWS SDK for Java
This example uses the AWS SDK for Java V2. If you are not familiar with working with the latest SDK version, I recommend that you start here:
Get started with the AWS SDK for Java 2.x
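To complete the picture, here is a rough sketch (assumed, not taken from the tutorial) of how the byte array returned by getObjectBytes could be handed to Textract with the same V2 SDK:
import software.amazon.awssdk.core.SdkBytes;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.textract.TextractClient;
import software.amazon.awssdk.services.textract.model.BlockType;
import software.amazon.awssdk.services.textract.model.DetectDocumentTextRequest;
import software.amazon.awssdk.services.textract.model.DetectDocumentTextResponse;
import software.amazon.awssdk.services.textract.model.Document;
// Sketch: pass the S3 object's bytes to Textract (V2 SDK); the region is an example value
public void detectText(byte[] data) {
TextractClient textract = TextractClient.builder().region(Region.US_EAST_1).build();
DetectDocumentTextRequest request = DetectDocumentTextRequest.builder()
.document(Document.builder().bytes(SdkBytes.fromByteArray(data)).build())
.build();
DetectDocumentTextResponse response = textract.detectDocumentText(request);
response.blocks().forEach(block -> {
if (block.blockType() == BlockType.LINE)
System.out.println("text is " + block.text() + " confidence is " + block.confidence());
});
}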
Related
When I give the source URI of a folder inside my bucket (which has around 400 CSV files) in the Java program, it does not move any files to the BQ table. If I try with a single CSV file, it works.
package com.example.bigquerydatatransfer;
import com.google.api.gax.rpc.ApiException;
import com.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest;
import com.google.cloud.bigquery.datatransfer.v1.DataTransferServiceClient;
import com.google.cloud.bigquery.datatransfer.v1.ProjectName;
import com.google.cloud.bigquery.datatransfer.v1.TransferConfig;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
// Sample to create google cloud storage transfer config
public class Cloud_to_BQ {
public static void main(String[] args) throws IOException {
final String projectId = "dfp-bq";
String datasetId = "mytest1";
String tableId = "PROG_DATA";
String sourceUri = "gs://dfp-bq/C:\\PROG_Reports";
String fileFormat = "CSV";
String fieldDelimiter = ",";
String skipLeadingRows = "1";
Map<String, Value> params = new HashMap<>();
params.put(
"destination_table_name_template", Value.newBuilder().setStringValue(tableId).build());
params.put("data_path_template", Value.newBuilder().setStringValue(sourceUri).build());
params.put("write_disposition", Value.newBuilder().setStringValue("APPEND").build());
params.put("file_format", Value.newBuilder().setStringValue(fileFormat).build());
params.put("field_delimiter", Value.newBuilder().setStringValue(fieldDelimiter).build());
params.put("skip_leading_rows", Value.newBuilder().setStringValue(skipLeadingRows).build());
TransferConfig transferConfig =
TransferConfig.newBuilder()
.setDestinationDatasetId(datasetId)
.setDisplayName("Trial_Run_PROG_DataTransfer")
.setDataSourceId("google_cloud_storage")
.setParams(Struct.newBuilder().putAllFields(params).build())
.setSchedule("every 24 hours")
.build();
createCloudStorageTransfer(projectId, transferConfig);
}
public static void createCloudStorageTransfer(String projectId, TransferConfig transferConfig)
throws IOException {
try (DataTransferServiceClient client = DataTransferServiceClient.create()) {
ProjectName parent = ProjectName.of(projectId);
CreateTransferConfigRequest request =
CreateTransferConfigRequest.newBuilder()
.setParent(parent.toString())
.setTransferConfig(transferConfig)
.build();
TransferConfig config = client.createTransferConfig(request);
System.out.println("Cloud storage transfer created successfully :" + config.getName());
} catch (ApiException ex) {
System.out.print("Cloud storage transfer was not created." + ex.toString());
}
}
}
Is there any way I can move all the files to the BQ table in one go?
2022-08-04T07:27:50.185847509Z No files found matching: "gs://dfp-bq/C:\PROG_Reports" - this is from the BQ logs for the run.
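Judging by that log line, the Windows-style path is being passed through literally as the data_path_template. The Cloud Storage transfer expects a gs:// URI and supports wildcards, so a sketch of the corrected parameter (assuming the CSV files actually sit under a PROG_Reports prefix in the dfp-bq bucket) would be:
// Sketch: point data_path_template at a GCS prefix with a wildcard, not a local Windows path
String sourceUri = "gs://dfp-bq/PROG_Reports/*.csv"; // assumed object layout; adjust to the real prefix
params.put("data_path_template", Value.newBuilder().setStringValue(sourceUri).build());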
Can someone give me a direction on how I can implement this AWS email template tutorial in Java code? Through Java code I want to create the AWS email template, set the parameter values for the template, and send the email.
I can't find any tutorial or guidance for translating the above requests into Java code.
The "code" in your link is actually just some JSON templates for sending and formatting email, and a few calls to an AWS command line tool. If you need to make AWS send email calls from a Java process then you need to take a look at:
The SES API
The Javadoc for the Java client lib
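As a rough sketch (not from the linked tutorial), creating the template itself from Java with the V1 SES client could look something like the following; the region, subject, and body parts are placeholder values, and the template name matches the one used in the answer below:
import com.amazonaws.services.simpleemail.AmazonSimpleEmailService;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailServiceClientBuilder;
import com.amazonaws.services.simpleemail.model.CreateTemplateRequest;
import com.amazonaws.services.simpleemail.model.Template;
public class CreateSesTemplate {
public static void main(String[] args) {
AmazonSimpleEmailService client = AmazonSimpleEmailServiceClientBuilder.standard()
.withRegion("us-east-1")
.build();
// Placeholder template; {{FULL_NAME}} and {{USERNAME}} are replaced per recipient at send time
Template template = new Template()
.withTemplateName("welcome-email-en_GB-v1")
.withSubjectPart("Welcome, {{FULL_NAME}}!")
.withTextPart("Hi {{FULL_NAME}}, your username is {{USERNAME}}.")
.withHtmlPart("<p>Hi {{FULL_NAME}}, your username is <b>{{USERNAME}}</b>.</p>");
client.createTemplate(new CreateTemplateRequest().withTemplate(template));
}
}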
I was able to code it successfully. Pasting the example code here.
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailService;
import com.amazonaws.services.simpleemail.AmazonSimpleEmailServiceClientBuilder;
import com.amazonaws.services.simpleemail.model.BulkEmailDestination;
import com.amazonaws.services.simpleemail.model.BulkEmailDestinationStatus;
import com.amazonaws.services.simpleemail.model.Destination;
import com.amazonaws.services.simpleemail.model.SendBulkTemplatedEmailRequest;
import com.amazonaws.services.simpleemail.model.SendBulkTemplatedEmailResult;
public class AmazonSESSample2 {
public static void main(String[] args) throws IOException {
String accessKeyId = "accessKeyId";
String secretKeyId = "secretKeyId";
String region = "us-east-1";
List<BulkEmailDestination> listBulkEmailDestination = null;
SendBulkTemplatedEmailRequest sendBulkTemplatedEmailRequest = null;
try {
AmazonSimpleEmailService client = getAmazonSESClient(accessKeyId, secretKeyId, region);
listBulkEmailDestination = new ArrayList<>();
for (String email : getReceivers()) {
String replacementData = "{"
+ "\"FULL_NAME\":\"AAA BBB\","
+ "\"USERNAME\":\"" + email + "\""
+ "}";
BulkEmailDestination bulkEmailDestination = new BulkEmailDestination();
bulkEmailDestination.setDestination(new Destination(Arrays.asList(email)));
bulkEmailDestination.setReplacementTemplateData(replacementData);
listBulkEmailDestination.add(bulkEmailDestination);
}
sendBulkTemplatedEmailRequest = new SendBulkTemplatedEmailRequest();
sendBulkTemplatedEmailRequest.setSource("noreply@mydomain.com");
sendBulkTemplatedEmailRequest.setTemplate("welcome-email-en_GB-v1");
sendBulkTemplatedEmailRequest.setDefaultTemplateData("{\"FULL_NAME\":\"friend\", \"USERNAME\":\"unknown\"}");
sendBulkTemplatedEmailRequest.setDestinations(listBulkEmailDestination);
SendBulkTemplatedEmailResult res = client.sendBulkTemplatedEmail(sendBulkTemplatedEmailRequest);
System.out.println("======================================");
System.out.println(res.getSdkResponseMetadata());
System.out.println("======================================");
for(BulkEmailDestinationStatus status : res.getStatus()) {
System.out.println(status.getStatus());
System.out.println(status.getError());
System.out.println(status.getMessageId());
}
} catch (Exception ex) {
System.out.println("The email was not sent. Error message: " + ex.getMessage());
ex.printStackTrace();
}
}
public static List<String> getReceivers() {
ArrayList<String> list = new ArrayList<>();
list.add("aaa+1@gmail.com");
list.add("aaa+2@gmail.com");
list.add("aaa+3@gmail.com");
list.add("aaa+4@gmail.com");
return list;
}
public static AmazonSimpleEmailService getAmazonSESClient(String accessKeyId, String secretKeyId, String region) {
BasicAWSCredentials awsCreds = new BasicAWSCredentials(accessKeyId, secretKeyId);
AmazonSimpleEmailService client = AmazonSimpleEmailServiceClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(awsCreds))
.withRegion(region)
.build();
return client;
}
}
I am creating a simple application where I want to upload a file to my AWS S3 bucket. Here is my code:
import java.io.File;
import java.io.IOException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.SdkClientException;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.fasterxml.jackson.*;
public class UploadFileInBucket {
public static void main(String[] args) throws IOException {
String clientRegion = "<myRegion>";
String bucketName = "<myBucketName>";
String stringObjKeyName = "testobject";
String fileObjKeyName = "testfileobject";
String fileName = "D:\\Attachments\\LICENSE";
try {
BasicAWSCredentials awsCreds = new BasicAWSCredentials("<myAccessKey>", "<mySecretKey>");
AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
.withRegion(clientRegion)
.withCredentials(new AWSStaticCredentialsProvider(awsCreds))
.build();
// Upload a text string as a new object.
s3Client.putObject(bucketName, stringObjKeyName, "Uploaded String Object");
// Upload a file as a new object with ContentType and title specified.
PutObjectRequest request = new PutObjectRequest(bucketName, fileObjKeyName, new File(fileName));
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentType("text/plain");
metadata.addUserMetadata("x-amz-meta-title", "someTitle");
request.setMetadata(metadata);
s3Client.putObject(request);
}
catch(AmazonServiceException e) {
// The call was transmitted successfully, but Amazon S3 couldn't process
// it, so it returned an error response.
e.printStackTrace();
}
catch(SdkClientException e) {
// Amazon S3 couldn't be contacted for a response, or the client
// couldn't parse the response from Amazon S3.
e.printStackTrace();
}
}
}
I am unable to upload the file and am getting this error:
Exception in thread "main" java.lang.NoSuchFieldError:
ALLOW_FINAL_FIELDS_AS_MUTATORS
at com.amazonaws.partitions.PartitionsLoader.<clinit>(PartitionsLoader.java:52)
at com.amazonaws.regions.RegionMetadataFactory.create(RegionMetadataFactory.java:30)
at com.amazonaws.regions.RegionUtils.initialize(RegionUtils.java:64)
at com.amazonaws.regions.RegionUtils.getRegionMetadata(RegionUtils.java:52)
at com.amazonaws.regions.RegionUtils.getRegion(RegionUtils.java:105)
at com.amazonaws.client.builder.AwsClientBuilder.getRegionObject(AwsClientBuilder.java:249)
at com.amazonaws.client.builder.AwsClientBuilder.withRegion(AwsClientBuilder.java:238)
at UploadFileInBucket.main(UploadFileInBucket.java:28)
I have added the required AWS bucket credentials, permissions and dependencies to execute this code.
What changes should I make in the code to get my file uploaded to the desired bucket?
It looks as though you either have the wrong version of the Jackson libraries or are somehow linking against multiple versions of them.
The AWS SDK for Java distribution contains a third-party/lib directory with the correct versions of all the third-party libraries that version of the SDK should be built with. Depending on which features of the SDK you are using you may not need all of them, but those are the specific third-party libraries you should be using.
You need to add Jackson to your classpath. Its classes are missing.
I don't know which version you need, but you can download them from the Jackson GitHub page: https://github.com/FasterXML/jackson/
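If it is unclear which Jackson build is actually being picked up at runtime (for example, when several versions end up on the classpath), a quick check like the sketch below can help; it only uses standard jackson-databind and JDK calls:
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.cfg.PackageVersion;
public class JacksonVersionCheck {
public static void main(String[] args) {
// Which jackson-databind version is resolved first on the classpath
System.out.println("jackson-databind version: " + PackageVersion.VERSION);
// Which jar ObjectMapper was actually loaded from
System.out.println("loaded from: " + ObjectMapper.class.getProtectionDomain().getCodeSource().getLocation());
}
}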
History for context:
I am trying to run a web job from an HTTP client. The file is a ZIP file and contains a Java class plus a bat file to run that Java class. This runs okay when I do it from Postman, but when I use an HTTP client I always get the following error: "'---i-NPsGbTVUpaP0CeJxMQVrHoDHvaxo3' is not recognized as an internal or external command" - Please help – Jagaran yesterday
@Jagaran if it only happens from some clients, it is likely unrelated. Please ask a new question – David Ebbo 21 hours ago
It is not specific to any HTTP client I use in Java; they all behave the same. It works with cURL or when uploading from the web console. My sample code is below – Jagaran 2 hours ago
It is not specific to any HTTP client I use in Java; they all behave the same. It works with cURL or when uploading from the web console.
Do you have any sample Java-based HTTP client with which I can publish an Azure WebJob? I have tried all the Java REST clients.
Maybe I am doing something wrong. The error I get in the Azure console is: '---i-NPsGbTVUpaP0CeJxMQVrHoDHvaxo3' is not recognized as an internal or external command, operable program or batch file. [08/25/2017 09:30:22 > e7f683: ERR ]
I feel the Content-Type: application/zip header is not being set correctly when using Java. Please help us.
Sample Code:
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.http.entity.ContentType;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.Unirest;
/**
* @author jagaran.das
*
*/
public class AIPHTTPClient {
/**
* @param args
* @throws IOException
*/
@SuppressWarnings({ "unused", "rawtypes" })
public static void main(String[] args) throws IOException {
try {
URI uri = new AIPHTTPClient().getURI();
HttpResponse<InputStream> jsonResponse = Unirest.put("https://<URL>/api/triggeredwebjobs/TestJOb")
.basicAuth("$AzureWebJobTestBRMS", "XXXXX")
.header("content-disposition","attachement; filename=acvbgth.bat")
.field("file", new FileInputStream(new File(uri))
,ContentType.create("content-type: application/zip"),"AzureWebJob.zip").asBinary();
System.out.println(jsonResponse.getStatusText());
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public InputStream readZip() {
InputStream stream = null;
try {
// Open the zip and pull out run.bat
ZipFile zipFile = new ZipFile("/Users/jagaran.das/Documents/work/AIP/AzureWebJob.zip");
ZipEntry zipEntry = zipFile.getEntry("run.bat");
stream = zipFile.getInputStream(zipEntry);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
/* Alternative: iterate over every entry in the archive
try {
ZipFile zipFile = new ZipFile("/Users/jagaran.das/Documents/work/AIP/AzureWebJob.zip");
java.util.Enumeration<? extends ZipEntry> entries = zipFile.entries();
while (entries.hasMoreElements()) {
ZipEntry entry = entries.nextElement();
stream = zipFile.getInputStream(entry);
}
} catch (IOException e) {
e.printStackTrace();
} */
return stream;
}
public URI getURI() throws MalformedURLException {
File file = new File("/Users/jagaran.das/Documents/work/AIP/azure-poc/AzureWebJob.zip");
URI fileUri = file.toURI();
System.out.println("URI:" + fileUri);
URL fileUrl = file.toURI().toURL();
System.out.println("URL:" + fileUrl);
URL fileUrlWithoutSpecialCharacterHandling = file.toURL();
System.out.println("URL (no special character handling):" + fileUrlWithoutSpecialCharacterHandling);
return fileUri;
}
}
I've been a little too harsh in my answer before really trying stuff out. Apologies. I've now tried out your snippet and looks like you're hitting an issue with Unirest - probably this one.
My advice would be to just move to Apache's HTTP library.
Here's a working sample:
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.EntityBuilder;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;
import java.io.File;
public class App
{
public static void main( String[] args )
{
File sourceZipFile = new File("webjob.zip");
String kuduApiUrl = "https://yoursitename.scm.azurewebsites.net/api/zip/site/wwwroot/app_data/jobs/triggered/job988/";
HttpEntity httpEntity = EntityBuilder.create()
.setFile(sourceZipFile)
.build();
CredentialsProvider provider = new BasicCredentialsProvider();
UsernamePasswordCredentials credentials = new UsernamePasswordCredentials(
"$yoursitename", "SiteLevelPasSw0rD"
);
provider.setCredentials(AuthScope.ANY, credentials);
HttpClient client = HttpClientBuilder.create()
.setDefaultCredentialsProvider(provider)
.build();
HttpPut putRequest = new HttpPut(kuduApiUrl);
putRequest.setEntity(httpEntity);
// Kudu's Zip API expects application/zip
putRequest.setHeader("Content-type", "application/zip");
try {
HttpResponse response = client.execute(putRequest);
int statusCode = response.getStatusLine().getStatusCode();
HttpEntity entity = response.getEntity();
String resBody = EntityUtils.toString(entity, "UTF-8");
System.out.println(statusCode);
System.out.println(resBody);
}
catch (Exception e) {
e.printStackTrace();
}
}
}
That's sending Content-Type: application/zip and the raw zip contents in the body (no multipart horse manure). I've probably over-engineered the sample.. but it is what it is.
The upload is successful and the WebJob is published.
Glad that you have solved the issue; I will also provide a workaround for your reference.
To deploy a WebJob to Azure, in addition to using the REST API, you can also use FTP. The premise is that you need to know the directory WebJobs are uploaded to, which you can find via Kudu.
Here is a snippet of code using the FTP4J library:
import java.io.File;
import it.sauronsoftware.ftp4j.FTPClient;
public class UploadFileByFTP {
private static String hostName = "<your host name>";
private static String userName = "<user name>";
private static String password = "<password>";
public static void main(String[] args) {
try {
// create client
FTPClient client = new FTPClient();
// connect host
client.connect(hostName);
// log in
client.login(userName, password);
// print address
System.out.println(client);
// change directory
client.changeDirectory("/site/wwwroot/App_Data/jobs/continuous");
// current directory
String dir = client.currentDirectory();
System.out.println(dir);
File file = new File("D:/test.zip");
client.upload(file);
} catch (Exception e) {
e.printStackTrace();
}
}
}
You can follow this tutorial to configure your parameters.
I have been trying for a long time to upload a file to Google Cloud Storage using Java. By googling I found this code, but I can't understand it exactly. Can anyone please adapt it to upload a file to GCS?
// Given
InputStream inputStream; // object data, e.g., FileInputStream
long byteCount; // size of input stream
InputStreamContent mediaContent = new InputStreamContent("application/octet-stream", inputStream);
// Knowing the stream length allows server-side optimization, and client-side progress
// reporting with a MediaHttpUploaderProgressListener.
mediaContent.setLength(byteCount);
StorageObject objectMetadata = null;
if (useCustomMetadata) {
// If you have custom settings for metadata on the object you want to set
// then you can allocate a StorageObject and set the values here. You can
// leave out setBucket(), since the bucket is in the insert command's
// parameters.
objectMetadata = new StorageObject()
.setName("myobject")
.setMetadata(ImmutableMap.of("key1", "value1", "key2", "value2"))
.setAcl(ImmutableList.of(
new ObjectAccessControl().setEntity("domain-example.com").setRole("READER"),
new ObjectAccessControl().setEntity("user-administrator#example.com").setRole("OWNER")
))
.setContentDisposition("attachment");
}
Storage.Objects.Insert insertObject = storage.objects().insert("mybucket", objectMetadata,
mediaContent);
if (!useCustomMetadata) {
// If you don't provide metadata, you will have to specify the object
// name by parameter. You will probably also want to ensure that your
// default object ACLs (a bucket property) are set appropriately:
// https://developers.google.com/storage/docs/json_api/v1/buckets#defaultObjectAcl
insertObject.setName("myobject");
}
// For small files, you may wish to call setDirectUploadEnabled(true), to
// reduce the number of HTTP requests made to the server.
if (mediaContent.getLength() > 0 && mediaContent.getLength() <= 2 * 1000 * 1000 /* 2MB */) {
insertObject.getMediaHttpUploader().setDirectUploadEnabled(true);
}
insertObject.execute();
The recommended way to use Google Cloud Storage from Java is to use the Cloud Storage Client Libraries.
The GitHub page for this client gives several examples and resources to learn how to use it properly.
It also gives this code sample as an example of how to upload objects to Google Cloud Storage using the client library:
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Bucket;
import com.google.cloud.storage.BucketInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
// Create your service object
Storage storage = StorageOptions.getDefaultInstance().getService();
// Create a bucket
String bucketName = "my_unique_bucket"; // Change this to something unique
Bucket bucket = storage.create(BucketInfo.of(bucketName));
// Upload a blob to the newly created bucket
BlobId blobId = BlobId.of(bucketName, "my_blob_name");
BlobInfo blobInfo = BlobInfo.newBuilder(blobId).setContentType("text/plain").build();
Blob blob = storage.create(blobInfo, "a simple blob".getBytes(UTF_8));
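Since the question is about uploading a file rather than an in-memory string, a minimal variation of the snippet above (the local path and object name are placeholders) could read the file's bytes and pass them to the same create call:
import java.nio.file.Files;
import java.nio.file.Paths;
// Sketch: upload a local file instead of a literal string
byte[] fileBytes = Files.readAllBytes(Paths.get("/path/to/local/file.txt"));
BlobId fileBlobId = BlobId.of(bucketName, "my_uploaded_file.txt");
BlobInfo fileBlobInfo = BlobInfo.newBuilder(fileBlobId).setContentType("text/plain").build();
Blob uploadedBlob = storage.create(fileBlobInfo, fileBytes);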
I also tried using GCS directly, but it did not work for me. Finally, I did it using the ServletFileUpload class. Below is the code I wrote to create a Google Cloud Storage bucket and upload the file selected by the user to that bucket:
package com1.KT1;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.IOUtils;
import com.google.cloud.storage.Bucket;
import com.google.cloud.storage.BucketInfo;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobId;
public class TestBucket extends HttpServlet {
private static final long serialVersionUID = 1L;
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO Auto-generated method stub
response.getWriter().append("Served at: ").append(request.getContextPath());
}
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO Auto-generated method stub
// Instantiates a client
String targetFileStr ="";
List<FileItem> fileName = null;
Storage storage = StorageOptions.getDefaultInstance().getService();
// The name for the new bucket
String bucketName = "vendor-bucket13"; // "my-new-bucket";
// Creates the new bucket
Bucket bucket = storage.create(BucketInfo.of(bucketName));
//Object requestedFile = request.getParameter("filename");
ServletFileUpload sfu = new ServletFileUpload(new DiskFileItemFactory());
try {
fileName = sfu.parseRequest(request);
for(FileItem f:fileName)
{
try {
f.write (new File("/Users/tkmajdt/Documents/workspace/File1POC1/" + f.getName()));
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//targetFileStr = readFile("/Users/tkmajdt/Documents/workspace/File1POC1/" + f.getName(),Charset.defaultCharset());
targetFileStr = new String(Files.readAllBytes(Paths.get("/Users/tkmajdt/Documents/workspace/File1POC1/" + f.getName())));
}
}
//response.getWriter().print("File Uploaded Successfully");
//String content = readFile("test.txt", Charset.defaultCharset());
catch (FileUploadException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
/*if(requestedFile==null)
{
response.getWriter().print("File Not Found");
}*/
/*else
{
//String fileName = (String)requestedFile;
FileInputStream fisTargetFile = new FileInputStream(fileName);
targetFileStr = IOUtils.toString(fisTargetFile, "UTF-8");
}*/
BlobId blobId = BlobId.of(bucketName, "my_blob_name");
//Blob blob = bucket.create("my_blob_name", "a simple blob".getBytes("UTF-8"), "text/plain");
Blob blob = bucket.create("my_blob_name", targetFileStr.getBytes("UTF-8"), "text/plain");
//storage.delete("vendor-bucket3");
}
}
I uploaded the whole source code to GitHub.