Java application

Here’s the sample Java application. It creates a bucket, grants bucket and object permissions, stores two objects, lists and retrieves objects, performs a conditional download, and then deletes everything it created.

/**
* This sample Java application shows how to use the HCP HS3 API, which is
* compatible with Amazon S3. The application uses the Amazon S3 SDK.
*/
package com.hds.hcp.examples;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.AccessControlList;
import com.amazonaws.services.s3.model.CanonicalGrantee;
import com.amazonaws.services.s3.model.EmailAddressGrantee;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.Permission;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.amazonaws.services.s3.model.S3ObjectSummary;

public class HS3SampleApp {

    /**
     * @param args
     */
    public static void main(String[] args) {

        /*
         * Initialize access credentials for the HS3 client.
         */
        // Base64 encoding of the HCP username "lgreen"
        String accessKey = "bGdyZWVu";
        // Hex-encoded MD5 hash of the HCP user password "p4ssw0rd"
        String secretKey = "2a9d119df47ff993b662a8ef36f9ea20";
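
        /*
         * A minimal sketch (not part of the original sample) of how these
         * credential values could be derived at runtime instead of being
         * hard-coded: the access key is the Base64 encoding of the HCP
         * username and the secret key is the hex-encoded MD5 hash of the
         * password, as noted above. The code is shown commented out; if
         * used, it needs handling for java.security.NoSuchAlgorithmException.
         *
         *   String derivedAccessKey = java.util.Base64.getEncoder()
         *           .encodeToString("lgreen".getBytes(
         *                   java.nio.charset.StandardCharsets.UTF_8));
         *   byte[] digest = java.security.MessageDigest.getInstance("MD5")
         *           .digest("p4ssw0rd".getBytes(
         *                   java.nio.charset.StandardCharsets.UTF_8));
         *   StringBuilder derivedSecretKey = new StringBuilder();
         *   for (byte b : digest) {
         *       derivedSecretKey.append(String.format("%02x", b));
         *   }
         */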

        /*
         * Set up the client configuration to allow a maximum of 200 HTTP
         * connections, which is an HCP best practice.
         */
        ClientConfiguration myClientConfig = new ClientConfiguration();
        myClientConfig.setMaxConnections(200);

        /*
         * By default, the AWS SDK uses the HTTPS protocol and validates
         * certificates against a certificate authority. The certificates
         * installed on HCP by default are self-signed. If these self-signed
         * certificates are used, certificate validation must be disabled.
         */
        myClientConfig.setProtocol(Protocol.HTTPS);
        System.setProperty("com.amazonaws.sdk.disableCertChecking", "true");

        /*
         * Build the hs3Client to be used for communication with HCP.
         */
        AmazonS3 hs3Client = new AmazonS3Client(
                                     new BasicAWSCredentials(accessKey, secretKey),
                                     myClientConfig);

        // Set up the service point to be the tenant in HCP.
        hs3Client.setEndpoint("europe.hcp.example.com");

        /*
         * Now that the hs3Client is created for HCP usage, proceed with some
         * operations.
         */
        String bucketName = "finance";

        try {
            /*
             * Create a new bucket. With HCP, the bucket name does not need
             * to be globally unique. It needs to be unique only within the HCP
             * service point (that is, the HCP tenant).
             */
            System.out.println("Creating bucket " + bucketName + "\n");
            hs3Client.createBucket(bucketName);

            /*
             * List the buckets you own at the service point.
             */
            System.out.println("Buckets:");
            for (Bucket bucket : hs3Client.listBuckets()) {
                System.out.println(" * " + bucket.getName());
            }
            System.out.println();

            /*
             * Add a grant to the bucket ACL to give read permission to the
             * user with the specified user ID.
             */
            AccessControlList bucketACL = hs3Client.getBucketAcl(bucketName);
            bucketACL.grantPermission(
                new CanonicalGrantee("7370bb2d-033c-4f05-863e-35a4eaf1d739"),
                Permission.Read);
            hs3Client.setBucketAcl(bucketName, bucketACL);
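
            /*
             * Optional check (not part of the original sample): re-read the
             * bucket ACL and print its grants to confirm that the new read
             * permission is present. Grant, Grantee.getIdentifier(), and
             * getPermission() are standard AWS SDK for Java types and methods;
             * getGrantsAsList() is available in recent 1.x versions of the
             * SDK (older versions provide getGrants() instead).
             *
             *   for (com.amazonaws.services.s3.model.Grant grant
             *           : hs3Client.getBucketAcl(bucketName).getGrantsAsList()) {
             *       System.out.println(" * " + grant.getGrantee().getIdentifier()
             *                          + " -> " + grant.getPermission());
             *   }
             */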

            /*
             * Upload a couple of objects to the bucket from files on the local file
             * system.
             */
            String objectNamePrefix = "quarterly_rpts/";

            // Set up metadata for the first object.
            String firstFileName = "input/Q4_2012.ppt";
            ObjectMetadata metadata = new ObjectMetadata();
            metadata.addUserMetadata("Author", "P.D. Gray");
            metadata.addUserMetadata("Audit_Date", "2013-02-23");
            // Content-Length must be set because the application will use an
            // InputStream during the PUT. Otherwise, the whole file would be
            // read into memory, which could cause the application to run out
            // of memory.
            metadata.setContentLength(new File(firstFileName).length());

            System.out.println("Uploading first object to HCP from a file\n");
            String firstObjectName = objectNamePrefix + "Q4_2012.ppt";
            hs3Client.putObject(new PutObjectRequest(
                                                  bucketName,
                                                  firstObjectName,
                                                  new FileInputStream(firstFileName),
                                                  metadata));

            // Write a second object without metadata. Also collect its ETag for
            // later usage.
            System.out.println("Uploading second object to HCP from a file\n");
            String secondObjectName = objectNamePrefix + "Q3_2012.ppt";
            PutObjectResult result = hs3Client.putObject(
                                            new PutObjectRequest(
                                                  bucketName,
                                                  secondObjectName,
                                                  new File("input/Q3_2012.ppt")));
            String secondObjectEtag = result.getETag();

            /*
             * List objects in the bucket with prefix quarterly_rpts/Q.
             * The bucket listing is limited to 1,000 items per request.
             * Be sure to check whether the returned listing has been
             * truncated. If it has, retrieve additional results by using
             * the AmazonS3.listNextBatchOfObjects(...) operation.
             */
            System.out.println("Objects:");
            ObjectListing objectListing = hs3Client.listObjects(
                                            new ListObjectsRequest()
                                                  .withBucketName(bucketName)
                                                  .withPrefix(objectNamePrefix
                                                              + "Q"));
            for (S3ObjectSummary objectSummary
                  : objectListing.getObjectSummaries()) {
                System.out.println(" * " + objectSummary.getKey() + " " +
                                   "(size = " + objectSummary.getSize() + ")");
            }
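
            /*
             * Addition to the original sample, following the note above: if
             * the listing was truncated at 1,000 items, keep fetching and
             * printing further batches with
             * AmazonS3.listNextBatchOfObjects(...) until the listing is no
             * longer truncated.
             */
            while (objectListing.isTruncated()) {
                objectListing = hs3Client.listNextBatchOfObjects(objectListing);
                for (S3ObjectSummary objectSummary
                      : objectListing.getObjectSummaries()) {
                    System.out.println(" * " + objectSummary.getKey() + " " +
                                       "(size = " + objectSummary.getSize() + ")");
                }
            }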
            System.out.println();

            /*
             * Download an object. When you download an object, you get all
             * the object metadata and a stream from which to read the object
             * content.
             */
            System.out.println("Downloading the first object\n");

            S3Object firstObject = hs3Client.getObject(
                                              new GetObjectRequest(bucketName,
                                                                firstObjectName));

            // Write the content to a file named Q4_2012.ppt in the output folder.
            S3ObjectInputStream responseStream = firstObject.getObjectContent();
            FileOutputStream dataFile = new FileOutputStream("output/Q4_2012.ppt");

            // Keep reading bytes until the end of stream is reached.
            byte buffer[] = new byte[2048];
            int readSize;
            while (-1 != (readSize = responseStream.read(buffer))) {
                dataFile.write(buffer, 0, readSize);
            }

            dataFile.close();
            responseStream.close();

            /*
             * Add a grant to the first object's ACL to give full control to
             * the user with the username rsilver. HCP looks up the user ID
             * based on the username.
             */
            AccessControlList objectACL = hs3Client.getObjectAcl(bucketName,
                                                                   firstObjectName);
            objectACL.grantPermission(new EmailAddressGrantee("rsilver"),
                                      Permission.FullControl);
            hs3Client.setObjectAcl(bucketName, firstObjectName, objectACL);

            /*
             * Perform a conditional download of the second object. The object
             * is retrieved only if its ETag no longer matches the ETag
             * received when the object was stored.
             */
            System.out.println("Checking the second object");
            GetObjectRequest conditionalRequest
                = new GetObjectRequest(bucketName, secondObjectName)
                            .withNonmatchingETagConstraint(secondObjectEtag);
            S3Object conditionalObject = hs3Client.getObject(conditionalRequest);
            if (null == conditionalObject) {
                System.out.println(" The object did not change; not downloaded.\n");
            } else {
                // The object has changed, download it to a new file.

                System.out.println(
                    " The object changed; downloading new revision\n");

                S3ObjectInputStream refreshResponseStream
                                        = conditionalObject.getObjectContent();
                FileOutputStream dataFile2
                                   = new FileOutputStream("output/Q3_2012_Rev2.ppt");

                // Keep reading bytes until the end of stream is reached.
                byte readBuffer[] = new byte[2048];
                int conditionalReadSize;
                while (-1 != (conditionalReadSize
                                    = refreshResponseStream.read(readBuffer))) {
                    dataFile2.write(readBuffer, 0, conditionalReadSize);
                }
                dataFile2.close();
                refreshResponseStream.close();
            }

            /*
             * Delete the objects.
             */
            System.out.println(
                "Deleting the objects created by this sample application\n");
            hs3Client.deleteObject(bucketName, firstObjectName);
            hs3Client.deleteObject(bucketName, secondObjectName);

            /*
             * Delete the folder.
             */
            System.out.println(
                "Deleting the folder created when the first object was stored\n");
            hs3Client.deleteObject(bucketName, objectNamePrefix);

            /*
             * Delete the bucket.
             */
            System.out.println("Deleting the finance bucket\n");
            hs3Client.deleteBucket(bucketName);

        } catch (AmazonServiceException ase) {
            System.out.println(
                "Caught an AmazonServiceException, which means the request made it "
                    + "to HCP but was rejected for some reason.");
            System.out.println("Error Message: " + ase.getMessage());
            System.out.println("HTTP Status Code: " + ase.getStatusCode());
            System.out.println("AWS Error Code: " + ase.getErrorCode());
            System.out.println("Error Type: " + ase.getErrorType());
            System.out.println("Request ID: " + ase.getRequestId());
        } catch (AmazonClientException ace) {
            System.out.println(
                "Caught an AmazonClientException, which means the client encountered "
                    + "a serious internal problem while trying to communicate with "
                    + "HCP via HS3, such as not being able to access the network.");
            System.out.println("Error Message: " + ace.getMessage());
        } catch (IOException ioe) {
            System.out.println(
               "Caught an IOException while trying to create an object or read "
                    + "from an internal buffer.");
            System.out.println("Error Message: " + ioe.getMessage());
        }
    }
}
