Package com.amazonaws.services.glacier.model

Examples of com.amazonaws.services.glacier.model.GetJobOutputResult
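
Before the project excerpts below, here is a minimal, hedged sketch of the basic call pattern: issue a GetJobOutputRequest for a completed job, receive a GetJobOutputResult, and stream its body to disk. The client, vault name, and job ID variables are assumed to exist; none of the names are taken from the projects below.

    // Assumed context: glacierClient (an AmazonGlacier), vaultName, and the ID of a job
    // that has already completed. Imports come from com.amazonaws.services.glacier.model
    // and java.io.
    GetJobOutputRequest request = new GetJobOutputRequest()
            .withVaultName(vaultName)
            .withJobId(jobId);
    GetJobOutputResult result = glacierClient.getJobOutput(request);

    // getBody() is a streaming InputStream; copy it to a local file and let
    // try-with-resources close both streams.
    try (InputStream body = result.getBody();
         OutputStream out = new FileOutputStream("job-output.bin")) {
        byte[] buffer = new byte[8192];
        int read;
        while ((read = body.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
    }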


                GetJobOutputRequest req = new GetJobOutputRequest()
                    .withVaultName(vaultName)
                    .withRange("bytes=" + currentPosition + "-" + endPosition)
                    .withJobId(jobId)
                    .withGeneralProgressListener(progressListener)
                    ;
                GetJobOutputResult jobOutputResult = glacier.getJobOutput(req);
                try {
                    input = new TreeHashInputStream(new BufferedInputStream(jobOutputResult.getBody()));
                    appendToFile(output, input);
                } catch (NoSuchAlgorithmException e) {
                    throw failure(e, "Unable to compute hash for data integrity");
                } finally {
                    closeQuietly(input, log);
                }

                // Only do tree-hash check when the output checksum is returned from Glacier
                if (null != jobOutputResult.getChecksum()) {
                    // Checksum does not match
                    if (!input.getTreeHash().equalsIgnoreCase(jobOutputResult.getChecksum())) {
                        // Discard the chunk of bytes received
                        publishResponseBytesDiscarded(progressListener, chunkSize);
                        if (log.isDebugEnabled())
                            log.debug("reverting " + chunkSize);
                        throw new IOException("Client side computed hash doesn't match server side hash; possible data corruption");


    public void download(final String accountId, final String vaultName, final String archiveId, final File file)
            throws AmazonServiceException, AmazonClientException {

      JobStatusMonitor jobStatusMonitor = null;
      GetJobOutputResult jobOutputResult = null;
      try {
        if (credentialsProvider != null && clientConfiguration != null) {
          jobStatusMonitor = new JobStatusMonitor(credentialsProvider, clientConfiguration);
        } else {
          jobStatusMonitor = new JobStatusMonitor(sqs, sns);
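
This download() flow begins before GetJobOutput: an archive-retrieval job must be initiated first, and the JobStatusMonitor above waits on the SQS/SNS pair until Glacier reports that job as complete. A hedged sketch of that initiation step, with illustrative names (glacierClient and snsTopicArn in particular) that are not taken from this project:

    // Sketch: start the archive-retrieval job whose completion the monitor waits for.
    // glacierClient and snsTopicArn are assumed to exist; they are not shown in this excerpt.
    InitiateJobRequest initRequest = new InitiateJobRequest()
            .withAccountId(accountId)
            .withVaultName(vaultName)
            .withJobParameters(new JobParameters()
                    .withType("archive-retrieval")
                    .withArchiveId(archiveId)
                    .withSNSTopic(snsTopicArn));
    String jobId = glacierClient.initiateJob(initRequest).getJobId();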

    private void downloadOneChunk(String accountId, String vaultName, String jobId, RandomAccessFile output, long currentPosition, long endPosition) {
        TreeHashInputStream  input;
        int retries = 0;
        while (true) {
            try {
                GetJobOutputResult  jobOutputResult = glacier.getJobOutput(new GetJobOutputRequest()
                  .withAccountId(accountId)
                  .withVaultName(vaultName)
                  .withRange("bytes=" + Long.toString(currentPosition) + "-" + Long.toString(endPosition))
                  .withJobId(jobId));

                try {
                    input = new TreeHashInputStream(new BufferedInputStream(jobOutputResult.getBody()));
                } catch (NoSuchAlgorithmException e) {
                    throw new AmazonClientException("Unable to compute hash for data integrity: " + e.getMessage(), e);
                }

                appendToFile(output, input);

                // Only do tree-hash check when the output checksum is returned from Glacier
                if (null != jobOutputResult.getChecksum()) {
                    // Checksum does not match
                    if (!input.getTreeHash().equalsIgnoreCase(jobOutputResult.getChecksum())) {
                        throw new IOException("Client side computed hash doesn't match server side hash; possible data corruption");
                    }
                } else {
                    log.warn("Cannot validate the downloaded output since no tree-hash checksum is returned from Glacier. "
                            + "Make sure the InitiateJob and GetJobOutput requests use tree-hash-aligned ranges.");

            GetJobOutputRequest gjoRequest = new GetJobOutputRequest()
              .withVaultName(irVault)
              .withJobId(thisJobId);
            GetJobOutputResult gjoResult = irClient.getJobOutput(gjoRequest);
           
            Format formatter = new SimpleDateFormat("yyyyMMMdd_HHmmss");
            String fileDate = formatter.format(d);
           
            String fileName =  irVault + fileDate + ".txt";
               
            String filePath = ""+curDir+System.getProperty("file.separator")+fileName;
           
            FileWriter fileStream = new FileWriter(filePath);
             
            BufferedWriter out = new BufferedWriter(fileStream);
                       
            BufferedReader in = new BufferedReader(new InputStreamReader(gjoResult.getBody()));
           
            String inputLine;
           
            while ((inputLine = in.readLine()) != null)
            {
                // Copy each line of the job output into the local text file
                out.write(inputLine);
                out.newLine();
            }
            out.close();
            in.close();

    private void downloadOneChunk(String accountId, String vaultName, String jobId, RandomAccessFile output, long currentPosition, long endPosition) {
        TreeHashInputStream  input;
        int retries = 0;
        while (true) {
            try {
                GetJobOutputResult  jobOutputResult = glacier.getJobOutput(new GetJobOutputRequest()
                  .withAccountId(accountId)
                  .withVaultName(vaultName)
                  .withRange("bytes=" + Long.toString(currentPosition) + "-" + Long.toString(endPosition))
                  .withJobId(jobId));

                try {
                    input = new TreeHashInputStream(new BufferedInputStream(jobOutputResult.getBody()));
                } catch (NoSuchAlgorithmException e) {
                    throw new AmazonClientException("Unable to compute hash for data integrity: " + e.getMessage(), e);
                }

                appendToFile(output, input);

                // Checksum does not match
                if (!input.getTreeHash().equalsIgnoreCase(jobOutputResult.getChecksum())) {
                    throw new IOException("Client side computed hash doesn't match server side hash; possible data corruption");
                }

                // Successfully downloaded
                return;
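
Per-chunk checks like the one above can be backed by an end-to-end check once all chunks have been appended: the SDK's TreeHashGenerator computes the SHA-256 tree hash of the assembled file, which should match the SHA256TreeHash that DescribeJob reports for the retrieval job. A hedged sketch; downloadPath and describeJobResult are assumed to exist in the surrounding code:

    // Sketch: whole-file integrity check after all chunks have been appended.
    // TreeHashGenerator is com.amazonaws.services.glacier.TreeHashGenerator; downloadPath and
    // describeJobResult (from an earlier DescribeJob call for this job) are assumed to exist.
    String localTreeHash = TreeHashGenerator.calculateTreeHash(new File(downloadPath));
    String expectedTreeHash = describeJobResult.getSHA256TreeHash();
    if (!localTreeHash.equalsIgnoreCase(expectedTreeHash)) {
        throw new AmazonClientException("Downloaded file does not match the archive's tree hash");
    }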

  public void retrieveInventoryListing(final String vaultName, final String jobId) {
    log.info("Retrieving inventory for job id " + jobId + "...");

    try {
      final GetJobOutputRequest jobOutputRequest = new GetJobOutputRequest().withVaultName(vaultName).withJobId(jobId);
      final GetJobOutputResult jobOutputResult = client.getJobOutput(jobOutputRequest);
      final BufferedReader reader = new BufferedReader(new InputStreamReader(jobOutputResult.getBody()));
      String content = "";
      String line = null;
      while ((line = reader.readLine()) != null) {
        content += line;
      }
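
Because an inventory-retrieval job returns JSON, the accumulated content string is usually parsed rather than logged as-is. A hedged sketch using Jackson (already a dependency of the AWS SDK); the field names follow Glacier's documented inventory format, but treat the exact shape as an assumption rather than something shown in this project:

    // Sketch: parse the inventory JSON collected above. The document exposes a top-level
    // "ArchiveList" array whose entries carry "ArchiveId", "Size", "CreationDate", etc.
    ObjectMapper mapper = new ObjectMapper();
    JsonNode inventory = mapper.readTree(content);
    for (JsonNode archive : inventory.get("ArchiveList")) {
        log.info("Archive " + archive.get("ArchiveId").asText()
                + ", " + archive.get("Size").asLong() + " bytes");
    }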
