Examples of Bucket


Examples of com.amazonaws.services.s3.model.Bucket

    public Bucket createBucket(CreateBucketRequest createBucketRequest) throws AmazonClientException, AmazonServiceException {
        if ("nonExistingBucket".equals(createBucketRequest.getBucketName())) {
            nonExistingBucketCreated = true;
        }
       
        Bucket bucket = new Bucket();
        bucket.setName(createBucketRequest.getBucketName());
        bucket.setCreationDate(new Date());
        bucket.setOwner(new Owner("c2efc7302b9011ba9a78a92ac5fd1cd47b61790499ab5ddf5a37c31f0638a8fc ", "Christian Mueller"));
        return bucket;
    }
View Full Code Here
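
A minimal caller sketch for the API shown above, assuming the standard AWS SDK v1 client and default credentials; the bucket name is a placeholder:

    import com.amazonaws.services.s3.AmazonS3;
    import com.amazonaws.services.s3.AmazonS3Client;
    import com.amazonaws.services.s3.model.Bucket;
    import com.amazonaws.services.s3.model.CreateBucketRequest;

    public class CreateBucketExample {
        public static void main(String[] args) {
            // Credentials come from the default provider chain (environment, profile, ...).
            AmazonS3 s3 = new AmazonS3Client();

            // Same call shape as the mock above; the bucket name is made up for illustration.
            Bucket bucket = s3.createBucket(new CreateBucketRequest("example-bucket-name"));

            System.out.println(bucket.getName());
            System.out.println(bucket.getCreationDate());
            System.out.println(bucket.getOwner());
        }
    }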

Examples of com.basho.riak.client.bucket.Bucket

    @Override
    public void storeIncomingStatistics(String guiPath, String accountName, Long timeperiod, String value, ValueType valueType, UnitType unitType, Long count) {
        Double valueDouble = LiveStatisticsUtil.parseDouble(value);
        long hoursSince1970 = timeperiod / 240;

        Bucket myBucket = null;
        try {
            myBucket = riakClient.fetchBucket(accountName + ";" + hoursSince1970).execute();
            BasicMetricHour storedMetricHour = myBucket.fetch("" + guiPath, BasicMetricHour.class).execute();
            if (storedMetricHour == null) {
                storedMetricHour = new BasicMetricHour(guiPath, accountName, hoursSince1970, valueType.toString(), unitType.toString());
            }

            storedMetricHour.addStatistic(new BasicLiveStatistics(guiPath, accountName, timeperiod, valueDouble, valueType.value(), unitType.value(), count));

            myBucket.store("" + guiPath, storedMetricHour).execute();
        } catch (RiakRetryFailedException e) {
            e.printStackTrace(); // To change body of catch statement use File | Settings | File Templates.
        }
    }
View Full Code Here
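
A hedged round-trip sketch for the same bucket API, assuming a Riak node reachable with the 1.x Java client's defaults; bucket and key names are placeholders:

    import com.basho.riak.client.IRiakClient;
    import com.basho.riak.client.RiakException;
    import com.basho.riak.client.RiakFactory;
    import com.basho.riak.client.bucket.Bucket;

    public class RiakBucketExample {
        public static void main(String[] args) throws RiakException {
            // Protocol Buffers client against a local node (defaults assumed).
            IRiakClient client = RiakFactory.pbcClient();

            // fetchBucket(...).execute() returns the Bucket handle used for fetch/store above.
            Bucket bucket = client.fetchBucket("example-bucket").execute();
            bucket.store("greeting", "hello").execute();
            String stored = bucket.fetch("greeting", String.class).execute();
            System.out.println(stored);

            client.shutdown();
        }
    }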

Examples of com.ceph.crush.Bucket

    rlock.lock();
    try {
      String[] parts = native_ceph_get_osd_crush_location(instance_ptr, osd);
      Bucket[] path = new Bucket[parts.length / 2];
      for (int i = 0; i < path.length; i++)
        path[i] = new Bucket(parts[i*2], parts[i*2+1]);
      return path;
    } finally {
      rlock.unlock();
    }
  }
View Full Code Here
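
A sketch of consuming the returned CRUSH path, assuming the mount class is com.ceph.fs.CephMount and that Bucket exposes getType() and getName() accessors for the (type, name) pairs built above:

    import com.ceph.crush.Bucket;
    import com.ceph.fs.CephMount;

    public class CrushLocationExample {
        public static void main(String[] args) throws Exception {
            CephMount mount = new CephMount("admin");
            mount.mount("/");

            // Walk the CRUSH hierarchy for OSD 0, e.g. host -> rack -> root.
            for (Bucket b : mount.get_osd_crush_location(0)) {
                // getType()/getName() are assumed accessors for the pairs shown above.
                System.out.println(b.getType() + " = " + b.getName());
            }

            mount.unmount();
        }
    }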

Examples of com.couchbase.client.vbucket.config.Bucket

  public CouchbaseNodeOrder getStreamingNodeOrder() {
    return nodeOrder;
  }

  public Config getVBucketConfig() {
    Bucket config = configurationProvider.getBucketConfiguration(bucket);
    if(config == null) {
      throw new ConfigurationException("Could not fetch valid configuration "
        + "from provided nodes. Stopping.");
    } else if (config.isNotUpdating()) {
      LOGGER.warning("Noticed bucket configuration to be disconnected, "
        + "will attempt to reconnect");
      setConfigurationProvider(new ConfigurationProviderHTTP(storedBaseList,
        bucket, pass));
    }
View Full Code Here
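
A hedged sketch of fetching the same bucket configuration straight from the HTTP provider used above; the node URI, bucket name, and the getConfig() accessor are assumptions:

    import java.net.URI;
    import java.util.Arrays;
    import java.util.List;

    import com.couchbase.client.vbucket.ConfigurationProviderHTTP;
    import com.couchbase.client.vbucket.config.Bucket;
    import com.couchbase.client.vbucket.config.Config;

    public class BucketConfigExample {
        public static void main(String[] args) throws Exception {
            // Node list, bucket name and password are placeholders.
            List<URI> baseList = Arrays.asList(new URI("http://127.0.0.1:8091/pools"));
            ConfigurationProviderHTTP provider =
                new ConfigurationProviderHTTP(baseList, "default", "");

            Bucket bucketConfig = provider.getBucketConfiguration("default");
            Config vbucketConfig = bucketConfig.getConfig();  // assumed accessor for the vbucket map

            System.out.println("bucket: " + bucketConfig.getName());
            System.out.println("still updating: " + !bucketConfig.isNotUpdating());
            System.out.println("config: " + vbucketConfig);
        }
    }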

Examples of com.google.api.services.storage.model.Bucket

            GccCredential credential = (GccCredential) stack.getCredential();
            ImageList list = compute.images().list(credential.getProjectId()).execute();
            Long time = new Date().getTime();
            if (!containsSpecificImage(list)) {
                try {
                    Bucket bucket = new Bucket();
                    bucket.setName(credential.getProjectId() + time);
                    bucket.setStorageClass("STANDARD");
                    Storage.Buckets.Insert ins = storage.buckets().insert(credential.getProjectId(), bucket);
                    ins.execute();
                } catch (GoogleJsonResponseException ex) {
                    if (ex.getStatusCode() != CONFLICT) {
                        throw ex;
View Full Code Here
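
A self-contained version of the same insert call, assuming application-default credentials; the project id and bucket name are placeholders:

    import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
    import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
    import com.google.api.client.json.jackson2.JacksonFactory;
    import com.google.api.services.storage.Storage;
    import com.google.api.services.storage.model.Bucket;

    public class GcsBucketExample {
        public static void main(String[] args) throws Exception {
            GoogleCredential credential = GoogleCredential.getApplicationDefault();
            Storage storage = new Storage.Builder(GoogleNetHttpTransport.newTrustedTransport(),
                    JacksonFactory.getDefaultInstance(), credential)
                    .setApplicationName("bucket-example").build();

            Bucket bucket = new Bucket();
            bucket.setName("example-bucket-name");
            bucket.setStorageClass("STANDARD");

            // Same buckets().insert(project, bucket).execute() call as in the snippet above.
            storage.buckets().insert("example-project-id", bucket).execute();
        }
    }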

Examples of com.m6d.filecrush.crush.Bucketer.Bucket

                    statusFor("file3", 37),
                    statusFor("file4", 19),
                    statusFor("file5", 17),
                    statusFor("file6", 10));

    expected = asList(new Bucket("three buckets of two each-0", asList("file3", "file6"), 47),
                      new Bucket("three buckets of two each-1", asList("file2", "file5"), 37),
                      new Bucket("three buckets of two each-2", asList("file4", "file1"), 37));

    testCases.add(new Object[] { dir, true, input, expected });


    /*
     * Not enough data to fill all the buckets. Data should be packed into as few buckets as possible.
     */
    dir = "not/enough/data/for/max/buckets";

    input = asList(statusFor("file1", 1),
                    statusFor("file2", 2),
                    statusFor("file3", 3),
                    statusFor("file4", 4),
                    statusFor("file5", 5),
                    statusFor("file6", 6));

    expected = asList(new Bucket("not/enough/data/for/max/buckets-0", asList("file6", "file5", "file4", "file3", "file2", "file1"), 21));

    testCases.add(new Object[] { dir, true, input, expected });

    /*
     * A directory with one file should be ignored.
     */
    dir = "dir/with/one/file";

    input = asList(statusFor("loner", 1));

    expected = emptyList();

    testCases.add(new Object[] { dir, true, input, expected });


    /*
     * Test case with enough data to fill up all the buckets but no one bucket is more than twice the bucket size.
     *
     * 0            1            2            3            4
     * file 9 35    file 1 30    file 3 30    file 5 30    file 7 30
     * file 6 20    file 8 25    file 0 20    file 2 20    file 4 20
     *                           file 11 20  file 10 10
     */
    dir = "enough/data/for/max/buckets";

    input = asList(statusFor("file0", 20),
                    statusFor("file1", 30),
                    statusFor("file2", 20),
                    statusFor("file3", 30),
                    statusFor("file4", 20),
                    statusFor("file5", 30),
                    statusFor("file6", 20),
                    statusFor("file7", 30),
                    statusFor("file8", 25),
                    statusFor("file9", 35),
                    statusFor("file10", 10),
                    statusFor("file11", 20));

    expected = asList(
      new Bucket("enough/data/for/max/buckets-0", asList("file9", "file6"), 55),
      new Bucket("enough/data/for/max/buckets-1", asList("file1", "file8"), 55),
      new Bucket("enough/data/for/max/buckets-2", asList("file3", "file0", "file11"), 70),
      new Bucket("enough/data/for/max/buckets-3", asList("file5", "file2", "file10"), 60),
      new Bucket("enough/data/for/max/buckets-4", asList("file7", "file4"), 50));

    testCases.add(new Object[] { dir, true, input, expected });


    /*
     * Test case with enough data to fill up all the buckets with some of the buckets more than twice the bucket size.
     *
     * 0            1            2            3            4
     * file  0 35   file  2 35   file  4 35   file  6 35   file  8 35
     * file 10 35   file 12 35   file 14 35   file  1 30   file  3 30
     * file  9 30   file 11 30   file 13 30   file  5 30   file  7 30
     *                                        file 15 30   file 16 20
     */
    dir = "enough/data/for/max/buckets/and/big/buckets";

    input = asList(statusFor("file0", 35),
                    statusFor("file1", 30),
                    statusFor("file2", 35),
                    statusFor("file3", 30),
                    statusFor("file4", 35),
                    statusFor("file5", 30),
                    statusFor("file6", 35),
                    statusFor("file7", 30),
                    statusFor("file8", 35),
                    statusFor("file9", 30),
                    statusFor("file10", 35),
                    statusFor("file11", 30),
                    statusFor("file12", 35),
                    statusFor("file13", 30),
                    statusFor("file14", 35),
                    statusFor("file15", 30),
                    statusFor("file16", 20));

    expected = asList(
      new Bucket("enough/data/for/max/buckets/and/big/buckets-0", asList("file0", "file10", "file9"), 100),
      new Bucket("enough/data/for/max/buckets/and/big/buckets-1", asList("file2", "file12", "file11"), 100),
      new Bucket("enough/data/for/max/buckets/and/big/buckets-2", asList("file4", "file14", "file13"), 100),
      new Bucket("enough/data/for/max/buckets/and/big/buckets-3", asList("file6", "file1", "file5", "file15"), 125),
      new Bucket("enough/data/for/max/buckets/and/big/buckets-4", asList("file8", "file3", "file7", "file16"), 115));

    testCases.add(new Object[] { dir, true, input, expected });


    /*
     * Exactly enough data for five buckets of 50.
     */
    dir = "exactly/enough/data/for/max/buckets";

    input = asList(statusFor("file0", 20),
                    statusFor("file1", 30),
                    statusFor("file2", 20),
                    statusFor("file3", 30),
                    statusFor("file4", 20),
                    statusFor("file5", 30),
                    statusFor("file6", 20),
                    statusFor("file7", 30),
                    statusFor("file8", 20),
                    statusFor("file9", 30));

    expected = asList(
      new Bucket("exactly/enough/data/for/max/buckets-0", asList("file1", "file0"), 50),
      new Bucket("exactly/enough/data/for/max/buckets-1", asList("file3", "file2"), 50),
      new Bucket("exactly/enough/data/for/max/buckets-2", asList("file5", "file4"), 50),
      new Bucket("exactly/enough/data/for/max/buckets-3", asList("file7", "file6"), 50),
      new Bucket("exactly/enough/data/for/max/buckets-4", asList("file9", "file8"), 50));

    testCases.add(new Object[] { dir, true, input, expected });


    /*
     * Exactly enough data for four buckets of 50.
     */
    dir = "exactly/enough/data/for/four/buckets";

    input = asList(statusFor("file0", 20),
                    statusFor("file1", 30),
                    statusFor("file2", 20),
                    statusFor("file3", 30),
                    statusFor("file4", 20),
                    statusFor("file5", 30),
                    statusFor("file6", 20),
                    statusFor("file7", 30));

    expected = asList(
      new Bucket("exactly/enough/data/for/four/buckets-0", asList("file1", "file0"), 50),
      new Bucket("exactly/enough/data/for/four/buckets-1", asList("file3", "file2"), 50),
      new Bucket("exactly/enough/data/for/four/buckets-2", asList("file5", "file4"), 50),
      new Bucket("exactly/enough/data/for/four/buckets-3", asList("file7", "file6"), 50));

    testCases.add(new Object[] { dir, true, input, expected });


    /*
     * Buckets that end up with one file are ignored.
     *
     * 0          1
     * file 3 35  file 2 30
     *             file 1 25
     *
     * What would have been bucket 0 is dropped since it has only one file in it.
     */
    dir = "buckets/with/one/file/are/ignored";

    input = asList(statusFor("file1", 25),
                    statusFor("file2", 30),
                    statusFor("file3", 35));

    expected = asList(new Bucket("buckets/with/one/file/are/ignored-1", asList("file2", "file1"), 55));

    testCases.add(new Object[] { dir, true, input, expected });


    /*
     * Set the flag so that single item buckets are returned.
     *
     * 0          1
     * file 3 35  file 2 30
     *             file 1 25
     *
     * Because the flag is cleared here, bucket 0 is kept even though it has only one file in it.
     */
    dir = "include/buckets/with/one/file";

    input = asList(statusFor("file1", 25),
                    statusFor("file2", 30),
                    statusFor("file3", 35));

    expected = asList(
        new Bucket("include/buckets/with/one/file-0", asList("file3"), 35),
        new Bucket("include/buckets/with/one/file-1", asList("file2", "file1"), 55));

    testCases.add(new Object[] { dir, false, input, expected });

    return testCases;
  }
View Full Code Here
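
The expected buckets in these cases follow a simple greedy rule: files are taken largest first and each one lands in the currently least-full bucket (lowest index on ties). A minimal illustrative sketch of that rule, not the Bucketer API itself:

    import java.util.Arrays;

    // Illustrative only: reproduces the bucket totals implied by the expected results
    // above (e.g. 55, 55, 70, 60, 50 for "enough/data/for/max/buckets").
    public class GreedyPackingSketch {

        static long[] pack(long[] sizes, int numBuckets) {
            long[] sorted = sizes.clone();
            Arrays.sort(sorted);                              // ascending; walked backwards below

            long[] bucketTotals = new long[numBuckets];
            for (int s = sorted.length - 1; s >= 0; s--) {    // largest file first
                int leastFull = 0;
                for (int i = 1; i < numBuckets; i++)
                    if (bucketTotals[i] < bucketTotals[leastFull]) leastFull = i;
                bucketTotals[leastFull] += sorted[s];         // into the least-full bucket
            }
            return bucketTotals;
        }

        public static void main(String[] args) {
            // File sizes from the "enough/data/for/max/buckets" case (file0 .. file11).
            long[] sizes = {20, 30, 20, 30, 20, 30, 20, 30, 25, 35, 10, 20};
            System.out.println(Arrays.toString(pack(sizes, 5)));  // [55, 55, 70, 60, 50]
        }
    }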

Examples of com.splunk.shuttl.archiver.model.Bucket

  public LocalBucket getBucketFromArchive(Bucket bucket)
      throws ThawTransferFailException, ImportThawedBucketFailException {
    logger.info(will("Attempting to thaw bucket", "bucket", bucket));
    LocalBucket thawedBucket = getTransferedBucket(bucket);
    LocalBucket importedBucket = importThawedBucket(thawedBucket);
    Bucket bucketWithSize = bucketSizeResolver.resolveBucketSize(thawedBucket);
    logger.info(done("Thawed bucket", "bucket", importedBucket));
    return BucketFactory.createBucketWithIndexDirectoryAndSize(
        importedBucket.getIndex(), importedBucket.getDirectory(),
        importedBucket.getFormat(), bucketWithSize.getSize());
  }
View Full Code Here

Examples of com.yahoo.omid.tso.Bucket

     */
    public static void main(String[] args) {
        for (int i = 0; i <= 12; i++)
            System.out.println(i % 12);

        Bucket b = new Bucket(10);

        for (int i = 0; i <= 12; i++)
            b.commit(i);

        for (int i = 0; i <= 24; i++)
            System.out.println(!b.isUncommited(i));

        BitSet transactions = new BitSet(12);

        for (int i = 0; i <= 10; i++)
            transactions.set(i);
View Full Code Here

Examples of freenet.support.api.Bucket

  public URIFeedMessage(String header, String shortText, String text, short priorityClass, long updatedTime,
      String sourceNodeName, long composed, long sent, long received,
      FreenetURI URI, String description) {
    super(header, shortText, text, priorityClass, updatedTime, sourceNodeName, composed, sent, received);
    this.URI = URI;
    final Bucket descriptionBucket;
    try {
      if(description != null)
        descriptionBucket = new ArrayBucket(description.getBytes("UTF-8"));
      else
        descriptionBucket = new NullBucket();
View Full Code Here

Examples of freenet.support.api.Bucket

  }
 
  @Override
  protected void writeData(OutputStream os) throws IOException {
      for(Map.Entry<String, Bucket> entry : buckets.entrySet()) {
        Bucket bucket = entry.getValue();
        BucketTools.copyTo(bucket, os, bucket.size());
        if(freeOnSent) bucket.free(); // Always transient so no removeFrom() needed.
      }
  }
View Full Code Here
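
A standalone sketch of the same copy pattern, assuming ArrayBucket and BucketTools live in freenet.support.io as in the snippets above:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import freenet.support.api.Bucket;
    import freenet.support.io.ArrayBucket;
    import freenet.support.io.BucketTools;

    public class BucketCopySketch {
        public static void main(String[] args) throws IOException {
            // An in-memory Bucket, as built for the message description further up.
            Bucket bucket = new ArrayBucket("hello bucket".getBytes("UTF-8"));

            ByteArrayOutputStream os = new ByteArrayOutputStream();
            BucketTools.copyTo(bucket, os, bucket.size());   // same call as writeData() above
            bucket.free();                                   // release the bucket once copied

            System.out.println(os.toString("UTF-8"));
        }
    }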