Examples of addCachePool()


Examples of org.apache.hadoop.hdfs.DistributedFileSystem.addCachePool()

        System.err.println("Usage is " + getShortUsage());
        return 1;
      }
      DistributedFileSystem dfs = getDFS(conf);
      try {
        dfs.addCachePool(info);
      } catch (IOException e) {
        System.err.println(prettifyException(e));
        return 2;
      }
      System.out.println("Successfully added cache pool " + name + ".");
View Full Code Here

Examples of org.apache.hadoop.hdfs.DistributedFileSystem.addCachePool()

      Assert.fail("expected UnsupportedOperationException");
    } catch (UnsupportedOperationException e) {
      // expected
    }
    // Cache the file
    fs.addCachePool(new CachePoolInfo("pool1"));
    long directiveId = fs.addCacheDirective(new CacheDirectiveInfo.Builder().
        setPath(TEST_PATH).
        setReplication((short)1).
        setPool("pool1").
        build());
View Full Code Here

Examples of org.apache.hadoop.hdfs.DistributedFileSystem.addCachePool()

    Path fileName = new Path("/testUncacheQuiesces");
    int fileLen = 4096;
    DFSTestUtil.createFile(fs, fileName, fileLen, (short)1, 0xFDFD);
    // Cache it
    DistributedFileSystem dfs = cluster.getFileSystem();
    dfs.addCachePool(new CachePoolInfo("pool"));
    dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
        .setPool("pool").setPath(fileName).setReplication((short)3).build());
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
View Full Code Here

Examples of org.apache.hadoop.hdfs.DistributedFileSystem.addCachePool()

        System.err.println("Usage is " + getShortUsage());
        return 1;
      }
      DistributedFileSystem dfs = getDFS(conf);
      try {
        dfs.addCachePool(info);
      } catch (IOException e) {
        System.err.println(prettifyException(e));
        return 2;
      }
      System.out.println("Successfully added cache pool " + name + ".");
View Full Code Here

Examples of org.apache.hadoop.hdfs.DistributedFileSystem.addCachePool()

      Assert.fail("expected UnsupportedOperationException");
    } catch (UnsupportedOperationException e) {
      // expected
    }
    // Cache the file
    fs.addCachePool(new CachePoolInfo("pool1"));
    long directiveId = fs.addCacheDirective(new CacheDirectiveInfo.Builder().
        setPath(TEST_PATH).
        setReplication((short)1).
        setPool("pool1").
        build());
View Full Code Here

Examples of org.apache.hadoop.hdfs.DistributedFileSystem.addCachePool()

    Path fileName = new Path("/testUncacheQuiesces");
    int fileLen = 4096;
    DFSTestUtil.createFile(fs, fileName, fileLen, (short)1, 0xFDFD);
    // Cache it
    DistributedFileSystem dfs = cluster.getFileSystem();
    dfs.addCachePool(new CachePoolInfo("pool"));
    dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
        .setPool("pool").setPath(fileName).setReplication((short)3).build());
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
View Full Code Here

Examples of org.apache.hadoop.hdfs.DistributedFileSystem.addCachePool()

    // Create a file that will take up the whole cache
    final Path BIG_FILE = new Path("/bigFile");
    DFSTestUtil.createFile(fs, BIG_FILE,
        TOTAL_BLOCKS_PER_CACHE * BLOCK_SIZE, (short)1, 0xbeef);
    final DistributedFileSystem dfs = cluster.getFileSystem();
    dfs.addCachePool(new CachePoolInfo("pool"));
    final long bigCacheDirectiveId =
        dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
        .setPool("pool").setPath(BIG_FILE).setReplication((short)1).build());
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
View Full Code Here

Examples of org.apache.hadoop.hdfs.DistributedFileSystem.addCachePool()

        System.err.println("Usage is " + getShortUsage());
        return 1;
      }
      DistributedFileSystem dfs = getDFS(conf);
      try {
        dfs.addCachePool(info);
      } catch (IOException e) {
        System.err.println(prettifyException(e));
        return 2;
      }
      System.out.println("Successfully added cache pool " + name + ".");
View Full Code Here

Examples of org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols.addCachePool()

    nnRpc.cacheReport(dn0.getDNRegistrationForBP(bpid), bpid, bogusBlockIds);

    Path rootDir = helper.getDefaultWorkingDirectory(dfs);
    // Create the pool
    final String pool = "friendlyPool";
    nnRpc.addCachePool(new CachePoolInfo("friendlyPool"));
    // Create some test files
    final int numFiles = 2;
    final int numBlocksPerFile = 2;
    final List<String> paths = new ArrayList<String>(numFiles);
    for (int i=0; i<numFiles; i++) {
View Full Code Here

Examples of org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols.addCachePool()

    nnRpc.cacheReport(dn0.getDNRegistrationForBP(bpid), bpid, bogusBlockIds);

    Path rootDir = helper.getDefaultWorkingDirectory(dfs);
    // Create the pool
    final String pool = "friendlyPool";
    nnRpc.addCachePool(new CachePoolInfo("friendlyPool"));
    // Create some test files
    final int numFiles = 2;
    final int numBlocksPerFile = 2;
    final List<String> paths = new ArrayList<String>(numFiles);
    for (int i=0; i<numFiles; i++) {
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.