Package: org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FileSystem.create()


      FileSystem fs = FileSystem.get(config);
      Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader());
      Path p = new Path("/tmp/shdp-lease-early-init-" + UUID.randomUUID().toString());
      // create/delete
      fs.create(p).close();
      fs.delete(p, false);
    }
  }
}
View Full Code Here


          }
          else if (st.getLen() != 0)
            throw new IllegalArgumentException(src + " must be a zero-length file");
        }
        else {
          IOUtils.closeStream(srcFs.create(src));
        }
      } catch (IOException ex) {
        throw new HadoopException("Cannot touchz " + uri + ";" + ex.getMessage(), ex);
      }
    }
View Full Code Here

    public void testDataFileChunk() throws IOException {
        Logger.getRootLogger().removeAllAppenders();

        Path path = new Path(TestUtils.createTempDir().getAbsolutePath(), "tempFile");
        FileSystem fs = path.getFileSystem(new Configuration());
        fs.create(path);
        HdfsDataFileChunk chunk = new HdfsDataFileChunk(fs, fs.getFileStatus(path));

        int jumps = 10;
        ByteBuffer buffer = ByteBuffer.allocate(jumps);
        try {
View Full Code Here

        } catch(EOFException e) {}

        for(int numBytes = 100; numBytes <= 1000; numBytes += 100) {
            // Clear up file
            fs.delete(path, true);
            fs.create(path);

            // Write random bytes to it
            byte[] randomBytes = TestUtils.randomBytes(numBytes);

            FileOutputStream stream = new FileOutputStream(path.toString());
View Full Code Here

        FileSystem fs = testPath.getFileSystem(new Configuration());
        fs.mkdirs(testPath);

        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 0);

        fs.create(new Path(testPath, "0_0_1.data"));
        fs.create(new Path(testPath, "0_0_1data"));
        fs.create(new Path(testPath, "0_0_2.index"));
        fs.create(new Path(testPath, "0_0.data"));
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0, 1).length, 1);
View Full Code Here

        fs.mkdirs(testPath);

        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 0);

        fs.create(new Path(testPath, "0_0_1.data"));
        fs.create(new Path(testPath, "0_0_1data"));
        fs.create(new Path(testPath, "0_0_2.index"));
        fs.create(new Path(testPath, "0_0.data"));
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0, 1).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length, 0);
View Full Code Here

        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 0);

        fs.create(new Path(testPath, "0_0_1.data"));
        fs.create(new Path(testPath, "0_0_1data"));
        fs.create(new Path(testPath, "0_0_2.index"));
        fs.create(new Path(testPath, "0_0.data"));
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0, 1).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length, 0);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 1).length, 0);
View Full Code Here

        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 0);

        fs.create(new Path(testPath, "0_0_1.data"));
        fs.create(new Path(testPath, "0_0_1data"));
        fs.create(new Path(testPath, "0_0_2.index"));
        fs.create(new Path(testPath, "0_0.data"));
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0, 1).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length, 0);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 1).length, 0);
View Full Code Here

        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0, 1).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length, 0);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 1).length, 0);

        fs.create(new Path(testPath, "1_0_0.data"));
        fs.create(new Path(testPath, "1_0"));
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 0).length, 1);

        fs.create(new Path(testPath, "1_0_1.data"));
View Full Code Here

        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0, 1).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length, 0);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 1).length, 0);

        fs.create(new Path(testPath, "1_0_0.data"));
        fs.create(new Path(testPath, "1_0"));
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0).length, 1);
        assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 0).length, 1);

        fs.create(new Path(testPath, "1_0_1.data"));
        fs.create(new Path(testPath, "1_0_1data"));
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.