Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FileSystem.create()
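
FileSystem.create() opens a new file and returns an FSDataOutputStream that must be closed to flush the contents. As a reference point for the excerpts below, here is a minimal, self-contained sketch (the path and payload are illustrative only, not taken from any excerpt):

    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class CreateExample {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Path path = new Path("/tmp/example.data"); // illustrative path
            FileSystem fs = path.getFileSystem(conf);

            // create(path, true) overwrites an existing file; try-with-resources
            // guarantees the stream is closed and the data is flushed.
            try (FSDataOutputStream out = fs.create(path, true)) {
                out.write("hello".getBytes(StandardCharsets.UTF_8));
            }
        }
    }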


        // Touch each file with create() (the returned stream is discarded
        // here), then fill it with random bytes; the size encoded in each
        // file name matches the number of bytes written.
        testPath = new Path(headPath, "0_0_999.data");
        fs.create(testPath);
        writeRandomData(testPath, 999);

        testPath = new Path(headPath, "0_0_101.data");
        fs.create(testPath);
        writeRandomData(testPath, 101);

        testPath = new Path(headPath, "0_0_1000.data");
        fs.create(testPath);
        writeRandomData(testPath, 1000);

        // Collect the data chunk files just written under headPath into a
        // DataFileChunkSet.
        set = HadoopStoreBuilderUtils.getDataFileChunkSet(fs,
                                                          HadoopStoreBuilderUtils.getDataChunkFiles(fs,
                                                                                                    headPath,
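The writeRandomData(Path, int) helper above comes from the excerpted test class and is not shown. A plausible sketch, assuming it simply fills the path with the given number of random bytes (an assumption, not Voldemort's actual helper; it also presumes the test's FileSystem field fs and java.util.Random):

    // Hypothetical sketch of the writeRandomData test helper used above.
    private void writeRandomData(Path path, int size) throws IOException {
        byte[] randomBytes = new byte[size];
        new Random().nextBytes(randomBytes);
        try (FSDataOutputStream out = fs.create(path, true)) {
            out.write(randomBytes);
        }
    }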

    @Test
    public void testReadFileContents() throws Exception {

        Path testPath = new Path(TestUtils.createTempDir().getAbsolutePath(), "tempFile");
        FileSystem fs = testPath.getFileSystem(new Configuration());
        // create() returns an FSDataOutputStream; close it so the empty file
        // is flushed before it is read back.
        fs.create(testPath).close();

        // 1) Read back empty file
        String emptyString = HadoopStoreBuilderUtils.readFileContents(fs, testPath, 1024);
        Assert.assertEquals(0, emptyString.length());
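readFileContents(FileSystem, Path, int) is Voldemort's own helper; as a rough sketch of what such a method does, assuming it drains the file through FileSystem.open() with the given buffer size (the body below is an assumption, not the actual implementation, and presumes the usual java.io imports):

    // Hedged sketch of a readFileContents-style helper.
    static String readFileContents(FileSystem fs, Path path, int bufferSize) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (FSDataInputStream in = fs.open(path)) {
            byte[] buffer = new byte[bufferSize];
            int read;
            while ((read = in.read(buffer)) != -1) {
                bytes.write(buffer, 0, read);
            }
        }
        return new String(bytes.toByteArray(), StandardCharsets.UTF_8);
    }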

        // expected value first, then the actual contents read back
        Assert.assertEquals(new String(randomBytes),
                            HadoopStoreBuilderUtils.readFileContents(fs, testPath, 1024));

        // 3) Write a JSON string
        fs.delete(testPath, true);
        fs.create(testPath);

        ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
        metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V2.getCode());

        // Write file contents
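The excerpt ends before the metadata is actually written. A minimal sketch of the remaining step, assuming the metadata object can render itself as a JSON string (the toJsonString() method name is hypothetical here):

    // Sketch only: serialize the metadata and write it through the stream
    // returned by create(); toJsonString() is an assumed method name.
    try (FSDataOutputStream out = fs.create(testPath, true)) {
        out.write(metadata.toJsonString().getBytes(StandardCharsets.UTF_8));
    }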

                Path checkSumValueFile = new Path(nodeDir, fileNamePrefix + ".data.checksum");

                // Overwrite any stale index checksum file, then write the
                // digest with the configured Hadoop file permissions.
                if(outputFs.exists(checkSumIndexFile)) {
                    outputFs.delete(checkSumIndexFile, true);
                }
                FSDataOutputStream output = outputFs.create(checkSumIndexFile);
                outputFs.setPermission(checkSumIndexFile,
                                       new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION));
                output.write(this.checkSumDigestIndex.getCheckSum());
                output.close();

                // Same pattern for the value file's checksum.
                if(outputFs.exists(checkSumValueFile)) {
                    outputFs.delete(checkSumValueFile, true);
                }
                output = outputFs.create(checkSumValueFile);
                outputFs.setPermission(checkSumValueFile,
                                       new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION));
                output.write(this.checkSumDigestValue.getCheckSum());
                output.close();
            } else {
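The exists/delete/create/setPermission/write/close sequence above is repeated for every checksum file. A sketch of how it could be factored into one helper (the method name is illustrative, not from the Voldemort source):

    // Illustrative helper: overwrite `file` with `bytes` under the store's
    // configured file permissions.
    private void writeCheckSumFile(FileSystem outputFs, Path file, byte[] bytes) throws IOException {
        if (outputFs.exists(file)) {
            outputFs.delete(file, true);
        }
        try (FSDataOutputStream output = outputFs.create(file)) {
            outputFs.setPermission(file,
                                   new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION));
            output.write(bytes);
        }
    }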

                    Path checkSumValueFile = new Path(nodeDir, chunkFileName + ".data.checksum");

                    // Per-chunk variant: overwrite any stale index checksum
                    // file and write the digest for this chunk.
                    if(outputFs.exists(checkSumIndexFile)) {
                        outputFs.delete(checkSumIndexFile, true);
                    }
                    FSDataOutputStream output = outputFs.create(checkSumIndexFile);
                    outputFs.setPermission(checkSumIndexFile,
                                           new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION));
                    output.write(this.checkSumDigestIndex[chunkId].getCheckSum());
                    output.close();

                    // Same pattern for this chunk's value checksum file.
                    if(outputFs.exists(checkSumValueFile)) {
                        outputFs.delete(checkSumValueFile, true);
                    }
                    output = outputFs.create(checkSumValueFile);
                    outputFs.setPermission(checkSumValueFile,
                                           new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION));
                    output.write(this.checkSumDigestValue[chunkId].getCheckSum());
                    output.close();
                } else {

            outpath.append(tableName);
            outpath.append(".log");

            // Open the log file on the cluster filesystem; overwrite=false
            // makes create() fail if the file already exists.
            Path pt = new Path(outpath.toString());
            FileSystem fs = FileSystem.get(new Configuration());
            BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fs.create(pt, false)));

            // catch the child process output line by line
            while ((lChldProcOutPutStr = lChldProcOutStream.readLine()) != null)
            {
              br.write(lChldProcOutPutStr);
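Note the boolean in fs.create(pt, false): with overwrite=false the call throws if the log file already exists instead of silently replacing it. A sketch of the same pattern with try-with-resources, so the writer and the underlying stream are always closed (the path is illustrative):

    // Sketch: same create-then-wrap pattern, with deterministic cleanup.
    Path logPath = new Path("/logs/" + tableName + ".log"); // illustrative path
    FileSystem fs = FileSystem.get(new Configuration());
    try (BufferedWriter br = new BufferedWriter(
            new OutputStreamWriter(fs.create(logPath, false), StandardCharsets.UTF_8))) {
        String line;
        while ((line = lChldProcOutStream.readLine()) != null) {
            br.write(line);
            br.newLine();
        }
    }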

      ext = codec.getDefaultExtension();
    }

    // Create the task's work file (overwrite=false: it must not already
    // exist) and, if compression is enabled, wrap the raw stream in the
    // codec's compressing output stream.
    Path file = getDefaultWorkFile(context, ext);
    FileSystem fs = file.getFileSystem(conf);
    FSDataOutputStream fileOut = fs.create(file, false);
    DataOutputStream ostream = fileOut;

    if (isCompressed) {
      ostream = new DataOutputStream(codec.createOutputStream(fileOut));
    }
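When compression is enabled, the raw FSDataOutputStream from create() is wrapped in the codec's compressing stream, and closing the outermost stream also closes the one from create(). A condensed sketch of the full open-write-close cycle (the record written is illustrative):

    FSDataOutputStream fileOut = fs.create(file, false); // fail if the work file exists
    DataOutputStream ostream = isCompressed
            ? new DataOutputStream(codec.createOutputStream(fileOut))
            : fileOut;
    try {
        ostream.writeBytes("key\tvalue\n"); // illustrative record
    } finally {
        ostream.close(); // also closes fileOut when the codec stream wraps it
    }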
