Package org.springframework.data.hadoop.store.output

Examples of org.springframework.data.hadoop.store.output.OutputStreamWriter
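
The snippets below are taken from the Spring for Apache Hadoop test sources and rely on test fixtures such as getConfiguration(), testDefaultPath, DATA10/DATA11, doWithInputStream and TestUtils. As a self-contained starting point, here is a minimal usage sketch; the default Configuration and the /tmp/store path are placeholder assumptions for the sketch, not part of the original examples.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.springframework.data.hadoop.store.output.OutputStreamWriter;

public class OutputStreamWriterSketch {

  public static void main(String[] args) throws Exception {
    // Placeholder configuration and target path (assumptions for this sketch).
    Configuration configuration = new Configuration();
    Path basePath = new Path("/tmp/store");

    // The third constructor argument is an optional codec; null writes uncompressed data,
    // while Codecs.GZIP.getCodecInfo() would produce gzip-compressed output as in the
    // examples further down.
    OutputStreamWriter writer = new OutputStreamWriter(configuration, basePath, null);
    try {
      // The writer takes raw byte[] entities; line delimiters must be written explicitly.
      writer.write("line one\n".getBytes());
      writer.write("line two\n".getBytes());
    }
    finally {
      writer.close();
    }
  }
}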


    // Two lines of test data, each followed by an explicit newline.
    byte[][] data = new byte[4][];
    data[0] = DATA10.getBytes();
    data[1] = "\n".getBytes();
    data[2] = DATA11.getBytes();
    data[3] = "\n".getBytes();
    // Expected content when the file is read back line by line.
    String[] dataArray = new String[] { DATA10, DATA11 };

    // Write uncompressed data (null codec) to testDefaultPath, closing the writer afterwards.
    OutputStreamWriter writer = new OutputStreamWriter(getConfiguration(), testDefaultPath, null);
    TestUtils.writeData(writer, data, true);

    // Read the file back with a TextFileReader and verify the two lines.
    TextFileReader reader = new TextFileReader(getConfiguration(), testDefaultPath, null);
    TestUtils.readDataAndAssert(reader, dataArray);
  }


  @Test
  public void testStreamSmall() throws IOException {
    // Copy a single small in-memory line into HDFS through the writer.
    ByteArrayInputStream stream = new ByteArrayInputStream(DATA10.getBytes());
    OutputStreamWriter writer = new OutputStreamWriter(getConfiguration(), testDefaultPath, null);

    doWithInputStream(stream, writer);

    // Read the file back and verify it contains exactly DATA10.
    String[] dataArray = new String[] { DATA10 };
    TextFileReader reader = new TextFileReader(getConfiguration(), testDefaultPath, null);
    TestUtils.readDataAndAssert(reader, dataArray);
  }

    // Build 1000 lines of test data in memory.
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < 1000; i++) {
      buf.append(DATA10);
      buf.append("\n");
    }
    ByteArrayInputStream stream = new ByteArrayInputStream(buf.toString().getBytes());
    // Uncompressed writer (null codec) targeting testDefaultPath.
    OutputStreamWriter writer = new OutputStreamWriter(getConfiguration(), testDefaultPath, null);

    doWithInputStream(stream, writer);

    // Read all lines back for verification.
    TextFileReader reader = new TextFileReader(getConfiguration(), testDefaultPath, null);
    List<String> data = TestUtils.readData(reader);

    // Same 1000-line payload as above, this time written with gzip compression.
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < 1000; i++) {
      buf.append(DATA10);
      buf.append("\n");
    }
    ByteArrayInputStream stream = new ByteArrayInputStream(buf.toString().getBytes());
    OutputStreamWriter writer = new OutputStreamWriter(getConfiguration(), testDefaultPath, Codecs.GZIP.getCodecInfo());

    doWithInputStream(stream, writer);

    // The reader must be configured with the same codec to decompress the data.
    TextFileReader reader = new TextFileReader(getConfiguration(), testDefaultPath, Codecs.GZIP.getCodecInfo());
    List<String> data = TestUtils.readData(reader);

    // 1000 lines of test data, gzip-compressed on write.
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < 1000; i++) {
      buf.append(DATA10);
      buf.append("\n");
    }
    ByteArrayInputStream stream = new ByteArrayInputStream(buf.toString().getBytes());
    OutputStreamWriter writer = new OutputStreamWriter(getConfiguration(), testDefaultPath, Codecs.GZIP.getCodecInfo());

    // Chain a static base name ("data") with a codec-derived suffix so the writer
    // creates a file named "data.gzip" under testDefaultPath.
    ChainedFileNamingStrategy fileNamingStrategy = new ChainedFileNamingStrategy();
    fileNamingStrategy.register(new StaticFileNamingStrategy("data"));
    fileNamingStrategy.register(new CodecFileNamingStrategy());
    writer.setFileNamingStrategy(fileNamingStrategy);

    doWithInputStream(stream, writer);

    // Read back from the resolved file name, using the matching codec.
    TextFileReader reader = new TextFileReader(getConfiguration(), new Path(testDefaultPath, "data.gzip"), Codecs.GZIP.getCodecInfo());
    List<String> data = TestUtils.readData(reader);
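
TestUtils.readData and TestUtils.readDataAndAssert above are helpers from the same test sources. Outside the test suite, the read-back step can be written directly against TextFileReader; the sketch below assumes the usual store reader contract where read() returns the next line and null once the data is exhausted, and uses a placeholder /tmp/store/data.gzip path matching the naming strategy above.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.springframework.data.hadoop.store.codec.Codecs;
import org.springframework.data.hadoop.store.input.TextFileReader;

public class TextFileReaderSketch {

  public static void main(String[] args) throws Exception {
    // Placeholder configuration and path; "data.gzip" is the name the chained
    // naming strategy above resolves to.
    Configuration configuration = new Configuration();
    Path path = new Path("/tmp/store/data.gzip");

    // The reader must use the same codec the file was written with.
    TextFileReader reader = new TextFileReader(configuration, path, Codecs.GZIP.getCodecInfo());
    List<String> lines = new ArrayList<String>();
    try {
      String line;
      // read() is assumed to return null at the end of the data.
      while ((line = reader.read()) != null) {
        lines.add(line);
      }
    }
    finally {
      reader.close();
    }
    System.out.println("Read " + lines.size() + " lines");
  }
}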

    // Stream a remote file into HDFS: the callback copies the incoming
    // InputStream into an OutputStreamWriter chunk by chunk.
    boolean result = this.template.get(filePath, new InputStreamCallback() {

      @Override
      public void doWithInputStream(InputStream stream) throws IOException {
        OutputStreamWriter writer = new OutputStreamWriter(configuration,
            new Path(hdfsDirectory + filePath), null);
        byte[] buff = new byte[1024];
        int len;
        while ((len = stream.read(buff)) > 0) {
          if (len == buff.length) {
            writer.write(buff);
          }
          else {
            // Last partial chunk: only write the bytes actually read.
            writer.write(Arrays.copyOf(buff, len));
          }
        }
        writer.close();
      }

    });
    if (!result) {
      throw new MessagingException("Error during file transfer");
    }


