Examples of org.apache.hadoop.mapred.OutputLogFilter
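
OutputLogFilter is a PathFilter that rejects the _logs directory Hadoop creates
inside a job's output directory, so that FileSystem.listStatus returns only the
real output files (part-00000, part-00001, ...). Later Hadoop releases deprecate
this class in favor of Utils.OutputFileUtils.OutputLogFilter, but the usage
pattern is unchanged.

The excerpts below all follow the same pattern: list a job's output files
through the filter, then read them back. First, a minimal self-contained sketch
of that pattern; the output directory path is a hypothetical placeholder.

import java.io.BufferedReader;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.OutputLogFilter;

public class ReadJobOutput {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path outDir = new Path("/tmp/job-output"); // hypothetical output directory

    // The filter drops the _logs side directory, keeping only real output files.
    Path[] outputFiles = FileUtil.stat2Paths(
        fs.listStatus(outDir, new OutputLogFilter()));

    for (Path p : outputFiles) {
      BufferedReader reader =
          new BufferedReader(new InputStreamReader(fs.open(p)));
      try {
        String line;
        while ((line = reader.readLine()) != null) {
          System.out.println(line);
        }
      } finally {
        reader.close();
      }
    }
  }
}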


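From a Dirichlet clustering driver: the filter keeps the _logs directory out of the state-path listing before the cluster models are read back from sequence files.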
      double alpha = Double.parseDouble(alpha_0);
      DirichletState<VectorWritable> state = DirichletDriver.createState(modelFactory, modelPrototype,
        Integer.parseInt(prototypeSize), Integer.parseInt(numClusters), alpha);
      Path path = new Path(statePath);
      FileSystem fs = FileSystem.get(path.toUri(), job);
      FileStatus[] status = fs.listStatus(path, new OutputLogFilter());
      for (FileStatus s : status) {
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, s.getPath(), job);
        try {
          Text key = new Text();
          DirichletCluster<VectorWritable> cluster = new DirichletCluster<VectorWritable>();
          // ...


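From a MapReduce test case: after asserting that the job succeeded, the filter locates the real output files so the first line of output can be checked.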
    if (!r_job.isSuccessful()) {
      fail("Oops! The job broke due to an unexpected error");
    }
    Path[] outputFiles = FileUtil.stat2Paths(
        getFileSystem().listStatus(outDir,
        new OutputLogFilter()));
    if (outputFiles.length > 0) {
      InputStream is = getFileSystem().open(outputFiles[0]);
      BufferedReader reader = new BufferedReader(new InputStreamReader(is));
      String line = reader.readLine();
      //make sure we get what we expect as the first line, and also
      // ...

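Collecting the contents of every reduce output file and asserting that the number of reduces matches the expected results: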
      assertTrue("No counters found!", (numCounters > 0));
    }

    List<String> results = new ArrayList<String>();
    for (Path p : FileUtil.stat2Paths(dfs.getFileSystem().listStatus(outputPath,
                                new OutputLogFilter()))) {
      results.add(TestMiniMRWithDFS.readOutput(p, job));
    }
    assertEquals("number of reduces is wrong",
                 expectedResults.length, results.size());
    for (int i = 0; i < results.size(); i++) {
      // ...


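Loading per-cluster representative points from a state path, again passing the filter to listStatus so only genuine output files are read: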
  public static Map<Integer, List<VectorWritable>> getRepresentativePoints(Configuration conf, Path statePath) {
    Map<Integer, List<VectorWritable>> representativePoints = new HashMap<Integer, List<VectorWritable>>();
    try {
      FileSystem fs = FileSystem.get(statePath.toUri(), conf);
      FileStatus[] status = fs.listStatus(statePath, new OutputLogFilter());
      for (FileStatus s : status) {
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, s.getPath(), conf);
        try {
          IntWritable key = new IntWritable(0);
          VectorWritable point = new VectorWritable();
          // ...


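From a bad-records test: the known bad records are gathered, then the filtered job output is opened, presumably to compare its lines against them.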
    List<String> badRecs = new ArrayList<String>();
    badRecs.addAll(MAPPER_BAD_RECORDS);
    badRecs.addAll(REDUCER_BAD_RECORDS);
    Path[] outputFiles = FileUtil.stat2Paths(
        getFileSystem().listStatus(getOutputDir(),
        new OutputLogFilter()));
   
    if (outputFiles.length > 0) {
      InputStream is = getFileSystem().open(outputFiles[0]);
      BufferedReader reader = new BufferedReader(new InputStreamReader(is));
      String line = reader.readLine();
      // ...

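A variant of the Dirichlet state loader above, using a createState overload that takes no model prototype: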
    try {
      DirichletState<Vector> state = DirichletDriver.createState(modelFactory,
          Integer.parseInt(numClusters), Double.parseDouble(alpha_0));
      Path path = new Path(statePath);
      FileSystem fs = FileSystem.get(path.toUri(), job);
      FileStatus[] status = fs.listStatus(path, new OutputLogFilter());
      for (FileStatus s : status) {
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, s.getPath(),
            job);
        try {
          Text key = new Text();
          // ...

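Reading cube query results line by line from the files of a job output directory: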
  private static CubeQueryResult getCubeQueryResult(String outPath,
      CubeScan scan) throws Exception {
    CubeQueryResult cubeQueryResult = new CubeQueryResult();
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path[] fileList = FileUtil.stat2Paths(fs.listStatus(new Path(outPath), new OutputLogFilter()));
    for(Path path: fileList) {
      FSDataInputStream fsin = fs.open(path);
      BufferedReader reader = new BufferedReader(new InputStreamReader(fsin));
      String s = null;
      do {
      // ...
