Examples of HdfsDirectory


Examples of com.bah.lucene.hdfs.HdfsDirectory

/**
*/
public class WriteReadPerformanceTest {

    public static void main(String[] args) throws Exception {
        HdfsDirectory hdfsDirectory = new HdfsDirectory(new Configuration(), new Path("hdfs://localhost/index"));
        Directory directory = new BlockCacheDirectoryFactoryV2(new Configuration(), 1000000).newDirectory(
                "index", "shard1", hdfsDirectory, null
        );

        String[] names = new String[]{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j",
View Full Code Here

Examples of com.bah.lucene.hdfs.HdfsDirectory

            Path localIndexPath = new Path("/localindex");
            fileSystem.mkdirs(localIndexPath);
            fileSystem.copyFromLocalFile(new Path(localDir.getAbsolutePath() + "/"), localIndexPath);
            Path hdfsIndexPath = fileSystem.listStatus(localIndexPath)[0].getPath();

            HdfsDirectory hdfsDirectory = new HdfsDirectory(conf, hdfsIndexPath);
            Directory directory = new BlockCacheDirectoryFactoryV2(new Configuration(), 1000000).newDirectory(
                    "index", "shard1", hdfsDirectory, null
            );

            try (DirectoryReader reader = DirectoryReader.open(directory)) {
                IndexSearcher indexSearcher = new IndexSearcher(reader);

                long start = System.currentTimeMillis();
                Query query = new QueryParser(Version.LUCENE_43, "name", analyzer).parse("r");
                TopDocs search = indexSearcher.search(query, 1000);
                ScoreDoc[] scoreDocs = search.scoreDocs;
                System.out.println("Found " + scoreDocs.length + " num of documents from search in " +
                        (System.currentTimeMillis() - start) +
                        " ms. Total[" + search.totalHits + "]");

                start = System.currentTimeMillis();
                for (ScoreDoc scoreDoc : scoreDocs) {
                    Document doc = indexSearcher.doc(scoreDoc.doc);
                    assert doc != null;
                }
                System.out.println("Took [" + (System.currentTimeMillis() - start) + "] ms to retrieve all docs");
            }

            hdfsDirectory.close();
            directory.close();
        } finally {
            localDirectory.close();
            FileDeleteStrategy.FORCE.delete(localDir);
        }
View Full Code Here

Examples of org.apache.blur.store.hdfs.HdfsDirectory

      SortedSet<FileStatus> listStatus = sort(fileSystem.listStatus(path));
      List<HdfsDirectory> indexesToImport = new ArrayList<HdfsDirectory>();
      for (FileStatus fileStatus : listStatus) {
        Path file = fileStatus.getPath();
        if (fileStatus.isDir() && file.getName().endsWith(".commit")) {
          HdfsDirectory hdfsDirectory = new HdfsDirectory(configuration, file);
          if (!DirectoryReader.indexExists(hdfsDirectory)) {
            LOG.error("Directory found at [{0}] is not a vaild index.", file);
          } else {
            indexesToImport.add(hdfsDirectory);
          }
View Full Code Here

Examples of org.apache.blur.store.hdfs.HdfsDirectory

    Path tablePath = new Path(getTableDescriptor(table).tableUri);
    Path hdfsDirPath = new Path(tablePath, shard);

    BlurLockFactory lockFactory = new BlurLockFactory(_configuration, hdfsDirPath, _nodeName, BlurUtil.getPid());

    Directory directory = new HdfsDirectory(_configuration, hdfsDirPath);
    directory.setLockFactory(lockFactory);

    TableDescriptor descriptor = _clusterStatus.getTableDescriptor(true, _cluster, table);
    TableContext tableContext = TableContext.create(descriptor);
    ShardContext shardContext = ShardContext.create(tableContext, shard);
View Full Code Here

Examples of org.apache.blur.store.hdfs.HdfsDirectory

    ShardContext shardContext = new ShardContext();
    shardContext.tableContext = tableContext;
    shardContext.walShardPath = new Path(tableContext.getWalTablePath(), shard);
    shardContext.hdfsDirPath = new Path(tableContext.getTablePath(), shard);
    shardContext.shard = shard;
    shardContext.directory = new HdfsDirectory(tableContext.getConfiguration(), shardContext.hdfsDirPath);
    return shardContext;
  }
View Full Code Here

Examples of org.apache.blur.store.hdfs.HdfsDirectory

      _maxDocumentBufferSize = BlurOutputFormat.getMaxDocumentBufferSize(configuration);
      Path tableOutput = BlurOutputFormat.getOutputPath(configuration);
      String shardName = BlurUtil.getShardName(BlurConstants.SHARD_PREFIX, shardId);
      Path indexPath = new Path(tableOutput, shardName);
      _newIndex = new Path(indexPath, tmpDirName);
      _finalDir = new ProgressableDirectory(new HdfsDirectory(configuration, _newIndex),
          BlurOutputFormat.getProgressable());
      _finalDir.setLockFactory(NoLockFactory.getNoLockFactory());

      TableContext tableContext = TableContext.create(tableDescriptor);
      _fieldManager = tableContext.getFieldManager();
View Full Code Here

Examples of org.apache.blur.store.hdfs.HdfsDirectory

    file = new File(TMPDIR, "hdfsdirectorytest");
    rm(file);
    URI uri = new File(file, "hdfs").toURI();
    Path hdfsDirPath = new Path(uri.toString());
    Configuration conf = new Configuration();
    directory = new HdfsDirectory(conf, hdfsDirPath);
    seed = new Random().nextLong();
    random = new Random(seed);
  }
View Full Code Here

Examples of org.apache.blur.store.hdfs.HdfsDirectory

    return new BufferedDirectory(destDirectory, 32768);
  }

  protected Directory getDestDirectory(Configuration configuration, TableDescriptor descriptor, Path directoryPath)
      throws IOException {
    return new HdfsDirectory(configuration, directoryPath);
  }
View Full Code Here

Examples of org.apache.blur.store.hdfs.HdfsDirectory

    System.out.println("Counters: " + ctrs);

    Path path = new Path(tableUri, BlurUtil.getShardName(0));
    Collection<Path> commitedTasks = getCommitedTasks(path);
    assertEquals(1, commitedTasks.size());
    DirectoryReader reader = DirectoryReader.open(new HdfsDirectory(conf, commitedTasks.iterator().next()));
    assertEquals(2, reader.numDocs());
    reader.close();
  }
View Full Code Here

Examples of org.apache.blur.store.hdfs.HdfsDirectory

    Path path = new Path(tableUri, BlurUtil.getShardName(0));
    Collection<Path> commitedTasks = getCommitedTasks(path);
    assertEquals(1, commitedTasks.size());

    DirectoryReader reader = DirectoryReader.open(new HdfsDirectory(conf, commitedTasks.iterator().next()));
    assertEquals(80000, reader.numDocs());
    reader.close();
  }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.