Package: com.ikanow.infinit.e.data_model.index

Examples of com.ikanow.infinit.e.data_model.index.ElasticSearchManager


        queryFields.append(DocumentPojo.url_, 1);
        queryFields.append(DocumentPojo.index_, 1);
        queryFields.append(DocumentPojo.sourceKey_, 1);
       
        DBCursor cur = documentDb.find(query, queryFields).batchSize(100);
        ElasticSearchManager esm = null;
        ElasticSearchManager esm_base = ElasticSearchManager.getIndex("document_index");
        String sIndex = null;

        while (cur.hasNext())
        {
          if (bKillMeNow) {
View Full Code Here


        for (ObjectId sCommunityId: sp.getCommunityIds()) {
          sb.append(",doc_").append(sCommunityId.toString());
        }
        sb.append("/document_index");
       
        ElasticSearchManager esm = ElasticSearchManager.getIndex(sb.toString());
               
        SearchRequestBuilder searchOptions = esm.getSearchOptions();
        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
        boolQuery.must(QueryBuilders.rangeQuery(DocumentPojo.created_).from(cleanseStartTime));
        boolQuery.must(QueryBuilders.termQuery(DocumentPojo.sourceKey_, sp.getKey() ));
        searchOptions.setSize(200); // (note this is multiplied by the number of primary shards)
        searchOptions.setSearchType(SearchType.SCAN);
        searchOptions.setScroll("10m");
        SearchResponse rsp = esm.doQuery(boolQuery, searchOptions);
        String scrollId = rsp.getScrollId();
        int nSkip = 0;
       
        for (;;) // Until no more hits
        {
          rsp = esm.doScrollingQuery(scrollId, "10m");
          SearchHit[] docs = rsp.getHits().getHits();
          scrollId = rsp.getScrollId();
         
          if ((null == docs) || (0 == docs.length)) {
            break;
          }         
          if (docs.length > 100) { // just display large checks)
            logger.info("Checking ES docs for large source=" + sp.getKey() + " source: " + source_index + "/" + source_count + " from " + nSkip + " to " + (nSkip+docs.length) );
          }
         
          //Check all solr docs against mongodb
         
          for (SearchHit hit: docs)
          {
            String idStr = hit.getId();
            boolean found = true; //(fail closed!)
            if (null == dbCache) {
              //OBSOLETED, USE DBCACHE INSTEAD (WHERE AVAILABLE):
              ObjectId id = new ObjectId(idStr);
              BasicDBObject query = new BasicDBObject(DocumentPojo._id_, id);
              query.put(DocumentPojo.sourceKey_, sp.getKey()); // (ensures uses only the right shard)
              DBObject dbo = documentDb.findOne(query, queryFields);
              found = (dbo != null);
            }//TESTED
            else {
              found = dbCache.contains(idStr);
            }//TESTED
            if (!found)
            {       
              ObjectId id = new ObjectId(idStr);
              DocumentPojo doc = new DocumentPojo();
              doc.setId(id);
              doc.setIndex(hit.getIndex() + "/document_index");
              docs_to_remove.add(doc);
              logger.info("db sync removing doc: " + id + "/" + hit.getIndex() + "/" + source_index + " not found in mongo");
              fixcount++;
            } // end if not found
          } // end loop over docs to check
         
          nSkip += docs.length;
        }// until no more hits
        if (!docs_to_remove.isEmpty()) {
          storeManager.removeFromSearch(docs_to_remove);
          docs_to_remove.clear();
        }
       
        //CHECK OLD FEEDS 10 at atime
        int iteration = 1;
        boolean removedAll = true;
        while (removedAll )
        {
          int rows = iteration*iteration*10;//exponential scaling 10x^2
          iteration++;
          int oldfixes = 0;
         
          //get old docs from es
          SearchRequestBuilder searchOptionsOLD = esm.getSearchOptions();
          BoolQueryBuilder boolQueryOLD = QueryBuilders.boolQuery();
          boolQueryOLD.must(QueryBuilders.rangeQuery(DocumentPojo.created_).from(cleanseStartTime));
          boolQueryOLD.must(QueryBuilders.termQuery(DocumentPojo.sourceKey_, sp.getKey()));
          searchOptionsOLD.addSort(DocumentPojo.created_, SortOrder.ASC);
          searchOptionsOLD.setSize(rows);
          SearchResponse rspOLD = esm.doQuery(boolQueryOLD, searchOptionsOLD);
          SearchHit[] docsOLD = rspOLD.getHits().getHits();
         
          //Check all solr docs against mongodb
         
          for (SearchHit hit: docsOLD)        
View Full Code Here

 
  @SuppressWarnings("unused")
  private void doUnitTest(String sMongoDbHost, String sMongoDbPort, String sElasticHost, String sElasticPort,
      BasicDBObject query, int nLimit)
  {   
    ElasticSearchManager elasticManager = null;
     
    try {
      // Initialize the DB:
     
      DBCollection feedsDB = DbManager.getDocument().getMetadata();
      DBCollection contentDB = DbManager.getDocument().getContent();
      DBCollection sourcesDB = DbManager.getIngest().getSource();

      String indexName = "document_index";
     
      // Test/debug recreate the index
      if (true) {
       
        // (delete the index)
        System.out.println("Deleting index...");
        elasticManager = ElasticSearchManager.getIndex(indexName, sElasticHost + ":" + sElasticPort);
        elasticManager.deleteMe();
        //(also deletes the child index - same index, different type)

        // Create the index if necessary
        String sMapping = new Gson().toJson(new DocumentPojoIndexMap.Mapping(), DocumentPojoIndexMap.Mapping.class);
View Full Code Here

  {
    long nSysTime = System.currentTimeMillis();   

    ResponsePojo rp = new ResponsePojo();

    ElasticSearchManager gazIndex = ElasticSearchManager.getIndex(entityIndex_);

    // Need to do a quick decomposition of the term to fit in with analyzed strings
    String escapedterm = null;
    StandardTokenizer st = new StandardTokenizer(Version.LUCENE_30, new StringReader(ContentUtils.stripDiacritics(term)));
    CharTermAttribute termAtt = st.addAttribute(CharTermAttribute.class);
    StringBuffer sb = new StringBuffer();
    try {
      try {
        st.reset();
        while (st.incrementToken()) {
          if (sb.length() > 0) {
            sb.append(" +");
          }
          else {
            sb.append('+');           
          }
          sb.append(luceneEncodeTerm(termAtt.toString()));
        }
      }
      finally {
        st.close();     
      }
    } catch (IOException e) {
      e.printStackTrace();
    }   

    if (!term.endsWith(" ") || (0 == sb.length())) { // Could be in the middle of typing, stick a * on the end
      sb.append('*');
    }//TESTED     
    escapedterm = sb.toString();     

    // Create the search query

    SearchRequestBuilder searchOptions = gazIndex.getSearchOptions();
    BaseQueryBuilder queryObj1 = QueryBuilders.queryString(escapedterm).defaultField(EntityFeaturePojoIndexMap.Mapping.RootObject.RootProperties.alias_pri_);

    String[] communityIdStrs = SocialUtils.getCommunityIds(userIdStr, communityIdStrList);
    BaseQueryBuilder queryObj2 = QueryBuilders.boolQuery().should(QueryBuilders.termsQuery(EntityFeaturePojo.communityId_, communityIdStrs));

    BaseQueryBuilder queryObj = QueryBuilders.boolQuery().must(queryObj1).must(queryObj2);

    searchOptions.addSort(EntityFeaturePojo.doccount_, SortOrder.DESC);
    searchOptions.addFields(EntityFeaturePojo.disambiguated_name_, EntityFeaturePojo.doccount_,
        EntityFeaturePojo.type_, EntityFeaturePojo.dimension_);
    if (bIncludeGeo) {
      searchOptions.addFields(EntityFeaturePojo.geotag_);
      searchOptions.addFields(EntityFeaturePojo.ontology_type_);
    }
    if (bIncludeLinkdata) {
      searchOptions.addFields(EntityFeaturePojo.linkdata_);     
    }

    // Initial alias handling:

    AliasLookupTable aliasTable = null;
    HashMap<String, SearchSuggestPojo> aliasResults = null;
    if (!bWantNoAlias) {
      AliasManager aliasManager = AliasManager.getAliasManager();
      if (null != aliasManager) {
        aliasTable = aliasManager.getAliasLookupTable(communityIdStrList, communityIdStrs, null, userIdStr);
      }
    }
    //TESTED

    // Also create an internal Lucene index for aliases, in case any of them do not have actual entities representing them
    List<EntityFeaturePojo> extraEntries = null;
    if (null != aliasTable) {
      extraEntries = checkAliasMasters(aliasTable, escapedterm);
    }
    // (end initial alias handling)

    int nDesiredSize = 20;
    if (null == aliasTable) {   
      searchOptions.setSize(nDesiredSize); // will forward all 20
    }
    else {
      searchOptions.addFields(EntityFeaturePojo.index_);
      searchOptions.setSize(3*nDesiredSize); // will forward top 20 after de-aliasing

      aliasResults = new HashMap<String, SearchSuggestPojo>();
      // (We use this to ensure we only include each entity once after aliasing)
    }
    //TESTED

    // Perform the search

    SearchResponse rsp = gazIndex.doQuery(queryObj, searchOptions);

    // Format the return values

    SearchHit[] docs = rsp.getHits().getHits();     
    DimensionListPojo dimlist = new DimensionListPojo();
View Full Code Here

        if (null != aliasManager) {
          aliasTable = aliasManager.getAliasLookupTable(communityIdStrList, communityIdStrs, null, userIdStr);
        }
      }//TESTED                   

      ElasticSearchManager esm = ElasticSearchManager.getIndex(assocIndex_);
      SearchRequestBuilder searchOptions = esm.getSearchOptions();
      BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
      boolean bExtraQueryTerms = false;
      String term = "";
      if ( !ent1.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.entity1_) )
          term = ent1;
        else {
          bExtraQueryTerms = true;
          EntityFeaturePojo alias = null;
          if (null != aliasTable) {
            alias = aliasTable.getAliasMaster(ent1);
          }
          if (null != alias) { // Found!
            boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.entity1_index_, alias.getAlias().toArray()));
          }
          else {
            boolQuery.must(QueryBuilders.termQuery(AssociationFeaturePojo.entity1_index_, ent1));
          }//TESTED
        }
      }
      if ( !verb.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.verb_) )
          term = verb;
        else
        {
          bExtraQueryTerms = true;
          boolQuery.must(QueryBuilders.queryString(new StringBuffer("+").append(verb.replaceAll("\\s+", " +")).toString()).
              defaultField(AssociationFeaturePojo.verb_));
        }
      }
      if ( !ent2.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.entity2_) )
          term = ent2;
        else {
          bExtraQueryTerms = true;
          EntityFeaturePojo alias = null;
          if (null != aliasTable) {
            alias = aliasTable.getAliasMaster(ent2);
          }
          if (null != alias) { // Found!
            boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.entity2_index_, alias.getAlias().toArray()));
          }
          else {
            boolQuery.must(QueryBuilders.termQuery(AssociationFeaturePojo.entity2_index_, ent2));
          }
        }//TESTED (cut and paste from entity1)
     

      String escapedterm = null;
      StandardTokenizer st = new StandardTokenizer(Version.LUCENE_30, new StringReader(ContentUtils.stripDiacritics(term)));
      CharTermAttribute termAtt = st.addAttribute(CharTermAttribute.class);
      StringBuffer sb = new StringBuffer();
      try {
        try {
          st.reset();
          while (st.incrementToken()) {
            if (sb.length() > 0) {
              sb.append(" +");
            }
            else {
              sb.append('+');           
            }
            sb.append(luceneEncodeTerm(termAtt.toString()));
          }
        }
        finally {
          st.close();
        }
      } catch (IOException e) {
        e.printStackTrace();
      }     
      if (!term.endsWith(" ") || (0 == sb.length())) { // Could be in the middle of typing, stick a * on the end
        sb.append('*');
      }//TESTED     

      escapedterm = sb.toString();
     
      // Also create an internal Lucene index for aliases, in case any of them do not have actual entities representing them
      List<EntityFeaturePojo> extraEntries = null;
      BoolQueryBuilder extraQueryTerms = null;
      if (field.startsWith("entity")) {
        String indexField = field.startsWith("entity1") ? "entity1_index" : "entity2_index";
        if (null != aliasTable) {
          extraEntries = checkAliasMasters(aliasTable, escapedterm);
        }
        if (null != extraEntries) {
          extraQueryTerms = QueryBuilders.boolQuery();
          int nExtraTerms = 0;
          Iterator<EntityFeaturePojo> aliasIt = extraEntries.iterator();
          while (aliasIt.hasNext()) {
            EntityFeaturePojo alias = aliasIt.next();           
            nExtraTerms += alias.getAlias().size();
           
            if (!bExtraQueryTerms && (nExtraTerms > 20)) { // If not filtering on event type we'll be more aggressive
              break;
            }//TESTED
            if (bExtraQueryTerms && (nExtraTerms > 60)) { // If the number of terms gets too large bail anyway
              break;
            }//TESTED
           
            extraQueryTerms.should(QueryBuilders.termsQuery(indexField, alias.getAlias().toArray()));
            aliasIt.remove();
           
          }//end loop over entities
        }//if found new aliases
       
      }//(if this is an entity lookup) TESTED - including breaking out because of # of terms
     
      // (end initial alias handling)
     
      if (null == extraQueryTerms) {
        boolQuery.must(QueryBuilders.queryString(escapedterm).defaultField(field));
      }
      else {//(in this case combine the escaped term with the aliases
        extraQueryTerms.should(QueryBuilders.queryString(escapedterm).defaultField(field));
        boolQuery.must(extraQueryTerms);
      }//TESTED
      boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.communityId_, communityIdStrs));

      searchOptions.addSort(AssociationFeaturePojo.doccount_, SortOrder.DESC);

      // Work out which fields to return:
      //TODO (INF-1234) need to work out what to do with quotations and similar here (ie entityX without entityX_index)
      String returnfield;
      boolean bReturningEntities = true;
      if ( field.equals(AssociationFeaturePojo.entity1_) ) {
        returnfield = AssociationFeaturePojo.entity1_index_;
        searchOptions.addFields( AssociationFeaturePojo.entity1_index_, AssociationFeaturePojo.doccount_);
      }
      else if ( field.equals(AssociationFeaturePojo.entity2_)) {
        returnfield = AssociationFeaturePojo.entity2_index_;
        searchOptions.addFields( AssociationFeaturePojo.entity2_index_, AssociationFeaturePojo.doccount_);
      }
      else {
        bReturningEntities = false;
        returnfield = AssociationFeaturePojo.verb_;
        searchOptions.addFields( AssociationFeaturePojo.verb_, AssociationFeaturePojo.verb_category_,  AssociationFeaturePojo.doccount_);
      }

      int nNumSuggestionsToReturn = 20;
      if (bReturningEntities && (null != aliasTable)) {
        searchOptions.setSize(3*nNumSuggestionsToReturn); // we're going to remove some duplicates so get more than we need
      }
      else { // normal case
        searchOptions.setSize(nNumSuggestionsToReturn);
      }

      SearchResponse rsp = esm.doQuery(boolQuery, searchOptions);
      SearchHit[] docs = rsp.getHits().getHits();

      //Currently this code takes the results and puts
      //them into a set so there are no duplicates
      //duplicates occur for example when you search for
View Full Code Here

            commids.add(new ObjectId(s));
          boolean bAdmin = RESTTools.adminLookup(userid);
          //make sure user is allowed to submit on behalf of the commids given
          if ( bAdmin || isInAllCommunities(commids, userid) )
          {
            ElasticSearchManager customIndex = CustomOutputIndexingEngine.getExistingIndex(cmr);
            if (null != customIndex) {
              CustomOutputIndexingEngine.swapAliases(customIndex, commids, true)
            }//TESTED (by hand - removal and deletion)           
           
            cmr.communityIds = commids;
View Full Code Here

 
  // PROCESSING LOOP (new interface)
 
  private void doTransfer(BasicDBObject query, int nSkip, int nLimit, BasicDBObject chunk)
  {   
    ElasticSearchManager elasticManager = null;
       
    // Initialize the DB:
    DBCollection entityFeatureDB = DbManager.getFeature().getEntity();
   
    // Initialize the ES (create the index if it doesn't already):
         
// 1. Set-up the entity feature index

    String indexName = "entity_index";
    ElasticSearchManager.setDefaultClusterName("infinite-aws");
   
    // (delete the index)
    //elasticManager = ElasticSearchManager.getIndex(indexName);
    //elasticManager.deleteMe();
   
    // Create the index if necessary
    String sMapping = new Gson().toJson(new EntityFeaturePojoIndexMap.Mapping(), EntityFeaturePojoIndexMap.Mapping.class);
    Builder localSettings = ImmutableSettings.settingsBuilder();
    localSettings.put("number_of_shards", 1).put("number_of_replicas", 0);     
    localSettings.put("index.analysis.analyzer.suggestAnalyzer.tokenizer", "standard");
    localSettings.putArray("index.analysis.analyzer.suggestAnalyzer.filter", "standard", "lowercase");
   
    elasticManager = ElasticSearchManager.createIndex(indexName, null, false,  null, sMapping, localSettings);
   
    // Get the index (necessary if already created)
    if (null == elasticManager)
    {
      elasticManager = ElasticSearchManager.getIndex(indexName);
    }
   
// Now query the DB:
   
    DBCursor dbc = null;
    dbc = entityFeatureDB.find(query);
    if (null != chunk) {
      if (chunk.containsField(DbManager.min_)) {
        dbc = dbc.addSpecial(DbManager.min_, chunk.get(DbManager.min_));
      }
      if (chunk.containsField(DbManager.max_)) {
        dbc = dbc.addSpecial(DbManager.max_, chunk.get(DbManager.max_));
      }
    }
    dbc = dbc.skip(nSkip).limit(nLimit).batchSize(1000);
    if (null == chunk) {
      int nCount = dbc.count() - nSkip;
      if (nCount < 0) nCount = 0;
      System.out.println("Found " + nCount + " records to sync, process first " + (0==nLimit?nCount:nLimit));
      if (0 == nCount) { // Nothing to do...
        return;
      }     
    }   
   
    int nSynced = 0;
   
    List<EntityFeaturePojo> entities = new ArrayList<EntityFeaturePojo>();
    while ( dbc.hasNext() )
    {
      EntityFeaturePojo feature = EntityFeaturePojo.fromDb(dbc.next(),EntityFeaturePojo.class);
       
      if (null != feature.getAlias()) { // (some corrupt gazateer entry)

        // Handle groups (system group is: "4c927585d591d31d7b37097a")
        // if there is no community id, add system group (something is wrong if this happens?)
        if (null == feature.getCommunityId())
        {
          feature.setCommunityId(new ObjectId("4c927585d591d31d7b37097a"));           
        }
      }
     
      entities.add(feature);
      nSynced++;
     
      // Add the entities
      if ( entities.size() > 1000 )
      {
        elasticManager.bulkAddDocuments(
            IndexManager.mapListToIndex(entities, EntityFeaturePojo.listType(), new EntityFeaturePojoIndexMap()),
            "_id", null, true);
          // (note EntityFeaturePojoIndexMap creates an "_id" field of the format index:community)
       
        entities = new ArrayList<EntityFeaturePojo>();
      }
    }
    //write whatevers left
    elasticManager.bulkAddDocuments(
        IndexManager.mapListToIndex(entities, EntityFeaturePojo.listType(), new EntityFeaturePojoIndexMap()),
        "_id", null, true);
      // (note EntityFeaturePojoIndexMap creates an "_id" field of the format index:community)
   
    if (null != chunk) {
View Full Code Here

  {   
    try
    {
      // Initialize the DB: 
      DBCollection entityFeatureDB = DbManager.getFeature().getEntity();
      ElasticSearchManager elasticManager = ElasticSearchManager.getIndex("entity_index");
     
      BasicDBObject fields = new BasicDBObject();
      fields.put(EntityFeaturePojo.index_, 1);
      fields.put(EntityFeaturePojo.communityId_, 1);
     
View Full Code Here

  @SuppressWarnings("unused")
  private void doUnitTestCode(String sMongoDbHost, String sMongoDbPort, String sElasticHost, String sElasticPort,
      BasicDBObject query, int nLimit)
  {   
    Mongo mongoDB = null;
    ElasticSearchManager elasticManager = null;
   
   
    try {
      // Initialize the DB:
     
View Full Code Here

 
  Map<String, SourcePojo> _sourceCache = new HashMap<String, SourcePojo>();
 
  private void doTransfer(BasicDBObject query, int nSkip, int nLimit, BasicDBObject chunk)
  {   
    ElasticSearchManager elasticManager = null;   
   
    // Initialize the DB:
    DBCollection eventFeatureDB = DbManager.getFeature().getAssociation();
   
    // Initialize the ES (create the index if it doesn't already):
         
// 1. Set-up the entity feature index

    ElasticSearchManager.setDefaultClusterName("infinite-aws");
   
    // (delete the index)
    //elasticManager = ElasticSearchManager.getIndex("association_index");
    //elasticManager.deleteMe();
   
    // Create the index if necessary
    String sMapping = new Gson().toJson(new AssociationFeaturePojoIndexMap.Mapping(), AssociationFeaturePojoIndexMap.Mapping.class);
    Builder localSettings = ImmutableSettings.settingsBuilder();
    localSettings.put("number_of_shards", 1).put("number_of_replicas", 0);     
    localSettings.put("index.analysis.analyzer.suggestAnalyzer.tokenizer", "standard");
    localSettings.putArray("index.analysis.analyzer.suggestAnalyzer.filter", "standard", "lowercase");
   
    elasticManager = ElasticSearchManager.createIndex("association_index", null, false, null, sMapping, localSettings);
   
    // Get the index (necessary if already created)
    if (null == elasticManager)
    {
      elasticManager = ElasticSearchManager.getIndex("association_index");
    }
   
// Now query the DB:
   
    DBCursor dbc = null;
    dbc = eventFeatureDB.find(query);
    if (null != chunk) {
      if (chunk.containsField(DbManager.min_)) {
        dbc = dbc.addSpecial(DbManager.min_, chunk.get(DbManager.min_));
      }
      if (chunk.containsField(DbManager.max_)) {
        dbc = dbc.addSpecial(DbManager.max_, chunk.get(DbManager.max_));
      }
    }
    dbc = dbc.skip(nSkip).limit(nLimit).batchSize(1000);
    if (null == chunk) {
      int nCount = dbc.count() - nSkip;
      if (nCount < 0) nCount = 0;
      System.out.println("Found " + nCount + " records to sync, process first " + (0==nLimit?nCount:nLimit));
      if (0 == nCount) { // Nothing to do...
        return;
      }     
    }   
   
    List<AssociationFeaturePojo> events = new LinkedList<AssociationFeaturePojo>();
   
    int nSynced = 0;
   
    // Loop over array and invoke the cleansing function for each one
    while ( dbc.hasNext() )
    {
      BasicDBObject dbo = (BasicDBObject) dbc.next();
      AssociationFeaturePojo evt = AssociationFeaturePojo.fromDb(dbo,AssociationFeaturePojo.class);
     
      // If this table has just been rebuilt from the document then the indexes are all wrong ...
      // recalculate and save
      if ('#' == evt.getIndex().charAt(0)) {
        AssociationPojo singleEvt = new AssociationPojo();
        singleEvt.setEntity1_index(evt.getEntity1_index());
        singleEvt.setEntity2_index(evt.getEntity2_index());
        singleEvt.setVerb_category(evt.getVerb_category());
        singleEvt.setGeo_index(evt.getGeo_index());
        evt.setIndex(AssociationAggregationUtils.getEventFeatureIndex(singleEvt));
        eventFeatureDB.update(new BasicDBObject("_id", dbo.get("_id")),
                      new BasicDBObject(MongoDbManager.set_,
                          new BasicDBObject(AssociationFeaturePojo.index_, evt.getIndex())), false, true);
          // (has to be a multi-update even though it's unique because it's sharded on index)
      }
     
      // Handle groups (system group is: "4c927585d591d31d7b37097a")
      if (null == evt.getCommunityId())
      {
        evt.setCommunityId(new ObjectId("4c927585d591d31d7b37097a"));
      }
      // Bulk add prep
      events.add(evt);
      nSynced++;
         
      if ( events.size() > 1000 )
      {
        elasticManager.bulkAddDocuments(IndexManager.mapListToIndex(events, AssociationFeaturePojo.listType(), new AssociationFeaturePojoIndexMap()), "_id", null,true);
        events.clear();
      }
    }
     // End loop over entities
   
    //write whatevers left
    elasticManager.bulkAddDocuments(IndexManager.mapListToIndex(events, AssociationFeaturePojo.listType(), new AssociationFeaturePojoIndexMap()), "_id", null,true);
   
    if (null != chunk) {
      System.out.println("Found " + nSynced + " records to sync in chunk");
    }       
  }
View Full Code Here

TOP

Related Classes of com.ikanow.infinit.e.data_model.index.ElasticSearchManager

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.