Examples of TokenizerFactory


Examples of org.apache.solr.analysis.TokenizerFactory

    SimpleOrderedMap<Object> aninfo = new SimpleOrderedMap<Object>();
    aninfo.add("className", analyzer.getClass().getName());
    if (analyzer instanceof TokenizerChain) {
       SimpleOrderedMap<Object> tokenizer = new SimpleOrderedMap<Object>();
         TokenizerChain tchain = (TokenizerChain)analyzer;
         TokenizerFactory tfac = tchain.getTokenizerFactory();
         tokenizer.add("className", tfac.getClass().getName());
         tokenizer.add("args", tfac.getArgs());
         aninfo.add("tokenizer", tokenizer);
         TokenFilterFactory[] filtfacs = tchain.getTokenFilterFactories();
        
        SimpleOrderedMap<Map<String, Object>> filters = new SimpleOrderedMap<Map<String, Object>>();
        for (TokenFilterFactory filtfac : filtfacs) {
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

   *
   * @return a Lucene <code>Analyzer</code>
   */
  public static Analyzer buildAnalyzer(AnalyzerDef analyzerDef, Version luceneMatchVersion) {
    TokenizerDef token = analyzerDef.tokenizer();
    TokenizerFactory tokenFactory = ( TokenizerFactory ) instantiate( token.factory() );
    tokenFactory.init( getMapOfParameters( token.params(), luceneMatchVersion ) );

    final int length = analyzerDef.filters().length;
    final int charLength = analyzerDef.charFilters().length;
    TokenFilterFactory[] filters = new TokenFilterFactory[length];
    CharFilterFactory[] charFilters = new CharFilterFactory[charLength];
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

   * @param analyzerDef The <code>AnalyzerDef</code> annotation as found in the annotated domain class.
   * @return a Lucene <code>Analyzer</code>
   */
  public static Analyzer buildAnalyzer(AnalyzerDef analyzerDef) {
    TokenizerDef token = analyzerDef.tokenizer();
    TokenizerFactory tokenFactory = ( TokenizerFactory ) instantiate( token.factory() );
    tokenFactory.init( getMapOfParameters( token.params() ) );

    final int length = analyzerDef.filters().length;
    TokenFilterFactory[] filters = new TokenFilterFactory[length];
    ResourceLoader resourceLoader = new HibernateSearchResourceLoader();
    for ( int index = 0 ; index < length ; index++ ) {
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

    NodeList nList = (NodeList)xpath.evaluate("./filter", node, XPathConstants.NODESET);

    if (tokNode==null){
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,"analyzer without class or tokenizer & filter list");
    }
    TokenizerFactory tfac = readTokenizerFactory(tokNode);

    /******
    // oops, getChildNodes() includes text (newlines, etc) in addition
    // to the actual child elements
    NodeList nList = node.getChildNodes();
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

  // Parses a <tokenizer class="solr.StandardTokenizerFactory"/> element from the
  // schema into a configured TokenizerFactory. (The original example comment named
  // solr.StandardFilterFactory, a token-filter class — that would fail the
  // TokenizerFactory cast below, so a tokenizer factory class is shown instead.)
  private TokenizerFactory readTokenizerFactory(Node node) {
    // if (node.getNodeName() != "tokenizer") return null;
    NamedNodeMap attrs = node.getAttributes();
    // "class" is mandatory; "tokenizer" is the element name used in error messages.
    String className = DOMUtil.getAttr(attrs,"class","tokenizer");
    // Instantiate by class name, then pass all remaining attributes as init args.
    TokenizerFactory tfac = (TokenizerFactory)Config.newInstance(className);
    tfac.init(DOMUtil.toMapExcept(attrs,"class"));
    return tfac;
  }
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

   * @return a Lucene <code>Analyzer</code>
   */
  public static Analyzer buildAnalyzer(AnalyzerDef analyzerDef, Version luceneMatchVersion) {
    ResourceLoader defaultResourceLoader = new HibernateSearchResourceLoader();
    TokenizerDef token = analyzerDef.tokenizer();
    TokenizerFactory tokenFactory = instanceFromClass( TokenizerFactory.class, token.factory(), "Tokenizer factory" );
    final Map<String, String> tokenMapsOfParameters = getMapOfParameters( token.params(), luceneMatchVersion );
    tokenFactory.init( tokenMapsOfParameters );
    injectResourceLoader( tokenFactory, defaultResourceLoader, tokenMapsOfParameters );

    final int length = analyzerDef.filters().length;
    final int charLength = analyzerDef.charFilters().length;
    TokenFilterFactory[] filters = new TokenFilterFactory[length];
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

   * @param analyzerDef The <code>AnalyzerDef</code> annotation as found in the annotated domain class.
   * @return a Lucene <code>Analyzer</code>
   */
  public static Analyzer buildAnalyzer(AnalyzerDef analyzerDef) {
    TokenizerDef token = analyzerDef.tokenizer();
    TokenizerFactory tokenFactory = ( TokenizerFactory ) instantiate( token.factory() );
    tokenFactory.init( getMapOfParameters( token.params() ) );

    final int length = analyzerDef.filters().length;
    final int charLength = analyzerDef.charFilters().length;
    TokenFilterFactory[] filters = new TokenFilterFactory[length];
    CharFilterFactory[] charFilters = new CharFilterFactory[charLength];
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

   * @param analyzerDef The <code>AnalyzerDef</code> annotation as found in the annotated domain class.
   * @return a Lucene <code>Analyzer</code>
   */
  public static Analyzer buildAnalyzer(AnalyzerDef analyzerDef) {
    TokenizerDef token = analyzerDef.tokenizer();
    TokenizerFactory tokenFactory = ( TokenizerFactory ) instantiate( token.factory() );
    tokenFactory.init( getMapOfParameters( token.params() ) );

    final int length = analyzerDef.filters().length;
    TokenFilterFactory[] filters = new TokenFilterFactory[length];
    ResourceLoader resourceLoader = new HibernateSearchResourceLoader();
    for ( int index = 0 ; index < length ; index++ ) {
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

   * @return a Lucene <code>Analyzer</code>
   */
  public static Analyzer buildAnalyzer(AnalyzerDef analyzerDef, Version luceneMatchVersion) {
    ResourceLoader defaultResourceLoader = new HibernateSearchResourceLoader();
    TokenizerDef token = analyzerDef.tokenizer();
    TokenizerFactory tokenFactory = instanceFromClass( TokenizerFactory.class, token.factory(), "Tokenizer factory" );
    final Map<String, String> tokenMapsOfParameters = getMapOfParameters( token.params(), luceneMatchVersion );
    tokenFactory.init( tokenMapsOfParameters );
    injectResourceLoader( tokenFactory, defaultResourceLoader, tokenMapsOfParameters );

    final int length = analyzerDef.filters().length;
    final int charLength = analyzerDef.charFilters().length;
    TokenFilterFactory[] filters = new TokenFilterFactory[length];
View Full Code Here

Examples of org.apache.solr.analysis.TokenizerFactory

    return Collections.unmodifiableMap( initializedAnalyzers );
  }

  private Analyzer buildAnalyzer(AnalyzerDef analyzerDef) {
    TokenizerDef token = analyzerDef.tokenizer();
    TokenizerFactory tokenFactory = (TokenizerFactory) instantiate( token.factory() );
    tokenFactory.init( getMapOfParameters( token.params() ) );

    final int length = analyzerDef.filters().length;
    TokenFilterFactory[] filters = new TokenFilterFactory[length];
    for ( int index = 0 ; index < length ; index++ ) {
      TokenFilterDef filterDef = analyzerDef.filters()[index];
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.