Package org.apache.solr.common

Examples of org.apache.solr.common.SolrException

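The snippets on this page are drawn largely from Solr's schema-parsing code (IndexSchema) and a few analysis factories. The recurring pattern is a SolrException carrying an ErrorCode that maps to an HTTP status: BAD_REQUEST for bad client input, SERVER_ERROR for configuration problems. Below is a minimal, self-contained sketch of that pattern, assuming only the SolrException API itself; the field-type lookup and the names in it are illustrative, not taken from the snippets that follow.

import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;

import java.util.Collections;
import java.util.Map;

public class SolrExceptionBasics {

  // Illustrative lookup: an unknown name is a client error, so BAD_REQUEST (HTTP 400).
  static String resolveFieldType(Map<String, String> fieldTypes, String name) {
    String type = fieldTypes.get(name);
    if (type == null) {
      throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown fieldtype '" + name + "'");
    }
    return type;
  }

  public static void main(String[] args) {
    try {
      resolveFieldType(Collections.emptyMap(), "text_general");
    } catch (SolrException e) {
      // code() returns the numeric HTTP status carried by the ErrorCode (400 here).
      System.err.println("SolrException " + e.code() + ": " + e.getMessage());
    }
  }
}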

        log.trace("reading field def "+name);
        String type = DOMUtil.getAttr(attrs,"type","field " + name);

        FieldType ft = fieldTypes.get(type);
        if (ft==null) {
          throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Unknown fieldtype '" + type + "' specified on field " + name,false);
        }

        Map<String,String> args = DOMUtil.toMapExcept(attrs, "name", "type");
        if( args.get( "required" ) != null ) {
          explicitRequiredProp.put( name, Boolean.valueOf( args.get( "required" ) ) );
        }

        SchemaField f = SchemaField.create(name,ft,args);

        if (node.getNodeName().equals("field")) {
          SchemaField old = fields.put(f.getName(),f);
          if( old != null ) {
            String msg = "[schema.xml] Duplicate field definition for '"
              + f.getName() + "' ignoring: "+old.toString();
           
            Throwable t = new SolrException( SolrException.ErrorCode.SERVER_ERROR, msg );
            SolrException.logOnce(log,null,t);
            SolrConfig.severeErrors.add( t );
          }
          log.debug("field defined: " + f);
          if( f.getDefaultValue() != null ) {
            log.debug(name+" contains default value: " + f.getDefaultValue());
            fieldsWithDefaultValue.add( f );
          }
          if (f.isRequired()) {
            log.debug(name+" is required in this schema");
            requiredFields.add(f);
          }
        } else if (node.getNodeName().equals("dynamicField")) {
          // make sure nothing else has the same path
          addDynamicField(dFields, f);
        } else {
          // we should never get here
          throw new RuntimeException("Unknown field type");
        }
      }
     
    //fields with default values are by definition required
    //add them to required fields, and we only have to loop once
    // in DocumentBuilder.getDoc()
    requiredFields.addAll(getFieldsWithDefaultValue());


    // OK, now sort the dynamic fields largest to smallest size so we don't get
    // any false matches.  We want to act like a compiler tool and try and match
    // the largest string possible.
    Collections.sort(dFields);

    log.trace("Dynamic Field Ordering:" + dFields);

    // stuff it in a normal array for faster access
    dynamicFields = dFields.toArray(new DynamicField[dFields.size()]);


    Node node = (Node) xpath.evaluate("/schema/similarity", document, XPathConstants.NODE);
    if (node==null) {
      similarityFactory = new SimilarityFactory() {
        @Override
        public Similarity getSimilarity() {
          return Similarity.getDefault();
        }
      };
      log.debug("using default similarity");
    } else {
      final Object obj = loader.newInstance(((Element) node).getAttribute("class"));
      if (obj instanceof SimilarityFactory) {
        // configure a factory, get a similarity back
        SolrParams params = SolrParams.toSolrParams(DOMUtil.childNodesToNamedList(node));
        similarityFactory = (SimilarityFactory)obj;
        similarityFactory.init(params);
      } else {
        // just like always, assume it's a Similarity and get a ClassCastException - reasonable error handling
        similarityFactory = new SimilarityFactory() {
          @Override
          public Similarity getSimilarity() {
            return (Similarity) obj;
          }
        };
      }
      if (similarityFactory instanceof SchemaAware){
        schemaAware.add((SchemaAware) similarityFactory);
      }
      log.debug("using similarity factory" + similarityFactory.getClass().getName());
    }

    node = (Node) xpath.evaluate("/schema/defaultSearchField/text()", document, XPathConstants.NODE);
    if (node==null) {
      log.warn("no default search field specified in schema.");
    } else {
      defaultSearchFieldName=node.getNodeValue().trim();
      // throw exception if specified, but not found or not indexed
      if (defaultSearchFieldName!=null) {
        SchemaField defaultSearchField = getFields().get(defaultSearchFieldName);
        if ((defaultSearchField == null) || !defaultSearchField.indexed()) {
          String msg =  "default search field '" + defaultSearchFieldName + "' not defined or not indexed" ;
          throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, msg );
        }
      }
      log.info("default search field is "+defaultSearchFieldName);
    }

    node = (Node) xpath.evaluate("/schema/solrQueryParser/@defaultOperator", document, XPathConstants.NODE);
    if (node==null) {
      log.debug("using default query parser operator (OR)");
    } else {
      queryParserDefaultOperator=node.getNodeValue().trim();
      log.info("query parser default operator is "+queryParserDefaultOperator);
    }

    node = (Node) xpath.evaluate("/schema/uniqueKey/text()", document, XPathConstants.NODE);
    if (node==null) {
      log.warn("no uniqueKey specified in schema.");
    } else {
      uniqueKeyField=getIndexedField(node.getNodeValue().trim());
      if (!uniqueKeyField.stored()) {
        log.error("uniqueKey is not stored - distributed search will not work");
      }
      if (uniqueKeyField.multiValued()) {
        log.error("uniqueKey should not be multivalued");
      }
      uniqueKeyFieldName=uniqueKeyField.getName();
      uniqueKeyFieldType=uniqueKeyField.getType();
      log.info("unique key field: "+uniqueKeyFieldName);
     
      // Unless the uniqueKeyField is marked 'required=false' then make sure it exists
      if( Boolean.FALSE != explicitRequiredProp.get( uniqueKeyFieldName ) ) {
        uniqueKeyField.required = true;
        requiredFields.add(uniqueKeyField);
      }
    }

    /////////////// parse out copyField commands ///////////////
    // Map<String,ArrayList<SchemaField>> cfields = new HashMap<String,ArrayList<SchemaField>>();
    // expression = "/schema/copyField";
   
    dynamicCopyFields = new DynamicCopy[] {};
    expression = "//copyField";
    nodes = (NodeList) xpath.evaluate(expression, document, XPathConstants.NODESET);

      for (int i=0; i<nodes.getLength(); i++) {
        node = nodes.item(i);
        NamedNodeMap attrs = node.getAttributes();

        String source = DOMUtil.getAttr(attrs,"source","copyField definition");
        String dest   = DOMUtil.getAttr(attrs,"dest","copyField definition");
        String maxChars = DOMUtil.getAttr(attrs, "maxChars");
        int maxCharsInt = CopyField.UNLIMITED;
        if (maxChars != null) {
          try {
            maxCharsInt = Integer.parseInt(maxChars);
          } catch (NumberFormatException e) {
            log.warn("Couldn't parse maxChars attribute for copyField from "
                    + source + " to " + dest + " as integer. The whole field will be copied.");
          }
        }

        registerCopyField(source, dest, maxCharsInt);
     }
     
      for (Map.Entry<SchemaField, Integer> entry : copyFieldTargetCounts.entrySet())    {
        if (entry.getValue() > 1 && !entry.getKey().multiValued())  {
          log.warn("Field " + entry.getKey().name + " is not multivalued "+
                      "and destination for multiple copyFields ("+
                      entry.getValue()+")");
        }
      }
      //Run the callbacks on SchemaAware now that everything else is done
      for (SchemaAware aware : schemaAware) {
        aware.inform(this);
      }
    } catch (SolrException e) {
      SolrConfig.severeErrors.add( e );
      throw e;
    } catch(Exception e) {
      // unexpected exception...
      SolrConfig.severeErrors.add( e );
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Schema Parsing Failed: " + e.getMessage(), e,false);
    }

    // create the field analyzers
    refreshAnalyzers();
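
The end of the example above shows the standard wrap-and-rethrow convention: a SolrException is recorded in SolrConfig.severeErrors and rethrown unchanged, while any other exception is wrapped as a SERVER_ERROR so callers always see a SolrException. A condensed sketch of that convention, with parseSchema() standing in for the XML parsing and a local severeErrors list in place of SolrConfig.severeErrors:

import java.util.ArrayList;
import java.util.List;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;

public class WrapAndRethrow {
  static final List<Throwable> severeErrors = new ArrayList<>();

  static void parseSchema() {            // stand-in for the XML parsing above
    throw new IllegalStateException("boom");
  }

  static void loadSchema() {
    try {
      parseSchema();
    } catch (SolrException e) {
      severeErrors.add(e);               // keep the original error code
      throw e;
    } catch (Exception e) {
      severeErrors.add(e);               // unexpected: surface as a 500
      throw new SolrException(ErrorCode.SERVER_ERROR,
          "Schema Parsing Failed: " + e.getMessage(), e);
    }
  }
}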


      addDynamicFieldNoDupCheck(dFields, f);
    } else {
      String msg = "[schema.xml] Duplicate DynamicField definition for '"
              + f.getName() + "' ignoring: " + f.toString();

      Throwable t = new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
      SolrException.logOnce(log, null, t);
      SolrConfig.severeErrors.add(t);
    }
  }
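
Duplicate field and dynamicField definitions are treated as severe but non-fatal: a SolrException is created, logged once, and collected so schema loading can continue. A rough sketch of that collect-instead-of-throw pattern, using plain SLF4J logging in place of the deprecated SolrException.logOnce helper:

import java.util.ArrayList;
import java.util.List;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DuplicateFieldCheck {
  private static final Logger log = LoggerFactory.getLogger(DuplicateFieldCheck.class);
  private final List<Throwable> severeErrors = new ArrayList<>();

  void reportDuplicate(String fieldName) {
    // Severe, but the schema keeps loading: record the error instead of throwing it.
    Throwable t = new SolrException(ErrorCode.SERVER_ERROR,
        "[schema.xml] Duplicate field definition for '" + fieldName + "'");
    log.error(t.getMessage(), t);
    severeErrors.add(t);
  }
}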

    boolean sourceIsPattern = isWildCard(source);
    boolean destIsPattern   = isWildCard(dest);

    log.debug("copyField source='"+source+"' dest='"+dest+"' maxChars='"+maxChars);
    SchemaField d = getFieldOrNull(dest);
    if(d == null){
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "copyField destination :'"+dest+"' does not exist" );
    }

    if(sourceIsPattern) {
      if( destIsPattern ) {
        DynamicField df = null;
        for( DynamicField dd : dynamicFields ) {
          if( dd.regex.equals( dest ) ) {
            df = dd;
            break;
          }
        }
        if( df == null ) {
          throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "copyField dynamic destination must match a dynamicField." );
        }
        registerDynamicCopyField(new DynamicDestCopy(source, df, maxChars ));
      }
      else {
        registerDynamicCopyField(new DynamicCopy(source, d, maxChars));
      }
    }
    else if( destIsPattern ) {
      String msg =  "copyField only supports a dynamic destination if the source is also dynamic" ;
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, msg );
    }
    else {
      // retrieve the field to force an exception if it doesn't exist
      SchemaField f = getField(source);
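
registerCopyField accepts literal or wildcard names, but a wildcard destination is only legal when the source is also a wildcard; anything else fails fast with SERVER_ERROR. A simplified sketch of that rule follows; isWildCard here is a hypothetical helper that just looks for a leading or trailing '*', not Solr's actual implementation:

import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;

public class CopyFieldRules {
  // Hypothetical helper: Solr's real check lives in IndexSchema.
  static boolean isWildCard(String name) {
    return name.startsWith("*") || name.endsWith("*");
  }

  static void checkCopyField(String source, String dest) {
    boolean sourceIsPattern = isWildCard(source);
    boolean destIsPattern = isWildCard(dest);
    if (destIsPattern && !sourceIsPattern) {
      throw new SolrException(ErrorCode.SERVER_ERROR,
          "copyField only supports a dynamic destination if the source is also dynamic");
    }
    // literal names would be resolved here; an unknown field fails with SERVER_ERROR as above
  }
}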

          Constructor<? extends Analyzer> cnstr = clazz.getConstructor(Version.class);
          final String matchVersionStr = DOMUtil.getAttr(attrs, LUCENE_MATCH_VERSION_PARAM);
          final Version luceneMatchVersion = (matchVersionStr == null) ?
            solrConfig.luceneMatchVersion : Config.parseLuceneVersionString(matchVersionStr);
          if (luceneMatchVersion == null) {
            throw new SolrException
              ( SolrException.ErrorCode.SERVER_ERROR,
                "Configuration Error: Analyzer '" + clazz.getName() +
                "' needs a 'luceneMatchVersion' parameter");
          }
          return cnstr.newInstance(luceneMatchVersion);
        } catch (NoSuchMethodException nsme) {
          // otherwise use default ctor
          return clazz.newInstance();
        }
      } catch (Exception e) {
        log.error("Cannot load analyzer: "+analyzerName, e);
        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
                                 "Cannot load analyzer: "+analyzerName, e );
      }
    }

    XPath xpath = XPathFactory.newInstance().newXPath();

    // Load the CharFilters
    // --------------------------------------------------------------------------------
    final ArrayList<CharFilterFactory> charFilters = new ArrayList<CharFilterFactory>();
    AbstractPluginLoader<CharFilterFactory> charFilterLoader =
      new AbstractPluginLoader<CharFilterFactory>( "[schema.xml] analyzer/charFilter", false, false )
    {
      @Override
      protected void init(CharFilterFactory plugin, Node node) throws Exception {
        if( plugin != null ) {
          final Map<String,String> params = DOMUtil.toMapExcept(node.getAttributes(),"class");
          // copy the luceneMatchVersion from config, if not set
          if (!params.containsKey(LUCENE_MATCH_VERSION_PARAM))
            params.put(LUCENE_MATCH_VERSION_PARAM, solrConfig.luceneMatchVersion.toString());
          plugin.init( params );
          charFilters.add( plugin );
        }
      }

      @Override
      protected CharFilterFactory register(String name, CharFilterFactory plugin) throws Exception {
        return null; // used for map registration
      }
    };
    charFilterLoader.load( solrConfig.getResourceLoader(), (NodeList)xpath.evaluate("./charFilter", node, XPathConstants.NODESET) );

    // Load the Tokenizer
    // Although an analyzer only allows a single Tokenizer, we load a list to make sure
    // the configuration is ok
    // --------------------------------------------------------------------------------
    final ArrayList<TokenizerFactory> tokenizers = new ArrayList<TokenizerFactory>(1);
    AbstractPluginLoader<TokenizerFactory> tokenizerLoader =
      new AbstractPluginLoader<TokenizerFactory>( "[schema.xml] analyzer/tokenizer", false, false )
    {
      @Override
      protected void init(TokenizerFactory plugin, Node node) throws Exception {
        if( !tokenizers.isEmpty() ) {
          throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
              "The schema defines multiple tokenizers for: "+node );
        }
        final Map<String,String> params = DOMUtil.toMapExcept(node.getAttributes(),"class");
        // copy the luceneMatchVersion from config, if not set
        if (!params.containsKey(LUCENE_MATCH_VERSION_PARAM))
          params.put(LUCENE_MATCH_VERSION_PARAM, solrConfig.luceneMatchVersion.toString());
        plugin.init( params );
        tokenizers.add( plugin );
      }

      @Override
      protected TokenizerFactory register(String name, TokenizerFactory plugin) throws Exception {
        return null; // used for map registration
      }
    };
    tokenizerLoader.load( loader, (NodeList)xpath.evaluate("./tokenizer", node, XPathConstants.NODESET) );
   
    // Make sure something was loaded
    if( tokenizers.isEmpty() ) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,"analyzer without class or tokenizer & filter list");
    }
   

    // Load the Filters
    // --------------------------------------------------------------------------------
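
An analyzer element may declare any number of charFilters and filters but exactly one tokenizer, and the loader enforces this with SERVER_ERROR both when a second tokenizer appears and when none was configured. A stripped-down sketch of that invariant, without the AbstractPluginLoader machinery:

import java.util.ArrayList;
import java.util.List;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;

public class TokenizerCountCheck {
  private final List<String> tokenizers = new ArrayList<>(1);

  void addTokenizer(String tokenizerClass) {
    if (!tokenizers.isEmpty()) {
      throw new SolrException(ErrorCode.SERVER_ERROR,
          "The schema defines multiple tokenizers for: " + tokenizerClass);
    }
    tokenizers.add(tokenizerClass);
  }

  void finishAnalyzer() {
    if (tokenizers.isEmpty()) {
      throw new SolrException(ErrorCode.SERVER_ERROR,
          "analyzer without class or tokenizer & filter list");
    }
  }
}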

    // Hmmm, default field could also be implemented with a dynamic field of "*".
    // It would have to be special-cased and only used if nothing else matched.
    /***  REMOVED -YCS
    if (defaultFieldType != null) return new SchemaField(fieldName,defaultFieldType);
    ***/
    throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"undefined field "+fieldName);
  }

    
     if(!fieldName.endsWith("_s"))
     {
       return getDynamicFieldType(fieldName+"_s");
     }
    throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"undefined field "+fieldName);
  }

    inject = getBoolean(INJECT, true);
   
    String name = args.get( ENCODER );
    if( name == null ) {
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Missing required parameter: "+ENCODER
          +" ["+registry.keySet()+"]" );
    }
    Class<? extends Encoder> clazz = registry.get(name.toUpperCase(Locale.ENGLISH));
    if( clazz == null ) {
      lock.lock();
      try {
        clazz = resolveEncoder(name);
      } finally {
        lock.unlock();
      }
    }
   
    try {
      encoder = clazz.newInstance();
     
      // Try to set the maxCodeLength
      String v = args.get( "maxCodeLength" );
      if( v != null ) {
        Method setter = encoder.getClass().getMethod( "setMaxCodeLen", int.class );
        setter.invoke( encoder, Integer.parseInt( v ) );
      }
    }
    catch (Exception e) {
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Error initializing: "+name + "/"+clazz, e , false);
    }
  }
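
The phonetic filter factory instantiates the configured encoder reflectively and, when a maxCodeLength argument is present, invokes setMaxCodeLen on it via reflection because not every encoder exposes that setter; any failure is rethrown as SERVER_ERROR. A self-contained sketch of that optional-setter pattern, where SimpleEncoder is a hypothetical stand-in for a commons-codec encoder:

import java.lang.reflect.Method;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;

public class OptionalSetterInit {
  // Hypothetical stand-in for an encoder that supports a maximum code length.
  public static class SimpleEncoder {
    private int maxCodeLen = 4;
    public void setMaxCodeLen(int len) { this.maxCodeLen = len; }
  }

  static SimpleEncoder createEncoder(String maxCodeLength) {
    try {
      SimpleEncoder encoder = SimpleEncoder.class.getDeclaredConstructor().newInstance();
      if (maxCodeLength != null) {
        // Only look up and call the setter when the argument was actually supplied.
        Method setter = encoder.getClass().getMethod("setMaxCodeLen", int.class);
        setter.invoke(encoder, Integer.parseInt(maxCodeLength));
      }
      return encoder;
    } catch (Exception e) {
      throw new SolrException(ErrorCode.SERVER_ERROR,
          "Error initializing encoder: " + SimpleEncoder.class.getName(), e);
    }
  }
}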

      clazz = lookupEncoder(PACKAGE_CONTAINING_ENCODERS+name);
    } catch (ClassNotFoundException e) {
      try {
        clazz = lookupEncoder(name);
      } catch (ClassNotFoundException cnfe) {
        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Unknown encoder: "+name +" ["+registry.keySet()+"]" );
      }
    }
    catch (ClassCastException e) {
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "Not an encoder: "+name +" ["+registry.keySet()+"]" );
    }
    return clazz;
  }
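
resolveEncoder tries the short name relative to a known encoder package first and falls back to treating it as a fully qualified class name; only when both lookups fail does it raise SERVER_ERROR. A sketch of that two-step lookup, with the package prefix shown as an assumption rather than the exact constant used by Solr:

import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;

public class EncoderLookup {
  // Illustrative prefix; the real constant lives in the phonetic filter factory.
  private static final String PACKAGE_CONTAINING_ENCODERS = "org.apache.commons.codec.language.";

  static Class<?> resolveEncoder(String name) {
    try {
      return Class.forName(PACKAGE_CONTAINING_ENCODERS + name);   // short name, e.g. "Metaphone"
    } catch (ClassNotFoundException e) {
      try {
        return Class.forName(name);                               // fully qualified fallback
      } catch (ClassNotFoundException cnfe) {
        throw new SolrException(ErrorCode.SERVER_ERROR, "Unknown encoder: " + name);
      }
    }
  }
}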

  public void init(Map<String, String> args) {
    super.init(args);
    assureMatchVersion();
    dictFile = args.get("dictionary");
    if (null == dictFile) {
      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
                               "Missing required parameter: dictionary");
    }

    minWordSize= getInt("minWordSize",CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE);
    minSubwordSize= getInt("minSubwordSize",CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE);

  @Override
  public void init(Map<String, String> args) {
    super.init(args);
    assureMatchVersion();
    if (args.containsKey("charset"))
      throw new SolrException(ErrorCode.SERVER_ERROR,
          "The charset parameter is no longer supported.  "
          + "Please process your documents as Unicode instead.");
  }
