Package: org.apache.drill.common.exceptions

Examples of org.apache.drill.common.exceptions.DrillRuntimeException


  // NOTE(review): truncated excerpt — the method's return statement and closing
  // brace are not shown in this snippet.
  // Generates the end-of-render code for a function: validates that constant-only
  // parameters received constant arguments, then emits the SETUP, EVAL and RESET
  // code sections into the class generator.
  public HoldingContainer renderEnd(ClassGenerator<?> g, HoldingContainer[] inputVariables, JVar[]  workspaceJVars){
    //If the function's annotation specifies a parameter has to be constant expression, but the HoldingContainer
    //for the argument is not, then raise exception.
    for (int i =0; i < inputVariables.length; i++) {
      if (parameters[i].isConstant && !inputVariables[i].isConstant()) {
        // Fail fast: the function was registered with a constant-only parameter,
        // but the incoming argument is not a compile-time constant expression.
        throw new DrillRuntimeException(String.format("The argument '%s' of Function '%s' has to be constant!", parameters[i].name, this.getRegisteredNames()[0]));
      }
    }
    // Emit the generated code sections. SETUP and EVAL receive the input
    // variables; RESET is emitted with null inputs — presumably reset logic is
    // input-independent (TODO confirm against generateBody's contract).
    generateBody(g, BlockType.SETUP, setupBody, inputVariables, workspaceJVars, true);
    HoldingContainer c = generateEvalBody(g, inputVariables, evalBody, workspaceJVars);
    generateBody(g, BlockType.RESET, resetBody, null, workspaceJVars, false);
View Full Code Here


            // NOTE(review): truncated excerpt — the enclosing switch statement,
            // try block and method signature begin before this snippet.
            return holder;
          default:
            // Any type not handled by the preceding case labels is unsupported.
            throw new UnsupportedOperationException("Type of " + type + " is not supported yet!");
        }
      } catch (Exception ex){
        // Wrap any evaluation failure in Drill's runtime exception.
        // NOTE(review): only the exception's string form is appended; the cause
        // is not chained, so the original stack trace is lost.
        throw new DrillRuntimeException("Error when evaluate a ValueVectorReadExpression: " + ex);
      }
    }
View Full Code Here

  // Name of the map field that carries each entry's value in the kvgen output.
  public static final String fieldValue = "value";

  // Converts ("mappifies") a single non-repeated map into a list of key/value
  // maps, writing the result through the given complex writer (kvgen function).
  // NOTE(review): truncated excerpt — the loop body after mapWriter.start() and
  // the method's closing brace are not shown in this snippet.
  public static void mappify(FieldReader reader, BaseWriter.ComplexWriter writer, DrillBuf buffer) {
    // Currently we expect single map as input
    if (!(reader instanceof SingleMapReaderImpl)) {
      throw new DrillRuntimeException("kvgen function only supports Simple maps as input");
    }
    // The output is a list; each map field becomes one element of that list.
    BaseWriter.ListWriter listWriter = writer.rootAsList();
    listWriter.start();
    BaseWriter.MapWriter mapWriter = listWriter.map();

    // Iterate over the fields in the map
    Iterator<String> fieldIterator = reader.iterator();
    while (fieldIterator.hasNext()) {
      String str = fieldIterator.next();
      FieldReader fieldReader = reader.reader(str);

      // Skip the field if its null
      if (fieldReader.isSet() == false) {
        mapWriter.end();
        continue;
      }

      // Check if the value field is not repeated
      if (fieldReader.getType().getMode() == TypeProtos.DataMode.REPEATED) {
        throw new DrillRuntimeException("kvgen function does not support repeated type values");
      }

      // writing a new field, start a new map
      mapWriter.start();
View Full Code Here

    // NOTE(review): truncated excerpt — the constructor signature preceding
    // these assignments is not shown in this snippet.
    this.queryId = QueryIdHelper.getQueryId(id);
    try {
      // Obtain the persistent store for query profiles and the ephemeral store
      // for running-query profiles from the provider.
      this.profilePStore = provider.getPStore(QUERY_PROFILE);
      this.profileEStore = provider.getEStore(RUNNING_QUERY_PROFILE);
    } catch (IOException e) {
      // Store initialization failure is unrecoverable here; rethrow unchecked
      // with the cause preserved.
      throw new DrillRuntimeException(e);
    }
    this.foreman = foreman;
  }
View Full Code Here

        // NOTE(review): truncated excerpt — the enclosing try block and method
        // signature begin before this snippet.
        // finally register schemas with the refreshed plugins
        for (StoragePlugin plugin : plugins.values()) {
          plugin.registerSchemas(session, parent);
        }
      } catch (ExecutionSetupException e) {
        // A plugin failure aborts the whole schema registration; the cause is
        // chained so the original failure remains visible.
        throw new DrillRuntimeException("Failure while updating storage plugins", e);
      }

      // Add second level schema as top level schema with name qualified with parent schema name
      // Ex: "dfs" schema has "default" and "tmp" as sub schemas. Add following extra schemas "dfs.default" and
      // "dfs.tmp" under root schema.
View Full Code Here

          // NOTE(review): truncated excerpt — the call these arguments belong to
          // (and the enclosing try block) begins before this snippet.
          incoming.getSchema().getColumn(
              incoming.getValueVectorId(
                  popConfig.getColumn()).getFieldIds()[0]).getValueClass(),
          incoming.getValueVectorId(popConfig.getColumn()).getFieldIds()).getValueVector());
    } catch (Exception ex) {
      // NOTE(review): the message contains a typo ("filed" for "field") and the
      // caught exception is not chained as the cause, losing the original stack
      // trace — both worth fixing in the upstream source.
      throw new DrillRuntimeException("Trying to flatten a non-repeated filed.");
    }
  }
View Full Code Here

    // NOTE(review): truncated excerpt — the method signature and the
    // declarations of byteBuf/input/directBuffer/pageLength precede this snippet.
    int bl=byteBuf.capacity();
    try{
      // Bulk-read pageLength bytes from the input stream into the direct buffer.
      CompatibilityUtil.getBuf(input, directBuffer, pageLength);
    }catch(Exception e) {
      // NOTE(review): only e.getMessage() is logged and rethrown; the cause is
      // not chained, so the original stack trace is lost (and getMessage() may
      // be null). Prefer new DrillRuntimeException(e) upstream.
      logger.error("Failed to read data into Direct ByteBuffer with exception: "+e.getMessage());
      throw new DrillRuntimeException(e.getMessage());
    }
    return byteBuf;
  }
View Full Code Here

//      logger.debug("So far read {} records out of row group({}) in file '{}'", totalRecordsRead, rowGroupIndex, hadoopPath.toUri().getPath());
      // NOTE(review): truncated excerpt — the enclosing try block and method
      // signature begin before this snippet.
      // Accumulate the running total and return the count read in this pass.
      totalRecordsRead += firstColumnStatus.getRecordsReadInCurrentPass();
      return firstColumnStatus.getRecordsReadInCurrentPass();
    } catch (IOException e) {
      // Wrap I/O failures unchecked, preserving the cause.
      throw new DrillRuntimeException(e);
    }
  }
View Full Code Here

        // NOTE(review): truncated excerpt — the enclosing loop over join keys
        // (index i) and the method signature begin before this snippet.
        // Determine the common type both join key expressions can be cast to.
        MinorType result = TypeCastRules.getLeastRestrictiveType(types);

        // Add the cast
        List<LogicalExpression> args = new LinkedList<>();
        if (result == null) {
          // No least-restrictive type exists: the build and probe key types are
          // incomparable, so the join condition cannot be evaluated.
          throw new DrillRuntimeException(String.format("Join conditions cannot be compared failing build expression: %s failing probe expression: %s",
              keyExprsBuild[i].getMajorType().toString(), keyExprsProbe[i].getMajorType().toString()));
        }
        else if (result != buildType) {
          // Add a cast expression on top of the build expression
          args.add(keyExprsBuild[i]);
View Full Code Here

      // NOTE(review): truncated excerpt — the enclosing try block and method
      // signature begin before this snippet.
      // Finalize the vector's value count and report the batch's record count.
      vector.getMutator().setValueCount(recordCount);
      logger.debug("text scan batch size {}", batchSize);
      return recordCount;
    } catch (IOException e) {
      // Release reader resources before rethrowing; the cause is preserved.
      cleanup();
      throw new DrillRuntimeException(e);
    }
  }
View Full Code Here

TOP

Related Classes of org.apache.drill.common.exceptions.DrillRuntimeException

Copyright © 2018 www.massapicom. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware#gmail.com.