Examples of DAGOperatorException


Examples of com.odiago.flumebase.util.DAGOperatorException

    Schema inputSchema = null;
    for (PlanNode parent : parents) {
      Schema parentOutputSchema = (Schema) parent.getAttr(PlanNode.OUTPUT_SCHEMA_ATTR);
      if (null == parentOutputSchema) {
        // This should not happen if we use this operator correctly with BFS.
        throw new DAGOperatorException("Node " + parent + " does not have output schema set");
      }

      if (myInputSchemas != null) {
        // This node accepts multiple input schemas. Check that the parent's output
        // schema matches one of them.
        boolean match = false;
        for (Schema candidate : myInputSchemas) {
          if (candidate.equals(parentOutputSchema)) {
            match = true;
            break;
          }
        }

        if (!match) {
          throw new DAGOperatorException("Schema resolution execption; node [" + node
              + "] has a parent output schema that does not match any candidate input schema.");
        }
      } else if (null == inputSchema) {
        // This node will have a single input schema. Cache the first parent's output schema.
        inputSchema = parentOutputSchema;
      } else {
        // Check that every other parent has the same schema as the first parent.
        if (!parentOutputSchema.equals(inputSchema)) {
          throw new DAGOperatorException("Schema resolution exception; node [" + node
              + "] has parents with mismatched schemas:\nSchema 1:\n"
              + inputSchema + "\nSchema 2:\n" + parentOutputSchema);
        }
      }
    }

    if (myInputSchemas == null) {
      // For nodes with exactly one input schema, check that the defined input
      // schema (if any) matches the output of the predecessors. If unset,
      // set it to the predecessor output.
      Schema myInputSchema = (Schema) node.getAttr(PlanNode.INPUT_SCHEMA_ATTR);
      if (null == myInputSchema) {
        node.setAttr(PlanNode.INPUT_SCHEMA_ATTR, inputSchema);
        myInputSchema = inputSchema;
      }

      if (null != myInputSchema && null != inputSchema) {
        // Check that these are equal.
        if (!inputSchema.equals(myInputSchema)) {
          throw new DAGOperatorException("Node [" + node + "] has set input schema:\n"
              + myInputSchema + "\nbut parents have output schema:\n" + inputSchema);
        }
      }

      // And if there's no output schema defined, set it equal to our input schema.
      Schema myOutputSchema = (Schema) node.getAttr(PlanNode.OUTPUT_SCHEMA_ATTR);
      if (null == myOutputSchema) {
        node.setAttr(PlanNode.OUTPUT_SCHEMA_ATTR, myInputSchema);
      }
    } else {
      // For nodes that accept multiple input schemas, just check that the
      // output schema is set, since we can't infer the output schema from the set
      // of input schemas.
      if (null == node.getAttr(PlanNode.OUTPUT_SCHEMA_ATTR)) {
        throw new DAGOperatorException("Node [" + node
            + "] has multiple input schemas but the output schema is unset.");
      }

      // Sanity check: If this also has a singleton input schema set, complain.
      if (null != node.getAttr(PlanNode.INPUT_SCHEMA_ATTR)) {
        throw new DAGOperatorException("Node [" + node
            + "] has multiple input schemas and singleton input schema set.");
      }
    }
  }
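
The logic above is the body of a per-node operator that runs during a breadth-first traversal of the flow's plan DAG (hence the comment about BFS near the top). The sketch below shows that traversal shape in miniature; the Dag, DagNode, and DagOperator names are illustrative assumptions, not the FlumeBase API.

    import java.util.ArrayDeque;
    import java.util.ArrayList;
    import java.util.Deque;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Illustrative stand-in for com.odiago.flumebase.util.DAGOperatorException.
    class DAGOperatorException extends Exception {
      public DAGOperatorException(String msg) { super(msg); }
      public DAGOperatorException(Throwable cause) { super(cause); }
    }

    // Per-node callback; schema-resolution logic like the snippet above would live in process().
    interface DagOperator<T> {
      void process(T node) throws DAGOperatorException;
    }

    // A plan node with edges to the nodes it feeds.
    class DagNode {
      final String name;
      final List<DagNode> children = new ArrayList<>();
      DagNode(String name) { this.name = name; }
      @Override public String toString() { return name; }
    }

    class Dag {
      private final List<DagNode> roots = new ArrayList<>();

      void addRoot(DagNode root) { roots.add(root); }

      // Applies the operator to each node at most once, in breadth-first order from the
      // roots. The schema operator above additionally assumes that a node's parents have
      // been visited (and had OUTPUT_SCHEMA_ATTR set) before the node itself.
      void bfs(DagOperator<DagNode> op) throws DAGOperatorException {
        Set<DagNode> seen = new HashSet<>(roots);
        Deque<DagNode> queue = new ArrayDeque<>(roots);
        while (!queue.isEmpty()) {
          DagNode node = queue.removeFirst();
          op.process(node);
          for (DagNode child : node.children) {
            if (seen.add(child)) {
              queue.addLast(child);
            }
          }
        }
      }
    }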

Examples of com.odiago.flumebase.util.DAGOperatorException

            try {
              LOG.debug("Opening flow element of class: " + flowElem.getClass().getName());
              flowElem.open();
            } catch (IOException ioe) {
              throw new DAGOperatorException(ioe);
            } catch (InterruptedException ie) {
              throw new DAGOperatorException(ie);
            }
          }
        });
      } catch (DAGOperatorException doe) {
        // This is a wrapper exception; unpack and rethrow with the appropriate type.
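
The operator above wraps the checked exceptions from FlowElement.open() in DAGOperatorException so they can cross the traversal, and the surrounding catch block unpacks them again. A minimal sketch of that unpack-and-rethrow pattern follows, assuming DAGOperatorException carries the original exception as its cause; openAllElements() is a hypothetical stand-in for applying the operator to the flow's DAG.

    import java.io.IOException;

    // Sketch of the "unpack and rethrow" step described in the catch block above.
    class OpenFlowSketch {
      public void open() throws IOException, InterruptedException {
        try {
          openAllElements();
        } catch (DAGOperatorException doe) {
          Throwable cause = doe.getCause();
          if (cause instanceof IOException) {
            throw (IOException) cause;            // rethrow with its original checked type
          } else if (cause instanceof InterruptedException) {
            throw (InterruptedException) cause;
          }
          throw new RuntimeException(doe);        // unexpected wrapped type
        }
      }

      private void openAllElements() throws DAGOperatorException {
        // ... apply the open() operator shown above to every element in the flow's DAG ...
      }
    }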

Examples of com.odiago.flumebase.util.DAGOperatorException

            FlowElement flowElem = node.getFlowElement();
            if (flowElem instanceof OutputElement) {
              try {
                ((OutputElement) flowElem).setFlumeTarget(name);
              } catch (IOException ioe) {
                throw new DAGOperatorException(ioe);
              }
            }
          }
        });
      } catch (DAGOperatorException doe) {

Examples of com.odiago.flumebase.util.DAGOperatorException

      StreamSymbol streamSym = new StreamSymbol(createStream);
      if (!streamSym.getEventParser().validate(streamSym)) {
        // Fails final check of parameters
        // TODO: The EventParser is giving better info in its LOG; but this
        // should really be communicated back to the user.
        throw new DAGOperatorException(
            "Stream cannot be created with the specified parameters.");
      } else if (mRootSymbolTable.resolve(streamName) != null) {
        // TODO: Allow CREATE OR REPLACE STREAM to override this.
        throw new DAGOperatorException("Object already exists at top level: " + streamName);
      } else {
        mRootSymbolTable.addSymbol(streamSym);
        mSubmitterSession.sendInfo("CREATE STREAM");
      }

      if (createStream.getType().equals(StreamSourceType.File)
          && streamSym.getFormatSpec().getParam(FileSourceElement.TIMESTAMP_COL_KEY) == null) {
        // We're reading from a file, and making up timestamps based on read time.
        // Warn the user that timestamps will change between queries.
        StringBuilder sb = new StringBuilder();
        sb.append("Warning: File-based streams will set event timestamps based on read time.\n");
        sb.append("To specify timestamps explictly, set the ");
        sb.append(FileSourceElement.TIMESTAMP_COL_KEY);
        sb.append(" event format property.");
        mSubmitterSession.sendInfo(sb.toString());
      }
    } else if (node instanceof DescribeNode) {
      // Look up the referenced object in the symbol table and describe it immediately.
      DescribeNode describe = (DescribeNode) node;
      Symbol sym = mRootSymbolTable.resolve(describe.getIdentifier());
      mSubmitterSession.sendInfo(sym.toString());
    } else if (node instanceof DropNode) {
      // Perform the operation here.
      // Remove the object from our symbol table.
      DropNode dropNode = (DropNode) node;
      String name = dropNode.getName();
      Symbol sym = mRootSymbolTable.resolve(name);
      if (null == sym) {
        // Shouldn't happen; the type checker already accepted this statement.
        throw new DAGOperatorException("No such object at top level: " + name);
      }
      EntityTarget targetType = dropNode.getType();
      // Perform the operation.
      mRootSymbolTable.remove(name);
      mSubmitterSession.sendInfo("DROP " + targetType.toString().toUpperCase());
    } else if (node instanceof NamedSourceNode) {
      NamedSourceNode namedInput = (NamedSourceNode) node;
      String streamName = namedInput.getStreamName();
      Symbol symbol = mRootSymbolTable.resolve(streamName);
      if (null == symbol) {
        throw new DAGOperatorException("No symbol for stream: " + streamName);
      }
      symbol = symbol.resolveAliases();

      if (!(symbol instanceof StreamSymbol)) {
        throw new DAGOperatorException("Identifier " + streamName + " has type: "
            + symbol.getType() + ", not STREAM.");
      }

      StreamSymbol streamSymbol = (StreamSymbol) symbol;

      switch (streamSymbol.getSourceType()) {
      case File:
        String fileName = streamSymbol.getSource();
        newElem = new FileSourceElement(newContext, fileName, streamSymbol.isLocal(),
            namedInput.getFields(), streamSymbol);
        break;
      case Source:
        if (!streamSymbol.isLocal()) {
          throw new DAGOperatorException("Do not know how to handle a non-local source yet.");
        }
        String flumeSource = streamSymbol.getSource();
        long flowIdNum = mFlowId.getId();
        String flowSourceId = "flumebase-flow-" + flowIdNum + "-" + streamSymbol.getName();
        newElem = new LocalFlumeSourceElement(newContext, flowSourceId,
            mFlumeConfig, flumeSource, (Schema) namedInput.getAttr(PlanNode.OUTPUT_SCHEMA_ATTR),
            namedInput.getFields(), streamSymbol);
        if (streamSymbol.isLocal()) {
          LOG.info("Created local Flume logical node: " + flowSourceId);
          LOG.info("You may need to connect upstream Flume elements to this source.");
        }

        // Mark Flume as required to execute this flow.
        mLocalFlow.setFlumeRequired(true);
        break;
      case Memory:
        newElem = new LocalInMemSourceElement(newContext,
            namedInput.getFields(), (InMemStreamSymbol) streamSymbol);
        break;
      case Node:
        String nodeSourceId = "flumebase-flow-" + mFlowId.getId() + "-" + streamSymbol.getName();
        newElem = new FlumeNodeElement(newContext, nodeSourceId,
            mFlumeConfig, streamSymbol.getSource(),
            (Schema) namedInput.getAttr(PlanNode.OUTPUT_SCHEMA_ATTR),
            namedInput.getFields(), streamSymbol);

        LOG.info("Created local Flume receiver context: " + nodeSourceId);
        LOG.info("This will be connected to upstream Flume node: " + streamSymbol.getSource());

        // Mark Flume as required to execute this flow.
        mLocalFlow.setFlumeRequired(true);
        break;
      default:
        throw new DAGOperatorException("Unhandled stream source type: "
            + streamSymbol.getSourceType());
      }
    } else if (node instanceof FilterNode) {
      FilterNode filterNode = (FilterNode) node;
      Expr filterExpr = filterNode.getFilterExpr();
      newElem = new FilterElement(newContext, filterExpr);
    } else if (node instanceof ProjectionNode) {
      ProjectionNode projNode = (ProjectionNode) node;
      Schema outSchema = (Schema) projNode.getAttr(PlanNode.OUTPUT_SCHEMA_ATTR);
      newElem = new ProjectionElement(newContext, outSchema, projNode.getInputFields(),
          projNode.getOutputFields());
    } else if (node instanceof AggregateNode) {
      AggregateNode aggNode = (AggregateNode) node;
      newElem = new BucketedAggregationElement(newContext, aggNode);
    } else if (node instanceof EvaluateExprsNode) {
      EvaluateExprsNode evalNode = (EvaluateExprsNode) node;
      Schema outSchema = (Schema) evalNode.getAttr(PlanNode.OUTPUT_SCHEMA_ATTR);
      newElem = new EvaluationElement(newContext, evalNode.getExprs(),
          evalNode.getPropagateFields(), outSchema);
    } else if (node instanceof HashJoinNode) {
      HashJoinNode joinNode = (HashJoinNode) node;
      newElem = new HashJoinElement(newContext, joinNode);
    } else {
      throw new DAGOperatorException("Cannot create FlowElement for PlanNode of type: "
          + node.getClass().getName());
    }

    if (null != newElem) {
      // Wrap the FlowElement in a DAGNode.
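
This operator translates each PlanNode into a FlowElement and signals unsupported or misconfigured nodes by throwing DAGOperatorException. The sketch below shows how a caller might apply such an operator and surface the exception to the user rather than letting it escape; it reuses the illustrative Dag, DagNode, and DagOperator types from the first sketch, and none of these names are FlumeBase's.

    // Caller-side sketch: run the translation operator over the plan DAG and report a
    // DAGOperatorException (unknown node type, missing stream, bad stream parameters, ...).
    class FlowBuilderSketch {
      public boolean buildFlow(Dag planDag, DagOperator<DagNode> translateToFlow) {
        try {
          planDag.bfs(translateToFlow);
          return true;
        } catch (DAGOperatorException doe) {
          reportError("Could not build flow: " + doe.getMessage());
          return false;
        }
      }

      private void reportError(String msg) {
        // Stand-in for reporting back to the submitting session.
        System.err.println(msg);
      }
    }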