Examples of ImportException


Examples of au.csiro.ontology.importer.ImportException

                    "file. (input type = "+inputType+", file="+md+")";
            try {
                iss.add(input.getInputStream(md));
            } catch (NullPointerException e) {
                log.error(message, e);
                throw new ImportException(message, e);
            } catch (IOException e) {
                log.error(message, e);
                throw new ImportException(message, e);
            }
        }

        IModuleDependencyRefset res = RefsetImporter.importModuleDependencyRefset(iss);
        return res;
View Full Code Here

Examples of au.csiro.ontology.importer.ImportException

                    }
                }
            }
        } catch (Throwable t) {
            log.error(t);
            throw new ImportException("Unable to load reference set file. Please check your input configuration file " +
                    "(input type = " + input.getInputType() + ", file=" + refsetFile + ")");
        } finally {
            for (String moduleId : unknownModules) {
                log.warn("Refset: No version information known for " + moduleId + " found in " + refsetFile);
            }
View Full Code Here

Examples of au.csiro.ontology.importer.ImportException

                    "(input type = "+inputType+", file="+conceptsFile+")";
            try {
                loadConceptRows(modMap, conceptMap, input.getInputStream(conceptsFile));
            } catch (NullPointerException e) {
                log.error(message, e);
                throw new ImportException(message, e);
            } catch (IOException e) {
                log.error(message, e);
                throw new ImportException(message, e);
            }
        }

        // Load relationships
        Set<String> relationshipsFiles = input.getStatedRelationshipsFiles();
        if(relationshipsFiles == null || relationshipsFiles.isEmpty()) {
            log.info("Read inferred relationships info");
            relationshipsFiles = input.getRelationshipsFiles();
        } else {
            log.info("Read stated relationships info");
        }

        if(relationshipsFiles == null || relationshipsFiles.isEmpty()) {
            throw new ImportException("No relationships files was specified.");
        }

        for(String relationshipsFile : relationshipsFiles) {
            final String message = "Unable to load relationships file. Please check your input configuration " +
                    "file. (input type = " + inputType+", file=" + relationshipsFile+")";
            try {
                loadRelationshipRows(modMap, relationshipMap, input.getInputStream(relationshipsFile));
            } catch (NullPointerException e) {
                log.error(message, e);
                throw new ImportException(message, e);
            } catch (IOException e) {
                log.error(message, e);
                throw new ImportException(message, e);
            }

            // Load concrete domains refsets
            log.info("Read concrete domains reference set info");
            for (String filename : input.getConcreteDomainRefsetFiles()) {
                try {
                    loadReferenceSet(input, filename, modMap, cdMap, IRefsetFactory.CD);
                } catch (ArrayIndexOutOfBoundsException e) {
                    final String msg = "Error loading concrete domains reference set: " + filename +
                            ". Possibly has wrong number of columns.";
                    log.error(msg, e);
                    throw new ImportException(msg, e);
                }
            }
        }

        VersionRows vr = new VersionRows(conceptMap.values(), relationshipMap.values(), cdMap.values());
View Full Code Here

Examples of au.csiro.ontology.importer.ImportException

            // 1. Load module dependencies
            log.info("Loading module dependencies");
            IModuleDependencyRefset mdr = loadModuleDependencies(in);

            if(mdr == null) {
                throw new ImportException("Couldn't load module dependency reference set for RF2 input files.");
            }

            // Each map entry contains a map of modules indexed by version
            Map<String, Map<String, ModuleDependency>> deps =
                    mdr.getModuleDependencies();
View Full Code Here

Examples of au.csiro.ontology.importer.ImportException

                                        cols[1], active , cols[3], cols[4],
                                        cols[5], cols[6], cols[7]);
                        members.add(m);
                    }
                } else {
                    throw new ImportException("Malformed module dependency reference set with " + cols.length + " columns "+Arrays.asList(cols));
                }
            } catch (IOException e) {
                log.error("Problem reading refset file "+refsetFile, e);
                throw new ImportException("Problem reading refset file ", e);
            } finally {
                if(br != null) {
                    try { br.close(); } catch(Exception e) {}
                }
            }
View Full Code Here

Examples of com.cloudera.sqoop.util.ImportException

    Configuration conf = job.getConfiguration();
    String tableName = conf.get(HBasePutProcessor.TABLE_NAME_KEY);
    String familyName = conf.get(HBasePutProcessor.COL_FAMILY_KEY);

    if (null == tableName) {
      throw new ImportException(
          "Import to HBase error: Table name not specified");
    }

    if (null == familyName) {
      throw new ImportException(
          "Import to HBase error: Column family not specified");
    }

    // Add HBase configuration files to this conf object.
    HBaseConfiguration.addHbaseResources(conf);

    HBaseAdmin admin = new HBaseAdmin(conf);

    // Add authentication token to the job if we're running on secure cluster.
    //
    // We're currently supporting HBase version 0.90 that do not have security
    // patches which means that it do not have required methods
    // "isSecurityEnabled" and "obtainAuthTokenForJob".
    //
    // We're using reflection API to see if those methods are available and call
    // them only if they are present.
    //
    // After we will remove support for HBase 0.90 we can simplify the code to
    // following code fragment:
    /*
    try {
      if (User.isSecurityEnabled()) {
        User user = User.getCurrent();
        user.obtainAuthTokenForJob(conf, job);
      }
    } catch(InterruptedException ex) {
      throw new ImportException("Can't get authentication token", ex);
    }
    */
    try {
      // Get method isSecurityEnabled
      Method isSecurityEnabled = User.class.getMethod("isSecurityEnabled");

      // Get method obtainAuthTokenForJob
      Method obtainAuthTokenForJob = User.class.getMethod(
        "obtainAuthTokenForJob", Configuration.class, Job.class);

      // Get current user
      User user = User.getCurrent();

      // Obtain security token if needed
      if ((Boolean)isSecurityEnabled.invoke(null)) {
        obtainAuthTokenForJob.invoke(user, conf, job);
      }
    } catch (NoSuchMethodException e) {
      LOG.info("It seems that we're running on HBase without security"
        + " additions. Security additions will not be used during this job.");
    } catch (InvocationTargetException e) {
      throw new ImportException("Can't get authentication token", e);
    } catch (IllegalAccessException e) {
      throw new ImportException("Can't get authentication token", e);
    }

    // Check to see if the table exists.
    HTableDescriptor tableDesc = null;
    byte [] familyBytes = Bytes.toBytes(familyName);
View Full Code Here

Examples of com.cloudera.sqoop.util.ImportException

      }
      nextIncrementalValue = manager.datetimeToQueryString(nextVal.toString(),
          checkColumnType);
      break;
    default:
      throw new ImportException("Undefined incremental import type: "
          + incrementalMode);
    }

    // Build the WHERE clause components that are used to import
    // only this incremental section.
    StringBuilder sb = new StringBuilder();
    String prevEndpoint = options.getIncrementalLastValue();

    if (isDateTimeColumn(checkColumnType) && null != prevEndpoint
        && !prevEndpoint.startsWith("\'") && !prevEndpoint.endsWith("\'")) {
      // Incremental imports based on date/time should be 'quoted' in
      // ANSI SQL. If the user didn't specify single-quotes, put them
      // around, here.
      prevEndpoint = manager.datetimeToQueryString(prevEndpoint,
          checkColumnType);
    }

    String checkColName = manager.escapeColName(
        options.getIncrementalTestColumn());
    LOG.info("Incremental import based on column " + checkColName);
    if (null != prevEndpoint) {
      if (prevEndpoint.equals(nextIncrementalValue)) {
        LOG.info("No new rows detected since last import.");
        return false;
      }
      LOG.info("Lower bound value: " + prevEndpoint);
      sb.append(checkColName);
      switch (incrementalMode) {
      case AppendRows:
        sb.append(" > ");
        break;
      case DateLastModified:
        sb.append(" >= ");
        break;
      default:
        throw new ImportException("Undefined comparison");
      }
      sb.append(prevEndpoint);
      sb.append(" AND ");
    }

    if (null != nextIncrementalValue) {
      sb.append(checkColName);
      switch (incrementalMode) {
      case AppendRows:
        sb.append(" <= ");
        break;
      case DateLastModified:
        sb.append(" < ");
        break;
      default:
        throw new ImportException("Undefined comparison");
      }
      sb.append(nextIncrementalValue);
    } else {
      sb.append(checkColName);
      sb.append(" IS NULL ");
View Full Code Here

Examples of com.cloudera.sqoop.util.ImportException

    // Default implementation: check that the split column is set
    // correctly.
    String splitCol = getSplitColumn(opts, tableName);
    if (null == splitCol && opts.getNumMappers() > 1) {
      // Can't infer a primary key.
      throw new ImportException("No primary key could be found for table "
          + tableName + ". Please specify one with --split-by or perform "
          + "a sequential import with '-m 1'.");
    }
  }
View Full Code Here

Examples of com.cloudera.sqoop.util.ImportException

    ImportJobBase importer;
    if (opts.getHBaseTable() != null) {
      // Import to HBase.
      if (!HBaseUtil.isHBaseJarPresent()) {
        throw new ImportException("HBase jars are not present in "
            + "classpath, cannot import to HBase!");
      }
      importer = new HBaseImportJob(opts, context);
    } else {
      // Import to HDFS.
View Full Code Here

Examples of com.cloudera.sqoop.util.ImportException

    ImportJobBase importer;
    if (opts.getHBaseTable() != null) {
      // Import to HBase.
      if (!HBaseUtil.isHBaseJarPresent()) {
        throw new ImportException("HBase jars are not present in classpath,"
            + " cannot import to HBase!");
      }
      importer = new HBaseImportJob(opts, context);
    } else {
      // Import to HDFS.
      importer = new DataDrivenImportJob(opts, context.getInputFormat(),
          context);
    }

    String splitCol = getSplitColumn(opts, null);
    if (splitCol == null) {
      String boundaryQuery = opts.getBoundaryQuery();
      if (opts.getNumMappers() > 1) {
        // Can't infer a primary key.
        throw new ImportException("A split-by column must be specified for "
            + "parallel free-form query imports. Please specify one with "
            + "--split-by or perform a sequential import with '-m 1'.");
      } else if (boundaryQuery != null && !boundaryQuery.isEmpty()) {
        // Query import with boundary query and no split column specified
        throw new ImportException("Using a boundary query for a query based "
            + "import requires specifying the split by column as well. Please "
            + "specify a column name using --split-by and try again.");
      }
    }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Corporation. Contact coftware#gmail.com.