Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.CopyWork

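CopyWork is the plan descriptor behind Hive's copy task: it records a source location, a target location and a flag that (in these versions) appears to control whether an empty source is treated as an error, and TaskFactory turns it into a runnable task. The LOAD, IMPORT and EXPORT analyzers in the extracts below all use it to stage files into a temporary or external directory before the final load/move or DDL step. A minimal sketch of that shared pattern, assuming the String-based constructor seen in most extracts and an already-configured HiveConf; the class and variable names here are illustrative, not taken from the Hive sources:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.plan.CopyWork;

public class CopyWorkSketch {
  // Builds a copy task that stages srcLocation into stagingDir.
  // The boolean is assumed to be the "error on empty source" flag.
  public static Task<?> makeCopyTask(String srcLocation, String stagingDir, HiveConf conf) {
    CopyWork copyWork = new CopyWork(srcLocation, stagingDir, false);
    return TaskFactory.get(copyWork, conf);
  }
}

In the extracts the returned task is either added to rootTasks directly or chained in front of a move/DDL task that runs once the data has been staged.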

      // might seem redundant in the case
      // that the hive warehouse is also located in the local file system - but
      // that's just a test case.
      String copyURIStr = ctx.getExternalTmpFileURI(toURI);
      URI copyURI = URI.create(copyURIStr);
      try {
        rTask = TaskFactory.get(new CopyWork(URIUtil.decode(fromURI.toString()), copyURIStr),
            conf);
      } catch (URIException e) {
        throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(fromTree, e
            .getMessage()), e);
      }
View Full Code Here


  }

  private Task<?> loadTable(URI fromURI, Table table) {
    Path dataPath = new Path(fromURI.toString(), "data");
    String tmpURI = ctx.getExternalTmpFileURI(fromURI);
    Task<?> copyTask = TaskFactory.get(new CopyWork(dataPath.toString(),
        tmpURI, false), conf);
    LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI.toString(),
        ctx.getExternalTmpFileURI(fromURI),
        Utilities.getTableDesc(table), new TreeMap<String, String>(),
        false);
View Full Code Here
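
The extract above ends before the load step. In this version of Hive the LoadTableDesc is then wrapped in a MoveWork whose task is made dependent on the copy task, so the move into the table only runs after the data has been staged. A hedged sketch of that continuation; the five-argument MoveWork constructor is an assumption about this Hive era, not something shown on this page:

    // Assumed continuation of loadTable(): run the load/move only after the copy.
    Task<?> loadTableTask = TaskFactory.get(new MoveWork(getInputs(), getOutputs(),
        loadTableWork, null, false), conf);
    copyTask.addDependentTask(loadTableTask);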

      addPartitionDesc.setLocation(tgtPath.toString());
      LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition "
          + partSpecToString(addPartitionDesc.getPartSpec())
          + " with source location: " + srcLocation);
      String tmpURI = ctx.getExternalTmpFileURI(fromURI);
      Task<?> copyTask = TaskFactory.get(new CopyWork(srcLocation,
          tmpURI, false), conf);
      Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),
          getOutputs(), addPartitionDesc), conf);
      LoadTableDesc loadTableWork = new LoadTableDesc(tmpURI,
          ctx.getExternalTmpFileURI(fromURI),
View Full Code Here


      // might seem redundant in the case
      // that the hive warehouse is also located in the local file system - but
      // that's just a test case.
      String copyURIStr = ctx.getExternalTmpFileURI(toURI);
      URI copyURI = URI.create(copyURIStr);
      rTask = TaskFactory.get(new CopyWork(fromURI.toString(), copyURIStr),
          conf);
      fromURI = copyURI;
    }

    // create final load/move work
View Full Code Here

      if (ts.tableHandle.isPartitioned()) {
        partitions = (ts.partitions != null) ? ts.partitions : db.getPartitions(ts.tableHandle);
      }
      Path path = new Path(ctx.getLocalTmpPath(), "_metadata");
      EximUtil.createExportDump(FileSystem.getLocal(conf), path, ts.tableHandle, partitions);
      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
          path, new Path(toURI), false), conf);
      rootTasks.add(rTask);
      LOG.debug("_metadata file written into " + path.toString()
          + " and then copied to " + toURI.toString());
    } catch (Exception e) {
      throw new SemanticException(
          ErrorMsg.GENERIC_ERROR
              .getMsg("Exception while writing out the local file"), e);
    }

    Path parentPath = new Path(toURI);

    if (ts.tableHandle.isPartitioned()) {
      for (Partition partition : partitions) {
        Path fromPath = partition.getDataLocation();
        Path toPartPath = new Path(parentPath, partition.getName());
        Task<? extends Serializable> rTask = TaskFactory.get(
            new CopyWork(fromPath, toPartPath, false),
            conf);
        rootTasks.add(rTask);
        inputs.add(new ReadEntity(partition));
      }
    } else {
      Path fromPath = ts.tableHandle.getDataLocation();
      Path toDataPath = new Path(parentPath, "data");
      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
          fromPath, toDataPath, false), conf);
      rootTasks.add(rTask);
      inputs.add(new ReadEntity(ts.tableHandle));
    }
    outputs.add(new WriteEntity(parentPath, toURI.getScheme().equals("hdfs")));
View Full Code Here
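
Note that this export extract constructs CopyWork from Path objects, whereas the extract below it (and the LOAD/IMPORT extracts above) pass plain strings; both constructor forms appear on this page, presumably because the extracts come from different Hive versions, so which one compiles depends on the Hive release on the classpath. Side by side, reusing the names from the non-partitioned branch above:

      // String-based form (as in the other extracts on this page):
      Task<? extends Serializable> strCopy = TaskFactory.get(
          new CopyWork(fromPath.toString(), toDataPath.toString(), false), conf);
      // Path-based form (as in this extract):
      Task<? extends Serializable> pathCopy = TaskFactory.get(
          new CopyWork(fromPath, toDataPath, false), conf);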


        partitions = (ts.partitions != null) ? ts.partitions : db.getPartitions(ts.tableHandle);
      }
      String tmpfile = ctx.getLocalTmpFileURI();
      Path path = new Path(tmpfile, "_metadata");
      EximUtil.createExportDump(FileSystem.getLocal(conf), path, ts.tableHandle, partitions);
      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
          path.toString(), toURI.toString(), false), conf);
      rootTasks.add(rTask);
      LOG.debug("_metadata file written into " + path.toString()
          + " and then copied to " + toURI.toString());
    } catch (Exception e) {
      throw new SemanticException(
          ErrorMsg.GENERIC_ERROR
              .getMsg("Exception while writing out the local file"), e);
    }

    if (ts.tableHandle.isPartitioned()) {
      for (Partition partition : partitions) {
        URI fromURI = partition.getDataLocation();
        Path toPartPath = new Path(toURI.toString(), partition.getName());
        Task<? extends Serializable> rTask = TaskFactory.get(
            new CopyWork(fromURI.toString(), toPartPath.toString(), false),
            conf);
        rootTasks.add(rTask);
        inputs.add(new ReadEntity(partition));
      }
    } else {
      URI fromURI = ts.tableHandle.getDataLocation();
      Path toDataPath = new Path(toURI.toString(), "data");
      Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
          fromURI.toString(), toDataPath.toString(), false), conf);
      rootTasks.add(rTask);
      inputs.add(new ReadEntity(ts.tableHandle));
    }
    outputs.add(new WriteEntity(toURI.toString(),
View Full Code Here

