Package com.jcraft.jsch

Examples of com.jcraft.jsch.ChannelExec


  }

  public void downloadToLocalFile(File localFile) throws JSchException,
      IOException, InvocationTargetException, InterruptedException {

    final ChannelExec exec =

    exec(" dfs " + DfsFolder.s_whichFS + " -cat " + getPath());

    final OutputStream os =
        new BufferedOutputStream(new FileOutputStream(localFile));

    try {
      PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
          new IRunnableWithProgress() {
            public void run(IProgressMonitor monitor)
                throws InvocationTargetException {
              try {
                monitor.beginTask("View file from Distributed File System",
                    IProgressMonitor.UNKNOWN);
                exec.connect();
                BufferedInputStream stream =
                    new BufferedInputStream(exec.getInputStream());

                byte[] buffer = new byte[1024];
                int bytes;

                while ((bytes = stream.read(buffer)) >= 0) {
                  if (monitor.isCanceled()) {
                    os.close();
                    return;
                  }

                  monitor.worked(1);
                  os.write(buffer, 0, bytes);
                }

                monitor.done();
              } catch (Exception e) {
                throw new InvocationTargetException(e);
              }
            }
          });
    } finally {
      if (exec.isConnected()) {
        exec.disconnect();
      }
      os.close();
    }
  }
View Full Code Here


  protected String getPath() {
    return this.path;
  }

  protected ChannelExec exec(String command) throws JSchException {
    ChannelExec channel = (ChannelExec) getSession().openChannel("exec");
    channel.setCommand(location.getInstallPath() + "/bin/hadoop " + command);
    channel.setErrStream(System.err);
    // channel.connect();

    return channel;
  }
View Full Code Here

  // NOTE(review): this method is truncated in this view — the created Job is
  // never scheduled within the visible lines; confirm the continuation calls
  // job.schedule().
  protected void doExec(final String command) {
    // Run an arbitrary "hadoop dfs" sub-command as a background Eclipse Job,
    // then refresh the tree view on success.
    org.eclipse.core.runtime.jobs.Job job =
        new org.eclipse.core.runtime.jobs.Job("DFS operation: " + command) {
          @Override
          protected IStatus run(IProgressMonitor monitor) {
            ChannelExec exec = null;
            monitor.beginTask("Execute remote dfs  command", 100);
            try {
              exec = exec(" " + command);
              monitor.worked(33);

              exec.connect();
              monitor.worked(33);

              BufferedReader reader =
                  new BufferedReader(new InputStreamReader(
                      new BufferedInputStream(exec.getInputStream())));
              // Blocking read forces us to wait until the remote command has
              // produced output. NOTE(review): `response` is unused and
              // `reader` is never closed — confirm.
              String response = reader.readLine(); // TIDY(jz)
              monitor.worked(34);

              monitor.done();

              refresh();

              return Status.OK_STATUS;
            } catch (Exception e) {
              e.printStackTrace();
              return new Status(IStatus.ERROR, Activator.PLUGIN_ID, -1,
                  "DFS operation failed: " + e.getLocalizedMessage(), e);
            } finally {
              // Always tear down the channel, even on failure.
              if (exec != null) {
                exec.disconnect();
              }
            }
          }
        };
View Full Code Here

  // Private: folder nodes are only constructed internally (e.g. while
  // listing a parent directory in doRefresh()).
  private DfsFolder(DfsPath parent, String path) {
    super(parent, path);
  }

  public Object[] getChildren() {
    ChannelExec channel = null;
    if (children == null) {
      doRefresh();
      return new Object[] { "Loading..." };
    } else {
      return children;
View Full Code Here

  // NOTE(review): this method is truncated in this view (the catch block's
  // Status construction is cut off); code below is annotated only.
  public void doRefresh() {
    // Asynchronously re-list this folder via "hadoop dfs -ls", rebuild the
    // cached children array, then notify the view via super.doRefresh().
    new Job("Refresh DFS Children") {
      @Override
      protected IStatus run(IProgressMonitor monitor) {
        try {
          ChannelExec channel =
              exec(" dfs " + s_whichFS + " -ls " + getPath());
          InputStream is = channel.getInputStream();
          BufferedReader in =
              new BufferedReader(new InputStreamReader(
                  new BufferedInputStream(is)));

          // exec() does not connect the channel; do it before reading.
          if (!channel.isConnected()) {
            channel.connect();
          }

          try {
            // initial "found n items" line ignorable
            // NOTE(review): this error branch sets a placeholder but does not
            // return, so the code below immediately overwrites `children` —
            // confirm whether an early return was intended.
            if (in.readLine() == null) {
              children =
                  new Object[] { "An error occurred: empty result from dfs -ls" };
            }

            String line;
            // NOTE(review): this local shadows the DfsFolder `children` field
            // assigned further down — consider renaming for clarity.
            List<DfsPath> children = new ArrayList<DfsPath>();
            while ((line = in.readLine()) != null) {
              String[] parts = line.split("\t");

              // NOTE(review): logs parts[0] on every iteration even though the
              // loop varies `i`; parts[i] was probably intended — confirm.
              for (int i = 0; i < parts.length; i++) {
                log.fine(parts[0]);
              }

              // The second tab-separated column distinguishes directories
              // ("<dir>") from plain files in the -ls output.
              if (parts[1].equals("<dir>")) {
                children.add(new DfsFolder(DfsFolder.this, parts[0]));
              } else {
                children.add(new DfsFile(DfsFolder.this, parts[0]));
              }
            }

            DfsFolder.this.children = children.toArray();

            DfsFolder.super.doRefresh();

            return Status.OK_STATUS;
          } finally {
            // NOTE(review): `in` is never closed; disconnecting the channel
            // may be relied on to release the underlying stream — confirm.
            if (channel.isConnected()) {
              channel.disconnect();
            }
          }
        } catch (Exception e) {
          e.printStackTrace();
          return new Status(IStatus.ERROR, Activator.PLUGIN_ID, -1,
View Full Code Here

              }

              // (fragment — enclosing method starts outside this view)
              // Move previously staged files into DFS with "-moveFromLocal",
              // then refresh the tree.
              final File dir = new File(directory);

              sub.beginTask("Move files from staging server to DFS", 1);
              ChannelExec exec =
                  exec(" dfs " + s_whichFS
                      + " -moveFromLocal /tmp/hadoop_scp_" + guid + " \""
                      + getPath() + "/" + dir.getName() + "\"");
              BufferedReader reader =
                  new BufferedReader(new InputStreamReader(
                      new BufferedInputStream(exec.getInputStream())));

              // Reading one line blocks until the remote move produces
              // output. NOTE(review): `line` is otherwise unused and `reader`
              // is never closed — confirm.
              if (!monitor.isCanceled()) {
                exec.connect();
                String line = reader.readLine();
                sub.worked(1);
              }

              if (exec.isConnected()) {
                exec.disconnect();
              }

              sub.done();

              monitor.done();
              doRefresh();
            } catch (Exception e) {
              log.log(Level.SEVERE, "", e);
              throw new InvocationTargetException(e);
            }
          }

          // NOTE(review): this method is truncated in this view (closing
          // braces are cut off); code below is annotated only.
          //
          // Copies a local file or directory tree to the remote host by
          // driving the raw scp "sink" protocol ("scp -p -t <to>"),
          // recursing once per directory entry.
          public void scp(String from, String to, IProgressMonitor monitor) {
            File file = new File(from);
            ChannelExec channel = null;

            // One 100-unit tick per child (plus one for the mkdir) when
            // copying a directory; a single tick of 100 for a plain file.
            monitor.beginTask("scp from " + from + " to " + to, 100 * (file
                .isDirectory() ? file.list().length + 1 : 1));

            if (monitor.isCanceled()) {
              return;
            }

            if (file.isDirectory()) {
              // mkdir
              try {
                channel = (ChannelExec) getSession().openChannel("exec");
                channel.setCommand(" mkdir " + to);
                InputStream in = channel.getInputStream();
                channel.connect();
                // in.read(); // wait for a response, which
                // we'll then ignore
              } catch (JSchException e) {
                // BUG(jz) abort operation and display error
                throw new RuntimeException(e);
              } catch (IOException e) {
                throw new RuntimeException(e);
              } finally {
                // NOTE(review): if openChannel() itself threw, `channel` is
                // still null here and this dereference NPEs — confirm and
                // add a null guard.
                if (channel.isConnected()) {
                  channel.disconnect();
                }
              }

              monitor.worked(100);

              String[] children = file.list();
              for (int i = 0; i < children.length; i++) {
                File child = new File(file, children[i]);

                // recurse
                scp(new File(file, children[i]).getAbsolutePath(), to + "/"
                    + children[i], new SubProgressMonitor(monitor, 100));
              }
            } else {
              InputStream filein = null;

              try {
                channel = (ChannelExec) getSession().openChannel("exec");
                (channel).setCommand("scp -p -t " + to);
                BufferedOutputStream out =
                    new BufferedOutputStream(channel.getOutputStream());
                InputStream in = channel.getInputStream();
                channel.connect();

                // scp protocol: a 0 byte from the remote sink means "ready".
                if (in.read() == 0) {
                  // NOTE(review): integer division — `step` is 0 for files
                  // larger than 100 bytes and this throws
                  // ArithmeticException for an empty file; confirm the
                  // intended progress granularity.
                  int step = (int) (100 / new File(from).length());
                  // Sink header line: file mode, size, and target basename.
                  out.write(("C0644 " + new File(from).length() + " "
                      + new File(to).getName() + "\n").getBytes());
                  out.flush();
                  if (in.read() != 0) {
                    throw new RuntimeException("Copy failed");
                  }

                  filein =
                      new BufferedInputStream(new FileInputStream(from));

                  byte[] buffer = new byte[1024];
                  int bytes;
                  while ((bytes = filein.read(buffer)) > -1) {
                    if (monitor.isCanceled()) {
                      return;
                    }

                    out.write(buffer, 0, bytes);
                    monitor.worked(step);
                  }

                  // A terminating NUL tells the sink the file body is done.
                  out.write("\0".getBytes());
                  out.flush();

                  if (in.read() != 0) {
                    throw new RuntimeException("Copy failed");
                  }
                  out.close();
                } else {
                  // problems with copy
                  throw new RuntimeException("Copy failed");
                }
              } catch (JSchException e) {
                e.printStackTrace();
                throw new RuntimeException(e);
              } catch (IOException e) {
                throw new RuntimeException(e);
              } finally {
                // NOTE(review): same potential NPE as above if openChannel()
                // threw before `channel` was assigned — confirm.
                if (channel.isConnected()) {
                  channel.disconnect();
                }
                // NOTE(review): `filein` may still be null here (early
                // failure before it is opened), which would NPE inside this
                // cleanup — confirm and guard.
                try {
                  filein.close();
                } catch (IOException e) {
                }
View Full Code Here

   * @param jarFile The jar file containing the classes for the Hadoop job
   * @throws JSchException
   */
  // NOTE(review): this method is truncated in this view (the final closing
  // brace is cut off); code below is annotated only.
  //
  // Runs `command` on the remote host and streams its stdout into a new
  // Eclipse console; afterwards removes the remote temp directory that held
  // the uploaded job jar.
  private void execInConsole(final Session session, final String command,
      final String jarFile) throws JSchException {
    final ChannelExec channel = (ChannelExec) session.openChannel("exec");

    final MessageConsole console = new MessageConsole("Hadoop: " + command,
        null);
    // NOTE(review): `stream` is never used below — confirm it can be removed.
    final MessageConsoleStream stream = console.newMessageStream();

    final IOConsoleOutputStream out = console.newOutputStream();
    // NOTE(review): `err` is created and colored but the channel's error
    // stream is never wired to it, so remote stderr appears to be dropped —
    // confirm.
    final IOConsoleOutputStream err = console.newOutputStream();

    out.setColor(black);
    err.setColor(red);

    ConsolePlugin.getDefault().getConsoleManager().addConsoles(
        new IConsole[] { console });
    ConsolePlugin.getDefault().getConsoleManager().showConsoleView(console);

    channel.setCommand(command);
    channel.setInputStream(null);

    channel.connect();
    // Pump remote stdout to the console on a background thread so the
    // calling (UI) thread is not blocked for the lifetime of the job.
    new Thread() {
      @Override
      public void run() {
        try {

          BufferedReader hadoopOutput = new BufferedReader(
              new InputStreamReader(channel.getInputStream()));

          String stdoutLine;
          while ((stdoutLine = hadoopOutput.readLine()) != null) {
            out.write(stdoutLine);
            out.write('\n');
            continue;
          }

          channel.disconnect();

          // meaningless call meant to prevent console from being
          // garbage collected -- eyhung
          console.getName();
          // Second channel: delete the remote temp dir holding the jar.
          ChannelExec channel2 = (ChannelExec) session.openChannel("exec");
          channel2.setCommand("rm -rf "
              + jarFile.substring(0, jarFile.lastIndexOf("/")));
          log.fine("Removing temp file "
              + jarFile.substring(0, jarFile.lastIndexOf("/")));
          channel2.connect();
          channel2.disconnect();

        } catch (Exception e) {
          // NOTE(review): all exceptions are silently swallowed here; at
          // minimum they should be logged — confirm.
        }
      }
    }.start();
View Full Code Here

              throws InvocationTargetException, InterruptedException {
            // (fragment — signature and continuation outside this view)
            // Probes the remote Hadoop install by running "bin/hadoop version"
            // and inspecting the first line of its output.
            Session session = null;
            try {
              session = location.createSession();
              try {
                ChannelExec channel =
                    (ChannelExec) session.openChannel("exec");
                channel.setCommand(location.getInstallPath()
                    + "/bin/hadoop version");
                BufferedReader response =
                    new BufferedReader(new InputStreamReader(channel
                        .getInputStream()));
                channel.connect();
                final String versionLine = response.readLine();

                // A working install prints a line starting with "Hadoop".
                if ((versionLine != null)
                    && versionLine.startsWith("Hadoop")) {
                  Display.getDefault().syncExec(new Runnable() {

        throws BuildException {
        // (fragment — Ant sshexec-style task; signature and continuation are
        // outside this view.) Runs `cmd` remotely, teeing remote stdout and
        // extended output to both an in-memory buffer and System.out, and
        // enforces a wall-clock timeout via a watchdog thread.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        TeeOutputStream tee = new TeeOutputStream(out, new KeepAliveOutputStream(System.out));

        try {
            final ChannelExec channel;
            session.setTimeout((int) maxwait);
            /* execute the command */
            channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand(cmd);
            channel.setOutputStream(tee);
            channel.setExtOutputStream(tee);
            channel.connect();
            // wait for it to finish
            // Watchdog: poll until the channel closes, or exit early if the
            // outer code nulls `thread` (used as a cancellation flag below).
            thread =
                new Thread() {
                    public void run() {
                        while (!channel.isClosed()) {
                            if (thread == null) {
                                return;
                            }
                            try {
                                sleep(RETRY_INTERVAL);
                            } catch (Exception e) {
                                // ignored
                            }
                        }
                    }
                };

            thread.start();
            thread.join(maxwait);

            if (thread.isAlive()) {
                // ran out of time
                thread = null;
                if (getFailonerror()) {
                    throw new BuildException(TIMEOUT_MESSAGE);
                } else {
                    log(TIMEOUT_MESSAGE, Project.MSG_ERR);
                }
            } else {
                //success
                if (outputFile != null) {
                    writeToFile(out.toString(), append, outputFile);
                }

                // this is the wrong test if the remote OS is OpenVMS,
                // but there doesn't seem to be a way to detect it.
                int ec = channel.getExitStatus();
                if (ec != 0) {
                    String msg = "Remote command failed with exit status " + ec;
                    if (getFailonerror()) {
                        throw new BuildException(msg);
                    } else {

    }

  }

  public Process ssh(String cmd) throws JSchException, IOException {
    ChannelExec channel = (ChannelExec) session.openChannel("exec");
    channel.setCommand(cmd);
    channel.setPty(true);
    channel.connect();
    return new SSHProcess(channel);
  }
View Full Code Here

TOP

Related Classes of com.jcraft.jsch.ChannelExec

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.