Package org.apache.hadoop.eclipse.server

Examples of org.apache.hadoop.eclipse.server.HadoopServer$PingJob


  /* @inheritDoc */
  public Object[] getChildren(Object parent) {

    if (parent instanceof HadoopServer) {
      HadoopServer location = (HadoopServer) parent;
      location.addJobListener(this);
      Collection<HadoopJob> jobs = location.getJobs();
      return jobs.toArray();
    }

    return null;
  }


  }

  /* @inheritDoc */
  public String getColumnText(Object element, int columnIndex) {
    if (element instanceof HadoopServer) {
      HadoopServer server = (HadoopServer) element;

      switch (columnIndex) {
        case 0:
          return server.getLocationName();
        case 1:
          return server.getMasterHostName().toString();
        case 2:
          return server.getState();
        case 3:
          return "";
      }
    } else if (element instanceof HadoopJob) {
      HadoopJob job = (HadoopJob) element;

      if ((selection != null) && (selection instanceof IStructuredSelection)) {
        Object selItem =
            ((IStructuredSelection) selection).getFirstElement();

        if (selItem instanceof HadoopServer) {
          HadoopServer location = (HadoopServer) selItem;
          if (MessageDialog.openConfirm(Display.getDefault()
              .getActiveShell(), "Confirm delete Hadoop location",
              "Do you really want to remove the Hadoop location: "
                  + location.getLocationName())) {
            ServerRegistry.getInstance().removeServer(location);
          }

        } else if (selItem instanceof HadoopJob) {

    String dir = ensureTrailingSlash(path);

    log.log(Level.FINER, "Computed Server URL", new Object[] { dir, user,
        hostname });

    HadoopServer server = ServerRegistry.getInstance().getServer(serverid);

    try {
      Session session = server.createSession();
      // session.setTimeout(TIMEOUT);

      log.log(Level.FINER, "Connected");

      /*
       * COMMENTED(jz) removing server start/stop support for now if (!
       * attributes.containsKey("hadoop.jar")) { // start or stop server if(
       * server.getServerState() == IServer.STATE_STARTING ) { String command =
       * dir + "bin/start-all.sh"; execInConsole(session, command); } else if(
       * server.getServerState() == IServer.STATE_STOPPING ) { String command =
       * dir + "bin/stop-all.sh"; execInConsole(session, command); } }
       */

      // Leftover guard from the commented-out start/stop support above;
      // the else branch below is always taken.
      if (false) {
      } else {
        FileInputStream fis = null;
        String jarFile, remoteFile = null;

        if (attributes.containsKey("hadoop.jar")) {
          jarFile = (String) attributes.get("hadoop.jar");
        } else {
          String memento = (String) attributes.get("hadoop.jarrable");
          JarModule fromMemento = JarModule.fromMemento(memento);
          jarFile = fromMemento.buildJar(new SubProgressMonitor(monitor, 100))
              .toString();
        }

        // Derive the remote file name (basename) from the local jar path;
        // fall back to the path itself when it contains no separator so
        // remoteFile is never null.
        if (jarFile.lastIndexOf('/') > 0) {
          remoteFile = jarFile.substring(jarFile.lastIndexOf('/') + 1);
        } else if (jarFile.lastIndexOf('\\') > 0) {
          remoteFile = jarFile.substring(jarFile.lastIndexOf('\\') + 1);
        } else {
          remoteFile = jarFile;
        }

        // exec 'scp -p -t <remote file>' remotely

        String command = "scp -p -t " + remoteFile;
        Channel channel = session.openChannel("exec");
        ((ChannelExec) channel).setCommand(command);

        // get I/O streams for remote scp
        OutputStream out = channel.getOutputStream();
        final InputStream in = channel.getInputStream();

        channel.connect();

        if (checkAck(in) != 0) {
          throw new CoreException(SSH_FAILED_STATUS1);
        }

        // send "C0644 filesize filename", where filename should not
        // include '/'
        long filesize = (new File(jarFile)).length();
        command = "C0644 " + filesize + " ";
        if (jarFile.lastIndexOf('/') > 0) {
          command += jarFile.substring(jarFile.lastIndexOf('/') + 1);
        } else {
          command += jarFile;
        }

        command += "\n";
        out.write(command.getBytes());
        out.flush();
        if (checkAck(in) != 0) {
          throw new CoreException(SSH_FAILED_STATUS2);
        }

        // send the contents of jarFile
        fis = new FileInputStream(jarFile);
        byte[] buf = new byte[1024];
        while (true) {
          int len = fis.read(buf, 0, buf.length);
          if (len <= 0) {
            break;
          }
          out.write(buf, 0, len); // out.flush();
        }

        fis.close();
        fis = null;
        // send '\0'
        buf[0] = 0;
        out.write(buf, 0, 1);
        out.flush();
        if (checkAck(in) != 0) {
          throw new CoreException(SSH_FAILED_STATUS3);
        }
        out.close();
        channel.disconnect();

        // move the jar file to a temp directory
        String jarDir = "/tmp/hadoopjar"
            + new VMID().toString().replace(':', '_');
        command = "mkdir " + jarDir + ";mv " + remoteFile + " " + jarDir;
        channel = session.openChannel("exec");
        ((ChannelExec) channel).setCommand(command);
        channel.connect();
        channel.disconnect();

        session.disconnect();

        // we create a new session with a zero timeout to prevent the
        // console stream from stalling -- eyhung
        final Session session2 = server.createSessionNoTimeout();

        // now remotely execute hadoop with the jar file we just sent over
        command = dir + "bin/hadoop jar " + jarDir + "/" + remoteFile;
        log.fine("Running command: " + command);
        execInConsole(session2, command, jarDir + "/" + remoteFile);
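The fragment above calls checkAck(in) after each step of the scp protocol, but the helper itself is not part of this listing. A minimal sketch of such a helper, following the usual JSch scp convention (0 = success, 1 = error, 2 = fatal error, with the error text terminated by '\n'), might look like the code below; the method name and exact behavior are assumptions, not the plugin's actual implementation.

  // Hypothetical helper (not shown in the listing above): read the scp
  // acknowledgement byte; 0 means success, 1/2 mean an error whose message
  // follows, terminated by '\n'. Uses java.io.InputStream / IOException.
  static int checkAck(InputStream in) throws IOException {
    int b = in.read();
    if (b == 0 || b == -1) {
      return b;                      // success or end of stream
    }
    if (b == 1 || b == 2) {
      StringBuilder sb = new StringBuilder();
      int c;
      do {
        c = in.read();
        sb.append((char) c);
      } while (c != '\n' && c != -1);
      System.err.print(sb);          // error message from the remote scp
    }
    return b;
  }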

              String location = parts[0];
              parts = new String[] { location, "Hadoop Server" };
            }

            // Four-field entries use the extended constructor; the guard
            // must cover parts[3], so test for more than three parts.
            if (parts.length > 3) {
              servers.add(new HadoopServer(parts[0], parts[1], parts[2],
                  parts[3]));
            } else {
              servers.add(new HadoopServer(parts[0], parts[1]));
            }

            servers.get(servers.size() - 1).setId(servers.size() - 1);

          } catch (Exception e) {

  /**
   * Performs any actions appropriate in response to the user having pressed
   * the Finish button, or refuses if finishing now is not permitted.
   */
  public boolean performFinish() {
    HadoopServer location = null;
    if (mainPage.createNew.getSelection()) {
      location = createNewPage.performFinish();
    } else if (mainPage.table.getSelection().length == 1) {
      location = (HadoopServer) mainPage.table.getSelection()[0].getData();
    }

    if (location != null) {
      location.runJar(jar, progressMonitor);

      return true;
    }

    return false;

  }

  /* @inheritDoc */
  public String getColumnText(Object element, int columnIndex) {
    if (element instanceof HadoopServer) {
      HadoopServer server = (HadoopServer) element;

      switch (columnIndex) {
        case 0:
          return server.getName();
        case 1:
          return server.getHostName().toString();
        case 2:
          return server.getState();
        case 3:
          return "";
      }
    } else if (element instanceof HadoopJob) {
      HadoopJob job = (HadoopJob) element;

            + installPath.getText();

    if (editedServer == null) {
      // Create and register the new HadoopServer
      this.editedServer =
          new HadoopServer(uri, serverName.getText(), (useSSHTunnel
              .getSelection()) ? tunnelHostName.getText() : null,
              (useSSHTunnel.getSelection()) ? tunnelUserName.getText()
                  : null);
      ServerRegistry.getInstance().addServer(this.editedServer);

  private void testLocation() {
    ProgressMonitorDialog dialog = new ProgressMonitorDialog(getShell());
    dialog.setOpenOnRun(true);

    try {
      final HadoopServer location = defineServerFromValues();

      try {
        dialog.run(true, false, new IRunnableWithProgress() {
          public void run(IProgressMonitor monitor)
              throws InvocationTargetException, InterruptedException {
            Session session = null;
            try {
              session = location.createSession();
              try {
                ChannelExec channel =
                    (ChannelExec) session.openChannel("exec");
                channel.setCommand(location.getInstallPath()
                    + "/bin/hadoop version");
                BufferedReader response =
                    new BufferedReader(new InputStreamReader(channel
                        .getInputStream()));
                channel.connect();
                final String versionLine = response.readLine();

                if ((versionLine != null)
                    && versionLine.startsWith("Hadoop")) {
                  Display.getDefault().syncExec(new Runnable() {
                    public void run() {
                      setMessage("Found " + versionLine,
                          IMessageProvider.INFORMATION);
                    }
                  });
                } else {
                  Display.getDefault().syncExec(new Runnable() {
                    public void run() {
                      setMessage("No Hadoop Found in this location",
                          IMessageProvider.WARNING);
                    }
                  });
                }
              } finally {
                session.disconnect();
                location.dispose();
              }
            } catch (final JSchException e) {
              Display.getDefault().syncExec(new Runnable() {
                public void run() {
                  System.err.println(e.getMessage());

      if ((selection != null) && (selection instanceof IStructuredSelection)) {
        Object selItem =
            ((IStructuredSelection) selection).getFirstElement();

        if (selItem instanceof HadoopServer) {
          HadoopServer location = (HadoopServer) selItem;
          ServerRegistry.getInstance().removeServer(location);

        } else if (selItem instanceof HadoopJob) {

          // kill the job
          HadoopJob job = (HadoopJob) selItem;
          HadoopServer server = job.getServer();
          String jobId = job.getJobId();

          if (job.isCompleted())
            return;

          try {
            Session session = server.createSession();

            String command =
                server.getInstallPath() + "/bin/hadoop job -kill " + jobId;
            Channel channel = session.openChannel("exec");
            ((ChannelExec) channel).setCommand(command);
            channel.connect();
            channel.disconnect();
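Both here and in the jar-upload code earlier, the exec channel is connected and then disconnected straight away, so the caller never learns whether the remote command (the mkdir/mv, or hadoop job -kill) actually succeeded. A small, hypothetical helper along the following lines would wait for the command to finish and surface its exit status; the name execAndWait and the polling interval are assumptions, not part of the plugin, and it relies on the same JSch and java.io classes used above.

  // Hypothetical helper: run a command over an existing JSch session and
  // block until it finishes, returning the remote exit status.
  static int execAndWait(Session session, String command)
      throws JSchException, IOException, InterruptedException {
    ChannelExec channel = (ChannelExec) session.openChannel("exec");
    channel.setCommand(command);
    channel.setInputStream(null);
    InputStream stdout = channel.getInputStream();
    channel.connect();
    byte[] buf = new byte[1024];
    try {
      while (true) {
        // Drain output so the remote process cannot block on a full pipe.
        while (stdout.available() > 0 && stdout.read(buf) >= 0) {
          // discard
        }
        if (channel.isClosed()) {
          return channel.getExitStatus();
        }
        Thread.sleep(100);
      }
    } finally {
      channel.disconnect();
    }
  }

For instance, the kill above could call execAndWait(session, command) and report a non-zero status to the user instead of disconnecting blindly.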


