Examples of LogKey
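The snippets below show AggregatedLogFormat.LogKey in use: a LogKey wraps a container ID and acts as the per-container key when writing aggregated YARN log files (LogWriter/LogValue) and when reading them back (LogReader).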


Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

    try {
      Map<String, Map<String, String>> logMap =
          new HashMap<String, Map<String, String>>();
      DataInputStream valueStream;

      LogKey key = new LogKey();
      valueStream = reader.next(key);

      while (valueStream != null) {
        LOG.info("Found container " + key.toString());
        Map<String, String> perContainerMap = new HashMap<String, String>();
        logMap.put(key.toString(), perContainerMap);

        while (true) {
          try {
            DataOutputBuffer dob = new DataOutputBuffer();
            LogReader.readAContainerLogsForALogType(valueStream, dob);

            DataInputBuffer dib = new DataInputBuffer();
            dib.reset(dob.getData(), dob.getLength());

            Assert.assertEquals("\nLogType:", dib.readUTF());
            String fileType = dib.readUTF();

            Assert.assertEquals("\nLogLength:", dib.readUTF());
            String fileLengthStr = dib.readUTF();
            long fileLength = Long.parseLong(fileLengthStr);

            Assert.assertEquals("\nLog Contents:\n", dib.readUTF());
            byte[] buf = new byte[(int) fileLength]; // cast is okay in this
                                                     // test.
            dib.read(buf, 0, (int) fileLength);
            perContainerMap.put(fileType, new String(buf));

            LOG.info("LogType:" + fileType);
            LOG.info("LogType:" + fileLength);
            LOG.info("Log Contents:\n" + perContainerMap.get(fileType));
          } catch (EOFException eof) {
            break;
          }
        }

        // Next container
        key = new LogKey();
        valueStream = reader.next(key);
      }

      // 1 for each container
      Assert.assertEquals(expectedContainerIds.length, logMap.size());
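The excerpt above walks every container key in an aggregated log file and parses each log type back out of a DataOutputBuffer. Pieced together from the excerpts on this page, a minimal standalone read loop looks roughly like the sketch below; conf and remoteAppLogFile are placeholder names for a Configuration and the Path of an aggregated log file, and the PrintStream overload of readAContainerLogsForALogType used in the later excerpts is assumed.

    // Sketch: iterate every container in an aggregated log file and print its logs.
    // Assumed imports: org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey,
    // org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader,
    // java.io.DataInputStream, java.io.EOFException.
    LogReader reader = new LogReader(conf, remoteAppLogFile);
    try {
      LogKey key = new LogKey();
      DataInputStream valueStream = reader.next(key);
      while (valueStream != null) {
        System.out.println("Container: " + key);
        while (true) {
          try {
            // Emits one "LogType / LogLength / Log Contents" block per log file.
            LogReader.readAContainerLogsForALogType(valueStream, System.out);
          } catch (EOFException eof) {
            break; // no more log types for this container
          }
        }
        // Next container
        key = new LogKey();
        valueStream = reader.next(key);
      }
    } finally {
      reader.close();
    }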

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

              + "] is not authorized to view the logs for " + logEntity)._();
      return;
    }

    DataInputStream valueStream;
    LogKey key = new LogKey();
    try {
      valueStream = reader.next(key);
      while (valueStream != null
          && !key.toString().equals(containerId.toString())) {
        valueStream = reader.next(key);
      }
      if (valueStream == null) {
        html.h1()._(
            "Logs not available for " + logEntity

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

    }

    LOG.info("Uploading logs for container " + containerId
        + ". Current good log dirs are "
        + StringUtils.join(",", dirsHandler.getLogDirs()));
    LogKey logKey = new LogKey(containerId);
    LogValue logValue = new LogValue(dirsHandler.getLogDirs(), containerId);
    try {
      this.writer.append(logKey, logValue);
    } catch (IOException e) {
      LOG.error("Couldn't upload logs for " + containerId
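This excerpt stops before the surrounding code that creates and closes the writer. Combined with the LogWriter test further down the page, a rough end-to-end write sketch looks like the following; conf, remoteAppLogFile, containerId, and localLogDirs (a List<String> of local log directories) are placeholder names, and the three-argument LogValue constructor from the later excerpts is assumed.

    // Sketch: append one container's local log directories to an aggregated log file.
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    LogWriter writer = new LogWriter(conf, remoteAppLogFile, ugi);
    try {
      LogKey logKey = new LogKey(containerId);
      LogValue logValue =
          new LogValue(localLogDirs, containerId, ugi.getShortUserName());
      writer.append(logKey, logValue); // one key/value pair per container
    } finally {
      writer.closeWriter();
    }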

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

  private int dumpAContainerLogs(String containerIdStr,
      AggregatedLogFormat.LogReader reader, PrintStream out)
      throws IOException {
    DataInputStream valueStream;
    LogKey key = new LogKey();
    valueStream = reader.next(key);

    while (valueStream != null && !key.toString().equals(containerIdStr)) {
      // Next container
      key = new LogKey();
      valueStream = reader.next(key);
    }

    if (valueStream == null) {
      System.out.println("Logs for container " + containerIdStr
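The remainder of this method is elided here; once the loop exits with a matching key, the valueStream would typically be drained with readAContainerLogsForALogType until EOFException, as the next excerpt shows, while a null valueStream means the container's logs are not in this file.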

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

          new AggregatedLogFormat.LogReader(getConf(),
              new Path(remoteAppLogDir, thisNodeFile.getPath().getName()));
      try {

        DataInputStream valueStream;
        LogKey key = new LogKey();
        valueStream = reader.next(key);

        while (valueStream != null) {
          String containerString = "\n\nContainer: " + key + " on " + thisNodeFile.getPath().getName();
          out.println(containerString);
          out.println(StringUtils.repeat("=", containerString.length()));
          while (true) {
            try {
              LogReader.readAContainerLogsForALogType(valueStream, out);
            } catch (EOFException eof) {
              break;
            }
          }

          // Next container
          key = new LogKey();
          valueStream = reader.next(key);
        }
      } finally {
        reader.close();
      }

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

    writeSrcFile(srcFilePath, "stdout", numChars);

    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    LogWriter logWriter = new LogWriter(conf, remoteAppLogFile, ugi);

    LogKey logKey = new LogKey(testContainerId);
    LogValue logValue =
        new LogValue(Collections.singletonList(srcFileRoot.toString()),
            testContainerId, ugi.getShortUserName());

    logWriter.append(logKey, logValue);
    logWriter.closeWriter();

    // make sure permission are correct on the file
    FileStatus fsStatus =  fs.getFileStatus(remoteAppLogFile);
    Assert.assertEquals("permissions on log aggregation file are wrong"
      FsPermission.createImmutable((short) 0640), fsStatus.getPermission());

    LogReader logReader = new LogReader(conf, remoteAppLogFile);
    LogKey rLogKey = new LogKey();
    DataInputStream dis = logReader.next(rLogKey);
    Writer writer = new StringWriter();
    LogReader.readAcontainerLogs(dis, writer);
   
    String s = writer.toString();
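readAcontainerLogs copies every log type recorded for the container into the supplied Writer, using the same LogType / LogLength / Log Contents text layout that the assertions in the other test excerpts check.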

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

    UserGroupInformation ugi =
        UserGroupInformation.getCurrentUser();
    LogWriter logWriter = new LogWriter(conf, remoteAppLogFile, ugi);

    LogKey logKey = new LogKey(testContainerId1);
    String randomUser = "randomUser";
    LogValue logValue =
        spy(new LogValue(Collections.singletonList(srcFileRoot.toString()),
            testContainerId1, randomUser));
   

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

    }

    LOG.info("Uploading logs for container " + containerId
        + ". Current good log dirs are "
        + StringUtils.join(",", dirsHandler.getLogDirs()));
    LogKey logKey = new LogKey(containerId);
    LogValue logValue =
        new LogValue(dirsHandler.getLogDirs(), containerId,
          userUgi.getShortUserName());
    try {
      this.writer.append(logKey, logValue);

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

    try {
      Map<String, Map<String, String>> logMap =
          new HashMap<String, Map<String, String>>();
      DataInputStream valueStream;

      LogKey key = new LogKey();
      valueStream = reader.next(key);

      while (valueStream != null) {
        LOG.info("Found container " + key.toString());
        Map<String, String> perContainerMap = new HashMap<String, String>();
        logMap.put(key.toString(), perContainerMap);

        while (true) {
          try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            PrintStream ps = new PrintStream(baos);
            LogReader.readAContainerLogsForALogType(valueStream, ps);

            String[] writtenLines = baos.toString().split(
              System.getProperty("line.separator"));

            Assert.assertEquals("LogType:", writtenLines[0].substring(0, 8));
            String fileType = writtenLines[0].substring(9);

            Assert.assertEquals("LogLength:", writtenLines[1].substring(0, 10));
            String fileLengthStr = writtenLines[1].substring(11);
            long fileLength = Long.parseLong(fileLengthStr);

            Assert.assertEquals("Log Contents:",
              writtenLines[2].substring(0, 13));

            String logContents = StringUtils.join(
              Arrays.copyOfRange(writtenLines, 3, writtenLines.length), "\n");
            perContainerMap.put(fileType, logContents);

            LOG.info("LogType:" + fileType);
            LOG.info("LogType:" + fileLength);
            LOG.info("Log Contents:\n" + perContainerMap.get(fileType));
          } catch (EOFException eof) {
            break;
          }
        }

        // Next container
        key = new LogKey();
        valueStream = reader.next(key);
      }

      // 1 for each container
      Assert.assertEquals(expectedContainerIds.length, logMap.size());
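This is the same verification loop as the first excerpt, but written against the PrintStream variant of readAContainerLogsForALogType: instead of reading writeUTF-encoded fields back with readUTF, it splits the printed output into lines and strips the LogType:, LogLength:, and Log Contents: prefixes.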

Examples of org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey

  private int dumpAContainerLogs(String containerIdStr,
      AggregatedLogFormat.LogReader reader, PrintStream out)
      throws IOException {
    DataInputStream valueStream;
    LogKey key = new LogKey();
    valueStream = reader.next(key);

    while (valueStream != null && !key.toString().equals(containerIdStr)) {
      // Next container
      key = new LogKey();
      valueStream = reader.next(key);
    }

    if (valueStream == null) {
      System.out.println("Logs for container " + containerIdStr