Examples of IPCLoggerChannel
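
IPCLoggerChannel is the QuorumJournalManager's client-side channel to a single JournalNode: it implements AsyncLogger, queues edits in memory, and ships them to the remote node over the QJournalProtocol IPC interface, returning ListenableFutures for each call. The examples below all follow the same basic pattern. Here is a minimal sketch of that sequence, assuming a running JournalNode whose journal has already been formatted (as in the setup example below); the names conf, nsInfo, jid, addr, and editsData are placeholders, not taken from the examples, and newer Hadoop versions pass a layout version as a second argument to startLogSegment, as the last two examples show.

    IPCLoggerChannel ch = new IPCLoggerChannel(conf, nsInfo, jid, addr);
    ch.newEpoch(1).get();                     // become the writer for epoch 1
    ch.setEpoch(1);                           // tag subsequent RPCs with that epoch
    ch.startLogSegment(1).get();              // open a segment starting at txid 1
    ch.sendEdits(1L, 1, 1, editsData).get();  // (segmentTxId, firstTxId, numTxns, data)
    ch.finalizeLogSegment(1, 1).get();        // seal the segment
    ch.close();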


Examples of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel

    // Start the JournalNode, then create and format a journal with a unique id.
    jn.start();
    journalId = "test-journalid-" + GenericTestUtils.uniqueSequenceId();
    journal = jn.getOrCreateJournal(journalId);
    journal.format(FAKE_NSINFO);

    // Open a client channel to the node's bound IPC address.
    ch = new IPCLoggerChannel(conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
  }
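
This setup fragment relies on fields declared elsewhere in the test class. A hedged sketch of what those declarations typically look like follows; the NamespaceInfo constructor arguments are illustrative values, not taken from the snippet.

    private JournalNode jn;                   // the server under test
    private String journalId;                 // unique id for this test's journal
    private Journal journal;                  // server-side journal instance
    private IPCLoggerChannel ch;              // client channel under test
    private final Configuration conf = new Configuration();
    private static final NamespaceInfo FAKE_NSINFO = new NamespaceInfo(
        12345, "mycluster", "my-bp", 0L);     // illustrative namespace info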

Examples of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel

    // Before any edits are written, the journal's batch counters are zero.
    MetricsRecordBuilder metrics = MetricsAsserts.getMetrics(
        journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 0L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 0L, metrics);
    MetricsAsserts.assertGauge("CurrentLagTxns", 0L, metrics);

    // Become the writer for epoch 1, open a segment at txid 1, and write one txn.
    IPCLoggerChannel ch = new IPCLoggerChannel(
        conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
    ch.newEpoch(1).get();
    ch.setEpoch(1);
    ch.startLogSegment(1).get();
    ch.sendEdits(1L, 1, 1, "hello".getBytes(Charsets.UTF_8)).get();

    // One batch has now been written, and the journal is not lagging.
    metrics = MetricsAsserts.getMetrics(
        journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 1L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 0L, metrics);
    MetricsAsserts.assertGauge("CurrentLagTxns", 0L, metrics);

    // Advertise a committed txid far ahead of what has been written, then
    // send another batch: it is counted as written while lagging.
    ch.setCommittedTxId(100L);
    ch.sendEdits(1L, 2, 1, "goodbye".getBytes(Charsets.UTF_8)).get();

    metrics = MetricsAsserts.getMetrics(
        journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 2L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 1L, metrics);
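
At this point the channel has advertised a committed txid of 100 while only two transactions are actually written, so the journal is lagging. A hedged continuation of the snippet, with the expected gauge value derived as committedTxId minus the highest written txid (100 - 2 = 98); treat the exact number as illustrative rather than guaranteed.

    // Illustrative expectation: the lag gauge reflects how far behind the
    // written txid is relative to the advertised committed txid.
    MetricsAsserts.assertGauge("CurrentLagTxns", 98L, metrics);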

Examples of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel

    // Fetch the JournalNode status page and sanity-check its contents.
    String pageContents = DFSTestUtil.urlGet(
        new URL(urlRoot + "/journalstatus.jsp"));
    assertTrue(pageContents.contains("JournalNode"));

    // Create some edits on the server side.
    byte[] EDITS_DATA = QJMTestUtil.createTxnData(1, 3);
    IPCLoggerChannel ch = new IPCLoggerChannel(
        conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
    ch.newEpoch(1).get();
    ch.setEpoch(1);
    ch.startLogSegment(1).get();
    ch.sendEdits(1L, 1, 3, EDITS_DATA).get();
    ch.finalizeLogSegment(1, 3).get();

    // Attempt to retrieve the finalized segment via HTTP and ensure we get
    // the data back, including the header we expected.
    byte[] retrievedViaHttp = DFSTestUtil.urlGetBytes(new URL(urlRoot +
        "/getJournal?segmentTxId=1&jid=" + journalId));

Examples of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel

  public void setupMock() {
    // Cap how many megabytes of edits the channel may queue in memory.
    conf.setInt(DFSConfigKeys.DFS_QJOURNAL_QUEUE_SIZE_LIMIT_KEY,
        LIMIT_QUEUE_SIZE_MB);

    // Channel to the mock object instead of a real IPC proxy.
    ch = new IPCLoggerChannel(conf, FAKE_NSINFO, JID, FAKE_ADDR) {
      @Override
      protected QJournalProtocol getProxy() throws IOException {
        return mockProxy;
      }
    };
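
With getProxy() overridden to return a Mockito mock of QJournalProtocol, a test can drive the channel and verify exactly what reaches the "remote" side. A hedged sketch of such a check follows; FAKE_DATA is an illustrative byte array, and the journal() call shown is the RPC the channel issues for sendEdits.

    // Sketch: send one batch through the channel, then verify the mock proxy
    // received it with the expected segment txid, first txid, and txn count.
    ch.setEpoch(1);
    ch.sendEdits(1L, 1, 3, FAKE_DATA).get();
    Mockito.verify(mockProxy).journal(Mockito.<RequestInfo>any(),
        Mockito.eq(1L), Mockito.eq(1L), Mockito.eq(3), Mockito.same(FAKE_DATA));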

Examples of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel

    // Same metrics scenario as above, but from a newer Hadoop version: the
    // startLogSegment() overload here also passes the NameNode layout version.
    MetricsRecordBuilder metrics = MetricsAsserts.getMetrics(
        journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 0L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 0L, metrics);
    MetricsAsserts.assertGauge("CurrentLagTxns", 0L, metrics);

    IPCLoggerChannel ch = new IPCLoggerChannel(
        conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
    ch.newEpoch(1).get();
    ch.setEpoch(1);
    ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
    ch.sendEdits(1L, 1, 1, "hello".getBytes(Charsets.UTF_8)).get();

    metrics = MetricsAsserts.getMetrics(
        journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 1L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 0L, metrics);
    MetricsAsserts.assertGauge("CurrentLagTxns", 0L, metrics);

    // Advertise a committed txid ahead of what has been written; the next
    // batch counts as written while lagging.
    ch.setCommittedTxId(100L);
    ch.sendEdits(1L, 2, 1, "goodbye".getBytes(Charsets.UTF_8)).get();

    metrics = MetricsAsserts.getMetrics(
        journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 2L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 1L, metrics);

Examples of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel

    // Same HTTP-retrieval scenario as above, using the newer startLogSegment()
    // overload that also passes the NameNode layout version.
    String pageContents = DFSTestUtil.urlGet(
        new URL(urlRoot + "/journalstatus.jsp"));
    assertTrue(pageContents.contains("JournalNode"));

    // Create some edits on the server side.
    byte[] EDITS_DATA = QJMTestUtil.createTxnData(1, 3);
    IPCLoggerChannel ch = new IPCLoggerChannel(
        conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
    ch.newEpoch(1).get();
    ch.setEpoch(1);
    ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
    ch.sendEdits(1L, 1, 3, EDITS_DATA).get();
    ch.finalizeLogSegment(1, 3).get();

    // Attempt to retrieve the finalized segment via HTTP and ensure we get
    // the data back, including the header we expected.
    byte[] retrievedViaHttp = DFSTestUtil.urlGetBytes(new URL(urlRoot +
        "/getJournal?segmentTxId=1&jid=" + journalId));