/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002-2005
* Sleepycat Software. All rights reserved.
*
* $Id: CleanerTest.java,v 1.78 2005/09/21 21:48:04 cwl Exp $
*/
package com.sleepycat.je.cleaner;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import junit.framework.TestCase;
import com.sleepycat.bind.tuple.IntegerBinding;
import com.sleepycat.je.CheckpointConfig;
import com.sleepycat.je.Cursor;
import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseConfig;
import com.sleepycat.je.DatabaseEntry;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.DbInternal;
import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentConfig;
import com.sleepycat.je.EnvironmentStats;
import com.sleepycat.je.LockMode;
import com.sleepycat.je.OperationStatus;
import com.sleepycat.je.Transaction;
import com.sleepycat.je.config.EnvironmentParams;
import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.log.FileManager;
import com.sleepycat.je.util.StringDbt;
import com.sleepycat.je.util.TestUtils;
/**
 * Tests of the JE log cleaner: verifies that data survives forced cleaning,
 * that INs as well as LNs are cleaned, that a "hole" in the middle of the
 * file set can be cleaned, and the SR13191 MapLN regression.
 */
public class CleanerTest extends TestCase {

    /* Number of records inserted by the non-duplicate cleaner test. */
    private static final int N_KEYS = 300;

    /* Length in bytes of each randomly generated key. */
    private static final int N_KEY_BYTES = 10;

    /*
     * Make the log file size small enough to allow cleaning, but large enough
     * not to generate a lot of fsyncing at the log file boundaries.
     */
    private static final int FILE_SIZE = 10000;

    /* Environment home directory, taken from the test destination property. */
    protected File envHome = null;
    protected Database db = null;

    /* Environment and database under test; opened by initEnv(). */
    private Environment exampleEnv;
    private Database exampleDb;

    /* Checkpoint config with force=true, used to force full checkpoints. */
    private CheckpointConfig forceConfig;

    public CleanerTest() {
        envHome = new File(System.getProperty(TestUtils.DEST_DIR));
        forceConfig = new CheckpointConfig();
        forceConfig.setForce(true);
    }
    /**
     * Removes any log files and cleaner-renamed (.del) files left over from
     * a previous run, so each test starts with an empty environment dir.
     */
    public void setUp()
        throws IOException, DatabaseException {

        TestUtils.removeLogFiles("Setup", envHome, false);
        TestUtils.removeFiles("Setup", envHome, FileManager.DEL_SUFFIX);
    }
    /**
     * Opens the environment and the "cleanerDb" test database, configured so
     * that the tests control cleaning and checkpointing explicitly.
     *
     * @param createDb whether the database may be created if absent.
     * @param allowDups whether the database permits duplicate data values.
     */
    private void initEnv(boolean createDb, boolean allowDups)
        throws DatabaseException {

        EnvironmentConfig envConfig = TestUtils.initEnvConfig();
        /* Permit the tiny NODE_MAX value below, which is normally rejected. */
        DbInternal.disableParameterValidation(envConfig);
        envConfig.setTransactional(true);
        envConfig.setAllowCreate(true);
        envConfig.setTxnNoSync(Boolean.getBoolean(TestUtils.NO_SYNC));
        /* Small log files so the tests generate enough files to clean. */
        envConfig.setConfigParam(EnvironmentParams.LOG_FILE_MAX.getName(),
                                 Integer.toString(FILE_SIZE));
        envConfig.setConfigParam(EnvironmentParams.ENV_CHECK_LEAKS.getName(),
                                 "false");
        /* The tests invoke the cleaner and checkpointer themselves. */
        envConfig.setConfigParam(EnvironmentParams.ENV_RUN_CLEANER.getName(),
                                 "false");
        /* Keep cleaned files (renamed with DEL_SUFFIX) rather than deleting. */
        envConfig.setConfigParam(EnvironmentParams.CLEANER_REMOVE.getName(),
                                 "false");
        envConfig.setConfigParam
            (EnvironmentParams.CLEANER_MIN_UTILIZATION.getName(), "80");
        envConfig.setConfigParam
            (EnvironmentParams.ENV_RUN_CHECKPOINTER.getName(), "false");
        /* Small fanout produces a deep tree with many INs to clean. */
        envConfig.setConfigParam(EnvironmentParams.NODE_MAX.getName(), "6");
        envConfig.setConfigParam(EnvironmentParams.BIN_DELTA_PERCENT.getName(),
                                 "75");

        /* Don't use detail tracking in this test. */
        envConfig.setConfigParam
            (EnvironmentParams.CLEANER_TRACK_DETAIL.getName(), "false");

        exampleEnv = new Environment(envHome, envConfig);

        String databaseName = "cleanerDb";
        DatabaseConfig dbConfig = new DatabaseConfig();
        dbConfig.setTransactional(true);
        dbConfig.setAllowCreate(createDb);
        dbConfig.setSortedDuplicates(allowDups);
        exampleDb = exampleEnv.openDatabase(null, databaseName, dbConfig);
    }
    /**
     * Closes the environment if still open (swallowing close errors so they
     * don't mask the original test failure) and removes the log files.
     */
    public void tearDown()
        throws IOException, DatabaseException {

        if (exampleEnv != null) {
            try {
                exampleEnv.close();
            } catch (DatabaseException e) {
                /* Report but don't rethrow; preserve any earlier failure. */
                System.out.println("tearDown: " + e);
            }
        }
        exampleDb = null;
        exampleEnv = null;

        /* Comment toggle: change "//*" to "/*" to keep files for debugging. */
        //*
        TestUtils.removeLogFiles("TearDown", envHome, true);
        TestUtils.removeFiles("TearDown", envHome, FileManager.DEL_SUFFIX);
        //*/
    }
private void closeEnv()
throws DatabaseException {
if (exampleDb != null) {
exampleDb.close();
exampleDb = null;
}
if (exampleEnv != null) {
exampleEnv.close();
exampleEnv = null;
}
}
public void testCleanerNoDupes()
throws Throwable {
initEnv(true, false);
try {
doCleanerTest(N_KEYS, 1);
} catch (Throwable t) {
t.printStackTrace();
throw t;
}
}
public void testCleanerWithDupes()
throws Throwable {
initEnv(true, true);
try {
doCleanerTest(2, 500);
} catch (Throwable t) {
t.printStackTrace();
throw t;
}
}
private void doCleanerTest(int nKeys, int nDupsPerKey)
throws DatabaseException {
EnvironmentImpl environment =
DbInternal.envGetEnvironmentImpl(exampleEnv);
FileManager fileManager = environment.getFileManager();
Map expectedMap = new HashMap();
doLargePut(expectedMap, nKeys, nDupsPerKey, true);
Long lastNum = fileManager.getLastFileNum();
/* Read the data back. */
StringDbt foundKey = new StringDbt();
StringDbt foundData = new StringDbt();
Cursor cursor = exampleDb.openCursor(null, null);
while (cursor.getNext(foundKey, foundData, LockMode.DEFAULT) ==
OperationStatus.SUCCESS) {
}
exampleEnv.checkpoint(forceConfig);
for (int i = 0; i < (int) lastNum.longValue(); i++) {
/*
* Force clean one file. Utilization-based cleaning won't
* work here, since utilization is over 90%.
*/
DbInternal.envGetEnvironmentImpl(exampleEnv).
getCleaner().
doClean(false, // invokedFromDaemon
false, // cleanMultipleFiles
true); // forceCleaning
}
EnvironmentStats stats = exampleEnv.getStats(TestUtils.FAST_STATS);
assertTrue(stats.getNINsCleaned() > 0);
cursor.close();
closeEnv();
initEnv(false, (nDupsPerKey > 1));
checkData(expectedMap);
assertTrue(fileManager.getLastFileNum().longValue() >
lastNum.longValue());
closeEnv();
}
/**
* Ensure that INs are cleaned.
*/
public void testCleanInternalNodes()
throws DatabaseException {
initEnv(true, true);
int nKeys = 200;
EnvironmentImpl environment =
DbInternal.envGetEnvironmentImpl(exampleEnv);
FileManager fileManager = environment.getFileManager();
/* Insert a lot of keys. ExpectedMap holds the expected data */
Map expectedMap = new HashMap();
doLargePut(expectedMap, nKeys, 1, true);
/* Modify every other piece of data. */
modifyData(expectedMap, 10, true);
checkData(expectedMap);
/* Checkpoint */
exampleEnv.checkpoint(forceConfig);
checkData(expectedMap);
/* Modify every other piece of data. */
modifyData(expectedMap, 10, true);
checkData(expectedMap);
/* Checkpoint -- this should obsolete INs. */
exampleEnv.checkpoint(forceConfig);
checkData(expectedMap);
/* Clean */
Long lastNum = fileManager.getLastFileNum();
exampleEnv.cleanLog();
/* Validate after cleaning. */
checkData(expectedMap);
EnvironmentStats stats = exampleEnv.getStats(TestUtils.FAST_STATS);
/* Make sure we really cleaned something.*/
assertTrue(stats.getNINsCleaned() > 0);
assertTrue(stats.getNLNsCleaned() > 0);
closeEnv();
initEnv(false, true);
checkData(expectedMap);
assertTrue(fileManager.getLastFileNum().longValue() >
lastNum.longValue());
closeEnv();
}
    /**
     * See if we can clean in the middle of the file set.
     */
    public void testCleanFileHole()
        throws Throwable {

        initEnv(true, true);

        int nKeys = 20; // test ends up inserting 2*nKeys
        int nDupsPerKey = 30;

        EnvironmentImpl environment =
            DbInternal.envGetEnvironmentImpl(exampleEnv);
        FileManager fileManager = environment.getFileManager();

        /* Insert some non dup data, modify, insert dup data. */
        Map expectedMap = new HashMap();
        doLargePut(expectedMap, nKeys, 1, true);
        modifyData(expectedMap, 10, true);
        doLargePut(expectedMap, nKeys, nDupsPerKey, true);
        checkData(expectedMap);

        /*
         * Delete all the data, but abort.  (Try to fill up the log
         * with entries we don't need.)
         */
        deleteData(expectedMap, false, false);
        checkData(expectedMap);

        /* Do some more insertions, but abort them. */
        doLargePut(expectedMap, nKeys, nDupsPerKey, false);
        checkData(expectedMap);

        /* Do some more insertions and commit them. */
        doLargePut(expectedMap, nKeys, nDupsPerKey, true);
        checkData(expectedMap);

        /* Checkpoint */
        exampleEnv.checkpoint(forceConfig);
        checkData(expectedMap);

        /* Clean */
        Long lastNum = fileManager.getLastFileNum();
        exampleEnv.cleanLog();

        /* Validate after cleaning. */
        checkData(expectedMap);
        EnvironmentStats stats = exampleEnv.getStats(TestUtils.FAST_STATS);

        /* Make sure we really cleaned something.*/
        assertTrue(stats.getNINsCleaned() > 0);
        assertTrue(stats.getNLNsCleaned() > 0);

        closeEnv();
        initEnv(false, true);
        checkData(expectedMap);
        assertTrue(fileManager.getLastFileNum().longValue() >
                   lastNum.longValue());
        closeEnv();
    }
    /**
     * Test for SR13191. This SR shows a problem where a MapLN is initialized
     * with a DatabaseImpl that has a null EnvironmentImpl. When the Database
     * gets used, a NullPointerException occurs in the Cursor code which
     * expects there to be an EnvironmentImpl present. The MapLN gets init'd
     * by the Cleaner reading through a log file and encountering a MapLN which
     * is not presently in the DbTree. As an efficiency, the Cleaner calls
     * updateEntry on the BIN to try to insert the MapLN into the BIN so that
     * it won't have to fetch it when it migrates the BIN. But this is bad
     * since the MapLN has not been init'd properly. The fix was to ensure
     * that the MapLN is init'd correctly by calling postFetchInit on it just
     * prior to inserting it into the BIN.
     *
     * This test first creates an environment and two databases. The first
     * database it just adds to the tree with no data. This will be the MapLN
     * that eventually gets instantiated by the cleaner. The second database
     * is used just to create a bunch of data that will get deleted so as to
     * create a low utilization for one of the log files. Once the data for
     * db2 is created, the log is flipped (so file 0 is the one with the MapLN
     * for db1 in it), and the environment is closed and reopened. We insert
     * more data into db2 until we have enough .jdb files that file 0 is
     * attractive to the cleaner. Call the cleaner to have it instantiate the
     * MapLN and then use the MapLN in a Database.get() call.
     */
    public void testSR13191()
        throws Throwable {

        EnvironmentConfig envConfig = TestUtils.initEnvConfig();
        envConfig.setAllowCreate(true);
        /* The test invokes the cleaner explicitly. */
        envConfig.setConfigParam
            (EnvironmentParams.ENV_RUN_CLEANER.getName(), "false");
        Environment env = new Environment(envHome, envConfig);
        EnvironmentImpl envImpl = DbInternal.envGetEnvironmentImpl(env);
        FileManager fileManager =
            DbInternal.envGetEnvironmentImpl(env).getFileManager();

        DatabaseConfig dbConfig = new DatabaseConfig();
        dbConfig.setAllowCreate(true);
        /* db1 stays empty; its MapLN is what the cleaner will instantiate. */
        Database db1 =
            env.openDatabase(null, "db1", dbConfig);
        /* db2 exists only to generate low-utilization log data. */
        Database db2 =
            env.openDatabase(null, "db2", dbConfig);

        DatabaseEntry key = new DatabaseEntry();
        DatabaseEntry data = new DatabaseEntry();
        IntegerBinding.intToEntry(1, key);
        data.setData(new byte[100000]);
        /* Overwrite one key repeatedly to create mostly-obsolete entries. */
        for (int i = 0; i < 50; i++) {
            assertEquals(OperationStatus.SUCCESS, db2.put(null, key, data));
        }
        db1.close();
        db2.close();
        assertEquals("Should have 0 as current file", 0L,
                     fileManager.getCurrentFileNum());
        /* Flip the log so file 0 (containing db1's MapLN) is complete. */
        envImpl.forceLogFileFlip();
        env.close();

        env = new Environment(envHome, envConfig);
        fileManager = DbInternal.envGetEnvironmentImpl(env).getFileManager();
        assertEquals("Should have 1 as current file", 1L,
                     fileManager.getCurrentFileNum());

        db2 = env.openDatabase(null, "db2", dbConfig);

        /* Write enough data that file 0 becomes attractive to the cleaner. */
        for (int i = 0; i < 250; i++) {
            assertEquals(OperationStatus.SUCCESS, db2.put(null, key, data));
        }
        db2.close();

        /* Cleaning file 0 makes the cleaner instantiate db1's MapLN. */
        env.cleanLog();
        db1 = env.openDatabase(null, "db1", dbConfig);
        /* Before the fix, this get() threw NullPointerException (SR13191). */
        db1.get(null, key, data, null);
        db1.close();
        env.close();
    }
/**
* Helper routine. Generates keys with random alpha values while data
* is numbered numerically.
*/
private void doLargePut(Map expectedMap,
int nKeys,
int nDupsPerKey,
boolean commit)
throws DatabaseException {
Transaction txn = exampleEnv.beginTransaction(null, null);
for (int i = 0; i < nKeys; i++) {
byte[] key = new byte[N_KEY_BYTES];
TestUtils.generateRandomAlphaBytes(key);
String keyString = new String(key);
/*
* The data map is keyed by key value, and holds a hash
* map of all data values.
*/
Set dataVals = new HashSet();
if (commit) {
expectedMap.put(keyString, dataVals);
}
for (int j = 0; j < nDupsPerKey; j++) {
String dataString = Integer.toString(j);
exampleDb.put(txn,
new StringDbt(keyString),
new StringDbt(dataString));
dataVals.add(dataString);
}
}
if (commit) {
txn.commit();
} else {
txn.abort();
}
}
    /**
     * Modifies every other record by incrementing its data value, via
     * delete + re-insert at the cursor position.  The expected map is
     * adjusted only when the transaction will be committed.
     */
    private void modifyData(Map expectedMap,
                            int increment,
                            boolean commit)
        throws DatabaseException {

        Transaction txn = exampleEnv.beginTransaction(null, null);
        StringDbt foundKey = new StringDbt();
        StringDbt foundData = new StringDbt();

        Cursor cursor = exampleDb.openCursor(txn, null);
        OperationStatus status = cursor.getFirst(foundKey, foundData,
                                                 LockMode.DEFAULT);

        /* toggle alternates so that only every other record is modified. */
        boolean toggle = true;
        while (status == OperationStatus.SUCCESS) {
            if (toggle) {

                String foundKeyString = foundKey.getString();
                String foundDataString = foundData.getString();
                int newValue = Integer.parseInt(foundDataString) + increment;
                String newDataString = Integer.toString(newValue);

                /* If committing, adjust the expected map. */
                if (commit) {

                    Set dataVals = (Set) expectedMap.get(foundKeyString);
                    if (dataVals == null) {
                        fail("Couldn't find " +
                             foundKeyString + "/" + foundDataString);
                    } else if (dataVals.contains(foundDataString)) {
                        /* Swap the old value for the incremented one. */
                        dataVals.remove(foundDataString);
                        dataVals.add(newDataString);
                    } else {
                        fail("Couldn't find " +
                             foundKeyString + "/" + foundDataString);
                    }
                }

                /* Modify = delete the record, then put the new value. */
                assertEquals(OperationStatus.SUCCESS,
                             cursor.delete());
                assertEquals(OperationStatus.SUCCESS,
                             cursor.put(foundKey,
                                        new StringDbt(newDataString)));
                toggle = false;
            } else {
                toggle = true;
            }

            status = cursor.getNext(foundKey, foundData, LockMode.DEFAULT);
        }

        cursor.close();
        if (commit) {
            txn.commit();
        } else {
            txn.abort();
        }
    }
/**
* Delete data.
*/
private void deleteData(Map expectedMap,
boolean everyOther,
boolean commit)
throws DatabaseException {
Transaction txn = exampleEnv.beginTransaction(null, null);
StringDbt foundKey = new StringDbt();
StringDbt foundData = new StringDbt();
Cursor cursor = exampleDb.openCursor(txn, null);
OperationStatus status = cursor.getFirst(foundKey, foundData,
LockMode.DEFAULT);
boolean toggle = true;
while (status == OperationStatus.SUCCESS) {
if (toggle) {
String foundKeyString = foundKey.getString();
String foundDataString = foundData.getString();
/* If committing, adjust the expected map */
if (commit) {
Set dataVals = (Set) expectedMap.get(foundKeyString);
if (dataVals == null) {
fail("Couldn't find " +
foundKeyString + "/" + foundDataString);
} else if (dataVals.contains(foundDataString)) {
dataVals.remove(foundDataString);
if (dataVals.size() == 0) {
expectedMap.remove(foundKeyString);
}
} else {
fail("Couldn't find " +
foundKeyString + "/" + foundDataString);
}
}
assertEquals(OperationStatus.SUCCESS, cursor.delete());
}
if (everyOther) {
toggle = toggle? false: true;
}
status = cursor.getNext(foundKey, foundData, LockMode.DEFAULT);
}
cursor.close();
if (commit) {
txn.commit();
} else {
txn.abort();
}
}
/**
* Check what's in the database against what's in the expected map.
*/
private void checkData(Map expectedMap)
throws DatabaseException {
StringDbt foundKey = new StringDbt();
StringDbt foundData = new StringDbt();
Cursor cursor = exampleDb.openCursor(null, null);
OperationStatus status = cursor.getFirst(foundKey, foundData,
LockMode.DEFAULT);
/*
* Make a copy of expectedMap so that we're free to delete out
* of the set of expected results when we verify.
* Also make a set of counts for each key value, to test count.
*/
Map checkMap = new HashMap();
Map countMap = new HashMap();
Iterator iter = expectedMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry entry = (Map.Entry) iter.next();
Set copySet = new HashSet();
copySet.addAll((Set) entry.getValue());
checkMap.put(entry.getKey(), copySet);
countMap.put(entry.getKey(), new Integer(copySet.size()));
}
while (status == OperationStatus.SUCCESS) {
String foundKeyString = foundKey.getString();
String foundDataString = foundData.getString();
/* Check that the current value is in the check values map */
Set dataVals = (Set) checkMap.get(foundKeyString);
if (dataVals == null) {
fail("Couldn't find " +
foundKeyString + "/" + foundDataString);
} else if (dataVals.contains(foundDataString)) {
dataVals.remove(foundDataString);
if (dataVals.size() == 0) {
checkMap.remove(foundKeyString);
}
} else {
fail("Couldn't find " +
foundKeyString + "/" +
foundDataString +
" in data vals");
}
/* Check that the count is right. */
int count = cursor.count();
assertEquals(((Integer)countMap.get(foundKeyString)).intValue(),
count);
status = cursor.getNext(foundKey, foundData, LockMode.DEFAULT);
}
cursor.close();
if (checkMap.size() != 0) {
dumpExpected(checkMap);
fail("checkMapSize = " + checkMap.size());
}
assertEquals(0, checkMap.size());
}
private void dumpExpected(Map expectedMap) {
Iterator iter = expectedMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry entry = (Map.Entry) iter.next();
String key = (String) entry.getKey();
Iterator dataIter = ((Set) entry.getValue()).iterator();
while (dataIter.hasNext()) {
System.out.println("key=" + key +
" data=" + (String) dataIter.next());
}
}
}
}