Index: ocean/build.xml
===================================================================
--- ocean/build.xml	(revision 0)
+++ ocean/build.xml	(revision 0)
@@ -0,0 +1,43 @@
+<?xml version="1.0"?>
+
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+ 
+        http://www.apache.org/licenses/LICENSE-2.0
+ 
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+ -->
+
+<project name="ocean" default="default">
+
+  <description>
+    Ocean realtime search on Lucene
+  </description>
+
+  <path id="additional.dependencies">
+    <pathelement location="lib/commons-io-1.3.2.jar"/>
+    <pathelement location="lib/jdom.jar"/>
+    <pathelement location="lib/commons-lang-2.3.jar"/>
+    <pathelement location="lib/slf4j-api-1.5.2.jar"/>
+    <pathelement location="lib/slf4j-simple-1.5.2.jar"/>
+  </path>
+
+  <pathconvert property="project.classpath"
+               targetos="unix"
+               refid="additional.dependencies"
+  />
+
+  <target name="compile-core" depends="common.compile-core">
+  </target>
+
+  <import file="../contrib-build.xml"/>
+</project>
Index: ocean/build.xml
===================================================================
--- ocean/build.xml	(revision 0)
+++ ocean/build.xml	(revision 0)
@@ -0,0 +1,43 @@
+<?xml version="1.0"?>
+
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+ 
+        http://www.apache.org/licenses/LICENSE-2.0
+ 
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+ -->
+
+<project name="ocean" default="default">
+
+  <description>
+    Ocean realtime search on Lucene
+  </description>
+
+  <path id="additional.dependencies">
+    <pathelement location="lib/commons-io-1.3.2.jar"/>
+    <pathelement location="lib/jdom.jar"/>
+    <pathelement location="lib/commons-lang-2.3.jar"/>
+    <pathelement location="lib/slf4j-api-1.5.2.jar"/>
+    <pathelement location="lib/slf4j-simple-1.5.2.jar"/>
+  </path>
+
+  <pathconvert property="project.classpath"
+               targetos="unix"
+               refid="additional.dependencies"
+  />
+
+  <target name="compile-core" depends="common.compile-core">
+  </target>
+
+  <import file="../contrib-build.xml"/>
+</project>
Index: ocean/lib/jdom.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\jdom.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/commons-lang-2.3.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\commons-lang-2.3.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/slf4j-api-1.5.2.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\slf4j-api-1.5.2.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/slf4j-simple-1.5.2.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\slf4j-simple-1.5.2.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/commons-io-1.3.2.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\commons-io-1.3.2.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/commons-io-1.3.2.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\commons-io-1.3.2.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/commons-lang-2.3.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\commons-lang-2.3.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/jdom.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\jdom.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/slf4j-api-1.5.2.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\slf4j-api-1.5.2.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/lib/slf4j-simple-1.5.2.jar
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream

Property changes on: ocean\lib\slf4j-simple-1.5.2.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Index: ocean/src/net/sourceforge/jsorter/SortComparator.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortComparator.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortComparator.java	(revision 0)
@@ -0,0 +1,230 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * The default comparator for the Sort and SwingSorter classes. This comparator
+ * is used for sorting multiple columns.
+ * 
+ * @author Robert Breidecker
+ */
+public class SortComparator implements Comparator {
+	/**
+	 * The list of sort columns.
+	 */
+	private List sortColumns = null;
+
+	/**
+	 * Indicate how to treat null data values.
+	 */
+	private int nullBehavior = SorterConstants.NULLS_ARE_INVALID;
+
+	/**
+	 * Comparator constructor.
+	 * 
+	 * @param sortColumns
+	 *            A list of SortColumns that represent the positions of columns
+	 *            in your data that you want to be evaluated in the sort. The
+	 *            sort will start with the first column position in the list.
+	 *            Column positions not in the list will not be used in the sort.
+	 *            The number of items in this list should equal the number of
+	 *            items in the columnOrders list.
+	 * 
+	 * @param nullBehavior
+	 *            An integer representing the constant that indicates how null
+	 *            data values should behave while being sorted. See the null
+	 *            behavior constants in this class.
+	 */
+	public SortComparator(final List sortColumns, final int nullBehavior) {
+		this.sortColumns = sortColumns;
+		this.nullBehavior = nullBehavior;
+	}
+
+	/**
+	 * Overrides the java.util.Comparator compare method. This method is used
+	 * for comparing two dimensional data. See the standard JDK documentation for
+	 * more information.
+	 * 
+	 * @param one
+	 *            Object - The first object used in the compare. This field
+	 *            should be a list of lists containing objects which implement
+	 *            the Comparable interface. Some of these object types include
+	 *            String, Integer, Long, Short, Float, Byte, Double and Date.
+	 *            See the standard JDK documentation for Comparator for a complete
+	 *            list. The object type for each column of data must be
+	 *            consistent or a ClassCastException will be thrown.
+	 * 
+	 * @param two
+	 *            Object - The second object used in the compare. This field
+	 *            should be a list of lists containing objects which implement
+	 *            the Comparable interface. Some of these object types include
+	 *            String, Integer, Long, Short, Float, Byte, Double and Date.
+	 *            See the standard JDK documentation for Comparator for a complete
+	 *            list. The object type for each column of data must be
+	 *            consistent or a ClassCastException will be thrown.
+	 * 
+	 * @return A negative integer, zero, or a positive integer as the first
+	 *         argument is less than, equal to, or greater than the second.
+	 * 
+	 * @exception ClassCastException
+	 *                Data in a column is not all of the same data type.
+	 */
+	public int compare(final Object one, final Object two) throws ClassCastException {
+		// The number of columns in the table.
+		int numColumns;
+
+		// The return value.
+		int rtn = 0;
+
+		// Used for counting the number of real fields in the data.
+		int ctr = 0;
+
+		// Holds the type of sort order being used.
+		int columnOrder;
+
+		// Used for counting the number of values in the sort columns.
+		int compareCtr;
+
+		// One row of data;
+		final List listOne = (List)one;
+
+		if (sortColumns == null) {
+			numColumns = listOne.size();
+		} else {
+			numColumns = sortColumns.size();
+		}
+
+		while (rtn == 0 && ctr < numColumns) {
+			// The first object to compare.
+			Comparable comparableOne;
+
+			// The second object to compare.
+			Comparable comparableTwo;
+
+			// Make sure compare column is within range.
+			if (sortColumns == null) {
+				compareCtr = ctr;
+			} else {
+				compareCtr = ((SortableColumn)sortColumns.get(ctr)).getColumnPosition();
+			}
+
+			if (compareCtr <= listOne.size()) {
+				// Another row of data;
+				final List listTwo = (List) two;
+
+				// Get the field to use in the compare.
+				if (sortColumns == null) {
+					comparableOne = (Comparable)listOne.get(compareCtr);
+					comparableTwo = (Comparable)listTwo.get(compareCtr);
+				} else {
+					comparableOne = (Comparable)listOne.get(((SortableColumn) sortColumns.get(ctr)).getColumnPosition());
+					comparableTwo = (Comparable)listTwo.get(((SortableColumn) sortColumns.get(ctr)).getColumnPosition());
+				}
+
+				// Get the sort type that goes with the sort column.
+				if (sortColumns == null) {
+					// If no sort columns were specified, then use ascending
+					// order.
+					columnOrder = SorterConstants.ASCENDING_ORDER;
+				} else {
+					columnOrder = ((SortableColumn)sortColumns.get(ctr)).getColumnOrder();
+				}
+
+				// Compare the objects.
+				if (comparableOne != null && comparableTwo != null) {
+					if (columnOrder == SorterConstants.ASCENDING_ORDER) {
+						try {
+							rtn = comparableOne.compareTo(comparableTwo);
+						} catch (ClassCastException exception) {
+							throw exception;
+						}
+					} else {
+						try {
+							rtn = comparableTwo.compareTo(comparableOne);
+						} catch (ClassCastException exception) {
+							throw exception;
+						}
+					}
+				} else {
+					if (nullBehavior == SorterConstants.NULLS_ARE_INVALID) {
+						throw new IllegalStateException("Null data values are not valid.");
+					} else if (comparableOne == null && comparableTwo != null) {
+						if (columnOrder == SorterConstants.ASCENDING_ORDER) {
+							rtn = -1;
+						} else {
+							rtn = 1;
+						}
+
+						if (nullBehavior == SorterConstants.NULLS_ARE_GREATEST) {
+							rtn = rtn * -1;
+						}
+					} else if (comparableOne != null && comparableTwo == null) {
+						if (columnOrder == SorterConstants.ASCENDING_ORDER) {
+							rtn = 1;
+						} else {
+							rtn = -1;
+						}
+
+						if (nullBehavior == SorterConstants.NULLS_ARE_GREATEST) {
+							rtn = rtn * -1;
+						}
+					} else {
+						rtn = 0;
+					}
+				}
+			}
+			ctr++;
+		}
+		return rtn;
+	}
+
+	/**
+	 * Returns the null behavior for this object.
+	 * 
+	 * @return An integer representing the constant that indicates how null data
+	 *         values should behave while being sorted. See the null behavior
+	 *         constants in this class. The default value for this class is
+	 *         NULLS_ARE_INVALID.
+	 */
+	public int getNullBehavior() {
+		return nullBehavior;
+	}
+
+	/**
+	 * Set the null behavior for this object.
+	 * 
+	 * @param nullBehavior
+	 *            An integer representing the constant that indicates how null
+	 *            data values should behave while being sorted. See the null
+	 *            behavior constants in this class.
+	 * 
+	 * @throws IllegalArgumentException
+	 *             Thrown if the null behavior value is not valid.
+	 */
+	public void setNullBehavior(final int nullBehavior) {
+		if (nullBehavior != SorterConstants.NULLS_ARE_GREATEST
+				&& nullBehavior != SorterConstants.NULLS_ARE_INVALID
+				&& nullBehavior != SorterConstants.NULLS_ARE_LEAST) {
+			throw new IllegalArgumentException("Invalid null behavior.");
+		}
+
+		this.nullBehavior = nullBehavior;
+	}
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/FieldUtil.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/FieldUtil.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/FieldUtil.java	(revision 0)
@@ -0,0 +1,74 @@
+/*
+ * FieldUtil.java
+ *
+ * Created on February 3, 2006, 5:24 PM
+ */
+
+package net.sourceforge.jsorter;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ *
+ * @author  administrator
+ * @version
+ */
+public class FieldUtil {
+  private static Map fieldMap = new ConcurrentHashMap();
+  
+  public static void setFieldValue(String fieldName, Object object, Object value) {
+    try {
+      Field field = getField(object.getClass(), fieldName);
+      field.set(object, value);
+    } catch (Exception ex) { ex.printStackTrace(); }
+  }
+  
+  public static Object getFieldValue(String fieldName, Object object) {
+    try {
+      Field field = getField(object.getClass(), fieldName);
+      return field.get(object);
+    } catch (Exception ex) { ex.printStackTrace(); return null; }
+  }
+  
+  public static Field getField(Class clazz, String fieldName) {
+    Field[] fields = getFields(clazz);
+    for (int x=0; x < fields.length; x++) {
+      if (fields[x].getName().equals(fieldName)) {
+        return fields[x];
+      }
+    }
+    return null;
+  }
+  
+  public static Field[] getFields(Class clazz) {
+    Field[] fields = (Field[])fieldMap.get(clazz);
+    if (fields != null) return fields;
+    List list = new ArrayList();
+    getFields(clazz, list);
+    
+    fields = new Field[list.size()];
+    for (int x=0; x < fields.length; x++) {
+      fields[x] = (Field)list.get(x);
+    }
+    fieldMap.put(clazz, fields);
+    return fields;
+  }
+  
+  public static void getFields(Class clazz, List list) {
+    Class superClass = clazz.getSuperclass();
+    if (superClass != null) getFields(superClass, list);
+    Field[] fields = clazz.getDeclaredFields();
+    for (int x=0; x < fields.length; x++) {
+      int modifiers = fields[x].getModifiers();
+      if (Modifier.isPublic(modifiers) && !Modifier.isStatic(modifiers)
+      && !Modifier.isTransient(modifiers) && !Modifier.isFinal(modifiers))
+        list.add(fields[x]);
+    }
+  }
+}
+
Index: ocean/src/net/sourceforge/jsorter/ReflectColumns.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/ReflectColumns.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/ReflectColumns.java	(revision 0)
@@ -0,0 +1,66 @@
+
+
+package net.sourceforge.jsorter;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ *
+ * @author  Jason Rutherglen
+ */
+public class ReflectColumns {
+  public List<SortableColumn> list = new ArrayList<SortableColumn>();
+  
+  public ReflectColumns() {
+  }
+  
+  public SortReflect[] getSortReflects() {
+    SortReflect[] sortReflects = new SortReflect[list.size()];
+    int count = 0;
+    Iterator iterator = list.iterator();
+    while (iterator.hasNext()) {
+      SortableColumnReflect sortableColumnReflect = (SortableColumnReflect)iterator.next();
+      sortReflects[count] = sortableColumnReflect.sortReflect;
+      
+      count++;
+    }
+    return sortReflects;
+  }
+  
+  public List<SortableColumn> getColumns() {
+    return list;
+  }
+  
+  public class SortableColumnReflect implements SortableColumn {
+    int position;
+    String name;
+    SortReflect sortReflect;
+    int order;
+    
+    public SortableColumnReflect(int position, String name, SortReflect sortReflect, int order) {
+      this.position = position;
+      this.name = name;
+      this.sortReflect = sortReflect;
+      this.order = order;
+    }
+    
+    public String getColumnName() {
+      return name;
+    }
+    
+    public int getColumnOrder() {
+      return order;
+    }
+    
+    public int getColumnPosition() {
+      return position;
+    }
+  }
+  
+  public void add(String name, SortReflect sortReflect, int order) {
+    SortableColumnReflect sortableColumnReflect = new SortableColumnReflect(list.size(), name, sortReflect, order);
+    list.add(sortableColumnReflect);
+  }
+}
Index: ocean/src/net/sourceforge/jsorter/SortHolder.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortHolder.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortHolder.java	(revision 0)
@@ -0,0 +1,90 @@
+
+
+package net.sourceforge.jsorter;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+import net.sourceforge.jsorter.SortReflect.MethodException;
+
+
+/**
+ *
+ * @author  Jason Rutherglen
+ */
+public class SortHolder<T> implements Comparable {
+  public Comparable attribute;
+  public T object;
+  public int nullBehavior = SorterConstants.NULLS_ARE_INVALID;
+  
+  public SortHolder(Comparable attribute, T object, int nullBehavior) {
+    this.attribute = attribute;
+    this.object = object;
+    this.nullBehavior = nullBehavior;
+  }
+  
+  public String toString() {
+    if (attribute == null) return "null";
+    return attribute.toString();
+  }
+  
+  public static List getTable(SortReflect[] sortReflect, Collection collection, int nullBehavior) throws MethodException {
+    List table = new ArrayList(collection.size());
+    Iterator iterator = collection.iterator();
+    while (iterator.hasNext()) {
+      Object value = iterator.next();
+      List row = new ArrayList(sortReflect.length); 
+      for (int x=0; x < sortReflect.length; x++) {
+        Object attribute = sortReflect[x].get(value);
+        if (attribute instanceof java.net.URL) {
+          attribute = ((java.net.URL)attribute).toString();
+        }
+        Comparable attributeComparable = (Comparable)attribute;
+        SortHolder sortHolder = new SortHolder(attributeComparable, value, nullBehavior);
+        row.add(sortHolder);
+      }
+      table.add(row);
+    }
+    return table;
+  }
+  
+  public int compareTo(Object obj) {
+    Comparable comparableOne = (Comparable)attribute;
+    Comparable comparableTwo = (Comparable)obj;
+    if (obj instanceof SortHolder) {
+      SortHolder sortHolder = (SortHolder)obj;
+      comparableTwo = (Comparable)sortHolder.attribute;
+    }
+    
+    int rtn;
+    
+    if (comparableOne != null && comparableTwo != null) {
+      try {
+        rtn = comparableOne.compareTo(comparableTwo);
+      } catch (ClassCastException exception) {
+        throw exception;
+      }
+    } else {
+      if (nullBehavior == SorterConstants.NULLS_ARE_INVALID) {
+        throw new IllegalStateException("Null data values are not valid.");
+      } else if (comparableOne == null && comparableTwo != null) {
+        rtn = -1;
+        
+        if (nullBehavior == SorterConstants.NULLS_ARE_GREATEST) {
+          rtn = rtn * -1;
+        }
+      } else if (comparableOne != null && comparableTwo == null) {
+        rtn = 1;
+        
+        if (nullBehavior == SorterConstants.NULLS_ARE_GREATEST) {
+          rtn = rtn * -1;
+        }
+      } else {
+        rtn = 0;
+      }
+    }
+    return rtn;
+  }
+}
Index: ocean/src/net/sourceforge/jsorter/SortableSwingComponent.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortableSwingComponent.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortableSwingComponent.java	(revision 0)
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+import java.util.List;
+
+/**
+ * This class represents a component that will be sorted by SwingSorter.
+ * 
+ * @author Robert Breidecker
+ */
+public interface SortableSwingComponent {
+	/**
+	 * Returns a lists of lists that contain the component's data values. Each
+	 * inner list contained in the outer list represents a row of data.
+	 * 
+	 * @return The data list for this component.
+	 */
+	public List getDataList();
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SwingSorter.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SwingSorter.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SwingSorter.java	(revision 0)
@@ -0,0 +1,580 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+import java.util.List;
+import java.util.Vector;
+
+import javax.swing.DefaultComboBoxModel;
+import javax.swing.DefaultListModel;
+import javax.swing.JComboBox;
+import javax.swing.JList;
+import javax.swing.JTable;
+import javax.swing.table.DefaultTableModel;
+
+/**
+ * Used for sorting data in the Swing objects JTable, JList, JComboBox,
+ * DefaultTableModel, DefaultListModel, DefaultComboBoxModel and
+ * SortableComponent.
+ * 
+ * Note: This is class is not thread safe.
+ * 
+ * @author Robert Breidecker
+ */
+public class SwingSorter {
+	/**
+	 * Indicate how to treat null data values.
+	 */
+	private int nullBehavior = SorterConstants.NULLS_ARE_INVALID;
+
+	/**
+	 * Sorts a custom component that implements the SortableComponent interface.
+	 * This method assumes that the object type in the display column is made up
+	 * entirely of the same type and that the type implements the Comparable
+	 * interface. For example, the data in the display column is entirely made
+	 * of String or Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date.
+	 * 
+	 * @param component
+	 *            The component to be sorted in ascending order. The component
+	 *            model must implement the SortableComponent interface.
+	 */
+	public void sortComponent(final SortableSwingComponent component) {
+		sortComponent(component, SorterConstants.ASCENDING_ORDER);
+	}
+
+	/**
+	 * Sorts a custom component that implements the SortableComponent interface.
+	 * This method assumes that the object type in the display column is made up
+	 * entirely of the same type and that the type implements the Comparable
+	 * interface. For example, the data in the display column is entirely made
+	 * of String or Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date.
+	 * 
+	 * @param component
+	 *            The component to be sorted in ascending order. The component
+	 *            model must implement the SortableComponent interface.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortComponent(final SortableSwingComponent component,
+			final int columnOrder) {
+		final SortableColumn sortColumn = new SortableColumnImpl(0, columnOrder);
+		final List sortColumns = new Vector();
+		sortColumns.add(sortColumn);
+
+		sortComponent(component, sortColumns);
+	}
+
+	/**
+	 * Sorts a custom component that implements the SortableComponent interface.
+	 * This method assumes that the object type in the display column is made up
+	 * entirely of the same type and that the type implements the Comparable
+	 * interface. For example, the data in the display column is entirely made
+	 * of String or Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date.
+	 * 
+	 * @param component
+	 *            The component to be sorted in ascending order. The component
+	 *            model must implement the SortableComponent interface.
+	 * 
+	 * @param sortColumns
+	 *            Sort columns are a list of numbers specifying the colums to
+	 *            sort the table by. Each number in the list should be an
+	 *            instance of the SortColumn class represents a position of a
+	 *            column in the table. Column position start at zero just as
+	 *            they do in the standard Collection classes and Java arrays.
+	 *            When this field is null, there must be only one and only one
+	 *            column order specified. The sort routine will then sort using
+	 *            every column in the table starting from position 0 and up or
+	 *            left to right in ascending order.
+	 */
+	public void sortComponent(final SortableSwingComponent component,
+			final List sortColumns) {
+		// Create a new Sorter object.
+		final Sorter sorter = new Sorter(component.getDataList(), sortColumns);
+		sorter.setNullBehavior(nullBehavior);
+
+		// Sort the data!
+		sorter.sort();
+	}
+
+	/**
+	 * Sorts a combo box using Sorter. This method assumes that the object type
+	 * in the display column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in the
+	 * display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param comboBox
+	 *            The combo box to be sorted in ascending order. The combo box
+	 *            model must be an instance or a descendent of
+	 *            DefaultComboBoxModel.
+	 */
+	public void sortComboBox(final JComboBox comboBox) {
+		sortComboBox(comboBox, SorterConstants.ASCENDING_ORDER);
+	}
+
+	/**
+	 * Sorts a combo box using Sorter. This method assumes that the object type
+	 * in the display column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in the
+	 * display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param comboBox
+	 *            The combo box to be sorted. The combo box model must be an
+	 *            instance or a descendent of DefaultComboBoxModel or implement
+	 *            the SortableComponent.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	public void sortComboBox(final JComboBox comboBox, final int columnOrder) {
+		if (comboBox.getModel() instanceof DefaultComboBoxModel) {
+			final DefaultComboBoxModel model = (DefaultComboBoxModel) comboBox
+					.getModel();
+
+			sortComboBoxModel(model, columnOrder);
+		} else if (comboBox.getModel() instanceof SortableSwingComponent) {
+			final SortableSwingComponent component = (SortableSwingComponent) comboBox
+					.getModel();
+
+			sortComponent(component, columnOrder);
+		} else {
+			throw new IllegalArgumentException(
+					"ComboBox model must be an "
+							+ "instance of decendent of DefaultComboBoxModel or implement the "
+							+ "SortableComponent interface.");
+		}
+	}
+
+	/**
+	 * Sorts a combo box model using Sorter. This method assumes that the object
+	 * type in the display column is made up entirely of the same type and that
+	 * the type implements the Comparable interface. For example, the data in
+	 * the display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The combo box model to be sorted in ascending order. The combo
+	 *            box model must be an instance or a descendent of
+	 *            DefaultComboBoxModel.
+	 */
+	public void sortComboBoxModel(final DefaultComboBoxModel model) {
+		sortComboBoxModel(model, SorterConstants.ASCENDING_ORDER);
+	}
+
+	/**
+	 * Sorts a combo box model using Sorter. This method assumes that the object
+	 * type in the display column is made up entirely of the same type and that
+	 * the type implements the Comparable interface. For example, the data in
+	 * the display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The combo box model to be sorted. The combo box model must be
+	 *            an instance or a descendent of DefaultComboBoxModel.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortComboBoxModel(final DefaultComboBoxModel model,
+			final int columnOrder) {
+		// Create the table of sort data.
+		final List table = new Vector();
+
+		// Get the model data.
+		for (int ctr = 0, size = model.getSize(); ctr < size; ctr++) {
+			// Create a new row.
+			final List row = new Vector();
+			row.add(model.getElementAt(ctr));
+			table.add(row);
+		}
+
+		// Create a new Sorter object.
+		final Sorter sorter = new Sorter(table, columnOrder);
+		sorter.setNullBehavior(nullBehavior);
+
+		// Sort the data!
+		sorter.sort();
+
+		// Clear the model data.
+		model.removeAllElements();
+
+		// Re-add the sorted data to the model.
+		for (int ctr = 0, size = table.size(); ctr < size; ctr++) {
+			final List row = (List) table.get(ctr);
+
+			// Get the first element from the row, because a list
+			// only has one column.
+			model.addElement(row.get(0));
+		}
+	}
+
+	/**
+	 * Sorts a list model using Sorter. This method assumes that the object type
+	 * in the display column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in the
+	 * display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The list model to be sorted in ascending order. Must be a
+	 *            DefaultListModel or a descendent of DefaultListModel.
+	 */
+	public void sortListModel(final DefaultListModel model) {
+		sortListModel(model, SorterConstants.ASCENDING_ORDER);
+	}
+
+	/**
+	 * Sorts a list using Sorter. This method assumes that the object type in
+	 * the display column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in the display
+	 * column is entirely made of String or Integers. Some of the objects that
+	 * implement the Comparable interface include String, Integer, Long, Short,
+	 * Float, Byte, Double and Date.
+	 * 
+	 * @param list
+	 *            The list to be sorted in ascending order. The list model used
+	 *            must be a DefaultListBoxModel or a descendent of
+	 *            DefaultListModel.
+	 */
+	public void sortList(final JList list) {
+		sortList(list, SorterConstants.ASCENDING_ORDER);
+	}
+
+	/**
+	 * Sorts a list using Sorter. This method assumes that the object type in
+	 * the display column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in the display
+	 * column is entirely made of String or Integers. Some of the objects that
+	 * implement the Comparable interface include String, Integer, Long, Short,
+	 * Float, Byte, Double and Date.
+	 * 
+	 * @param list
+	 *            The list to be sorted. The list model used must be a
+	 *            DefaultListModel or a descendent of DefaultListModel or
+	 *            implement the SortableComponent interface.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	public void sortList(final JList list, final int columnOrder) {
+		if (list.getModel() instanceof DefaultListModel) {
+			final DefaultListModel model = (DefaultListModel) list.getModel();
+
+			sortListModel(model, columnOrder);
+		} else if (list.getModel() instanceof SortableSwingComponent) {
+			final SortableSwingComponent model = (SortableSwingComponent) list
+					.getModel();
+
+			sortComponent(model, columnOrder);
+		} else {
+			throw new IllegalArgumentException(
+					"List model must be an "
+							+ "instance of decendent of DefaultListModel or implement the "
+							+ "SortableComponent interface.");
+		}
+	}
+
+	/**
+	 * Sorts a list model using Sorter. This method assumes that the object type
+	 * in the display column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in the
+	 * display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The list model to be sorted. Must be a DefaultListModel or a
+	 *            descendent of DefaultListModel.
+	 * 
+	 * @param newSortOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortListModel(final DefaultListModel model,
+			final int newSortOrder) {
+		// Create the table of sort data.
+		final Vector table = new Vector();
+
+		// Get the model data.
+		for (int ctr = 0, size = model.getSize(); ctr < size; ctr++) {
+			// Create a new row.
+			final Vector row = new Vector();
+			row.add(model.getElementAt(ctr));
+			table.add(row);
+		}
+
+		// Create a new Sorter.
+		final Sorter sorter = new Sorter(table, newSortOrder);
+		sorter.setNullBehavior(nullBehavior);
+
+		// Sort the vector data.
+		sorter.sort();
+
+		// Clear the model data.
+		model.removeAllElements();
+
+		// Re-add the sorted data to the model.
+		for (int ctr = 0, size = table.size(); ctr < size; ctr++) {
+			final List row = (List) table.get(ctr);
+
+			// Get the first element from the row, because a list
+			// only has one column.
+			model.addElement(row.get(0));
+		}
+	}
+
+	/**
+	 * Sorts a table using Sorter. This method assumes that the object type in
+	 * each column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in column one
+	 * can be entirely made of String types and column two can be entirely made
+	 * up of Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date. This method will sort using all columns in ascending order.
+	 * 
+	 * @param table
+	 *            JTable The table to be sorted. The table model of the table
+	 *            must be an instance or a descendent of DefaultTableModel or
+	 *            must implement the SortableComponent interface.
+	 */
+	public void sortTable(final JTable table) {
+		// Sort the table.
+		sortTable(table);
+	}
+
+	/**
+	 * Sorts a table using Sorter. This method assumes that the object type in
+	 * each column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in column one
+	 * can be entirely made of String types and column two can be entirely made
+	 * up of Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date. This method will sort using all columns in the order specified.
+	 * 
+	 * @param table
+	 *            JTable The table to be sorted. The table model of the table
+	 *            must be an instance or a descendent of DefaultTableModel or
+	 *            must implement the SortableComponent interface.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortTable(final JTable table, final int columnOrder) {
+		// Sort the table!
+		sortTable(table, columnOrder);
+	}
+
+	/**
+	 * Sorts a table using Sorter. This method assumes that the object type in
+	 * each column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in column one
+	 * can be entirely made of String types and column two is can be entriely
+	 * made up of Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date.
+	 * 
+	 * @param table
+	 *            JTable The table to be sorted. The table model of the table
+	 *            must be an instance or a descendent of DefaultTableModel or
+	 *            must implement the SortableComponent interface.
+	 * 
+	 * @param sortColumns
+	 *            Sort columns are a list of numbers specifying the colums to
+	 *            sort the table by. Each number in the list should be an
+	 *            instance of the SortColumn class represents a position of a
+	 *            column in the table. Column position start at zero just as
+	 *            they do in the standard Collection classes and Java arrays.
+	 *            When this field is null, there must be only one and only one
+	 *            column order specified. The sort routine will then sort using
+	 *            every column in the table starting from position 0 and up or
+	 *            left to right in ascending order.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	public void sortTable(final JTable table, final List sortColumns) {
+		if (table.getModel() instanceof DefaultTableModel) {
+			final DefaultTableModel model = (DefaultTableModel) table
+					.getModel();
+
+			sortTableModel(model, sortColumns);
+		} else if (table.getModel() instanceof SortableSwingComponent) {
+			final SortableSwingComponent model = (SortableSwingComponent) table
+					.getModel();
+
+			sortComponent(model, sortColumns);
+		} else {
+			throw new IllegalArgumentException(
+					"Table model must be an "
+							+ "instance of decendent of DefaultTableModel or implement the "
+							+ "SortableComponent interface.");
+		}
+	}
+
+	/**
+	 * Sorts a table model using Sorter. This method assumes that the object
+	 * type in each column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in column
+	 * one can be entirely made of String types and column two is can be
+	 * entriely made up of Integers. Some of the objects that implement the
+	 * Comparable interface include String, Integer, Long, Short, Float, Byte,
+	 * Double and Date.
+	 * 
+	 * @param model
+	 *            The table model to be sorted. The table model must be an
+	 *            instance or a descendent of DefaultTableModel.
+	 */
+	public void sortTableModel(final DefaultTableModel model) {
+		// Sort the data.
+		sortTableModel(model);
+	}
+
+	/**
+	 * Sorts a table model using Sorter. This method assumes that the object
+	 * type in each column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in column
+	 * one can be entirely made of String types and column two is can be
+	 * entriely made up of Integers. Some of the objects that implement the
+	 * Comparable interface include String, Integer, Long, Short, Float, Byte,
+	 * Double and Date.
+	 * 
+	 * @param model
+	 *            The table model to be sorted. The table model must be an
+	 *            instance or a descendent of DefaultTableModel.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortTableModel(final DefaultTableModel model,
+			final int columnOrder) {
+		// Sort the table.
+		sortTableModel(model, columnOrder);
+	}
+
+	/**
+	 * Sorts a table model using Sorter. This method assumes that the object
+	 * type in each column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in column
+	 * one can be entirely made of String types and column two is can be
+	 * entriely made up of Integers. Some of the objects that implement the
+	 * Comparable interface include String, Integer, Long, Short, Float, Byte,
+	 * Double and Date.
+	 * 
+	 * @param model
+	 *            The table model to be sorted. The table model must be an
+	 *            instance or a descendent of DefaultTableModel.
+	 * 
+	 * @param sortColumns
+	 *            Sort columns are a list of numbers specifying the colums to
+	 *            sort the table by. Each number in the list should be an
+	 *            instance of the SortColumn class represents a position of a
+	 *            column in the table. Column position start at zero just as
+	 *            they do in the standard Collection classes and Java arrays.
+	 *            When this field is null, there must be only one and only one
+	 *            column order specified. The sort routine will then sort using
+	 *            every column in the table starting from position 0 and up or
+	 *            left to right in ascending order.
+	 */
+	public void sortTableModel(final DefaultTableModel model,
+			final List sortColumns) {
+		// Create a new Sorter.
+		final Sorter sorter = new Sorter(model.getDataVector(), sortColumns);
+		sorter.setNullBehavior(nullBehavior);
+
+		// Sort the data.
+		sorter.sort();
+	}
+
+	/**
+	 * Returns the null behavior for this object.
+	 * 
+	 * @return An integer representing the constant that indicates how null data
+	 *         values should behave while being sorted. See the null behavior
+	 *         constants in this class. The default value for this class is
+	 *         NULLS_ARE_INVALID.
+	 */
+	public int getNullBehavior() {
+		return nullBehavior;
+	}
+
+	/**
+	 * Set the null behavior for this object.
+	 * 
+	 * @param nullBehavior
+	 *            An integer representing the constant that indicates how null
+	 *            data values should behave while being sorted. See the null
+	 *            behavior constants in this class.
+	 * 
+	 * @throws IllegalArgumentException
+	 *             Thrown if the null behavior value is not valid.
+	 */
+	public void setNullBehavior(final int nullBehavior) {
+		if (nullBehavior != SorterConstants.NULLS_ARE_GREATEST
+				&& nullBehavior != SorterConstants.NULLS_ARE_INVALID
+				&& nullBehavior != SorterConstants.NULLS_ARE_LEAST) {
+			throw new IllegalArgumentException("Invalid null behavior.");
+		}
+
+		this.nullBehavior = nullBehavior;
+	}
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortableColumn.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortableColumn.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortableColumn.java	(revision 0)
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+/**
+ * This interface represents a column that will be sorted by Sorter,
+ * SwingSorter, or SortComparator.
+ * 
+ * @author Robert Breidecker
+ */
+public interface SortableColumn {
+	/**
+	 * Returns the number position of the column to sort with. Column
+	 * positions start at zero for the first column.
+	 * 
+	 * @return The number position of the column to sort with.
+	 */
+	public int getColumnPosition();
+
+	/**
+	 * Returns the order to sort the column by; one of the order constants in
+	 * SorterConstants (ascending or descending).
+	 * 
+	 * @return The order to sort the column by.
+	 */
+	public int getColumnOrder();
+
+	/**
+	 * Returns the name of this column. May be null when no name was assigned.
+	 * 
+	 * @return The name of this column.
+	 */
+	public String getColumnName();
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortableBoolean.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortableBoolean.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortableBoolean.java	(revision 0)
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+/**
+ * This class is a wrapper on top of the Boolean class. Its purpose is to
+ * provide a class that allows for Boolean values to be sorted, because the
+ * Boolean class does not implement the Comparable interface. False is
+ * ordered before true.
+ * 
+ * @author Robert Breidecker
+ */
+public final class SortableBoolean implements Comparable {
+	/**
+	 * The Boolean value being wrapped.
+	 */
+	private Boolean booleanValue = null;
+
+	/**
+	 * Creates a new SortableBoolean object from the input boolean value.
+	 * 
+	 * @param value
+	 *            The boolean value used to create the new SortableBoolean.
+	 */
+	public SortableBoolean(final boolean value) {
+		// Boolean.valueOf reuses the canonical TRUE/FALSE instances instead
+		// of allocating a new Boolean on every call.
+		booleanValue = Boolean.valueOf(value);
+	}
+
+	/**
+	 * Creates a new SortableBoolean object from the input Boolean.
+	 * 
+	 * @param value
+	 *            The Boolean used to create the new SortableBoolean. Should
+	 *            not be null; a null value will cause a NullPointerException
+	 *            on later use.
+	 */
+	public SortableBoolean(final Boolean value) {
+		booleanValue = value;
+	}
+
+	/**
+	 * Creates a new SortableBoolean object from the input String. The new
+	 * object will have a true value if the input value is "true", otherwise
+	 * it will have a false value.
+	 * 
+	 * @param value
+	 *            The String used to create the new SortableBoolean.
+	 */
+	public SortableBoolean(final String value) {
+		booleanValue = Boolean.valueOf(value);
+	}
+
+	/**
+	 * Returns the primitive boolean value for this object.
+	 * 
+	 * @return The primitive boolean value for this object.
+	 */
+	public boolean booleanValue() {
+		return booleanValue.booleanValue();
+	}
+
+	/**
+	 * Compares this object with the specified object for order. Returns a
+	 * negative integer, zero, or a positive integer as this object is less
+	 * than, equal to, or greater than the specified object. False is ordered
+	 * before true.
+	 * 
+	 * @param object
+	 *            The object to compare this object to. Must be a non-null
+	 *            SortableBoolean.
+	 * 
+	 * @return A negative integer, zero, or a positive integer as this object is
+	 *         less than, equal to, or greater than the specified object.
+	 * 
+	 * @throws IllegalArgumentException
+	 *             Thrown if the input object is null or is not a
+	 *             SortableBoolean.
+	 */
+	public int compareTo(final Object object) {
+		if (object == null) {
+			throw new IllegalArgumentException(
+					"This object can not be compared " + "to a null value.");
+		}
+
+		if (!(object instanceof SortableBoolean)) {
+			throw new IllegalArgumentException("The input object must be an "
+					+ "instance of SortableBoolean.");
+		}
+
+		final boolean thisValue = booleanValue();
+		final boolean thatValue = ((SortableBoolean) object).booleanValue();
+
+		if (thisValue == thatValue) {
+			return 0;
+		}
+
+		// The values differ: true is greater than false.
+		return thisValue ? 1 : -1;
+	}
+
+	/**
+	 * Returns true if and only if the argument is not null and is a
+	 * SortableBoolean object that represents the same boolean value as this
+	 * object. Consistent with hashCode, which delegates to the wrapped
+	 * Boolean.
+	 * 
+	 * @param object
+	 *            The object to compare this object to.
+	 * 
+	 * @return Returns true if the specified object represents the same value as
+	 *         this object.
+	 */
+	public boolean equals(final Object object) {
+		if (!(object instanceof SortableBoolean)) {
+			// Also covers null, which is never an instance of anything.
+			return false;
+		}
+
+		// Compare the wrapped Booleans. (Previously this compared the wrapped
+		// Boolean against the SortableBoolean wrapper itself, so equals
+		// always returned false.)
+		return booleanValue.equals(((SortableBoolean) object).getBoolean());
+	}
+
+	/**
+	 * Returns a hash code for this object. This method calls the hashCode
+	 * method on the Boolean object it is wrapping.
+	 * 
+	 * @return The hash code for the Boolean object wrapped by this object.
+	 */
+	public int hashCode() {
+		return booleanValue.hashCode();
+	}
+
+	/**
+	 * Returns a String object representing this object's value. If this object
+	 * represents the value true, a string equal to "true" is returned.
+	 * Otherwise, a string equal to "false" is returned.
+	 * 
+	 * @return A string representation of this object.
+	 */
+	public String toString() {
+		return booleanValue.toString();
+	}
+
+	/**
+	 * Returns the Boolean wrapped by this object.
+	 * 
+	 * @return The Boolean wrapped by this object.
+	 */
+	public Boolean getBoolean() {
+		return booleanValue;
+	}
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortableColumnImpl.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortableColumnImpl.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortableColumnImpl.java	(revision 0)
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+/**
+ * This class represents a column that will be sorted by Sorter, SwingSorter, or
+ * SortComparator.
+ * 
+ * @author Robert Breidecker
+ */
+public class SortableColumnImpl implements SortableColumn {
+	/**
+	 * The zero-based position of the column to sort by. Final: both
+	 * constructors assign it, so the original default initializer was dead.
+	 */
+	private final int columnPosition;
+
+	/**
+	 * The order to sort the column by (SorterConstants.ASCENDING_ORDER or
+	 * SorterConstants.DESCENDING_ORDER).
+	 */
+	private final int columnOrder;
+
+	/**
+	 * The optional name or description of the sort column; may be null.
+	 */
+	private final String columnName;
+
+	/**
+	 * Creates a sort column with no name.
+	 * 
+	 * @param columnPosition
+	 *            The zero-based position of the column in the data to sort
+	 *            by.
+	 * 
+	 * @param columnOrder
+	 *            The sort order; use the ASCENDING_ORDER or
+	 *            DESCENDING_ORDER constants in SorterConstants.
+	 */
+	public SortableColumnImpl(final int columnPosition, final int columnOrder) {
+		this(columnPosition, columnOrder, null);
+	}
+
+	/**
+	 * Creates a sort column with a position, order and name.
+	 * 
+	 * @param columnPosition
+	 *            The zero-based position of the column in the data to sort
+	 *            by.
+	 * 
+	 * @param columnOrder
+	 *            The sort order; use the ASCENDING_ORDER or
+	 *            DESCENDING_ORDER constants in SorterConstants.
+	 * 
+	 * @param columnName
+	 *            An optional name or description for the column, useful as
+	 *            a display label or identifier; pass null if unused.
+	 */
+	public SortableColumnImpl(final int columnPosition, final int columnOrder,
+			final String columnName) {
+		this.columnPosition = columnPosition;
+		this.columnOrder = columnOrder;
+		this.columnName = columnName;
+	}
+
+	/**
+	 * Returns the zero-based position of the column to sort by.
+	 * 
+	 * @return The column position.
+	 */
+	public int getColumnPosition() {
+		return columnPosition;
+	}
+
+	/**
+	 * Returns the order to sort the column by.
+	 * 
+	 * @return The column order.
+	 */
+	public int getColumnOrder() {
+		return columnOrder;
+	}
+
+	/**
+	 * Returns the name of this column.
+	 * 
+	 * @return The column name, or null if none was given.
+	 */
+	public String getColumnName() {
+		return columnName;
+	}
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SorterConstants.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SorterConstants.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SorterConstants.java	(revision 0)
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+/**
+ * This class contains constants used in the JSorter project.
+ * 
+ * @author Robert Breidecker
+ */
+public class SorterConstants {
+	/**
+	 * The zero-based position of the first (leftmost) column.
+	 */
+	public static final int FIRST_COLUMN_POSITION = 0;
+
+	/**
+	 * Column order constant: sort the column in ascending order.
+	 */
+	public static final int ASCENDING_ORDER = 1;
+
+	/**
+	 * Column order constant: sort the column in descending order.
+	 */
+	public static final int DESCENDING_ORDER = 0;
+
+	/**
+	 * Null behavior constant: null data values are invalid. This is the
+	 * default behavior.
+	 */
+	public static final int NULLS_ARE_INVALID = 0;
+
+	/**
+	 * Null behavior constant: null data values are valid and sort before
+	 * (are treated as less than) all non-null values.
+	 */
+	public static final int NULLS_ARE_LEAST = 1;
+
+	/**
+	 * Null behavior constant: null data values are valid and sort after
+	 * (are treated as greater than) all non-null values.
+	 */
+	public static final int NULLS_ARE_GREATEST = 2;
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortReflect.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortReflect.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortReflect.java	(revision 0)
@@ -0,0 +1,63 @@
+package net.sourceforge.jsorter;
+
+import java.lang.reflect.InvocationTargetException;
+
+/**
+ * 
+ * @author Jason Rutherglen
+ */
+public class SortReflect {
+	/** Extraction strategy: invoke a method on the target object. */
+	public static final int METHOD = 1;
+	/** Extraction strategy: read a field from the target object. */
+	public static final int FIELD = 2;
+
+	/** Which extraction strategy this instance uses (METHOD or FIELD). */
+	public int type;
+	/** Name of the method to invoke when type == METHOD. */
+	public String methodName;
+	/** Name of the field to read when type == FIELD. */
+	public String fieldName;
+	/** Optional parameters passed to the method when type == METHOD. */
+	public Object[] params;
+
+	/** Instances are created only through the static factory methods. */
+	private SortReflect() {
+	}
+
+	/** Wraps any failure raised while invoking the reflection target. */
+	public static class MethodException extends Exception {
+		// Serializable subclasses should pin their serial version.
+		private static final long serialVersionUID = 1L;
+
+		public MethodException(Throwable throwable) {
+			super(throwable);
+		}
+	}
+
+	/**
+	 * Extracts the sort value from the given object using this instance's
+	 * configured strategy.
+	 * 
+	 * @param value The object to extract the sort value from.
+	 * @return The extracted value.
+	 * @throws MethodException If the reflective method call fails.
+	 */
+	public Object get(Object value) throws MethodException {
+		if (type == METHOD) {
+			try {
+				return MethodUtil.call(methodName, params, value);
+			} catch (InvocationTargetException invocationTargetException) {
+				// Surface the underlying failure, not the reflection wrapper.
+				throw new MethodException(invocationTargetException.getCause());
+			} catch (Exception exception) {
+				throw new MethodException(exception);
+			}
+		} else if (type == FIELD) {
+			return FieldUtil.getFieldValue(fieldName, value);
+		}
+		// IllegalStateException is a RuntimeException, so this stays
+		// backward compatible with callers catching RuntimeException.
+		throw new IllegalStateException("type is invalid");
+	}
+
+	/** Creates a SortReflect that reads the named field. */
+	public static SortReflect field(String fieldName) {
+		SortReflect sortReflect = new SortReflect();
+		sortReflect.type = FIELD;
+		sortReflect.fieldName = fieldName;
+		return sortReflect;
+	}
+
+	/** Creates a SortReflect that invokes the named no-argument method. */
+	public static SortReflect method(String methodName) {
+		SortReflect sortReflect = new SortReflect();
+		sortReflect.type = METHOD;
+		sortReflect.methodName = methodName;
+		return sortReflect;
+	}
+
+	/** Creates a SortReflect that invokes the named method with parameters. */
+	public static SortReflect method(String methodName, Object[] params) {
+		SortReflect sortReflect = new SortReflect();
+		sortReflect.type = METHOD;
+		sortReflect.methodName = methodName;
+		sortReflect.params = params;
+		return sortReflect;
+	}
+}
Index: ocean/src/net/sourceforge/jsorter/MethodUtil.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/MethodUtil.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/MethodUtil.java	(revision 0)
@@ -0,0 +1,38 @@
+package net.sourceforge.jsorter;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+/**
+ * 
+ * @author Jason Rutherglen
+ */
+public class MethodUtil {
+
+	public MethodUtil() {
+	}
+
+	/**
+	 * Invokes the named public method on the object via reflection.
+	 * 
+	 * @param methodName The name of the method to invoke.
+	 * @param parameters A single argument, an Object[] of arguments, or
+	 *            null for a no-argument call. Null array elements leave a
+	 *            null class in the lookup signature.
+	 * @param object The target object whose method is invoked.
+	 * @return The invoked method's return value.
+	 * @throws IllegalAccessException If the method is inaccessible.
+	 * @throws InvocationTargetException If the invoked method throws.
+	 * @throws NoSuchMethodException If no matching public method exists.
+	 */
+	public static Object call(String methodName, Object parameters, Object object) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
+		Object[] parameterArray = null;
+		Class[] classArray = null;
+		if (parameters != null) {
+			if (!parameters.getClass().isArray()) {
+				parameterArray = new Object[] { parameters };
+				classArray = new Class[] { parameters.getClass() };
+			} else {
+				// BUG FIX: the original never assigned parameterArray in
+				// this branch (guaranteed NullPointerException on the next
+				// line) and used the array's own class rather than each
+				// element's class when building the lookup signature.
+				parameterArray = (Object[]) parameters;
+				classArray = new Class[parameterArray.length];
+				for (int x = 0; x < parameterArray.length; x++) {
+					if (parameterArray[x] != null) {
+						classArray[x] = parameterArray[x].getClass();
+					}
+				}
+			}
+		} else {
+			parameterArray = new Object[0];
+			classArray = new Class[0];
+		}
+
+		Method method = object.getClass().getMethod(methodName, classArray);
+		return method.invoke(object, parameterArray);
+	}
+}
Index: ocean/src/net/sourceforge/jsorter/Sorter.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/Sorter.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/Sorter.java	(revision 0)
@@ -0,0 +1,383 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Vector;
+
+import net.sourceforge.jsorter.SortReflect.MethodException;
+
+/**
+ * Used for sorting lists of objects. This class is particularly good for
+ * sorting table data and multi-column lists.
+ * 
+ * Note: This is class is not thread safe.
+ * 
+ * @author Robert Breidecker
+ */
+public class Sorter<T> {
+	/** The rows of data to sort; each row must itself be a List. */
+	private List table = null;
+
+	/** The SortableColumn objects describing the sort keys, or null. */
+	private List sortColumns = null;
+
+	/** How null data values are treated; see the SorterConstants values. */
+	private int nullBehavior = SorterConstants.NULLS_ARE_INVALID;
+
+	/**
+	 * Extracts the original objects from a table built via createReflect,
+	 * preserving the table's current row order. Each row's first cell is
+	 * expected to be the SortHolder wrapping the original object.
+	 * 
+	 * @return The wrapped objects, one per table row.
+	 */
+	public List<T> getReflectResults() {
+		final List<T> results = new ArrayList<T>(table.size());
+		final Iterator<T> iterator = table.iterator();
+		while (iterator.hasNext()) {
+			final List row = (List) iterator.next();
+			final SortHolder<T> sortHolder = (SortHolder<T>) row.get(0);
+			results.add(sortHolder.object);
+		}
+		return results;
+	}
+
+	/**
+	 * Builds a Sorter whose table is derived by applying the given
+	 * reflection columns to each object in the input list.
+	 * 
+	 * @param reflectColumns The reflective column definitions.
+	 * @param objects The objects to sort.
+	 * @param nullBehavior One of the SorterConstants null-behavior values.
+	 * @return A configured Sorter ready to sort.
+	 * @throws MethodException If a reflective extraction fails.
+	 */
+	public static Sorter createReflect(ReflectColumns reflectColumns, List objects, int nullBehavior) throws MethodException {
+		final SortReflect[] sortReflects = reflectColumns.getSortReflects();
+		final List table = SortHolder.getTable(sortReflects, objects, nullBehavior);
+		final Sorter sorter = new Sorter(table, reflectColumns.getColumns());
+		sorter.setNullBehavior(nullBehavior);
+		return sorter;
+	}
+
+	/**
+	 * No-argument constructor. Use the setter methods to supply the table
+	 * and sort columns before calling sort().
+	 */
+	public Sorter() {
+		super();
+	}
+
+	/**
+	 * Creates a Sorter for the given table. Since no sort columns are
+	 * specified, the sort routine sorts using all of the columns, left to
+	 * right, in ascending order.
+	 * 
+	 * @param table
+	 *            A List of rows; each row is itself a List whose cells
+	 *            implement Comparable (String, Integer, Date, ...). Cell
+	 *            types must be consistent within a column or a
+	 *            ClassCastException is thrown during the sort.
+	 */
+	public Sorter(final List table) {
+		setTable(table);
+	}
+
+	/**
+	 * Creates a Sorter for the given table with one sort column per table
+	 * column, each using the supplied order.
+	 * 
+	 * @param table
+	 *            A List of rows; each row is itself a List whose cells
+	 *            implement Comparable. Cell types must be consistent
+	 *            within a column or a ClassCastException is thrown during
+	 *            the sort.
+	 * 
+	 * @param columnOrder
+	 *            Either SorterConstants.ASCENDING_ORDER or
+	 *            SorterConstants.DESCENDING_ORDER, applied to every
+	 *            column.
+	 */
+	public Sorter(final List table, final int columnOrder) {
+		// The first row determines how many sort columns to build.
+		List firstRow = null;
+		if (table.size() > 0) {
+			firstRow = (List) table.get(0);
+		}
+
+		List sortColumns = null;
+		if (firstRow != null) {
+			sortColumns = new Vector();
+			final int numColumns = firstRow.size();
+			for (int columnCtr = 0; columnCtr < numColumns; columnCtr++) {
+				sortColumns.add(new SortableColumnImpl(columnCtr, columnOrder));
+			}
+		}
+
+		setTable(table);
+		setSortColumns(sortColumns);
+	}
+
+	/**
+	 * Creates a Sorter for the given table and sort columns.
+	 * 
+	 * @param table
+	 *            A List of rows; each row is itself a List whose cells
+	 *            implement Comparable.
+	 * 
+	 * @param sortColumns
+	 *            A List of SortableColumn objects naming the zero-based
+	 *            column positions and orders to sort by. When null, every
+	 *            column is sorted in ascending order, left to right.
+	 */
+	public Sorter(final List table, final List sortColumns) {
+		setTable(table);
+		setSortColumns(sortColumns);
+	}
+
+	/**
+	 * Returns the list of columns to sort by.
+	 * 
+	 * @return The list of columns to sort by, or null.
+	 */
+	public List getSortColumns() {
+		return sortColumns;
+	}
+
+	/**
+	 * Returns the table of sort data.
+	 * 
+	 * @return The table of sort data.
+	 */
+	public List<T> getTable() {
+		return table;
+	}
+
+	/**
+	 * Updates the list of sort columns after validating it.
+	 * 
+	 * @param sortColumns
+	 *            A List of SortableColumn objects, or null to sort every
+	 *            column in ascending order.
+	 * 
+	 * @throws IllegalArgumentException If the sort columns are invalid.
+	 */
+	public void setSortColumns(final List sortColumns) {
+		validateSortColumns(sortColumns);
+		this.sortColumns = sortColumns;
+	}
+
+	/**
+	 * Updates the table of sort data after validating it.
+	 * 
+	 * @param table
+	 *            A non-null List of rows; each row is itself a List whose
+	 *            cells implement Comparable.
+	 * 
+	 * @throws IllegalArgumentException If the table is invalid.
+	 */
+	public void setTable(final List table) {
+		validateTable(table);
+		this.table = table;
+	}
+
+	/**
+	 * Returns the null behavior for this object.
+	 * 
+	 * @return One of the SorterConstants null-behavior values; the default
+	 *         is NULLS_ARE_INVALID.
+	 */
+	public int getNullBehavior() {
+		return nullBehavior;
+	}
+
+	/**
+	 * Sets the null behavior for this object.
+	 * 
+	 * @param nullBehavior
+	 *            One of the SorterConstants null-behavior values.
+	 * 
+	 * @throws IllegalArgumentException
+	 *             Thrown if the null behavior value is not valid.
+	 */
+	public void setNullBehavior(final int nullBehavior) {
+		if (nullBehavior != SorterConstants.NULLS_ARE_GREATEST
+				&& nullBehavior != SorterConstants.NULLS_ARE_INVALID
+				&& nullBehavior != SorterConstants.NULLS_ARE_LEAST) {
+			throw new IllegalArgumentException("Invalid null behavior.");
+		}
+
+		this.nullBehavior = nullBehavior;
+	}
+
+	/**
+	 * Sorts the table in place using the configured sort columns and null
+	 * behavior.
+	 * 
+	 * @exception IllegalStateException
+	 *                The data in this class is in an invalid state.
+	 */
+	public void sort() {
+		// The comparator interprets the sort columns and null behavior.
+		// (An unused local and a commented-out debug print were removed.)
+		Collections.sort(table, new SortComparator(getSortColumns(), getNullBehavior()));
+	}
+
+	/**
+	 * Sorts the table and returns the original objects (see
+	 * getReflectResults) in sorted order.
+	 * 
+	 * @return The wrapped objects in sorted order.
+	 */
+	public List<T> sortReflect() {
+		sort();
+		return getReflectResults();
+	}
+
+	/**
+	 * Sorts the table in place using the supplied comparator. The
+	 * configured sort columns are ignored unless the comparator itself
+	 * uses them.
+	 * 
+	 * @param comparator
+	 *            A comparator that compares two rows of the table.
+	 * 
+	 * @exception IllegalStateException
+	 *                The data in Sorter is in an invalid state.
+	 */
+	public void sort(final Comparator comparator) {
+		Collections.sort(getTable(), comparator);
+	}
+
+	/**
+	 * Validates the list of sort columns. A null list is valid and means
+	 * "sort every column in ascending order".
+	 * 
+	 * @param sortColumns
+	 *            The list of sort columns to validate.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	private void validateSortColumns(final List sortColumns) {
+		if (sortColumns == null) {
+			return;
+		}
+
+		if (sortColumns.size() < 1) {
+			throw new IllegalArgumentException("Sort columns can not be empty.");
+		}
+
+		for (int ctr = 0, size = sortColumns.size(); ctr < size; ctr++) {
+			if (!(sortColumns.get(ctr) instanceof SortableColumn)) {
+				throw new IllegalArgumentException("The list of sort columns does not contain all SortColumn objects.");
+			}
+
+			final SortableColumn column = (SortableColumn) sortColumns.get(ctr);
+
+			if (column.getColumnPosition() < 0) {
+				throw new IllegalArgumentException("A sort column number is less than zero.");
+			}
+
+			if (column.getColumnOrder() != SorterConstants.ASCENDING_ORDER
+					&& column.getColumnOrder() != SorterConstants.DESCENDING_ORDER) {
+				throw new IllegalArgumentException("A sort column order is invalid.");
+			}
+		}
+	}
+
+	/**
+	 * Validates the table of sort data: it must be non-null and its rows
+	 * (spot-checked via the first row) must implement List.
+	 * 
+	 * @param table
+	 *            The table of sort data to validate.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	private void validateTable(final List table) {
+		if (table == null) {
+			throw new IllegalArgumentException("The table of sort data can not be null.");
+		}
+
+		// Fixed misleading message: this check is about the ROWS of the
+		// table, not the table object itself.
+		if (table.size() > 0 && !(table.get(0) instanceof List)) {
+			throw new IllegalArgumentException("The rows of the table must implement the List interface.");
+		}
+	}
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/FieldUtil.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/FieldUtil.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/FieldUtil.java	(revision 0)
@@ -0,0 +1,74 @@
+/*
+ * FieldUtil.java
+ *
+ * Created on February 3, 2006, 5:24 PM
+ */
+
+package net.sourceforge.jsorter;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ *
+ * @author  administrator
+ * @version
+ */
+public class FieldUtil {
+  // Per-class cache of sortable fields. ConcurrentHashMap tolerates
+  // concurrent readers/writers; a duplicate computation is harmless.
+  private static final Map<Class, Field[]> fieldMap = new ConcurrentHashMap<Class, Field[]>();
+
+  /**
+   * Sets the named field on the object, best-effort: any reflection
+   * failure is printed and otherwise ignored.
+   */
+  public static void setFieldValue(String fieldName, Object object, Object value) {
+    try {
+      Field field = getField(object.getClass(), fieldName);
+      field.set(object, value);
+    } catch (Exception ex) {
+      // Deliberately best-effort; callers treat failures as no-ops.
+      ex.printStackTrace();
+    }
+  }
+
+  /**
+   * Reads the named field from the object, best-effort: any reflection
+   * failure is printed and null is returned.
+   */
+  public static Object getFieldValue(String fieldName, Object object) {
+    try {
+      Field field = getField(object.getClass(), fieldName);
+      return field.get(object);
+    } catch (Exception ex) {
+      // Deliberately best-effort; null signals "unavailable" to callers.
+      ex.printStackTrace();
+      return null;
+    }
+  }
+
+  /**
+   * Returns the named sortable field of the class, or null if absent.
+   */
+  public static Field getField(Class clazz, String fieldName) {
+    for (Field field : getFields(clazz)) {
+      if (field.getName().equals(fieldName)) {
+        return field;
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Returns the sortable fields of the class (public, non-static,
+   * non-transient, non-final, including inherited ones), caching the
+   * result per class.
+   */
+  public static Field[] getFields(Class clazz) {
+    Field[] fields = fieldMap.get(clazz);
+    if (fields != null) return fields;
+    List<Field> list = new ArrayList<Field>();
+    getFields(clazz, list);
+
+    // List.toArray replaces the original manual element-copy loop.
+    fields = list.toArray(new Field[list.size()]);
+    fieldMap.put(clazz, fields);
+    return fields;
+  }
+
+  /**
+   * Collects the sortable fields of the class into the list, superclass
+   * fields first.
+   */
+  public static void getFields(Class clazz, List list) {
+    Class superClass = clazz.getSuperclass();
+    if (superClass != null) getFields(superClass, list);
+    for (Field field : clazz.getDeclaredFields()) {
+      int modifiers = field.getModifiers();
+      if (Modifier.isPublic(modifiers) && !Modifier.isStatic(modifiers)
+          && !Modifier.isTransient(modifiers) && !Modifier.isFinal(modifiers)) {
+        list.add(field);
+      }
+    }
+  }
+}
+
Index: ocean/src/net/sourceforge/jsorter/MethodUtil.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/MethodUtil.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/MethodUtil.java	(revision 0)
@@ -0,0 +1,38 @@
+package net.sourceforge.jsorter;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+/**
+ * 
+ * @author Jason Rutherglen
+ */
+public class MethodUtil {
+
+	public MethodUtil() {
+	}
+
+	/**
+	 * Invokes the named public method on the object via reflection.
+	 * 
+	 * @param methodName The name of the method to invoke.
+	 * @param parameters A single argument, an Object[] of arguments, or
+	 *            null for a no-argument call. Null array elements leave a
+	 *            null class in the lookup signature.
+	 * @param object The target object whose method is invoked.
+	 * @return The invoked method's return value.
+	 * @throws IllegalAccessException If the method is inaccessible.
+	 * @throws InvocationTargetException If the invoked method throws.
+	 * @throws NoSuchMethodException If no matching public method exists.
+	 */
+	public static Object call(String methodName, Object parameters, Object object) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
+		Object[] parameterArray = null;
+		Class[] classArray = null;
+		if (parameters != null) {
+			if (!parameters.getClass().isArray()) {
+				parameterArray = new Object[] { parameters };
+				classArray = new Class[] { parameters.getClass() };
+			} else {
+				// BUG FIX: the original never assigned parameterArray in
+				// this branch (guaranteed NullPointerException on the next
+				// line) and used the array's own class rather than each
+				// element's class when building the lookup signature.
+				parameterArray = (Object[]) parameters;
+				classArray = new Class[parameterArray.length];
+				for (int x = 0; x < parameterArray.length; x++) {
+					if (parameterArray[x] != null) {
+						classArray[x] = parameterArray[x].getClass();
+					}
+				}
+			}
+		} else {
+			parameterArray = new Object[0];
+			classArray = new Class[0];
+		}
+
+		Method method = object.getClass().getMethod(methodName, classArray);
+		return method.invoke(object, parameterArray);
+	}
+}
Index: ocean/src/net/sourceforge/jsorter/ReflectColumns.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/ReflectColumns.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/ReflectColumns.java	(revision 0)
@@ -0,0 +1,66 @@
+
+
+package net.sourceforge.jsorter;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ *
+ * @author  Jason Rutherglen
+ */
+public class ReflectColumns {
+  /** The columns, in the order they were added via add(). */
+  public List<SortableColumn> list = new ArrayList<SortableColumn>();
+
+  public ReflectColumns() {
+  }
+
+  /**
+   * Returns the SortReflect of each column, in column order.
+   */
+  public SortReflect[] getSortReflects() {
+    SortReflect[] sortReflects = new SortReflect[list.size()];
+    int count = 0;
+    // Columns are only ever added through add(), so every element is a
+    // SortableColumnReflect.
+    for (SortableColumn column : list) {
+      sortReflects[count++] = ((SortableColumnReflect) column).sortReflect;
+    }
+    return sortReflects;
+  }
+
+  /**
+   * Returns the live list of columns (not a copy).
+   */
+  public List<SortableColumn> getColumns() {
+    return list;
+  }
+
+  /** A SortableColumn whose sort value is extracted reflectively. */
+  public class SortableColumnReflect implements SortableColumn {
+    int position;
+    String name;
+    SortReflect sortReflect;
+    int order;
+
+    public SortableColumnReflect(int position, String name, SortReflect sortReflect, int order) {
+      this.position = position;
+      this.name = name;
+      this.sortReflect = sortReflect;
+      this.order = order;
+    }
+
+    public String getColumnName() {
+      return name;
+    }
+
+    public int getColumnOrder() {
+      return order;
+    }
+
+    public int getColumnPosition() {
+      return position;
+    }
+  }
+
+  /**
+   * Appends a new reflective column; its position is the current size of
+   * the column list.
+   */
+  public void add(String name, SortReflect sortReflect, int order) {
+    list.add(new SortableColumnReflect(list.size(), name, sortReflect, order));
+  }
+}
Index: ocean/src/net/sourceforge/jsorter/SortableBoolean.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortableBoolean.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortableBoolean.java	(revision 0)
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+/**
+ * This class is a wrapper on top of the Boolean class. Its purpose is to
+ * provide a class that allows for Boolean values to be sorted, because the
+ * Boolean class does not implement the Comparable interface.
+ * 
+ * @author Robert Breidecker
+ */
public final class SortableBoolean implements Comparable {
	/**
	 * The Boolean value being wrapped.
	 */
	private Boolean booleanValue = null;

	/**
	 * Creates a new SortableBoolean object from the input boolean value.
	 * 
	 * @param value
	 *            The boolean value used to create the new SortableBoolean.
	 */
	public SortableBoolean(final boolean value) {
		// Boolean.valueOf avoids allocating a fresh Boolean per call
		// (new Boolean(...) is deprecated).
		booleanValue = Boolean.valueOf(value);
	}

	/**
	 * Creates a new SortableBoolean object wrapping the input Boolean.
	 */
	public SortableBoolean(final Boolean value) {
		booleanValue = value;
	}

	/**
	 * Creates a new SortableBoolean object from the input String. The new
	 * object will have a true value if the input value is "true" (ignoring
	 * case), otherwise it will have a false value.
	 */
	public SortableBoolean(final String value) {
		// Boolean.valueOf(String) has the same semantics as new Boolean(String).
		booleanValue = Boolean.valueOf(value);
	}

	/**
	 * Returns the primitive boolean value for this object.
	 * 
	 * @return The primitive boolean value for this object.
	 */
	public boolean booleanValue() {
		return booleanValue.booleanValue();
	}

	/**
	 * Compares this object with the specified object for order: false is
	 * ordered before true.
	 * 
	 * @param object
	 *            The object to compare this object to; must be a non-null
	 *            SortableBoolean.
	 * 
	 * @return A negative integer, zero, or a positive integer as this object is
	 *         less than, equal to, or greater than the specified object.
	 * 
	 * @throws IllegalArgumentException
	 *             If the input is null or not a SortableBoolean.
	 */
	public int compareTo(final Object object) {
		if (object == null) {
			throw new IllegalArgumentException(
					"This object can not be compared " + "to a null value.");
		}

		if (!(object instanceof SortableBoolean)) {
			throw new IllegalArgumentException("The input object must be an "
					+ "instance of SortableBoolean.");
		}

		final boolean thisValue = booleanValue();
		final boolean thatValue = ((SortableBoolean) object).booleanValue();

		if (thisValue == thatValue) {
			return 0;
		}
		// false sorts before true.
		return thisValue ? 1 : -1;
	}

	/**
	 * Returns true if and only if the argument is a SortableBoolean object
	 * that represents the same boolean value as this object.
	 * 
	 * @param object
	 *            The object to compare this object to.
	 * 
	 * @return Returns true if the specified object represents the same value as
	 *         this object.
	 */
	public boolean equals(final Object object) {
		if (!(object instanceof SortableBoolean)) {
			// instanceof is false for null, so this also covers null input.
			return false;
		}

		// BUG FIX: the original compared the wrapped Boolean against the
		// SortableBoolean wrapper itself, so equals() was always false.
		return booleanValue.equals(((SortableBoolean) object).booleanValue);
	}

	/**
	 * Returns a hash code for this object, delegating to the wrapped Boolean
	 * so hashCode stays consistent with equals.
	 * 
	 * @return The hash code for the Boolean object wrapped by this object.
	 */
	public int hashCode() {
		return booleanValue.hashCode();
	}

	/**
	 * Returns a String object representing this object's value: "true" or
	 * "false".
	 * 
	 * @return A string representation of this object.
	 */
	public String toString() {
		return booleanValue.toString();
	}

	/**
	 * Returns the Boolean wrapped by this object.
	 * 
	 * @return The Boolean wrapped by this object.
	 */
	public Boolean getBoolean() {
		return booleanValue;
	}
}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortableColumn.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortableColumn.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortableColumn.java	(revision 0)
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+/**
+ * This class represents a column that will be sorted by Sorter, SwingSorter, or
+ * SortComparator.
+ * 
+ * @author Robert Breidecker
+ */
public interface SortableColumn {
	/**
	 * The zero-based position of the column in the data being sorted.
	 *
	 * @return The number position of the column to sort with.
	 */
	int getColumnPosition();

	/**
	 * The sort order applied to this column.
	 *
	 * @return The order to sort the column by.
	 */
	int getColumnOrder();

	/**
	 * A name or identifier for this column; implementations may return null
	 * when no name was supplied.
	 *
	 * @return The name of this column.
	 */
	String getColumnName();
}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortableColumnImpl.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortableColumnImpl.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortableColumnImpl.java	(revision 0)
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+/**
+ * This class represents a column that will be sorted by Sorter, SwingSorter, or
+ * SortComparator.
+ * 
+ * @author Robert Breidecker
+ */
+public class SortableColumnImpl implements SortableColumn {
+	/**
+	 * The number position of the column to sort with.
+	 */
+	private int columnPosition = SorterConstants.FIRST_COLUMN_POSITION;
+
+	/**
+	 * The order to sort the column by.
+	 */
+	private int columnOrder = SorterConstants.ASCENDING_ORDER;
+
+	/**
+	 * The name of the sort column.
+	 */
+	private String columnName = null;
+
+	/**
+	 * SortColumn constructor. The constructor will set both the column number
+	 * and the column order.
+	 * 
+	 * @param columnPosition
+	 *            The number position of the column in the data to sort by.
+	 *            Column numbers start at zero for the first column.
+	 * 
+	 * @param columnOrder
+	 *            This value will be used for specifying the order in which the
+	 *            column sorted by. Sort order can either be ascending or
+	 *            descending. The ASCENDING_ORDER and DESCENDING_ORDER constants
+	 *            in the Sorter class should be used for this value.
+	 */
+	public SortableColumnImpl(final int columnPosition, final int columnOrder) {
+		this(columnPosition, columnOrder, null);
+	}
+
+	/**
+	 * SortColumn constructor. The constructor will set both the column number,
+	 * the column order and the column name.
+	 * 
+	 * @param columnPosition
+	 *            The number position of the column in the data to sort by.
+	 *            Column numbers start at zero for the first column.
+	 * 
+	 * @param columnOrder
+	 *            This value will be used for specifying the order in which the
+	 *            column sorted by. Sort order can either be ascending or
+	 *            descending. The ASCENDING_ORDER and DESCENDING_ORDER constants
+	 *            in the Sorter class should be used for this value.
+	 * 
+	 * @param columnName
+	 *            A name or description for the column. This field only needs to
+	 *            be specified if you are planning to display information about
+	 *            the column to the user or want to use the name as an
+	 *            identifier for the column. If you do not want to use this
+	 *            field, you can pass null in as a value.
+	 */
+	public SortableColumnImpl(final int columnPosition, final int columnOrder,
+			final String columnName) {
+		this.columnPosition = columnPosition;
+		this.columnOrder = columnOrder;
+		this.columnName = columnName;
+	}
+
+	/**
+	 * Returns the number position of the column to sort with.
+	 * 
+	 * @return The number position of the column to sort with.
+	 */
+	public int getColumnPosition() {
+		return columnPosition;
+	}
+
+	/**
+	 * Returns the order to sort the column by.
+	 * 
+	 * @return The order to sort the column by.
+	 */
+	public int getColumnOrder() {
+		return columnOrder;
+	}
+
+	/**
+	 * Returns the name of this column.
+	 * 
+	 * @return The name of this column.
+	 */
+	public String getColumnName() {
+		return columnName;
+	}
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortableSwingComponent.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortableSwingComponent.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortableSwingComponent.java	(revision 0)
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+import java.util.List;
+
+/**
+ * This class represents a component that will be sorted by SwingSorter.
+ * 
+ * @author Robert Breidecker
+ */
public interface SortableSwingComponent {
	/**
	 * Supplies the component's data as a list of rows, where each inner list
	 * is one row of data values.
	 *
	 * @return The data list for this component.
	 */
	List getDataList();
}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortComparator.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortComparator.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortComparator.java	(revision 0)
@@ -0,0 +1,230 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * The default comparator for the Sort and SwingSorter classes. This comparator
+ * is used for sorting multiple columns.
+ * 
+ * @author Robert Breidecker
+ */
+public class SortComparator implements Comparator {
+	/**
+	 * The list of sort columns.
+	 */
+	private List sortColumns = null;
+
+	/**
+	 * Indicate how to treat null data values.
+	 */
+	private int nullBehavior = SorterConstants.NULLS_ARE_INVALID;
+
+	/**
+	 * Comparator constructor.
+	 * 
+	 * @param sortColumns
+	 *            A list of SortColumns that represent the positions of columns
+	 *            in your data that you want to be evaluated in the sort. The
+	 *            sort will start with the first column position in the list.
+	 *            Column positions not in the list will not be used in the sort.
+	 *            The number of items in this list should equal the number of
+	 *            items in the columnOrders list.
+	 * 
+	 * @param nullBehavior
+	 *            An integer representing the constant that indicates how null
+	 *            data values should behave while being sorted. See the null
+	 *            behavior constants in this class.
+	 */
+	public SortComparator(final List sortColumns, final int nullBehavior) {
+		this.sortColumns = sortColumns;
+		this.nullBehavior = nullBehavior;
+	}
+
+	/**
+	 * Overrides the java.util.Comparator compare method. This method is used
+	 * for comparing two dimensional data. See the standard JDK documention for
+	 * more information.
+	 * 
+	 * @param one
+	 *            Object - The first object used in the compare. This field
+	 *            should be a list of lists containing objects which implement
+	 *            the Comparable interface. Some of these object types include
+	 *            String, Integer, Long, Short, Float, Byte, Double and Date.
+	 *            See the standard JDK documention for Comparator for a complete
+	 *            list. The object type for each column of data must be
+	 *            consistent or a ClassCaseException will be thrown.
+	 * 
+	 * @param two
+	 *            Object - The second object used in the compare. This field
+	 *            should be a list of lists containing objects which implement
+	 *            the Comparable interface. Some of these object types include
+	 *            String, Integer, Long, Short, Float, Byte, Double and Date.
+	 *            See the standard JDK documention for Comparator for a complete
+	 *            list. The object type for each column of data must be
+	 *            consistent or a ClassCaseException will be thrown.
+	 * 
+	 * @return A negative integer, zero, or a positive integer as the first
+	 *         argument is less than, equal to, or greater than the second.
+	 * 
+	 * @exception ClassCastException
+	 *                Data in a column in not all of the same data type.
+	 */
+	public int compare(final Object one, final Object two) throws ClassCastException {
+		// The number of columns in the table.
+		int numColumns;
+
+		// The return value.
+		int rtn = 0;
+
+		// Used for counting the number of real fields in the data.
+		int ctr = 0;
+
+		// Holds the type of sort order being used.
+		int columnOrder;
+
+		// Used for counting the number of values in the sort columns.
+		int compareCtr;
+
+		// One row of data;
+		final List listOne = (List)one;
+
+		if (sortColumns == null) {
+			numColumns = listOne.size();
+		} else {
+			numColumns = sortColumns.size();
+		}
+
+		while (rtn == 0 && ctr < numColumns) {
+			// The first object to compare.
+			Comparable comparableOne;
+
+			// The second object to compare.
+			Comparable comparableTwo;
+
+			// Make sure compare column is within range.
+			if (sortColumns == null) {
+				compareCtr = ctr;
+			} else {
+				compareCtr = ((SortableColumn)sortColumns.get(ctr)).getColumnPosition();
+			}
+
+			if (compareCtr <= listOne.size()) {
+				// Another row of data;
+				final List listTwo = (List) two;
+
+				// Get the field to use in the compare.
+				if (sortColumns == null) {
+					comparableOne = (Comparable)listOne.get(compareCtr);
+					comparableTwo = (Comparable)listTwo.get(compareCtr);
+				} else {
+					comparableOne = (Comparable)listOne.get(((SortableColumn) sortColumns.get(ctr)).getColumnPosition());
+					comparableTwo = (Comparable)listTwo.get(((SortableColumn) sortColumns.get(ctr)).getColumnPosition());
+				}
+
+				// Get the sort type that goes with the sort column.
+				if (sortColumns == null) {
+					// If no sort columns were specified, then use ascending
+					// order.
+					columnOrder = SorterConstants.ASCENDING_ORDER;
+				} else {
+					columnOrder = ((SortableColumn)sortColumns.get(ctr)).getColumnOrder();
+				}
+
+				// Compare the objects.
+				if (comparableOne != null && comparableTwo != null) {
+					if (columnOrder == SorterConstants.ASCENDING_ORDER) {
+						try {
+							rtn = comparableOne.compareTo(comparableTwo);
+						} catch (ClassCastException exception) {
+							throw exception;
+						}
+					} else {
+						try {
+							rtn = comparableTwo.compareTo(comparableOne);
+						} catch (ClassCastException exception) {
+							throw exception;
+						}
+					}
+				} else {
+					if (nullBehavior == SorterConstants.NULLS_ARE_INVALID) {
+						throw new IllegalStateException("Null data values are not valid.");
+					} else if (comparableOne == null && comparableTwo != null) {
+						if (columnOrder == SorterConstants.ASCENDING_ORDER) {
+							rtn = -1;
+						} else {
+							rtn = 1;
+						}
+
+						if (nullBehavior == SorterConstants.NULLS_ARE_GREATEST) {
+							rtn = rtn * -1;
+						}
+					} else if (comparableOne != null && comparableTwo == null) {
+						if (columnOrder == SorterConstants.ASCENDING_ORDER) {
+							rtn = 1;
+						} else {
+							rtn = -1;
+						}
+
+						if (nullBehavior == SorterConstants.NULLS_ARE_GREATEST) {
+							rtn = rtn * -1;
+						}
+					} else {
+						rtn = 0;
+					}
+				}
+			}
+			ctr++;
+		}
+		return rtn;
+	}
+
+	/**
+	 * Returns the null behavior for this object.
+	 * 
+	 * @return An integer representing the constant that indicates how null data
+	 *         values should behave while being sorted. See the null behavior
+	 *         constants in this class. The default value for this class is
+	 *         NULLS_ARE_INVALID.
+	 */
+	public int getNullBehavior() {
+		return nullBehavior;
+	}
+
+	/**
+	 * Set the null behavior for this object.
+	 * 
+	 * @param nullBehavior
+	 *            An integer representing the constant that indicates how null
+	 *            data values should behave while being sorted. See the null
+	 *            behavior constants in this class.
+	 * 
+	 * @throws IllegalArgumentException
+	 *             Thrown if the null behavior value is not valid.
+	 */
+	public void setNullBehavior(final int nullBehavior) {
+		if (nullBehavior != SorterConstants.NULLS_ARE_GREATEST
+				&& nullBehavior != SorterConstants.NULLS_ARE_INVALID
+				&& nullBehavior != SorterConstants.NULLS_ARE_LEAST) {
+			throw new IllegalArgumentException("Invalid null behavior.");
+		}
+
+		this.nullBehavior = nullBehavior;
+	}
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/Sorter.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/Sorter.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/Sorter.java	(revision 0)
@@ -0,0 +1,383 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Vector;
+
+import net.sourceforge.jsorter.SortReflect.MethodException;
+
+/**
+ * Used for sorting lists of objects. This class is particularly good for
+ * sorting table data and multi-column lists.
+ * 
+ * Note: This is class is not thread safe.
+ * 
+ * @author Robert Breidecker
+ */
+public class Sorter<T> {
+	/**
+	 * Holds the data used for sorting.
+	 */
+	private List table = null;
+
+	/**
+	 * The columns to sort by.
+	 */
+	private List sortColumns = null;
+
+	/**
+	 * Indicate how to treat null data values.
+	 */
+	private int nullBehavior = SorterConstants.NULLS_ARE_INVALID;
+  
+  public List<T> getReflectResults() {
+    List<T> results = new ArrayList<T>(table.size());
+    Iterator<T> iterator = table.iterator();
+    while (iterator.hasNext()) {
+      List row = (List)iterator.next();
+      SortHolder<T> sortHolder = (SortHolder<T>)row.get(0);
+      results.add(sortHolder.object);
+    }
+    return results;
+  }
+  
+  public static Sorter createReflect(ReflectColumns reflectColumns, List objects, int nullBehavior) throws MethodException {
+    SortReflect[] sortReflects = reflectColumns.getSortReflects();
+    List table = SortHolder.getTable(sortReflects, objects, nullBehavior);
+    Sorter sorter = new Sorter(table, reflectColumns.getColumns());
+    sorter.setNullBehavior(nullBehavior);
+    return sorter;
+  }
+  
+	/**
+	 * Sorter constructor. This version of the constructor does not use any
+	 * parameters. If you use this constructor, you should use the setter
+	 * methods to set the values you want the sort routine to use.
+	 */
+	public Sorter() {
+		super();
+	}
+
+	/**
+	 * Sorter constructor. This version of the contructor takes only a table as
+	 * a parameter. Since no sort columns are specified, this will cause the
+	 * sort routine to sort the table using all of the columns in the table in
+	 * ascending order. To change the table or sort columns, you must use the
+	 * appropriate "set" methods.
+	 * 
+	 * @param table
+	 *            This field should be an object that implements the List
+	 *            interface. This list we will call the "table". Classes that
+	 *            implement the List interface include ArrayList, Vector and
+	 *            LinkedList. Each item in the table list should itself be an
+	 *            object that implements the List interface. These lists we will
+	 *            call "columns". Column lists should only contain objects that
+	 *            implement the Comparable interface. Classes that implement the
+	 *            Comparable interface include String, Integer, Long, Short,
+	 *            Float, Byte, Double and Date. See the standard JDK documention
+	 *            for Comparator for a complete list. The class type for each
+	 *            object in a column list must be consistent or a
+	 *            ClassCaseException will be thrown during the sort.
+	 */
+	public Sorter(final List table) {
+		// Set the values for the new sorter object.
+		setTable(table);
+	}
+
+	/**
+	 * Sorter constructor. This version of the contructor takes a table and a
+	 * column order as parameters. A sort column for each column in the data
+	 * will be created with the column order specified. This will cause the sort
+	 * routine to sort the table using all of the columns in the table (position
+	 * 0 and up or left to right) in the order specified in the column order. To
+	 * override the table or sort columns, you must use the appropriate "set"
+	 * methods. A vector will be used for storing the sort columns that are
+	 * dynamically created.
+	 * 
+	 * @param table
+	 *            This field should be an object that implements the List
+	 *            interface. This list we will call the "table". Classes that
+	 *            implement the List interface include ArrayList, Vector and
+	 *            LinkedList. Each item in the table list should itself be an
+	 *            object that implements the List interface. These lists we will
+	 *            call "columns". Column lists should only contain objects that
+	 *            implement the Comparable interface. Classes that implement the
+	 *            Comparable interface include String, Integer, Long, Short,
+	 *            Float, Byte, Double and Date. See the standard JDK documention
+	 *            for Comparator for a complete list. The class type for each
+	 *            object in a column list must be consistent or a
+	 *            ClassCaseException will be thrown during the sort.
+	 * 
+	 * @param columnOrder
+	 *            This value will be used for specifying the order in which the
+	 *            columns in the table are sorted. Sort order can either be
+	 *            ascending or descending. The ASCENDING_ORDER and
+	 *            DESCENDING_ORDER constants in this class should be used for
+	 *            this value.
+	 */
+	public Sorter(final List table, final int columnOrder) {
+		// Get the first row.
+		List firstRow = null;
+		if (table.size() > 0) {
+			firstRow = (List)table.get(0);
+		}
+
+		// Build the sort columns.
+		List sortColumns = null;
+		if (firstRow != null) {
+			sortColumns = new Vector();
+			final int numColumns = firstRow.size();
+			for (int columnCtr = 0; columnCtr < numColumns; columnCtr++) {
+				final SortableColumn sortColumn = new SortableColumnImpl(columnCtr, columnOrder);
+
+				sortColumns.add(sortColumn);
+			}
+		}
+
+		// Set the values for the new sorter object.
+		setTable(table);
+		setSortColumns(sortColumns);
+	}
+
+	/**
+	 * Sorter constructor. This version of the contructor takes a table and a
+	 * list of sort columns as parameters. To override the table or sort
+	 * columns, you must use the appropriate "set" methods.
+	 * 
+	 * @param table
+	 *            This field should be an object that implements the List
+	 *            interface. This list we will call the "table". Classes that
+	 *            implement the List interface include ArrayList, Vector and
+	 *            LinkedList. Each item in the table list should itself be an
+	 *            object that implements the List interface. These lists we will
+	 *            call "columns". Column lists should only contain objects that
+	 *            implement the Comparable interface. Classes that implement the
+	 *            Comparable interface include String, Integer, Long, Short,
+	 *            Float, Byte, Double and Date. See the standard JDK documention
+	 *            for Comparator for a complete list. The class type for each
+	 *            object in a column list must be consistent or a
+	 *            ClassCaseException will be thrown during the sort.
+	 * 
+	 * @param sortColumns
+	 *            Sort columns are a list of numbers specifying the colums to
+	 *            sort the table by. Each number in the list should be an
+	 *            instance of the SortColumn class represents a position of a
+	 *            column in the table. Column position start at zero just as
+	 *            they do in the standard Collection classes and Java arrays.
+	 *            When this field is null, there must be only one and only one
+	 *            column order specified. The sort routine will then sort using
+	 *            every column in the table starting from position 0 and up or
+	 *            left to right in ascending order.
+	 */
+	public Sorter(final List table, final List sortColumns) {
+		// Set the values for the new sorter object.
+		setTable(table);
+		setSortColumns(sortColumns);
+	}
+
+	/**
+	 * Returns the list of columns to sort by.
+	 * 
+	 * @return The list of columns to sort by.
+	 */
+	public List getSortColumns() {
+		return sortColumns;
+	}
+
+	/**
+	 * Returns the table of sort data.
+	 * 
+	 * @return The table of sort data.
+	 */
+	public List<T> getTable() {
+		return table;
+	}
+
+	/**
+	 * Updates the list of sort columns.
+	 * 
+	 * @param sortColumns
+	 *            Sort columns are a list of numbers specifying the colums to
+	 *            sort the table by. Each number in the list should be an
+	 *            instance of the SortColumn class represents a position of a
+	 *            column in the table. Column position start at zero just as
+	 *            they do in the standard Collection classes and Java arrays.
+	 *            When this field is null, there must be only one and only one
+	 *            column order specified. The sort routine will then sort using
+	 *            every column in the table starting from position 0 and up or
+	 *            left to right in ascending order.
+	 */
+	public void setSortColumns(final List sortColumns) {
+		validateSortColumns(sortColumns);
+		this.sortColumns = sortColumns;
+	}
+
+	/**
+	 * Updates the table of sort data.
+	 * 
+	 * @param table
+	 *            This field should be an object that implements the List
+	 *            interface. This list we will call the "table". Classes that
+	 *            implement the List interface include ArrayList, Vector and
+	 *            LinkedList. Each item in the table list should itself be an
+	 *            object that implements the List interface. These lists we will
+	 *            call "columns". Column lists should only contain objects that
+	 *            implement the Comparable interface. Classes that implement the
+	 *            Comparable interface include String, Integer, Long, Short,
+	 *            Float, Byte, Double and Date. See the standard JDK documention
+	 *            for Comparator for a complete list. The class type for each
+	 *            object in a column list must be consistent or a
+	 *            ClassCaseException will be thrown during the sort.
+	 */
+	public void setTable(final List table) {
+		validateTable(table);
+		this.table = table;
+	}
+
	/**
	 * Returns the null behavior for this object.
	 * 
	 * @return An integer constant indicating how null data values behave while
	 *         being sorted; one of the null-behavior constants in
	 *         SorterConstants. The default value for this class is
	 *         NULLS_ARE_INVALID.
	 */
	public int getNullBehavior() {
		return nullBehavior;
	}
+
+	/**
+	 * Set the null behavior for this object.
+	 * 
+	 * @param nullBehavior
+	 *            An integer representing the constant that indicates how null
+	 *            data values should behave while being sorted. See the null
+	 *            behavior constants in this class.
+	 * 
+	 * @throws IllegalArgumentException
+	 *             Thrown if the null behavior value is not valid.
+	 */
+	public void setNullBehavior(final int nullBehavior) {
+		if (nullBehavior != SorterConstants.NULLS_ARE_GREATEST
+				&& nullBehavior != SorterConstants.NULLS_ARE_INVALID
+				&& nullBehavior != SorterConstants.NULLS_ARE_LEAST) {
+			throw new IllegalArgumentException("Invalid null behavior.");
+		}
+
+		this.nullBehavior = nullBehavior;
+	}
+
+	/**
+	 * This routine sorts the table of data. The sort uses the sort columns to
+	 * determine how to sort the data.
+	 * 
+	 * @exception IllegalStateException
+	 *                The data in this class is in an invalid state.
+	 */
+	public void sort() {
+		// Sort the data.
+    List sortColumns = getSortColumns();
+    //System.out.println("sortColumns: "+sortColumns);
+		Collections.sort(table, new SortComparator(getSortColumns(), getNullBehavior()));
+	}
+  
  public List<T> sortReflect() {
    // Sort the table in place, then unwrap the rows back into the original
    // objects. NOTE(review): relies on getReflectResults(), defined elsewhere
    // in this class — presumably it extracts SortHolder.object from each row;
    // confirm against the rest of the file.
    sort();
    return getReflectResults();
  }
+  
+	/**
+	 * This routine sorts the table of data using a comparator provided in the
+	 * parameters to do the sorting. The sort columns for this class will not be
+	 * used unless the input comparator has been coded to do so.
+	 * 
+	 * @param comparator
+	 *            A comparator to use for comparing the data rows in the table
+	 *            that has already been set on this class.
+	 * 
+	 * @exception IllegalStateException
+	 *                The data in Sorter is in an invalid state.
+	 */
+	public void sort(final Comparator comparator) {
+		// Sort the data.
+		Collections.sort(getTable(), comparator);
+	}
+
+	/**
+	 * Validates the list of sort columns.
+	 * 
+	 * @param sortColumns
+	 *            The list of sort columns to validate.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	private void validateSortColumns(final List sortColumns) {
+		if (sortColumns != null) {
+			// Validate size.
+			if (sortColumns.size() < 1) {
+				throw new IllegalArgumentException("Sort columns can "+ "not be empty.");
+			}
+
+			for (int ctr = 0, size = sortColumns.size(); ctr < size; ctr++) {
+				// Validate for SortColumns.
+				if (!(sortColumns.get(ctr) instanceof SortableColumn)) {
+					throw new IllegalArgumentException("The list of sort "+ "columns does not contain all SortColumn objects.");
+				}
+
+				// Validate for greater than or equal to zero.
+				if (((SortableColumn) sortColumns.get(ctr)).getColumnPosition() < 0) {
+					throw new IllegalArgumentException("A sort column number is less than zero.");
+				}
+
+				// Validate for invalid column order.
+				if (((SortableColumn)sortColumns.get(ctr)).getColumnOrder() != SorterConstants.ASCENDING_ORDER
+						&& ((SortableColumn)sortColumns.get(ctr)).getColumnOrder() != SorterConstants.DESCENDING_ORDER) {
+					throw new IllegalArgumentException("A sort column order is invalid.");
+				}
+			}
+		}
+	}
+
+	/**
+	 * Validates the table of sort data.
+	 * 
+	 * @param table
+	 *            The table of sort data to validate.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	private void validateTable(final List table) {
+		// Validate for null.
+		if (table == null) {
+			throw new IllegalArgumentException("The table of sort data "+ "can not be null.");
+		}
+
+		// Validate for Lists.
+		if (table.size() > 0) {
+			if (!(table.get(0) instanceof List)) {
+				throw new IllegalArgumentException("The table does not implement " + "the List interface.");
+			}
+		}
+	}
+}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SorterConstants.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SorterConstants.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SorterConstants.java	(revision 0)
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
/**
 * Constants used throughout the JSorter project.
 * 
 * @author Robert Breidecker
 */
public class SorterConstants {
	/**
	 * The constant for the first column position.
	 */
	public static final int FIRST_COLUMN_POSITION = 0;

	/**
	 * The constant for ascending order.
	 */
	public static final int ASCENDING_ORDER = 1;

	/**
	 * The constant for descending order.
	 */
	public static final int DESCENDING_ORDER = 0;

	/**
	 * The constant for stating that null data values are invalid. This is the
	 * default value.
	 */
	public static final int NULLS_ARE_INVALID = 0;

	/**
	 * The constant for stating that null data values are valid and should be
	 * treated as the least of possible values when sorting.
	 */
	public static final int NULLS_ARE_LEAST = 1;

	/**
	 * The constant for stating that null data values are valid and should be
	 * treated as the greatest of possible values when sorting.
	 */
	public static final int NULLS_ARE_GREATEST = 2;

	/**
	 * Constants-only holder; not instantiable (Effective Java, Item 4).
	 */
	private SorterConstants() {
	}
}
\ No newline at end of file
Index: ocean/src/net/sourceforge/jsorter/SortHolder.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortHolder.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortHolder.java	(revision 0)
@@ -0,0 +1,90 @@
+
+
+package net.sourceforge.jsorter;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+import net.sourceforge.jsorter.SortReflect.MethodException;
+
+
+/**
+ *
+ * @author  Jason Rutherglen
+ */
+public class SortHolder<T> implements Comparable {
+  public Comparable attribute;
+  public T object;
+  public int nullBehavior = SorterConstants.NULLS_ARE_INVALID;
+  
+  public SortHolder(Comparable attribute, T object, int nullBehavior) {
+    this.attribute = attribute;
+    this.object = object;
+    this.nullBehavior = nullBehavior;
+  }
+  
+  public String toString() {
+    if (attribute == null) return "null";
+    return attribute.toString();
+  }
+  
+  public static List getTable(SortReflect[] sortReflect, Collection collection, int nullBehavior) throws MethodException {
+    List table = new ArrayList(collection.size());
+    Iterator iterator = collection.iterator();
+    while (iterator.hasNext()) {
+      Object value = iterator.next();
+      List row = new ArrayList(sortReflect.length); 
+      for (int x=0; x < sortReflect.length; x++) {
+        Object attribute = sortReflect[x].get(value);
+        if (attribute instanceof java.net.URL) {
+          attribute = ((java.net.URL)attribute).toString();
+        }
+        Comparable attributeComparable = (Comparable)attribute;
+        SortHolder sortHolder = new SortHolder(attributeComparable, value, nullBehavior);
+        row.add(sortHolder);
+      }
+      table.add(row);
+    }
+    return table;
+  }
+  
+  public int compareTo(Object obj) {
+    Comparable comparableOne = (Comparable)attribute;
+    Comparable comparableTwo = (Comparable)obj;
+    if (obj instanceof SortHolder) {
+      SortHolder sortHolder = (SortHolder)obj;
+      comparableTwo = (Comparable)sortHolder.attribute;
+    }
+    
+    int rtn;
+    
+    if (comparableOne != null && comparableTwo != null) {
+      try {
+        rtn = comparableOne.compareTo(comparableTwo);
+      } catch (ClassCastException exception) {
+        throw exception;
+      }
+    } else {
+      if (nullBehavior == SorterConstants.NULLS_ARE_INVALID) {
+        throw new IllegalStateException("Null data values are not valid.");
+      } else if (comparableOne == null && comparableTwo != null) {
+        rtn = -1;
+        
+        if (nullBehavior == SorterConstants.NULLS_ARE_GREATEST) {
+          rtn = rtn * -1;
+        }
+      } else if (comparableOne != null && comparableTwo == null) {
+        rtn = 1;
+        
+        if (nullBehavior == SorterConstants.NULLS_ARE_GREATEST) {
+          rtn = rtn * -1;
+        }
+      } else {
+        rtn = 0;
+      }
+    }
+    return rtn;
+  }
+}
Index: ocean/src/net/sourceforge/jsorter/SortReflect.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SortReflect.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SortReflect.java	(revision 0)
@@ -0,0 +1,63 @@
+package net.sourceforge.jsorter;
+
+import java.lang.reflect.InvocationTargetException;
+
+/**
+ * 
+ * @author Jason Rutherglen
+ */
+public class SortReflect {
+	public static final int METHOD = 1;
+	public static final int FIELD = 2;
+	public int type;
+	public String methodName;
+	public String fieldName;
+	public Object[] params;
+
+	private SortReflect() {
+	}
+  
+	public static class MethodException extends Exception {
+		public MethodException(Throwable throwable) {
+			super(throwable);
+		}
+	}
+	
+	public Object get(Object value) throws MethodException {
+    if (type == METHOD) {
+    	try {
+        return MethodUtil.call(methodName, params, value);
+    	} catch (InvocationTargetException invocationTargetException) {
+    		throw new MethodException(invocationTargetException.getCause());
+    	} catch (Exception exception) {
+    		throw new MethodException(exception);
+    	}
+    } else if (type == FIELD) {
+      return FieldUtil.getFieldValue(fieldName, value);
+    }
+    throw new RuntimeException("type is invalid");
+  }
+
+	public static SortReflect field(String fieldName) {
+		SortReflect sortReflect = new SortReflect();
+		sortReflect.type = FIELD;
+		sortReflect.fieldName = fieldName;
+		return sortReflect;
+	}
+
+	public static SortReflect method(String methodName) {
+		SortReflect sortReflect = new SortReflect();
+		sortReflect.type = METHOD;
+		sortReflect.methodName = methodName;
+		return sortReflect;
+	}
+
+	public static SortReflect method(String methodName, Object[] params) {
+		SortReflect sortReflect = new SortReflect();
+		sortReflect.type = METHOD;
+		sortReflect.methodName = methodName;
+		sortReflect.params = params;
+		return sortReflect;
+	}
+
+}
Index: ocean/src/net/sourceforge/jsorter/SwingSorter.java
===================================================================
--- ocean/src/net/sourceforge/jsorter/SwingSorter.java	(revision 0)
+++ ocean/src/net/sourceforge/jsorter/SwingSorter.java	(revision 0)
@@ -0,0 +1,580 @@
+/*
+ * Copyright 2002-2005 Robert Breidecker.
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package net.sourceforge.jsorter;
+
+import java.util.List;
+import java.util.Vector;
+
+import javax.swing.DefaultComboBoxModel;
+import javax.swing.DefaultListModel;
+import javax.swing.JComboBox;
+import javax.swing.JList;
+import javax.swing.JTable;
+import javax.swing.table.DefaultTableModel;
+
+/**
+ * Used for sorting data in the Swing objects JTable, JList, JComboBox,
+ * DefaultTableModel, DefaultListModel, DefaultComboBoxModel and
+ * SortableComponent.
+ * 
+ * Note: This is class is not thread safe.
+ * 
+ * @author Robert Breidecker
+ */
+public class SwingSorter {
	/**
	 * Indicate how to treat null data values. One of the SorterConstants
	 * null-behavior constants; defaults to NULLS_ARE_INVALID. Applied to every
	 * Sorter this class creates.
	 */
	private int nullBehavior = SorterConstants.NULLS_ARE_INVALID;
+
+	/**
+	 * Sorts a custom component that implements the SortableComponent interface.
+	 * This method assumes that the object type in the display column is made up
+	 * entirely of the same type and that the type implements the Comparable
+	 * interface. For example, the data in the display column is entirely made
+	 * of String or Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date.
+	 * 
+	 * @param component
+	 *            The component to be sorted in ascending order. The component
+	 *            model must implement the SortableComponent interface.
+	 */
	public void sortComponent(final SortableSwingComponent component) {
		// Default to ascending order on the first column.
		sortComponent(component, SorterConstants.ASCENDING_ORDER);
	}
+
+	/**
+	 * Sorts a custom component that implements the SortableComponent interface.
+	 * This method assumes that the object type in the display column is made up
+	 * entirely of the same type and that the type implements the Comparable
+	 * interface. For example, the data in the display column is entirely made
+	 * of String or Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date.
+	 * 
+	 * @param component
+	 *            The component to be sorted in ascending order. The component
+	 *            model must implement the SortableComponent interface.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortComponent(final SortableSwingComponent component,
+			final int columnOrder) {
+		final SortableColumn sortColumn = new SortableColumnImpl(0, columnOrder);
+		final List sortColumns = new Vector();
+		sortColumns.add(sortColumn);
+
+		sortComponent(component, sortColumns);
+	}
+
+	/**
+	 * Sorts a custom component that implements the SortableComponent interface.
+	 * This method assumes that the object type in the display column is made up
+	 * entirely of the same type and that the type implements the Comparable
+	 * interface. For example, the data in the display column is entirely made
+	 * of String or Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date.
+	 * 
+	 * @param component
+	 *            The component to be sorted in ascending order. The component
+	 *            model must implement the SortableComponent interface.
+	 * 
+	 * @param sortColumns
+	 *            Sort columns are a list of numbers specifying the columns to
+	 *            sort the table by. Each number in the list should be an
+	 *            instance of the SortColumn class represents a position of a
+	 *            column in the table. Column position start at zero just as
+	 *            they do in the standard Collection classes and Java arrays.
+	 *            When this field is null, there must be only one and only one
+	 *            column order specified. The sort routine will then sort using
+	 *            every column in the table starting from position 0 and up or
+	 *            left to right in ascending order.
+	 */
+	public void sortComponent(final SortableSwingComponent component,
+			final List sortColumns) {
+		// Create a new Sorter object.
+		final Sorter sorter = new Sorter(component.getDataList(), sortColumns);
+		sorter.setNullBehavior(nullBehavior);
+
+		// Sort the data!
+		sorter.sort();
+	}
+
+	/**
+	 * Sorts a combo box using Sorter. This method assumes that the object type
+	 * in the display column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in the
+	 * display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param comboBox
+	 *            The combo box to be sorted in ascending order. The combo box
+	 *            model must be an instance or a descendent of
+	 *            DefaultComboBoxModel.
+	 */
	public void sortComboBox(final JComboBox comboBox) {
		// Default to ascending order.
		sortComboBox(comboBox, SorterConstants.ASCENDING_ORDER);
	}
+
+	/**
+	 * Sorts a combo box using Sorter. This method assumes that the object type
+	 * in the display column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in the
+	 * display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param comboBox
+	 *            The combo box to be sorted. The combo box model must be an
+	 *            instance or a descendent of DefaultComboBoxModel or implement
+	 *            the SortableComponent.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	public void sortComboBox(final JComboBox comboBox, final int columnOrder) {
+		if (comboBox.getModel() instanceof DefaultComboBoxModel) {
+			final DefaultComboBoxModel model = (DefaultComboBoxModel) comboBox
+					.getModel();
+
+			sortComboBoxModel(model, columnOrder);
+		} else if (comboBox.getModel() instanceof SortableSwingComponent) {
+			final SortableSwingComponent component = (SortableSwingComponent) comboBox
+					.getModel();
+
+			sortComponent(component, columnOrder);
+		} else {
+			throw new IllegalArgumentException(
+					"ComboBox model must be an "
+							+ "instance of decendent of DefaultComboBoxModel or implement the "
+							+ "SortableComponent interface.");
+		}
+	}
+
+	/**
+	 * Sorts a combo box model using Sorter. This method assumes that the object
+	 * type in the display column is made up entirely of the same type and that
+	 * the type implements the Comparable interface. For example, the data in
+	 * the display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The combo box model to be sorted in ascending order. The combo
+	 *            box model must be an instance or a descendent of
+	 *            DefaultComboBoxModel.
+	 */
	public void sortComboBoxModel(final DefaultComboBoxModel model) {
		// Default to ascending order.
		sortComboBoxModel(model, SorterConstants.ASCENDING_ORDER);
	}
+
+	/**
+	 * Sorts a combo box model using Sorter. This method assumes that the object
+	 * type in the display column is made up entirely of the same type and that
+	 * the type implements the Comparable interface. For example, the data in
+	 * the display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The combo box model to be sorted. The combo box model must be
+	 *            an instance or a descendent of DefaultComboBoxModel.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortComboBoxModel(final DefaultComboBoxModel model,
+			final int columnOrder) {
+		// Create the table of sort data.
+		final List table = new Vector();
+
+		// Get the model data.
+		for (int ctr = 0, size = model.getSize(); ctr < size; ctr++) {
+			// Create a new row.
+			final List row = new Vector();
+			row.add(model.getElementAt(ctr));
+			table.add(row);
+		}
+
+		// Create a new Sorter object.
+		final Sorter sorter = new Sorter(table, columnOrder);
+		sorter.setNullBehavior(nullBehavior);
+
+		// Sort the data!
+		sorter.sort();
+
+		// Clear the model data.
+		model.removeAllElements();
+
+		// Re-add the sorted data to the model.
+		for (int ctr = 0, size = table.size(); ctr < size; ctr++) {
+			final List row = (List) table.get(ctr);
+
+			// Get the first element from the row, because a list
+			// only has one column.
+			model.addElement(row.get(0));
+		}
+	}
+
+	/**
+	 * Sorts a list model using Sorter. This method assumes that the object type
+	 * in the display column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in the
+	 * display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The list model to be sorted in ascending order. Must be a
+	 *            DefaultListModel or a descendent of DefaultListModel.
+	 */
	public void sortListModel(final DefaultListModel model) {
		// Default to ascending order.
		sortListModel(model, SorterConstants.ASCENDING_ORDER);
	}
+
+	/**
+	 * Sorts a list using Sorter. This method assumes that the object type in
+	 * the display column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in the display
+	 * column is entirely made of String or Integers. Some of the objects that
+	 * implement the Comparable interface include String, Integer, Long, Short,
+	 * Float, Byte, Double and Date.
+	 * 
+	 * @param list
+	 *            The list to be sorted in ascending order. The list model used
+	 *            must be a DefaultListBoxModel or a descendent of
+	 *            DefaultListModel.
+	 */
	public void sortList(final JList list) {
		// Default to ascending order.
		sortList(list, SorterConstants.ASCENDING_ORDER);
	}
+
+	/**
+	 * Sorts a list using Sorter. This method assumes that the object type in
+	 * the display column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in the display
+	 * column is entirely made of String or Integers. Some of the objects that
+	 * implement the Comparable interface include String, Integer, Long, Short,
+	 * Float, Byte, Double and Date.
+	 * 
+	 * @param list
+	 *            The list to be sorted. The list model used must be a
+	 *            DefaultListModel or a descendent of DefaultListModel or
+	 *            implement the SortableComponent interface.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	public void sortList(final JList list, final int columnOrder) {
+		if (list.getModel() instanceof DefaultListModel) {
+			final DefaultListModel model = (DefaultListModel) list.getModel();
+
+			sortListModel(model, columnOrder);
+		} else if (list.getModel() instanceof SortableSwingComponent) {
+			final SortableSwingComponent model = (SortableSwingComponent) list
+					.getModel();
+
+			sortComponent(model, columnOrder);
+		} else {
+			throw new IllegalArgumentException(
+					"List model must be an "
+							+ "instance of decendent of DefaultListModel or implement the "
+							+ "SortableComponent interface.");
+		}
+	}
+
+	/**
+	 * Sorts a list model using Sorter. This method assumes that the object type
+	 * in the display column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in the
+	 * display column is entirely made of String or Integers. Some of the
+	 * objects that implement the Comparable interface include String, Integer,
+	 * Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The list model to be sorted. Must be a DefaultListModel or a
+	 *            descendent of DefaultListModel.
+	 * 
+	 * @param newSortOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortListModel(final DefaultListModel model,
+			final int newSortOrder) {
+		// Create the table of sort data.
+		final Vector table = new Vector();
+
+		// Get the model data.
+		for (int ctr = 0, size = model.getSize(); ctr < size; ctr++) {
+			// Create a new row.
+			final Vector row = new Vector();
+			row.add(model.getElementAt(ctr));
+			table.add(row);
+		}
+
+		// Create a new Sorter.
+		final Sorter sorter = new Sorter(table, newSortOrder);
+		sorter.setNullBehavior(nullBehavior);
+
+		// Sort the vector data.
+		sorter.sort();
+
+		// Clear the model data.
+		model.removeAllElements();
+
+		// Re-add the sorted data to the model.
+		for (int ctr = 0, size = table.size(); ctr < size; ctr++) {
+			final List row = (List) table.get(ctr);
+
+			// Get the first element from the row, because a list
+			// only has one column.
+			model.addElement(row.get(0));
+		}
+	}
+
+	/**
+	 * Sorts a table using Sorter. This method assumes that the object type in
+	 * each column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in column one
+	 * can be entirely made of String types and column two can be entirely made
+	 * up of Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date. This method will sort using all columns in ascending order.
+	 * 
+	 * @param table
+	 *            JTable The table to be sorted. The table model of the table
+	 *            must be an instance or a descendent of DefaultTableModel or
+	 *            must implement the SortableComponent interface.
+	 */
+	public void sortTable(final JTable table) {
+		// Sort the table.
+		sortTable(table);
+	}
+
+	/**
+	 * Sorts a table using Sorter. This method assumes that the object type in
+	 * each column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in column one
+	 * can be entirely made of String types and column two can be entirely made
+	 * up of Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date. This method will sort using all columns in the order specified.
+	 * 
+	 * @param table
+	 *            JTable The table to be sorted. The table model of the table
+	 *            must be an instance or a descendent of DefaultTableModel or
+	 *            must implement the SortableComponent interface.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type of
+	 *            sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this class
+	 *            should be used.
+	 */
+	public void sortTable(final JTable table, final int columnOrder) {
+		// Sort the table!
+		sortTable(table, columnOrder);
+	}
+
+	/**
+	 * Sorts a table using Sorter. This method assumes that the object type in
+	 * each column is made up entirely of the same type and that the type
+	 * implements the Comparable interface. For example, the data in column one
+	 * can be entirely made of String types and column two can be entirely
+	 * made up of Integers. Some of the objects that implement the Comparable
+	 * interface include String, Integer, Long, Short, Float, Byte, Double and
+	 * Date.
+	 * 
+	 * @param table
+	 *            JTable The table to be sorted. The table model of the table
+	 *            must be an instance or a descendent of DefaultTableModel or
+	 *            must implement the SortableComponent interface.
+	 * 
+	 * @param sortColumns
+	 *            Sort columns are a list of numbers specifying the columns to
+	 *            sort the table by. Each number in the list should be an
+	 *            instance of the SortColumn class represents a position of a
+	 *            column in the table. Column position start at zero just as
+	 *            they do in the standard Collection classes and Java arrays.
+	 *            When this field is null, there must be only one and only one
+	 *            column order specified. The sort routine will then sort using
+	 *            every column in the table starting from position 0 and up or
+	 *            left to right in ascending order.
+	 * 
+	 * @exception IllegalArgumentException
+	 *                Input data is invalid.
+	 */
+	public void sortTable(final JTable table, final List sortColumns) {
+		if (table.getModel() instanceof DefaultTableModel) {
+			final DefaultTableModel model = (DefaultTableModel) table
+					.getModel();
+
+			sortTableModel(model, sortColumns);
+		} else if (table.getModel() instanceof SortableSwingComponent) {
+			final SortableSwingComponent model = (SortableSwingComponent) table
+					.getModel();
+
+			sortComponent(model, sortColumns);
+		} else {
+			throw new IllegalArgumentException(
+					"Table model must be an "
+							+ "instance of decendent of DefaultTableModel or implement the "
+							+ "SortableComponent interface.");
+		}
+	}
+
+	/**
+	 * Sorts a table model using Sorter. This method assumes that the object
+	 * type in each column is made up entirely of the same type and that the
+	 * type implements the Comparable interface. For example, the data in column
+	 * one can be entirely made of String types and column two can be
+	 * entirely made up of Integers. Some of the objects that implement the
+	 * Comparable interface include String, Integer, Long, Short, Float, Byte,
+	 * Double and Date.
+	 * 
+	 * @param model
+	 *            The table model to be sorted. The table model must be an
+	 *            instance or a descendent of DefaultTableModel.
+	 */
+	public void sortTableModel(final DefaultTableModel model) {
+		// Sort the data.
+		sortTableModel(model);
+	}
+
+	/**
+	 * Sorts a table model using Sorter, ordering by the first column only.
+	 * This method assumes that each column is made up entirely of objects of
+	 * the same type and that the type implements the Comparable interface.
+	 * Some of the objects that implement the Comparable interface include
+	 * String, Integer, Long, Short, Float, Byte, Double and Date.
+	 * 
+	 * @param model
+	 *            The table model to be sorted. The table model must be an
+	 *            instance or a descendent of DefaultTableModel.
+	 * 
+	 * @param columnOrder
+	 *            An integer that represents the type of ordering to be used
+	 *            when sorting by only the first column in the data. The type
+	 *            of sort can either be ascending or descending. The
+	 *            ASCENDING_ORDER and DESCENDING_ORDER constants in this
+	 *            class should be used.
+	 */
+	public void sortTableModel(final DefaultTableModel model,
+			final int columnOrder) {
+		// FIXME(review): this call recurses into this very method with the
+		// same arguments and will always throw StackOverflowError. It
+		// presumably should convert columnOrder into a single SortColumn for
+		// position 0 and delegate to the List overload -- confirm intent.
+		sortTableModel(model, columnOrder);
+	}
+
+	/**
+	 * Sorts a table model in place using Sorter. Each column is assumed to
+	 * contain objects of a single type that implements the Comparable
+	 * interface (for example String, Integer, Long, Short, Float, Byte,
+	 * Double or Date).
+	 * 
+	 * @param model
+	 *            The table model to be sorted. The table model must be an
+	 *            instance or a descendent of DefaultTableModel.
+	 * 
+	 * @param sortColumns
+	 *            List of SortColumn instances naming the columns to sort by,
+	 *            with positions counted from zero. When null, every column
+	 *            is used, left to right, in ascending order.
+	 */
+	public void sortTableModel(final DefaultTableModel model,
+			final List sortColumns) {
+		// Build a sorter over the model's backing data vector, apply this
+		// object's null handling policy, and sort in place.
+		final Sorter dataSorter = new Sorter(model.getDataVector(), sortColumns);
+		dataSorter.setNullBehavior(nullBehavior);
+		dataSorter.sort();
+	}
+
+	/**
+	 * Returns how null data values are treated while sorting.
+	 * 
+	 * @return One of the null behavior constants defined by this class. The
+	 *         default value for this class is NULLS_ARE_INVALID.
+	 */
+	public int getNullBehavior() {
+		return this.nullBehavior;
+	}
+
+	/**
+	 * Sets how null data values are treated while sorting.
+	 * 
+	 * @param nullBehavior
+	 *            One of the null behavior constants defined in
+	 *            SorterConstants.
+	 * 
+	 * @throws IllegalArgumentException
+	 *             Thrown if the null behavior value is not valid.
+	 */
+	public void setNullBehavior(final int nullBehavior) {
+		// Accept only the three recognized SorterConstants values.
+		final boolean recognized = nullBehavior == SorterConstants.NULLS_ARE_GREATEST
+				|| nullBehavior == SorterConstants.NULLS_ARE_INVALID
+				|| nullBehavior == SorterConstants.NULLS_ARE_LEAST;
+
+		if (!recognized) {
+			throw new IllegalArgumentException("Invalid null behavior.");
+		}
+
+		this.nullBehavior = nullBehavior;
+	}
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/DirectoryMap.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/DirectoryMap.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/DirectoryMap.java	(revision 0)
@@ -0,0 +1,19 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+
+import org.apache.lucene.store.Directory;
+
+/**
+ * Maps names to Lucene Directory instances, backed by a LogDirectory.
+ */
+public abstract class DirectoryMap {
+  /** Creates and returns a new Directory under the given name. */
+  public abstract Directory create(String name) throws IOException;
+  
+  /** Deletes the Directory registered under the given name. */
+  public abstract void delete(String name) throws IOException;
+  
+  /** Returns the Directory registered under the given name. */
+  public abstract Directory get(String name) throws IOException;
+  
+  /** Lists the names of all directories in this map. */
+  public abstract String[] list() throws IOException;
+  
+  // NOTE(review): both accessors below return LogDirectory; confirm that
+  // getDirectory() is not meant to return a plain Directory instead.
+  public abstract LogDirectory getDirectory();
+  
+  public abstract LogDirectory getLogDirectory();
+}
Index: ocean/src/org/apache/lucene/ocean/OceanSearcher.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/OceanSearcher.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/OceanSearcher.java	(revision 0)
@@ -0,0 +1,52 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldSelector;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.ocean.Snapshot.SnapshotSearcher;
+import org.apache.lucene.ocean.util.Constants;
+import org.apache.lucene.search.MultiSearcher;
+import org.apache.lucene.search.Searchable;
+
+/**
+ * Searches across all sub-searchers of a Snapshot, stamping each retrieved
+ * document with the id of the index it came from (under Constants.INDEXID).
+ */
+public class OceanSearcher extends MultiSearcher {
+  Snapshot snapshot;
+  private Searchable[] searchables;
+  private int[] starts;
+  
+  public OceanSearcher(Snapshot snapshot) throws IOException {
+    super(snapshot.getSearchers());
+    searchables = snapshot.getSearchers();
+    starts = snapshot.getStarts();
+    this.snapshot = snapshot;
+  }
+  
+  // Intentionally a no-op: the sub-searchers' lifetime is expected to be
+  // managed by the snapshot's reference counting, not by this searcher.
+  public void close() {
+    //snapshot.decRef();
+  }
+  
+  public Document doc(int n) throws CorruptIndexException, IOException {
+    int i = subSearcher(n);       // find searcher index
+    Document document = searchables[i].doc(n - starts[i]);   // dispatch to searcher
+    return stampIndexId(document, i);
+  }
+
+  // inherit javadoc
+  public Document doc(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
+    int i = subSearcher(n);       // find searcher index
+    Document document = searchables[i].doc(n - starts[i], fieldSelector);    // dispatch to searcher
+    return stampIndexId(document, i);
+  }
+  
+  // Adds the owning index's id to a retrieved document so callers can tell
+  // which sub-index it belongs to (previously duplicated in both doc()
+  // overloads).
+  private Document stampIndexId(Document document, int i) {
+    SnapshotSearcher snapshotSearcher = (SnapshotSearcher)searchables[i];
+    IndexID indexId = snapshotSearcher.getIndexSnapshot().getIndex().getId();
+    document.add(new Field(Constants.INDEXID, indexId.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+    return document;
+  }
+  
+  public Snapshot getSnapshot() {
+    return snapshot;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/OceanInstantiatedIndexReader.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/OceanInstantiatedIndexReader.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/OceanInstantiatedIndexReader.java	(revision 0)
@@ -0,0 +1,36 @@
+package org.apache.lucene.ocean;
+
+import java.util.Set;
+
+import org.apache.lucene.store.instantiated.InstantiatedIndex;
+import org.apache.lucene.store.instantiated.InstantiatedIndexReader;
+
+/**
+ * Simulates a multiple version IndexReader with InstantiatedIndexReader by
+ * having documents over the set maxDoc be deleted.  
+ *
+ */
+public class OceanInstantiatedIndexReader extends InstantiatedIndexReader {
+  private int maxDoc;
+  private Set<Integer> deletedDocs;
+  
+  public OceanInstantiatedIndexReader(int maxDoc, InstantiatedIndex index, Set<Integer> deletedDocs) {
+    super(index);
+    this.maxDoc = maxDoc;
+    this.deletedDocs = deletedDocs;
+  }
+  
+  public int numDocs() {
+    return maxDoc() - deletedDocs.size();
+  }
+  
+  public boolean isDeleted(int n) {
+    if (n > maxDoc) return true;
+    if (deletedDocs != null && deletedDocs.contains(n)) return true;
+    return false;
+  }
+  
+  public boolean hasDeletions() {
+    return true;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/SnapshotInfo.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/SnapshotInfo.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/SnapshotInfo.java	(revision 0)
@@ -0,0 +1,190 @@
+package org.apache.lucene.ocean;
+
+import java.math.BigDecimal;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.lucene.ocean.util.CElement;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.ocean.util.XMLUtil;
+import org.jdom.Element;
+
+/**
+ * Serializable summary of a snapshot: its id, document counts, and the
+ * per-index IndexInfo entries it spans. Converts to and from a JDOM
+ * "snapshot" element via toElement() and SnapshotInfo(Element).
+ */
+public class SnapshotInfo implements CElement, Comparable<SnapshotInfo> {
+  // Snapshot id; asserted non-null in the primary constructor.
+  private BigDecimal id;
+  // Per-index summaries keyed and ordered by IndexID.
+  private SortedList<IndexID,IndexInfo> indexInfos;
+  private int numDocs;
+  private int maxDoc;
+  private int deletedDocs;
+
+  public SnapshotInfo(BigDecimal id, int maxDoc, int numDocs, int deletedDocs) {
+    assert id != null;
+    this.id = id;
+    this.maxDoc = maxDoc;
+    this.numDocs = numDocs;
+    this.deletedDocs = deletedDocs;
+    indexInfos = new SortedList<IndexID,IndexInfo>();
+  }
+  
+  /** Returns the IndexInfo for the given index id, if registered. */
+  public IndexInfo getIndexInfo(IndexID id) {
+    return indexInfos.get(id);
+  }
+  
+  // Orders snapshots by id (BigDecimal natural order).
+  public int compareTo(SnapshotInfo other) {
+    return id.compareTo(other.id);
+  }
+  
+  /** Registers an IndexInfo under its own IndexID. */
+  public void add(IndexInfo indexInfo) {
+    indexInfos.put(indexInfo.getIndexID(), indexInfo);
+  }
+
+  // Rebuilds a SnapshotInfo from a "snapshot" element.
+  // NOTE(review): only "id" and the child "index" elements are restored;
+  // numDocs/maxDoc/deletedDocs, which toElement() writes, are left at 0 --
+  // confirm whether the XML round trip is meant to preserve them.
+  public SnapshotInfo(Element element) {
+    indexInfos = new SortedList<IndexID,IndexInfo>();
+    id = new BigDecimal(element.getAttributeValue("id"));
+    for (Element indexElement : XMLUtil.getChildren("index", element)) {
+      IndexInfo indexInfo = new IndexInfo(indexElement);
+      indexInfos.put(indexInfo.getIndexID(), indexInfo);
+    }
+  }
+
+  // NOTE(review): truncates any fractional part of the BigDecimal id.
+  public Long getSnapshotId() {
+    return id.longValue();
+  }
+
+  public BigDecimal getId() {
+    return id;
+  }
+
+  public Collection<IndexInfo> getIndexInfos() {
+    return indexInfos.values();
+  }
+
+  // Copy-style constructor; doc counts remain 0 and id is not asserted
+  // non-null here, unlike the primary constructor.
+  public SnapshotInfo(BigDecimal id, Map<IndexID,IndexInfo> indexInfos) {
+    this.id = id;
+    this.indexInfos = new SortedList<IndexID,IndexInfo>(indexInfos);
+  }
+
+  /**
+   * Per-index summary within a snapshot: identity, segment generation and
+   * the document/snapshot id ranges the index covers. Round-trips through
+   * a JDOM "index" element.
+   */
+  public static class IndexInfo implements CElement {
+    private Long snapshotId;
+    private Long id;
+    private Long segmentGeneration;
+    private String type;
+    private Integer maxDoc;
+    private Integer deletedDoc;
+    private Integer numDocs;
+    private Long minDocumentId;
+    private Long maxDocumentId;
+    private Long minSnapshotId;
+    private Long maxSnapshotId;
+
+    public IndexInfo(Long snapshotId, Long id, Long segmentGeneration, String type, int maxDoc, int numDocs, int deletedDoc, Long minDocumentId, Long maxDocumentId, Long minSnapshotId, Long maxSnapshotId) {
+      this.snapshotId = snapshotId;
+      this.id = id;
+      this.segmentGeneration = segmentGeneration;
+      this.type = type;
+      this.maxDoc = maxDoc;
+      this.numDocs = numDocs;
+      this.deletedDoc = deletedDoc;
+      this.minDocumentId = minDocumentId;
+      this.maxDocumentId = maxDocumentId;
+      this.minSnapshotId = minSnapshotId;
+      this.maxSnapshotId = maxSnapshotId;
+    }
+    
+    /** Identity of the index this info describes (id plus type). */
+    public IndexID getIndexID() {
+      return new IndexID(id, type);
+    }
+    
+    // Rebuilds an IndexInfo from an "index" element written by toElement().
+    public IndexInfo(Element element) {
+      snapshotId = XMLUtil.getAttributeLong("snapshotid", element);
+      id = XMLUtil.getAttributeLong("id", element);
+      segmentGeneration = XMLUtil.getAttributeLong("segmentGeneration", element);
+      type = XMLUtil.getAttributeString("type", element);
+      maxDoc = XMLUtil.getAttributeInteger("maxDoc", element);
+      numDocs = XMLUtil.getAttributeInteger("numDocs", element);
+      deletedDoc = XMLUtil.getAttributeInteger("deletedDoc", element);
+      minDocumentId = XMLUtil.getAttributeLong("minDocumentId", element);
+      minSnapshotId = XMLUtil.getAttributeLong("minSnapshotId", element);
+      maxDocumentId = XMLUtil.getAttributeLong("maxDocumentId", element);
+      maxSnapshotId = XMLUtil.getAttributeLong("maxSnapshotId", element);
+    }
+    
+    public Integer getNumDocs() {
+      return numDocs;
+    }
+    
+    public Long getSnapshotId() {
+      return snapshotId;
+    }
+    
+    public Long getMinSnapshotId() {
+      return minSnapshotId;
+    }
+    
+    public Long getMinDocumentId() {
+      return minDocumentId;
+    }
+    
+    public Long getMaxSnapshotId() {
+      return maxSnapshotId;
+    }
+    
+    public Long getMaxDocumentId() {
+      return maxDocumentId;
+    }
+    
+    public Integer getDeletedDoc() {
+      return deletedDoc;
+    }
+    
+    public Long getSegmentGeneration() {
+      return segmentGeneration;
+    }
+    
+    public Integer getMaxDoc() {
+      return maxDoc;
+    }
+    
+    public Long getId() {
+      return id;
+    }
+
+    public String getType() {
+      return type;
+    }
+
+    // Serializes all fields as attributes of an "index" element.
+    public Element toElement() {
+      Element element = new Element("index");
+      XMLUtil.setAttribute("snapshotid", snapshotId, element);
+      XMLUtil.setAttribute("id", id, element);
+      XMLUtil.setAttribute("segmentGeneration", segmentGeneration, element);
+      XMLUtil.setAttribute("type", type, element);
+      XMLUtil.setAttribute("maxDoc", maxDoc, element);
+      XMLUtil.setAttribute("numDocs", numDocs, element);
+      XMLUtil.setAttribute("deletedDoc", deletedDoc, element);
+      XMLUtil.setAttribute("minDocumentId", minDocumentId, element);
+      XMLUtil.setAttribute("maxDocumentId", maxDocumentId, element);
+      XMLUtil.setAttribute("minSnapshotId", minSnapshotId, element);
+      XMLUtil.setAttribute("maxSnapshotId", maxSnapshotId, element);
+      return element;
+    }
+  }
+  /**
+  public void writeTo(RandomAccessIO output) throws Exception {
+    Element element = toElement();
+    String xml = XMLUtil.outputElement(element);
+    byte[] bytes = xml.getBytes("UTF-8");
+    output.write(bytes);
+  }
+  **/
+  // Serializes this snapshot (counts plus nested index elements) as a
+  // "snapshot" element.
+  public Element toElement() {
+    Element element = new Element("snapshot");
+    XMLUtil.setAttribute("id", id, element);
+    XMLUtil.setAttribute("numDocs", numDocs, element);
+    XMLUtil.setAttribute("maxDoc", maxDoc, element);
+    XMLUtil.setAttribute("deletedDocs", deletedDocs, element);
+    for (IndexInfo indexInfo : indexInfos.values()) {
+      element.addContent(indexInfo.toElement());
+    }
+    return element;
+  }
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/DiskIndex.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/DiskIndex.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/DiskIndex.java	(revision 0)
@@ -0,0 +1,182 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * On disk index.  Only deletes are allowed to occur to the index. 
+ * There is a unique IndexReader per snapshot.
+ *
+ */
+// TODO: may be issue with reader having same version but multiple snapshots when transaction has no deletes for this index
+public class DiskIndex extends DirectoryIndex {
+	public static Logger log = Logger.getLogger(DiskIndex.class.getName());
+	private final Directory directory;
+	//private IndexInfo indexInfo;
+
+	// load existing index
+	public DiskIndex(IndexID id, Directory directory, Long snapshotId, Long segmentGeneration, TransactionSystem system) throws Exception, IndexException, IOException {
+		super(id, system);
+		assert segmentGeneration != null;
+		this.directory = directory;
+		if (directory.fileExists("writing.index")) {
+			throw new IndexNeverCompletedCopyException("index never completed copying");
+		}
+		if (IndexWriter.isLocked(directory)) {
+		  LOG.info("directory: "+directory+" locked.  being unlocked");
+		  IndexWriter.unlock(directory);
+		}
+		initialIndexReader = IndexReader.open(directory, indexDeletionPolicy);
+		long readerGeneration = initialIndexReader.getIndexCommit().getGeneration();
+		assert segmentGeneration.longValue() == readerGeneration;
+		//indexInfo = loadIndexInfo();
+		createNewSnapshot(snapshotId, initialIndexReader);
+	}
+
+	// merge indexes creating new index
+	public DiskIndex(IndexID id, Directory directory, List<? extends IndexSnapshot> indexSnapshots, TransactionSystem system) throws Exception, IOException {
+		super(id, system);
+		this.directory = directory;
+		Util.touchFile("writing.index", directory);
+		IndexReader[] indexReaders = getIndexReaders(indexSnapshots);
+		// create in ram first, is faster than copying to disk due to less hard disk head movement
+		RAMDirectory ramDirectory = new RAMDirectory();
+		IndexWriter indexWriter = new IndexWriter(ramDirectory, false, system.getDefaultAnalyzer(), true);
+		indexWriter.setMergeScheduler(new SerialMergeScheduler());
+		indexWriter.setUseCompoundFile(true);
+		indexWriter.addIndexes(indexReaders);
+		indexWriter.close();
+		Directory.copy(ramDirectory, directory, true);
+		
+		//indexInfo = new IndexInfo();
+		//indexInfo.setMaxDocumentID(maxDocumentId);
+		//indexInfo.setMaxSnapshotID(maxSnapshotId);
+		//saveIndexInfo(indexInfo);
+		directory.deleteFile("writing.index");
+		initialIndexReader = IndexReader.open(directory, indexDeletionPolicy);
+	}
+  
+	public Directory getDirectory() {
+		return directory;
+	}
+  /**
+	private IndexInfo loadIndexInfo() throws Exception {
+		String xml = Util.getString("indexinfo.xml", directory);
+		Element element = XMLUtil.parseElement(xml);
+		return new IndexInfo(element);
+	}
+
+	private void saveIndexInfo(IndexInfo indexInfo) throws Exception {
+		Element element = indexInfo.toElement();
+		String xml = XMLUtil.outputElement(element);
+		Util.save(xml, "indexinfo.xml", directory);
+	}
+
+	public static class IndexInfo implements CElement {
+		private Long maxSnapshotId;
+		private Long maxDocumentId;
+
+		public IndexInfo() {
+		}
+
+		public Long getMaxSnapshotID() {
+			return maxSnapshotId;
+		}
+
+		public void setMaxSnapshotID(Long maxSnapshotId) {
+			this.maxSnapshotId = maxSnapshotId;
+		}
+
+		public Long getMaxDocumentID() {
+			return maxDocumentId;
+		}
+
+		public void setMaxDocumentID(Long maxDocumentId) {
+			this.maxDocumentId = maxDocumentId;
+		}
+
+		public IndexInfo(Element element) throws Exception {
+		  maxSnapshotId = XMLUtil.getAttributeLong("maxSnapshotId", element);
+		  maxDocumentId = XMLUtil.getAttributeLong("maxDocumentId", element);
+		}
+
+		public Element toElement() throws Exception {
+		  Element element = new Element("indexinfo");
+		  XMLUtil.setAttribute("maxSnapshotId", maxSnapshotId, element);
+		  XMLUtil.setAttribute("maxDocumentId", maxDocumentId, element);
+		  return element;
+		}
+	}
+  **/
+	public boolean hasTooManyDeletedDocs(double percent) {
+	  assert percent <= 1.0;
+		DirectoryIndexSnapshot indexSnapshot = getLatestIndexSnapshot();
+		if (indexSnapshot != null) {
+			IndexReader indexReader = indexSnapshot.getIndexReader();
+			int maxDoc = indexReader.maxDoc();
+			int deletedDocs = maxDoc - indexReader.numDocs();
+			if (deletedDocs > (maxDoc * percent))
+				return true;
+		}
+		return false;
+	}
+
+	public class DiskIndexSnapshot extends DirectoryIndexSnapshot {
+		private Collection<String> indexReaderFileNames;
+
+		public DiskIndexSnapshot(Long snapshotID, IndexReader indexReader, Collection<String> indexReaderFileNames) throws IOException {
+			super(snapshotID, indexReader);
+			this.indexReaderFileNames = indexReaderFileNames;
+		}
+
+		//public Long getMaxSnapshotId() throws IOException {
+		//	return indexInfo.getMaxSnapshotID();
+		//}
+
+		//public Long getMaxDocumentId() throws IOException {
+		//	return indexInfo.getMaxDocumentID();
+		//}
+    
+		protected void delete() throws Exception {
+			super.delete();
+			deleteFiles();
+		}
+		
+		public boolean hasRef() throws Exception {
+			return getSystem().getSnapshots().contains(snapshotId);
+		}
+
+		public void deleteFiles() throws IOException {
+		}
+
+		public List<String> getFiles() throws Exception {
+			List<String> files = new ArrayList<String>();
+			for (String fileName : indexReaderFileNames) {
+				files.add(fileName);
+			}
+			return files;
+		}
+	}
+  
+	protected void onCommit() throws Exception {
+	}
+
+	protected DiskIndexSnapshot createNewSnapshot(Long snapshotId, IndexReader newIndexReader) throws IOException {
+	  IndexCommit indexCommit = newIndexReader.getIndexCommit();
+		Collection<String> fileNames = indexCommit.getFileNames();
+		DiskIndexSnapshot diskIndexSnapshot = new DiskIndexSnapshot(snapshotId, newIndexReader, fileNames);
+		registerSnapshot(diskIndexSnapshot);
+		return diskIndexSnapshot;
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/Transaction.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Transaction.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Transaction.java	(revision 0)
@@ -0,0 +1,344 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.ocean.Batch.MasterBatch;
+import org.apache.lucene.ocean.Index.IndexSnapshot;
+import org.apache.lucene.ocean.log.TransactionLog;
+import org.apache.lucene.store.RAMDirectory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Coordinates a multithreaded transaction commit between multiple indexes.
+ * Utilizes java.util.concurrent.CountDownLatch for synchronization between the
+ * indexes as each index operation is performed in it's own thread.
+ * 
+ */
+// TODO: add timeout to transaction
+public class Transaction {
+  public static final long TIMEOUT = 1000 * 5;
+  final static Logger LOG = LoggerFactory.getLogger(Transaction.class);
+  private Batch batch;
+  // Only assigned by the (currently commented-out) multithreaded
+  // constructor; may be null -- see ready() and failed().
+  private CountDownLatch latch;
+  private List<Failure> failures = new ArrayList<Failure>();
+  private List<DeletesResult> deletesResults = new ArrayList<DeletesResult>();
+  private List<IndexSnapshot> newIndexSnapshots = new ArrayList<IndexSnapshot>();
+  private ReentrantLock lock = new ReentrantLock();
+  private CountDownLatch goLatch = new CountDownLatch(1);
+  private Long id;
+  private Long previousId;
+  private TransactionLog transactionLog;
+  private CommitResult commitResult;
+  private TransactionSystem system;
+  
+  // ran into a bug where the tasks were not running in the thread and could not figure out why
+  // the result is this constructor that does not use multiple threads.
+  public Transaction(Long id, Long previousId, Batch batch, WriteableMemoryIndex writeableIndex, List<Index> nonWriteableIndices,
+      TransactionSystem system) throws Exception {
+    this.id = id;
+    this.previousId = previousId;
+    this.batch = batch;
+    this.transactionLog = system.getTransactionLog();
+    Deletes deletes = batch.getDeletes();
+    // Apply the batch's deletes (or a no-op commit) to every non-writeable
+    // index, synchronously and in order.
+    if (batch.hasDeletes()) {
+      for (Index index : nonWriteableIndices) {
+        new DeletesTask(deletes, index, this).call();
+      }
+    } else {
+      for (Index index : nonWriteableIndices) {
+        new NothingTask(index, this).call();
+      }
+    }
+    int numDocsAdded = 0;
+    // handle changes to writeable index, or if a ram directory create a ram
+    // index
+    if (batch.hasRAMDirectory()) {
+      new AddRamIndexDocumentsTask(batch.getRamDirectory()).call();
+    } else if (batch.hasDocuments()) {
+      Documents documents = batch.getDocuments();
+      Analyzer analyzer = batch.getAnalyzer();
+      new AddWriteableMemoryDocumentsTask(documents, analyzer, deletes, writeableIndex).call();
+      numDocsAdded += documents.size();
+    } else {
+      new DeletesTask(deletes, writeableIndex, this).call();
+    }
+    finish();
+    if (failures.size() == 0) {
+      commitResult = new CommitResult(id, deletesResults, numDocsAdded, writeableIndex.getId());
+    } else {
+      // rollback indexes
+      LOG.info("rolling back snapshot: " + id);
+      writeableIndex.rollback(id);
+      for (Index index : nonWriteableIndices) {
+        index.rollback(id);
+      }
+      throw new Exception("transaction failed " + failures);
+    }
+  }
+  /**
+  public Transaction(Long id, Long previousId, Batch batch, WriteableMemoryIndex writeableIndex, List<Index> nonWriteableIndices,
+      ExecutorService commitThreadPool, TransactionSystem system) throws Exception {
+    this.id = id;
+    this.previousId = previousId;
+    this.batch = batch;
+    this.transactionLog = system.getTransactionLog();
+    List<Callable> tasks = new ArrayList<Callable>();
+    Deletes deletes = batch.getDeletes();
+    if (batch.hasDeletes()) {
+      for (Index index : nonWriteableIndices) {
+        tasks.add(new DeletesTask(deletes, index, this));
+      }
+    } else {
+      for (Index index : nonWriteableIndices) {
+        tasks.add(new NothingTask(index, this));
+      }
+    }
+    int numDocsAdded = 0;
+    // handle changes to writeable index, or if a ram directory create a ram
+    // index
+    if (batch.hasRAMDirectory()) {
+      tasks.add(new AddRamIndexDocumentsTask(batch.getRamDirectory()));
+    } else if (batch.hasDocuments()) {
+      Documents documents = batch.getDocuments();
+      Analyzer analyzer = batch.getAnalyzer();
+      tasks.add(new AddWriteableMemoryDocumentsTask(documents, analyzer, deletes, writeableIndex));
+      numDocsAdded += documents.size();
+    } else {
+      tasks.add(new DeletesTask(deletes, writeableIndex, this));
+    }
+    latch = new CountDownLatch(tasks.size());
+    List<Future> futures = new ArrayList<Future>(tasks.size());
+    for (Callable callable : tasks) {
+      futures.add(commitThreadPool.submit(callable));
+    }
+    if (!latch.await(TIMEOUT, TimeUnit.MILLISECONDS)) {
+      failures.add(new TimeoutFailure("timed out after: " + TIMEOUT + " millis"));
+    } else {
+      goLatch.countDown();
+      // need rollback here for failures during commit
+      for (Future future : futures) {
+        try {
+          future.get();
+        } catch (ExecutionException executionException) {
+          Throwable cause = executionException.getCause();
+          LOG.info(cause.getMessage());
+        }
+      }
+    }
+    finish();
+    if (failures.size() == 0) {
+      commitResult = new CommitResult(id, deletesResults, numDocsAdded, writeableIndex.getId());
+    } else {
+      // rollback indexes
+      LOG.info("rolling back snapshot: " + id);
+      writeableIndex.rollback(id);
+      for (Index index : nonWriteableIndices) {
+        index.rollback(id);
+      }
+      throw new Exception("transaction failed " + failures);
+    }
+  }
+  **/
+  // TODO: no snapshots added
+  public List<IndexSnapshot> getNewIndexSnapshots() {
+    return newIndexSnapshots;
+  }
+
+  public Long getPreviousId() {
+    return previousId;
+  }
+
+  public CommitResult getCommitResult() {
+    assert commitResult != null; // should have thrown exception before this
+    // point
+    return commitResult;
+  }
+
+  public Long getId() {
+    return id;
+  }
+
+  /** Base class for anything that went wrong during the transaction. */
+  public abstract static class Failure extends Exception {
+    private String string;
+
+    public Failure(String message) {
+      super(message);
+      string = message;
+    }
+
+    public Failure(Throwable throwable) {
+      super(throwable);
+      string = ExceptionUtils.getFullStackTrace(throwable);
+    }
+
+    public String toString() {
+      return string;
+    }
+  }
+
+  public static class TimeoutFailure extends Failure {
+    public TimeoutFailure(String message) {
+      super(message);
+    }
+  }
+
+  public static class LogFailure extends Failure {
+    public LogFailure(Throwable throwable) {
+      super(throwable);
+    }
+  }
+
+  public static class IndexFailure extends Failure {
+    Index index;
+
+    public IndexFailure(Index index, Throwable throwable) {
+      super(throwable);
+      this.index = index;
+    }
+  }
+
+  /** Commits "nothing" to an index so it still participates in the snapshot. */
+  public static class NothingTask implements Callable {
+    private Index index;
+    private Transaction transaction;
+
+    public NothingTask(Index index, Transaction transaction) {
+      this.index = index;
+      this.transaction = transaction;
+    }
+
+    public Object call() throws Exception {
+      index.commitNothing(transaction);
+      return null;
+    }
+  }
+
+  /** Applies the batch's deletes to one index and records the result. */
+  public static class DeletesTask implements Callable<DeletesResult> {
+    private Index index;
+    private Deletes deletes;
+    private Transaction transaction;
+
+    public DeletesTask(Deletes deletes, Index index, Transaction transaction) {
+      this.deletes = deletes;
+      this.index = index;
+      this.transaction = transaction;
+    }
+
+    public DeletesResult call() throws Exception {
+      DeletesResult deletesResult = index.commitDeletes(deletes, transaction);
+      transaction.addDeletesResult(deletesResult);
+      return deletesResult;
+    }
+  }
+
+  /** Creates a new RamIndex from the batch's RAMDirectory and commits it. */
+  public class AddRamIndexDocumentsTask implements Callable<DeletesResult> {
+    private RAMDirectory ramDirectory;
+
+    public AddRamIndexDocumentsTask(RAMDirectory ramDirectory) {
+      this.ramDirectory = ramDirectory;
+    }
+
+    public DeletesResult call() throws Exception {
+      // TODO: create new ramindex
+      long indexIdNum = system.getNextRamIndexId();
+      IndexID indexId = new IndexID(indexIdNum, "ram");
+      Analyzer analyzer = batch.getAnalyzer();
+      RamIndex ramIndex = new RamIndex(indexId, id, null, ramDirectory, system);
+      // commitIndex registers the snapshot as a side effect; its return
+      // value is not needed here.
+      ramIndex.commitIndex(Transaction.this);
+      DeletesResult deletesResult = new DeletesResult(indexId);
+      addDeletesResult(deletesResult);
+      return deletesResult;
+    }
+  }
+
+  /** Applies the batch's documents and deletes to the writeable index. */
+  public class AddWriteableMemoryDocumentsTask implements Callable<DeletesResult> {
+    private Documents documents;
+    private Analyzer analyzer;
+    private Deletes deletes;
+    private WriteableMemoryIndex writeableIndex;
+
+    public AddWriteableMemoryDocumentsTask(Documents documents, Analyzer analyzer, Deletes deletes, WriteableMemoryIndex writeableIndex) {
+      this.documents = documents;
+      this.analyzer = analyzer;
+      this.deletes = deletes;
+      this.writeableIndex = writeableIndex;
+    }
+
+    public DeletesResult call() throws Exception {
+      DeletesResult deletesResult = writeableIndex.commitChanges(documents, deletes, analyzer, Transaction.this);
+      addDeletesResult(deletesResult);
+      return deletesResult;
+    }
+  }
+
+  void addDeletesResult(DeletesResult deletesResult) {
+    assert deletesResult != null;
+    deletesResults.add(deletesResult);
+  }
+
+  void failed(Index index, Throwable throwable) {
+    failures.add(new IndexFailure(index, throwable));
+    // latch is null when the single-threaded constructor was used; guard
+    // exactly as ready() does (previously this threw NullPointerException
+    // on the failure path).
+    if (latch != null)
+      latch.countDown();
+  }
+
+  void ready(Index index) {
+    if (latch != null)
+      latch.countDown();
+  }
+
+  // Collects the per-index deleted doc ids into the master batch and writes
+  // it to the transaction log; any exception becomes a LogFailure.
+  private void finish() {
+    try {
+      if (batch instanceof MasterBatch) {
+        MasterBatch masterBatch = (MasterBatch) batch;
+        if (masterBatch.hasDeletes()) {
+          int numDocIds = 0;
+          for (DeletesResult deletesResult : deletesResults) {
+            numDocIds += deletesResult.getDocIds().size();
+          }
+          long[] docIds = new long[numDocIds];
+          int x = 0;
+          for (DeletesResult deletesResult : deletesResults) {
+            for (Long docId : deletesResult.getDocIds()) {
+              docIds[x] = docId;
+              x++;
+            }
+          }
+          masterBatch.getDeletes().setDocIds(docIds);
+        }
+        transactionLog.writeMasterBatch(id, previousId, masterBatch);
+      }
+    } catch (Exception exception) {
+      failures.add(new LogFailure(exception));
+    }
+  }
+
+  public boolean go() {
+    return true;
+  }
+
+  /**
+   * public boolean go() throws InterruptedException { goLatch.await(); if
+   * (failures.size() == 0) { try { if (batch instanceof MasterBatch) {
+   * MasterBatch masterBatch = (MasterBatch) batch; if
+   * (masterBatch.hasDeletes()) { int numDocIds = 0; for (DeletesResult
+   * deletesResult : deletesResults) { numDocIds +=
+   * deletesResult.getDocIds().size(); } long[] docIds = new long[numDocIds];
+   * int x = 0; for (DeletesResult deletesResult : deletesResults) { for (Long
+   * docId : deletesResult.getDocIds()) { docIds[x] = docId; x++; } }
+   * masterBatch.getDeletes().setDocIds(docIds); }
+   * transactionLog.writeMasterBatch(id, previousId, masterBatch); } } catch
+   * (Throwable throwable) { LOG.error("", throwable); failures.add(new
+   * LogFailure(throwable)); return false; } return true; } else { return false; } }
+   */
+}
Index: ocean/src/org/apache/lucene/ocean/TransactionSystem.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/TransactionSystem.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/TransactionSystem.java	(revision 0)
@@ -0,0 +1,729 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.ocean.Batch.MasterBatch;
+import org.apache.lucene.ocean.Batch.SlaveBatch;
+import org.apache.lucene.ocean.DiskIndex.DiskIndexSnapshot;
+import org.apache.lucene.ocean.Index.IndexException;
+import org.apache.lucene.ocean.Index.IndexSnapshot;
+import org.apache.lucene.ocean.RamIndex.RamIndexSnapshot;
+import org.apache.lucene.ocean.SnapshotInfo.IndexInfo;
+import org.apache.lucene.ocean.WriteableMemoryIndex.MemoryIndexSnapshot;
+import org.apache.lucene.ocean.log.TransactionLog;
+import org.apache.lucene.ocean.log.TransactionLog.SlaveBatchIterator;
+import org.apache.lucene.ocean.util.Constants;
+import org.apache.lucene.ocean.util.LongSequence;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.search.OceanMultiThreadSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Main class for search transaction system.
+ * 
+ * Indexes on disk are immutable, they can only be deleted from or merged
+ * periodically. Merges occur in the background. There is always one active
+ * WriteableMemoryIndex that new documents are written to.
+ * 
+ * A snapshot corresponds to a transaction. Each transaction creates a new
+ * snapshot. Snapshot ids have both major and minor version represented as a
+ * decimal. The major represents the transaction. The minor increments with
+ * index merges. Transaction data is known as a batch. There is a MasterBatch
+ * and SlaveBatch. A MasterBatch is what is created by the initial call to
+ * TransactionSystem such as addDocument. A SlaveBatch is what is loaded from
+ * the transactionlog during a recovery.
+ * 
+ * IndexWriter like methods such as addDocument, updateDocument are provided.
+ * Also, commitTransaction is provided for complete transaction access.
+ * 
+ * A _documentid field is added to each document. This is an internal number for
+ * tracking a document and allows the transaction log system to be recovered
+ * properly. During recovery a delete will use the _documentid rather than the
+ * actual query or term to ensure the exact documents are deleted at the point
+ * in time the transaction occurred.
+ * 
+ * 
+ */
+// TODO: need test case of maybeMergeDiskIndices
+// TODO: add .index suffix to index directory names
+// TODO: custom efficient document serializer
+// TODO: add writeVLong writeVInt to LogDirectory output
+// TODO: not sure how to handle Document fields with a TokenStream
+// TODO: make transaction timeout a batch parameter
+// TODO: make multithreaded transactions optional
+public class TransactionSystem {
+  final static Logger LOG = LoggerFactory.getLogger(TransactionSystem.class);
+  public static final int DEFAULT_MEMORY_INDEX_MAX_DOCS = 50;
+  public static final int DEFAULT_MAYBE_MERGE_DOC_CHANGES = 2000;
+  public static final int DEFAULT_MAX_RAM_INDEXES_SIZE = 1024 * 1024 * 30;
+  public static final float DEFAULT_MERGE_DISK_DELETED_PERCENT = 0.3f;
+  private ExecutorService commitThreadPool;
+  private ExecutorService mergeThreadPool;
+  private TransactionLog transactionLog;
+  private Indexes indexes = new Indexes();
+  private ReentrantLock commitLock = new ReentrantLock();
+  Snapshots snapshots;
+  private ReentrantLock mergeIndexesLock = new ReentrantLock();
+  private int docChangesSinceLastMerge = 0;
+  private Analyzer defaultAnalyzer;
+  private int serverNumber = 0;
+  private LongSequence documentSequence;
+  private LongSequence diskIndexSequence;
+  private LongSequence ramIndexSequence;
+  private int memoryIndexMaxDocs = DEFAULT_MEMORY_INDEX_MAX_DOCS;
+  private int maybeMergeDocChanges = DEFAULT_MAYBE_MERGE_DOC_CHANGES;
+  private int maxRamIndexesSize = DEFAULT_MAX_RAM_INDEXES_SIZE;
+  private int maxDocsIndexes = -1;
+  private int maxSnapshots = 5;
+  private float mergeDiskDeletedPercent = DEFAULT_MERGE_DISK_DELETED_PERCENT;
+  private long snapshotExpiration = 20 * 1000;
+  DirectoryMap directoryMap;
+  private ArrayBlockingQueue<Runnable> mergeQueue;
+  private SearcherPolicy searcherPolicy;
+  private ExecutorService searchThreadPool;
+  private ArrayBlockingQueue<Runnable> searchQueue;
+
+  /**
+   * Creates a TransactionSystem with default settings: single-threaded
+   * searcher, default merge thresholds, no max-docs-per-index limit.
+   */
+  public TransactionSystem(TransactionLog transactionLog, Analyzer defaultAnalyzer, DirectoryMap directoryMap) throws Exception {
+    this(transactionLog, defaultAnalyzer, directoryMap, DEFAULT_MAYBE_MERGE_DOC_CHANGES, -1, DEFAULT_MEMORY_INDEX_MAX_DOCS,
+        DEFAULT_MERGE_DISK_DELETED_PERCENT, new SingleThreadSearcherPolicy());
+  }
+
+  /**
+   * Fully-parameterized constructor.  Wires the search thread pool (only
+   * when a MultiThreadSearcherPolicy is supplied), a single-threaded
+   * merge pool with a 2-slot queue, and a 5-thread commit pool, then
+   * recovers state from disk and the transaction log via load().
+   *
+   * @param maybeMergeDocChanges doc-change count that triggers a background merge check
+   * @param maxDocsIndexes max docs across ram indexes before merging to disk; -1 disables
+   * @param memoryIndexMaxDocs max docs in the writeable memory index before conversion
+   * @param mergeDiskDeletedPercent deleted-doc ratio that triggers a disk index merge
+   */
+  public TransactionSystem(TransactionLog transactionLog, Analyzer defaultAnalyzer, DirectoryMap directoryMap, int maybeMergeDocChanges,
+      int maxDocsIndexes, int memoryIndexMaxDocs, float mergeDiskDeletedPercent, SearcherPolicy searcherPolicy) throws Exception {
+    this.transactionLog = transactionLog;
+    this.defaultAnalyzer = defaultAnalyzer;
+    this.directoryMap = directoryMap;
+    this.maybeMergeDocChanges = maybeMergeDocChanges;
+    this.maxDocsIndexes = maxDocsIndexes;
+    this.memoryIndexMaxDocs = memoryIndexMaxDocs;
+    this.mergeDiskDeletedPercent = mergeDiskDeletedPercent;
+    this.searcherPolicy = searcherPolicy;
+    if (searcherPolicy instanceof MultiThreadSearcherPolicy) {
+      MultiThreadSearcherPolicy multiThreadSearcherPolicy = (MultiThreadSearcherPolicy) searcherPolicy;
+      searchQueue = new ArrayBlockingQueue<Runnable>(multiThreadSearcherPolicy.getQueueSize());
+      searchThreadPool = new ThreadPoolExecutor(multiThreadSearcherPolicy.getMinThreads(), multiThreadSearcherPolicy.getMaxThreads(),
+          1000 * 60, TimeUnit.MILLISECONDS, searchQueue);
+    }
+    // queue of 2 bounds pending merges; commitBatch only submits when empty
+    mergeQueue = new ArrayBlockingQueue<Runnable>(2);
+    mergeThreadPool = new ThreadPoolExecutor(1, 1, 1000 * 60, TimeUnit.MILLISECONDS, mergeQueue);
+    commitThreadPool = Executors.newFixedThreadPool(5);
+    snapshots = new Snapshots(this);
+    if (LOG.isInfoEnabled())
+      LOG.info("TransactionSystem");
+    load();
+  }
+
+  /**
+   * Locates the index currently holding the document with the given
+   * internal _documentid, by probing each index snapshot of the latest
+   * snapshot for a posting on that id.
+   *
+   * @param documentId internal document id (Constants.DOCUMENTID field)
+   * @return the owning index's IndexID, or null when no index contains it
+   * @throws IOException on index read failure
+   */
+  public IndexID getIndexId(Long documentId) throws IOException {
+    Term idTerm = new Term(Constants.DOCUMENTID, Util.longToEncoded(documentId));
+    Snapshot latest = snapshots.getLatestSnapshot();
+    for (IndexSnapshot candidate : latest.getIndexSnapshots()) {
+      if (candidate.getIndexReader().docFreq(idTerm) > 0) {
+        return candidate.getIndex().getId();
+      }
+    }
+    return null;
+  }
+
+  /** Sets the doc-change count that triggers a background merge check. */
+  public void setMaybeMergeDocChanges(int maybeMergeDocChanges) {
+    this.maybeMergeDocChanges = maybeMergeDocChanges;
+  }
+
+  /** Sets the max docs across ram indexes before a disk merge; -1 disables. */
+  public void setMaxDocsIndexes(int maxDocsIndexes) {
+    this.maxDocsIndexes = maxDocsIndexes;
+  }
+
+  /**
+   * Shuts down the merge pool, closes the transaction log and every
+   * registered index.  NOTE(review): commitThreadPool and
+   * searchThreadPool are never shut down here (the commit-pool shutdown
+   * is deliberately commented out) -- confirm their non-daemon threads
+   * do not keep the JVM alive after close().
+   */
+  public void close() throws IOException {
+    if (LOG.isInfoEnabled())
+      LOG.info("close");
+    mergeThreadPool.shutdown();
+    // commitThreadPool.shutdown();
+    transactionLog.close();
+    for (Index index : indexes.getIndexes()) {
+      index.close();
+    }
+  }
+
+  public OceanSearcher getSearcher() throws IOException {
+    Snapshot snapshot = snapshots.getLatestSnapshot();
+    // snapshot.incRef();
+    if (searcherPolicy instanceof SingleThreadSearcherPolicy) {
+      return new OceanSearcher(snapshot);
+    } else {
+      return new OceanMultiThreadSearcher(snapshot, searchThreadPool);
+    }
+  }
+
+  /** Deletes all documents matching the query, as its own transaction. */
+  public CommitResult deleteDocument(Query query) throws Exception {
+    List<Query> deleteByQueries = new ArrayList<Query>(1);
+    deleteByQueries.add(query);
+    return commitTransaction(null, null, null, deleteByQueries);
+  }
+
+  /** Deletes all documents matching the term, as its own transaction. */
+  public CommitResult deleteDocument(Term term) throws Exception {
+    List<Term> dterms = new ArrayList<Term>(1);
+    dterms.add(term);
+    return commitTransaction(null, null, dterms, null);
+  }
+
+  /** Updates by term using the system's default analyzer. */
+  public CommitResult updateDocument(Term term, Document document) throws Exception {
+    return updateDocument(term, document, defaultAnalyzer);
+  }
+
+  /**
+   * Replaces the documents matching term with the given document,
+   * analyzed with the supplied analyzer, in a single transaction.
+   */
+  public CommitResult updateDocument(Term term, Document document, Analyzer analyzer) throws Exception {
+    List<Term> replaceTerms = new ArrayList<Term>(1);
+    replaceTerms.add(term);
+    List<Document> replacement = new ArrayList<Document>(1);
+    replacement.add(document);
+    return commitTransaction(replacement, analyzer, replaceTerms, null);
+  }
+
+  /** Adds a document using the system's default analyzer. */
+  public CommitResult addDocument(Document document) throws Exception {
+    return addDocument(document, defaultAnalyzer);
+  }
+
+  /**
+   * Adds a single document with the supplied analyzer, as its own
+   * transaction.
+   */
+  public CommitResult addDocument(Document document, Analyzer analyzer) throws Exception {
+    List<Document> single = new ArrayList<Document>(1);
+    single.add(document);
+    return commitTransaction(single, analyzer, null, null);
+  }
+
+  /**
+   * Runs one transaction consisting of optional document adds and
+   * optional deletes (by term and/or by query), committed atomically as
+   * a single MasterBatch.
+   *
+   * @param documents documents to add, or null
+   * @param analyzer analyzer for the added documents, or null
+   * @param deleteByTerms terms whose matching documents are deleted, or null
+   * @param deleteByQueries queries whose matching documents are deleted, or null
+   * @return the CommitResult of the committed batch
+   */
+  public CommitResult commitTransaction(List<Document> documents, Analyzer analyzer, List<Term> deleteByTerms, List<Query> deleteByQueries)
+      throws Exception {
+    Deletes pendingDeletes = new Deletes();
+    if (deleteByTerms != null) {
+      for (Term term : deleteByTerms) {
+        pendingDeletes.addTerm(term);
+      }
+    }
+    if (deleteByQueries != null) {
+      for (Query deleteQuery : deleteByQueries) {
+        pendingDeletes.addQuery(deleteQuery);
+      }
+    }
+    MasterBatch batch = new MasterBatch(this);
+    if (documents != null) {
+      batch.addDocuments(new Documents(documents));
+    }
+    batch.setAnalyzer(analyzer);
+    if (pendingDeletes.hasDeletes()) {
+      batch.setDeletes(pendingDeletes);
+    }
+    return commitBatch(batch);
+  }
+
+  /** Returns the analyzer used when callers do not supply one. */
+  public Analyzer getDefaultAnalyzer() {
+    return defaultAnalyzer;
+  }
+
+  /** Returns the next id from the ram-index sequence. */
+  public long getNextRamIndexId() {
+    return ramIndexSequence.getAndIncrement();
+  }
+
+  /** Returns the next id from the disk-index sequence. */
+  public long getNextDiskIndexId() {
+    return diskIndexSequence.getAndIncrement();
+  }
+
+  /** Returns the transaction log backing this system. */
+  public TransactionLog getTransactionLog() {
+    return transactionLog;
+  }
+
+  /** Returns the pool used for commit-time index building. */
+  public ExecutorService getCommitThreadPool() {
+    return commitThreadPool;
+  }
+
+  /**
+   * Recovers system state at startup: loads the newest persisted
+   * SnapshotInfo, reopens the disk indexes it references, replays any
+   * transaction-log entries newer than those indexes into ram indexes,
+   * then installs a fresh writeable memory index and an initial
+   * Snapshot.  Fixes: boxed-Long identity comparison in the assert,
+   * deprecated Long constructors, and raw System.out debug output.
+   *
+   * @throws Exception on any recovery failure
+   */
+  public void load() throws Exception {
+    BigDecimal id;
+    Long snapshotId;
+    List<IndexSnapshot> indexSnapshots = null;
+    SnapshotInfo snapshotInfo = Snapshots.loadMaxSnapshotInfo(directoryMap.getDirectory());
+    if (LOG.isDebugEnabled())
+      LOG.debug("snapshotInfo: " + snapshotInfo);
+    long timestamp = System.currentTimeMillis();
+    if (snapshotInfo != null) {
+      id = snapshotInfo.getId();
+      snapshotId = snapshotInfo.getSnapshotId();
+      // equals, not ==: snapshotId is a boxed Long and identity only
+      // holds for values inside the Long cache
+      assert snapshotId.equals(transactionLog.getMaxId());
+      loadDiskIndexes(snapshotInfo, indexes);
+      IndexID diskMaxId = indexes.getMaxId("disk");
+      if (diskMaxId != null)
+        diskIndexSequence = new LongSequence(diskMaxId.id.longValue() + 1, 1);
+      else
+        diskIndexSequence = new LongSequence(1, 1);
+      ramIndexSequence = new LongSequence(1, 1);
+      indexSnapshots = new ArrayList<IndexSnapshot>();
+      List<Long> snapshotIds = new LinkedList<Long>();
+      // TODO: what if index directory is deleted and it is still referenced
+      for (IndexInfo indexInfo : snapshotInfo.getIndexInfos()) {
+        if (indexInfo.getType().equals("disk")) {
+          DiskIndex diskIndex = (DiskIndex) indexes.get(indexInfo.getIndexID());
+          if (diskIndex != null) {
+            IndexSnapshot indexSnapshot = diskIndex.getIndexSnapshot(snapshotInfo.getSnapshotId());
+            indexSnapshots.add(indexSnapshot);
+            snapshotIds.add(indexSnapshot.getMaxSnapshotId());
+          }
+        }
+      }
+      Long maxDiskIndexSnapshotId = Util.max(snapshotIds);
+      Long fromSnapshotId = null;
+      if (LOG.isDebugEnabled())
+        LOG.debug("maxDiskIndexSnapshotId: " + maxDiskIndexSnapshotId);
+      if (maxDiskIndexSnapshotId != null) {
+        // replay starts just after the newest transaction already on disk
+        fromSnapshotId = Long.valueOf(maxDiskIndexSnapshotId.longValue() + 1);
+      }
+      List<RamIndexSnapshot> ramIndexSnapshots = runTransactionsNotInIndex(fromSnapshotId);
+      if (LOG.isDebugEnabled())
+        LOG.debug("ramIndexSnapshots: " + ramIndexSnapshots);
+      // TODO: verify all snapshots have same id
+      indexSnapshots.addAll(ramIndexSnapshots);
+      // document ids resume after the highest id seen in any snapshot,
+      // partitioned per server through the server sequence
+      List<Long> documentIds = new ArrayList<Long>(indexSnapshots.size());
+      for (IndexSnapshot indexSnapshot : indexSnapshots) {
+        documentIds.add(indexSnapshot.getMaxDocumentId());
+      }
+      Long maxDocumentId = Util.max(documentIds);
+      if (maxDocumentId != null) {
+        Long documentSequenceId = Util.getNextServerSequence(maxDocumentId, serverNumber);
+        documentSequence = new LongSequence(documentSequenceId, 100);
+      } else {
+        documentSequence = new LongSequence(serverNumber, 100);
+      }
+    } else {
+      // cold start: no persisted snapshot, all sequences begin fresh
+      snapshotId = Long.valueOf(0);
+      id = new BigDecimal(snapshotId.toString());
+      documentSequence = new LongSequence(serverNumber, 100);
+      diskIndexSequence = new LongSequence(1, 1);
+      ramIndexSequence = new LongSequence(1, 1);
+    }
+    WriteableMemoryIndex writeableMemoryIndex = newWriteableMemoryIndex();
+    MemoryIndexSnapshot writeableSnapshot = writeableMemoryIndex.setSnapshot(snapshotId);
+    // NOTE(review): on the recovery branch indexSnapshots does not include
+    // writeableSnapshot here, unlike commitBatch -- confirm the Snapshot
+    // constructor accounts for the writeable snapshot itself
+    if (indexSnapshots == null) {
+      indexSnapshots = new ArrayList<IndexSnapshot>();
+      indexSnapshots.add(writeableSnapshot);
+    }
+    Snapshot snapshot = new Snapshot(id, writeableSnapshot, indexSnapshots, this, timestamp);
+    snapshots.add(snapshot, false);
+    deleteUnreferencedSnapshots();
+    new MaybeMergeIndices().run();
+  }
+
+  /**
+   * Deletes persisted snapshotinfo files no longer referenced by the
+   * in-memory Snapshots (after pruning snapshots beyond maxSnapshots /
+   * snapshotExpiration).  Fixes: logs delete failures with the full
+   * stack trace instead of only the message, and replaces System.out
+   * debug output with SLF4J.
+   *
+   * @throws Exception if pruning or listing snapshot ids fails; failure
+   *           to delete an individual file is logged and skipped
+   */
+  private void deleteUnreferencedSnapshots() throws Exception {
+    snapshots.remove(maxSnapshots, snapshotExpiration);
+    LogDirectory directory = directoryMap.getDirectory();
+    List<BigDecimal> ids = Snapshots.loadSnapshotInfoIds(directory);
+    for (BigDecimal id : ids) {
+      if (!snapshots.contains(id)) {
+        // not referenced, delete it
+        String fileName = Snapshot.getFileName(id);
+        try {
+          directory.deleteFile(fileName);
+          if (LOG.isDebugEnabled())
+            LOG.debug("deleteFile: " + fileName + " id: " + Snapshot.formatId(id));
+        } catch (Exception exception) {
+          // best effort: keep the cause so the failure is diagnosable,
+          // then continue with the remaining files
+          LOG.error("could not delete snapshot file: " + fileName, exception);
+        }
+      }
+    }
+  }
+
+  /** Returns the live collection of all registered indexes. */
+  public Indexes getIndexes() {
+    return indexes;
+  }
+
+  /** Returns the snapshot registry for this system. */
+  public Snapshots getSnapshots() {
+    return snapshots;
+  }
+
+  /**
+   * Replays transactions from the transaction log that are not already
+   * reflected in the Lucene indices.  Added documents are streamed into
+   * a single new RAMDirectory via IndexCreator; batches that already
+   * carry a pre-built RAMDirectory are collected directly.  Deletes are
+   * accumulated and applied to every resulting RamIndex at the end.
+   *
+   * @param startSnapshotId first log id to replay, or null for all
+   * @return ram snapshots for the replayed transactions; empty when the
+   *         log had nothing to replay or only deletes
+   * @throws Exception on replay failure
+   */
+  private List<RamIndexSnapshot> runTransactionsNotInIndex(Long startSnapshotId) throws Exception, IOException {
+    LOG.info("startSnapshotId: " + startSnapshotId);
+    SlaveBatchIterator iterator = transactionLog.getSlaveBatchIterator(startSnapshotId);
+    if (!iterator.hasNext())
+      return new ArrayList<RamIndexSnapshot>();
+    try {
+      long indexIdNum = ramIndexSequence.getAndIncrement();
+      IndexID indexId = new IndexID(indexIdNum, "ram");
+      RAMDirectory ramDirectory = new RAMDirectory();
+      ExecutorService threadPool = getCommitThreadPool();
+      IndexCreator indexCreator = new IndexCreator(ramDirectory, Long.MAX_VALUE, 4, defaultAnalyzer, threadPool);
+      BlockingQueue<IndexCreator.Add> addQueue = new ArrayBlockingQueue<IndexCreator.Add>(4000, true);
+      // deletes are recorded and run against all of the snapshots at the end
+      List<Deletes> deletesList = new ArrayList<Deletes>();
+      indexCreator.start(addQueue);
+      List<RAMDirectory> ramDirectories = new ArrayList<RAMDirectory>();
+      int docCount = 0;
+      while (iterator.hasNext()) {
+        SlaveBatch slaveBatch = iterator.next(true, true);
+        Analyzer analyzer = slaveBatch.getAnalyzer();
+        if (slaveBatch.hasDocuments()) {
+          Documents documents = slaveBatch.getDocuments();
+          for (Document document : documents) {
+            addQueue.add(new IndexCreator.Add(document));
+            docCount++;
+          }
+        } else if (slaveBatch.hasRAMDirectory()) {
+          // batch was committed as a pre-built directory; reuse it as-is
+          ramDirectories.add(slaveBatch.getRamDirectory());
+        }
+        if (slaveBatch.hasDeletes()) {
+          deletesList.add(slaveBatch.getDeletes());
+        }
+      }
+      LOG.info("docCount: " + docCount);
+      // if zero means all the transactions were deletes
+      if (docCount == 0) {
+        return new ArrayList<RamIndexSnapshot>();
+      }
+      indexCreator.create();
+      ramDirectories.add(ramDirectory);
+      Long snapshotId = transactionLog.getMaxId();
+      List<RamIndexSnapshot> indexSnapshots = new ArrayList<RamIndexSnapshot>(ramDirectories.size());
+      for (RAMDirectory rd : ramDirectories) {
+        // NOTE(review): every RamIndex here shares the same indexId --
+        // confirm duplicate ids are acceptable to Indexes.add
+        RamIndex ramIndex = new RamIndex(indexId, snapshotId, deletesList, rd, this);
+        indexes.add(ramIndex);
+        RamIndexSnapshot indexSnapshot = (RamIndexSnapshot) ramIndex.getIndexSnapshot(snapshotId);
+        assert indexSnapshot != null;
+        indexSnapshots.add(indexSnapshot);
+      }
+      // TODO: run maybe merge here
+      return indexSnapshots;
+    } finally {
+      if (iterator != null)
+        iterator.close();
+    }
+  }
+
+  /**
+   * Scans the directory map for "&lt;id&gt;_index" directories and reopens
+   * each one still referenced by the given SnapshotInfo as a DiskIndex.
+   * Unreferenced or unreadable indexes are logged (their deletion is
+   * currently disabled) and skipped.  Fixes: deprecated Long
+   * constructor and the empty-message LOG.error on the outer catch.
+   *
+   * @param snapshotInfo persisted snapshot metadata to validate against
+   * @param indices collection the reopened DiskIndexes are added to
+   */
+  private void loadDiskIndexes(SnapshotInfo snapshotInfo, Indexes indices) throws Exception, IOException {
+    for (String name : directoryMap.list()) {
+      try {
+        if (name.endsWith("_index")) {
+          String idString = StringUtils.split(name, "_")[0];
+          Directory directory = directoryMap.get(idString);
+          Long indexIdNum = Long.valueOf(idString);
+          IndexID indexId = new IndexID(indexIdNum, "disk");
+          try {
+            IndexInfo indexInfo = snapshotInfo.getIndexInfo(indexId);
+            if (indexInfo != null) {
+              Long snapshotId = snapshotInfo.getSnapshotId();
+              Long segmentGeneration = indexInfo.getSegmentGeneration();
+              DiskIndex diskIndex = new DiskIndex(indexId, directory, snapshotId, segmentGeneration, this);
+              indices.add(diskIndex);
+            } else {
+              LOG.info("index no longer referenced deleting: " + name);
+              // directoryMap.delete(name);
+            }
+          } catch (IndexException indexException) {
+            LOG.error("index not ready, deleting: " + name, indexException);
+            //directoryMap.delete(name);
+          } catch (IOException ioException) {
+            LOG.error("index not ready, deleting: " + name, ioException);
+            //directoryMap.delete(name);
+          }
+        }
+      } catch (Exception exception) {
+        // if exception simply skip over the index
+        LOG.error("skipping index: " + name, exception);
+      }
+    }
+  }
+
+  /** Creates an empty MasterBatch bound to this system for manual transaction building. */
+  public MasterBatch createMasterBatch() throws Exception {
+    return new MasterBatch(this);
+  }
+
+  /**
+   * Background merge task run on the single-threaded merge pool.  Each
+   * run, under mergeIndexesLock, performs three passes against the
+   * then-latest snapshot: convert a full writeable memory index to a
+   * RamIndex, merge ram indexes into a new DiskIndex when they grow too
+   * large, and re-merge disk indexes with too many deleted docs.
+   */
+  public class MaybeMergeIndices implements Runnable {
+    public MaybeMergeIndices() {
+    }
+
+    public void run() {
+      if (LOG.isDebugEnabled())
+        LOG.debug("MaybeMergeIndices");
+      mergeIndexesLock.lock();
+      try {
+        // re-fetch the latest snapshot before each pass because the
+        // previous pass may have installed a new one
+        Snapshot snapshot = snapshots.getLatestSnapshot();
+        maybeMergeWriteable(snapshot);
+        snapshot = snapshots.getLatestSnapshot();
+        maybeMergeRamIndexes(snapshot);
+        snapshot = snapshots.getLatestSnapshot();
+        maybeMergeDiskIndexes(snapshot);
+      } catch (Throwable throwable) {
+        LOG.error("", throwable);
+      } finally {
+        mergeIndexesLock.unlock();
+      }
+    }
+
+    /**
+     * If the existing ram indexes are above maxRamIndexesSize, then they are
+     * merged and a new disk index is created from them. Or if the number of
+     * documents exceeds maxDocsIndexes.
+     * 
+     * @param snapshot
+     * @throws Exception
+     */
+    private void maybeMergeRamIndexes(Snapshot snapshot) throws Exception {
+      long size = 0;
+      int numDocs = 0;
+      List<RamIndexSnapshot> ramIndexSnapshots = snapshot.getRamIndexSnapshots();
+      for (RamIndexSnapshot ramIndexSnapshot : ramIndexSnapshots) {
+        RamIndex ramIndex = (RamIndex) ramIndexSnapshot.getIndex();
+        size += ramIndex.getSize();
+        numDocs += ramIndexSnapshot.getIndexReader().maxDoc();
+      }
+      // if merging based on number of docs
+      if (maxDocsIndexes > 0 && numDocs > maxDocsIndexes) {
+        if (LOG.isDebugEnabled())
+          LOG.debug("executeMerge because numDocs: " + numDocs + " more than maxDocsIndexes: " + maxDocsIndexes);
+        executeMerge(ramIndexSnapshots, snapshot);
+      } else if (size > maxRamIndexesSize) {
+        // merging based on size of ram indexes
+        executeMerge(ramIndexSnapshots, snapshot);
+      }
+    }
+
+    /**
+     * Merges any disk index whose deleted-doc ratio exceeds
+     * mergeDiskDeletedPercent into a fresh disk index.
+     *
+     * @param snapshot snapshot whose disk indices are examined
+     * @throws Exception on merge failure
+     */
+    private void maybeMergeDiskIndexes(Snapshot snapshot) throws Exception {
+      Long snapshotId = snapshot.getSnapshotId();
+      List<IndexSnapshot> indexSnapshotsToMerge = new ArrayList<IndexSnapshot>();
+      for (DiskIndex diskIndex : snapshot.getDiskIndices()) {
+        DiskIndexSnapshot indexSnapshot = (DiskIndexSnapshot) diskIndex.getIndexSnapshot(snapshotId);
+        if (diskIndex.hasTooManyDeletedDocs(mergeDiskDeletedPercent)) {
+          indexSnapshotsToMerge.add(indexSnapshot);
+        }
+      }
+      if (indexSnapshotsToMerge.size() > 0) {
+        executeMerge(indexSnapshotsToMerge, snapshot);
+      }
+    }
+
+    /**
+     * converts current memorywriteableindex to a ramindex
+     * 
+     * @param snapshot
+     * @throws Exception
+     */
+    private void maybeMergeWriteable(Snapshot snapshot) throws Exception {
+      MemoryIndexSnapshot writeableIndexSnapshot = snapshot.getWriteableSnapshot();
+      int maxDoc = writeableIndexSnapshot.getIndexReader().maxDoc();
+      if (maxDoc >= memoryIndexMaxDocs) {
+        if (LOG.isInfoEnabled())
+          LOG.info("merge writeable");
+        // commitLock serializes against commitBatch installing snapshots
+        commitLock.lock();
+        try {
+          long indexIdNum = ramIndexSequence.getAndIncrement();
+          IndexID indexId = new IndexID(indexIdNum, "memory");
+          RamIndex ramIndex = new RamIndex(indexId, writeableIndexSnapshot);
+          indexes.add(ramIndex);
+          IndexSnapshot ramIndexSnapshot = ramIndex.getLatestIndexSnapshot();
+          assert ramIndexSnapshot.maxDoc() == maxDoc;
+          Snapshot currentSnapshot = snapshots.getLatestSnapshot();
+          List<IndexID> removeIndexIds = new ArrayList<IndexID>();
+          removeIndexIds.add(writeableIndexSnapshot.getIndex().getId());
+
+          // create new WriteableMemoryIndex for the new snapshot because the
+          // one that was there
+          // has been converted to a RamIndex
+          WriteableMemoryIndex newWriteableMemoryIndex = newWriteableMemoryIndex();
+          MemoryIndexSnapshot newMemoryIndexSnapshot = newWriteableMemoryIndex.setSnapshot(snapshot.getSnapshotId());
+          Snapshot newSnapshot = currentSnapshot.createMinor(removeIndexIds, newMemoryIndexSnapshot, ramIndex.getLatestIndexSnapshot());
+          snapshots.add(newSnapshot, true);
+          if (LOG.isInfoEnabled())
+            LOG.info("merge writeable completed");
+        } finally {
+          commitLock.unlock();
+        }
+      }
+    }
+
+    /**
+     * Takes snapshots and makes a DiskIndex.
+     * 
+     * @param indexSnapshots
+     * @param snapshot
+     * @throws Exception
+     */
+    private void executeMerge(List<? extends IndexSnapshot> indexSnapshots, Snapshot snapshot) throws Exception {
+      if (indexSnapshots.size() == 0)
+        return;
+      Long snapshotId = snapshot.getSnapshotId();
+      Long indexIdNum = diskIndexSequence.getAndIncrement();
+      IndexID indexId = new IndexID(indexIdNum, "disk");
+      Directory directory = directoryMap.create(indexIdNum+"_index");
+      // initial creation happens outside of commitlock because it is the most
+      // time consuming
+      // the deletes occur inside the commitlock as they are faster
+      DiskIndex newDiskIndex = new DiskIndex(indexId, directory, indexSnapshots, TransactionSystem.this);
+      indexes.add(newDiskIndex);
+      commitLock.lock();
+      try {
+        // TODO: probably can just save deletes from the batches
+        List<SlaveBatch> deleteOnlySlaveBatches = new ArrayList<SlaveBatch>();
+        Snapshot currentSnapshot = snapshots.getLatestSnapshot();
+        Long latestSnapshotId = currentSnapshot.getSnapshotId();
+        // catch up on deletes committed while the merge ran unlocked
+        if (!snapshotId.equals(latestSnapshotId)) {
+          SlaveBatchIterator iterator = transactionLog.getSlaveBatchIterator(snapshotId);
+          while (iterator.hasNext()) {
+            SlaveBatch slaveBatch = iterator.next(false, true);
+            deleteOnlySlaveBatches.add(slaveBatch);
+          }
+        }
+        IndexSnapshot newIndexSnapshot = newDiskIndex.initialize(latestSnapshotId, deleteOnlySlaveBatches, TransactionSystem.this);
+        List<IndexID> removeIndexIds = new ArrayList<IndexID>();
+        for (IndexSnapshot indexSnapshot : indexSnapshots) {
+          Index index = indexSnapshot.getIndex();
+          removeIndexIds.add(index.getId());
+        }
+        StringBuilder builder = new StringBuilder();
+        Iterator<? extends IndexSnapshot> iterator = indexSnapshots.iterator();
+        while (iterator.hasNext()) {
+          IndexSnapshot indexSnapshot = iterator.next();
+          builder.append(indexSnapshot.getIndex().getId().toString());
+          if (iterator.hasNext()) {
+            builder.append(", ");
+          }
+        }
+        builder.append(" ").append(" indexes written to disk index: ").append(indexId.toString());
+        LOG.info(builder.toString());
+        Snapshot newSnapshot = currentSnapshot.createMinor(removeIndexIds, newIndexSnapshot);
+        snapshots.add(newSnapshot, true);
+      } finally {
+        commitLock.unlock();
+      }
+    }
+  }
+
+  /**
+   * Commits a batch to the transaction log and applies it to the
+   * in-memory indexes, producing a new Snapshot under commitLock.  For
+   * a MasterBatch the internal _documentid/_snapshotid fields are
+   * assigned here; a SlaveBatch (log replay) reuses its recorded id.
+   * May schedule a background merge when enough doc changes accumulate.
+   * Fixes: boxed-Long identity comparison in the assert, System.out
+   * debug output, and the LOG.error that dropped the failure cause.
+   *
+   * @param batch the batch to commit (MasterBatch or SlaveBatch)
+   * @return CommitResult describing the applied changes
+   * @throws Exception if the transaction fails
+   */
+  CommitResult commitBatch(Batch batch) throws Exception, IOException {
+    batch.close();
+    commitLock.lock();
+    try {
+      Long snapshotId = null;
+      if (batch instanceof SlaveBatch) {
+        snapshotId = ((SlaveBatch) batch).getId();
+      } else {
+        MasterBatch masterBatch = (MasterBatch) batch;
+        snapshotId = transactionLog.getNextId();
+        if (batch.hasDocuments()) {
+          Documents documents = batch.getDocuments();
+          for (Document document : documents) {
+            Long documentId = documentSequence.getAndIncrement();
+            Util.setValue(Constants.DOCUMENTID, documentId, document);
+            Util.setValue(Constants.SNAPSHOTID, snapshotId, document);
+          }
+          // large batches are pre-built into a RAMDirectory rather than
+          // added document by document to the memory index
+          if (documents.size() >= memoryIndexMaxDocs) {
+            RAMDirectory ramDirectory = createRamDirectory(documents, batch.getAnalyzer());
+            masterBatch.setRAMDirectory(ramDirectory);
+          }
+        }
+      }
+      // ExecutorService threadPool = getCommitThreadPool();
+      Snapshot currentSnapshot = snapshots.getLatestSnapshot();
+      MemoryIndexSnapshot writeableIndexSnapshot = currentSnapshot.getWriteableSnapshot();
+      WriteableMemoryIndex writeableMemoryIndex = (WriteableMemoryIndex) writeableIndexSnapshot.getIndex();
+      List<Index> nonWriteableIndices = currentSnapshot.getDeleteOnlyIndices();
+      Transaction transaction = null;
+      CommitResult commitResult = null;
+      try {
+        Long previousId = transactionLog.getPreviousId(snapshotId);
+        transaction = new Transaction(snapshotId, previousId, batch, writeableMemoryIndex, nonWriteableIndices, this);
+        commitResult = transaction.getCommitResult();
+      } catch (Exception exception) {
+        // log with the cause so the failure is diagnosable even when the
+        // rethrown exception is swallowed upstream
+        LOG.error("transaction failed", exception);
+        throw new Exception("transaction failed", exception);
+      }
+      List<IndexSnapshot> indexSnapshots = new ArrayList<IndexSnapshot>(nonWriteableIndices.size() + 1);
+      for (Index index : nonWriteableIndices) {
+        IndexSnapshot snapshot = index.getIndexSnapshot(snapshotId);
+        assert snapshot != null;
+        indexSnapshots.add(snapshot);
+      }
+      for (IndexSnapshot newIndexSnapshot : transaction.getNewIndexSnapshots()) {
+        assert newIndexSnapshot != null;
+        indexes.add(newIndexSnapshot.getIndex());
+        indexSnapshots.add(newIndexSnapshot);
+      }
+      // equals, not ==: boxed Long identity only holds for cached values
+      assert snapshotId.equals(transaction.getId());
+      MemoryIndexSnapshot newWriteableSnapshot = writeableMemoryIndex.getIndexSnapshot(snapshotId);
+      assert newWriteableSnapshot != null;
+      indexSnapshots.add(newWriteableSnapshot);
+
+      Snapshot newSnapshot = new Snapshot(snapshotId, 0, newWriteableSnapshot, indexSnapshots, this, System.currentTimeMillis());
+      snapshots.add(newSnapshot, true);
+      docChangesSinceLastMerge += commitResult.getNumDocChanges();
+      int writeableMaxDoc = writeableMemoryIndex.getLatestIndexSnapshot().getIndexReader().maxDoc();
+      if (docChangesSinceLastMerge > maybeMergeDocChanges || writeableMaxDoc >= memoryIndexMaxDocs) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("docChangesSinceLastMerge: " + docChangesSinceLastMerge + " maybeMergeDocChanges: " + maybeMergeDocChanges);
+          LOG.debug("writeableMaxDoc: " + writeableMaxDoc + " memoryIndexMaxDocs: " + memoryIndexMaxDocs);
+        }
+        // only submit if nothing is currently executing or pending
+        if (mergeQueue.size() == 0) {
+          mergeThreadPool.submit(new MaybeMergeIndices());
+          docChangesSinceLastMerge = 0;
+        }
+      }
+      deleteUnreferencedSnapshots();
+      return commitResult;
+    } finally {
+      commitLock.unlock();
+    }
+  }
+
+  /**
+   * Builds a new RAMDirectory containing the given documents, indexed
+   * with the supplied analyzer via IndexCreator on the commit pool.
+   *
+   * @param documents documents to index
+   * @param analyzer analyzer applied to each document
+   * @return the populated RAMDirectory
+   * @throws Exception on index-build failure
+   */
+  RAMDirectory createRamDirectory(Documents documents, Analyzer analyzer) throws Exception {
+    RAMDirectory ramDirectory = new RAMDirectory();
+    ExecutorService threadPool = getCommitThreadPool();
+    IndexCreator indexCreator = new IndexCreator(ramDirectory, Long.MAX_VALUE, 4, analyzer, threadPool);
+    BlockingQueue<IndexCreator.Add> addQueue = new ArrayBlockingQueue<IndexCreator.Add>(1000, true);
+    indexCreator.start(addQueue);
+    for (Document document : documents) {
+      addQueue.add(new IndexCreator.Add(document));
+    }
+    indexCreator.create();
+    return ramDirectory;
+  }
+
+  /**
+   * Creates, registers, and returns a fresh WriteableMemoryIndex with
+   * the next id from the ram-index sequence.
+   */
+  private WriteableMemoryIndex newWriteableMemoryIndex() throws Exception {
+    Long indexIdNum = ramIndexSequence.getAndIncrement();
+    IndexID indexId = new IndexID(indexIdNum, "memory");
+    WriteableMemoryIndex writeableMemoryIndex = new WriteableMemoryIndex(indexId, this);
+    indexes.add(writeableMemoryIndex);
+    return writeableMemoryIndex;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/DirectoryIndex.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/DirectoryIndex.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/DirectoryIndex.java	(revision 0)
@@ -0,0 +1,276 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexDeletionPolicy;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.ocean.Batch.SlaveBatch;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.store.Directory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Abstract class used by RamIndex and DiskIndex.  Assumes a org.apache.lucene.store.Directory
+ * based IndexReader implementation.  Tracks one DirectoryIndexSnapshot per committed
+ * snapshot id and installs a custom IndexDeletionPolicy so Lucene commit points are
+ * only deleted once no snapshot references their generation anymore.
+ */
+public abstract class DirectoryIndex extends Index {
+  final static Logger LOG = LoggerFactory.getLogger(DirectoryIndex.class);
+	// Snapshots of this index keyed by snapshot id; lastValue() is the most recent.
+	protected final SortedList<Long, DirectoryIndexSnapshot> indexSnapshotMap = new SortedList<Long, DirectoryIndexSnapshot>();
+  
+	// Keeps Lucene commit points alive while any snapshot still references them.
+	protected DirectoryIndexDeletionPolicy indexDeletionPolicy = new DirectoryIndexDeletionPolicy();
+	// presumably opened by the concrete subclass before initialize() runs — TODO confirm
+	protected IndexReader initialIndexReader;
+	
+	public DirectoryIndex(IndexID id, TransactionSystem system) {
+		super(id, system);
+	}
+  
+	// Closes every snapshot's reader and removes it from the snapshot map.
+	public void close() throws IOException {
+	  for (DirectoryIndexSnapshot snapshot : indexSnapshotMap.values()) {
+	    snapshot.close();
+	  }
+	}
+	
+	public DirectoryIndexSnapshot getLatestIndexSnapshot() {
+		return indexSnapshotMap.lastValue();
+	}
+
+	public DirectoryIndexSnapshot getIndexSnapshot(Long snapshotId) {
+		return indexSnapshotMap.get(snapshotId);
+	}
+  
+	/**
+	 * Bootstraps the snapshot map from the initial reader.  When delete-only
+	 * batches are supplied, each batch is replayed in order and produces its own
+	 * snapshot (reopening the reader after each batch); otherwise a single
+	 * snapshot is created for snapshotId.  Returns the latest snapshot, which
+	 * is asserted to carry the requested id.
+	 */
+	public IndexSnapshot initialize(Long snapshotId, List<SlaveBatch> deleteOnlySlaveBatches, TransactionSystem system) throws Exception, IndexException, IOException {
+		IndexReader indexReader = initialIndexReader;
+		if (deleteOnlySlaveBatches == null || deleteOnlySlaveBatches.size() == 0) {
+			createNewSnapshot(snapshotId, indexReader);
+		} else {
+			for (SlaveBatch slaveBatch : deleteOnlySlaveBatches) {
+				if (slaveBatch.hasDeletes()) {
+					applyDeletes(true, slaveBatch.getDeletes(), null, indexReader);
+				}
+				indexReader = indexReader.reopen();
+				createNewSnapshot(slaveBatch.getId(), indexReader);
+			}
+		}
+		assert snapshotId.equals(indexSnapshotMap.lastKey());
+		return indexSnapshotMap.get(indexSnapshotMap.lastKey());
+	}
+
+	// Subclass hook; invoked from DirectoryIndexDeletionPolicy.onCommit before
+	// unreferenced commit points are pruned.
+	protected void onCommit() throws Exception {
+
+	}
+  
+	// Collects all snapshots whose reader sits on the given Lucene commit generation.
+	private List<DirectoryIndexSnapshot> getSnapshotsByGeneration(long generation) throws IOException {
+	  List<DirectoryIndexSnapshot> snapshots = new ArrayList<DirectoryIndexSnapshot>();
+	  for (DirectoryIndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      if (indexSnapshot.getIndexReader().getIndexCommit().getGeneration() == generation) {
+        snapshots.add(indexSnapshot);
+      }
+    }
+	  return snapshots;
+	}
+	
+	/**
+	 * Finds reader by version by iterating over snapshots and comparing versions
+	 * @param version the IndexReader version to look for
+	 * @return the matching snapshot, or null when no snapshot has that version
+	 */
+	private DirectoryIndexSnapshot getSnapshotByReaderVersion(long version) {
+	  for (DirectoryIndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+	    if (indexSnapshot.getIndexReaderVersion() == version) {
+	      return indexSnapshot;
+	    }
+	  }
+	  return null;
+	}
+	
+	/**
+	 * IndexDeletionPolicy that retains every commit point still referenced by a
+	 * live snapshot.  Lucene invokes onInit/onCommit with the list of existing
+	 * commits; the last commit is always kept, older ones are deleted once no
+	 * snapshot uses their generation (or once their snapshots lost all refs).
+	 */
+	public class DirectoryIndexDeletionPolicy implements IndexDeletionPolicy {
+		private IndexCommit lastCommit;
+		SortedList<Long,IndexCommit> commitPoints = new SortedList<Long,IndexCommit>(); // key is generation
+
+		public void onInit(List commits) throws IOException {
+			onCommit(commits);
+		}
+
+		public IndexCommit getLastIndexCommitPoint() {
+			return lastCommit;
+		}
+
+		public void onCommit(List commits) throws IOException {
+			try {
+			  // rebuild the generation -> commit map from the authoritative list
+			  commitPoints.clear();
+			  for (int x = 0; x < commits.size(); x++) {
+          IndexCommit indexCommit = (IndexCommit) commits.get(x);
+          commitPoints.put(indexCommit.getGeneration(), indexCommit);
+			  }
+				DirectoryIndex.this.onCommit();
+				lastCommit = (IndexCommit) commits.get(commits.size() - 1);
+				// examine every commit except the newest, which is always retained
+				for (int x = 0; x < commits.size() - 1; x++) {
+					IndexCommit indexCommitPoint = (IndexCommit) commits.get(x);
+					// Multiple snapshots may have the same generation
+					// so deleting a commitpoint may affect multiple snapshots
+					long generation = indexCommitPoint.getGeneration();
+					List<DirectoryIndexSnapshot> snapshots = getSnapshotsByGeneration(generation);
+					// if there are no snapshots it needs to be deleted, nothing
+					// is using it anymore
+					if (snapshots.size() == 0) {
+					  indexCommitPoint.delete();	
+					  commitPoints.remove(indexCommitPoint.getGeneration());
+					}
+					for (DirectoryIndexSnapshot indexSnapshot : snapshots) {
+					  if (!indexSnapshot.hasRef()) {
+					    // not referenced in Snapshots anymore
+	            indexCommitPoint.delete();
+	            indexSnapshot.delete();
+	          }
+					}
+				}
+			} catch (Exception exception) {
+				throw Util.asIOException(exception);
+			}
+		}
+	}
+
+	/**
+	 * Snapshot backed by a Directory-based IndexReader.  Created via the
+	 * subclass createNewSnapshot factory; closing or deleting it unregisters it
+	 * from the snapshot map.
+	 */
+	public abstract class DirectoryIndexSnapshot extends IndexSnapshot {
+		protected IndexReader indexReader;
+		private int maxDoc;
+
+		public DirectoryIndexSnapshot(Long snapshotId, IndexReader indexReader) throws IOException {
+			super(snapshotId);
+			this.indexReader = indexReader;
+			maxDoc = indexReader.maxDoc();
+			// presumably primes cached min/max document ids — TODO confirm side effects
+			getMinDocumentId();
+			getMaxDocumentId();
+		}
+		
+		// Closes this snapshot's reader and, when no LATER snapshot shares the
+		// same commit generation, deletes the underlying Lucene commit point too.
+		void delete() throws Exception {
+		  LOG.info(DirectoryIndex.this.getId()+" deleting snapshotid: "+snapshotId);
+		  long generation = getGeneration();
+		  List<DirectoryIndexSnapshot> snapshotsGreaterWithGeneration = new ArrayList<DirectoryIndexSnapshot>();
+		  for (DirectoryIndexSnapshot snapshot : indexSnapshotMap.values()) {
+		    if (snapshot.getGeneration() == generation && snapshot.snapshotId.longValue() > snapshotId) {
+		      snapshotsGreaterWithGeneration.add(snapshot);
+		    }
+		  }
+		  indexReader.close();
+		  if (snapshotsGreaterWithGeneration.size() == 0) {
+		    IndexCommit indexCommit = indexDeletionPolicy.commitPoints.get(generation);
+		    if (indexCommit != null) {
+		      LOG.info(DirectoryIndex.this.getId()+" deleting snapshotid: "+snapshotId+" indexCommit: "+indexCommit.getGeneration());
+		      indexCommit.delete();
+		    }
+		  }
+			indexSnapshotMap.remove(snapshotId);
+		}
+    
+		public long getGeneration() throws IOException {
+		  return indexReader.getIndexCommit().getGeneration();
+		}
+		
+		public void close() throws IOException {
+		  indexReader.close();
+		  indexSnapshotMap.remove(snapshotId);
+		}
+		
+		// number of deleted-but-not-merged-away documents in this reader
+		public int deletedDoc() {
+		  return indexReader.maxDoc() - indexReader.numDocs();
+		}
+		
+		public int maxDoc() {
+      return indexReader.maxDoc();
+    }
+		
+		public IndexReader getIndexReader() {
+			return indexReader;
+		}
+
+		public long getIndexReaderVersion() {
+			return indexReader.getVersion();
+		}
+
+		// true while the global Snapshots registry still references this snapshot id
+		public boolean hasRef() throws Exception {
+			return getSystem().getSnapshots().contains(snapshotId);
+		}
+	}
+
+	// Total on-disk (or in-ram) size of the backing Directory, in bytes.
+	public long getSize() throws IOException {
+		Directory directory = getDirectory();
+		return Util.getSize(directory);
+	}
+  
+	/**
+	 * Creates a new snapshot only without performing any changes to the index
+	 * @param transaction the coordinating transaction; its id becomes the new snapshot id
+	 * @throws IndexException
+	 * @throws InterruptedException
+	 * @throws IOException
+	 */
+	public void commitNothing(Transaction transaction) throws IndexException, InterruptedException, IOException {
+	  IndexSnapshot latestIndexSnapshot = getLatestIndexSnapshot();
+    assert latestIndexSnapshot != null;
+    assert latestIndexSnapshot.getSnapshotId().equals(transaction.getPreviousId());
+    transaction.ready(this); 
+    if (transaction.go()) {
+      Long snapshotId = transaction.getId();
+      // reuse the previous reader — no index changes were made
+      IndexReader previousIndexReader = latestIndexSnapshot.getIndexReader();
+      createNewSnapshot(snapshotId, previousIndexReader);
+      removeOldSnapshots(indexSnapshotMap);
+    }
+	}
+	
+	// Deletes the snapshot created for the given id, if any; true on success.
+	public boolean rollback(Long snapshotId) throws Exception {
+    LOG.info("rollback "+snapshotId);
+    DirectoryIndexSnapshot indexSnapshot = indexSnapshotMap.get(snapshotId);
+    if (indexSnapshot != null) {
+      indexSnapshot.delete();
+      return true;
+    }
+    return false;
+  }
+	
+	/**
+	 * Applies the deletes against a fresh reopened reader and, if the
+	 * transaction commits, flushes and publishes a new snapshot.  Returns null
+	 * when the transaction aborts or when any error occurs (the transaction is
+	 * then marked failed).
+	 */
+	public DeletesResult commitDeletes(Deletes deletes, Transaction transaction) throws Exception, IndexException, InterruptedException, IOException {
+		IndexSnapshot latestIndexSnapshot = getLatestIndexSnapshot();
+		assert latestIndexSnapshot != null;
+		assert latestIndexSnapshot.getSnapshotId().equals(transaction.getPreviousId());
+		IndexReader previousIndexReader = latestIndexSnapshot.getIndexReader();
+		IndexReader newIndexReader = previousIndexReader.reopen(true);
+		try {
+			DeletesResult deletesResult = applyDeletes(true, deletes, null, newIndexReader);
+			transaction.ready(this); 
+			if (transaction.go()) {
+				if (deletesResult.getNumDeleted() > 0) {
+					newIndexReader.flush();
+				}
+				if (deletesResult.getNumDeleted() > 0) {
+				  // if flush was called check to make sure there is a new generation for the indexreader
+				  LOG.info("previous reader gen: "+previousIndexReader.getIndexCommit().getGeneration()+" newIndexReader gen: "+newIndexReader.getIndexCommit().getGeneration());
+				}
+				Long snapshotId = transaction.getId();
+				createNewSnapshot(snapshotId, newIndexReader);
+				removeOldSnapshots(indexSnapshotMap);
+				return deletesResult;
+			} else {
+				rollback(transaction.getId());
+				return null;
+			}
+		} catch (Throwable throwable) {
+			LOG.error("", throwable);
+			transaction.failed(this, throwable);
+			//rollback(transaction.getId());
+			return null;
+		}
+	}
+
+	// Factory implemented by RamIndex/DiskIndex; must register the snapshot.
+	protected abstract DirectoryIndexSnapshot createNewSnapshot(Long snapshotId, IndexReader newIndexReader) throws IOException;
+
+	protected void registerSnapshot(DirectoryIndexSnapshot indexSnapshot) throws IOException {
+		indexSnapshotMap.put(indexSnapshot.getSnapshotId(), indexSnapshot);
+	}
+
+	public abstract Directory getDirectory();
+}
Index: ocean/src/org/apache/lucene/ocean/WriteableMemoryIndex.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/WriteableMemoryIndex.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/WriteableMemoryIndex.java	(revision 0)
@@ -0,0 +1,185 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.FilterIndexReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.store.instantiated.InstantiatedIndex;
+import org.apache.lucene.store.instantiated.InstantiatedIndexReader;
+import org.apache.lucene.store.instantiated.InstantiatedIndexWriter;
+
+/**
+ * Encapsulates org.apache.lucene.store.instantiated.InstantiatedIndex for use with the
+ * Ocean system.  This is the only index that is writeable, meaning new documents may
+ * be added to it.  Each committed transaction is recorded as a MemoryIndexSnapshot
+ * capturing the document count and the set of deleted doc ids at that point.
+ */
+//TODO: release old MemoryIndexSnapshots from map
+public class WriteableMemoryIndex extends Index {
+	private WriteableIndexWriter indexWriter;
+	//private InstantiatedIndexReader indexReader;
+	private InstantiatedIndex instantiatedIndex;
+  // snapshots keyed by snapshot id; lastValue() is the latest committed state
+  private SortedList<Long,MemoryIndexSnapshot> snapshotMap = new SortedList<Long,MemoryIndexSnapshot>();
+	
+	public WriteableMemoryIndex(IndexID id, TransactionSystem system) throws IOException {
+		super(id, system);
+		instantiatedIndex = new InstantiatedIndex();
+		indexWriter = new WriteableIndexWriter(instantiatedIndex);
+		//indexReader = new InstantiatedIndexReader(instantiatedIndex);
+	}
+  
+	// Drops the snapshot for the given id; true when one existed.
+	public boolean rollback(Long snapshotId) {
+	  return snapshotMap.remove(snapshotId) != null;
+	}
+	
+	// called by Category.runTransactionsNotInIndex
+	void addDocuments(Documents documents, Analyzer analyzer) throws IOException {
+		for (Document document : documents) {
+			indexWriter.addDocument(document, analyzer);
+		}
+	}
+
+	// called by Category.runTransactionsNotInIndex
+	// Records the current writer state as a new snapshot with an empty deleted set.
+	MemoryIndexSnapshot setSnapshot(Long snapshotId) {//, List<Deletes> deletesList) throws Exception {
+		//int maxDoc = indexReader.maxDoc();
+		//HashSet<Integer> deletedSet = new HashSet<Integer>();
+		//if (deletesList != null) {
+		//	for (Deletes deletes : deletesList) {
+		//		applyDeletes(false, deletes, null, indexReader);
+		//	}
+		//}
+	  int maxDoc = instantiatedIndex.getDocumentsByNumber().length;
+	  HashSet<Integer> deletedSet = new HashSet<Integer>();
+		MemoryIndexSnapshot memoryIndexSnapshot = new MemoryIndexSnapshot(snapshotId, maxDoc, deletedSet);
+		snapshotMap.put(snapshotId, memoryIndexSnapshot);
+		return memoryIndexSnapshot;
+	}
+
+	public MemoryIndexSnapshot getIndexSnapshot(Long snapshotId) {
+		return snapshotMap.get(snapshotId);
+	}
+
+	public MemoryIndexSnapshot getLatestIndexSnapshot() {
+		return snapshotMap.lastValue();
+	}
+
+	/**
+	 * Immutable view of the memory index at one snapshot id: a fixed maxDoc and
+	 * deleted-doc set, exposed through an OceanInstantiatedIndexReader.
+	 */
+	public class MemoryIndexSnapshot extends IndexSnapshot {
+		private final int maxDoc;
+		private HashSet<Integer> deletedDocs;
+		private OceanInstantiatedIndexReader indexReader;
+
+		public MemoryIndexSnapshot(Long snapshotId, int maxDoc, HashSet<Integer> deletedDocs) {
+			super(snapshotId);
+			this.maxDoc = maxDoc;
+			this.deletedDocs = deletedDocs;
+			indexReader = new OceanInstantiatedIndexReader(maxDoc, instantiatedIndex, deletedDocs);
+		}
+		
+		public int deletedDoc() {
+		  return deletedDocs.size();
+		}
+		
+		public int maxDoc() {
+		  return maxDoc;
+		}
+
+		public IndexReader getIndexReader() {
+			return indexReader;
+		}
+	}
+
+	Long getLatestSnapshotId() {
+		return snapshotMap.lastKey();
+	}
+
+	// Deleted-doc set of the newest snapshot, or null when there is none.
+	private HashSet<Integer> getLatestSnapshotDeletedDocSet() {
+		MemoryIndexSnapshot memoryIndexSnapshot = snapshotMap.lastValue();
+		if (memoryIndexSnapshot == null || memoryIndexSnapshot.deletedDocs == null)
+			return null;
+		HashSet<Integer> deletedDocSet = memoryIndexSnapshot.deletedDocs;
+		return deletedDocSet;
+	}
+
+	// Current number of documents held by the InstantiatedIndex (includes deleted).
+	public int getDocumentCount() {
+		return instantiatedIndex.getDocumentsByNumber().length;
+	}
+
+	public DeletesResult commitDeletes(Deletes deletes, Transaction transaction) throws Exception, InterruptedException, IOException {
+		return commitChanges(null, deletes, null, transaction);
+	}
+  
+	// Publishes a new snapshot that reuses the latest maxDoc and deleted set,
+	// without touching the writer.
+	public void commitNothing(Transaction transaction) throws IndexException, InterruptedException, IOException {
+	  Long snapshotId = transaction.getId();
+	  MemoryIndexSnapshot latestIndexSnapshot = getLatestIndexSnapshot();
+	  int maxDoc = latestIndexSnapshot.maxDoc();
+	  HashSet<Integer> deletedDocSet = latestIndexSnapshot.deletedDocs;
+	  MemoryIndexSnapshot memoryIndexSnapshot = new MemoryIndexSnapshot(snapshotId, maxDoc, deletedDocSet);
+    snapshotMap.put(snapshotId, memoryIndexSnapshot);
+    removeOldSnapshots(snapshotMap);
+	}
+	
+	/**
+	 * Applies deletes and/or adds documents, then publishes a new snapshot when
+	 * the transaction commits.  On abort the writer changes are discarded via
+	 * abort(); on any error the transaction is marked failed and the cause is
+	 * rethrown wrapped as an Exception when necessary.
+	 */
+	public DeletesResult commitChanges(Documents documents, Deletes deletes, Analyzer analyzer, Transaction transaction) throws InterruptedException, Exception, IOException {
+		try {
+			if (isClosed()) {
+				throw new IOException("index is closed");
+			}
+			if (isReadOnly() && documents != null && documents.size() > 0) {
+				throw new IOException("index not accepting new documents");
+			}
+			DeletesResult deletesResult = new DeletesResult(getId());
+			HashSet<Integer> deletedSet = null;
+			if (deletes != null && deletes.hasDeletes()) {
+			  // copy-on-write: clone the previous set so older snapshots stay intact
+			  HashSet<Integer> previousDeletedSet = getLatestSnapshotDeletedDocSet();
+				if (previousDeletedSet != null) {
+					deletedSet = (HashSet<Integer>) previousDeletedSet.clone();
+				} else {
+					deletedSet = new HashSet<Integer>();
+				}
+				IndexSnapshot indexSnapshot = getLatestIndexSnapshot();
+				deletesResult = applyDeletes(false, deletes, deletedSet, indexSnapshot.getIndexReader());
+			} else if (deletes == null || !deletes.hasDeletes()) { // if no deletes
+				// just use same
+				deletedSet = getLatestSnapshotDeletedDocSet();
+			}
+			if (documents != null) {
+				for (Document document : documents) {
+					indexWriter.addDocument(document, analyzer);
+				}
+			}
+			transaction.ready(this);
+			if (transaction.go()) {
+				indexWriter.commit();
+				int maxDoc = instantiatedIndex.getDocumentsByNumber().length;
+				Long snapshotId = transaction.getId();
+				MemoryIndexSnapshot memoryIndexSnapshot = new MemoryIndexSnapshot(snapshotId, maxDoc, deletedSet);
+				snapshotMap.put(snapshotId, memoryIndexSnapshot);
+				removeOldSnapshots(snapshotMap);
+				return deletesResult;
+			} else {
+				indexWriter.abort();
+				return null;
+			}
+		} catch (Throwable throwable) {
+			transaction.failed(this, throwable);
+			if (throwable instanceof Exception) {
+			  throw (Exception)throwable;
+			} else {
+			  throw new Exception(throwable);
+			}
+		}
+		//return null;
+	}
+
+	// Thin subclass of InstantiatedIndexWriter; currently adds no behavior and
+	// exists as an extension point.
+	public class WriteableIndexWriter extends InstantiatedIndexWriter {
+		public WriteableIndexWriter(InstantiatedIndex index) throws IOException {
+			super(index);
+		}
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/Snapshot.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Snapshot.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Snapshot.java	(revision 0)
@@ -0,0 +1,336 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.ocean.DiskIndex.DiskIndexSnapshot;
+import org.apache.lucene.ocean.Index.IndexSnapshot;
+import org.apache.lucene.ocean.RamIndex.RamIndexSnapshot;
+import org.apache.lucene.ocean.SnapshotInfo.IndexInfo;
+import org.apache.lucene.ocean.WriteableMemoryIndex.MemoryIndexSnapshot;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiSearcher;
+import org.apache.lucene.search.Searcher;
+
+/**
+ * An immutable view over one committed version of the whole system: the single
+ * writeable memory snapshot plus any number of read-only per-index snapshots
+ * (disk/ram).  Exposes a combined MultiReader and per-index searchers.
+ *
+ * The id is a BigDecimal of the form "snapshotId.mm" (two decimal digits of
+ * minor version), so minor (merge-only) versions sort after their parent.
+ */
+public class Snapshot implements Comparable<Snapshot> {
+  /** Composite id: whole part = snapshot id, two decimal digits = minor version. */
+  private BigDecimal id;
+  /** Per-index snapshots keyed by IndexID; includes the writeable snapshot. */
+  private SortedList<IndexID,IndexSnapshot> indexSnapshotMap;
+  /** The single writeable (memory) snapshot participating in this version. */
+  private MemoryIndexSnapshot writeableSnapshot;
+  /** MultiReader combining all per-index readers. */
+  private IndexReader indexReader;
+  /** Total maxDoc across sub-readers; populated as a side effect of makeStarts(). */
+  private int maxDoc;
+  /** Sub-reader doc-id offsets, MultiReader-style; length = readers + 1. */
+  private final int[] starts;
+  private TransactionSystem system;
+  private final long timestamp;
+
+  public Snapshot(BigDecimal id, MemoryIndexSnapshot writeableSnapshot, Collection<IndexSnapshot> indexSnapshots, TransactionSystem system,
+      long timestamp) throws IOException {
+    this.id = id;
+    this.writeableSnapshot = writeableSnapshot;
+    // FIX: the system parameter was previously never assigned, leaving the
+    // field null and propagating null through createMinor().
+    this.system = system;
+    this.timestamp = timestamp;
+    List<IndexSnapshot> allIndexSnapshots = new ArrayList<IndexSnapshot>(indexSnapshots);
+    allIndexSnapshots.add(writeableSnapshot);
+    assert !hasDuplicates(allIndexSnapshots);
+    assert snapshotIdsMatch(allIndexSnapshots);
+    indexSnapshotMap = new SortedList<IndexID,IndexSnapshot>();
+    for (IndexSnapshot indexSnapshot : allIndexSnapshots) {
+      indexSnapshotMap.put(indexSnapshot.getIndex().getId(), indexSnapshot);
+    }
+    IndexReader[] readerArray = getReaderArray(allIndexSnapshots);
+    indexReader = new MultiReader(readerArray);
+    starts = makeStarts();
+  }
+
+  public Snapshot(Long snapshotId, int minorVersion, MemoryIndexSnapshot writeableSnapshot, List<IndexSnapshot> indexSnapshots,
+      TransactionSystem system, long timestamp) throws IOException {
+    this(toId(snapshotId, minorVersion), writeableSnapshot, indexSnapshots, system, timestamp);
+  }
+
+  /** True when two snapshots in the list share the same IndexID. */
+  private static boolean hasDuplicates(List<IndexSnapshot> allIndexSnapshots) {
+    Set<IndexID> set = new HashSet<IndexID>();
+    for (IndexSnapshot indexSnapshot : allIndexSnapshots) {
+      // Set.add returns false on a duplicate
+      if (!set.add(indexSnapshot.getIndex().getId())) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Collects each snapshot's reader in the iteration order of the argument.
+   * (The redundant re-population of indexSnapshotMap that used to happen here
+   * was removed; the constructor already fills the map.)
+   */
+  private IndexReader[] getReaderArray(Collection<IndexSnapshot> indexSnapshots) {
+    IndexReader[] readerArray = new IndexReader[indexSnapshots.size()];
+    int x = 0;
+    for (IndexSnapshot indexSnapshot : indexSnapshots) {
+      readerArray[x] = indexSnapshot.getIndexReader();
+      x++;
+    }
+    return readerArray;
+  }
+
+  public int numDocs() {
+    return indexReader.numDocs();
+  }
+
+  /** Builds the doc-id offset table and, as a side effect, accumulates maxDoc. */
+  private int[] makeStarts() {
+    IndexSnapshot[] indexSnapshotsArray = indexSnapshotMap.values().toArray(new IndexSnapshot[0]);
+    int[] starts = new int[indexSnapshotsArray.length + 1];
+    for (int i = 0; i < indexSnapshotsArray.length; i++) {
+      starts[i] = maxDoc;
+      maxDoc += indexSnapshotsArray[i].maxDoc(); // compute maxDocs
+    }
+    starts[indexSnapshotsArray.length] = maxDoc;
+    return starts;
+  }
+
+  public long getTimestamp() {
+    return timestamp;
+  }
+
+  /** Orders snapshots by composite id (snapshot id, then minor version). */
+  public int compareTo(Snapshot other) {
+    return id.compareTo(other.id);
+  }
+
+  public Searcher getSearcher() throws IOException {
+    return new MultiSearcher(getSearchers());
+  }
+
+  /** IndexSearcher that remembers which IndexSnapshot it searches. */
+  public static class SnapshotSearcher extends IndexSearcher {
+    private IndexSnapshot indexSnapshot;
+
+    public SnapshotSearcher(IndexReader indexReader, IndexSnapshot indexSnapshot) {
+      super(indexReader);
+      this.indexSnapshot = indexSnapshot;
+    }
+
+    public IndexSnapshot getIndexSnapshot() {
+      return indexSnapshot;
+    }
+  }
+
+  /** One SnapshotSearcher per index snapshot, in snapshot-map order. */
+  public Searcher[] getSearchers() {
+    IndexSnapshot[] indexSnapshots = indexSnapshotMap.values().toArray(new IndexSnapshot[0]);
+    Searcher[] searchers = new Searcher[indexSnapshots.length];
+    for (int x = 0; x < indexSnapshots.length; x++) {
+      searchers[x] = new SnapshotSearcher(indexSnapshots[x].getIndexReader(), indexSnapshots[x]);
+    }
+    return searchers;
+  }
+
+  public IndexReader getIndexReader() {
+    return indexReader;
+  }
+
+  /** Total maxDoc captured at construction time. */
+  public int maxDoc() {
+    return maxDoc;
+  }
+
+  public int[] getStarts() {
+    return starts;
+  }
+
+  public List<RamIndexSnapshot> getRamIndexSnapshots() {
+    List<RamIndexSnapshot> ramIndexSnapshots = new ArrayList<RamIndexSnapshot>();
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      if (indexSnapshot instanceof RamIndexSnapshot) {
+        ramIndexSnapshots.add((RamIndexSnapshot) indexSnapshot);
+      }
+    }
+    return ramIndexSnapshots;
+  }
+
+  public IndexReader[] getIndexReaders() {
+    IndexReader[] indexReaders = new IndexReader[indexSnapshotMap.size()];
+    int i = 0;
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      indexReaders[i] = indexSnapshot.getIndexReader();
+      i++;
+    }
+    return indexReaders;
+  }
+
+  /** Recomputes total maxDoc from the live readers (may differ from maxDoc()). */
+  public int getMaxDoc() {
+    int total = 0;
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      total += indexSnapshot.getIndexReader().maxDoc();
+    }
+    return total;
+  }
+
+  public int getMinorVersion() {
+    return getMinorVersion(id);
+  }
+
+  /** Ad-hoc smoke test for id formatting. */
+  public static void main(String[] args) {
+    System.out.println(formatId(toId(210l, 1)));
+  }
+
+  /** Encodes snapshotId and a (zero-padded, two-digit) minor version as "id.mm". */
+  public static BigDecimal toId(Long snapshotId, int minorVersion) {
+    return new BigDecimal(String.format("%d.%02d", snapshotId, minorVersion));
+  }
+
+  /** Decodes the two-digit minor version from a composite id. */
+  public static int getMinorVersion(BigDecimal value) {
+    value = value.subtract(new BigDecimal(value.longValue()));
+    return value.scaleByPowerOfTen(2).intValue();
+  }
+
+  /** True when every snapshot in the collection carries the same snapshot id. */
+  public static boolean snapshotIdsMatch(Collection<IndexSnapshot> indexSnapshots) {
+    Long current = null;
+    for (IndexSnapshot indexSnapshot : indexSnapshots) {
+      if (current == null) {
+        current = indexSnapshot.getSnapshotId();
+      } else if (!current.equals(indexSnapshot.getSnapshotId())) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  /** Builds a summary of this snapshot plus one IndexInfo per contained index. */
+  public SnapshotInfo getSnapshotInfo() throws IOException {
+    int deletedDocs = indexReader.maxDoc() - indexReader.numDocs();
+    SnapshotInfo snapshotInfo = new SnapshotInfo(id, indexReader.maxDoc(), indexReader.numDocs(), deletedDocs);
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      Index index = indexSnapshot.getIndex();
+      String type = null;
+      Long segmentGeneration = null;
+      if (index instanceof DiskIndex) {
+        segmentGeneration = indexSnapshot.getIndexReader().getIndexCommit().getGeneration();
+        type = "disk";
+      } else if (index instanceof WriteableMemoryIndex) {
+        type = "memory";
+      } else if (index instanceof RamIndex) {
+        type = "ram";
+      }
+      IndexInfo indexInfo = new IndexInfo(indexSnapshot.getSnapshotId(), index.getId().id, segmentGeneration, type, indexSnapshot.maxDoc(), indexSnapshot.getIndexReader().numDocs(), indexSnapshot.deletedDoc(), indexSnapshot.getMinDocumentId(), indexSnapshot.getMaxDocumentId(), indexSnapshot.getMinSnapshotId(), indexSnapshot.getMaxSnapshotId());
+      snapshotInfo.add(indexInfo);
+    }
+    return snapshotInfo;
+  }
+
+  // NOTE(review): DecimalFormat is locale-sensitive; in locales using ',' as
+  // the decimal separator getFileName()'s '.'-replacement will not fire —
+  // confirm whether an explicit Locale should be pinned here.
+  public static String formatId(BigDecimal id) {
+    DecimalFormat format = new DecimalFormat("##0.00");
+    return format.format(id);
+  }
+
+  public static String getFileName(BigDecimal id) {
+    return "snapshot_" + formatId(id).replace('.', '_') + ".xml";
+  }
+
+  /**
+   * Create minor snapshot (meaning a merged snapshot with no real index
+   * changes) reusing the existing writeableSnapshot.
+   *
+   * @param removeIndexIds ids of the indexes that were merged away
+   * @param newIndexSnapshot snapshot of the merge result
+   * @return the new minor-version Snapshot
+   * @throws IOException
+   */
+  public Snapshot createMinor(List<IndexID> removeIndexIds, IndexSnapshot newIndexSnapshot) throws IOException {
+    return createMinor(removeIndexIds, writeableSnapshot, newIndexSnapshot);
+  }
+
+  public Snapshot createMinor(List<IndexID> removeIndexIds, MemoryIndexSnapshot writeableSnapshot, IndexSnapshot newIndexSnapshot)
+      throws IOException {
+    HashMap<IndexID,IndexSnapshot> mapCopy = new HashMap<IndexID,IndexSnapshot>(indexSnapshotMap);
+    for (IndexID indexid : removeIndexIds) {
+      mapCopy.remove(indexid);
+    }
+    IndexID newIndexId = newIndexSnapshot.getIndex().getId();
+    assert !mapCopy.containsKey(newIndexId);
+    mapCopy.put(newIndexId, newIndexSnapshot);
+    mapCopy.put(writeableSnapshot.getIndex().getId(), writeableSnapshot);
+    // same snapshot id, next minor version (debug System.out removed)
+    int newMinorVersion = getMinorVersion() + 1;
+    Long snapshotId = getSnapshotId();
+    return new Snapshot(snapshotId, newMinorVersion, writeableSnapshot,
+        new ArrayList<IndexSnapshot>(mapCopy.values()), system, System.currentTimeMillis());
+  }
+
+  /** Returns the DiskIndex instances referenced by this snapshot. */
+  public List<DiskIndex> getDiskIndices() {
+    List<DiskIndex> diskIndices = new ArrayList<DiskIndex>();
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      Index index = indexSnapshot.getIndex();
+      // FIX: the result of getIndex() was previously discarded, so this method
+      // always returned an empty list; collect the disk indexes as promised.
+      if (index instanceof DiskIndex) {
+        diskIndices.add((DiskIndex) index);
+      }
+    }
+    return diskIndices;
+  }
+
+  /** All indexes except the writeable one — those can only receive deletes. */
+  public List<Index> getDeleteOnlyIndices() {
+    HashMap<IndexID,IndexSnapshot> mapCopy = new HashMap<IndexID,IndexSnapshot>(indexSnapshotMap);
+    mapCopy.remove(writeableSnapshot.getIndex().getId());
+    List<Index> indices = new ArrayList<Index>();
+    for (IndexSnapshot indexSnapshot : mapCopy.values()) {
+      indices.add(indexSnapshot.getIndex());
+    }
+    return indices;
+  }
+
+  public MemoryIndexSnapshot getWriteableSnapshot() {
+    return writeableSnapshot;
+  }
+
+  /**
+   * True when this snapshot contains an index whose numeric id equals indexid.
+   * FIX: the map is keyed by IndexID, so the previous containsKey(long) call
+   * compared an auto-boxed Long against IndexID keys and always returned false.
+   */
+  public boolean containsIndex(long indexid) {
+    for (IndexID indexId : indexSnapshotMap.keySet()) {
+      if (indexId.id == indexid) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public List<DiskIndexSnapshot> getDiskIndexSnapshots() {
+    List<DiskIndexSnapshot> diskIndexSnapshots = new ArrayList<DiskIndexSnapshot>();
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      if (indexSnapshot instanceof DiskIndexSnapshot) {
+        diskIndexSnapshots.add((DiskIndexSnapshot) indexSnapshot);
+      }
+    }
+    return diskIndexSnapshots;
+  }
+
+  /** Snapshot copy of all per-index snapshots. */
+  public List<IndexSnapshot> getIndexSnapshots() {
+    return new ArrayList<IndexSnapshot>(indexSnapshotMap.values());
+  }
+
+  /** Whole-number part of the composite id. */
+  public Long getSnapshotId() {
+    return id.longValue();
+  }
+
+  public BigDecimal getId() {
+    return id;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/FSLogDirectory.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/FSLogDirectory.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/FSLogDirectory.java	(revision 0)
@@ -0,0 +1,108 @@
+package org.apache.lucene.ocean;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.ocean.util.Util;
+
+public class FSLogDirectory extends LogDirectory {
+  private File fileDirectory;
+  private ReentrantLock outputLock = new ReentrantLock();
+  private ReentrantLock inputLock = new ReentrantLock();
+
+  public FSLogDirectory(File fileDirectory) {
+    Util.mkdir(fileDirectory);
+    this.fileDirectory = fileDirectory;
+  }
+  
+  public String[] list() throws IOException {
+    List<String> list = new ArrayList<String>();
+    for (File file : fileDirectory.listFiles()) {
+      if (!file.isDirectory()) {
+        list.add(file.getName());
+      }
+    }
+    return (String[]) list.toArray(new String[0]);
+  }
+
+  public boolean fileExists(String name) throws IOException {
+    File file = new File(fileDirectory, name);
+    return file.exists();
+  }
+
+  public long fileModified(String name) throws IOException {
+    File file = new File(fileDirectory, name);
+    return file.lastModified();
+  }
+
+  public void deleteFile(String name) throws IOException {
+    File file = new File(fileDirectory, name);
+    boolean deleted = file.delete();
+    if (!deleted) {
+      throw new IOException("file: "+name+" not deleted");
+    }
+  }
+
+  public long fileLength(String name) throws IOException {
+    File file = new File(fileDirectory, name);
+    return file.length();
+  }
+
+  public RandomAccessFile openInput(String name) throws IOException {
+    inputLock.lock();
+    try {
+      File file = new File(fileDirectory, name);
+      return new RandomAccessFile(file, "r");
+      //RandomAccessFileContent content = new RandomAccessFileContent(file, "r");
+      //IOController contoller = new IOController(1024 * 16, content);
+      //BufferedRandomAccessIO buffered = new BufferedRandomAccessIO(contoller, true);
+      //return buffered;
+      //BufferedRandomAccessIO access = getOutput(name, false);
+      
+      //return access.createIOChild(0, 0, access.getByteOrder(), false);
+    } finally {
+      inputLock.unlock();
+    }
+  }
+
+  public RandomAccessFile getOutput(String name, boolean overwrite) throws IOException {
+    outputLock.lock();
+    try {
+      File file = new File(fileDirectory, name);
+      if (overwrite) {
+        file.delete();
+      }
+      return new RandomAccessFile(file, "rw");
+      /**
+      FSFile fsFile = outputMap.get(name);
+      if (fsFile == null) {
+        File file = new File(fileDirectory, name);
+        //RandomAccessFileContent content = new RandomAccessFileContent(file, "rw");
+        //content.tryLock();
+        fsFile = new FSFile(content);
+        outputMap.put(name, fsFile);
+      }
+      
+      if (overwrite) {
+        //if (fsFile != null) {
+        //  buffered.close();
+        //  buffered = null;
+       // }
+        //boolean deleted = file.delete();
+
+      }
+      IOController contoller = new IOController(1024 * 16, fsFile.content);
+      BufferedRandomAccessIO buffered = new BufferedRandomAccessIO(contoller, false);
+      return buffered;
+      **/
+    } finally {
+      outputLock.unlock();
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/Indexes.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Indexes.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Indexes.java	(revision 0)
@@ -0,0 +1,38 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.ocean.util.Util;
+
+public class Indexes {
+  private Map<IndexID,Index> indexMap = new HashMap<IndexID,Index>();
+  
+  public IndexID getMaxId(String type) {
+    List<IndexID> list = new ArrayList<IndexID>();
+    for (IndexID indexId : indexMap.keySet()) {
+      if (indexId.type.equals(type)) {
+        list.add(indexId);
+      }
+    }
+    if (list.size() == 0) return null;
+    return Util.max(list);
+  }
+  
+  public List<Index> getIndexes() {
+    return new ArrayList(indexMap.values());
+  }
+  
+  public Indexes() {
+  }
+
+  public Index get(IndexID indexId) {
+    return indexMap.get(indexId);
+  }
+
+  public void add(Index index) {
+    indexMap.put(index.getId(), index);
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/Documents.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Documents.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Documents.java	(revision 0)
@@ -0,0 +1,17 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.apache.lucene.document.Document;
+
+public class Documents extends ArrayList<Document> {
+  
+  public Documents() {}
+  
+  public Documents(Collection<Document> documents) {
+    for (Document document : documents) {
+      add(document);
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/SingleThreadSearcherPolicy.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/SingleThreadSearcherPolicy.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/SingleThreadSearcherPolicy.java	(revision 0)
@@ -0,0 +1,5 @@
+package org.apache.lucene.ocean;
+
/**
 * Marker {@link SearcherPolicy} selecting single-threaded search execution.
 * Carries no configuration; its type alone selects the behavior.
 */
public class SingleThreadSearcherPolicy extends SearcherPolicy {
  
}
Index: ocean/src/org/apache/lucene/ocean/LogDirectory.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/LogDirectory.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/LogDirectory.java	(revision 0)
@@ -0,0 +1,20 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+
/**
 * Abstraction over a directory holding transaction-log files, mirroring a
 * subset of the Lucene Directory contract but exposing
 * {@link RandomAccessFile} handles for random-access log I/O.
 */
public abstract class LogDirectory {
  /** Lists the file names in this directory. */
  public abstract String[] list() throws IOException;

  /** Returns true if a file with the given name exists. */
  public abstract boolean fileExists(String name) throws IOException;

  /** Returns the last-modified timestamp of the named file. */
  public abstract long fileModified(String name) throws IOException;

  /** Deletes the named file. */
  public abstract void deleteFile(String name) throws IOException;

  /** Returns the length in bytes of the named file. */
  public abstract long fileLength(String name) throws IOException;
  
  /** Opens the named file for reading. */
  public abstract RandomAccessFile openInput(String name) throws IOException;
  
  /**
   * Opens the named file for writing, optionally deleting any existing file
   * first.
   */
  public abstract RandomAccessFile getOutput(String name, boolean overwrite) throws IOException;
}
Index: ocean/src/org/apache/lucene/ocean/RamIndex.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/RamIndex.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/RamIndex.java	(revision 0)
@@ -0,0 +1,110 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
+import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.ocean.WriteableMemoryIndex.MemoryIndexSnapshot;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+
+public class RamIndex extends DirectoryIndex {
+	private RAMDirectory ramDirectory;
+	private Long maxSnapshotId;
+	private Long maxDocumentId;
+  
+	public RamIndex(IndexID id, List<? extends IndexSnapshot> indexSnapshots, TransactionSystem system) throws Exception {
+		super(id, system);
+		ramDirectory = new RAMDirectory();
+		IndexReader[] indexReaders = getIndexReaders(indexSnapshots);
+		RAMDirectory ramDirectory = new RAMDirectory();
+		IndexWriter indexWriter = new IndexWriter(ramDirectory, false, system.getDefaultAnalyzer(), true);
+		indexWriter.setMergeScheduler(new SerialMergeScheduler());
+		indexWriter.setUseCompoundFile(true);
+		indexWriter.addIndexes(indexReaders);
+		indexWriter.close();
+		maxSnapshotId = getMaxSnapshotId(indexSnapshots);
+		maxDocumentId = getMaxDocumentId(indexSnapshots);
+	}
+	
+	// TODO: add timestamp so ramindex can be removed from indices
+	public RamIndex(IndexID id, Long snapshotId, List<Deletes> deletesList, RAMDirectory ramDirectory, TransactionSystem system) throws Exception {
+		super(id, system);
+		this.ramDirectory = ramDirectory;
+		initialIndexReader = IndexReader.open(ramDirectory, indexDeletionPolicy);
+		createNewSnapshot(snapshotId, initialIndexReader);
+		if (deletesList != null) {
+			for (Deletes deletes : deletesList) {
+				applyDeletes(true, deletes, null, initialIndexReader);
+			}
+		}
+	}
+  
+	// converts memoryIndexSnapshot into ramindexsnapshot
+	public RamIndex(IndexID id, MemoryIndexSnapshot memoryIndexSnapshot) throws Exception, IOException {
+		super(id, memoryIndexSnapshot.getIndex().getSystem());
+		this.maxSnapshotId = memoryIndexSnapshot.getMaxSnapshotId();
+		this.maxDocumentId = memoryIndexSnapshot.getMaxDocumentId();
+		ramDirectory = new RAMDirectory();
+		Analyzer defaultAnalyzer = memoryIndexSnapshot.getIndex().getSystem().getDefaultAnalyzer();
+		IndexWriter indexWriter = new IndexWriter(ramDirectory, false, defaultAnalyzer, true, new KeepOnlyLastCommitDeletionPolicy());
+		indexWriter.addIndexes(new IndexReader[] {memoryIndexSnapshot.getIndexReader()});
+		indexWriter.close();
+		initialIndexReader = IndexReader.open(ramDirectory, indexDeletionPolicy);
+		List<IndexSnapshot> indexSnapshots = new ArrayList<IndexSnapshot>(1);
+		indexSnapshots.add(memoryIndexSnapshot);
+		createNewSnapshot(memoryIndexSnapshot.getSnapshotId(), initialIndexReader);
+	}
+  
+	public RamIndexSnapshot commitIndex(Transaction transaction) throws IndexException, InterruptedException, IOException {
+		try {
+			transaction.ready(this);
+			if (transaction.go()) {
+				Long snapshotId = transaction.getId();
+				RamIndexSnapshot indexSnapshot = createNewSnapshot(snapshotId, initialIndexReader);
+				return indexSnapshot;
+			} else {
+				// if commit fails this snapshot and ramindex won't make it
+				return null;
+			}
+		} catch (Throwable throwable) {
+			LOG.error("", throwable);
+			transaction.failed(this, throwable);
+			return null;
+		}
+	}
+	
+	protected RamIndexSnapshot createNewSnapshot(Long snapshotId, IndexReader newIndexReader) throws IOException {
+		RamIndexSnapshot ramIndexSnapshot = new RamIndexSnapshot(snapshotId, newIndexReader);
+		registerSnapshot(ramIndexSnapshot);
+		return ramIndexSnapshot;
+	}
+
+	public class RamIndexSnapshot extends DirectoryIndexSnapshot {
+		public RamIndexSnapshot(Long snapshotId, IndexReader indexReader) throws IOException {
+			super(snapshotId, indexReader);
+		}
+		
+		public String toString() {
+		  return "RamIndexSnapshot index: "+RamIndex.this.getId()+" snapshotid: "+snapshotId+" maxDoc: "+indexReader.maxDoc();
+		}
+		
+		//public Long getMaxSnapshotId() throws IOException {
+		//	return maxSnapshotId;
+		//}
+
+		//public Long getMaxDocumentId() throws IOException {
+		//	return maxDocumentId;
+		//}
+	}
+
+	public Directory getDirectory() {
+		return ramDirectory;
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/Index.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Index.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Index.java	(revision 0)
@@ -0,0 +1,384 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.StaleReaderException;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.TermEnum;
+import org.apache.lucene.ocean.Deletes.DeleteByQuery;
+import org.apache.lucene.ocean.util.Constants;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.search.ExtendedFieldCache;
+import org.apache.lucene.search.HitCollector;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ExtendedFieldCache.LongParser;
+
+public abstract class Index {
+  private final IndexID id;
+  protected boolean isClosed = false;
+  private final TransactionSystem transactionSystem;
+  private boolean isReadOnly = false;
+  private boolean isDeleteOnly = false;
+
+  public Index(IndexID id, TransactionSystem transactionSystem) {
+    this.id = id;
+    this.transactionSystem = transactionSystem;
+  }
+
+  public void removeOldSnapshots(SortedList<Long,? extends IndexSnapshot> snapshotMap) {
+    Long last = snapshotMap.lastKey();
+    for (Iterator<Long> iterator = snapshotMap.keySet().iterator(); iterator.hasNext();) {
+      Long snapshotId = iterator.next();
+      if (!transactionSystem.snapshots.contains(snapshotId) && (last != null && !last.equals(snapshotId))) {
+        iterator.remove();
+      }
+    }
+  }
+
+  public void close() throws IOException {
+  }
+
+  public TransactionSystem getSystem() {
+    return transactionSystem;
+  }
+
+  public static IndexReader[] getIndexReaders(List<? extends IndexSnapshot> indexSnapshots) {
+    IndexReader[] indexReaders = new IndexReader[indexSnapshots.size()];
+    for (int x = 0; x < indexSnapshots.size(); x++) {
+      indexReaders[x] = indexSnapshots.get(x).getIndexReader();
+    }
+    return indexReaders;
+  }
+
+  public static Long getMaxSnapshotId(List<? extends IndexSnapshot> indexSnapshots) throws Exception {
+    List<Long> snapshotIdList = new ArrayList<Long>(indexSnapshots.size());
+    for (IndexSnapshot indexSnapshot : indexSnapshots) {
+      snapshotIdList.add(indexSnapshot.getMaxSnapshotId());
+    }
+    if (snapshotIdList.size() > 0) {
+      return Collections.max(snapshotIdList);
+    } else {
+      return null;
+    }
+  }
+
+  public static Long getMaxDocumentId(List<? extends IndexSnapshot> indexSnapshots) throws Exception {
+    List<Long> documentIdList = new ArrayList<Long>(indexSnapshots.size());
+    for (IndexSnapshot indexSnapshot : indexSnapshots) {
+      documentIdList.add(indexSnapshot.getMaxDocumentId());
+    }
+    if (documentIdList.size() > 0) {
+      return Collections.max(documentIdList);
+    } else {
+      return null;
+    }
+  }
+
+  public void setDeleteOnly(boolean isDeleteOnly) {
+    this.isDeleteOnly = isDeleteOnly;
+  }
+
+  public boolean isDeleteOnly() {
+    return isDeleteOnly;
+  }
+
+  public boolean isReadOnly() {
+    return isReadOnly;
+  }
+
+  public void setReadOnly(boolean isReadOnly) {
+    this.isReadOnly = isReadOnly;
+  }
+
+  protected DeletesResult applyDeletes(boolean deleteFromReader, Deletes deletes, Collection<Integer> deletedDocs, IndexReader indexReader)
+      throws CorruptIndexException, IOException, Exception {
+    long[] ids = ExtendedFieldCache.EXT_DEFAULT.getLongs(indexReader, Constants.DOCUMENTID, new LongParser() {
+      public long parseLong(String string) {
+        return Util.longFromEncoded(string);
+      }
+    });
+    DeletesResult deletesResult = new DeletesResult(getId());
+    if (deletes.hasDeletes()) {
+      if (deletes.hasDocIds()) {
+        int docsDeleted = 0;
+        long[] docIdsArray = deletes.getDocIds();
+        for (long id : docIdsArray) {
+          int doc = Util.getDoc(Constants.DOCUMENTID, id, indexReader);
+          if (doc >= 0) {
+            if (deleteFromReader) indexReader.deleteDocument(doc);
+            if (deletedDocs != null) deletedDocs.add(doc);
+            docsDeleted++;
+          }
+        }
+        deletesResult.add(new DeletesResult.Result(docIdsArray, docsDeleted));
+      } else {
+        List<Long> docIds = deletesResult.getDocIds();
+        if (deletes.hasTerms()) {
+          List<Term> terms = deletes.getTerms();
+          for (Term term : terms) {
+            int docsDeleted = deleteByTerm(deleteFromReader, term, deletedDocs, docIds, ids, indexReader);
+            deletesResult.add(new DeletesResult.Result(term, docsDeleted));
+          }
+        }
+        if (deletes.hasDeleteByQueries()) {
+          List<DeleteByQuery> deleteByQueries = deletes.getDeleteByQueries();
+          for (DeleteByQuery deleteByQuery : deleteByQueries) {
+            int docsDeleted = deleteByQuery(deleteFromReader, deleteByQuery, deletedDocs, ids, docIds, indexReader);
+            deletesResult.add(new DeletesResult.Result(deleteByQuery, docsDeleted));
+          }
+        }
+      }
+    }
+    return deletesResult;
+  }
+
+  protected int deleteByTerm(boolean deleteFromReader, Term term, Collection<Integer> deletedDocs, List<Long> docIds, long[] ids,
+      IndexReader indexReader) throws IOException {
+    TermDocs docs = indexReader.termDocs(term);
+    if (docs == null)
+      return 0;
+    int n = 0;
+    try {
+      while (docs.next()) {
+        int doc = docs.doc();
+        if (deletedDocs != null)
+          deletedDocs.add(doc);
+        Long docId = ids[doc];
+        docIds.add(docId);
+        if (deleteFromReader)
+          indexReader.deleteDocument(doc);
+        n++;
+      }
+    } finally {
+      docs.close();
+    }
+    return n;
+  }
+
+  protected int deleteByQuery(final boolean deleteFromReader, DeleteByQuery deleteByQuery, final Collection<Integer> deletedDocs,
+      final long[] ids, final List<Long> deletedIds, final IndexReader indexReader) throws IOException {
+    Query query = deleteByQuery.getQuery();
+    IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+    final int[] numDeleted = new int[1];
+    indexSearcher.search(query, new HitCollector() {
+      public void collect(int doc, float score) {
+        try {
+          if (deleteFromReader)
+            indexReader.deleteDocument(doc);
+          if (deletedDocs != null)
+            deletedDocs.add(doc);
+          Long docId = ids[doc];
+          deletedIds.add(docId);
+          numDeleted[0]++;
+        } catch (StaleReaderException staleReaderException) {
+          throw new RuntimeException(staleReaderException);
+        } catch (IOException ioException) {
+          throw new RuntimeException(ioException);
+        }
+      }
+    });
+    return numDeleted[0];
+  }
+
+  public static class IndexException extends Exception {
+    public IndexException(String message) {
+      super(message);
+    }
+
+    public IndexException(String message, Exception exception) {
+      super(message, exception);
+    }
+  }
+
+  public static class IndexNeverCompletedCopyException extends IndexException {
+    public IndexNeverCompletedCopyException(String message) {
+      super(message);
+    }
+  }
+
+  public abstract boolean rollback(Long snapshotId) throws Exception;
+
+  public abstract IndexSnapshot getIndexSnapshot(Long snapshotID);
+
+  public abstract IndexSnapshot getLatestIndexSnapshot();
+
+  public abstract class IndexSnapshot {
+    protected final Long snapshotId;
+    private Long maxSnapshotId;
+    private Long maxDocumentId;
+    private Long minSnapshotId;
+    private Long minDocumentId;
+
+    public IndexSnapshot(Long snapshotId) {
+      this.snapshotId = snapshotId;
+    }
+
+    public abstract int deletedDoc();
+
+    public abstract int maxDoc();
+
+    public Long getSnapshotId() {
+      return snapshotId;
+    }
+
+    public Index getIndex() {
+      return Index.this;
+    }
+
+    /**
+     * Iterates terms until next field is reached, returns text
+     * 
+     * @param field
+     * @return
+     * @throws Exception
+     */
+    public String getMax(String field) throws IOException {
+      IndexReader indexReader = getIndexReader();
+      TermEnum termEnum = indexReader.terms(new Term(field, ""));
+      try {
+        String text = null;
+        do {
+          Term term = termEnum.term();
+          if (term == null || term.field() != field)
+            break;
+          text = term.text();
+        } while (termEnum.next());
+        return text;
+      } finally {
+        termEnum.close();
+      }
+    }
+    
+    public String getMin(String field) throws IOException {
+      IndexReader indexReader = getIndexReader();
+      TermEnum termEnum = indexReader.terms(new Term(field, ""));
+      try {
+        do {
+          Term term = termEnum.term();
+          if (term == null || term.field() != field)
+            break;
+          return term.text();
+        } while (termEnum.next());
+        return null;
+      } finally {
+        termEnum.close();
+      }
+    }
+    
+    public Long getMinDocumentId() throws IOException {
+      if (minDocumentId == null) {
+        String string = getMin(Constants.DOCUMENTID);
+        if (string == null)
+          return null;
+        minDocumentId = Util.longFromEncoded(string);
+      }
+      return minDocumentId;
+    }
+    
+    public Long getMinSnapshotId() throws IOException {
+      if (minSnapshotId == null) {
+        String string = getMin(Constants.SNAPSHOTID);
+        if (string == null)
+          return null;
+        minSnapshotId = Util.longFromEncoded(string);
+      }
+      return minSnapshotId;
+    }
+    
+    public Long getMaxSnapshotId() throws IOException {
+      if (maxSnapshotId == null) {
+        String string = getMax(Constants.SNAPSHOTID);
+        if (string == null)
+          return null;
+        maxSnapshotId = Util.longFromEncoded(string);
+      }
+      return maxSnapshotId;
+    }
+
+    public Long getMaxDocumentId() throws IOException {
+      if (maxDocumentId == null) {
+        String string = getMax(Constants.DOCUMENTID);
+        if (string == null)
+          return null;
+        maxDocumentId = Util.longFromEncoded(string);
+      }
+      return maxDocumentId;
+    }
+
+    public abstract IndexReader getIndexReader();
+  }
+
+  public static class MergedDocMap {
+    private Map<IndexSnapshot,int[]> oldMap; // maps old doc to new doc
+    private RI[] merged; // maps new doc to old doc and reader
+
+    public MergedDocMap(List<? extends IndexSnapshot> indexSnapshots) {
+      int newMaxDoc = 0;
+      for (IndexSnapshot indexSnapshot : indexSnapshots) {
+        newMaxDoc += indexSnapshot.getIndexReader().numDocs();
+      }
+      oldMap = new HashMap<IndexSnapshot,int[]>(indexSnapshots.size());
+      RI[] merged = new RI[newMaxDoc];
+      int pos = 0;
+      for (IndexSnapshot indexSnapshot : indexSnapshots) {
+        IndexReader indexReader = indexSnapshot.getIndexReader();
+        int maxDoc = indexReader.maxDoc();
+        int[] old = new int[maxDoc];
+        for (int x = 0; x < maxDoc; x++) {
+          if (indexReader.hasDeletions() && indexReader.isDeleted(x)) {
+            merged[pos] = null;
+            old[x] = -1;
+          } else {
+            merged[pos] = new RI(x, indexSnapshot);
+            old[x] = pos;
+            pos++;
+          }
+        }
+        oldMap.put(indexSnapshot, old);
+      }
+    }
+
+    public static class RI {
+      public int doc;
+      public IndexSnapshot oldIndexSnapshot;
+
+      public RI(int doc, IndexSnapshot oldIndexSnapshot) {
+        this.doc = doc;
+        this.oldIndexSnapshot = oldIndexSnapshot;
+      }
+    }
+
+    public Map<IndexSnapshot,int[]> getOldMap() {
+      return oldMap;
+    }
+
+    public RI[] getMerged() {
+      return merged;
+    }
+  }
+
+  public boolean isClosed() {
+    return isClosed;
+  }
+
+  public IndexID getId() {
+    return id;
+  }
+
+  public abstract void commitNothing(Transaction transaction) throws IndexException, InterruptedException, IOException;
+
+  public abstract DeletesResult commitDeletes(Deletes deletes, Transaction transaction) throws Exception, IndexException,
+      InterruptedException, IOException;
+}
Index: ocean/src/org/apache/lucene/ocean/OceanConsole.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/OceanConsole.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/OceanConsole.java	(revision 0)
@@ -0,0 +1,11 @@
+package org.apache.lucene.ocean;
+
/**
 * Placeholder for an administrative console. Planned features (not yet
 * implemented):
 * <ul>
 *   <li>display indexes</li>
 *   <li>display index snapshots</li>
 *   <li>run merges</li>
 * </ul>
 */
public class OceanConsole {

}
Index: ocean/src/org/apache/lucene/ocean/SearcherPolicy.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/SearcherPolicy.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/SearcherPolicy.java	(revision 0)
@@ -0,0 +1,5 @@
+package org.apache.lucene.ocean;
+
/**
 * Base class for searcher execution policies; concrete subclasses select
 * single- or multi-threaded search. Carries no state of its own.
 */
public class SearcherPolicy {
  
}
Index: ocean/src/org/apache/lucene/ocean/MultiThreadSearcherPolicy.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/MultiThreadSearcherPolicy.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/MultiThreadSearcherPolicy.java	(revision 0)
@@ -0,0 +1,25 @@
+package org.apache.lucene.ocean;
+
+public class MultiThreadSearcherPolicy extends SearcherPolicy {
+  private final int minThreads;
+  private final int maxThreads;
+  private final int queueSize;
+  
+  public MultiThreadSearcherPolicy(int minThreads, int maxThreads, int queueSize) {
+    this.minThreads = minThreads;
+    this.maxThreads = maxThreads;
+    this.queueSize = queueSize;
+  } 
+  
+  public int getQueueSize() {
+    return queueSize;
+  }
+  
+  public int getMinThreads() {
+    return minThreads;
+  }
+
+  public int getMaxThreads() {
+    return maxThreads;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/Batch.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Batch.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Batch.java	(revision 0)
@@ -0,0 +1,215 @@
+package org.apache.lucene.ocean;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.io.Serializable;
+import java.util.Date;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * Stays in memory
+ * 
+ */
+public abstract class Batch {
+	protected Documents documents;
+	protected RAMDirectory ramDirectory;
+	protected Analyzer analyzer;
+	protected long schemaVersion;
+	protected Deletes deletes;
+	protected Date timestamp;
+	protected boolean isClosed = false;
+
+	public Batch() {
+		timestamp = new Date();
+	}
+	
+	public boolean hasRAMDirectory() {
+		return ramDirectory != null;
+	}
+	
+	public RAMDirectory getRamDirectory() {
+		return ramDirectory;
+	}
+
+	public long getSchemaVersion() {
+		return schemaVersion;
+	}
+	
+	public static class MasterBatch extends Batch {
+	  private TransactionSystem transactionSystem;
+	  
+	  public MasterBatch(TransactionSystem transactionSystem) {
+	  	this.transactionSystem = transactionSystem;
+	  }
+	  
+	  public void setRAMDirectory(RAMDirectory ramDirectory) {
+	    documents = null;
+	    if (isClosed) throw new RuntimeException("batch closed");
+	    this.ramDirectory = ramDirectory;
+	  }
+	  
+	  public void setAnalyzer(Analyzer analyzer) {
+	    this.analyzer = analyzer;
+	  }
+	  
+	  public void setDeletes(Deletes deletes) {
+	    if (isClosed) throw new RuntimeException("batch closed");
+	    this.deletes = deletes;
+	  }
+	  
+	  public void addDocument(Document document) {
+	    if (this.documents == null)
+	      this.documents = new Documents();
+	    this.documents.add(document);
+	  }
+	  
+	  public void addDocuments(Documents documents) {
+	    if (isClosed) throw new RuntimeException("batch closed");
+	    if (this.documents == null)
+	      this.documents = new Documents();
+	    this.documents.addAll(documents);
+	  }
+	  
+	  /**
+	  public Serializable getSerializableRamDirectory(Long id) {
+	  	assert ramDirectory != null;
+	  	return new SerializableRamDirectory(id, ramDirectory);
+	  }
+	  
+	  public Serializable getSerializableBatchDeletes(Long id) {
+	  	return new SerializableBatchDeletes(id, deletes);
+	  }
+	  
+	  public Serializable getSerializableBatchDocuments(Long id) {
+			return new SerializableBatchDocuments(id, documents); 
+		}
+	  **/
+	  public void commit() throws Exception {
+	    transactionSystem.commitBatch(this); 
+		}
+	}
+	
+	public static class SlaveBatch extends Batch {
+	  private Long id;
+	  
+	  public SlaveBatch(Long id, Documents documents, Deletes deletes) {
+	  	this.id = id;
+	  	this.documents = documents;
+	  	this.deletes = deletes;
+	  }
+	  
+	  public SlaveBatch(Long id, RAMDirectory ramDirectory, Deletes deletes) {
+	  	this.id = id;
+	  	this.ramDirectory = ramDirectory;
+	  	this.deletes = deletes;
+	  }
+	  
+	  public Long getId() {
+	  	return id;
+	  }
+	}
+	/**
+	public static class SerializableRamDirectory extends SerializableBatch implements Externalizable {
+		private static final long serialVersionUID = 1l;
+		private RAMDirectory ramDirectory;
+		
+		public SerializableRamDirectory(Long id, RAMDirectory ramDirectory) {
+			super(id);
+			this.ramDirectory = ramDirectory;
+		}
+    
+		// TODO: use native version not externalizable
+		public void readExternal(ObjectInput objectInput) throws IOException, ClassNotFoundException {
+			long objectVersion = objectInput.readLong();
+			ramDirectory = RamDirectorySerializer.deserialize(objectInput);
+		}
+		
+		public void writeExternal(ObjectOutput objectOutput) throws IOException {
+			objectOutput.writeLong(serialVersionUID);
+			RamDirectorySerializer.serialize(ramDirectory, objectOutput);
+		}
+		
+		public RAMDirectory getRamDirectory() {
+			return ramDirectory;
+		}
+	}
+	
+	public static class SerializableBatchDocuments extends SerializableBatch {
+		private static final long serialVersionUID = 1l;
+		private Documents documents;
+		
+		public SerializableBatchDocuments(Long id, Documents documents) {
+			super(id);
+			this.documents = documents;
+		}
+		
+		public Documents getDocuments() {
+			return documents;
+		}
+	}
+	
+	public static class SerializableBatchDeletes extends SerializableBatch {
+		private static final long serialVersionUID = 1l;
+		private Deletes deletes;
+		
+		public SerializableBatchDeletes(Long id, Deletes deletes) {
+			super(id);
+		}
+		
+		public Deletes getDeletes() {
+			return deletes;
+		}
+	}
+	
+	public abstract static class SerializableBatch implements Serializable {
+		private Long id;
+		
+		public SerializableBatch(Long id) {
+			this.id = id;
+		}
+		public Long getId() {
+			return id;
+		}
+	}
+	**/
+	public Analyzer getAnalyzer() {
+		return analyzer;
+	}
+  
+	public boolean hasDocuments() {
+		if (documents == null || documents.size() == 0)
+			return false;
+		else
+			return true;
+	}
+
+	public boolean hasDeletes() {
+		if (deletes == null || !deletes.hasDeletes()) {
+			return false;
+		} else {
+			return true;
+		}
+	}
+
+	// disallow any more additions
+	public void close() {
+		isClosed = true;
+	}
+
+	public Documents getDocuments() {
+		return documents;
+	}
+
+	public Deletes getDeletes() {
+		return deletes;
+	}
+
+	public Date getTimestamp() {
+		return timestamp;
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/Snapshots.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Snapshots.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Snapshots.java	(revision 0)
@@ -0,0 +1,211 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.ocean.util.XMLUtil;
+import org.jdom.Element;
+
+public class Snapshots {
+  private List<Snapshot> list = new ArrayList<Snapshot>();
+  private TransactionSystem system;
+  private ReentrantLock writeLock = new ReentrantLock();
+
+  public Snapshots(TransactionSystem system) {
+    this.system = system;
+  }
+
+  public void remove(int max, long durationMillis) {
+    writeLock.lock();
+    try {
+      if (list.size() > max) {
+        long now = System.currentTimeMillis();
+        int numToCheck = list.size() - max;
+        Iterator<Snapshot> iterator = list.iterator();
+        for (int x = 0; x < numToCheck; x++) {
+          Snapshot snapshot = iterator.next();
+          if ((snapshot.getTimestamp() + durationMillis) > now) {
+            iterator.remove();
+          }
+        }
+      }
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  /**
+   * Loads the ids from the file names rather than loading each xml file.
+   * 
+   * @param directory
+   * @return
+   * @throws Exception
+   */
+  public static List<BigDecimal> loadSnapshotInfoIds(LogDirectory directory) throws Exception {
+    List<BigDecimal> list = new ArrayList<BigDecimal>();
+    for (String file : directory.list()) {
+      if (directory.fileLength(file) > 0) {
+        String str = "snapshot_";
+        if (file.startsWith(str)) {
+          String main = file.substring(str.length(), file.lastIndexOf('.'));
+          String[] split = StringUtils.split(main, "_");
+          if (split.length > 1) {
+            String replace = main.replace('_', '.');
+            //System.out.println("replace: "+replace);
+            list.add(new BigDecimal(replace));
+          } else {
+            Long snapshotId = new Long(split[0]);
+            list.add(new BigDecimal(snapshotId));
+          }
+        }
+      }
+    }
+    Collections.sort(list);
+    return list;
+  }
+
+  public static List<SnapshotInfo> loadSnapshotInfos(LogDirectory directory) throws Exception {
+    List<SnapshotInfo> snapshotInfos = new ArrayList<SnapshotInfo>();
+    for (String file : directory.list()) {
+      if (directory.fileLength(file) > 0) {
+        String str = "snapshot_";
+        if (file.startsWith(str)) {
+          String main = file.substring(str.length(), file.lastIndexOf('.'));
+          String[] split = StringUtils.split(main, "_");
+          Long snapshotId = new Long(split[0]);
+          Integer version = new Integer(0);
+          if (split.length > 1)
+            version = new Integer(split[1]);
+          String xml = Util.getString(file, directory);
+          Element element = XMLUtil.parseElement(xml);
+          snapshotInfos.add(new SnapshotInfo(element));
+          // sorted.add(new BigDecimal(snapshotId + "." + version));
+        }
+      }
+    }
+    Collections.sort(snapshotInfos);
+    return snapshotInfos;
+  }
+
+  // TODO: load max id using loadSnapshotInfoIds, then load max snapshotinfo
+  public static SnapshotInfo loadMaxSnapshotInfo(LogDirectory directory) throws Exception {
+    List<SnapshotInfo> list = loadSnapshotInfos(directory);
+    if (list.size() == 0)
+      return null;
+    return Util.max(list);
+    // TreeSet<BigDecimal> sortedSet = new TreeSet<BigDecimal>();
+    /**
+     * List<BigDecimal> sorted = new ArrayList<BigDecimal>(); for (String file :
+     * directory.list()) { if (directory.fileLength(file) > 0) { String str =
+     * "snapshot_"; if (file.startsWith(str)) { String main =
+     * file.substring(str.length(), file.lastIndexOf('.')); String[] split =
+     * StringUtils.split(main, "_"); Long snapshotId = new Long(split[0]);
+     * Integer version = new Integer(0); if (split.length > 1) version = new
+     * Integer(split[1]); sorted.add(new BigDecimal(snapshotId + "." +
+     * version)); } } } BigDecimal maxId = null; if (sorted.size() > 0) maxId =
+     * Collections.max(sorted); if (maxId == null) return null; String fileName =
+     * Snapshot.getFileName(maxId); String xml = Util.getString(fileName,
+     * directory); Element element = XMLUtil.parseElement(xml); return new
+     * SnapshotInfo(element);
+     */
+  }
+
+  public Snapshot get(long snapshotId) {
+    List<Snapshot> snapshots = getForSnapshot(snapshotId);
+    return Util.max(snapshots);
+  }
+
+  public List<Snapshot> getForSnapshot(long snapshotId) {
+    List<Snapshot> inrange = new ArrayList<Snapshot>();
+    for (Snapshot snapshot : list) {
+      long l = snapshot.getId().toBigInteger().longValue();
+      if (l == snapshotId) {
+        inrange.add(snapshot);
+      }
+    }
+    return inrange;
+  }
+
+  public boolean contains(BigDecimal id) {
+    for (Snapshot s : list) {
+      if (s.getId().compareTo(id) == 0)
+        return true;
+    }
+    return false;
+  }
+
+  public boolean contains(Long snapshotId) {
+    return get(snapshotId) != null;
+  }
+
+  public boolean containsIndex(long indexid) {
+    for (Snapshot snapshot : list) {
+      if (snapshot.containsIndex(indexid))
+        return true;
+    }
+    return false;
+  }
+
+  private void remove(Snapshot snapshot) throws IOException {
+    Iterator<Snapshot> iterator = list.iterator();
+    while (iterator.hasNext()) {
+      Snapshot s = iterator.next();
+      if (s.getId().equals(snapshot.getId())) {
+        iterator.remove();
+        String file = Snapshot.getFileName(snapshot.getId());
+        system.directoryMap.getDirectory().deleteFile(file);
+      }
+    }
+
+  }
+
+  public Snapshot getLatestSnapshot() {
+    if (list.size() == 0)
+      return null;
+    return list.get(list.size() - 1);
+  }
+
+  void add(Snapshot snapshot, boolean createFile) throws Exception {
+    writeLock.lock();
+    try {
+      if (createFile) {
+        addCreateFile(snapshot);
+      } else {
+        list.add(snapshot);
+      }
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  private void addCreateFile(Snapshot snapshot) throws Exception {
+    BigDecimal id = snapshot.getId();
+    SnapshotInfo snapshotInfo = snapshot.getSnapshotInfo();
+    String fileName = Snapshot.getFileName(id);
+    LogDirectory directory = system.directoryMap.getDirectory();
+    if (directory.fileExists(fileName)) {
+      throw new IOException("fileName: " + fileName + " already exists");
+    }
+    RandomAccessFile output = directory.getOutput(fileName, true);
+    Element element = snapshotInfo.toElement();
+    String xml = XMLUtil.outputElement(element);
+    byte[] bytes = xml.getBytes("UTF-8");
+    list.add(snapshot);
+    output.write(bytes, 0, bytes.length);
+    output.close();
+    // remove previous versions for this id
+    // SortedMap<BigDecimal,Snapshot> headMap = snapshotMap.headMap(id);
+    // for (Snapshot removeSnapshot : headMap.values()) {
+    // remove(removeSnapshot);
+    // }
+    Collections.sort(list);
+  }
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/DeletesResult.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/DeletesResult.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/DeletesResult.java	(revision 0)
@@ -0,0 +1,51 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+public class DeletesResult {
+  private IndexID indexId;
+  private List<Result> results = new ArrayList<Result>();
+  private int numDeleted = 0;
+  private List<Long> docIds = new LinkedList<Long>(); 
+
+  public DeletesResult(IndexID indexId) {
+    this.indexId = indexId;
+  }
+
+  public List<Long> getDocIds() {
+    return docIds;
+  }
+
+  public IndexID getIndexId() {
+    return indexId;
+  }
+
+  public void add(Result result) {
+    numDeleted += result.getNumDeleted();
+    results.add(result);
+  }
+
+  public int getNumDeleted() {
+    return numDeleted;
+  }
+
+  public static class Result {
+    private Object delete;
+    private int numDeleted;
+
+    public Result(Object delete, int numDeleted) {
+      this.delete = delete;
+      this.numDeleted = numDeleted;
+    }
+
+    public Object getDelete() {
+      return delete;
+    }
+
+    public int getNumDeleted() {
+      return numDeleted;
+    }
+  }
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/IndexID.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/IndexID.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/IndexID.java	(revision 0)
@@ -0,0 +1,52 @@
+package org.apache.lucene.ocean;
+
+import org.apache.commons.lang.builder.CompareToBuilder;
+
+public class IndexID implements Comparable<IndexID> {
+  public final Long id;
+  public final String type;
+  
+  public IndexID(Long id, String type) {
+    this.id = id;
+    this.type = type;
+  }
+  
+  public String toString() {
+    return type+":"+id;
+  }
+  
+  public int compareTo(IndexID other) {
+    return new CompareToBuilder().append(id, other.id).append(type, type).toComparison();
+  }
+  
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((id == null) ? 0 : id.hashCode());
+    result = prime * result + ((type == null) ? 0 : type.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    final IndexID other = (IndexID) obj;
+    if (id == null) {
+      if (other.id != null)
+        return false;
+    } else if (!id.equals(other.id))
+      return false;
+    if (type == null) {
+      if (other.type != null)
+        return false;
+    } else if (!type.equals(other.type))
+      return false;
+    return true;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/Deletes.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Deletes.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Deletes.java	(revision 0)
@@ -0,0 +1,121 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang.SerializationUtils;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+
+/**
+ * Encapsulates deletes for a transaction
+ * 
+ */
+public class Deletes implements Serializable {
+  public static final int DOC_IDS = 1;
+  public static final int TERMS = 2;
+  private static final long serialVersionUID = 1l;
+  private List<Term> terms = new ArrayList<Term>();
+  private List<DeleteByQuery> deleteByQueries = new ArrayList<DeleteByQuery>();
+  private long[] docIds;
+
+  /**
+   * public Deletes(IndexInput input) throws IOException { int docIdsLength =
+   * input.readVInt(); if (docIdsLength > 0) { docIds = new long[docIdsLength];
+   * for (int x=0; x < docIdsLength; x++) { docIds[x] = input.readVLong(); } }
+   * int termsLength = input.readVInt(); for (int x=0; x < termsLength; x++) {
+   * String field = input.readString(); String text = input.readString();
+   * terms.add(new Term(field, text)); } int dqLength = input.readVInt(); for
+   * (int x=0; x < dqLength; x++) { int blen = input.readVInt(); byte[] bytes =
+   * new byte[blen]; input.readBytes(bytes, 0, blen);
+   * deleteByQueries.add((DeleteByQuery)SerializationUtils.deserialize(bytes)); } }
+   */
+  public Deletes() {
+  }
+
+  /**
+   * public void writeTo(IndexOutput output) throws IOException { if (docIds ==
+   * null) { output.writeVInt(0); } else { output.writeVInt(docIds.length); for
+   * (int x = 0; x < docIds.length; x++) { output.writeVLong(docIds[x]); } }
+   * output.writeVInt(terms.size()); for (int x = 0; x < terms.size(); x++) {
+   * output.writeString(terms.get(x).field());
+   * output.writeString(terms.get(x).text()); }
+   * output.writeVInt(deleteByQueries.size()); for (int x = 0; x <
+   * deleteByQueries.size(); x++) { byte[] bytes =
+   * SerializationUtils.serialize(deleteByQueries.get(x));
+   * output.writeVInt(bytes.length); output.writeBytes(bytes, bytes.length); } }
+   */
+  public void merge(Deletes deletes) {
+    terms.addAll(deletes.getTerms());
+    deleteByQueries.addAll(deletes.getDeleteByQueries());
+  }
+
+  public boolean hasDocIds() {
+    return docIds != null;
+  }
+
+  void setDocIds(long[] docIds) {
+    assert docIds == null;
+    this.docIds = docIds;
+  }
+
+  public void addTerm(Term term) {
+    terms.add(term);
+  }
+
+  public void addQuery(Query query) {
+    deleteByQueries.add(new DeleteByQuery(query));
+  }
+
+  public void addDeleteByQuery(DeleteByQuery deleteByQuery) {
+    deleteByQueries.add(deleteByQuery);
+  }
+
+  public long[] getDocIds() {
+    return docIds;
+  }
+
+  public boolean hasDeletes() {
+    if (terms.size() > 0 || deleteByQueries.size() > 0 || docIds != null && docIds.length > 0) {
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  public boolean hasDeleteByQueries() {
+    if (deleteByQueries != null && deleteByQueries.size() > 0)
+      return true;
+    return false;
+  }
+
+  public List<DeleteByQuery> getDeleteByQueries() {
+    return deleteByQueries;
+  }
+
+  public static class DeleteByQuery implements Serializable {
+    private Query query;
+
+    public DeleteByQuery(Query query) {
+      this.query = query;
+    }
+
+    public Query getQuery() {
+      return query;
+    }
+  }
+
+  public List<Term> getTerms() {
+    return terms;
+  }
+
+  public boolean hasTerms() {
+    if (terms != null && terms.size() > 0)
+      return true;
+    return false;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/CommitResult.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/CommitResult.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/CommitResult.java	(revision 0)
@@ -0,0 +1,55 @@
+package org.apache.lucene.ocean;
+
+import java.util.List;
+
+public class CommitResult {
+  private Long snapshotId;
+  private List<DeletesResult> deletesResults;
+  private Integer numAdded;
+  private IndexID addedIndexId;
+  private int numDeleted = 0;
+
+  public CommitResult(Long snapshotId, List<DeletesResult> deletesResults, Integer numAdded, IndexID addedIndexId) {
+    this.snapshotId = snapshotId;
+    this.deletesResults = deletesResults;
+    this.numAdded = numAdded;
+    this.addedIndexId = addedIndexId;
+    if (deletesResults != null) {
+      for (DeletesResult deletesResult : deletesResults) {
+        numDeleted += deletesResult.getNumDeleted();
+      }
+    }
+  }
+
+  public int getNumDeleted() {
+    return numDeleted;
+  }
+  
+  public int getNumDocChanges() {
+    int numChanged = 0;
+    if (numAdded != null)
+      numChanged += numAdded;
+    if (deletesResults != null) {
+      for (DeletesResult deletesResult : deletesResults) {
+        numChanged += deletesResult.getNumDeleted();
+      }
+    }
+    return numChanged;
+  }
+
+  public Long getSnapshotId() {
+    return snapshotId;
+  }
+
+  public List<DeletesResult> getDeletesResults() {
+    return deletesResults;
+  }
+
+  public Integer getNumAdded() {
+    return numAdded;
+  }
+
+  public IndexID getAddedIndexId() {
+    return addedIndexId;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/IndexCreator.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/IndexCreator.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/IndexCreator.java	(revision 0)
@@ -0,0 +1,142 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * Allows creation of an index using multiple threads by feeding documents into
+ * a BlockingQueue.
+ * 
+ */
+// TODO: after create called make object unusable
+public class IndexCreator {
+  private Directory directory;
+  private long maxSize;
+  private int threads;
+  private ExecutorService threadPool;
+  private IndexWriter indexWriter;
+  private boolean isFinished;
+  private List<Future<Object>> futures = new ArrayList<Future<Object>>();
+  private Analyzer analyzer;
+  private BlockingQueue<Add> queue;
+
+  public IndexCreator(Directory directory, long maxSize, int threads, Analyzer analyzer, ExecutorService threadPool) {
+    this.directory = directory;
+    this.maxSize = maxSize;
+    this.threads = threads;
+    this.analyzer = analyzer;
+    this.threadPool = threadPool;
+    isFinished = false;
+  }
+
+  public static class Add {
+    private Document document;
+
+    // private RAMDirectory ramDirectory;
+
+    // public Add(RAMDirectory ramDirectory) {
+    // this.ramDirectory = ramDirectory;
+    // }
+
+    public Add(Document document) {
+      this.document = document;
+    }
+
+    // public RAMDirectory getRamDirectory() {
+    // return ramDirectory;
+    // }
+
+    public Document getDocument() {
+      return document;
+    }
+  }
+
+  public void start(BlockingQueue<Add> queue) throws Exception {
+    this.queue = queue;
+    indexWriter = new IndexWriter(directory, false, analyzer, true);
+    indexWriter.setUseCompoundFile(true);
+    indexWriter.setRAMBufferSizeMB(500.0); // set impossibly high to never be
+                                            // triggered, setting both to
+                                            // DISABLE_AUTO_FLUSH causes an
+                                            // exception
+    indexWriter.setMaxBufferedDocs(IndexWriter.DISABLE_AUTO_FLUSH);
+    //List<Callable<Object>> callables = new ArrayList<Callable<Object>>(threads);
+    for (int x = 0; x < threads; x++) {
+      //callables.add(new Task(queue));
+      futures.add(threadPool.submit(new Task(queue)));
+    }
+    //futures = threadPool.invokeAll(callables);
+    
+  }
+
+  public void create() throws Exception {
+    while (queue.peek() != null) { 
+      Thread.sleep(5);
+    }
+    setFinished(true);
+    try {
+      for (Future<Object> future : futures) {
+        if (future.isDone()) {
+          try {
+            future.get();
+          } catch (ExecutionException executionException) {
+            Throwable cause = executionException.getCause();
+            if (cause instanceof Exception) {
+              throw (Exception) cause;
+            } else {
+              throw new Exception(cause);
+            }
+          }
+        }
+        Thread.sleep(10);
+      }
+      indexWriter.optimize(); // should not be necessary
+    } finally {
+      indexWriter.close();
+    }
+  }
+
+  public void setFinished(boolean isFinished) {
+    this.isFinished = isFinished;
+  }
+
+  private boolean isFinished() {
+    if (isFinished)
+      return true;
+    if (indexWriter.ramSizeInBytes() >= maxSize) {
+      isFinished = true;
+    }
+    return isFinished;
+  }
+
+  public class Task implements Callable {
+    private BlockingQueue<Add> queue;
+
+    public Task(BlockingQueue<Add> queue) {
+      this.queue = queue;
+    }
+
+    public Object call() throws Exception {
+      while (!isFinished()) {
+        Add add = queue.poll(3, TimeUnit.MILLISECONDS);
+        if (add != null) {
+          Document document = add.getDocument();
+          indexWriter.addDocument(document, analyzer);
+        }
+      }
+      return null;
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/FSDirectoryMap.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/FSDirectoryMap.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/FSDirectoryMap.java	(revision 0)
@@ -0,0 +1,120 @@
+package org.apache.lucene.ocean;
+
+import java.io.File;
+import java.io.IOException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.Lock;
+import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.NativeFSLockFactory;
+
+public class FSDirectoryMap extends DirectoryMap {
+  public static final String WRITE_LOCK_NAME = "write.lock";
+  private Map<String,FSDirectory> map = new HashMap<String,FSDirectory>();
+  private File fileDirectory;
+  private LogDirectory rootDirectory;
+  private LogDirectory logDirectory;
+  private static MessageDigest DIGESTER;
+  private static final char[] HEX_DIGITS =
+  {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
+  private NativeFSLockFactory lockFactory;
+  
+  static {
+    try {
+      DIGESTER = MessageDigest.getInstance("MD5");
+    } catch (NoSuchAlgorithmException e) {
+        throw new RuntimeException(e.toString(), e);
+    }
+  }
+  
+  public FSDirectoryMap(File fileDirectory, String logDirectoryName) throws IOException {
+    this.fileDirectory = fileDirectory;
+    Util.mkdir(fileDirectory);
+    
+    lockFactory = new NativeFSLockFactory(fileDirectory);
+    //lockFactory.clearLock(WRITE_LOCK_NAME);
+    lockFactory.setLockPrefix(getLockID());
+    Lock lock = lockFactory.makeLock(WRITE_LOCK_NAME);
+    boolean obtained = lock.obtain(1000*5);
+    System.out.println("lock obtained: "+obtained);
+    if (!obtained) throw new LockObtainFailedException("Index locked for write: " + lock);
+    
+    rootDirectory = new FSLogDirectory(fileDirectory);
+    for (File file : fileDirectory.listFiles()) {
+      if (file.isDirectory() && !file.getName().equals(logDirectoryName)) {
+        FSDirectory dir = FSDirectory.getDirectory(file);
+        map.put(file.getName(), dir);
+      }
+    }
+    logDirectory = new FSLogDirectory(new File(fileDirectory, logDirectoryName));
+  }
+  
+  public String getLockID() {
+    String dirName;                               // name to be hashed
+    try {
+      dirName = fileDirectory.getCanonicalPath();
+    } catch (IOException e) {
+      throw new RuntimeException(e.toString(), e);
+    }
+
+    byte digest[];
+    synchronized (DIGESTER) {
+      digest = DIGESTER.digest(dirName.getBytes());
+    }
+    StringBuilder buf = new StringBuilder();
+    buf.append("ocean-");
+    for (int i = 0; i < digest.length; i++) {
+      int b = digest[i];
+      buf.append(HEX_DIGITS[(b >> 4) & 0xf]);
+      buf.append(HEX_DIGITS[b & 0xf]);
+    }
+    return buf.toString();
+  }
+  
+  public LogDirectory getLogDirectory() {
+    return logDirectory;
+  }
+  
+  public LogDirectory getDirectory() {
+    return rootDirectory;
+  }
+  
+  public Directory create(String name) throws IOException {
+    Directory directory = FSDirectory.getDirectory(new File(fileDirectory, name));
+    directory.setLockFactory(new NativeFSLockFactory(fileDirectory));
+    IndexWriter.unlock(directory);
+    return directory;
+  }
+
+  public void delete(String name) throws IOException {
+    FSDirectory directory = get(name);
+    directory.close();
+    File file = directory.getFile();
+    FileUtils.deleteDirectory(file);
+    map.remove(name);
+  }
+
+  public FSDirectory get(String name) throws IOException {
+    return map.get(name);
+  }
+
+  public String[] list() throws IOException {
+    int x = 0;
+    String[] array = new String[map.size()];
+    for (String string : map.keySet()) {
+      array[x] = string;
+      x++;
+    }
+    return array;
+  }
+  
+  
+}
Index: ocean/src/org/apache/lucene/ocean/RamDirectorySerializer.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/RamDirectorySerializer.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/RamDirectorySerializer.java	(revision 0)
@@ -0,0 +1,54 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.RAMDirectory;
+
+public class RamDirectorySerializer {
+	static final int BUFFER_SIZE = 16384;
+	
+  public RamDirectorySerializer() {
+  }
+  
+  public static RAMDirectory deserialize(IndexInput input) throws IOException {
+  	int numFiles = input.readVInt();
+  	RAMDirectory ramDirectory = new RAMDirectory();
+  	byte[] buffer = new byte[BUFFER_SIZE];
+  	for (int x=0; x < numFiles; x++) {
+  		String file = input.readString();
+  		int length = input.readVInt();
+  		IndexOutput indexOutput = ramDirectory.createOutput(file);
+  		int readCount = 0;
+      while (readCount < length) {
+        int toRead = readCount + BUFFER_SIZE > length ? (int)(length - readCount) : BUFFER_SIZE;
+        input.readBytes(buffer, 0, toRead);//readFully(buffer, 0, toRead);
+        indexOutput.writeBytes(buffer, toRead);
+        readCount += toRead;
+      }
+      indexOutput.close();
+  	}
+  	return ramDirectory;
+  }
+  
+  public static void serialize(RAMDirectory ramDirectory, IndexOutput output) throws IOException {
+  	String[] files = ramDirectory.list();
+  	output.writeVInt(files.length);
+  	byte[] buffer = new byte[BUFFER_SIZE];
+  	for (String file : files) {
+  		int length = (int)ramDirectory.fileLength(file);
+  		output.writeString(file);
+  		output.writeVInt(length);
+  		IndexInput indexInput = ramDirectory.openInput(file);
+  		int readCount = 0;
+      while (readCount < length) {
+        int toRead = readCount + BUFFER_SIZE > length ? (int)(length - readCount) : BUFFER_SIZE;
+        indexInput.readBytes(buffer, 0, toRead);
+        output.writeBytes(buffer, toRead);//write(buffer, 0, toRead);
+        readCount += toRead;
+      }
+      indexInput.close();
+  	}
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/Batch.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Batch.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Batch.java	(revision 0)
@@ -0,0 +1,215 @@
+package org.apache.lucene.ocean;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.io.Serializable;
+import java.util.Date;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * Stays in memory
+ * 
+ */
+public abstract class Batch {
+	protected Documents documents;
+	protected RAMDirectory ramDirectory;
+	protected Analyzer analyzer;
+	protected long schemaVersion;
+	protected Deletes deletes;
+	protected Date timestamp;
+	protected boolean isClosed = false;
+
+	public Batch() {
+		timestamp = new Date();
+	}
+	
+	public boolean hasRAMDirectory() {
+		return ramDirectory != null;
+	}
+	
+	public RAMDirectory getRamDirectory() {
+		return ramDirectory;
+	}
+
+	public long getSchemaVersion() {
+		return schemaVersion;
+	}
+	
+	public static class MasterBatch extends Batch {
+	  private TransactionSystem transactionSystem;
+	  
+	  public MasterBatch(TransactionSystem transactionSystem) {
+	  	this.transactionSystem = transactionSystem;
+	  }
+	  
+	  public void setRAMDirectory(RAMDirectory ramDirectory) {
+	    documents = null;
+	    if (isClosed) throw new RuntimeException("batch closed");
+	    this.ramDirectory = ramDirectory;
+	  }
+	  
+	  public void setAnalyzer(Analyzer analyzer) {
+	    this.analyzer = analyzer;
+	  }
+	  
+	  public void setDeletes(Deletes deletes) {
+	    if (isClosed) throw new RuntimeException("batch closed");
+	    this.deletes = deletes;
+	  }
+	  
+	  public void addDocument(Document document) {
+	    if (this.documents == null)
+	      this.documents = new Documents();
+	    this.documents.add(document);
+	  }
+	  
+	  public void addDocuments(Documents documents) {
+	    if (isClosed) throw new RuntimeException("batch closed");
+	    if (this.documents == null)
+	      this.documents = new Documents();
+	    this.documents.addAll(documents);
+	  }
+	  
+	  /**
+	  public Serializable getSerializableRamDirectory(Long id) {
+	  	assert ramDirectory != null;
+	  	return new SerializableRamDirectory(id, ramDirectory);
+	  }
+	  
+	  public Serializable getSerializableBatchDeletes(Long id) {
+	  	return new SerializableBatchDeletes(id, deletes);
+	  }
+	  
+	  public Serializable getSerializableBatchDocuments(Long id) {
+			return new SerializableBatchDocuments(id, documents); 
+		}
+	  **/
+	  public void commit() throws Exception {
+	    transactionSystem.commitBatch(this); 
+		}
+	}
+	
+	public static class SlaveBatch extends Batch {
+	  private Long id;
+	  
+	  public SlaveBatch(Long id, Documents documents, Deletes deletes) {
+	  	this.id = id;
+	  	this.documents = documents;
+	  	this.deletes = deletes;
+	  }
+	  
+	  public SlaveBatch(Long id, RAMDirectory ramDirectory, Deletes deletes) {
+	  	this.id = id;
+	  	this.ramDirectory = ramDirectory;
+	  	this.deletes = deletes;
+	  }
+	  
+	  public Long getId() {
+	  	return id;
+	  }
+	}
+	/**
+	public static class SerializableRamDirectory extends SerializableBatch implements Externalizable {
+		private static final long serialVersionUID = 1l;
+		private RAMDirectory ramDirectory;
+		
+		public SerializableRamDirectory(Long id, RAMDirectory ramDirectory) {
+			super(id);
+			this.ramDirectory = ramDirectory;
+		}
+    
+		// TODO: use native version not externalizable
+		public void readExternal(ObjectInput objectInput) throws IOException, ClassNotFoundException {
+			long objectVersion = objectInput.readLong();
+			ramDirectory = RamDirectorySerializer.deserialize(objectInput);
+		}
+		
+		public void writeExternal(ObjectOutput objectOutput) throws IOException {
+			objectOutput.writeLong(serialVersionUID);
+			RamDirectorySerializer.serialize(ramDirectory, objectOutput);
+		}
+		
+		public RAMDirectory getRamDirectory() {
+			return ramDirectory;
+		}
+	}
+	
+	public static class SerializableBatchDocuments extends SerializableBatch {
+		private static final long serialVersionUID = 1l;
+		private Documents documents;
+		
+		public SerializableBatchDocuments(Long id, Documents documents) {
+			super(id);
+			this.documents = documents;
+		}
+		
+		public Documents getDocuments() {
+			return documents;
+		}
+	}
+	
+	public static class SerializableBatchDeletes extends SerializableBatch {
+		private static final long serialVersionUID = 1l;
+		private Deletes deletes;
+		
+		public SerializableBatchDeletes(Long id, Deletes deletes) {
+			super(id);
+		}
+		
+		public Deletes getDeletes() {
+			return deletes;
+		}
+	}
+	
+	public abstract static class SerializableBatch implements Serializable {
+		private Long id;
+		
+		public SerializableBatch(Long id) {
+			this.id = id;
+		}
+		public Long getId() {
+			return id;
+		}
+	}
+	**/
+	public Analyzer getAnalyzer() {
+		return analyzer;
+	}
+  
+	public boolean hasDocuments() {
+		if (documents == null || documents.size() == 0)
+			return false;
+		else
+			return true;
+	}
+
+	public boolean hasDeletes() {
+		if (deletes == null || !deletes.hasDeletes()) {
+			return false;
+		} else {
+			return true;
+		}
+	}
+
+	// disallow any more additions
+	public void close() {
+		isClosed = true;
+	}
+
+	public Documents getDocuments() {
+		return documents;
+	}
+
+	public Deletes getDeletes() {
+		return deletes;
+	}
+
+	public Date getTimestamp() {
+		return timestamp;
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/CommitResult.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/CommitResult.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/CommitResult.java	(revision 0)
@@ -0,0 +1,55 @@
+package org.apache.lucene.ocean;
+
+import java.util.List;
+
+/**
+ * Immutable summary of a committed transaction: the snapshot it produced,
+ * the per-index delete results, and the count/destination of added documents.
+ */
+public class CommitResult {
+  private Long snapshotId;
+  private List<DeletesResult> deletesResults;
+  private Integer numAdded;
+  private IndexID addedIndexId;
+  private int numDeleted = 0;
+
+  /**
+   * @param snapshotId id of the snapshot created by the commit
+   * @param deletesResults per-index delete outcomes; may be null
+   * @param numAdded number of documents added; may be null when none were
+   * @param addedIndexId index that received the added documents
+   */
+  public CommitResult(Long snapshotId, List<DeletesResult> deletesResults, Integer numAdded, IndexID addedIndexId) {
+    this.snapshotId = snapshotId;
+    this.deletesResults = deletesResults;
+    this.numAdded = numAdded;
+    this.addedIndexId = addedIndexId;
+    if (deletesResults != null) {
+      // Pre-compute the delete total once, at construction time.
+      for (DeletesResult result : deletesResults) {
+        this.numDeleted += result.getNumDeleted();
+      }
+    }
+  }
+
+  /** Total number of documents deleted across all indexes. */
+  public int getNumDeleted() {
+    return numDeleted;
+  }
+
+  /** Sum of documents added and deleted by this commit. */
+  public int getNumDocChanges() {
+    int numChanged = (numAdded == null) ? 0 : numAdded.intValue();
+    if (deletesResults != null) {
+      for (DeletesResult result : deletesResults) {
+        numChanged += result.getNumDeleted();
+      }
+    }
+    return numChanged;
+  }
+
+  /** Id of the snapshot produced by the commit. */
+  public Long getSnapshotId() {
+    return snapshotId;
+  }
+
+  /** Per-index delete results; may be null when nothing was deleted. */
+  public List<DeletesResult> getDeletesResults() {
+    return deletesResults;
+  }
+
+  /** Number of documents added, or null when the commit added none. */
+  public Integer getNumAdded() {
+    return numAdded;
+  }
+
+  /** Index the added documents went into. */
+  public IndexID getAddedIndexId() {
+    return addedIndexId;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/Deletes.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Deletes.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Deletes.java	(revision 0)
@@ -0,0 +1,121 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang.SerializationUtils;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+
+/**
+ * Encapsulates deletes for a transaction
+ * 
+ */
+public class Deletes implements Serializable {
+  // Discriminator constants for the two delete representations.
+  public static final int DOC_IDS = 1;
+  public static final int TERMS = 2;
+  private static final long serialVersionUID = 1l;
+  private List<Term> terms = new ArrayList<Term>();
+  private List<DeleteByQuery> deleteByQueries = new ArrayList<DeleteByQuery>();
+  // Assigned at most once via setDocIds; remains null until then.
+  private long[] docIds;
+
+  /**
+   * public Deletes(IndexInput input) throws IOException { int docIdsLength =
+   * input.readVInt(); if (docIdsLength > 0) { docIds = new long[docIdsLength];
+   * for (int x=0; x < docIdsLength; x++) { docIds[x] = input.readVLong(); } }
+   * int termsLength = input.readVInt(); for (int x=0; x < termsLength; x++) {
+   * String field = input.readString(); String text = input.readString();
+   * terms.add(new Term(field, text)); } int dqLength = input.readVInt(); for
+   * (int x=0; x < dqLength; x++) { int blen = input.readVInt(); byte[] bytes =
+   * new byte[blen]; input.readBytes(bytes, 0, blen);
+   * deleteByQueries.add((DeleteByQuery)SerializationUtils.deserialize(bytes)); } }
+   */
+  public Deletes() {
+  }
+
+  /**
+   * public void writeTo(IndexOutput output) throws IOException { if (docIds ==
+   * null) { output.writeVInt(0); } else { output.writeVInt(docIds.length); for
+   * (int x = 0; x < docIds.length; x++) { output.writeVLong(docIds[x]); } }
+   * output.writeVInt(terms.size()); for (int x = 0; x < terms.size(); x++) {
+   * output.writeString(terms.get(x).field());
+   * output.writeString(terms.get(x).text()); }
+   * output.writeVInt(deleteByQueries.size()); for (int x = 0; x <
+   * deleteByQueries.size(); x++) { byte[] bytes =
+   * SerializationUtils.serialize(deleteByQueries.get(x));
+   * output.writeVInt(bytes.length); output.writeBytes(bytes, bytes.length); } }
+   */
+  /** Folds another Deletes' terms and queries into this one (doc ids are not merged). */
+  public void merge(Deletes deletes) {
+    terms.addAll(deletes.getTerms());
+    deleteByQueries.addAll(deletes.getDeleteByQueries());
+  }
+
+  public boolean hasDocIds() {
+    return docIds != null;
+  }
+
+  void setDocIds(long[] docIds) {
+    // Bug fix: the original asserted the PARAMETER (docIds == null), which
+    // fails for every legitimate call when assertions are enabled. The intent
+    // is "the field has not been set yet".
+    assert this.docIds == null;
+    this.docIds = docIds;
+  }
+
+  public void addTerm(Term term) {
+    terms.add(term);
+  }
+
+  /** Wraps the query in a DeleteByQuery and queues it. */
+  public void addQuery(Query query) {
+    deleteByQueries.add(new DeleteByQuery(query));
+  }
+
+  public void addDeleteByQuery(DeleteByQuery deleteByQuery) {
+    deleteByQueries.add(deleteByQuery);
+  }
+
+  public long[] getDocIds() {
+    return docIds;
+  }
+
+  /** True when any of the three delete forms (terms, queries, doc ids) is non-empty. */
+  public boolean hasDeletes() {
+    if (terms.size() > 0 || deleteByQueries.size() > 0 || docIds != null && docIds.length > 0) {
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  public boolean hasDeleteByQueries() {
+    if (deleteByQueries != null && deleteByQueries.size() > 0)
+      return true;
+    return false;
+  }
+
+  public List<DeleteByQuery> getDeleteByQueries() {
+    return deleteByQueries;
+  }
+
+  /** Serializable wrapper for a delete-by-query request. */
+  public static class DeleteByQuery implements Serializable {
+    // Pin the version id, consistent with the enclosing class.
+    private static final long serialVersionUID = 1l;
+    private Query query;
+
+    public DeleteByQuery(Query query) {
+      this.query = query;
+    }
+
+    public Query getQuery() {
+      return query;
+    }
+  }
+
+  public List<Term> getTerms() {
+    return terms;
+  }
+
+  public boolean hasTerms() {
+    if (terms != null && terms.size() > 0)
+      return true;
+    return false;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/DeletesResult.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/DeletesResult.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/DeletesResult.java	(revision 0)
@@ -0,0 +1,51 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Collects the outcomes of applying deletes to a single index, keeping a
+ * running total of how many documents were removed.
+ */
+public class DeletesResult {
+  private IndexID indexId;
+  private List<Result> results = new ArrayList<Result>();
+  private int numDeleted = 0;
+  private List<Long> docIds = new LinkedList<Long>();
+
+  public DeletesResult(IndexID indexId) {
+    this.indexId = indexId;
+  }
+
+  /** Doc ids affected; callers mutate this list directly. */
+  public List<Long> getDocIds() {
+    return docIds;
+  }
+
+  /** Index these results apply to. */
+  public IndexID getIndexId() {
+    return indexId;
+  }
+
+  /** Records one delete outcome and adds its count to the running total. */
+  public void add(Result result) {
+    this.numDeleted += result.getNumDeleted();
+    this.results.add(result);
+  }
+
+  /** Total documents deleted across all recorded results. */
+  public int getNumDeleted() {
+    return numDeleted;
+  }
+
+  /** One delete operation's outcome: the delete descriptor plus its hit count. */
+  public static class Result {
+    private Object delete;
+    private int numDeleted;
+
+    public Result(Object delete, int numDeleted) {
+      this.delete = delete;
+      this.numDeleted = numDeleted;
+    }
+
+    /** The delete descriptor (term, query, or doc-id form — type not fixed here). */
+    public Object getDelete() {
+      return delete;
+    }
+
+    public int getNumDeleted() {
+      return numDeleted;
+    }
+  }
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/DirectoryIndex.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/DirectoryIndex.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/DirectoryIndex.java	(revision 0)
@@ -0,0 +1,276 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexDeletionPolicy;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.ocean.Batch.SlaveBatch;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.store.Directory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Abstract class used by RamIndex and DiskIndex.  Assumes a org.apache.lucene.store.Directory
+ * based IndexReader implementation.  
+ *
+ */
+public abstract class DirectoryIndex extends Index {
+  final static Logger LOG = LoggerFactory.getLogger(DirectoryIndex.class);
+	// snapshotId -> snapshot, ordered; lastValue()/lastKey() yield the newest snapshot.
+	protected final SortedList<Long, DirectoryIndexSnapshot> indexSnapshotMap = new SortedList<Long, DirectoryIndexSnapshot>();
+  
+	protected DirectoryIndexDeletionPolicy indexDeletionPolicy = new DirectoryIndexDeletionPolicy();
+	protected IndexReader initialIndexReader;
+	
+	public DirectoryIndex(IndexID id, TransactionSystem system) {
+		super(id, system);
+	}
+  
+	/**
+	 * Closes the IndexReader of every snapshot.
+	 * NOTE(review): DirectoryIndexSnapshot.close() also removes itself from
+	 * indexSnapshotMap while this loop iterates values() — confirm SortedList
+	 * tolerates removal during iteration.
+	 */
+	public void close() throws IOException {
+	  for (DirectoryIndexSnapshot snapshot : indexSnapshotMap.values()) {
+	    snapshot.close();
+	  }
+	}
+	
+	/** Newest snapshot, by snapshot id ordering. */
+	public DirectoryIndexSnapshot getLatestIndexSnapshot() {
+		return indexSnapshotMap.lastValue();
+	}
+
+	public DirectoryIndexSnapshot getIndexSnapshot(Long snapshotId) {
+		return indexSnapshotMap.get(snapshotId);
+	}
+  
+	/**
+	 * Bootstraps the snapshot chain from the initial reader. With no batches a
+	 * single snapshot is created for snapshotId; otherwise each delete-only
+	 * batch is applied, the reader reopened, and one snapshot created per
+	 * batch. The assert requires snapshotId to equal the last batch's id.
+	 */
+	public IndexSnapshot initialize(Long snapshotId, List<SlaveBatch> deleteOnlySlaveBatches, TransactionSystem system) throws Exception, IndexException, IOException {
+		IndexReader indexReader = initialIndexReader;
+		if (deleteOnlySlaveBatches == null || deleteOnlySlaveBatches.size() == 0) {
+			createNewSnapshot(snapshotId, indexReader);
+		} else {
+			for (SlaveBatch slaveBatch : deleteOnlySlaveBatches) {
+				if (slaveBatch.hasDeletes()) {
+					applyDeletes(true, slaveBatch.getDeletes(), null, indexReader);
+				}
+				indexReader = indexReader.reopen();
+				createNewSnapshot(slaveBatch.getId(), indexReader);
+			}
+		}
+		assert snapshotId.equals(indexSnapshotMap.lastKey());
+		return indexSnapshotMap.get(indexSnapshotMap.lastKey());
+	}
+
+	/** Hook invoked from the deletion policy on each commit; no-op by default. */
+	protected void onCommit() throws Exception {
+
+	}
+  
+	/** Returns every snapshot whose reader's commit point has the given generation. */
+	private List<DirectoryIndexSnapshot> getSnapshotsByGeneration(long generation) throws IOException {
+	  List<DirectoryIndexSnapshot> snapshots = new ArrayList<DirectoryIndexSnapshot>();
+	  for (DirectoryIndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      if (indexSnapshot.getIndexReader().getIndexCommit().getGeneration() == generation) {
+        snapshots.add(indexSnapshot);
+      }
+    }
+	  return snapshots;
+	}
+	
+	/**
+	 * Finds reader by version by iterating over snapshots and comparing versions
+	 * @param version the IndexReader version to match
+	 * @return the matching snapshot, or null if none has that reader version
+	 */
+	private DirectoryIndexSnapshot getSnapshotByReaderVersion(long version) {
+	  for (DirectoryIndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+	    if (indexSnapshot.getIndexReaderVersion() == version) {
+	      return indexSnapshot;
+	    }
+	  }
+	  return null;
+	}
+	
+	/**
+	 * Lucene deletion policy that keeps a commit point alive while any
+	 * snapshot still references its generation; unreferenced commit points
+	 * (and their snapshots, when no longer ref'd by the Snapshots registry)
+	 * are deleted.
+	 */
+	public class DirectoryIndexDeletionPolicy implements IndexDeletionPolicy {
+		private IndexCommit lastCommit;
+		SortedList<Long,IndexCommit> commitPoints = new SortedList<Long,IndexCommit>(); // key is generation
+
+		public void onInit(List commits) throws IOException {
+			onCommit(commits);
+		}
+
+		public IndexCommit getLastIndexCommitPoint() {
+			return lastCommit;
+		}
+
+		public void onCommit(List commits) throws IOException {
+			try {
+			  // Rebuild the generation -> commit-point map from scratch each time.
+			  commitPoints.clear();
+			  for (int x = 0; x < commits.size(); x++) {
+          IndexCommit indexCommit = (IndexCommit) commits.get(x);
+          commitPoints.put(indexCommit.getGeneration(), indexCommit);
+			  }
+				DirectoryIndex.this.onCommit();
+				lastCommit = (IndexCommit) commits.get(commits.size() - 1);
+				// The newest commit (last element) is always retained.
+				for (int x = 0; x < commits.size() - 1; x++) {
+					IndexCommit indexCommitPoint = (IndexCommit) commits.get(x);
+					// Multiple snapshots may have the same generation
+					// so deleting a commitpoint may affect multiple snapshots
+					long generation = indexCommitPoint.getGeneration();
+					List<DirectoryIndexSnapshot> snapshots = getSnapshotsByGeneration(generation);
+					// if there are no snapshots it needs to be deleted, nothing
+					// is using it anymore
+					if (snapshots.size() == 0) {
+					  indexCommitPoint.delete();
+					  commitPoints.remove(indexCommitPoint.getGeneration());
+					}
+					for (DirectoryIndexSnapshot indexSnapshot : snapshots) {
+					  if (!indexSnapshot.hasRef()) {
+					    // not referenced in Snapshots anymore
+	            indexCommitPoint.delete();
+	            indexSnapshot.delete();
+	          }
+					}
+				}
+			} catch (Exception exception) {
+				throw Util.asIOException(exception);
+			}
+		}
+	}
+
+	/** A point-in-time view of the index, backed by a dedicated IndexReader. */
+	public abstract class DirectoryIndexSnapshot extends IndexSnapshot {
+		protected IndexReader indexReader;
+		private int maxDoc;
+
+		public DirectoryIndexSnapshot(Long snapshotId, IndexReader indexReader) throws IOException {
+			super(snapshotId);
+			this.indexReader = indexReader;
+			maxDoc = indexReader.maxDoc();
+			// Eagerly evaluated here; presumably primes cached values in the
+			// IndexSnapshot superclass — confirm against IndexSnapshot.
+			getMinDocumentId();
+			getMaxDocumentId();
+		}
+		
+		/**
+		 * Closes this snapshot's reader, unregisters it, and — when no LATER
+		 * snapshot shares this reader's commit generation — deletes the
+		 * commit point as well.
+		 */
+		void delete() throws Exception {
+		  LOG.info(DirectoryIndex.this.getId()+" deleting snapshotid: "+snapshotId);
+		  long generation = getGeneration();
+		  List<DirectoryIndexSnapshot> snapshotsGreaterWithGeneration = new ArrayList<DirectoryIndexSnapshot>();
+		  for (DirectoryIndexSnapshot snapshot : indexSnapshotMap.values()) {
+		    if (snapshot.getGeneration() == generation && snapshot.snapshotId.longValue() > snapshotId) {
+		      snapshotsGreaterWithGeneration.add(snapshot);
+		    }
+		  }
+		  indexReader.close();
+		  if (snapshotsGreaterWithGeneration.size() == 0) {
+		    IndexCommit indexCommit = indexDeletionPolicy.commitPoints.get(generation);
+		    if (indexCommit != null) {
+		      LOG.info(DirectoryIndex.this.getId()+" deleting snapshotid: "+snapshotId+" indexCommit: "+indexCommit.getGeneration());
+		      indexCommit.delete();
+		    }
+		  }
+			indexSnapshotMap.remove(snapshotId);
+		}
+    
+		public long getGeneration() throws IOException {
+		  return indexReader.getIndexCommit().getGeneration();
+		}
+		
+		/** Closes the reader and unregisters this snapshot (no commit-point delete). */
+		public void close() throws IOException {
+		  indexReader.close();
+		  indexSnapshotMap.remove(snapshotId);
+		}
+		
+		/** Number of deleted (but not yet merged away) docs in this snapshot's reader. */
+		public int deletedDoc() {
+		  return indexReader.maxDoc() - indexReader.numDocs();
+		}
+		
+		public int maxDoc() {
+      return indexReader.maxDoc();
+    }
+		
+		public IndexReader getIndexReader() {
+			return indexReader;
+		}
+
+		public long getIndexReaderVersion() {
+			return indexReader.getVersion();
+		}
+
+		/** True while the system-wide Snapshots registry still references this snapshot id. */
+		public boolean hasRef() throws Exception {
+			return getSystem().getSnapshots().contains(snapshotId);
+		}
+	}
+
+	/** Size in bytes of the underlying Directory, as computed by Util.getSize. */
+	public long getSize() throws IOException {
+		Directory directory = getDirectory();
+		return Util.getSize(directory);
+	}
+  
+	/**
+	 * Creates a new snapshot only without performing any changes to the index
+	 * @param transaction
+	 * @throws IndexException
+	 * @throws InterruptedException
+	 * @throws IOException
+	 */
+	public void commitNothing(Transaction transaction) throws IndexException, InterruptedException, IOException {
+	  IndexSnapshot latestIndexSnapshot = getLatestIndexSnapshot();
+    assert latestIndexSnapshot != null;
+    assert latestIndexSnapshot.getSnapshotId().equals(transaction.getPreviousId());
+    transaction.ready(this); 
+    if (transaction.go()) {
+      Long snapshotId = transaction.getId();
+      // Reuse the previous reader: nothing changed, only a new snapshot id.
+      IndexReader previousIndexReader = latestIndexSnapshot.getIndexReader();
+      createNewSnapshot(snapshotId, previousIndexReader);
+      removeOldSnapshots(indexSnapshotMap);
+    }
+	}
+	
+	/** Deletes the snapshot created for the given id, if present; true when one was removed. */
+	public boolean rollback(Long snapshotId) throws Exception {
+    LOG.info("rollback "+snapshotId);
+    DirectoryIndexSnapshot indexSnapshot = indexSnapshotMap.get(snapshotId);
+    if (indexSnapshot != null) {
+      indexSnapshot.delete();
+      return true;
+    }
+    return false;
+  }
+	
+	/**
+	 * Applies deletes against a freshly reopened reader and, when the
+	 * transaction proceeds, flushes and publishes a new snapshot.
+	 * Returns null when the transaction does not go, or on any error
+	 * (NOTE(review): the catch swallows the Throwable after marking the
+	 * transaction failed — callers must treat null as failure).
+	 */
+	public DeletesResult commitDeletes(Deletes deletes, Transaction transaction) throws Exception, IndexException, InterruptedException, IOException {
+		IndexSnapshot latestIndexSnapshot = getLatestIndexSnapshot();
+		assert latestIndexSnapshot != null;
+		assert latestIndexSnapshot.getSnapshotId().equals(transaction.getPreviousId());
+		IndexReader previousIndexReader = latestIndexSnapshot.getIndexReader();
+		IndexReader newIndexReader = previousIndexReader.reopen(true);
+		try {
+			DeletesResult deletesResult = applyDeletes(true, deletes, null, newIndexReader);
+			transaction.ready(this); 
+			if (transaction.go()) {
+				if (deletesResult.getNumDeleted() > 0) {
+					newIndexReader.flush();
+				}
+				if (deletesResult.getNumDeleted() > 0) {
+				  // if flush was called check to make sure there is a new generation for the indexreader
+				  LOG.info("previous reader gen: "+previousIndexReader.getIndexCommit().getGeneration()+" newIndexReader gen: "+newIndexReader.getIndexCommit().getGeneration());
+				}
+				Long snapshotId = transaction.getId();
+				createNewSnapshot(snapshotId, newIndexReader);
+				removeOldSnapshots(indexSnapshotMap);
+				return deletesResult;
+			} else {
+				rollback(transaction.getId());
+				return null;
+			}
+		} catch (Throwable throwable) {
+			LOG.error("", throwable);
+			transaction.failed(this, throwable);
+			//rollback(transaction.getId());
+			return null;
+		}
+	}
+
+	/** Creates and registers a subclass-specific snapshot wrapping the given reader. */
+	protected abstract DirectoryIndexSnapshot createNewSnapshot(Long snapshotId, IndexReader newIndexReader) throws IOException;
+
+	/** Adds the snapshot to the ordered map under its snapshot id. */
+	protected void registerSnapshot(DirectoryIndexSnapshot indexSnapshot) throws IOException {
+		indexSnapshotMap.put(indexSnapshot.getSnapshotId(), indexSnapshot);
+	}
+
+	/** The Lucene Directory backing this index. */
+	public abstract Directory getDirectory();
+}
Index: ocean/src/org/apache/lucene/ocean/DirectoryMap.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/DirectoryMap.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/DirectoryMap.java	(revision 0)
@@ -0,0 +1,19 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+
+import org.apache.lucene.store.Directory;
+
+public abstract class DirectoryMap {
+  /** Creates a new Directory under the given name. */
+  public abstract Directory create(String name) throws IOException;
+  
+  /** Deletes the named Directory and its backing storage. */
+  public abstract void delete(String name) throws IOException;
+  
+  /** Returns the named Directory; implementations may return null if unknown (see FSDirectoryMap). */
+  public abstract Directory get(String name) throws IOException;
+  
+  /** Names of all directories currently tracked by this map. */
+  public abstract String[] list() throws IOException;
+  
+  /** The root LogDirectory for this map. */
+  public abstract LogDirectory getDirectory();
+  
+  /** The LogDirectory reserved for log files. */
+  public abstract LogDirectory getLogDirectory();
+}
Index: ocean/src/org/apache/lucene/ocean/DiskIndex.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/DiskIndex.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/DiskIndex.java	(revision 0)
@@ -0,0 +1,182 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * On disk index.  Only deletes are allowed to occur to the index. 
+ * There is a unique IndexReader per snapshot.
+ *
+ */
+// TODO: may be issue with reader having same version but multiple snapshots when transaction has no deletes for this index
+public class DiskIndex extends DirectoryIndex {
+	// NOTE(review): this class declares its own java.util.logging Logger but the
+	// constructor below uses the inherited slf4j LOG — pick one convention.
+	public static Logger log = Logger.getLogger(DiskIndex.class.getName());
+	private final Directory directory;
+	//private IndexInfo indexInfo;
+
+	// load existing index
+	// Refuses to open when the "writing.index" marker is present (a previous
+	// copy never completed); force-unlocks a stale write lock; asserts the
+	// reader's commit generation matches the expected segmentGeneration.
+	public DiskIndex(IndexID id, Directory directory, Long snapshotId, Long segmentGeneration, TransactionSystem system) throws Exception, IndexException, IOException {
+		super(id, system);
+		assert segmentGeneration != null;
+		this.directory = directory;
+		if (directory.fileExists("writing.index")) {
+			throw new IndexNeverCompletedCopyException("index never completed copying");
+		}
+		if (IndexWriter.isLocked(directory)) {
+		  LOG.info("directory: "+directory+" locked.  being unlocked");
+		  IndexWriter.unlock(directory);
+		}
+		initialIndexReader = IndexReader.open(directory, indexDeletionPolicy);
+		long readerGeneration = initialIndexReader.getIndexCommit().getGeneration();
+		assert segmentGeneration.longValue() == readerGeneration;
+		//indexInfo = loadIndexInfo();
+		createNewSnapshot(snapshotId, initialIndexReader);
+	}
+
+	// merge indexes creating new index
+	// The "writing.index" marker brackets the copy: created before merging,
+	// removed after the copy to disk succeeds. If this constructor dies midway
+	// the marker remains, and the load constructor above will reject the
+	// directory — presumably the intended crash-recovery signal (confirm).
+	public DiskIndex(IndexID id, Directory directory, List<? extends IndexSnapshot> indexSnapshots, TransactionSystem system) throws Exception, IOException {
+		super(id, system);
+		this.directory = directory;
+		Util.touchFile("writing.index", directory);
+		IndexReader[] indexReaders = getIndexReaders(indexSnapshots);
+		// create in ram first, is faster than copying to disk due to less hard disk head movement
+		RAMDirectory ramDirectory = new RAMDirectory();
+		IndexWriter indexWriter = new IndexWriter(ramDirectory, false, system.getDefaultAnalyzer(), true);
+		indexWriter.setMergeScheduler(new SerialMergeScheduler());
+		indexWriter.setUseCompoundFile(true);
+		indexWriter.addIndexes(indexReaders);
+		indexWriter.close();
+		Directory.copy(ramDirectory, directory, true);
+		
+		//indexInfo = new IndexInfo();
+		//indexInfo.setMaxDocumentID(maxDocumentId);
+		//indexInfo.setMaxSnapshotID(maxSnapshotId);
+		//saveIndexInfo(indexInfo);
+		directory.deleteFile("writing.index");
+		initialIndexReader = IndexReader.open(directory, indexDeletionPolicy);
+	}
+  
+	/** The on-disk Directory backing this index. */
+	public Directory getDirectory() {
+		return directory;
+	}
+  /**
+	private IndexInfo loadIndexInfo() throws Exception {
+		String xml = Util.getString("indexinfo.xml", directory);
+		Element element = XMLUtil.parseElement(xml);
+		return new IndexInfo(element);
+	}
+
+	private void saveIndexInfo(IndexInfo indexInfo) throws Exception {
+		Element element = indexInfo.toElement();
+		String xml = XMLUtil.outputElement(element);
+		Util.save(xml, "indexinfo.xml", directory);
+	}
+
+	public static class IndexInfo implements CElement {
+		private Long maxSnapshotId;
+		private Long maxDocumentId;
+
+		public IndexInfo() {
+		}
+
+		public Long getMaxSnapshotID() {
+			return maxSnapshotId;
+		}
+
+		public void setMaxSnapshotID(Long maxSnapshotId) {
+			this.maxSnapshotId = maxSnapshotId;
+		}
+
+		public Long getMaxDocumentID() {
+			return maxDocumentId;
+		}
+
+		public void setMaxDocumentID(Long maxDocumentId) {
+			this.maxDocumentId = maxDocumentId;
+		}
+
+		public IndexInfo(Element element) throws Exception {
+		  maxSnapshotId = XMLUtil.getAttributeLong("maxSnapshotId", element);
+		  maxDocumentId = XMLUtil.getAttributeLong("maxDocumentId", element);
+		}
+
+		public Element toElement() throws Exception {
+		  Element element = new Element("indexinfo");
+		  XMLUtil.setAttribute("maxSnapshotId", maxSnapshotId, element);
+		  XMLUtil.setAttribute("maxDocumentId", maxDocumentId, element);
+		  return element;
+		}
+	}
+  **/
+	/**
+	 * True when the latest snapshot's reader carries more deleted docs than
+	 * the given fraction of maxDoc (used to decide when a merge is worthwhile).
+	 * @param percent fraction in [0,1]
+	 */
+	public boolean hasTooManyDeletedDocs(double percent) {
+	  assert percent <= 1.0;
+		DirectoryIndexSnapshot indexSnapshot = getLatestIndexSnapshot();
+		if (indexSnapshot != null) {
+			IndexReader indexReader = indexSnapshot.getIndexReader();
+			int maxDoc = indexReader.maxDoc();
+			int deletedDocs = maxDoc - indexReader.numDocs();
+			if (deletedDocs > (maxDoc * percent))
+				return true;
+		}
+		return false;
+	}
+
+	/** Disk snapshot: a DirectoryIndexSnapshot that also tracks the commit's file names. */
+	public class DiskIndexSnapshot extends DirectoryIndexSnapshot {
+		private Collection<String> indexReaderFileNames;
+
+		public DiskIndexSnapshot(Long snapshotID, IndexReader indexReader, Collection<String> indexReaderFileNames) throws IOException {
+			super(snapshotID, indexReader);
+			this.indexReaderFileNames = indexReaderFileNames;
+		}
+
+		//public Long getMaxSnapshotId() throws IOException {
+		//	return indexInfo.getMaxSnapshotID();
+		//}
+
+		//public Long getMaxDocumentId() throws IOException {
+		//	return indexInfo.getMaxDocumentID();
+		//}
+    
+		protected void delete() throws Exception {
+			super.delete();
+			deleteFiles();
+		}
+		
+		public boolean hasRef() throws Exception {
+			return getSystem().getSnapshots().contains(snapshotId);
+		}
+
+		// Intentionally a no-op here: the deletion policy / commit-point
+		// deletion handles the physical files.
+		public void deleteFiles() throws IOException {
+		}
+
+		/** Copy of the file names belonging to this snapshot's commit. */
+		public List<String> getFiles() throws Exception {
+			List<String> files = new ArrayList<String>();
+			for (String fileName : indexReaderFileNames) {
+				files.add(fileName);
+			}
+			return files;
+		}
+	}
+  
+	protected void onCommit() throws Exception {
+	}
+
+	/** Wraps the reader's current commit (with its file names) in a registered DiskIndexSnapshot. */
+	protected DiskIndexSnapshot createNewSnapshot(Long snapshotId, IndexReader newIndexReader) throws IOException {
+	  IndexCommit indexCommit = newIndexReader.getIndexCommit();
+		Collection<String> fileNames = indexCommit.getFileNames();
+		DiskIndexSnapshot diskIndexSnapshot = new DiskIndexSnapshot(snapshotId, newIndexReader, fileNames);
+		registerSnapshot(diskIndexSnapshot);
+		return diskIndexSnapshot;
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/Documents.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Documents.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Documents.java	(revision 0)
@@ -0,0 +1,17 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.apache.lucene.document.Document;
+
+/** A batch of Lucene Documents; simply a typed ArrayList. */
+public class Documents extends ArrayList<Document> {
+  // ArrayList is Serializable, so subclasses should pin a version id
+  // (style matches the other Ocean classes' "1l").
+  private static final long serialVersionUID = 1l;
+  
+  /** Creates an empty batch. */
+  public Documents() {}
+  
+  /** Creates a batch containing the given documents, in iteration order. */
+  public Documents(Collection<Document> documents) {
+    for (Document document : documents) {
+      add(document);
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/FSDirectoryMap.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/FSDirectoryMap.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/FSDirectoryMap.java	(revision 0)
@@ -0,0 +1,120 @@
+package org.apache.lucene.ocean;
+
+import java.io.File;
+import java.io.IOException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.Lock;
+import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.NativeFSLockFactory;
+
+/**
+ * Filesystem-backed DirectoryMap. Each subdirectory of the root becomes a
+ * named FSDirectory; one subdirectory is reserved for logs. A process-wide
+ * native write lock is taken on the root at construction.
+ */
+public class FSDirectoryMap extends DirectoryMap {
+  public static final String WRITE_LOCK_NAME = "write.lock";
+  private Map<String,FSDirectory> map = new HashMap<String,FSDirectory>();
+  private File fileDirectory;
+  private LogDirectory rootDirectory;
+  private LogDirectory logDirectory;
+  private static MessageDigest DIGESTER;
+  private static final char[] HEX_DIGITS =
+  {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
+  private NativeFSLockFactory lockFactory;
+  
+  static {
+    try {
+      DIGESTER = MessageDigest.getInstance("MD5");
+    } catch (NoSuchAlgorithmException e) {
+        throw new RuntimeException(e.toString(), e);
+    }
+  }
+  
+  /**
+   * Opens (creating if needed) the root directory, takes the write lock, and
+   * registers every existing subdirectory except the log directory.
+   * NOTE(review): the acquired Lock is never released or retained — presumably
+   * held for the life of the process via the native lock file; confirm.
+   * @throws LockObtainFailedException when the lock cannot be taken within 5s
+   */
+  public FSDirectoryMap(File fileDirectory, String logDirectoryName) throws IOException {
+    this.fileDirectory = fileDirectory;
+    Util.mkdir(fileDirectory);
+    
+    lockFactory = new NativeFSLockFactory(fileDirectory);
+    //lockFactory.clearLock(WRITE_LOCK_NAME);
+    lockFactory.setLockPrefix(getLockID());
+    Lock lock = lockFactory.makeLock(WRITE_LOCK_NAME);
+    boolean obtained = lock.obtain(1000*5);
+    // (removed leftover debug System.out.println of the lock status)
+    if (!obtained) throw new LockObtainFailedException("Index locked for write: " + lock);
+    
+    rootDirectory = new FSLogDirectory(fileDirectory);
+    for (File file : fileDirectory.listFiles()) {
+      if (file.isDirectory() && !file.getName().equals(logDirectoryName)) {
+        FSDirectory dir = FSDirectory.getDirectory(file);
+        map.put(file.getName(), dir);
+      }
+    }
+    logDirectory = new FSLogDirectory(new File(fileDirectory, logDirectoryName));
+  }
+  
+  /**
+   * Stable lock-file prefix for this root: "ocean-" + hex MD5 of the
+   * canonical path, so different roots never collide on lock names.
+   */
+  public String getLockID() {
+    String dirName;                               // name to be hashed
+    try {
+      dirName = fileDirectory.getCanonicalPath();
+    } catch (IOException e) {
+      throw new RuntimeException(e.toString(), e);
+    }
+
+    byte[] digest;
+    // DIGESTER is shared and stateful; serialize access.
+    synchronized (DIGESTER) {
+      digest = DIGESTER.digest(dirName.getBytes());
+    }
+    StringBuilder buf = new StringBuilder();
+    buf.append("ocean-");
+    for (int i = 0; i < digest.length; i++) {
+      int b = digest[i];
+      buf.append(HEX_DIGITS[(b >> 4) & 0xf]);
+      buf.append(HEX_DIGITS[b & 0xf]);
+    }
+    return buf.toString();
+  }
+  
+  public LogDirectory getLogDirectory() {
+    return logDirectory;
+  }
+  
+  public LogDirectory getDirectory() {
+    return rootDirectory;
+  }
+  
+  /**
+   * Opens a new FSDirectory under the root with its own native lock factory
+   * and force-unlocks any stale lock. NOTE(review): the result is not added
+   * to {@code map}, so get(name) will not find it — confirm intended.
+   */
+  public Directory create(String name) throws IOException {
+    Directory directory = FSDirectory.getDirectory(new File(fileDirectory, name));
+    directory.setLockFactory(new NativeFSLockFactory(fileDirectory));
+    IndexWriter.unlock(directory);
+    return directory;
+  }
+
+  /** Closes, recursively deletes, and unregisters the named directory. */
+  public void delete(String name) throws IOException {
+    FSDirectory directory = get(name);
+    directory.close();
+    File file = directory.getFile();
+    FileUtils.deleteDirectory(file);
+    map.remove(name);
+  }
+
+  /** Returns the registered directory, or null when the name is unknown. */
+  public FSDirectory get(String name) throws IOException {
+    return map.get(name);
+  }
+
+  /** Names of all registered directories (order unspecified). */
+  public String[] list() throws IOException {
+    int x = 0;
+    String[] array = new String[map.size()];
+    for (String string : map.keySet()) {
+      array[x] = string;
+      x++;
+    }
+    return array;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/FSLogDirectory.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/FSLogDirectory.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/FSLogDirectory.java	(revision 0)
@@ -0,0 +1,108 @@
+package org.apache.lucene.ocean;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.ocean.util.Util;
+
+public class FSLogDirectory extends LogDirectory {
+  private File fileDirectory;
+  private ReentrantLock outputLock = new ReentrantLock();
+  private ReentrantLock inputLock = new ReentrantLock();
+
+  public FSLogDirectory(File fileDirectory) {
+    Util.mkdir(fileDirectory);
+    this.fileDirectory = fileDirectory;
+  }
+  
+  public String[] list() throws IOException {
+    List<String> list = new ArrayList<String>();
+    for (File file : fileDirectory.listFiles()) {
+      if (!file.isDirectory()) {
+        list.add(file.getName());
+      }
+    }
+    return (String[]) list.toArray(new String[0]);
+  }
+
+  public boolean fileExists(String name) throws IOException {
+    File file = new File(fileDirectory, name);
+    return file.exists();
+  }
+
+  public long fileModified(String name) throws IOException {
+    File file = new File(fileDirectory, name);
+    return file.lastModified();
+  }
+
+  public void deleteFile(String name) throws IOException {
+    File file = new File(fileDirectory, name);
+    boolean deleted = file.delete();
+    if (!deleted) {
+      throw new IOException("file: "+name+" not deleted");
+    }
+  }
+
+  public long fileLength(String name) throws IOException {
+    File file = new File(fileDirectory, name);
+    return file.length();
+  }
+
+  public RandomAccessFile openInput(String name) throws IOException {
+    inputLock.lock();
+    try {
+      File file = new File(fileDirectory, name);
+      return new RandomAccessFile(file, "r");
+      //RandomAccessFileContent content = new RandomAccessFileContent(file, "r");
+      //IOController contoller = new IOController(1024 * 16, content);
+      //BufferedRandomAccessIO buffered = new BufferedRandomAccessIO(contoller, true);
+      //return buffered;
+      //BufferedRandomAccessIO access = getOutput(name, false);
+      
+      //return access.createIOChild(0, 0, access.getByteOrder(), false);
+    } finally {
+      inputLock.unlock();
+    }
+  }
+
+  public RandomAccessFile getOutput(String name, boolean overwrite) throws IOException {
+    outputLock.lock();
+    try {
+      File file = new File(fileDirectory, name);
+      if (overwrite) {
+        file.delete();
+      }
+      return new RandomAccessFile(file, "rw");
+      /**
+      FSFile fsFile = outputMap.get(name);
+      if (fsFile == null) {
+        File file = new File(fileDirectory, name);
+        //RandomAccessFileContent content = new RandomAccessFileContent(file, "rw");
+        //content.tryLock();
+        fsFile = new FSFile(content);
+        outputMap.put(name, fsFile);
+      }
+      
+      if (overwrite) {
+        //if (fsFile != null) {
+        //  buffered.close();
+        //  buffered = null;
+       // }
+        //boolean deleted = file.delete();
+
+      }
+      IOController contoller = new IOController(1024 * 16, fsFile.content);
+      BufferedRandomAccessIO buffered = new BufferedRandomAccessIO(contoller, false);
+      return buffered;
+      **/
+    } finally {
+      outputLock.unlock();
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/Index.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Index.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Index.java	(revision 0)
@@ -0,0 +1,384 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.StaleReaderException;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.index.TermEnum;
+import org.apache.lucene.ocean.Deletes.DeleteByQuery;
+import org.apache.lucene.ocean.util.Constants;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.search.ExtendedFieldCache;
+import org.apache.lucene.search.HitCollector;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ExtendedFieldCache.LongParser;
+
+public abstract class Index {
+  private final IndexID id;
+  protected boolean isClosed = false;
+  private final TransactionSystem transactionSystem;
+  private boolean isReadOnly = false;
+  private boolean isDeleteOnly = false;
+
+  public Index(IndexID id, TransactionSystem transactionSystem) {
+    this.id = id;
+    this.transactionSystem = transactionSystem;
+  }
+
+  public void removeOldSnapshots(SortedList<Long,? extends IndexSnapshot> snapshotMap) {
+    Long last = snapshotMap.lastKey();
+    for (Iterator<Long> iterator = snapshotMap.keySet().iterator(); iterator.hasNext();) {
+      Long snapshotId = iterator.next();
+      if (!transactionSystem.snapshots.contains(snapshotId) && (last != null && !last.equals(snapshotId))) {
+        iterator.remove();
+      }
+    }
+  }
+
+  public void close() throws IOException {
+  }
+
+  public TransactionSystem getSystem() {
+    return transactionSystem;
+  }
+
+  public static IndexReader[] getIndexReaders(List<? extends IndexSnapshot> indexSnapshots) {
+    IndexReader[] indexReaders = new IndexReader[indexSnapshots.size()];
+    for (int x = 0; x < indexSnapshots.size(); x++) {
+      indexReaders[x] = indexSnapshots.get(x).getIndexReader();
+    }
+    return indexReaders;
+  }
+
+  public static Long getMaxSnapshotId(List<? extends IndexSnapshot> indexSnapshots) throws Exception {
+    List<Long> snapshotIdList = new ArrayList<Long>(indexSnapshots.size());
+    for (IndexSnapshot indexSnapshot : indexSnapshots) {
+      snapshotIdList.add(indexSnapshot.getMaxSnapshotId());
+    }
+    if (snapshotIdList.size() > 0) {
+      return Collections.max(snapshotIdList);
+    } else {
+      return null;
+    }
+  }
+
+  public static Long getMaxDocumentId(List<? extends IndexSnapshot> indexSnapshots) throws Exception {
+    List<Long> documentIdList = new ArrayList<Long>(indexSnapshots.size());
+    for (IndexSnapshot indexSnapshot : indexSnapshots) {
+      documentIdList.add(indexSnapshot.getMaxDocumentId());
+    }
+    if (documentIdList.size() > 0) {
+      return Collections.max(documentIdList);
+    } else {
+      return null;
+    }
+  }
+
+  public void setDeleteOnly(boolean isDeleteOnly) {
+    this.isDeleteOnly = isDeleteOnly;
+  }
+
+  public boolean isDeleteOnly() {
+    return isDeleteOnly;
+  }
+
+  public boolean isReadOnly() {
+    return isReadOnly;
+  }
+
+  public void setReadOnly(boolean isReadOnly) {
+    this.isReadOnly = isReadOnly;
+  }
+
+  protected DeletesResult applyDeletes(boolean deleteFromReader, Deletes deletes, Collection<Integer> deletedDocs, IndexReader indexReader)
+      throws CorruptIndexException, IOException, Exception {
+    long[] ids = ExtendedFieldCache.EXT_DEFAULT.getLongs(indexReader, Constants.DOCUMENTID, new LongParser() {
+      public long parseLong(String string) {
+        return Util.longFromEncoded(string);
+      }
+    });
+    DeletesResult deletesResult = new DeletesResult(getId());
+    if (deletes.hasDeletes()) {
+      if (deletes.hasDocIds()) {
+        int docsDeleted = 0;
+        long[] docIdsArray = deletes.getDocIds();
+        for (long id : docIdsArray) {
+          int doc = Util.getDoc(Constants.DOCUMENTID, id, indexReader);
+          if (doc >= 0) {
+            if (deleteFromReader) indexReader.deleteDocument(doc);
+            if (deletedDocs != null) deletedDocs.add(doc);
+            docsDeleted++;
+          }
+        }
+        deletesResult.add(new DeletesResult.Result(docIdsArray, docsDeleted));
+      } else {
+        List<Long> docIds = deletesResult.getDocIds();
+        if (deletes.hasTerms()) {
+          List<Term> terms = deletes.getTerms();
+          for (Term term : terms) {
+            int docsDeleted = deleteByTerm(deleteFromReader, term, deletedDocs, docIds, ids, indexReader);
+            deletesResult.add(new DeletesResult.Result(term, docsDeleted));
+          }
+        }
+        if (deletes.hasDeleteByQueries()) {
+          List<DeleteByQuery> deleteByQueries = deletes.getDeleteByQueries();
+          for (DeleteByQuery deleteByQuery : deleteByQueries) {
+            int docsDeleted = deleteByQuery(deleteFromReader, deleteByQuery, deletedDocs, ids, docIds, indexReader);
+            deletesResult.add(new DeletesResult.Result(deleteByQuery, docsDeleted));
+          }
+        }
+      }
+    }
+    return deletesResult;
+  }
+
+  protected int deleteByTerm(boolean deleteFromReader, Term term, Collection<Integer> deletedDocs, List<Long> docIds, long[] ids,
+      IndexReader indexReader) throws IOException {
+    TermDocs docs = indexReader.termDocs(term);
+    if (docs == null)
+      return 0;
+    int n = 0;
+    try {
+      while (docs.next()) {
+        int doc = docs.doc();
+        if (deletedDocs != null)
+          deletedDocs.add(doc);
+        Long docId = ids[doc];
+        docIds.add(docId);
+        if (deleteFromReader)
+          indexReader.deleteDocument(doc);
+        n++;
+      }
+    } finally {
+      docs.close();
+    }
+    return n;
+  }
+
+  protected int deleteByQuery(final boolean deleteFromReader, DeleteByQuery deleteByQuery, final Collection<Integer> deletedDocs,
+      final long[] ids, final List<Long> deletedIds, final IndexReader indexReader) throws IOException {
+    Query query = deleteByQuery.getQuery();
+    IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+    final int[] numDeleted = new int[1];
+    indexSearcher.search(query, new HitCollector() {
+      public void collect(int doc, float score) {
+        try {
+          if (deleteFromReader)
+            indexReader.deleteDocument(doc);
+          if (deletedDocs != null)
+            deletedDocs.add(doc);
+          Long docId = ids[doc];
+          deletedIds.add(docId);
+          numDeleted[0]++;
+        } catch (StaleReaderException staleReaderException) {
+          throw new RuntimeException(staleReaderException);
+        } catch (IOException ioException) {
+          throw new RuntimeException(ioException);
+        }
+      }
+    });
+    return numDeleted[0];
+  }
+
+  public static class IndexException extends Exception {
+    public IndexException(String message) {
+      super(message);
+    }
+
+    public IndexException(String message, Exception exception) {
+      super(message, exception);
+    }
+  }
+
+  public static class IndexNeverCompletedCopyException extends IndexException {
+    public IndexNeverCompletedCopyException(String message) {
+      super(message);
+    }
+  }
+
+  public abstract boolean rollback(Long snapshotId) throws Exception;
+
+  public abstract IndexSnapshot getIndexSnapshot(Long snapshotID);
+
+  public abstract IndexSnapshot getLatestIndexSnapshot();
+
+  public abstract class IndexSnapshot {
+    protected final Long snapshotId;
+    private Long maxSnapshotId;
+    private Long maxDocumentId;
+    private Long minSnapshotId;
+    private Long minDocumentId;
+
+    public IndexSnapshot(Long snapshotId) {
+      this.snapshotId = snapshotId;
+    }
+
+    public abstract int deletedDoc();
+
+    public abstract int maxDoc();
+
+    public Long getSnapshotId() {
+      return snapshotId;
+    }
+
+    public Index getIndex() {
+      return Index.this;
+    }
+
+    /**
+     * Iterates terms until next field is reached, returns text
+     * 
+     * @param field
+     * @return
+     * @throws Exception
+     */
+    public String getMax(String field) throws IOException {
+      IndexReader indexReader = getIndexReader();
+      TermEnum termEnum = indexReader.terms(new Term(field, ""));
+      try {
+        String text = null;
+        do {
+          Term term = termEnum.term();
+          if (term == null || term.field() != field)
+            break;
+          text = term.text();
+        } while (termEnum.next());
+        return text;
+      } finally {
+        termEnum.close();
+      }
+    }
+    
+    public String getMin(String field) throws IOException {
+      IndexReader indexReader = getIndexReader();
+      TermEnum termEnum = indexReader.terms(new Term(field, ""));
+      try {
+        do {
+          Term term = termEnum.term();
+          if (term == null || term.field() != field)
+            break;
+          return term.text();
+        } while (termEnum.next());
+        return null;
+      } finally {
+        termEnum.close();
+      }
+    }
+    
+    public Long getMinDocumentId() throws IOException {
+      if (minDocumentId == null) {
+        String string = getMin(Constants.DOCUMENTID);
+        if (string == null)
+          return null;
+        minDocumentId = Util.longFromEncoded(string);
+      }
+      return minDocumentId;
+    }
+    
+    public Long getMinSnapshotId() throws IOException {
+      if (minSnapshotId == null) {
+        String string = getMin(Constants.SNAPSHOTID);
+        if (string == null)
+          return null;
+        minSnapshotId = Util.longFromEncoded(string);
+      }
+      return minSnapshotId;
+    }
+    
+    public Long getMaxSnapshotId() throws IOException {
+      if (maxSnapshotId == null) {
+        String string = getMax(Constants.SNAPSHOTID);
+        if (string == null)
+          return null;
+        maxSnapshotId = Util.longFromEncoded(string);
+      }
+      return maxSnapshotId;
+    }
+
+    public Long getMaxDocumentId() throws IOException {
+      if (maxDocumentId == null) {
+        String string = getMax(Constants.DOCUMENTID);
+        if (string == null)
+          return null;
+        maxDocumentId = Util.longFromEncoded(string);
+      }
+      return maxDocumentId;
+    }
+
+    public abstract IndexReader getIndexReader();
+  }
+
+  public static class MergedDocMap {
+    private Map<IndexSnapshot,int[]> oldMap; // maps old doc to new doc
+    private RI[] merged; // maps new doc to old doc and reader
+
+    public MergedDocMap(List<? extends IndexSnapshot> indexSnapshots) {
+      int newMaxDoc = 0;
+      for (IndexSnapshot indexSnapshot : indexSnapshots) {
+        newMaxDoc += indexSnapshot.getIndexReader().numDocs();
+      }
+      oldMap = new HashMap<IndexSnapshot,int[]>(indexSnapshots.size());
+      RI[] merged = new RI[newMaxDoc];
+      int pos = 0;
+      for (IndexSnapshot indexSnapshot : indexSnapshots) {
+        IndexReader indexReader = indexSnapshot.getIndexReader();
+        int maxDoc = indexReader.maxDoc();
+        int[] old = new int[maxDoc];
+        for (int x = 0; x < maxDoc; x++) {
+          if (indexReader.hasDeletions() && indexReader.isDeleted(x)) {
+            merged[pos] = null;
+            old[x] = -1;
+          } else {
+            merged[pos] = new RI(x, indexSnapshot);
+            old[x] = pos;
+            pos++;
+          }
+        }
+        oldMap.put(indexSnapshot, old);
+      }
+    }
+
+    public static class RI {
+      public int doc;
+      public IndexSnapshot oldIndexSnapshot;
+
+      public RI(int doc, IndexSnapshot oldIndexSnapshot) {
+        this.doc = doc;
+        this.oldIndexSnapshot = oldIndexSnapshot;
+      }
+    }
+
+    public Map<IndexSnapshot,int[]> getOldMap() {
+      return oldMap;
+    }
+
+    public RI[] getMerged() {
+      return merged;
+    }
+  }
+
+  public boolean isClosed() {
+    return isClosed;
+  }
+
+  public IndexID getId() {
+    return id;
+  }
+
+  public abstract void commitNothing(Transaction transaction) throws IndexException, InterruptedException, IOException;
+
+  public abstract DeletesResult commitDeletes(Deletes deletes, Transaction transaction) throws Exception, IndexException,
+      InterruptedException, IOException;
+}
Index: ocean/src/org/apache/lucene/ocean/IndexCreator.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/IndexCreator.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/IndexCreator.java	(revision 0)
@@ -0,0 +1,142 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * Allows creation of an index using multiple threads by feeding documents into
+ * a BlockingQueue.
+ * 
+ */
+// TODO: after create called make object unusable
+public class IndexCreator {
+  private Directory directory;
+  private long maxSize;
+  private int threads;
+  private ExecutorService threadPool;
+  private IndexWriter indexWriter;
+  private boolean isFinished;
+  private List<Future<Object>> futures = new ArrayList<Future<Object>>();
+  private Analyzer analyzer;
+  private BlockingQueue<Add> queue;
+
+  public IndexCreator(Directory directory, long maxSize, int threads, Analyzer analyzer, ExecutorService threadPool) {
+    this.directory = directory;
+    this.maxSize = maxSize;
+    this.threads = threads;
+    this.analyzer = analyzer;
+    this.threadPool = threadPool;
+    isFinished = false;
+  }
+
+  public static class Add {
+    private Document document;
+
+    // private RAMDirectory ramDirectory;
+
+    // public Add(RAMDirectory ramDirectory) {
+    // this.ramDirectory = ramDirectory;
+    // }
+
+    public Add(Document document) {
+      this.document = document;
+    }
+
+    // public RAMDirectory getRamDirectory() {
+    // return ramDirectory;
+    // }
+
+    public Document getDocument() {
+      return document;
+    }
+  }
+
+  public void start(BlockingQueue<Add> queue) throws Exception {
+    this.queue = queue;
+    indexWriter = new IndexWriter(directory, false, analyzer, true);
+    indexWriter.setUseCompoundFile(true);
+    indexWriter.setRAMBufferSizeMB(500.0); // set impossibly high to never be
+                                            // triggered, setting both to
+                                            // DISABLE_AUTO_FLUSH causes an
+                                            // exception
+    indexWriter.setMaxBufferedDocs(IndexWriter.DISABLE_AUTO_FLUSH);
+    //List<Callable<Object>> callables = new ArrayList<Callable<Object>>(threads);
+    for (int x = 0; x < threads; x++) {
+      //callables.add(new Task(queue));
+      futures.add(threadPool.submit(new Task(queue)));
+    }
+    //futures = threadPool.invokeAll(callables);
+    
+  }
+
+  public void create() throws Exception {
+    while (queue.peek() != null) { 
+      Thread.sleep(5);
+    }
+    setFinished(true);
+    try {
+      for (Future<Object> future : futures) {
+        if (future.isDone()) {
+          try {
+            future.get();
+          } catch (ExecutionException executionException) {
+            Throwable cause = executionException.getCause();
+            if (cause instanceof Exception) {
+              throw (Exception) cause;
+            } else {
+              throw new Exception(cause);
+            }
+          }
+        }
+        Thread.sleep(10);
+      }
+      indexWriter.optimize(); // should not be necessary
+    } finally {
+      indexWriter.close();
+    }
+  }
+
+  public void setFinished(boolean isFinished) {
+    this.isFinished = isFinished;
+  }
+
+  private boolean isFinished() {
+    if (isFinished)
+      return true;
+    if (indexWriter.ramSizeInBytes() >= maxSize) {
+      isFinished = true;
+    }
+    return isFinished;
+  }
+
+  public class Task implements Callable {
+    private BlockingQueue<Add> queue;
+
+    public Task(BlockingQueue<Add> queue) {
+      this.queue = queue;
+    }
+
+    public Object call() throws Exception {
+      while (!isFinished()) {
+        Add add = queue.poll(3, TimeUnit.MILLISECONDS);
+        if (add != null) {
+          Document document = add.getDocument();
+          indexWriter.addDocument(document, analyzer);
+        }
+      }
+      return null;
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/Indexes.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Indexes.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Indexes.java	(revision 0)
@@ -0,0 +1,38 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.ocean.util.Util;
+
+public class Indexes {
+  private Map<IndexID,Index> indexMap = new HashMap<IndexID,Index>();
+  
+  public IndexID getMaxId(String type) {
+    List<IndexID> list = new ArrayList<IndexID>();
+    for (IndexID indexId : indexMap.keySet()) {
+      if (indexId.type.equals(type)) {
+        list.add(indexId);
+      }
+    }
+    if (list.size() == 0) return null;
+    return Util.max(list);
+  }
+  
+  public List<Index> getIndexes() {
+    return new ArrayList(indexMap.values());
+  }
+  
+  public Indexes() {
+  }
+
+  public Index get(IndexID indexId) {
+    return indexMap.get(indexId);
+  }
+
+  public void add(Index index) {
+    indexMap.put(index.getId(), index);
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/IndexID.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/IndexID.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/IndexID.java	(revision 0)
@@ -0,0 +1,52 @@
+package org.apache.lucene.ocean;
+
+import org.apache.commons.lang.builder.CompareToBuilder;
+
+public class IndexID implements Comparable<IndexID> {
+  public final Long id;
+  public final String type;
+  
+  public IndexID(Long id, String type) {
+    this.id = id;
+    this.type = type;
+  }
+  
+  public String toString() {
+    return type+":"+id;
+  }
+  
+  public int compareTo(IndexID other) {
+    return new CompareToBuilder().append(id, other.id).append(type, type).toComparison();
+  }
+  
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((id == null) ? 0 : id.hashCode());
+    result = prime * result + ((type == null) ? 0 : type.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    final IndexID other = (IndexID) obj;
+    if (id == null) {
+      if (other.id != null)
+        return false;
+    } else if (!id.equals(other.id))
+      return false;
+    if (type == null) {
+      if (other.type != null)
+        return false;
+    } else if (!type.equals(other.type))
+      return false;
+    return true;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/log/LogFile.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/LogFile.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/LogFile.java	(revision 0)
@@ -0,0 +1,199 @@
+package org.apache.lucene.ocean.log;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import org.apache.lucene.ocean.LogDirectory;
+import org.apache.lucene.ocean.log.RawLogFile.FileStreamRecord;
+import org.apache.lucene.ocean.log.RawLogFile.StreamRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Log file.  Contains all record headers for loading of the actual record.
+ *
+ */
+public class LogFile implements Comparable<LogFile> {
+  final static Logger LOG = LoggerFactory.getLogger(LogFile.class);
+  private List<RecordHeader> recordHeaders; // sorted by id
+  private ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+  private RawLogFile rawLogFile;
+  private Long id;
+  
+  LogFile(Long id) {
+    this.id = id;
+  }
+  
+  public LogFile(Long id, String file, LogDirectory logDirectory) throws IOException {
+    this.id = id;
+    rawLogFile = new RawLogFile(file, logDirectory);
+    recordHeaders = rawLogFile.loadRecordHeaders();
+  }
+  
+  public int getNumRecords() {
+    return recordHeaders.size();
+  }
+  
+  public void close() throws IOException {
+    rawLogFile.close();
+  }
+  
+  public long getPreviousId(long id) {
+    long pos = getPosition(id);
+    if (pos == 0) return -1;
+    return pos-1;
+  }
+  
+  public int compareTo(LogFile other) {
+    return (id < other.id ? -1 : (id == other.id ? 0 : 1));
+  }
+
+  private void add(RecordHeader recordHeader) {
+    recordHeaders.add(recordHeader);
+  }
+
+  public long size() throws IOException {
+    return rawLogFile.size();
+  }
+
+  private int getPosition(long id) {
+    RecordHeader h = new RecordHeader();
+    h.id = id;
+    int pos = Collections.binarySearch(recordHeaders, h);
+    if (pos < 0)
+      pos = -1 - pos;
+    return pos;
+  }
+
+  public RecordHeader get(long id) {
+    if (recordHeaders.size() > 0) {
+      int pos = getPosition(id);
+      if (pos >= recordHeaders.size()) return null;
+      return recordHeaders.get(pos);
+    } else return null;
+  }
+
+  public boolean delete(long id) throws IOException {
+    lock.writeLock().lock();
+    try {
+      RecordHeader recordHeader = get(id);
+      boolean deleted = rawLogFile.delete(recordHeader);
+      if (deleted) {
+        int pos = getPosition(id);
+        recordHeaders.remove(pos);
+      }
+      return deleted;
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  public long getId() {
+    return id;
+  }
+
+  public long getMinId() {
+    if (recordHeaders.size() == 0)
+      return -1;
+    else
+      return recordHeaders.get(0).id;
+  }
+
+  public boolean containsId(Long id) {
+    if (recordHeaders.size() == 0) return false;
+    return id <= getMaxId() && id >= getMinId();
+  }
+
+  public long getMaxId() {
+    return recordHeaders.get(recordHeaders.size() - 1).id;
+  }
+
+  public List<RecordHeader> getRecordHeaders(long from, int num) {
+    int pos = getPosition(from);
+    int count = 0;
+    List<RecordHeader> recordHeaders = new ArrayList<RecordHeader>(num);
+    while (count < num) {
+      RecordHeader recordHeader = recordHeaders.get(pos);
+      recordHeaders.add(recordHeader);
+      count++;
+    }
+    return recordHeaders;
+  }
+
+  public void commit(RecordHeader recordHeader) {
+    // TODO: commit
+  }
+
+  public RecordHeader writeRecord(Long id, byte[] docBytes, byte[] otherBytes) throws IOException {
+    lock.writeLock().lock();
+    try {
+      RecordHeader recordHeader = rawLogFile.write(id, docBytes, otherBytes);
+      add(recordHeader);
+      return recordHeader;
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  public RecordIterator getRecordIterator(Long snapshotId) throws IOException {
+    RandomAccessFile input = rawLogFile.openInput();
+    return new RecordIterator(snapshotId, input);
+  }
+
+  public class RecordIterator {
+    private RandomAccessFile input;
+    int pos;
+
+    public RecordIterator(Long snapshotId, RandomAccessFile input) throws IOException {
+      this.input = input;
+      if (snapshotId == null) {
+        pos = 0;
+      } else {
+        pos = getPosition(snapshotId);
+      }
+    }
+
+    public Record next() throws IOException {
+      RecordHeader recordHeader = recordHeaders.get(pos);
+      if (LOG.isDebugEnabled()) LOG.debug("recordHeader: "+recordHeader);
+      pos++;
+      return getRecord(recordHeader);
+    }
+
+    private Record getRecord(RecordHeader recordHeader) throws IOException {
+      FileStreamRecord fileStreamRecord = rawLogFile.readRecordData(recordHeader, input);
+      Record record = new Record(recordHeader.id, fileStreamRecord);
+      return record;
+    }
+
+    public boolean hasNext() {
+      return pos < recordHeaders.size();
+    }
+
+    public void close() throws IOException {
+      input.close();
+    }
+  }
+
+  public static class Record {
+    private long id;
+    private StreamRecord streamRecord;
+
+    public Record(long id, StreamRecord streamRecord) {
+      this.id = id;
+      this.streamRecord = streamRecord;
+    }
+
+    public Long getId() {
+      return id;
+    }
+
+    public StreamRecord getStreamRecord() {
+      return streamRecord;
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/log/RecordHeader.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/RecordHeader.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/RecordHeader.java	(revision 0)
@@ -0,0 +1,23 @@
+package org.apache.lucene.ocean.log;
+
+import org.apache.commons.lang.builder.ReflectionToStringBuilder;
+
+/**
+ * Log record header
+ *
+ */
+public class RecordHeader implements Comparable<RecordHeader> {
+  public long id;
+  public int headerPosition;
+  public int docsLength;
+  public int otherLength;
+  public long docsPosition;
+
+  public int compareTo(RecordHeader other) {
+    return (id < other.id ? -1 : (id == other.id ? 0 : 1));
+  }
+
+  public String toString() {
+    return ReflectionToStringBuilder.toString(this);
+  }
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/log/TransactionLog.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/TransactionLog.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/TransactionLog.java	(revision 0)
@@ -0,0 +1,134 @@
+package org.apache.lucene.ocean.log;
+
+import java.io.IOException;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.lang.SerializationUtils;
+import org.apache.lucene.ocean.Deletes;
+import org.apache.lucene.ocean.Documents;
+import org.apache.lucene.ocean.LogDirectory;
+import org.apache.lucene.ocean.Batch.MasterBatch;
+import org.apache.lucene.ocean.Batch.SlaveBatch;
+import org.apache.lucene.ocean.log.LogFile.Record;
+import org.apache.lucene.ocean.log.LogFileManager.RecordIterator;
+import org.apache.lucene.ocean.log.RawLogFile.StreamData;
+import org.apache.lucene.ocean.util.ByteBufferPool;
+import org.apache.lucene.ocean.util.LongSequence;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * Serializes transactions known internally as batches to an underlying log
+ * file. Provides an iterator over the batches.
+ * 
+ */
+public class TransactionLog {
+  // NOTE(review): pooled buffers are allocated but not referenced anywhere in
+  // this class — presumably reserved for future use; confirm before removing
+  private ByteBufferPool byteBufferPool = new ByteBufferPool(50 * 1024, 5, 5);
+  LogFileManager logFileManager;
+  // serializes batch writes so records are appended one at a time
+  private ReentrantLock writeLock = new ReentrantLock();
+  private LogDirectory logDirectory;
+  // hands out snapshot ids, seeded one past the largest id found on disk
+  private LongSequence snapshotIdSequence;
+
+  /**
+   * Opens the transaction log over the given directory and seeds the snapshot
+   * id sequence at maxId+1 (or 1 when the log is empty, since getMaxId
+   * returns -1 in that case).
+   */
+  public TransactionLog(LogDirectory logDirectory) throws IOException {
+    this.logDirectory = logDirectory;
+    logFileManager = new LogFileManager(logDirectory);
+    long maxId = logFileManager.getMaxId();
+    if (maxId == -1) maxId = 0;
+    snapshotIdSequence = new LongSequence(maxId+1, 1);
+  }
+  
+  /** Closes all underlying log files. */
+  public void close() throws IOException {
+    logFileManager.close();
+  }
+
+  /**
+   * Serializes the batch (a RAMDirectory or Documents for the doc section,
+   * optional Deletes for the other section) and appends it as one record.
+   * Serialization happens outside the lock; only the file append is guarded.
+   */
+  public void writeMasterBatch(final Long id, final Long previousId, final MasterBatch masterBatch) throws Exception {
+    byte[] docBytes = null;
+    byte[] otherBytes = null;
+    if (masterBatch.hasRAMDirectory()) {
+      docBytes = SerializationUtils.serialize(masterBatch.getRamDirectory());
+    } else if (masterBatch.hasDocuments()) {
+      Documents documents = masterBatch.getDocuments();
+      docBytes = SerializationUtils.serialize(documents);
+    }
+    if (masterBatch.hasDeletes()) {
+      Deletes deletes = masterBatch.getDeletes();
+      otherBytes = SerializationUtils.serialize(deletes);
+    }
+    writeLock.lock();
+    try {
+      logFileManager.writeRecord(id, docBytes, otherBytes);
+    } finally {
+      writeLock.unlock();
+    }
+  }
+  
+  /** @return total number of records across all log files. */
+  public int getNumRecords() {
+    return logFileManager.getNumRecords();
+  }
+  
+  /** @return the next snapshot id (monotonically increasing). */
+  public long getNextId() {
+    return snapshotIdSequence.getAndIncrement();
+  }
+  
+  public long getMinId() {
+    return logFileManager.getMinId();
+  }
+  
+  public long getMaxId() {
+    return logFileManager.getMaxId();
+  }
+
+  public Long getPreviousId(long id) {
+    return logFileManager.getPreviousId(id);
+  }
+
+  /** @param snapshotId starting record id, or null to iterate from the beginning */
+  public SlaveBatchIterator getSlaveBatchIterator(Long snapshotId) throws Exception {
+    return new SlaveBatchIterator(snapshotId);
+  }
+
+  /**
+   * Deserializes records back into SlaveBatch objects for replay on a slave.
+   * Must be closed to release the underlying file handle.
+   */
+  public class SlaveBatchIterator {
+    RecordIterator recordIterator;
+
+    public SlaveBatchIterator(Long snapshotId) throws IOException {
+      recordIterator = logFileManager.getRecordIterator(snapshotId);
+    }
+
+    public boolean hasNext() throws IOException {
+      return recordIterator.hasNext();
+    }
+
+    /**
+     * Reads the next record, optionally skipping either payload section.
+     * The doc section may deserialize to either a RAMDirectory or a
+     * Documents object; the matching SlaveBatch constructor is chosen.
+     */
+    public SlaveBatch next(boolean loadDocuments, boolean loadOther) throws Exception {
+      Record record = recordIterator.next();
+      Documents documents = null;
+      RAMDirectory ramDirectory = null;
+      Deletes deletes = null;
+      if (loadDocuments) {
+        StreamData docData = record.getStreamRecord().getDocuments();
+        if (docData != null) {
+          byte[] docBytes = docData.getBytes();
+          Object object = SerializationUtils.deserialize(docBytes);
+          if (object instanceof RAMDirectory) {
+            ramDirectory = (RAMDirectory) object;
+          } else {
+            documents = (Documents) object;
+          }
+        }
+      }
+      if (loadOther) {
+        StreamData otherData = record.getStreamRecord().getOther();
+        if (otherData != null) {
+          byte[] otherBytes = otherData.getBytes();
+          deletes = (Deletes) SerializationUtils.deserialize(otherBytes);
+        }
+      }
+      if (ramDirectory != null) {
+        return new SlaveBatch(record.getId(), ramDirectory, deletes);
+      } else {
+        return new SlaveBatch(record.getId(), documents, deletes);
+      }
+    }
+
+    public void close() throws IOException {
+      recordIterator.close();
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/log/RawLogFile.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/RawLogFile.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/RawLogFile.java	(revision 0)
@@ -0,0 +1,232 @@
+package org.apache.lucene.ocean.log;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.ocean.LogDirectory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Performs raw file access including serializing records to and from the underlying file
+ *
+ */
+/**
+ * Performs raw file access for the transaction log, serializing records to
+ * and from the underlying file.
+ *
+ * On-disk record layout (see {@link #write}):
+ *   HEADER (3 magic bytes 'O','c','N') | status (1 byte: OK or DELETED) |
+ *   id (8 bytes) | docsLength (4 bytes) | otherLength (4 bytes) |
+ *   docs payload | other payload
+ */
+public class RawLogFile {
+  final static Logger LOG = LoggerFactory.getLogger(RawLogFile.class);
+  /** Status byte marking a record as logically deleted. */
+  public static final byte DELETED = 101;
+  /** Status byte marking a live record. */
+  public static final byte OK = 9;
+  /** Magic bytes that begin every record. */
+  public static final byte[] HEADER = new byte[] { 'O', 'c', 'N' };
+  private long currentWritePosition = 0;
+  private ReentrantLock writeLock = new ReentrantLock();
+  private String file;
+  private RandomAccessFile output;
+  private LogDirectory logDirectory;
+
+  /** Opens (or creates) the file for appending, positioning at the current end. */
+  public RawLogFile(String file, LogDirectory logDirectory) throws IOException {
+    this.file = file;
+    this.logDirectory = logDirectory;
+    output = logDirectory.getOutput(file, false);
+    // TODO: need better way to make sure we're starting at the end of the file
+    currentWritePosition = logDirectory.fileLength(file);
+    // was a raw System.out.println (with a "staring" typo); use the logger
+    LOG.info("starting currentWritePosition: {}", currentWritePosition);
+  }
+
+  /** Opens a fresh handle on this file for a reader; the caller must close it. */
+  public RandomAccessFile openInput() throws IOException {
+    return logDirectory.openInput(file);
+  }
+
+  /** @return current length in bytes of the underlying file. */
+  public long size() throws IOException {
+    return output.length();
+  }
+
+  /** A record's two payload sections: serialized documents plus "other" data. */
+  public static interface StreamRecord {
+    public StreamData getDocuments();
+
+    public StreamData getOther();
+  }
+
+  /** One payload section that can be materialized as bytes on demand. */
+  public static interface StreamData {
+    public byte[] getBytes() throws IOException;
+
+    public int getLength();
+  }
+
+  /** StreamRecord backed by offsets into an open RandomAccessFile. */
+  public static class FileStreamRecord implements StreamRecord {
+    private RecordHeader recordHeader;
+    private RandomAccessFile input;
+
+    public FileStreamRecord(RecordHeader recordHeader, RandomAccessFile input) {
+      this.recordHeader = recordHeader;
+      this.input = input;
+    }
+
+    /** @return the docs payload, or null when the record has none. */
+    public FileStreamData getDocuments() {
+      if (recordHeader.docsLength == 0) return null;
+      return new FileStreamData(recordHeader.docsLength, recordHeader.docsPosition, input);
+    }
+
+    /** @return the "other" payload (stored directly after docs), or null when absent. */
+    public FileStreamData getOther() {
+      if (recordHeader.otherLength == 0) return null;
+      return new FileStreamData(recordHeader.otherLength, recordHeader.docsPosition + recordHeader.docsLength, input);
+    }
+  }
+
+  /** Lazily reads a byte range from the shared input file. */
+  public static class FileStreamData implements StreamData {
+    private int length;
+    private long position;
+    private RandomAccessFile input;
+
+    public FileStreamData(int length, long position, RandomAccessFile input) {
+      this.length = length;
+      this.position = position;
+      this.input = input;
+    }
+
+    /** Seeks and reads the full section; not safe for concurrent use of one input. */
+    public byte[] getBytes() throws IOException {
+      input.seek(position);
+      byte[] bytes = new byte[length];
+      input.readFully(bytes);
+      return bytes;
+    }
+
+    public int getLength() {
+      return length;
+    }
+  }
+
+  public FileStreamRecord readRecordData(RecordHeader recordHeader, RandomAccessFile input) {
+    return new FileStreamRecord(recordHeader, input);
+  }
+
+  /**
+   * Scans the whole file and returns the headers of all live (non deleted)
+   * records.  The handle opened for the scan is always closed.
+   */
+  public List<RecordHeader> loadRecordHeaders() throws IOException {
+    RandomAccessFile input = openInput();
+    if (input.length() > 0) {
+      LoadRecordHeaders loadRecordHeaders = new LoadRecordHeaders(input);
+      return loadRecordHeaders.getRecordHeaders();
+    } else {
+      // the original leaked this handle on the empty-file path
+      input.close();
+      return new ArrayList<RecordHeader>();
+    }
+  }
+
+  /**
+   * Marks the record described by recordHeader as deleted by overwriting its
+   * status byte, then syncs the change to disk.
+   *
+   * @return true if the record was live and is now deleted, false if it was
+   *         already marked deleted
+   * @throws IOException if the header magic is not at the expected position
+   */
+  public boolean delete(RecordHeader recordHeader) throws IOException {
+    long lastPosition = -1;
+    writeLock.lock();
+    try {
+      lastPosition = output.getFilePointer();
+      output.seek(recordHeader.headerPosition);
+      byte[] header = new byte[HEADER.length];
+      output.readFully(header, 0, header.length);
+      if (!Arrays.equals(HEADER, header)) {
+        throw new IOException("no header");
+      }
+      byte status = output.readByte();
+      if (status == OK) {
+        // the layout written by write() puts the 8 byte id directly after the
+        // status byte; the original read a spurious int first, which consumed
+        // half the id and made the assertion below meaningless
+        long id = output.readLong();
+        assert id == recordHeader.id;
+        // the status byte sits at headerPosition + 3 (after the magic); the
+        // original seeked to +4, which overwrote the first byte of the id
+        output.seek(recordHeader.headerPosition + HEADER.length);
+        output.writeByte(DELETED);
+        output.getFD().sync();
+        return true;
+      } else {
+        // already deleted
+        return false;
+      }
+    } finally {
+      if (lastPosition != -1) output.seek(lastPosition);
+      writeLock.unlock();
+    }
+  }
+
+  /** Sequentially parses every record header in the file, skipping deleted ones. */
+  public class LoadRecordHeaders {
+    private long currentReadPosition = 0;
+    private List<RecordHeader> recordHeaders = new ArrayList<RecordHeader>();
+
+    public LoadRecordHeaders(RandomAccessFile input) throws IOException {
+      try {
+        while (true) {
+          input.seek(currentReadPosition);
+          RecordHeader recordHeader = new RecordHeader();
+          byte[] header = new byte[HEADER.length];
+          recordHeader.headerPosition = (int) input.getFilePointer();
+          input.readFully(header, 0, header.length);
+          if (!Arrays.equals(HEADER, header)) {
+            LOG.error("header incorrect: "+new String(header)+" at pos: "+currentReadPosition);
+            // resync: scan forward one byte at a time until the magic is
+            // found; the original advanced three bytes per attempt and
+            // could step over an unaligned header entirely
+            while (true) {
+              recordHeader.headerPosition = (int) input.getFilePointer();
+              input.readFully(header, 0, header.length);
+              if (Arrays.equals(HEADER, header)) {
+                LOG.error("found header at position: "+recordHeader.headerPosition);
+                break;
+              }
+              input.seek(recordHeader.headerPosition + 1);
+            }
+          }
+          byte status = input.readByte();
+          recordHeader.id = input.readLong();
+          if (status == DELETED) {
+            if (LOG.isDebugEnabled()) LOG.debug("record "+recordHeader.id+" status deleted");
+          }
+          recordHeader.docsLength = input.readInt();
+          recordHeader.otherLength = input.readInt();
+          recordHeader.docsPosition = input.getFilePointer();
+          // deleted records are parsed (to know how far to skip) but not kept
+          if (status != DELETED)
+            recordHeaders.add(recordHeader);
+          currentReadPosition = input.getFilePointer() + recordHeader.docsLength + recordHeader.otherLength;
+        }
+      } catch (EOFException eofException) {
+        // at end of file
+      } finally {
+        if (input != null)
+          input.close();
+      }
+    }
+
+    public List<RecordHeader> getRecordHeaders() {
+      return recordHeaders;
+    }
+  }
+
+  // TODO: add read to footer to ensure the record was fully written
+  // TODO: handle write failure cleanly
+  /**
+   * Appends one record (header + payloads) and syncs it to disk.
+   * Null payloads are written as zero-length sections.
+   *
+   * @return the header describing where the record landed
+   */
+  public RecordHeader write(Long id, byte[] docBytes, byte[] otherBytes) throws IOException {
+    writeLock.lock();
+    try {
+      int documentsLength = 0;
+      if (docBytes != null) documentsLength = docBytes.length;
+      int otherLength = 0;
+      if (otherBytes != null) otherLength = otherBytes.length;
+      RecordHeader recordHeader = new RecordHeader();
+      recordHeader.id = id;
+      recordHeader.headerPosition = (int) currentWritePosition;
+      recordHeader.docsLength = documentsLength;
+      recordHeader.otherLength = otherLength;
+
+      output.seek(currentWritePosition);
+      output.write(HEADER);
+      output.writeByte(OK);
+      output.writeLong(id);
+      output.writeInt(recordHeader.docsLength);
+      output.writeInt(recordHeader.otherLength);
+      recordHeader.docsPosition = output.getFilePointer();
+      if (docBytes != null) output.write(docBytes);
+      if (otherBytes != null) output.write(otherBytes);
+      output.getFD().sync();
+      currentWritePosition = output.getFilePointer();
+      return recordHeader;
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  public void close() throws IOException {
+    output.close();
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/log/LogFileManager.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/LogFileManager.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/LogFileManager.java	(revision 0)
@@ -0,0 +1,266 @@
+package org.apache.lucene.ocean.log;
+
+import java.io.IOException;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.lucene.ocean.LogDirectory;
+import org.apache.lucene.ocean.log.LogFile.Record;
+import org.apache.lucene.ocean.util.LongSequence;
+
+/**
+ * Manages the log files. There is one active log file at a time that updates
+ * are written to. When the active log file reaches the MAX_FILE_SIZE a new
+ * active log file is created.
+ * 
+ */
+// TODO: delete log files that are no longer needed
+// TODO: delete log files that are no longer needed
+public class LogFileManager {
+  public static Logger log = Logger.getLogger(LogFileManager.class.getName());
+  /** A new active log file is started once the current one reaches this size. */
+  public static final long MAX_FILE_SIZE = 1024 * 1024 * 64;
+  // sorted by file id; the last entry is the active (writable) file
+  private List<LogFile> logFiles = new ArrayList<LogFile>();
+  private LongSequence logIdSequence = new LongSequence(1, 1);
+  private ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+  private ScheduledExecutorService logFileCheckTimer;
+  private LogDirectory logDirectory;
+
+  /**
+   * Loads all existing log files from the directory, seeds the file id
+   * sequence past the highest existing id, and starts the background task
+   * that rolls to a new file when the active one grows too large.
+   */
+  // TODO: load existing max snapshot id
+  public LogFileManager(LogDirectory logDirectory) throws IOException {
+    this.logDirectory = logDirectory;
+    String[] list = logDirectory.list();
+    for (String file : list) {
+      long id = getLogFileNumber(file);
+      LogFile logFile = new LogFile(id, file, logDirectory);
+      logFiles.add(logFile);
+    }
+    Collections.sort(logFiles);
+    if (logFiles.size() > 0) {
+      long last = logFiles.get(logFiles.size() - 1).getId();
+      long next = last + 1;
+      logIdSequence.set(next);
+    }
+    logFileCheckTimer = Executors.newSingleThreadScheduledExecutor();
+    logFileCheckTimer.scheduleWithFixedDelay(new LogFileSizeCheck(), 1000, 10 * 1000, TimeUnit.MILLISECONDS);
+  }
+
+  /** @return total number of live records across all log files. */
+  public int getNumRecords() {
+    int num = 0;
+    for (LogFile logFile : logFiles) {
+      num += logFile.getNumRecords();
+    }
+    return num;
+  }
+
+  /** Stops the roll-over task and closes every log file. */
+  public void close() throws IOException {
+    // shut the timer down first so it cannot roll files while we close them;
+    // the original left the executor running, leaking its thread
+    logFileCheckTimer.shutdown();
+    for (LogFile logFile : logFiles) {
+      logFile.close();
+    }
+  }
+
+  // NOTE(review): currently unused; kept for symmetry with LogFile.getPosition
+  private int getPosition(long id) {
+    LogFile lf = new LogFile(id);
+    int pos = Collections.binarySearch(logFiles, lf);
+    if (pos < 0)
+      pos = -1 - pos;
+    return pos;
+  }
+
+  /** @return smallest record id in the oldest log file, or -1 when empty. */
+  public long getMinSnapshotId() {
+    if (logFiles.size() > 0) {
+      LogFile logFile = logFiles.get(0);
+      return logFile.getMinId();
+    } else {
+      return -1;
+    }
+  }
+
+  /** @return largest record id in the newest log file, or -1 when empty. */
+  public long getMaxSnapshotId() {
+    if (logFiles.size() > 0) {
+      LogFile logFile = logFiles.get(logFiles.size() - 1);
+      return logFile.getMaxId();
+    } else {
+      return -1;
+    }
+  }
+
+  public long getMaxId() {
+    if (logFiles.size() == 0)
+      return -1;
+    return logFiles.get(logFiles.size() - 1).getMaxId();
+  }
+
+  private LogFile getLast() {
+    if (logFiles.size() == 0)
+      return null;
+    return logFiles.get(logFiles.size() - 1);
+  }
+
+  /** Periodic task that rolls to a new log file when the active one is full. */
+  public class LogFileSizeCheck implements Runnable {
+    public void run() {
+      try {
+        LogFile logFile = getLast();
+        if (logFile != null && logFile.size() >= MAX_FILE_SIZE) {
+          // createNewLogFile registers the new file itself; the original
+          // also added the result here, duplicating the entry in logFiles
+          createNewLogFile();
+        }
+      } catch (IOException ioException) {
+        log.log(Level.SEVERE, "", ioException);
+      }
+    }
+  }
+
+  /**
+   * Marks the record with the given id deleted.
+   * @throws Exception when no log file contains the id
+   */
+  public boolean delete(Long id) throws Exception {
+    LogFile logFile = getLogFileContaining(id);
+    if (logFile == null)
+      throw new Exception("unknown id: " + id);
+    return logFile.delete(id);
+  }
+
+  /**
+   * @return the id of the record preceding {@code id}, or null when there is
+   *         none.  LogFile signals "no previous" with the primitive -1, so
+   *         the original's null check on the boxed value was dead code.
+   */
+  public Long getPreviousId(Long id) {
+    LogFile logFile = getLogFileContaining(id);
+    if (logFile == null)
+      return null;
+    int pos = logFiles.indexOf(logFile);
+    long previousId = logFile.getPreviousId(id);
+    if (previousId == -1 && pos > 0) {
+      // id was the first record of its file; fall back to the preceding file
+      previousId = logFiles.get(pos - 1).getPreviousId(id);
+    }
+    return previousId == -1 ? null : Long.valueOf(previousId);
+  }
+
+  public long getMinId() {
+    if (logFiles.size() > 0) {
+      return logFiles.get(0).getMinId();
+    }
+    return -1;
+  }
+
+  /** Creates, registers and returns a new active log file. */
+  public LogFile createNewLogFile() throws IOException {
+    long id = logIdSequence.getAndIncrement();
+    String fileName = createLogFileName(id);
+    LogFile logFile = new LogFile(id, fileName, logDirectory);
+    lock.writeLock().lock();
+    try {
+      logFiles.add(logFile);
+      return logFile;
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  // file names look like log00000042.bin
+  private static String createLogFileName(long value) {
+    DecimalFormat decimalFormat = (DecimalFormat) DecimalFormat.getInstance();
+    decimalFormat.applyPattern("00000000");
+    return "log" + decimalFormat.format(value) + ".bin";
+  }
+
+  private static long getLogFileNumber(String fileName) {
+    String numberString = fileName.substring(3, fileName.lastIndexOf('.'));
+    return Long.parseLong(numberString);
+  }
+
+  private LogFile getLogFileContaining(Long id) {
+    lock.readLock().lock();
+    try {
+      for (LogFile logFile : logFiles) {
+        if (logFile.containsId(id)) {
+          return logFile;
+        }
+      }
+      return null;
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  public boolean contains(Long id) {
+    lock.readLock().lock();
+    try {
+      for (LogFile logFile : logFiles) {
+        if (logFile.containsId(id)) {
+          return true;
+        }
+      }
+      return false;
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /** @param snapshotId starting record id, or null to iterate from the beginning */
+  public RecordIterator getRecordIterator(Long snapshotId) throws IOException {
+    return new RecordIterator(snapshotId);
+  }
+
+  /** Iterates records across log files in order, advancing file by file. */
+  public class RecordIterator {
+    private Iterator<LogFile> logFileIterator;
+    private LogFile.RecordIterator currentRecordIterator;
+
+    public RecordIterator(Long snapshotId) throws IOException {
+      logFileIterator = logFiles.iterator();
+      while (logFileIterator.hasNext()) {
+        LogFile logFile = logFileIterator.next();
+        if (snapshotId == null || logFile.containsId(snapshotId)) {
+          currentRecordIterator = logFile.getRecordIterator(snapshotId);
+          // stop at the first matching file; the original kept looping and
+          // opened (and leaked) an iterator for every remaining file,
+          // ending positioned at the last file instead of the first
+          break;
+        }
+      }
+    }
+
+    public void close() throws IOException {
+      if (currentRecordIterator != null)
+        currentRecordIterator.close();
+    }
+
+    /** @return the next record, or null when no iterator was ever opened. */
+    public Record next() throws IOException {
+      if (currentRecordIterator != null) {
+        return currentRecordIterator.next();
+      } else {
+        return null;
+      }
+    }
+
+    /** Advances across files, skipping any that are empty. */
+    public boolean hasNext() throws IOException {
+      if (currentRecordIterator == null)
+        return false;
+      if (currentRecordIterator.hasNext()) {
+        return true;
+      }
+      currentRecordIterator.close();
+      while (logFileIterator.hasNext()) {
+        LogFile logFile = logFileIterator.next();
+        currentRecordIterator = logFile.getRecordIterator(null);
+        if (currentRecordIterator.hasNext()) {
+          return true;
+        }
+        currentRecordIterator.close();
+      }
+      return false;
+    }
+  }
+
+  public void commit(RecordHeader recordHeader) {
+    // TODO: implement commit
+  }
+
+  /** Appends a record to the active log file. */
+  public RecordHeader writeRecord(Long id, byte[] doc, byte[] other) throws IOException {
+    LogFile logFile = getCurrentLogFile();
+    return logFile.writeRecord(id, doc, other);
+  }
+
+  /** @return the active log file, creating the first one on demand. */
+  public LogFile getCurrentLogFile() throws IOException {
+    LogFile logFile = getLast();
+    if (logFile == null) {
+      logFile = createNewLogFile();
+    }
+    return logFile;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/log/LogFile.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/LogFile.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/LogFile.java	(revision 0)
@@ -0,0 +1,199 @@
+package org.apache.lucene.ocean.log;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import org.apache.lucene.ocean.LogDirectory;
+import org.apache.lucene.ocean.log.RawLogFile.FileStreamRecord;
+import org.apache.lucene.ocean.log.RawLogFile.StreamRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Log file.  Contains all record headers for loading of the actual record.
+ *
+ */
+public class LogFile implements Comparable<LogFile> {
+  final static Logger LOG = LoggerFactory.getLogger(LogFile.class);
+  private List<RecordHeader> recordHeaders; // sorted by id
+  private ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+  private RawLogFile rawLogFile;
+  private Long id;
+  
+  /** Key-only constructor used for binary searching; recordHeaders stays null. */
+  LogFile(Long id) {
+    this.id = id;
+  }
+  
+  /** Opens the backing raw file and loads all live record headers from it. */
+  public LogFile(Long id, String file, LogDirectory logDirectory) throws IOException {
+    this.id = id;
+    rawLogFile = new RawLogFile(file, logDirectory);
+    recordHeaders = rawLogFile.loadRecordHeaders();
+  }
+  
+  public int getNumRecords() {
+    return recordHeaders.size();
+  }
+  
+  public void close() throws IOException {
+    rawLogFile.close();
+  }
+  
+  /**
+   * @return the id of the record preceding {@code id}, or -1 when there is
+   *         none.  The original returned the list position instead of an id.
+   */
+  public long getPreviousId(long id) {
+    int pos = getPosition(id);
+    if (pos == 0) return -1;
+    return recordHeaders.get(pos - 1).id;
+  }
+  
+  /** Orders log files by id value. */
+  public int compareTo(LogFile other) {
+    // the original used == on the boxed Long ids, which compares references
+    // and is only reliable for small cached values
+    return id.compareTo(other.id);
+  }
+
+  private void add(RecordHeader recordHeader) {
+    recordHeaders.add(recordHeader);
+  }
+
+  public long size() throws IOException {
+    return rawLogFile.size();
+  }
+
+  /** @return insertion position of id in the sorted header list. */
+  private int getPosition(long id) {
+    RecordHeader h = new RecordHeader();
+    h.id = id;
+    int pos = Collections.binarySearch(recordHeaders, h);
+    if (pos < 0)
+      pos = -1 - pos;
+    return pos;
+  }
+
+  /** @return the header at or after {@code id}, or null when past the end. */
+  public RecordHeader get(long id) {
+    if (recordHeaders.size() > 0) {
+      int pos = getPosition(id);
+      if (pos >= recordHeaders.size()) return null;
+      return recordHeaders.get(pos);
+    } else return null;
+  }
+
+  /**
+   * Deletes the record with the given id.
+   * @return true if it was live and is now deleted; false if absent or
+   *         already deleted
+   */
+  public boolean delete(long id) throws IOException {
+    lock.writeLock().lock();
+    try {
+      RecordHeader recordHeader = get(id);
+      // get() returns the nearest following header (or null); only act on an
+      // exact match — the original would NPE or delete the wrong record
+      if (recordHeader == null || recordHeader.id != id) {
+        return false;
+      }
+      boolean deleted = rawLogFile.delete(recordHeader);
+      if (deleted) {
+        recordHeaders.remove(getPosition(id));
+      }
+      return deleted;
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  public long getId() {
+    return id;
+  }
+
+  /** @return smallest record id, or -1 when the file holds no records. */
+  public long getMinId() {
+    if (recordHeaders.size() == 0)
+      return -1;
+    else
+      return recordHeaders.get(0).id;
+  }
+
+  public boolean containsId(Long id) {
+    if (recordHeaders.size() == 0) return false;
+    return id <= getMaxId() && id >= getMinId();
+  }
+
+  /** @return largest record id, or -1 when the file holds no records. */
+  public long getMaxId() {
+    // the original threw IndexOutOfBoundsException on an empty file,
+    // inconsistent with getMinId's -1 sentinel
+    if (recordHeaders.size() == 0) return -1;
+    return recordHeaders.get(recordHeaders.size() - 1).id;
+  }
+
+  /** @return up to {@code num} headers starting at the position of {@code from}. */
+  public List<RecordHeader> getRecordHeaders(long from, int num) {
+    int pos = getPosition(from);
+    // the original shadowed the field with an empty local list, read from
+    // that empty list, and never advanced pos — it could not work
+    List<RecordHeader> result = new ArrayList<RecordHeader>(num);
+    while (result.size() < num && pos < recordHeaders.size()) {
+      result.add(recordHeaders.get(pos));
+      pos++;
+    }
+    return result;
+  }
+
+  public void commit(RecordHeader recordHeader) {
+    // TODO: commit
+  }
+
+  /** Appends a record to the raw file and registers its header. */
+  public RecordHeader writeRecord(Long id, byte[] docBytes, byte[] otherBytes) throws IOException {
+    lock.writeLock().lock();
+    try {
+      RecordHeader recordHeader = rawLogFile.write(id, docBytes, otherBytes);
+      add(recordHeader);
+      return recordHeader;
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  /** @param snapshotId starting record id, or null to iterate from the start */
+  public RecordIterator getRecordIterator(Long snapshotId) throws IOException {
+    RandomAccessFile input = rawLogFile.openInput();
+    return new RecordIterator(snapshotId, input);
+  }
+
+  /** Iterates this file's records in id order; must be closed when done. */
+  public class RecordIterator {
+    private RandomAccessFile input;
+    int pos;
+
+    public RecordIterator(Long snapshotId, RandomAccessFile input) throws IOException {
+      this.input = input;
+      if (snapshotId == null) {
+        pos = 0;
+      } else {
+        pos = getPosition(snapshotId);
+      }
+    }
+
+    public Record next() throws IOException {
+      RecordHeader recordHeader = recordHeaders.get(pos);
+      if (LOG.isDebugEnabled()) LOG.debug("recordHeader: "+recordHeader);
+      pos++;
+      return getRecord(recordHeader);
+    }
+
+    /** Builds a Record whose payloads are read lazily from the shared input. */
+    private Record getRecord(RecordHeader recordHeader) throws IOException {
+      FileStreamRecord fileStreamRecord = rawLogFile.readRecordData(recordHeader, input);
+      return new Record(recordHeader.id, fileStreamRecord);
+    }
+
+    public boolean hasNext() {
+      return pos < recordHeaders.size();
+    }
+
+    public void close() throws IOException {
+      input.close();
+    }
+  }
+
+  /** Immutable pairing of a record id with its lazily-loaded stream data. */
+  public static class Record {
+    private long id;
+    private StreamRecord streamRecord;
+
+    public Record(long id, StreamRecord streamRecord) {
+      this.id = id;
+      this.streamRecord = streamRecord;
+    }
+
+    public Long getId() {
+      return id;
+    }
+
+    public StreamRecord getStreamRecord() {
+      return streamRecord;
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/log/LogFileManager.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/LogFileManager.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/LogFileManager.java	(revision 0)
@@ -0,0 +1,266 @@
+package org.apache.lucene.ocean.log;
+
+import java.io.IOException;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.lucene.ocean.LogDirectory;
+import org.apache.lucene.ocean.log.LogFile.Record;
+import org.apache.lucene.ocean.util.LongSequence;
+
+/**
+ * Manages the log files. There is one active log file at a time that updates
+ * are written to. When the active log file reaches the MAX_FILE_SIZE a new
+ * active log file is created.
+ * 
+ */
+// TODO: delete log files that are no longer needed
+// TODO: delete log files that are no longer needed
+public class LogFileManager {
+  public static Logger log = Logger.getLogger(LogFileManager.class.getName());
+  /** A new active log file is started once the current one reaches this size. */
+  public static final long MAX_FILE_SIZE = 1024 * 1024 * 64;
+  // sorted by file id; the last entry is the active (writable) file
+  private List<LogFile> logFiles = new ArrayList<LogFile>();
+  private LongSequence logIdSequence = new LongSequence(1, 1);
+  private ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+  private ScheduledExecutorService logFileCheckTimer;
+  private LogDirectory logDirectory;
+
+  /**
+   * Loads all existing log files from the directory, seeds the file id
+   * sequence past the highest existing id, and starts the background task
+   * that rolls to a new file when the active one grows too large.
+   */
+  // TODO: load existing max snapshot id
+  public LogFileManager(LogDirectory logDirectory) throws IOException {
+    this.logDirectory = logDirectory;
+    String[] list = logDirectory.list();
+    for (String file : list) {
+      long id = getLogFileNumber(file);
+      LogFile logFile = new LogFile(id, file, logDirectory);
+      logFiles.add(logFile);
+    }
+    Collections.sort(logFiles);
+    if (logFiles.size() > 0) {
+      long last = logFiles.get(logFiles.size() - 1).getId();
+      long next = last + 1;
+      logIdSequence.set(next);
+    }
+    logFileCheckTimer = Executors.newSingleThreadScheduledExecutor();
+    logFileCheckTimer.scheduleWithFixedDelay(new LogFileSizeCheck(), 1000, 10 * 1000, TimeUnit.MILLISECONDS);
+  }
+
+  /** @return total number of live records across all log files. */
+  public int getNumRecords() {
+    int num = 0;
+    for (LogFile logFile : logFiles) {
+      num += logFile.getNumRecords();
+    }
+    return num;
+  }
+
+  /** Stops the roll-over task and closes every log file. */
+  public void close() throws IOException {
+    // shut the timer down first so it cannot roll files while we close them;
+    // the original left the executor running, leaking its thread
+    logFileCheckTimer.shutdown();
+    for (LogFile logFile : logFiles) {
+      logFile.close();
+    }
+  }
+
+  // NOTE(review): currently unused; kept for symmetry with LogFile.getPosition
+  private int getPosition(long id) {
+    LogFile lf = new LogFile(id);
+    int pos = Collections.binarySearch(logFiles, lf);
+    if (pos < 0)
+      pos = -1 - pos;
+    return pos;
+  }
+
+  /** @return smallest record id in the oldest log file, or -1 when empty. */
+  public long getMinSnapshotId() {
+    if (logFiles.size() > 0) {
+      LogFile logFile = logFiles.get(0);
+      return logFile.getMinId();
+    } else {
+      return -1;
+    }
+  }
+
+  /** @return largest record id in the newest log file, or -1 when empty. */
+  public long getMaxSnapshotId() {
+    if (logFiles.size() > 0) {
+      LogFile logFile = logFiles.get(logFiles.size() - 1);
+      return logFile.getMaxId();
+    } else {
+      return -1;
+    }
+  }
+
+  public long getMaxId() {
+    if (logFiles.size() == 0)
+      return -1;
+    return logFiles.get(logFiles.size() - 1).getMaxId();
+  }
+
+  private LogFile getLast() {
+    if (logFiles.size() == 0)
+      return null;
+    return logFiles.get(logFiles.size() - 1);
+  }
+
+  /** Periodic task that rolls to a new log file when the active one is full. */
+  public class LogFileSizeCheck implements Runnable {
+    public void run() {
+      try {
+        LogFile logFile = getLast();
+        if (logFile != null && logFile.size() >= MAX_FILE_SIZE) {
+          // createNewLogFile registers the new file itself; the original
+          // also added the result here, duplicating the entry in logFiles
+          createNewLogFile();
+        }
+      } catch (IOException ioException) {
+        log.log(Level.SEVERE, "", ioException);
+      }
+    }
+  }
+
+  /**
+   * Marks the record with the given id deleted.
+   * @throws Exception when no log file contains the id
+   */
+  public boolean delete(Long id) throws Exception {
+    LogFile logFile = getLogFileContaining(id);
+    if (logFile == null)
+      throw new Exception("unknown id: " + id);
+    return logFile.delete(id);
+  }
+
+  /**
+   * @return the id of the record preceding {@code id}, or null when there is
+   *         none.  LogFile signals "no previous" with the primitive -1, so
+   *         the original's null check on the boxed value was dead code.
+   */
+  public Long getPreviousId(Long id) {
+    LogFile logFile = getLogFileContaining(id);
+    if (logFile == null)
+      return null;
+    int pos = logFiles.indexOf(logFile);
+    long previousId = logFile.getPreviousId(id);
+    if (previousId == -1 && pos > 0) {
+      // id was the first record of its file; fall back to the preceding file
+      previousId = logFiles.get(pos - 1).getPreviousId(id);
+    }
+    return previousId == -1 ? null : Long.valueOf(previousId);
+  }
+
+  public long getMinId() {
+    if (logFiles.size() > 0) {
+      return logFiles.get(0).getMinId();
+    }
+    return -1;
+  }
+
+  /** Creates, registers and returns a new active log file. */
+  public LogFile createNewLogFile() throws IOException {
+    long id = logIdSequence.getAndIncrement();
+    String fileName = createLogFileName(id);
+    LogFile logFile = new LogFile(id, fileName, logDirectory);
+    lock.writeLock().lock();
+    try {
+      logFiles.add(logFile);
+      return logFile;
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  // file names look like log00000042.bin
+  private static String createLogFileName(long value) {
+    DecimalFormat decimalFormat = (DecimalFormat) DecimalFormat.getInstance();
+    decimalFormat.applyPattern("00000000");
+    return "log" + decimalFormat.format(value) + ".bin";
+  }
+
+  private static long getLogFileNumber(String fileName) {
+    String numberString = fileName.substring(3, fileName.lastIndexOf('.'));
+    return Long.parseLong(numberString);
+  }
+
+  private LogFile getLogFileContaining(Long id) {
+    lock.readLock().lock();
+    try {
+      for (LogFile logFile : logFiles) {
+        if (logFile.containsId(id)) {
+          return logFile;
+        }
+      }
+      return null;
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  public boolean contains(Long id) {
+    lock.readLock().lock();
+    try {
+      for (LogFile logFile : logFiles) {
+        if (logFile.containsId(id)) {
+          return true;
+        }
+      }
+      return false;
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /** @param snapshotId starting record id, or null to iterate from the beginning */
+  public RecordIterator getRecordIterator(Long snapshotId) throws IOException {
+    return new RecordIterator(snapshotId);
+  }
+
+  /** Iterates records across log files in order, advancing file by file. */
+  public class RecordIterator {
+    private Iterator<LogFile> logFileIterator;
+    private LogFile.RecordIterator currentRecordIterator;
+
+    public RecordIterator(Long snapshotId) throws IOException {
+      logFileIterator = logFiles.iterator();
+      while (logFileIterator.hasNext()) {
+        LogFile logFile = logFileIterator.next();
+        if (snapshotId == null || logFile.containsId(snapshotId)) {
+          currentRecordIterator = logFile.getRecordIterator(snapshotId);
+          // stop at the first matching file; the original kept looping and
+          // opened (and leaked) an iterator for every remaining file,
+          // ending positioned at the last file instead of the first
+          break;
+        }
+      }
+    }
+
+    public void close() throws IOException {
+      if (currentRecordIterator != null)
+        currentRecordIterator.close();
+    }
+
+    /** @return the next record, or null when no iterator was ever opened. */
+    public Record next() throws IOException {
+      if (currentRecordIterator != null) {
+        return currentRecordIterator.next();
+      } else {
+        return null;
+      }
+    }
+
+    /** Advances across files, skipping any that are empty. */
+    public boolean hasNext() throws IOException {
+      if (currentRecordIterator == null)
+        return false;
+      if (currentRecordIterator.hasNext()) {
+        return true;
+      }
+      currentRecordIterator.close();
+      while (logFileIterator.hasNext()) {
+        LogFile logFile = logFileIterator.next();
+        currentRecordIterator = logFile.getRecordIterator(null);
+        if (currentRecordIterator.hasNext()) {
+          return true;
+        }
+        currentRecordIterator.close();
+      }
+      return false;
+    }
+  }
+
+  public void commit(RecordHeader recordHeader) {
+    // TODO: implement commit
+  }
+
+  /** Appends a record to the active log file. */
+  public RecordHeader writeRecord(Long id, byte[] doc, byte[] other) throws IOException {
+    LogFile logFile = getCurrentLogFile();
+    return logFile.writeRecord(id, doc, other);
+  }
+
+  /** @return the active log file, creating the first one on demand. */
+  public LogFile getCurrentLogFile() throws IOException {
+    LogFile logFile = getLast();
+    if (logFile == null) {
+      logFile = createNewLogFile();
+    }
+    return logFile;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/log/RawLogFile.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/RawLogFile.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/RawLogFile.java	(revision 0)
@@ -0,0 +1,232 @@
+package org.apache.lucene.ocean.log;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.ocean.LogDirectory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Performs raw file access including serializing records to and from the
+ * underlying file.
+ *
+ * On-disk record layout (see {@link #write}):
+ * HEADER (3 bytes) | status (1 byte) | id (long) | docsLength (int) |
+ * otherLength (int) | doc bytes | other bytes.
+ */
+public class RawLogFile {
+  final static Logger LOG = LoggerFactory.getLogger(RawLogFile.class);
+  public static final byte DELETED = 101;
+  public static final byte OK = 9;
+  public static final byte[] HEADER = new byte[] { 'O', 'c', 'N' };
+  // offset of the status byte within a record, relative to headerPosition
+  private static final int STATUS_OFFSET = HEADER.length;
+  private long currentWritePosition = 0;
+  // serializes appends and in-place status updates
+  private ReentrantLock writeLock = new ReentrantLock();
+  private String file;
+  private RandomAccessFile output;
+  private LogDirectory logDirectory;
+
+  public RawLogFile(String file, LogDirectory logDirectory) throws IOException {
+    this.file = file;
+    this.logDirectory = logDirectory;
+    output = logDirectory.getOutput(file, false);
+    // TODO: need better way to make sure we're starting at the end of the file
+    currentWritePosition = logDirectory.fileLength(file);
+    // debug leftover was a System.out.println; log at debug level instead
+    if (LOG.isDebugEnabled()) LOG.debug("starting currentWritePosition: "+currentWritePosition);
+  }
+
+  /** Opens a fresh read handle on this log file. */
+  public RandomAccessFile openInput() throws IOException {
+    return logDirectory.openInput(file);
+  }
+
+  /** Current length of the file in bytes. */
+  public long size() throws IOException {
+    return output.length();
+  }
+
+  /** A record whose data sections can be read lazily. */
+  public static interface StreamRecord {
+    public StreamData getDocuments();
+
+    public StreamData getOther();
+  }
+
+  /** A lazily readable chunk of record data. */
+  public static interface StreamData {
+    public byte[] getBytes() throws IOException;
+
+    public int getLength();
+  }
+
+  /** StreamRecord backed by a RandomAccessFile positioned via the header. */
+  public static class FileStreamRecord implements StreamRecord {
+    private RecordHeader recordHeader;
+    private RandomAccessFile input;
+
+    public FileStreamRecord(RecordHeader recordHeader, RandomAccessFile input) {
+      this.recordHeader = recordHeader;
+      this.input = input;
+    }
+
+    /** @return the documents section, or null when the record has none */
+    public FileStreamData getDocuments() {
+      if (recordHeader.docsLength == 0) return null;
+      return new FileStreamData(recordHeader.docsLength, recordHeader.docsPosition, input);
+    }
+
+    /** @return the "other" section (stored directly after docs), or null */
+    public FileStreamData getOther() {
+      if (recordHeader.otherLength == 0) return null;
+      return new FileStreamData(recordHeader.otherLength, recordHeader.docsPosition + recordHeader.docsLength, input);
+    }
+  }
+
+  /** StreamData that seeks to a fixed position/length within a file. */
+  public static class FileStreamData implements StreamData {
+    private int length;
+    private long position;
+    private RandomAccessFile input;
+
+    public FileStreamData(int length, long position, RandomAccessFile input) {
+      this.length = length;
+      this.position = position;
+      this.input = input;
+    }
+
+    // NOTE(review): seek+read on a shared RandomAccessFile is not safe for
+    // concurrent callers -- confirm callers serialize access
+    public byte[] getBytes() throws IOException {
+      input.seek(position);
+      byte[] bytes = new byte[length];
+      input.readFully(bytes);
+      return bytes;
+    }
+
+    public int getLength() {
+      return length;
+    }
+  }
+
+  public FileStreamRecord readRecordData(RecordHeader recordHeader, RandomAccessFile input) {
+    return new FileStreamRecord(recordHeader, input);
+  }
+
+  /** Scans the whole file and returns the headers of all live records. */
+  public List<RecordHeader> loadRecordHeaders() throws IOException {
+    RandomAccessFile input = openInput();
+    if (input.length() > 0) {
+      // LoadRecordHeaders closes the input in its finally block
+      LoadRecordHeaders loadRecordHeaders = new LoadRecordHeaders(input);
+      return loadRecordHeaders.getRecordHeaders();
+    } else {
+      input.close(); // previously leaked on the empty-file path
+      return new ArrayList<RecordHeader>();
+    }
+  }
+
+  /**
+   * Marks the record described by {@code recordHeader} as deleted in place
+   * by overwriting its status byte.
+   *
+   * @return true if the record was live and is now deleted, false if it was
+   *         already deleted
+   */
+  public boolean delete(RecordHeader recordHeader) throws IOException {
+    long lastPosition = -1;
+    writeLock.lock();
+    try {
+      lastPosition = output.getFilePointer();
+      output.seek(recordHeader.headerPosition);
+      byte[] header = new byte[3];
+      output.readFully(header, 0, header.length);
+      if (!Arrays.equals(HEADER, header)) {
+        throw new IOException("no header");
+      }
+      byte status = output.readByte();
+      if (status == OK) {
+        // the long id immediately follows the status byte -- matches the
+        // layout produced by write() (the old code read a phantom int first
+        // and stamped DELETED at headerPosition+4, inside the id field)
+        long id = output.readLong();
+        assert id == recordHeader.id;
+        output.seek(recordHeader.headerPosition + STATUS_OFFSET);
+        output.writeByte(DELETED);
+        output.getFD().sync();
+        return true;
+      } else {
+        // already deleted
+        return false;
+      }
+    } finally {
+      if (lastPosition != -1) output.seek(lastPosition);
+      writeLock.unlock();
+    }
+  }
+
+  /**
+   * Sequentially parses all record headers out of the file, skipping
+   * records whose status is DELETED and resynchronizing past corrupt spots.
+   */
+  public class LoadRecordHeaders {
+    private long currentReadPosition = 0;
+    private List<RecordHeader> recordHeaders = new ArrayList<RecordHeader>();
+
+    public LoadRecordHeaders(RandomAccessFile input) throws IOException {
+      try {
+        while (true) {
+          input.seek(currentReadPosition);
+          RecordHeader recordHeader = new RecordHeader();
+          byte[] header = new byte[3];
+          // NOTE(review): headerPosition is an int -- files over 2GB would
+          // overflow here; confirm log rotation keeps files below that
+          recordHeader.headerPosition = (int) input.getFilePointer();
+          input.readFully(header, 0, header.length);
+          if (!Arrays.equals(HEADER, header)) {
+            LOG.error("header incorrect: "+new String(header)+" at pos: "+currentReadPosition);
+            // scan forward for the next header marker
+            while (true) {
+              recordHeader.headerPosition = (int) input.getFilePointer();
+              input.readFully(header, 0, header.length);
+              if (Arrays.equals(HEADER, header)) {
+                LOG.error("found header at position: "+recordHeader.headerPosition);
+                break;
+              }
+              // advance a single byte so a marker straddling the previous
+              // 3-byte probe is not missed (old code strode 3 bytes at a time)
+              input.seek(recordHeader.headerPosition + 1);
+            }
+          }
+          byte status = input.readByte();
+          recordHeader.id = input.readLong();
+          if (status == DELETED) {
+            if (LOG.isDebugEnabled()) LOG.debug("record "+recordHeader.id+" status deleted");
+          }
+          recordHeader.docsLength = input.readInt();
+          recordHeader.otherLength = input.readInt();
+          recordHeader.docsPosition = input.getFilePointer();
+          // deleted records are parsed (to find the next one) but not kept
+          if (status != DELETED)
+            recordHeaders.add(recordHeader);
+          currentReadPosition = input.getFilePointer() + recordHeader.docsLength + recordHeader.otherLength;
+        }
+      } catch (EOFException eofException) {
+        // at end of file -- normal loop exit
+      } finally {
+        if (input != null)
+          input.close();
+      }
+    }
+
+    public List<RecordHeader> getRecordHeaders() {
+      return recordHeaders;
+    }
+  }
+
+  // TODO: add read of a footer to ensure the record was fully written
+  // TODO: handle write failure cleanly
+  /**
+   * Appends a record and fsyncs it to disk.
+   *
+   * @param id record id written into the header
+   * @param docBytes serialized documents section, may be null
+   * @param otherBytes serialized deletes/other section, may be null
+   * @return header describing where the record landed
+   */
+  public RecordHeader write(Long id, byte[] docBytes, byte[] otherBytes) throws IOException {
+    writeLock.lock();
+    try {
+      int documentsLength = 0;
+      if (docBytes != null) documentsLength = docBytes.length;
+      int otherLength = 0;
+      if (otherBytes != null) otherLength = otherBytes.length;
+      RecordHeader recordHeader = new RecordHeader();
+      recordHeader.id = id;
+      recordHeader.headerPosition = (int) currentWritePosition;
+      recordHeader.docsLength = documentsLength;
+      recordHeader.otherLength = otherLength;
+
+      output.seek(currentWritePosition);
+      output.write(HEADER);
+      output.writeByte(OK);
+      output.writeLong(id);
+      output.writeInt(recordHeader.docsLength);
+      output.writeInt(recordHeader.otherLength);
+      recordHeader.docsPosition = output.getFilePointer();
+      if (docBytes != null) output.write(docBytes);
+      if (otherBytes != null) output.write(otherBytes);
+      output.getFD().sync();
+      currentWritePosition = output.getFilePointer();
+      return recordHeader;
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  public void close() throws IOException {
+    output.close();
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/log/RecordHeader.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/RecordHeader.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/RecordHeader.java	(revision 0)
@@ -0,0 +1,23 @@
+package org.apache.lucene.ocean.log;
+
+import org.apache.commons.lang.builder.ReflectionToStringBuilder;
+
+/**
+ * Log record header: in-memory description of one record's id, location,
+ * and section sizes within a log file.  Ordered by record id.
+ */
+public class RecordHeader implements Comparable<RecordHeader> {
+  public long id;
+  // byte offset of the record's HEADER marker within the file
+  public int headerPosition;
+  public int docsLength;
+  public int otherLength;
+  // byte offset where the document bytes begin ("other" bytes follow directly)
+  public long docsPosition;
+
+  // NOTE(review): ordering is by id only and equals/hashCode are not
+  // overridden, so compareTo == 0 does not imply equals -- confirm no
+  // sorted-set usage relies on that
+  public int compareTo(RecordHeader other) {
+    return (id < other.id ? -1 : (id == other.id ? 0 : 1));
+  }
+
+  public String toString() {
+    return ReflectionToStringBuilder.toString(this);
+  }
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/log/TransactionLog.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/log/TransactionLog.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/log/TransactionLog.java	(revision 0)
@@ -0,0 +1,134 @@
+package org.apache.lucene.ocean.log;
+
+import java.io.IOException;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.lang.SerializationUtils;
+import org.apache.lucene.ocean.Deletes;
+import org.apache.lucene.ocean.Documents;
+import org.apache.lucene.ocean.LogDirectory;
+import org.apache.lucene.ocean.Batch.MasterBatch;
+import org.apache.lucene.ocean.Batch.SlaveBatch;
+import org.apache.lucene.ocean.log.LogFile.Record;
+import org.apache.lucene.ocean.log.LogFileManager.RecordIterator;
+import org.apache.lucene.ocean.log.RawLogFile.StreamData;
+import org.apache.lucene.ocean.util.ByteBufferPool;
+import org.apache.lucene.ocean.util.LongSequence;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * Serializes transactions, known internally as batches, to an underlying
+ * log file via LogFileManager.  Provides an iterator over the batches
+ * (SlaveBatchIterator) for replay.
+ */
+public class TransactionLog {
+  // NOTE(review): byteBufferPool is never used in this class -- confirm
+  // whether it can be removed
+  private ByteBufferPool byteBufferPool = new ByteBufferPool(50 * 1024, 5, 5);
+  LogFileManager logFileManager;
+  // serializes appends to the log
+  private ReentrantLock writeLock = new ReentrantLock();
+  private LogDirectory logDirectory;
+  // source of monotonically increasing snapshot ids
+  private LongSequence snapshotIdSequence;
+
+  /**
+   * Opens the transaction log over the given directory and seeds the
+   * snapshot id sequence one past the highest id already on disk.
+   */
+  public TransactionLog(LogDirectory logDirectory) throws IOException {
+    this.logDirectory = logDirectory;
+    logFileManager = new LogFileManager(logDirectory);
+    long maxId = logFileManager.getMaxId();
+    if (maxId == -1) maxId = 0; // empty log: ids start at 1
+    snapshotIdSequence = new LongSequence(maxId+1, 1);
+  }
+  
+  /** Closes the underlying log files. */
+  public void close() throws IOException {
+    logFileManager.close();
+  }
+
+  /**
+   * Serializes a master batch (a RAMDirectory or Documents, plus optional
+   * Deletes) and appends it to the log under the given id.  Serialization
+   * happens outside the write lock; only the append itself is locked.
+   * NOTE(review): previousId is currently unused here -- confirm intent.
+   */
+  public void writeMasterBatch(final Long id, final Long previousId, final MasterBatch masterBatch) throws Exception {
+    byte[] docBytes = null;
+    byte[] otherBytes = null;
+    if (masterBatch.hasRAMDirectory()) {
+      docBytes = SerializationUtils.serialize(masterBatch.getRamDirectory());
+    } else if (masterBatch.hasDocuments()) {
+      Documents documents = masterBatch.getDocuments();
+      docBytes = SerializationUtils.serialize(documents);
+    }
+    if (masterBatch.hasDeletes()) {
+      Deletes deletes = masterBatch.getDeletes();
+      otherBytes = SerializationUtils.serialize(deletes);
+    }
+    writeLock.lock();
+    try {
+      logFileManager.writeRecord(id, docBytes, otherBytes);
+    } finally {
+      writeLock.unlock();
+    }
+  }
+  
+  public int getNumRecords() {
+    return logFileManager.getNumRecords();
+  }
+  
+  /** Returns the next snapshot id from the sequence. */
+  public long getNextId() {
+    return snapshotIdSequence.getAndIncrement();
+  }
+  
+  public long getMinId() {
+    return logFileManager.getMinId();
+  }
+  
+  public long getMaxId() {
+    return logFileManager.getMaxId();
+  }
+
+  public Long getPreviousId(long id) {
+    return logFileManager.getPreviousId(id);
+  }
+
+  /** Iterator over batches starting at snapshotId (null = from the start). */
+  public SlaveBatchIterator getSlaveBatchIterator(Long snapshotId) throws Exception {
+    return new SlaveBatchIterator(snapshotId);
+  }
+
+  /**
+   * Replays logged batches as SlaveBatch objects, deserializing the
+   * document and delete sections on demand.
+   */
+  public class SlaveBatchIterator {
+    RecordIterator recordIterator;
+
+    public SlaveBatchIterator(Long snapshotId) throws IOException {
+      recordIterator = logFileManager.getRecordIterator(snapshotId);
+    }
+
+    public boolean hasNext() throws IOException {
+      return recordIterator.hasNext();
+    }
+
+    /**
+     * Returns the next batch.
+     *
+     * @param loadDocuments whether to deserialize the documents section
+     * @param loadOther whether to deserialize the deletes section
+     */
+    public SlaveBatch next(boolean loadDocuments, boolean loadOther) throws Exception {
+      Record record = recordIterator.next();
+      Documents documents = null;
+      RAMDirectory ramDirectory = null;
+      Deletes deletes = null;
+      if (loadDocuments) {
+        StreamData docData = record.getStreamRecord().getDocuments();
+        if (docData != null) {
+          byte[] docBytes = docData.getBytes();
+          Object object = SerializationUtils.deserialize(docBytes);
+          // the docs section holds either a whole RAMDirectory or Documents
+          if (object instanceof RAMDirectory) {
+            ramDirectory = (RAMDirectory) object;
+          } else {
+            documents = (Documents) object;
+          }
+        }
+      }
+      if (loadOther) {
+        StreamData otherData = record.getStreamRecord().getOther();
+        if (otherData != null) {
+          byte[] otherBytes = otherData.getBytes();
+          deletes = (Deletes) SerializationUtils.deserialize(otherBytes);
+        }
+      }
+      if (ramDirectory != null) {
+        return new SlaveBatch(record.getId(), ramDirectory, deletes);
+      } else {
+        return new SlaveBatch(record.getId(), documents, deletes);
+      }
+    }
+
+    public void close() throws IOException {
+      recordIterator.close();
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/LogDirectory.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/LogDirectory.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/LogDirectory.java	(revision 0)
@@ -0,0 +1,20 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+
+/**
+ * Minimal file-system abstraction used by the transaction log: listing,
+ * existence/length/mtime queries, deletion, and RandomAccessFile based
+ * read/write handles.
+ */
+public abstract class LogDirectory {
+  /** Lists the names of all files in this directory. */
+  public abstract String[] list() throws IOException;
+
+  public abstract boolean fileExists(String name) throws IOException;
+
+  /** Returns the last-modified time of the named file. */
+  public abstract long fileModified(String name) throws IOException;
+
+  public abstract void deleteFile(String name) throws IOException;
+
+  /** Returns the length of the named file in bytes. */
+  public abstract long fileLength(String name) throws IOException;
+  
+  /** Opens the named file for reading. */
+  public abstract RandomAccessFile openInput(String name) throws IOException;
+  
+  // overwrite presumably truncates an existing file -- TODO confirm
+  // against the concrete implementations
+  public abstract RandomAccessFile getOutput(String name, boolean overwrite) throws IOException;
+}
Index: ocean/src/org/apache/lucene/ocean/MultiThreadSearcherPolicy.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/MultiThreadSearcherPolicy.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/MultiThreadSearcherPolicy.java	(revision 0)
@@ -0,0 +1,25 @@
+package org.apache.lucene.ocean;
+
+/**
+ * Searcher policy carrying the sizing of a bounded thread pool
+ * (core size, maximum size, and work-queue depth).
+ */
+public class MultiThreadSearcherPolicy extends SearcherPolicy {
+  private final int minThreads;
+  private final int maxThreads;
+  private final int queueSize;
+
+  /**
+   * @param minThreads core pool size
+   * @param maxThreads maximum pool size
+   * @param queueSize depth of the work queue
+   */
+  public MultiThreadSearcherPolicy(int minThreads, int maxThreads, int queueSize) {
+    this.minThreads = minThreads;
+    this.maxThreads = maxThreads;
+    this.queueSize = queueSize;
+  }
+
+  public int getMinThreads() {
+    return minThreads;
+  }
+
+  public int getMaxThreads() {
+    return maxThreads;
+  }
+
+  public int getQueueSize() {
+    return queueSize;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/OceanConsole.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/OceanConsole.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/OceanConsole.java	(revision 0)
@@ -0,0 +1,11 @@
+package org.apache.lucene.ocean;
+
+/**
+ * Administrative console placeholder.  Planned features: display indexes,
+ * display index snapshots, run merges.  No functionality implemented yet.
+ */
+public class OceanConsole {
+
+}
Index: ocean/src/org/apache/lucene/ocean/OceanInstantiatedIndexReader.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/OceanInstantiatedIndexReader.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/OceanInstantiatedIndexReader.java	(revision 0)
@@ -0,0 +1,36 @@
+package org.apache.lucene.ocean;
+
+import java.util.Set;
+
+import org.apache.lucene.store.instantiated.InstantiatedIndex;
+import org.apache.lucene.store.instantiated.InstantiatedIndexReader;
+
+/**
+ * Simulates a multiple version IndexReader with InstantiatedIndexReader by
+ * having documents over the set maxDoc be deleted.  
+ *
+ */
+public class OceanInstantiatedIndexReader extends InstantiatedIndexReader {
+  // highest document number considered part of this version's view
+  private int maxDoc;
+  private Set<Integer> deletedDocs;
+  
+  public OceanInstantiatedIndexReader(int maxDoc, InstantiatedIndex index, Set<Integer> deletedDocs) {
+    super(index);
+    this.maxDoc = maxDoc;
+    this.deletedDocs = deletedDocs;
+  }
+  
+  // NOTE(review): uses the inherited maxDoc() rather than the field above,
+  // so docs between this.maxDoc and maxDoc() are treated as deleted by
+  // isDeleted() yet still counted here; also dereferences deletedDocs
+  // without the null check isDeleted() performs -- confirm intent
+  public int numDocs() {
+    return maxDoc() - deletedDocs.size();
+  }
+  
+  // NOTE(review): boundary is strict (n > maxDoc); if doc ids are 0-based
+  // and maxDoc is a count, n == maxDoc may also need to report deleted --
+  // TODO confirm
+  public boolean isDeleted(int n) {
+    if (n > maxDoc) return true;
+    if (deletedDocs != null && deletedDocs.contains(n)) return true;
+    return false;
+  }
+  
+  public boolean hasDeletions() {
+    return true;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/OceanSearcher.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/OceanSearcher.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/OceanSearcher.java	(revision 0)
@@ -0,0 +1,52 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldSelector;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.ocean.Snapshot.SnapshotSearcher;
+import org.apache.lucene.ocean.util.Constants;
+import org.apache.lucene.search.MultiSearcher;
+import org.apache.lucene.search.Searchable;
+
+/**
+ * MultiSearcher over a Snapshot's per-index searchers.  Every returned
+ * document is stamped with the id of the index it came from, stored in the
+ * Constants.INDEXID field.
+ */
+public class OceanSearcher extends MultiSearcher {
+  Snapshot snapshot;
+  private Searchable[] searchables;
+  private int[] starts;
+  
+  public OceanSearcher(Snapshot snapshot) throws IOException {
+    super(snapshot.getSearchers());
+    searchables = snapshot.getSearchers();
+    starts = snapshot.getStarts();
+    this.snapshot = snapshot;
+  }
+  
+  public void close() {
+    // NOTE(review): intentionally does not close sub-searchers; snapshot
+    // reference counting is disabled -- confirm lifecycle management
+    //snapshot.decRef();
+  }
+  
+  public Document doc(int n) throws CorruptIndexException, IOException {
+    int i = subSearcher(n);       // find searcher index
+    Document document = searchables[i].doc(n - starts[i]);   // dispatch to searcher
+    return tagWithIndexId(i, document);
+  }
+
+  // inherit javadoc
+  public Document doc(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
+    int i = subSearcher(n);       // find searcher index
+    Document document = searchables[i].doc(n - starts[i], fieldSelector);    // dispatch to searcher
+    return tagWithIndexId(i, document);
+  }
+
+  /** Stamps the document with the id of the index it came from. */
+  private Document tagWithIndexId(int i, Document document) {
+    SnapshotSearcher snapshotSearcher = (SnapshotSearcher)searchables[i];
+    IndexID indexId = snapshotSearcher.getIndexSnapshot().getIndex().getId();
+    document.add(new Field(Constants.INDEXID, indexId.toString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
+    return document;
+  }
+  
+  public Snapshot getSnapshot() {
+    return snapshot;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/RamDirectorySerializer.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/RamDirectorySerializer.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/RamDirectorySerializer.java	(revision 0)
@@ -0,0 +1,54 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * Serializes a RAMDirectory's files to an IndexOutput and back.
+ * Format: vInt file count, then per file: name (string), length (vInt),
+ * raw bytes.
+ */
+public class RamDirectorySerializer {
+  static final int BUFFER_SIZE = 16384;
+
+  public RamDirectorySerializer() {
+  }
+
+  /** Reads a directory previously written by {@link #serialize}. */
+  public static RAMDirectory deserialize(IndexInput input) throws IOException {
+    final int numFiles = input.readVInt();
+    final RAMDirectory directory = new RAMDirectory();
+    final byte[] buffer = new byte[BUFFER_SIZE];
+    for (int i = 0; i < numFiles; i++) {
+      String name = input.readString();
+      int remaining = input.readVInt();
+      IndexOutput fileOut = directory.createOutput(name);
+      while (remaining > 0) {
+        int chunk = Math.min(BUFFER_SIZE, remaining);
+        input.readBytes(buffer, 0, chunk);
+        fileOut.writeBytes(buffer, chunk);
+        remaining -= chunk;
+      }
+      fileOut.close();
+    }
+    return directory;
+  }
+
+  /** Writes every file of the directory to the given output. */
+  public static void serialize(RAMDirectory ramDirectory, IndexOutput output) throws IOException {
+    String[] names = ramDirectory.list();
+    output.writeVInt(names.length);
+    final byte[] buffer = new byte[BUFFER_SIZE];
+    for (String name : names) {
+      int remaining = (int) ramDirectory.fileLength(name);
+      output.writeString(name);
+      output.writeVInt(remaining);
+      IndexInput fileIn = ramDirectory.openInput(name);
+      while (remaining > 0) {
+        int chunk = Math.min(BUFFER_SIZE, remaining);
+        fileIn.readBytes(buffer, 0, chunk);
+        output.writeBytes(buffer, chunk);
+        remaining -= chunk;
+      }
+      fileIn.close();
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/RamIndex.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/RamIndex.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/RamIndex.java	(revision 0)
@@ -0,0 +1,110 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
+import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.ocean.WriteableMemoryIndex.MemoryIndexSnapshot;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+
+/**
+ * A RAM-resident index: either the merge of several existing snapshots, a
+ * wrapper around a received RAMDirectory, or the conversion of an in-memory
+ * (Instantiated) index snapshot.
+ */
+public class RamIndex extends DirectoryIndex {
+  private RAMDirectory ramDirectory;
+  private Long maxSnapshotId;
+  private Long maxDocumentId;
+
+  /**
+   * Builds a new RAM resident index by merging the given snapshots.
+   */
+  public RamIndex(IndexID id, List<? extends IndexSnapshot> indexSnapshots, TransactionSystem system) throws Exception {
+    super(id, system);
+    ramDirectory = new RAMDirectory();
+    IndexReader[] indexReaders = getIndexReaders(indexSnapshots);
+    // Write into the field's directory: a shadowing local RAMDirectory here
+    // previously received the merged index, leaving the field (returned by
+    // getDirectory()) permanently empty.
+    IndexWriter indexWriter = new IndexWriter(ramDirectory, false, system.getDefaultAnalyzer(), true);
+    indexWriter.setMergeScheduler(new SerialMergeScheduler());
+    indexWriter.setUseCompoundFile(true);
+    indexWriter.addIndexes(indexReaders);
+    indexWriter.close();
+    maxSnapshotId = getMaxSnapshotId(indexSnapshots);
+    maxDocumentId = getMaxDocumentId(indexSnapshots);
+    // NOTE(review): unlike the other constructors this one does not open
+    // initialIndexReader or register a snapshot -- confirm callers do so
+  }
+
+  // TODO: add timestamp so ramindex can be removed from indices
+  /**
+   * Wraps an existing RAMDirectory (e.g. received from a master) and applies
+   * any pending deletes to the initial reader.
+   */
+  public RamIndex(IndexID id, Long snapshotId, List<Deletes> deletesList, RAMDirectory ramDirectory, TransactionSystem system) throws Exception {
+    super(id, system);
+    this.ramDirectory = ramDirectory;
+    initialIndexReader = IndexReader.open(ramDirectory, indexDeletionPolicy);
+    createNewSnapshot(snapshotId, initialIndexReader);
+    if (deletesList != null) {
+      for (Deletes deletes : deletesList) {
+        applyDeletes(true, deletes, null, initialIndexReader);
+      }
+    }
+  }
+
+  /**
+   * Converts a memory index snapshot into a RAM index snapshot.
+   */
+  public RamIndex(IndexID id, MemoryIndexSnapshot memoryIndexSnapshot) throws Exception, IOException {
+    super(id, memoryIndexSnapshot.getIndex().getSystem());
+    this.maxSnapshotId = memoryIndexSnapshot.getMaxSnapshotId();
+    this.maxDocumentId = memoryIndexSnapshot.getMaxDocumentId();
+    ramDirectory = new RAMDirectory();
+    Analyzer defaultAnalyzer = memoryIndexSnapshot.getIndex().getSystem().getDefaultAnalyzer();
+    IndexWriter indexWriter = new IndexWriter(ramDirectory, false, defaultAnalyzer, true, new KeepOnlyLastCommitDeletionPolicy());
+    indexWriter.addIndexes(new IndexReader[] {memoryIndexSnapshot.getIndexReader()});
+    indexWriter.close();
+    initialIndexReader = IndexReader.open(ramDirectory, indexDeletionPolicy);
+    // (removed a dead local List<IndexSnapshot> that was built but never used)
+    createNewSnapshot(memoryIndexSnapshot.getSnapshotId(), initialIndexReader);
+  }
+
+  /**
+   * Commits this index under the given transaction.
+   *
+   * @return the new snapshot, or null when the transaction did not go
+   *         through or failed
+   */
+  public RamIndexSnapshot commitIndex(Transaction transaction) throws IndexException, InterruptedException, IOException {
+    try {
+      transaction.ready(this);
+      if (transaction.go()) {
+        Long snapshotId = transaction.getId();
+        RamIndexSnapshot indexSnapshot = createNewSnapshot(snapshotId, initialIndexReader);
+        return indexSnapshot;
+      } else {
+        // if commit fails this snapshot and ramindex won't make it
+        return null;
+      }
+    } catch (Throwable throwable) {
+      LOG.error("", throwable);
+      transaction.failed(this, throwable);
+      return null;
+    }
+  }
+
+  /** Creates and registers a snapshot over the given reader. */
+  protected RamIndexSnapshot createNewSnapshot(Long snapshotId, IndexReader newIndexReader) throws IOException {
+    RamIndexSnapshot ramIndexSnapshot = new RamIndexSnapshot(snapshotId, newIndexReader);
+    registerSnapshot(ramIndexSnapshot);
+    return ramIndexSnapshot;
+  }
+
+  public class RamIndexSnapshot extends DirectoryIndexSnapshot {
+    public RamIndexSnapshot(Long snapshotId, IndexReader indexReader) throws IOException {
+      super(snapshotId, indexReader);
+    }
+
+    public String toString() {
+      return "RamIndexSnapshot index: "+RamIndex.this.getId()+" snapshotid: "+snapshotId+" maxDoc: "+indexReader.maxDoc();
+    }
+  }
+
+  public Directory getDirectory() {
+    return ramDirectory;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/SearcherPolicy.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/SearcherPolicy.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/SearcherPolicy.java	(revision 0)
@@ -0,0 +1,5 @@
+package org.apache.lucene.ocean;
+
+/**
+ * Base class for searcher execution policies.  Carries no state itself;
+ * subclasses describe how queries are scheduled.
+ */
+public class SearcherPolicy {
+  
+}
Index: ocean/src/org/apache/lucene/ocean/SingleThreadSearcherPolicy.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/SingleThreadSearcherPolicy.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/SingleThreadSearcherPolicy.java	(revision 0)
@@ -0,0 +1,5 @@
+package org.apache.lucene.ocean;
+
+/**
+ * Searcher policy indicating queries run on a single thread.  Used purely
+ * as a type marker; carries no configuration.
+ */
+public class SingleThreadSearcherPolicy extends SearcherPolicy {
+  
+}
Index: ocean/src/org/apache/lucene/ocean/Snapshot.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Snapshot.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Snapshot.java	(revision 0)
@@ -0,0 +1,336 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.text.DecimalFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.ocean.DiskIndex.DiskIndexSnapshot;
+import org.apache.lucene.ocean.Index.IndexSnapshot;
+import org.apache.lucene.ocean.RamIndex.RamIndexSnapshot;
+import org.apache.lucene.ocean.SnapshotInfo.IndexInfo;
+import org.apache.lucene.ocean.WriteableMemoryIndex.MemoryIndexSnapshot;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiSearcher;
+import org.apache.lucene.search.Searcher;
+
+/**
+ * One immutable, searchable version of the overall index: the writeable
+ * in-memory snapshot plus the snapshots of every non-writeable index (disk
+ * and ram).  Identified by a composite BigDecimal id whose whole part is
+ * the snapshot id and whose two fractional digits are the minor version.
+ */
+public class Snapshot implements Comparable<Snapshot> {
+  private BigDecimal id;
+  private SortedList<IndexID,IndexSnapshot> indexSnapshotMap;
+  private MemoryIndexSnapshot writeableSnapshot;
+  private IndexReader indexReader;
+  private int maxDoc;
+  // docid base offset of each sub-reader, plus a trailing total-maxDoc entry
+  private final int[] starts;
+  private TransactionSystem system;
+  private final long timestamp;
+  
+  /**
+   * @param id composite id, see {@link #toId(Long, int)}
+   * @param writeableSnapshot snapshot of the writeable memory index; always
+   *          included in this snapshot's view
+   * @param indexSnapshots snapshots of the remaining (non-writeable) indexes
+   * @param system owning transaction system
+   * @param timestamp creation time in milliseconds
+   */
+  public Snapshot(BigDecimal id, MemoryIndexSnapshot writeableSnapshot, Collection<IndexSnapshot> indexSnapshots, TransactionSystem system,
+      long timestamp) throws IOException {
+    this.id = id;
+    this.writeableSnapshot = writeableSnapshot;
+    // FIX: this.system was never assigned, so createMinor() handed a null
+    // TransactionSystem to every snapshot it derived from this one.
+    this.system = system;
+    this.timestamp = timestamp;
+    List<IndexSnapshot> allIndexSnapshots = new ArrayList<IndexSnapshot>(indexSnapshots);
+    allIndexSnapshots.add(writeableSnapshot);
+    assert !hasDuplicates(allIndexSnapshots);
+    assert snapshotIdsMatch(allIndexSnapshots);
+    indexSnapshotMap = new SortedList<IndexID,IndexSnapshot>();
+    for (IndexSnapshot indexSnapshot : allIndexSnapshots) {
+      indexSnapshotMap.put(indexSnapshot.getIndex().getId(), indexSnapshot);
+    }
+    IndexReader[] readerArray = getReaderArray(allIndexSnapshots);
+    indexReader = new MultiReader(readerArray);
+    starts = makeStarts();
+  }
+  
+  public Snapshot(Long snapshotId, int minorVersion, MemoryIndexSnapshot writeableSnapshot, List<IndexSnapshot> indexSnapshots,
+      TransactionSystem system, long timestamp) throws IOException {
+    this(toId(snapshotId, minorVersion), writeableSnapshot, indexSnapshots, system, timestamp);
+  }
+  
+  /** True when two of the given snapshots belong to the same index. */
+  private static boolean hasDuplicates(List<IndexSnapshot> allIndexSnapshots) {
+    Set<IndexID> set = new HashSet<IndexID>();
+    for (IndexSnapshot indexSnapshot : allIndexSnapshots) {
+      // Set.add returns false when the id was already present.
+      if (!set.add(indexSnapshot.getIndex().getId())) {
+        return true;
+      }
+    }
+    return false;
+  }
+  
+  /** Readers of the given snapshots, in iteration order. */
+  private IndexReader[] getReaderArray(Collection<IndexSnapshot> indexSnapshots) {
+    // No longer re-puts entries into indexSnapshotMap here; the constructor
+    // already populated the map with identical entries.
+    IndexReader[] readerArray = new IndexReader[indexSnapshots.size()];
+    int x = 0;
+    for (IndexSnapshot indexSnapshot : indexSnapshots) {
+      readerArray[x] = indexSnapshot.getIndexReader();
+      x++;
+    }
+    return readerArray;
+  }
+  
+  public int numDocs() {
+    return indexReader.numDocs();
+  }
+  
+  /** Builds the docid base offsets; computes the maxDoc field as a side effect. */
+  private int[] makeStarts() {
+    IndexSnapshot[] indexSnapshotsArray = indexSnapshotMap.values().toArray(new IndexSnapshot[0]);
+    int[] starts = new int[indexSnapshotsArray.length + 1];
+    for (int i = 0; i < indexSnapshotsArray.length; i++) {
+      starts[i] = maxDoc;
+      maxDoc += indexSnapshotsArray[i].maxDoc();
+    }
+    starts[indexSnapshotsArray.length] = maxDoc;
+    return starts;
+  }
+  
+  public long getTimestamp() {
+    return timestamp;
+  }
+
+  /** Orders snapshots by composite id (snapshot id, then minor version). */
+  public int compareTo(Snapshot other) {
+    return id.compareTo(other.id);
+  }
+
+  public Searcher getSearcher() throws IOException {
+    return new MultiSearcher(getSearchers());
+  }
+  
+  /** An IndexSearcher that remembers which IndexSnapshot it searches. */
+  public static class SnapshotSearcher extends IndexSearcher {
+    private IndexSnapshot indexSnapshot;
+    
+    public SnapshotSearcher(IndexReader indexReader, IndexSnapshot indexSnapshot) {
+      super(indexReader);
+      this.indexSnapshot = indexSnapshot;
+    }
+    
+    public IndexSnapshot getIndexSnapshot() {
+      return indexSnapshot;
+    }
+  }
+  
+  /** One SnapshotSearcher per index snapshot, in map iteration order. */
+  public Searcher[] getSearchers() {
+    IndexSnapshot[] indexSnapshots = (IndexSnapshot[]) indexSnapshotMap.values().toArray(new IndexSnapshot[0]);
+    Searcher[] searchers = new Searcher[indexSnapshots.length];
+    for (int x = 0; x < indexSnapshots.length; x++) {
+      searchers[x] = new SnapshotSearcher(indexSnapshots[x].getIndexReader(), indexSnapshots[x]);
+    }
+    return searchers;
+  }
+
+  public IndexReader getIndexReader() {
+    return indexReader;
+  }
+
+  /** Total maxDoc across sub-readers, as computed by makeStarts(). */
+  public int maxDoc() {
+    return maxDoc;
+  }
+
+  public int[] getStarts() {
+    return starts;
+  }
+
+  public List<RamIndexSnapshot> getRamIndexSnapshots() {
+    List<RamIndexSnapshot> ramIndexSnapshots = new ArrayList<RamIndexSnapshot>();
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      if (indexSnapshot instanceof RamIndexSnapshot) {
+        ramIndexSnapshots.add((RamIndexSnapshot) indexSnapshot);
+      }
+    }
+    return ramIndexSnapshots;
+  }
+
+  public IndexReader[] getIndexReaders() {
+    IndexReader[] indexReaders = new IndexReader[indexSnapshotMap.size()];
+    int i = 0;
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      indexReaders[i] = indexSnapshot.getIndexReader();
+      i++;
+    }
+    return indexReaders;
+  }
+
+  /** Recomputes the total maxDoc directly from the readers. */
+  public int getMaxDoc() {
+    int maxDoc = 0;
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      maxDoc += indexSnapshot.getIndexReader().maxDoc();
+    }
+    return maxDoc;
+  }
+
+  public int getMinorVersion() {
+    return getMinorVersion(id);
+  }
+
+  /**
+   * Encodes snapshotId and minorVersion as "snapshotId.MM".  Assumes
+   * 0 <= minorVersion < 100 (exactly two fractional digits are reserved).
+   */
+  public static BigDecimal toId(Long snapshotId, int minorVersion) {
+    StringBuilder builder = new StringBuilder();
+    builder.append(snapshotId);
+    builder.append(".");
+    if (10 > minorVersion)
+      builder.append("0");
+    builder.append(minorVersion);
+    return new BigDecimal(builder.toString());
+  }
+
+  /** Extracts the two fractional digits (the minor version) from a composite id. */
+  public static int getMinorVersion(BigDecimal value) {
+    value = value.subtract(new BigDecimal(value.longValue()));
+    BigDecimal decimal = value.scaleByPowerOfTen(2);
+    return decimal.intValue();
+  }
+
+  /** True when every snapshot carries the same snapshot id (vacuously true if empty). */
+  public static boolean snapshotIdsMatch(Collection<IndexSnapshot> indexSnapshots) {
+    Long current = null;
+    for (IndexSnapshot indexSnapshot : indexSnapshots) {
+      if (current == null) {
+        current = indexSnapshot.getSnapshotId();
+      } else if (!current.equals(indexSnapshot.getSnapshotId())) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  /** Serializable description of this snapshot and each participating index. */
+  public SnapshotInfo getSnapshotInfo() throws IOException {
+    int deletedDocs = indexReader.maxDoc() - indexReader.numDocs();
+    SnapshotInfo snapshotInfo = new SnapshotInfo(id, indexReader.maxDoc(), indexReader.numDocs(), deletedDocs);
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      Index index = indexSnapshot.getIndex();
+      String type = null;
+      Long segmentGeneration = null;
+      if (index instanceof DiskIndex) {
+        // only disk indexes have a segments generation
+        segmentGeneration = indexSnapshot.getIndexReader().getIndexCommit().getGeneration();
+        type = "disk";
+      } else if (index instanceof WriteableMemoryIndex) {
+        type = "memory";
+      } else if (index instanceof RamIndex) {
+        type = "ram";
+      }
+      IndexInfo indexInfo = new IndexInfo(indexSnapshot.getSnapshotId(), index.getId().id, segmentGeneration, type,
+          indexSnapshot.maxDoc(), indexSnapshot.getIndexReader().numDocs(), indexSnapshot.deletedDoc(),
+          indexSnapshot.getMinDocumentId(), indexSnapshot.getMaxDocumentId(), indexSnapshot.getMinSnapshotId(),
+          indexSnapshot.getMaxSnapshotId());
+      snapshotInfo.add(indexInfo);
+    }
+    return snapshotInfo;
+  }
+
+  /** Formats a composite id as "##0.00" with '.' regardless of the default locale. */
+  public static String formatId(BigDecimal id) {
+    // FIX: DecimalFormat is locale sensitive; a comma decimal separator
+    // would corrupt the file names produced by getFileName().
+    DecimalFormat format = new DecimalFormat("##0.00", new java.text.DecimalFormatSymbols(java.util.Locale.ROOT));
+    return format.format(id);
+  }
+
+  /** File name of the persisted descriptor, e.g. "snapshot_210_01.xml". */
+  public static String getFileName(BigDecimal id) {
+    String string = formatId(id);
+    String replaced = string.replace('.', '_');
+    return "snapshot_" + replaced + ".xml";
+  }
+
+  /**
+   * Create minor snapshot (meaning a merged snapshot with no real index
+   * changes) reusing the existing writeableSnapshot.
+   *
+   * @param removeIndexIds indexes replaced by the merge
+   * @param newIndexSnapshot snapshot of the merged index
+   * @throws IOException on reader failure
+   */
+  public Snapshot createMinor(List<IndexID> removeIndexIds, IndexSnapshot newIndexSnapshot) throws IOException {
+    return createMinor(removeIndexIds, writeableSnapshot, newIndexSnapshot);
+  }
+
+  /**
+   * Derives a snapshot with the same snapshot id and an incremented minor
+   * version, replacing removeIndexIds with newIndexSnapshot.
+   */
+  public Snapshot createMinor(List<IndexID> removeIndexIds, MemoryIndexSnapshot writeableSnapshot, IndexSnapshot newIndexSnapshot)
+      throws IOException {
+    HashMap<IndexID,IndexSnapshot> mapCopy = new HashMap<IndexID,IndexSnapshot>(indexSnapshotMap);
+    for (IndexID indexid : removeIndexIds) {
+      mapCopy.remove(indexid);
+    }
+    IndexID newIndexId = newIndexSnapshot.getIndex().getId();
+    assert !mapCopy.containsKey(newIndexId);
+    mapCopy.put(newIndexId, newIndexSnapshot);
+    mapCopy.put(writeableSnapshot.getIndex().getId(), writeableSnapshot);
+    int newMinorVersion = getMinorVersion() + 1;
+    return new Snapshot(getSnapshotId(), newMinorVersion, writeableSnapshot, new ArrayList<IndexSnapshot>(mapCopy.values()), system,
+        System.currentTimeMillis());
+  }
+
+  /** All DiskIndex instances participating in this snapshot. */
+  public List<DiskIndex> getDiskIndices() {
+    // FIX: the result list was never populated; this method always
+    // returned an empty list.
+    List<DiskIndex> diskIndices = new ArrayList<DiskIndex>();
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      Index index = indexSnapshot.getIndex();
+      if (index instanceof DiskIndex) {
+        diskIndices.add((DiskIndex) index);
+      }
+    }
+    return diskIndices;
+  }
+
+  /** Every participating index except the writeable one. */
+  public List<Index> getDeleteOnlyIndices() {
+    HashMap<IndexID,IndexSnapshot> mapCopy = new HashMap<IndexID,IndexSnapshot>(indexSnapshotMap);
+    mapCopy.remove(writeableSnapshot.getIndex().getId());
+    List<Index> indices = new ArrayList<Index>();
+    for (IndexSnapshot indexSnapshot : mapCopy.values()) {
+      indices.add(indexSnapshot.getIndex());
+    }
+    return indices;
+  }
+
+  public MemoryIndexSnapshot getWriteableSnapshot() {
+    return writeableSnapshot;
+  }
+
+  /** True when an index with the given numeric id participates in this snapshot. */
+  public boolean containsIndex(long indexid) {
+    // FIX: the map is keyed by IndexID, so containsKey(boxed long) could
+    // never match; compare against each index's numeric id instead.
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      if (indexSnapshot.getIndex().getId().id.longValue() == indexid) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public List<DiskIndexSnapshot> getDiskIndexSnapshots() {
+    List<DiskIndexSnapshot> diskIndexSnapshots = new ArrayList<DiskIndexSnapshot>();
+    for (IndexSnapshot indexSnapshot : indexSnapshotMap.values()) {
+      if (indexSnapshot instanceof DiskIndexSnapshot) {
+        diskIndexSnapshots.add((DiskIndexSnapshot) indexSnapshot);
+      }
+    }
+    return diskIndexSnapshots;
+  }
+
+  /** Fresh (mutable) list of all participating index snapshots. */
+  public List<IndexSnapshot> getIndexSnapshots() {
+    return new ArrayList<IndexSnapshot>(indexSnapshotMap.values());
+  }
+
+  /** Whole part of the composite id. */
+  public Long getSnapshotId() {
+    return id.longValue();
+  }
+
+  public BigDecimal getId() {
+    return id;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/SnapshotInfo.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/SnapshotInfo.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/SnapshotInfo.java	(revision 0)
@@ -0,0 +1,190 @@
+package org.apache.lucene.ocean;
+
+import java.math.BigDecimal;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.lucene.ocean.util.CElement;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.ocean.util.XMLUtil;
+import org.jdom.Element;
+
+/**
+ * Serializable (XML) description of a Snapshot: the composite id, overall
+ * document counts, and one IndexInfo per participating index.
+ */
+public class SnapshotInfo implements CElement, Comparable<SnapshotInfo> {
+  private BigDecimal id;
+  private SortedList<IndexID,IndexInfo> indexInfos;
+  private int numDocs;
+  private int maxDoc;
+  private int deletedDocs;
+
+  public SnapshotInfo(BigDecimal id, int maxDoc, int numDocs, int deletedDocs) {
+    assert id != null;
+    this.id = id;
+    this.maxDoc = maxDoc;
+    this.numDocs = numDocs;
+    this.deletedDocs = deletedDocs;
+    indexInfos = new SortedList<IndexID,IndexInfo>();
+  }
+  
+  public IndexInfo getIndexInfo(IndexID id) {
+    return indexInfos.get(id);
+  }
+  
+  /** Orders by composite snapshot id. */
+  public int compareTo(SnapshotInfo other) {
+    return id.compareTo(other.id);
+  }
+  
+  public void add(IndexInfo indexInfo) {
+    indexInfos.put(indexInfo.getIndexID(), indexInfo);
+  }
+
+  /** Restores a SnapshotInfo from the XML produced by {@link #toElement()}. */
+  public SnapshotInfo(Element element) {
+    indexInfos = new SortedList<IndexID,IndexInfo>();
+    id = new BigDecimal(element.getAttributeValue("id"));
+    // FIX: numDocs/maxDoc/deletedDocs are written by toElement() but were
+    // never read back, so deserialized instances always reported 0.
+    Integer numDocsValue = XMLUtil.getAttributeInteger("numDocs", element);
+    numDocs = numDocsValue == null ? 0 : numDocsValue.intValue();
+    Integer maxDocValue = XMLUtil.getAttributeInteger("maxDoc", element);
+    maxDoc = maxDocValue == null ? 0 : maxDocValue.intValue();
+    Integer deletedDocsValue = XMLUtil.getAttributeInteger("deletedDocs", element);
+    deletedDocs = deletedDocsValue == null ? 0 : deletedDocsValue.intValue();
+    for (Element indexElement : XMLUtil.getChildren("index", element)) {
+      IndexInfo indexInfo = new IndexInfo(indexElement);
+      indexInfos.put(indexInfo.getIndexID(), indexInfo);
+    }
+  }
+
+  /** Whole part of the composite id. */
+  public Long getSnapshotId() {
+    return id.longValue();
+  }
+
+  public BigDecimal getId() {
+    return id;
+  }
+
+  public Collection<IndexInfo> getIndexInfos() {
+    return indexInfos.values();
+  }
+
+  // NOTE(review): this constructor leaves numDocs/maxDoc/deletedDocs at 0;
+  // callers that need the counts should use the four-argument constructor.
+  public SnapshotInfo(BigDecimal id, Map<IndexID,IndexInfo> indexInfos) {
+    this.id = id;
+    this.indexInfos = new SortedList<IndexID,IndexInfo>(indexInfos);
+  }
+
+  /**
+   * Per-index statistics within a snapshot.  Fields use wrapper types
+   * because every attribute is optional in the XML form.
+   */
+  public static class IndexInfo implements CElement {
+    private Long snapshotId;
+    private Long id;
+    private Long segmentGeneration;  // only set for disk indexes
+    private String type;             // "disk", "memory" or "ram"
+    private Integer maxDoc;
+    private Integer deletedDoc;
+    private Integer numDocs;
+    private Long minDocumentId;
+    private Long maxDocumentId;
+    private Long minSnapshotId;
+    private Long maxSnapshotId;
+
+    public IndexInfo(Long snapshotId, Long id, Long segmentGeneration, String type, int maxDoc, int numDocs, int deletedDoc, Long minDocumentId, Long maxDocumentId, Long minSnapshotId, Long maxSnapshotId) {
+      this.snapshotId = snapshotId;
+      this.id = id;
+      this.segmentGeneration = segmentGeneration;
+      this.type = type;
+      this.maxDoc = maxDoc;
+      this.numDocs = numDocs;
+      this.deletedDoc = deletedDoc;
+      this.minDocumentId = minDocumentId;
+      this.maxDocumentId = maxDocumentId;
+      this.minSnapshotId = minSnapshotId;
+      this.maxSnapshotId = maxSnapshotId;
+    }
+    
+    /** Identity of the index this info describes (numeric id + type). */
+    public IndexID getIndexID() {
+      return new IndexID(id, type);
+    }
+    
+    /** Restores an IndexInfo from the XML produced by {@link #toElement()}. */
+    public IndexInfo(Element element) {
+      snapshotId = XMLUtil.getAttributeLong("snapshotid", element);
+      id = XMLUtil.getAttributeLong("id", element);
+      segmentGeneration = XMLUtil.getAttributeLong("segmentGeneration", element);
+      type = XMLUtil.getAttributeString("type", element);
+      maxDoc = XMLUtil.getAttributeInteger("maxDoc", element);
+      numDocs = XMLUtil.getAttributeInteger("numDocs", element);
+      deletedDoc = XMLUtil.getAttributeInteger("deletedDoc", element);
+      minDocumentId = XMLUtil.getAttributeLong("minDocumentId", element);
+      minSnapshotId = XMLUtil.getAttributeLong("minSnapshotId", element);
+      maxDocumentId = XMLUtil.getAttributeLong("maxDocumentId", element);
+      maxSnapshotId = XMLUtil.getAttributeLong("maxSnapshotId", element);
+    }
+    
+    public Integer getNumDocs() {
+      return numDocs;
+    }
+    
+    public Long getSnapshotId() {
+      return snapshotId;
+    }
+    
+    public Long getMinSnapshotId() {
+      return minSnapshotId;
+    }
+    
+    public Long getMinDocumentId() {
+      return minDocumentId;
+    }
+    
+    public Long getMaxSnapshotId() {
+      return maxSnapshotId;
+    }
+    
+    public Long getMaxDocumentId() {
+      return maxDocumentId;
+    }
+    
+    public Integer getDeletedDoc() {
+      return deletedDoc;
+    }
+    
+    public Long getSegmentGeneration() {
+      return segmentGeneration;
+    }
+    
+    public Integer getMaxDoc() {
+      return maxDoc;
+    }
+    
+    public Long getId() {
+      return id;
+    }
+
+    public String getType() {
+      return type;
+    }
+
+    /** Serializes this IndexInfo as an &lt;index&gt; element. */
+    public Element toElement() {
+      Element element = new Element("index");
+      XMLUtil.setAttribute("snapshotid", snapshotId, element);
+      XMLUtil.setAttribute("id", id, element);
+      XMLUtil.setAttribute("segmentGeneration", segmentGeneration, element);
+      XMLUtil.setAttribute("type", type, element);
+      XMLUtil.setAttribute("maxDoc", maxDoc, element);
+      XMLUtil.setAttribute("numDocs", numDocs, element);
+      XMLUtil.setAttribute("deletedDoc", deletedDoc, element);
+      XMLUtil.setAttribute("minDocumentId", minDocumentId, element);
+      XMLUtil.setAttribute("maxDocumentId", maxDocumentId, element);
+      XMLUtil.setAttribute("minSnapshotId", minSnapshotId, element);
+      XMLUtil.setAttribute("maxSnapshotId", maxSnapshotId, element);
+      return element;
+    }
+  }
+
+  /** Serializes this SnapshotInfo as a &lt;snapshot&gt; element with nested indexes. */
+  public Element toElement() {
+    Element element = new Element("snapshot");
+    XMLUtil.setAttribute("id", id, element);
+    XMLUtil.setAttribute("numDocs", numDocs, element);
+    XMLUtil.setAttribute("maxDoc", maxDoc, element);
+    XMLUtil.setAttribute("deletedDocs", deletedDocs, element);
+    for (IndexInfo indexInfo : indexInfos.values()) {
+      element.addContent(indexInfo.toElement());
+    }
+    return element;
+  }
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/Snapshots.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Snapshots.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Snapshots.java	(revision 0)
@@ -0,0 +1,211 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.ocean.util.XMLUtil;
+import org.jdom.Element;
+
+/**
+ * Maintains the ordered list of live Snapshot instances and their
+ * persisted snapshot_*.xml descriptor files in the system's LogDirectory.
+ * Mutations are guarded by writeLock; reads are unsynchronized.
+ */
+public class Snapshots {
+  private List<Snapshot> list = new ArrayList<Snapshot>();
+  private TransactionSystem system;
+  private ReentrantLock writeLock = new ReentrantLock();
+
+  public Snapshots(TransactionSystem system) {
+    this.system = system;
+  }
+
+  /**
+   * Trims the oldest snapshots so that at most {@code max} remain, but only
+   * drops those whose age exceeds {@code durationMillis}.
+   */
+  public void remove(int max, long durationMillis) {
+    writeLock.lock();
+    try {
+      if (list.size() > max) {
+        long now = System.currentTimeMillis();
+        int numToCheck = list.size() - max;
+        Iterator<Snapshot> iterator = list.iterator();
+        for (int x = 0; x < numToCheck; x++) {
+          Snapshot snapshot = iterator.next();
+          // FIX: the condition was inverted - it removed snapshots still
+          // within durationMillis and retained the expired ones.
+          if ((snapshot.getTimestamp() + durationMillis) <= now) {
+            iterator.remove();
+          }
+        }
+      }
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  /**
+   * Loads the ids from the file names rather than loading each xml file.
+   *
+   * @param directory directory containing snapshot_*.xml descriptors
+   * @return ids sorted ascending
+   */
+  public static List<BigDecimal> loadSnapshotInfoIds(LogDirectory directory) throws Exception {
+    List<BigDecimal> list = new ArrayList<BigDecimal>();
+    for (String file : directory.list()) {
+      if (directory.fileLength(file) > 0) {
+        String str = "snapshot_";
+        if (file.startsWith(str)) {
+          String main = file.substring(str.length(), file.lastIndexOf('.'));
+          String[] split = StringUtils.split(main, "_");
+          if (split.length > 1) {
+            // name carries a minor version: snapshot_<id>_<minor>.xml
+            String replace = main.replace('_', '.');
+            list.add(new BigDecimal(replace));
+          } else {
+            Long snapshotId = new Long(split[0]);
+            list.add(new BigDecimal(snapshotId));
+          }
+        }
+      }
+    }
+    Collections.sort(list);
+    return list;
+  }
+
+  /** Parses every snapshot_*.xml descriptor in the directory, sorted ascending. */
+  public static List<SnapshotInfo> loadSnapshotInfos(LogDirectory directory) throws Exception {
+    List<SnapshotInfo> snapshotInfos = new ArrayList<SnapshotInfo>();
+    for (String file : directory.list()) {
+      if (directory.fileLength(file) > 0) {
+        String str = "snapshot_";
+        if (file.startsWith(str)) {
+          String xml = Util.getString(file, directory);
+          Element element = XMLUtil.parseElement(xml);
+          snapshotInfos.add(new SnapshotInfo(element));
+        }
+      }
+    }
+    Collections.sort(snapshotInfos);
+    return snapshotInfos;
+  }
+
+  // TODO: load the max id via loadSnapshotInfoIds, then parse only that one file
+  /** Highest-id SnapshotInfo in the directory, or null when none exist. */
+  public static SnapshotInfo loadMaxSnapshotInfo(LogDirectory directory) throws Exception {
+    List<SnapshotInfo> list = loadSnapshotInfos(directory);
+    if (list.size() == 0)
+      return null;
+    return Util.max(list);
+  }
+
+  /** Latest minor version of the given snapshot id, or null. */
+  public Snapshot get(long snapshotId) {
+    List<Snapshot> snapshots = getForSnapshot(snapshotId);
+    return Util.max(snapshots);
+  }
+
+  /** All minor versions sharing the given snapshot id. */
+  public List<Snapshot> getForSnapshot(long snapshotId) {
+    List<Snapshot> inrange = new ArrayList<Snapshot>();
+    for (Snapshot snapshot : list) {
+      long l = snapshot.getId().toBigInteger().longValue();
+      if (l == snapshotId) {
+        inrange.add(snapshot);
+      }
+    }
+    return inrange;
+  }
+
+  /** True when an exact composite id (snapshot + minor) is present. */
+  public boolean contains(BigDecimal id) {
+    for (Snapshot s : list) {
+      // compareTo ignores scale differences, unlike BigDecimal.equals
+      if (s.getId().compareTo(id) == 0)
+        return true;
+    }
+    return false;
+  }
+
+  public boolean contains(Long snapshotId) {
+    return get(snapshotId) != null;
+  }
+
+  public boolean containsIndex(long indexid) {
+    for (Snapshot snapshot : list) {
+      if (snapshot.containsIndex(indexid))
+        return true;
+    }
+    return false;
+  }
+
+  /** Removes matching in-memory snapshots and deletes the descriptor file. */
+  private void remove(Snapshot snapshot) throws IOException {
+    Iterator<Snapshot> iterator = list.iterator();
+    while (iterator.hasNext()) {
+      Snapshot s = iterator.next();
+      // FIX: BigDecimal.equals also compares scale (2.0 != 2.00); use
+      // compareTo for numeric equality.
+      if (s.getId().compareTo(snapshot.getId()) == 0) {
+        iterator.remove();
+        String file = Snapshot.getFileName(snapshot.getId());
+        system.directoryMap.getDirectory().deleteFile(file);
+      }
+    }
+  }
+
+  /** Last element of the id-sorted list, or null when empty. */
+  public Snapshot getLatestSnapshot() {
+    if (list.size() == 0)
+      return null;
+    return list.get(list.size() - 1);
+  }
+
+  /**
+   * Registers a snapshot, optionally persisting its descriptor file first.
+   */
+  void add(Snapshot snapshot, boolean createFile) throws Exception {
+    writeLock.lock();
+    try {
+      if (createFile) {
+        addCreateFile(snapshot);
+      } else {
+        list.add(snapshot);
+      }
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  /** Writes the descriptor file, then registers the snapshot in id order. */
+  private void addCreateFile(Snapshot snapshot) throws Exception {
+    BigDecimal id = snapshot.getId();
+    SnapshotInfo snapshotInfo = snapshot.getSnapshotInfo();
+    String fileName = Snapshot.getFileName(id);
+    LogDirectory directory = system.directoryMap.getDirectory();
+    if (directory.fileExists(fileName)) {
+      throw new IOException("fileName: " + fileName + " already exists");
+    }
+    Element element = snapshotInfo.toElement();
+    String xml = XMLUtil.outputElement(element);
+    byte[] bytes = xml.getBytes("UTF-8");
+    RandomAccessFile output = directory.getOutput(fileName, true);
+    try {
+      output.write(bytes, 0, bytes.length);
+    } finally {
+      // FIX: the file handle leaked when write() threw
+      output.close();
+    }
+    // FIX: register only after the descriptor is safely on disk; previously
+    // a failed write left the snapshot in the list with no backing file.
+    list.add(snapshot);
+    Collections.sort(list);
+  }
+}
\ No newline at end of file
Index: ocean/src/org/apache/lucene/ocean/Transaction.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/Transaction.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/Transaction.java	(revision 0)
@@ -0,0 +1,344 @@
+package org.apache.lucene.ocean;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.ocean.Batch.MasterBatch;
+import org.apache.lucene.ocean.Index.IndexSnapshot;
+import org.apache.lucene.ocean.log.TransactionLog;
+import org.apache.lucene.store.RAMDirectory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Coordinates a multithreaded transaction commit between multiple indexes.
+ * Utilizes java.util.concurrent.CountDownLatch for synchronization between the
+ * indexes as each index operation is performed in its own thread.
+ * 
+ */
+// TODO: add timeout to transaction
+public class Transaction {
+  public static final long TIMEOUT = 1000 * 5;
+  final static Logger LOG = LoggerFactory.getLogger(Transaction.class);
+  private Batch batch;
+  private CountDownLatch latch;
+  private List<Failure> failures = new ArrayList<Failure>();
+  private List<DeletesResult> deletesResults = new ArrayList<DeletesResult>();
+  private List<IndexSnapshot> newIndexSnapshots = new ArrayList<IndexSnapshot>();
+  private ReentrantLock lock = new ReentrantLock();
+  private CountDownLatch goLatch = new CountDownLatch(1);
+  private Long id;
+  private Long previousId;
+  private TransactionLog transactionLog;
+  private CommitResult commitResult;
+  private TransactionSystem system;
+  
+  // ran into a bug where the tasks were not running in the thread and could not figure out why
+  // the result is this constructor that does not use multiple threads.
+  public Transaction(Long id, Long previousId, Batch batch, WriteableMemoryIndex writeableIndex, List<Index> nonWriteableIndices,
+      TransactionSystem system) throws Exception {
+    this.id = id;
+    this.previousId = previousId;
+    this.batch = batch;
+    this.transactionLog = system.getTransactionLog();
+    List<Callable> tasks = new ArrayList<Callable>();
+    Deletes deletes = batch.getDeletes();
+    if (batch.hasDeletes()) {
+      for (Index index : nonWriteableIndices) {
+        new DeletesTask(deletes, index, this).call();
+      }
+    } else {
+      for (Index index : nonWriteableIndices) {
+        new NothingTask(index, this).call();
+      }
+    }
+    int numDocsAdded = 0;
+    // handle changes to writeable index, or if a ram directory create a ram
+    // index
+    if (batch.hasRAMDirectory()) {
+      new AddRamIndexDocumentsTask(batch.getRamDirectory()).call();
+    } else if (batch.hasDocuments()) {
+      Documents documents = batch.getDocuments();
+      Analyzer analyzer = batch.getAnalyzer();
+      new AddWriteableMemoryDocumentsTask(documents, analyzer, deletes, writeableIndex).call();
+      numDocsAdded += documents.size();
+    } else {
+      new DeletesTask(deletes, writeableIndex, this).call();
+    }
+    finish();
+    if (failures.size() == 0) {
+      commitResult = new CommitResult(id, deletesResults, numDocsAdded, writeableIndex.getId());
+    } else {
+      // rollback indexes
+      LOG.info("rolling back snapshot: " + id);
+      writeableIndex.rollback(id);
+      for (Index index : nonWriteableIndices) {
+        index.rollback(id);
+      }
+      throw new Exception("transaction failed " + failures);
+    }
+  }
+  /**
+  public Transaction(Long id, Long previousId, Batch batch, WriteableMemoryIndex writeableIndex, List<Index> nonWriteableIndices,
+      ExecutorService commitThreadPool, TransactionSystem system) throws Exception {
+    this.id = id;
+    this.previousId = previousId;
+    this.batch = batch;
+    this.transactionLog = system.getTransactionLog();
+    List<Callable> tasks = new ArrayList<Callable>();
+    Deletes deletes = batch.getDeletes();
+    if (batch.hasDeletes()) {
+      for (Index index : nonWriteableIndices) {
+        tasks.add(new DeletesTask(deletes, index, this));
+      }
+    } else {
+      for (Index index : nonWriteableIndices) {
+        tasks.add(new NothingTask(index, this));
+      }
+    }
+    int numDocsAdded = 0;
+    // handle changes to writeable index, or if a ram directory create a ram
+    // index
+    if (batch.hasRAMDirectory()) {
+      tasks.add(new AddRamIndexDocumentsTask(batch.getRamDirectory()));
+    } else if (batch.hasDocuments()) {
+      Documents documents = batch.getDocuments();
+      Analyzer analyzer = batch.getAnalyzer();
+      tasks.add(new AddWriteableMemoryDocumentsTask(documents, analyzer, deletes, writeableIndex));
+      numDocsAdded += documents.size();
+    } else {
+      tasks.add(new DeletesTask(deletes, writeableIndex, this));
+    }
+    latch = new CountDownLatch(tasks.size());
+    List<Future> futures = new ArrayList<Future>(tasks.size());
+    for (Callable callable : tasks) {
+      futures.add(commitThreadPool.submit(callable));
+    }
+    if (!latch.await(TIMEOUT, TimeUnit.MILLISECONDS)) {
+      failures.add(new TimeoutFailure("timed out after: " + TIMEOUT + " millis"));
+    } else {
+      goLatch.countDown();
+      // need rollback here for failures during commit
+      for (Future future : futures) {
+        try {
+          future.get();
+        } catch (ExecutionException executionException) {
+          Throwable cause = executionException.getCause();
+          LOG.info(cause.getMessage());
+        }
+      }
+    }
+    finish();
+    if (failures.size() == 0) {
+      commitResult = new CommitResult(id, deletesResults, numDocsAdded, writeableIndex.getId());
+    } else {
+      // rollback indexes
+      LOG.info("rolling back snapshot: " + id);
+      writeableIndex.rollback(id);
+      for (Index index : nonWriteableIndices) {
+        index.rollback(id);
+      }
+      throw new Exception("transaction failed " + failures);
+    }
+  }
+  **/
+  // TODO: no snapshots added
+  public List<IndexSnapshot> getNewIndexSnapshots() {
+    return newIndexSnapshots;
+  }
+
+  public Long getPreviousId() {
+    return previousId;
+  }
+
+  public CommitResult getCommitResult() {
+    assert commitResult != null; // should have thrown exception before this
+    // point
+    return commitResult;
+  }
+
+  public Long getId() {
+    return id;
+  }
+
+  public abstract static class Failure extends Exception {
+    private String string;
+
+    public Failure(String message) {
+      super(message);
+      string = message;
+    }
+
+    public Failure(Throwable throwable) {
+      super(throwable);
+      string = ExceptionUtils.getFullStackTrace(throwable);
+    }
+
+    public String toString() {
+      return string;
+    }
+  }
+
+  public static class TimeoutFailure extends Failure {
+    public TimeoutFailure(String message) {
+      super(message);
+    }
+  }
+
+  public static class LogFailure extends Failure {
+    public LogFailure(Throwable throwable) {
+      super(throwable);
+    }
+  }
+
+  public static class IndexFailure extends Failure {
+    Index index;
+
+    public IndexFailure(Index index, Throwable throwable) {
+      super(throwable);
+      this.index = index;
+    }
+  }
+
+  public static class NothingTask implements Callable {
+    private Index index;
+    private Transaction transaction;
+
+    public NothingTask(Index index, Transaction transaction) {
+      this.index = index;
+      this.transaction = transaction;
+    }
+
+    public Object call() throws Exception {
+      index.commitNothing(transaction);
+      return null;
+    }
+  }
+
+  public static class DeletesTask implements Callable<DeletesResult> {
+    private Index index;
+    private Deletes deletes;
+    private Transaction transaction;
+
+    public DeletesTask(Deletes deletes, Index index, Transaction transaction) {
+      this.deletes = deletes;
+      this.index = index;
+      this.transaction = transaction;
+    }
+
+    public DeletesResult call() throws Exception {
+      DeletesResult deletesResult = index.commitDeletes(deletes, transaction);
+      transaction.addDeletesResult(deletesResult);
+      return deletesResult;
+    }
+  }
+
+  public class AddRamIndexDocumentsTask implements Callable<DeletesResult> {
+    private RAMDirectory ramDirectory;
+
+    public AddRamIndexDocumentsTask(RAMDirectory ramDirectory) {
+      this.ramDirectory = ramDirectory;
+    }
+
+    public DeletesResult call() throws Exception {
+      // TODO: create new ramindex
+      long indexIdNum = system.getNextRamIndexId();
+      IndexID indexId = new IndexID(indexIdNum, "ram");
+      Analyzer analyzer = batch.getAnalyzer();
+      RamIndex ramIndex = new RamIndex(indexId, id, null, ramDirectory, system);
+      IndexSnapshot indexSnapshot = ramIndex.commitIndex(Transaction.this);
+      DeletesResult deletesResult = new DeletesResult(indexId);
+      addDeletesResult(deletesResult);
+      return deletesResult;
+    }
+  }
+
+  public class AddWriteableMemoryDocumentsTask implements Callable<DeletesResult> {
+    private Documents documents;
+    private Analyzer analyzer;
+    private Deletes deletes;
+    private WriteableMemoryIndex writeableIndex;
+
+    public AddWriteableMemoryDocumentsTask(Documents documents, Analyzer analyzer, Deletes deletes, WriteableMemoryIndex writeableIndex) {
+      this.documents = documents;
+      this.analyzer = analyzer;
+      this.deletes = deletes;
+      this.writeableIndex = writeableIndex;
+    }
+
+    public DeletesResult call() throws Exception {
+      DeletesResult deletesResult = writeableIndex.commitChanges(documents, deletes, analyzer, Transaction.this);
+      addDeletesResult(deletesResult);
+      return deletesResult;
+    }
+  }
+
+  void addDeletesResult(DeletesResult deletesResult) {
+    assert deletesResult != null;
+    deletesResults.add(deletesResult);
+  }
+
+  void failed(Index index, Throwable throwable) {
+    failures.add(new IndexFailure(index, throwable));
+    latch.countDown();
+  }
+
+  void ready(Index index) {
+    if (latch != null)
+      latch.countDown();
+  }
+
+  private void finish() {
+    try {
+      if (batch instanceof MasterBatch) {
+        MasterBatch masterBatch = (MasterBatch) batch;
+        if (masterBatch.hasDeletes()) {
+          int numDocIds = 0;
+          for (DeletesResult deletesResult : deletesResults) {
+            numDocIds += deletesResult.getDocIds().size();
+          }
+          long[] docIds = new long[numDocIds];
+          int x = 0;
+          for (DeletesResult deletesResult : deletesResults) {
+            for (Long docId : deletesResult.getDocIds()) {
+              docIds[x] = docId;
+              x++;
+            }
+          }
+          masterBatch.getDeletes().setDocIds(docIds);
+        }
+        transactionLog.writeMasterBatch(id, previousId, masterBatch);
+      }
+    } catch (Exception exception) {
+      failures.add(new LogFailure(exception));
+    }
+  }
+
+  public boolean go() {
+    return true;
+  }
+
+  /**
+   * public boolean go() throws InterruptedException { goLatch.await(); if
+   * (failures.size() == 0) { try { if (batch instanceof MasterBatch) {
+   * MasterBatch masterBatch = (MasterBatch) batch; if
+   * (masterBatch.hasDeletes()) { int numDocIds = 0; for (DeletesResult
+   * deletesResult : deletesResults) { numDocIds +=
+   * deletesResult.getDocIds().size(); } long[] docIds = new long[numDocIds];
+   * int x = 0; for (DeletesResult deletesResult : deletesResults) { for (Long
+   * docId : deletesResult.getDocIds()) { docIds[x] = docId; x++; } }
+   * masterBatch.getDeletes().setDocIds(docIds); }
+   * transactionLog.writeMasterBatch(id, previousId, masterBatch); } } catch
+   * (Throwable throwable) { LOG.error("", throwable); failures.add(new
+   * LogFailure(throwable)); return false; } return true; } else { return false; } }
+   */
+}
Index: ocean/src/org/apache/lucene/ocean/TransactionSystem.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/TransactionSystem.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/TransactionSystem.java	(revision 0)
@@ -0,0 +1,729 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.ocean.Batch.MasterBatch;
+import org.apache.lucene.ocean.Batch.SlaveBatch;
+import org.apache.lucene.ocean.DiskIndex.DiskIndexSnapshot;
+import org.apache.lucene.ocean.Index.IndexException;
+import org.apache.lucene.ocean.Index.IndexSnapshot;
+import org.apache.lucene.ocean.RamIndex.RamIndexSnapshot;
+import org.apache.lucene.ocean.SnapshotInfo.IndexInfo;
+import org.apache.lucene.ocean.WriteableMemoryIndex.MemoryIndexSnapshot;
+import org.apache.lucene.ocean.log.TransactionLog;
+import org.apache.lucene.ocean.log.TransactionLog.SlaveBatchIterator;
+import org.apache.lucene.ocean.util.Constants;
+import org.apache.lucene.ocean.util.LongSequence;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.search.OceanMultiThreadSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Main class for search transaction system.
+ * 
+ * Indexes on disk are immutable; they can only be deleted from or merged
+ * periodically. Merges occur in the background. There is always one active
+ * WriteableMemoryIndex that new documents are written to.
+ * 
+ * A snapshot corresponds to a transaction. Each transaction creates a new
+ * snapshot. Snapshot ids have both major and minor version represented as a
+ * decimal. The major represents the transaction. The minor increments with
+ * index merges. Transaction data is known as a batch. There is a MasterBatch
+ * and SlaveBatch. A MasterBatch is what is created by the initial call to
+ * TransactionSystem such as addDocument. A SlaveBatch is what is loaded from
+ * the transactionlog during a recovery.
+ * 
+ * IndexWriter like methods such as addDocument, updateDocument are provided.
+ * Also commitTransaction is provided for complete transaction access.
+ * 
+ * A _documentid field is added to each document. This is an internal number for
+ * tracking a document and allows the transaction log system to be recovered
+ * properly. During recovery a delete will use the _documentid rather than the
+ * actual query or term to ensure the exact documents are deleted at the point
+ * in time the transaction occurred.
+ * 
+ * 
+ */
+// TODO: need test case of maybeMergeDiskIndices
+// TODO: add .index suffix to index directory names
+// TODO: custom efficient document serializer
+// TODO: add writeVLong writeVInt to LogDirectory output
+// TODO: not sure how to handle Document fields with a TokenStream
+// TODO: make transaction timeout a batch parameter
+// TODO: make multithreaded transactions optional
+public class TransactionSystem {
+  final static Logger LOG = LoggerFactory.getLogger(TransactionSystem.class);
+  public static final int DEFAULT_MEMORY_INDEX_MAX_DOCS = 50;
+  public static final int DEFAULT_MAYBE_MERGE_DOC_CHANGES = 2000;
+  public static final int DEFAULT_MAX_RAM_INDEXES_SIZE = 1024 * 1024 * 30;
+  public static final float DEFAULT_MERGE_DISK_DELETED_PERCENT = 0.3f;
+  private ExecutorService commitThreadPool;
+  private ExecutorService mergeThreadPool;
+  private TransactionLog transactionLog;
+  private Indexes indexes = new Indexes();
+  private ReentrantLock commitLock = new ReentrantLock();
+  Snapshots snapshots;
+  private ReentrantLock mergeIndexesLock = new ReentrantLock();
+  private int docChangesSinceLastMerge = 0;
+  private Analyzer defaultAnalyzer;
+  private int serverNumber = 0;
+  private LongSequence documentSequence;
+  private LongSequence diskIndexSequence;
+  private LongSequence ramIndexSequence;
+  private int memoryIndexMaxDocs = DEFAULT_MEMORY_INDEX_MAX_DOCS;
+  private int maybeMergeDocChanges = DEFAULT_MAYBE_MERGE_DOC_CHANGES;
+  private int maxRamIndexesSize = DEFAULT_MAX_RAM_INDEXES_SIZE;
+  private int maxDocsIndexes = -1;
+  private int maxSnapshots = 5;
+  private float mergeDiskDeletedPercent = DEFAULT_MERGE_DISK_DELETED_PERCENT;
+  private long snapshotExpiration = 20 * 1000;
+  DirectoryMap directoryMap;
+  private ArrayBlockingQueue<Runnable> mergeQueue;
+  private SearcherPolicy searcherPolicy;
+  private ExecutorService searchThreadPool;
+  private ArrayBlockingQueue<Runnable> searchQueue;
+
+  public TransactionSystem(TransactionLog transactionLog, Analyzer defaultAnalyzer, DirectoryMap directoryMap) throws Exception {
+    this(transactionLog, defaultAnalyzer, directoryMap, DEFAULT_MAYBE_MERGE_DOC_CHANGES, -1, DEFAULT_MEMORY_INDEX_MAX_DOCS,
+        DEFAULT_MERGE_DISK_DELETED_PERCENT, new SingleThreadSearcherPolicy());
+  }
+
+  public TransactionSystem(TransactionLog transactionLog, Analyzer defaultAnalyzer, DirectoryMap directoryMap, int maybeMergeDocChanges,
+      int maxDocsIndexes, int memoryIndexMaxDocs, float mergeDiskDeletedPercent, SearcherPolicy searcherPolicy) throws Exception {
+    this.transactionLog = transactionLog;
+    this.defaultAnalyzer = defaultAnalyzer;
+    this.directoryMap = directoryMap;
+    this.maybeMergeDocChanges = maybeMergeDocChanges;
+    this.maxDocsIndexes = maxDocsIndexes;
+    this.memoryIndexMaxDocs = memoryIndexMaxDocs;
+    this.mergeDiskDeletedPercent = mergeDiskDeletedPercent;
+    this.searcherPolicy = searcherPolicy;
+    if (searcherPolicy instanceof MultiThreadSearcherPolicy) {
+      MultiThreadSearcherPolicy multiThreadSearcherPolicy = (MultiThreadSearcherPolicy) searcherPolicy;
+      searchQueue = new ArrayBlockingQueue<Runnable>(multiThreadSearcherPolicy.getQueueSize());
+      searchThreadPool = new ThreadPoolExecutor(multiThreadSearcherPolicy.getMinThreads(), multiThreadSearcherPolicy.getMaxThreads(),
+          1000 * 60, TimeUnit.MILLISECONDS, searchQueue);
+    }
+    mergeQueue = new ArrayBlockingQueue<Runnable>(2);
+    mergeThreadPool = new ThreadPoolExecutor(1, 1, 1000 * 60, TimeUnit.MILLISECONDS, mergeQueue);
+    commitThreadPool = Executors.newFixedThreadPool(5);
+    snapshots = new Snapshots(this);
+    if (LOG.isInfoEnabled())
+      LOG.info("TransactionSystem");
+    load();
+  }
+
+  public IndexID getIndexId(Long documentId) throws IOException {
+    Snapshot snapshot = snapshots.getLatestSnapshot();
+    for (IndexSnapshot indexSnapshot : snapshot.getIndexSnapshots()) {
+      IndexReader indexReader = indexSnapshot.getIndexReader();
+      int freq = indexReader.docFreq(new Term(Constants.DOCUMENTID, Util.longToEncoded(documentId)));
+      if (freq > 0) {
+        return indexSnapshot.getIndex().getId();
+      }
+    }
+    return null;
+  }
+
+  public void setMaybeMergeDocChanges(int maybeMergeDocChanges) {
+    this.maybeMergeDocChanges = maybeMergeDocChanges;
+  }
+
+  public void setMaxDocsIndexes(int maxDocsIndexes) {
+    this.maxDocsIndexes = maxDocsIndexes;
+  }
+
+  public void close() throws IOException {
+    if (LOG.isInfoEnabled())
+      LOG.info("close");
+    mergeThreadPool.shutdown();
+    // commitThreadPool.shutdown();
+    transactionLog.close();
+    for (Index index : indexes.getIndexes()) {
+      index.close();
+    }
+  }
+
+  public OceanSearcher getSearcher() throws IOException {
+    Snapshot snapshot = snapshots.getLatestSnapshot();
+    // snapshot.incRef();
+    if (searcherPolicy instanceof SingleThreadSearcherPolicy) {
+      return new OceanSearcher(snapshot);
+    } else {
+      return new OceanMultiThreadSearcher(snapshot, searchThreadPool);
+    }
+  }
+
+  public CommitResult deleteDocument(Query query) throws Exception {
+    List<Query> deleteByQueries = new ArrayList<Query>(1);
+    deleteByQueries.add(query);
+    return commitTransaction(null, null, null, deleteByQueries);
+  }
+
+  public CommitResult deleteDocument(Term term) throws Exception {
+    List<Term> dterms = new ArrayList<Term>(1);
+    dterms.add(term);
+    return commitTransaction(null, null, dterms, null);
+  }
+
+  public CommitResult updateDocument(Term term, Document document) throws Exception {
+    return updateDocument(term, document, defaultAnalyzer);
+  }
+
+  public CommitResult updateDocument(Term term, Document document, Analyzer analyzer) throws Exception {
+    List<Document> list = new ArrayList<Document>(1);
+    list.add(document);
+    List<Term> dterms = new ArrayList<Term>(1);
+    dterms.add(term);
+    return commitTransaction(list, analyzer, dterms, null);
+  }
+
+  public CommitResult addDocument(Document document) throws Exception {
+    return addDocument(document, defaultAnalyzer);
+  }
+
+  public CommitResult addDocument(Document document, Analyzer analyzer) throws Exception {
+    List<Document> list = new ArrayList<Document>(1);
+    list.add(document);
+    return commitTransaction(list, analyzer, null, null);
+  }
+
+  public CommitResult commitTransaction(List<Document> documents, Analyzer analyzer, List<Term> deleteByTerms, List<Query> deleteByQueries)
+      throws Exception {
+    MasterBatch masterBatch = new MasterBatch(this);
+    if (documents != null)
+      masterBatch.addDocuments(new Documents(documents));
+    masterBatch.setAnalyzer(analyzer);
+    Deletes deletes = new Deletes();
+    if (deleteByTerms != null) {
+      for (Term deleteTerm : deleteByTerms) {
+        deletes.addTerm(deleteTerm);
+      }
+    }
+    if (deleteByQueries != null) {
+      for (Query query : deleteByQueries) {
+        deletes.addQuery(query);
+      }
+    }
+    if (deletes.hasDeletes())
+      masterBatch.setDeletes(deletes);
+    return commitBatch(masterBatch);
+  }
+
+  public Analyzer getDefaultAnalyzer() {
+    return defaultAnalyzer;
+  }
+
+  public long getNextRamIndexId() {
+    return ramIndexSequence.getAndIncrement();
+  }
+
+  public long getNextDiskIndexId() {
+    return diskIndexSequence.getAndIncrement();
+  }
+
+  public TransactionLog getTransactionLog() {
+    return transactionLog;
+  }
+
+  public ExecutorService getCommitThreadPool() {
+    return commitThreadPool;
+  }
+
+  public void load() throws Exception {
+    BigDecimal id;
+    Long snapshotId;
+    List<IndexSnapshot> indexSnapshots = null;
+    SnapshotInfo snapshotInfo = Snapshots.loadMaxSnapshotInfo(directoryMap.getDirectory());
+    if (LOG.isDebugEnabled())
+      LOG.debug("snapshotInfo: " + snapshotInfo);
+    long timestamp = System.currentTimeMillis();
+    if (snapshotInfo != null) {
+      id = snapshotInfo.getId();
+      snapshotId = snapshotInfo.getSnapshotId();
+      assert snapshotId == transactionLog.getMaxId();
+      loadDiskIndexes(snapshotInfo, indexes);
+      IndexID diskMaxId = indexes.getMaxId("disk");
+      if (diskMaxId != null)
+        diskIndexSequence = new LongSequence(diskMaxId.id.longValue() + 1, 1);
+      else
+        diskIndexSequence = new LongSequence(1, 1);
+      ramIndexSequence = new LongSequence(1, 1);
+      indexSnapshots = new ArrayList<IndexSnapshot>();
+      List<Long> snapshotIds = new LinkedList<Long>();
+      // TODO: what if index directory is deleted and it is still referenced
+      for (IndexInfo indexInfo : snapshotInfo.getIndexInfos()) {
+        if (indexInfo.getType().equals("disk")) {
+          DiskIndex diskIndex = (DiskIndex) indexes.get(indexInfo.getIndexID());
+          if (diskIndex != null) {
+            IndexSnapshot indexSnapshot = diskIndex.getIndexSnapshot(snapshotInfo.getSnapshotId());
+            indexSnapshots.add(indexSnapshot);
+            snapshotIds.add(indexSnapshot.getMaxSnapshotId());
+          }
+        }
+      }
+      Long maxDiskIndexSnapshotId = Util.max(snapshotIds);
+      Long fromSnapshotId = null;
+      System.out.println("maxDiskIndexSnapshotId: " + maxDiskIndexSnapshotId);
+      if (maxDiskIndexSnapshotId != null) {
+        fromSnapshotId = new Long(maxDiskIndexSnapshotId.longValue() + 1);
+      }
+      List<RamIndexSnapshot> ramIndexSnapshots = runTransactionsNotInIndex(fromSnapshotId);
+      System.out.println("ramIndexSnapshots: " + ramIndexSnapshots);
+      // TODO: verify all snapshots have same id
+      indexSnapshots.addAll(ramIndexSnapshots);
+      List<Long> documentIds = new ArrayList<Long>(indexSnapshots.size());
+      for (IndexSnapshot indexSnapshot : indexSnapshots) {
+        documentIds.add(indexSnapshot.getMaxDocumentId());
+      }
+      Long maxDocumentId = Util.max(documentIds);
+      if (maxDocumentId != null) {
+        Long documentSequenceId = Util.getNextServerSequence(maxDocumentId, serverNumber);
+        documentSequence = new LongSequence(documentSequenceId, 100);
+      } else {
+        documentSequence = new LongSequence(serverNumber, 100);
+      }
+    } else {
+      snapshotId = new Long(0);
+      id = new BigDecimal(snapshotId.toString());
+      documentSequence = new LongSequence(serverNumber, 100);
+      diskIndexSequence = new LongSequence(1, 1);
+      ramIndexSequence = new LongSequence(1, 1);
+    }
+    WriteableMemoryIndex writeableMemoryIndex = newWriteableMemoryIndex();
+    MemoryIndexSnapshot writeableSnapshot = writeableMemoryIndex.setSnapshot(snapshotId);
+    if (indexSnapshots == null) {
+      indexSnapshots = new ArrayList<IndexSnapshot>();
+      indexSnapshots.add(writeableSnapshot);
+    }
+    Snapshot snapshot = new Snapshot(id, writeableSnapshot, indexSnapshots, this, timestamp);
+    snapshots.add(snapshot, false);
+    deleteUnreferencedSnapshots();
+    new MaybeMergeIndices().run();
+  }
+
+  /**
+   * Delete snapshotinfo if no longer referenced in Snapshots
+   * 
+   * @throws Exception
+   */
+  private void deleteUnreferencedSnapshots() throws Exception {
+    snapshots.remove(maxSnapshots, snapshotExpiration);
+    LogDirectory directory = directoryMap.getDirectory();
+    List<BigDecimal> ids = Snapshots.loadSnapshotInfoIds(directory);
+    for (BigDecimal id : ids) {
+      if (!snapshots.contains(id)) {
+        // not referenced, delete it
+        String fileName = Snapshot.getFileName(id);
+        System.out.println("deleteFile: " + fileName + " id: " + Snapshot.formatId(id));
+        try {
+          directory.deleteFile(fileName);
+          if (LOG.isDebugEnabled())
+            LOG.debug("deleteFile: " + fileName);
+        } catch (Exception exception) {
+          LOG.error(exception.getMessage());
+        }
+      }
+    }
+  }
+
+  public Indexes getIndexes() {
+    return indexes;
+  }
+
+  public Snapshots getSnapshots() {
+    return snapshots;
+  }
+
+  /**
+   * Runs the transactions from the transaction log that are not already in
+   * Lucene indices
+   * 
+   * @param startSnapshotId
+   * @return loaded ram snapshots
+   * @throws Exception
+   *
+   * @throws IOException
+   */
+  private List<RamIndexSnapshot> runTransactionsNotInIndex(Long startSnapshotId) throws Exception, IOException {
+    LOG.info("startSnapshotId: " + startSnapshotId);
+    SlaveBatchIterator iterator = transactionLog.getSlaveBatchIterator(startSnapshotId);
+    if (!iterator.hasNext())
+      return new ArrayList<RamIndexSnapshot>();
+    try {
+      long indexIdNum = ramIndexSequence.getAndIncrement();
+      IndexID indexId = new IndexID(indexIdNum, "ram");
+      RAMDirectory ramDirectory = new RAMDirectory();
+      ExecutorService threadPool = getCommitThreadPool();
+      IndexCreator indexCreator = new IndexCreator(ramDirectory, Long.MAX_VALUE, 4, defaultAnalyzer, threadPool);
+      BlockingQueue<IndexCreator.Add> addQueue = new ArrayBlockingQueue<IndexCreator.Add>(4000, true);
+      List<Deletes> deletesList = new ArrayList<Deletes>(); // deletes are
+      // recorded and run
+      // against all of the
+      // snapshots at the
+      // end
+      indexCreator.start(addQueue);
+      List<RAMDirectory> ramDirectories = new ArrayList<RAMDirectory>();
+      int docCount = 0;
+      while (iterator.hasNext()) {
+        SlaveBatch slaveBatch = iterator.next(true, true);
+        Analyzer analyzer = slaveBatch.getAnalyzer();
+        if (slaveBatch.hasDocuments()) {
+          Documents documents = slaveBatch.getDocuments();
+          for (Document document : documents) {
+            addQueue.add(new IndexCreator.Add(document));
+            docCount++;
+          }
+        } else if (slaveBatch.hasRAMDirectory()) {
+          ramDirectories.add(slaveBatch.getRamDirectory());
+        }
+        if (slaveBatch.hasDeletes()) {
+          deletesList.add(slaveBatch.getDeletes());
+        }
+      }
+      LOG.info("docCount: " + docCount);
+      // if zero means all the transactions were deletes
+      if (docCount == 0) {
+        return new ArrayList<RamIndexSnapshot>();
+      }
+      indexCreator.create();
+      ramDirectories.add(ramDirectory);
+      Long snapshotId = transactionLog.getMaxId();
+      List<RamIndexSnapshot> indexSnapshots = new ArrayList<RamIndexSnapshot>(ramDirectories.size());
+      for (RAMDirectory rd : ramDirectories) {
+        RamIndex ramIndex = new RamIndex(indexId, snapshotId, deletesList, rd, this);
+        indexes.add(ramIndex);
+        RamIndexSnapshot indexSnapshot = (RamIndexSnapshot) ramIndex.getIndexSnapshot(snapshotId);
+        assert indexSnapshot != null;
+        indexSnapshots.add(indexSnapshot);
+      }
+      // TODO: run maybe merge here
+      return indexSnapshots;
+    } finally {
+      if (iterator != null)
+        iterator.close();
+    }
+  }
+
+  private void loadDiskIndexes(SnapshotInfo snapshotInfo, Indexes indices) throws Exception, IOException {
+    for (String name : directoryMap.list()) {
+      try {
+        if (name.endsWith("_index")) {
+          String idString = StringUtils.split(name, "_")[0];
+          Directory directory = directoryMap.get(idString);
+          Long indexIdNum = new Long(idString);
+          IndexID indexId = new IndexID(indexIdNum, "disk");
+          try {
+            IndexInfo indexInfo = snapshotInfo.getIndexInfo(indexId);
+            if (indexInfo != null) {
+              Long snapshotId = snapshotInfo.getSnapshotId();
+              Long segmentGeneration = indexInfo.getSegmentGeneration();
+              DiskIndex diskIndex = new DiskIndex(indexId, directory, snapshotId, segmentGeneration, this);
+              indices.add(diskIndex);
+            } else {
+              LOG.info("index no longer referenced deleting: " + name);
+              // directoryMap.delete(name);
+            }
+          } catch (IndexException indexException) {
+            LOG.error("index not ready, deleting: " + name, indexException);
+            //directoryMap.delete(name);
+          } catch (IOException ioException) {
+            LOG.error("index not ready, deleting: " + name, ioException);
+            //directoryMap.delete(name);
+          }
+        }
+      } catch (Exception exception) {
+        LOG.error("", exception);
+        // if exception simply skip over the index
+      }
+    }
+  }
+
+  public MasterBatch createMasterBatch() throws Exception {
+    return new MasterBatch(this);
+  }
+
+  public class MaybeMergeIndices implements Runnable {
+    public MaybeMergeIndices() {
+    }
+
+    public void run() {
+      if (LOG.isDebugEnabled())
+        LOG.debug("MaybeMergeIndices");
+      mergeIndexesLock.lock();
+      try {
+        Snapshot snapshot = snapshots.getLatestSnapshot();
+        maybeMergeWriteable(snapshot);
+        snapshot = snapshots.getLatestSnapshot();
+        maybeMergeRamIndexes(snapshot);
+        snapshot = snapshots.getLatestSnapshot();
+        maybeMergeDiskIndexes(snapshot);
+      } catch (Throwable throwable) {
+        LOG.error("", throwable);
+      } finally {
+        mergeIndexesLock.unlock();
+      }
+    }
+
+    /**
+     * If the existing ram indexes are above maxRamIndexesSize, then they are
+     * merged and a new disk index is created from them. Or if the number of
+     * documents exceeds maxDocsIndexesSize.
+     * 
+     * @param snapshot
+     * @throws Exception
+     */
+    private void maybeMergeRamIndexes(Snapshot snapshot) throws Exception {
+      long size = 0;
+      int numDocs = 0;
+      List<RamIndexSnapshot> ramIndexSnapshots = snapshot.getRamIndexSnapshots();
+      for (RamIndexSnapshot ramIndexSnapshot : ramIndexSnapshots) {
+        RamIndex ramIndex = (RamIndex) ramIndexSnapshot.getIndex();
+        size += ramIndex.getSize();
+        numDocs += ramIndexSnapshot.getIndexReader().maxDoc();
+      }
+      // if merging based on number of docs
+      if (maxDocsIndexes > 0 && numDocs > maxDocsIndexes) {
+        if (LOG.isDebugEnabled())
+          LOG.debug("executeMerge because numDocs: " + numDocs + " more than maxDocsIndexes: " + maxDocsIndexes);
+        executeMerge(ramIndexSnapshots, snapshot);
+      } else if (size > maxRamIndexesSize) {
+        // merging based on size of ram indexes
+        executeMerge(ramIndexSnapshots, snapshot);
+      }
+    }
+
+    private void maybeMergeDiskIndexes(Snapshot snapshot) throws Exception {
+      Long snapshotId = snapshot.getSnapshotId();
+      List<IndexSnapshot> indexSnapshotsToMerge = new ArrayList<IndexSnapshot>();
+      for (DiskIndex diskIndex : snapshot.getDiskIndices()) {
+        DiskIndexSnapshot indexSnapshot = (DiskIndexSnapshot) diskIndex.getIndexSnapshot(snapshotId);
+        if (diskIndex.hasTooManyDeletedDocs(mergeDiskDeletedPercent)) {
+          indexSnapshotsToMerge.add(indexSnapshot);
+        }
+      }
+      if (indexSnapshotsToMerge.size() > 0) {
+        executeMerge(indexSnapshotsToMerge, snapshot);
+      }
+    }
+
+    /**
+     * converts current memorywriteableindex to a ramindex
+     * 
+     * @param snapshot
+     * @throws Exception
+     */
+    private void maybeMergeWriteable(Snapshot snapshot) throws Exception {
+      MemoryIndexSnapshot writeableIndexSnapshot = snapshot.getWriteableSnapshot();
+      int maxDoc = writeableIndexSnapshot.getIndexReader().maxDoc();
+      if (maxDoc >= memoryIndexMaxDocs) {
+        if (LOG.isInfoEnabled())
+          LOG.info("merge writeable");
+        commitLock.lock();
+        try {
+          long indexIdNum = ramIndexSequence.getAndIncrement();
+          IndexID indexId = new IndexID(indexIdNum, "memory");
+          RamIndex ramIndex = new RamIndex(indexId, writeableIndexSnapshot);
+          indexes.add(ramIndex);
+          IndexSnapshot ramIndexSnapshot = ramIndex.getLatestIndexSnapshot();
+          assert ramIndexSnapshot.maxDoc() == maxDoc;
+          Snapshot currentSnapshot = snapshots.getLatestSnapshot();
+          List<IndexID> removeIndexIds = new ArrayList<IndexID>();
+          removeIndexIds.add(writeableIndexSnapshot.getIndex().getId());
+
+          // create new WriteableMemoryIndex for the new snapshot because the
+          // one that was there
+          // has been converted to a RamIndex
+          WriteableMemoryIndex newWriteableMemoryIndex = newWriteableMemoryIndex();
+          MemoryIndexSnapshot newMemoryIndexSnapshot = newWriteableMemoryIndex.setSnapshot(snapshot.getSnapshotId());
+          Snapshot newSnapshot = currentSnapshot.createMinor(removeIndexIds, newMemoryIndexSnapshot, ramIndex.getLatestIndexSnapshot());
+          snapshots.add(newSnapshot, true);
+          if (LOG.isInfoEnabled())
+            LOG.info("merge writeable completed");
+        } finally {
+          commitLock.unlock();
+        }
+      }
+    }
+
+    /**
+     * Takes snapshots and makes a DiskIndex.
+     * 
+     * @param indexSnapshots
+     * @param snapshot
+     * @throws Exception
+     */
+    private void executeMerge(List<? extends IndexSnapshot> indexSnapshots, Snapshot snapshot) throws Exception {
+      if (indexSnapshots.size() == 0)
+        return;
+      Long snapshotId = snapshot.getSnapshotId();
+      Long indexIdNum = diskIndexSequence.getAndIncrement();
+      IndexID indexId = new IndexID(indexIdNum, "disk");
+      Directory directory = directoryMap.create(indexIdNum+"_index");
+      // initial creation happens outside of commitlock because it is the most
+      // time consuming
+      // the deletes occur inside the commitlock as they are faster
+      DiskIndex newDiskIndex = new DiskIndex(indexId, directory, indexSnapshots, TransactionSystem.this);
+      indexes.add(newDiskIndex);
+      commitLock.lock();
+      try {
+        // TODO: probably can just save deletes from the batches
+        List<SlaveBatch> deleteOnlySlaveBatches = new ArrayList<SlaveBatch>();
+        Snapshot currentSnapshot = snapshots.getLatestSnapshot();
+        Long latestSnapshotId = currentSnapshot.getSnapshotId();
+        if (!snapshotId.equals(latestSnapshotId)) {
+          SlaveBatchIterator iterator = transactionLog.getSlaveBatchIterator(snapshotId);
+          while (iterator.hasNext()) {
+            SlaveBatch slaveBatch = iterator.next(false, true);
+            deleteOnlySlaveBatches.add(slaveBatch);
+          }
+        }
+        IndexSnapshot newIndexSnapshot = newDiskIndex.initialize(latestSnapshotId, deleteOnlySlaveBatches, TransactionSystem.this);
+        List<IndexID> removeIndexIds = new ArrayList<IndexID>();
+        for (IndexSnapshot indexSnapshot : indexSnapshots) {
+          Index index = indexSnapshot.getIndex();
+          removeIndexIds.add(index.getId());
+        }
+        StringBuilder builder = new StringBuilder();
+        Iterator<? extends IndexSnapshot> iterator = indexSnapshots.iterator();
+        while (iterator.hasNext()) {
+          IndexSnapshot indexSnapshot = iterator.next();
+          builder.append(indexSnapshot.getIndex().getId().toString());
+          if (iterator.hasNext()) {
+            builder.append(", ");
+          }
+        }
+        builder.append(" ").append(" indexes written to disk index: ").append(indexId.toString());
+        LOG.info(builder.toString());
+        Snapshot newSnapshot = currentSnapshot.createMinor(removeIndexIds, newIndexSnapshot);
+        snapshots.add(newSnapshot, true);
+      } finally {
+        commitLock.unlock();
+      }
+    }
+  }
+
+  /**
+   * Commits a batch to the transaction log
+   * 
+   * @param batch
+   * @return CommitResult
+   * @throws Exception
+   * @throws IOException
+   */
+  CommitResult commitBatch(Batch batch) throws Exception, IOException {
+    batch.close();
+    commitLock.lock();
+    try {
+      Long snapshotId = null;
+      if (batch instanceof SlaveBatch) {
+        snapshotId = ((SlaveBatch) batch).getId();
+      } else {
+        MasterBatch masterBatch = (MasterBatch) batch;
+        snapshotId = transactionLog.getNextId();
+        if (batch.hasDocuments()) {
+          Documents documents = batch.getDocuments();
+          for (Document document : documents) {
+            Long documentId = documentSequence.getAndIncrement();
+            Util.setValue(Constants.DOCUMENTID, documentId, document);
+            Util.setValue(Constants.SNAPSHOTID, snapshotId, document);
+          }
+          if (documents.size() >= memoryIndexMaxDocs) {
+            RAMDirectory ramDirectory = createRamDirectory(documents, batch.getAnalyzer());
+            masterBatch.setRAMDirectory(ramDirectory);
+          }
+        }
+      }
+      // ExecutorService threadPool = getCommitThreadPool();
+      Snapshot currentSnapshot = snapshots.getLatestSnapshot();
+      MemoryIndexSnapshot writeableIndexSnapshot = currentSnapshot.getWriteableSnapshot();
+      WriteableMemoryIndex writeableMemoryIndex = (WriteableMemoryIndex) writeableIndexSnapshot.getIndex();
+      List<Index> nonWriteableIndices = currentSnapshot.getDeleteOnlyIndices();
+      Transaction transaction = null;
+      CommitResult commitResult = null;
+      try {
+        Long previousId = transactionLog.getPreviousId(snapshotId);
+        transaction = new Transaction(snapshotId, previousId, batch, writeableMemoryIndex, nonWriteableIndices, this);
+        commitResult = transaction.getCommitResult();
+      } catch (Exception exception) {
+        LOG.error("transaction failed");
+        throw new Exception("transaction failed", exception);
+      }
+      List<IndexSnapshot> indexSnapshots = new ArrayList<IndexSnapshot>(nonWriteableIndices.size() + 1);
+      for (Index index : nonWriteableIndices) {
+        IndexSnapshot snapshot = index.getIndexSnapshot(snapshotId);
+        assert snapshot != null;
+        indexSnapshots.add(snapshot);
+      }
+      for (IndexSnapshot newIndexSnapshot : transaction.getNewIndexSnapshots()) {
+        assert newIndexSnapshot != null;
+        indexes.add(newIndexSnapshot.getIndex());
+        indexSnapshots.add(newIndexSnapshot);
+      }
+      assert snapshotId == transaction.getId();
+      MemoryIndexSnapshot newWriteableSnapshot = writeableMemoryIndex.getIndexSnapshot(snapshotId);
+      assert newWriteableSnapshot != null;
+      indexSnapshots.add(newWriteableSnapshot);
+
+      Snapshot newSnapshot = new Snapshot(snapshotId, 0, newWriteableSnapshot, indexSnapshots, this, System.currentTimeMillis());
+      snapshots.add(newSnapshot, true);
+      docChangesSinceLastMerge += commitResult.getNumDocChanges();
+      int writeableMaxDoc = writeableMemoryIndex.getLatestIndexSnapshot().getIndexReader().maxDoc();
+      if (docChangesSinceLastMerge > maybeMergeDocChanges || writeableMaxDoc >= memoryIndexMaxDocs) {
+        System.out.println("docChangesSinceLastMerge: " + docChangesSinceLastMerge + " maybeMergeDocChanges: " + maybeMergeDocChanges);
+        System.out.println("writeableMaxDoc: " + writeableMaxDoc + " memoryIndexMaxDocs: " + memoryIndexMaxDocs);
+        // only submit if nothing is currently executing or pending
+        if (mergeQueue.size() == 0) {
+          mergeThreadPool.submit(new MaybeMergeIndices());
+          docChangesSinceLastMerge = 0;
+        }
+      }
+      deleteUnreferencedSnapshots();
+      return commitResult;
+    } finally {
+      commitLock.unlock();
+    }
+  }
+
+  RAMDirectory createRamDirectory(Documents documents, Analyzer analyzer) throws Exception {
+    RAMDirectory ramDirectory = new RAMDirectory();
+    ExecutorService threadPool = getCommitThreadPool();
+    IndexCreator indexCreator = new IndexCreator(ramDirectory, Long.MAX_VALUE, 4, analyzer, threadPool);
+    BlockingQueue<IndexCreator.Add> addQueue = new ArrayBlockingQueue<IndexCreator.Add>(1000, true);
+    indexCreator.start(addQueue);
+    for (Document document : documents) {
+      addQueue.add(new IndexCreator.Add(document));
+    }
+    indexCreator.create();
+    return ramDirectory;
+  }
+
+  private WriteableMemoryIndex newWriteableMemoryIndex() throws Exception {
+    Long indexIdNum = ramIndexSequence.getAndIncrement();
+    IndexID indexId = new IndexID(indexIdNum, "memory");
+    WriteableMemoryIndex writeableMemoryIndex = new WriteableMemoryIndex(indexId, this);
+    indexes.add(writeableMemoryIndex);
+    return writeableMemoryIndex;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/util/ByteArrayOutputStream.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/ByteArrayOutputStream.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/ByteArrayOutputStream.java	(revision 0)
@@ -0,0 +1,185 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.ocean.util.ByteBufferPool.ByteBuffer;
+import org.apache.lucene.ocean.util.ByteBufferPool.ByteBuffers;
+
+
+public class ByteArrayOutputStream extends OutputStream {
+	private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
+
+	private List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
+	/** The index of the current buffer. */
+	private int currentBufferIndex;
+	/** The total count of bytes in all the filled buffers. */
+	private int filledBufferSum;
+	/** The current buffer. */
+	private ByteBuffer currentBuffer;
+	/** The total count of bytes written. */
+	private int count;
+	private ByteBufferPool byteBufferPool;
+
+	/**
+	 * Creates a new byte array output stream. The buffer capacity is initially
+	 * 1024 bytes, though its size increases if necessary.
+	 */
+	public ByteArrayOutputStream(ByteBufferPool byteBufferPool) {
+		this.byteBufferPool = byteBufferPool;
+	}
+  
+	public ByteBuffers getByteBuffers() {
+		return new ByteBuffers(buffers, size());
+	}
+	
+	/**
+	 * Return the appropriate <code>byte[]</code> buffer specified by index.
+	 * 
+	 * @param index
+	 *          the index of the buffer required
+	 * @return the buffer
+	 */
+	private ByteBuffer getBuffer(int index) {
+		return (ByteBuffer) buffers.get(index);
+	}
+
+	/**
+	 * Makes a new buffer available either by allocating a new one or re-cycling
+	 * an existing one.
+	 * 
+	 * @param newcount
+	 *          the size of the buffer if one is created
+	 */
+	private void needNewBuffer(int newcount) {
+		if (currentBufferIndex < buffers.size() - 1) {
+			// Recycling old buffer
+			filledBufferSum += currentBuffer.getBytes().length;
+
+			currentBufferIndex++;
+			currentBuffer = getBuffer(currentBufferIndex);
+		} else {
+			// Creating new buffer
+			int newBufferSize = 1024 * 16;
+			/**
+			 * if (currentBuffer == null) { newBufferSize = newcount; filledBufferSum =
+			 * 0; } else { newBufferSize = Math.max(currentBuffer.getBytes().length <<
+			 * 1, newcount - filledBufferSum); filledBufferSum +=
+			 * currentBuffer.getBytes().length; }
+			 */
+			currentBufferIndex++;
+			currentBuffer = byteBufferPool.get(newBufferSize);
+			buffers.add(currentBuffer);
+		}
+	}
+
+	/**
+	 * @see java.io.OutputStream#write(byte[], int, int)
+	 */
+	public void write(byte[] b, int off, int len) {
+		if ((off < 0) || (off > b.length) || (len < 0) || ((off + len) > b.length) || ((off + len) < 0)) {
+			throw new IndexOutOfBoundsException();
+		} else if (len == 0) {
+			return;
+		}
+		int newcount = count + len;
+		int remaining = len;
+		int inBufferPos = count - filledBufferSum;
+		while (remaining > 0) {
+			int part = Math.min(remaining, currentBuffer.getBytes().length - inBufferPos);
+			System.arraycopy(b, off + len - remaining, currentBuffer, inBufferPos, part);
+			remaining -= part;
+			if (remaining > 0) {
+				needNewBuffer(newcount);
+				inBufferPos = 0;
+			}
+		}
+		count = newcount;
+	}
+
+	/**
+	 * @see java.io.OutputStream#write(int)
+	 */
+	public void write(int b) {
+		int inBufferPos = count - filledBufferSum;
+		if (inBufferPos == currentBuffer.getBytes().length) {
+			needNewBuffer(count + 1);
+			inBufferPos = 0;
+		}
+		currentBuffer.getBytes()[inBufferPos] = (byte) b;
+		count++;
+	}
+
+	/**
+	 * @see java.io.ByteArrayOutputStream#size()
+	 */
+	public int size() {
+		return count;
+	}
+
+	/**
+	 * Closing a <tt>ByteArrayOutputStream</tt> has no effect. The methods in
+	 * this class can be called after the stream has been closed without
+	 * generating an <tt>IOException</tt>.
+	 * 
+	 * @throws IOException
+	 *           never (this method should not declare this exception but it has
+	 *           to now due to backwards compatability)
+	 */
+	public void close() {
+		for (ByteBuffer byteBuffer : buffers) {
+			byteBuffer.finished();
+		}
+	}
+
+	/**
+	 * @see java.io.ByteArrayOutputStream#reset()
+	 */
+	public void reset() {
+		close();
+		count = 0;
+		filledBufferSum = 0;
+		currentBufferIndex = 0;
+		currentBuffer = getBuffer(currentBufferIndex);
+	}
+
+	/**
+	 * Writes the entire contents of this byte stream to the specified output
+	 * stream.
+	 * 
+	 * @param out
+	 *          the output stream to write to
+	 * @throws IOException
+	 *           if an I/O error occurs, such as if the stream is closed
+	 * @see java.io.ByteArrayOutputStream#writeTo(OutputStream)
+	 */
+	public void writeTo(OutputStream out) throws IOException {
+		int remaining = count;
+		for (int i = 0; i < buffers.size(); i++) {
+			byte[] buf = getBuffer(i).getBytes();
+			int c = Math.min(buf.length, remaining);
+			out.write(buf, 0, c);
+			remaining -= c;
+			if (remaining == 0) {
+				break;
+			}
+		}
+	}
+
+	/**
+	 * Gets the curent contents of this byte stream as a byte array. The result is
+	 * independent of this stream.
+	 * 
+	 * @return the current contents of this output stream, as a byte array
+	 * @see java.io.ByteArrayOutputStream#toByteArray()
+	 * 
+	 * public synchronized byte[] toByteArray() { int remaining = count; if
+	 * (remaining == 0) { return EMPTY_BYTE_ARRAY; } byte newbuf[] = new
+	 * byte[remaining]; int pos = 0; for (int i = 0; i < buffers.size(); i++) {
+	 * byte[] buf = getBuffer(i); int c = Math.min(buf.length, remaining);
+	 * System.arraycopy(buf, 0, newbuf, pos, c); pos += c; remaining -= c; if
+	 * (remaining == 0) { break; } } return newbuf; }
+	 */
+}
Index: ocean/src/org/apache/lucene/ocean/util/ByteBufferPool.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/ByteBufferPool.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/ByteBufferPool.java	(revision 0)
@@ -0,0 +1,181 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.concurrent.locks.ReentrantLock;
+
+public class ByteBufferPool {
+  private ReentrantLock lock = new ReentrantLock();
+	private TreeSet<ByteBuffer> available = new TreeSet<ByteBuffer>();
+	private Set<ByteBuffer> inuse = new HashSet<ByteBuffer>();
+	private int maxCount;
+  
+	public ByteBufferPool(int initialSize, int count, int maxCount) {
+		this.maxCount = maxCount;
+		for (int x=0; x < count; x++) {
+			get(initialSize);
+		}
+	}
+	
+	public static class ByteBuffers {
+		private List<ByteBuffer> byteBuffers;
+		private int length;
+		
+		public ByteBuffers(RandomAccessFile randomAccessFile, int bufferSize, int size, ByteBufferPool bufferPool) throws IOException {
+			this.length = size;
+			int numRead = 0;
+			byteBuffers = new ArrayList<ByteBuffer>();
+			while (true) {
+				if (numRead >= size) break;
+				ByteBuffer byteBuffer = bufferPool.get(bufferSize);
+				int len = byteBuffer.getBytes().length;
+				if ( (len + numRead) > size) {
+					len = size - numRead;
+				}
+				int n = randomAccessFile.read(byteBuffer.getBytes(), 0, len);
+				if (n == -1) break;
+				byteBuffers.add(byteBuffer);
+				numRead += n;
+			}
+		}
+		
+		public ByteBuffers(InputStream inputStream, int bufferSize, int size, ByteBufferPool bufferPool) throws IOException {
+			this.length = size;
+			int numRead = 0;
+			byteBuffers = new ArrayList<ByteBuffer>();
+			while (true) {
+				if (numRead >= size) break;
+				ByteBuffer byteBuffer = bufferPool.get(bufferSize);
+				int len = byteBuffer.getBytes().length;
+				if ( (len + numRead) > size) {
+					len = size - numRead;
+				}
+				int n = inputStream.read(byteBuffer.getBytes(), 0, len);
+				if (n == -1) break;
+				byteBuffers.add(byteBuffer);
+				numRead += n;
+			}
+		}
+		
+		public ByteBuffers(ByteBuffer byteBuffer, int length) {
+			byteBuffers = new ArrayList<ByteBuffer>(1);
+			byteBuffers.add(byteBuffer);
+			this.length = length;
+		}
+		
+		public ByteBuffers(List<ByteBuffer> byteBuffers, int length) {
+			this.byteBuffers = byteBuffers;
+			this.length = length;
+		}
+    
+		public InputStream getInputStream() throws IOException {
+			return new ByteArrayInputStream(this);
+		}
+		
+		public void writeTo(DataOutput out) throws IOException {
+			int remaining = length;
+			for (int i = 0; i < byteBuffers.size(); i++) {
+				byte[] buf = byteBuffers.get(i).getBytes();
+				int c = Math.min(buf.length, remaining);
+				out.write(buf, 0, c);
+				remaining -= c;
+				if (remaining == 0) {
+					break;
+				}
+			}
+		}
+		
+		public void finished() {
+			for (ByteBuffer byteBuffer : byteBuffers) {
+				byteBuffer.finished();
+			}
+		}
+		
+		public List<ByteBuffer> getByteBuffers() {
+			return byteBuffers;
+		}
+
+		public int getLength() {
+			return length;
+		}
+	}
+	
+	public static class ByteBuffer implements Comparable<ByteBuffer> {
+		private byte[] bytes;
+		private ByteBufferPool byteBufferPool;
+		
+		public ByteBuffer(int size, ByteBufferPool byteBufferPool) {
+			this.bytes = new byte[size];
+			this.byteBufferPool = byteBufferPool;
+		}
+		
+		public void finished() {
+			byteBufferPool.finished(this);
+		}
+		
+		public int compareTo(ByteBuffer other) {
+			return new Integer(size()).compareTo(other.size());
+		}
+		
+		public int size() {
+			return bytes.length;
+		}
+		
+		public byte[] getBytes() {
+			return bytes;
+		}
+	}
+	
+	private void finished(ByteBuffer byteBuffer) {
+		lock.lock();
+		try {
+			inuse.remove(byteBuffer);
+			available.add(byteBuffer);
+		} finally {
+			lock.unlock();
+		}
+	}
+	
+	private void checkCount() {
+		lock.lock();
+		try {
+			int dif = available.size() - maxCount;
+			if (dif > 0) {
+				int count = 0;
+				Iterator<ByteBuffer> iterator = available.iterator();
+				while (iterator.hasNext() && count < dif) {
+					iterator.next();
+					iterator.remove();
+					count++;
+				}
+			}
+		} finally {
+			lock.unlock();
+		}
+	}
+	
+	public ByteBuffer get(int size) {
+		lock.lock();
+		try {
+			ByteBuffer byteBuffer = null;
+			if (available.size() > 0) available.last();
+			if (byteBuffer == null || size > byteBuffer.size()) {
+				byteBuffer = new ByteBuffer(size, this);
+			} else {
+				available.remove(byteBuffer);
+			}
+			inuse.add(byteBuffer);
+			return byteBuffer;
+		} finally {
+			lock.unlock();
+		}
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/util/DocumentSerializer.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/DocumentSerializer.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/DocumentSerializer.java	(revision 0)
@@ -0,0 +1,24 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.IOException;
+
+import org.apache.commons.lang.SerializationUtils;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+
+public class DocumentSerializer {
+  
+  public static void write(Document document, IndexOutput output) throws IOException {
+    byte[] bytes = SerializationUtils.serialize(document);
+    output.writeVInt(bytes.length);
+    output.writeBytes(bytes, bytes.length);
+  }
+  
+  public static Document read(IndexInput input) throws IOException {
+    int length = input.readVInt();
+    byte[] bytes = new byte[length];
+    input.readBytes(bytes, 0, length);
+    return (Document)SerializationUtils.deserialize(bytes);
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/util/LongSequence.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/LongSequence.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/LongSequence.java	(revision 0)
@@ -0,0 +1,43 @@
+package org.apache.lucene.ocean.util;
+
+import java.util.concurrent.locks.ReentrantLock;
+
/**
 * A thread-safe sequence of longs that advances by a fixed increment.
 * {@link #getAndIncrement()} returns the current value and atomically
 * advances the sequence.
 */
public class LongSequence {
  // AtomicLong provides the same atomicity as the previous
  // ReentrantLock-based implementation without per-call lock overhead
  // (fully qualified to avoid touching the file's import block)
  private final java.util.concurrent.atomic.AtomicLong value;
  private final int increment;

  /**
   * @param value the first value the sequence will hand out
   * @param increment how far the sequence advances per getAndIncrement()
   */
  public LongSequence(long value, int increment) {
    this.value = new java.util.concurrent.atomic.AtomicLong(value);
    this.increment = increment;
  }

  /** @return the current value without advancing the sequence */
  public long get() {
    return value.get();
  }

  /** Overwrites the current value. */
  public void set(long i) {
    value.set(i);
  }

  /** @return the current value, atomically advancing by the increment */
  public long getAndIncrement() {
    return value.getAndAdd(increment);
  }
}
Index: ocean/src/org/apache/lucene/ocean/util/FastInputStream.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/FastInputStream.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/FastInputStream.java	(revision 0)
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.ocean.util;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+
+/** Single threaded buffered InputStream
+ *  Internal Solr use only, subject to change.
+ */
+public class FastInputStream extends InputStream implements DataInput {
+  private final InputStream in;
+  private final byte[] buf;
+  private int pos;
+  private int end;
+
+  public FastInputStream(InputStream in) {
+  // use default BUFSIZE of BufferedOutputStream so if we wrap that
+  // it won't cause double buffering.
+    this(in, new byte[8192], 0, 0);
+  }
+
+  public FastInputStream(InputStream in, byte[] tempBuffer, int start, int end) {
+    this.in = in;
+    this.buf = tempBuffer;
+    this.pos = start;
+    this.end = end;
+  }
+
+
+  public static FastInputStream wrap(InputStream in) {
+    return (in instanceof FastInputStream) ? (FastInputStream)in : new FastInputStream(in);
+  }
+
+  @Override
+  public int read() throws IOException {
+    if (pos >= end) {
+      refill();
+      if (pos >= end) return -1;
+    }
+    return buf[pos++] & 0xff;     
+  }
+
+  public int readUnsignedByte() throws IOException {
+    if (pos >= end) {
+      refill();
+      if (pos >= end) throw new EOFException();
+    }
+    return buf[pos++] & 0xff;
+  }
+
+  public void refill() throws IOException {
+    // this will set end to -1 at EOF
+    end = in.read(buf, 0, buf.length);
+    pos = 0;
+  }
+
+  @Override
+  public int available() throws IOException {
+    return end - pos;
+  }
+
+  @Override
+  public int read(byte b[], int off, int len) throws IOException {
+    int r=0;  // number of bytes read
+    // first read from our buffer;
+    if (end-pos > 0) {
+      r = Math.min(end-pos, len);
+      System.arraycopy(buf, pos, b, off, r);      
+      pos += r;
+    }
+
+    if (r == len) return r;
+
+    // amount left to read is >= buffer size
+    if (len-r >= buf.length) {
+      int ret = in.read(b, off+r, len-r);
+      if (ret==-1) return r==0 ? -1 : r;
+      r += ret;
+      return r;
+    }
+
+    refill();
+
+    // first read from our buffer;
+    if (end-pos > 0) {
+      int toRead = Math.min(end-pos, len-r);
+      System.arraycopy(buf, pos, b, off+r, toRead);
+      pos += toRead;
+      r += toRead;
+      return r;
+    }
+    
+    return -1;
+  }
+
+  @Override
+  public void close() throws IOException {
+    in.close();
+  }
+
+  public void readFully(byte b[]) throws IOException {
+    readFully(b, 0, b.length);
+  }
+
+  public void readFully(byte b[], int off, int len) throws IOException {
+    while (len>0) {
+      int ret = read(b, off, len);
+      if (ret==-1) {
+        throw new EOFException();
+      }
+      off += ret;
+      len -= ret;
+    }
+  }
+
+  public int skipBytes(int n) throws IOException {
+    if (end-pos >= n) {
+      pos += n;
+      return n;
+    }
+
+    if (end-pos<0) return -1;
+    
+    int r = end-pos;
+    pos = end;
+
+    while (r < n) {
+      refill();
+      if (end-pos <= 0) return r;
+      int toRead = Math.min(end-pos, n-r);
+      r += toRead;
+      pos += toRead;
+    }
+
+    return r;
+  }
+
+  public boolean readBoolean() throws IOException {
+    return readByte()==1;
+  }
+
+  public byte readByte() throws IOException {
+    if (pos >= end) {
+      refill();
+      if (pos >= end) throw new EOFException();
+    }
+    return buf[pos++];
+  }
+
+
+  public short readShort() throws IOException {
+    return (short)((readUnsignedByte() << 8) | readUnsignedByte());
+  }
+
+  public int readUnsignedShort() throws IOException {
+    return (readUnsignedByte() << 8) | readUnsignedByte();
+  }
+
+  public char readChar() throws IOException {
+    return (char)((readUnsignedByte() << 8) | readUnsignedByte());
+  }
+
+  public int readInt() throws IOException {
+    return  ((readUnsignedByte() << 24)
+            |(readUnsignedByte() << 16)
+            |(readUnsignedByte() << 8)
+            | readUnsignedByte());
+  }
+
+  public long readLong() throws IOException {
+    return  (((long)readUnsignedByte()) << 56)
+            | (((long)readUnsignedByte()) << 48)
+            | (((long)readUnsignedByte()) << 40)
+            | (((long)readUnsignedByte()) << 32)
+            | (((long)readUnsignedByte()) << 24)
+            | (readUnsignedByte() << 16)
+            | (readUnsignedByte() << 8)
+            | (readUnsignedByte());
+  }
+
+  public float readFloat() throws IOException {
+    return Float.intBitsToFloat(readInt());    
+  }
+
+  public double readDouble() throws IOException {
+    return Double.longBitsToDouble(readLong());    
+  }
+
+  public String readLine() throws IOException {
+    return new DataInputStream(this).readLine();
+  }
+
+  public String readUTF() throws IOException {
+    return new DataInputStream(this).readUTF();
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/util/CElement.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/CElement.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/CElement.java	(revision 0)
@@ -0,0 +1,11 @@
+package org.apache.lucene.ocean.util;
+
+import org.jdom.Element;
+
+/**
+ *
+ * @author jasonr
+ */
/**
 * Implemented by objects that can serialize themselves to a JDOM
 * {@link Element} for XML output.
 */
public interface CElement {
  /**
   * @return a JDOM element representing this object
   * @throws Exception if the object cannot be serialized
   */
  public Element toElement() throws Exception;
}
Index: ocean/src/org/apache/lucene/ocean/util/Constants.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/Constants.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/Constants.java	(revision 0)
@@ -0,0 +1,8 @@
+package org.apache.lucene.ocean.util;
+
/**
 * Shared reserved field names used to tag documents in the index.
 * NOTE(review): the constant-interface pattern leaks constants into
 * implementers' namespaces; kept as an interface for source compatibility.
 */
public interface Constants {
  // String literals are interned by the JVM already (JLS 3.10.5), so the
  // previous explicit .intern() calls were redundant and have been removed.
  public static final String INDEXID = "_indexid";
  public static final String DOCUMENTID = "_documentid";
  public static final String SNAPSHOTID = "_snapshotid";
}
Index: ocean/src/org/apache/lucene/ocean/util/SortedList.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/SortedList.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/SortedList.java	(revision 0)
@@ -0,0 +1,329 @@
+package org.apache.lucene.ocean.util;
+
import java.util.AbstractCollection;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import org.apache.commons.lang.ObjectUtils;
+
/**
 * A {@link Map} backed by an {@link ArrayList} kept sorted by key: binary
 * search gives O(log n) lookups at the cost of O(n) inserts and removals.
 * Iteration over keys/values/entries is in ascending key order.
 * Not thread-safe. Keys must be Comparable and non-null; null values are
 * tolerated.
 */
public class SortedList<K extends Comparable<K>,V> implements Map<K,V> {
  // Invariant: always sorted by key, ascending.
  private List<SortedEntry<K,V>> entries = new ArrayList<SortedEntry<K,V>>();
  // Lazily created collection views, cached after first use.
  private transient Set<Map.Entry<K,V>> entrySet = null;
  transient volatile Set<K>        keySet = null;
  transient volatile Collection<V> values = null;

  public SortedList() {}

  public SortedList(Map<K,V> map) {
    putAll(map);
  }

  /**
   * Removes the entry whose key AND value both match {@code toRemove}; used
   * by the entry-set view's remove(). Returns the removed entry, or null.
   */
  private Map.Entry<K,V> removeMapping(Map.Entry<K,V> toRemove) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>((K) toRemove.getKey()));
    if (pos >= 0) {
      SortedEntry<K,V> entry = entries.get(pos);
      // Objects.equals is null-safe; the previous entry.value.equals(...)
      // threw NullPointerException for entries holding a null value.
      if (Objects.equals(entry.value, toRemove.getValue())) {
        entries.remove(pos);
        return entry;
      }
    }
    return null;
  }

  /** A key/value pair ordered by key so the backing list can be binary-searched. */
  public static class SortedEntry<K extends Comparable<K>,V> implements Map.Entry<K,V>, Comparable<SortedEntry<K,V>> {
    private K key;
    private V value;

    private SortedEntry(K key, V value) {
      this.key = key;
      this.value = value;
    }

    /** Probe constructor used only for binary searches; value stays null. */
    private SortedEntry(K key) {
      this.key = key;
    }

    public K setKey(K key) {
      K oldKey = this.key;
      this.key = key;
      return oldKey;
    }

    public V setValue(V value) {
      V oldValue = this.value;
      this.value = value;
      return oldValue;
    }

    public K getKey() {
      return key;
    }

    public V getValue() {
      return value;
    }

    public int compareTo(SortedEntry<K,V> other) {
      return key.compareTo(other.key);
    }

    // equals/hashCode follow the Map.Entry contract. Without them the
    // inherited identity-based equals made entrySet().contains()/remove()
    // fail for any entry constructed by a caller.
    public boolean equals(Object o) {
      if (!(o instanceof Map.Entry))
        return false;
      Map.Entry<?,?> e = (Map.Entry<?,?>) o;
      return Objects.equals(key, e.getKey()) && Objects.equals(value, e.getValue());
    }

    public int hashCode() {
      return (key == null ? 0 : key.hashCode()) ^ (value == null ? 0 : value.hashCode());
    }
  }

  public void putAll(Map<? extends K,? extends V> m) {
    for (Iterator<? extends Map.Entry<? extends K,? extends V>> i = m.entrySet().iterator(); i.hasNext();) {
      Map.Entry<? extends K,? extends V> e = i.next();
      put(e.getKey(), e.getValue());
    }
  }

  public boolean isEmpty() {
    return entries.size() == 0;
  }

  public void clear() {
    entries.clear();
  }

  public int size() {
    return entries.size();
  }

  /** Insertion point for {@code key}: its index when present, else where it belongs. */
  private int getPos(K key) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>(key));
    if (pos < 0)
      pos = -1 - pos;
    return pos;
  }

  public boolean containsValue(Object value) {
    // Linear scan: the list is sorted by key, not by value.
    for (SortedEntry<K,V> entry : entries) {
      if (Objects.equals(value, entry.value))
        return true;
    }
    return false;
  }

  public boolean containsKey(Object key) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>((K) key));
    return pos >= 0;
  }

  /** Highest key, or null when empty (unlike SortedMap.lastKey, no exception). */
  public K lastKey() {
    if (entries.size() == 0)
      return null;
    return entries.get(entries.size() - 1).key;
  }

  /** Value mapped to the highest key, or null when empty. */
  public V lastValue() {
    if (entries.size() == 0)
      return null;
    return entries.get(entries.size() - 1).value;
  }

  /** Standard Map.put: replaces an existing mapping, returns the old value or null. */
  public V put(K key, V value) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>(key));
    if (pos >= 0) {
      V oldValue = entries.get(pos).value;
      entries.set(pos, new SortedEntry<K,V>(key, value));
      return oldValue;
    }
    entries.add(-1 - pos, new SortedEntry<K,V>(key, value));
    return null;
  }

  private SortedEntry<K,V> getEntry(Object key) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>((K) key));
    return pos >= 0 ? entries.get(pos) : null;
  }

  public V get(Object key) {
    SortedEntry<K,V> entry = getEntry(key);
    return entry != null ? entry.value : null;
  }

  public V remove(Object key) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>((K) key));
    if (pos >= 0) {
      return entries.remove(pos).value;
    }
    return null;
  }

  /**
   * Inserts without replacing: an existing mapping for {@code key} is kept,
   * leaving duplicate keys in sorted position. NOTE(review): duplicates break
   * the Map contract — confirm callers depend on this before changing it.
   */
  public void add(K key, V value) {
    entries.add(getPos(key), new SortedEntry<K,V>(key, value));
  }

  /** Base iterator over entries in key order; remove() delegates to the list. */
  private abstract class SortedListIterator<E> implements Iterator<E> {
    private Iterator<SortedEntry<K,V>> iterator;

    public SortedListIterator() {
      iterator = entries.iterator();
    }

    protected Map.Entry<K,V> nextEntry() {
      return iterator.next();
    }

    public void remove() {
      iterator.remove();
    }

    public boolean hasNext() {
      return iterator.hasNext();
    }
  }

  private class ValueIterator extends SortedListIterator<V> {
    public V next() {
      return nextEntry().getValue();
    }
  }

  private class KeyIterator extends SortedListIterator<K> {
    public K next() {
      return nextEntry().getKey();
    }
  }

  private class EntryIterator extends SortedListIterator<Map.Entry<K,V>> {
    public Map.Entry<K,V> next() {
      return nextEntry();
    }
  }

  // Subclasses may override these to alter the views' iterator() behavior.
  Iterator<K> newKeyIterator() {
    return new KeyIterator();
  }

  Iterator<V> newValueIterator() {
    return new ValueIterator();
  }

  Iterator<Map.Entry<K,V>> newEntryIterator() {
    return new EntryIterator();
  }

  public Set<K> keySet() {
    Set<K> ks = keySet;
    return (ks != null ? ks : (keySet = new KeySet()));
  }

  private class KeySet extends AbstractSet<K> {
    public Iterator<K> iterator() {
      return newKeyIterator();
    }

    public int size() {
      return entries.size();
    }

    public boolean contains(Object o) {
      return containsKey(o);
    }

    public boolean remove(Object o) {
      return SortedList.this.remove(o) != null;
    }

    public void clear() {
      SortedList.this.clear();
    }
  }

  public Collection<V> values() {
    Collection<V> vs = values;
    return (vs != null ? vs : (values = new Values()));
  }

  private class Values extends AbstractCollection<V> {
    public Iterator<V> iterator() {
      return newValueIterator();
    }

    public int size() {
      return SortedList.this.size();
    }

    public boolean contains(Object o) {
      return containsValue(o);
    }

    public void clear() {
      SortedList.this.clear();
    }
  }

  public Set<Map.Entry<K,V>> entrySet() {
    Set<Map.Entry<K,V>> es = entrySet;
    return (es != null ? es : (entrySet = new EntrySet()));
  }

  // Properly generic now; the raw-typed original needed an unchecked
  // (Set<Map.Entry<K,V>>)(Set) double cast at every entrySet() call.
  private class EntrySet extends AbstractSet<Map.Entry<K,V>> {
    public Iterator<Map.Entry<K,V>> iterator() {
      return newEntryIterator();
    }

    public boolean contains(Object o) {
      if (!(o instanceof Map.Entry))
        return false;
      Map.Entry<K,V> e = (Map.Entry<K,V>) o;
      SortedEntry<K,V> candidate = getEntry(e.getKey());
      return candidate != null && candidate.equals(e);
    }

    public boolean remove(Object o) {
      return removeMapping((Map.Entry<K,V>) o) != null;
    }

    public int size() {
      return SortedList.this.size();
    }

    public void clear() {
      SortedList.this.clear();
    }
  }
}
Index: ocean/src/org/apache/lucene/ocean/util/Util.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/Util.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/Util.java	(revision 0)
@@ -0,0 +1,442 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.RandomAccessFile;
+import java.io.Reader;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.text.DateFormat;
+import java.text.DecimalFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeSet;
+import java.util.regex.Pattern;
+
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NameNotFoundException;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.ocean.LogDirectory;
+import org.apache.lucene.search.ExtendedFieldCache;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+
+public class Util {
+  public static TimeZone UTC = TimeZone.getTimeZone("UTC");
+  public final static Pattern splitPattern = Pattern.compile(",| ");
+  private static ThreadLocalDateFormat dateFormatThreadLocal = new ThreadLocalDateFormat();
+
+  public static String formatSnapshotId(BigDecimal id) {
+    DecimalFormat format = new DecimalFormat("##0.00");
+    return format.format(id);
+  }
+
+  /**
+   * Handles size 0 collections returns null
+   */
+  public static <T extends Object & Comparable<? super T>> T max(Collection<? extends T> coll) {
+    if (coll.size() == 0)
+      return null;
+    return Collections.max(coll);
+  }
+
+  public static void touchFile(String file, Directory directory) throws IOException {
+    IndexOutput output = directory.createOutput(file);
+    output.close();
+  }
+
+  public static String getString(String file, LogDirectory directory) throws IOException {
+    try {
+      RandomAccessFile input = directory.openInput(file);
+      if (input.length() == 0)
+        return null;
+      byte[] bytes = new byte[(int) input.length()];
+      input.read(bytes);
+      return new String(bytes, "UTF-8");
+    } catch (Throwable ioException) {
+      IOException newIOException = new IOException("file: "+file);
+      newIOException.initCause(ioException);
+      throw newIOException;
+    }
+  }
+
+  public static void save(String string, String file, LogDirectory directory) throws IOException {
+    byte[] bytes = string.getBytes("UTF-8");
+    RandomAccessFile output = directory.getOutput(file, true);
+    output.write(bytes);
+    output.getFD().sync();
+    output.close();
+  }
+
+  public static String getString(String file, Directory directory) throws IOException {
+    IndexInput input = directory.openInput(file);
+    byte[] bytes = new byte[(int) input.length()];
+    input.readBytes(bytes, 0, bytes.length);
+    return new String(bytes, "UTF-8");
+  }
+
+  public static void save(String string, String file, Directory directory) throws IOException {
+    byte[] bytes = string.getBytes("UTF-8");
+    IndexOutput output = directory.createOutput(file);
+    output.writeBytes(bytes, bytes.length);
+    output.flush();
+    output.close();
+  }
+
+  public static void copy(IndexInput input, IndexOutput output, byte[] buffer) throws IOException {
+    long len = input.length();
+    long readCount = 0;
+    while (readCount < len) {
+      int toRead = readCount + buffer.length > len ? (int) (len - readCount) : buffer.length;
+      input.readBytes(buffer, 0, toRead);
+      output.writeBytes(buffer, toRead);
+      readCount += toRead;
+    }
+  }
+
+  public static <K,V> V getLastValue(SortedMap<K,V> map) {
+    if (map.size() == 0)
+      return null;
+    return map.get(map.lastKey());
+  }
+
+  public static void setValue(String name, long value, Document document) {
+    String encoded = longToEncoded(value);
+    document.add(new Field(name, encoded, Field.Store.YES, Field.Index.UN_TOKENIZED));
+  }
+
+  public static boolean mkdir(File dir) {
+    if (!dir.exists()) {
+      return dir.mkdirs();
+    }
+    return false;
+  }
+
+  public static String longToEncoded(long value) {
+    return long2sortableStr(value);
+  }
+
+  public static long longFromEncoded(String string) {
+    return SortableStr2long(string, 0, 5);
+  }
+
+  public static String long2sortableStr(long val) {
+    char[] arr = new char[5];
+    long2sortableStr(val, arr, 0);
+    return new String(arr, 0, 5);
+  }
+
+  // uses binary representation of an int to build a string of
+  // chars that will sort correctly. Only char ranges
+  // less than 0xd800 will be used to avoid UCS-16 surrogates.
+  // we can use the lowest 15 bits of a char, (or a mask of 0x7fff)
+  public static int long2sortableStr(long val, char[] out, int offset) {
+    val += Long.MIN_VALUE;
+    out[offset++] = (char) (val >>> 60);
+    out[offset++] = (char) (val >>> 45 & 0x7fff);
+    out[offset++] = (char) (val >>> 30 & 0x7fff);
+    out[offset++] = (char) (val >>> 15 & 0x7fff);
+    out[offset] = (char) (val & 0x7fff);
+    return 5;
+  }
+
+  public static long SortableStr2long(String sval, int offset, int len) {
+    long val = (long) (sval.charAt(offset++)) << 60;
+    val |= ((long) sval.charAt(offset++)) << 45;
+    val |= ((long) sval.charAt(offset++)) << 30;
+    val |= sval.charAt(offset++) << 15;
+    val |= sval.charAt(offset);
+    val -= Long.MIN_VALUE;
+    return val;
+  }
+
+  public static int getDoc(String fieldName, long value, IndexReader indexReader) throws IOException {
+    String encoded = longToEncoded(value);
+    return getTermDoc(new Term(fieldName, encoded), indexReader);
+  }
+
+  public static int getTermDoc(Term term, IndexReader indexReader) throws IOException {
+    TermDocs docs = indexReader.termDocs(term);
+    try {
+      if (docs.next()) {
+        return docs.doc();
+      }
+    } finally {
+      docs.close();
+    }
+    return -1;
+  }
+
+  public static List<Integer> getTermDocs(Term term, IndexReader indexReader) throws IOException {
+    List<Integer> list = new ArrayList<Integer>();
+    TermDocs docs = indexReader.termDocs(term);
+    try {
+      while (docs.next()) {
+        list.add(docs.doc());
+      }
+    } finally {
+      docs.close();
+    }
+    return list;
+  }
+
+  public static long getSize(Directory directory) throws IOException {
+    long total = 0;
+    for (String file : directory.list()) {
+      total += directory.fileLength(file);
+    }
+    return total;
+  }
+
+  public static void copy(InputStream is, RandomAccessFile ras, byte[] buf) throws IOException {
+    int numRead;
+    while ((numRead = is.read(buf)) >= 0) {
+      ras.write(buf, 0, numRead);
+    }
+  }
+
+  public static File getDirectory(File root, String path) {
+    File file = new File(root, path);
+    if (!file.exists()) {
+      file.mkdirs();
+    }
+    return file;
+  }
+
+  public static DateFormat getThreadLocalDateFormat() {
+    return dateFormatThreadLocal.get();
+  }
+
+  public static String formatDate(Date date) {
+    DateFormat dateFormat = getThreadLocalDateFormat();
+    return dateFormat.format(date);
+  }
+
+  public static Date parseDate(String string) throws ParseException {
+    DateFormat dateFormat = getThreadLocalDateFormat();
+    return dateFormat.parse(string);
+  }
+
+  private static class ThreadLocalDateFormat extends ThreadLocal<DateFormat> {
+    DateFormat proto;
+
+    public ThreadLocalDateFormat() {
+      super();
+      SimpleDateFormat tmp = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.US);
+      tmp.setTimeZone(UTC);
+      proto = tmp;
+    }
+
+    protected DateFormat initialValue() {
+      return (DateFormat) proto.clone();
+    }
+  }
+
+  public static long[] getFieldCacheLong(String field, IndexReader indexReader) throws IOException {
+    return ExtendedFieldCache.EXT_DEFAULT.getLongs(indexReader, field);
+  }
+
+  public static boolean isTrue(String string) {
+    return StringUtils.equalsIgnoreCase("true", string);
+  }
+
+  public static Object construct(Object parameter, Class clazz) throws Exception {
+    Constructor constructor = clazz.getDeclaredConstructor(parameter.getClass());
+    try {
+      return constructor.newInstance(parameter);
+    } catch (InvocationTargetException invocationTargetException) {
+      Throwable cause = invocationTargetException.getCause();
+      if (cause instanceof Exception)
+        throw (Exception) cause;
+      else
+        throw invocationTargetException;
+    }
+  }
+
+  public static Long parseLong(String value) {
+    if (StringUtils.isBlank(value)) {
+      return null;
+    }
+    try {
+      return new Long(value);
+    } catch (Throwable ex) {
+      return null;
+    }
+  }
+
+  public static URL parseURL(String string) throws MalformedURLException {
+    if (StringUtils.isBlank(string)) {
+      return null;
+    }
+    return new URL(string);
+  }
+
+  public static List<String> readLines(Reader reader) throws Exception {
+    return IOUtils.readLines(reader);
+  }
+
+  public static IOException asIOException(String message, Throwable throwable) throws IOException {
+    IOException ioException = new IOException(message);
+    ioException.initCause(throwable);
+    return ioException;
+  }
+
+  public static IOException asIOException(Throwable throwable) {
+    if (throwable instanceof IOException) {
+      return (IOException) throwable;
+    }
+    IOException ioException = new IOException(throwable.getMessage());
+    ioException.initCause(throwable);
+    return ioException;
+  }
+
+  public static void copy(InputStream is, OutputStream os, byte[] buf) throws IOException {
+    int numRead;
+    while ((numRead = is.read(buf)) >= 0) {
+      os.write(buf, 0, numRead);
+    }
+  }
+
+  public static int[] toIntArray(List<Integer> list) {
+    int size = list.size();
+    int[] array = new int[size];
+    int x = 0;
+    for (int i : list) {
+      array[x] = i;
+      x++;
+    }
+    return array;
+  }
+
+  public static List<URL> loadUrls(File file) throws IOException {
+    List<String> lines = IOUtils.readLines(new FileReader(file));
+    List<URL> urls = new ArrayList<URL>();
+    for (String line : lines) {
+      urls.add(new URL(line));
+    }
+    return urls;
+  }
+
+  /**
+   * public static HttpParameters toHttpParameters(HttpServletRequest request) {
+   * try { if (StringUtils.equalsIgnoreCase("post", request.getMethod())) {
+   * HttpParameters parameters = new HttpParameters(); URL url = new
+   * URL(request.getRequestURL().toString()); CGIParser cgiParser = new
+   * CGIParser(url.toString(), "UTF-8"); for (String name :
+   * cgiParser.getParameterNameList()) { for (String value :
+   * cgiParser.getParameterValues(name)) { parameters.add(name, value); } }
+   * return parameters; } } catch (Exception exception) { throw new
+   * RuntimeException(exception); } if (StringUtils.equalsIgnoreCase("get",
+   * request.getMethod())) { HttpParameters parameters = new HttpParameters();
+   * Enumeration paramEnum = request.getParameterNames(); while
+   * (paramEnum.hasMoreElements()) { String name = (String)
+   * paramEnum.nextElement(); String[] array = request.getParameterValues(name);
+   * if (array != null && array.length > 0) { for (String value : array) {
+   * parameters.add(name, value); } } } return parameters; } throw new
+   * RuntimeException("unknown http method " + request.getMethod()); }
+   */
+  public static Long getNextServerSequence(Long value, int serverNumber) {
+    if (value == null) {
+      return new Long(serverNumber);
+    }
+    Long i = null;
+    if (value > 99) {
+      String string = value.toString();
+      String substring = string.substring(0, string.length() - 2);
+      i = new Long(substring + "00");
+    } else {
+      i = new Long(0);
+    }
+    long v = i + serverNumber;
+    return v + 100;
+  }
+
+  public static int getServerNumber(BigInteger id) {
+    String string = id.toString();
+    String substring = string.substring(string.length() - 2, string.length());
+    return Integer.parseInt(substring);
+  }
+
+  public static SortedSet<String> splitToSortedSet(String string) {
+    if (StringUtils.isBlank(string)) {
+      return null;
+    }
+    String[] array = splitPattern.split(string.trim(), 0);
+    TreeSet<String> sortedSet = new TreeSet<String>();
+    for (int x = 0; x < array.length; x++) {
+      sortedSet.add(array[x]);
+    }
+    return sortedSet;
+  }
+
+  public static File getAppServerHome() {
+    return new File(System.getProperty("catalina.home"));
+  }
+
+  public static File getHomeDirectory(String name, File defaultDirectory) throws Exception {
+    String value = System.getenv(name);
+    if (value != null)
+      return new File(value);
+    value = System.getProperty(name);
+    if (value != null)
+      return new File(value);
+    Context context = (Context) new InitialContext().lookup("java:comp/env");
+    try {
+      String string = (String) context.lookup(name);
+      if (StringUtils.isNotBlank(value))
+        return new File(value);
+    } catch (NameNotFoundException nameNotFoundException) {
+    }
+    defaultDirectory.mkdirs();
+    return defaultDirectory;
+  }
+
+  public static Object getFirst(List list) {
+    Iterator iterator = list.iterator();
+    if (iterator.hasNext()) {
+      return iterator.next();
+    } else {
+      return null;
+    }
+  }
+
+  public static Map<String,String> toMapExcept(org.jdom.Element element, String exceptName) {
+    Map<String,String> map = new HashMap<String,String>();
+    for (Object object : element.getAttributes()) {
+      org.jdom.Attribute attribute = (org.jdom.Attribute) object;
+      String name = attribute.getName();
+      if (!StringUtils.equals(name, exceptName)) {
+        map.put(name, attribute.getValue());
+      }
+    }
+    return map;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/util/ByteArrayInputStream.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/ByteArrayInputStream.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/ByteArrayInputStream.java	(revision 0)
@@ -0,0 +1,97 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.ocean.util.ByteBufferPool.ByteBuffer;
+import org.apache.lucene.ocean.util.ByteBufferPool.ByteBuffers;
+
+public class ByteArrayInputStream extends InputStream {
+	private List<ByteBuffer> byteBuffers;
+  private long length;
+
+  private ByteBuffer currentBuffer;
+  private int currentBufferIndex;
+  
+  private int bufferPosition;
+  private long bufferStart;
+  private int bufferLength;
+  private int bufferSize;
+  
+  public ByteArrayInputStream(ByteBuffers byteBuffers) throws IOException {
+  	this.length = byteBuffers.getLength();
+  	int numRead = 0;
+  	this.byteBuffers = byteBuffers.getByteBuffers();
+    currentBufferIndex = -1;
+    currentBuffer = null;
+  }
+  
+  public ByteArrayInputStream(int bufferSize, InputStream input, int length, ByteBufferPool byteBufferPool) throws IOException {
+  	this.length = length;
+  	int numRead = 0;
+  	byteBuffers = new ArrayList<ByteBuffer>();
+		while (true) {
+			ByteBuffer byteBuffer = byteBufferPool.get(bufferSize);
+			int n = input.read(byteBuffer.getBytes());
+			if (n == -1) {
+				byteBuffer.finished();
+				break;
+			}
+			numRead += n;
+			byteBuffers.add(byteBuffer);
+		}
+		if (length != numRead) {
+			throw new IOException("num read different than length");
+		}
+    currentBufferIndex = -1;
+    currentBuffer = null;
+  }
+
+  public void close() {
+    for (ByteBuffer byteBuffer : byteBuffers) {
+    	byteBuffer.finished();
+    }
+  }
+
+  public long length() {
+    return length;
+  }
+
+  public int read() throws IOException {
+    if (bufferPosition >= bufferLength) {
+      currentBufferIndex++;
+      switchCurrentBuffer();
+    }
+    return currentBuffer.getBytes()[bufferPosition++];
+  }
+
+  public void readBytes(byte[] b, int offset, int len) throws IOException {
+    while (len > 0) {
+      if (bufferPosition >= bufferLength) {
+        currentBufferIndex++;
+        switchCurrentBuffer();
+      }
+      int remainInBuffer = bufferLength - bufferPosition;
+      int bytesToCopy = len < remainInBuffer ? len : remainInBuffer;
+      System.arraycopy(currentBuffer, bufferPosition, b, offset, bytesToCopy);
+      offset += bytesToCopy;
+      len -= bytesToCopy;
+      bufferPosition += bytesToCopy;
+    }
+  }
+
+  private final void switchCurrentBuffer() throws IOException {
+    if (currentBufferIndex >= byteBuffers.size()) {
+      // end of file reached, no more buffers left
+      throw new IOException("Read past EOF");
+    } else {
+      currentBuffer = byteBuffers.get(currentBufferIndex);
+      bufferPosition = 0;
+      bufferStart = (long) bufferSize * (long) currentBufferIndex;
+      long buflen = length - bufferStart;
+      bufferLength = buflen > bufferSize ? bufferSize : (int) buflen;
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/util/XMLUtil.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/XMLUtil.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/XMLUtil.java	(revision 0)
@@ -0,0 +1,444 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringReader;
+import java.io.Writer;
+import java.lang.reflect.Field;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.BooleanUtils;
+import org.apache.commons.lang.StringUtils;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.Namespace;
+import org.jdom.Verifier;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.xml.sax.EntityResolver;
+import org.xml.sax.InputSource;
+
+/**
+ * 
+ * @author Jason Rutherglen
+ */
+public class XMLUtil {
+	public static Logger log = Logger.getLogger(XMLUtil.class.getName());
+
+	public XMLUtil() {
+	}
+
+	public static Collection<Attribute> getAttributes(Element element) {
+		return (Collection<Attribute>)element.getAttributes();
+	}
+	
+	public static String printAndDetach(Element element) throws Exception {
+		List<Element> list = element.cloneContent();
+		Element root = null;
+		if (list.size() == 1) {
+			root = list.get(0);
+		} else {
+			root = new Element("root");
+			root.addContent(list);
+		}
+		return outputElement(root);
+	}
+
+	public static Object getProperValue(Field field, String string) throws Exception {
+		if (!field.getType().isAssignableFrom(String.class)) {
+			if (field.getType().isAssignableFrom(URL.class)) {
+				return new URL(string);
+			} else if (field.getType().isAssignableFrom(Date.class)) {
+				return Util.parseDate(string);
+			} else if (field.getType().isAssignableFrom(File.class)) {
+				return new File(string);
+			} else if (field.getType().isAssignableFrom(Long.class)) {
+				return new Long(string);
+			} else if (field.getType().isAssignableFrom(Double.class)) {
+				return new Double(string);
+			}
+			throw new Exception("unknown type " + field.getType().getName());
+		} else
+			return string;
+	}
+
+	public static void addAll(List<Element> children, Element parent) {
+		for (Element element : children) {
+			parent.addContent(element);
+		}
+	}
+
+	public static void printList(String rootName, List list, PrintWriter writer) throws Exception {
+		Format format = Format.getPrettyFormat();
+		format.setLineSeparator("\n");
+		XMLOutputter outputter = new XMLOutputter(format);
+		Element root = new Element(rootName);
+		for (Object object : list) {
+			if (object instanceof CElement) {
+				CElement cElement = (CElement) object;
+				root.addContent(cElement.toElement());
+			}
+		}
+		Document document = new Document();
+		document.addContent(root);
+		outputter.output(document, writer);
+	}
+
+	public static List<Element> getChildren(String xml) throws Exception {
+		return getChildren(parseElement(xml));
+	}
+
+	public static List<Element> getChildren(String name, Element element) {
+		List<Element> elements = (List<Element>) element.getChildren(name);
+		if (element == null)
+			return new ArrayList<Element>(0);
+		else
+			return elements;
+	}
+
+	public static List<Element> getChildren(Element element) {
+		List<Element> elements = (List<Element>) element.getChildren();
+		if (element == null)
+			return new ArrayList<Element>(0);
+		else
+			return elements;
+	}
+
+	public static Element getFirstChild(Element element) {
+		return (Element) Util.getFirst(element.getChildren());
+	}
+
+	public static void setAttribute(String name, Object value, Element element) {
+		if (value == null) {
+			return;
+		}
+		if (value instanceof Date) {
+			value = Util.formatDate((Date) value);
+		}
+		element.setAttribute(name, value.toString());
+	}
+
+	public static java.net.URL getChildURL(String name, Element element) {
+		String childText = element.getChildText(name);
+		try {
+			return new java.net.URL(childText);
+		} catch (Throwable th) {
+		}
+		return null;
+	}
+
+	public static Element getChild(String name, Element root) {
+		return root.getChild(name);
+	}
+
+	public static java.math.BigInteger getChildBigInteger(String name, Element element) {
+		String childText = element.getChildText(name);
+		try {
+			return new java.math.BigInteger(childText);
+		} catch (Throwable throwable) {
+		}
+		return null;
+	}
+
+	public static Long getChildLong(String name, Element element) throws NumberFormatException {
+		String childText = element.getChildText(name);
+		return new Long(childText);
+	}
+
+	public static String getAttributeString(String name, Element element) {
+		String text = element.getAttributeValue(name);
+		return text;
+	}
+  
+	public static Integer getAttributeInteger(String name, Element element) throws NumberFormatException {
+		String text = element.getAttributeValue(name);
+		if (text == null || text.equals("")) {
+			return null;
+		}
+		return new Integer(text);
+	}
+	
+	public static Float getAttributeFloat(String name, Element element) throws NumberFormatException {
+    String text = element.getAttributeValue(name);
+    if (text == null || text.equals("")) {
+      return null;
+    }
+    return new Float(text);
+  }
+	
+	public static BigDecimal getAttributeBigDecimal(String name, Element element) throws NumberFormatException {
+    String text = element.getAttributeValue(name);
+    if (text == null || text.equals("")) {
+      return null;
+    }
+    return new BigDecimal(text);
+  }
+	
+	public static Long getAttributeLong(String name, Element element) throws NumberFormatException {
+		String text = element.getAttributeValue(name);
+		if (text == null || text.equals("")) {
+			return null;
+		}
+		return new Long(text);
+	}
+
+	public static Date getAttributeDate(String name, Element element) throws ParseException {
+		String text = element.getAttributeValue(name);
+		if (text == null || text.equals("")) {
+			return null;
+		}
+		return Util.parseDate(text);
+	}
+
+	public static Boolean getChildBoolean(String name, Element element) {
+		String text = element.getChildText(name);
+		if (StringUtils.isBlank(text)) {
+			return null;
+		}
+		return BooleanUtils.toBooleanObject(text);
+	}
+
+	public static boolean getAttributeBooleanPrimitive(String name, Element element) {
+		String text = element.getAttributeValue(name);
+		if (StringUtils.isBlank(text)) {
+			return false;
+		}
+		return BooleanUtils.toBooleanObject(text);
+	}
+
+	public static Boolean getAttributeBoolean(String name, Element element) {
+		String text = element.getAttributeValue(name);
+		if (StringUtils.isBlank(text)) {
+			return null;
+		}
+		return BooleanUtils.toBooleanObject(text);
+	}
+
+	public static Date getChildDate(String name, Element element) throws ParseException {
+		String text = element.getChildText(name);
+		if (text == null || text.equals("")) {
+			return null;
+		}
+		return parseDate(text);
+	}
+
+	public static Date parseDate(String dateStr) throws ParseException {
+		if (org.apache.commons.lang.StringUtils.isEmpty(dateStr)) {
+			return null;
+		}
+		return Util.parseDate(dateStr);
+	}
+
+	public static String formatDate(Date date) {
+		if (date == null) {
+			return "";
+		}
+		return Util.formatDate(date);
+	}
+
+	public static String getChildText(String name, Element element) {
+		return element.getChildText(name);
+	}
+
+	public static Integer getChildInteger(String name, Element element) {
+		try {
+			String text = element.getChildText(name);
+			return new Integer(text);
+		} catch (Throwable ex) {
+		}
+		return null;
+	}
+
+	public static Double getChildDouble(String name, Element element) throws NumberFormatException {
+		try {
+			String text = element.getChildText(name);
+			return new Double(text);
+		} catch (Throwable ex) {
+		}
+		return null;
+	}
+
+	public static void outputElement(Element element, Writer writer) throws Exception {
+		XMLOutputter xmlOut = new XMLOutputter(Format.getPrettyFormat());
+		xmlOut.output(new Document(element), writer);
+	}
+
+	public static String outputElement(Element element) throws Exception {
+		XMLOutputter xmlOut = new XMLOutputter(Format.getPrettyFormat());
+		String xmlString = xmlOut.outputString(new Document(element));
+		return xmlString;
+	}
+
+	public static String outputElementMinimal(Element element) throws Exception {
+		Format format = Format.getCompactFormat();
+		format.setOmitDeclaration(true);
+		XMLOutputter xmlOut = new XMLOutputter(format);
+		String xmlString = xmlOut.outputString(new Document(element));
+		return xmlString;
+	}
+
+	public static String outputElementOmitDeclaration(Element element) {
+		Format format = Format.getPrettyFormat();
+		format.setOmitDeclaration(true);
+		XMLOutputter xmlOut = new XMLOutputter(format);
+		String xmlString = xmlOut.outputString(new Document(element));
+		return xmlString;
+	}
+
+	public static String outputDocument(Document document) throws Exception {
+		XMLOutputter xmlOut = new XMLOutputter(Format.getPrettyFormat());
+		String xmlString = xmlOut.outputString(document);
+		return xmlString;
+	}
+
+	public static String removeInvalidXMLChars(String value) {
+		StringBuffer buffer = new StringBuffer();
+		char[] array = value.toCharArray();
+		for (int x = 0; x < array.length; x++) {
+			if (Verifier.isXMLCharacter(array[x])) {
+				buffer.append(array[x]);
+			}
+		}
+		return buffer.toString();
+	}
+
+	public static Element createTextElement(String name, Object object, Element parentElement) {
+		if (object == null) {
+			return null;
+		}
+
+		String text = null;
+		if (object instanceof Date) {
+			Date date = (Date) object;
+
+			text = formatDate(date);
+		} else {
+			text = object.toString();
+		}
+		Element element = new Element(name);
+		// XMLOutputter outputter = new XMLOutputter();
+		// text = outputter.escapeElementEntities(text);
+		// text = removeInvalidXMLChars(text);
+		element.setText(text);
+
+		parentElement.addContent(element);
+
+		return element;
+	}
+
+	public static Element createTextElement(String name, Object object, Namespace namespace, Element parentElement) {
+		if (object == null) {
+			return null;
+		}
+
+		String text = null;
+		if (object instanceof File) {
+			text = ((File) object).getAbsolutePath();
+		} else {
+			text = object.toString();
+		}
+		Element element = new Element(name, namespace);
+		XMLOutputter outputter = new XMLOutputter();
+		text = outputter.escapeElementEntities(text);
+		text = removeInvalidXMLChars(text);
+		element.setText(text);
+
+		parentElement.addContent(element);
+
+		return element;
+	}
+
+	public static void saveXML(Element element, File file) throws IOException {
+		Document document = new Document();
+		document.addContent(element);
+		saveXML(document, file);
+	}
+
+	public static void saveXML(Document document, File file) throws IOException {
+		File parentDir = file.getParentFile();
+		if (!parentDir.exists()) {
+			parentDir.mkdirs();
+		}
+		Format format = Format.getPrettyFormat();
+		format.setLineSeparator("\n");
+		XMLOutputter outputter = new XMLOutputter(format);
+		String channelXMLStr = outputter.outputString(document);
+		FileUtils.writeStringToFile(file, channelXMLStr, "UTF-8");
+	}
+
+	/**
+	 * public static XmlPullParser parseDocumentSAX(InputStream input) throws
+	 * Exception { XmlPullParserFactory factory =
+	 * XmlPullParserFactory.newInstance("org.xmlpull.mxp1.MXParserFactory", null);
+	 * factory.setNamespaceAware(false); factory.setValidating(false);
+	 * XmlPullParser xpp = factory.newPullParser(); xpp.setInput(input, "UTF-8");
+	 * return xpp; }
+	 */
+	public static File getChildFile(String name, Element element) throws Exception {
+		String path = element.getChildTextTrim(name);
+		if (StringUtils.isBlank(path)) {
+			return null;
+		}
+		return new File(path);
+	}
+
+	public static Document parseDocument(File file) throws XMLException, IOException {
+		String xml = FileUtils.readFileToString(file, "UTF-8");
+		return parseDocument(xml);
+	}
+
+	public static Element parseElement(File file) throws XMLException, IOException {
+		if (!file.exists()) {
+			return null;
+		}
+		String xml = FileUtils.readFileToString(file, "UTF-8");
+		return parseElement(xml);
+	}
+
+	public static Element parseElement(String xml) throws XMLException {
+		Document document = parseDocument(xml);
+		return document.getRootElement();
+	}
+  
+	public static class XMLException extends Exception {
+		public XMLException(String message, Throwable throwable) {
+			super(message, throwable);
+		}
+	}
+	
+	public static Document parseDocument(String xml) throws XMLException {
+		if (StringUtils.isBlank(xml))
+			throw new IllegalArgumentException("xml blank"); 
+		EntityResolver RESOLVER = new EmptyEntityResolver();
+		SAXBuilder saxBuilder = new SAXBuilder(false);
+		saxBuilder.setEntityResolver(RESOLVER);
+		try {
+			Document document = saxBuilder.build(new StringReader(xml));
+			return document;
+		} catch (Exception exception) {
+			throw new XMLException(xml, exception);
+		}
+	}
+
+	public static class EmptyEntityResolver implements EntityResolver {
+		public InputSource resolveEntity(String publicId, String systemId) {
+			InputSource EMPTY_INPUTSOURCE = new InputSource(new ByteArrayInputStream(new byte[0]));
+			return EMPTY_INPUTSOURCE;
+			// if (systemId != null && systemId.endsWith(".dtd")) return
+			// EMPTY_INPUTSOURCE;
+			// return null;
+		}
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/util/ByteArrayInputStream.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/ByteArrayInputStream.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/ByteArrayInputStream.java	(revision 0)
@@ -0,0 +1,97 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.ocean.util.ByteBufferPool.ByteBuffer;
+import org.apache.lucene.ocean.util.ByteBufferPool.ByteBuffers;
+
+public class ByteArrayInputStream extends InputStream {
+	private List<ByteBuffer> byteBuffers;
+  private long length;
+
+  private ByteBuffer currentBuffer;
+  private int currentBufferIndex;
+  
+  private int bufferPosition;
+  private long bufferStart;
+  private int bufferLength;
+  private int bufferSize;
+  
+  public ByteArrayInputStream(ByteBuffers byteBuffers) throws IOException {
+  	this.length = byteBuffers.getLength();
+  	int numRead = 0;
+  	this.byteBuffers = byteBuffers.getByteBuffers();
+    currentBufferIndex = -1;
+    currentBuffer = null;
+  }
+  
+  public ByteArrayInputStream(int bufferSize, InputStream input, int length, ByteBufferPool byteBufferPool) throws IOException {
+  	this.length = length;
+  	int numRead = 0;
+  	byteBuffers = new ArrayList<ByteBuffer>();
+		while (true) {
+			ByteBuffer byteBuffer = byteBufferPool.get(bufferSize);
+			int n = input.read(byteBuffer.getBytes());
+			if (n == -1) {
+				byteBuffer.finished();
+				break;
+			}
+			numRead += n;
+			byteBuffers.add(byteBuffer);
+		}
+		if (length != numRead) {
+			throw new IOException("num read different than length");
+		}
+    currentBufferIndex = -1;
+    currentBuffer = null;
+  }
+
+  public void close() {
+    for (ByteBuffer byteBuffer : byteBuffers) {
+    	byteBuffer.finished();
+    }
+  }
+
+  public long length() {
+    return length;
+  }
+
+  public int read() throws IOException {
+    if (bufferPosition >= bufferLength) {
+      currentBufferIndex++;
+      switchCurrentBuffer();
+    }
+    return currentBuffer.getBytes()[bufferPosition++];
+  }
+
+  public void readBytes(byte[] b, int offset, int len) throws IOException {
+    while (len > 0) {
+      if (bufferPosition >= bufferLength) {
+        currentBufferIndex++;
+        switchCurrentBuffer();
+      }
+      int remainInBuffer = bufferLength - bufferPosition;
+      int bytesToCopy = len < remainInBuffer ? len : remainInBuffer;
+      System.arraycopy(currentBuffer, bufferPosition, b, offset, bytesToCopy);
+      offset += bytesToCopy;
+      len -= bytesToCopy;
+      bufferPosition += bytesToCopy;
+    }
+  }
+
+  private final void switchCurrentBuffer() throws IOException {
+    if (currentBufferIndex >= byteBuffers.size()) {
+      // end of file reached, no more buffers left
+      throw new IOException("Read past EOF");
+    } else {
+      currentBuffer = byteBuffers.get(currentBufferIndex);
+      bufferPosition = 0;
+      bufferStart = (long) bufferSize * (long) currentBufferIndex;
+      long buflen = length - bufferStart;
+      bufferLength = buflen > bufferSize ? bufferSize : (int) buflen;
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/util/ByteArrayOutputStream.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/ByteArrayOutputStream.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/ByteArrayOutputStream.java	(revision 0)
@@ -0,0 +1,185 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.ocean.util.ByteBufferPool.ByteBuffer;
+import org.apache.lucene.ocean.util.ByteBufferPool.ByteBuffers;
+
+
+public class ByteArrayOutputStream extends OutputStream {
+	private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
+
+	private List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
+	/** The index of the current buffer. */
+	private int currentBufferIndex;
+	/** The total count of bytes in all the filled buffers. */
+	private int filledBufferSum;
+	/** The current buffer. */
+	private ByteBuffer currentBuffer;
+	/** The total count of bytes written. */
+	private int count;
+	private ByteBufferPool byteBufferPool;
+
+	/**
+	 * Creates a new byte array output stream. The buffer capacity is initially
+	 * 1024 bytes, though its size increases if necessary.
+	 */
+	public ByteArrayOutputStream(ByteBufferPool byteBufferPool) {
+		this.byteBufferPool = byteBufferPool;
+	}
+  
+	public ByteBuffers getByteBuffers() {
+		return new ByteBuffers(buffers, size());
+	}
+	
+	/**
+	 * Return the appropriate <code>byte[]</code> buffer specified by index.
+	 * 
+	 * @param index
+	 *          the index of the buffer required
+	 * @return the buffer
+	 */
+	private ByteBuffer getBuffer(int index) {
+		return (ByteBuffer) buffers.get(index);
+	}
+
+	/**
+	 * Makes a new buffer available either by allocating a new one or re-cycling
+	 * an existing one.
+	 * 
+	 * @param newcount
+	 *          the size of the buffer if one is created
+	 */
+	private void needNewBuffer(int newcount) {
+		if (currentBufferIndex < buffers.size() - 1) {
+			// Recycling old buffer
+			filledBufferSum += currentBuffer.getBytes().length;
+
+			currentBufferIndex++;
+			currentBuffer = getBuffer(currentBufferIndex);
+		} else {
+			// Creating new buffer
+			int newBufferSize = 1024 * 16;
+			/**
+			 * if (currentBuffer == null) { newBufferSize = newcount; filledBufferSum =
+			 * 0; } else { newBufferSize = Math.max(currentBuffer.getBytes().length <<
+			 * 1, newcount - filledBufferSum); filledBufferSum +=
+			 * currentBuffer.getBytes().length; }
+			 */
+			currentBufferIndex++;
+			currentBuffer = byteBufferPool.get(newBufferSize);
+			buffers.add(currentBuffer);
+		}
+	}
+
+	/**
+	 * @see java.io.OutputStream#write(byte[], int, int)
+	 */
+	public void write(byte[] b, int off, int len) {
+		if ((off < 0) || (off > b.length) || (len < 0) || ((off + len) > b.length) || ((off + len) < 0)) {
+			throw new IndexOutOfBoundsException();
+		} else if (len == 0) {
+			return;
+		}
+		int newcount = count + len;
+		int remaining = len;
+		int inBufferPos = count - filledBufferSum;
+		while (remaining > 0) {
+			int part = Math.min(remaining, currentBuffer.getBytes().length - inBufferPos);
+			System.arraycopy(b, off + len - remaining, currentBuffer, inBufferPos, part);
+			remaining -= part;
+			if (remaining > 0) {
+				needNewBuffer(newcount);
+				inBufferPos = 0;
+			}
+		}
+		count = newcount;
+	}
+
+	/**
+	 * @see java.io.OutputStream#write(int)
+	 */
+	public void write(int b) {
+		int inBufferPos = count - filledBufferSum;
+		if (inBufferPos == currentBuffer.getBytes().length) {
+			needNewBuffer(count + 1);
+			inBufferPos = 0;
+		}
+		currentBuffer.getBytes()[inBufferPos] = (byte) b;
+		count++;
+	}
+
+	/**
+	 * @see java.io.ByteArrayOutputStream#size()
+	 */
+	public int size() {
+		return count;
+	}
+
+	/**
+	 * Closing a <tt>ByteArrayOutputStream</tt> has no effect. The methods in
+	 * this class can be called after the stream has been closed without
+	 * generating an <tt>IOException</tt>.
+	 * 
+	 * @throws IOException
+	 *           never (this method should not declare this exception but it has
+	 *           to now due to backwards compatability)
+	 */
+	public void close() {
+		for (ByteBuffer byteBuffer : buffers) {
+			byteBuffer.finished();
+		}
+	}
+
+	/**
+	 * @see java.io.ByteArrayOutputStream#reset()
+	 */
+	public void reset() {
+		close();
+		count = 0;
+		filledBufferSum = 0;
+		currentBufferIndex = 0;
+		currentBuffer = getBuffer(currentBufferIndex);
+	}
+
+	/**
+	 * Writes the entire contents of this byte stream to the specified output
+	 * stream.
+	 * 
+	 * @param out
+	 *          the output stream to write to
+	 * @throws IOException
+	 *           if an I/O error occurs, such as if the stream is closed
+	 * @see java.io.ByteArrayOutputStream#writeTo(OutputStream)
+	 */
+	public void writeTo(OutputStream out) throws IOException {
+		int remaining = count;
+		for (int i = 0; i < buffers.size(); i++) {
+			byte[] buf = getBuffer(i).getBytes();
+			int c = Math.min(buf.length, remaining);
+			out.write(buf, 0, c);
+			remaining -= c;
+			if (remaining == 0) {
+				break;
+			}
+		}
+	}
+
+	/**
+	 * Gets the curent contents of this byte stream as a byte array. The result is
+	 * independent of this stream.
+	 * 
+	 * @return the current contents of this output stream, as a byte array
+	 * @see java.io.ByteArrayOutputStream#toByteArray()
+	 * 
+	 * public synchronized byte[] toByteArray() { int remaining = count; if
+	 * (remaining == 0) { return EMPTY_BYTE_ARRAY; } byte newbuf[] = new
+	 * byte[remaining]; int pos = 0; for (int i = 0; i < buffers.size(); i++) {
+	 * byte[] buf = getBuffer(i); int c = Math.min(buf.length, remaining);
+	 * System.arraycopy(buf, 0, newbuf, pos, c); pos += c; remaining -= c; if
+	 * (remaining == 0) { break; } } return newbuf; }
+	 */
+}
Index: ocean/src/org/apache/lucene/ocean/util/ByteBufferPool.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/ByteBufferPool.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/ByteBufferPool.java	(revision 0)
@@ -0,0 +1,181 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.concurrent.locks.ReentrantLock;
+
+public class ByteBufferPool {
+  private ReentrantLock lock = new ReentrantLock();
+	private TreeSet<ByteBuffer> available = new TreeSet<ByteBuffer>();
+	private Set<ByteBuffer> inuse = new HashSet<ByteBuffer>();
+	private int maxCount;
+  
+	public ByteBufferPool(int initialSize, int count, int maxCount) {
+		this.maxCount = maxCount;
+		for (int x=0; x < count; x++) {
+			get(initialSize);
+		}
+	}
+	
+	public static class ByteBuffers {
+		private List<ByteBuffer> byteBuffers;
+		private int length;
+		
+		public ByteBuffers(RandomAccessFile randomAccessFile, int bufferSize, int size, ByteBufferPool bufferPool) throws IOException {
+			this.length = size;
+			int numRead = 0;
+			byteBuffers = new ArrayList<ByteBuffer>();
+			while (true) {
+				if (numRead >= size) break;
+				ByteBuffer byteBuffer = bufferPool.get(bufferSize);
+				int len = byteBuffer.getBytes().length;
+				if ( (len + numRead) > size) {
+					len = size - numRead;
+				}
+				int n = randomAccessFile.read(byteBuffer.getBytes(), 0, len);
+				if (n == -1) break;
+				byteBuffers.add(byteBuffer);
+				numRead += n;
+			}
+		}
+		
+		public ByteBuffers(InputStream inputStream, int bufferSize, int size, ByteBufferPool bufferPool) throws IOException {
+			this.length = size;
+			int numRead = 0;
+			byteBuffers = new ArrayList<ByteBuffer>();
+			while (true) {
+				if (numRead >= size) break;
+				ByteBuffer byteBuffer = bufferPool.get(bufferSize);
+				int len = byteBuffer.getBytes().length;
+				if ( (len + numRead) > size) {
+					len = size - numRead;
+				}
+				int n = inputStream.read(byteBuffer.getBytes(), 0, len);
+				if (n == -1) break;
+				byteBuffers.add(byteBuffer);
+				numRead += n;
+			}
+		}
+		
+		public ByteBuffers(ByteBuffer byteBuffer, int length) {
+			byteBuffers = new ArrayList<ByteBuffer>(1);
+			byteBuffers.add(byteBuffer);
+			this.length = length;
+		}
+		
+		public ByteBuffers(List<ByteBuffer> byteBuffers, int length) {
+			this.byteBuffers = byteBuffers;
+			this.length = length;
+		}
+    
+		public InputStream getInputStream() throws IOException {
+			return new ByteArrayInputStream(this);
+		}
+		
+		public void writeTo(DataOutput out) throws IOException {
+			int remaining = length;
+			for (int i = 0; i < byteBuffers.size(); i++) {
+				byte[] buf = byteBuffers.get(i).getBytes();
+				int c = Math.min(buf.length, remaining);
+				out.write(buf, 0, c);
+				remaining -= c;
+				if (remaining == 0) {
+					break;
+				}
+			}
+		}
+		
+		public void finished() {
+			for (ByteBuffer byteBuffer : byteBuffers) {
+				byteBuffer.finished();
+			}
+		}
+		
+		public List<ByteBuffer> getByteBuffers() {
+			return byteBuffers;
+		}
+
+		public int getLength() {
+			return length;
+		}
+	}
+	
+	public static class ByteBuffer implements Comparable<ByteBuffer> {
+		private byte[] bytes;
+		private ByteBufferPool byteBufferPool;
+		
+		public ByteBuffer(int size, ByteBufferPool byteBufferPool) {
+			this.bytes = new byte[size];
+			this.byteBufferPool = byteBufferPool;
+		}
+		
+		public void finished() {
+			byteBufferPool.finished(this);
+		}
+		
+		public int compareTo(ByteBuffer other) {
+			return new Integer(size()).compareTo(other.size());
+		}
+		
+		public int size() {
+			return bytes.length;
+		}
+		
+		public byte[] getBytes() {
+			return bytes;
+		}
+	}
+	
+	private void finished(ByteBuffer byteBuffer) {
+		lock.lock();
+		try {
+			inuse.remove(byteBuffer);
+			available.add(byteBuffer);
+		} finally {
+			lock.unlock();
+		}
+	}
+	
+	private void checkCount() {
+		lock.lock();
+		try {
+			int dif = available.size() - maxCount;
+			if (dif > 0) {
+				int count = 0;
+				Iterator<ByteBuffer> iterator = available.iterator();
+				while (iterator.hasNext() && count < dif) {
+					iterator.next();
+					iterator.remove();
+					count++;
+				}
+			}
+		} finally {
+			lock.unlock();
+		}
+	}
+	
+	public ByteBuffer get(int size) {
+		lock.lock();
+		try {
+			ByteBuffer byteBuffer = null;
+			if (available.size() > 0) available.last();
+			if (byteBuffer == null || size > byteBuffer.size()) {
+				byteBuffer = new ByteBuffer(size, this);
+			} else {
+				available.remove(byteBuffer);
+			}
+			inuse.add(byteBuffer);
+			return byteBuffer;
+		} finally {
+			lock.unlock();
+		}
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/util/CElement.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/CElement.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/CElement.java	(revision 0)
@@ -0,0 +1,11 @@
+package org.apache.lucene.ocean.util;
+
+import org.jdom.Element;
+
/**
 * Contract for objects that can render themselves as a JDOM {@link Element};
 * used by XMLUtil.printList to serialize heterogeneous lists.
 *
 * @author jasonr
 */
public interface CElement {
  // Converts this object to its XML element representation.
  public Element toElement() throws Exception;
}
Index: ocean/src/org/apache/lucene/ocean/util/Constants.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/Constants.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/Constants.java	(revision 0)
@@ -0,0 +1,8 @@
+package org.apache.lucene.ocean.util;
+
/**
 * Reserved internal field names used by Ocean to tag documents with index,
 * document, and snapshot identity.
 */
public interface Constants {
  //public static final String ID = "_id";
  // Note: the explicit .intern() calls were removed — string literals are
  // already interned by the JVM, so the values are byte-identical.
  public static final String INDEXID = "_indexid";
  public static final String DOCUMENTID = "_documentid";
  public static final String SNAPSHOTID = "_snapshotid";
}
Index: ocean/src/org/apache/lucene/ocean/util/DocumentSerializer.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/DocumentSerializer.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/DocumentSerializer.java	(revision 0)
@@ -0,0 +1,24 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.IOException;
+
+import org.apache.commons.lang.SerializationUtils;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+
/**
 * Serializes Lucene {@link Document}s to and from Lucene index streams using
 * Java serialization (via commons-lang SerializationUtils), length-prefixed
 * with a VInt.
 *
 * NOTE(review): Java-native deserialization of untrusted bytes is unsafe;
 * this assumes the IndexInput comes from a trusted local index — confirm.
 */
public class DocumentSerializer {
  
  // Writes the serialized document as a VInt byte-length followed by the bytes.
  public static void write(Document document, IndexOutput output) throws IOException {
    byte[] bytes = SerializationUtils.serialize(document);
    output.writeVInt(bytes.length);
    output.writeBytes(bytes, bytes.length);
  }
  
  // Reads a VInt byte-length, then that many bytes, and deserializes them.
  public static Document read(IndexInput input) throws IOException {
    int length = input.readVInt();
    byte[] bytes = new byte[length];
    input.readBytes(bytes, 0, length);
    return (Document)SerializationUtils.deserialize(bytes);
  }
}
Index: ocean/src/org/apache/lucene/ocean/util/FastInputStream.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/FastInputStream.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/FastInputStream.java	(revision 0)
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.ocean.util;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+
/**
 * Single threaded buffered InputStream that also implements {@link DataInput}
 * with big-endian semantics (like {@link DataInputStream}).
 * Not safe for concurrent use.
 */
public class FastInputStream extends InputStream implements DataInput {
  private final InputStream in;
  private final byte[] buf;
  private int pos;  // index of the next unread byte in buf
  private int end;  // number of valid bytes in buf; becomes -1 after a refill at EOF

  public FastInputStream(InputStream in) {
  // use default BUFSIZE of BufferedOutputStream so if we wrap that
  // it won't cause double buffering.
    this(in, new byte[8192], 0, 0);
  }

  /** Wraps {@code in}, reusing {@code tempBuffer} whose valid region is [start, end). */
  public FastInputStream(InputStream in, byte[] tempBuffer, int start, int end) {
    this.in = in;
    this.buf = tempBuffer;
    this.pos = start;
    this.end = end;
  }

  /** Returns {@code in} itself if it is already a FastInputStream, otherwise wraps it. */
  public static FastInputStream wrap(InputStream in) {
    return (in instanceof FastInputStream) ? (FastInputStream)in : new FastInputStream(in);
  }

  @Override
  public int read() throws IOException {
    if (pos >= end) {
      refill();
      if (pos >= end) return -1;
    }
    return buf[pos++] & 0xff;
  }

  /** Like {@link #read()} but throws {@link EOFException} instead of returning -1. */
  public int readUnsignedByte() throws IOException {
    if (pos >= end) {
      refill();
      if (pos >= end) throw new EOFException();
    }
    return buf[pos++] & 0xff;
  }

  /** Refills the buffer from the underlying stream, resetting pos to 0. */
  public void refill() throws IOException {
    // this will set end to -1 at EOF
    end = in.read(buf, 0, buf.length);
    pos = 0;
  }

  /** Reports only the bytes buffered here; does not consult the wrapped stream. */
  @Override
  public int available() throws IOException {
    return end - pos;
  }

  @Override
  public int read(byte b[], int off, int len) throws IOException {
    int r=0;  // number of bytes read so far
    // first drain our buffer
    if (end-pos > 0) {
      r = Math.min(end-pos, len);
      System.arraycopy(buf, pos, b, off, r);
      pos += r;
    }

    if (r == len) return r;

    // amount left to read is >= buffer size: bypass the buffer entirely
    if (len-r >= buf.length) {
      int ret = in.read(b, off+r, len-r);
      if (ret==-1) return r==0 ? -1 : r;
      r += ret;
      return r;
    }

    refill();

    // copy the remainder from the refilled buffer
    if (end-pos > 0) {
      int toRead = Math.min(end-pos, len-r);
      System.arraycopy(buf, pos, b, off+r, toRead);
      pos += toRead;
      r += toRead;
      return r;
    }

    // FIX: previously this returned -1 unconditionally, silently discarding
    // any bytes already copied out of the buffer above when the underlying
    // stream hit EOF (and making readFully() throw EOFException with data
    // lost).  Per the InputStream contract, return the count actually read
    // and only signal -1 when nothing was read at all.
    return r > 0 ? r : -1;
  }

  @Override
  public void close() throws IOException {
    in.close();
  }

  public void readFully(byte b[]) throws IOException {
    readFully(b, 0, b.length);
  }

  public void readFully(byte b[], int off, int len) throws IOException {
    while (len>0) {
      int ret = read(b, off, len);
      if (ret==-1) {
        throw new EOFException();
      }
      off += ret;
      len -= ret;
    }
  }

  // NOTE(review): returns -1 once the stream is exhausted rather than 0 as
  // DataInput.skipBytes specifies; kept as-is for existing callers — confirm
  // before changing.
  public int skipBytes(int n) throws IOException {
    if (end-pos >= n) {
      pos += n;
      return n;
    }

    if (end-pos<0) return -1;
    
    int r = end-pos;
    pos = end;

    while (r < n) {
      refill();
      if (end-pos <= 0) return r;
      int toRead = Math.min(end-pos, n-r);
      r += toRead;
      pos += toRead;
    }

    return r;
  }

  /** True only for the exact byte value 1 (DataOutput writes booleans as 1/0). */
  public boolean readBoolean() throws IOException {
    return readByte()==1;
  }

  public byte readByte() throws IOException {
    if (pos >= end) {
      refill();
      if (pos >= end) throw new EOFException();
    }
    return buf[pos++];
  }

  public short readShort() throws IOException {
    return (short)((readUnsignedByte() << 8) | readUnsignedByte());
  }

  public int readUnsignedShort() throws IOException {
    return (readUnsignedByte() << 8) | readUnsignedByte();
  }

  public char readChar() throws IOException {
    return (char)((readUnsignedByte() << 8) | readUnsignedByte());
  }

  public int readInt() throws IOException {
    return  ((readUnsignedByte() << 24)
            |(readUnsignedByte() << 16)
            |(readUnsignedByte() << 8)
            | readUnsignedByte());
  }

  public long readLong() throws IOException {
    return  (((long)readUnsignedByte()) << 56)
            | (((long)readUnsignedByte()) << 48)
            | (((long)readUnsignedByte()) << 40)
            | (((long)readUnsignedByte()) << 32)
            | (((long)readUnsignedByte()) << 24)
            // the low three bytes fit safely in int arithmetic
            | (readUnsignedByte() << 16)
            | (readUnsignedByte() << 8)
            | (readUnsignedByte());
  }

  public float readFloat() throws IOException {
    return Float.intBitsToFloat(readInt());    
  }

  public double readDouble() throws IOException {
    return Double.longBitsToDouble(readLong());    
  }

  /** Delegates to DataInputStream.readLine (deprecated there; Latin-1 semantics). */
  public String readLine() throws IOException {
    return new DataInputStream(this).readLine();
  }

  /** Reads a modified-UTF-8 string as written by DataOutput.writeUTF. */
  public String readUTF() throws IOException {
    return new DataInputStream(this).readUTF();
  }
}
Index: ocean/src/org/apache/lucene/ocean/util/LongSequence.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/LongSequence.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/LongSequence.java	(revision 0)
@@ -0,0 +1,43 @@
+package org.apache.lucene.ocean.util;
+
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
+
/**
 * Thread-safe sequence of longs that advances by a fixed increment.
 *
 * Rewritten on top of {@link AtomicLong}: the previous version guarded a
 * plain long with a ReentrantLock on every access, which reimplements what
 * AtomicLong.get/set/getAndAdd already provide lock-free.
 */
public class LongSequence {
  private final AtomicLong value;  // current sequence value
  private final int increment;     // step applied by getAndIncrement()

  /**
   * @param value     the initial sequence value
   * @param increment the amount added on each getAndIncrement() call
   */
  public LongSequence(long value, int increment) {
    this.value = new AtomicLong(value);
    this.increment = increment;
  }

  /** Returns the current value. */
  public long get() {
    return value.get();
  }

  /** Replaces the current value. */
  public void set(long i) {
    value.set(i);
  }

  /** Atomically returns the current value and advances it by the increment. */
  public long getAndIncrement() {
    return value.getAndAdd(increment);
  }
}
Index: ocean/src/org/apache/lucene/ocean/util/SortedList.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/SortedList.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/SortedList.java	(revision 0)
@@ -0,0 +1,329 @@
+package org.apache.lucene.ocean.util;
+
+import java.util.AbstractCollection;
+import java.util.AbstractSet;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang.ObjectUtils;
+
/**
 * A {@link Map} backed by an ArrayList of entries kept sorted by key, so
 * lookups use binary search (O(log n)) and iteration yields keys in
 * ascending order.  Not thread safe.
 *
 * Fixes in this revision:
 * - SortedEntry now implements equals/hashCode per the Map.Entry contract;
 *   previously entrySet().contains() compared entries by identity and
 *   essentially always returned false.
 * - removeMapping/containsValue are null-safe (inline null-safe comparison
 *   replaces commons-lang ObjectUtils.equals with identical behavior).
 */
public class SortedList<K extends Comparable<K>,V> implements Map<K,V> {
  // Entries kept sorted by key; Collections.binarySearch relies on this.
  private List<SortedEntry<K,V>> entries = new ArrayList<SortedEntry<K,V>>();
  // Lazily created, cached collection views (same pattern as AbstractMap).
  private transient Set<Map.Entry<K,V>> entrySet = null;
  transient volatile Set<K>        keySet = null;
  transient volatile Collection<V> values = null;

  public SortedList() {}

  public SortedList(Map<K,V> map) {
    putAll(map);
  }

  /** Null-safe equality check (equivalent to ObjectUtils.equals). */
  private static boolean eq(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /**
   * Removes the entry matching both the key and the value of toRemove.
   * @return the removed entry, or null if no such mapping exists
   */
  private Map.Entry<K,V> removeMapping(Map.Entry<K,V> toRemove) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>((K) toRemove.getKey()));
    if (pos >= 0) {
      SortedEntry<K,V> entry = entries.get(pos);
      if (eq(entry.value, toRemove.getValue())) {
        entries.remove(pos);
        return entry;
      }
    }
    return null;
  }

  /** Map entry ordered by its key; used both for storage and as a search probe. */
  public static class SortedEntry<K extends Comparable<K>,V> implements Map.Entry<K,V>, Comparable<SortedEntry<K,V>> {
    private K key;
    private V value;

    private SortedEntry(K key, V value) {
      this.key = key;
      this.value = value;
    }

    // key-only probe used for binary searches
    private SortedEntry(K key) {
      this.key = key;
    }

    public K setKey(K key) {
      K oldKey = this.key;
      this.key = key;
      return oldKey;
    }

    public V setValue(V value) {
      V oldValue = this.value;
      this.value = value;
      return oldValue;
    }

    public K getKey() {
      return key;
    }

    public V getValue() {
      return value;
    }

    public int compareTo(SortedEntry<K,V> other) {
      return key.compareTo(other.key);
    }

    /** Equality per the Map.Entry contract (previously missing). */
    @Override
    public boolean equals(Object o) {
      if (!(o instanceof Map.Entry))
        return false;
      Map.Entry<?,?> e = (Map.Entry<?,?>) o;
      return eq(key, e.getKey()) && eq(value, e.getValue());
    }

    /** Hash per the Map.Entry contract: key.hash ^ value.hash (null -> 0). */
    @Override
    public int hashCode() {
      return (key == null ? 0 : key.hashCode()) ^ (value == null ? 0 : value.hashCode());
    }
  }

  public void putAll(Map<? extends K,? extends V> m) {
    for (Iterator<? extends Map.Entry<? extends K,? extends V>> i = m.entrySet().iterator(); i.hasNext();) {
      Map.Entry<? extends K,? extends V> e = i.next();
      put(e.getKey(), e.getValue());
    }
  }

  public boolean isEmpty() {
    return entries.size() == 0;
  }

  public void clear() {
    entries.clear();
  }

  public int size() {
    return entries.size();
  }

  /** Insertion point for key: the match position, or where it would be inserted. */
  private int getPos(K key) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>(key));
    if (pos < 0)
      pos = -1 - pos;
    return pos;
  }

  /** Linear scan; null-safe. */
  public boolean containsValue(Object value) {
    for (SortedEntry<K,V> entry : entries) {
      if (eq(value, entry.value))
        return true;
    }
    return false;
  }

  public boolean containsKey(Object key) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>((K) key));
    return pos >= 0;
  }

  /** Highest key, or null if empty. */
  public K lastKey() {
    if (entries.size() == 0)
      return null;
    return entries.get(entries.size() - 1).key;
  }

  /** Value mapped to the highest key, or null if empty. */
  public V lastValue() {
    if (entries.size() == 0)
      return null;
    return entries.get(entries.size() - 1).value;
  }

  /**
   * Associates value with key, replacing any existing mapping.
   * @return the previous value, or null if there was none
   */
  public V put(K key, V value) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>(key));
    if (pos >= 0) {
      V oldValue = entries.get(pos).value;
      entries.set(pos, new SortedEntry<K,V>(key, value));
      return oldValue;
    }
    pos = -1 - pos;  // convert binarySearch miss to insertion point
    entries.add(pos, new SortedEntry<K,V>(key, value));
    return null;
  }

  private SortedEntry<K,V> getEntry(Object key) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>((K) key));
    if (pos >= 0) {
      return entries.get(pos);
    } else {
      return null;
    }
  }

  public V get(Object key) {
    SortedEntry<K,V> entry = getEntry(key);
    return entry == null ? null : entry.value;
  }

  public V remove(Object key) {
    int pos = Collections.binarySearch(entries, new SortedEntry<K,V>((K) key));
    if (pos >= 0) {
      SortedEntry<K,V> entry = entries.get(pos);
      entries.remove(pos);
      return entry.value;
    }
    return null;
  }

  // NOTE(review): unlike put(), this inserts without replacing, so it can
  // create duplicate keys — confirm callers rely on that before changing.
  public void add(K key, V value) {
    int pos = getPos(key);
    entries.add(pos, new SortedEntry<K,V>(key, value));
  }

  /** Shared iterator over entries; subclasses project out key/value/entry. */
  private abstract class SortedListIterator<E> implements Iterator<E> {
    private Iterator<SortedEntry<K,V>> iterator;
    
    public SortedListIterator() {
      iterator = entries.iterator();
    }
    
    protected Map.Entry<K,V> nextEntry() {
      return iterator.next();
    }
    
    public void remove() {
      iterator.remove();
    }
    
    public boolean hasNext() {
      return iterator.hasNext();
    }
  }

  private class ValueIterator extends SortedListIterator<V> {
    public V next() {
      return nextEntry().getValue();
    }
  }

  private class KeyIterator extends SortedListIterator<K> {
    public K next() {
      return nextEntry().getKey();
    }
  }

  private class EntryIterator extends SortedListIterator<Map.Entry<K,V>> {
    public Map.Entry<K,V> next() {
      return nextEntry();
    }
  }

  // Subclass overrides these to alter behavior of views' iterator() method
  Iterator<K> newKeyIterator() {
    return new KeyIterator();
  }

  Iterator<V> newValueIterator() {
    return new ValueIterator();
  }

  Iterator<Map.Entry<K,V>> newEntryIterator() {
    return new EntryIterator();
  }

  /** Keys in ascending order; the view writes through to this map. */
  public Set<K> keySet() {
    Set<K> ks = keySet;
    return (ks != null ? ks : (keySet = new KeySet()));
  }

  private class KeySet extends AbstractSet<K> {
    public Iterator<K> iterator() {
      return newKeyIterator();
    }

    public int size() {
      return entries.size();
    }

    public boolean contains(Object o) {
      return containsKey(o);
    }

    public boolean remove(Object o) {
      return SortedList.this.remove(o) != null;
    }

    public void clear() {
      SortedList.this.clear();
    }
  }

  /** Values in ascending key order; the view writes through to this map. */
  public Collection<V> values() {
    Collection<V> vs = values;
    return (vs != null ? vs : (values = new Values()));
  }

  private class Values extends AbstractCollection<V> {
    public Iterator<V> iterator() {
      return newValueIterator();
    }

    public int size() {
      return SortedList.this.size();
    }

    public boolean contains(Object o) {
      return containsValue(o);
    }

    public void clear() {
      SortedList.this.clear();
    }
  }

  /** Entries in ascending key order; the view writes through to this map. */
  public Set<Map.Entry<K,V>> entrySet() {
    Set<Map.Entry<K,V>> es = entrySet;
    return (es != null ? es : (entrySet = new EntrySet()));
  }

  private class EntrySet extends AbstractSet<Map.Entry<K,V>> {
    public Iterator<Map.Entry<K,V>> iterator() {
      return newEntryIterator();
    }

    public boolean contains(Object o) {
      if (!(o instanceof Map.Entry))
        return false;
      Map.Entry<K,V> e = (Map.Entry<K,V>) o;
      SortedEntry<K,V> candidate = getEntry(e.getKey());
      // relies on SortedEntry.equals following the Map.Entry contract
      return candidate != null && candidate.equals(e);
    }

    public boolean remove(Object o) {
      return removeMapping((Map.Entry<K,V>)o) != null;
    }

    public int size() {
      return SortedList.this.size();
    }

    public void clear() {
      SortedList.this.clear();
    }
  }
}
Index: ocean/src/org/apache/lucene/ocean/util/Util.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/Util.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/Util.java	(revision 0)
@@ -0,0 +1,442 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.RandomAccessFile;
+import java.io.Reader;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.text.DateFormat;
+import java.text.DecimalFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import java.util.TimeZone;
+import java.util.TreeSet;
+import java.util.regex.Pattern;
+
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NameNotFoundException;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.ocean.LogDirectory;
+import org.apache.lucene.search.ExtendedFieldCache;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+
+public class Util {
+  public static TimeZone UTC = TimeZone.getTimeZone("UTC");
+  public final static Pattern splitPattern = Pattern.compile(",| ");
+  private static ThreadLocalDateFormat dateFormatThreadLocal = new ThreadLocalDateFormat();
+
+  public static String formatSnapshotId(BigDecimal id) {
+    DecimalFormat format = new DecimalFormat("##0.00");
+    return format.format(id);
+  }
+
+  /**
+   * Handles size 0 collections returns null
+   */
+  public static <T extends Object & Comparable<? super T>> T max(Collection<? extends T> coll) {
+    if (coll.size() == 0)
+      return null;
+    return Collections.max(coll);
+  }
+
+  public static void touchFile(String file, Directory directory) throws IOException {
+    IndexOutput output = directory.createOutput(file);
+    output.close();
+  }
+
+  public static String getString(String file, LogDirectory directory) throws IOException {
+    try {
+      RandomAccessFile input = directory.openInput(file);
+      if (input.length() == 0)
+        return null;
+      byte[] bytes = new byte[(int) input.length()];
+      input.read(bytes);
+      return new String(bytes, "UTF-8");
+    } catch (Throwable ioException) {
+      IOException newIOException = new IOException("file: "+file);
+      newIOException.initCause(ioException);
+      throw newIOException;
+    }
+  }
+
+  public static void save(String string, String file, LogDirectory directory) throws IOException {
+    byte[] bytes = string.getBytes("UTF-8");
+    RandomAccessFile output = directory.getOutput(file, true);
+    output.write(bytes);
+    output.getFD().sync();
+    output.close();
+  }
+
+  public static String getString(String file, Directory directory) throws IOException {
+    IndexInput input = directory.openInput(file);
+    byte[] bytes = new byte[(int) input.length()];
+    input.readBytes(bytes, 0, bytes.length);
+    return new String(bytes, "UTF-8");
+  }
+
+  public static void save(String string, String file, Directory directory) throws IOException {
+    byte[] bytes = string.getBytes("UTF-8");
+    IndexOutput output = directory.createOutput(file);
+    output.writeBytes(bytes, bytes.length);
+    output.flush();
+    output.close();
+  }
+
+  public static void copy(IndexInput input, IndexOutput output, byte[] buffer) throws IOException {
+    long len = input.length();
+    long readCount = 0;
+    while (readCount < len) {
+      int toRead = readCount + buffer.length > len ? (int) (len - readCount) : buffer.length;
+      input.readBytes(buffer, 0, toRead);
+      output.writeBytes(buffer, toRead);
+      readCount += toRead;
+    }
+  }
+
+  public static <K,V> V getLastValue(SortedMap<K,V> map) {
+    if (map.size() == 0)
+      return null;
+    return map.get(map.lastKey());
+  }
+
+  public static void setValue(String name, long value, Document document) {
+    String encoded = longToEncoded(value);
+    document.add(new Field(name, encoded, Field.Store.YES, Field.Index.UN_TOKENIZED));
+  }
+
+  public static boolean mkdir(File dir) {
+    if (!dir.exists()) {
+      return dir.mkdirs();
+    }
+    return false;
+  }
+
+  public static String longToEncoded(long value) {
+    return long2sortableStr(value);
+  }
+
+  public static long longFromEncoded(String string) {
+    return SortableStr2long(string, 0, 5);
+  }
+
+  public static String long2sortableStr(long val) {
+    char[] arr = new char[5];
+    long2sortableStr(val, arr, 0);
+    return new String(arr, 0, 5);
+  }
+
+  // uses binary representation of an int to build a string of
+  // chars that will sort correctly. Only char ranges
+  // less than 0xd800 will be used to avoid UCS-16 surrogates.
+  // we can use the lowest 15 bits of a char, (or a mask of 0x7fff)
+  public static int long2sortableStr(long val, char[] out, int offset) {
+    val += Long.MIN_VALUE;
+    out[offset++] = (char) (val >>> 60);
+    out[offset++] = (char) (val >>> 45 & 0x7fff);
+    out[offset++] = (char) (val >>> 30 & 0x7fff);
+    out[offset++] = (char) (val >>> 15 & 0x7fff);
+    out[offset] = (char) (val & 0x7fff);
+    return 5;
+  }
+
+  public static long SortableStr2long(String sval, int offset, int len) {
+    long val = (long) (sval.charAt(offset++)) << 60;
+    val |= ((long) sval.charAt(offset++)) << 45;
+    val |= ((long) sval.charAt(offset++)) << 30;
+    val |= sval.charAt(offset++) << 15;
+    val |= sval.charAt(offset);
+    val -= Long.MIN_VALUE;
+    return val;
+  }
+
+  public static int getDoc(String fieldName, long value, IndexReader indexReader) throws IOException {
+    String encoded = longToEncoded(value);
+    return getTermDoc(new Term(fieldName, encoded), indexReader);
+  }
+
+  public static int getTermDoc(Term term, IndexReader indexReader) throws IOException {
+    TermDocs docs = indexReader.termDocs(term);
+    try {
+      if (docs.next()) {
+        return docs.doc();
+      }
+    } finally {
+      docs.close();
+    }
+    return -1;
+  }
+
+  public static List<Integer> getTermDocs(Term term, IndexReader indexReader) throws IOException {
+    List<Integer> list = new ArrayList<Integer>();
+    TermDocs docs = indexReader.termDocs(term);
+    try {
+      while (docs.next()) {
+        list.add(docs.doc());
+      }
+    } finally {
+      docs.close();
+    }
+    return list;
+  }
+
+  public static long getSize(Directory directory) throws IOException {
+    long total = 0;
+    for (String file : directory.list()) {
+      total += directory.fileLength(file);
+    }
+    return total;
+  }
+
+  public static void copy(InputStream is, RandomAccessFile ras, byte[] buf) throws IOException {
+    int numRead;
+    while ((numRead = is.read(buf)) >= 0) {
+      ras.write(buf, 0, numRead);
+    }
+  }
+
+  public static File getDirectory(File root, String path) {
+    File file = new File(root, path);
+    if (!file.exists()) {
+      file.mkdirs();
+    }
+    return file;
+  }
+
+  public static DateFormat getThreadLocalDateFormat() {
+    return dateFormatThreadLocal.get();
+  }
+
+  public static String formatDate(Date date) {
+    DateFormat dateFormat = getThreadLocalDateFormat();
+    return dateFormat.format(date);
+  }
+
+  public static Date parseDate(String string) throws ParseException {
+    DateFormat dateFormat = getThreadLocalDateFormat();
+    return dateFormat.parse(string);
+  }
+
+  private static class ThreadLocalDateFormat extends ThreadLocal<DateFormat> {
+    DateFormat proto;
+
+    public ThreadLocalDateFormat() {
+      super();
+      SimpleDateFormat tmp = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.US);
+      tmp.setTimeZone(UTC);
+      proto = tmp;
+    }
+
+    protected DateFormat initialValue() {
+      return (DateFormat) proto.clone();
+    }
+  }
+
+  public static long[] getFieldCacheLong(String field, IndexReader indexReader) throws IOException {
+    return ExtendedFieldCache.EXT_DEFAULT.getLongs(indexReader, field);
+  }
+
+  public static boolean isTrue(String string) {
+    return StringUtils.equalsIgnoreCase("true", string);
+  }
+
+  public static Object construct(Object parameter, Class clazz) throws Exception {
+    Constructor constructor = clazz.getDeclaredConstructor(parameter.getClass());
+    try {
+      return constructor.newInstance(parameter);
+    } catch (InvocationTargetException invocationTargetException) {
+      Throwable cause = invocationTargetException.getCause();
+      if (cause instanceof Exception)
+        throw (Exception) cause;
+      else
+        throw invocationTargetException;
+    }
+  }
+
+  public static Long parseLong(String value) {
+    if (StringUtils.isBlank(value)) {
+      return null;
+    }
+    try {
+      return new Long(value);
+    } catch (Throwable ex) {
+      return null;
+    }
+  }
+
+  public static URL parseURL(String string) throws MalformedURLException {
+    if (StringUtils.isBlank(string)) {
+      return null;
+    }
+    return new URL(string);
+  }
+
+  public static List<String> readLines(Reader reader) throws Exception {
+    return IOUtils.readLines(reader);
+  }
+
+  public static IOException asIOException(String message, Throwable throwable) throws IOException {
+    IOException ioException = new IOException(message);
+    ioException.initCause(throwable);
+    return ioException;
+  }
+
+  public static IOException asIOException(Throwable throwable) {
+    if (throwable instanceof IOException) {
+      return (IOException) throwable;
+    }
+    IOException ioException = new IOException(throwable.getMessage());
+    ioException.initCause(throwable);
+    return ioException;
+  }
+
+  public static void copy(InputStream is, OutputStream os, byte[] buf) throws IOException {
+    int numRead;
+    while ((numRead = is.read(buf)) >= 0) {
+      os.write(buf, 0, numRead);
+    }
+  }
+
+  public static int[] toIntArray(List<Integer> list) {
+    int size = list.size();
+    int[] array = new int[size];
+    int x = 0;
+    for (int i : list) {
+      array[x] = i;
+      x++;
+    }
+    return array;
+  }
+
+  public static List<URL> loadUrls(File file) throws IOException {
+    List<String> lines = IOUtils.readLines(new FileReader(file));
+    List<URL> urls = new ArrayList<URL>();
+    for (String line : lines) {
+      urls.add(new URL(line));
+    }
+    return urls;
+  }
+
+  /**
+   * public static HttpParameters toHttpParameters(HttpServletRequest request) {
+   * try { if (StringUtils.equalsIgnoreCase("post", request.getMethod())) {
+   * HttpParameters parameters = new HttpParameters(); URL url = new
+   * URL(request.getRequestURL().toString()); CGIParser cgiParser = new
+   * CGIParser(url.toString(), "UTF-8"); for (String name :
+   * cgiParser.getParameterNameList()) { for (String value :
+   * cgiParser.getParameterValues(name)) { parameters.add(name, value); } }
+   * return parameters; } } catch (Exception exception) { throw new
+   * RuntimeException(exception); } if (StringUtils.equalsIgnoreCase("get",
+   * request.getMethod())) { HttpParameters parameters = new HttpParameters();
+   * Enumeration paramEnum = request.getParameterNames(); while
+   * (paramEnum.hasMoreElements()) { String name = (String)
+   * paramEnum.nextElement(); String[] array = request.getParameterValues(name);
+   * if (array != null && array.length > 0) { for (String value : array) {
+   * parameters.add(name, value); } } } return parameters; } throw new
+   * RuntimeException("unknown http method " + request.getMethod()); }
+   */
+  public static Long getNextServerSequence(Long value, int serverNumber) {
+    if (value == null) {
+      return new Long(serverNumber);
+    }
+    Long i = null;
+    if (value > 99) {
+      String string = value.toString();
+      String substring = string.substring(0, string.length() - 2);
+      i = new Long(substring + "00");
+    } else {
+      i = new Long(0);
+    }
+    long v = i + serverNumber;
+    return v + 100;
+  }
+
+  public static int getServerNumber(BigInteger id) {
+    String string = id.toString();
+    String substring = string.substring(string.length() - 2, string.length());
+    return Integer.parseInt(substring);
+  }
+
+  public static SortedSet<String> splitToSortedSet(String string) {
+    if (StringUtils.isBlank(string)) {
+      return null;
+    }
+    String[] array = splitPattern.split(string.trim(), 0);
+    TreeSet<String> sortedSet = new TreeSet<String>();
+    for (int x = 0; x < array.length; x++) {
+      sortedSet.add(array[x]);
+    }
+    return sortedSet;
+  }
+
+  public static File getAppServerHome() {
+    return new File(System.getProperty("catalina.home"));
+  }
+
+  public static File getHomeDirectory(String name, File defaultDirectory) throws Exception {
+    String value = System.getenv(name);
+    if (value != null)
+      return new File(value);
+    value = System.getProperty(name);
+    if (value != null)
+      return new File(value);
+    Context context = (Context) new InitialContext().lookup("java:comp/env");
+    try {
+      String string = (String) context.lookup(name);
+      if (StringUtils.isNotBlank(value))
+        return new File(value);
+    } catch (NameNotFoundException nameNotFoundException) {
+    }
+    defaultDirectory.mkdirs();
+    return defaultDirectory;
+  }
+
+  public static Object getFirst(List list) {
+    Iterator iterator = list.iterator();
+    if (iterator.hasNext()) {
+      return iterator.next();
+    } else {
+      return null;
+    }
+  }
+
+  public static Map<String,String> toMapExcept(org.jdom.Element element, String exceptName) {
+    Map<String,String> map = new HashMap<String,String>();
+    for (Object object : element.getAttributes()) {
+      org.jdom.Attribute attribute = (org.jdom.Attribute) object;
+      String name = attribute.getName();
+      if (!StringUtils.equals(name, exceptName)) {
+        map.put(name, attribute.getValue());
+      }
+    }
+    return map;
+  }
+}
Index: ocean/src/org/apache/lucene/ocean/util/XMLUtil.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/util/XMLUtil.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/util/XMLUtil.java	(revision 0)
@@ -0,0 +1,444 @@
+package org.apache.lucene.ocean.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringReader;
+import java.io.Writer;
+import java.lang.reflect.Field;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.BooleanUtils;
+import org.apache.commons.lang.StringUtils;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.Namespace;
+import org.jdom.Verifier;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.xml.sax.EntityResolver;
+import org.xml.sax.InputSource;
+
+/**
+ * 
+ * @author Jason Rutherglen
+ */
+public class XMLUtil {
+	public static Logger log = Logger.getLogger(XMLUtil.class.getName());
+
+	public XMLUtil() {
+	}
+
+	public static Collection<Attribute> getAttributes(Element element) {
+		return (Collection<Attribute>)element.getAttributes();
+	}
+	
+	public static String printAndDetach(Element element) throws Exception {
+		List<Element> list = element.cloneContent();
+		Element root = null;
+		if (list.size() == 1) {
+			root = list.get(0);
+		} else {
+			root = new Element("root");
+			root.addContent(list);
+		}
+		return outputElement(root);
+	}
+
+	public static Object getProperValue(Field field, String string) throws Exception {
+		if (!field.getType().isAssignableFrom(String.class)) {
+			if (field.getType().isAssignableFrom(URL.class)) {
+				return new URL(string);
+			} else if (field.getType().isAssignableFrom(Date.class)) {
+				return Util.parseDate(string);
+			} else if (field.getType().isAssignableFrom(File.class)) {
+				return new File(string);
+			} else if (field.getType().isAssignableFrom(Long.class)) {
+				return new Long(string);
+			} else if (field.getType().isAssignableFrom(Double.class)) {
+				return new Double(string);
+			}
+			throw new Exception("unknown type " + field.getType().getName());
+		} else
+			return string;
+	}
+
+	public static void addAll(List<Element> children, Element parent) {
+		for (Element element : children) {
+			parent.addContent(element);
+		}
+	}
+
+	public static void printList(String rootName, List list, PrintWriter writer) throws Exception {
+		Format format = Format.getPrettyFormat();
+		format.setLineSeparator("\n");
+		XMLOutputter outputter = new XMLOutputter(format);
+		Element root = new Element(rootName);
+		for (Object object : list) {
+			if (object instanceof CElement) {
+				CElement cElement = (CElement) object;
+				root.addContent(cElement.toElement());
+			}
+		}
+		Document document = new Document();
+		document.addContent(root);
+		outputter.output(document, writer);
+	}
+
+	public static List<Element> getChildren(String xml) throws Exception {
+		return getChildren(parseElement(xml));
+	}
+
+	public static List<Element> getChildren(String name, Element element) {
+		List<Element> elements = (List<Element>) element.getChildren(name);
+		if (element == null)
+			return new ArrayList<Element>(0);
+		else
+			return elements;
+	}
+
+	public static List<Element> getChildren(Element element) {
+		List<Element> elements = (List<Element>) element.getChildren();
+		if (element == null)
+			return new ArrayList<Element>(0);
+		else
+			return elements;
+	}
+
+	public static Element getFirstChild(Element element) {
+		return (Element) Util.getFirst(element.getChildren());
+	}
+
+	public static void setAttribute(String name, Object value, Element element) {
+		if (value == null) {
+			return;
+		}
+		if (value instanceof Date) {
+			value = Util.formatDate((Date) value);
+		}
+		element.setAttribute(name, value.toString());
+	}
+
+	public static java.net.URL getChildURL(String name, Element element) {
+		String childText = element.getChildText(name);
+		try {
+			return new java.net.URL(childText);
+		} catch (Throwable th) {
+		}
+		return null;
+	}
+
+	public static Element getChild(String name, Element root) {
+		return root.getChild(name);
+	}
+
+	public static java.math.BigInteger getChildBigInteger(String name, Element element) {
+		String childText = element.getChildText(name);
+		try {
+			return new java.math.BigInteger(childText);
+		} catch (Throwable throwable) {
+		}
+		return null;
+	}
+
+	public static Long getChildLong(String name, Element element) throws NumberFormatException {
+		String childText = element.getChildText(name);
+		return new Long(childText);
+	}
+
+	public static String getAttributeString(String name, Element element) {
+		String text = element.getAttributeValue(name);
+		return text;
+	}
+  
+	public static Integer getAttributeInteger(String name, Element element) throws NumberFormatException {
+		String text = element.getAttributeValue(name);
+		if (text == null || text.equals("")) {
+			return null;
+		}
+		return new Integer(text);
+	}
+	
+	public static Float getAttributeFloat(String name, Element element) throws NumberFormatException {
+    String text = element.getAttributeValue(name);
+    if (text == null || text.equals("")) {
+      return null;
+    }
+    return new Float(text);
+  }
+	
+	public static BigDecimal getAttributeBigDecimal(String name, Element element) throws NumberFormatException {
+    String text = element.getAttributeValue(name);
+    if (text == null || text.equals("")) {
+      return null;
+    }
+    return new BigDecimal(text);
+  }
+	
+	public static Long getAttributeLong(String name, Element element) throws NumberFormatException {
+		String text = element.getAttributeValue(name);
+		if (text == null || text.equals("")) {
+			return null;
+		}
+		return new Long(text);
+	}
+
+	public static Date getAttributeDate(String name, Element element) throws ParseException {
+		String text = element.getAttributeValue(name);
+		if (text == null || text.equals("")) {
+			return null;
+		}
+		return Util.parseDate(text);
+	}
+
+	public static Boolean getChildBoolean(String name, Element element) {
+		String text = element.getChildText(name);
+		if (StringUtils.isBlank(text)) {
+			return null;
+		}
+		return BooleanUtils.toBooleanObject(text);
+	}
+
+	/**
+	 * Returns the boolean value of the named attribute, or false when the
+	 * attribute is missing, blank, or not a recognized boolean token.
+	 */
+	public static boolean getAttributeBooleanPrimitive(String name, Element element) {
+		String text = element.getAttributeValue(name);
+		if (StringUtils.isBlank(text)) {
+			return false;
+		}
+		// bug fix: toBooleanObject(text) returns null for unrecognized text,
+		// which threw NullPointerException when auto-unboxed to the primitive
+		// return type; toBoolean(text) yields false in that case instead
+		return BooleanUtils.toBoolean(text);
+	}
+
+	public static Boolean getAttributeBoolean(String name, Element element) {
+		String text = element.getAttributeValue(name);
+		if (StringUtils.isBlank(text)) {
+			return null;
+		}
+		return BooleanUtils.toBooleanObject(text);
+	}
+
+	public static Date getChildDate(String name, Element element) throws ParseException {
+		String text = element.getChildText(name);
+		if (text == null || text.equals("")) {
+			return null;
+		}
+		return parseDate(text);
+	}
+
+	public static Date parseDate(String dateStr) throws ParseException {
+		if (org.apache.commons.lang.StringUtils.isEmpty(dateStr)) {
+			return null;
+		}
+		return Util.parseDate(dateStr);
+	}
+
+	public static String formatDate(Date date) {
+		if (date == null) {
+			return "";
+		}
+		return Util.formatDate(date);
+	}
+
+	public static String getChildText(String name, Element element) {
+		return element.getChildText(name);
+	}
+
+	public static Integer getChildInteger(String name, Element element) {
+		try {
+			String text = element.getChildText(name);
+			return new Integer(text);
+		} catch (Throwable ex) {
+		}
+		return null;
+	}
+
+	public static Double getChildDouble(String name, Element element) throws NumberFormatException {
+		try {
+			String text = element.getChildText(name);
+			return new Double(text);
+		} catch (Throwable ex) {
+		}
+		return null;
+	}
+
+	public static void outputElement(Element element, Writer writer) throws Exception {
+		XMLOutputter xmlOut = new XMLOutputter(Format.getPrettyFormat());
+		xmlOut.output(new Document(element), writer);
+	}
+
+	public static String outputElement(Element element) throws Exception {
+		XMLOutputter xmlOut = new XMLOutputter(Format.getPrettyFormat());
+		String xmlString = xmlOut.outputString(new Document(element));
+		return xmlString;
+	}
+
+	public static String outputElementMinimal(Element element) throws Exception {
+		Format format = Format.getCompactFormat();
+		format.setOmitDeclaration(true);
+		XMLOutputter xmlOut = new XMLOutputter(format);
+		String xmlString = xmlOut.outputString(new Document(element));
+		return xmlString;
+	}
+
+	public static String outputElementOmitDeclaration(Element element) {
+		Format format = Format.getPrettyFormat();
+		format.setOmitDeclaration(true);
+		XMLOutputter xmlOut = new XMLOutputter(format);
+		String xmlString = xmlOut.outputString(new Document(element));
+		return xmlString;
+	}
+
+	public static String outputDocument(Document document) throws Exception {
+		XMLOutputter xmlOut = new XMLOutputter(Format.getPrettyFormat());
+		String xmlString = xmlOut.outputString(document);
+		return xmlString;
+	}
+
+	public static String removeInvalidXMLChars(String value) {
+		StringBuffer buffer = new StringBuffer();
+		char[] array = value.toCharArray();
+		for (int x = 0; x < array.length; x++) {
+			if (Verifier.isXMLCharacter(array[x])) {
+				buffer.append(array[x]);
+			}
+		}
+		return buffer.toString();
+	}
+
+	public static Element createTextElement(String name, Object object, Element parentElement) {
+		if (object == null) {
+			return null;
+		}
+
+		String text = null;
+		if (object instanceof Date) {
+			Date date = (Date) object;
+
+			text = formatDate(date);
+		} else {
+			text = object.toString();
+		}
+		Element element = new Element(name);
+		// XMLOutputter outputter = new XMLOutputter();
+		// text = outputter.escapeElementEntities(text);
+		// text = removeInvalidXMLChars(text);
+		element.setText(text);
+
+		parentElement.addContent(element);
+
+		return element;
+	}
+
+	public static Element createTextElement(String name, Object object, Namespace namespace, Element parentElement) {
+		if (object == null) {
+			return null;
+		}
+
+		String text = null;
+		if (object instanceof File) {
+			text = ((File) object).getAbsolutePath();
+		} else {
+			text = object.toString();
+		}
+		Element element = new Element(name, namespace);
+		XMLOutputter outputter = new XMLOutputter();
+		text = outputter.escapeElementEntities(text);
+		text = removeInvalidXMLChars(text);
+		element.setText(text);
+
+		parentElement.addContent(element);
+
+		return element;
+	}
+
+	public static void saveXML(Element element, File file) throws IOException {
+		Document document = new Document();
+		document.addContent(element);
+		saveXML(document, file);
+	}
+
+	public static void saveXML(Document document, File file) throws IOException {
+		File parentDir = file.getParentFile();
+		if (!parentDir.exists()) {
+			parentDir.mkdirs();
+		}
+		Format format = Format.getPrettyFormat();
+		format.setLineSeparator("\n");
+		XMLOutputter outputter = new XMLOutputter(format);
+		String channelXMLStr = outputter.outputString(document);
+		FileUtils.writeStringToFile(file, channelXMLStr, "UTF-8");
+	}
+
+	/**
+	 * public static XmlPullParser parseDocumentSAX(InputStream input) throws
+	 * Exception { XmlPullParserFactory factory =
+	 * XmlPullParserFactory.newInstance("org.xmlpull.mxp1.MXParserFactory", null);
+	 * factory.setNamespaceAware(false); factory.setValidating(false);
+	 * XmlPullParser xpp = factory.newPullParser(); xpp.setInput(input, "UTF-8");
+	 * return xpp; }
+	 */
+	public static File getChildFile(String name, Element element) throws Exception {
+		String path = element.getChildTextTrim(name);
+		if (StringUtils.isBlank(path)) {
+			return null;
+		}
+		return new File(path);
+	}
+
+	public static Document parseDocument(File file) throws XMLException, IOException {
+		String xml = FileUtils.readFileToString(file, "UTF-8");
+		return parseDocument(xml);
+	}
+
+	public static Element parseElement(File file) throws XMLException, IOException {
+		if (!file.exists()) {
+			return null;
+		}
+		String xml = FileUtils.readFileToString(file, "UTF-8");
+		return parseElement(xml);
+	}
+
+	public static Element parseElement(String xml) throws XMLException {
+		Document document = parseDocument(xml);
+		return document.getRootElement();
+	}
+  
+	public static class XMLException extends Exception {
+		public XMLException(String message, Throwable throwable) {
+			super(message, throwable);
+		}
+	}
+	
+	public static Document parseDocument(String xml) throws XMLException {
+		if (StringUtils.isBlank(xml))
+			throw new IllegalArgumentException("xml blank"); 
+		EntityResolver RESOLVER = new EmptyEntityResolver();
+		SAXBuilder saxBuilder = new SAXBuilder(false);
+		saxBuilder.setEntityResolver(RESOLVER);
+		try {
+			Document document = saxBuilder.build(new StringReader(xml));
+			return document;
+		} catch (Exception exception) {
+			throw new XMLException(xml, exception);
+		}
+	}
+
+	public static class EmptyEntityResolver implements EntityResolver {
+		public InputSource resolveEntity(String publicId, String systemId) {
+			InputSource EMPTY_INPUTSOURCE = new InputSource(new ByteArrayInputStream(new byte[0]));
+			return EMPTY_INPUTSOURCE;
+			// if (systemId != null && systemId.endsWith(".dtd")) return
+			// EMPTY_INPUTSOURCE;
+			// return null;
+		}
+	}
+}
Index: ocean/src/org/apache/lucene/ocean/WriteableMemoryIndex.java
===================================================================
--- ocean/src/org/apache/lucene/ocean/WriteableMemoryIndex.java	(revision 0)
+++ ocean/src/org/apache/lucene/ocean/WriteableMemoryIndex.java	(revision 0)
@@ -0,0 +1,185 @@
+package org.apache.lucene.ocean;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.FilterIndexReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.ocean.util.SortedList;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.store.instantiated.InstantiatedIndex;
+import org.apache.lucene.store.instantiated.InstantiatedIndexReader;
+import org.apache.lucene.store.instantiated.InstantiatedIndexWriter;
+
+/**
+ * Encapsulates org.apache.lucene.store.instantiated.InstantiatedIndex for use with the
+ * Ocean system.  This is the only index that is writeable, meaning new documents may
+ * be added to.  
+ * 
+ */
+//TODO: release old MemoryIndexSnapshots from map
+public class WriteableMemoryIndex extends Index {
+	private WriteableIndexWriter indexWriter;
+	//private InstantiatedIndexReader indexReader;
+	private InstantiatedIndex instantiatedIndex;
+  private SortedList<Long,MemoryIndexSnapshot> snapshotMap = new SortedList<Long,MemoryIndexSnapshot>();
+	
+	public WriteableMemoryIndex(IndexID id, TransactionSystem system) throws IOException {
+		super(id, system);
+		instantiatedIndex = new InstantiatedIndex();
+		indexWriter = new WriteableIndexWriter(instantiatedIndex);
+		//indexReader = new InstantiatedIndexReader(instantiatedIndex);
+	}
+  
+	public boolean rollback(Long snapshotId) {
+	  return snapshotMap.remove(snapshotId) != null;
+	}
+	
+	// called by Category.runTransactionsNotInIndex
+	void addDocuments(Documents documents, Analyzer analyzer) throws IOException {
+		for (Document document : documents) {
+			indexWriter.addDocument(document, analyzer);
+		}
+	}
+
+	// called by Category.runTransactionsNotInIndex
+	MemoryIndexSnapshot setSnapshot(Long snapshotId) {//, List<Deletes> deletesList) throws Exception {
+		//int maxDoc = indexReader.maxDoc();
+		//HashSet<Integer> deletedSet = new HashSet<Integer>();
+		//if (deletesList != null) {
+		//	for (Deletes deletes : deletesList) {
+		//		applyDeletes(false, deletes, null, indexReader);
+		//	}
+		//}
+	  int maxDoc = instantiatedIndex.getDocumentsByNumber().length;
+	  HashSet<Integer> deletedSet = new HashSet<Integer>();
+		MemoryIndexSnapshot memoryIndexSnapshot = new MemoryIndexSnapshot(snapshotId, maxDoc, deletedSet);
+		snapshotMap.put(snapshotId, memoryIndexSnapshot);
+		return memoryIndexSnapshot;
+	}
+
+	public MemoryIndexSnapshot getIndexSnapshot(Long snapshotId) {
+		return snapshotMap.get(snapshotId);
+	}
+
+	public MemoryIndexSnapshot getLatestIndexSnapshot() {
+		return snapshotMap.lastValue();
+	}
+
+	public class MemoryIndexSnapshot extends IndexSnapshot {
+		private final int maxDoc;
+		private HashSet<Integer> deletedDocs;
+		private OceanInstantiatedIndexReader indexReader;
+
+		public MemoryIndexSnapshot(Long snapshotId, int maxDoc, HashSet<Integer> deletedDocs) {
+			super(snapshotId);
+			this.maxDoc = maxDoc;
+			this.deletedDocs = deletedDocs;
+			indexReader = new OceanInstantiatedIndexReader(maxDoc, instantiatedIndex, deletedDocs);
+		}
+		
+		public int deletedDoc() {
+		  return deletedDocs.size();
+		}
+		
+		public int maxDoc() {
+		  return maxDoc;
+		}
+
+		public IndexReader getIndexReader() {
+			return indexReader;
+		}
+	}
+
+	Long getLatestSnapshotId() {
+		return snapshotMap.lastKey();
+	}
+
+	private HashSet<Integer> getLatestSnapshotDeletedDocSet() {
+		MemoryIndexSnapshot memoryIndexSnapshot = snapshotMap.lastValue();
+		if (memoryIndexSnapshot == null || memoryIndexSnapshot.deletedDocs == null)
+			return null;
+		HashSet<Integer> deletedDocSet = memoryIndexSnapshot.deletedDocs;
+		return deletedDocSet;
+	}
+
+	public int getDocumentCount() {
+		return instantiatedIndex.getDocumentsByNumber().length;
+	}
+
+	public DeletesResult commitDeletes(Deletes deletes, Transaction transaction) throws Exception, InterruptedException, IOException {
+		return commitChanges(null, deletes, null, transaction);
+	}
+  
+	/**
+	 * Records a new snapshot for the transaction without adding documents or
+	 * deletes: the latest snapshot's maxDoc and deleted-doc set are carried
+	 * forward under the transaction's id.  Note the deleted set is shared by
+	 * reference, not cloned, with the previous snapshot.
+	 */
+	public void commitNothing(Transaction transaction) throws IndexException, InterruptedException, IOException {
+	  Long snapshotId = transaction.getId();
+	  // NOTE(review): getLatestIndexSnapshot() returns null when no snapshot
+	  // exists yet, which would NPE below - confirm callers guarantee a
+	  // snapshot has been created first
+	  MemoryIndexSnapshot latestIndexSnapshot = getLatestIndexSnapshot();
+	  int maxDoc = latestIndexSnapshot.maxDoc();
+	  HashSet<Integer> deletedDocSet = latestIndexSnapshot.deletedDocs;
+	  MemoryIndexSnapshot memoryIndexSnapshot = new MemoryIndexSnapshot(snapshotId, maxDoc, deletedDocSet);
+    snapshotMap.put(snapshotId, memoryIndexSnapshot);
+    removeOldSnapshots(snapshotMap);
+	}
+	
+	/**
+	 * Applies a batch of document adds and/or deletes as a single transaction
+	 * against the in-memory index.  Deletes are applied against a clone of the
+	 * latest snapshot's deleted-doc set, so earlier snapshots remain readable.
+	 * On transaction.go() the writer is committed and a new snapshot is
+	 * registered under the transaction id; otherwise the writer is aborted.
+	 * Any failure is reported to the transaction before being rethrown.
+	 *
+	 * @return the DeletesResult for the applied deletes, or null when the
+	 *         transaction did not go forward
+	 */
+	public DeletesResult commitChanges(Documents documents, Deletes deletes, Analyzer analyzer, Transaction transaction) throws InterruptedException, Exception, IOException {
+		try {
+			if (isClosed()) {
+				throw new IOException("index is closed");
+			}
+			if (isReadOnly() && documents != null && documents.size() > 0) {
+				throw new IOException("index not accepting new documents");
+			}
+			DeletesResult deletesResult = new DeletesResult(getId());
+			HashSet<Integer> deletedSet = null;
+			if (deletes != null && deletes.hasDeletes()) {
+			  HashSet<Integer> previousDeletedSet = getLatestSnapshotDeletedDocSet();
+				// clone so prior snapshots keep their own (immutable) view of
+				// which docs are deleted
+				if (previousDeletedSet != null) {
+					deletedSet = (HashSet<Integer>) previousDeletedSet.clone();
+				} else {
+					deletedSet = new HashSet<Integer>();
+				}
+				IndexSnapshot indexSnapshot = getLatestIndexSnapshot();
+				deletesResult = applyDeletes(false, deletes, deletedSet, indexSnapshot.getIndexReader());
+			} else if (deletes == null || !deletes.hasDeletes()) { // if no deletes
+				// just use same
+				deletedSet = getLatestSnapshotDeletedDocSet();
+			}
+			if (documents != null) {
+				for (Document document : documents) {
+					indexWriter.addDocument(document, analyzer);
+				}
+			}
+			// two-phase: signal readiness, then either commit or abort based on
+			// the coordinator's decision
+			transaction.ready(this);
+			if (transaction.go()) {
+				indexWriter.commit();
+				int maxDoc = instantiatedIndex.getDocumentsByNumber().length;
+				Long snapshotId = transaction.getId();
+				MemoryIndexSnapshot memoryIndexSnapshot = new MemoryIndexSnapshot(snapshotId, maxDoc, deletedSet);
+				snapshotMap.put(snapshotId, memoryIndexSnapshot);
+				removeOldSnapshots(snapshotMap);
+				return deletesResult;
+			} else {
+				indexWriter.abort();
+				return null;
+			}
+		} catch (Throwable throwable) {
+			// report the failure to the transaction, then rethrow preserving
+			// the original exception type where possible
+			transaction.failed(this, throwable);
+			if (throwable instanceof Exception) {
+			  throw (Exception)throwable;
+			} else {
+			  throw new Exception(throwable);
+			}
+		}
+		//return null;
+	}
+
+	public class WriteableIndexWriter extends InstantiatedIndexWriter {
+		public WriteableIndexWriter(InstantiatedIndex index) throws IOException {
+			super(index);
+		}
+	}
+}
Index: ocean/src/org/apache/lucene/search/OceanMultiThreadSearcher.java
===================================================================
--- ocean/src/org/apache/lucene/search/OceanMultiThreadSearcher.java	(revision 0)
+++ ocean/src/org/apache/lucene/search/OceanMultiThreadSearcher.java	(revision 0)
@@ -0,0 +1,254 @@
+package org.apache.lucene.search;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.ocean.OceanSearcher;
+import org.apache.lucene.ocean.Snapshot;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.util.PriorityQueue;
+
+public class OceanMultiThreadSearcher extends OceanSearcher {
+  private Snapshot snapshot;
+  private ExecutorService searchThreadPool;
+  private Searchable[] searchables;
+  private int[] starts;
+
+  public OceanMultiThreadSearcher(Snapshot snapshot, ExecutorService searchThreadPool) throws IOException {
+    super(snapshot);
+    searchables = snapshot.getSearchers();
+    starts = snapshot.getStarts();
+    this.snapshot = snapshot;
+    this.searchThreadPool = searchThreadPool;
+  }
+
+  public TopDocs search(Weight weight, Filter filter, int nDocs) throws IOException {
+    HitQueue hq = new HitQueue(nDocs);
+    int totalHits = 0;
+    MultiSearcherThread[] msta = new MultiSearcherThread[searchables.length];
+    ReentrantLock lock = new ReentrantLock();
+    List<Future> futures = new ArrayList<Future>(searchables.length);
+    for (int i = 0; i < searchables.length; i++) { // search each searcher
+      msta[i] = new MultiSearcherThread(searchables[i], weight, filter, nDocs, hq, i, starts, lock);
+      futures.add(searchThreadPool.submit(msta[i]));
+    }
+    processFutures(futures);
+    for (int i = 0; i < searchables.length; i++) {
+      IOException ioe = msta[i].getIOException();
+      if (ioe == null) {
+        totalHits += msta[i].hits();
+      } else {
+        // if one search produced an IOException, rethrow it
+        throw ioe;
+      }
+    }
+    ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
+    for (int i = hq.size() - 1; i >= 0; i--)
+      // put docs in array
+      scoreDocs[i] = (ScoreDoc) hq.pop();
+    float maxScore = (totalHits == 0) ? Float.NEGATIVE_INFINITY : scoreDocs[0].score;
+    return new TopDocs(totalHits, scoreDocs, maxScore);
+  }
+
+  private void processFutures(List<Future> futures) throws IOException {
+    for (Future future : futures) {
+      try {
+        future.get();
+      } catch (ExecutionException executionException) {
+        if (executionException.getCause() instanceof IOException) {
+          throw (IOException)executionException.getCause();
+        }
+        throw Util.asIOException(executionException.getCause());
+      } catch (InterruptedException interruptedException) {
+        throw Util.asIOException(interruptedException);
+      }
+    }
+  }
+
+  /**
+   * A search implementation allowing sorting which spans a new thread for each
+   * Searchable, waits for each search to complete and merges the results back
+   * together.
+   */
+  public TopFieldDocs search(Weight weight, Filter filter, int nDocs, Sort sort) throws IOException {
+    // don't specify the fields - we'll wait to do this until we get results
+    FieldDocSortedHitQueue hq = new FieldDocSortedHitQueue(null, nDocs);
+    int totalHits = 0;
+    MultiSearcherThread[] msta = new MultiSearcherThread[searchables.length];
+    ReentrantLock lock = new ReentrantLock();
+    List<Future> futures = new ArrayList<Future>(searchables.length);
+    for (int i = 0; i < searchables.length; i++) { // search each searcher
+      // Assume not too many searchables and cost of creating a thread is by far
+      // inferior to a search
+      msta[i] = new MultiSearcherThread(searchables[i], weight, filter, nDocs, hq, sort, i, starts, lock);
+      futures.add(searchThreadPool.submit(msta[i]));
+    }
+    processFutures(futures);
+    float maxScore = Float.NEGATIVE_INFINITY;
+    for (int i = 0; i < searchables.length; i++) {
+      IOException ioe = msta[i].getIOException();
+      if (ioe == null) {
+        totalHits += msta[i].hits();
+        maxScore = Math.max(maxScore, msta[i].getMaxScore());
+      } else {
+        // if one search produced an IOException, rethrow it
+        throw ioe;
+      }
+    }
+    ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
+    for (int i = hq.size() - 1; i >= 0; i--)
+      // put docs in array
+      scoreDocs[i] = (ScoreDoc) hq.pop();
+
+    return new TopFieldDocs(totalHits, scoreDocs, hq.getFields(), maxScore);
+  }
+
+  class MultiSearcherThread implements Callable {
+    private Searchable searchable;
+    private Weight weight;
+    private Filter filter;
+    private int nDocs;
+    private TopDocs docs;
+    private int i;
+    private PriorityQueue hq;
+    private int[] starts;
+    private IOException ioe;
+    private Sort sort;
+    private ReentrantLock lock;
+
+    public MultiSearcherThread(Searchable searchable, Weight weight, Filter filter, int nDocs, HitQueue hq, int i, int[] starts, ReentrantLock lock) {
+      this.searchable = searchable;
+      this.weight = weight;
+      this.filter = filter;
+      this.nDocs = nDocs;
+      this.hq = hq;
+      this.i = i;
+      this.starts = starts;
+      this.lock = lock;
+    }
+
+    public MultiSearcherThread(Searchable searchable, Weight weight, Filter filter, int nDocs, FieldDocSortedHitQueue hq, Sort sort, int i,
+        int[] starts, ReentrantLock lock) {
+      this.searchable = searchable;
+      this.weight = weight;
+      this.filter = filter;
+      this.nDocs = nDocs;
+      this.hq = hq;
+      this.i = i;
+      this.starts = starts;
+      this.sort = sort;
+      this.lock = lock;
+    }
+
+    public Object call() throws Exception {
+      try {
+        docs = (sort == null) ? searchable.search(weight, filter, nDocs) : searchable.search(weight, filter, nDocs, sort);
+      }
+      // Store the IOException for later use by the caller of this thread
+      catch (IOException ioe) {
+        this.ioe = ioe;
+      }
+      if (ioe == null) {
+        // if we are sorting by fields, we need to tell the field sorted hit
+        // queue
+        // the actual type of fields, in case the original list contained AUTO.
+        // if the searchable returns null for fields, we'll have problems.
+        if (sort != null) {
+          ((FieldDocSortedHitQueue) hq).setFields(((TopFieldDocs) docs).fields);
+        }
+        ScoreDoc[] scoreDocs = docs.scoreDocs;
+        for (int j = 0; j < scoreDocs.length; j++) { // merge scoreDocs into hq
+          ScoreDoc scoreDoc = scoreDocs[j];
+          scoreDoc.doc += starts[i]; // convert doc
+          // it would be so nice if we had a thread-safe insert
+          lock.lock();
+          try {
+            if (!hq.insert(scoreDoc))
+              break;
+          } finally {
+            lock.unlock();// no more scores > minScore
+          }
+        }
+      }
+      return null;
+    }
+
+    public int hits() {
+      return docs.totalHits;
+    }
+
+    public float getMaxScore() {
+      return docs.getMaxScore();
+    }
+
+    public IOException getIOException() {
+      return ioe;
+    }
+  }
+
+  /**
+   * Lower-level search API.
+   * 
+   * <p>
+   * {@link HitCollector#collect(int,float)} is called for every non-zero
+   * scoring document.
+   * 
+   * <p>
+   * Applications should only use this if they need <i>all</i> of the matching
+   * documents. The high-level search API ({@link Searcher#search(Query)}) is
+   * usually more efficient, as it skips non-high-scoring hits.
+   * 
+   * @param weight
+   *          to match documents
+   * @param filter
+   *          if non-null, a bitset used to eliminate some documents
+   * @param results
+   *          to receive hits
+   * 
+   */
+  public void search(Weight weight, Filter filter, final HitCollector results) throws IOException {
+    List<Future> futures = new ArrayList<Future>(searchables.length);
+    for (int i = 0; i < searchables.length; i++) {
+      final int start = starts[i];
+      HitCollectorThread hitCollectorThread = new HitCollectorThread(start, searchables[i], weight, filter, results);
+      futures.add(searchThreadPool.submit(hitCollectorThread));
+      //searchables[i].search(weight, filter, new HitCollector() {
+      //  public void collect(int doc, float score) {
+      //    results.collect(doc + start, score);
+      //  }
+      //});
+    }
+    processFutures(futures);
+  }
+
+  private class HitCollectorThread implements Callable {
+    private Searchable searchable;
+    private Weight weight;
+    private Filter filter;
+    private HitCollector hitCollector;
+    private int start;
+    
+    public HitCollectorThread(int start, Searchable searchable, Weight weight, Filter filter, HitCollector hitCollector) {
+      this.start = start;
+      this.searchable = searchable;
+      this.weight = weight;
+      this.filter = filter;
+      this.hitCollector = hitCollector;
+    }
+
+    public Object call() throws Exception {
+      searchable.search(weight, filter, new HitCollector() {
+        public void collect(int doc, float score) {
+          hitCollector.collect(doc + start, score);
+        }
+      });
+      return null;
+    }
+  }
+}
Index: ocean/src/org/apache/lucene/search/OceanMultiThreadSearcher.java
===================================================================
--- ocean/src/org/apache/lucene/search/OceanMultiThreadSearcher.java	(revision 0)
+++ ocean/src/org/apache/lucene/search/OceanMultiThreadSearcher.java	(revision 0)
@@ -0,0 +1,254 @@
+package org.apache.lucene.search;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.lucene.ocean.OceanSearcher;
+import org.apache.lucene.ocean.Snapshot;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.util.PriorityQueue;
+
+public class OceanMultiThreadSearcher extends OceanSearcher {
+  private Snapshot snapshot;
+  private ExecutorService searchThreadPool;
+  private Searchable[] searchables;
+  private int[] starts;
+
+  /**
+   * Creates a searcher over the given snapshot whose sub-searches are executed
+   * on the supplied thread pool.
+   *
+   * @param snapshot source of the per-index searchers and their doc-id starts
+   * @param searchThreadPool pool used to run one task per sub-searcher; owned
+   *          by the caller (this class never shuts it down)
+   */
+  public OceanMultiThreadSearcher(Snapshot snapshot, ExecutorService searchThreadPool) throws IOException {
+    super(snapshot);
+    searchables = snapshot.getSearchers();
+    starts = snapshot.getStarts();
+    this.snapshot = snapshot;
+    this.searchThreadPool = searchThreadPool;
+  }
+
+  /**
+   * Searches every sub-searcher in parallel and merges the top {@code nDocs}
+   * hits into a single relevance-ordered TopDocs.
+   */
+  public TopDocs search(Weight weight, Filter filter, int nDocs) throws IOException {
+    // Shared bounded priority queue; threads insert into it under 'lock'.
+    HitQueue hq = new HitQueue(nDocs);
+    int totalHits = 0;
+    MultiSearcherThread[] msta = new MultiSearcherThread[searchables.length];
+    ReentrantLock lock = new ReentrantLock();
+    List<Future> futures = new ArrayList<Future>(searchables.length);
+    for (int i = 0; i < searchables.length; i++) { // search each searcher
+      msta[i] = new MultiSearcherThread(searchables[i], weight, filter, nDocs, hq, i, starts, lock);
+      futures.add(searchThreadPool.submit(msta[i]));
+    }
+    // Wait for all tasks; execution failures surface here as IOException.
+    processFutures(futures);
+    for (int i = 0; i < searchables.length; i++) {
+      IOException ioe = msta[i].getIOException();
+      if (ioe == null) {
+        totalHits += msta[i].hits();
+      } else {
+        // if one search produced an IOException, rethrow it
+        throw ioe;
+      }
+    }
+    // Drain the queue back-to-front so scoreDocs ends up best-score-first.
+    ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
+    for (int i = hq.size() - 1; i >= 0; i--)
+      // put docs in array
+      scoreDocs[i] = (ScoreDoc) hq.pop();
+    // With zero hits there is no top doc, so maxScore is sentinel -Infinity.
+    float maxScore = (totalHits == 0) ? Float.NEGATIVE_INFINITY : scoreDocs[0].score;
+    return new TopDocs(totalHits, scoreDocs, maxScore);
+  }
+
+  /**
+   * Blocks until every submitted search task completes.
+   *
+   * @param futures futures of the tasks submitted to {@code searchThreadPool}
+   * @throws IOException the task's own IOException unwrapped from the
+   *           ExecutionException, any other task failure wrapped via
+   *           {@link Util#asIOException}, or a wrapped InterruptedException
+   *           if this thread is interrupted while waiting
+   */
+  private void processFutures(List<Future> futures) throws IOException {
+    for (Future future : futures) {
+      try {
+        future.get();
+      } catch (ExecutionException executionException) {
+        // Unwrap the task's real failure; rethrow IOExceptions unchanged.
+        if (executionException.getCause() instanceof IOException) {
+          throw (IOException)executionException.getCause();
+        }
+        throw Util.asIOException(executionException.getCause());
+      } catch (InterruptedException interruptedException) {
+        // Restore the interrupt flag so callers up the stack can still
+        // observe the interruption after we convert it to an IOException.
+        Thread.currentThread().interrupt();
+        throw Util.asIOException(interruptedException);
+      }
+    }
+  }
+
+  /**
+   * A search implementation allowing sorting which runs one pool task per
+   * Searchable, waits for each search to complete and merges the results back
+   * together into a single {@link TopFieldDocs} ordered by {@code sort}.
+   *
+   * @param weight to match documents
+   * @param filter if non-null, eliminates some documents
+   * @param nDocs maximum number of merged hits to return
+   * @param sort sort order for the merged hits
+   */
+  public TopFieldDocs search(Weight weight, Filter filter, int nDocs, Sort sort) throws IOException {
+    // don't specify the fields - we'll wait to do this until we get results
+    FieldDocSortedHitQueue hq = new FieldDocSortedHitQueue(null, nDocs);
+    int totalHits = 0;
+    MultiSearcherThread[] msta = new MultiSearcherThread[searchables.length];
+    ReentrantLock lock = new ReentrantLock();
+    List<Future> futures = new ArrayList<Future>(searchables.length);
+    for (int i = 0; i < searchables.length; i++) { // search each searcher
+      // Assume not too many searchables and cost of creating a thread is by far
+      // inferior to a search
+      msta[i] = new MultiSearcherThread(searchables[i], weight, filter, nDocs, hq, sort, i, starts, lock);
+      futures.add(searchThreadPool.submit(msta[i]));
+    }
+    // Wait for all tasks before reading any per-task state below.
+    processFutures(futures);
+    float maxScore = Float.NEGATIVE_INFINITY;
+    for (int i = 0; i < searchables.length; i++) {
+      IOException ioe = msta[i].getIOException();
+      if (ioe == null) {
+        totalHits += msta[i].hits();
+        maxScore = Math.max(maxScore, msta[i].getMaxScore());
+      } else {
+        // if one search produced an IOException, rethrow it
+        throw ioe;
+      }
+    }
+    // Drain back-to-front so scoreDocs ends up in sort order.
+    ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
+    for (int i = hq.size() - 1; i >= 0; i--)
+      // put docs in array
+      scoreDocs[i] = (ScoreDoc) hq.pop();
+
+    return new TopFieldDocs(totalHits, scoreDocs, hq.getFields(), maxScore);
+  }
+
+  /**
+   * Callable that searches one sub-searcher and merges its top docs into a
+   * priority queue shared by all tasks of the same search call. Queue inserts
+   * are serialized with the shared {@code lock} since the queue itself is not
+   * thread-safe. When {@code sort} is null a relevance search is run against a
+   * HitQueue; otherwise a sorted search is run against a FieldDocSortedHitQueue.
+   */
+  class MultiSearcherThread implements Callable {
+    private Searchable searchable;
+    private Weight weight;
+    private Filter filter;
+    private int nDocs;
+    // results of this sub-search; remains null if the search threw IOException
+    private TopDocs docs;
+    // index of this sub-searcher, used to look up its doc-id base in starts
+    private int i;
+    private PriorityQueue hq;
+    private int[] starts;
+    // IOException captured in call(); read by the coordinator after all
+    // futures complete
+    private IOException ioe;
+    private Sort sort;
+    private ReentrantLock lock;
+
+    // Relevance-ranked variant (sort stays null).
+    public MultiSearcherThread(Searchable searchable, Weight weight, Filter filter, int nDocs, HitQueue hq, int i, int[] starts, ReentrantLock lock) {
+      this.searchable = searchable;
+      this.weight = weight;
+      this.filter = filter;
+      this.nDocs = nDocs;
+      this.hq = hq;
+      this.i = i;
+      this.starts = starts;
+      this.lock = lock;
+    }
+
+    // Field-sorted variant.
+    public MultiSearcherThread(Searchable searchable, Weight weight, Filter filter, int nDocs, FieldDocSortedHitQueue hq, Sort sort, int i,
+        int[] starts, ReentrantLock lock) {
+      this.searchable = searchable;
+      this.weight = weight;
+      this.filter = filter;
+      this.nDocs = nDocs;
+      this.hq = hq;
+      this.i = i;
+      this.starts = starts;
+      this.sort = sort;
+      this.lock = lock;
+    }
+
+    // Runs on a pool thread. Only IOException is captured for the coordinator;
+    // any other exception propagates through the Future to processFutures().
+    public Object call() throws Exception {
+      try {
+        docs = (sort == null) ? searchable.search(weight, filter, nDocs) : searchable.search(weight, filter, nDocs, sort);
+      }
+      // Store the IOException for later use by the caller of this thread
+      catch (IOException ioe) {
+        this.ioe = ioe;
+      }
+      if (ioe == null) {
+        // if we are sorting by fields, we need to tell the field sorted hit
+        // queue
+        // the actual type of fields, in case the original list contained AUTO.
+        // if the searchable returns null for fields, we'll have problems.
+        // NOTE(review): setFields is called here by every task without taking
+        // 'lock' — confirm concurrent calls on the shared queue are safe.
+        if (sort != null) {
+          ((FieldDocSortedHitQueue) hq).setFields(((TopFieldDocs) docs).fields);
+        }
+        ScoreDoc[] scoreDocs = docs.scoreDocs;
+        for (int j = 0; j < scoreDocs.length; j++) { // merge scoreDocs into hq
+          ScoreDoc scoreDoc = scoreDocs[j];
+          scoreDoc.doc += starts[i]; // convert doc
+          // it would be so nice if we had a thread-safe insert
+          lock.lock();
+          try {
+            // insert() returning false means no more of our scores can beat
+            // the queue's minimum, so stop merging this sub-search's docs
+            if (!hq.insert(scoreDoc))
+              break;
+          } finally {
+            lock.unlock();
+          }
+        }
+      }
+      return null;
+    }
+
+    // Valid only when getIOException() returned null; docs is null otherwise.
+    public int hits() {
+      return docs.totalHits;
+    }
+
+    // Valid only when getIOException() returned null; docs is null otherwise.
+    public float getMaxScore() {
+      return docs.getMaxScore();
+    }
+
+    // Null if the sub-search succeeded.
+    public IOException getIOException() {
+      return ioe;
+    }
+  }
+
+  /**
+   * Lower-level search API.
+   * 
+   * <p>
+   * {@link HitCollector#collect(int,float)} is called for every non-zero
+   * scoring document, from multiple pool threads concurrently.
+   * 
+   * <p>
+   * Applications should only use this if they need <i>all</i> of the matching
+   * documents. The high-level search API ({@link Searcher#search(Query)}) is
+   * usually more efficient, as it skips non-high-scoring hits.
+   * 
+   * @param weight
+   *          to match documents
+   * @param filter
+   *          if non-null, a bitset used to eliminate some documents
+   * @param results
+   *          to receive hits; NOTE(review): must be thread-safe since all
+   *          sub-searches run in parallel — confirm with callers
+   * 
+   */
+  public void search(Weight weight, Filter filter, final HitCollector results) throws IOException {
+    // One collector task per sub-searcher, executed on the shared pool.
+    List<Future> futures = new ArrayList<Future>(searchables.length);
+    for (int i = 0; i < searchables.length; i++) {
+      // starts[i] is the doc-id base of sub-searcher i in the merged id space.
+      final int start = starts[i];
+      HitCollectorThread hitCollectorThread = new HitCollectorThread(start, searchables[i], weight, filter, results);
+      futures.add(searchThreadPool.submit(hitCollectorThread));
+      //searchables[i].search(weight, filter, new HitCollector() {
+      //  public void collect(int doc, float score) {
+      //    results.collect(doc + start, score);
+      //  }
+      //});
+    }
+    // Block until all tasks finish, rethrowing any IOException a task raised.
+    processFutures(futures);
+  }
+
+  /**
+   * Callable that searches a single sub-searcher and forwards each hit to the
+   * shared HitCollector after shifting the doc id by this sub-searcher's base,
+   * so collected ids are valid in the merged doc-id space.
+   */
+  private class HitCollectorThread implements Callable {
+    private Searchable searchable;
+    private Weight weight;
+    private Filter filter;
+    private HitCollector hitCollector;
+    // doc-id offset of this sub-searcher within the merged id space
+    private int start;
+    
+    public HitCollectorThread(int start, Searchable searchable, Weight weight, Filter filter, HitCollector hitCollector) {
+      this.start = start;
+      this.searchable = searchable;
+      this.weight = weight;
+      this.filter = filter;
+      this.hitCollector = hitCollector;
+    }
+
+    // Runs on a pool thread; IOExceptions propagate through the Future and
+    // are unwrapped by processFutures().
+    public Object call() throws Exception {
+      searchable.search(weight, filter, new HitCollector() {
+        public void collect(int doc, float score) {
+          // remap the sub-searcher-local doc id before delegating
+          hitCollector.collect(doc + start, score);
+        }
+      });
+      return null;
+    }
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestLogRecords.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestLogRecords.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestLogRecords.java	(revision 0)
@@ -0,0 +1,56 @@
+package org.apache.lucene.ocean;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestSuite;
+import junit.textui.TestRunner;
+
+import org.apache.lucene.ocean.log.LogFile;
+import org.apache.lucene.ocean.log.LogFile.Record;
+import org.apache.lucene.ocean.log.LogFile.RecordIterator;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * Writes a handful of records to a LogFile, reopens it, and iterates the
+ * records back, printing what was read.
+ *
+ * NOTE(review): relies on the hardcoded Windows path "g:\testocean" and only
+ * prints the round-tripped data — it never asserts the read records match
+ * docList/otherList. Consider a temp dir and real assertions.
+ */
+public class TestLogRecords extends LuceneTestCase {
+  public static void main(String args[]) {
+    TestRunner.run(new TestSuite(TestLogRecords.class));
+  }
+  
+  public void testLogRecords() throws IOException {
+    // Expected payloads, kept for eventual comparison with what is read back.
+    List<String> docList = new ArrayList<String>();
+    List<String> otherList = new ArrayList<String>();
+    
+    FSDirectoryMap directoryMap = new FSDirectoryMap(new File("g:\\testocean"), "log");
+    LogFile logFile = new LogFile(1l, "test", directoryMap.getLogDirectory());
+    
+    // Write 5 records, each with a "documents" and an "other" byte payload.
+    for (int x=0; x < 5; x++) {
+      String docStr = "docs "+x+" ";
+      docList.add(docStr);
+      byte[] docBytes = docStr.getBytes("UTF-8");
+      System.out.println("docs.length: "+docBytes.length);
+      String otherStr = "other "+x+" ";
+      byte[] otherBytes = otherStr.getBytes("UTF-8");
+      otherList.add(otherStr);
+      logFile.writeRecord(new Long(x), docBytes, otherBytes);
+    }
+    System.out.println("min: "+logFile.getMinId());
+    System.out.println("max: "+logFile.getMaxId());
+    // Close and reopen the same log file to exercise the read path from disk.
+    logFile.close();
+    logFile = new LogFile(1l, "test", directoryMap.getLogDirectory());
+    
+    RecordIterator iterator = logFile.getRecordIterator(null);
+    while (iterator.hasNext()) {
+      Record record = iterator.next();
+      
+      // Decode both payloads back to strings and print them for inspection.
+      byte[] docBytes = record.getStreamRecord().getDocuments().getBytes();
+      String docs = new String(docBytes, "UTF-8");
+      byte[] otherBytes = record.getStreamRecord().getOther().getBytes();
+      String other = new String(otherBytes, "UTF-8");
+      System.out.println("docs: "+docs);
+      System.out.println("other: "+other);
+    }
+    iterator.close();
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestSearch.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestSearch.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestSearch.java	(revision 0)
@@ -0,0 +1,189 @@
+package org.apache.lucene.ocean;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.GregorianCalendar;
+import java.util.List;
+import java.util.Random;
+
+import junit.framework.TestSuite;
+import junit.textui.TestRunner;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.ocean.log.TransactionLog;
+import org.apache.lucene.ocean.util.Constants;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.util.LuceneTestCase;
+
+// TODO: skipping snapshot id 2 when adding
+// TODO: on reload missing id 7
+public class TestSearch extends LuceneTestCase {
+  List<Long> deletedDocIds = new ArrayList<Long>();
+  Random random = new Random(System.currentTimeMillis());
+  int docsAdded = 0;
+  
+  /** Main for running this test case by itself via the JUnit 3 text runner. */
+  public static void main(String args[]) {
+    TestRunner.run(new TestSuite(TestSearch.class));
+  }
+
+  /**
+   * Builds a TransactionSystem backed by an on-disk directory map and a
+   * multi-threaded searcher policy. Shared with TestUpdates.
+   *
+   * NOTE(review): uses the hardcoded Windows path "g:\testocean" — the test
+   * only runs on a machine with that drive; consider a temp directory.
+   */
+  public static TransactionSystem setupTransactionSystem() throws Exception {
+    FSDirectoryMap directoryMap = new FSDirectoryMap(new File("g:\\testocean"), "log");
+    // RAMDirectoryMap directoryMap = new RAMDirectoryMap();
+    LogDirectory logDirectory = directoryMap.getLogDirectory();
+    TransactionLog transactionLog = new TransactionLog(logDirectory);
+    System.out.println("transactionLog num: " + transactionLog.getNumRecords() + " min: " + transactionLog.getMinId() + " max: "
+        + transactionLog.getMaxId());
+    SearcherPolicy searcherPolicy = new MultiThreadSearcherPolicy(2, 4, 25);
+    //SearcherPolicy searcherPolicy = new SingleThreadSearcherPolicy();
+    return new TransactionSystem(transactionLog, new SimpleAnalyzer(), directoryMap, 20, 5, 10, 0.3f, searcherPolicy);
+  }
+
+  /**
+   * End-to-end test: adds documents and runs the canned queries via
+   * doTestSearch, then deletes random documents and verifies they are gone.
+   */
+  public void testSearch() throws Exception {
+    TransactionSystem system = setupTransactionSystem();
+        
+    // Capture doTestSearch's query output in a string for inspection.
+    StringWriter sw = new StringWriter();
+    PrintWriter pw = new PrintWriter(sw, true);
+    doTestSearch(pw, true, 10, system);
+    pw.close();
+    sw.close();
+    // (was assigned twice to two locals; the unused duplicate was removed)
+    String multiFileOutput = sw.getBuffer().toString();
+    System.out.println(multiFileOutput);
+    
+    doDeleteRandomDocuments(system);
+    verifyDocumentsDeleted(system);
+    
+    //verifyNumDocs(system);
+  }
+  
+  /**
+   * Asserts the snapshot's live-document count equals documents added minus
+   * documents deleted during this test run. (Currently unused: the call in
+   * testSearch is commented out.)
+   */
+  private void verifyNumDocs(TransactionSystem system) throws IOException {
+    OceanSearcher searcher = system.getSearcher();
+    int docs = docsAdded - deletedDocIds.size();
+    int numDocs = searcher.getSnapshot().numDocs();
+    System.out.println("numDocs: "+numDocs+" should be docs: "+docs);
+    assertEquals(docs, numDocs);
+    searcher.close();
+  }
+  
+  /**
+   * For every id recorded in deletedDocIds, searches by document id and
+   * asserts zero hits, i.e. the delete actually took effect.
+   */
+  private void verifyDocumentsDeleted(TransactionSystem system) throws Exception {
+    OceanSearcher searcher = system.getSearcher();
+    for (Long docId : deletedDocIds) {
+      String encodedId = Util.longToEncoded(docId);
+      Term term = new Term(Constants.DOCUMENTID, encodedId);
+      int totalHits = searcher.search(new TermQuery(term), null, 1000).totalHits;
+      if (totalHits > 0) {
+        // Diagnostic output before failing: which index still holds the doc.
+        IndexID indexId = system.getIndexId(docId);
+        System.out.println("documentId: "+docId+" from index: "+indexId+" not deleted");
+      }
+      // JUnit convention: expected value first, actual second.
+      assertEquals(0, totalHits);
+    }
+    searcher.close();
+  }
+  
+  /**
+   * Deletes a random subset of all documents (always at least the first one,
+   * via the count == 0 clause), asserting each delete removed exactly one
+   * document, and records the deleted ids in deletedDocIds for later
+   * verification.
+   */
+  private void doDeleteRandomDocuments(TransactionSystem system) throws Exception {
+    OceanSearcher searcher = system.getSearcher();
+    ScoreDoc[] hits = searcher.search(new MatchAllDocsQuery(), null, 5000).scoreDocs;
+    int count = 0;
+    for (ScoreDoc scoreDoc : hits) {
+      Document document = searcher.doc(scoreDoc.doc);
+      String docEncoded = document.get(Constants.DOCUMENTID);
+      Long documentId = Util.longFromEncoded(docEncoded);
+      // Coin flip per doc; count == 0 guarantees at least one deletion.
+      if (random.nextBoolean() || count == 0) {
+        System.out.println("deleting: "+documentId);
+        CommitResult result = system.deleteDocument(new Term(Constants.DOCUMENTID, docEncoded));
+        int numDeleted = result.getNumDeleted();
+        if (numDeleted != 1) {
+          // Diagnostic dump before failing: where the doc lives and its state.
+          IndexID indexId = system.getIndexId(documentId);
+          String docIndexId = document.get(Constants.INDEXID);
+          System.out.println("docIndexId: "+docIndexId+" documentId: "+documentId+" from index: "+indexId+" not deleted maxDoc: "+searcher.maxDoc());
+          printDocs(new Term(Constants.DOCUMENTID, docEncoded), system);
+        }
+        assertEquals(1, numDeleted);
+        deletedDocIds.add(documentId);
+      }
+      count++;
+    }
+    System.out.println("deletedDocIds: "+deletedDocIds);
+    searcher.close();
+  }
+  
+  /**
+   * Debug helper: prints every document matching {@code term} using a fresh
+   * searcher from the system.
+   */
+  public void printDocs(Term term, TransactionSystem system) throws IOException {
+    OceanSearcher searcher = system.getSearcher();
+    ScoreDoc[] hits = searcher.search(new TermQuery(term), null, 5000).scoreDocs;
+    System.out.println("printDocs hits.length: "+hits.length);
+    // (removed an unused 'count' local that was never read or incremented)
+    for (ScoreDoc scoreDoc : hits) {
+      Document document = searcher.doc(scoreDoc.doc);
+      System.out.println(document.toString());
+    }
+    searcher.close();
+  }
+  
+  /**
+   * Optionally adds {@code rounds} copies of a fixed document set, then runs a
+   * fixed list of queries against a fresh searcher, writing hits to
+   * {@code out}.
+   *
+   * @param out destination for query/hit output
+   * @param addDocs whether to add the canned documents before searching
+   * @param rounds how many copies of the canned document set to add
+   * @param system the transaction system under test
+   */
+  private void doTestSearch(PrintWriter out, boolean addDocs, int rounds, TransactionSystem system) throws Exception {
+    Analyzer analyzer = new SimpleAnalyzer();
+
+    if (addDocs) {
+      String[] docs = { "a b c d e", "a b c d e a b c d e", "a b c d e f g h i j", "a c e", "e c a", "a c e a c e", "a c e a b c" };
+      for (int x = 0; x < rounds; x++) {
+        for (int j = 0; j < docs.length; j++) {
+          Document d = new Document();
+          d.add(new Field("contents", docs[j], Field.Store.YES, Field.Index.TOKENIZED));
+          system.addDocument(d);
+          // Tracked so verifyNumDocs can compute the expected live-doc count.
+          docsAdded++;
+        }
+      }
+    }
+
+    OceanSearcher searcher = system.getSearcher();
+
+    out.println("snapshot id: " + Util.formatSnapshotId(searcher.getSnapshot().getId()));
+
+    String[] queries = { "a b", "\"a b\"", "\"a b c\"", "a c", "\"a c\"", "\"a c e\"", "*:*" };
+    ScoreDoc[] hits = null;
+
+    QueryParser parser = new QueryParser("contents", analyzer);
+    parser.setPhraseSlop(4);
+    for (int j = 0; j < queries.length; j++) {
+      Query query = parser.parse(queries[j]);
+      out.println("Query: " + query.toString("contents"));
+
+      // DateFilter filter =
+      // new DateFilter("modified", Time(1997,0,1), Time(1998,0,1));
+      // DateFilter filter = DateFilter.Before("modified", Time(1997,00,01));
+      // System.out.println(filter);
+
+      hits = searcher.search(query, null, 1000).scoreDocs;
+
+      // Print at most the top 10 hits per query.
+      out.println(hits.length + " total results");
+      for (int i = 0; i < hits.length && i < 10; i++) {
+        Document d = searcher.doc(hits[i].doc);
+        String docEncoded = d.get(Constants.DOCUMENTID);
+        out.println(i + " " + hits[i].score
+        // + " " + DateField.stringToDate(d.get("modified"))
+            + " " + d.get("contents") + " " + Util.longFromEncoded(docEncoded));
+      }
+    }
+    searcher.close();
+    //system.close();
+  }
+
+  /**
+   * Epoch milliseconds for the given calendar date in the default time zone.
+   * NOTE(review): only year/month/day are set, so the hour/minute/second/ms
+   * fields keep the wall-clock time at which the calendar was created.
+   */
+  static long Time(int year, int month, int day) {
+    GregorianCalendar calendar = new GregorianCalendar();
+    calendar.set(year, month, day);
+    // getTimeInMillis() is the direct equivalent of getTime().getTime()
+    // without allocating an intermediate Date.
+    return calendar.getTimeInMillis();
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestUpdates.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestUpdates.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestUpdates.java	(revision 0)
@@ -0,0 +1,42 @@
+package org.apache.lucene.ocean;
+
+import junit.framework.TestSuite;
+import junit.textui.TestRunner;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.util.English;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * Add documents, delete them, verify they are deleted. Update documents verify
+ * they have been updated.
+ * 
+ */
+public class TestUpdates extends LuceneTestCase {
+  TransactionSystem transactionSystem;
+  
+  /** Main for running this test case by itself via the JUnit 3 text runner. */
+  public static void main(String args[]) {
+    TestRunner.run(new TestSuite(TestUpdates.class));
+  }
+  
+  /**
+   * Adds 200 documents and asserts the searcher's maxDoc reflects them all.
+   */
+  public void testUpdateDocuments() throws Exception {
+    transactionSystem = TestSearch.setupTransactionSystem();
+    // add documents
+    int count = 2;
+    for (int i = 0; i < 200; i++) {
+      Document d = new Document();
+      d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
+      d.add(new Field("contents", English.intToEnglish(i + 10 * count), Field.Store.NO, Field.Index.TOKENIZED));
+      transactionSystem.addDocument(d);
+      //transactionSystem.updateDocument(new Term("id", Integer.toString(i)), d);
+    }
+    OceanSearcher searcher = transactionSystem.getSearcher();
+    int maxDoc = searcher.maxDoc();
+    System.out.println("maxDoc: "+maxDoc);
+    // JUnit convention: expected value first, actual second.
+    assertEquals(200, maxDoc);
+    
+    // Release the searcher before shutting the system down, matching the
+    // other Ocean tests which close the searchers they obtain.
+    searcher.close();
+    transactionSystem.close();
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestByteBufferPool.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestByteBufferPool.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestByteBufferPool.java	(revision 0)
@@ -0,0 +1,11 @@
+package org.apache.lucene.ocean;
+
+/**
+ * Write to pool, read from pool make sure results match.
+ *
+ */
+/**
+ * Placeholder for a ByteBufferPool round-trip test: write to the pool, read
+ * back, and verify the data matches.
+ *
+ * NOTE(review): still an empty stub, and unlike the other Ocean tests it does
+ * not extend LuceneTestCase, so the JUnit 3 runner will not pick it up.
+ */
+public class TestByteBufferPool {
+  public void testByteBufferPool() {
+    
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestIndexCreator.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestIndexCreator.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestIndexCreator.java	(revision 0)
@@ -0,0 +1,73 @@
+package org.apache.lucene.ocean;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import junit.framework.TestSuite;
+import junit.textui.TestRunner;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * Feeds documents through an IndexCreator's blocking queue, triggers index
+ * creation, then searches the resulting directory and prints every hit.
+ */
+public class TestIndexCreator extends LuceneTestCase {
+
+  /** Main for running test case by itself. */
+  public static void main(String args[]) {
+    TestRunner.run(new TestSuite(TestIndexCreator.class));
+  }
+
+  public void testMain() throws Exception {
+    Directory directory = new RAMDirectory();
+    Analyzer analyzer = new WhitespaceAnalyzer();
+    ExecutorService threadPool = Executors.newFixedThreadPool(8);
+    IndexCreator indexCreator = new IndexCreator(directory, 1 * 1024 * 1024, 4, analyzer, threadPool);
+    BlockingQueue<IndexCreator.Add> queue = new ArrayBlockingQueue<IndexCreator.Add>(4000, true);
+    indexCreator.start(queue); // start threads consuming
+    // Enqueue 10 rounds of the canned document set for the consumers.
+    String[] docs = { "a b c d e", "a b c d e a b c d e", "a b c d e f g h i j", "a c e", "e c a", "a c e a c e", "a c e a b c" };
+    for (int x = 0; x < 10; x++) {
+      for (int j = 0; j < docs.length; j++) {
+        Document d = new Document();
+        d.add(new Field("contents", x+" "+docs[j], Field.Store.YES, Field.Index.TOKENIZED));
+        queue.add(new IndexCreator.Add(d));
+      }
+    }
+    // Busy-wait until the consumers have drained the queue.
+    // NOTE(review): an empty queue only means items were taken, not that they
+    // were fully indexed — confirm create() waits for in-flight work.
+    while ( queue.peek() != null) { 
+      Thread.sleep(5);
+    }
+    System.out.println("before create");
+    indexCreator.create();
+    System.out.println("after create");
+    // Search the freshly built index and dump all hits.
+    IndexReader indexReader = IndexReader.open(directory);
+    IndexSearcher searcher = new IndexSearcher(indexReader);
+    Query query = new MatchAllDocsQuery();
+    ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
+    
+    StringWriter sw = new StringWriter();
+    PrintWriter out = new PrintWriter(sw, true);
+    out.println(hits.length + " total results");
+    for (int i = 0; i < hits.length; i++) {
+      Document d = searcher.doc(hits[i].doc);
+      out.println(i + " " + hits[i].score
+      // + " " + DateField.stringToDate(d.get("modified"))
+          + " " + d.get("contents"));
+    }
+    sw.close();
+    out.close();
+    String singleFileOutput = sw.getBuffer().toString();
+    System.out.println(singleFileOutput);
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestByteBufferPool.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestByteBufferPool.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestByteBufferPool.java	(revision 0)
@@ -0,0 +1,11 @@
+package org.apache.lucene.ocean;
+
+/**
+ * Write to pool, read from pool make sure results match.
+ *
+ */
+/**
+ * Placeholder for a ByteBufferPool round-trip test: write to the pool, read
+ * back, and verify the data matches.
+ *
+ * NOTE(review): still an empty stub, and unlike the other Ocean tests it does
+ * not extend LuceneTestCase, so the JUnit 3 runner will not pick it up.
+ */
+public class TestByteBufferPool {
+  public void testByteBufferPool() {
+    
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestIndexCreator.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestIndexCreator.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestIndexCreator.java	(revision 0)
@@ -0,0 +1,73 @@
+package org.apache.lucene.ocean;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import junit.framework.TestSuite;
+import junit.textui.TestRunner;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * Feeds documents through an IndexCreator's blocking queue, triggers index
+ * creation, then searches the resulting directory and prints every hit.
+ */
+public class TestIndexCreator extends LuceneTestCase {
+
+  /** Main for running test case by itself. */
+  public static void main(String args[]) {
+    TestRunner.run(new TestSuite(TestIndexCreator.class));
+  }
+
+  public void testMain() throws Exception {
+    Directory directory = new RAMDirectory();
+    Analyzer analyzer = new WhitespaceAnalyzer();
+    ExecutorService threadPool = Executors.newFixedThreadPool(8);
+    IndexCreator indexCreator = new IndexCreator(directory, 1 * 1024 * 1024, 4, analyzer, threadPool);
+    BlockingQueue<IndexCreator.Add> queue = new ArrayBlockingQueue<IndexCreator.Add>(4000, true);
+    indexCreator.start(queue); // start threads consuming
+    // Enqueue 10 rounds of the canned document set for the consumers.
+    String[] docs = { "a b c d e", "a b c d e a b c d e", "a b c d e f g h i j", "a c e", "e c a", "a c e a c e", "a c e a b c" };
+    for (int x = 0; x < 10; x++) {
+      for (int j = 0; j < docs.length; j++) {
+        Document d = new Document();
+        d.add(new Field("contents", x+" "+docs[j], Field.Store.YES, Field.Index.TOKENIZED));
+        queue.add(new IndexCreator.Add(d));
+      }
+    }
+    // Busy-wait until the consumers have drained the queue.
+    // NOTE(review): an empty queue only means items were taken, not that they
+    // were fully indexed — confirm create() waits for in-flight work.
+    while ( queue.peek() != null) { 
+      Thread.sleep(5);
+    }
+    System.out.println("before create");
+    indexCreator.create();
+    System.out.println("after create");
+    // Search the freshly built index and dump all hits.
+    IndexReader indexReader = IndexReader.open(directory);
+    IndexSearcher searcher = new IndexSearcher(indexReader);
+    Query query = new MatchAllDocsQuery();
+    ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
+    
+    StringWriter sw = new StringWriter();
+    PrintWriter out = new PrintWriter(sw, true);
+    out.println(hits.length + " total results");
+    for (int i = 0; i < hits.length; i++) {
+      Document d = searcher.doc(hits[i].doc);
+      out.println(i + " " + hits[i].score
+      // + " " + DateField.stringToDate(d.get("modified"))
+          + " " + d.get("contents"));
+    }
+    sw.close();
+    out.close();
+    String singleFileOutput = sw.getBuffer().toString();
+    System.out.println(singleFileOutput);
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestLogRecords.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestLogRecords.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestLogRecords.java	(revision 0)
@@ -0,0 +1,56 @@
+package org.apache.lucene.ocean;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestSuite;
+import junit.textui.TestRunner;
+
+import org.apache.lucene.ocean.log.LogFile;
+import org.apache.lucene.ocean.log.LogFile.Record;
+import org.apache.lucene.ocean.log.LogFile.RecordIterator;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * Writes a handful of records to a LogFile, reopens it, and iterates the
+ * records back, printing what was read.
+ *
+ * NOTE(review): relies on the hardcoded Windows path "g:\testocean" and only
+ * prints the round-tripped data — it never asserts the read records match
+ * docList/otherList. Consider a temp dir and real assertions.
+ */
+public class TestLogRecords extends LuceneTestCase {
+  public static void main(String args[]) {
+    TestRunner.run(new TestSuite(TestLogRecords.class));
+  }
+  
+  public void testLogRecords() throws IOException {
+    // Expected payloads, kept for eventual comparison with what is read back.
+    List<String> docList = new ArrayList<String>();
+    List<String> otherList = new ArrayList<String>();
+    
+    FSDirectoryMap directoryMap = new FSDirectoryMap(new File("g:\\testocean"), "log");
+    LogFile logFile = new LogFile(1l, "test", directoryMap.getLogDirectory());
+    
+    // Write 5 records, each with a "documents" and an "other" byte payload.
+    for (int x=0; x < 5; x++) {
+      String docStr = "docs "+x+" ";
+      docList.add(docStr);
+      byte[] docBytes = docStr.getBytes("UTF-8");
+      System.out.println("docs.length: "+docBytes.length);
+      String otherStr = "other "+x+" ";
+      byte[] otherBytes = otherStr.getBytes("UTF-8");
+      otherList.add(otherStr);
+      logFile.writeRecord(new Long(x), docBytes, otherBytes);
+    }
+    System.out.println("min: "+logFile.getMinId());
+    System.out.println("max: "+logFile.getMaxId());
+    // Close and reopen the same log file to exercise the read path from disk.
+    logFile.close();
+    logFile = new LogFile(1l, "test", directoryMap.getLogDirectory());
+    
+    RecordIterator iterator = logFile.getRecordIterator(null);
+    while (iterator.hasNext()) {
+      Record record = iterator.next();
+      
+      // Decode both payloads back to strings and print them for inspection.
+      byte[] docBytes = record.getStreamRecord().getDocuments().getBytes();
+      String docs = new String(docBytes, "UTF-8");
+      byte[] otherBytes = record.getStreamRecord().getOther().getBytes();
+      String other = new String(otherBytes, "UTF-8");
+      System.out.println("docs: "+docs);
+      System.out.println("other: "+other);
+    }
+    iterator.close();
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestSearch.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestSearch.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestSearch.java	(revision 0)
@@ -0,0 +1,189 @@
+package org.apache.lucene.ocean;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.GregorianCalendar;
+import java.util.List;
+import java.util.Random;
+
+import junit.framework.TestSuite;
+import junit.textui.TestRunner;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.ocean.log.TransactionLog;
+import org.apache.lucene.ocean.util.Constants;
+import org.apache.lucene.ocean.util.Util;
+import org.apache.lucene.queryParser.QueryParser;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.util.LuceneTestCase;
+
+// TODO: skipping snapshot id 2 when adding
+// TODO: on reload missing id 7
+/**
+ * Integration test for the Ocean {@link TransactionSystem}: adds documents,
+ * runs a fixed set of queries against a searcher snapshot, deletes a random
+ * subset of the documents and verifies the deletes took effect.
+ */
+public class TestSearch extends LuceneTestCase {
+  /** Ids of documents this test has deleted, checked in verifyDocumentsDeleted. */
+  List<Long> deletedDocIds = new ArrayList<Long>();
+  Random random = new Random(System.currentTimeMillis());
+  /** Total number of documents added via doTestSearch. */
+  int docsAdded = 0;
+  
+  /** Main for running test case by itself. */
+  public static void main(String args[]) {
+    TestRunner.run(new TestSuite(TestSearch.class));
+  }
+
+  /**
+   * Builds a TransactionSystem backed by an on-disk transaction log.
+   * NOTE(review): the hard-coded Windows path makes this non-portable and the
+   * directory persists between runs; consider a fresh temp directory.
+   */
+  public static TransactionSystem setupTransactionSystem() throws Exception {
+    FSDirectoryMap directoryMap = new FSDirectoryMap(new File("g:\\testocean"), "log");
+    // RAMDirectoryMap directoryMap = new RAMDirectoryMap();
+    LogDirectory logDirectory = directoryMap.getLogDirectory();
+    TransactionLog transactionLog = new TransactionLog(logDirectory);
+    System.out.println("transactionLog num: " + transactionLog.getNumRecords() + " min: " + transactionLog.getMinId() + " max: "
+        + transactionLog.getMaxId());
+    SearcherPolicy searcherPolicy = new MultiThreadSearcherPolicy(2, 4, 25);
+    //SearcherPolicy searcherPolicy = new SingleThreadSearcherPolicy();
+    return new TransactionSystem(transactionLog, new SimpleAnalyzer(), directoryMap, 20, 5, 10, 0.3f, searcherPolicy);
+  }
+
+  public void testSearch() throws Exception {
+    TransactionSystem system = setupTransactionSystem();
+        
+    StringWriter sw = new StringWriter();
+    PrintWriter pw = new PrintWriter(sw, true);
+    doTestSearch(pw, true, 10, system);
+    pw.close();
+    sw.close();
+    // The original also read the buffer a second time into an unused
+    // "singleFileOutput" local (dead code from a copied test) — removed.
+    String multiFileOutput = sw.getBuffer().toString();
+    System.out.println(multiFileOutput);
+    
+    doDeleteRandomDocuments(system);
+    verifyDocumentsDeleted(system);
+    
+    //verifyNumDocs(system);
+  }
+  
+  /** Asserts the live doc count; currently unused (see commented-out call above). */
+  private void verifyNumDocs(TransactionSystem system) throws IOException {
+    OceanSearcher searcher = system.getSearcher();
+    int docs = docsAdded - deletedDocIds.size();
+    int numDocs = searcher.getSnapshot().numDocs();
+    System.out.println("numDocs: "+numDocs+" should be docs: "+docs);
+    assertEquals(docs, numDocs);
+    searcher.close();
+  }
+  
+  /** Searches for every deleted document id and asserts none are found. */
+  private void verifyDocumentsDeleted(TransactionSystem system) throws Exception {
+    OceanSearcher searcher = system.getSearcher();
+    for (Long docId : deletedDocIds) {
+      String encodedId = Util.longToEncoded(docId);
+      Term term = new Term(Constants.DOCUMENTID, encodedId);
+      int totalHits = searcher.search(new TermQuery(term), null, 1000).totalHits;
+      if (totalHits > 0) {
+        IndexID indexId = system.getIndexId(docId);
+        System.out.println("documentId: "+docId+" from index: "+indexId+" not deleted");
+      }
+      // JUnit convention: expected value first (the original had the
+      // arguments reversed, producing a misleading failure message).
+      assertEquals(0, totalHits);
+    }
+    searcher.close();
+  }
+  
+  /**
+   * Deletes a random subset of all indexed documents (always at least the
+   * first hit) and records their ids in {@link #deletedDocIds}.
+   */
+  private void doDeleteRandomDocuments(TransactionSystem system) throws Exception {
+    OceanSearcher searcher = system.getSearcher();
+    ScoreDoc[] hits = searcher.search(new MatchAllDocsQuery(), null, 5000).scoreDocs;
+    int count = 0;
+    for (ScoreDoc scoreDoc : hits) {
+      Document document = searcher.doc(scoreDoc.doc);
+      String docEncoded = document.get(Constants.DOCUMENTID);
+      Long documentId = Util.longFromEncoded(docEncoded);
+      // count == 0 guarantees at least one document is always deleted.
+      if (random.nextBoolean() || count == 0) {
+        System.out.println("deleting: "+documentId);
+        CommitResult result = system.deleteDocument(new Term(Constants.DOCUMENTID, docEncoded));
+        int numDeleted = result.getNumDeleted();
+        if (numDeleted != 1) {
+          IndexID indexId = system.getIndexId(documentId);
+          String docIndexId = document.get(Constants.INDEXID);
+          System.out.println("docIndexId: "+docIndexId+" documentId: "+documentId+" from index: "+indexId+" not deleted maxDoc: "+searcher.maxDoc());
+          printDocs(new Term(Constants.DOCUMENTID, docEncoded), system);
+        }
+        assertEquals(1, numDeleted);
+        deletedDocIds.add(documentId);
+      }
+      count++;
+    }
+    System.out.println("deletedDocIds: "+deletedDocIds);
+    searcher.close();
+  }
+  
+  /** Prints every document matching the given term (debug helper on failed deletes). */
+  public void printDocs(Term term, TransactionSystem system) throws IOException {
+    OceanSearcher searcher = system.getSearcher();
+    ScoreDoc[] hits = searcher.search(new TermQuery(term), null, 5000).scoreDocs;
+    System.out.println("printDocs hits.length: "+hits.length);
+    // Removed an unused local counter the original declared here.
+    for (ScoreDoc scoreDoc : hits) {
+      Document document = searcher.doc(scoreDoc.doc);
+      System.out.println(document.toString());
+    }
+    searcher.close();
+  }
+  
+  /**
+   * Optionally adds rounds * docs.length small documents, then runs a fixed
+   * set of queries and writes the hits (first 10 per query) to the writer.
+   */
+  private void doTestSearch(PrintWriter out, boolean addDocs, int rounds, TransactionSystem system) throws Exception {
+    Analyzer analyzer = new SimpleAnalyzer();
+
+    if (addDocs) {
+      String[] docs = { "a b c d e", "a b c d e a b c d e", "a b c d e f g h i j", "a c e", "e c a", "a c e a c e", "a c e a b c" };
+      for (int x = 0; x < rounds; x++) {
+        for (int j = 0; j < docs.length; j++) {
+          Document d = new Document();
+          d.add(new Field("contents", docs[j], Field.Store.YES, Field.Index.TOKENIZED));
+          system.addDocument(d);
+          docsAdded++;
+        }
+      }
+    }
+
+    OceanSearcher searcher = system.getSearcher();
+
+    out.println("snapshot id: " + Util.formatSnapshotId(searcher.getSnapshot().getId()));
+
+    String[] queries = { "a b", "\"a b\"", "\"a b c\"", "a c", "\"a c\"", "\"a c e\"", "*:*" };
+    ScoreDoc[] hits = null;
+
+    QueryParser parser = new QueryParser("contents", analyzer);
+    parser.setPhraseSlop(4);
+    for (int j = 0; j < queries.length; j++) {
+      Query query = parser.parse(queries[j]);
+      out.println("Query: " + query.toString("contents"));
+
+      // DateFilter filter =
+      // new DateFilter("modified", Time(1997,0,1), Time(1998,0,1));
+      // DateFilter filter = DateFilter.Before("modified", Time(1997,00,01));
+      // System.out.println(filter);
+
+      hits = searcher.search(query, null, 1000).scoreDocs;
+
+      out.println(hits.length + " total results");
+      for (int i = 0; i < hits.length && i < 10; i++) {
+        Document d = searcher.doc(hits[i].doc);
+        String docEncoded = d.get(Constants.DOCUMENTID);
+        out.println(i + " " + hits[i].score
+        // + " " + DateField.stringToDate(d.get("modified"))
+            + " " + d.get("contents") + " " + Util.longFromEncoded(docEncoded));
+      }
+    }
+    searcher.close();
+    //system.close();
+  }
+
+  /** Millis since epoch for the given calendar date (used by the commented-out DateFilter examples). */
+  static long Time(int year, int month, int day) {
+    GregorianCalendar calendar = new GregorianCalendar();
+    calendar.set(year, month, day);
+    return calendar.getTime().getTime();
+  }
+}
Index: ocean/test/org/apache/lucene/ocean/TestUpdates.java
===================================================================
--- ocean/test/org/apache/lucene/ocean/TestUpdates.java	(revision 0)
+++ ocean/test/org/apache/lucene/ocean/TestUpdates.java	(revision 0)
@@ -0,0 +1,42 @@
+package org.apache.lucene.ocean;
+
+import junit.framework.TestSuite;
+import junit.textui.TestRunner;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.util.English;
+import org.apache.lucene.util.LuceneTestCase;
+
+/**
+ * Add documents, delete them, verify they are deleted. Update documents verify
+ * they have been updated.
+ * 
+ */
+/**
+ * Add documents, delete them, verify they are deleted. Update documents verify
+ * they have been updated.
+ */
+public class TestUpdates extends LuceneTestCase {
+  TransactionSystem transactionSystem;
+  
+  /** Allows running this test standalone from the command line. */
+  public static void main(String args[]) {
+    TestRunner.run(new TestSuite(TestUpdates.class));
+  }
+  
+  /** Adds 200 small documents and verifies the searcher sees all of them. */
+  public void testUpdateDocuments() throws Exception {
+    transactionSystem = TestSearch.setupTransactionSystem();
+    // add documents
+    int count = 2;
+    for (int i = 0; i < 200; i++) {
+      Document d = new Document();
+      d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
+      d.add(new Field("contents", English.intToEnglish(i + 10 * count), Field.Store.NO, Field.Index.TOKENIZED));
+      transactionSystem.addDocument(d);
+      //transactionSystem.updateDocument(new Term("id", Integer.toString(i)), d);
+    }
+    OceanSearcher searcher = transactionSystem.getSearcher();
+    int maxDoc = searcher.maxDoc();
+    System.out.println("maxDoc: "+maxDoc);
+    // JUnit convention: expected value first (the original had the arguments
+    // reversed, producing a misleading failure message).
+    assertEquals(200, maxDoc);
+    
+    // Close the searcher before shutting down the system; the original
+    // leaked it (TestSearch closes every searcher it opens).
+    searcher.close();
+    transactionSystem.close();
+  }
+}
