Index: src/test/org/apache/hadoop/hbase/mapreduce/DisabledBecauseVariableSubstTooLargeExceptionTestTableIndex.java
===================================================================
--- src/test/org/apache/hadoop/hbase/mapreduce/DisabledBecauseVariableSubstTooLargeExceptionTestTableIndex.java (revision 909273)
+++ src/test/org/apache/hadoop/hbase/mapreduce/DisabledBecauseVariableSubstTooLargeExceptionTestTableIndex.java (working copy)
@@ -1,307 +0,0 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.mapreduce;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Random;
-
-import junit.framework.TestSuite;
-import junit.textui.TestRunner;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.MultiRegionTable;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MultiSearcher;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Searchable;
-import org.apache.lucene.search.Searcher;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.store.FSDirectory;
-
-/**
- * Test Map/Reduce job to build index over HBase table
- */
-public class DisabledBecauseVariableSubstTooLargeExceptionTestTableIndex extends MultiRegionTable {
- private static final Log LOG = LogFactory.getLog(DisabledBecauseVariableSubstTooLargeExceptionTestTableIndex.class);
-
- static final byte[] TABLE_NAME = Bytes.toBytes("moretest");
- static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");
- static final byte[] OUTPUT_FAMILY = Bytes.toBytes("text");
- static final String ROWKEY_NAME = "key";
- static final String INDEX_DIR = "testindex";
-
- static final Random rand = new Random();
-
- /** default constructor */
- public DisabledBecauseVariableSubstTooLargeExceptionTestTableIndex() {
- super(Bytes.toString(INPUT_FAMILY));
- desc = new HTableDescriptor(TABLE_NAME);
- desc.addFamily(new HColumnDescriptor(INPUT_FAMILY));
- desc.addFamily(new HColumnDescriptor(OUTPUT_FAMILY));
- }
-
- @Override
- public void tearDown() throws Exception {
- if (conf != null) {
- FileUtil.fullyDelete(new File(conf.get("hadoop.tmp.dir")));
- }
- super.tearDown();
- }
-
- /**
- * Test HBase map/reduce
- *
- * @throws IOException
- * @throws ClassNotFoundException
- * @throws InterruptedException
- */
- public void testTableIndex()
- throws IOException, InterruptedException, ClassNotFoundException {
- boolean printResults = false;
- if (printResults) {
- LOG.info("Print table contents before map/reduce");
- }
- scanTable(printResults);
-
- MiniMRCluster mrCluster = new MiniMRCluster(2, fs.getUri().toString(), 1);
-
- // set configuration parameter for index build
- conf.set("hbase.index.conf", createIndexConfContent());
-
- try {
- Job job = new Job(conf, "index column contents");
- // number of indexes to partition into
- job.setNumReduceTasks(1);
-
- Scan scan = new Scan();
- scan.addFamily(INPUT_FAMILY);
- // use identity map (a waste, but just as an example)
- IdentityTableMapper.initJob(Bytes.toString(TABLE_NAME), scan,
- IdentityTableMapper.class, job);
- // use IndexTableReduce to build a Lucene index
- job.setReducerClass(IndexTableReducer.class);
- job.setOutputFormatClass(IndexOutputFormat.class);
- FileOutputFormat.setOutputPath(job, new Path(INDEX_DIR));
- job.waitForCompletion(true);
- } finally {
- mrCluster.shutdown();
- }
-
- if (printResults) {
- LOG.info("Print table contents after map/reduce");
- }
- scanTable(printResults);
-
- // verify index results
- verify();
- }
-
- private String createIndexConfContent() {
- StringBuilder buffer = new StringBuilder();
- buffer.append("<configuration><column><property>" +
- "<name>hbase.column.name</name><value>" + Bytes.toString(INPUT_FAMILY) +
- "</value></property>");
- buffer.append("<property><name>hbase.column.store</name> " +
- "<value>true</value></property>");
- buffer.append("<property><name>hbase.column.index</name>" +
- "<value>true</value></property>");
- buffer.append("<property><name>hbase.column.tokenize</name>" +
- "<value>false</value></property>");
- buffer.append("<property><name>hbase.column.boost</name>" +
- "<value>3</value></property>");
- buffer.append("<property><name>hbase.column.omit.norms</name>" +
- "<value>false</value></property></column>");
- buffer.append("<property><name>hbase.index.rowkey.name</name><value>" +
- ROWKEY_NAME + "</value></property>");
- buffer.append("<property><name>hbase.index.max.buffered.docs</name>" +
- "<value>500</value></property>");
- buffer.append("<property><name>hbase.index.max.field.length</name>" +
- "<value>10000</value></property>");
- buffer.append("<property><name>hbase.index.merge.factor</name>" +
- "<value>10</value></property>");
- buffer.append("<property><name>hbase.index.use.compound.file</name>" +
- "<value>true</value></property>");
- buffer.append("<property><name>hbase.index.optimize</name>" +
- "<value>true</value></property></configuration>");
-
- IndexConfiguration c = new IndexConfiguration();
- c.addFromXML(buffer.toString());
- return c.toString();
- }
-
- private void scanTable(boolean printResults)
- throws IOException {
- HTable table = new HTable(conf, TABLE_NAME);
- Scan scan = new Scan();
- scan.addFamily(INPUT_FAMILY);
- scan.addFamily(OUTPUT_FAMILY);
- ResultScanner scanner = table.getScanner(scan);
- try {
- for (Result r : scanner) {
- if (printResults) {
- LOG.info("row: " + Bytes.toStringBinary(r.getRow()));
- }
- for (KeyValue kv : r.list()) {
- if (printResults) {
- LOG.info(" column: " + Bytes.toStringBinary(kv.getKey()) + " value: "
- + Bytes.toStringBinary(kv.getValue()));
- }
- }
- }
- } finally {
- scanner.close();
- }
- }
-
- private void verify() throws IOException {
- // Force a cache flush for every online region to ensure that when the
- // scanner takes its snapshot, all the updates have made it into the cache.
- for (HRegion r : cluster.getRegionThreads().get(0).getRegionServer().
- getOnlineRegions()) {
- HRegionIncommon region = new HRegionIncommon(r);
- region.flushcache();
- }
-
- Path localDir = new Path(getUnitTestdir(getName()), "index_" +
- Integer.toString(rand.nextInt()));
- this.fs.copyToLocalFile(new Path(INDEX_DIR), localDir);
- FileSystem localfs = FileSystem.getLocal(conf);
- FileStatus [] indexDirs = localfs.listStatus(localDir);
- Searcher searcher = null;
- ResultScanner scanner = null;
- try {
- if (indexDirs.length == 1) {
- searcher = new IndexSearcher(FSDirectory.open(new File(indexDirs[0].getPath().
- toUri())));
- } else if (indexDirs.length > 1) {
- Searchable[] searchers = new Searchable[indexDirs.length];
- for (int i = 0; i < indexDirs.length; i++) {
- searchers[i] = new IndexSearcher(FSDirectory.open(new File(indexDirs[i].getPath().
- toUri())));
- }
- searcher = new MultiSearcher(searchers);
- } else {
- throw new IOException("no index directory found");
- }
-
- HTable table = new HTable(conf, TABLE_NAME);
- Scan scan = new Scan();
- scan.addFamily(INPUT_FAMILY);
- scan.addFamily(OUTPUT_FAMILY);
- scanner = table.getScanner(scan);
-
- IndexConfiguration indexConf = new IndexConfiguration();
- String content = conf.get("hbase.index.conf");
- if (content != null) {
- indexConf.addFromXML(content);
- }
- String rowkeyName = indexConf.getRowkeyName();
-
- int count = 0;
- for (Result r : scanner) {
- String value = Bytes.toString(r.getRow());
- Term term = new Term(rowkeyName, value);
- CountCollector collector = new CountCollector();
- searcher.search(new TermQuery(term), collector);
- int hitCount = collector.getCount();
- assertEquals("check row " + value, 1, hitCount);
- count++;
- }
- LOG.debug("Searcher.maxDoc: " + searcher.maxDoc());
- LOG.debug("IndexReader.numDocs: " + ((IndexSearcher)searcher).getIndexReader().numDocs());
- int maxDoc = ((IndexSearcher)searcher).getIndexReader().numDocs();
- assertEquals("check number of rows", maxDoc, count);
- } finally {
- if (null != searcher)
- searcher.close();
- if (null != scanner)
- scanner.close();
- }
- }
-
- /**
- * Collector that retrieves the count of the documents.
- *
- * @author Kay Kay
- *
- */
- public static class CountCollector extends Collector {
-
- private int count;
-
- public CountCollector() {
- count = 0;
- }
-
- public int getCount() {
- return this.count;
- }
-
- @Override
- public boolean acceptsDocsOutOfOrder() {
- //Make this accept docs out of order as some collectors can be efficient that way.
- return true;
- }
-
- @Override
- public void collect(int doc) throws IOException {
- ++count;
- }
-
- @Override
- public void setNextReader(IndexReader reader, int docBase)
- throws IOException {
- //Do nothing
- }
-
- @Override
- public void setScorer(Scorer scorer) throws IOException {
- //Nothing to do with scorer.
- }
- }
-
- /**
- * @param args unused
- */
- public static void main(String[] args) {
- TestRunner.run(new TestSuite(DisabledBecauseVariableSubstTooLargeExceptionTestTableIndex.class));
- }
-}
\ No newline at end of file
Index: src/java/org/apache/hadoop/hbase/mapreduce/IndexConfiguration.java
===================================================================
--- src/java/org/apache/hadoop/hbase/mapreduce/IndexConfiguration.java (revision 909273)
+++ src/java/org/apache/hadoop/hbase/mapreduce/IndexConfiguration.java (working copy)
@@ -1,452 +0,0 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.mapreduce;
-
-import java.io.ByteArrayInputStream;
-import java.io.OutputStream;
-import java.io.StringWriter;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Properties;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.w3c.dom.Text;
-
-/**
- * Configuration parameters for building a Lucene index.
- */
-public class IndexConfiguration extends Configuration {
-
- private static final Log LOG = LogFactory.getLog(IndexConfiguration.class);
-
- static final String HBASE_COLUMN_NAME = "hbase.column.name";
- static final String HBASE_COLUMN_STORE = "hbase.column.store";
- static final String HBASE_COLUMN_INDEX = "hbase.column.index";
-
- /**
- * Tokenize property terminology is deprecated in lucene / replaced by analyze.
- * @see #HBASE_COLUMN_ANALYZE
- * @deprecated
- */
- static final String HBASE_COLUMN_TOKENIZE = "hbase.column.tokenize";
- static final String HBASE_COLUMN_ANALYZE = "hbase.column.analyze";
-
- static final String HBASE_COLUMN_BOOST = "hbase.column.boost";
- static final String HBASE_COLUMN_OMIT_NORMS = "hbase.column.omit.norms";
- static final String HBASE_INDEX_ROWKEY_NAME = "hbase.index.rowkey.name";
- static final String HBASE_INDEX_ANALYZER_NAME = "hbase.index.analyzer.name";
- static final String HBASE_INDEX_MAX_BUFFERED_DOCS =
- "hbase.index.max.buffered.docs";
- static final String HBASE_INDEX_MAX_BUFFERED_DELS =
- "hbase.index.max.buffered.dels";
- static final String HBASE_INDEX_MAX_FIELD_LENGTH =
- "hbase.index.max.field.length";
- static final String HBASE_INDEX_MAX_MERGE_DOCS =
- "hbase.index.max.merge.docs";
- static final String HBASE_INDEX_MERGE_FACTOR = "hbase.index.merge.factor";
- // double ramBufferSizeMB;
- static final String HBASE_INDEX_SIMILARITY_NAME =
- "hbase.index.similarity.name";
- static final String HBASE_INDEX_USE_COMPOUND_FILE =
- "hbase.index.use.compound.file";
- static final String HBASE_INDEX_OPTIMIZE = "hbase.index.optimize";
-
- public static class ColumnConf extends Properties {
-
- private static final long serialVersionUID = 7419012290580607821L;
-
- boolean getBoolean(String name, boolean defaultValue) {
- String valueString = getProperty(name);
- if ("true".equals(valueString))
- return true;
- else if ("false".equals(valueString))
- return false;
- else
- return defaultValue;
- }
-
- void setBoolean(String name, boolean value) {
- setProperty(name, Boolean.toString(value));
- }
-
- float getFloat(String name, float defaultValue) {
- String valueString = getProperty(name);
- if (valueString == null)
- return defaultValue;
- try {
- return Float.parseFloat(valueString);
- } catch (NumberFormatException e) {
- return defaultValue;
- }
- }
-
- void setFloat(String name, float value) {
- setProperty(name, Float.toString(value));
- }
- }
-
- private Map<String, ColumnConf> columnMap =
- new ConcurrentHashMap<String, ColumnConf>();
-
- public Iterator<String> columnNameIterator() {
- return columnMap.keySet().iterator();
- }
-
- public boolean isIndex(String columnName) {
- return getColumn(columnName).getBoolean(HBASE_COLUMN_INDEX, true);
- }
-
- public void setIndex(String columnName, boolean index) {
- getColumn(columnName).setBoolean(HBASE_COLUMN_INDEX, index);
- }
-
- public boolean isStore(String columnName) {
- return getColumn(columnName).getBoolean(HBASE_COLUMN_STORE, false);
- }
-
- public void setStore(String columnName, boolean store) {
- getColumn(columnName).setBoolean(HBASE_COLUMN_STORE, store);
- }
-
- /**
- * @deprecated
- * See {@link #isAnalyze(String)} for replacement.
- * @param columnName
- * @return true, if column needs to be tokenized
- */
- public boolean isTokenize(String columnName) {
- return getColumn(columnName).getBoolean(HBASE_COLUMN_TOKENIZE, true);
- }
-
- /**
- * @deprecated
- * See {@link #setAnalyze(String, boolean)} for replacement.
- * @param columnName
- * @param tokenize
- */
- public void setTokenize(String columnName, boolean tokenize) {
- getColumn(columnName).setBoolean(HBASE_COLUMN_TOKENIZE, tokenize);
- }
-
- public boolean isAnalyze(String columnName) {
- return getColumn(columnName).getBoolean(HBASE_COLUMN_ANALYZE, true);
- }
-
- public void setAnalyze(String columnName, boolean analyze) {
- getColumn(columnName).setBoolean(HBASE_COLUMN_ANALYZE, analyze);
- }
-
- public float getBoost(String columnName) {
- return getColumn(columnName).getFloat(HBASE_COLUMN_BOOST, 1.0f);
- }
-
- public void setBoost(String columnName, float boost) {
- getColumn(columnName).setFloat(HBASE_COLUMN_BOOST, boost);
- }
-
- public boolean isOmitNorms(String columnName) {
- return getColumn(columnName).getBoolean(HBASE_COLUMN_OMIT_NORMS, true);
- }
-
- public void setOmitNorms(String columnName, boolean omitNorms) {
- getColumn(columnName).setBoolean(HBASE_COLUMN_OMIT_NORMS, omitNorms);
- }
-
- private ColumnConf getColumn(String columnName) {
- ColumnConf column = columnMap.get(columnName);
- if (column == null) {
- column = new ColumnConf();
- columnMap.put(columnName, column);
- }
- return column;
- }
-
- public String getAnalyzerName() {
- return get(HBASE_INDEX_ANALYZER_NAME,
- StandardAnalyzer.class.getName());
- }
-
- public void setAnalyzerName(String analyzerName) {
- set(HBASE_INDEX_ANALYZER_NAME, analyzerName);
- }
-
- public int getMaxBufferedDeleteTerms() {
- return getInt(HBASE_INDEX_MAX_BUFFERED_DELS, 1000);
- }
-
- public void setMaxBufferedDeleteTerms(int maxBufferedDeleteTerms) {
- setInt(HBASE_INDEX_MAX_BUFFERED_DELS, maxBufferedDeleteTerms);
- }
-
- public int getMaxBufferedDocs() {
- return getInt(HBASE_INDEX_MAX_BUFFERED_DOCS, 10);
- }
-
- public void setMaxBufferedDocs(int maxBufferedDocs) {
- setInt(HBASE_INDEX_MAX_BUFFERED_DOCS, maxBufferedDocs);
- }
-
- public int getMaxFieldLength() {
- return getInt(HBASE_INDEX_MAX_FIELD_LENGTH, Integer.MAX_VALUE);
- }
-
- public void setMaxFieldLength(int maxFieldLength) {
- setInt(HBASE_INDEX_MAX_FIELD_LENGTH, maxFieldLength);
- }
-
- public int getMaxMergeDocs() {
- return getInt(HBASE_INDEX_MAX_MERGE_DOCS, Integer.MAX_VALUE);
- }
-
- public void setMaxMergeDocs(int maxMergeDocs) {
- setInt(HBASE_INDEX_MAX_MERGE_DOCS, maxMergeDocs);
- }
-
- public int getMergeFactor() {
- return getInt(HBASE_INDEX_MERGE_FACTOR, 10);
- }
-
- public void setMergeFactor(int mergeFactor) {
- setInt(HBASE_INDEX_MERGE_FACTOR, mergeFactor);
- }
-
- public String getRowkeyName() {
- return get(HBASE_INDEX_ROWKEY_NAME, "ROWKEY");
- }
-
- public void setRowkeyName(String rowkeyName) {
- set(HBASE_INDEX_ROWKEY_NAME, rowkeyName);
- }
-
- public String getSimilarityName() {
- return get(HBASE_INDEX_SIMILARITY_NAME, null);
- }
-
- public void setSimilarityName(String similarityName) {
- set(HBASE_INDEX_SIMILARITY_NAME, similarityName);
- }
-
- public boolean isUseCompoundFile() {
- return getBoolean(HBASE_INDEX_USE_COMPOUND_FILE, false);
- }
-
- public void setUseCompoundFile(boolean useCompoundFile) {
- setBoolean(HBASE_INDEX_USE_COMPOUND_FILE, useCompoundFile);
- }
-
- public boolean doOptimize() {
- return getBoolean(HBASE_INDEX_OPTIMIZE, true);
- }
-
- public void setDoOptimize(boolean doOptimize) {
- setBoolean(HBASE_INDEX_OPTIMIZE, doOptimize);
- }
-
- public void addFromXML(String content) {
- try {
- DocumentBuilder builder = DocumentBuilderFactory.newInstance()
- .newDocumentBuilder();
-
- Document doc = builder
- .parse(new ByteArrayInputStream(content.getBytes()));
-
- Element root = doc.getDocumentElement();
- if (!"configuration".equals(root.getTagName())) {
- LOG.fatal("bad conf file: top-level element not ");
- }
-
- NodeList props = root.getChildNodes();
- for (int i = 0; i < props.getLength(); i++) {
- Node propNode = props.item(i);
- if (!(propNode instanceof Element)) {
- continue;
- }
-
- Element prop = (Element) propNode;
- if ("property".equals(prop.getTagName())) {
- propertyFromXML(prop, null);
- } else if ("column".equals(prop.getTagName())) {
- columnConfFromXML(prop);
- } else {
- LOG.warn("bad conf content: element neither nor ");
- }
- }
- } catch (Exception e) {
- LOG.fatal("error parsing conf content: " + e);
- throw new RuntimeException(e);
- }
- }
-
- private void propertyFromXML(Element prop, Properties properties) {
- NodeList fields = prop.getChildNodes();
- String attr = null;
- String value = null;
-
- for (int j = 0; j < fields.getLength(); j++) {
- Node fieldNode = fields.item(j);
- if (!(fieldNode instanceof Element)) {
- continue;
- }
-
- Element field = (Element) fieldNode;
- if ("name".equals(field.getTagName())) {
- attr = ((Text) field.getFirstChild()).getData();
- }
- if ("value".equals(field.getTagName()) && field.hasChildNodes()) {
- value = ((Text) field.getFirstChild()).getData();
- }
- }
-
- if (attr != null && value != null) {
- if (properties == null) {
- set(attr, value);
- } else {
- properties.setProperty(attr, value);
- }
- }
- }
-
- private void columnConfFromXML(Element column) {
- ColumnConf columnConf = new ColumnConf();
- NodeList props = column.getChildNodes();
- for (int i = 0; i < props.getLength(); i++) {
- Node propNode = props.item(i);
- if (!(propNode instanceof Element)) {
- continue;
- }
-
- Element prop = (Element) propNode;
- if ("property".equals(prop.getTagName())) {
- propertyFromXML(prop, columnConf);
- } else {
- LOG.warn("bad conf content: element not ");
- }
- }
-
- if (columnConf.getProperty(HBASE_COLUMN_NAME) != null) {
- columnMap.put(columnConf.getProperty(HBASE_COLUMN_NAME), columnConf);
- } else {
- LOG.warn("bad column conf: name not specified");
- }
- }
-
- public void write(OutputStream out) {
- try {
- Document doc = writeDocument();
- DOMSource source = new DOMSource(doc);
- StreamResult result = new StreamResult(out);
- TransformerFactory transFactory = TransformerFactory.newInstance();
- Transformer transformer = transFactory.newTransformer();
- transformer.transform(source, result);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- private Document writeDocument() {
- Iterator<Map.Entry<String, String>> iter = iterator();
- try {
- Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
- .newDocument();
- Element conf = doc.createElement("configuration");
- doc.appendChild(conf);
- conf.appendChild(doc.createTextNode("\n"));
-
- Map.Entry<String, String> entry;
- while (iter.hasNext()) {
- entry = iter.next();
- String name = entry.getKey();
- String value = entry.getValue();
- writeProperty(doc, conf, name, value);
- }
-
- Iterator<String> columnIter = columnNameIterator();
- while (columnIter.hasNext()) {
- writeColumn(doc, conf, columnIter.next());
- }
-
- return doc;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- private void writeProperty(Document doc, Element parent, String name,
- String value) {
- Element propNode = doc.createElement("property");
- parent.appendChild(propNode);
-
- Element nameNode = doc.createElement("name");
- nameNode.appendChild(doc.createTextNode(name));
- propNode.appendChild(nameNode);
-
- Element valueNode = doc.createElement("value");
- valueNode.appendChild(doc.createTextNode(value));
- propNode.appendChild(valueNode);
-
- parent.appendChild(doc.createTextNode("\n"));
- }
-
- private void writeColumn(Document doc, Element parent, String columnName) {
- Element column = doc.createElement("column");
- parent.appendChild(column);
- column.appendChild(doc.createTextNode("\n"));
-
- ColumnConf columnConf = getColumn(columnName);
- for (Map.Entry