Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/BooleanCondition.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/BooleanCondition.java (revision 0)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/BooleanCondition.java (revision 0)
@@ -0,0 +1,120 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.UnsupportedEncodingException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * Boolean condition: truth values combined using the logical connectives
+ * (AND, OR, NOT).
+ */
+public class BooleanCondition {
+  private String expression;
+  private Map<String, List<String>> unionSet = new HashMap<String, List<String>>();
+  private Map<String, List<String>> intersectionSet = new HashMap<String, List<String>>();
+
+  /**
+   * Sets the expression.
+   * @param string boolean expression
+   */
+  public void setExpression(String string) {
+    this.expression = string;
+
+    String[] or = expression.split(Constants.LOGICAL_CONNECTOR_OR);
+    String[] and = null;
+    for (int i = 0; i < or.length; i++) {
+      if (or[i].split(Constants.LOGICAL_CONNECTOR_AND).length == 1) {
+        unionSet.putAll(getAppendTerm(unionSet, or[i]));
+      } else {
+        and = or[i].split(Constants.LOGICAL_CONNECTOR_AND);
+      }
+    }
+
+    if (and != null) {
+      for (int i = 0; i < and.length; i++) {
+        intersectionSet.putAll(getAppendTerm(intersectionSet, and[i]));
+      }
+    }
+  }
+
+  private Map<String, List<String>> getAppendTerm(
+      Map<String, List<String>> oldSet, String string) {
+    Map<String, List<String>> result = new HashMap<String, List<String>>();
+    List<String> valueList = new ArrayList<String>();
+    String[] term = string.split(" ");
+    if (oldSet.containsKey(term[0].trim())) {
+      valueList = oldSet.get(term[0].trim());
+    }
+
+    valueList.add(term[1] + " " + term[2]);
+    result.put(term[0].trim(), valueList);
+
+    return result;
+  }
+
+  public boolean checkConstraints(MapWritable data)
+      throws UnsupportedEncodingException {
+    if (data == null) {
+      return false; // return false if data is null.
+    }
+
+    Map<String, String> record = getComparisonObject(data);
+    if (record.size() == 0) {
+      return false;
+    }
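+    // The record built above holds only the columns that the expression
+    // references, so its size selects the branch below: equal to the
+    // intersection (AND) set, only the conjunctive terms are evaluated;
+    // equal to the union (OR) set, only the disjunctive terms; otherwise
+    // getCompareResult() requires both groups of terms to hold.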
+
+    boolean result = false;
+    if (intersectionSet.size() == record.size()) {
+      result = Comparator.booleanCompare(intersectionSet, record, true);
+    } else if (unionSet.size() == record.size()) {
+      result = Comparator.booleanCompare(unionSet, record, false);
+    } else {
+      result = getCompareResult(record);
+    }
+
+    return result;
+  }
+
+  private boolean getCompareResult(Map<String, String> record)
+      throws UnsupportedEncodingException {
+    return Comparator.booleanCompare(intersectionSet, record, true)
+        && Comparator.booleanCompare(unionSet, record, false);
+  }
+
+  private Map<String, String> getComparisonObject(MapWritable data) {
+    Map<String, String> result = new HashMap<String, String>();
+
+    for (Map.Entry<Writable, Writable> e : data.entrySet()) {
+      String cKey = e.getKey().toString();
+      String val = new String(((ImmutableBytesWritable) e.getValue()).get());
+
+      if (intersectionSet.containsKey(cKey) || unionSet.containsKey(cKey)) {
+        result.put(cKey, val);
+      }
+    }
+
+    return result;
+  }
+}
Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Comparator.java
===================================================================
--- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Comparator.java (revision 0)
+++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Comparator.java (revision 0)
@@ -0,0 +1,149 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.util.List; +import java.util.Map; + +/** + * Comparison class + */ +public class Comparator { + public static boolean booleanCompare(Map> expression, + Map record, boolean isIntersection) + throws UnsupportedEncodingException { + + boolean negative = true; + boolean positive = false; + + for (Map.Entry> e : expression.entrySet()) { + String key = e.getKey(); + List valueList = e.getValue(); + String recordValueList = record.get(key); + + for (int i = 0; i < valueList.size(); i++) { + String[] term = valueList.get(i).split(" "); + String comparator = term[0]; + String comparand = term[1]; + + switch (comparator.charAt(0)) { + case '>': + if (isSecond(comparator, "=")) { + if (Integer.parseInt(comparand) > Integer + .parseInt(recordValueList)) { + negative = false; + } else { + positive = true; + } + } else { + if (Integer.parseInt(comparand) > Integer + .parseInt(recordValueList) + || comparand.equals(recordValueList)) { + negative = false; + } else { + positive = true; + } + } + break; + case '<': + if (isSecond(comparator, "=")) { + if (Integer.parseInt(comparand) < Integer + .parseInt(recordValueList)) + negative = false; + else + positive = true; + } else { + if (Integer.parseInt(comparand) < Integer + .parseInt(recordValueList) + || comparand.equals(recordValueList)) + negative = false; + else + positive = true; + } + break; + + case '!': + if (isSecond(comparator, "!")) { + boolean checkBool = true; + String[] coms = comparand.split("[|]"); + for (int j = 0; j < coms.length; j++) { + if (URLDecoder.decode(coms[j], "UTF-8").equals(recordValueList)) { + checkBool = false; + } + } + + if (!checkBool) { + negative = false; + } else { + positive = true; + } + + } else { + if (comparand.equals(recordValueList)) + negative = false; + else + positive = true; + } + break; + case '=': + if (isSecond(comparator, "=")) { + + boolean checkBool = true; + String[] coms = comparand.split("[|]"); + for (int j = 0; j < coms.length; j++) { + if (URLDecoder.decode(coms[j], "UTF-8").equals(recordValueList)) { + checkBool = false; + } + } + + if (checkBool) { + negative = false; + } else { + positive = true; + } + + } else { + if (!comparand.equals(recordValueList)) + negative = false; + else + positive = true; + } + break; + } + } + } + + boolean result = false; + if (isIntersection) { + result = negative; + } else { + result = positive; + } + + return result; + } + + private static boolean isSecond(String comparator, String string) { + return (comparator.length() == 2 && string.charAt(0) == comparator + .charAt(1)) ? true : false; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Constants.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Constants.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Constants.java (revision 0) @@ -0,0 +1,42 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +/** + * List of access control algebraic operations constants. + */ +public class Constants { + public static final String CONFIG_OUTPUT_TABLE = "job.config.output.table"; + public static final String SUBSTITUTE_TABLE = "table"; + public static final String EXPRESSION_FILTER_LIST = "expression.filter.list"; + public static final String EXPRESSION_IN = "=="; + public static final String EXPRESSION_NOT_IN = "!!"; + + public static final String RELATIONAL_PROJECTION = "projection"; + public static final String RELATIONAL_SELECTION = "selection"; + public static final String RELATIONAL_GROUP = "group"; + public static final String RELATIONAL_JOIN = "join"; + public static final String RELATIONAL_JOIN_SECOND_RELATION = "second.relation"; + public static final String RELATIONAL_JOIN_KEY = ".ROW"; + public static final String CONDITION_SEPERATOR = " BOOL "; + + public static final String LOGICAL_CONNECTOR_AND = " AND "; + public static final String LOGICAL_CONNECTOR_OR = " OR "; +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/DuplicateTable.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/DuplicateTable.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/DuplicateTable.java (revision 0) @@ -0,0 +1,95 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseAdmin; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConnection; +import org.apache.hadoop.hbase.HConnectionManager; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.mapred.IdentityTableMap; +import org.apache.hadoop.hbase.mapred.IdentityTableReduce; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.ClusterStatus; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; + +/** + * Duplicates Table. 
R1 to R3 in O(N) + */ +public class DuplicateTable extends RelationalOperation { + public JobConf getConf(Configuration conf, String input, String output, + Map statements) { + + JobConf jobConf = new JobConf(conf, DuplicateTable.class); + HTableDescriptor desc = new HTableDescriptor(output); + String tableColumns = ""; + + try { + HConnection conn = HConnectionManager.getConnection(conf); + HBaseAdmin admin = new HBaseAdmin(conf); + + HTableDescriptor[] tables = conn.listTables(); + HColumnDescriptor[] columns = null; + for (int i = 0; i < tables.length; i++) { + if (tables[i].getName().equals(new Text(input))) { + columns = tables[i].getFamilies().values().toArray( + new HColumnDescriptor[] {}); + break; + } + } + + if (conn.tableExists(new Text(output))) { + jobConf.setBoolean(Constants.CONFIG_OUTPUT_TABLE, false); + return jobConf; + } else { + for (int i = 0; i < columns.length; i++) { + desc.addFamily(columns[i]); + tableColumns += columns[i].getName() + " "; + } + } + + admin.createTable(desc); + jobConf.setBoolean(Constants.CONFIG_OUTPUT_TABLE, true); + } catch (IOException e) { + e.printStackTrace(); + } + + IdentityTableMap.initJob(input, tableColumns, IdentityTableMap.class, + jobConf); + IdentityTableReduce.initJob(output, IdentityTableReduce.class, jobConf); + + try { + JobClient jobClient = new JobClient(jobConf); + + ClusterStatus cluster = jobClient.getClusterStatus(); + jobConf.setNumMapTasks(cluster.getMapTasks()); + jobConf.setNumReduceTasks(1); + + } catch (IOException e) { + e.printStackTrace(); + } + return jobConf; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/GroupingFilterMap.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/GroupingFilterMap.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/GroupingFilterMap.java (revision 0) @@ -0,0 +1,143 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.Map; + +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HStoreKey; +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.mapred.TableMap; +import org.apache.hadoop.hbase.mapred.TableOutputCollector; +import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.Reporter; + +/** + * Extract grouping columns from filtered records. 
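+ * Rows whose values satisfy the configured boolean expression are collected
+ * under a key built by concatenating the values of the configured group
+ * columns. A call along these lines (table, column, and expression literals
+ * are illustrative only) wires the map phase into a job:
+ *
+ *   GroupingFilterMap.initJob("movieLog_table", "year: rate:", "year:",
+ *       "rate: > 5", GroupingFilterMap.class, jobConf);
+ *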
+ */ +public class GroupingFilterMap extends TableMap { + BooleanCondition booleanCondition = new BooleanCondition(); + public static final String EXPRESSION = "shell.mapred.filtertablemap.exps"; + public static final String GROUP_COLUMNS = "shell.mapred.filtertablemap.columns"; + private Text[] m_columns; + + /** default constructor */ + public GroupingFilterMap() { + super(); + } + + /** + * Use this before submitting a TableMap job. It will appropriately set up the + * JobConf. + * + * @param table table to be processed + * @param columns space separated list of columns to fetch + * @param groupColumns space separated list of columns used to form the key + * used in collect + * @param expression boolean expression + * @param mapper map class + * @param job job configuration object + */ + public static void initJob(String table, String columns, String groupColumns, + String expression, Class mapper, JobConf job) { + + initJob(table, columns, mapper, job); + job.set(GROUP_COLUMNS, groupColumns); + job.set(EXPRESSION, expression); + } + + /** {@inheritDoc} */ + @Override + public void configure(JobConf job) { + super.configure(job); + String[] cols = job.get(GROUP_COLUMNS, "").split(" "); + m_columns = new Text[cols.length]; + for (int i = 0; i < cols.length; i++) { + m_columns[i] = new Text(cols[i]); + } + booleanCondition.setExpression(job.get(EXPRESSION, "")); + } + + public void map(@SuppressWarnings("unused") + HStoreKey key, MapWritable value, TableOutputCollector output, + @SuppressWarnings("unused") + Reporter reporter) throws IOException { + byte[][] keyVals = extractKeyValues(value); + if (keyVals != null) { + Text tKey = createGroupKey(keyVals); + + if (booleanCondition.checkConstraints(value)) { + output.collect(tKey, value); + } + } + } + + protected byte[][] extractKeyValues(MapWritable r) { + byte[][] keyVals = null; + ArrayList foundList = new ArrayList(); + int numCols = m_columns.length; + if (numCols > 0) { + for (Map.Entry e : r.entrySet()) { + Text column = (Text) e.getKey(); + for (int i = 0; i < numCols; i++) { + if (column.equals(m_columns[i])) { + foundList.add(((ImmutableBytesWritable) e.getValue()).get()); + break; + } + } + } + if (foundList.size() == numCols) { + keyVals = foundList.toArray(new byte[numCols][]); + } + } + return keyVals; + } + + /** + * Create a key by concatenating multiple column values. Override this + * function in order to produce different types of keys. + * + * @param vals + * @return key generated by concatenating multiple column values + */ + protected Text createGroupKey(byte[][] vals) { + if (vals == null) { + return null; + } + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < vals.length; i++) { + if (i > 0) { + sb.append(" "); + } + try { + sb.append(new String(vals[i], HConstants.UTF8_ENCODING)); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); + } + } + return new Text(sb.toString()); + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IdentityFilterMap.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IdentityFilterMap.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IdentityFilterMap.java (revision 0) @@ -0,0 +1,71 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; + +import org.apache.hadoop.hbase.HStoreKey; +import org.apache.hadoop.hbase.mapred.TableMap; +import org.apache.hadoop.hbase.mapred.TableOutputCollector; +import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.Reporter; + +/** + * Extract filtered records. + */ +public class IdentityFilterMap extends TableMap { + BooleanCondition booleanCondition = new BooleanCondition(); + public static final String EXPRESSION = "shell.mapred.filtertablemap.exps"; + + /** Default Constructor. */ + public IdentityFilterMap() { + super(); + } + + @SuppressWarnings("deprecation") + public static void initJob(String table, String columns, String expression, + Class mapper, JobConf job) { + initJob(table, columns, mapper, job); + job.set(EXPRESSION, expression); + } + + /* + * (non-Javadoc) + * + * @see org.apache.hadoop.hbase.mapred.TableMap#configure(org.apache.hadoop.mapred.JobConf) + */ + public void configure(JobConf job) { + super.configure(job); + booleanCondition.setExpression(job.get(EXPRESSION, "")); + } + + /** + * Filter the value for each specified column family. + */ + public void map(HStoreKey key, MapWritable value, + TableOutputCollector output, Reporter reporter) throws IOException { + Text tKey = key.getRow(); + if (booleanCondition.checkConstraints(value)) { + output.collect(tKey, value); + } + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoin.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoin.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoin.java (revision 0) @@ -0,0 +1,104 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseAdmin; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConnection; +import org.apache.hadoop.hbase.HConnectionManager; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.ClusterStatus; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; + +/** + * Perform a index join using MapReduce. + */ +public class IndexJoin extends RelationalOperation { + public JobConf getConf(Configuration conf, String input, String output, + Map statements) { + JobConf jobConf = new JobConf(conf, Selection.class); + jobConf.setJobName("shell.mapred.join-" + +System.currentTimeMillis()); + String secondRelation = statements + .get(Constants.RELATIONAL_JOIN_SECOND_RELATION); + + try { + HConnection conn = HConnectionManager.getConnection(conf); + HBaseAdmin admin = new HBaseAdmin(conf); + HTableDescriptor desc = new HTableDescriptor(output); + + HTableDescriptor[] tables = conn.listTables(); + HColumnDescriptor[] firstColumns = null; + HColumnDescriptor[] secondColumns = null; + for (int i = 0; i < tables.length; i++) { + if (tables[i].getName().equals(new Text(input))) { + firstColumns = tables[i].getFamilies().values().toArray( + new HColumnDescriptor[] {}); + } else if (tables[i].getName().equals(new Text(secondRelation))) { + secondColumns = tables[i].getFamilies().values().toArray( + new HColumnDescriptor[] {}); + } + } + + String firstColumnsStr = ""; + String secondColumnsStr = ""; + + if (conn.tableExists(new Text(output))) { + jobConf.setBoolean(Constants.CONFIG_OUTPUT_TABLE, false); + return jobConf; + } else { + for (int i = 0; i < firstColumns.length; i++) { + desc.addFamily(firstColumns[i]); + firstColumnsStr += firstColumns[i].getName() + " "; + } + + for (int i = 0; i < secondColumns.length; i++) { + desc.addFamily(secondColumns[i]); + secondColumnsStr += secondColumns[i].getName() + " "; + } + } + + admin.createTable(desc); // create output table. + + jobConf.setBoolean(Constants.CONFIG_OUTPUT_TABLE, true); + + IndexJoinMap.initJob(input, secondRelation, firstColumnsStr, + secondColumnsStr, statements.get(Constants.RELATIONAL_JOIN), + IndexJoinMap.class, jobConf); + IndexJoinReduce.initJob(output, IndexJoinReduce.class, jobConf); + + JobClient jobClient = new JobClient(jobConf); + + ClusterStatus cluster = jobClient.getClusterStatus(); + jobConf.setNumMapTasks(cluster.getMapTasks()); + jobConf.setNumReduceTasks(1); + + } catch (IOException e) { + e.printStackTrace(); + } + + return jobConf; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinMap.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinMap.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinMap.java (revision 0) @@ -0,0 +1,100 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; + +import org.apache.hadoop.hbase.HStoreKey; +import org.apache.hadoop.hbase.mapred.TableMap; +import org.apache.hadoop.hbase.mapred.TableOutputCollector; +import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.Reporter; + +/** + * An index join exploits the existence of an row index for one of the relations + * used in the join to find matching rows more quickly. + * + * Index join (using R2 row index) takes time O(i+m)/map function number. + */ +public class IndexJoinMap extends TableMap { + JoinCondition joinCondition = new JoinCondition(); + private String joinExpression; + private String secondRelation; + public static final String JOIN_EXPRESSION = "shell.mapred.join.expression"; + public static final String SECOND_RELATION = "shell.mapred.join.second.relation"; + public static final String FIRST_COLUMNS = "shell.mapred.first.columns"; + private Text[] first_columns; + + /** constructor */ + public IndexJoinMap() { + super(); + } + + /** + * @param firstRelation R1 + * @param secondRelation R2 + * @param firstColumns (A 1,A 2,...,A n) + * @param secondColumns (B~1~,B~2~,...,B~m~) + * @param joinExpression join condition expression + * @param mapper mapper class + * @param job jobConf + */ + public static void initJob(String firstRelation, String secondRelation, + String firstColumns, String secondColumns, String joinExpression, + Class mapper, JobConf job) { + + initJob(firstRelation, firstColumns, mapper, job); + job.set(JOIN_EXPRESSION, joinExpression); + job.set(SECOND_RELATION, secondRelation); + job.set(FIRST_COLUMNS, firstColumns); + } + + /** {@inheritDoc} */ + @Override + public void configure(JobConf job) { + super.configure(job); + joinExpression = job.get(JOIN_EXPRESSION, ""); + secondRelation = job.get(SECOND_RELATION, ""); + String[] cols = job.get(FIRST_COLUMNS, "").split(" "); + first_columns = new Text[cols.length]; + for (int i = 0; i < cols.length; i++) { + first_columns[i] = new Text(cols[i]); + } + + joinCondition.setExpression(joinExpression); + } + + @Override + public void map(HStoreKey key, MapWritable value, + TableOutputCollector output, Reporter reporter) throws IOException { + Text tKey = key.getRow(); + MapWritable appendValue = joinCondition.getJoinColumns(value, + first_columns.length, secondRelation); + + if (appendValue.size() != 0) { + value.putAll(appendValue); + if (joinCondition.checkConstraints(value)) { + output.collect(tKey, value); + } + } + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinReduce.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinReduce.java (revision 0) +++ 
src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinReduce.java (revision 0) @@ -0,0 +1,49 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.util.Iterator; + +import org.apache.hadoop.hbase.mapred.TableOutputCollector; +import org.apache.hadoop.hbase.mapred.TableReduce; +import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.Reporter; + +/** + * Table join Reduce class + */ +public class IndexJoinReduce extends TableReduce { + /** constructor */ + public IndexJoinReduce() { + super(); + } + + @Override + public void reduce(Text key, @SuppressWarnings("unchecked") + Iterator values, TableOutputCollector output, Reporter reporter) + throws IOException { + while (values.hasNext()) { + MapWritable r = (MapWritable) values.next(); + output.collect(key, r); + } + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/JoinCondition.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/JoinCondition.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/JoinCondition.java (revision 0) @@ -0,0 +1,168 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.Map; +import java.util.SortedMap; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HTable; +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.Writable; + +public class JoinCondition { + Configuration conf = new HBaseConfiguration(); + BooleanCondition booleanExpression = new BooleanCondition(); + private String joinKeyColumn; + private String boolExpression; + private String secondRelation; + + public void setExpression(String expression) { + String[] exps = expression + .split(Constants.CONDITION_SEPERATOR); + String joinExpression = exps[0]; + if (exps.length > 1) { + boolExpression = exps[1]; + } + + joinKeyColumn = joinExpression.split(" = ")[0].substring(joinExpression + .split(" = ")[1].indexOf(".") + 1, joinExpression.split(" = ")[0] + .length()); + secondRelation = joinExpression.split(" = ")[1].substring(0, joinExpression + .split(" = ")[1].indexOf(".")); + } + + public MapWritable getJoinColumns(MapWritable value, int numCols, + String secondRelation) { + MapWritable appendValue = new MapWritable(); + String joinKey = null; + + if (numCols > 0) { + for (Map.Entry e : value.entrySet()) { + Text column = (Text) e.getKey(); + for (int i = 0; i < numCols; i++) { + if (column.equals(new Text(joinKeyColumn))) { + joinKey = new String(((ImmutableBytesWritable) e.getValue()).get()); + break; + } + } + } + } + + SortedMap secondValue = null; + try { + HTable table = new HTable(conf, new Text(secondRelation)); + secondValue = table.getRow(new Text(joinKey)); + for (Map.Entry e : secondValue.entrySet()) { + appendValue.put(e.getKey(), new ImmutableBytesWritable(e.getValue())); + } + } catch (IOException e) { + e.printStackTrace(); + } + + if (boolExpression != null) { + booleanExpression.setExpression(extractBooleanCondition(secondValue)); + } + + return appendValue; + } + + private String extractBooleanCondition(SortedMap secondValue) { + String andStr = ""; + String orStr = ""; + + String[] or = boolExpression.split(Constants.LOGICAL_CONNECTOR_OR); + String[] and = null; + for (int i = 0; i < or.length; i++) { + if (or[i].split(Constants.LOGICAL_CONNECTOR_AND).length == 1) { + String[] parse = or[i].split(" "); + if (!orStr.equals("")) { + orStr += Constants.LOGICAL_CONNECTOR_OR; + } + if (parse[0].trim().indexOf(".") > 0 && parse[2].indexOf(".") > 0) { + if (parse[2].startsWith(secondRelation)) { + orStr += resetExpression(parse[0].trim(), parse[1], secondValue + .get(new Text(getColumnName(parse[2])))); + } else { + orStr += resetExpression(parse[2], parse[1], secondValue + .get(new Text(getColumnName(parse[0].trim())))); + } + } else { + orStr += resetExpression(or[i], null, null); + } + } else { + and = new String[or[i] + .split(Constants.LOGICAL_CONNECTOR_AND).length]; + and = or[i].split(Constants.LOGICAL_CONNECTOR_AND); + } + } + + if (and != null) { + for (int j = 0; j < and.length; j++) { + String[] parse = and[j].split(" "); + if (!andStr.equals("")) { + andStr += Constants.LOGICAL_CONNECTOR_AND; + } + + if (parse[0].trim().indexOf(".") > 0 && parse[2].indexOf(".") > 0) { + if (parse[2].startsWith(secondRelation)) { + resetExpression(parse[0].trim(), parse[1], secondValue + .get(new Text(getColumnName(parse[2])))); + } else { + andStr += 
resetExpression(parse[2], parse[1], secondValue + .get(new Text(getColumnName(parse[0].trim())))); + } + } else { + andStr += resetExpression(and[j], null, null); + } + } + } + + if (andStr.equals("") && !orStr.equals("")) { + return orStr; + } else if (!andStr.equals("") && orStr.equals("")) { + return andStr; + } else { + return orStr + Constants.LOGICAL_CONNECTOR_OR + andStr; + } + } + + private String resetExpression(String string, String comparator, + byte[] comparand) { + String result = string.substring(string.indexOf(".") + 1, string.length()); + return (comparator == null && comparand == null) ? result : result + " " + + comparator + " " + new String(comparand); + } + + private String getColumnName(String string) { + return string.substring(string.indexOf("."), string.length()); + } + + public boolean checkConstraints(MapWritable value) + throws UnsupportedEncodingException { + return (boolExpression == null) ? true : booleanExpression + .checkConstraints(value); + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Operation.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Operation.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Operation.java (revision 0) @@ -0,0 +1,35 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.mapred.JobConf; + +/** + * A factory class that provides configuration of operation information. + */ +public interface Operation { + JobConf getConf(Configuration conf, String input, String output, + Map statements); + + RelationalOperation getRelationalOperation(); +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationFactory.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationFactory.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationFactory.java (revision 0) @@ -0,0 +1,24 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +public interface OperationFactory { + Operation getOperation(); +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationManager.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationManager.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationManager.java (revision 0) @@ -0,0 +1,85 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.util.HashMap; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.mapred.JobConf; + +public class OperationManager { + private Configuration conf; + private String input; + private String output; + Map statements = new HashMap(); + HashMap operations = new HashMap(); + + /** Constructor */ + public OperationManager(Configuration conf, String statement, String output) { + + this.conf = conf; + this.output = output; + String chainedIndex = statement; + while (chainedIndex != null) { + for (Map.Entry entry : VariablesPool.get( + chainedIndex).entrySet()) { + if (entry.getKey() == null) { + this.input = entry.getValue().getArgument(); + } else { + statements.put(entry.getValue().getOperation(), entry.getValue() + .getArgument()); + } + chainedIndex = entry.getKey(); + } + } + } + + public JobConf getJobConf() { + return getOperation().getConf(conf, input, output, statements); + } + + /** + * Returns the job configuration object for statements type + * @return ConfigurationFactory + */ + private Operation getOperation() { + OperationFactory factory; + if (statements.containsKey(Constants.RELATIONAL_SELECTION)) { + factory = (OperationFactory) new Selection(); + } else if (statements.containsKey(Constants.RELATIONAL_PROJECTION)) { + factory = (OperationFactory) new Projection(); + } else if (statements.containsKey(Constants.RELATIONAL_JOIN)) { + factory = (OperationFactory) new IndexJoin(); + } else { + factory = (OperationFactory) new DuplicateTable(); + } + + return factory.getOperation(); + } + + public void setOutput(String output) { + this.output = output; + } + + public Map getStatements() { + return statements; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Projection.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Projection.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Projection.java (revision 0) @@ -0,0 +1,91 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseAdmin; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConnection; +import org.apache.hadoop.hbase.HConnectionManager; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.mapred.GroupingTableMap; +import org.apache.hadoop.hbase.mapred.IdentityTableMap; +import org.apache.hadoop.hbase.mapred.IdentityTableReduce; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.ClusterStatus; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; + +/** + * Perform a relational projection using MapReduce. + */ +public class Projection extends RelationalOperation { + public JobConf getConf(Configuration conf, String input, String output, + Map statements) { + JobConf jobConf = new JobConf(conf, Projection.class); + jobConf.setJobName("shell.mapred.proj-" + System.currentTimeMillis()); + + try { + HConnection conn = HConnectionManager.getConnection(conf); + if (conn.tableExists(new Text(output))) { + jobConf.setBoolean(Constants.CONFIG_OUTPUT_TABLE, false); + return jobConf; + } else { + HTableDescriptor desc = new HTableDescriptor(output); + String columns = statements + .get(Constants.RELATIONAL_PROJECTION); + String[] cols = columns.split(" "); + for (int i = 0; i < cols.length; i++) { + desc.addFamily(new HColumnDescriptor(cols[i])); + } + + // create output table. + HBaseAdmin admin = new HBaseAdmin(conf); + admin.createTable(desc); + jobConf.setBoolean(Constants.CONFIG_OUTPUT_TABLE, true); + } + + if (statements.containsKey(Constants.RELATIONAL_GROUP)) { + GroupingTableMap.initJob(input, statements + .get(Constants.RELATIONAL_PROJECTION), statements + .get(Constants.RELATIONAL_GROUP), GroupingTableMap.class, + jobConf); + } else { + IdentityTableMap.initJob(input, statements + .get(Constants.RELATIONAL_PROJECTION), + IdentityTableMap.class, jobConf); + } + + IdentityTableReduce.initJob(output, IdentityTableReduce.class, jobConf); + JobClient jobClient = new JobClient(jobConf); + ClusterStatus cluster = jobClient.getClusterStatus(); + jobConf.setNumMapTasks(cluster.getMapTasks()); + jobConf.setNumReduceTasks(1); + + } catch (IOException e) { + e.printStackTrace(); + } + + return jobConf; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/RelationalOperation.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/RelationalOperation.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/RelationalOperation.java (revision 0) @@ -0,0 +1,30 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +public abstract class RelationalOperation implements Operation, OperationFactory { + public RelationalOperation getRelationalOperation() { + return this; + } + + public Operation getOperation() { + return this; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/SaveCommand.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/SaveCommand.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/SaveCommand.java (revision 0) @@ -0,0 +1,133 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.File; +import java.io.FilenameFilter; +import java.io.IOException; +import java.io.Writer; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.shell.BasicCommand; +import org.apache.hadoop.hbase.shell.ReturnMsg; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.RunningJob; + +/** + * Save results to specified table. + */ +public class SaveCommand extends BasicCommand { + private String statement; + private String output; + + public SaveCommand(Writer o) { + super(o); + } + + public ReturnMsg execute(Configuration conf) { + try { + OperationManager operation = new OperationManager(conf, statement, output); + JobConf jobConf = operation.getJobConf(); + + // setJar() method for hbase/mapred classes. + File jarFiles = new File("build"); + int i; + String[] ls; + FilenameFilter filter = new JarFilter(); + for (ls = jarFiles.list(filter), i = 0; ls != null && i < ls.length; jobConf + .setJar("build/" + ls[i++])) + ; + + boolean tableCreate = jobConf.getBoolean( + Constants.CONFIG_OUTPUT_TABLE, false); + + if (tableCreate) { + if (submitJob(jobConf)) { + return new ReturnMsg(0, "Successfully complete."); + } else { + // Should i delete output table? + return new ReturnMsg(0, "Job failed."); + } + } else { + return new ReturnMsg(0, "'" + output + "' table already exist."); + } + + } catch (IOException e) { + return new ReturnMsg(0, e.toString()); + } + } + + /** + * Submit a job to job tracker. 
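+   * Polls the running job roughly every five seconds until it completes; if
+   * the job has not finished successfully, it is killed before the JobClient
+   * is closed.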
+ * + * @param job + * @return result + * @throws IOException + */ + public boolean submitJob(JobConf job) throws IOException { + JobClient jc = new JobClient(job); + boolean success = true; + RunningJob running = null; + try { + running = jc.submitJob(job); + String jobId = running.getJobID(); + print("Job " + jobId + " is still running."); + + while (!running.isComplete()) { + print("."); + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + } + running = jc.getJob(jobId); + } + success = running.isSuccessful(); + } finally { + if (!success && (running != null)) { + running.killJob(); + } + jc.close(); + println(""); + } + return success; + } + + public void setOutput(String output) { + this.output = output; + } + + public void setStatement(String statement) { + this.statement = statement; + } + + @Override + public CommandType getCommandType() { + return CommandType.SHELL; + } + + class JarFilter implements FilenameFilter { + + public boolean accept(File dir, String name) { + return (name.endsWith("hbase.jar")); + } + + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Selection.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Selection.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Selection.java (revision 0) @@ -0,0 +1,103 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseAdmin; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConnection; +import org.apache.hadoop.hbase.HConnectionManager; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.mapred.IdentityTableReduce; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.ClusterStatus; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; + +/** + * Perform a relational selection using MapReduce. 
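+ * Rows are filtered by IdentityFilterMap, or by GroupingFilterMap when a
+ * grouping clause is present, using the stored selection expression; rows
+ * that pass are written to the output table by IdentityTableReduce.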
+ */ +public class Selection extends RelationalOperation { + public JobConf getConf(Configuration conf, String input, String output, + Map statements) { + JobConf jobConf = new JobConf(conf, Selection.class); + jobConf.setJobName("shell.mapred.select-" + +System.currentTimeMillis()); + + try { + HConnection conn = HConnectionManager.getConnection(conf); + HBaseAdmin admin = new HBaseAdmin(conf); + HTableDescriptor desc = new HTableDescriptor(output); + String groupColumns = ""; + + if (statements.containsKey(Constants.RELATIONAL_PROJECTION)) { + groupColumns = statements.get(Constants.RELATIONAL_PROJECTION); + } else { + + HTableDescriptor[] tables = conn.listTables(); + HColumnDescriptor[] columns = null; + for (int i = 0; i < tables.length; i++) { + if (tables[i].getName().equals(new Text(input))) { + columns = tables[i].getFamilies().values().toArray( + new HColumnDescriptor[] {}); + break; + } + } + if (conn.tableExists(new Text(output))) { + jobConf.setBoolean(Constants.CONFIG_OUTPUT_TABLE, false); + return jobConf; + } else { + for (int i = 0; i < columns.length; i++) { + desc.addFamily(columns[i]); + groupColumns += columns[i].getName() + " "; + } + } + admin.createTable(desc); // create output table. + jobConf.setBoolean(Constants.CONFIG_OUTPUT_TABLE, true); + } + + if (statements.containsKey(Constants.RELATIONAL_GROUP)) { + GroupingFilterMap.initJob(input, groupColumns, statements + .get(Constants.RELATIONAL_GROUP), statements + .get(Constants.RELATIONAL_SELECTION), GroupingFilterMap.class, + jobConf); + } else { + IdentityFilterMap.initJob(input, groupColumns, statements + .get(Constants.RELATIONAL_SELECTION), IdentityFilterMap.class, + jobConf); + } + + IdentityTableReduce.initJob(output, IdentityTableReduce.class, jobConf); + + JobClient jobClient = new JobClient(jobConf); + + ClusterStatus cluster = jobClient.getClusterStatus(); + jobConf.setNumMapTasks(cluster.getMapTasks()); + jobConf.setNumReduceTasks(1); + + } catch (IOException e) { + e.printStackTrace(); + } + + return jobConf; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/SubstituteCommand.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/SubstituteCommand.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/SubstituteCommand.java (revision 0) @@ -0,0 +1,112 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.Writer; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.shell.BasicCommand; +import org.apache.hadoop.hbase.shell.ReturnMsg; + +/** + * This class represents a substitute command. + */ +public class SubstituteCommand extends BasicCommand { + private String key; + private String chainKey; + private String operation; + private String condition; + + public SubstituteCommand(Writer o) { + super(o); + } + + public ReturnMsg execute(Configuration conf) { + VariableRef formula = new VariableRef(operation, condition); + VariablesPool.put(key, chainKey, formula); + return null; + } + + public void setInput(String input) { + this.operation = Constants.SUBSTITUTE_TABLE; + this.condition = input; + } + + public void setKey(String key) { + this.key = key; + } + + public void setChainKey(String chainKey) { + this.chainKey = chainKey; + } + + public void setOperation(String operation) { + this.operation = operation; + } + + public void setCondition(String condition) { + this.condition = condition; + } + + public void setJoinCondition(String con, String extendedKey, String chainedKey) { + String joinKey = null; + String joinCond = ""; + String secondRelation = null; + String[] term = con.split(Constants.LOGICAL_CONNECTOR_AND); + + for (int i = 0; i < term.length; i++) { + String[] token = term[i].split(" "); + if (token[0].endsWith(Constants.RELATIONAL_JOIN_KEY)) { + secondRelation = token[0].substring(0, token[0].length() - 4); + joinKey = appendDelimiter(token[2]) + " " + token[1] + " " + token[0]; + } else if (token[2].endsWith(Constants.RELATIONAL_JOIN_KEY)) { + secondRelation = token[2].substring(0, token[2].length() - 4); + joinKey = appendDelimiter(token[0]) + " " + token[1] + " " + token[2]; + } else { + if (!joinCond.equals("")) { + joinCond += Constants.LOGICAL_CONNECTOR_AND; + } + + joinCond += term[i]; + } + } + + setCondition(joinKey + Constants.CONDITION_SEPERATOR + joinCond); + + if (secondRelation.equals(extendedKey)) { + resetVariableRelation(extendedKey, chainedKey); + } else { + resetVariableRelation(chainedKey, extendedKey); + } + } + + public void resetVariableRelation(String r1, String r2) { + setChainKey(r1); + String tableName = VariablesPool.get(r1).get(null).getArgument(); + VariableRef formula = new VariableRef( + Constants.RELATIONAL_JOIN_SECOND_RELATION, tableName); + VariablesPool.put(r1, r2, formula); + } + + @Override + public CommandType getCommandType() { + return CommandType.SHELL; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/VariableRef.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/VariableRef.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/VariableRef.java (revision 0) @@ -0,0 +1,50 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
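SubstituteCommand itself runs nothing: execute() only records the operation as a VariableRef in VariablesPool, keyed by the variable being defined and the variable it chains from, so that a later SAVE command can replay the chain of relational operations. A rough sketch of that bookkeeping follows, assuming the snippet sits in the same package as this patch and using VariableRef and VariablesPool as defined just below; the variable names, operation keyword, and condition string are illustrative (the real keywords come from Constants, which is not shown in this excerpt).

    import java.io.OutputStreamWriter;
    import org.apache.hadoop.conf.Configuration;

    public class SubstituteExample {
      public static void main(String[] args) {
        // Shell-side equivalent of defining B as an operation over the earlier variable A.
        SubstituteCommand cmd = new SubstituteCommand(new OutputStreamWriter(System.out));
        cmd.setKey("B");                  // variable being defined
        cmd.setChainKey("A");             // variable it is derived from
        cmd.setOperation("selection");    // hypothetical operation keyword
        cmd.setCondition("year > 1990");  // hypothetical condition string
        cmd.execute(new Configuration());

        // execute() stored a VariableRef under ("B", "A") in the pool.
        VariableRef ref = VariablesPool.get("B").get("A");
        System.out.println(ref.getOperation() + " " + ref.getArgument());
      }
    }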
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +/** + * VariableRef is used to reference declared Variables. + */ +public class VariableRef { + String operation; + String argument; + + /** Constructor */ + public VariableRef(String operation, String condition) { + this.operation = operation; + this.argument = condition; + } + + /** + * Return argument of an operation + * @return argument + */ + public String getArgument() { + return argument; + } + + /** + * Return operation + * @return operation + */ + public String getOperation() { + return operation; + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/VariablesPool.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/VariablesPool.java (revision 0) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/VariablesPool.java (revision 0) @@ -0,0 +1,51 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.util.HashMap; + +/** + * Variable pool is a collection of substitution variables. + */ +public class VariablesPool { + static HashMap> variables = + new HashMap>(); + + /** + * puts the date in the substitution variable. + * @param key + * @param parentKey + * @param statement + */ + public static void put(String key, String parentKey, VariableRef statement) { + HashMap value = new HashMap(); + value.put(parentKey, statement); + variables.put(key, value); + } + + /** + * returns the substitution variable's value. + * @param key + * @return HashMap + */ + public static HashMap get(String key) { + return variables.get(key); + } +} Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java (revision 587398) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java (working copy) @@ -69,7 +69,7 @@ * at the end of the column name. */ protected String appendDelimiter(String column) { - return (!column.endsWith(FAMILY_INDICATOR))? + return (!column.endsWith(FAMILY_INDICATOR) && column.indexOf(FAMILY_INDICATOR) == -1)? 
column + FAMILY_INDICATOR: column; } Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Parser.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Parser.java (revision 587398) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Parser.java (working copy) @@ -28,8 +28,11 @@ import java.io.StringReader; import java.io.Reader; import java.io.Writer; +import java.net.URLEncoder; +import java.io.UnsupportedEncodingException; import org.apache.hadoop.hbase.shell.*; +import org.apache.hadoop.hbase.shell.algebra.*; /** * Parsing command line. @@ -72,7 +75,9 @@ case SELECT: case ENABLE: case DISABLE: - case 62: + case SAVE: + case ID: + case 69: switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case HELP: case ALTER: @@ -90,6 +95,8 @@ case SELECT: case ENABLE: case DISABLE: + case SAVE: + case ID: statement = cmdStatement(); break; default: @@ -96,7 +103,7 @@ jj_la1[0] = jj_gen; ; } - jj_consume_token(62); + jj_consume_token(69); break; case 0: jj_consume_token(0); @@ -159,6 +166,12 @@ case JAR: cmd = jarCommand(); break; + case ID: + cmd = substituteCommand(); + break; + case SAVE: + cmd = saveCommand(); + break; default: jj_la1[2] = jj_gen; jj_consume_token(-1); @@ -251,6 +264,8 @@ case INSERT: case DELETE: case SELECT: + case SAVE: + case GROUP: case ID: switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case SHOW: @@ -289,6 +304,12 @@ case JAR: t = jj_consume_token(JAR); break; + case GROUP: + t = jj_consume_token(GROUP); + break; + case SAVE: + t = jj_consume_token(SAVE); + break; case ID: t = jj_consume_token(ID); break; @@ -316,7 +337,7 @@ case ID: case QUOTED_IDENTIFIER: case STRING_LITERAL: - argument = Identifier(); + argument = identifier(); break; default: jj_la1[8] = jj_gen; @@ -342,7 +363,7 @@ jj_consume_token(-1); throw new ParseException(); } - argument = Identifier(); + argument = identifier(); desc.setArgument(argument); {if (true) return desc;} throw new Error("Missing return statement in function"); @@ -461,10 +482,10 @@ String column = null; jj_consume_token(CREATE); jj_consume_token(TABLE); - table = Identifier(); + table = identifier(); createCommand.setTable(table); jj_consume_token(LPAREN); - column = Identifier(); + column = identifier(); columnSpec = ColumnSpec(); createCommand.addColumnSpec(column, columnSpec); label_4: @@ -478,7 +499,7 @@ break label_4; } jj_consume_token(COMMA); - column = Identifier(); + column = identifier(); columnSpec = ColumnSpec(); createCommand.addColumnSpec(column, columnSpec); } @@ -494,11 +515,11 @@ Map columnSpec = null; jj_consume_token(ALTER); jj_consume_token(TABLE); - table = Identifier(); + table = identifier(); alterCommand.setTable(table); if (jj_2_1(2)) { jj_consume_token(ADD); - column = Identifier(); + column = identifier(); columnSpec = ColumnSpec(); alterCommand.setOperationType(AlterCommand.OperationType.ADD); alterCommand.addColumnSpec(column, columnSpec); @@ -508,7 +529,7 @@ jj_consume_token(ADD); jj_consume_token(LPAREN); alterCommand.setOperationType(AlterCommand.OperationType.ADD); - column = Identifier(); + column = identifier(); columnSpec = ColumnSpec(); alterCommand.addColumnSpec(column, columnSpec); label_5: @@ -522,7 +543,7 @@ break label_5; } jj_consume_token(COMMA); - column = Identifier(); + column = identifier(); columnSpec = ColumnSpec(); alterCommand.addColumnSpec(column, columnSpec); } @@ -530,7 +551,7 @@ break; case DROP: jj_consume_token(DROP); - column = Identifier(); + column = identifier(); 
alterCommand.setOperationType(AlterCommand.OperationType.DROP); alterCommand.setColumn(column); break; @@ -536,7 +557,7 @@ break; case CHANGE: jj_consume_token(CHANGE); - column = Identifier(); + column = identifier(); columnSpec = ColumnSpec(); alterCommand.setOperationType(AlterCommand.OperationType.CHANGE); alterCommand.addColumnSpec(column, columnSpec); @@ -556,7 +577,7 @@ List tableList = null; jj_consume_token(DROP); jj_consume_token(TABLE); - tableList = TableList(); + tableList = tableList(); drop.setTableList(tableList); {if (true) return drop;} throw new Error("Missing return statement in function"); @@ -570,7 +591,7 @@ Token t = null; jj_consume_token(INSERT); jj_consume_token(INTO); - table = Identifier(); + table = identifier(); in.setTable(table); columnfamilies = getColumns(); in.setColumnfamilies(columnfamilies); @@ -603,10 +624,10 @@ Token t = null; String table = null; jj_consume_token(DELETE); - columnList = ColumnList(); + columnList = columnList(); deleteCommand.setColumnList(columnList); jj_consume_token(FROM); - table = Identifier(); + table = identifier(); deleteCommand.setTable(table); jj_consume_token(WHERE); jj_consume_token(ROW); @@ -637,9 +658,9 @@ String tableName = null; int limit; jj_consume_token(SELECT); - columns = ColumnList(); + columns = columnList(); jj_consume_token(FROM); - tableName = Identifier(); + tableName = identifier(); select.setColumns(columns); select.setTable(tableName); switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { @@ -712,7 +733,7 @@ EnableCommand enableCommand = new EnableCommand(this.out); String table = null; jj_consume_token(ENABLE); - table = Identifier(); + table = identifier(); enableCommand.setTable(table); {if (true) return enableCommand;} throw new Error("Missing return statement in function"); @@ -722,7 +743,7 @@ DisableCommand disableCommand = new DisableCommand(this.out); String table = null; jj_consume_token(DISABLE); - table = Identifier(); + table = identifier(); disableCommand.setTable(table); {if (true) return disableCommand;} throw new Error("Missing return statement in function"); @@ -735,7 +756,102 @@ throw new Error("Missing return statement in function"); } -//////////////////////////////////////////////// + final public SubstituteCommand substituteCommand() throws ParseException { + Token key = null; + Token chainKey = null; + Token operation = null; + String tableName = null; + String condition = ""; + List notInList = new ArrayList(); + SubstituteCommand substitute = new SubstituteCommand(this.out); + Token extendedKey = null; + key = jj_consume_token(ID); + jj_consume_token(EQUALS); + substitute.setKey(key.image.toString()); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case ID: + chainKey = jj_consume_token(ID); + jj_consume_token(DOT); + substitute.setChainKey(chainKey.image.toString()); + operation = jj_consume_token(ID); + substitute.setOperation(operation.image.toString()); + jj_consume_token(LPAREN); + String operationType = operation.image.toLowerCase(); + if(operationType.equals(Constants.RELATIONAL_PROJECTION)) { + List columnList = columnList(); + for (int i = 0; i < columnList.size(); i++) { + condition += appendIndicator(columnList.get(i)) + " "; + } + + } else if(operationType.equals(Constants.RELATIONAL_SELECTION)) { + condition = booleanTerm(); + } else if(operationType.equals(Constants.RELATIONAL_JOIN)) { + condition = booleanTerm(); + } + + substitute.setCondition(condition); + jj_consume_token(RPAREN); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case AND: + jj_consume_token(AND); + extendedKey = 
jj_consume_token(ID); + String eKey = extendedKey.image.toString(); + String cKey = chainKey.image.toString(); + substitute.setJoinCondition(condition, eKey, cKey); + break; + default: + jj_la1[24] = jj_gen; + ; + } + break; + case TABLE: + jj_consume_token(TABLE); + jj_consume_token(LPAREN); + tableName = identifier(); + substitute.setInput(tableName); + jj_consume_token(RPAREN); + break; + case GROUP: + operation = jj_consume_token(GROUP); + chainKey = jj_consume_token(ID); + jj_consume_token(BY); + jj_consume_token(LPAREN); + List columnList = columnList(); + for (int i = 0; i < columnList.size(); i++) { + condition += appendIndicator(columnList.get(i)); + } + jj_consume_token(RPAREN); + substitute.setChainKey(chainKey.image.toString()); + substitute.setOperation(operation.image.toString()); + substitute.setCondition(condition); + break; + default: + jj_la1[25] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + {if (true) return substitute;} + throw new Error("Missing return statement in function"); + } + + final public SaveCommand saveCommand() throws ParseException { + Token t = null; + String tableName; + SaveCommand save = new SaveCommand(this.out); + jj_consume_token(SAVE); + t = jj_consume_token(ID); + save.setStatement(t.image.toString()); + jj_consume_token(INTO); + jj_consume_token(TABLE); + jj_consume_token(LPAREN); + tableName = identifier(); + save.setOutput(tableName); + jj_consume_token(RPAREN); + {if (true) return save;} + throw new Error("Missing return statement in function"); + } + +// ///////////////////////////////////////////////////////////////////////////////////// // Utility expansion units... final public List getLiteralValues() throws ParseException { List values = new ArrayList(); @@ -753,7 +869,7 @@ ; break; default: - jj_la1[24] = jj_gen; + jj_la1[26] = jj_gen; break label_6; } switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { @@ -776,7 +892,7 @@ jj_consume_token(QUOTED_IDENTIFIER); break; default: - jj_la1[25] = jj_gen; + jj_la1[27] = jj_gen; jj_consume_token(-1); throw new ParseException(); } @@ -783,7 +899,7 @@ values.removeAll(values); break; default: - jj_la1[26] = jj_gen; + jj_la1[28] = jj_gen; jj_consume_token(-1); throw new ParseException(); } @@ -803,7 +919,7 @@ s = jj_consume_token(QUOTED_IDENTIFIER); break; default: - jj_la1[27] = jj_gen; + jj_la1[29] = jj_gen; jj_consume_token(-1); throw new ParseException(); } @@ -812,31 +928,6 @@ throw new Error("Missing return statement in function"); } - final public List getColumns() throws ParseException { - List values = new ArrayList(); - String literal = null; - jj_consume_token(LPAREN); - literal = getColumn(); - if(literal != null) values.add(literal); - label_7: - while (true) { - switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { - case COMMA: - ; - break; - default: - jj_la1[28] = jj_gen; - break label_7; - } - jj_consume_token(COMMA); - literal = getColumn(); - if(literal != null) values.add(literal); - } - jj_consume_token(RPAREN); - {if (true) return values;} - throw new Error("Missing return statement in function"); - } - final public String getColumn() throws ParseException { Token col; switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { @@ -842,6 +933,7 @@ switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case ASTERISK: case ID: + case INTEGER_LITERAL: switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case ID: col = jj_consume_token(ID); @@ -846,6 +938,9 @@ case ID: col = jj_consume_token(ID); break; + case INTEGER_LITERAL: + col = jj_consume_token(INTEGER_LITERAL); + break; case ASTERISK: col = jj_consume_token(ASTERISK); 
break; @@ -850,7 +945,7 @@ col = jj_consume_token(ASTERISK); break; default: - jj_la1[29] = jj_gen; + jj_la1[30] = jj_gen; jj_consume_token(-1); throw new ParseException(); } @@ -866,7 +961,7 @@ col = jj_consume_token(STRING_LITERAL); break; default: - jj_la1[30] = jj_gen; + jj_la1[31] = jj_gen; jj_consume_token(-1); throw new ParseException(); } @@ -873,7 +968,7 @@ {if (true) return col.image.substring(1,col.image.toString().length() - 1);} break; default: - jj_la1[31] = jj_gen; + jj_la1[32] = jj_gen; jj_consume_token(-1); throw new ParseException(); } @@ -880,10 +975,35 @@ throw new Error("Missing return statement in function"); } - final public List TableList() throws ParseException { + final public List getColumns() throws ParseException { + List values = new ArrayList(); + String literal = null; + jj_consume_token(LPAREN); + literal = getColumn(); + if(literal != null) values.add(literal); + label_7: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case COMMA: + ; + break; + default: + jj_la1[33] = jj_gen; + break label_7; + } + jj_consume_token(COMMA); + literal = getColumn(); + if(literal != null) values.add(literal); + } + jj_consume_token(RPAREN); + {if (true) return values;} + throw new Error("Missing return statement in function"); + } + + final public List tableList() throws ParseException { List tableList = new ArrayList(); String table = null; - table = Identifier(); + table = identifier(); tableList.add(table); label_8: while (true) { @@ -892,11 +1012,11 @@ ; break; default: - jj_la1[32] = jj_gen; + jj_la1[34] = jj_gen; break label_8; } jj_consume_token(COMMA); - table = Identifier(); + table = identifier(); tableList.add(table); } {if (true) return tableList;} @@ -903,7 +1023,7 @@ throw new Error("Missing return statement in function"); } - final public List ColumnList() throws ParseException { + final public List columnList() throws ParseException { List columnList = new ArrayList(); String column = null; column = getColumn(); @@ -919,7 +1039,7 @@ ; break; default: - jj_la1[33] = jj_gen; + jj_la1[35] = jj_gen; break label_9; } jj_consume_token(COMMA); @@ -937,7 +1057,7 @@ throw new Error("Missing return statement in function"); } - final public String Identifier() throws ParseException { + final public String identifier() throws ParseException { Token t = null; switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { case ID: @@ -954,7 +1074,7 @@ t = jj_consume_token(STRING_LITERAL); break; default: - jj_la1[34] = jj_gen; + jj_la1[36] = jj_gen; jj_consume_token(-1); throw new ParseException(); } @@ -961,7 +1081,7 @@ {if (true) return t.image.substring(1,t.image.toString().length() - 1);} break; default: - jj_la1[35] = jj_gen; + jj_la1[37] = jj_gen; jj_consume_token(-1); throw new ParseException(); } @@ -968,6 +1088,207 @@ throw new Error("Missing return statement in function"); } + final public String booleanTerm() throws ParseException { + String query = null; + String tmp = null; + query = booleanTerms(); + label_10: + while (true) { + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case AND: + case OR: + ; + break; + default: + jj_la1[38] = jj_gen; + break label_10; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case AND: + jj_consume_token(AND); + query += Constants.LOGICAL_CONNECTOR_AND; + break; + case OR: + jj_consume_token(OR); + query += Constants.LOGICAL_CONNECTOR_OR; + break; + default: + jj_la1[39] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + tmp = booleanTerms(); + query += tmp; + } + {if (true) return query;} + throw new Error("Missing return 
statement in function"); + } + + final public String booleanTerms() throws ParseException { + Token tSearchName, tComparator, tComparand; + List inList = new ArrayList(); + String searchName=null,comparator=null,comparand=null; + Token joinColumn = null; + Token joinKey = null; + tSearchName = jj_consume_token(ID); + searchName = tSearchName.image.toString(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case DOT: + jj_consume_token(DOT); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case ID: + joinColumn = jj_consume_token(ID); + searchName += "." + joinColumn.image.toString(); + break; + case ROW: + jj_consume_token(ROW); + searchName += Constants.RELATIONAL_JOIN_KEY; + break; + default: + jj_la1[40] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + break; + default: + jj_la1[41] = jj_gen; + ; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case LCOMP: + tComparator = jj_consume_token(LCOMP); + comparator = tComparator.image.toString(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case EQUALS: + jj_consume_token(EQUALS); + comparator += "="; + break; + default: + jj_la1[42] = jj_gen; + ; + } + break; + case RCOMP: + tComparator = jj_consume_token(RCOMP); + comparator = tComparator.image.toString(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case EQUALS: + jj_consume_token(EQUALS); + comparator += "="; + break; + default: + jj_la1[43] = jj_gen; + ; + } + break; + case EQUALS: + tComparator = jj_consume_token(EQUALS); + comparator = tComparator.image.toString(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case LCOMP: + jj_consume_token(LCOMP); + comparator = ">" + comparator; + break; + default: + jj_la1[44] = jj_gen; + ; + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case RCOMP: + jj_consume_token(RCOMP); + comparator = "<" + comparator; + break; + default: + jj_la1[45] = jj_gen; + ; + } + break; + case NOTEQUAL: + tComparator = jj_consume_token(NOTEQUAL); + comparator = tComparator.image.toString(); + break; + case NOT: + jj_consume_token(NOT); + jj_consume_token(IN); + comparator = Constants.EXPRESSION_NOT_IN; + break; + case IN: + jj_consume_token(IN); + comparator = Constants.EXPRESSION_IN; + break; + default: + jj_la1[46] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case INTEGER_LITERAL: + tComparand = jj_consume_token(INTEGER_LITERAL); + comparand = tComparand.image.toString(); + break; + case STRING_LITERAL: + tComparand = jj_consume_token(STRING_LITERAL); + comparand = tComparand.image.substring(1,tComparand.image.length() - 1); + break; + case ID: + tComparand = jj_consume_token(ID); + comparand = tComparand.image.toString(); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case DOT: + jj_consume_token(DOT); + switch ((jj_ntk==-1)?jj_ntk():jj_ntk) { + case ROW: + jj_consume_token(ROW); + comparand += Constants.RELATIONAL_JOIN_KEY; + break; + case ID: + joinColumn = jj_consume_token(ID); + comparand += "." 
+ joinColumn.image.toString(); + break; + default: + jj_la1[47] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + break; + default: + jj_la1[48] = jj_gen; + ; + } + break; + case LPAREN: + inList = getColumns(); + if(comparator == null) { + comparator = Constants.EXPRESSION_IN; + } + comparand = ""; + try{ + for(int i=0; i= 0) { @@ -1211,7 +1537,7 @@ la1tokens[jj_kind] = true; jj_kind = -1; } - for (int i = 0; i < 36; i++) { + for (int i = 0; i < 50; i++) { if (jj_la1[i] == jj_gen) { for (int j = 0; j < 32; j++) { if ((jj_la1_0[i] & (1<\"", + "\">\"", + "\"<\"", + "\"not\"", + "\"in\"", + "\"!=\"", "\"*\"", "\"max_versions\"", "\"max_length\"", @@ -121,6 +132,9 @@ "\"num_entries\"", "\"add\"", "\"change\"", + "\"save\"", + "\"group\"", + "\"by\"", "", "", "", Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserTokenManager.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserTokenManager.java (revision 587398) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserTokenManager.java (working copy) @@ -27,7 +27,10 @@ import java.io.StringReader; import java.io.Reader; import java.io.Writer; +import java.net.URLEncoder; +import java.io.UnsupportedEncodingException; import org.apache.hadoop.hbase.shell.*; +import org.apache.hadoop.hbase.shell.algebra.*; public class ParserTokenManager implements ParserConstants { @@ -33,7 +36,7 @@ { public java.io.PrintStream debugStream = System.out; public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; } -private final int jjStopStringLiteralDfa_0(int pos, long active0) +private final int jjStopStringLiteralDfa_0(int pos, long active0, long active1) { switch (pos) { @@ -39,10 +42,10 @@ { case 0: if ((active0 & 0x800000000L) != 0L) - return 32; - if ((active0 & 0xfffe03ffffffe0L) != 0L) + return 3; + if ((active0 & 0x7fffe603ffffffe0L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; return 1; } return -1; @@ -47,12 +50,15 @@ } return -1; case 1: - if ((active0 & 0x200002000L) != 0L) + if ((active0 & 0x4008040200032000L) != 0L) return 1; - if ((active0 & 0xfffe01ffffdfe0L) != 0L) + if ((active0 & 0x3ff7e201fffcdfe0L) != 0L) { - jjmatchedKind = 56; - jjmatchedPos = 1; + if (jjmatchedPos != 1) + { + jjmatchedKind = 63; + jjmatchedPos = 1; + } return 1; } return -1; @@ -57,11 +63,11 @@ } return -1; case 2: - if ((active0 & 0x40000104004000L) != 0L) + if ((active0 & 0x400020104004000L) != 0L) return 1; - if ((active0 & 0xbffe00fbff9fe0L) != 0L) + if ((active0 & 0x3bffe000fbff9fe0L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 2; return 1; } @@ -67,13 +73,11 @@ } return -1; case 3: - if ((active0 & 0x100002029720L) != 0L) - return 1; - if ((active0 & 0xbfee00f9fd08c0L) != 0L) + if ((active0 & 0x2bfee000f9fd08c0L) != 0L) { if (jjmatchedPos != 3) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 3; } return 1; @@ -78,31 +82,33 @@ } return 1; } + if ((active0 & 0x1001000002029720L) != 0L) + return 1; return -1; case 4: - if ((active0 & 0xbfce0078f90a00L) != 0L) + if ((active0 & 0xbfce00078f90a00L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 4; return 1; } - if ((active0 & 0x2000810400c0L) != 0L) + if ((active0 & 0x20020000810400c0L) != 0L) return 1; return -1; case 5: - if ((active0 & 0x3f8e0070c00200L) != 0L) + if ((active0 & 0x3f8e00070c00200L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 5; return 1; 
} - if ((active0 & 0x80400008390800L) != 0L) + if ((active0 & 0x804000008390800L) != 0L) return 1; return -1; case 6: - if ((active0 & 0x3f8e0070800200L) != 0L) + if ((active0 & 0x3f8e00070800200L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 6; return 1; } @@ -110,41 +116,41 @@ return 1; return -1; case 7: - if ((active0 & 0x2f8e0070000000L) != 0L) + if ((active0 & 0x2f8e00070000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 7; return 1; } - if ((active0 & 0x10000000800200L) != 0L) + if ((active0 & 0x100000000800200L) != 0L) return 1; return -1; case 8: - if ((active0 & 0x2f0e0050000000L) != 0L) + if ((active0 & 0x2f0e00050000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 8; return 1; } - if ((active0 & 0x800020000000L) != 0L) + if ((active0 & 0x8000020000000L) != 0L) return 1; return -1; case 9: - if ((active0 & 0x2f0a0050000000L) != 0L) + if ((active0 & 0x400000000000L) != 0L) + return 1; + if ((active0 & 0x2f0a00050000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 9; return 1; } - if ((active0 & 0x40000000000L) != 0L) - return 1; return -1; case 10: - if ((active0 & 0x29080000000000L) != 0L) + if ((active0 & 0x290800000000000L) != 0L) return 1; - if ((active0 & 0x6020050000000L) != 0L) + if ((active0 & 0x60200050000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 10; return 1; } @@ -150,19 +156,19 @@ } return -1; case 11: - if ((active0 & 0x6000010000000L) != 0L) + if ((active0 & 0x200040000000L) != 0L) + return 1; + if ((active0 & 0x60000010000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 11; return 1; } - if ((active0 & 0x20040000000L) != 0L) - return 1; return -1; case 12: - if ((active0 & 0x6000010000000L) != 0L) + if ((active0 & 0x60000010000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 12; return 1; } @@ -168,19 +174,19 @@ } return -1; case 13: - if ((active0 & 0x6000000000000L) != 0L) + if ((active0 & 0x10000000L) != 0L) + return 1; + if ((active0 & 0x60000000000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 13; return 1; } - if ((active0 & 0x10000000L) != 0L) - return 1; return -1; case 14: - if ((active0 & 0x6000000000000L) != 0L) + if ((active0 & 0x60000000000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 14; return 1; } @@ -186,9 +192,9 @@ } return -1; case 15: - if ((active0 & 0x6000000000000L) != 0L) + if ((active0 & 0x60000000000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 15; return 1; } @@ -194,9 +200,9 @@ } return -1; case 16: - if ((active0 & 0x6000000000000L) != 0L) + if ((active0 & 0x60000000000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 16; return 1; } @@ -202,9 +208,9 @@ } return -1; case 17: - if ((active0 & 0x6000000000000L) != 0L) + if ((active0 & 0x60000000000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 17; return 1; } @@ -210,9 +216,9 @@ } return -1; case 18: - if ((active0 & 0x6000000000000L) != 0L) + if ((active0 & 0x60000000000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 18; return 1; } @@ -218,13 +224,13 @@ } return -1; case 19: - if ((active0 & 0x4000000000000L) != 0L) + if ((active0 & 0x40000000000000L) != 0L) { - jjmatchedKind = 56; + jjmatchedKind = 63; jjmatchedPos = 19; return 1; } - if ((active0 & 0x2000000000000L) != 0L) + if ((active0 & 0x20000000000000L) != 0L) return 1; return -1; default : @@ 
-231,9 +237,9 @@ return -1; } } -private final int jjStartNfa_0(int pos, long active0) +private final int jjStartNfa_0(int pos, long active0, long active1) { - return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1); + return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0, active1), pos + 1); } private final int jjStopAtPos(int pos, int kind) { @@ -253,6 +259,8 @@ { switch(curChar) { + case 33: + return jjMoveStringLiteralDfa1_0(0x80000000000L); case 40: return jjStopAtPos(0, 36); case 41: @@ -258,26 +266,28 @@ case 41: return jjStopAtPos(0, 37); case 42: - return jjStopAtPos(0, 40); + return jjStopAtPos(0, 44); case 44: return jjStopAtPos(0, 34); case 46: - return jjStartNfaWithStates_0(0, 35, 32); + return jjStartNfaWithStates_0(0, 35, 3); case 59: - return jjStopAtPos(0, 62); + return jjStopAtPos(0, 69); case 60: - return jjMoveStringLiteralDfa1_0(0x8000000000L); + return jjStopAtPos(0, 40); case 61: return jjStopAtPos(0, 38); + case 62: + return jjStopAtPos(0, 39); case 65: case 97: - return jjMoveStringLiteralDfa1_0(0x40000100000040L); + return jjMoveStringLiteralDfa1_0(0x400000100000040L); case 66: case 98: - return jjMoveStringLiteralDfa1_0(0x1200000000000L); + return jjMoveStringLiteralDfa1_0(0x4012000000000000L); case 67: case 99: - return jjMoveStringLiteralDfa1_0(0x82080010000880L); + return jjMoveStringLiteralDfa1_0(0x820800010000880L); case 68: case 100: return jjMoveStringLiteralDfa1_0(0x481600L); @@ -287,6 +297,9 @@ case 70: case 102: return jjMoveStringLiteralDfa1_0(0x2002000L); + case 71: + case 103: + return jjMoveStringLiteralDfa1_0(0x2000000000000000L); case 72: case 104: return jjMoveStringLiteralDfa1_0(0x20L); @@ -292,7 +305,7 @@ return jjMoveStringLiteralDfa1_0(0x20L); case 73: case 105: - return jjMoveStringLiteralDfa1_0(0x800000030000L); + return jjMoveStringLiteralDfa1_0(0x8040000030000L); case 74: case 106: return jjMoveStringLiteralDfa1_0(0x4000L); @@ -301,10 +314,10 @@ return jjMoveStringLiteralDfa1_0(0x80000000L); case 77: case 109: - return jjMoveStringLiteralDfa1_0(0x60000000000L); + return jjMoveStringLiteralDfa1_0(0x600000000000L); case 78: case 110: - return jjMoveStringLiteralDfa1_0(0x30100040000000L); + return jjMoveStringLiteralDfa1_0(0x301020040000000L); case 79: case 111: return jjMoveStringLiteralDfa1_0(0x200000000L); @@ -310,10 +323,10 @@ return jjMoveStringLiteralDfa1_0(0x200000000L); case 82: case 114: - return jjMoveStringLiteralDfa1_0(0x4400004000000L); + return jjMoveStringLiteralDfa1_0(0x44000004000000L); case 83: case 115: - return jjMoveStringLiteralDfa1_0(0x900100L); + return jjMoveStringLiteralDfa1_0(0x1000000000900100L); case 84: case 116: return jjMoveStringLiteralDfa1_0(0x20040000L); @@ -319,7 +332,7 @@ return jjMoveStringLiteralDfa1_0(0x20040000L); case 86: case 118: - return jjMoveStringLiteralDfa1_0(0x8000008000000L); + return jjMoveStringLiteralDfa1_0(0x80000008000000L); case 87: case 119: return jjMoveStringLiteralDfa1_0(0x1000000L); @@ -331,7 +344,7 @@ { try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(0, active0); + jjStopStringLiteralDfa_0(0, active0, 0L); return 1; } switch(curChar) @@ -336,22 +349,22 @@ } switch(curChar) { - case 62: - if ((active0 & 0x8000000000L) != 0L) - return jjStopAtPos(1, 39); + case 61: + if ((active0 & 0x80000000000L) != 0L) + return jjStopAtPos(1, 43); break; case 65: case 97: - return jjMoveStringLiteralDfa2_0(active0, 0x60008044000L); + return jjMoveStringLiteralDfa2_0(active0, 0x1000600008044000L); case 68: case 100: - return 
jjMoveStringLiteralDfa2_0(active0, 0x40000000000000L); + return jjMoveStringLiteralDfa2_0(active0, 0x400000000000000L); case 69: case 101: - return jjMoveStringLiteralDfa2_0(active0, 0xc400000180620L); + return jjMoveStringLiteralDfa2_0(active0, 0xc4000000180620L); case 72: case 104: - return jjMoveStringLiteralDfa2_0(active0, 0x80000001000100L); + return jjMoveStringLiteralDfa2_0(active0, 0x800000001000100L); case 73: case 105: return jjMoveStringLiteralDfa2_0(active0, 0xa0400000L); @@ -357,13 +370,18 @@ return jjMoveStringLiteralDfa2_0(active0, 0xa0400000L); case 76: case 108: - return jjMoveStringLiteralDfa2_0(active0, 0x12000000000c0L); + return jjMoveStringLiteralDfa2_0(active0, 0x120000000000c0L); case 78: case 110: - return jjMoveStringLiteralDfa2_0(active0, 0x800100230000L); + if ((active0 & 0x40000000000L) != 0L) + { + jjmatchedKind = 42; + jjmatchedPos = 1; + } + return jjMoveStringLiteralDfa2_0(active0, 0x8000100230000L); case 79: case 111: - return jjMoveStringLiteralDfa2_0(active0, 0x2180014000000L); + return jjMoveStringLiteralDfa2_0(active0, 0x21820014000000L); case 82: case 114: if ((active0 & 0x200000000L) != 0L) @@ -368,7 +386,7 @@ case 114: if ((active0 & 0x200000000L) != 0L) return jjStartNfaWithStates_0(1, 33, 1); - return jjMoveStringLiteralDfa2_0(active0, 0x2001800L); + return jjMoveStringLiteralDfa2_0(active0, 0x2000000002001800L); case 83: case 115: if ((active0 & 0x2000L) != 0L) @@ -379,14 +397,19 @@ return jjMoveStringLiteralDfa2_0(active0, 0x800000L); case 85: case 117: - return jjMoveStringLiteralDfa2_0(active0, 0x30000040000000L); + return jjMoveStringLiteralDfa2_0(active0, 0x300000040000000L); case 88: case 120: return jjMoveStringLiteralDfa2_0(active0, 0x8000L); + case 89: + case 121: + if ((active0 & 0x4000000000000000L) != 0L) + return jjStartNfaWithStates_0(1, 62, 1); + break; default : break; } - return jjStartNfa_0(0, active0); + return jjStartNfa_0(0, active0, 0L); } private final int jjMoveStringLiteralDfa2_0(long old0, long active0) { @@ -391,10 +414,10 @@ private final int jjMoveStringLiteralDfa2_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(0, old0); + return jjStartNfa_0(0, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(1, active0); + jjStopStringLiteralDfa_0(1, active0, 0L); return 2; } switch(curChar) @@ -400,10 +423,10 @@ switch(curChar) { case 95: - return jjMoveStringLiteralDfa3_0(active0, 0x800000000000L); + return jjMoveStringLiteralDfa3_0(active0, 0x8000000000000L); case 65: case 97: - return jjMoveStringLiteralDfa3_0(active0, 0x80000000a00000L); + return jjMoveStringLiteralDfa3_0(active0, 0x800000000a00000L); case 66: case 98: return jjMoveStringLiteralDfa3_0(active0, 0x40000L); @@ -409,7 +432,7 @@ return jjMoveStringLiteralDfa3_0(active0, 0x40000L); case 67: case 99: - return jjMoveStringLiteralDfa3_0(active0, 0x8400000000000L); + return jjMoveStringLiteralDfa3_0(active0, 0x84000000000000L); case 68: case 100: if ((active0 & 0x100000000L) != 0L) @@ -414,8 +437,8 @@ case 100: if ((active0 & 0x100000000L) != 0L) return jjStartNfaWithStates_0(2, 32, 1); - else if ((active0 & 0x40000000000000L) != 0L) - return jjStartNfaWithStates_0(2, 54, 1); + else if ((active0 & 0x400000000000000L) != 0L) + return jjStartNfaWithStates_0(2, 58, 1); break; case 69: case 101: @@ -428,13 +451,13 @@ return jjMoveStringLiteralDfa3_0(active0, 0x18180020L); case 77: case 109: - return jjMoveStringLiteralDfa3_0(active0, 0x300800e0000000L); + return 
jjMoveStringLiteralDfa3_0(active0, 0x3008000e0000000L); case 78: case 110: - return jjMoveStringLiteralDfa3_0(active0, 0x100000000000L); + return jjMoveStringLiteralDfa3_0(active0, 0x1000000000000L); case 79: case 111: - return jjMoveStringLiteralDfa3_0(active0, 0x1200002001100L); + return jjMoveStringLiteralDfa3_0(active0, 0x2012000002001100L); case 82: case 114: if ((active0 & 0x4000L) != 0L) @@ -445,10 +468,15 @@ return jjMoveStringLiteralDfa3_0(active0, 0x410600L); case 84: case 116: - return jjMoveStringLiteralDfa3_0(active0, 0x4000000020040L); + if ((active0 & 0x20000000000L) != 0L) + return jjStartNfaWithStates_0(2, 41, 1); + return jjMoveStringLiteralDfa3_0(active0, 0x40000000020040L); case 85: case 117: - return jjMoveStringLiteralDfa3_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa3_0(active0, 0x20000000000000L); + case 86: + case 118: + return jjMoveStringLiteralDfa3_0(active0, 0x1000000000000000L); case 87: case 119: if ((active0 & 0x4000000L) != 0L) @@ -456,11 +484,11 @@ break; case 88: case 120: - return jjMoveStringLiteralDfa3_0(active0, 0x60000000000L); + return jjMoveStringLiteralDfa3_0(active0, 0x600000000000L); default : break; } - return jjStartNfa_0(1, active0); + return jjStartNfa_0(1, active0, 0L); } private final int jjMoveStringLiteralDfa3_0(long old0, long active0) { @@ -465,10 +493,10 @@ private final int jjMoveStringLiteralDfa3_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(1, old0); + return jjStartNfa_0(1, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(2, active0); + jjStopStringLiteralDfa_0(2, active0, 0L); return 3; } switch(curChar) @@ -474,7 +502,7 @@ switch(curChar) { case 95: - return jjMoveStringLiteralDfa4_0(active0, 0x30060040000000L); + return jjMoveStringLiteralDfa4_0(active0, 0x300600040000000L); case 65: case 97: return jjMoveStringLiteralDfa4_0(active0, 0x400880L); @@ -488,11 +516,13 @@ jjmatchedKind = 10; jjmatchedPos = 3; } - return jjMoveStringLiteralDfa4_0(active0, 0x200000000200L); + return jjMoveStringLiteralDfa4_0(active0, 0x2000000000200L); case 69: case 101: - if ((active0 & 0x100000000000L) != 0L) - return jjStartNfaWithStates_0(3, 44, 1); + if ((active0 & 0x1000000000000L) != 0L) + return jjStartNfaWithStates_0(3, 48, 1); + else if ((active0 & 0x1000000000000000L) != 0L) + return jjStartNfaWithStates_0(3, 60, 1); return jjMoveStringLiteralDfa4_0(active0, 0x20190040L); case 73: case 105: @@ -504,10 +534,10 @@ case 109: if ((active0 & 0x2000000L) != 0L) return jjStartNfaWithStates_0(3, 25, 1); - return jjMoveStringLiteralDfa4_0(active0, 0x800000000000L); + return jjMoveStringLiteralDfa4_0(active0, 0x8000000000000L); case 78: case 110: - return jjMoveStringLiteralDfa4_0(active0, 0x82000000000000L); + return jjMoveStringLiteralDfa4_0(active0, 0x820000000000000L); case 79: case 111: if ((active0 & 0x20000L) != 0L) @@ -512,7 +542,7 @@ case 111: if ((active0 & 0x20000L) != 0L) return jjStartNfaWithStates_0(3, 17, 1); - return jjMoveStringLiteralDfa4_0(active0, 0x5400000000000L); + return jjMoveStringLiteralDfa4_0(active0, 0x54000000000000L); case 80: case 112: if ((active0 & 0x20L) != 0L) @@ -519,7 +549,7 @@ return jjStartNfaWithStates_0(3, 5, 1); else if ((active0 & 0x1000L) != 0L) return jjStartNfaWithStates_0(3, 12, 1); - return jjMoveStringLiteralDfa4_0(active0, 0x80000000000L); + return jjMoveStringLiteralDfa4_0(active0, 0x800000000000L); case 82: case 114: return jjMoveStringLiteralDfa4_0(active0, 0x1800000L); @@ -527,10 
+557,10 @@ case 116: if ((active0 & 0x8000L) != 0L) return jjStartNfaWithStates_0(3, 15, 1); - return jjMoveStringLiteralDfa4_0(active0, 0x8000000000000L); + return jjMoveStringLiteralDfa4_0(active0, 0x80000000000000L); case 85: case 117: - return jjMoveStringLiteralDfa4_0(active0, 0x18000000L); + return jjMoveStringLiteralDfa4_0(active0, 0x2000000018000000L); case 87: case 119: if ((active0 & 0x100L) != 0L) @@ -539,7 +569,7 @@ default : break; } - return jjStartNfa_0(2, active0); + return jjStartNfa_0(2, active0, 0L); } private final int jjMoveStringLiteralDfa4_0(long old0, long active0) { @@ -544,10 +574,10 @@ private final int jjMoveStringLiteralDfa4_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(2, old0); + return jjStartNfa_0(2, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(3, active0); + jjStopStringLiteralDfa_0(3, active0, 0L); return 4; } switch(curChar) @@ -564,27 +594,32 @@ return jjStartNfaWithStates_0(4, 18, 1); else if ((active0 & 0x1000000L) != 0L) return jjStartNfaWithStates_0(4, 24, 1); - return jjMoveStringLiteralDfa5_0(active0, 0x20800008000000L); + return jjMoveStringLiteralDfa5_0(active0, 0x208000008000000L); case 71: case 103: - return jjMoveStringLiteralDfa5_0(active0, 0x80000000000000L); + return jjMoveStringLiteralDfa5_0(active0, 0x800000000000000L); case 72: case 104: - return jjMoveStringLiteralDfa5_0(active0, 0x10000000000000L); + return jjMoveStringLiteralDfa5_0(active0, 0x100000000000000L); case 75: case 107: - if ((active0 & 0x200000000000L) != 0L) - return jjStartNfaWithStates_0(4, 45, 1); + if ((active0 & 0x2000000000000L) != 0L) + return jjStartNfaWithStates_0(4, 49, 1); break; case 76: case 108: - return jjMoveStringLiteralDfa5_0(active0, 0x40000200000L); + return jjMoveStringLiteralDfa5_0(active0, 0x400000200000L); case 77: case 109: - return jjMoveStringLiteralDfa5_0(active0, 0x1000010000000L); + return jjMoveStringLiteralDfa5_0(active0, 0x10000010000000L); case 79: case 111: - return jjMoveStringLiteralDfa5_0(active0, 0x8000000000000L); + return jjMoveStringLiteralDfa5_0(active0, 0x80000000000000L); + case 80: + case 112: + if ((active0 & 0x2000000000000000L) != 0L) + return jjStartNfaWithStates_0(4, 61, 1); + break; case 82: case 114: if ((active0 & 0x40L) != 0L) @@ -591,7 +626,7 @@ return jjStartNfaWithStates_0(4, 6, 1); else if ((active0 & 0x80L) != 0L) return jjStartNfaWithStates_0(4, 7, 1); - return jjMoveStringLiteralDfa5_0(active0, 0x480000010200L); + return jjMoveStringLiteralDfa5_0(active0, 0x4800000010200L); case 83: case 115: return jjMoveStringLiteralDfa5_0(active0, 0x20000000L); @@ -599,17 +634,17 @@ case 116: if ((active0 & 0x80000000L) != 0L) return jjStartNfaWithStates_0(4, 31, 1); - return jjMoveStringLiteralDfa5_0(active0, 0x2000000880800L); + return jjMoveStringLiteralDfa5_0(active0, 0x20000000880800L); case 85: case 117: - return jjMoveStringLiteralDfa5_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa5_0(active0, 0x40000000000000L); case 86: case 118: - return jjMoveStringLiteralDfa5_0(active0, 0x20040000000L); + return jjMoveStringLiteralDfa5_0(active0, 0x200040000000L); default : break; } - return jjStartNfa_0(3, active0); + return jjStartNfa_0(3, active0, 0L); } private final int jjMoveStringLiteralDfa5_0(long old0, long active0) { @@ -614,10 +649,10 @@ private final int jjMoveStringLiteralDfa5_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(3, old0); + return jjStartNfa_0(3, old0, 
0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(4, active0); + jjStopStringLiteralDfa_0(4, active0, 0L); return 5; } switch(curChar) @@ -624,14 +659,14 @@ { case 65: case 97: - return jjMoveStringLiteralDfa6_0(active0, 0x10000000000000L); + return jjMoveStringLiteralDfa6_0(active0, 0x100000000000000L); case 67: case 99: - return jjMoveStringLiteralDfa6_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa6_0(active0, 0x40000000000000L); case 68: case 100: - if ((active0 & 0x400000000000L) != 0L) - return jjStartNfaWithStates_0(5, 46, 1); + if ((active0 & 0x4000000000000L) != 0L) + return jjStartNfaWithStates_0(5, 50, 1); break; case 69: case 101: @@ -641,15 +676,15 @@ return jjStartNfaWithStates_0(5, 19, 1); else if ((active0 & 0x200000L) != 0L) return jjStartNfaWithStates_0(5, 21, 1); - else if ((active0 & 0x80000000000000L) != 0L) - return jjStartNfaWithStates_0(5, 55, 1); - return jjMoveStringLiteralDfa6_0(active0, 0xe0040000000L); + else if ((active0 & 0x800000000000000L) != 0L) + return jjStartNfaWithStates_0(5, 59, 1); + return jjMoveStringLiteralDfa6_0(active0, 0xe00040000000L); case 70: case 102: - return jjMoveStringLiteralDfa6_0(active0, 0x1000000000000L); + return jjMoveStringLiteralDfa6_0(active0, 0x10000000000000L); case 73: case 105: - return jjMoveStringLiteralDfa6_0(active0, 0x2000000800200L); + return jjMoveStringLiteralDfa6_0(active0, 0x20000000800200L); case 76: case 108: return jjMoveStringLiteralDfa6_0(active0, 0x400000L); @@ -655,13 +690,13 @@ return jjMoveStringLiteralDfa6_0(active0, 0x400000L); case 77: case 109: - return jjMoveStringLiteralDfa6_0(active0, 0x800000000000L); + return jjMoveStringLiteralDfa6_0(active0, 0x8000000000000L); case 78: case 110: - return jjMoveStringLiteralDfa6_0(active0, 0x20000010000000L); + return jjMoveStringLiteralDfa6_0(active0, 0x200000010000000L); case 82: case 114: - return jjMoveStringLiteralDfa6_0(active0, 0x8000000000000L); + return jjMoveStringLiteralDfa6_0(active0, 0x80000000000000L); case 83: case 115: if ((active0 & 0x8000000L) != 0L) @@ -677,7 +712,7 @@ default : break; } - return jjStartNfa_0(4, active0); + return jjStartNfa_0(4, active0, 0L); } private final int jjMoveStringLiteralDfa6_0(long old0, long active0) { @@ -682,10 +717,10 @@ private final int jjMoveStringLiteralDfa6_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(4, old0); + return jjStartNfa_0(4, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(5, active0); + jjStopStringLiteralDfa_0(5, active0, 0L); return 6; } switch(curChar) @@ -691,7 +726,7 @@ switch(curChar) { case 95: - return jjMoveStringLiteralDfa7_0(active0, 0x8000000000000L); + return jjMoveStringLiteralDfa7_0(active0, 0x80000000000000L); case 65: case 97: return jjMoveStringLiteralDfa7_0(active0, 0x20000000L); @@ -708,29 +743,29 @@ return jjMoveStringLiteralDfa7_0(active0, 0x10000000L); case 72: case 104: - return jjMoveStringLiteralDfa7_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa7_0(active0, 0x40000000000000L); case 73: case 105: - return jjMoveStringLiteralDfa7_0(active0, 0x1000000000000L); + return jjMoveStringLiteralDfa7_0(active0, 0x10000000000000L); case 78: case 110: - return jjMoveStringLiteralDfa7_0(active0, 0x2040000800000L); + return jjMoveStringLiteralDfa7_0(active0, 0x20400000800000L); case 79: case 111: - return jjMoveStringLiteralDfa7_0(active0, 0x800000000000L); + return jjMoveStringLiteralDfa7_0(active0, 
0x8000000000000L); case 82: case 114: - return jjMoveStringLiteralDfa7_0(active0, 0x20040000000L); + return jjMoveStringLiteralDfa7_0(active0, 0x200040000000L); case 83: case 115: - return jjMoveStringLiteralDfa7_0(active0, 0x10080000000000L); + return jjMoveStringLiteralDfa7_0(active0, 0x100800000000000L); case 84: case 116: - return jjMoveStringLiteralDfa7_0(active0, 0x20000000000000L); + return jjMoveStringLiteralDfa7_0(active0, 0x200000000000000L); default : break; } - return jjStartNfa_0(5, active0); + return jjStartNfa_0(5, active0, 0L); } private final int jjMoveStringLiteralDfa7_0(long old0, long active0) { @@ -735,10 +770,10 @@ private final int jjMoveStringLiteralDfa7_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(5, old0); + return jjStartNfa_0(5, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(6, active0); + jjStopStringLiteralDfa_0(6, active0, 0L); return 7; } switch(curChar) @@ -750,7 +785,7 @@ case 101: if ((active0 & 0x200L) != 0L) return jjStartNfaWithStates_0(7, 9, 1); - return jjMoveStringLiteralDfa8_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa8_0(active0, 0x40000000000000L); case 71: case 103: if ((active0 & 0x800000L) != 0L) @@ -755,15 +790,15 @@ case 103: if ((active0 & 0x800000L) != 0L) return jjStartNfaWithStates_0(7, 23, 1); - return jjMoveStringLiteralDfa8_0(active0, 0x2040000000000L); + return jjMoveStringLiteralDfa8_0(active0, 0x20400000000000L); case 72: case 104: - if ((active0 & 0x10000000000000L) != 0L) - return jjStartNfaWithStates_0(7, 52, 1); + if ((active0 & 0x100000000000000L) != 0L) + return jjStartNfaWithStates_0(7, 56, 1); break; case 76: case 108: - return jjMoveStringLiteralDfa8_0(active0, 0x1000000000000L); + return jjMoveStringLiteralDfa8_0(active0, 0x10000000000000L); case 77: case 109: return jjMoveStringLiteralDfa8_0(active0, 0x20000000L); @@ -769,14 +804,14 @@ return jjMoveStringLiteralDfa8_0(active0, 0x20000000L); case 82: case 114: - return jjMoveStringLiteralDfa8_0(active0, 0x20800000000000L); + return jjMoveStringLiteralDfa8_0(active0, 0x208000000000000L); case 83: case 115: - return jjMoveStringLiteralDfa8_0(active0, 0x80a0040000000L); + return jjMoveStringLiteralDfa8_0(active0, 0x80a00040000000L); default : break; } - return jjStartNfa_0(6, active0); + return jjStartNfa_0(6, active0, 0L); } private final int jjMoveStringLiteralDfa8_0(long old0, long active0) { @@ -781,10 +816,10 @@ private final int jjMoveStringLiteralDfa8_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(6, old0); + return jjStartNfa_0(6, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(7, active0); + jjStopStringLiteralDfa_0(7, active0, 0L); return 8; } switch(curChar) @@ -790,13 +825,13 @@ switch(curChar) { case 95: - return jjMoveStringLiteralDfa9_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa9_0(active0, 0x20000000000000L); case 68: case 100: - return jjMoveStringLiteralDfa9_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa9_0(active0, 0x40000000000000L); case 73: case 105: - return jjMoveStringLiteralDfa9_0(active0, 0x280a0040000000L); + return jjMoveStringLiteralDfa9_0(active0, 0x280a00040000000L); case 77: case 109: return jjMoveStringLiteralDfa9_0(active0, 0x10000000L); @@ -807,11 +842,11 @@ break; case 84: case 116: - return jjMoveStringLiteralDfa9_0(active0, 0x1040000000000L); + return jjMoveStringLiteralDfa9_0(active0, 
0x10400000000000L); case 89: case 121: - if ((active0 & 0x800000000000L) != 0L) - return jjStartNfaWithStates_0(8, 47, 1); + if ((active0 & 0x8000000000000L) != 0L) + return jjStartNfaWithStates_0(8, 51, 1); break; default : break; @@ -816,7 +851,7 @@ default : break; } - return jjStartNfa_0(7, active0); + return jjStartNfa_0(7, active0, 0L); } private final int jjMoveStringLiteralDfa9_0(long old0, long active0) { @@ -821,10 +856,10 @@ private final int jjMoveStringLiteralDfa9_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(7, old0); + return jjStartNfa_0(7, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(8, active0); + jjStopStringLiteralDfa_0(8, active0, 0L); return 9; } switch(curChar) @@ -830,17 +865,17 @@ switch(curChar) { case 95: - return jjMoveStringLiteralDfa10_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa10_0(active0, 0x40000000000000L); case 66: case 98: - return jjMoveStringLiteralDfa10_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa10_0(active0, 0x20000000000000L); case 69: case 101: - return jjMoveStringLiteralDfa10_0(active0, 0x21000000000000L); + return jjMoveStringLiteralDfa10_0(active0, 0x210000000000000L); case 72: case 104: - if ((active0 & 0x40000000000L) != 0L) - return jjStartNfaWithStates_0(9, 42, 1); + if ((active0 & 0x400000000000L) != 0L) + return jjStartNfaWithStates_0(9, 46, 1); break; case 73: case 105: @@ -847,14 +882,14 @@ return jjMoveStringLiteralDfa10_0(active0, 0x10000000L); case 79: case 111: - return jjMoveStringLiteralDfa10_0(active0, 0xa0040000000L); + return jjMoveStringLiteralDfa10_0(active0, 0xa00040000000L); case 90: case 122: - return jjMoveStringLiteralDfa10_0(active0, 0x8000000000000L); + return jjMoveStringLiteralDfa10_0(active0, 0x80000000000000L); default : break; } - return jjStartNfa_0(8, active0); + return jjStartNfa_0(8, active0, 0L); } private final int jjMoveStringLiteralDfa10_0(long old0, long active0) { @@ -859,10 +894,10 @@ private final int jjMoveStringLiteralDfa10_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(8, old0); + return jjStartNfa_0(8, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(9, active0); + jjStopStringLiteralDfa_0(9, active0, 0L); return 10; } switch(curChar) @@ -869,29 +904,29 @@ { case 66: case 98: - return jjMoveStringLiteralDfa11_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa11_0(active0, 0x40000000000000L); case 69: case 101: - if ((active0 & 0x8000000000000L) != 0L) - return jjStartNfaWithStates_0(10, 51, 1); + if ((active0 & 0x80000000000000L) != 0L) + return jjStartNfaWithStates_0(10, 55, 1); break; case 76: case 108: - return jjMoveStringLiteralDfa11_0(active0, 0x2000010000000L); + return jjMoveStringLiteralDfa11_0(active0, 0x20000010000000L); case 78: case 110: - if ((active0 & 0x80000000000L) != 0L) - return jjStartNfaWithStates_0(10, 43, 1); - return jjMoveStringLiteralDfa11_0(active0, 0x20040000000L); + if ((active0 & 0x800000000000L) != 0L) + return jjStartNfaWithStates_0(10, 47, 1); + return jjMoveStringLiteralDfa11_0(active0, 0x200040000000L); case 82: case 114: - if ((active0 & 0x1000000000000L) != 0L) - return jjStartNfaWithStates_0(10, 48, 1); + if ((active0 & 0x10000000000000L) != 0L) + return jjStartNfaWithStates_0(10, 52, 1); break; case 83: case 115: - if ((active0 & 0x20000000000000L) != 0L) - return jjStartNfaWithStates_0(10, 53, 1); + if ((active0 & 
0x200000000000000L) != 0L) + return jjStartNfaWithStates_0(10, 57, 1); break; default : break; @@ -896,7 +931,7 @@ default : break; } - return jjStartNfa_0(9, active0); + return jjStartNfa_0(9, active0, 0L); } private final int jjMoveStringLiteralDfa11_0(long old0, long active0) { @@ -901,10 +936,10 @@ private final int jjMoveStringLiteralDfa11_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(9, old0); + return jjStartNfa_0(9, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(10, active0); + jjStopStringLiteralDfa_0(10, active0, 0L); return 11; } switch(curChar) @@ -914,10 +949,10 @@ return jjMoveStringLiteralDfa12_0(active0, 0x10000000L); case 76: case 108: - return jjMoveStringLiteralDfa12_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa12_0(active0, 0x40000000000000L); case 79: case 111: - return jjMoveStringLiteralDfa12_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa12_0(active0, 0x20000000000000L); case 83: case 115: if ((active0 & 0x40000000L) != 0L) @@ -922,8 +957,8 @@ case 115: if ((active0 & 0x40000000L) != 0L) return jjStartNfaWithStates_0(11, 30, 1); - else if ((active0 & 0x20000000000L) != 0L) - return jjStartNfaWithStates_0(11, 41, 1); + else if ((active0 & 0x200000000000L) != 0L) + return jjStartNfaWithStates_0(11, 45, 1); break; default : break; @@ -928,7 +963,7 @@ default : break; } - return jjStartNfa_0(10, active0); + return jjStartNfa_0(10, active0, 0L); } private final int jjMoveStringLiteralDfa12_0(long old0, long active0) { @@ -933,10 +968,10 @@ private final int jjMoveStringLiteralDfa12_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(10, old0); + return jjStartNfa_0(10, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(11, active0); + jjStopStringLiteralDfa_0(11, active0, 0L); return 12; } switch(curChar) @@ -946,11 +981,11 @@ return jjMoveStringLiteralDfa13_0(active0, 0x10000000L); case 79: case 111: - return jjMoveStringLiteralDfa13_0(active0, 0x6000000000000L); + return jjMoveStringLiteralDfa13_0(active0, 0x60000000000000L); default : break; } - return jjStartNfa_0(11, active0); + return jjStartNfa_0(11, active0, 0L); } private final int jjMoveStringLiteralDfa13_0(long old0, long active0) { @@ -955,10 +990,10 @@ private final int jjMoveStringLiteralDfa13_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(11, old0); + return jjStartNfa_0(11, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(12, active0); + jjStopStringLiteralDfa_0(12, active0, 0L); return 13; } switch(curChar) @@ -965,10 +1000,10 @@ { case 77: case 109: - return jjMoveStringLiteralDfa14_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa14_0(active0, 0x20000000000000L); case 79: case 111: - return jjMoveStringLiteralDfa14_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa14_0(active0, 0x40000000000000L); case 83: case 115: if ((active0 & 0x10000000L) != 0L) @@ -977,7 +1012,7 @@ default : break; } - return jjStartNfa_0(12, active0); + return jjStartNfa_0(12, active0, 0L); } private final int jjMoveStringLiteralDfa14_0(long old0, long active0) { @@ -982,10 +1017,10 @@ private final int jjMoveStringLiteralDfa14_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(12, old0); + return jjStartNfa_0(12, old0, 0L); try { curChar = 
input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(13, active0); + jjStopStringLiteralDfa_0(13, active0, 0L); return 14; } switch(curChar) @@ -992,14 +1027,14 @@ { case 70: case 102: - return jjMoveStringLiteralDfa15_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa15_0(active0, 0x20000000000000L); case 77: case 109: - return jjMoveStringLiteralDfa15_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa15_0(active0, 0x40000000000000L); default : break; } - return jjStartNfa_0(13, active0); + return jjStartNfa_0(13, active0, 0L); } private final int jjMoveStringLiteralDfa15_0(long old0, long active0) { @@ -1004,10 +1039,10 @@ private final int jjMoveStringLiteralDfa15_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(13, old0); + return jjStartNfa_0(13, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(14, active0); + jjStopStringLiteralDfa_0(14, active0, 0L); return 15; } switch(curChar) @@ -1014,14 +1049,14 @@ { case 70: case 102: - return jjMoveStringLiteralDfa16_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa16_0(active0, 0x40000000000000L); case 73: case 105: - return jjMoveStringLiteralDfa16_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa16_0(active0, 0x20000000000000L); default : break; } - return jjStartNfa_0(14, active0); + return jjStartNfa_0(14, active0, 0L); } private final int jjMoveStringLiteralDfa16_0(long old0, long active0) { @@ -1026,10 +1061,10 @@ private final int jjMoveStringLiteralDfa16_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(14, old0); + return jjStartNfa_0(14, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(15, active0); + jjStopStringLiteralDfa_0(15, active0, 0L); return 16; } switch(curChar) @@ -1036,14 +1071,14 @@ { case 73: case 105: - return jjMoveStringLiteralDfa17_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa17_0(active0, 0x40000000000000L); case 76: case 108: - return jjMoveStringLiteralDfa17_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa17_0(active0, 0x20000000000000L); default : break; } - return jjStartNfa_0(15, active0); + return jjStartNfa_0(15, active0, 0L); } private final int jjMoveStringLiteralDfa17_0(long old0, long active0) { @@ -1048,10 +1083,10 @@ private final int jjMoveStringLiteralDfa17_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(15, old0); + return jjStartNfa_0(15, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(16, active0); + jjStopStringLiteralDfa_0(16, active0, 0L); return 17; } switch(curChar) @@ -1058,14 +1093,14 @@ { case 76: case 108: - return jjMoveStringLiteralDfa18_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa18_0(active0, 0x40000000000000L); case 84: case 116: - return jjMoveStringLiteralDfa18_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa18_0(active0, 0x20000000000000L); default : break; } - return jjStartNfa_0(16, active0); + return jjStartNfa_0(16, active0, 0L); } private final int jjMoveStringLiteralDfa18_0(long old0, long active0) { @@ -1070,10 +1105,10 @@ private final int jjMoveStringLiteralDfa18_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(16, old0); + return jjStartNfa_0(16, old0, 0L); try { curChar = input_stream.readChar(); } 
catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(17, active0); + jjStopStringLiteralDfa_0(17, active0, 0L); return 18; } switch(curChar) @@ -1080,14 +1115,14 @@ { case 69: case 101: - return jjMoveStringLiteralDfa19_0(active0, 0x2000000000000L); + return jjMoveStringLiteralDfa19_0(active0, 0x20000000000000L); case 84: case 116: - return jjMoveStringLiteralDfa19_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa19_0(active0, 0x40000000000000L); default : break; } - return jjStartNfa_0(17, active0); + return jjStartNfa_0(17, active0, 0L); } private final int jjMoveStringLiteralDfa19_0(long old0, long active0) { @@ -1092,10 +1127,10 @@ private final int jjMoveStringLiteralDfa19_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(17, old0); + return jjStartNfa_0(17, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(18, active0); + jjStopStringLiteralDfa_0(18, active0, 0L); return 19; } switch(curChar) @@ -1102,11 +1137,11 @@ { case 69: case 101: - return jjMoveStringLiteralDfa20_0(active0, 0x4000000000000L); + return jjMoveStringLiteralDfa20_0(active0, 0x40000000000000L); case 82: case 114: - if ((active0 & 0x2000000000000L) != 0L) - return jjStartNfaWithStates_0(19, 49, 1); + if ((active0 & 0x20000000000000L) != 0L) + return jjStartNfaWithStates_0(19, 53, 1); break; default : break; @@ -1111,7 +1146,7 @@ default : break; } - return jjStartNfa_0(18, active0); + return jjStartNfa_0(18, active0, 0L); } private final int jjMoveStringLiteralDfa20_0(long old0, long active0) { @@ -1116,10 +1151,10 @@ private final int jjMoveStringLiteralDfa20_0(long old0, long active0) { if (((active0 &= old0)) == 0L) - return jjStartNfa_0(18, old0); + return jjStartNfa_0(18, old0, 0L); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { - jjStopStringLiteralDfa_0(19, active0); + jjStopStringLiteralDfa_0(19, active0, 0L); return 20; } switch(curChar) @@ -1126,8 +1161,8 @@ { case 82: case 114: - if ((active0 & 0x4000000000000L) != 0L) - return jjStartNfaWithStates_0(20, 50, 1); + if ((active0 & 0x40000000000000L) != 0L) + return jjStartNfaWithStates_0(20, 54, 1); break; default : break; @@ -1132,7 +1167,7 @@ default : break; } - return jjStartNfa_0(19, active0); + return jjStartNfa_0(19, active0, 0L); } private final void jjCheckNAdd(int state) { @@ -1189,14 +1224,14 @@ case 0: if ((0x3ff000000000000L & l) != 0L) { - if (kind > 57) - kind = 57; + if (kind > 64) + kind = 64; jjCheckNAddStates(0, 6); } - else if ((0x400e00000000000L & l) != 0L) + else if ((0x400a00000000000L & l) != 0L) { - if (kind > 56) - kind = 56; + if (kind > 63) + kind = 63; jjCheckNAdd(1); } else if (curChar == 39) @@ -1203,28 +1238,14 @@ jjCheckNAddStates(7, 9); else if (curChar == 34) jjCheckNAdd(8); - if (curChar == 46) + else if (curChar == 46) jjCheckNAdd(3); break; - case 32: - if ((0x7ffe00000000000L & l) != 0L) - { - if (kind > 56) - kind = 56; - jjCheckNAdd(1); - } - if ((0x3ff000000000000L & l) != 0L) - { - if (kind > 58) - kind = 58; - jjCheckNAddTwoStates(3, 4); - } - break; case 1: - if ((0x7ffe00000000000L & l) == 0L) + if ((0x7ffa00000000000L & l) == 0L) break; - if (kind > 56) - kind = 56; + if (kind > 63) + kind = 63; jjCheckNAdd(1); break; case 2: @@ -1234,8 +1255,8 @@ case 3: if ((0x3ff000000000000L & l) == 0L) break; - if (kind > 58) - kind = 58; + if (kind > 65) + kind = 65; jjCheckNAddTwoStates(3, 4); break; case 5: @@ -1245,8 +1266,8 @@ case 6: if ((0x3ff000000000000L & l) == 0L) break; - if 
(kind > 58) - kind = 58; + if (kind > 65) + kind = 65; jjCheckNAdd(6); break; case 7: @@ -1258,8 +1279,8 @@ jjCheckNAddTwoStates(8, 9); break; case 9: - if (curChar == 34 && kind > 60) - kind = 60; + if (curChar == 34 && kind > 67) + kind = 67; break; case 10: if (curChar == 39) @@ -1282,8 +1303,8 @@ jjCheckNAddStates(10, 12); break; case 15: - if (curChar == 39 && kind > 61) - kind = 61; + if (curChar == 39 && kind > 68) + kind = 68; break; case 16: if ((0x3ff000000000000L & l) == 0L) @@ -1288,8 +1309,8 @@ case 16: if ((0x3ff000000000000L & l) == 0L) break; - if (kind > 57) - kind = 57; + if (kind > 64) + kind = 64; jjCheckNAddStates(0, 6); break; case 17: @@ -1295,8 +1316,8 @@ case 17: if ((0x3ff000000000000L & l) == 0L) break; - if (kind > 57) - kind = 57; + if (kind > 64) + kind = 64; jjCheckNAdd(17); break; case 18: @@ -1310,8 +1331,8 @@ case 20: if ((0x3ff000000000000L & l) == 0L) break; - if (kind > 58) - kind = 58; + if (kind > 65) + kind = 65; jjCheckNAddTwoStates(20, 21); break; case 22: @@ -1321,8 +1342,8 @@ case 23: if ((0x3ff000000000000L & l) == 0L) break; - if (kind > 58) - kind = 58; + if (kind > 65) + kind = 65; jjCheckNAdd(23); break; case 24: @@ -1336,8 +1357,8 @@ case 27: if ((0x3ff000000000000L & l) == 0L) break; - if (kind > 58) - kind = 58; + if (kind > 65) + kind = 65; jjCheckNAdd(27); break; case 28: @@ -1343,8 +1364,8 @@ case 28: if ((0x3ff000000000000L & l) == 0L) break; - if (kind > 58) - kind = 58; + if (kind > 65) + kind = 65; jjCheckNAddTwoStates(28, 29); break; case 30: @@ -1354,8 +1375,8 @@ case 31: if ((0x3ff000000000000L & l) == 0L) break; - if (kind > 58) - kind = 58; + if (kind > 65) + kind = 65; jjCheckNAdd(31); break; default : break; @@ -1370,18 +1391,11 @@ switch(jjstateSet[--i]) { case 0: - if ((0x7fffffe87fffffeL & l) == 0L) - break; - if (kind > 56) - kind = 56; - jjCheckNAdd(1); - break; - case 32: case 1: if ((0x7fffffe87fffffeL & l) == 0L) break; - if (kind > 56) - kind = 56; + if (kind > 63) + kind = 63; jjCheckNAdd(1); break; case 4: @@ -1457,9 +1471,10 @@ public static final String[] jjstrLiteralImages = { "", null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, "\54", "\56", "\50", "\51", "\75", -"\74\76", "\52", null, null, null, null, null, null, null, null, null, null, null, null, -null, null, null, null, null, null, null, null, null, "\73", }; +null, null, null, null, null, null, null, "\54", "\56", "\50", "\51", "\75", "\76", +"\74", null, null, "\41\75", "\52", null, null, null, null, null, null, null, null, +null, null, null, null, null, null, null, null, null, null, null, null, null, null, +null, null, "\73", }; public static final String[] lexStateNames = { "DEFAULT", }; @@ -1464,10 +1479,10 @@ "DEFAULT", }; static final long[] jjtoToken = { - 0x77ffffffffffffe1L, + 0xffffffffffffffe1L, 0x3bL, }; static final long[] jjtoSkip = { - 0x1eL, + 0x1eL, 0x0L, }; protected SimpleCharStream input_stream; private final int[] jjrounds = new int[32]; Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj (revision 587398) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj (working copy) @@ -33,8 +33,11 @@ import java.io.StringReader; import java.io.Reader; import java.io.Writer; +import java.net.URLEncoder; 
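A note on the token-manager hunks above: the hex constants change in lockstep with the token kinds because the bit tested against active0 for kind k is simply 1L << k, as the paired edits themselves show (kind 47 with mask 0x800000000000L becomes kind 51 with mask 0x8000000000000L once the new comparison tokens are inserted ahead of it). A minimal check of that relationship; TokenMaskCheck is illustrative only, not a class from this patch:

    public class TokenMaskCheck {
        public static void main(String[] args) {
            // Each literal token's membership in active0 is tracked as 1L << kind.
            System.out.println(Long.toHexString(1L << 47)); // 800000000000  (kind before the patch)
            System.out.println(Long.toHexString(1L << 51)); // 8000000000000 (same token, renumbered by four)
        }
    }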
+import java.io.UnsupportedEncodingException; import org.apache.hadoop.hbase.shell.*; +import org.apache.hadoop.hbase.shell.algebra.*; /** * Parsing command line. @@ -102,7 +105,11 @@ | | | "> + | "> + | + | + | + | | | @@ -121,9 +128,16 @@ | } +TOKEN : +{ + + | + | +} + TOKEN : /** Literals */ { - + | | )? @@ -144,7 +158,10 @@ Command statement = null; } { - ([statement = cmdStatement()] ";" | ) + ( + [statement = cmdStatement()] ";" | + ) + { return statement; } @@ -171,6 +188,8 @@ | cmd = clearCommand() | cmd = fsCommand() | cmd = jarCommand() + | cmd = substituteCommand() + | cmd = saveCommand() ) { return cmd; @@ -245,6 +264,8 @@ | t= | t= | t= + | t= + | t= | t= ) { argument = t.image.toString(); } ] @@ -263,7 +284,7 @@ { [ - argument = Identifier() + argument = identifier() ] { show.setArgument(argument); @@ -278,7 +299,7 @@ } { ( | ) - argument = Identifier() + argument = identifier() { desc.setArgument(argument); return desc; @@ -360,7 +381,7 @@ { - table = Identifier() + table = identifier() { createCommand.setTable(table); } @@ -367,7 +388,7 @@ - column = Identifier() + column = identifier() columnSpec = ColumnSpec() { createCommand.addColumnSpec(column, columnSpec); @@ -375,7 +396,7 @@ ( - column = Identifier() + column = identifier() columnSpec = ColumnSpec() { createCommand.addColumnSpec(column, columnSpec); @@ -395,7 +416,7 @@ } { -
table = Identifier() +
table = identifier() { alterCommand.setTable(table); } ( @@ -400,7 +421,7 @@ ( LOOKAHEAD(2) - column = Identifier() columnSpec = ColumnSpec() + column = identifier() columnSpec = ColumnSpec() { alterCommand.setOperationType(AlterCommand.OperationType.ADD); alterCommand.addColumnSpec(column, columnSpec); @@ -412,7 +433,7 @@ alterCommand.setOperationType(AlterCommand.OperationType.ADD); } - column = Identifier() columnSpec = ColumnSpec() + column = identifier() columnSpec = ColumnSpec() { alterCommand.addColumnSpec(column, columnSpec); } @@ -419,7 +440,7 @@ ( - column = Identifier() + column = identifier() columnSpec = ColumnSpec() { alterCommand.addColumnSpec(column, columnSpec); @@ -427,7 +448,7 @@ )* | - column = Identifier() + column = identifier() { alterCommand.setOperationType(AlterCommand.OperationType.DROP); alterCommand.setColumn(column); @@ -433,7 +454,7 @@ alterCommand.setColumn(column); } | - column = Identifier() columnSpec = ColumnSpec() + column = identifier() columnSpec = ColumnSpec() { alterCommand.setOperationType(AlterCommand.OperationType.CHANGE); alterCommand.addColumnSpec(column, columnSpec); @@ -450,7 +471,7 @@ {
- tableList = TableList() + tableList = tableList() { drop.setTableList(tableList); return drop; @@ -468,11 +489,10 @@ { - table = Identifier() + table = identifier() { in.setTable(table); } - columnfamilies = getColumns() { in.setColumnfamilies(columnfamilies); @@ -477,7 +497,7 @@ { in.setColumnfamilies(columnfamilies); } - + values = getLiteralValues() { in.setValues(values); @@ -502,7 +522,7 @@ } { - columnList = ColumnList() + columnList = columnList() { deleteCommand.setColumnList(columnList); } @@ -508,7 +528,7 @@ } - table = Identifier() + table = identifier() { deleteCommand.setTable(table); } @@ -534,9 +554,9 @@ } {
+ tableName = identifier() + { substitute.setInput(tableName); } + + | operation=chainKey= + + { + List columnList = columnList(); + for (int i = 0; i < columnList.size(); i++) { + condition += appendIndicator(columnList.get(i)); + } + } + + { + substitute.setChainKey(chainKey.image.toString()); + substitute.setOperation(operation.image.toString()); + substitute.setCondition(condition); + } + ) + + { + return substitute; + } +} + +SaveCommand saveCommand() : +{ + Token t = null; + String tableName; + SaveCommand save = new SaveCommand(this.out); +} +{ + t= + { save.setStatement(t.image.toString()); } +
+ tableName = identifier() { save.setOutput(tableName); } + + { + return save; + } +} + +// ///////////////////////////////////////////////////////////////////////////////////// // Utility expansion units... List getLiteralValues() : @@ -657,6 +764,19 @@ } } +String getColumn() : +{ + Token col; +} +{ + ( + ( col= | col= | col= ) + { return col.image.toString(); } + | (col= | col= ) + { return col.image.substring(1,col.image.toString().length() - 1); } + ) +} + List getColumns() : // return parenthesized column list { List values = new ArrayList(); @@ -680,20 +800,7 @@ } } -String getColumn() : -{ - Token col; -} -{ - ( - ( col= | col= ) - { return col.image.toString(); } - | (col= | col= ) - { return col.image.substring(1,col.image.toString().length() - 1); } - ) -} - -List TableList() : +List tableList() : { List tableList = new ArrayList(); String table = null; @@ -699,8 +806,8 @@ String table = null; } { - table = Identifier() { tableList.add(table); } - ( table = Identifier() + table = identifier() { tableList.add(table); } + ( table = identifier() { tableList.add(table); } )* @@ -707,7 +814,7 @@ { return tableList; } } -List ColumnList() : +List columnList() : { List columnList = new ArrayList(); String column = null; @@ -737,7 +844,7 @@ { return Integer.parseInt(t.image.toString()); } } -String Identifier() : +String identifier() : { Token t = null; } @@ -748,4 +855,124 @@ | ( t= | t= ) { return t.image.substring(1,t.image.toString().length() - 1); } ) +} + +String booleanTerm() : +{ + String query = null; + String tmp = null; +} +{ + query = booleanTerms() + ( + ( + + { query += Constants.LOGICAL_CONNECTOR_AND; } + | + { query += Constants.LOGICAL_CONNECTOR_OR; } + ) tmp = booleanTerms() { query += tmp; } + )* + + { return query; } +} + +String booleanTerms() : +{ + Token tSearchName, tComparator, tComparand; + List inList = new ArrayList(); + String searchName=null,comparator=null,comparand=null; + Token joinColumn = null; + Token joinKey = null; +} +{ + ( + tSearchName= { searchName = tSearchName.image.toString(); } + [ + + + ( joinColumn= + { + searchName += "." + joinColumn.image.toString(); + } + | + { + searchName += Constants.RELATIONAL_JOIN_KEY; + } + ) + ] + ) + + ( + tComparator= + { comparator = tComparator.image.toString(); } + [ { comparator += "="; }] + | tComparator= + { comparator = tComparator.image.toString(); } + [ { comparator += "="; }] + | tComparator= + { comparator = tComparator.image.toString(); } + [ { comparator = ">" + comparator; } ] + [ { comparator = "<" + comparator; } ] + | tComparator= + { comparator = tComparator.image.toString(); } + | + { comparator = Constants.EXPRESSION_NOT_IN; } + | + { comparator = Constants.EXPRESSION_IN; } + ) + + ( + tComparand= + { comparand = tComparand.image.toString(); } + | tComparand= + { comparand = tComparand.image.substring(1,tComparand.image.length() - 1); } + | tComparand= + { comparand = tComparand.image.toString(); } + [ + + ( + + { + comparand += Constants.RELATIONAL_JOIN_KEY; + } + | joinColumn= + { + comparand += "." 
+ joinColumn.image.toString(); + } + ) + ] + | inList = getColumns() + { + if(comparator == null) { + comparator = Constants.EXPRESSION_IN; + } + comparand = ""; + try{ + for(int i=0; i 100[ AND cf_name2 = 'string_value']);" }); + + // Aggregation Functions + load.put("GROUP", new String[] { + "Group rows by value of an attribute and apply aggregate function independently to each group of rows", + "A = Table('table_name');" + + " B = Group A by ('cf_name1'[, 'cf_name2']);" }); + + return load; } Index: src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java =================================================================== --- src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java (revision 587398) +++ src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java (working copy) @@ -46,7 +46,6 @@ * TODO: INTO FILE is not yet implemented. */ public class SelectCommand extends BasicCommand { - private Text tableName; private Text rowKey = new Text(""); private List columns; Index: src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestBooleanCondition.java =================================================================== --- src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestBooleanCondition.java (revision 0) +++ src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestBooleanCondition.java (revision 0) @@ -0,0 +1,79 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.UnsupportedEncodingException; + +import junit.framework.TestCase; +import junit.framework.TestSuite; +import junit.textui.TestRunner; + +import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.shell.algebra.BooleanCondition; +import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.io.Text; + +/** + * Test boolean expression + */ +public class TestBooleanCondition extends TestCase { + + BooleanCondition booleanCondition = new BooleanCondition(); + public static String EXPRESSION_OR = "key:2 = value2 OR key:3 = value1"; + public static String EXPRESSION_AND = "key:2 = value2 AND key:3 = value1"; + public static String EXPRESSION = "key:2 = value2 AND key:1 = value1 AND key:3 = value3"; + + Text[] keys = { + new Text("key:1"), + new Text("key:2"), + new Text("key:3"), + }; + + ImmutableBytesWritable[] values = { + new ImmutableBytesWritable("value1".getBytes()), + new ImmutableBytesWritable("value2".getBytes()), + new ImmutableBytesWritable("value3".getBytes()) + }; + + public void testBooleanCondition() { + booleanCondition.setExpression(EXPRESSION_OR); + } + + public void testCheckConstraints() throws UnsupportedEncodingException { + MapWritable data = new MapWritable(); + for (int i = 0; i < keys.length; i++) { + data.put(keys[i], values[i]); + } + + booleanCondition.setExpression(EXPRESSION_OR); + assertTrue(booleanCondition.checkConstraints(data)); + + booleanCondition.setExpression(EXPRESSION_AND); + assertFalse(booleanCondition.checkConstraints(data)); + + booleanCondition.setExpression(EXPRESSION); + assertTrue(booleanCondition.checkConstraints(data)); + } + + public static void main(String[] args) { + TestRunner.run(new TestSuite(TestBooleanCondition.class)); + } + +} Index: src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestBooleanTermFilter.java =================================================================== --- src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestBooleanTermFilter.java (revision 0) +++ src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestBooleanTermFilter.java (revision 0) @@ -0,0 +1,239 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.util.Map; +import java.util.Random; +import java.util.TreeMap; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.dfs.MiniDFSCluster; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseAdmin; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HScannerInterface; +import org.apache.hadoop.hbase.HStoreKey; +import org.apache.hadoop.hbase.HTable; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MasterNotRunningException; +import org.apache.hadoop.hbase.MiniHBaseCluster; +import org.apache.hadoop.hbase.MultiRegionTable; +import org.apache.hadoop.hbase.mapred.IdentityTableReduce; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.MiniMRCluster; + +public class TestBooleanTermFilter extends MultiRegionTable { + @SuppressWarnings("hiding") + private static final Log LOG = LogFactory.getLog(TestBooleanTermFilter.class + .getName()); + + static final String INPUT_TABLE = "test_table"; + static final String OUTPUT_TABLE = "result_table"; + static final Text COUNT_COLUMNFAMILY = new Text("count:"); + static final Text RANDOMINT_COLUMNFAMILY = new Text("randomInt:"); + static final String GROUP_COLUMN_FAMILIES = "count: randomInt:"; + static final String BOOLEAN_TERM = "randomInt: > 100 AND count: <= 100 AND randomInt: !! 110|120|130|140|150"; + private MiniDFSCluster dfsCluster = null; + private FileSystem fs; + private Path dir; + private MiniHBaseCluster hCluster = null; + + /** + * {@inheritDoc} + */ + @Override + public void setUp() throws Exception { + super.setUp(); + conf.setLong("hbase.hregion.max.filesize", 256 * 1024); + dfsCluster = new MiniDFSCluster(conf, 1, true, (String[]) null); + try { + fs = dfsCluster.getFileSystem(); + dir = new Path("/hbase"); + fs.mkdirs(dir); + // Start up HBase cluster + hCluster = new MiniHBaseCluster(conf, 1, dfsCluster); + } catch (Exception e) { + if (dfsCluster != null) { + dfsCluster.shutdown(); + dfsCluster = null; + } + throw e; + } + } + + /** + * {@inheritDoc} + */ + @Override + public void tearDown() throws Exception { + super.tearDown(); + if (hCluster != null) { + hCluster.shutdown(); + } + + if (dfsCluster != null) { + dfsCluster.shutdown(); + } + + if (fs != null) { + try { + fs.close(); + } catch (IOException e) { + LOG.info("During tear down got a " + e.getMessage()); + } + } + } + + public void testBooleanFilterMapReduce() { + try { + HTableDescriptor desc = new HTableDescriptor(INPUT_TABLE); + String[] columns = GROUP_COLUMN_FAMILIES.split(" "); + for (int i = 0; i < columns.length; i++) { + desc.addFamily(new HColumnDescriptor(columns[i])); + } + HBaseAdmin admin = new HBaseAdmin(this.conf); + admin.createTable(desc); + + // insert random data into the input table + HTable table = new HTable(conf, new Text(INPUT_TABLE)); + Random oRandom = new Random(); + + for (int j = 0; j < 200; j++) { + int i = oRandom.nextInt(200) + 1; + + long lockid = table.startUpdate(new Text("rowKey" + j)); + table.put(lockid, COUNT_COLUMNFAMILY, Integer.toString(j).getBytes( + HConstants.UTF8_ENCODING)); + table.put(lockid, RANDOMINT_COLUMNFAMILY, Integer.toString(i).getBytes( + 
HConstants.UTF8_ENCODING)); + table.commit(lockid, System.currentTimeMillis()); + } + + long lockid = table.startUpdate(new Text("rowKey2001")); + table.put(lockid, COUNT_COLUMNFAMILY, "12" + .getBytes(HConstants.UTF8_ENCODING)); + table.put(lockid, RANDOMINT_COLUMNFAMILY, "110" + .getBytes(HConstants.UTF8_ENCODING)); + table.commit(lockid, System.currentTimeMillis()); + + } catch (MasterNotRunningException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + + try { + HTableDescriptor output = new HTableDescriptor(OUTPUT_TABLE); + String[] columns = GROUP_COLUMN_FAMILIES.split(" "); + for (int i = 0; i < columns.length; i++) { + output.addFamily(new HColumnDescriptor(columns[i])); + } + // create output table + HBaseAdmin admin = new HBaseAdmin(this.conf); + admin.createTable(output); + } catch (MasterNotRunningException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + + MiniMRCluster mrCluster = null; + try { + mrCluster = new MiniMRCluster(2, fs.getUri().toString(), 1); + + JobConf jobConf = new JobConf(conf, TestBooleanTermFilter.class); + jobConf.setJobName("process boolean term filter mapreduce"); + jobConf.setNumMapTasks(2); + jobConf.setNumReduceTasks(1); + + IdentityFilterMap.initJob(INPUT_TABLE, GROUP_COLUMN_FAMILIES, + BOOLEAN_TERM, IdentityFilterMap.class, jobConf); + + IdentityTableReduce.initJob(OUTPUT_TABLE, IdentityTableReduce.class, + jobConf); + + JobClient.runJob(jobConf); + + } catch (IOException e) { + e.printStackTrace(); + } finally { + mrCluster.shutdown(); + } + + try { + verify(conf, OUTPUT_TABLE); + } catch (IOException e) { + e.printStackTrace(); + } + } + + /** + * Check the filtered value + * + * @param conf + * @param outputTable + * @throws IOException + */ + private void verify(Configuration conf, String outputTable) + throws IOException { + HTable table = new HTable(conf, new Text(outputTable)); + Text[] columns = { COUNT_COLUMNFAMILY, RANDOMINT_COLUMNFAMILY }; + HScannerInterface scanner = table.obtainScanner(columns, + HConstants.EMPTY_START_ROW); + + try { + HStoreKey key = new HStoreKey(); + TreeMap results = new TreeMap(); + + while (scanner.next(key, results)) { + for (Map.Entry e : results.entrySet()) { + if (e.getKey().equals(COUNT_COLUMNFAMILY)) { + LOG.info("result_table.count: " + new String(e.getValue())); + assertTrue((Integer.parseInt(new String(e.getValue())) <= 100)); + } else { + LOG.info("result_table.randomInt: " + new String(e.getValue())); + assertTrue((Integer.parseInt(new String(e.getValue())) > 100 && checkNotInList(Integer + .parseInt(new String(e.getValue()))))); + } + } + } + + } finally { + scanner.close(); + } + + } + + /** + * Check 'NOT IN' filter-list + */ + private boolean checkNotInList(int parseInt) { + return (parseInt != 110 && parseInt != 120 && parseInt != 130 + && parseInt != 140 && parseInt != 150) ? true : false; + } + +} Index: src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestJoinCondition.java =================================================================== --- src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestJoinCondition.java (revision 0) +++ src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestJoinCondition.java (revision 0) @@ -0,0 +1,44 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.shell.algebra; + +import junit.framework.TestCase; +import junit.framework.TestSuite; +import junit.textui.TestRunner; + +import org.apache.hadoop.hbase.shell.relational.JoinCondition; + +/** + * Test join expression + */ +public class TestJoinCondition extends TestCase { + + JoinCondition joinCondition = new JoinCondition(); + public static String EXPRESSION = "b.size = a.ROW BOOL a.length = b.length AND a.studioName = Fox"; + + public void testJoinCondition() { + joinCondition.setExpression(EXPRESSION); + } + + public static void main(String[] args) { + TestRunner.run(new TestSuite(TestJoinCondition.class)); + } + +} Index: src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestTableJoinMapReduce.java =================================================================== --- src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestTableJoinMapReduce.java (revision 0) +++ src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestTableJoinMapReduce.java (revision 0) @@ -0,0 +1,240 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell.algebra; + +import java.io.IOException; +import java.util.TreeMap; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.dfs.MiniDFSCluster; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseAdmin; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HScannerInterface; +import org.apache.hadoop.hbase.HStoreKey; +import org.apache.hadoop.hbase.HTable; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MasterNotRunningException; +import org.apache.hadoop.hbase.MiniHBaseCluster; +import org.apache.hadoop.hbase.MultiRegionTable; +import org.apache.hadoop.hbase.shell.relational.IndexJoinMap; +import org.apache.hadoop.hbase.shell.relational.IndexJoinReduce; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.MiniMRCluster; + +public class TestTableJoinMapReduce extends MultiRegionTable { + @SuppressWarnings("hiding") + private static final Log LOG = LogFactory.getLog(TestTableJoinMapReduce.class + .getName()); + static final String FIRST_RELATION = "r1"; + static final String SECOND_RELATION = "r2"; + static final String JOIN_EXPRESSION = "r1.c: = r2.ROW BOOL "; + static final String FIRST_COLUMNS = "a: b: c:"; + static final String SECOND_COLUMNS = "d: e:"; + static final String OUTPUT_TABLE = "result_table"; + private MiniDFSCluster dfsCluster = null; + private FileSystem fs; + private Path dir; + private MiniHBaseCluster hCluster = null; + + /** + * {@inheritDoc} + */ + @Override + public void setUp() throws Exception { + super.setUp(); + conf.setLong("hbase.hregion.max.filesize", 256 * 1024); + dfsCluster = new MiniDFSCluster(conf, 1, true, (String[]) null); + try { + fs = dfsCluster.getFileSystem(); + dir = new Path("/hbase"); + fs.mkdirs(dir); + // Start up HBase cluster + hCluster = new MiniHBaseCluster(conf, 1, dfsCluster); + } catch (Exception e) { + if (dfsCluster != null) { + dfsCluster.shutdown(); + dfsCluster = null; + } + throw e; + } + } + + /** + * {@inheritDoc} + */ + @Override + public void tearDown() throws Exception { + super.tearDown(); + if (hCluster != null) { + hCluster.shutdown(); + } + + if (dfsCluster != null) { + dfsCluster.shutdown(); + } + + if (fs != null) { + try { + fs.close(); + } catch (IOException e) { + LOG.info("During tear down got a " + e.getMessage()); + } + } + } + + public void testTableJoinMapReduce() { + try { + HTableDescriptor desc = new HTableDescriptor(FIRST_RELATION); + String[] columns = FIRST_COLUMNS.split(" "); + for (int i = 0; i < columns.length; i++) { + desc.addFamily(new HColumnDescriptor(columns[i])); + } + HBaseAdmin admin = new HBaseAdmin(this.conf); + admin.createTable(desc); + + // insert random data into the input table + HTable table = new HTable(conf, new Text(FIRST_RELATION)); + for (int j = 0; j < 5; j++) { + long lockid = table.startUpdate(new Text("rowKey" + j)); + table.put(lockid, new Text("a:"), Integer.toString(j).getBytes( + HConstants.UTF8_ENCODING)); + table.put(lockid, new Text("b:"), Integer.toString(j).getBytes( + HConstants.UTF8_ENCODING)); + table.put(lockid, new Text("c:"), ("joinKey-" + Integer.toString(j)) + .getBytes(HConstants.UTF8_ENCODING)); + table.commit(lockid, System.currentTimeMillis()); + 
} + + } catch (MasterNotRunningException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + + try { + HTableDescriptor desc = new HTableDescriptor(SECOND_RELATION); + String[] columns = SECOND_COLUMNS.split(" "); + for (int i = 0; i < columns.length; i++) { + desc.addFamily(new HColumnDescriptor(columns[i])); + } + HBaseAdmin admin = new HBaseAdmin(this.conf); + admin.createTable(desc); + + // insert random data into the input table + HTable table = new HTable(conf, new Text(SECOND_RELATION)); + + for (int j = 0; j < 3; j++) { + long lockid = table.startUpdate(new Text("joinKey-" + j)); + table.put(lockid, new Text("d:"), ("s-" + Integer.toString(j)) + .getBytes(HConstants.UTF8_ENCODING)); + table.put(lockid, new Text("e:"), ("s-" + Integer.toString(j)) + .getBytes(HConstants.UTF8_ENCODING)); + table.commit(lockid, System.currentTimeMillis()); + } + + } catch (MasterNotRunningException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + + try { + HTableDescriptor output = new HTableDescriptor(OUTPUT_TABLE); + String[] columns = (FIRST_COLUMNS + " " + SECOND_COLUMNS).split(" "); + for (int i = 0; i < columns.length; i++) { + output.addFamily(new HColumnDescriptor(columns[i])); + } + // create output table + HBaseAdmin admin = new HBaseAdmin(this.conf); + admin.createTable(output); + } catch (MasterNotRunningException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + + MiniMRCluster mrCluster = null; + try { + mrCluster = new MiniMRCluster(2, fs.getUri().toString(), 1); + + JobConf jobConf = new JobConf(conf, TestTableJoinMapReduce.class); + jobConf.setJobName("process table join mapreduce"); + jobConf.setNumMapTasks(2); + jobConf.setNumReduceTasks(1); + + IndexJoinMap.initJob(FIRST_RELATION, SECOND_RELATION, FIRST_COLUMNS, + SECOND_COLUMNS, JOIN_EXPRESSION, IndexJoinMap.class, jobConf); + IndexJoinReduce.initJob(OUTPUT_TABLE, IndexJoinReduce.class, jobConf); + + JobClient.runJob(jobConf); + + } catch (IOException e) { + e.printStackTrace(); + } finally { + mrCluster.shutdown(); + } + + try { + verify(conf, OUTPUT_TABLE); + } catch (IOException e) { + e.printStackTrace(); + } + } + + /** + * Check the result table lattice size. 
+ * + * @param conf + * @param outputTable + * @throws IOException + */ + private void verify(Configuration conf, String outputTable) + throws IOException { + HTable table = new HTable(conf, new Text(outputTable)); + Text[] columns = { new Text("a:"), new Text("b:"), new Text("c:"), + new Text("d:"), new Text("e:") }; + HScannerInterface scanner = table.obtainScanner(columns, + HConstants.EMPTY_START_ROW); + + try { + HStoreKey key = new HStoreKey(); + TreeMap results = new TreeMap(); + + int i = 0; + while (scanner.next(key, results)) { + assertTrue(results.keySet().size() == 5); + LOG.info("result_table.column.size: " + results.keySet().size()); + i++; + } + assertTrue(i == 3); + LOG.info("result_table.row.count: " + i); + } finally { + scanner.close(); + } + + } +} Index: src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestSubstitutionVariables.java =================================================================== --- src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestSubstitutionVariables.java (revision 0) +++ src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestSubstitutionVariables.java (revision 0) @@ -0,0 +1,84 @@ +/** + * Copyright 2007 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.shell; + +import java.io.OutputStreamWriter; +import java.io.UnsupportedEncodingException; +import java.io.Writer; +import java.util.Map; + +import junit.framework.TestCase; +import junit.framework.TestSuite; +import junit.textui.TestRunner; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.shell.algebra.Constants; +import org.apache.hadoop.hbase.shell.relational.OperationManager; +import org.apache.hadoop.hbase.shell.relational.SubstituteCommand; +import org.apache.hadoop.hbase.shell.algebra.TestBooleanCondition; +import org.apache.hadoop.hbase.shell.relational.VariableRef; +import org.apache.hadoop.hbase.shell.relational.VariablesPool; + +/** + * Binding variables, substitution variables test + */ +public class TestSubstitutionVariables extends TestCase { + + private String TABLE_NAME = "table_name"; + private String SUBSTITUTION_VARIABLE = "A"; + static Configuration conf = new HBaseConfiguration(); + + public void testSubstitution() { + SubstituteCommand substitute = new SubstituteCommand(null); + + substitute.setKey(SUBSTITUTION_VARIABLE); + substitute.setInput(TABLE_NAME); + substitute.execute(conf); + + VariableRef ref = VariablesPool.get(SUBSTITUTION_VARIABLE).get(null); + assertTrue(ref.getArgument().equals(TABLE_NAME)); + } + + public void testCombinedQueries() throws UnsupportedEncodingException { + Writer out = new OutputStreamWriter(System.out, "UTF-8"); + SubstituteCommand substitute = new SubstituteCommand(out); + + substitute.setKey(SUBSTITUTION_VARIABLE); + substitute.setInput(TABLE_NAME); + substitute.execute(conf); + + substitute = new SubstituteCommand(out); + substitute.setKey("B"); + substitute.setChainKey(SUBSTITUTION_VARIABLE); + substitute.setOperation(Constants.RELATIONAL_SELECTION); + substitute.setCondition(TestBooleanCondition.EXPRESSION_OR); + substitute.execute(conf); + + OperationManager queryProc = new OperationManager(conf, "B", "output_table"); + Map statements = queryProc.getStatements(); + assertTrue(statements.containsKey(Constants.RELATIONAL_SELECTION)); + } + + public static void main(String[] args) { + TestRunner.run(new TestSuite(TestSubstitutionVariables.class)); + } + +}
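The BOOLEAN_TERM exercised in TestBooleanTermFilter above, "randomInt: > 100 AND count: <= 100 AND randomInt: !! 110|120|130|140|150", combines the new comparison operators with what the test treats as a NOT IN list ('!!' followed by '|'-separated values; see checkNotInList and the assertions in verify). A plain-Java reading of that predicate, written only from those assertions; rowPasses is a hypothetical helper, not an API from the patch:

    public class BooleanTermSketch {
        // A row survives the filter when count: <= 100, randomInt: > 100,
        // and randomInt: is none of the listed values.
        static boolean rowPasses(int count, int randomInt) {
            boolean notInList = randomInt != 110 && randomInt != 120 && randomInt != 130
                && randomInt != 140 && randomInt != 150;
            return randomInt > 100 && count <= 100 && notInList;
        }

        public static void main(String[] args) {
            System.out.println(rowPasses(12, 110)); // false: the "rowKey2001" row is filtered out
            System.out.println(rowPasses(50, 175)); // true: passes all three terms
        }
    }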