Index: modules/analysis/common/src/java/org/apache/lucene/analysis/compound/CompoundWordTokenFilterBase.java
===================================================================
--- modules/analysis/common/src/java/org/apache/lucene/analysis/compound/CompoundWordTokenFilterBase.java (revision 1003666)
+++ modules/analysis/common/src/java/org/apache/lucene/analysis/compound/CompoundWordTokenFilterBase.java (working copy)
@@ -133,7 +133,7 @@
     this(matchVersion, input,makeDictionary(dictionary),DEFAULT_MIN_WORD_SIZE,DEFAULT_MIN_SUBWORD_SIZE,DEFAULT_MAX_SUBWORD_SIZE, onlyLongestMatch);
   }
 
-  protected CompoundWordTokenFilterBase(Version matchVersion, TokenStream input, Set dictionary, boolean onlyLongestMatch) {
+  protected CompoundWordTokenFilterBase(Version matchVersion, TokenStream input, Set<?> dictionary, boolean onlyLongestMatch) {
     this(matchVersion, input,dictionary,DEFAULT_MIN_WORD_SIZE,DEFAULT_MIN_SUBWORD_SIZE,DEFAULT_MAX_SUBWORD_SIZE, onlyLongestMatch);
   }
 
@@ -141,11 +141,11 @@
     this(matchVersion, input,makeDictionary(dictionary),DEFAULT_MIN_WORD_SIZE,DEFAULT_MIN_SUBWORD_SIZE,DEFAULT_MAX_SUBWORD_SIZE, false);
   }
 
-  protected CompoundWordTokenFilterBase(Version matchVersion, TokenStream input, Set dictionary) {
+  protected CompoundWordTokenFilterBase(Version matchVersion, TokenStream input, Set<?> dictionary) {
     this(matchVersion, input,dictionary,DEFAULT_MIN_WORD_SIZE,DEFAULT_MIN_SUBWORD_SIZE,DEFAULT_MAX_SUBWORD_SIZE, false);
   }
 
-  protected CompoundWordTokenFilterBase(Version matchVersion, TokenStream input, Set dictionary, int minWordSize, int minSubwordSize, int maxSubwordSize, boolean onlyLongestMatch) {
+  protected CompoundWordTokenFilterBase(Version matchVersion, TokenStream input, Set<?> dictionary, int minWordSize, int minSubwordSize, int maxSubwordSize, boolean onlyLongestMatch) {
     super(input);
 
     this.tokens=new LinkedList<Token>();
@@ -221,8 +221,9 @@
     }
   }
 
-  protected static final void addAllLowerCase(Set target, Collection<String> col) {
-    for (String string : col) {
+  protected static final void addAllLowerCase(CharArraySet target, Collection<?> col) {
+    for (Object obj : col) {
+      String string = (String) obj;
       target.add(string.toLowerCase());
     }
   }
Index: modules/analysis/common/src/java/org/apache/lucene/analysis/compound/hyphenation/TernaryTree.java
===================================================================
--- modules/analysis/common/src/java/org/apache/lucene/analysis/compound/hyphenation/TernaryTree.java (revision 1003666)
+++ modules/analysis/common/src/java/org/apache/lucene/analysis/compound/hyphenation/TernaryTree.java (working copy)
@@ -453,11 +453,11 @@
     }
   }
 
-  public Enumeration keys() {
+  public Enumeration<String> keys() {
     return new Iterator();
   }
 
-  public class Iterator implements Enumeration {
+  public class Iterator implements Enumeration<String> {
     /**
      * current node index
      */
@@ -494,7 +494,7 @@
     /**
      * Node stack
      */
-    Stack ns;
+    Stack<Item> ns;
 
    /**
     * key stack implemented with a StringBuilder
@@ -503,7 +503,7 @@
 
    public Iterator() {
      cur = -1;
-      ns = new Stack();
+      ns = new Stack<Item>();
      ks = new StringBuilder();
      rewind();
    }
@@ -515,7 +515,7 @@
      run();
    }
 
-    public Object nextElement() {
+    public String nextElement() {
      String res = new String(curkey);
      cur = up();
      run();
@@ -557,11 +557,11 @@
          case 1:
            if (sc[i.parent] != 0) {
              res = eq[i.parent];
-              ns.push(i.clone());
+              ns.push((Item) i.clone());
              ks.append(sc[i.parent]);
            } else {
              i.child++;
-              ns.push(i.clone());
+              ns.push((Item) i.clone());
              res = hi[i.parent];
            }
            climb = false;
@@ -569,7 +569,7 @@
 
          case 2:
            res = hi[i.parent];
-            ns.push(i.clone());
+            ns.push((Item) i.clone());
            if (ks.length() > 0) {
              ks.setLength(ks.length() - 1); // pop
            }
Index: modules/analysis/common/src/java/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzer.java
===================================================================
--- modules/analysis/common/src/java/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzer.java (revision 1003666)
+++ modules/analysis/common/src/java/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzer.java (working copy)
@@ -158,6 +158,7 @@
    /* if the stopwords for a field are changed,
     * then saved streams for that field are erased.
     */
+    @SuppressWarnings("unchecked")
    Map<String,SavedStreams> streamMap = (Map<String,SavedStreams>) getPreviousTokenStream();
    if (streamMap != null)
      streamMap.remove(fieldName);
@@ -195,6 +196,7 @@
  public TokenStream reusableTokenStream(String fieldName, Reader reader)
      throws IOException {
    /* map of SavedStreams for each field */
+    @SuppressWarnings("unchecked")
    Map<String,SavedStreams> streamMap = (Map<String,SavedStreams>) getPreviousTokenStream();
    if (streamMap == null) {
      streamMap = new HashMap<String, SavedStreams>();
Index: modules/analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchStemFilter.java
===================================================================
--- modules/analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchStemFilter.java (revision 1003666)
+++ modules/analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchStemFilter.java (working copy)
@@ -107,7 +107,7 @@
   */
  @Deprecated // TODO remove in 3.2
  public void setExclusionTable( Map<?,?> exclusiontable ) {
-    exclusions = new HashSet(exclusiontable.keySet());
+    exclusions = exclusiontable.keySet();
  }
 }
Index: modules/analysis/common/src/java/org/tartarus/snowball/TestApp.java
===================================================================
--- modules/analysis/common/src/java/org/tartarus/snowball/TestApp.java (revision 1003666)
+++ modules/analysis/common/src/java/org/tartarus/snowball/TestApp.java (working copy)
@@ -54,9 +54,9 @@
            return;
        }
 
-        Class stemClass = Class.forName("org.tartarus.snowball.ext." +
-                args[0] + "Stemmer");
-        SnowballProgram stemmer = (SnowballProgram) stemClass.newInstance();
+        Class<? extends SnowballProgram> stemClass = Class.forName("org.tartarus.snowball.ext." +
+                args[0] + "Stemmer").asSubclass(SnowballProgram.class);
+        SnowballProgram stemmer = stemClass.newInstance();
        Method stemMethod = stemClass.getMethod("stem", new Class[0]);
 
        Reader reader;
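
A short aside on the TestApp.java hunk, since it is the least mechanical change in the patch: Class.forName(...) returns an untyped Class<?>, and chaining asSubclass(SnowballProgram.class) narrows it to Class<? extends SnowballProgram>, which is what lets the explicit (SnowballProgram) cast disappear. The following is a minimal, self-contained sketch of that idiom under invented names (Base and UpperCaseBase are illustrative only, not part of Lucene or this patch):

// AsSubclassDemo.java -- illustrates the Class.forName(...).asSubclass(...) idiom
// used in the TestApp.java hunk above. All class names here are made up for the demo.
public class AsSubclassDemo {

  public static abstract class Base {
    public abstract String apply(String input);
  }

  public static class UpperCaseBase extends Base {
    @Override
    public String apply(String input) {
      return input.toUpperCase();
    }
  }

  public static void main(String[] args) throws Exception {
    // Binary name of the nested class, resolved reflectively like the stemmer name in TestApp.
    String className = AsSubclassDemo.class.getName() + "$UpperCaseBase";

    // Raw/unchecked version (the pattern the patch removes):
    //   Class c = Class.forName(className);
    //   Base b = (Base) c.newInstance();

    // Typed version (the pattern the patch introduces): asSubclass() narrows
    // Class<?> to Class<? extends Base>, so no cast and no unchecked warning.
    Class<? extends Base> clazz = Class.forName(className).asSubclass(Base.class);
    Base instance = clazz.newInstance();

    System.out.println(instance.apply("hello")); // prints HELLO
  }
}

If the loaded class does not actually extend the requested type, asSubclass throws a ClassCastException at the lookup site rather than letting a raw Class leak an unrelated object further into the program, which is the same safety argument that applies to the Snowball stemmer lookup.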