Index: contrib/analyzers/common/src/java/org/apache/lucene/analysis/cjk/CJKTokenizer.java
===================================================================
--- contrib/analyzers/common/src/java/org/apache/lucene/analysis/cjk/CJKTokenizer.java	(revision 898866)
+++ contrib/analyzers/common/src/java/org/apache/lucene/analysis/cjk/CJKTokenizer.java	(working copy)
@@ -178,6 +178,7 @@
 
                     break;
                 } else {
+                    offset--;
                     return false;
                 }
             } else {
@@ -288,6 +289,7 @@
           typeAtt.setType(TOKEN_TYPE_NAMES[tokenType]);
           return true;
         } else if (dataLen == -1) {
+          offset--;
           return false;
         }
 
Index: contrib/analyzers/common/src/test/org/apache/lucene/analysis/cjk/TestCJKTokenizer.java
===================================================================
--- contrib/analyzers/common/src/test/org/apache/lucene/analysis/cjk/TestCJKTokenizer.java	(revision 898751)
+++ contrib/analyzers/common/src/test/org/apache/lucene/analysis/cjk/TestCJKTokenizer.java	(working copy)
@@ -261,4 +261,72 @@
     };
     checkCJKTokenReusable(analyzer, str, out_tokens2);
   }
+  
+  /**
+   * LUCENE-2207: end() should report the correct final offset for CJK input
+   */
+  public void testEndOffset() throws IOException {
+    Analyzer a = new CJKAnalyzer(Version.LUCENE_CURRENT);
+    TokenStream stream = a.reusableTokenStream("bogus", new StringReader("あい"));
+    TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
+    OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
+    stream.reset();
+    assertTrue(stream.incrementToken());
+    assertEquals("あい", termAtt.term());
+    assertFalse(stream.incrementToken());
+    stream.end();
+    assertEquals(2, offsetAtt.endOffset());
+    stream.close();
+  }
+  
+  /**
+   * LUCENE-2207: end() should report the correct final offset for CJK input followed by trailing whitespace
+   */
+  public void testEndOffset2() throws IOException {
+    Analyzer a = new CJKAnalyzer(Version.LUCENE_CURRENT);
+    TokenStream stream = a.reusableTokenStream("bogus", new StringReader("あい   "));
+    TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
+    OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
+    stream.reset();
+    assertTrue(stream.incrementToken());
+    assertEquals("あい", termAtt.term());
+    assertFalse(stream.incrementToken());
+    stream.end();
+    assertEquals(5, offsetAtt.endOffset());
+    stream.close();
+  }
+  
+  /**
+   * LUCENE-2207: end() should report the correct final offset for non-CJK (basic latin) input
+   */
+  public void testEndOffset3() throws IOException {
+    Analyzer a = new CJKAnalyzer(Version.LUCENE_CURRENT);
+    TokenStream stream = a.reusableTokenStream("bogus", new StringReader("test"));
+    TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
+    OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
+    stream.reset();
+    assertTrue(stream.incrementToken());
+    assertEquals("test", termAtt.term());
+    assertFalse(stream.incrementToken());
+    stream.end();
+    assertEquals(4, offsetAtt.endOffset());
+    stream.close();
+  }
+  
+  /**
+   * LUCENE-2207: end() should report the correct final offset for non-CJK (basic latin) input followed by trailing whitespace
+   */
+  public void testEndOffset4() throws IOException {
+    Analyzer a = new CJKAnalyzer(Version.LUCENE_CURRENT);
+    TokenStream stream = a.reusableTokenStream("bogus", new StringReader("test   "));
+    TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
+    OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
+    stream.reset();
+    assertTrue(stream.incrementToken());
+    assertEquals("test", termAtt.term());
+    assertFalse(stream.incrementToken());
+    stream.end();
+    assertEquals(7, offsetAtt.endOffset());
+    stream.close();
+  }
 }
