diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/PrefixAwareTokenFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/PrefixAwareTokenFilter.java index 603e6e1..d0e2869 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/PrefixAwareTokenFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/PrefixAwareTokenFilter.java @@ -25,7 +25,7 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; -import org.apache.lucene.index.Payload; +import org.apache.lucene.util.BytesRef; import java.io.IOException; @@ -93,7 +93,7 @@ public class PrefixAwareTokenFilter extends TokenStream { } else { previousPrefixToken.reinit(nextToken); // Make it a deep copy - Payload p = previousPrefixToken.getPayload(); + BytesRef p = previousPrefixToken.getPayload(); if (p != null) { previousPrefixToken.setPayload(p.clone()); } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/AbstractEncoder.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/AbstractEncoder.java index 6a5a81f..ca4d9d2 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/AbstractEncoder.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/AbstractEncoder.java @@ -1,5 +1,7 @@ package org.apache.lucene.analysis.payloads; +import org.apache.lucene.util.BytesRef; + /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -17,15 +19,14 @@ package org.apache.lucene.analysis.payloads; * limitations under the License. 
*/ -import org.apache.lucene.index.Payload; /** * Base class for payload encoders. * **/ -public abstract class AbstractEncoder implements PayloadEncoder{ - public Payload encode(char[] buffer) { +public abstract class AbstractEncoder implements PayloadEncoder { + public BytesRef encode(char[] buffer) { return encode(buffer, 0, buffer.length); } } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/FloatEncoder.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/FloatEncoder.java index 2dd8d83..e71da14 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/FloatEncoder.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/FloatEncoder.java @@ -1,4 +1,7 @@ package org.apache.lucene.analysis.payloads; + +import org.apache.lucene.util.BytesRef; + /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -16,9 +19,6 @@ package org.apache.lucene.analysis.payloads; * limitations under the License. */ -import org.apache.lucene.index.Payload; - - /** * Encode a character array Float as a {@link org.apache.lucene.index.Payload}. *
@@ -27,11 +27,10 @@ import org.apache.lucene.index.Payload; **/ public class FloatEncoder extends AbstractEncoder implements PayloadEncoder { - public Payload encode(char[] buffer, int offset, int length) { - Payload result = new Payload(); + public BytesRef encode(char[] buffer, int offset, int length) { float payload = Float.parseFloat(new String(buffer, offset, length));//TODO: improve this so that we don't have to new Strings byte[] bytes = PayloadHelper.encodeFloat(payload); - result.setData(bytes); + BytesRef result = new BytesRef(bytes); return result; } } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/IdentityEncoder.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/IdentityEncoder.java index f143dda..db07ab9 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/IdentityEncoder.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/IdentityEncoder.java @@ -16,12 +16,12 @@ package org.apache.lucene.analysis.payloads; * limitations under the License. */ -import org.apache.lucene.index.Payload; - import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.Charset; +import org.apache.lucene.util.BytesRef; + /** * Does nothing other than convert the char array to a byte array using the specified encoding. @@ -37,15 +37,15 @@ public class IdentityEncoder extends AbstractEncoder implements PayloadEncoder{ this.charset = charset; } - public Payload encode(char[] buffer, int offset, int length) { + public BytesRef encode(char[] buffer, int offset, int length) { final ByteBuffer bb = charset.encode(CharBuffer.wrap(buffer, offset, length)); if (bb.hasArray()) { - return new Payload(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()); + return new BytesRef(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining()); } else { // normally it should always have an array, but who knows? 
final byte[] b = new byte[bb.remaining()]; bb.get(b); - return new Payload(b); + return new BytesRef(b); } } } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/IntegerEncoder.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/IntegerEncoder.java index 47da782..44bcf99 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/IntegerEncoder.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/IntegerEncoder.java @@ -16,8 +16,8 @@ package org.apache.lucene.analysis.payloads; * limitations under the License. */ -import org.apache.lucene.index.Payload; import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; /** @@ -28,11 +28,10 @@ import org.apache.lucene.util.ArrayUtil; **/ public class IntegerEncoder extends AbstractEncoder implements PayloadEncoder { - public Payload encode(char[] buffer, int offset, int length) { - Payload result = new Payload(); + public BytesRef encode(char[] buffer, int offset, int length) { int payload = ArrayUtil.parseInt(buffer, offset, length);//TODO: improve this so that we don't have to new Strings byte[] bytes = PayloadHelper.encodeInt(payload); - result.setData(bytes); + BytesRef result = new BytesRef(bytes); return result; } } \ No newline at end of file diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/NumericPayloadTokenFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/NumericPayloadTokenFilter.java index 8ec5f70..3785af5 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/NumericPayloadTokenFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/NumericPayloadTokenFilter.java @@ -21,7 +21,7 @@ import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import 
org.apache.lucene.analysis.tokenattributes.TypeAttribute; -import org.apache.lucene.index.Payload; +import org.apache.lucene.util.BytesRef; import java.io.IOException; @@ -33,7 +33,7 @@ import java.io.IOException; public class NumericPayloadTokenFilter extends TokenFilter { private String typeMatch; - private Payload thePayload; + private BytesRef thePayload; private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class); private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class); @@ -41,7 +41,7 @@ public class NumericPayloadTokenFilter extends TokenFilter { public NumericPayloadTokenFilter(TokenStream input, float payload, String typeMatch) { super(input); //Need to encode the payload - thePayload = new Payload(PayloadHelper.encodeFloat(payload)); + thePayload = new BytesRef(PayloadHelper.encodeFloat(payload)); this.typeMatch = typeMatch; } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/PayloadEncoder.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/PayloadEncoder.java index ebcdec4..7b73e07 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/PayloadEncoder.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/PayloadEncoder.java @@ -1,4 +1,7 @@ package org.apache.lucene.analysis.payloads; + +import org.apache.lucene.util.BytesRef; + /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -16,8 +19,6 @@ package org.apache.lucene.analysis.payloads; * limitations under the License. */ -import org.apache.lucene.index.Payload; - /** * Mainly for use with the DelimitedPayloadTokenFilter, converts char buffers to Payload. 
@@ -27,14 +28,14 @@ import org.apache.lucene.index.Payload; **/ public interface PayloadEncoder { - Payload encode(char[] buffer); + BytesRef encode(char[] buffer); /** * Convert a char array to a {@link org.apache.lucene.index.Payload} * @param buffer * @param offset * @param length - * @return encoded {@link Payload} + * @return encoded {@link BytesRef} */ - Payload encode(char [] buffer, int offset, int length); + BytesRef encode(char [] buffer, int offset, int length); } diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/TokenOffsetPayloadTokenFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/TokenOffsetPayloadTokenFilter.java index 24c16db..b13691b 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/TokenOffsetPayloadTokenFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/TokenOffsetPayloadTokenFilter.java @@ -23,7 +23,7 @@ import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; -import org.apache.lucene.index.Payload; +import org.apache.lucene.util.BytesRef; /** @@ -46,7 +46,7 @@ public class TokenOffsetPayloadTokenFilter extends TokenFilter { byte[] data = new byte[8]; PayloadHelper.encodeInt(offsetAtt.startOffset(), data, 0); PayloadHelper.encodeInt(offsetAtt.endOffset(), data, 4); - Payload payload = new Payload(data); + BytesRef payload = new BytesRef(data); payAtt.setPayload(payload); return true; } else { diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/TypeAsPayloadTokenFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/TypeAsPayloadTokenFilter.java index eaf7647..828028c 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/TypeAsPayloadTokenFilter.java +++ 
b/lucene/analysis/common/src/java/org/apache/lucene/analysis/payloads/TypeAsPayloadTokenFilter.java @@ -21,7 +21,7 @@ import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; -import org.apache.lucene.index.Payload; +import org.apache.lucene.util.BytesRef; import java.io.IOException; @@ -46,7 +46,7 @@ public class TypeAsPayloadTokenFilter extends TokenFilter { if (input.incrementToken()) { String type = typeAtt.type(); if (type != null && type.equals("") == false) { - payloadAtt.setPayload(new Payload(type.getBytes("UTF-8"))); + payloadAtt.setPayload(new BytesRef(type.getBytes("UTF-8"))); } return true; } else { diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java index a50a753..1476ccb 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestAnalyzers.java @@ -26,7 +26,7 @@ import org.apache.lucene.analysis.*; import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; -import org.apache.lucene.index.Payload; +import org.apache.lucene.util.BytesRef; public class TestAnalyzers extends BaseTokenStreamTestCase { @@ -85,7 +85,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase { if (!hasNext) break; // System.out.println("id="+System.identityHashCode(nextToken) + " " + t); // System.out.println("payload=" + (int)nextToken.getPayload().toByteArray()[0]); - assertEquals(b, payloadAtt.getPayload().toByteArray()[0]); + assertEquals(b, payloadAtt.getPayload().bytes[0]); } } @@ -213,7 +213,7 @@ final class PayloadSetter 
extends TokenFilter { } byte[] data = new byte[1]; - Payload p = new Payload(data,0,1); + BytesRef p = new BytesRef(data,0,1); @Override public boolean incrementToken() throws IOException { diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/DelimitedPayloadTokenFilterTest.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/DelimitedPayloadTokenFilterTest.java index 753b56e..17049e2 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/DelimitedPayloadTokenFilterTest.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/DelimitedPayloadTokenFilterTest.java @@ -20,7 +20,7 @@ import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; -import org.apache.lucene.index.Payload; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase; import java.io.StringReader; @@ -109,11 +109,11 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase { stream.reset(); assertTrue(stream.incrementToken()); assertEquals(expected, termAtt.toString()); - Payload payload = payloadAtt.getPayload(); + BytesRef payload = payloadAtt.getPayload(); if (payload != null) { - assertTrue(payload.length() + " does not equal: " + expectPay.length, payload.length() == expectPay.length); + assertTrue(payload.length + " does not equal: " + expectPay.length, payload.length == expectPay.length); for (int i = 0; i < expectPay.length; i++) { - assertTrue(expectPay[i] + " does not equal: " + payload.byteAt(i), expectPay[i] == payload.byteAt(i)); + assertTrue(expectPay[i] + " does not equal: " + payload.bytes[i + payload.offset], expectPay[i] == payload.bytes[i + payload.offset]); } } else { @@ -126,11 +126,11 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase { 
stream.reset(); assertTrue(stream.incrementToken()); assertEquals(expected, termAtt.toString()); - Payload payload = payAtt.getPayload(); + BytesRef payload = payAtt.getPayload(); if (payload != null) { - assertTrue(payload.length() + " does not equal: " + expectPay.length, payload.length() == expectPay.length); + assertTrue(payload.length + " does not equal: " + expectPay.length, payload.length == expectPay.length); for (int i = 0; i < expectPay.length; i++) { - assertTrue(expectPay[i] + " does not equal: " + payload.byteAt(i), expectPay[i] == payload.byteAt(i)); + assertTrue(expectPay[i] + " does not equal: " + payload.bytes[i + payload.offset], expectPay[i] == payload.bytes[i + payload.offset]); } } else { diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/NumericPayloadTokenFilterTest.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/NumericPayloadTokenFilterTest.java index 85d6b3f..2a88a0e 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/NumericPayloadTokenFilterTest.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/NumericPayloadTokenFilterTest.java @@ -43,9 +43,9 @@ public class NumericPayloadTokenFilterTest extends BaseTokenStreamTestCase { seenDogs = true; assertTrue(typeAtt.type() + " is not equal to " + "D", typeAtt.type().equals("D") == true); assertTrue("payloadAtt.getPayload() is null and it shouldn't be", payloadAtt.getPayload() != null); - byte [] bytes = payloadAtt.getPayload().getData();//safe here to just use the bytes, otherwise we should use offset, length - assertTrue(bytes.length + " does not equal: " + payloadAtt.getPayload().length(), bytes.length == payloadAtt.getPayload().length()); - assertTrue(payloadAtt.getPayload().getOffset() + " does not equal: " + 0, payloadAtt.getPayload().getOffset() == 0); + byte [] bytes = payloadAtt.getPayload().bytes;//safe here to just use the bytes, otherwise we should use offset, length + 
assertTrue(bytes.length + " does not equal: " + payloadAtt.getPayload().length, bytes.length == payloadAtt.getPayload().length); + assertTrue(payloadAtt.getPayload().offset + " does not equal: " + 0, payloadAtt.getPayload().offset == 0); float pay = PayloadHelper.decodeFloat(bytes); assertTrue(pay + " does not equal: " + 3, pay == 3); } else { diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/TokenOffsetPayloadTokenFilterTest.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/TokenOffsetPayloadTokenFilterTest.java index 97b4fa1..32e2bda 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/TokenOffsetPayloadTokenFilterTest.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/TokenOffsetPayloadTokenFilterTest.java @@ -20,7 +20,7 @@ import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; -import org.apache.lucene.index.Payload; +import org.apache.lucene.util.BytesRef; import java.io.IOException; import java.io.StringReader; @@ -36,9 +36,9 @@ public class TokenOffsetPayloadTokenFilterTest extends BaseTokenStreamTestCase { OffsetAttribute offsetAtt = nptf.getAttribute(OffsetAttribute.class); nptf.reset(); while (nptf.incrementToken()) { - Payload pay = payloadAtt.getPayload(); + BytesRef pay = payloadAtt.getPayload(); assertTrue("pay is null and it shouldn't be", pay != null); - byte [] data = pay.getData(); + byte [] data = pay.bytes; int start = PayloadHelper.decodeInt(data, 0); assertTrue(start + " does not equal: " + offsetAtt.startOffset(), start == offsetAtt.startOffset()); int end = PayloadHelper.decodeInt(data, 4); diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/TypeAsPayloadTokenFilterTest.java 
b/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/TypeAsPayloadTokenFilterTest.java index 889b7ea..6bc6b82 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/TypeAsPayloadTokenFilterTest.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/payloads/TypeAsPayloadTokenFilterTest.java @@ -41,7 +41,7 @@ public class TypeAsPayloadTokenFilterTest extends BaseTokenStreamTestCase { while (nptf.incrementToken()) { assertTrue(typeAtt.type() + " is not null and it should be", typeAtt.type().equals(String.valueOf(Character.toUpperCase(termAtt.buffer()[0])))); assertTrue("nextToken.getPayload() is null and it shouldn't be", payloadAtt.getPayload() != null); - String type = new String(payloadAtt.getPayload().getData(), "UTF-8"); + String type = new String(payloadAtt.getPayload().bytes, payloadAtt.getPayload().offset, payloadAtt.getPayload().length, "UTF-8"); assertTrue(type + " is not equal to " + typeAtt.type(), type.equals(typeAtt.type()) == true); count++; } diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowball.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowball.java index 019231e..64f53e6 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowball.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/snowball/TestSnowball.java @@ -24,7 +24,6 @@ import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.index.Payload; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.KeywordTokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; @@ -34,6 +33,7 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import 
org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; public class TestSnowball extends BaseTokenStreamTestCase { @@ -68,7 +68,7 @@ public class TestSnowball extends BaseTokenStreamTestCase { assertEquals("wrd", typeAtt.type()); assertEquals(3, posIncAtt.getPositionIncrement()); assertEquals(77, flagsAtt.getFlags()); - assertEquals(new Payload(new byte[]{0,1,2,3}), payloadAtt.getPayload()); + assertEquals(new BytesRef(new byte[]{0,1,2,3}), payloadAtt.getPayload()); } private final class TestTokenStream extends TokenStream { @@ -90,7 +90,7 @@ public class TestSnowball extends BaseTokenStreamTestCase { offsetAtt.setOffset(2, 7); typeAtt.setType("wrd"); posIncAtt.setPositionIncrement(3); - payloadAtt.setPayload(new Payload(new byte[]{0,1,2,3})); + payloadAtt.setPayload(new BytesRef(new byte[]{0,1,2,3})); flagsAtt.setFlags(77); return true; } diff --git a/lucene/core/src/java/org/apache/lucene/analysis/Token.java b/lucene/core/src/java/org/apache/lucene/analysis/Token.java index 72fce99..817a924 100644 --- a/lucene/core/src/java/org/apache/lucene/analysis/Token.java +++ b/lucene/core/src/java/org/apache/lucene/analysis/Token.java @@ -24,12 +24,12 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; -import org.apache.lucene.index.Payload; import org.apache.lucene.index.DocsAndPositionsEnum; // for javadoc import org.apache.lucene.util.Attribute; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.AttributeImpl; import org.apache.lucene.util.AttributeReflector; +import org.apache.lucene.util.BytesRef; /** A Token is an occurrence of a term from the text 
of a field. It consists of @@ -127,7 +127,7 @@ public class Token extends CharTermAttributeImpl private int startOffset,endOffset; private String type = DEFAULT_TYPE; private int flags; - private Payload payload; + private BytesRef payload; private int positionIncrement = 1; private int positionLength = 1; @@ -357,14 +357,14 @@ public class Token extends CharTermAttributeImpl /** * Returns this Token's payload. */ - public Payload getPayload() { + public BytesRef getPayload() { return this.payload; } /** * Sets this Token's payload. */ - public void setPayload(Payload payload) { + public void setPayload(BytesRef payload) { this.payload = payload; } diff --git a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PayloadAttribute.java b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PayloadAttribute.java index 006cc5e..fe62626 100644 --- a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PayloadAttribute.java +++ b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PayloadAttribute.java @@ -17,8 +17,8 @@ package org.apache.lucene.analysis.tokenattributes; * limitations under the License. */ -import org.apache.lucene.index.Payload; import org.apache.lucene.util.Attribute; +import org.apache.lucene.util.BytesRef; /** * The payload of a Token. See also {@link Payload}. @@ -27,10 +27,10 @@ public interface PayloadAttribute extends Attribute { /** * Returns this Token's payload. */ - public Payload getPayload(); + public BytesRef getPayload(); /** * Sets this Token's payload. 
*/ - public void setPayload(Payload payload); + public void setPayload(BytesRef payload); } diff --git a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PayloadAttributeImpl.java b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PayloadAttributeImpl.java index 6d44924..7681581 100644 --- a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PayloadAttributeImpl.java +++ b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PayloadAttributeImpl.java @@ -17,14 +17,14 @@ package org.apache.lucene.analysis.tokenattributes; * limitations under the License. */ -import org.apache.lucene.index.Payload; import org.apache.lucene.util.AttributeImpl; +import org.apache.lucene.util.BytesRef; /** * The payload of a Token. See also {@link Payload}. */ public class PayloadAttributeImpl extends AttributeImpl implements PayloadAttribute, Cloneable { - private Payload payload; + private BytesRef payload; /** * Initialize this attribute with no payload. @@ -34,21 +34,21 @@ public class PayloadAttributeImpl extends AttributeImpl implements PayloadAttrib /** * Initialize this attribute with the given payload. */ - public PayloadAttributeImpl(Payload payload) { + public PayloadAttributeImpl(BytesRef payload) { this.payload = payload; } /** * Returns this Token's payload. */ - public Payload getPayload() { + public BytesRef getPayload() { return this.payload; } /** * Sets this Token's payload. 
*/ - public void setPayload(Payload payload) { + public void setPayload(BytesRef payload) { this.payload = payload; } diff --git a/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java b/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java index c959ec1..960928c 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java +++ b/lucene/core/src/java/org/apache/lucene/index/FreqProxTermsWriterPerField.java @@ -128,7 +128,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem void writeProx(final int termID, int proxCode) { //System.out.println("writeProx termID=" + termID + " proxCode=" + proxCode); assert hasProx; - final Payload payload; + final BytesRef payload; if (payloadAttribute == null) { payload = null; } else { @@ -138,7 +138,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem if (payload != null && payload.length > 0) { termsHashPerField.writeVInt(1, (proxCode<<1)|1); termsHashPerField.writeVInt(1, payload.length); - termsHashPerField.writeBytes(1, payload.data, payload.offset, payload.length); + termsHashPerField.writeBytes(1, payload.bytes, payload.offset, payload.length); hasPayloads = true; } else { termsHashPerField.writeVInt(1, proxCode<<1); diff --git a/lucene/core/src/java/org/apache/lucene/index/Payload.java b/lucene/core/src/java/org/apache/lucene/index/Payload.java deleted file mode 100644 index ec8b8c4..0000000 --- a/lucene/core/src/java/org/apache/lucene/index/Payload.java +++ /dev/null @@ -1,199 +0,0 @@ -package org.apache.lucene.index; - -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.util.ArrayUtil; - -/** - * A Payload is metadata that can be stored together with each occurrence - * of a term. This metadata is stored inline in the posting list of the - * specific term. - *- * To store payloads in the index a {@link TokenStream} has to be used that - * produces payload data. - *
- * Use {@link DocsAndPositionsEnum#getPayload()}
- * to retrieve the payloads from the index.
- *
- */
-public class Payload implements Cloneable {
- /** the byte array containing the payload data */
- protected byte[] data;
-
- /** the offset within the byte array */
- protected int offset;
-
- /** the length of the payload data */
- protected int length;
-
- /** Creates an empty payload and does not allocate a byte array. */
- public Payload() {
- // nothing to do
- }
-
- /**
- * Creates a new payload with the the given array as data.
- * A reference to the passed-in array is held, i. e. no
- * copy is made.
- *
- * @param data the data of this payload
- */
- public Payload(byte[] data) {
- this(data, 0, data.length);
- }
-
- /**
- * Creates a new payload with the the given array as data.
- * A reference to the passed-in array is held, i. e. no
- * copy is made.
- *
- * @param data the data of this payload
- * @param offset the offset in the data byte array
- * @param length the length of the data
- */
- public Payload(byte[] data, int offset, int length) {
- if (offset < 0 || offset + length > data.length) {
- throw new IllegalArgumentException();
- }
- this.data = data;
- this.offset = offset;
- this.length = length;
- }
-
- /**
- * Sets this payloads data.
- * A reference to the passed-in array is held, i. e. no
- * copy is made.
- */
- public void setData(byte[] data) {
- setData(data, 0, data.length);
- }
-
- /**
- * Sets this payloads data.
- * A reference to the passed-in array is held, i. e. no
- * copy is made.
- */
- public void setData(byte[] data, int offset, int length) {
- this.data = data;
- this.offset = offset;
- this.length = length;
- }
-
- /**
- * Returns a reference to the underlying byte array
- * that holds this payloads data.
- */
- public byte[] getData() {
- return this.data;
- }
-
- /**
- * Returns the offset in the underlying byte array
- */
- public int getOffset() {
- return this.offset;
- }
-
- /**
- * Returns the length of the payload data.
- */
- public int length() {
- return this.length;
- }
-
- /**
- * Returns the byte at the given index.
- */
- public byte byteAt(int index) {
- if (0 <= index && index < this.length) {
- return this.data[this.offset + index];
- }
- throw new ArrayIndexOutOfBoundsException(index);
- }
-
- /**
- * Allocates a new byte array, copies the payload data into it and returns it.
- */
- public byte[] toByteArray() {
- byte[] retArray = new byte[this.length];
- System.arraycopy(this.data, this.offset, retArray, 0, this.length);
- return retArray;
- }
-
- /**
- * Copies the payload data to a byte array.
- *
- * @param target the target byte array
- * @param targetOffset the offset in the target byte array
- */
- public void copyTo(byte[] target, int targetOffset) {
- if (this.length > target.length + targetOffset) {
- throw new ArrayIndexOutOfBoundsException();
- }
- System.arraycopy(this.data, this.offset, target, targetOffset, this.length);
- }
-
- /**
- * Clones this payload by creating a copy of the underlying
- * byte array.
- */
- @Override
- public Payload clone() {
- try {
- // Start with a shallow copy of data
- Payload clone = (Payload) super.clone();
- // Only copy the part of data that belongs to this Payload
- if (offset == 0 && length == data.length) {
- // It is the whole thing, so just clone it.
- clone.data = data.clone();
- }
- else {
- // Just get the part
- clone.data = this.toByteArray();
- clone.offset = 0;
- }
- return clone;
- } catch (CloneNotSupportedException e) {
- throw new RuntimeException(e); // shouldn't happen
- }
- }
-
- @Override
- public boolean equals(Object obj) {
- if (obj == this)
- return true;
- if (obj instanceof Payload) {
- Payload other = (Payload) obj;
- if (length == other.length) {
- for(int i=0;i