lucene-core-2.9.4-dev

er)
    {
      super();
      this.filter = filter;
    }
    
    private boolean accept(AttributeSource source)
    {
      // Delegates the keep/drop decision for the current token state to the
      // SinkFilter this sink stream was created with.
      return filter.accept(source);
    }
    
    private void addState(AttributeSource.State state)
    {
      // States may only be cached while the tee is still being consumed;
      // once a consumer has started iterating (it != null) the cache must
      // not grow, or the iterator would be invalidated.
      if (it != null) {
        throw new IllegalStateException("The tee must be consumed before sinks are consumed.");
      }
      cachedStates.add(state);
    }
    
    private void setFinalState(AttributeSource.State finalState)
    {
      // Captures the attribute state present at the tee's end(), replayed
      // later by this stream's own end().
      this.finalState = finalState;
    }
    
    /**
     * Replays the next cached attribute state, if any.
     *
     * @return true if a state was restored, false once the cache is exhausted
     */
    public final boolean incrementToken()
      throws IOException
    {
      // Lazily begin iterating over the states captured while the tee was
      // consumed; each successful call replays exactly one state.
      if (it == null) {
        it = cachedStates.iterator();
      }
      if (it.hasNext())
      {
        restoreState((AttributeSource.State)it.next());
        return true;
      }
      return false;
    }
    
    public final void end()
      throws IOException
    {
      // Replay the state captured by the tee's end(), if one was recorded,
      // so end-of-stream attributes become visible to this consumer.
      if (finalState != null) {
        restoreState(finalState);
      }
    }
    
    public final void reset()
    {
      // Rewind consumption to the first cached state. Note this does not
      // clear the cache; it only restarts the iterator.
      it = cachedStates.iterator();
    }
  }
  
  // Shared filter that passes every token state through to a sink unchanged.
  private static final SinkFilter ACCEPT_ALL_FILTER = new SinkFilter()
  {
    public boolean accept(AttributeSource source)
    {
      return true;
    }
  };
  
  /**
   * Decides which token states captured by the tee are stored in a sink.
   */
  public static abstract class SinkFilter
  {
    /**
     * Returns true if the current state of the given AttributeSource shall
     * be cached for the sink.
     */
    public abstract boolean accept(AttributeSource paramAttributeSource);
    
    /**
     * Called when a sink is reset. The default implementation does nothing;
     * override only if the filter keeps state of its own.
     */
    public void reset()
      throws IOException
    {}
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.TeeSinkTokenFilter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import java.io.IOException;

/**
 * Pass-through filter that mirrors every token into a SinkTokenizer.
 *
 * @deprecated
 */
public class TeeTokenFilter
  extends TokenFilter
{
  SinkTokenizer sink;
  
  /**
   * Wraps {@code input} and records each produced token into {@code sink}
   * for later replay.
   */
  public TeeTokenFilter(TokenStream input, SinkTokenizer sink)
  {
    super(input);
    this.sink = sink;
  }
  
  public Token next(Token reusableToken)
    throws IOException
  {
    assert (reusableToken != null);
    // Fetch the next token, hand it to the sink, then pass it through
    // unchanged. NOTE(review): the token is added even when it is null at
    // end of stream - presumably SinkTokenizer.add tolerates null; verify.
    final Token token = input.next(reusableToken);
    sink.add(token);
    return token;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.TeeTokenFilter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.AttributeImpl;

/**
 * A Token is an occurrence of a term in a text: its term text (held either
 * as a String or a reusable char[] buffer, lazily converted between the
 * two), start/end character offsets, a lexical type, flags, an optional
 * payload and a position increment.
 */
public class Token
  extends AttributeImpl
  implements Cloneable, TermAttribute, TypeAttribute, PositionIncrementAttribute, FlagsAttribute, OffsetAttribute, PayloadAttribute
{
  public static final String DEFAULT_TYPE = "word";
  private static int MIN_BUFFER_SIZE = 10;
  /**
   * @deprecated
   */
  private String termText;
  /**
   * @deprecated
   */
  char[] termBuffer;
  /**
   * @deprecated
   */
  int termLength;
  /**
   * @deprecated
   */
  int startOffset;
  /**
   * @deprecated
   */
  int endOffset;
  /**
   * @deprecated
   */
  String type = "word";
  private int flags;
  /**
   * @deprecated
   */
  Payload payload;
  /**
   * @deprecated
   */
  int positionIncrement = 1;
  
  /** Constructs a Token with null term text and default settings. */
  public Token() {}
  
  /** Constructs a Token with null term text and the given offsets. */
  public Token(int start, int end)
  {
    startOffset = start;
    endOffset = end;
  }
  
  /** Constructs a Token with null term text, offsets and lexical type. */
  public Token(int start, int end, String typ)
  {
    startOffset = start;
    endOffset = end;
    type = typ;
  }
  
  /** Constructs a Token with null term text, offsets and flags. */
  public Token(int start, int end, int flags)
  {
    startOffset = start;
    endOffset = end;
    this.flags = flags;
  }
  
  /** Constructs a Token with the given term text and offsets. */
  public Token(String text, int start, int end)
  {
    termText = text;
    startOffset = start;
    endOffset = end;
  }
  
  /** Constructs a Token with the given term text, offsets and type. */
  public Token(String text, int start, int end, String typ)
  {
    termText = text;
    startOffset = start;
    endOffset = end;
    type = typ;
  }
  
  /** Constructs a Token with the given term text, offsets and flags. */
  public Token(String text, int start, int end, int flags)
  {
    termText = text;
    startOffset = start;
    endOffset = end;
    this.flags = flags;
  }
  
  /** Constructs a Token copying the given buffer slice as term text. */
  public Token(char[] startTermBuffer, int termBufferOffset, int termBufferLength, int start, int end)
  {
    setTermBuffer(startTermBuffer, termBufferOffset, termBufferLength);
    startOffset = start;
    endOffset = end;
  }
  
  /**
   * Sets the position increment (distance to the previous token).
   *
   * @throws IllegalArgumentException if the increment is negative
   */
  public void setPositionIncrement(int positionIncrement)
  {
    if (positionIncrement < 0) {
      throw new IllegalArgumentException("Increment must be zero or greater: " + positionIncrement);
    }
    this.positionIncrement = positionIncrement;
  }
  
  public int getPositionIncrement()
  {
    return positionIncrement;
  }
  
  /**
   * @deprecated
   */
  public void setTermText(String text)
  {
    termText = text;
    termBuffer = null;
  }
  
  /**
   * @deprecated
   */
  public final String termText()
  {
    // Lazily materialize the String form from the char buffer.
    if ((termText == null) && (termBuffer != null)) {
      termText = new String(termBuffer, 0, termLength);
    }
    return termText;
  }
  
  /** Returns the term text, converting from the buffer if necessary. */
  public final String term()
  {
    if (termText != null) {
      return termText;
    }
    initTermBuffer();
    return new String(termBuffer, 0, termLength);
  }
  
  /** Copies the given buffer slice into this token's term buffer. */
  public final void setTermBuffer(char[] buffer, int offset, int length)
  {
    termText = null;
    growTermBuffer(length);
    System.arraycopy(buffer, offset, termBuffer, 0, length);
    termLength = length;
  }
  
  /** Copies the given String into this token's term buffer. */
  public final void setTermBuffer(String buffer)
  {
    termText = null;
    int length = buffer.length();
    growTermBuffer(length);
    buffer.getChars(0, length, termBuffer, 0);
    termLength = length;
  }
  
  /** Copies a substring of the given String into the term buffer. */
  public final void setTermBuffer(String buffer, int offset, int length)
  {
    assert (offset <= buffer.length());
    assert (offset + length <= buffer.length());
    termText = null;
    growTermBuffer(length);
    buffer.getChars(offset, offset + length, termBuffer, 0);
    termLength = length;
  }
  
  /** Returns the internal term buffer (valid up to termLength()). */
  public final char[] termBuffer()
  {
    initTermBuffer();
    return termBuffer;
  }
  
  /**
   * Grows the term buffer to at least newSize, preserving existing content,
   * and returns it.
   */
  public char[] resizeTermBuffer(int newSize)
  {
    if (termBuffer == null)
    {
      newSize = newSize < MIN_BUFFER_SIZE ? MIN_BUFFER_SIZE : newSize;
      if (termText != null)
      {
        // Preserve the String form by copying it into the new buffer.
        int ttLen = termText.length();
        newSize = newSize < ttLen ? ttLen : newSize;
        termBuffer = new char[ArrayUtil.getNextSize(newSize)];
        termText.getChars(0, termText.length(), termBuffer, 0);
        termText = null;
      }
      else
      {
        termBuffer = new char[ArrayUtil.getNextSize(newSize)];
      }
    }
    else if (termBuffer.length < newSize)
    {
      char[] newCharBuffer = new char[ArrayUtil.getNextSize(newSize)];
      System.arraycopy(termBuffer, 0, newCharBuffer, 0, termBuffer.length);
      termBuffer = newCharBuffer;
    }
    return termBuffer;
  }
  
  // Ensures capacity without preserving buffer content (content is about
  // to be overwritten by the caller).
  private void growTermBuffer(int newSize)
  {
    if (termBuffer == null) {
      termBuffer = new char[ArrayUtil.getNextSize(newSize < MIN_BUFFER_SIZE ? MIN_BUFFER_SIZE : newSize)];
    } else if (termBuffer.length < newSize) {
      termBuffer = new char[ArrayUtil.getNextSize(newSize)];
    }
  }
  
  // Migrates term text from the String field to the char[] buffer, making
  // the buffer the single source of truth.
  private void initTermBuffer()
  {
    if (termBuffer == null)
    {
      if (termText == null)
      {
        termBuffer = new char[ArrayUtil.getNextSize(MIN_BUFFER_SIZE)];
        termLength = 0;
      }
      else
      {
        int length = termText.length();
        if (length < MIN_BUFFER_SIZE) {
          length = MIN_BUFFER_SIZE;
        }
        termBuffer = new char[ArrayUtil.getNextSize(length)];
        termLength = termText.length();
        termText.getChars(0, termText.length(), termBuffer, 0);
        termText = null;
      }
    }
    else {
      termText = null;
    }
  }
  
  /** Returns the number of valid characters in the term buffer. */
  public final int termLength()
  {
    initTermBuffer();
    return termLength;
  }
  
  /**
   * Sets the valid length of the term buffer.
   *
   * @throws IllegalArgumentException if length exceeds the buffer capacity
   */
  public final void setTermLength(int length)
  {
    initTermBuffer();
    if (length > termBuffer.length) {
      throw new IllegalArgumentException("length " + length + " exceeds the size of the termBuffer (" + termBuffer.length + ")");
    }
    termLength = length;
  }
  
  public final int startOffset()
  {
    return startOffset;
  }
  
  public void setStartOffset(int offset)
  {
    startOffset = offset;
  }
  
  public final int endOffset()
  {
    return endOffset;
  }
  
  public void setEndOffset(int offset)
  {
    endOffset = offset;
  }
  
  public void setOffset(int startOffset, int endOffset)
  {
    this.startOffset = startOffset;
    this.endOffset = endOffset;
  }
  
  public final String type()
  {
    return type;
  }
  
  public final void setType(String type)
  {
    this.type = type;
  }
  
  public int getFlags()
  {
    return flags;
  }
  
  public void setFlags(int flags)
  {
    this.flags = flags;
  }
  
  public Payload getPayload()
  {
    return payload;
  }
  
  public void setPayload(Payload payload)
  {
    this.payload = payload;
  }
  
  public String toString()
  {
    StringBuffer sb = new StringBuffer();
    sb.append('(');
    initTermBuffer();
    if (termBuffer == null) {
      sb.append("null");
    } else {
      sb.append(termBuffer, 0, termLength);
    }
    sb.append(',').append(startOffset).append(',').append(endOffset);
    if (!type.equals("word")) {
      sb.append(",type=").append(type);
    }
    if (positionIncrement != 1) {
      sb.append(",posIncr=").append(positionIncrement);
    }
    sb.append(')');
    return sb.toString();
  }
  
  /** Resets all state to defaults; the term buffer is kept but emptied. */
  public void clear()
  {
    payload = null;
    
    termLength = 0;
    termText = null;
    positionIncrement = 1;
    flags = 0;
    startOffset = (endOffset = 0);
    type = "word";
  }
  
  /** Returns a deep copy; the term buffer and payload are cloned. */
  public Object clone()
  {
    Token t = (Token)super.clone();
    // BUGFIX: copy into the clone's fields; the decompiled original wrote
    // back into this token's own termBuffer/payload, so the clone shared
    // (and this token lost) its buffer.
    if (termBuffer != null)
    {
      t.termBuffer = new char[termLength];
      System.arraycopy(termBuffer, 0, t.termBuffer, 0, termLength);
    }
    if (payload != null) {
      t.payload = ((Payload)payload.clone());
    }
    return t;
  }
  
  /**
   * Makes a clone, replacing the term buffer and offsets with the given
   * values while copying type, flags, position increment and payload.
   */
  public Token clone(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset)
  {
    Token t = new Token(newTermBuffer, newTermOffset, newTermLength, newStartOffset, newEndOffset);
    // BUGFIX: the decompiled original self-assigned these fields (no-ops),
    // leaving the clone with default type/flags/increment/payload.
    t.positionIncrement = positionIncrement;
    t.flags = flags;
    t.type = type;
    if (payload != null) {
      t.payload = ((Payload)payload.clone());
    }
    return t;
  }
  
  public boolean equals(Object obj)
  {
    if (obj == this) {
      return true;
    }
    if ((obj instanceof Token))
    {
      Token other = (Token)obj;
      
      initTermBuffer();
      other.initTermBuffer();
      // BUGFIX: compare against the OTHER token's fields; the decompiled
      // original compared every field with itself, making any two Tokens
      // compare equal.
      if ((termLength == other.termLength) && (startOffset == other.startOffset) && (endOffset == other.endOffset) && (flags == other.flags) && (positionIncrement == other.positionIncrement) && (subEqual(type, other.type)) && (subEqual(payload, other.payload)))
      {
        for (int i = 0; i < termLength; i++) {
          if (termBuffer[i] != other.termBuffer[i]) {
            return false;
          }
        }
        return true;
      }
      return false;
    }
    return false;
  }
  
  // Null-safe equality helper.
  private boolean subEqual(Object o1, Object o2)
  {
    if (o1 == null) {
      return o2 == null;
    }
    return o1.equals(o2);
  }
  
  public int hashCode()
  {
    initTermBuffer();
    int code = termLength;
    code = code * 31 + startOffset;
    code = code * 31 + endOffset;
    code = code * 31 + flags;
    code = code * 31 + positionIncrement;
    code = code * 31 + type.hashCode();
    code = payload == null ? code : code * 31 + payload.hashCode();
    code = code * 31 + ArrayUtil.hashCode(termBuffer, 0, termLength);
    return code;
  }
  
  // Like clear() but leaves the term buffer and its length untouched, for
  // reinit() variants that immediately overwrite the term.
  private void clearNoTermBuffer()
  {
    payload = null;
    positionIncrement = 1;
    flags = 0;
    startOffset = (endOffset = 0);
    type = "word";
  }
  
  /** Fully reinitializes this token from a buffer slice, offsets and type. */
  public Token reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, String newType)
  {
    clearNoTermBuffer();
    setTermBuffer(newTermBuffer, newTermOffset, newTermLength);
    startOffset = newStartOffset;
    endOffset = newEndOffset;
    type = newType;
    return this;
  }
  
  /** Fully reinitializes from a buffer slice and offsets; type = "word". */
  public Token reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset)
  {
    clearNoTermBuffer();
    setTermBuffer(newTermBuffer, newTermOffset, newTermLength);
    startOffset = newStartOffset;
    endOffset = newEndOffset;
    type = "word";
    return this;
  }
  
  /** Fully reinitializes from a String term, offsets and type. */
  public Token reinit(String newTerm, int newStartOffset, int newEndOffset, String newType)
  {
    clearNoTermBuffer();
    setTermBuffer(newTerm);
    startOffset = newStartOffset;
    endOffset = newEndOffset;
    type = newType;
    return this;
  }
  
  /** Fully reinitializes from a String slice, offsets and type. */
  public Token reinit(String newTerm, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, String newType)
  {
    clearNoTermBuffer();
    setTermBuffer(newTerm, newTermOffset, newTermLength);
    startOffset = newStartOffset;
    endOffset = newEndOffset;
    type = newType;
    return this;
  }
  
  /** Fully reinitializes from a String term and offsets; type = "word". */
  public Token reinit(String newTerm, int newStartOffset, int newEndOffset)
  {
    clearNoTermBuffer();
    setTermBuffer(newTerm);
    startOffset = newStartOffset;
    endOffset = newEndOffset;
    type = "word";
    return this;
  }
  
  /** Fully reinitializes from a String slice and offsets; type = "word". */
  public Token reinit(String newTerm, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset)
  {
    clearNoTermBuffer();
    setTermBuffer(newTerm, newTermOffset, newTermLength);
    startOffset = newStartOffset;
    endOffset = newEndOffset;
    type = "word";
    return this;
  }
  
  /**
   * Copies all state from the prototype into this token. The payload
   * reference is shared, not cloned.
   */
  public void reinit(Token prototype)
  {
    prototype.initTermBuffer();
    // BUGFIX: read from the prototype; the decompiled original self-assigned
    // this token's own fields, copying nothing.
    setTermBuffer(prototype.termBuffer, 0, prototype.termLength);
    positionIncrement = prototype.positionIncrement;
    flags = prototype.flags;
    startOffset = prototype.startOffset;
    endOffset = prototype.endOffset;
    type = prototype.type;
    payload = prototype.payload;
  }
  
  /** Copies all state from the prototype, overriding the term text. */
  public void reinit(Token prototype, String newTerm)
  {
    setTermBuffer(newTerm);
    // BUGFIX: read from the prototype (original self-assigned, see above).
    positionIncrement = prototype.positionIncrement;
    flags = prototype.flags;
    startOffset = prototype.startOffset;
    endOffset = prototype.endOffset;
    type = prototype.type;
    payload = prototype.payload;
  }
  
  /** Copies all state from the prototype, overriding the term buffer. */
  public void reinit(Token prototype, char[] newTermBuffer, int offset, int length)
  {
    setTermBuffer(newTermBuffer, offset, length);
    // BUGFIX: read from the prototype (original self-assigned, see above).
    positionIncrement = prototype.positionIncrement;
    flags = prototype.flags;
    startOffset = prototype.startOffset;
    endOffset = prototype.endOffset;
    type = prototype.type;
    payload = prototype.payload;
  }
  
  public void copyTo(AttributeImpl target)
  {
    if ((target instanceof Token))
    {
      Token to = (Token)target;
      to.reinit(this);
      // reinit shares the payload reference, so give the target its own copy.
      // BUGFIX: clone into the target; the original overwrote this token's
      // own payload instead.
      if (payload != null) {
        to.payload = ((Payload)payload.clone());
      }
    }
    else if ((target instanceof TokenWrapper))
    {
      // BUGFIX: the decompiled original lost the target qualifier and
      // assigned a nonexistent local; install the clone as the wrapper's
      // delegate.
      ((TokenWrapper)target).delegate = ((Token)clone());
    }
    else
    {
      initTermBuffer();
      ((TermAttribute)target).setTermBuffer(termBuffer, 0, termLength);
      ((OffsetAttribute)target).setOffset(startOffset, endOffset);
      ((PositionIncrementAttribute)target).setPositionIncrement(positionIncrement);
      ((PayloadAttribute)target).setPayload(payload == null ? null : (Payload)payload.clone());
      ((FlagsAttribute)target).setFlags(flags);
      ((TypeAttribute)target).setType(type);
    }
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.Token
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import java.io.IOException;

/**
 * A TokenStream whose input is another TokenStream. Subclasses override
 * token production; end/close/reset are forwarded to the wrapped stream.
 */
public abstract class TokenFilter
  extends TokenStream
{
  /** The source stream this filter reads from. */
  protected final TokenStream input;
  
  // Shares the input's attributes (super(input)) so filter and source
  // operate on the same attribute instances.
  protected TokenFilter(TokenStream input)
  {
    super(input);
    this.input = input;
  }
  
  public void end()
    throws IOException
  {
    input.end();
  }
  
  public void close()
    throws IOException
  {
    input.close();
  }
  
  public void reset()
    throws IOException
  {
    input.reset();
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.TokenFilter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

class TokenStream$1 {}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.TokenStream.1
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import java.lang.reflect.Method;

/**
 * Decompiled duplicate of TokenStream's nested MethodSupport class: records
 * which of the three token-production methods (incrementToken, next(Token),
 * next()) a concrete TokenStream subclass overrides. The
 * TokenStream.class$... expressions are javac's synthetic class-literal
 * caching for pre-1.5 bytecode and are not valid source as written.
 *
 * @deprecated
 */
final class TokenStream$MethodSupport
{
  final boolean hasIncrementToken;
  final boolean hasReusableNext;
  final boolean hasNext;
  
  TokenStream$MethodSupport(Class clazz)
  {
    hasIncrementToken = isMethodOverridden(clazz, "incrementToken", METHOD_NO_PARAMS);
    hasReusableNext = isMethodOverridden(clazz, "next", METHOD_TOKEN_PARAM);
    hasNext = isMethodOverridden(clazz, "next", METHOD_NO_PARAMS);
  }
  
  // True if the declaring class of the resolved method is not TokenStream
  // itself, i.e. some subclass overrides it.
  private static boolean isMethodOverridden(Class clazz, String name, Class[] params)
  {
    try
    {
      return clazz.getMethod(name, params).getDeclaringClass() != (TokenStream.class$org$apache$lucene$analysis$TokenStream == null ? (TokenStream.class$org$apache$lucene$analysis$TokenStream = TokenStream.class$("org.apache.lucene.analysis.TokenStream")) : TokenStream.class$org$apache$lucene$analysis$TokenStream);
    }
    catch (NoSuchMethodException e)
    {
      throw new RuntimeException(e);
    }
  }
  
  private static final Class[] METHOD_NO_PARAMS = new Class[0];
  private static final Class[] METHOD_TOKEN_PARAM = { TokenStream.class$org$apache$lucene$analysis$Token == null ? (TokenStream.class$org$apache$lucene$analysis$Token = TokenStream.class$("org.apache.lucene.analysis.Token")) : TokenStream.class$org$apache$lucene$analysis$Token };
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.TokenStream.MethodSupport
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeSource.AttributeFactory;

/**
 * Decompiled duplicate of TokenStream's nested TokenWrapperAttributeFactory:
 * answers the six basic token attributes with a TokenWrapper instance and
 * delegates every other attribute class to the wrapped factory.
 *
 * @deprecated
 */
final class TokenStream$TokenWrapperAttributeFactory
  extends AttributeSource.AttributeFactory
{
  private final AttributeSource.AttributeFactory delegate;
  
  TokenStream$TokenWrapperAttributeFactory(AttributeSource.AttributeFactory x0, TokenStream.1 x1)
  {
    this(x0);
  }
  
  private TokenStream$TokenWrapperAttributeFactory(AttributeSource.AttributeFactory delegate)
  {
    this.delegate = delegate;
  }
  
  public AttributeImpl createAttributeInstance(Class attClass)
  {
    return attClass.isAssignableFrom(TokenStream.class$org$apache$lucene$analysis$TokenWrapper == null ? (TokenStream.class$org$apache$lucene$analysis$TokenWrapper = TokenStream.class$("org.apache.lucene.analysis.TokenWrapper")) : TokenStream.class$org$apache$lucene$analysis$TokenWrapper) ? new TokenWrapper() : delegate.createAttributeInstance(attClass);
  }
  
  public boolean equals(Object other)
  {
    if (this == other) {
      return true;
    }
    if ((other instanceof TokenWrapperAttributeFactory))
    {
      TokenWrapperAttributeFactory af = (TokenWrapperAttributeFactory)other;
      // BUGFIX: compare against the OTHER factory's delegate; the decompiled
      // original compared this.delegate with itself and always returned true.
      return delegate.equals(af.delegate);
    }
    return false;
  }
  
  public int hashCode()
  {
    // XOR with a constant so a factory never collides with its bare delegate.
    return delegate.hashCode() ^ 0xA45FF31;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.TokenStream.TokenWrapperAttributeFactory
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import java.io.IOException;
import java.lang.reflect.Method;
import java.util.IdentityHashMap;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.AttributeSource.AttributeFactory;

/**
 * Base class of the analysis chain. Supports both the new attribute-based
 * API (incrementToken) and the deprecated Token-returning API (next /
 * next(Token)), bridging between them via an internal TokenWrapper.
 */
public abstract class TokenStream
  extends AttributeSource
{
  /**
   * @deprecated
   */
  private static final AttributeSource.AttributeFactory DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY = new TokenWrapperAttributeFactory(AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, null);
  /**
   * @deprecated
   */
  private final TokenWrapper tokenWrapper;
  /**
   * @deprecated
   */
  private static boolean onlyUseNewAPI = false;
  /**
   * @deprecated
   */
  private final MethodSupport supportedMethods = getSupportedMethods(getClass());
  
  /**
   * Records which of the three token-production methods a concrete
   * subclass overrides, so the bridge knows which API to call.
   *
   * @deprecated
   */
  private static final class MethodSupport
  {
    final boolean hasIncrementToken;
    final boolean hasReusableNext;
    final boolean hasNext;
    
    MethodSupport(Class clazz)
    {
      hasIncrementToken = isMethodOverridden(clazz, "incrementToken", METHOD_NO_PARAMS);
      hasReusableNext = isMethodOverridden(clazz, "next", METHOD_TOKEN_PARAM);
      hasNext = isMethodOverridden(clazz, "next", METHOD_NO_PARAMS);
    }
    
    // True if the resolved method is declared below TokenStream itself.
    private static boolean isMethodOverridden(Class clazz, String name, Class[] params)
    {
      try
      {
        return clazz.getMethod(name, params).getDeclaringClass() != TokenStream.class;
      }
      catch (NoSuchMethodException e)
      {
        throw new RuntimeException(e);
      }
    }
    
    private static final Class[] METHOD_NO_PARAMS = new Class[0];
    private static final Class[] METHOD_TOKEN_PARAM = { Token.class };
  }
  
  /**
   * Per-class cache of MethodSupport; guarded by locking the map itself.
   *
   * @deprecated
   */
  private static final IdentityHashMap knownMethodSupport = new IdentityHashMap();
  
  /**
   * @deprecated
   */
  private static MethodSupport getSupportedMethods(Class clazz)
  {
    MethodSupport supportedMethods;
    synchronized (knownMethodSupport)
    {
      supportedMethods = (MethodSupport)knownMethodSupport.get(clazz);
      if (supportedMethods == null) {
        knownMethodSupport.put(clazz, supportedMethods = new MethodSupport(clazz));
      }
    }
    return supportedMethods;
  }
  
  /**
   * Attribute factory that answers the basic token attributes with a
   * TokenWrapper and delegates everything else to the wrapped factory.
   *
   * @deprecated
   */
  private static final class TokenWrapperAttributeFactory
    extends AttributeSource.AttributeFactory
  {
    private final AttributeSource.AttributeFactory delegate;
    
    TokenWrapperAttributeFactory(AttributeSource.AttributeFactory x0, TokenStream.1 x1)
    {
      this(x0);
    }
    
    private TokenWrapperAttributeFactory(AttributeSource.AttributeFactory delegate)
    {
      this.delegate = delegate;
    }
    
    public AttributeImpl createAttributeInstance(Class attClass)
    {
      return attClass.isAssignableFrom(TokenWrapper.class) ? new TokenWrapper() : delegate.createAttributeInstance(attClass);
    }
    
    public boolean equals(Object other)
    {
      if (this == other) {
        return true;
      }
      if ((other instanceof TokenWrapperAttributeFactory))
      {
        TokenWrapperAttributeFactory af = (TokenWrapperAttributeFactory)other;
        // BUGFIX: compare with the other instance's delegate; the decompiled
        // original compared this.delegate with itself (always true).
        return delegate.equals(af.delegate);
      }
      return false;
    }
    
    public int hashCode()
    {
      return delegate.hashCode() ^ 0xA45FF31;
    }
  }
  
  protected TokenStream()
  {
    super(onlyUseNewAPI ? AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY : DEFAULT_TOKEN_WRAPPER_ATTRIBUTE_FACTORY);
    
    tokenWrapper = initTokenWrapper(null);
    check();
  }
  
  protected TokenStream(AttributeSource input)
  {
    super(input);
    tokenWrapper = initTokenWrapper(input);
    check();
  }
  
  protected TokenStream(AttributeSource.AttributeFactory factory)
  {
    super(onlyUseNewAPI ? factory : new TokenWrapperAttributeFactory(factory, null));
    
    tokenWrapper = initTokenWrapper(null);
    check();
  }
  
  /**
   * Locates (or validates) the shared TokenWrapper backing the six basic
   * attributes; returns null when only the new API is in use.
   *
   * @deprecated
   */
  private TokenWrapper initTokenWrapper(AttributeSource input)
  {
    if (onlyUseNewAPI) {
      return null;
    }
    // BUGFIX: reuse the UPSTREAM stream's wrapper when chained. The
    // decompiled original tested this.tokenWrapper, which is always null
    // while this constructor-called method runs, so wrappers were never
    // shared along a filter chain.
    if (((input instanceof TokenStream)) && (((TokenStream)input).tokenWrapper != null)) {
      return ((TokenStream)input).tokenWrapper;
    }
    // All six basic attributes must resolve to one and the same TokenWrapper.
    Attribute att = addAttribute(TermAttribute.class);
    if (((att instanceof TokenWrapper)) && (addAttribute(TypeAttribute.class) == att) && (addAttribute(PositionIncrementAttribute.class) == att) && (addAttribute(FlagsAttribute.class) == att) && (addAttribute(OffsetAttribute.class) == att) && (addAttribute(PayloadAttribute.class) == att)) {
      return (TokenWrapper)att;
    }
    throw new UnsupportedOperationException("If onlyUseNewAPI is disabled, all basic Attributes must be implemented by the internal class TokenWrapper. Please make sure, that all TokenStreams/TokenFilters in this chain have been instantiated with this flag disabled and do not add any custom instances for the basic Attributes!");
  }
  
  /**
   * Verifies the subclass implements at least one token-production method
   * (and incrementToken specifically when onlyUseNewAPI is set).
   *
   * @deprecated
   */
  private void check()
  {
    if ((onlyUseNewAPI) && (!supportedMethods.hasIncrementToken)) {
      throw new UnsupportedOperationException(getClass().getName() + " does not implement incrementToken() which is needed for onlyUseNewAPI.");
    }
    if ((!supportedMethods.hasIncrementToken) && (!supportedMethods.hasNext) && (!supportedMethods.hasReusableNext)) {
      throw new UnsupportedOperationException(getClass().getName() + " does not implement any of incrementToken(), next(Token), next().");
    }
  }
  
  /**
   * Globally switches the analysis chain to the attribute-only API.
   *
   * @deprecated
   */
  public static void setOnlyUseNewAPI(boolean onlyUseNewAPI)
  {
    // BUGFIX: assign the parameter to the static field. The decompiled
    // original self-assigned the parameter, making this setter a no-op.
    TokenStream.onlyUseNewAPI = onlyUseNewAPI;
  }
  
  /**
   * @deprecated
   */
  public static boolean getOnlyUseNewAPI()
  {
    return onlyUseNewAPI;
  }
  
  /**
   * New-API entry point; bridges to the deprecated next()/next(Token) for
   * subclasses that only implement the old API.
   */
  public boolean incrementToken()
    throws IOException
  {
    assert (tokenWrapper != null);
    // BUGFIX: the decompiled original declared "Token token;" twice, which
    // does not compile; a single declaration is used here.
    final Token token;
    if (supportedMethods.hasReusableNext)
    {
      token = next(tokenWrapper.delegate);
    }
    else
    {
      assert (supportedMethods.hasNext);
      token = next();
    }
    if (token == null) {
      return false;
    }
    tokenWrapper.delegate = token;
    return true;
  }
  
  public void end()
    throws IOException
  {}
  
  /**
   * Old-API entry point; bridges to incrementToken() when the subclass
   * implements only the new API.
   *
   * @deprecated
   */
  public Token next(Token reusableToken)
    throws IOException
  {
    assert (reusableToken != null);
    if (tokenWrapper == null) {
      throw new UnsupportedOperationException("This TokenStream only supports the new Attributes API.");
    }
    if (supportedMethods.hasIncrementToken)
    {
      tokenWrapper.delegate = reusableToken;
      return incrementToken() ? tokenWrapper.delegate : null;
    }
    assert (supportedMethods.hasNext);
    return next();
  }
  
  /**
   * Old-API entry point returning a fresh Token per call (payload cloned so
   * the caller owns it).
   *
   * @deprecated
   */
  public Token next()
    throws IOException
  {
    if (tokenWrapper == null) {
      throw new UnsupportedOperationException("This TokenStream only supports the new Attributes API.");
    }
    // BUGFIX: the decompiled original re-declared nextToken inside the if,
    // shadowing the outer variable so it was read uninitialized; assign the
    // single outer variable instead.
    Token nextToken;
    if (supportedMethods.hasIncrementToken)
    {
      // Temporarily swap in a private Token so incrementToken() fills it,
      // then restore the wrapper's previous delegate.
      Token savedDelegate = tokenWrapper.delegate;
      tokenWrapper.delegate = new Token();
      nextToken = incrementToken() ? tokenWrapper.delegate : null;
      tokenWrapper.delegate = savedDelegate;
    }
    else
    {
      assert (supportedMethods.hasReusableNext);
      nextToken = next(new Token());
    }
    if (nextToken != null)
    {
      Payload p = nextToken.getPayload();
      if (p != null) {
        nextToken.setPayload((Payload)p.clone());
      }
    }
    return nextToken;
  }
  
  public void reset()
    throws IOException
  {}
  
  public void close()
    throws IOException
  {}
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.TokenStream
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.AttributeImpl;

/**
 * Single AttributeImpl implementing all six basic token attributes by
 * delegating to a wrapped (swappable) Token; bridges the old Token-based
 * API onto the new attribute API.
 *
 * @deprecated
 */
final class TokenWrapper
  extends AttributeImpl
  implements Cloneable, TermAttribute, TypeAttribute, PositionIncrementAttribute, FlagsAttribute, OffsetAttribute, PayloadAttribute
{
  // The token currently backing every attribute view; TokenStream swaps
  // this reference during old/new API bridging.
  Token delegate;
  
  TokenWrapper()
  {
    this(new Token());
  }
  
  TokenWrapper(Token delegate)
  {
    this.delegate = delegate;
  }
  
  public String term()
  {
    return delegate.term();
  }
  
  public void setTermBuffer(char[] buffer, int offset, int length)
  {
    delegate.setTermBuffer(buffer, offset, length);
  }
  
  public void setTermBuffer(String buffer)
  {
    delegate.setTermBuffer(buffer);
  }
  
  public void setTermBuffer(String buffer, int offset, int length)
  {
    delegate.setTermBuffer(buffer, offset, length);
  }
  
  public char[] termBuffer()
  {
    return delegate.termBuffer();
  }
  
  public char[] resizeTermBuffer(int newSize)
  {
    return delegate.resizeTermBuffer(newSize);
  }
  
  public int termLength()
  {
    return delegate.termLength();
  }
  
  public void setTermLength(int length)
  {
    delegate.setTermLength(length);
  }
  
  public String type()
  {
    return delegate.type();
  }
  
  public void setType(String type)
  {
    delegate.setType(type);
  }
  
  public void setPositionIncrement(int positionIncrement)
  {
    delegate.setPositionIncrement(positionIncrement);
  }
  
  public int getPositionIncrement()
  {
    return delegate.getPositionIncrement();
  }
  
  public int getFlags()
  {
    return delegate.getFlags();
  }
  
  public void setFlags(int flags)
  {
    delegate.setFlags(flags);
  }
  
  public int startOffset()
  {
    return delegate.startOffset();
  }
  
  public void setOffset(int startOffset, int endOffset)
  {
    delegate.setOffset(startOffset, endOffset);
  }
  
  public int endOffset()
  {
    return delegate.endOffset();
  }
  
  public Payload getPayload()
  {
    return delegate.getPayload();
  }
  
  public void setPayload(Payload payload)
  {
    delegate.setPayload(payload);
  }
  
  public void clear()
  {
    delegate.clear();
  }
  
  public String toString()
  {
    return delegate.toString();
  }
  
  public int hashCode()
  {
    return delegate.hashCode();
  }
  
  public boolean equals(Object other)
  {
    if ((other instanceof TokenWrapper)) {
      // BUGFIX: compare against the OTHER wrapper's delegate; the decompiled
      // original compared delegate with itself and returned true for any
      // pair of TokenWrappers.
      return delegate.equals(((TokenWrapper)other).delegate);
    }
    return false;
  }
  
  public Object clone()
  {
    return new TokenWrapper((Token)delegate.clone());
  }
  
  public void copyTo(AttributeImpl target)
  {
    if ((target instanceof TokenWrapper)) {
      // BUGFIX: install the cloned token into the TARGET wrapper; the
      // decompiled original overwrote this wrapper's own delegate, leaving
      // the target untouched.
      ((TokenWrapper)target).delegate = ((Token)delegate.clone());
    } else {
      delegate.copyTo(target);
    }
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.TokenWrapper
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import java.io.IOException;
import java.io.Reader;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.AttributeSource.AttributeFactory;

/**
 * A TokenStream whose input is a Reader. Reader-taking constructors wrap
 * the reader via CharReader.get so character offsets can be corrected.
 */
public abstract class Tokenizer
  extends TokenStream
{
  /** The character source; may be null until a reset(Reader) call. */
  protected Reader input;
  
  protected Tokenizer() {}
  
  // Wraps the reader in a CharStream for offset correction.
  protected Tokenizer(Reader input)
  {
    this.input = CharReader.get(input);
  }
  
  protected Tokenizer(AttributeSource.AttributeFactory factory)
  {
    super(factory);
  }
  
  protected Tokenizer(AttributeSource.AttributeFactory factory, Reader input)
  {
    super(factory);
    this.input = CharReader.get(input);
  }
  
  protected Tokenizer(AttributeSource source)
  {
    super(source);
  }
  
  protected Tokenizer(AttributeSource source, Reader input)
  {
    super(source);
    this.input = CharReader.get(input);
  }
  
  public void close()
    throws IOException
  {
    if (input != null)
    {
      input.close();
      // Drop the reference so the reader can be collected and a second
      // close() is a harmless no-op.
      input = null;
    }
  }
  
  // Maps a tokenizer-relative offset through the CharStream's correction
  // when the input is one; otherwise returns the offset unchanged.
  protected final int correctOffset(int currentOff)
  {
    return (input instanceof CharStream) ? ((CharStream)input).correctOffset(currentOff) : currentOff;
  }
  
  // NOTE(review): unlike the Reader constructors this does NOT wrap the
  // reader via CharReader.get, so correctOffset() only applies when the
  // caller passes a CharStream - confirm this asymmetry is intended.
  public void reset(Reader input)
    throws IOException
  {
    this.input = input;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.Tokenizer
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import java.io.IOException;
import java.io.Reader;

/**
 * An Analyzer that divides text at whitespace, delegating all work to
 * {@link WhitespaceTokenizer}.
 */
public final class WhitespaceAnalyzer
  extends Analyzer
{
  /** Builds a fresh WhitespaceTokenizer over the reader on every call. */
  public TokenStream tokenStream(String fieldName, Reader reader)
  {
    return new WhitespaceTokenizer(reader);
  }
  
  /**
   * Returns a per-thread reusable WhitespaceTokenizer, creating it on first
   * use and resetting it onto the new reader afterwards.
   */
  public TokenStream reusableTokenStream(String fieldName, Reader reader)
    throws IOException
  {
    Tokenizer reusable = (Tokenizer)getPreviousTokenStream();
    if (reusable != null)
    {
      reusable.reset(reader);
      return reusable;
    }
    reusable = new WhitespaceTokenizer(reader);
    setPreviousTokenStream(reusable);
    return reusable;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.WhitespaceAnalyzer
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import java.io.Reader;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.AttributeSource.AttributeFactory;

/**
 * A tokenizer that splits text at whitespace: adjacent non-whitespace
 * characters form a token. Character classification is delegated to
 * {@link Character#isWhitespace(char)}.
 */
public class WhitespaceTokenizer
  extends CharTokenizer
{
  /** Tokenizes the given reader. */
  public WhitespaceTokenizer(Reader in)
  {
    super(in);
  }
  
  /** Tokenizes the given reader, sharing attributes with {@code source}. */
  public WhitespaceTokenizer(AttributeSource source, Reader in)
  {
    super(source, in);
  }
  
  /** Tokenizes the given reader, creating attributes with {@code factory}. */
  public WhitespaceTokenizer(AttributeSource.AttributeFactory factory, Reader in)
  {
    super(factory, in);
  }
  
  /** A character is part of a token exactly when it is not whitespace. */
  protected boolean isTokenChar(char c)
  {
    if (Character.isWhitespace(c)) {
      return false;
    }
    return true;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.WhitespaceTokenizer
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.HashSet;

/**
 * Loads text files that represent word lists (e.g. stop words) or stem
 * dictionaries. Entries are separated by line terminators (read with
 * {@code BufferedReader.readLine()}), and each word is trimmed of
 * surrounding whitespace.
 */
public class WordlistLoader
{
  /**
   * Loads a plain-text word file, one word per line.
   *
   * @param wordfile file containing one word per line
   * @return a HashSet of every trimmed line of the file
   * @throws IOException if the file cannot be opened or read
   */
  public static HashSet getWordSet(File wordfile)
    throws IOException
  {
    FileReader reader = null;
    try
    {
      reader = new FileReader(wordfile);
      return getWordSet(reader);
    }
    finally
    {
      if (reader != null) {
        reader.close();
      }
    }
  }
  
  /**
   * Loads a word file, skipping lines that start with {@code comment}.
   *
   * @param wordfile file containing one word per line
   * @param comment  prefix marking comment lines to ignore
   * @return a HashSet of every trimmed non-comment line
   * @throws IOException if the file cannot be opened or read
   */
  public static HashSet getWordSet(File wordfile, String comment)
    throws IOException
  {
    FileReader reader = null;
    try
    {
      reader = new FileReader(wordfile);
      return getWordSet(reader, comment);
    }
    finally
    {
      if (reader != null) {
        reader.close();
      }
    }
  }
  
  /**
   * Reads one word per line from the reader; the reader is always closed
   * before this method returns.
   *
   * @param reader source of words, one per line
   * @return a HashSet of every trimmed line
   * @throws IOException on read failure
   */
  public static HashSet getWordSet(Reader reader)
    throws IOException
  {
    HashSet result = new HashSet();
    BufferedReader br = null;
    try
    {
      // Avoid double-buffering when the caller already buffered the reader.
      br = (reader instanceof BufferedReader)
        ? (BufferedReader)reader
        : new BufferedReader(reader);
      String word;
      while ((word = br.readLine()) != null) {
        result.add(word.trim());
      }
    }
    finally
    {
      if (br != null) {
        br.close();
      }
    }
    return result;
  }
  
  /**
   * Reads one word per line from the reader, skipping lines that start with
   * {@code comment}; the reader is always closed before this method returns.
   *
   * @param reader  source of words, one per line
   * @param comment prefix marking comment lines to ignore
   * @return a HashSet of every trimmed non-comment line
   * @throws IOException on read failure
   */
  public static HashSet getWordSet(Reader reader, String comment)
    throws IOException
  {
    HashSet result = new HashSet();
    BufferedReader br = null;
    try
    {
      br = (reader instanceof BufferedReader)
        ? (BufferedReader)reader
        : new BufferedReader(reader);
      String word;
      while ((word = br.readLine()) != null) {
        if (!word.startsWith(comment)) {
          result.add(word.trim());
        }
      }
    }
    finally
    {
      if (br != null) {
        br.close();
      }
    }
    return result;
  }
  
  /**
   * Reads a stem dictionary: each line is "word&lt;tab&gt;stem" (split on the
   * first tab only). A line without a tab throws
   * ArrayIndexOutOfBoundsException, matching the historical behavior.
   *
   * BUG FIX: the previous version closed the underlying FileReader BEFORE the
   * BufferedReader wrapping it, closing the stream twice and in the wrong
   * order. Closing the wrapper alone suffices, since BufferedReader.close()
   * closes its underlying reader.
   *
   * @param wordstemfile tab-separated word/stem file
   * @return a HashMap mapping each word to its stem
   * @throws IOException          if the file cannot be opened or read
   * @throws NullPointerException if wordstemfile is null
   */
  public static HashMap getStemDict(File wordstemfile)
    throws IOException
  {
    if (wordstemfile == null) {
      throw new NullPointerException("wordstemfile may not be null");
    }
    HashMap result = new HashMap();
    BufferedReader br = null;
    FileReader fr = null;
    try
    {
      fr = new FileReader(wordstemfile);
      br = new BufferedReader(fr);
      String line;
      while ((line = br.readLine()) != null)
      {
        String[] wordstem = line.split("\t", 2);
        result.put(wordstem[0], wordstem[1]);
      }
    }
    finally
    {
      // Close the wrapper (which also closes fr); fall back to fr only if
      // the BufferedReader was never constructed.
      if (br != null) {
        br.close();
      } else if (fr != null) {
        fr.close();
      }
    }
    return result;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.WordlistLoader
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis.standard;

class StandardAnalyzer$1 {}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.standard.StandardAnalyzer.1
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis.standard;

import org.apache.lucene.analysis.TokenStream;

/**
 * Per-thread holder caching a StandardAnalyzer token chain for reuse.
 * NOTE(review): this is decompiled output — the second constructor's
 * parameter type "StandardAnalyzer.1" refers to an anonymous class and is
 * not valid Java source; it exists only to grant the outer class access to
 * the private constructor.
 */
final class StandardAnalyzer$SavedStreams
{
  // Head of the cached chain (the tokenizer that is reset on reuse).
  StandardTokenizer tokenStream;
  // Fully wrapped chain handed back to consumers.
  TokenStream filteredTokenStream;
  
  private StandardAnalyzer$SavedStreams() {}
  
  StandardAnalyzer$SavedStreams(StandardAnalyzer.1 x0)
  {
    this();
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.analysis.standard.StandardAnalyzer.SavedStreams
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.analysis.standard;

import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.util.Set;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WordlistLoader;
import org.apache.lucene.util.Version;

public class StandardAnalyzer
  extends Analyzer
{
  // Stop words applied by this analyzer instance (set in the constructors).
  private Set stopSet;
  /**
   * Per-instance copy of the acronym-replacement default, taken at
   * construction time from the static default below.
   * @deprecated
   */
  private boolean replaceInvalidAcronym = defaultReplaceInvalidAcronym;
  // Process-wide default, initialized from a system property (see static block).
  private static boolean defaultReplaceInvalidAcronym;
  // Stop-filter position-increment flags; consumed by code outside this view
  // (presumably init/tokenStream) — confirm against the full class.
  private boolean enableStopPositionIncrements;
  private boolean useDefaultStopPositionIncrements;
  
  static
  {
    // Acronym replacement defaults to ON: an unset property is treated the
    // same as "true"; any other value disables it.
    String v = System.getProperty("org.apache.lucene.analysis.standard.StandardAnalyzer.replaceInvalidAcronym");
    defaultReplaceInvalidAcronym = ((v == null) || ("true".equals(v)));
  }
  
  /**
   * Returns the default used by new StandardAnalyzer instances for
   * invalid-acronym replacement.
   *
   * @return the current process-wide default
   * @deprecated
   */
  public static boolean getDefaultReplaceInvalidAcronym()
  {
    return defaultReplaceInvalidAcronym;
  }
  
  /**
   * Sets the default used by subsequently constructed StandardAnalyzer
   * instances for invalid-acronym replacement (existing instances keep the
   * value they copied at construction).
   *
   * @param replaceInvalidAcronym the new process-wide default
   * @deprecated
   */
  public static void setDefaultReplaceInvalidAcronym(boolean replaceInvalidAcronym)
  {
    defaultReplaceInvalidAcronym = replaceInvalidAcronym;
  }
  
  /**
   * English stop words as an array, mirroring StopAnalyzer.
   * @deprecated
   */
  public static final String[] STOP_WORDS = StopAnalyzer.ENGLISH_STOP_WORDS;
  // Preferred Set form of the English stop words.
  public static final Set STOP_WORDS_SET = StopAnalyzer.ENGLISH_STOP_WORDS_SET;
  // Default cap on token length, in characters.
  public static final int DEFAULT_MAX_TOKEN_LENGTH = 255;
  
  /**
   * Builds an analyzer with the default English stop words, matching
   * Lucene 2.4 behavior.
   * @deprecated
   */
  public StandardAnalyzer()
  {
    this(Version.LUCENE_24, STOP_WORDS_SET);
  }
  
  /**
   * Builds an analyzer with the default English stop words.
   *
   * @param matchVersion Lucene compatibility version to emulate
   */
  public StandardAnalyzer(Version matchVersion)
  {
    this(matchVersion, STOP_WORDS_SET);
  }
  
  /**
   * Builds an analyzer with the given stop words, matching Lucene 2.4
   * behavior.
   *
   * @param stopWords the stop-word set to use
   * @deprecated
   */
  public StandardAnalyzer(Set stopWords)
  {
    this(Version.LUCENE_24, stopWords);
  }
  
  /**
   * Builds an analyzer with the given stop words.
   *
   * @param matchVersion Lucene compatibility version to emulate
   * @param stopWords    the stop-word set to use
   */
  public StandardAnalyzer(Version matchVersion, Set stopWords)
  {
    // Stop words must be in place before init() runs version-dependent setup.
    stopSet = stopWords;
    init(matchVersion);
  }
  
  /**
   * Builds an analyzer from an array of stop words, matching Lucene 2.4
   * behavior.
   *
   * @param stopWords stop words, converted to a Set via StopFilter.makeStopSet
   * @deprecated
   */
  public StandardAnalyzer(String[] stopWords)
  {
    this(Version.LUCENE_24, StopFilter.makeStopSet(stopWords));
  }
  
  /**
   * @deprecated
   */
  public StandardAnalyzer(File stopwords)
    throws IOException
  {
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56

Further reading...

For more information on Java 1.5 Tiger, you may find Java 1.5 Tiger, A developer's Notebook by D. Flanagan and B. McLaughlin from O'Reilly of interest.

New!JAR listings


Copyright 2006-2017. Infinite Loop Ltd