lucene-core-2.9.4-dev

}
  
  /**
   * Advances to the next non-deleted document for the current term.
   * Returns false when all df postings have been consumed.
   */
  public boolean next()
    throws IOException
  {
    while (true)
    {
      if (count == df) {
        return false;
      }
      final int code = freqStream.readVInt();
      if (currentFieldOmitTermFreqAndPositions)
      {
        // Omit-TF fields store plain doc deltas; freq is implicitly 1.
        doc += code;
        freq = 1;
      }
      else
      {
        // Low bit set means freq == 1; otherwise the freq follows.
        doc += (code >>> 1);
        freq = ((code & 0x1) != 0) ? 1 : freqStream.readVInt();
      }
      count++;
      final boolean deleted = (deletedDocs != null) && (deletedDocs.get(doc));
      if (!deleted) {
        return true;
      }
      skippingDoc();
    }
  }
  
  /**
   * Bulk-reads up to docs.length doc/freq pairs for the current term,
   * skipping deleted documents. Returns the number of entries filled.
   */
  public int read(int[] docs, int[] freqs)
    throws IOException
  {
    final int limit = docs.length;
    if (currentFieldOmitTermFreqAndPositions) {
      return readNoTf(docs, freqs, limit);
    }
    int filled = 0;
    while ((filled < limit) && (count < df))
    {
      // Low bit set means freq == 1; otherwise the freq follows the delta.
      final int code = freqStream.readVInt();
      doc += (code >>> 1);
      freq = ((code & 0x1) != 0) ? 1 : freqStream.readVInt();
      count++;
      final boolean live = (deletedDocs == null) || (!deletedDocs.get(doc));
      if (live)
      {
        docs[filled] = doc;
        freqs[filled] = freq;
        filled++;
      }
    }
    return filled;
  }
  
  /**
   * Bulk-read variant for omit-TF fields: postings are plain doc deltas
   * and every frequency is implicitly 1.
   */
  private final int readNoTf(int[] docs, int[] freqs, int length)
    throws IOException
  {
    int filled = 0;
    while ((filled < length) && (count < df))
    {
      doc += freqStream.readVInt();
      count++;
      final boolean live = (deletedDocs == null) || (!deletedDocs.get(doc));
      if (live)
      {
        docs[filled] = doc;
        freqs[filled] = 1;
        filled++;
      }
    }
    return filled;
  }
  
  /**
   * Hook called by skipTo() after a skip-list jump so subclasses (see
   * SegmentTermPositions.skipProx) can reposition their prox stream.
   * This doc/freq-only reader has no prox data, so it does nothing.
   */
  protected void skipProx(long proxPointer, int payloadLength)
    throws IOException
  {}
  
  /**
   * Advances to the first non-deleted document whose number is >= target.
   * Uses the multi-level skip list for a coarse jump when the term has at
   * least skipInterval docs, then scans linearly with next().
   * Returns false if no such document exists.
   */
  public boolean skipTo(int target)
    throws IOException
  {
    if (df >= skipInterval)
    {
      // Lazily create and initialize the skip reader on first use; it
      // reads from its own clone of the freq stream.
      if (skipListReader == null) {
        skipListReader = new DefaultSkipListReader((IndexInput)freqStream.clone(), maxSkipLevels, skipInterval);
      }
      if (!haveSkipped)
      {
        skipListReader.init(skipPointer, freqBasePointer, proxBasePointer, df, currentFieldStoresPayloads);
        haveSkipped = true;
      }
      int newCount = skipListReader.skipTo(target);
      if (newCount > count)
      {
        // The skip list moved us forward: reposition the freq stream here
        // and let subclasses reposition their prox stream lazily.
        freqStream.seek(skipListReader.getFreqPointer());
        skipProx(skipListReader.getProxPointer(), skipListReader.getPayloadLength());
        
        doc = skipListReader.getDoc();
        count = newCount;
      }
    }
    do
    {
      // Linear scan for the first surviving doc >= target.
      if (!next()) {
        return false;
      }
    } while (target > doc);
    return true;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentTermDocs
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.store.IndexInput;

/**
 * Enumerates terms of a segment's term dictionary (.tis/.tii files),
 * tracking the current term, its TermInfo, and the previous term.
 */
final class SegmentTermEnum
  extends TermEnum
  implements Cloneable
{
  private IndexInput input;
  FieldInfos fieldInfos;
  long size;                 // number of terms in the dictionary
  long position = -1L;       // ordinal of the current term, -1 before first next()
  private TermBuffer termBuffer = new TermBuffer();
  private TermBuffer prevBuffer = new TermBuffer();
  private TermBuffer scanBuffer = new TermBuffer();  // reused by scanTo()
  private TermInfo termInfo = new TermInfo();
  private int format;
  private boolean isIndex = false;
  long indexPointer = 0L;
  int indexInterval;
  int skipInterval;
  int maxSkipLevels;
  private int formatM1SkipInterval;
  
  SegmentTermEnum(IndexInput i, FieldInfos fis, boolean isi)
    throws CorruptIndexException, IOException
  {
    input = i;
    fieldInfos = fis;
    isIndex = isi;
    maxSkipLevels = 1;
    
    // The first int is either a non-negative term count (original,
    // pre-versioned format) or a negative format version number.
    int firstInt = input.readInt();
    if (firstInt >= 0)
    {
      format = 0;
      size = firstInt;
      
      indexInterval = 128;
      skipInterval = Integer.MAX_VALUE; // effectively disables skipping
    }
    else
    {
      format = firstInt;
      if (format < -4) {
        throw new CorruptIndexException("Unknown format version:" + format + " expected " + -4 + " or higher");
      }
      size = input.readLong();
      if (format == -1)
      {
        if (!isIndex)
        {
          indexInterval = input.readInt();
          formatM1SkipInterval = input.readInt();
        }
        // Format -1 skip data is unreliable; pretend no skips exist.
        skipInterval = Integer.MAX_VALUE;
      }
      else
      {
        indexInterval = input.readInt();
        skipInterval = input.readInt();
        if (format <= -3) {
          maxSkipLevels = input.readInt();
        }
      }
      assert (indexInterval > 0) : ("indexInterval=" + indexInterval + " is negative; must be > 0");
      assert (skipInterval > 0) : ("skipInterval=" + skipInterval + " is negative; must be > 0");
    }
    if (format > -4)
    {
      // Older formats stored terms in modified UTF-8.
      termBuffer.setPreUTF8Strings();
      scanBuffer.setPreUTF8Strings();
      prevBuffer.setPreUTF8Strings();
    }
  }
  
  protected Object clone()
  {
    SegmentTermEnum clone = null;
    try
    {
      clone = (SegmentTermEnum)super.clone();
    }
    catch (CloneNotSupportedException e) {}
    // Deep-copy mutable state onto the CLONE (the decompiled form assigned
    // these fields on the original instance, so clone and original would
    // have clobbered each other's stream position and buffers).
    clone.input = ((IndexInput)input.clone());
    clone.termInfo = new TermInfo(termInfo);
    
    clone.termBuffer = ((TermBuffer)termBuffer.clone());
    clone.prevBuffer = ((TermBuffer)prevBuffer.clone());
    clone.scanBuffer = new TermBuffer();
    
    return clone;
  }
  
  /** Repositions this enum at an absolute dictionary location. */
  final void seek(long pointer, long p, Term t, TermInfo ti)
    throws IOException
  {
    input.seek(pointer);
    position = p;
    termBuffer.set(t);
    prevBuffer.reset();
    termInfo.set(ti);
  }
  
  /** Advances to the next term; returns false at the end of the dictionary. */
  public final boolean next()
    throws IOException
  {
    if (position++ >= size - 1L)
    {
      prevBuffer.set(termBuffer);
      termBuffer.reset();
      return false;
    }
    prevBuffer.set(termBuffer);
    termBuffer.read(input, fieldInfos);
    
    // Freq/prox pointers are delta-encoded from the previous entry.
    termInfo.docFreq = input.readVInt();
    termInfo.freqPointer += input.readVLong();
    termInfo.proxPointer += input.readVLong();
    if (format == -1)
    {
      // Format -1 non-index files conditionally store a skip offset.
      if ((!isIndex) && 
        (termInfo.docFreq > formatM1SkipInterval)) {
        termInfo.skipOffset = input.readVInt();
      }
    }
    else if (termInfo.docFreq >= skipInterval) {
      termInfo.skipOffset = input.readVInt();
    }
    if (isIndex) {
      indexPointer += input.readVLong();
    }
    return true;
  }
  
  /** Scans forward until the current term is >= the given term; returns the number of steps taken. */
  final int scanTo(Term term)
    throws IOException
  {
    scanBuffer.set(term);
    int count = 0;
    while ((scanBuffer.compareTo(termBuffer) > 0) && (next())) {
      count++;
    }
    return count;
  }
  
  /** Returns the current term, or null before the first / after the last term. */
  public final Term term()
  {
    return termBuffer.toTerm();
  }
  
  /** Returns the term before the current one. */
  final Term prev()
  {
    return prevBuffer.toTerm();
  }
  
  /** Returns a copy of the current term's TermInfo. */
  final TermInfo termInfo()
  {
    return new TermInfo(termInfo);
  }
  
  /** Copies the current term's TermInfo into the caller-supplied object. */
  final void termInfo(TermInfo ti)
  {
    ti.set(termInfo);
  }
  
  public final int docFreq()
  {
    return termInfo.docFreq;
  }
  
  final long freqPointer()
  {
    return termInfo.freqPointer;
  }
  
  final long proxPointer()
  {
    return termInfo.proxPointer;
  }
  
  public final void close()
    throws IOException
  {
    input.close();
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentTermEnum
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

/**
 * Term-frequency vector that additionally carries per-term positions and
 * (optionally) character offsets.
 */
class SegmentTermPositionVector
  extends SegmentTermVector
  implements TermPositionVector
{
  protected int[][] positions;
  protected TermVectorOffsetInfo[][] offsets;
  public static final int[] EMPTY_TERM_POS = new int[0];
  
  public SegmentTermPositionVector(String field, String[] terms, int[] termFreqs, int[][] positions, TermVectorOffsetInfo[][] offsets)
  {
    super(field, terms, termFreqs);
    this.positions = positions;
    this.offsets = offsets;
  }
  
  /**
   * Returns the offsets for the term at {@code index}; an empty array when
   * the index is out of range, or null when no offsets were stored at all.
   */
  public TermVectorOffsetInfo[] getOffsets(int index)
  {
    if (offsets == null) {
      return null;
    }
    if ((index < 0) || (index >= offsets.length)) {
      return TermVectorOffsetInfo.EMPTY_OFFSET_INFO;
    }
    return offsets[index];
  }
  
  /**
   * Returns the positions for the term at {@code index}; an empty array when
   * the index is out of range, or null when no positions were stored at all.
   */
  public int[] getTermPositions(int index)
  {
    if (positions == null) {
      return null;
    }
    if ((index < 0) || (index >= positions.length)) {
      return EMPTY_TERM_POS;
    }
    return positions[index];
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentTermPositionVector
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.store.IndexInput;

/**
 * TermPositions implementation over a segment's .prx stream. Extends
 * SegmentTermDocs with lazily-loaded positions and payloads: seeking only
 * records where to jump; the prox stream is repositioned on first use.
 */
final class SegmentTermPositions
  extends SegmentTermDocs
  implements TermPositions
{
  private IndexInput proxStream;        // lazily cloned from parent.core.proxStream
  private int proxCount;                // positions left to read for the current doc
  private int position;                 // running (delta-decoded) position
  private int payloadLength;            // length of the payload at the current position
  private boolean needToLoadPayload;    // true while a payload is pending on the stream
  private long lazySkipPointer = -1L;   // pending prox-stream seek target, -1 if none
  private int lazySkipProxCount = 0;    // pending number of positions to skip over
  
  SegmentTermPositions(SegmentReader p)
  {
    super(p);
    proxStream = null; // lazily cloned on first nextPosition()
  }
  
  final void seek(TermInfo ti, Term term)
    throws IOException
  {
    super.seek(ti, term);
    if (ti != null) {
      lazySkipPointer = proxPointer;
    }
    lazySkipProxCount = 0;
    proxCount = 0;
    payloadLength = 0;
    needToLoadPayload = false;
  }
  
  public final void close()
    throws IOException
  {
    super.close();
    if (proxStream != null) {
      proxStream.close();
    }
  }
  
  public final int nextPosition()
    throws IOException
  {
    if (currentFieldOmitTermFreqAndPositions) {
      // Positions were not indexed for this field.
      return 0;
    }
    lazySkip();
    proxCount -= 1;
    return position += readDeltaPosition();
  }
  
  private final int readDeltaPosition()
    throws IOException
  {
    int delta = proxStream.readVInt();
    if (currentFieldStoresPayloads)
    {
      // Low bit flags a payload-length change; the length follows.
      if ((delta & 0x1) != 0) {
        payloadLength = proxStream.readVInt();
      }
      delta >>>= 1;
      needToLoadPayload = true;
    }
    return delta;
  }
  
  protected final void skippingDoc()
    throws IOException
  {
    // Defer skipping this doc's positions until they are actually needed.
    lazySkipProxCount += freq;
  }
  
  public final boolean next()
    throws IOException
  {
    // Any unread positions of the previous doc must be skipped lazily.
    lazySkipProxCount += proxCount;
    if (super.next())
    {
      proxCount = freq;
      position = 0;
      return true;
    }
    return false;
  }
  
  public final int read(int[] docs, int[] freqs)
  {
    throw new UnsupportedOperationException("TermPositions does not support processing multiple documents in one call. Use TermDocs instead.");
  }
  
  protected void skipProx(long proxPointer, int payloadLength)
    throws IOException
  {
    // Called after a skip-list jump: record the new prox position but do
    // not seek until positions are requested.
    lazySkipPointer = proxPointer;
    lazySkipProxCount = 0;
    proxCount = 0;
    this.payloadLength = payloadLength;
    needToLoadPayload = false;
  }
  
  private void skipPositions(int n)
    throws IOException
  {
    assert (!currentFieldOmitTermFreqAndPositions);
    for (int f = n; f > 0; f--)
    {
      readDeltaPosition();
      skipPayload();
    }
  }
  
  private void skipPayload()
    throws IOException
  {
    if ((needToLoadPayload) && (payloadLength > 0)) {
      proxStream.seek(proxStream.getFilePointer() + payloadLength);
    }
    needToLoadPayload = false;
  }
  
  private void lazySkip()
    throws IOException
  {
    if (proxStream == null) {
      // Clone now so the stream is only created for consumers that
      // actually ask for positions.
      proxStream = ((IndexInput)parent.core.proxStream.clone());
    }
    // An unloaded payload must be skipped before any seek/read.
    skipPayload();
    if (lazySkipPointer != -1L)
    {
      proxStream.seek(lazySkipPointer);
      lazySkipPointer = -1L;
    }
    if (lazySkipProxCount != 0)
    {
      skipPositions(lazySkipProxCount);
      lazySkipProxCount = 0;
    }
  }
  
  public int getPayloadLength()
  {
    return payloadLength;
  }
  
  public byte[] getPayload(byte[] data, int offset)
    throws IOException
  {
    if (!needToLoadPayload) {
      throw new IOException("Either no payload exists at this term position or an attempt was made to load it more than once.");
    }
    // Reuse the caller's buffer when it has room; otherwise allocate.
    // (The decompiled form declared retOffset twice and shadowed retArray
    // in the allocation branch, which does not compile.)
    byte[] retArray;
    int retOffset;
    if ((data == null) || (data.length - offset < payloadLength))
    {
      retArray = new byte[payloadLength];
      retOffset = 0;
    }
    else
    {
      retArray = data;
      retOffset = offset;
    }
    proxStream.readBytes(retArray, retOffset, payloadLength);
    needToLoadPayload = false;
    return retArray;
  }
  
  public boolean isPayloadAvailable()
  {
    return (needToLoadPayload) && (payloadLength > 0);
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentTermPositions
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.util.Arrays;

/**
 * Basic term-frequency vector for one field: a sorted array of terms and
 * their parallel frequencies.
 */
class SegmentTermVector
  implements TermFreqVector
{
  private String field;
  private String[] terms;     // sorted, enabling binary search in indexOf()
  private int[] termFreqs;    // parallel to terms
  
  SegmentTermVector(String field, String[] terms, int[] termFreqs)
  {
    this.field = field;
    this.terms = terms;
    this.termFreqs = termFreqs;
  }
  
  public String getField()
  {
    return field;
  }
  
  public String toString()
  {
    StringBuffer buf = new StringBuffer();
    buf.append('{');
    buf.append(field).append(": ");
    if (terms != null)
    {
      String sep = "";
      for (int i = 0; i < terms.length; i++)
      {
        buf.append(sep).append(terms[i]).append('/').append(termFreqs[i]);
        sep = ", ";
      }
    }
    buf.append('}');
    
    return buf.toString();
  }
  
  public int size()
  {
    return terms != null ? terms.length : 0;
  }
  
  public String[] getTerms()
  {
    return terms;
  }
  
  public int[] getTermFrequencies()
  {
    return termFreqs;
  }
  
  /** Returns the index of termText in the sorted terms array, or -1 if absent. */
  public int indexOf(String termText)
  {
    if (terms == null) {
      return -1;
    }
    int pos = Arrays.binarySearch(terms, termText);
    return pos < 0 ? -1 : pos;
  }
  
  /** Looks up len terms starting at start; each result is an index or -1. */
  public int[] indexesOf(String[] termNumbers, int start, int len)
  {
    int[] result = new int[len];
    for (int i = 0; i < len; i++) {
      result[i] = indexOf(termNumbers[start + i]);
    }
    return result;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentTermVector
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.util.Collection;
import java.util.HashSet;
import org.apache.lucene.store.Directory;

/**
 * Mutable bag of per-flush state shared by the consumers that write one
 * segment: target directory, segment names, doc counts, and the set of
 * files produced by the flush.
 */
class SegmentWriteState
{
  DocumentsWriter docWriter;
  Directory directory;
  String segmentName;
  String docStoreSegmentName;
  int numDocs;
  int termIndexInterval;
  int numDocsInStore;
  Collection flushedFiles;   // file names written during this flush
  
  public SegmentWriteState(DocumentsWriter docWriter, Directory directory, String segmentName, String docStoreSegmentName, int numDocs, int numDocsInStore, int termIndexInterval)
  {
    this.docWriter = docWriter;
    this.directory = directory;
    this.segmentName = segmentName;
    this.docStoreSegmentName = docStoreSegmentName;
    this.numDocs = numDocs;
    this.termIndexInterval = termIndexInterval;
    this.numDocsInStore = numDocsInStore;
    this.flushedFiles = new HashSet();
  }
  
  /** Returns "segmentName.ext" for the given file extension. */
  public String segmentFileName(String ext)
  {
    return segmentName + "." + ext;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentWriteState
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

/**
 * MergeScheduler that runs every pending merge sequentially on the thread
 * that calls {@link #merge}.
 */
public class SerialMergeScheduler
  extends MergeScheduler
{
  /** Drains and executes all merges the writer has pending, one at a time. */
  public synchronized void merge(IndexWriter writer)
    throws CorruptIndexException, IOException
  {
    MergePolicy.OneMerge merge = writer.getNextMerge();
    while (merge != null)
    {
      writer.merge(merge);
      merge = writer.getNextMerge();
    }
  }
  
  /** Nothing to release: no threads or resources are owned. */
  public void close() {}
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SerialMergeScheduler
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import org.apache.lucene.store.Directory;

/**
 * Decompiled synthetic form of SnapshotDeletionPolicy.MyCommitPoint: wraps
 * an IndexCommit and refuses to delete it while it is the active snapshot.
 * All other methods delegate to the wrapped commit.
 */
class SnapshotDeletionPolicy$MyCommitPoint
  extends IndexCommit
{
  IndexCommit cp;
  private final SnapshotDeletionPolicy this$0;
  
  SnapshotDeletionPolicy$MyCommitPoint(SnapshotDeletionPolicy paramSnapshotDeletionPolicy, IndexCommit cp)
  {
    // The decompiler dropped this assignment; without it the final field
    // is never initialized and delete() would throw NullPointerException.
    this$0 = paramSnapshotDeletionPolicy;
    this.cp = cp;
  }
  
  public String toString()
  {
    return "SnapshotDeletionPolicy.SnapshotCommitPoint(" + cp + ")";
  }
  
  public String getSegmentsFileName()
  {
    return cp.getSegmentsFileName();
  }
  
  public Collection getFileNames()
    throws IOException
  {
    return cp.getFileNames();
  }
  
  public Directory getDirectory()
  {
    return cp.getDirectory();
  }
  
  public void delete()
  {
    synchronized (this$0)
    {
      // Only delete when this commit is not the snapshotted one.
      if ((SnapshotDeletionPolicy.access$000(this$0) == null) || (!SnapshotDeletionPolicy.access$000(this$0).equals(getSegmentsFileName()))) {
        cp.delete();
      }
    }
  }
  
  public boolean isDeleted()
  {
    return cp.isDeleted();
  }
  
  public long getVersion()
  {
    return cp.getVersion();
  }
  
  public long getGeneration()
  {
    return cp.getGeneration();
  }
  
  public Map getUserData()
    throws IOException
  {
    return cp.getUserData();
  }
  
  public boolean isOptimized()
  {
    return cp.isOptimized();
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SnapshotDeletionPolicy.MyCommitPoint
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.lucene.store.Directory;

/**
 * IndexDeletionPolicy decorator that can "snapshot" the most recent commit,
 * protecting its files from deletion until {@link #release} is called. All
 * policy decisions are delegated to the wrapped primary policy; commits are
 * wrapped so their delete() is intercepted.
 */
public class SnapshotDeletionPolicy
  implements IndexDeletionPolicy
{
  private IndexCommit lastCommit;        // most recent commit seen
  private IndexDeletionPolicy primary;   // the real policy being decorated
  private String snapshot;               // segments file of the held snapshot, or null
  
  public SnapshotDeletionPolicy(IndexDeletionPolicy primary)
  {
    this.primary = primary;
  }
  
  public synchronized void onInit(List commits)
    throws IOException
  {
    primary.onInit(wrapCommits(commits));
    lastCommit = (IndexCommit)commits.get(commits.size() - 1);
  }
  
  public synchronized void onCommit(List commits)
    throws IOException
  {
    primary.onCommit(wrapCommits(commits));
    lastCommit = (IndexCommit)commits.get(commits.size() - 1);
  }
  
  /**
   * Pins the latest commit; its files will not be deleted until release().
   * @throws IllegalStateException if no commit exists or one is already pinned
   */
  public synchronized IndexCommitPoint snapshot()
  {
    if (lastCommit == null) {
      throw new IllegalStateException("no index commits to snapshot !");
    }
    if (snapshot != null) {
      throw new IllegalStateException("snapshot is already set; please call release() first");
    }
    snapshot = lastCommit.getSegmentsFileName();
    return lastCommit;
  }
  
  /** Releases the pinned commit so future deletions may remove it. */
  public synchronized void release()
  {
    if (snapshot == null) {
      throw new IllegalStateException("snapshot was not set; please call snapshot() first");
    }
    snapshot = null;
  }
  
  /** Wraps a commit so delete() is a no-op while it is the active snapshot. */
  private class MyCommitPoint
    extends IndexCommit
  {
    IndexCommit cp;
    
    MyCommitPoint(IndexCommit cp)
    {
      this.cp = cp;
    }
    
    public String toString()
    {
      return "SnapshotDeletionPolicy.SnapshotCommitPoint(" + cp + ")";
    }
    
    public String getSegmentsFileName()
    {
      return cp.getSegmentsFileName();
    }
    
    public Collection getFileNames()
      throws IOException
    {
      return cp.getFileNames();
    }
    
    public Directory getDirectory()
    {
      return cp.getDirectory();
    }
    
    public void delete()
    {
      synchronized (SnapshotDeletionPolicy.this)
      {
        boolean pinned = (snapshot != null) && (snapshot.equals(getSegmentsFileName()));
        if (!pinned) {
          cp.delete();
        }
      }
    }
    
    public boolean isDeleted()
    {
      return cp.isDeleted();
    }
    
    public long getVersion()
    {
      return cp.getVersion();
    }
    
    public long getGeneration()
    {
      return cp.getGeneration();
    }
    
    public Map getUserData()
      throws IOException
    {
      return cp.getUserData();
    }
    
    public boolean isOptimized()
    {
      return cp.isOptimized();
    }
  }
  
  private List wrapCommits(List commits)
  {
    int n = commits.size();
    List wrapped = new ArrayList(n);
    for (int idx = 0; idx < n; idx++) {
      wrapped.add(new MyCommitPoint((IndexCommit)commits.get(idx)));
    }
    return wrapped;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SnapshotDeletionPolicy
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;

/**
 * TermVectorMapper that merges terms from all fields into one sorted set
 * under the pseudo-field {@link #ALL}, accumulating frequencies and
 * (optionally) concatenating offsets and positions per term.
 */
public class SortedTermVectorMapper
  extends TermVectorMapper
{
  private SortedSet currentSet;          // entries ordered by the supplied comparator
  private Map termToTVE = new HashMap(); // term text -> its TermVectorEntry
  private boolean storeOffsets;
  private boolean storePositions;
  public static final String ALL = "_ALL_";
  
  public SortedTermVectorMapper(Comparator comparator)
  {
    this(false, false, comparator);
  }
  
  public SortedTermVectorMapper(boolean ignoringPositions, boolean ignoringOffsets, Comparator comparator)
  {
    super(ignoringPositions, ignoringOffsets);
    currentSet = new TreeSet(comparator);
  }
  
  /**
   * Records one term occurrence set: creates a new entry on first sight,
   * otherwise bumps the frequency and appends offsets/positions as enabled.
   */
  public void map(String term, int frequency, TermVectorOffsetInfo[] offsets, int[] positions)
  {
    TermVectorEntry entry = (TermVectorEntry)termToTVE.get(term);
    if (entry == null)
    {
      // Use the ALL constant rather than repeating the literal.
      entry = new TermVectorEntry(ALL, term, frequency,
          storeOffsets ? offsets : null,
          storePositions ? positions : null);
      
      termToTVE.put(term, entry);
      currentSet.add(entry);
    }
    else
    {
      entry.setFrequency(entry.getFrequency() + frequency);
      if (storeOffsets) {
        mergeOffsets(entry, offsets);
      }
      if (storePositions) {
        mergePositions(entry, positions);
      }
    }
  }
  
  /** Appends the new offsets to the entry's existing offsets (if any). */
  private void mergeOffsets(TermVectorEntry entry, TermVectorOffsetInfo[] offsets)
  {
    if ((offsets == null) || (offsets.length == 0)) {
      return;
    }
    TermVectorOffsetInfo[] existing = entry.getOffsets();
    if (existing == null)
    {
      entry.setOffsets(offsets);
    }
    else
    {
      TermVectorOffsetInfo[] merged = new TermVectorOffsetInfo[existing.length + offsets.length];
      System.arraycopy(existing, 0, merged, 0, existing.length);
      System.arraycopy(offsets, 0, merged, existing.length, offsets.length);
      entry.setOffsets(merged);
    }
  }
  
  /** Appends the new positions to the entry's existing positions (if any). */
  private void mergePositions(TermVectorEntry entry, int[] positions)
  {
    if ((positions == null) || (positions.length == 0)) {
      return;
    }
    int[] existing = entry.getPositions();
    if (existing == null)
    {
      entry.setPositions(positions);
    }
    else
    {
      int[] merged = new int[existing.length + positions.length];
      System.arraycopy(existing, 0, merged, 0, existing.length);
      System.arraycopy(positions, 0, merged, existing.length, positions.length);
      entry.setPositions(merged);
    }
  }
  
  public void setExpectations(String field, int numTerms, boolean storeOffsets, boolean storePositions)
  {
    this.storeOffsets = storeOffsets;
    this.storePositions = storePositions;
  }
  
  /** Returns the live sorted set of merged TermVectorEntry objects. */
  public SortedSet getTermVectorEntrySet()
  {
    return currentSet;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SortedTermVectorMapper
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

/**
 * Thrown when an operation (such as a delete) is attempted through an
 * IndexReader that no longer reflects the latest state of the index.
 */
public class StaleReaderException
  extends IOException
{
  public StaleReaderException(String message)
  {
    super(message);
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.StaleReaderException
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.store.RAMOutputStream;

/**
 * Decompiled synthetic form of StoredFieldsWriter.PerDoc: buffers one
 * document's stored fields in RAM until it can be flushed in docID order.
 */
class StoredFieldsWriter$PerDoc
  extends DocumentsWriter.DocWriter
{
  // Initialized in the constructor AFTER the outer-instance reference is
  // assigned: the decompiled field initializers dereferenced this$0 before
  // any constructor code could run, and this$0 (final) was never assigned.
  final DocumentsWriter.PerDocBuffer buffer;
  RAMOutputStream fdt;
  int numStoredFields;
  private final StoredFieldsWriter this$0;
  
  StoredFieldsWriter$PerDoc(StoredFieldsWriter paramStoredFieldsWriter)
  {
    this$0 = paramStoredFieldsWriter;
    buffer = this$0.docWriter.newPerDocBuffer();
    fdt = new RAMOutputStream(buffer);
  }
  
  /** Clears buffered fields so this object can be pooled and reused. */
  void reset()
  {
    fdt.reset();
    buffer.recycle();
    numStoredFields = 0;
  }
  
  void abort()
  {
    reset();
    this$0.free(this);
  }
  
  public long sizeInBytes()
  {
    return buffer.getSizeInBytes();
  }
  
  public void finish()
    throws IOException
  {
    this$0.finishDocument(this);
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.StoredFieldsWriter.PerDoc
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.ArrayUtil;

/**
 * Writes stored fields (.fdt/.fdx) for a segment. Per-document buffers
 * (PerDoc) are pooled and flushed in docID order; gaps for documents with
 * no stored fields are filled with skipDocument().
 */
final class StoredFieldsWriter
{
  FieldsWriter fieldsWriter;
  final DocumentsWriter docWriter;
  final FieldInfos fieldInfos;
  int lastDocID;                           // next docID expected by fieldsWriter
  PerDoc[] docFreeList = new PerDoc[1];    // pool of reusable PerDoc buffers
  int freeCount;
  int allocCount;
  
  public StoredFieldsWriter(DocumentsWriter docWriter, FieldInfos fieldInfos)
  {
    this.docWriter = docWriter;
    this.fieldInfos = fieldInfos;
  }
  
  public StoredFieldsWriterPerThread addThread(DocumentsWriter.DocState docState)
    throws IOException
  {
    return new StoredFieldsWriterPerThread(docState, this);
  }
  
  public synchronized void flush(SegmentWriteState state)
    throws IOException
  {
    // NOTE: these counters live on the SegmentWriteState parameter; the
    // decompiled form referenced them unqualified, which does not compile.
    if (state.numDocsInStore > 0)
    {
      initFieldsWriter();
      
      // Pad out documents that had no stored fields.
      fill(state.numDocsInStore - docWriter.getDocStoreOffset());
    }
    if (fieldsWriter != null) {
      fieldsWriter.flush();
    }
  }
  
  private void initFieldsWriter()
    throws IOException
  {
    if (fieldsWriter == null)
    {
      String docStoreSegment = docWriter.getDocStoreSegment();
      if (docStoreSegment != null)
      {
        fieldsWriter = new FieldsWriter(docWriter.directory, docStoreSegment, fieldInfos);
        
        docWriter.addOpenFile(docStoreSegment + "." + "fdt");
        docWriter.addOpenFile(docStoreSegment + "." + "fdx");
        lastDocID = 0;
      }
    }
  }
  
  public synchronized void closeDocStore(SegmentWriteState state)
    throws IOException
  {
    int inc = state.numDocsInStore - lastDocID;
    if (inc > 0)
    {
      initFieldsWriter();
      fill(state.numDocsInStore - docWriter.getDocStoreOffset());
    }
    if (fieldsWriter != null)
    {
      fieldsWriter.close();
      fieldsWriter = null;
      lastDocID = 0;
      assert (state.docStoreSegmentName != null);
      state.flushedFiles.add(state.docStoreSegmentName + "." + "fdt");
      state.flushedFiles.add(state.docStoreSegmentName + "." + "fdx");
      
      docWriter.removeOpenFile(state.docStoreSegmentName + "." + "fdt");
      docWriter.removeOpenFile(state.docStoreSegmentName + "." + "fdx");
      
      // Sanity check: the index file must be header (4 bytes) plus one
      // 8-byte pointer per stored document.
      String fileName = state.docStoreSegmentName + "." + "fdx";
      if (4L + state.numDocsInStore * 8L != state.directory.fileLength(fileName)) {
        throw new RuntimeException("after flush: fdx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(fileName) + " length in bytes of " + fileName + " file exists?=" + state.directory.fileExists(fileName));
      }
    }
  }
  
  /** Returns a pooled PerDoc, growing the free list when exhausted. */
  synchronized PerDoc getPerDoc()
  {
    if (freeCount == 0)
    {
      allocCount += 1;
      if (allocCount > docFreeList.length)
      {
        assert (allocCount == 1 + docFreeList.length);
        docFreeList = new PerDoc[ArrayUtil.getNextSize(allocCount)];
      }
      return new PerDoc();
    }
    return docFreeList[(--freeCount)];
  }
  
  synchronized void abort()
  {
    if (fieldsWriter != null)
    {
      try
      {
        fieldsWriter.close();
      }
      catch (Throwable t) {}
      fieldsWriter = null;
      lastDocID = 0;
    }
  }
  
  /** Writes empty documents up to (but excluding) docID + docStoreOffset. */
  void fill(int docID)
    throws IOException
  {
    int docStoreOffset = docWriter.getDocStoreOffset();
    
    int end = docID + docStoreOffset;
    while (lastDocID < end)
    {
      fieldsWriter.skipDocument();
      lastDocID += 1;
    }
  }
  
  synchronized void finishDocument(PerDoc perDoc)
    throws IOException
  {
    assert (docWriter.writer.testPoint("StoredFieldsWriter.finishDocument start"));
    initFieldsWriter();
    
    // These are the per-document buffer's fields, not ours; the decompiled
    // form referenced them unqualified.
    fill(perDoc.docID);
    
    fieldsWriter.flushDocument(perDoc.numStoredFields, perDoc.fdt);
    lastDocID += 1;
    perDoc.reset();
    free(perDoc);
    assert (docWriter.writer.testPoint("StoredFieldsWriter.finishDocument end"));
  }
  
  public boolean freeRAM()
  {
    return false;
  }
  
  /** Returns a fully-reset PerDoc to the pool. */
  synchronized void free(PerDoc perDoc)
  {
    assert (freeCount < docFreeList.length);
    assert (0 == perDoc.numStoredFields);
    assert (0L == perDoc.fdt.length());
    assert (0L == perDoc.fdt.getFilePointer());
    docFreeList[(freeCount++)] = perDoc;
  }
  
  /** Per-document RAM buffer of stored fields, flushed in docID order. */
  class PerDoc
    extends DocumentsWriter.DocWriter
  {
    final DocumentsWriter.PerDocBuffer buffer = docWriter.newPerDocBuffer();
    RAMOutputStream fdt = new RAMOutputStream(buffer);
    int numStoredFields;
    
    PerDoc() {}
    
    void reset()
    {
      fdt.reset();
      buffer.recycle();
      numStoredFields = 0;
    }
    
    void abort()
    {
      reset();
      free(this);
    }
    
    public long sizeInBytes()
    {
      return buffer.getSizeInBytes();
    }
    
    public void finish()
      throws IOException
    {
      finishDocument(this);
    }
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.StoredFieldsWriter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMOutputStream;

/**
 * Per-thread front end to StoredFieldsWriter: buffers one document's
 * stored fields into a pooled PerDoc via a stream-less FieldsWriter.
 */
final class StoredFieldsWriterPerThread
{
  final FieldsWriter localFieldsWriter;
  final StoredFieldsWriter storedFieldsWriter;
  final DocumentsWriter.DocState docState;
  StoredFieldsWriter.PerDoc doc;   // null until the first stored field arrives
  
  public StoredFieldsWriterPerThread(DocumentsWriter.DocState docState, StoredFieldsWriter storedFieldsWriter)
    throws IOException
  {
    this.storedFieldsWriter = storedFieldsWriter;
    this.docState = docState;
    // fieldInfos belongs to the shared StoredFieldsWriter; the decompiled
    // form referenced it unqualified, which does not compile here.
    localFieldsWriter = new FieldsWriter((IndexOutput)null, (IndexOutput)null, storedFieldsWriter.fieldInfos);
  }
  
  public void startDocument()
  {
    if (doc != null)
    {
      // Only non-null if the previous doc hit a non-aborting exception
      // while writing stored fields: clear the leftover state.
      doc.reset();
      doc.docID = docState.docID;
    }
  }
  
  public void addField(Fieldable field, FieldInfo fieldInfo)
    throws IOException
  {
    if (doc == null)
    {
      // Lazily allocate the per-doc buffer on the first stored field.
      doc = storedFieldsWriter.getPerDoc();
      doc.docID = docState.docID;
      localFieldsWriter.setFieldsStream(doc.fdt);
      assert (doc.numStoredFields == 0) : ("doc.numStoredFields=" + doc.numStoredFields);
      assert (0L == doc.fdt.length());
      assert (0L == doc.fdt.getFilePointer());
    }
    localFieldsWriter.writeField(fieldInfo, field);
    assert (docState.testPoint("StoredFieldsWriterPerThread.processFields.writeField"));
    doc.numStoredFields += 1;
  }
  
  /** Hands the buffered document off to the writer; clears local state. */
  public DocumentsWriter.DocWriter finishDocument()
  {
    try
    {
      return doc;
    }
    finally
    {
      doc = null;
    }
  }
  
  public void abort()
  {
    if (doc != null)
    {
      doc.abort();
      doc = null;
    }
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.StoredFieldsWriterPerThread
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import org.apache.lucene.util.StringHelper;

/**
 * A Term represents a word from text: the smallest unit of search. It is
 * composed of the name of the field the word occurred in and the word's text.
 * Field names are interned (via {@link StringHelper#intern}), so they may be
 * compared with {@code ==}.
 */
public final class Term
  implements Comparable, Serializable
{
  String field;
  String text;
  
  /** Constructs a Term with the given field and text; the field is interned. */
  public Term(String fld, String txt)
  {
    field = StringHelper.intern(fld);
    text = txt;
  }
  
  /** Constructs a Term with the given field and empty text. */
  public Term(String fld)
  {
    this(fld, "", true);
  }
  
  /** Internal constructor; skips interning when the caller guarantees it. */
  Term(String fld, String txt, boolean intern)
  {
    field = (intern ? StringHelper.intern(fld) : fld);
    text = txt;
  }
  
  /** Returns the field of this term (always interned). */
  public final String field()
  {
    return field;
  }
  
  /** Returns the text of this term. */
  public final String text()
  {
    return text;
  }
  
  /**
   * Optimized construction of a new Term with the same field as this one,
   * avoiding a re-intern of the (already interned) field name.
   */
  public Term createTerm(String text)
  {
    return new Term(field, text, false);
  }
  
  public boolean equals(Object obj)
  {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    Term other = (Term)obj;
    // Fixed decompiler artifact: comparisons must be against "other"'s
    // fields, not this object's own fields.
    if (field == null)
    {
      if (other.field != null) {
        return false;
      }
    }
    else if (!field.equals(other.field)) {
      return false;
    }
    if (text == null)
    {
      if (other.text != null) {
        return false;
      }
    }
    else if (!text.equals(other.text)) {
      return false;
    }
    return true;
  }
  
  public int hashCode()
  {
    int prime = 31;
    int result = 1;
    result = 31 * result + (field == null ? 0 : field.hashCode());
    result = 31 * result + (text == null ? 0 : text.hashCode());
    return result;
  }
  
  public int compareTo(Object other)
  {
    return compareTo((Term)other);
  }
  
  /**
   * Compares two terms: first by field, then by text. Returns a negative
   * integer, zero, or a positive integer as this term is less than, equal to,
   * or greater than the argument.
   */
  public final int compareTo(Term other)
  {
    if (field == other.field) { // fields are interned, identity compare is valid
      return text.compareTo(other.text);
    }
    return field.compareTo(other.field);
  }
  
  /** Resets field and text of an existing Term (field must be interned). */
  final void set(String fld, String txt)
  {
    field = fld;
    text = txt;
  }
  
  public final String toString()
  {
    return field + ":" + text;
  }
  
  // Re-intern the field after deserialization so == comparisons stay valid.
  private void readObject(ObjectInputStream in)
    throws IOException, ClassNotFoundException
  {
    in.defaultReadObject();
    field = StringHelper.intern(field);
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.Term
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.UnicodeUtil.UTF16Result;
import org.apache.lucene.util.UnicodeUtil.UTF8Result;

/**
 * Reusable buffer holding one term (field + UTF-16 text) while scanning a
 * term dictionary, avoiding a Term allocation per entry. Handles both the
 * modern UTF-8 on-disk encoding and the pre-2.4 modified-UTF-8 format.
 */
final class TermBuffer
  implements Cloneable
{
  private String field;
  private Term term;                 // cached Term instance, lazily built
  private boolean preUTF8Strings;    // true if terms are stored in the legacy char format
  private boolean dirty;             // true if text may not match bytes (forces full re-encode)
  private UnicodeUtil.UTF16Result text = new UnicodeUtil.UTF16Result();
  private UnicodeUtil.UTF8Result bytes = new UnicodeUtil.UTF8Result();
  
  /** Compares by field first (interned, so identity compare), then by text. */
  public final int compareTo(TermBuffer other)
  {
    // Fixed decompiler artifact: compare against "other"'s field/text,
    // not this buffer's own.
    if (field == other.field) {
      return compareChars(text.result, text.length, other.text.result, other.text.length);
    }
    return field.compareTo(other.field);
  }
  
  /** Lexicographic UTF-16 code-unit comparison of two char ranges. */
  private static final int compareChars(char[] chars1, int len1, char[] chars2, int len2)
  {
    int end = len1 < len2 ? len1 : len2;
    for (int k = 0; k < end; k++)
    {
      char c1 = chars1[k];
      char c2 = chars2[k];
      if (c1 != c2) {
        return c1 - c2;
      }
    }
    return len1 - len2;
  }
  
  /** Call this if the terms index was written with the pre-2.4 char format. */
  void setPreUTF8Strings()
  {
    preUTF8Strings = true;
  }
  
  /**
   * Reads the next term from the input: a shared prefix length, a suffix
   * length, the suffix data, and the field number.
   */
  public final void read(IndexInput input, FieldInfos fieldInfos)
    throws IOException
  {
    term = null; // invalidate cached Term
    int start = input.readVInt();   // length of prefix shared with previous term
    int length = input.readVInt();  // length of the new suffix
    int totalLength = start + length;
    if (preUTF8Strings)
    {
      text.setLength(totalLength);
      input.readChars(text.result, start, length);
    }
    else if (dirty)
    {
      // text was set from a String; rebuild bytes fully before appending.
      UnicodeUtil.UTF16toUTF8(text.result, 0, text.length, bytes);
      bytes.setLength(totalLength);
      input.readBytes(bytes.result, start, length);
      UnicodeUtil.UTF8toUTF16(bytes.result, 0, totalLength, text);
      dirty = false;
    }
    else
    {
      // Incrementally convert only the newly read UTF-8 suffix.
      bytes.setLength(totalLength);
      input.readBytes(bytes.result, start, length);
      UnicodeUtil.UTF8toUTF16(bytes.result, start, length, text);
    }
    field = fieldInfos.fieldName(input.readVInt());
  }
  
  /** Sets this buffer to the given term (null resets the buffer). */
  public final void set(Term term)
  {
    if (term == null)
    {
      reset();
      return;
    }
    String termText = term.text();
    int termLen = termText.length();
    text.setLength(termLen);
    termText.getChars(0, termLen, text.result, 0);
    dirty = true; // bytes no longer match text
    field = term.field();
    this.term = term;
  }
  
  /** Copies the state of another buffer into this one. */
  public final void set(TermBuffer other)
  {
    // Fixed decompiler artifact: copy from "other", not from ourselves.
    text.copyText(other.text);
    dirty = true;
    field = other.field;
    term = other.term;
  }
  
  public void reset()
  {
    field = null;
    text.setLength(0);
    term = null;
    dirty = true;
  }
  
  /** Returns (and caches) a Term for the current field/text, or null if unset. */
  public Term toTerm()
  {
    if (field == null) {
      return null;
    }
    if (term == null) {
      term = new Term(field, new String(text.result, 0, text.length), false);
    }
    return term;
  }
  
  protected Object clone()
  {
    TermBuffer clone = null;
    try
    {
      clone = (TermBuffer)super.clone();
    }
    catch (CloneNotSupportedException e) {}
    // Fixed decompiler artifact: the fresh buffers belong to the clone,
    // not to this instance (the "clone." qualifiers had been dropped).
    clone.dirty = true;
    clone.bytes = new UnicodeUtil.UTF8Result();
    clone.text = new UnicodeUtil.UTF16Result();
    clone.text.copyText(text);
    return clone;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermBuffer
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

/**
 * Enumerates the documents that contain a term, in document-number order,
 * along with the term's frequency in each document.
 */
public interface TermDocs
{
  /** Positions this enumeration at the data for the given term. */
  void seek(Term term)
    throws IOException;
  
  /** Positions this enumeration at the current term of the given TermEnum. */
  void seek(TermEnum termEnum)
    throws IOException;
  
  /** Returns the current document number. */
  int doc();
  
  /** Returns the frequency of the term within the current document. */
  int freq();
  
  /** Advances to the next document; returns false when exhausted. */
  boolean next()
    throws IOException;
  
  /**
   * Bulk-reads document numbers and frequencies into the given arrays,
   * returning the number of entries filled.
   */
  int read(int[] docs, int[] freqs)
    throws IOException;
  
  /** Skips to the first document whose number is >= target. */
  boolean skipTo(int target)
    throws IOException;
  
  /** Releases any resources held by this enumeration. */
  void close()
    throws IOException;
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermDocs
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

/**
 * Abstract enumeration of terms in an index. Term enumerations are always
 * ordered by {@link Term#compareTo}; each term is greater than all that
 * precede it.
 */
public abstract class TermEnum
{
  /** Advances to the next term; returns false when exhausted. */
  public abstract boolean next()
    throws IOException;
  
  /** Returns the current term. */
  public abstract Term term();
  
  /** Returns the document frequency of the current term. */
  public abstract int docFreq();
  
  /** Releases any resources held by this enumeration. */
  public abstract void close()
    throws IOException;
  
  /**
   * Skips terms until one at least as large as the target is reached,
   * returning false if the enumeration is exhausted first.
   *
   * @deprecated This linear scan is slow; use IndexReader.terms(Term)
   *             for a positioned enumeration instead.
   */
  public boolean skipTo(Term target)
    throws IOException
  {
    while (next())
    {
      if (target.compareTo(term()) <= 0) {
        return true;
      }
    }
    return false;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermEnum
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

/**
 * Provides access to a field's term vector: the terms that occurred in one
 * field of one document, with their per-document frequencies.
 */
public interface TermFreqVector
{
  /** Returns the name of the field this vector is associated with. */
  String getField();
  
  /** Returns the number of terms in the vector. */
  int size();
  
  /** Returns the term texts, in lexicographic order. */
  String[] getTerms();
  
  /** Returns frequencies parallel to {@link #getTerms()}. */
  int[] getTermFrequencies();
  
  /** Returns the index of the given term, or -1 if not present. */
  int indexOf(String term);
  
  /**
   * Returns, for {@code len} terms of {@code terms} starting at
   * {@code start}, their indexes in this vector (-1 where absent).
   */
  int[] indexesOf(String[] terms, int start, int len);
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermFreqVector
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

final class TermInfo
{
  int docFreq = 0;
  long freqPointer = 0L;
  long proxPointer = 0L;
  int skipOffset;
  
  TermInfo() {}
  
  TermInfo(int df, long fp, long pp)
  {
    docFreq = df;
    freqPointer = fp;
    proxPointer = pp;
  }
  
  TermInfo(TermInfo ti)
  {
    docFreq = docFreq;
    freqPointer = freqPointer;
    proxPointer = proxPointer;
    skipOffset = skipOffset;
  }
  
  final void set(int docFreq, long freqPointer, long proxPointer, int skipOffset)
  {
    this.docFreq = docFreq;
    this.freqPointer = freqPointer;
    this.proxPointer = proxPointer;
    this.skipOffset = skipOffset;
  }
  
  final void set(TermInfo ti)
  {
    docFreq = docFreq;
    freqPointer = freqPointer;
    proxPointer = proxPointer;
    skipOffset = skipOffset;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermInfo
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

// Synthetic placeholder emitted by the decompiler for an anonymous inner
// class of TermInfosReader; it carries no members of its own.
class TermInfosReader$1 {}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermInfosReader.1
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import org.apache.lucene.util.cache.Cache;

final class TermInfosReader$ThreadResources
{
  SegmentTermEnum termEnum;
  Cache termInfoCache;
  
  private TermInfosReader$ThreadResour
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56

Further reading...

For more information on Java 1.5 Tiger, you may find Java 1.5 Tiger, A developer's Notebook by D. Flanagan and B. McLaughlin from O'Reilly of interest.

New!JAR listings


Copyright 2006-2017. Infinite Loop Ltd