lucene-core-2.9.4-dev

    int lastOffset;
    int lastPosition;
  }
  
  class PerDoc
    extends DocumentsWriter.DocWriter
  {
    final DocumentsWriter.PerDocBuffer buffer = docWriter.newPerDocBuffer();
    RAMOutputStream perDocTvf = new RAMOutputStream(buffer);
    int numVectorFields;
    int[] fieldNumbers = new int[1];
    long[] fieldPointers = new long[1];
    
    PerDoc() {}
    
    void reset()
    {
      perDocTvf.reset();
      buffer.recycle();
      numVectorFields = 0;
    }
    
    void abort()
    {
      reset();
      free(this);
    }
    
    void addField(int fieldNumber)
    {
      if (numVectorFields == fieldNumbers.length)
      {
        fieldNumbers = ArrayUtil.grow(fieldNumbers);
        fieldPointers = ArrayUtil.grow(fieldPointers);
      }
      fieldNumbers[numVectorFields] = fieldNumber;
      fieldPointers[numVectorFields] = perDocTvf.getFilePointer();
      numVectorFields += 1;
    }
    
    public long sizeInBytes()
    {
      return buffer.getSizeInBytes();
    }
    
    public void finish()
      throws IOException
    {
      finishDocument(this);
    }
  }
  
  int bytesPerPosting()
  {
    return 32;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermVectorsTermsWriter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.UnicodeUtil.UTF8Result;

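// Per-field term vector writer: for each field that stores term vectors, this
// consumer buffers the field's terms (prefix-compressed UTF-8), frequencies,
// and optional positions/offsets into the per-document tvf buffer held by
// TermVectorsTermsWriterPerThread.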
final class TermVectorsTermsWriterPerField
  extends TermsHashConsumerPerField
{
  final TermVectorsTermsWriterPerThread perThread;
  final TermsHashPerField termsHashPerField;
  final TermVectorsTermsWriter termsWriter;
  final FieldInfo fieldInfo;
  final DocumentsWriter.DocState docState;
  final FieldInvertState fieldState;
  boolean doVectors;
  boolean doVectorPositions;
  boolean doVectorOffsets;
  int maxNumPostings;
  OffsetAttribute offsetAttribute = null;
  
  public TermVectorsTermsWriterPerField(TermsHashPerField termsHashPerField, TermVectorsTermsWriterPerThread perThread, FieldInfo fieldInfo)
  {
    this.termsHashPerField = termsHashPerField;
    this.perThread = perThread;
    termsWriter = perThread.termsWriter;
    this.fieldInfo = fieldInfo;
    docState = termsHashPerField.docState;
    fieldState = termsHashPerField.fieldState;
  }
  
  int getStreamCount()
  {
    return 2;
  }
  
  boolean start(Fieldable[] fields, int count)
  {
    doVectors = false;
    doVectorPositions = false;
    doVectorOffsets = false;
    for (int i = 0; i < count; i++)
    {
      Fieldable field = fields[i];
      if ((field.isIndexed()) && (field.isTermVectorStored()))
      {
        doVectors = true;
        doVectorPositions |= field.isStorePositionWithTermVector();
        doVectorOffsets |= field.isStoreOffsetWithTermVector();
      }
    }
    if (doVectors)
    {
      if (perThread.doc == null)
      {
        perThread.doc = termsWriter.getPerDoc();
        perThread.doc.docID = docState.docID;
        assert (perThread.doc.numVectorFields == 0);
        assert (0L == perThread.doc.perDocTvf.length());
        assert (0L == perThread.doc.perDocTvf.getFilePointer());
      }
      assert (perThread.doc.docID == docState.docID);
      if (termsHashPerField.numPostings != 0)
      {
        termsHashPerField.reset();
        perThread.termsHashPerThread.reset(false);
      }
    }
    return doVectors;
  }
  
  public void abort() {}
  
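  // Serializes this field's term vector into the per-doc tvf buffer: the term
  // count, a bits byte (0x1 = positions, 0x2 = offsets), then for each term the
  // shared-prefix length, suffix length and suffix bytes (UTF-8, delta-coded
  // against the previous term), the term frequency, and finally the buffered
  // position and offset byte slices.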
  void finish()
    throws IOException
  {
    assert (docState.testPoint("TermVectorsTermsWriterPerField.finish start"));
    
    int numPostings = termsHashPerField.numPostings;
    
    assert (numPostings >= 0);
    if ((!doVectors) || (numPostings == 0)) {
      return;
    }
    if (numPostings > maxNumPostings) {
      maxNumPostings = numPostings;
    }
    IndexOutput tvf = perThread.doc.perDocTvf;
    
    assert (fieldInfo.storeTermVector);
    assert (perThread.vectorFieldsInOrder(fieldInfo));
    
    perThread.doc.addField(termsHashPerField.fieldInfo.number);
    
    RawPostingList[] postings = termsHashPerField.sortPostings();
    
    tvf.writeVInt(numPostings);
    byte bits = 0;
    if (doVectorPositions) {
      bits = (byte)(bits | 0x1);
    }
    if (doVectorOffsets) {
      bits = (byte)(bits | 0x2);
    }
    tvf.writeByte(bits);
    
    int encoderUpto = 0;
    int lastTermBytesCount = 0;
    
    ByteSliceReader reader = perThread.vectorSliceReader;
    char[][] charBuffers = perThread.termsHashPerThread.charPool.buffers;
    for (int j = 0; j < numPostings; j++)
    {
      TermVectorsTermsWriter.PostingList posting = (TermVectorsTermsWriter.PostingList)postings[j];
      int freq = posting.freq;
      
      char[] text2 = charBuffers[(posting.textStart >> 14)];
      int start2 = posting.textStart & 0x3FFF;
      
      UnicodeUtil.UTF8Result utf8Result = perThread.utf8Results[encoderUpto];
      
      UnicodeUtil.UTF16toUTF8(text2, start2, utf8Result);
      int termBytesCount = utf8Result.length;
      
      int prefix = 0;
      if (j > 0)
      {
        byte[] lastTermBytes = perThread.utf8Results[(1 - encoderUpto)].result;
        byte[] termBytes = perThread.utf8Results[encoderUpto].result;
        while ((prefix < lastTermBytesCount) && (prefix < termBytesCount) && 
          (lastTermBytes[prefix] == termBytes[prefix])) {
          prefix++;
        }
      }
      encoderUpto = 1 - encoderUpto;
      lastTermBytesCount = termBytesCount;
      
      int suffix = termBytesCount - prefix;
      tvf.writeVInt(prefix);
      tvf.writeVInt(suffix);
      tvf.writeBytes(utf8Result.result, prefix, suffix);
      tvf.writeVInt(freq);
      if (doVectorPositions)
      {
        termsHashPerField.initReader(reader, posting, 0);
        reader.writeTo(tvf);
      }
      if (doVectorOffsets)
      {
        termsHashPerField.initReader(reader, posting, 1);
        reader.writeTo(tvf);
      }
    }
    termsHashPerField.reset();
    
    perThread.termsHashPerThread.reset(false);
  }
  
  void shrinkHash()
  {
    termsHashPerField.shrinkHash(maxNumPostings);
    maxNumPostings = 0;
  }
  
  void start(Fieldable f)
  {
    if (doVectorOffsets) {
      offsetAttribute = ((OffsetAttribute)fieldState.attributeSource.addAttribute(OffsetAttribute.class));
    } else {
      offsetAttribute = null;
    }
  }
  
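  // newTerm/addTerm write delta-coded data into two byte streams per posting:
  // stream 0 holds position deltas, stream 1 holds (startOffset delta,
  // endOffset - startOffset) pairs; lastOffset/lastPosition on the posting
  // record the previous values the deltas are taken against.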
  void newTerm(RawPostingList p0)
  {
    assert (docState.testPoint("TermVectorsTermsWriterPerField.newTerm start"));
    
    TermVectorsTermsWriter.PostingList p = (TermVectorsTermsWriter.PostingList)p0;
    
    p.freq = 1;
    if (doVectorOffsets)
    {
      int startOffset = fieldState.offset + offsetAttribute.startOffset();
      int endOffset = fieldState.offset + offsetAttribute.endOffset();
      
      termsHashPerField.writeVInt(1, startOffset);
      termsHashPerField.writeVInt(1, endOffset - startOffset);
      p.lastOffset = endOffset;
    }
    if (doVectorPositions)
    {
      termsHashPerField.writeVInt(0, fieldState.position);
      p.lastPosition = fieldState.position;
    }
  }
  
  void addTerm(RawPostingList p0)
  {
    assert (docState.testPoint("TermVectorsTermsWriterPerField.addTerm start"));
    
    TermVectorsTermsWriter.PostingList p = (TermVectorsTermsWriter.PostingList)p0;
    p.freq += 1;
    if (doVectorOffsets)
    {
      int startOffset = fieldState.offset + offsetAttribute.startOffset();
      int endOffset = fieldState.offset + offsetAttribute.endOffset();
      
      termsHashPerField.writeVInt(1, startOffset - p.lastOffset);
      termsHashPerField.writeVInt(1, endOffset - startOffset);
      p.lastOffset = endOffset;
    }
    if (doVectorPositions)
    {
      termsHashPerField.writeVInt(0, fieldState.position - p.lastPosition);
      p.lastPosition = fieldState.position;
    }
  }
  
  void skippingLongTerm() {}
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermVectorsTermsWriterPerField
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import org.apache.lucene.util.UnicodeUtil.UTF8Result;

final class TermVectorsTermsWriterPerThread
  extends TermsHashConsumerPerThread
{
  final TermVectorsTermsWriter termsWriter;
  final TermsHashPerThread termsHashPerThread;
  final DocumentsWriter.DocState docState;
  TermVectorsTermsWriter.PerDoc doc;
  
  public TermVectorsTermsWriterPerThread(TermsHashPerThread termsHashPerThread, TermVectorsTermsWriter termsWriter)
  {
    this.termsWriter = termsWriter;
    this.termsHashPerThread = termsHashPerThread;
    docState = termsHashPerThread.docState;
  }
  
  final ByteSliceReader vectorSliceReader = new ByteSliceReader();
  final UnicodeUtil.UTF8Result[] utf8Results = { new UnicodeUtil.UTF8Result(), new UnicodeUtil.UTF8Result() };
  String lastVectorFieldName;
  
  public void startDocument()
  {
    assert (clearLastVectorFieldName());
    if (doc != null)
    {
      doc.reset();
      doc.docID = docState.docID;
    }
  }
  
  public DocumentsWriter.DocWriter finishDocument()
  {
    try
    {
      return doc;
    }
    finally
    {
      doc = null;
    }
  }
  
  public TermsHashConsumerPerField addField(TermsHashPerField termsHashPerField, FieldInfo fieldInfo)
  {
    return new TermVectorsTermsWriterPerField(termsHashPerField, this, fieldInfo);
  }
  
  public void abort()
  {
    if (doc != null)
    {
      doc.abort();
      doc = null;
    }
  }
  
  final boolean clearLastVectorFieldName()
  {
    lastVectorFieldName = null;
    return true;
  }
  
  final boolean vectorFieldsInOrder(FieldInfo fi)
  {
    try
    {
      if (lastVectorFieldName != null) {
        return lastVectorFieldName.compareTo(fi.name) < 0;
      }
      }
      return true;
    }
    finally
    {
      lastVectorFieldName = fi.name;
    }
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermVectorsTermsWriterPerThread
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.UnicodeUtil.UTF8Result;

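// Writes a segment's term vector files: the index (.tvx), the per-document
// file (.tvd) and the per-field file (.tvf). Each file starts with a format
// version int; the literal 4 below appears to be the decompiler-inlined value
// of the current term vectors format constant.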
final class TermVectorsWriter
{
  private IndexOutput tvx = null;
  private IndexOutput tvd = null;
  private IndexOutput tvf = null;
  final UnicodeUtil.UTF8Result[] utf8Results = { new UnicodeUtil.UTF8Result(), new UnicodeUtil.UTF8Result() };
  private FieldInfos fieldInfos;
  
  public TermVectorsWriter(Directory directory, String segment, FieldInfos fieldInfos)
    throws IOException
  {
    tvx = directory.createOutput(segment + "." + "tvx");
    tvx.writeInt(4);
    tvd = directory.createOutput(segment + "." + "tvd");
    tvd.writeInt(4);
    tvf = directory.createOutput(segment + "." + "tvf");
    tvf.writeInt(4);
    
    this.fieldInfos = fieldInfos;
  }
  
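  // Appends the term vectors of one document: writes two longs to tvx (the
  // current tvd and tvf file pointers), then the field count and field numbers
  // to tvd and per-field term data to tvf; the field pointers are written to
  // tvd as deltas in a second pass once all fields have been serialized.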
  public final void addAllDocVectors(TermFreqVector[] vectors)
    throws IOException
  {
    tvx.writeLong(tvd.getFilePointer());
    tvx.writeLong(tvf.getFilePointer());
    if (vectors != null)
    {
      int numFields = vectors.length;
      tvd.writeVInt(numFields);
      
      long[] fieldPointers = new long[numFields];
      for (int i = 0; i < numFields; i++)
      {
        fieldPointers[i] = tvf.getFilePointer();
        
        int fieldNumber = fieldInfos.fieldNumber(vectors[i].getField());
        
        tvd.writeVInt(fieldNumber);
        
        int numTerms = vectors[i].size();
        tvf.writeVInt(numTerms);
        byte bits;
        TermPositionVector tpVector;
        boolean storePositions;
        boolean storeOffsets;
        if ((vectors[i] instanceof TermPositionVector))
        {
          tpVector = (TermPositionVector)vectors[i];
          storePositions = (tpVector.size() > 0) && (tpVector.getTermPositions(0) != null);
          storeOffsets = (tpVector.size() > 0) && (tpVector.getOffsets(0) != null);
          bits = (byte)((storePositions ? 1 : 0) + (storeOffsets ? 2 : 0));
        }
        else
        {
          tpVector = null;
          bits = 0;
          storePositions = false;
          storeOffsets = false;
        }
        tvf.writeVInt(bits);
        
        String[] terms = vectors[i].getTerms();
        int[] freqs = vectors[i].getTermFrequencies();
        
        int utf8Upto = 0;
        utf8Results[1].length = 0;
        for (int j = 0; j < numTerms; j++)
        {
          UnicodeUtil.UTF16toUTF8(terms[j], 0, terms[j].length(), utf8Results[utf8Upto]);
          
          int start = StringHelper.bytesDifference(utf8Results[(1 - utf8Upto)].result, utf8Results[(1 - utf8Upto)].length, utf8Results[utf8Upto].result, utf8Results[utf8Upto].length);
          
          int length = utf8Results[utf8Upto].length - start;
          tvf.writeVInt(start);
          tvf.writeVInt(length);
          tvf.writeBytes(utf8Results[utf8Upto].result, start, length);
          utf8Upto = 1 - utf8Upto;
          
          int termFreq = freqs[j];
          
          tvf.writeVInt(termFreq);
          if (storePositions)
          {
            int[] positions = tpVector.getTermPositions(j);
            if (positions == null) {
              throw new IllegalStateException("Trying to write positions that are null!");
            }
            assert (positions.length == termFreq);
            
            int lastPosition = 0;
            for (int k = 0; k < positions.length; k++)
            {
              int position = positions[k];
              tvf.writeVInt(position - lastPosition);
              lastPosition = position;
            }
          }
          if (storeOffsets)
          {
            TermVectorOffsetInfo[] offsets = tpVector.getOffsets(j);
            if (offsets == null) {
              throw new IllegalStateException("Trying to write offsets that are null!");
            }
            assert (offsets.length == termFreq);
            
            int lastEndOffset = 0;
            for (int k = 0; k < offsets.length; k++)
            {
              int startOffset = offsets[k].getStartOffset();
              int endOffset = offsets[k].getEndOffset();
              tvf.writeVInt(startOffset - lastEndOffset);
              tvf.writeVInt(endOffset - startOffset);
              lastEndOffset = endOffset;
            }
          }
        }
      }
      if (numFields > 1)
      {
        long lastFieldPointer = fieldPointers[0];
        for (int i = 1; i < numFields; i++)
        {
          long fieldPointer = fieldPointers[i];
          tvd.writeVLong(fieldPointer - lastFieldPointer);
          lastFieldPointer = fieldPointer;
        }
      }
    }
    else
    {
      tvd.writeVInt(0);
    }
  }
  
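  // Bulk-copies already-serialized tvd/tvf bytes from another segment's reader,
  // writing only fresh tvx pointers computed from the supplied per-document
  // lengths (so merges can avoid re-serializing term vectors).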
  final void addRawDocuments(TermVectorsReader reader, int[] tvdLengths, int[] tvfLengths, int numDocs)
    throws IOException
  {
    long tvdPosition = tvd.getFilePointer();
    long tvfPosition = tvf.getFilePointer();
    long tvdStart = tvdPosition;
    long tvfStart = tvfPosition;
    for (int i = 0; i < numDocs; i++)
    {
      tvx.writeLong(tvdPosition);
      tvdPosition += tvdLengths[i];
      tvx.writeLong(tvfPosition);
      tvfPosition += tvfLengths[i];
    }
    tvd.copyBytes(reader.getTvdStream(), tvdPosition - tvdStart);
    tvf.copyBytes(reader.getTvfStream(), tvfPosition - tvfStart);
    assert (tvd.getFilePointer() == tvdPosition);
    assert (tvf.getFilePointer() == tvfPosition);
  }
  
  final void close()
    throws IOException
  {
    IOException keep = null;
    if (tvx != null) {
      try
      {
        tvx.close();
      }
      catch (IOException e)
      {
        if (keep == null) {
          keep = e;
        }
      }
    }
    if (tvd != null) {
      try
      {
        tvd.close();
      }
      catch (IOException e)
      {
        if (keep == null) {
          keep = e;
        }
      }
    }
    if (tvf != null) {
      try
      {
        tvf.close();
      }
      catch (IOException e)
      {
        if (keep == null) {
          keep = e;
        }
      }
    }
    if (keep != null) {
      throw ((IOException)keep.fillInStackTrace());
    }
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermVectorsWriter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.lucene.util.ArrayUtil;

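// Shared inverting consumer that owns the recycled RawPostingList free list.
// Per-thread/per-field hashes draw postings from this pool and return them on
// reset; a second TermsHash can be chained via nextTermsHash so the same
// tokens also feed a downstream consumer (the term vectors writer).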
final class TermsHash
  extends InvertedDocConsumer
{
  final TermsHashConsumer consumer;
  final TermsHash nextTermsHash;
  final int bytesPerPosting;
  final int postingsFreeChunk;
  final DocumentsWriter docWriter;
  private RawPostingList[] postingsFreeList = new RawPostingList[1];
  private int postingsFreeCount;
  private int postingsAllocCount;
  boolean trackAllocations;
  
  public TermsHash(DocumentsWriter docWriter, boolean trackAllocations, TermsHashConsumer consumer, TermsHash nextTermsHash)
  {
    this.docWriter = docWriter;
    this.consumer = consumer;
    this.nextTermsHash = nextTermsHash;
    this.trackAllocations = trackAllocations;
    
    bytesPerPosting = (consumer.bytesPerPosting() + 4 * DocumentsWriter.POINTER_NUM_BYTE);
    postingsFreeChunk = (32768 / bytesPerPosting);
  }
  
  InvertedDocConsumerPerThread addThread(DocInverterPerThread docInverterPerThread)
  {
    return new TermsHashPerThread(docInverterPerThread, this, nextTermsHash, null);
  }
  
  TermsHashPerThread addThread(DocInverterPerThread docInverterPerThread, TermsHashPerThread primaryPerThread)
  {
    return new TermsHashPerThread(docInverterPerThread, this, nextTermsHash, primaryPerThread);
  }
  
  void setFieldInfos(FieldInfos fieldInfos)
  {
    this.fieldInfos = fieldInfos;
    consumer.setFieldInfos(fieldInfos);
  }
  
  public void abort()
  {
    consumer.abort();
    if (nextTermsHash != null) {
      nextTermsHash.abort();
    }
  }
  
  void shrinkFreePostings(Map threadsAndFields, SegmentWriteState state)
  {
    assert (postingsFreeCount == postingsAllocCount) : (Thread.currentThread().getName() + ": postingsFreeCount=" + postingsFreeCount + " postingsAllocCount=" + postingsAllocCount + " consumer=" + consumer);
    
    int newSize = 1;
    if (1 != postingsFreeList.length)
    {
      if (postingsFreeCount > 1)
      {
        if (trackAllocations) {
          docWriter.bytesAllocated(-(postingsFreeCount - 1) * bytesPerPosting);
        }
        postingsFreeCount = 1;
        postingsAllocCount = 1;
      }
      RawPostingList[] newArray = new RawPostingList[1];
      System.arraycopy(postingsFreeList, 0, newArray, 0, postingsFreeCount);
      postingsFreeList = newArray;
    }
  }
  
  synchronized void closeDocStore(SegmentWriteState state)
    throws IOException
  {
    consumer.closeDocStore(state);
    if (nextTermsHash != null) {
      nextTermsHash.closeDocStore(state);
    }
  }
  
  synchronized void flush(Map threadsAndFields, SegmentWriteState state)
    throws IOException
  {
    Map childThreadsAndFields = new HashMap();
    Map nextThreadsAndFields;
    if (nextTermsHash != null) {
      nextThreadsAndFields = new HashMap();
    } else {
      nextThreadsAndFields = null;
    }
    Iterator it = threadsAndFields.entrySet().iterator();
    while (it.hasNext())
    {
      Map.Entry entry = (Map.Entry)it.next();
      
      TermsHashPerThread perThread = (TermsHashPerThread)entry.getKey();
      
      Collection fields = (Collection)entry.getValue();
      
      Iterator fieldsIt = fields.iterator();
      Collection childFields = new HashSet();
      Collection nextChildFields;
      if (nextTermsHash != null) {
        nextChildFields = new HashSet();
      } else {
        nextChildFields = null;
      }
      while (fieldsIt.hasNext())
      {
        TermsHashPerField perField = (TermsHashPerField)fieldsIt.next();
        childFields.add(perField.consumer);
        if (nextTermsHash != null) {
          nextChildFields.add(perField.nextPerField);
        }
      }
      childThreadsAndFields.put(perThread.consumer, childFields);
      if (nextTermsHash != null) {
        nextThreadsAndFields.put(perThread.nextPerThread, nextChildFields);
      }
    }
    consumer.flush(childThreadsAndFields, state);
    
    shrinkFreePostings(threadsAndFields, state);
    if (nextTermsHash != null) {
      nextTermsHash.flush(nextThreadsAndFields, state);
    }
  }
  
  public boolean freeRAM()
  {
    if (!trackAllocations) {
      return false;
    }
    long bytesFreed = 0L;
    boolean any;
    synchronized (this)
    {
      int numToFree;
      if (postingsFreeCount >= postingsFreeChunk) {
        numToFree = postingsFreeChunk;
      } else {
        numToFree = postingsFreeCount;
      }
      any = numToFree > 0;
      if (any)
      {
        Arrays.fill(postingsFreeList, postingsFreeCount - numToFree, postingsFreeCount, null);
        postingsFreeCount -= numToFree;
        postingsAllocCount -= numToFree;
        bytesFreed = -numToFree * bytesPerPosting;
        any = true;
      }
    }
    if (any) {
      docWriter.bytesAllocated(bytesFreed);
    }
    if (nextTermsHash != null) {
      any |= nextTermsHash.freeRAM();
    }
    return any;
  }
  
  public synchronized void recyclePostings(RawPostingList[] postings, int numPostings)
  {
    assert (postings.length >= numPostings);
    
    assert (postingsFreeCount + numPostings <= postingsFreeList.length);
    System.arraycopy(postings, 0, postingsFreeList, postingsFreeCount, numPostings);
    postingsFreeCount += numPostings;
  }
  
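  // Hands out postings to a per-field hash: recycled instances are taken from
  // the tail of the free list first, and any shortfall is newly allocated by
  // the consumer, growing the free list and the RAM accounting accordingly.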
  public synchronized void getPostings(RawPostingList[] postings)
  {
    assert (docWriter.writer.testPoint("TermsHash.getPostings start"));
    
    assert (postingsFreeCount <= postingsFreeList.length);
    assert (postingsFreeCount <= postingsAllocCount) : ("postingsFreeCount=" + postingsFreeCount + " postingsAllocCount=" + postingsAllocCount);
    int numToCopy;
    if (postingsFreeCount < postings.length) {
      numToCopy = postingsFreeCount;
    } else {
      numToCopy = postings.length;
    }
    int start = postingsFreeCount - numToCopy;
    assert (start >= 0);
    assert (start + numToCopy <= postingsFreeList.length);
    assert (numToCopy <= postings.length);
    System.arraycopy(postingsFreeList, start, postings, 0, numToCopy);
    if (numToCopy != postings.length)
    {
      int extra = postings.length - numToCopy;
      int newPostingsAllocCount = postingsAllocCount + extra;
      
      consumer.createPostings(postings, numToCopy, extra);
      assert (docWriter.writer.testPoint("TermsHash.getPostings after create"));
      postingsAllocCount += extra;
      if (trackAllocations) {
        docWriter.bytesAllocated(extra * bytesPerPosting);
      }
      if (newPostingsAllocCount > postingsFreeList.length) {
        postingsFreeList = new RawPostingList[ArrayUtil.getNextSize(newPostingsAllocCount)];
      }
    }
    postingsFreeCount -= numToCopy;
    if (trackAllocations) {
      docWriter.bytesUsed(postings.length * bytesPerPosting);
    }
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermsHash
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Map;

abstract class TermsHashConsumer
{
  FieldInfos fieldInfos;
  
  abstract int bytesPerPosting();
  
  abstract void createPostings(RawPostingList[] paramArrayOfRawPostingList, int paramInt1, int paramInt2);
  
  abstract TermsHashConsumerPerThread addThread(TermsHashPerThread paramTermsHashPerThread);
  
  abstract void flush(Map paramMap, SegmentWriteState paramSegmentWriteState)
    throws IOException;
  
  abstract void abort();
  
  abstract void closeDocStore(SegmentWriteState paramSegmentWriteState)
    throws IOException;
  
  void setFieldInfos(FieldInfos fieldInfos)
  {
    this.fieldInfos = fieldInfos;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermsHashConsumer
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.document.Fieldable;

abstract class TermsHashConsumerPerField
{
  abstract boolean start(Fieldable[] paramArrayOfFieldable, int paramInt)
    throws IOException;
  
  abstract void finish()
    throws IOException;
  
  abstract void skippingLongTerm()
    throws IOException;
  
  abstract void start(Fieldable paramFieldable);
  
  abstract void newTerm(RawPostingList paramRawPostingList)
    throws IOException;
  
  abstract void addTerm(RawPostingList paramRawPostingList)
    throws IOException;
  
  abstract int getStreamCount();
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermsHashConsumerPerField
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

abstract class TermsHashConsumerPerThread
{
  abstract void startDocument()
    throws IOException;
  
  abstract DocumentsWriter.DocWriter finishDocument()
    throws IOException;
  
  public abstract TermsHashConsumerPerField addField(TermsHashPerField paramTermsHashPerField, FieldInfo paramFieldInfo);
  
  public abstract void abort();
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermsHashConsumerPerThread
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Arrays;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.util.AttributeSource;

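// Per-field postings hash with open addressing. Term text lives in a shared
// char pool, terminated by the sentinel 0xFFFF; each posting records a
// textStart offset into that pool plus intStart/byteStart offsets into
// parallel int and byte pools where each consumer stream's data is appended.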
final class TermsHashPerField
  extends InvertedDocConsumerPerField
{
  final TermsHashConsumerPerField consumer;
  final TermsHashPerField nextPerField;
  final TermsHashPerThread perThread;
  final DocumentsWriter.DocState docState;
  final FieldInvertState fieldState;
  TermAttribute termAtt;
  final CharBlockPool charPool;
  final IntBlockPool intPool;
  final ByteBlockPool bytePool;
  final int streamCount;
  final int numPostingInt;
  final FieldInfo fieldInfo;
  boolean postingsCompacted;
  int numPostings;
  private int postingsHashSize = 4;
  private int postingsHashHalfSize = postingsHashSize / 2;
  private int postingsHashMask = postingsHashSize - 1;
  private RawPostingList[] postingsHash = new RawPostingList[postingsHashSize];
  private RawPostingList p;
  private boolean doCall;
  private boolean doNextCall;
  int[] intUptos;
  int intUptoStart;
  
  public TermsHashPerField(DocInverterPerField docInverterPerField, TermsHashPerThread perThread, TermsHashPerThread nextPerThread, FieldInfo fieldInfo)
  {
    this.perThread = perThread;
    intPool = perThread.intPool;
    charPool = perThread.charPool;
    bytePool = perThread.bytePool;
    docState = perThread.docState;
    fieldState = docInverterPerField.fieldState;
    consumer = perThread.consumer.addField(this, fieldInfo);
    streamCount = consumer.getStreamCount();
    numPostingInt = (2 * streamCount);
    this.fieldInfo = fieldInfo;
    if (nextPerThread != null) {
      nextPerField = ((TermsHashPerField)nextPerThread.addField(docInverterPerField, fieldInfo));
    } else {
      nextPerField = null;
    }
  }
  
  void shrinkHash(int targetSize)
  {
    assert ((postingsCompacted) || (numPostings == 0));
    
    int newSize = 4;
    if (4 != postingsHash.length)
    {
      postingsHash = new RawPostingList[4];
      postingsHashSize = 4;
      postingsHashHalfSize = 2;
      postingsHashMask = 3;
    }
    Arrays.fill(postingsHash, null);
  }
  
  public void reset()
  {
    if (!postingsCompacted) {
      compactPostings();
    }
    assert (numPostings <= postingsHash.length);
    if (numPostings > 0)
    {
      perThread.termsHash.recyclePostings(postingsHash, numPostings);
      Arrays.fill(postingsHash, 0, numPostings, null);
      numPostings = 0;
    }
    postingsCompacted = false;
    if (nextPerField != null) {
      nextPerField.reset();
    }
  }
  
  public synchronized void abort()
  {
    reset();
    if (nextPerField != null) {
      nextPerField.abort();
    }
  }
  
  public void initReader(ByteSliceReader reader, RawPostingList p, int stream)
  {
    assert (stream < streamCount);
    int[] ints = intPool.buffers[(p.intStart >> 13)];
    int upto = p.intStart & 0x1FFF;
    reader.init(bytePool, p.byteStart + stream * ByteBlockPool.FIRST_LEVEL_SIZE, ints[(upto + stream)]);
  }
  
  private synchronized void compactPostings()
  {
    int upto = 0;
    for (int i = 0; i < postingsHashSize; i++) {
      if (postingsHash[i] != null)
      {
        if (upto < i)
        {
          postingsHash[upto] = postingsHash[i];
          postingsHash[i] = null;
        }
        upto++;
      }
    }
    assert (upto == numPostings);
    postingsCompacted = true;
  }
  
  public RawPostingList[] sortPostings()
  {
    compactPostings();
    quickSort(postingsHash, 0, numPostings - 1);
    return postingsHash;
  }
  
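  // In-place quicksort over the compacted postings, ordered by term text via
  // comparePostings; uses a median-of-three pivot and handles the two-element
  // case explicitly.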
  void quickSort(RawPostingList[] postings, int lo, int hi)
  {
    if (lo >= hi) {
      return;
    }
    if (hi == 1 + lo)
    {
      if (comparePostings(postings[lo], postings[hi]) > 0)
      {
        RawPostingList tmp = postings[lo];
        postings[lo] = postings[hi];
        postings[hi] = tmp;
      }
      return;
    }
    int mid = lo + hi >>> 1;
    if (comparePostings(postings[lo], postings[mid]) > 0)
    {
      RawPostingList tmp = postings[lo];
      postings[lo] = postings[mid];
      postings[mid] = tmp;
    }
    if (comparePostings(postings[mid], postings[hi]) > 0)
    {
      RawPostingList tmp = postings[mid];
      postings[mid] = postings[hi];
      postings[hi] = tmp;
      if (comparePostings(postings[lo], postings[mid]) > 0)
      {
        RawPostingList tmp2 = postings[lo];
        postings[lo] = postings[mid];
        postings[mid] = tmp2;
      }
    }
    int left = lo + 1;
    int right = hi - 1;
    if (left >= right) {
      return;
    }
    RawPostingList partition = postings[mid];
    for (;;)
    {
      if (comparePostings(postings[right], partition) > 0)
      {
        right--;
      }
      else
      {
        while ((left < right) && (comparePostings(postings[left], partition) <= 0)) {
          left++;
        }
        if (left >= right) {
          break;
        }
        RawPostingList tmp = postings[left];
        postings[left] = postings[right];
        postings[right] = tmp;
        right--;
      }
    }
    quickSort(postings, lo, left);
    quickSort(postings, left + 1, hi);
  }
  
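  // Compares two postings by their term text in the char pool. Terms are
  // terminated by 0xFFFF (65535), which sorts as end-of-string here: a shorter
  // term compares less than any longer term sharing its prefix.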
  int comparePostings(RawPostingList p1, RawPostingList p2)
  {
    if (p1 == p2) {
      return 0;
    }
    char[] text1 = charPool.buffers[(p1.textStart >> 14)];
    int pos1 = p1.textStart & 0x3FFF;
    char[] text2 = charPool.buffers[(p2.textStart >> 14)];
    int pos2 = p2.textStart & 0x3FFF;
    
    assert ((text1 != text2) || (pos1 != pos2));
    for (;;)
    {
      char c1 = text1[(pos1++)];
      char c2 = text2[(pos2++)];
      if (c1 != c2)
      {
        if (65535 == c2) {
          return 1;
        }
        if (65535 == c1) {
          return -1;
        }
        return c1 - c2;
      }
      assert (c1 != 65535);
    }
  }
  
  private boolean postingEquals(char[] tokenText, int tokenTextLen)
  {
    char[] text = perThread.charPool.buffers[(p.textStart >> 14)];
    assert (text != null);
    int pos = p.textStart & 0x3FFF;
    for (int tokenPos = 0; tokenPos < tokenTextLen; tokenPos++)
    {
      if (tokenText[tokenPos] != text[pos]) {
        return false;
      }
      pos++;
    }
    return 65535 == text[pos];
  }
  
  void start(Fieldable f)
  {
    termAtt = ((TermAttribute)fieldState.attributeSource.addAttribute(TermAttribute.class));
    consumer.start(f);
    if (nextPerField != null) {
      nextPerField.start(f);
    }
  }
  
  boolean start(Fieldable[] fields, int count)
    throws IOException
  {
    doCall = consumer.start(fields, count);
    if (nextPerField != null) {
      doNextCall = nextPerField.start(fields, count);
    }
    return (doCall) || (doNextCall);
  }
  
  public void add(int textStart)
    throws IOException
  {
    int code = textStart;
    
    int hashPos = code & postingsHashMask;
    
    assert (!postingsCompacted);
    
    p = postingsHash[hashPos];
    if ((p != null) && (p.textStart != textStart))
    {
      int inc = (code >> 8) + code | 0x1;
      do
      {
        code += inc;
        hashPos = code & postingsHashMask;
        p = postingsHash[hashPos];
      } while ((p != null) && (p.textStart != textStart));
    }
    if (p == null)
    {
      if (0 == perThread.freePostingsCount) {
        perThread.morePostings();
      }
      p = perThread.freePostings[(--perThread.freePostingsCount)];
      assert (p != null);
      
      p.textStart = textStart;
      
      assert (postingsHash[hashPos] == null);
      postingsHash[hashPos] = p;
      numPostings += 1;
      if (numPostings == postingsHashHalfSize) {
        rehashPostings(2 * postingsHashSize);
      }
      if (numPostingInt + intPool.intUpto > 8192) {
        intPool.nextBuffer();
      }
      if (32768 - bytePool.byteUpto < numPostingInt * ByteBlockPool.FIRST_LEVEL_SIZE) {
        bytePool.nextBuffer();
      }
      intUptos = intPool.buffer;
      intUptoStart = intPool.intUpto;
      intPool.intUpto += streamCount;
      
      p.intStart = (intUptoStart + intPool.intOffset);
      for (int i = 0; i < streamCount; i++)
      {
        int upto = bytePool.newSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
        intUptos[(intUptoStart + i)] = (upto + bytePool.byteOffset);
      }
      p.byteStart = intUptos[intUptoStart];
      
      consumer.newTerm(p);
    }
    else
    {
      intUptos = intPool.buffers[(p.intStart >> 13)];
      intUptoStart = (p.intStart & 0x1FFF);
      consumer.addTerm(p);
    }
  }
  
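  // Primary-chain add: hashes the current token's text (scanning backwards and
  // replacing unpaired surrogates and 0xFFFF with U+FFFD (65533), since 0xFFFF
  // is the terminator), then probes the open-addressed hash with increment
  // ((code >> 8) + code) | 1 until it finds the term or an empty slot. New
  // terms are copied into the char pool and get fresh int/byte slices.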
  void add()
    throws IOException
  {
    assert (!postingsCompacted);
    
    char[] tokenText = termAtt.termBuffer();
    int tokenTextLen = termAtt.termLength();
    
    int downto = tokenTextLen;
    int code = 0;
    while (downto > 0)
    {
      char ch = tokenText[(--downto)];
      if ((ch >= 56320) && (ch <= 57343))
      {
        if (0 == downto)
        {
          ch = tokenText[downto] = 65533;
        }
        else
        {
          char ch2 = tokenText[(downto - 1)];
          if ((ch2 >= 55296) && (ch2 <= 56319))
          {
            code = (code * 31 + ch) * 31 + ch2;
            downto--;
            continue;
          }
          ch = tokenText[downto] = 65533;
        }
      }
      else if ((ch >= 55296) && ((ch <= 56319) || (ch == 65535))) {
        ch = tokenText[downto] = 65533;
      }
      code = code * 31 + ch;
    }
    int hashPos = code & postingsHashMask;
    
    p = postingsHash[hashPos];
    if ((p != null) && (!postingEquals(tokenText, tokenTextLen)))
    {
      int inc = (code >> 8) + code | 0x1;
      do
      {
        code += inc;
        hashPos = code & postingsHashMask;
        p = postingsHash[hashPos];
      } while ((p != null) && (!postingEquals(tokenText, tokenTextLen)));
    }
    if (p == null)
    {
      int textLen1 = 1 + tokenTextLen;
      if (textLen1 + charPool.charUpto > 16384)
      {
        if (textLen1 > 16384)
        {
          if (docState.maxTermPrefix == null) {
            docState.maxTermPrefix = new String(tokenText, 0, 30);
          }
          consumer.skippingLongTerm();
          return;
        }
        charPool.nextBuffer();
      }
      if (0 == perThread.freePostingsCount) {
        perThread.morePostings();
      }
      p = perThread.freePostings[(--perThread.freePostingsCount)];
      assert (p != null);
      
      char[] text = charPool.buffer;
      int textUpto = charPool.charUpto;
      p.textStart = (textUpto + charPool.charOffset);
      charPool.charUpto += textLen1;
      System.arraycopy(tokenText, 0, text, textUpto, tokenTextLen);
      text[(textUpto + tokenTextLen)] = 65535;
      
      assert (postingsHash[hashPos] == null);
      postingsHash[hashPos] = p;
      numPostings += 1;
      if (numPostings == postingsHashHalfSize) {
        rehashPostings(2 * postingsHashSize);
      }
      if (numPostingInt + intPool.intUpto > 8192) {
        intPool.nextBuffer();
      }
      if (32768 - bytePool.byteUpto < numPostingInt * ByteBlockPool.FIRST_LEVEL_SIZE) {
        bytePool.nextBuffer();
      }
      intUptos = intPool.buffer;
      intUptoStart = intPool.intUpto;
      intPool.intUpto += streamCount;
      
      p.intStart = (intUptoStart + intPool.intOffset);
      for (int i = 0; i < streamCount; i++)
      {
        int upto = bytePool.newSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
        intUptos[(intUptoStart + i)] = (upto + bytePool.byteOffset);
      }
      p.byteStart = intUptos[intUptoStart];
      
      consumer.newTerm(p);
    }
    else
    {
      intUptos = intPool.buffers[(p.intStart >> 13)];
      intUptoStart = (p.intStart & 0x1FFF);
      consumer.addTerm(p);
    }
    if (doNextCall) {
      nextPerField.add(p.textStart);
    }
  }
  
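  // Appends one byte to a posting's stream. Streams are stored as linked byte
  // slices in the byte pool: a non-zero byte at the write position marks the
  // end of the current slice, so allocSlice is called to chain a larger slice
  // before writing.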
  void writeByte(int stream, byte b)
  {
    int upto = intUptos[(intUptoStart + stream)];
    byte[] bytes = bytePool.buffers[(upto >> 15)];
    assert (bytes != null);
    int offset = upto & 0x7FFF;
    if (bytes[offset] != 0)
    {
      offset = bytePool.allocSlice(bytes, offset);
      bytes = bytePool.buffer;
      intUptos[(intUptoStart + stream)] = (offset + bytePool.byteOffset);
    }
    bytes[offset] = b;
    intUptos[(intUptoStart + stream)] += 1;
  }
  
  public void writeBytes(int stream, byte[] b, int offset, int len)
  {
    int end = offset + len;
    for (int i = offset; i < end; i++) {
      writeByte(stream, b[i]);
    }
  }
  
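  // Standard Lucene variable-length int: 7 data bits per byte, high bit set on
  // all but the last byte, least-significant group first.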
  void writeVInt(int stream, int i)
  {
    assert (stream < streamCount);
    while ((i & 0xFFFFFF80) != 0)
    {
      writeByte(stream, (byte)(i & 0x7F | 0x80));
      i >>>= 7;
    }
    writeByte(stream, (byte)i);
  }
  
  void finish()
    throws IOException
  {
    consumer.finish();
    if (nextPerField != null) {
      nextPerField.finish();
    }
  }
  
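  // Doubles the hash table once it becomes half full. For the primary chain the
  // hash code is recomputed from the term text in the char pool; the secondary
  // chain (term vectors) hashes directly on textStart.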
  void rehashPostings(int newSize)
  {
    int newMask = newSize - 1;
    
    RawPostingList[] newHash = new RawPostingList[newSize];
    for (int i = 0; i < postingsHashSize; i++)
    {
      RawPostingList p0 = postingsHash[i];
      if (p0 != null)
      {
        int code;
        if (perThread.primary)
        {
          int start = p0.textStart & 0x3FFF;
          char[] text = charPool.buffers[(p0.textStart >> 14)];
          int pos = start;
          while (text[pos] != 65535) {
            pos++;
          }
          code = 0;
          while (pos > start) {
            code = code * 31 + text[(--pos)];
          }
        }
        else
        {
          code = p0.textStart;
        }
        int hashPos = code & newMask;
        assert (hashPos >= 0);
        if (newHash[hashPos] != null)
        {
          int inc = (code >> 8) + code | 0x1;
          do
          {
            code += inc;
            hashPos = code & newMask;
          } while (newHash[hashPos] != null);
        }
        newHash[hashPos] = p0;
      }
    }
    postingsHashMask = newMask;
    postingsHash = newHash;
    postingsHashSize = newSize;
    postingsHashHalfSize = (newSize >> 1);
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.TermsHashPerField
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

final class TermsHashPerThread
  extends InvertedDocConsumerPerThread
{
  final TermsHash termsHash;
  final TermsHashConsumerPerThread consumer;
  final TermsHashPerThre