lucene-core-2.9.4-dev

()
    throws IOException
  {
    out.close();
    posWriter.close();
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FormatPostingsDocsWriter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

/**
 * Abstract API for consuming postings one field at a time: callers obtain a
 * per-field terms consumer via {@link #addField} and signal end-of-segment
 * with {@link #finish()}.
 */
abstract class FormatPostingsFieldsConsumer
{
  /** Begins writing the given field; returns the consumer for its terms. */
  abstract FormatPostingsTermsConsumer addField(FieldInfo paramFieldInfo)
    throws IOException;
  
  /** Called after all fields have been added. */
  abstract void finish()
    throws IOException;
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FormatPostingsFieldsConsumer
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.store.Directory;

/**
 * Concrete postings writer: creates the terms dictionary ("tis"/"tii")
 * outputs for a segment and hands fields off to a shared
 * {@link FormatPostingsTermsWriter}.
 */
final class FormatPostingsFieldsWriter
  extends FormatPostingsFieldsConsumer
{
  final Directory dir;
  final String segment;
  final TermInfosWriter termsOut;
  final FieldInfos fieldInfos;
  final FormatPostingsTermsWriter termsWriter;
  final DefaultSkipListWriter skipListWriter;
  final int totalNumDocs;
  
  /**
   * @param state      per-segment write state supplying directory, segment
   *                   name, doc count, term-index interval and the
   *                   flushed-files set
   * @param fieldInfos field metadata for this segment
   */
  public FormatPostingsFieldsWriter(SegmentWriteState state, FieldInfos fieldInfos)
    throws IOException
  {
    // FIX(decompiler): the "state." qualifiers below were dropped by the
    // decompiler, leaving unresolvable bare names; restored from the
    // SegmentWriteState argument.
    dir = state.directory;
    segment = state.segmentName;
    totalNumDocs = state.numDocs;
    this.fieldInfos = fieldInfos;
    termsOut = new TermInfosWriter(dir, segment, fieldInfos, state.termIndexInterval);
    
    // Doc/prox outputs are attached to the skip writer later, hence null here.
    skipListWriter = new DefaultSkipListWriter(termsOut.skipInterval, termsOut.maxSkipLevels, totalNumDocs, null, null);
    
    // Record the terms-dict and terms-index files as flushed for this segment.
    state.flushedFiles.add(state.segmentFileName("tis"));
    state.flushedFiles.add(state.segmentFileName("tii"));
    
    termsWriter = new FormatPostingsTermsWriter(state, this);
  }
  
  /** Begins writing the given field; the single termsWriter is reused. */
  FormatPostingsTermsConsumer addField(FieldInfo field)
  {
    termsWriter.setField(field);
    return termsWriter;
  }
  
  /** Closes the terms dictionary and the downstream doc/prox writers. */
  void finish()
    throws IOException
  {
    termsOut.close();
    termsWriter.close();
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FormatPostingsFieldsWriter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

/**
 * Abstract API for consuming the positions (and optional payload bytes) of a
 * single document's occurrences of a term.
 */
abstract class FormatPostingsPositionsConsumer
{
  /** Adds one position; payload bytes are at [offset, offset+length) of the array. */
  abstract void addPosition(int paramInt1, byte[] paramArrayOfByte, int paramInt2, int paramInt3)
    throws IOException;
  
  /** Called when all positions for the current document have been added. */
  abstract void finish()
    throws IOException;
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FormatPostingsPositionsConsumer
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexOutput;

/**
 * Writes the .prx (positions + payloads) file for a segment. Position deltas
 * are written as VInts; when payloads are enabled, the low bit of the delta
 * flags a change in payload length.
 */
final class FormatPostingsPositionsWriter
  extends FormatPostingsPositionsConsumer
{
  final FormatPostingsDocsWriter parent;
  final IndexOutput out;
  boolean omitTermFreqAndPositions;
  boolean storePayloads;
  // -1 forces the first payload length of each term to be written explicitly.
  int lastPayloadLength = -1;
  int lastPosition;
  
  FormatPostingsPositionsWriter(SegmentWriteState state, FormatPostingsDocsWriter parent)
    throws IOException
  {
    this.parent = parent;
    // FIX(decompiler): was a no-op self-assignment; the flag comes from the
    // owning docs writer.
    omitTermFreqAndPositions = parent.omitTermFreqAndPositions;
    // FIX(decompiler): restored the parent.parent.parent chain
    // (docs writer -> terms writer -> fields writer) and the "state."/"parent."
    // qualifiers; only FormatPostingsFieldsWriter declares fieldInfos/segment/dir
    // in this file. TODO(review): confirm against original Lucene 2.9 source.
    if (parent.parent.parent.fieldInfos.hasProx())
    {
      // At least one field indexes positions, so create the prox output.
      String fileName = IndexFileNames.segmentFileName(parent.parent.parent.segment, "prx");
      state.flushedFiles.add(fileName);
      out = parent.parent.parent.dir.createOutput(fileName);
      parent.skipListWriter.setProxOutput(out);
    }
    else
    {
      // Every field omits term freq/positions: no prox file is written.
      out = null;
    }
  }
  
  /**
   * Adds one occurrence. Writes the position delta (low bit set when the
   * payload length changed), then the payload bytes if any.
   */
  void addPosition(int position, byte[] payload, int payloadOffset, int payloadLength)
    throws IOException
  {
    assert (!omitTermFreqAndPositions) : "omitTermFreqAndPositions is true";
    assert (out != null);
    
    int delta = position - lastPosition;
    lastPosition = position;
    if (storePayloads)
    {
      if (payloadLength != lastPayloadLength)
      {
        // Length changed: flag it in the low bit and write the new length.
        lastPayloadLength = payloadLength;
        out.writeVInt(delta << 1 | 0x1);
        out.writeVInt(payloadLength);
      }
      else
      {
        out.writeVInt(delta << 1);
      }
      if (payloadLength > 0) {
        // NOTE(review): writes payload[0..payloadLength); payloadOffset is
        // unused here — callers in this file always pass offset 0.
        out.writeBytes(payload, payloadLength);
      }
    }
    else
    {
      out.writeVInt(delta);
    }
  }
  
  /** Switches to a new field, refreshing the omit/payload flags. */
  void setField(FieldInfo fieldInfo)
  {
    // FIX(decompiler): both assignments had lost their "fieldInfo." qualifier
    // (the first was a no-op self-assignment).
    omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
    // Payloads can only be stored when positions are written.
    storePayloads = (omitTermFreqAndPositions ? false : fieldInfo.storePayloads);
  }
  
  /** Resets per-term state after the last document of a term. */
  void finish()
  {
    lastPosition = 0;
    lastPayloadLength = -1;
  }
  
  void close()
    throws IOException
  {
    if (out != null) {
      out.close();
    }
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FormatPostingsPositionsWriter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.util.ArrayUtil;

/**
 * Abstract API for consuming the terms of one field. The String overload
 * copies the text into a reusable char buffer terminated by the sentinel
 * char 0xFFFF before delegating to the char[] form.
 */
abstract class FormatPostingsTermsConsumer
{
  // Reusable scratch buffer for addTerm(String); grown on demand.
  char[] termBuffer;
  
  abstract FormatPostingsDocsConsumer addTerm(char[] paramArrayOfChar, int paramInt)
    throws IOException;
  
  /** Convenience overload: copies text plus a 0xFFFF terminator into termBuffer. */
  FormatPostingsDocsConsumer addTerm(String text)
    throws IOException
  {
    final int needed = text.length() + 1;
    if ((termBuffer == null) || (termBuffer.length < needed)) {
      termBuffer = new char[ArrayUtil.getNextSize(needed)];
    }
    text.getChars(0, needed - 1, termBuffer, 0);
    termBuffer[needed - 1] = 0xFFFF;
    return addTerm(termBuffer, 0);
  }
  
  abstract void finish()
    throws IOException;
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FormatPostingsTermsConsumer
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.store.IndexOutput;

/**
 * Writes the terms of one field: records the freq/prox file pointers at the
 * start of each term and delegates per-document postings to a
 * {@link FormatPostingsDocsWriter}.
 */
final class FormatPostingsTermsWriter
  extends FormatPostingsTermsConsumer
{
  final FormatPostingsFieldsWriter parent;
  final FormatPostingsDocsWriter docsWriter;
  final TermInfosWriter termsOut;
  FieldInfo fieldInfo;
  // Current term text (0xFFFF-terminated) and its start offset.
  char[] currentTerm;
  int currentTermStart;
  // Freq/prox file pointers captured at the start of the current term.
  long freqStart;
  long proxStart;
  
  FormatPostingsTermsWriter(SegmentWriteState state, FormatPostingsFieldsWriter parent)
    throws IOException
  {
    this.parent = parent;
    // FIX(decompiler): was "termsOut = termsOut" (a no-op self-assignment);
    // the terms output belongs to the parent fields writer.
    termsOut = parent.termsOut;
    docsWriter = new FormatPostingsDocsWriter(state, this);
  }
  
  /** Switches this writer (and the docs writer) to a new field. */
  void setField(FieldInfo fieldInfo)
  {
    this.fieldInfo = fieldInfo;
    docsWriter.setField(fieldInfo);
  }
  
  /** Begins a new term: snapshots file pointers and resets skip data. */
  FormatPostingsDocsConsumer addTerm(char[] text, int start)
  {
    currentTerm = text;
    currentTermStart = start;
    
    freqStart = docsWriter.out.getFilePointer();
    if (docsWriter.posWriter.out != null) {
      proxStart = docsWriter.posWriter.out.getFilePointer();
    }
    parent.skipListWriter.resetSkip();
    
    return docsWriter;
  }
  
  /** Called when all terms of the field have been added; nothing to do. */
  void finish() {}
  
  void close()
    throws IOException
  {
    docsWriter.close();
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FormatPostingsTermsWriter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;

/**
 * Cursor over the sorted postings of one field from one thread, used when
 * merging per-thread postings at flush time. Advances term-by-term
 * ({@link #nextTerm}) and doc-by-doc ({@link #nextDoc}).
 */
final class FreqProxFieldMergeState
{
  final FreqProxTermsWriterPerField field;
  final int numPostings;
  final CharBlockPool charPool;
  final RawPostingList[] postings;
  private FreqProxTermsWriter.PostingList p;
  // Current term text (0xFFFF-terminated) and its offset in the char pool.
  char[] text;
  int textOffset;
  private int postingUpto = -1;
  final ByteSliceReader freq = new ByteSliceReader();
  final ByteSliceReader prox = new ByteSliceReader();
  // State of the current document within the current term.
  int docID;
  int termFreq;
  
  public FreqProxFieldMergeState(FreqProxTermsWriterPerField field)
  {
    this.field = field;
    // FIX(decompiler): the "field." qualifiers were dropped, leaving
    // unresolvable bare names.
    charPool = field.perThread.termsHashPerThread.charPool;
    numPostings = field.termsHashPerField.numPostings;
    postings = field.termsHashPerField.sortPostings();
  }
  
  /**
   * Advances to the next term of this field; returns false when exhausted.
   * Positions freq/prox readers at the term's byte slices and loads the
   * first document.
   */
  boolean nextTerm()
    throws IOException
  {
    postingUpto += 1;
    if (postingUpto == numPostings) {
      return false;
    }
    p = ((FreqProxTermsWriter.PostingList)postings[postingUpto]);
    docID = 0;
    
    // textStart encodes (buffer index << 14) | offset within the char pool.
    text = charPool.buffers[(p.textStart >> 14)];
    textOffset = (p.textStart & 0x3FFF);
    
    field.termsHashPerField.initReader(freq, p, 0);
    if (!field.fieldInfo.omitTermFreqAndPositions) {
      field.termsHashPerField.initReader(prox, p, 1);
    }
    // Every term has at least one document.
    boolean result = nextDoc();
    assert (result);
    
    return true;
  }
  
  /**
   * Advances to the next document of the current term; returns false when
   * the term's documents are exhausted. The final doc of each term is
   * buffered in the posting (lastDocID/docFreq) rather than the byte slice.
   */
  public boolean nextDoc()
    throws IOException
  {
    if (freq.eof())
    {
      if (p.lastDocCode != -1)
      {
        // Return the final, still-buffered document exactly once.
        docID = p.lastDocID;
        if (!field.omitTermFreqAndPositions) {
          termFreq = p.docFreq;
        }
        p.lastDocCode = -1;
        return true;
      }
      return false;
    }
    int code = freq.readVInt();
    if (field.omitTermFreqAndPositions)
    {
      docID += code;
    }
    else
    {
      // Low bit set means freq == 1; otherwise the freq follows as a VInt.
      docID += (code >>> 1);
      if ((code & 0x1) != 0) {
        termFreq = 1;
      } else {
        termFreq = freq.readVInt();
      }
    }
    assert (docID != p.lastDocID);
    
    return true;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FreqProxFieldMergeState
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

/**
 * Per-term posting state (decompiled inner class of FreqProxTermsWriter;
 * duplicates the nested PostingList emitted inside that class below).
 */
final class FreqProxTermsWriter$PostingList
  extends RawPostingList
{
  int docFreq;      // freq of the last (still-buffered) doc
  int lastDocID;    // last docID added for this term
  int lastDocCode;  // pending delta code for the last doc; -1 once emitted
  int lastPosition; // last position written, for position deltas
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FreqProxTermsWriter.PostingList
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.lucene.util.UnicodeUtil.UTF8Result;

final class FreqProxTermsWriter
  extends TermsHashConsumer
{
  private byte[] payloadBuffer;
  private final TermInfo termInfo;
  final UnicodeUtil.UTF8Result termsUTF8;
  
  public TermsHashConsumerPerThread addThread(TermsHashPerThread perThread)
  {
    return new FreqProxTermsWriterPerThread(perThread);
  }
  
  void createPostings(RawPostingList[] postings, int start, int count)
  {
    int end = start + count;
    for (int i = start; i < end; i++) {
      postings[i] = new PostingList();
    }
  }
  
  private static int compareText(char[] text1, int pos1, char[] text2, int pos2)
  {
    for (;;)
    {
      char c1 = text1[(pos1++)];
      char c2 = text2[(pos2++)];
      if (c1 != c2)
      {
        if (65535 == c2) {
          return 1;
        }
        if (65535 == c1) {
          return -1;
        }
        return c1 - c2;
      }
      if (65535 == c1) {
        return 0;
      }
    }
  }
  
  public void flush(Map threadsAndFields, SegmentWriteState state)
    throws IOException
  {
    List allFields = new ArrayList();
    
    Iterator it = threadsAndFields.entrySet().iterator();
    while (it.hasNext())
    {
      Map.Entry entry = (Map.Entry)it.next();
      
      Collection fields = (Collection)entry.getValue();
      
      Iterator fieldsIt = fields.iterator();
      while (fieldsIt.hasNext())
      {
        FreqProxTermsWriterPerField perField = (FreqProxTermsWriterPerField)fieldsIt.next();
        if (termsHashPerField.numPostings > 0) {
          allFields.add(perField);
        }
      }
    }
    Collections.sort(allFields);
    int numAllFields = allFields.size();
    
    FormatPostingsFieldsConsumer consumer = new FormatPostingsFieldsWriter(state, fieldInfos);
    
    int start = 0;
    while (start < numAllFields)
    {
      FieldInfo fieldInfo = getfieldInfo;
      String fieldName = name;
      
      int end = start + 1;
      while ((end < numAllFields) && (getfieldInfo.name.equals(fieldName))) {
        end++;
      }
      FreqProxTermsWriterPerField[] fields = new FreqProxTermsWriterPerField[end - start];
      for (int i = start; i < end; i++)
      {
        fields[(i - start)] = ((FreqProxTermsWriterPerField)allFields.get(i));
        
        storePayloads |= hasPayloads;
      }
      appendPostings(fields, consumer);
      for (int i = 0; i < fields.length; i++)
      {
        TermsHashPerField perField = termsHashPerField;
        int numPostings = numPostings;
        perField.reset();
        perField.shrinkHash(numPostings);
        fields[i].reset();
      }
      start = end;
    }
    it = threadsAndFields.entrySet().iterator();
    while (it.hasNext())
    {
      Map.Entry entry = (Map.Entry)it.next();
      FreqProxTermsWriterPerThread perThread = (FreqProxTermsWriterPerThread)entry.getKey();
      termsHashPerThread.reset(true);
    }
    consumer.finish();
  }
  
  void appendPostings(FreqProxTermsWriterPerField[] fields, FormatPostingsFieldsConsumer consumer)
    throws CorruptIndexException, IOException
  {
    int numFields = fields.length;
    
    FreqProxFieldMergeState[] mergeStates = new FreqProxFieldMergeState[numFields];
    for (int i = 0; i < numFields; i++)
    {
      FreqProxFieldMergeState fms = mergeStates[i] = new FreqProxFieldMergeState(fields[i]);
      
      assert (field.fieldInfo == 0fieldInfo);
      
      boolean result = fms.nextTerm();
      assert (result);
    }
    FormatPostingsTermsConsumer termsConsumer = consumer.addField(0fieldInfo);
    
    FreqProxFieldMergeState[] termStates = new FreqProxFieldMergeState[numFields];
    
    boolean currentFieldOmitTermFreqAndPositions = 0fieldInfo.omitTermFreqAndPositions;
    while (numFields > 0)
    {
      termStates[0] = mergeStates[0];
      int numToMerge = 1;
      for (int i = 1; i < numFields; i++)
      {
        char[] text = text;
        int textOffset = textOffset;
        int cmp = compareText(text, textOffset, 0text, 0textOffset);
        if (cmp < 0)
        {
          termStates[0] = mergeStates[i];
          numToMerge = 1;
        }
        else if (cmp == 0)
        {
          termStates[(numToMerge++)] = mergeStates[i];
        }
      }
      FormatPostingsDocsConsumer docConsumer = termsConsumer.addTerm(0text, 0textOffset);
      while (numToMerge > 0)
      {
        FreqProxFieldMergeState minState = termStates[0];
        for (int i = 1; i < numToMerge; i++) {
          if (docID < docID) {
            minState = termStates[i];
          }
        }
        int termDocFreq = termFreq;
        
        FormatPostingsPositionsConsumer posConsumer = docConsumer.addDoc(docID, termDocFreq);
        
        ByteSliceReader prox = prox;
        if (!currentFieldOmitTermFreqAndPositions)
        {
          int position = 0;
          for (int j = 0; j < termDocFreq; j++)
          {
            int code = prox.readVInt();
            position += (code >> 1);
            int payloadLength;
            if ((code & 0x1) != 0)
            {
              int payloadLength = prox.readVInt();
              if ((payloadBuffer == null) || (payloadBuffer.length < payloadLength)) {
                payloadBuffer = new byte[payloadLength];
              }
              prox.readBytes(payloadBuffer, 0, payloadLength);
            }
            else
            {
              payloadLength = 0;
            }
            posConsumer.addPosition(position, payloadBuffer, 0, payloadLength);
          }
          posConsumer.finish();
        }
        if (!minState.nextDoc())
        {
          int upto = 0;
          for (int i = 0; i < numToMerge; i++) {
            if (termStates[i] != minState) {
              termStates[(upto++)] = termStates[i];
            }
          }
          numToMerge--;
          assert (upto == numToMerge);
          if (!minState.nextTerm())
          {
            upto = 0;
            for (int i = 0; i < numFields; i++) {
              if (mergeStates[i] != minState) {
                mergeStates[(upto++)] = mergeStates[i];
              }
            }
            numFields--;
            assert (upto == numFields);
          }
        }
      }
      docConsumer.finish();
    }
    termsConsumer.finish();
  }
  
  FreqProxTermsWriter()
  {
    termInfo = new TermInfo();
    
    termsUTF8 = new UnicodeUtil.UTF8Result();
  }
  
  int bytesPerPosting()
  {
    return 36;
  }
  
  void closeDocStore(SegmentWriteState state) {}
  
  void abort() {}
  
  void files(Collection files) {}
  
  static final class PostingList
    extends RawPostingList
  {
    int docFreq;
    int lastDocID;
    int lastDocCode;
    int lastPosition;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FreqProxTermsWriter
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.util.AttributeSource;

/**
 * Buffers freq/prox postings for one field of one thread. Doc deltas and
 * freqs go to byte stream 0, position deltas and payloads to stream 1.
 *
 * FIX(decompiler): several statements had lost their "p."/"payload."/
 * "fieldInfo." receivers (becoming no-op self-assignments); restored, marked
 * inline.
 */
final class FreqProxTermsWriterPerField
  extends TermsHashConsumerPerField
  implements Comparable
{
  final FreqProxTermsWriterPerThread perThread;
  final TermsHashPerField termsHashPerField;
  final FieldInfo fieldInfo;
  final DocumentsWriter.DocState docState;
  final FieldInvertState fieldState;
  boolean omitTermFreqAndPositions;
  PayloadAttribute payloadAttribute;
  // Set once any payload is written for this field.
  boolean hasPayloads;
  
  public FreqProxTermsWriterPerField(TermsHashPerField termsHashPerField, FreqProxTermsWriterPerThread perThread, FieldInfo fieldInfo)
  {
    this.termsHashPerField = termsHashPerField;
    this.perThread = perThread;
    this.fieldInfo = fieldInfo;
    // FIX(decompiler): the next three lines were no-op self-assignments.
    docState = termsHashPerField.docState;
    fieldState = termsHashPerField.fieldState;
    omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
  }
  
  /** One stream (docs) when freq/positions are omitted, else two. */
  int getStreamCount()
  {
    if (fieldInfo.omitTermFreqAndPositions) {
      return 1;
    }
    return 2;
  }
  
  void finish() {}
  
  void skippingLongTerm()
    throws IOException
  {}
  
  /** Orders per-field writers by field name (used to sort before flush). */
  public int compareTo(Object other0)
  {
    FreqProxTermsWriterPerField other = (FreqProxTermsWriterPerField)other0;
    // FIX(decompiler): was comparing fieldInfo.name to itself (always 0).
    return fieldInfo.name.compareTo(other.fieldInfo.name);
  }
  
  void reset()
  {
    // Record, up front, whether this field omits term freq & positions.
    omitTermFreqAndPositions = fieldInfo.omitTermFreqAndPositions;
    payloadAttribute = null;
  }
  
  /** Returns true if any of the fields is indexed (i.e. produces postings). */
  boolean start(Fieldable[] fields, int count)
  {
    for (int i = 0; i < count; i++) {
      if (fields[i].isIndexed()) {
        return true;
      }
    }
    return false;
  }
  
  /** Caches the payload attribute (if present) for this field instance. */
  void start(Fieldable f)
  {
    if (fieldState.attributeSource.hasAttribute(PayloadAttribute.class)) {
      payloadAttribute = ((PayloadAttribute)fieldState.attributeSource.getAttribute(PayloadAttribute.class));
    } else {
      payloadAttribute = null;
    }
  }
  
  /**
   * Writes one position delta (stream 1); low bit flags an attached payload,
   * whose length and bytes follow.
   */
  final void writeProx(FreqProxTermsWriter.PostingList p, int proxCode)
  {
    // FIX(decompiler): removed duplicate "Payload payload;" declaration and
    // restored the "payload."/"p." qualifiers below.
    Payload payload;
    if (payloadAttribute == null) {
      payload = null;
    } else {
      payload = payloadAttribute.getPayload();
    }
    if ((payload != null) && (payload.length > 0))
    {
      termsHashPerField.writeVInt(1, proxCode << 1 | 0x1);
      termsHashPerField.writeVInt(1, payload.length);
      termsHashPerField.writeBytes(1, payload.data, payload.offset, payload.length);
      hasPayloads = true;
    }
    else
    {
      termsHashPerField.writeVInt(1, proxCode << 1);
    }
    p.lastPosition = fieldState.position;
  }
  
  /** First occurrence of a term in the segment: seed its posting state. */
  final void newTerm(RawPostingList p0)
  {
    assert (docState.testPoint("FreqProxTermsWriterPerField.newTerm start"));
    FreqProxTermsWriter.PostingList p = (FreqProxTermsWriter.PostingList)p0;
    // FIX(decompiler): restored the "p." qualifiers (were bare assignments).
    p.lastDocID = docState.docID;
    if (omitTermFreqAndPositions)
    {
      p.lastDocCode = docState.docID;
    }
    else
    {
      // Doc codes are (delta << 1); low bit later flags freq == 1.
      p.lastDocCode = (docState.docID << 1);
      p.docFreq = 1;
      writeProx(p, fieldState.position);
    }
  }
  
  /**
   * Subsequent occurrence of a term: either flushes the previous doc's
   * buffered code/freq and starts a new doc, or bumps the freq for the
   * current doc.
   */
  final void addTerm(RawPostingList p0)
  {
    assert (docState.testPoint("FreqProxTermsWriterPerField.addTerm start"));
    
    FreqProxTermsWriter.PostingList p = (FreqProxTermsWriter.PostingList)p0;
    
    // FIX(decompiler): restored the "p." qualifiers throughout this method.
    assert ((omitTermFreqAndPositions) || (p.docFreq > 0));
    if (omitTermFreqAndPositions)
    {
      if (docState.docID != p.lastDocID)
      {
        assert (docState.docID > p.lastDocID);
        termsHashPerField.writeVInt(0, p.lastDocCode);
        p.lastDocCode = (docState.docID - p.lastDocID);
        p.lastDocID = docState.docID;
      }
    }
    else if (docState.docID != p.lastDocID)
    {
      assert (docState.docID > p.lastDocID);
      // Emit the previous doc: low bit set encodes freq == 1 inline.
      if (1 == p.docFreq)
      {
        termsHashPerField.writeVInt(0, p.lastDocCode | 0x1);
      }
      else
      {
        termsHashPerField.writeVInt(0, p.lastDocCode);
        termsHashPerField.writeVInt(0, p.docFreq);
      }
      p.docFreq = 1;
      p.lastDocCode = (docState.docID - p.lastDocID << 1);
      p.lastDocID = docState.docID;
      writeProx(p, fieldState.position);
    }
    else
    {
      // Same doc as before: another occurrence.
      p.docFreq += 1;
      writeProx(p, fieldState.position - p.lastPosition);
    }
  }
  
  public void abort() {}
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FreqProxTermsWriterPerField
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

/**
 * Per-thread factory for {@link FreqProxTermsWriterPerField} instances;
 * holds the thread's doc state and terms-hash.
 */
final class FreqProxTermsWriterPerThread
  extends TermsHashConsumerPerThread
{
  final TermsHashPerThread termsHashPerThread;
  final DocumentsWriter.DocState docState;
  
  public FreqProxTermsWriterPerThread(TermsHashPerThread perThread)
  {
    // FIX(decompiler): was "docState = docState" (a no-op self-assignment).
    docState = perThread.docState;
    termsHashPerThread = perThread;
  }
  
  public TermsHashConsumerPerField addField(TermsHashPerField termsHashPerField, FieldInfo fieldInfo)
  {
    return new FreqProxTermsWriterPerField(termsHashPerField, this, fieldInfo);
  }
  
  void startDocument() {}
  
  /** No per-document output is produced by this consumer. */
  DocumentsWriter.DocWriter finishDocument()
  {
    return null;
  }
  
  public void abort() {}
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.FreqProxTermsWriterPerThread
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import org.apache.lucene.store.Directory;

/**
 * Represents a single commit (a point-in-time snapshot) of an index.
 * Equality is defined by directory plus version; subclasses supply the
 * concrete segment/file/version accessors.
 */
public abstract class IndexCommit
  implements IndexCommitPoint
{
  public abstract String getSegmentsFileName();
  
  public abstract Collection getFileNames()
    throws IOException;
  
  public abstract Directory getDirectory();
  
  public abstract void delete();
  
  public abstract boolean isDeleted();
  
  public abstract boolean isOptimized();
  
  /** Two commits are equal when they share a directory and a version. */
  public boolean equals(Object other)
  {
    if (!(other instanceof IndexCommit)) {
      return false;
    }
    IndexCommit that = (IndexCommit)other;
    return (that.getDirectory().equals(getDirectory())) && (that.getVersion() == getVersion());
  }
  
  /** Consistent with equals: combines directory hash and version. */
  public int hashCode()
  {
    long h = getDirectory().hashCode() + getVersion();
    return (int)h;
  }
  
  public abstract long getVersion();
  
  public abstract long getGeneration();
  
  /** Last-modified time of this commit's segments file. */
  public long getTimestamp()
    throws IOException
  {
    String segmentsFile = getSegmentsFileName();
    return getDirectory().fileModified(segmentsFile);
  }
  
  public abstract Map getUserData()
    throws IOException;
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.IndexCommit
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Collection;

/**
 * @deprecated
 */
/**
 * A point-in-time commit of an index that can be enumerated and deleted.
 *
 * @deprecated
 */
public abstract interface IndexCommitPoint
{
  /** Name of the segments file for this commit. */
  public abstract String getSegmentsFileName();
  
  /** All index files referenced by this commit. */
  public abstract Collection getFileNames()
    throws IOException;
  
  /** Marks this commit for deletion. */
  public abstract void delete();
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.IndexCommitPoint
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.List;

/**
 * Policy that decides which index commits survive: invoked once when the
 * writer opens ({@link #onInit}) and again on each commit
 * ({@link #onCommit}), each time with the list of existing commit points.
 */
public abstract interface IndexDeletionPolicy
{
  /** Called on writer open with the list of current commit points. */
  public abstract void onInit(List paramList)
    throws IOException;
  
  /** Called after each commit with the list of current commit points. */
  public abstract void onCommit(List paramList)
    throws IOException;
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.IndexDeletionPolicy
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import org.apache.lucene.store.Directory;

/**
 * Snapshot of one commit held by IndexFileDeleter (decompiled inner class):
 * captures the segments file, referenced files, version and generation at
 * construction, and queues itself on commitsToDelete when delete() is called.
 * Ordered by generation.
 */
final class IndexFileDeleter$CommitPoint
  extends IndexCommit
  implements Comparable
{
  long gen;
  Collection files;
  String segmentsFileName;
  boolean deleted;
  Directory directory;
  // Shared queue owned by IndexFileDeleter; delete() enqueues onto it.
  Collection commitsToDelete;
  long version;
  long generation;
  final boolean isOptimized;
  final Map userData;
  
  public IndexFileDeleter$CommitPoint(Collection commitsToDelete, Directory directory, SegmentInfos segmentInfos)
    throws IOException
  {
    this.directory = directory;
    this.commitsToDelete = commitsToDelete;
    userData = segmentInfos.getUserData();
    segmentsFileName = segmentInfos.getCurrentSegmentFileName();
    version = segmentInfos.getVersion();
    generation = segmentInfos.getGeneration();
    files = Collections.unmodifiableCollection(segmentInfos.files(directory, true));
    gen = segmentInfos.getGeneration();
    // Optimized == a single segment with no deletions.
    isOptimized = ((segmentInfos.size() == 1) && (!segmentInfos.info(0).hasDeletions()));
    
    assert (!segmentInfos.hasExternalSegments(directory));
  }
  
  public String toString()
  {
    return "IndexFileDeleter.CommitPoint(" + segmentsFileName + ")";
  }
  
  public boolean isOptimized()
  {
    return isOptimized;
  }
  
  public String getSegmentsFileName()
  {
    return segmentsFileName;
  }
  
  public Collection getFileNames()
    throws IOException
  {
    return files;
  }
  
  public Directory getDirectory()
  {
    return directory;
  }
  
  public long getVersion()
  {
    return version;
  }
  
  public long getGeneration()
  {
    return generation;
  }
  
  public Map getUserData()
  {
    return userData;
  }
  
  /** Idempotently queues this commit for deletion. */
  public void delete()
  {
    if (!deleted)
    {
      deleted = true;
      commitsToDelete.add(this);
    }
  }
  
  public boolean isDeleted()
  {
    return deleted;
  }
  
  /** Orders commit points by generation. */
  public int compareTo(Object obj)
  {
    CommitPoint commit = (CommitPoint)obj;
    // FIX(decompiler): was "gen < gen" / "gen > gen" — comparing the field
    // to itself, so every comparison returned 0. Compare against the other
    // commit's generation.
    if (gen < commit.gen) {
      return -1;
    }
    if (gen > commit.gen) {
      return 1;
    }
    return 0;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.IndexFileDeleter.CommitPoint
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

/**
 * Reference count for one index file (decompiled inner class of
 * IndexFileDeleter). IncRef/DecRef return the updated count; assertions
 * (when enabled) catch over-decrement and re-increment of a dead count.
 */
final class IndexFileDeleter$RefCount
{
  // File this count tracks (used only in assertion messages here).
  final String fileName;
  // True once IncRef has been called at least once.
  boolean initDone;
  int count;
  
  IndexFileDeleter$RefCount(String fileName)
  {
    this.fileName = fileName;
  }
  
  public int IncRef()
  {
    if (initDone) {
      // After the first increment, the count must never be bumped from zero.
      assert (count > 0) : ("RefCount is 0 pre-increment for file \"" + fileName + "\"");
    } else {
      initDone = true;
    }
    count++;
    return count;
  }
  
  public int DecRef()
  {
    assert (count > 0) : ("RefCount is 0 pre-decrement for file \"" + fileName + "\"");
    count--;
    return count;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.IndexFileDeleter.RefCount
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.store.Directory;

final class IndexFileDeleter
{
  // Files whose deletion failed (e.g. still open on Windows); retried later.
  private List deletable;
  // Maps file name -> RefCount of how many commits/checkpoints reference it.
  private Map refCounts = new HashMap();
  // All known commit points, sorted by generation after init.
  private List commits = new ArrayList();
  // Collections of file names ref'd by the most recent non-commit checkpoint.
  private List lastFiles = new ArrayList();
  // Commit points the deletion policy has marked for deletion.
  private List commitsToDelete = new ArrayList();
  private PrintStream infoStream;
  private Directory directory;
  private IndexDeletionPolicy policy;
  private DocumentsWriter docWriter;
  // True if the commit point we were opened on was deleted during init.
  final boolean startingCommitDeleted;
  private SegmentInfos lastSegmentInfos;
  // Shared set of fsync'd file names; entries are removed when a file is deleted.
  private final Set synced;
  public static boolean VERBOSE_REF_COUNTS = false;
  
  /**
   * Installs (or clears, with null) the verbose-logging stream and logs
   * the active deletion policy when logging is enabled.
   */
  void setInfoStream(PrintStream infoStream)
  {
    this.infoStream = infoStream;
    if (infoStream == null) {
      return;
    }
    message("setInfoStream deletionPolicy=" + policy);
  }
  
  /**
   * Writes one timestamped, thread-tagged line to the info stream.
   * Callers must check {@code infoStream != null} first.
   */
  private void message(String message)
  {
    String prefix = "IFD [" + new Date() + "; " + Thread.currentThread().getName() + "]: ";
    infoStream.println(prefix + message);
  }
  
  /**
   * Builds the ref-count state by scanning the directory for index files
   * and segments_N commits, deletes unreferenced leftovers, then lets the
   * deletion policy (onInit) decide which commits to keep.
   *
   * @throws CorruptIndexException if the current segments_N file cannot be read
   * @throws IOException on other directory access failures
   */
  public IndexFileDeleter(Directory directory, IndexDeletionPolicy policy, SegmentInfos segmentInfos, PrintStream infoStream, DocumentsWriter docWriter, Set synced)
    throws CorruptIndexException, IOException
  {
    this.docWriter = docWriter;
    this.infoStream = infoStream;
    this.synced = synced;
    if (infoStream != null) {
      message("init: current segments file is \"" + segmentInfos.getCurrentSegmentFileName() + "\"; deletionPolicy=" + policy);
    }
    this.policy = policy;
    this.directory = directory;
    
    long currentGen = segmentInfos.getGeneration();
    IndexFileNameFilter filter = IndexFileNameFilter.getFilter();
    
    String[] files = directory.listAll();
    
    // Pass 1: register a RefCount for every index file, and load a
    // CommitPoint for each readable segments_N file.
    CommitPoint currentCommitPoint = null;
    for (int i = 0; i < files.length; i++)
    {
      String fileName = files[i];
      if ((filter.accept(null, fileName)) && (!fileName.equals("segments.gen")))
      {
        // Creates a zero-count entry if the file is not yet tracked.
        getRefCount(fileName);
        if (fileName.startsWith("segments"))
        {
          if (infoStream != null) {
            message("init: load commit \"" + fileName + "\"");
          }
          SegmentInfos sis = new SegmentInfos();
          try
          {
            sis.read(directory, fileName);
          }
          catch (FileNotFoundException e)
          {
            // File vanished between listAll() and read(); skip this commit.
            if (infoStream != null) {
              message("init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
            }
            sis = null;
          }
          catch (IOException e)
          {
            // Tolerate read errors only for commits newer than the one we
            // were opened with; those may be partially written.
            if (SegmentInfos.generationFromSegmentsFileName(fileName) <= currentGen) {
              throw e;
            }
            sis = null;
          }
          if (sis != null)
          {
            CommitPoint commitPoint = new CommitPoint(commitsToDelete, directory, sis);
            if (sis.getGeneration() == segmentInfos.getGeneration()) {
              currentCommitPoint = commitPoint;
            }
            commits.add(commitPoint);
            incRef(sis, true);
            if ((lastSegmentInfos == null) || (sis.getGeneration() > lastSegmentInfos.getGeneration())) {
              lastSegmentInfos = sis;
            }
          }
        }
      }
    }
    if (currentCommitPoint == null)
    {
      // The segments_N we were opened with was not listed (e.g. stale
      // directory-listing cache); force-load it directly.
      SegmentInfos sis = new SegmentInfos();
      try
      {
        sis.read(directory, segmentInfos.getCurrentSegmentFileName());
      }
      catch (IOException e)
      {
        throw new CorruptIndexException("failed to locate current segments_N file");
      }
      if (infoStream != null) {
        message("forced open of current segments file " + segmentInfos.getCurrentSegmentFileName());
      }
      currentCommitPoint = new CommitPoint(commitsToDelete, directory, sis);
      commits.add(currentCommitPoint);
      incRef(sis, true);
    }
    Collections.sort(commits);
    
    // Pass 2: delete any tracked file no commit references — a leftover
    // from an aborted write.
    Iterator it = refCounts.keySet().iterator();
    while (it.hasNext())
    {
      String fileName = (String)it.next();
      RefCount rc = (RefCount)refCounts.get(fileName);
      // Fixed decompiler artifact: must test rc.count (was a bare "count").
      if (0 == rc.count)
      {
        if (infoStream != null) {
          message("init: removing unreferenced file \"" + fileName + "\"");
        }
        deleteFile(fileName);
      }
    }
    // Let the policy discard commits, then take refs for the in-memory
    // SegmentInfos we were opened with.
    policy.onInit(commits);
    
    checkpoint(segmentInfos, false);
    
    startingCommitDeleted = currentCommitPoint.isDeleted();
    
    deleteCommits();
  }
  
  // Returns the SegmentInfos with the highest generation seen during init.
  public SegmentInfos getLastSegmentInfos()
  {
    return lastSegmentInfos;
  }
  
  /**
   * Releases the file references held by every commit the deletion policy
   * marked for deletion, then compacts the commits list in place.
   *
   * Fixes two decompiler artifacts: the file iteration must use
   * {@code commit.files}, and the compaction test must read
   * {@code commit.deleted} (both were emitted as bare, unresolved names).
   */
  private void deleteCommits()
    throws IOException
  {
    int size = commitsToDelete.size();
    if (size > 0)
    {
      // Drop one ref per file per discarded commit; files reaching a zero
      // count are physically deleted by decRef.
      for (int i = 0; i < size; i++)
      {
        CommitPoint commit = (CommitPoint)commitsToDelete.get(i);
        if (infoStream != null) {
          message("deleteCommits: now decRef commit \"" + commit.getSegmentsFileName() + "\"");
        }
        Iterator it = commit.files.iterator();
        while (it.hasNext()) {
          decRef((String)it.next());
        }
      }
      commitsToDelete.clear();
      
      // Compact: shift surviving commits left over the deleted ones.
      size = commits.size();
      int readFrom = 0;
      int writeTo = 0;
      while (readFrom < size)
      {
        CommitPoint commit = (CommitPoint)commits.get(readFrom);
        if (!commit.deleted)
        {
          if (writeTo != readFrom) {
            commits.set(writeTo, commits.get(readFrom));
          }
          writeTo++;
        }
        readFrom++;
      }
      // Trim the now-unused tail.
      while (size > writeTo)
      {
        commits.remove(size - 1);
        size--;
      }
    }
  }
  
  /**
   * Deletes untracked index files, optionally restricted to those
   * belonging to one segment ({@code segmentName + "."} or
   * {@code segmentName + "_"} prefixes). Pass null to sweep all segments.
   *
   * Fixes decompiler artifacts: the prefix locals were declared twice
   * (with an inner declaration shadowing the outer), which does not compile.
   *
   * @param segmentName segment to restrict the sweep to, or null for all
   * @throws IOException if the directory cannot be listed or a delete fails
   */
  public void refresh(String segmentName)
    throws IOException
  {
    String[] files = directory.listAll();
    IndexFileNameFilter filter = IndexFileNameFilter.getFilter();
    String segmentPrefix1;
    String segmentPrefix2;
    if (segmentName != null)
    {
      segmentPrefix1 = segmentName + ".";
      segmentPrefix2 = segmentName + "_";
    }
    else
    {
      segmentPrefix1 = null;
      segmentPrefix2 = null;
    }
    for (int i = 0; i < files.length; i++)
    {
      String fileName = files[i];
      // Delete only files we are not ref-counting (never seen in a commit),
      // and never touch segments.gen.
      if ((filter.accept(null, fileName)) && ((segmentName == null) || (fileName.startsWith(segmentPrefix1)) || (fileName.startsWith(segmentPrefix2))) && (!refCounts.containsKey(fileName)) && (!fileName.equals("segments.gen")))
      {
        if (infoStream != null) {
          message("refresh [prefix=" + segmentName + "]: removing newly created unreferenced file \"" + fileName + "\"");
        }
        deleteFile(fileName);
      }
    }
  }
  
  // Convenience overload: sweep untracked files across all segments.
  public void refresh()
    throws IOException
  {
    refresh(null);
  }
  
  /**
   * Releases the references held for the last non-commit checkpoint and
   * retries any deletions that previously failed.
   */
  public void close()
    throws IOException
  {
    if (!lastFiles.isEmpty())
    {
      Iterator it = lastFiles.iterator();
      while (it.hasNext()) {
        decRef((Collection)it.next());
      }
      lastFiles.clear();
    }
    deletePendingFiles();
  }
  
  /**
   * Retries deletion of files whose earlier delete failed. The pending
   * list is swapped out first so that deleteFile() can safely re-queue
   * files that fail again.
   */
  private void deletePendingFiles()
    throws IOException
  {
    if (deletable == null) {
      return;
    }
    List oldDeletable = deletable;
    deletable = null;
    for (int i = 0; i < oldDeletable.size(); i++)
    {
      String fileName = (String)oldDeletable.get(i);
      if (infoStream != null) {
        message("delete pending file " + fileName);
      }
      deleteFile(fileName);
    }
  }
  
  /**
   * Records a new point-in-time set of referenced files. For a commit,
   * registers a new CommitPoint and runs the deletion policy; otherwise
   * swaps the refs held for the previous checkpoint with refs for this one
   * (plus any files DocumentsWriter has open).
   *
   * Fixes a decompiler artifact: {@code docWriterFiles} was declared a
   * second time inside the if-branch, shadowing the outer local and
   * leaving it unassigned — which does not compile.
   *
   * @param segmentInfos the segments to protect
   * @param isCommit true if this checkpoint is a durable commit
   * @throws IOException if ref-count maintenance triggers a failing delete
   */
  public void checkpoint(SegmentInfos segmentInfos, boolean isCommit)
    throws IOException
  {
    if (infoStream != null) {
      message("now checkpoint \"" + segmentInfos.getCurrentSegmentFileName() + "\" [" + segmentInfos.size() + " segments " + "; isCommit = " + isCommit + "]");
    }
    // Opportunistically retry deletions that failed earlier.
    deletePendingFiles();
    
    // Protect every file the new SegmentInfos references.
    incRef(segmentInfos, isCommit);
    if (isCommit)
    {
      commits.add(new CommitPoint(commitsToDelete, directory, segmentInfos));
      
      // The policy may mark older commits for deletion here.
      policy.onCommit(commits);
      
      deleteCommits();
    }
    else
    {
      List docWriterFiles;
      if (docWriter != null)
      {
        docWriterFiles = docWriter.openFiles();
        if (docWriterFiles != null) {
          incRef(docWriterFiles);
        }
      }
      else
      {
        docWriterFiles = null;
      }
      // Release the previous checkpoint's refs, then remember this one's.
      int size = lastFiles.size();
      if (size > 0)
      {
        for (int i = 0; i < size; i++) {
          decRef((Collection)lastFiles.get(i));
        }
        lastFiles.clear();
      }
      lastFiles.add(segmentInfos.files(directory, false));
      if (docWriterFiles != null) {
        lastFiles.add(docWriterFiles);
      }
    }
  }
  
  /**
   * Takes one reference on every file named by the given SegmentInfos.
   */
  void incRef(SegmentInfos segmentInfos, boolean isCommit)
    throws IOException
  {
    for (Iterator it = segmentInfos.files(directory, isCommit).iterator(); it.hasNext();) {
      incRef((String)it.next());
    }
  }
  
  /**
   * Takes one reference per file name in the list.
   */
  void incRef(List files)
    throws IOException
  {
    Iterator it = files.iterator();
    while (it.hasNext()) {
      incRef((String)it.next());
    }
  }
  
  /**
   * Increments the reference count of one file, creating its counter on
   * demand. Fixes a decompiler artifact: the verbose log line referenced
   * a bare {@code count} instead of {@code rc.count}.
   */
  void incRef(String fileName)
    throws IOException
  {
    RefCount rc = getRefCount(fileName);
    if ((infoStream != null) && (VERBOSE_REF_COUNTS)) {
      message("  IncRef \"" + fileName + "\": pre-incr count is " + rc.count);
    }
    rc.IncRef();
  }
  
  /**
   * Drops one reference per file name; files reaching zero are deleted.
   */
  void decRef(Collection files)
    throws IOException
  {
    for (Iterator it = files.iterator(); it.hasNext();) {
      decRef((String)it.next());
    }
  }
  
  /**
   * Decrements one file's reference count; when it reaches zero the file
   * is deleted, untracked, and removed from the shared synced set.
   * Fixes a decompiler artifact: the verbose log line referenced a bare
   * {@code count} instead of {@code rc.count}.
   */
  void decRef(String fileName)
    throws IOException
  {
    RefCount rc = getRefCount(fileName);
    if ((infoStream != null) && (VERBOSE_REF_COUNTS)) {
      message("  DecRef \"" + fileName + "\": pre-decr count is " + rc.count);
    }
    if (0 == rc.DecRef())
    {
      deleteFile(fileName);
      refCounts.remove(fileName);
      if (synced != null) {
        synchronized (synced)
        {
          synced.remove(fileName);
        }
      }
    }
  }
  
  /**
   * Drops one reference on every file named by the given SegmentInfos
   * (non-commit file set).
   */
  void decRef(SegmentInfos segmentInfos)
    throws IOException
  {
    for (Iterator it = segmentInfos.files(directory, false).iterator(); it.hasNext();) {
      decRef((String)it.next());
    }
  }
  
  /**
   * Returns true if the file is tracked with a positive reference count.
   * Fixes a decompiler artifact: the return expression was garbled to
   * {@code getRefCountcount > 0}; it must read the count off the entry.
   */
  public boolean exists(String fileName)
  {
    if (!refCounts.containsKey(fileName)) {
      return false;
    }
    return getRefCount(fileName).count > 0;
  }
  
  /**
   * Returns the RefCount for a file, lazily creating a zero-count entry.
   * Fixes a decompiler artifact: the inner {@code RefCount rc} declaration
   * shadowed the outer local, leaving it unassigned (does not compile).
   */
  private RefCount getRefCount(String fileName)
  {
    RefCount rc;
    if (!refCounts.containsKey(fileName))
    {
      rc = new RefCount(fileName);
      refCounts.put(fileName, rc);
    }
    else
    {
      rc = (RefCount)refCounts.get(fileName);
    }
    return rc;
  }
  
  /**
   * Unconditionally deletes every named file (no ref-count check).
   */
  void deleteFiles(List files)
    throws IOException
  {
    Iterator it = files.iterator();
    while (it.hasNext()) {
      deleteFile((String)it.next());
    }
  }
  
  /**
   * Deletes only the given files that are not ref-counted — i.e. files no
   * commit has ever referenced.
   */
  void deleteNewFiles(Collection files)
    throws IOException
  {
    for (Iterator it = files.iterator(); it.hasNext();)
    {
      String fileName = (String)it.next();
      if (refCounts.containsKey(fileName)) {
        continue;
      }
      if (infoStream != null) {
        message("delete new file \"" + fileName + "\"");
      }
      deleteFile(fileName);
    }
  }
  
  /**
   * Deletes one file from the directory. If deletion throws but the file
   * still exists (e.g. held open on Windows), the file is queued on
   * {@code deletable} for a later retry instead of propagating the error.
   */
  void deleteFile(String fileName)
    throws IOException
  {
    try
    {
      if (infoStream != null) {
        message("delete \"" + fileName + "\"");
      }
      directory.deleteFile(fileName);
    }
    catch (IOException e)
    {
      if (!directory.fileExists(fileName)) {
        // Already gone — nothing to retry.
        return;
      }
      if (infoStream != null) {
        message("IndexFileDeleter: unable to remove file \"" + fileName + "\": " + e.toString() + "; Will re-try later.");
      }
      if (deletable == null) {
        deletable = new ArrayList();
      }
      deletable.add(fileName);
    }
  }
  
  /**
   * Per-file reference counter. The count starts at zero; a file may be
   * deleted once its count returns to zero.
   */
  private static final class RefCount
  {
    // Name of the counted file (used only in assertion messages).
    final String fileName;
    // Set on the first IncRef; suppresses the pre-increment assert for the
    // initial reference, when a count of 0 is legitimate.
    boolean initDone;
    int count;

    RefCount(String fileName)
    {
      this.fileName = fileName;
    }

    /** Increments the count and returns the new value. */
    public int IncRef()
    {
      if (initDone) {
        assert (count > 0) : ("RefCount is 0 pre-increment for file \"" + fileName + "\"");
      } else {
        initDone = true;
      }
      count++;
      return count;
    }

    /** Decrements the count and returns the new value. */
    public int DecRef()
    {
      assert (count > 0) : ("RefCount is 0 pre-decrement for file \"" + fileName + "\"");
      count--;
      return count;
    }
  }
  
  private static final class CommitPoint
    extends IndexCommit
    implements Comparable
  {
    long gen;
    Collection files;
    String segmentsFileName;
    boolean deleted;
    Directory directory;
    Collection commitsToDelete;
    long version;
    long generation;
    final boolean isOptimized;
    final Map userData;
    
    public CommitPoint(Collection commitsToDelete, Directory directory, SegmentInfos segmentInfos)
      throws IOException
    {
      this.directory = directory;
      this.commitsToDelete = commitsToDelete;
      userData = segmentInfos.getUserData();
      segmentsFileName = segmentInfos.getCurrentSegmentFileName();
      version = segmentInfos.getVersion();
      generation = segmentInfos.g
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56

Further reading...

For more information on Java 1.5 Tiger, you may find Java 1.5 Tiger, A developer's Notebook by D. Flanagan and B. McLaughlin from O'Reilly of interest.

New!JAR listings


Copyright 2006-2017. Infinite Loop Ltd