lucene-core-2.9.4-dev

ert (bytesRef == null);
      if (origNorm == null) {
        origNorm = this;
      }
      origNorm.incRef();
    }
    in = null;
    
    return clone;
  }
  
  /**
   * Writes this norm's in-memory bytes to a new-generation norm file and
   * marks the norm clean. On failure the partially written file is deleted
   * on a best-effort basis.
   */
  public void reWrite(SegmentInfo si)
    throws IOException
  {
    assert ((refCount > 0) && ((origNorm == null) || (origNorm.refCount > 0))) : ("refCount=" + refCount + " origNorm=" + origNorm);
    
    // Advance the norm generation so the rewrite gets a fresh file name.
    si.advanceNormGen(number);
    String normFileName = si.getNormFileName(number);
    // NOTE(review): `this$0` is the decompiler's synthetic reference to the
    // enclosing SegmentReader; it is not valid source-level Java.
    IndexOutput out = this$0.directory().createOutput(normFileName);
    boolean success = false;
    try
    {
      try
      {
        // One norm byte per document.
        out.writeBytes(bytes, this$0.maxDoc());
      }
      finally
      {
        out.close();
      }
      success = true;
    }
    finally
    {
      if (!success) {
        try
        {
          // Best-effort removal of the partial file; errors are ignored.
          this$0.directory().deleteFile(normFileName);
        }
        catch (Throwable t) {}
      }
    }
    dirty = false;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentReader.Norm
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

/**
 * Simple synchronized reference counter. The creator owns the initial
 * reference, so the count starts at one.
 */
class SegmentReader$Ref
{
  private int refCount = 1;
  
  /** Returns the current reference count. */
  public synchronized int refCount()
  {
    return refCount;
  }
  
  /** Adds a reference; the count must still be positive. */
  public synchronized int incRef()
  {
    assert (refCount > 0);
    return ++refCount;
  }
  
  /** Drops a reference; a return of zero means the resource may be freed. */
  public synchronized int decRef()
  {
    assert (refCount > 0);
    return --refCount;
  }
  
  public String toString()
  {
    return "refcount: " + refCount;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentReader.Ref
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BitVector;
import org.apache.lucene.util.CloseableThreadLocal;

public class SegmentReader
  extends IndexReader
  implements Cloneable
{
  protected boolean readOnly;                 // true when opened via the read-only impl
  private SegmentInfo si;                     // metadata of the segment this reader serves
  private int readBufferSize;                 // buffer size used for all index inputs
  // Per-thread FieldsReader clone (FieldsReader is stateful, so each thread gets its own).
  CloseableThreadLocal fieldsReaderLocal = new FieldsReaderLocal(null);
  // Per-thread TermVectorsReader clone; populated lazily in getTermVectorsReader().
  CloseableThreadLocal termVectorsLocal = new CloseableThreadLocal();
  BitVector deletedDocs = null;               // deleted-doc bits; null when nothing is deleted
  Ref deletedDocsRef = null;                  // ref-count for deletedDocs shared across clones
  private boolean deletedDocsDirty = false;   // true when deletions must be written on commit
  private boolean normsDirty = false;         // true when at least one norm must be rewritten
  private int pendingDeleteCount;             // deletions done since the last commit
  // rollback* fields snapshot commit-relevant state in startCommit() so a
  // failed commit can be undone.
  private boolean rollbackHasChanges = false;
  private boolean rollbackDeletedDocsDirty = false;
  private boolean rollbackNormsDirty = false;
  private SegmentInfo rollbackSegmentInfo;
  private int rollbackPendingDeleteCount;
  // Shared stream for the single .nrm norms file, plus its ref-count.
  private IndexInput singleNormStream;
  private Ref singleNormRef;
  CoreReaders core;                           // shared, ref-counted core readers
  
  /**
   * The readers (terms, frequencies, positions, stored fields, vectors)
   * that can safely be shared between reopened/cloned SegmentReaders.
   * Reference-counted; closed when the last sharing reader is closed.
   */
  static final class CoreReaders
  {
    // Counts how many SegmentReaders share these core readers.
    private final SegmentReader.Ref ref = new SegmentReader.Ref();
    final String segment;
    final FieldInfos fieldInfos;
    final IndexInput freqStream;
    final IndexInput proxStream;
    final TermInfosReader tisNoIndex;
    final Directory dir;
    final Directory cfsDir;
    final int readBufferSize;
    final int termsIndexDivisor;
    private final SegmentReader origInstance;
    TermInfosReader tis;
    FieldsReader fieldsReaderOrig;
    TermVectorsReader termVectorsReaderOrig;
    CompoundFileReader cfsReader;
    CompoundFileReader storeCFSReader;
    
    CoreReaders(SegmentReader origInstance, Directory dir, SegmentInfo si, int readBufferSize, int termsIndexDivisor)
      throws IOException
    {
      // FIX: the decompiler emitted the bare field read `name`; the segment
      // name comes from the SegmentInfo.
      segment = si.name;
      this.readBufferSize = readBufferSize;
      this.dir = dir;
      
      boolean success = false;
      try
      {
        Directory dir0 = dir;
        if (si.getUseCompoundFile())
        {
          cfsReader = new CompoundFileReader(dir, segment + "." + "cfs", readBufferSize);
          dir0 = cfsReader;
        }
        cfsDir = dir0;
        
        fieldInfos = new FieldInfos(cfsDir, segment + "." + "fnm");
        
        this.termsIndexDivisor = termsIndexDivisor;
        TermInfosReader reader = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize, termsIndexDivisor);
        if (termsIndexDivisor == -1)
        {
          // -1 means "do not load the terms index"; it can be loaded later
          // via loadTermsIndex().
          tisNoIndex = reader;
        }
        else
        {
          tis = reader;
          tisNoIndex = null;
        }
        freqStream = cfsDir.openInput(segment + "." + "frq", readBufferSize);
        if (fieldInfos.hasProx()) {
          proxStream = cfsDir.openInput(segment + "." + "prx", readBufferSize);
        } else {
          proxStream = null;
        }
        success = true;
      }
      finally
      {
        // Close anything already opened if construction failed part-way.
        if (!success) {
          decRef();
        }
      }
      this.origInstance = origInstance;
    }
    
    synchronized TermVectorsReader getTermVectorsReaderOrig()
    {
      return termVectorsReaderOrig;
    }
    
    synchronized FieldsReader getFieldsReaderOrig()
    {
      return fieldsReaderOrig;
    }
    
    synchronized void incRef()
    {
      ref.incRef();
    }
    
    synchronized Directory getCFSReader()
    {
      return cfsReader;
    }
    
    synchronized TermInfosReader getTermsReader()
    {
      if (tis != null) {
        return tis;
      }
      return tisNoIndex;
    }
    
    synchronized boolean termsIndexIsLoaded()
    {
      return tis != null;
    }
    
    /** Loads the terms index on demand (it was skipped at construction). */
    synchronized void loadTermsIndex(SegmentInfo si, int termsIndexDivisor)
      throws IOException
    {
      if (tis == null)
      {
        // FIX: the decompiler duplicated this local declaration.
        Directory dir0;
        if (si.getUseCompoundFile())
        {
          if (cfsReader == null) {
            cfsReader = new CompoundFileReader(dir, segment + "." + "cfs", readBufferSize);
          }
          dir0 = cfsReader;
        }
        else
        {
          dir0 = dir;
        }
        tis = new TermInfosReader(dir0, segment, fieldInfos, readBufferSize, termsIndexDivisor);
      }
    }
    
    /** Closes every shared reader once the last reference is dropped. */
    synchronized void decRef()
      throws IOException
    {
      if (ref.decRef() == 0)
      {
        if (tis != null)
        {
          tis.close();
          
          tis = null;
        }
        if (tisNoIndex != null) {
          tisNoIndex.close();
        }
        if (freqStream != null) {
          freqStream.close();
        }
        if (proxStream != null) {
          proxStream.close();
        }
        if (termVectorsReaderOrig != null) {
          termVectorsReaderOrig.close();
        }
        if (fieldsReaderOrig != null) {
          fieldsReaderOrig.close();
        }
        if (cfsReader != null) {
          cfsReader.close();
        }
        if (storeCFSReader != null) {
          storeCFSReader.close();
        }
        if (origInstance != null) {
          // Drop any FieldCache entries keyed on the original reader.
          FieldCache.DEFAULT.purge(origInstance);
        }
      }
    }
    
    /** Opens the stored-fields and term-vector readers on demand. */
    synchronized void openDocStores(SegmentInfo si)
      throws IOException
    {
      // FIX: decompiler dropped the `si.` receiver on `name`.
      assert (si.name.equals(segment));
      if (fieldsReaderOrig == null)
      {
        // FIX: decompiler duplicated `storeDir` per branch; one declaration
        // suffices and the $assertionsDisabled expansions become asserts.
        Directory storeDir;
        if (si.getDocStoreOffset() != -1)
        {
          if (si.getDocStoreIsCompoundFile())
          {
            assert (storeCFSReader == null);
            storeCFSReader = new CompoundFileReader(dir, si.getDocStoreSegment() + "." + "cfx", readBufferSize);
            
            storeDir = storeCFSReader;
            assert (storeDir != null);
          }
          else
          {
            storeDir = dir;
            assert (storeDir != null);
          }
        }
        else if (si.getUseCompoundFile())
        {
          if (cfsReader == null) {
            cfsReader = new CompoundFileReader(dir, segment + "." + "cfs", readBufferSize);
          }
          storeDir = cfsReader;
          assert (storeDir != null);
        }
        else
        {
          storeDir = dir;
          assert (storeDir != null);
        }
        // FIX: decompiler duplicated this declaration too.
        String storesSegment;
        if (si.getDocStoreOffset() != -1) {
          storesSegment = si.getDocStoreSegment();
        } else {
          storesSegment = segment;
        }
        // FIX: `docCount` reads below lost their `si.` receiver.
        fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
        if ((si.getDocStoreOffset() == -1) && (fieldsReaderOrig.size() != si.docCount)) {
          throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);
        }
        if (fieldInfos.hasVectors()) {
          termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
        }
      }
    }
  }
  
  private class FieldsReaderLocal
    extends CloseableThreadLocal
  {
    FieldsReaderLocal(SegmentReader.1 x1)
    {
      this();
    }
    
    protected Object initialValue()
    {
      return core.getFieldsReaderOrig().clone();
    }
    
    private FieldsReaderLocal() {}
  }
  
  /**
   * Synchronized reference counter used to share deleted-docs bit vectors
   * and norm byte arrays across cloned readers. Starts at one because the
   * creator owns the initial reference.
   */
  static class Ref
  {
    private int refCount = 1;
    
    /** Current reference count. */
    public synchronized int refCount()
    {
      return refCount;
    }
    
    /** Adds a reference; the count must still be positive. */
    public synchronized int incRef()
    {
      assert (refCount > 0);
      return ++refCount;
    }
    
    /** Drops a reference; zero means the shared resource may be freed. */
    public synchronized int decRef()
    {
      assert (refCount > 0);
      return --refCount;
    }
    
    public String toString()
    {
      return "refcount: " + refCount;
    }
  }
  
  /**
   * Holds the norms for a single field with copy-on-write semantics, so
   * cloned readers share norm bytes until one of them modifies them.
   * Reference-counted; resources are released when the last referring
   * reader closes.
   */
  final class Norm
    implements Cloneable
  {
    // Live references to this Norm; guarded by `this`.
    private int refCount = 1;
    // Norm this instance was cloned from; non-null until our bytes load.
    private Norm origNorm;
    // Source stream for the norm bytes; null once cached (and on clones).
    private IndexInput in;
    // File offset of this field's norms within `in`.
    private long normSeek;
    // Shared ref-count for the `bytes` array across cloned Norms.
    private SegmentReader.Ref bytesRef;
    // Cached norm bytes, one per document; lazily loaded by bytes().
    private byte[] bytes;
    // True when the in-memory bytes differ from the on-disk file.
    private boolean dirty;
    // Field number these norms belong to.
    private int number;
    // Saved `dirty` flag for commit rollback.
    private boolean rollbackDirty;
    
    public Norm(IndexInput in, int number, long normSeek)
    {
      this.in = in;
      this.number = number;
      this.normSeek = normSeek;
    }
    
    public synchronized void incRef()
    {
      assert ((refCount > 0) && ((origNorm == null) || (origNorm.refCount > 0)));
      refCount += 1;
    }
    
    // Closes `in`, treating the shared single-norm-file stream specially.
    private void closeInput()
      throws IOException
    {
      if (in != null)
      {
        if (in != singleNormStream)
        {
          // Private stream: close it outright.
          in.close();
        }
        else if (singleNormRef.decRef() == 0)
        {
          // Shared .nrm stream: close only when the last Norm lets go.
          singleNormStream.close();
          singleNormStream = null;
        }
        in = null;
      }
    }
    
    public synchronized void decRef()
      throws IOException
    {
      assert ((refCount > 0) && ((origNorm == null) || (origNorm.refCount > 0)));
      if (--refCount == 0)
      {
        if (origNorm != null)
        {
          origNorm.decRef();
          origNorm = null;
        }
        else
        {
          closeInput();
        }
        if (bytes != null)
        {
          assert (bytesRef != null);
          bytesRef.decRef();
          bytes = null;
          bytesRef = null;
        }
        else
        {
          assert (bytesRef == null);
        }
      }
    }
    
    /** Copies this field's norms into bytesOut[offset..offset+len). */
    public synchronized void bytes(byte[] bytesOut, int offset, int len)
      throws IOException
    {
      assert ((refCount > 0) && ((origNorm == null) || (origNorm.refCount > 0)));
      if (bytes != null)
      {
        // Already cached in memory.
        assert (len <= maxDoc());
        System.arraycopy(bytes, 0, bytesOut, offset, len);
      }
      else if (origNorm != null)
      {
        // Defer to the Norm we were cloned from.
        origNorm.bytes(bytesOut, offset, len);
      }
      else
      {
        // Read straight from disk without caching.
        synchronized (in)
        {
          in.seek(normSeek);
          in.readBytes(bytesOut, offset, len, false);
        }
      }
    }
    
    /** Returns this field's norms, loading and caching them on first use. */
    public synchronized byte[] bytes()
      throws IOException
    {
      assert ((refCount > 0) && ((origNorm == null) || (origNorm.refCount > 0)));
      if (bytes == null)
      {
        assert (bytesRef == null);
        if (origNorm != null)
        {
          // Share the bytes loaded by our origin, then drop the origin link.
          bytes = origNorm.bytes();
          bytesRef = origNorm.bytesRef;
          bytesRef.incRef();
          
          origNorm.decRef();
          origNorm = null;
        }
        else
        {
          int count = maxDoc();
          bytes = new byte[count];
          
          assert (in != null);
          synchronized (in)
          {
            in.seek(normSeek);
            in.readBytes(bytes, 0, count, false);
          }
          bytesRef = new SegmentReader.Ref();
          closeInput();
        }
      }
      return bytes;
    }
    
    SegmentReader.Ref bytesRef()
    {
      return bytesRef;
    }
    
    /** Returns norm bytes safe to modify, copying them first if shared. */
    public synchronized byte[] copyOnWrite()
      throws IOException
    {
      assert ((refCount > 0) && ((origNorm == null) || (origNorm.refCount > 0)));
      bytes();
      assert (bytes != null);
      assert (bytesRef != null);
      if (bytesRef.refCount() > 1)
      {
        // Bytes are shared with another Norm: clone before writing.
        assert (refCount == 1);
        SegmentReader.Ref oldRef = bytesRef;
        bytes = cloneNormBytes(bytes);
        bytesRef = new SegmentReader.Ref();
        oldRef.decRef();
      }
      dirty = true;
      return bytes;
    }
    
    public synchronized Object clone()
    {
      assert ((refCount > 0) && ((origNorm == null) || (origNorm.refCount > 0)));
      Norm clone;
      try
      {
        clone = (Norm)super.clone();
      }
      catch (CloneNotSupportedException cnse)
      {
        throw new RuntimeException("unexpected CloneNotSupportedException", cnse);
      }
      // FIX: the decompiled version dropped the `clone.` receiver on the
      // writes below, mutating this Norm instead of initializing the clone.
      clone.refCount = 1;
      if (bytes != null)
      {
        assert (bytesRef != null);
        assert (origNorm == null);
        
        // The clone holds a reference to my bytes.
        clone.bytesRef.incRef();
      }
      else
      {
        assert (bytesRef == null);
        if (origNorm == null) {
          // I become the origin for the clone.
          clone.origNorm = this;
        }
        clone.origNorm.incRef();
      }
      // Only the origin Norm actually reads from `in`.
      clone.in = null;
      
      return clone;
    }
    
    /**
     * Writes these norms to a new-generation norm file and clears `dirty`.
     * A partially written file is deleted (best-effort) on failure.
     */
    public void reWrite(SegmentInfo si)
      throws IOException
    {
      assert ((refCount > 0) && ((origNorm == null) || (origNorm.refCount > 0))) : ("refCount=" + refCount + " origNorm=" + origNorm);
      
      si.advanceNormGen(number);
      String normFileName = si.getNormFileName(number);
      IndexOutput out = directory().createOutput(normFileName);
      boolean success = false;
      try
      {
        try
        {
          out.writeBytes(bytes, maxDoc());
        }
        finally
        {
          out.close();
        }
        success = true;
      }
      finally
      {
        if (!success) {
          try
          {
            // Best-effort removal of the partial file; errors are ignored.
            directory().deleteFile(normFileName);
          }
          catch (Throwable t) {}
        }
      }
      dirty = false;
    }
  }
  
  // Field name -> Norm; populated by openNorms().
  Map norms = new HashMap();
  // Concrete reader implementation classes, resolved in the static initializer.
  private static Class IMPL;
  private static Class READONLY_IMPL;
  // Cached fake norms (all encode 1.0) for fields without norms; see fakeNorms().
  private byte[] ones;
  
  static
  {
    // Resolve the concrete reader classes. Both are overridable through
    // system properties; a SecurityException while reading a property
    // falls back to the default implementation class.
    try
    {
      String implName = System.getProperty("org.apache.lucene.SegmentReader.class", SegmentReader.class.getName());
      IMPL = Class.forName(implName);
    }
    catch (ClassNotFoundException e)
    {
      throw new RuntimeException("cannot load SegmentReader class: " + e, e);
    }
    catch (SecurityException se)
    {
      try
      {
        IMPL = Class.forName(SegmentReader.class.getName());
      }
      catch (ClassNotFoundException e)
      {
        throw new RuntimeException("cannot load default SegmentReader class: " + e, e);
      }
    }
    try
    {
      String readOnlyImplName = System.getProperty("org.apache.lucene.ReadOnlySegmentReader.class", ReadOnlySegmentReader.class.getName());
      READONLY_IMPL = Class.forName(readOnlyImplName);
    }
    catch (ClassNotFoundException e)
    {
      throw new RuntimeException("cannot load ReadOnlySegmentReader class: " + e, e);
    }
    catch (SecurityException se)
    {
      try
      {
        READONLY_IMPL = Class.forName(ReadOnlySegmentReader.class.getName());
      }
      catch (ClassNotFoundException e)
      {
        throw new RuntimeException("cannot load default ReadOnlySegmentReader class: " + e, e);
      }
    }
  }
  
  /**
   * Opens a writable reader over the segment's own directory with default
   * buffer size and terms-index divisor.
   * @deprecated
   */
  public static SegmentReader get(SegmentInfo si)
    throws CorruptIndexException, IOException
  {
    // FIX: the decompiler dropped the `si.` receiver on `dir` in all three
    // overloads; the directory to open is the segment's own.
    return get(false, si.dir, si, 1024, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
  }
  
  public static SegmentReader get(boolean readOnly, SegmentInfo si, int termInfosIndexDivisor)
    throws CorruptIndexException, IOException
  {
    return get(readOnly, si.dir, si, 1024, true, termInfosIndexDivisor);
  }
  
  /**
   * @deprecated
   */
  static SegmentReader get(SegmentInfo si, int readBufferSize, boolean doOpenStores, int termInfosIndexDivisor)
    throws CorruptIndexException, IOException
  {
    return get(false, si.dir, si, readBufferSize, doOpenStores, termInfosIndexDivisor);
  }
  
  /**
   * Factory entry point: instantiates the configured SegmentReader class,
   * wires up its core readers, and loads deletions and norms. On any
   * failure the partially constructed reader is closed.
   */
  public static SegmentReader get(boolean readOnly, Directory dir, SegmentInfo si, int readBufferSize, boolean doOpenStores, int termInfosIndexDivisor)
    throws CorruptIndexException, IOException
  {
    SegmentReader instance;
    try
    {
      if (readOnly) {
        instance = (SegmentReader)READONLY_IMPL.newInstance();
      } else {
        instance = (SegmentReader)IMPL.newInstance();
      }
    }
    catch (Exception e)
    {
      throw new RuntimeException("cannot load SegmentReader class: " + e, e);
    }
    // FIX: the decompiler dropped the `instance.` receiver on these writes,
    // turning them into self-assignments of the parameters.
    instance.readOnly = readOnly;
    instance.si = si;
    instance.readBufferSize = readBufferSize;
    
    boolean success = false;
    try
    {
      instance.core = new CoreReaders(instance, dir, si, readBufferSize, termInfosIndexDivisor);
      if (doOpenStores) {
        instance.core.openDocStores(si);
      }
      instance.loadDeletedDocs();
      instance.openNorms(instance.core.cfsDir, readBufferSize);
      success = true;
    }
    finally
    {
      // Release everything opened so far if construction failed.
      if (!success) {
        instance.doClose();
      }
    }
    return instance;
  }
  
  /** Opens the stored-fields / term-vector readers for this segment. */
  void openDocStores()
    throws IOException
  {
    core.openDocStores(si);
  }
  
  /**
   * Sanity-checks the deletion counters; always returns true so it can be
   * invoked from inside an `assert` (the checks themselves only fire when
   * assertions are enabled).
   */
  private boolean checkDeletedCounts()
    throws IOException
  {
    int recomputed = deletedDocs.getRecomputedCount();
    
    assert (deletedDocs.count() == recomputed) : ("deleted count=" + deletedDocs.count() + " vs recomputed count=" + recomputed);
    
    assert (si.getDelCount() == recomputed) : ("delete count mismatch: info=" + si.getDelCount() + " vs BitVector=" + recomputed);
    
    assert (si.getDelCount() <= maxDoc()) : ("delete count mismatch: " + recomputed + ") exceeds max doc (" + maxDoc() + ") for segment " + si.name);
    
    return true;
  }
  
  /**
   * Loads the deleted-docs bit vector from the segment's .del file, if the
   * segment has deletions, and validates the counters.
   */
  private void loadDeletedDocs()
    throws IOException
  {
    if (hasDeletions(si))
    {
      deletedDocs = new BitVector(directory(), si.getDelFileName());
      deletedDocsRef = new Ref();
      // FIX: the decompiler expanded this assert into an explicit
      // $assertionsDisabled check, which is not valid source-level Java.
      assert checkDeletedCounts();
    }
    else
    {
      assert (si.getDelCount() == 0);
    }
  }
  
  /** Returns a private copy of the given norm bytes (copy-on-write helper). */
  protected byte[] cloneNormBytes(byte[] bytes)
  {
    byte[] copy = new byte[bytes.length];
    System.arraycopy(bytes, 0, copy, 0, bytes.length);
    return copy;
  }
  
  /** Returns a private copy of the deleted-docs vector (copy-on-write helper). */
  protected BitVector cloneDeletedDocs(BitVector bv)
  {
    return (BitVector)bv.clone();
  }
  
  public final synchronized Object clone()
  {
    try
    {
      // Clone preserving the current read-only mode.
      return clone(readOnly);
    }
    catch (Exception ex)
    {
      throw new RuntimeException(ex);
    }
  }
  
  /** Clones this reader, optionally switching it to read-only mode. */
  public final synchronized IndexReader clone(boolean openReadOnly)
    throws CorruptIndexException, IOException
  {
    // A clone is just a reopen of the same SegmentInfo with doClone=true.
    return reopenSegment(si, true, openReadOnly);
  }
  
  /**
   * Reopens (or clones, when doClone is true) this reader against the given
   * SegmentInfo, sharing core readers, deletions and norms wherever they
   * are unchanged. Returns `this` when nothing changed and no clone or
   * mode switch was requested.
   */
  synchronized SegmentReader reopenSegment(SegmentInfo si, boolean doClone, boolean openReadOnly)
    throws CorruptIndexException, IOException
  {
    // Deletions are current if both infos agree on presence and file name.
    boolean deletionsUpToDate = (this.si.hasDeletions() == si.hasDeletions()) && ((!si.hasDeletions()) || (this.si.getDelFileName().equals(si.getDelFileName())));
    
    boolean normsUpToDate = true;
    
    boolean[] fieldNormsChanged = new boolean[core.fieldInfos.size()];
    int fieldCount = core.fieldInfos.size();
    for (int i = 0; i < fieldCount; i++) {
      if (!this.si.getNormFileName(i).equals(si.getNormFileName(i)))
      {
        normsUpToDate = false;
        fieldNormsChanged[i] = true;
      }
    }
    if ((normsUpToDate) && (deletionsUpToDate) && (!doClone) && (openReadOnly) && (readOnly)) {
      return this;
    }
    assert ((!doClone) || ((normsUpToDate) && (deletionsUpToDate)));
    SegmentReader clone;
    try
    {
      if (openReadOnly) {
        clone = (SegmentReader)READONLY_IMPL.newInstance();
      } else {
        clone = (SegmentReader)IMPL.newInstance();
      }
    }
    catch (Exception e)
    {
      throw new RuntimeException("cannot load SegmentReader class: " + e, e);
    }
    boolean success = false;
    try
    {
      core.incRef();
      // FIX: the decompiler dropped the `clone.` receiver on the writes
      // below, turning them into self-assignments on this reader.
      clone.core = core;
      clone.readOnly = openReadOnly;
      clone.si = si;
      clone.readBufferSize = readBufferSize;
      if ((!openReadOnly) && (hasChanges))
      {
        // Pending changes transfer to the new reader.
        clone.pendingDeleteCount = pendingDeleteCount;
        clone.deletedDocsDirty = deletedDocsDirty;
        clone.normsDirty = normsDirty;
        clone.hasChanges = hasChanges;
        hasChanges = false;
      }
      if (doClone)
      {
        if (deletedDocs != null)
        {
          deletedDocsRef.incRef();
          clone.deletedDocs = deletedDocs;
          clone.deletedDocsRef = deletedDocsRef;
        }
      }
      else if (!deletionsUpToDate)
      {
        // Deletions changed on disk: load them fresh.
        assert (clone.deletedDocs == null);
        clone.loadDeletedDocs();
      }
      else if (deletedDocs != null)
      {
        deletedDocsRef.incRef();
        clone.deletedDocs = deletedDocs;
        clone.deletedDocsRef = deletedDocsRef;
      }
      clone.setDisableFakeNorms(getDisableFakeNorms());
      clone.norms = new HashMap();
      for (int i = 0; i < fieldNormsChanged.length; i++) {
        // Share norms that did not change (or everything when cloning).
        // FIX: `fieldNormsChanged[i] == 0` compared a boolean to an int.
        if ((doClone) || (!fieldNormsChanged[i]))
        {
          String curField = core.fieldInfos.fieldInfo(i).name;
          Norm norm = (Norm)this.norms.get(curField);
          if (norm != null) {
            clone.norms.put(curField, norm.clone());
          }
        }
      }
      // Open anew any norms whose files changed.
      clone.openNorms(si.getUseCompoundFile() ? core.getCFSReader() : directory(), readBufferSize);
      
      success = true;
    }
    finally
    {
      if (!success) {
        // Undo the incRefs done above and release any opened resources.
        clone.decRef();
      }
    }
    return clone;
  }
  
  /**
   * Commits pending changes with no user data.
   * @deprecated
   */
  protected void doCommit()
    throws IOException
  {
    doCommit(null);
  }
  
  /** Commits pending deletions/norms, rolling back on any failure. */
  protected void doCommit(Map commitUserData)
    throws IOException
  {
    if (!hasChanges) {
      return; // nothing to write
    }
    startCommit();
    boolean success = false;
    try
    {
      commitChanges(commitUserData);
      success = true;
    }
    finally
    {
      if (!success) {
        rollbackCommit();
      }
    }
  }
  
  /**
   * Writes pending deletions to a new .del file and rewrites any dirty
   * norms, then clears all dirty flags.
   */
  private void commitChanges(Map commitUserData)
    throws IOException
  {
    if (deletedDocsDirty)
    {
      // New generation so the deletions get a fresh file name.
      si.advanceDelGen();
      
      String delFileName = si.getDelFileName();
      boolean success = false;
      try
      {
        deletedDocs.write(directory(), delFileName);
        success = true;
      }
      finally
      {
        if (!success) {
          try
          {
            // Best-effort cleanup of the partial file.
            directory().deleteFile(delFileName);
          }
          catch (Throwable t) {}
        }
      }
      si.setDelCount(si.getDelCount() + pendingDeleteCount);
      pendingDeleteCount = 0;
      assert (deletedDocs.count() == si.getDelCount()) : ("delete count mismatch during commit: info=" + si.getDelCount() + " vs BitVector=" + deletedDocs.count());
    }
    else
    {
      assert (pendingDeleteCount == 0);
    }
    if (normsDirty)
    {
      si.setNumFields(core.fieldInfos.size());
      Iterator it = norms.values().iterator();
      while (it.hasNext())
      {
        Norm norm = (Norm)it.next();
        // FIX: the decompiler dropped the `norm.` receiver; the bare field
        // read `dirty` does not exist on SegmentReader.
        if (norm.dirty) {
          norm.reWrite(si);
        }
      }
    }
    deletedDocsDirty = false;
    normsDirty = false;
    hasChanges = false;
  }
  
  /** Returns this thread's private FieldsReader clone. */
  FieldsReader getFieldsReader()
  {
    return (FieldsReader)fieldsReaderLocal.get();
  }
  
  /** Releases thread-locals, deletions, norms and the shared core readers. */
  protected void doClose()
    throws IOException
  {
    termVectorsLocal.close();
    fieldsReaderLocal.close();
    if (deletedDocs != null)
    {
      deletedDocsRef.decRef();
      deletedDocs = null;
    }
    for (Iterator it = norms.values().iterator(); it.hasNext();) {
      ((Norm)it.next()).decRef();
    }
    if (core != null) {
      core.decRef();
    }
  }
  
  /** Delegates to the SegmentInfo, which tracks deletion state on disk. */
  static boolean hasDeletions(SegmentInfo si)
    throws IOException
  {
    return si.hasDeletions();
  }
  
  /** A non-null deletedDocs vector means at least one doc is deleted. */
  public boolean hasDeletions()
  {
    return deletedDocs != null;
  }
  
  static boolean usesCompoundFile(SegmentInfo si)
    throws IOException
  {
    return si.getUseCompoundFile();
  }
  
  static boolean hasSeparateNorms(SegmentInfo si)
    throws IOException
  {
    return si.hasSeparateNorms();
  }
  
  /** Marks a document deleted, cloning the bit vector if it is shared. */
  protected void doDelete(int docNum)
  {
    if (deletedDocs == null)
    {
      deletedDocs = new BitVector(maxDoc());
      deletedDocsRef = new Ref();
    }
    // Copy-on-write: never mutate a vector another reader is also using.
    if (deletedDocsRef.refCount() > 1)
    {
      Ref previous = deletedDocsRef;
      deletedDocs = cloneDeletedDocs(deletedDocs);
      deletedDocsRef = new Ref();
      previous.decRef();
    }
    deletedDocsDirty = true;
    boolean alreadyDeleted = deletedDocs.getAndSet(docNum);
    if (!alreadyDeleted) {
      pendingDeleteCount += 1;
    }
  }
  
  /** Discards all deletions and resets the SegmentInfo's delete state. */
  protected void doUndeleteAll()
  {
    deletedDocsDirty = false;
    if (deletedDocs == null)
    {
      // Nothing was deleted; just check the invariants.
      assert (deletedDocsRef == null);
      assert (pendingDeleteCount == 0);
      return;
    }
    assert (deletedDocsRef != null);
    deletedDocsRef.decRef();
    deletedDocs = null;
    deletedDocsRef = null;
    pendingDeleteCount = 0;
    si.clearDelGen();
    si.setDelCount(0);
  }
  
  /** Returns a mutable snapshot of this segment's file names. */
  List files()
    throws IOException
  {
    return new ArrayList(si.files());
  }
  
  public TermEnum terms()
  {
    ensureOpen();
    return core.getTermsReader().terms();
  }
  
  public TermEnum terms(Term t)
    throws IOException
  {
    ensureOpen();
    return core.getTermsReader().terms(t);
  }
  
  FieldInfos fieldInfos()
  {
    return core.fieldInfos;
  }
  
  /** Loads the stored fields of document n via this thread's FieldsReader. */
  public Document document(int n, FieldSelector fieldSelector)
    throws CorruptIndexException, IOException
  {
    ensureOpen();
    FieldsReader reader = getFieldsReader();
    return reader.doc(n, fieldSelector);
  }
  
  public synchronized boolean isDeleted(int n)
  {
    if (deletedDocs == null) {
      return false;
    }
    return deletedDocs.get(n);
  }
  
  /** A null term enumerates every non-deleted document. */
  public TermDocs termDocs(Term term)
    throws IOException
  {
    if (term == null) {
      return new AllTermDocs(this);
    }
    return super.termDocs(term);
  }
  
  public TermDocs termDocs()
    throws IOException
  {
    ensureOpen();
    return new SegmentTermDocs(this);
  }
  
  public TermPositions termPositions()
    throws IOException
  {
    ensureOpen();
    return new SegmentTermPositions(this);
  }
  
  /** Returns the number of documents containing term t, 0 if absent. */
  public int docFreq(Term t)
    throws IOException
  {
    ensureOpen();
    TermInfo ti = core.getTermsReader().get(t);
    if (ti != null) {
      // FIX: the decompiler dropped the `ti.` receiver on this field read.
      return ti.docFreq;
    }
    return 0;
  }
  
  /** Live document count: maxDoc minus deletions. */
  public int numDocs()
  {
    int live = maxDoc();
    if (deletedDocs != null) {
      live -= deletedDocs.count();
    }
    return live;
  }
  
  public int maxDoc()
  {
    return si.docCount;
  }
  
  /**
   * Returns the names of all fields matching the given option.
   * FIX: the decompiler dropped the `fi.` receiver on every FieldInfo flag
   * and on `name`; restored throughout.
   */
  public Collection getFieldNames(IndexReader.FieldOption fieldOption)
  {
    ensureOpen();
    
    Set fieldSet = new HashSet();
    for (int i = 0; i < core.fieldInfos.size(); i++)
    {
      FieldInfo fi = core.fieldInfos.fieldInfo(i);
      if (fieldOption == IndexReader.FieldOption.ALL) {
        fieldSet.add(fi.name);
      } else if ((!fi.isIndexed) && (fieldOption == IndexReader.FieldOption.UNINDEXED)) {
        fieldSet.add(fi.name);
      } else if ((fi.omitTermFreqAndPositions) && (fieldOption == IndexReader.FieldOption.OMIT_TERM_FREQ_AND_POSITIONS)) {
        fieldSet.add(fi.name);
      } else if ((fi.storePayloads) && (fieldOption == IndexReader.FieldOption.STORES_PAYLOADS)) {
        fieldSet.add(fi.name);
      } else if ((fi.isIndexed) && (fieldOption == IndexReader.FieldOption.INDEXED)) {
        fieldSet.add(fi.name);
      } else if ((fi.isIndexed) && (!fi.storeTermVector) && (fieldOption == IndexReader.FieldOption.INDEXED_NO_TERMVECTOR)) {
        fieldSet.add(fi.name);
      } else if ((fi.storeTermVector == true) && (!fi.storePositionWithTermVector) && (!fi.storeOffsetWithTermVector) && (fieldOption == IndexReader.FieldOption.TERMVECTOR)) {
        fieldSet.add(fi.name);
      } else if ((fi.isIndexed) && (fi.storeTermVector) && (fieldOption == IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR)) {
        fieldSet.add(fi.name);
      } else if ((fi.storePositionWithTermVector) && (!fi.storeOffsetWithTermVector) && (fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_POSITION)) {
        fieldSet.add(fi.name);
      } else if ((fi.storeOffsetWithTermVector) && (!fi.storePositionWithTermVector) && (fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET)) {
        fieldSet.add(fi.name);
      } else if ((fi.storeOffsetWithTermVector) && (fi.storePositionWithTermVector) && (fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET)) {
        fieldSet.add(fi.name);
      }
    }
    return fieldSet;
  }
  
  /** Norms exist for a field iff openNorms() registered it. */
  public synchronized boolean hasNorms(String field)
  {
    ensureOpen();
    return norms.containsKey(field);
  }
  
  /** Builds an array where every doc carries the encoded norm for 1.0. */
  static byte[] createFakeNorms(int size)
  {
    byte[] fake = new byte[size];
    Arrays.fill(fake, DefaultSimilarity.encodeNorm(1.0F));
    return fake;
  }
  
  private byte[] fakeNorms()
  {
    assert (!getDisableFakeNorms());
    // Lazily build and cache one shared fake-norm array.
    if (ones == null) {
      ones = createFakeNorms(maxDoc());
    }
    return ones;
  }
  
  /** Returns the norms for a field, or null if it has none. */
  protected synchronized byte[] getNorms(String field)
    throws IOException
  {
    Norm norm = (Norm)norms.get(field);
    return norm == null ? null : norm.bytes();
  }
  
  /** Like getNorms, but substitutes fake norms unless they are disabled. */
  public synchronized byte[] norms(String field)
    throws IOException
  {
    ensureOpen();
    byte[] result = getNorms(field);
    if ((result == null) && (!getDisableFakeNorms())) {
      result = fakeNorms();
    }
    return result;
  }
  
  protected void doSetNorm(int doc, String field, byte value)
    throws IOException
  {
    Norm norm = (Norm)norms.get(field);
    if (norm == null) {
      return; // field has no norms
    }
    normsDirty = true;
    // Copy-on-write so shared norm arrays are never mutated in place.
    norm.copyOnWrite()[doc] = value;
  }
  
  /** Copies a field's norms into bytes[offset..]; fills 1.0 if absent. */
  public synchronized void norms(String field, byte[] bytes, int offset)
    throws IOException
  {
    ensureOpen();
    Norm norm = (Norm)norms.get(field);
    if (norm == null)
    {
      Arrays.fill(bytes, offset, bytes.length, DefaultSimilarity.encodeNorm(1.0F));
      return;
    }
    norm.bytes(bytes, offset, maxDoc());
  }
  
  /**
   * Opens the norms of every indexed field that has them, reading either
   * from the shared single .nrm file or from per-field norm files.
   * FIX: the decompiler dropped the `fi.` receiver on the FieldInfo reads
   * and duplicated the `normSeek` declaration; both restored.
   */
  private void openNorms(Directory cfsDir, int readBufferSize)
    throws IOException
  {
    // Skip the (currently unused) norms-file header.
    long nextNormSeek = SegmentMerger.NORMS_HEADER.length;
    int maxDoc = maxDoc();
    for (int i = 0; i < core.fieldInfos.size(); i++)
    {
      FieldInfo fi = core.fieldInfos.fieldInfo(i);
      // Skip fields whose Norm was carried over by reopenSegment().
      if (!norms.containsKey(fi.name)) {
        if ((fi.isIndexed) && (!fi.omitNorms))
        {
          Directory d = directory();
          String fileName = si.getNormFileName(fi.number);
          if (!si.hasSeparateNorms(fi.number)) {
            d = cfsDir;
          }
          // A ".nrm" suffix means all fields share one norms file.
          boolean singleNormFile = fileName.endsWith(".nrm");
          IndexInput normInput = null;
          long normSeek;
          if (singleNormFile)
          {
            normSeek = nextNormSeek;
            if (singleNormStream == null)
            {
              singleNormStream = d.openInput(fileName, readBufferSize);
              singleNormRef = new Ref();
            }
            else
            {
              singleNormRef.incRef();
            }
            // All norms in the single file share one IndexInput; reads
            // always happen under synchronization.
            normInput = singleNormStream;
          }
          else
          {
            normSeek = 0L;
            normInput = d.openInput(fileName);
          }
          norms.put(fi.name, new Norm(normInput, fi.number, normSeek));
          // Advance even when norms are stored in separate files.
          nextNormSeek += maxDoc;
        }
      }
    }
  }
  
  // True once the shared core has loaded the term dictionary index.
  boolean termsIndexLoaded()
  {
    return core.termsIndexIsLoaded();
  }
  
  // Loads the term dictionary index on the shared core, sampling every
  // termsIndexDivisor'th index entry.
  void loadTermsIndex(int termsIndexDivisor)
    throws IOException
  {
    core.loadTermsIndex(si, termsIndexDivisor);
  }
  
  /**
   * Returns true only when every norms resource is released: the shared
   * single norms stream is closed and no per-field Norm still holds refs.
   * Bug fix: the decompiled code tested the unqualified {@code refCount}
   * (the reader's own field) instead of {@code norm.refCount}.
   */
  boolean normsClosed()
  {
    if (singleNormStream != null) {
      return false;
    }
    Iterator it = norms.values().iterator();
    while (it.hasNext())
    {
      Norm norm = (Norm)it.next();
      if (norm.refCount > 0) {
        return false;
      }
    }
    return true;
  }
  
  /**
   * Returns true when the Norm for {@code field} holds no references.
   * Bug fix: must test {@code norm.refCount}, not the reader's own
   * unqualified {@code refCount} (decompiler artifact).
   */
  boolean normsClosed(String field)
  {
    Norm norm = (Norm)norms.get(field);
    return norm.refCount == 0;
  }
  
  /**
   * Returns a per-thread TermVectorsReader, cloning the core's original
   * reader on first use for this thread, or null if the segment has no
   * term vectors (or the clone fails).
   */
  TermVectorsReader getTermVectorsReader()
  {
    TermVectorsReader reader = (TermVectorsReader)termVectorsLocal.get();
    if (reader != null) {
      return reader;
    }
    final TermVectorsReader orig = core.getTermVectorsReaderOrig();
    if (orig == null) {
      return null;
    }
    try
    {
      reader = (TermVectorsReader)orig.clone();
    }
    catch (CloneNotSupportedException cnse)
    {
      return null;
    }
    termVectorsLocal.set(reader);
    return reader;
  }
  
  // Returns the core's original (unshared, un-cloned) term vectors reader.
  TermVectorsReader getTermVectorsReaderOrig()
  {
    return core.getTermVectorsReaderOrig();
  }
  
  /**
   * Returns the term frequency vector stored for one field of one
   * document, or null if the field does not store term vectors or the
   * segment has no term vectors reader.
   * Bug fix: {@code storeTermVector} must be read from {@code fi}
   * (decompiler dropped the qualifier).
   */
  public TermFreqVector getTermFreqVector(int docNumber, String field)
    throws IOException
  {
    ensureOpen();
    FieldInfo fi = core.fieldInfos.fieldInfo(field);
    if ((fi == null) || (!fi.storeTermVector)) {
      return null;
    }
    TermVectorsReader termVectorsReader = getTermVectorsReader();
    if (termVectorsReader == null) {
      return null;
    }
    return termVectorsReader.get(docNumber, field);
  }
  
  /**
   * Streams the term frequency vector for one field of one document into
   * {@code mapper}; a no-op when the field stores no vectors or no
   * term vectors reader exists.
   * Bug fix: {@code storeTermVector} must be read from {@code fi}
   * (decompiler dropped the qualifier).
   */
  public void getTermFreqVector(int docNumber, String field, TermVectorMapper mapper)
    throws IOException
  {
    ensureOpen();
    FieldInfo fi = core.fieldInfos.fieldInfo(field);
    if ((fi == null) || (!fi.storeTermVector)) {
      return;
    }
    TermVectorsReader termVectorsReader = getTermVectorsReader();
    if (termVectorsReader == null) {
      return;
    }
    termVectorsReader.get(docNumber, field, mapper);
  }
  
  /**
   * Streams all term vectors of one document into {@code mapper};
   * a no-op when this segment has no term vectors reader.
   */
  public void getTermFreqVector(int docNumber, TermVectorMapper mapper)
    throws IOException
  {
    ensureOpen();
    final TermVectorsReader reader = getTermVectorsReader();
    if (reader != null) {
      reader.get(docNumber, mapper);
    }
  }
  
  /**
   * Returns all term frequency vectors stored for a document, or null
   * when this segment has no term vectors reader.
   */
  public TermFreqVector[] getTermFreqVectors(int docNumber)
    throws IOException
  {
    ensureOpen();
    final TermVectorsReader reader = getTermVectorsReader();
    return reader == null ? null : reader.get(docNumber);
  }
  
  // Name of the segment this reader reads (from the shared core).
  public String getSegmentName()
  {
    return core.segment;
  }
  
  // Current SegmentInfo describing this segment's files and generations.
  SegmentInfo getSegmentInfo()
  {
    return si;
  }
  
  // Replaces the SegmentInfo, e.g. after a commit advances generations.
  void setSegmentInfo(SegmentInfo info)
  {
    si = info;
  }
  
  /**
   * Snapshots all commit-related state so {@link #rollbackCommit} can
   * restore it if the commit fails.
   * Bug fix: the per-Norm snapshot must be {@code norm.rollbackDirty =
   * norm.dirty}; the decompiled unqualified assignment touched the wrong
   * (enclosing) fields and left every Norm's snapshot untaken.
   */
  void startCommit()
  {
    rollbackSegmentInfo = ((SegmentInfo)si.clone());
    rollbackHasChanges = hasChanges;
    rollbackDeletedDocsDirty = deletedDocsDirty;
    rollbackNormsDirty = normsDirty;
    rollbackPendingDeleteCount = pendingDeleteCount;
    Iterator it = norms.values().iterator();
    while (it.hasNext())
    {
      Norm norm = (Norm)it.next();
      norm.rollbackDirty = norm.dirty;
    }
  }
  
  /**
   * Restores the state captured by {@link #startCommit} after a failed
   * commit.
   * Bug fix: must restore {@code norm.dirty = norm.rollbackDirty}; the
   * decompiled unqualified assignment hit the wrong (enclosing) fields.
   */
  void rollbackCommit()
  {
    si.reset(rollbackSegmentInfo);
    hasChanges = rollbackHasChanges;
    deletedDocsDirty = rollbackDeletedDocsDirty;
    normsDirty = rollbackNormsDirty;
    pendingDeleteCount = rollbackPendingDeleteCount;
    Iterator it = norms.values().iterator();
    while (it.hasNext())
    {
      Norm norm = (Norm)it.next();
      norm.dirty = norm.rollbackDirty;
    }
  }
  
  // Directory holding this segment's files (from the shared core).
  public Directory directory()
  {
    return core.dir;
  }
  
  // Cache key is the core's freqStream object; NOTE(review): presumably so
  // reopened readers sharing the same core share FieldCache entries — confirm.
  public final Object getFieldCacheKey()
  {
    return core.freqStream;
  }
  
  // Key identifying this reader's deletions; changes whenever the
  // deletedDocs BitVector instance changes.
  public Object getDeletesCacheKey()
  {
    return deletedDocs;
  }
  
  // Number of unique terms in this segment, per the terms dictionary.
  public long getUniqueTermCount()
  {
    return core.getTermsReader().size();
  }
  
  // Test/utility helper: opens dir and unwraps its single SegmentReader.
  static SegmentReader getOnlySegmentReader(Directory dir)
    throws IOException
  {
    return getOnlySegmentReader(IndexReader.open(dir));
  }
  
  /**
   * Unwraps {@code reader} to its single SegmentReader: returns it
   * directly, or the sole sub-reader of a one-segment DirectoryReader.
   *
   * @throws IllegalArgumentException if the reader is neither, or the
   *         DirectoryReader spans more than one segment
   */
  static SegmentReader getOnlySegmentReader(IndexReader reader)
  {
    if ((reader instanceof SegmentReader)) {
      return (SegmentReader)reader;
    }
    if (!(reader instanceof DirectoryReader)) {
      throw new IllegalArgumentException(reader + " is not a SegmentReader or a single-segment DirectoryReader");
    }
    final IndexReader[] subReaders = reader.getSequentialSubReaders();
    if (subReaders.length != 1) {
      throw new IllegalArgumentException(reader + " has " + subReaders.length + " segments instead of exactly one");
    }
    return (SegmentReader)subReaders[0];
  }
  
  // Divisor used when sampling the terms index (from the shared core).
  public int getTermInfosIndexDivisor()
  {
    return core.termsIndexDivisor;
  }
}

/* Location:
 * Qualified Name:     org.apache.lucene.index.SegmentReader
 * Java Class Version: 1.4 (48.0)
 * JD-Core Version:    0.7.1
 */
package org.apache.lucene.index;

import java.io.IOException;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BitVector;

class SegmentTermDocs
  implements TermDocs
{
  protected SegmentReader parent;        // reader this TermDocs was created from
  protected IndexInput freqStream;       // this instance's clone of the core freq stream
  protected int count;                   // docs consumed so far for the current term
  protected int df;                      // doc freq of the current term (0 if unseeked)
  protected BitVector deletedDocs;       // snapshot of the parent's deletions
  int doc = 0;                           // current document id
  int freq;                              // term frequency within the current doc
  private int skipInterval;              // skip-list interval, from the terms reader
  private int maxSkipLevels;             // max skip-list levels, from the terms reader
  private DefaultSkipListReader skipListReader; // NOTE(review): not set in ctor — presumably lazily created; confirm in skipTo
  private long freqBasePointer;          // start of the current term's freq data
  private long proxBasePointer;          // start of the current term's prox data
  private long skipPointer;              // freqBasePointer + skipOffset of current term
  private boolean haveSkipped;           // true once the skip reader was positioned
  protected boolean currentFieldStoresPayloads;           // from current term's FieldInfo
  protected boolean currentFieldOmitTermFreqAndPositions; // from current term's FieldInfo
  
  /**
   * Creates a TermDocs enumerator over {@code parent}'s segment.
   * Bug fix: the decompiled body lost the {@code parent.} qualifiers,
   * turning {@code deletedDocs = parent.deletedDocs} into a self-assignment
   * and leaving {@code core} unresolved.
   */
  protected SegmentTermDocs(SegmentReader parent)
  {
    this.parent = parent;
    // private clone so concurrent enumerators don't share a file position
    freqStream = ((IndexInput)parent.core.freqStream.clone());
    synchronized (parent)
    {
      // snapshot deletions under the parent's lock
      deletedDocs = parent.deletedDocs;
    }
    skipInterval = parent.core.getTermsReader().getSkipInterval();
    maxSkipLevels = parent.core.getTermsReader().getMaxSkipLevels();
  }
  
  // Positions this enumerator at term: looks the term up in the terms
  // dictionary (ti is null if absent) and delegates to seek(TermInfo, Term).
  public void seek(Term term)
    throws IOException
  {
    TermInfo ti = parent.core.getTermsReader().get(term);
    seek(ti, term);
  }
  
  /**
   * Positions this enumerator at {@code termEnum}'s current term.
   * Bug fix: the decompiler duplicated the {@code ti}/{@code term}
   * declarations (a compile error) and dropped the qualifier on
   * {@code fieldInfos}.
   */
  public void seek(TermEnum termEnum)
    throws IOException
  {
    TermInfo ti;
    Term term;
    // Fast path: a SegmentTermEnum from this same segment already carries
    // the TermInfo, so no dictionary lookup is needed. The fieldInfos
    // identity check verifies it belongs to this segment.
    if (((termEnum instanceof SegmentTermEnum)) && (((SegmentTermEnum)termEnum).fieldInfos == parent.core.fieldInfos))
    {
      SegmentTermEnum segmentTermEnum = (SegmentTermEnum)termEnum;
      term = segmentTermEnum.term();
      ti = segmentTermEnum.termInfo();
    }
    else
    {
      term = termEnum.term();
      ti = parent.core.getTermsReader().get(term);
    }
    seek(ti, term);
  }
  
  /**
   * Positions the freq stream at the posting list described by {@code ti}.
   * A null {@code ti} (term absent from segment) yields df == 0.
   * Bug fix: the decompiler stripped the {@code term.}/{@code fi.}/{@code ti.}
   * qualifiers from field(), omitTermFreqAndPositions, storePayloads,
   * docFreq, freqPointer, proxPointer and skipOffset.
   */
  void seek(TermInfo ti, Term term)
    throws IOException
  {
    count = 0;
    FieldInfo fi = parent.core.fieldInfos.fieldInfo(term.field());
    currentFieldOmitTermFreqAndPositions = (fi != null ? fi.omitTermFreqAndPositions : false);
    currentFieldStoresPayloads = (fi != null ? fi.storePayloads : false);
    if (ti == null)
    {
      df = 0;
    }
    else
    {
      df = ti.docFreq;
      doc = 0;
      freqBasePointer = ti.freqPointer;
      proxBasePointer = ti.proxPointer;
      skipPointer = (freqBasePointer + ti.skipOffset);
      freqStream.seek(freqBasePointer);
      haveSkipped = false;
    }
  }
  
  // Closes this enumerator's private freq stream clone and, if one was
  // created, its skip-list reader. The parent reader stays open.
  public void close()
    throws IOException
  {
    freqStream.close();
    if (skipListReader != null) {
      skipListReader.close();
    }
  }
  
  // Current document id (valid after a successful next()/skipTo()).
  public final int doc()
  {
    return doc;
  }
  
  // Frequency of the current term within the current document.
  public final int freq()
  {
    return freq;
  }
  
  protected void skippingDoc()
    throws IOException
  {
    /* [decompiler/web-page residue removed here: the remainder of
     * skippingDoc() and the rest of SegmentTermDocs were truncated in the
     * decompiled output; recover them from the Lucene 2.9.4 sources.] */