/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using Document = Lucene.Net.Documents.Document;
using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
namespace Lucene.Net.Search
{
/// <summary>
/// Caution: Iterate only over the hits needed. Iterating over all
/// hits is generally not desirable and may be the source of
/// performance issues. If you need to iterate over many or all hits, consider
/// using the search method that takes a {@link HitCollector}.
/// Note: Deleting matching documents concurrently with traversing
/// the hits, might, when deleting hits that were not yet retrieved, decrease
/// {@link #Length()}. In such case,
/// {@link java.util.ConcurrentModificationException ConcurrentModificationException}
/// is thrown when accessing hit n &gt;= current_{@link #Length()}
/// (but n &lt; {@link #Length()}_at_start).
/// </summary>
/// <summary>
/// Example usage:
/// <code>
/// TopDocCollector collector = new TopDocCollector(hitsPerPage);
/// searcher.search(query, collector);
/// ScoreDoc[] hits = collector.topDocs().scoreDocs;
/// for (int i = 0; i &lt; hits.length; i++) {
///     int docId = hits[i].doc;
///     Document d = searcher.doc(docId);
///     // do something with current hit
///     ...
/// }
/// </code>
/// </summary>
[System.Obsolete("Hits will be removed in Lucene 3.0. Instead use TopDocCollector and TopDocs.")]
public sealed class Hits
{
    private Weight weight;
    private Searcher searcher;
    private Filter filter = null;
    private Sort sort = null;

    private int length; // the total number of hits
    // cache of hits retrieved so far (synchronized because hits may be shared across threads)
    private System.Collections.ArrayList hitDocs = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));

    private HitDoc first; // head of LRU cache
    private HitDoc last;  // tail of LRU cache
    private int numDocs = 0;   // number cached
    private int maxDocs = 200; // max to cache

    private int nDeletions;     // # deleted docs in the index.
    private int lengthAtStart;  // this is the number apps usually count on (although deletions can bring it down).
    private int nDeletedHits = 0; // # of already collected hits that were meanwhile deleted.

    public /*internal*/ bool debugCheckedForDeletions = false; // for test purposes.

    internal Hits(Searcher s, Query q, Filter f)
    {
        weight = q.Weight(s);
        searcher = s;
        filter = f;
        nDeletions = CountDeletions(s);
        GetMoreDocs(50); // retrieve 100 initially
        lengthAtStart = length;
    }

    internal Hits(Searcher s, Query q, Filter f, Sort o)
    {
        weight = q.Weight(s);
        searcher = s;
        filter = f;
        sort = o;
        nDeletions = CountDeletions(s);
        GetMoreDocs(50); // retrieve 100 initially
        lengthAtStart = length;
    }

    /// <summary>Counts deletions in the index; returns -1 if unknown
    /// (only an IndexSearcher exposes the underlying reader here).</summary>
    private int CountDeletions(Searcher s)
    {
        int cnt = -1;
        if (s is IndexSearcher)
        {
            cnt = s.MaxDoc() - ((IndexSearcher) s).GetIndexReader().NumDocs();
        }
        return cnt;
    }

    /// Tries to add new documents to hitDocs. Ensures that the hit numbered
/// <code>min</code> has been retrieved.
/// Documents are cached, so that repeated requests for the same element may
/// return the same Document object.
///
/// Caution: Iterate only over the hits needed. Iterating over all
/// hits is generally not desirable and may be the source of
/// performance issues. If you need to iterate over many or all hits, consider
/// using a search method that takes a {@link HitCollector}.
///
///