@@ -87,8 +87,6 @@ public EntrySupplier finishAndSort() throws IOException {
ByteSequencesReader reader =
new ByteSequencesReader(tempDir.openChecksumInput(sortedFile), sortedFile);
return new EntrySupplier() {
boolean success = false;

@Override
public int wordCount() {
return wordCount;
@@ -98,7 +96,6 @@ public int wordCount() {
public String next() throws IOException {
BytesRef scratch = reader.next();
if (scratch == null) {
success = true;
return null;
}
return scratch.utf8ToString();
@@ -107,11 +104,7 @@ public String next() throws IOException {
@Override
public void close() throws IOException {
reader.close();
if (success) {
tempDir.deleteFile(sortedFile);
} else {
IOUtils.deleteFilesIgnoringExceptions(tempDir, sortedFile);
}
tempDir.deleteFile(sortedFile);
Contributor Author: Exceptions thrown by this should be handled by the method calling this close() method
}
};
}
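The same refactoring repeats across the files below: a `boolean success` flag that gated cleanup in a `finally` block is replaced by a `catch (Throwable t)` that performs the cleanup and rethrows. A minimal sketch of the two shapes, using hypothetical `touch`/resource names that are not part of this PR:

import java.io.Closeable;
import java.io.IOException;

class SuccessFlagVersusCatchThrowable {

  // Old shape: track success and undo the side effect in finally when it is still false.
  static Closeable openWithSuccessFlag(Closeable resource) throws IOException {
    boolean success = false;
    try {
      touch(resource);
      success = true;
      return resource;
    } finally {
      if (!success) {
        resource.close(); // failure path only
      }
    }
  }

  // New shape: the failure path is explicit; the original Throwable is rethrown unchanged.
  static Closeable openWithCatchThrowable(Closeable resource) throws IOException {
    try {
      touch(resource);
      return resource;
    } catch (Throwable t) {
      resource.close(); // failure path only
      throw t;
    }
  }

  private static void touch(Closeable resource) throws IOException {
    // placeholder for work that may throw
  }
}

In the first hunk above, close() now simply lets tempDir.deleteFile() throw; a caller would typically open the EntrySupplier in try-with-resources and deal with that failure itself, which is what the inline review comment points at.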
@@ -38,19 +38,16 @@ final class DocumentsWriterFlushQueue {
synchronized FlushTicket addTicket(Supplier<FlushTicket> ticketSupplier) throws IOException {
// first inc the ticket count - freeze opens a window for #anyChanges to fail
incTickets();
boolean success = false;
try {
FlushTicket ticket = ticketSupplier.get();
if (ticket != null) {
// no need to publish anything if we don't have any frozen updates
queue.add(ticket);
success = true;
}
return ticket;
} finally {
if (!success) {
decTickets();
}
} catch (Throwable t) {
decTickets();
throw t;
}
}

124 changes: 61 additions & 63 deletions lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
@@ -74,80 +74,78 @@ final class SegmentCoreReaders {
SegmentCoreReaders(Directory dir, SegmentCommitInfo si, IOContext context) throws IOException {

final Codec codec = si.info.getCodec();
final Directory
cfsDir; // confusing name: if (cfs) it's the cfsdir, otherwise it's the segment's directory.
boolean success = false;
// confusing name: if (cfs) it's the cfsdir, otherwise it's the segment's directory:
final Directory cfsDir;

try {
if (si.info.getUseCompoundFile()) {
cfsDir = cfsReader = codec.compoundFormat().getCompoundReader(dir, si.info);
} else {
cfsReader = null;
cfsDir = dir;
}
try {
if (si.info.getUseCompoundFile()) {
cfsDir = cfsReader = codec.compoundFormat().getCompoundReader(dir, si.info);
} else {
cfsReader = null;
cfsDir = dir;
}

segment = si.info.name;
segment = si.info.name;

coreFieldInfos = codec.fieldInfosFormat().read(cfsDir, si.info, "", context);
coreFieldInfos = codec.fieldInfosFormat().read(cfsDir, si.info, "", context);

final SegmentReadState segmentReadState =
new SegmentReadState(cfsDir, si.info, coreFieldInfos, context);
if (coreFieldInfos.hasPostings()) {
final PostingsFormat format = codec.postingsFormat();
// Ask codec for its Fields
fields = format.fieldsProducer(segmentReadState);
assert fields != null;
} else {
fields = null;
}
// ask codec for its Norms:
// TODO: since we don't write any norms file if there are no norms,
// kinda jaky to assume the codec handles the case of no norms file at all gracefully?!

if (coreFieldInfos.hasNorms()) {
normsProducer = codec.normsFormat().normsProducer(segmentReadState);
assert normsProducer != null;
} else {
normsProducer = null;
}

fieldsReaderOrig =
si.info
.getCodec()
.storedFieldsFormat()
.fieldsReader(cfsDir, si.info, coreFieldInfos, context);
final SegmentReadState segmentReadState =
new SegmentReadState(cfsDir, si.info, coreFieldInfos, context);
if (coreFieldInfos.hasPostings()) {
final PostingsFormat format = codec.postingsFormat();
// Ask codec for its Fields
fields = format.fieldsProducer(segmentReadState);
assert fields != null;
} else {
fields = null;
}
// ask codec for its Norms:
// TODO: since we don't write any norms file if there are no norms,
// kinda jaky to assume the codec handles the case of no norms file at all gracefully?!

if (coreFieldInfos.hasNorms()) {
normsProducer = codec.normsFormat().normsProducer(segmentReadState);
assert normsProducer != null;
} else {
normsProducer = null;
}

if (coreFieldInfos.hasTermVectors()) { // open term vector files only as needed
termVectorsReaderOrig =
fieldsReaderOrig =
si.info
.getCodec()
.termVectorsFormat()
.vectorsReader(cfsDir, si.info, coreFieldInfos, context);
} else {
termVectorsReaderOrig = null;
}

if (coreFieldInfos.hasPointValues()) {
pointsReader = codec.pointsFormat().fieldsReader(segmentReadState);
} else {
pointsReader = null;
}
.storedFieldsFormat()
.fieldsReader(cfsDir, si.info, coreFieldInfos, context);

if (coreFieldInfos.hasTermVectors()) { // open term vector files only as needed
termVectorsReaderOrig =
si.info
.getCodec()
.termVectorsFormat()
.vectorsReader(cfsDir, si.info, coreFieldInfos, context);
} else {
termVectorsReaderOrig = null;
}

if (coreFieldInfos.hasVectorValues()) {
knnVectorsReader = codec.knnVectorsFormat().fieldsReader(segmentReadState);
} else {
knnVectorsReader = null;
}
if (coreFieldInfos.hasPointValues()) {
pointsReader = codec.pointsFormat().fieldsReader(segmentReadState);
} else {
pointsReader = null;
}

success = true;
} catch (EOFException | FileNotFoundException e) {
throw new CorruptIndexException("Problem reading index from " + dir, dir.toString(), e);
} catch (NoSuchFileException e) {
throw new CorruptIndexException("Problem reading index.", e.getFile(), e);
} finally {
if (!success) {
decRef();
if (coreFieldInfos.hasVectorValues()) {
knnVectorsReader = codec.knnVectorsFormat().fieldsReader(segmentReadState);
} else {
knnVectorsReader = null;
}
} catch (EOFException | FileNotFoundException e) {
throw new CorruptIndexException("Problem reading index from " + dir, dir.toString(), e);
} catch (NoSuchFileException e) {
throw new CorruptIndexException("Problem reading index.", e.getFile(), e);
}
} catch (Throwable t) {
decRef();
throw t;
}
}

@@ -16,6 +16,7 @@
*/
package org.apache.lucene.store;

import java.io.Closeable;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UncheckedIOException;
@@ -249,11 +250,9 @@ public IndexOutput createTempOutput(String prefix, String suffix, IOContext cont
if (VERBOSE) {
System.out.println("nrtdir.createTempOutput prefix=" + prefix + " suffix=" + suffix);
}
Set<String> toDelete = new HashSet<>();

// This is very ugly/messy/dangerous (can in some disastrous case maybe create too many temp
// files), but I don't know of a cleaner way:
boolean success = false;
Set<String> toDelete = new HashSet<>();

Directory first;
Directory second;
@@ -266,7 +265,7 @@ public IndexOutput createTempOutput(String prefix, String suffix, IOContext cont
}

IndexOutput out = null;
try {
try (Closeable _ = () -> IOUtils.deleteFiles(first, toDelete)) {
while (true) {
out = first.createTempOutput(prefix, suffix, context);
String name = out.getName();
@@ -275,17 +274,12 @@ public IndexOutput createTempOutput(String prefix, String suffix, IOContext cont
out.close();
} else {
toDelete.remove(name);
success = true;
break;
}
}
} finally {
if (success) {
IOUtils.deleteFiles(first, toDelete);
} else {
IOUtils.closeWhileHandlingException(out);
IOUtils.deleteFilesIgnoringExceptions(first, toDelete);
}
} catch (Throwable t) {
IOUtils.closeWhileSuppressingExceptions(t, out);
throw t;
}

return out;
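In createTempOutput above, the cleanup is registered up front as a Closeable in the try-with-resources header, so IOUtils.deleteFiles(first, toDelete) runs on every exit path; the unnamed variable `_` just discards the handle, and the catch (Throwable t) only has to close the half-written output. A rough stand-alone sketch of the same idea, using hypothetical java.nio.file code rather than Lucene's Directory API:

import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Set;

class TempFileCleanupSketch {

  // Create a temp file; any path still listed in toDelete when the block exits is removed,
  // whether we leave normally or by exception.
  static Path createTemp(Path dir) throws IOException {
    Set<Path> toDelete = new HashSet<>();
    Closeable cleanup =
        () -> {
          for (Path p : toDelete) {
            Files.deleteIfExists(p);
          }
        };
    try (cleanup) {
      Path tmp = Files.createTempFile(dir, "tmp", ".bin");
      toDelete.add(tmp);
      // ... work that may throw; on failure the file above is deleted by cleanup ...
      toDelete.remove(tmp); // success: keep the file
      return tmp;
    }
  }
}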
@@ -155,16 +155,13 @@ public final void decRef() throws IOException {
ensureOpen();
final int rc = refCount.decrementAndGet();
if (rc == 0) {
boolean success = false;
try {
doClose();
closed = true;
success = true;
} finally {
if (!success) {
// Put reference back on failure
refCount.incrementAndGet();
}
} catch (Throwable t) {
// Put reference back on failure
refCount.incrementAndGet();
throw t;
}
} else if (rc < 0) {
throw new IllegalStateException(
@@ -188,7 +188,6 @@ protected DirectoryTaxonomyReader doOpenIfChanged() throws IOException {
}

// check if the taxonomy was recreated
boolean success = false;
try {
boolean recreated = false;
if (taxoWriter == null) {
@@ -222,12 +221,10 @@ protected DirectoryTaxonomyReader doOpenIfChanged() throws IOException {
new DirectoryTaxonomyReader(r2, taxoWriter, ordinalCache, categoryCache, taxoArrays);
}

success = true;
return newTaxoReader;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(r2);
}
} catch (Throwable t) {
IOUtils.closeWhileSuppressingExceptions(t, r2);
throw t;
}
}
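Several of these failure paths now call IOUtils.closeWhileSuppressingExceptions(t, ...) instead of closeWhileHandlingException(...). The presumed difference, sketched below with a hypothetical helper (not the actual Lucene implementation), is that a failure while closing is attached to the primary Throwable as a suppressed exception rather than being swallowed:

import java.io.Closeable;

final class SuppressingCloseSketch {

  // Close each resource; if closing fails, record that failure on the primary exception
  // so neither the original cause nor the close failure is lost.
  static void closeWhileSuppressing(Throwable primary, Closeable... resources) {
    for (Closeable resource : resources) {
      if (resource == null) {
        continue;
      }
      try {
        resource.close();
      } catch (Throwable closeFailure) {
        primary.addSuppressed(closeFailure);
      }
    }
  }
}

A caller would then write catch (Throwable t) { closeWhileSuppressing(t, r2); throw t; }, which is the shape visible in doOpenIfChanged above.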

@@ -167,8 +167,6 @@ private class AlwaysRefreshDirectoryTaxonomyReader extends DirectoryTaxonomyRead

@Override
protected DirectoryTaxonomyReader doOpenIfChanged() throws IOException {
boolean success = false;

// the getInternalIndexReader() function performs the ensureOpen() check
final DirectoryReader reader = DirectoryReader.openIfChanged(super.getInternalIndexReader());
if (reader == null) {
@@ -181,14 +179,10 @@ protected DirectoryTaxonomyReader doOpenIfChanged() throws IOException {
// Returning a AlwaysRefreshDirectoryTaxonomyReader ensures that the recreated taxonomy
// reader also uses the overridden doOpenIfChanged
// method (that always recomputes values).
final AlwaysRefreshDirectoryTaxonomyReader newTaxonomyReader =
new AlwaysRefreshDirectoryTaxonomyReader(reader);
success = true;
return newTaxonomyReader;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(reader);
}
return new AlwaysRefreshDirectoryTaxonomyReader(reader);
} catch (Throwable t) {
IOUtils.closeWhileSuppressingExceptions(t, reader);
throw t;
}
}
}
@@ -85,8 +85,7 @@ public ClientHandler(ServerSocket ss, Node node, Socket socket) {

@Override
public void run() {
boolean success = false;
try {
try (socket) {
// node.message("using stream buffer size=" + bufferSize);
InputStream is = new BufferedInputStream(socket.getInputStream(), bufferSize);
DataInput in = new InputStreamDataInput(is);
@@ -104,30 +103,18 @@ public void run() {
if (VERBOSE_CONNECTIONS) {
node.message("bos.flush done");
}

success = true;
} catch (Throwable t) {
if (t instanceof SocketException == false
&& t instanceof NodeCommunicationException == false) {
node.message("unexpected exception handling client connection; now failing test:");
t.printStackTrace(System.out);
IOUtils.closeWhileHandlingException(ss);
IOUtils.closeWhileSuppressingExceptions(t, ss);
// Test should fail with this:
throw new RuntimeException(t);
} else {
node.message("exception handling client connection; ignoring:");
t.printStackTrace(System.out);
}
} finally {
if (success) {
try {
IOUtils.close(socket);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
} else {
IOUtils.closeWhileHandlingException(socket);
}
}
if (VERBOSE_CONNECTIONS) {
node.message("socket.close done");
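In ClientHandler.run() the success flag and the split between IOUtils.close(socket) and closeWhileHandlingException(socket) are replaced by try (socket): try-with-resources over an existing effectively-final variable, which closes the socket on every exit path and records a close failure as a suppressed exception if the body already threw. A minimal sketch with a hypothetical handler body:

import java.io.IOException;
import java.net.Socket;

class TryWithExistingResourceSketch {

  // The socket is closed when the block exits, normally or exceptionally; a failure in
  // close() after the body has thrown is added to that exception as suppressed.
  static void handle(Socket socket) throws IOException {
    try (socket) {
      socket.getOutputStream().write(42); // placeholder for the real protocol handling
      socket.getOutputStream().flush();
    }
  }
}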
@@ -78,7 +78,6 @@ public BytesRef next() throws IOException {
if (done) {
return null;
}
boolean success = false;
BytesRef result;
try {
String line;
@@ -90,11 +89,9 @@
IOUtils.close(in);
result = null;
}
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(in);
}
} catch (Throwable t) {
IOUtils.closeWhileSuppressingExceptions(t, in);
throw t;
}
return result;
}