Skip to content

Commit a741ae4

Browse files
committed
Improve limit handling in StringDecoder
The case of one data buffer containing multiple lines could cause a buffer leak due to a suspected issue in concatMapIterable. This commit adds workarounds for that until the underlying issue is addressed. Closes gh-24339
1 parent 850cbf0 commit a741ae4

File tree

2 files changed

+109
-54
lines changed

2 files changed

+109
-54
lines changed

spring-core/src/main/java/org/springframework/core/codec/StringDecoder.java

Lines changed: 87 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
/*
2-
* Copyright 2002-2019 the original author or authors.
2+
* Copyright 2002-2020 the original author or authors.
33
*
44
* Licensed under the Apache License, Version 2.0 (the "License");
55
* you may not use this file except in compliance with the License.
@@ -94,20 +94,44 @@ public Flux<String> decode(Publisher<DataBuffer> input, ResolvableType elementTy
9494

9595
byte[][] delimiterBytes = getDelimiterBytes(mimeType);
9696

97-
// TODO: Drop Consumer and use bufferUntil with Supplier<LimitedDataBufferList> (reactor-core#1925)
98-
// TODO: Drop doOnDiscard(LimitedDataBufferList.class, ...) (reactor-core#1924)
99-
LimitedDataBufferConsumer limiter = new LimitedDataBufferConsumer(getMaxInMemorySize());
100-
10197
Flux<DataBuffer> inputFlux = Flux.defer(() -> {
10298
DataBufferUtils.Matcher matcher = DataBufferUtils.matcher(delimiterBytes);
103-
return Flux.from(input)
104-
.concatMapIterable(buffer -> endFrameAfterDelimiter(buffer, matcher))
105-
.doOnNext(limiter)
106-
.bufferUntil(buffer -> buffer instanceof EndFrameBuffer)
107-
.map(buffers -> joinAndStrip(buffers, this.stripDelimiter))
108-
.doOnDiscard(LimitedDataBufferList.class, LimitedDataBufferList::releaseAndClear)
109-
.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
99+
if (getMaxInMemorySize() != -1) {
100+
101+
// Passing limiter into endFrameAfterDelimiter helps to ensure that in case of one DataBuffer
102+
// containing multiple lines, the limit is checked and raised immediately without accumulating
103+
// subsequent lines. This is necessary because concatMapIterable doesn't respect doOnDiscard.
104+
// When reactor-core#1925 is resolved, we could replace bufferUntil with:
105+
106+
// .windowUntil(buffer -> buffer instanceof EndFrameBuffer)
107+
// .concatMap(fluxes -> fluxes.collect(() -> new LimitedDataBufferList(getMaxInMemorySize()), LimitedDataBufferList::add))
108+
109+
LimitedDataBufferList limiter = new LimitedDataBufferList(getMaxInMemorySize());
110+
111+
return Flux.from(input)
112+
.concatMapIterable(buffer -> endFrameAfterDelimiter(buffer, matcher, limiter))
113+
.bufferUntil(buffer -> buffer instanceof EndFrameBuffer)
114+
.map(buffers -> joinAndStrip(buffers, this.stripDelimiter))
115+
.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
116+
}
117+
else {
110118

119+
// When the decoder is unlimited (-1), concatMapIterable will cache buffers that may not
120+
// be released if cancel is signalled before they are turned into String lines
121+
// (see test maxInMemoryLimitReleasesUnprocessedLinesWhenUnlimited).
122+
// When reactor-core#1925 is resolved, the workaround can be removed and the entire
123+
// else clause possibly dropped.
124+
125+
ConcatMapIterableDiscardWorkaroundCache cache = new ConcatMapIterableDiscardWorkaroundCache();
126+
127+
return Flux.from(input)
128+
.concatMapIterable(buffer -> cache.addAll(endFrameAfterDelimiter(buffer, matcher, null)))
129+
.doOnNext(cache)
130+
.doOnCancel(cache)
131+
.bufferUntil(buffer -> buffer instanceof EndFrameBuffer)
132+
.map(buffers -> joinAndStrip(buffers, this.stripDelimiter))
133+
.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
134+
}
111135
});
112136

113137
return super.decode(inputFlux, elementType, mimeType, hints);
@@ -152,29 +176,49 @@ private static Charset getCharset(@Nullable MimeType mimeType) {
152176
*
153177
* @param dataBuffer the buffer to find delimiters in
154178
* @param matcher used to find the first delimiters
179+
* @param limiter to enforce maxInMemorySize with
155180
* @return a flux of buffers, containing {@link EndFrameBuffer} after each delimiter that was
156181
* found in {@code dataBuffer}. Returns Flux, because returning List (w/ flatMapIterable)
157182
* results in memory leaks due to pre-fetching.
158183
*/
159-
private static List<DataBuffer> endFrameAfterDelimiter(DataBuffer dataBuffer, DataBufferUtils.Matcher matcher) {
184+
private static List<DataBuffer> endFrameAfterDelimiter(
185+
DataBuffer dataBuffer, DataBufferUtils.Matcher matcher, @Nullable LimitedDataBufferList limiter) {
186+
160187
List<DataBuffer> result = new ArrayList<>();
161-
do {
162-
int endIdx = matcher.match(dataBuffer);
163-
if (endIdx != -1) {
164-
int readPosition = dataBuffer.readPosition();
165-
int length = endIdx - readPosition + 1;
166-
result.add(dataBuffer.retainedSlice(readPosition, length));
167-
result.add(new EndFrameBuffer(matcher.delimiter()));
168-
dataBuffer.readPosition(endIdx + 1);
188+
try {
189+
do {
190+
int endIdx = matcher.match(dataBuffer);
191+
if (endIdx != -1) {
192+
int readPosition = dataBuffer.readPosition();
193+
int length = (endIdx - readPosition + 1);
194+
DataBuffer slice = dataBuffer.retainedSlice(readPosition, length);
195+
result.add(slice);
196+
result.add(new EndFrameBuffer(matcher.delimiter()));
197+
dataBuffer.readPosition(endIdx + 1);
198+
if (limiter != null) {
199+
limiter.add(slice); // enforce the limit
200+
limiter.clear();
201+
}
202+
}
203+
else {
204+
result.add(DataBufferUtils.retain(dataBuffer));
205+
if (limiter != null) {
206+
limiter.add(dataBuffer);
207+
}
208+
break;
209+
}
169210
}
170-
else {
171-
result.add(DataBufferUtils.retain(dataBuffer));
172-
break;
211+
while (dataBuffer.readableByteCount() > 0);
212+
}
213+
catch (DataBufferLimitException ex) {
214+
if (limiter != null) {
215+
limiter.releaseAndClear();
173216
}
217+
throw ex;
218+
}
219+
finally {
220+
DataBufferUtils.release(dataBuffer);
174221
}
175-
while (dataBuffer.readableByteCount() > 0);
176-
177-
DataBufferUtils.release(dataBuffer);
178222
return result;
179223
}
180224

@@ -288,34 +332,32 @@ public byte[] delimiter() {
288332
}
289333

290334

291-
/**
292-
* Temporary measure for reactor-core#1925.
293-
* Consumer that adds to a {@link LimitedDataBufferList} to enforce limits.
294-
*/
295-
private static class LimitedDataBufferConsumer implements Consumer<DataBuffer> {
335+
private class ConcatMapIterableDiscardWorkaroundCache implements Consumer<DataBuffer>, Runnable {
296336

297-
private final LimitedDataBufferList bufferList;
337+
private final List<DataBuffer> buffers = new ArrayList<>();
298338

299339

300-
public LimitedDataBufferConsumer(int maxInMemorySize) {
301-
this.bufferList = new LimitedDataBufferList(maxInMemorySize);
340+
public List<DataBuffer> addAll(List<DataBuffer> buffersToAdd) {
341+
this.buffers.addAll(buffersToAdd);
342+
return buffersToAdd;
302343
}
303344

345+
@Override
346+
public void accept(DataBuffer dataBuffer) {
347+
this.buffers.remove(dataBuffer);
348+
}
304349

305350
@Override
306-
public void accept(DataBuffer buffer) {
307-
if (buffer instanceof EndFrameBuffer) {
308-
this.bufferList.clear();
309-
}
310-
else {
351+
public void run() {
352+
this.buffers.forEach(buffer -> {
311353
try {
312-
this.bufferList.add(buffer);
313-
}
314-
catch (DataBufferLimitException ex) {
315354
DataBufferUtils.release(buffer);
316-
throw ex;
317355
}
318-
}
356+
catch (Throwable ex) {
357+
// Keep going..
358+
}
359+
});
319360
}
320361
}
362+
321363
}

spring-core/src/test/java/org/springframework/core/codec/StringDecoderTests.java

Lines changed: 22 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
/*
2-
* Copyright 2002-2019 the original author or authors.
2+
* Copyright 2002-2020 the original author or authors.
33
*
44
* Licensed under the Apache License, Version 2.0 (the "License");
55
* you may not use this file except in compliance with the License.
@@ -130,17 +130,30 @@ void decodeNewLine() {
130130
}
131131

132132
@Test
133-
void decodeNewLineWithLimit() {
133+
void maxInMemoryLimit() {
134134
Flux<DataBuffer> input = Flux.just(
135-
stringBuffer("abc\n"),
136-
stringBuffer("defg\n"),
137-
stringBuffer("hijkl\n")
138-
);
139-
this.decoder.setMaxInMemorySize(5);
135+
stringBuffer("abc\n"), stringBuffer("defg\n"), stringBuffer("hijkl\n"));
140136

137+
this.decoder.setMaxInMemorySize(5);
141138
testDecode(input, String.class, step ->
142-
step.expectNext("abc", "defg")
143-
.verifyError(DataBufferLimitException.class));
139+
step.expectNext("abc", "defg").verifyError(DataBufferLimitException.class));
140+
}
141+
142+
@Test // gh-24312
143+
void maxInMemoryLimitReleaseUnprocessedLinesFromCurrentBuffer() {
144+
Flux<DataBuffer> input = Flux.just(
145+
stringBuffer("TOO MUCH DATA\nanother line\n\nand another\n"));
146+
147+
this.decoder.setMaxInMemorySize(5);
148+
testDecode(input, String.class, step -> step.verifyError(DataBufferLimitException.class));
149+
}
150+
151+
@Test // gh-24339
152+
void maxInMemoryLimitReleaseUnprocessedLinesWhenUnlimited() {
153+
Flux<DataBuffer> input = Flux.just(stringBuffer("Line 1\nLine 2\nLine 3\n"));
154+
155+
this.decoder.setMaxInMemorySize(-1);
156+
testDecodeCancel(input, ResolvableType.forClass(String.class), null, Collections.emptyMap());
144157
}
145158

146159
@Test

0 commit comments

Comments
 (0)