Merge pull request #354 from dalf/mod-searx-network-stream

[mod] /image_proxy: fix memory leak

commit 7124fd1704
2 changed files with 20 additions and 16 deletions
@@ -219,8 +219,7 @@ def stream(method, url, **kwargs):
     """Replace httpx.stream.
 
     Usage:
-    stream = poolrequests.stream(...)
-    response = next(stream)
+    response, stream = poolrequests.stream(...)
     for chunk in stream:
         ...
 
@@ -236,6 +235,5 @@ def stream(method, url, **kwargs):
 
     response._generator = generator  # pylint: disable=protected-access
     response.close = MethodType(_close_response_method, response)
-    yield response
 
-    yield from generator
+    return response, generator
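The first file changes the contract of the stream() helper: instead of a single generator whose first yielded item is the response, it now returns the response object and the chunk generator as a pair, so callers no longer need the awkward next(stream) step. A minimal sketch of the same pattern built directly on httpx (the real searx helper adds connection pooling and the MethodType-based close hook shown above; the names below are illustrative, not the searx API):

    import httpx

    def stream(method, url, **kwargs):
        # Send the request eagerly, but defer reading the body.
        client = httpx.Client()
        response = client.send(client.build_request(method, url, **kwargs), stream=True)

        def generator():
            try:
                yield from response.iter_bytes()
            finally:
                # Close the body and the client once the caller is done iterating.
                response.close()
                client.close()

        return response, generator()

    # Usage mirrors the updated docstring:
    resp, chunks = stream('GET', 'https://example.org')
    print(resp.status_code, resp.headers.get('Content-Type'))
    print('received', sum(len(chunk) for chunk in chunks), 'bytes')

Note that, as in the original code, the connection is only released when the generator is exhausted or explicitly closed; the image_proxy hunks below tie that cleanup to the Flask response lifecycle.
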
@@ -1089,12 +1089,11 @@ def image_proxy():
         'DNT': '1',
     }
     set_context_network_name('image_proxy')
-    stream = http_stream(
+    resp, stream = http_stream(
         method = 'GET',
         url = url,
         headers = request_headers
     )
-    resp = next(stream)
     content_length = resp.headers.get('Content-Length')
     if (content_length
         and content_length.isdigit()
@@ -1124,22 +1123,29 @@ def image_proxy():
         except httpx.HTTPError:
             logger.exception('HTTP error on closing')
 
+    def close_stream():
+        nonlocal resp, stream
+        try:
+            resp.close()
+            del resp
+            del stream
+        except httpx.HTTPError as e:
+            logger.debug('Exception while closing response', e)
+
     try:
         headers = dict_subset(
             resp.headers,
             {'Content-Type', 'Content-Encoding', 'Content-Length', 'Length'}
         )
-
-        def forward_chunk():
-            total_length = 0
-            for chunk in stream:
-                total_length += len(chunk)
-                if total_length > maximum_size:
-                    break
-                yield chunk
-
-        return Response(forward_chunk(), mimetype=resp.headers['Content-Type'], headers=headers)
+        response = Response(
+            stream,
+            mimetype=resp.headers['Content-Type'],
+            headers=headers,
+            direct_passthrough=True)
+        response.call_on_close(close_stream)
+        return response
     except httpx.HTTPError:
+        close_stream()
         return '', 400
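The image_proxy() change hands the chunk generator straight to Flask as a passthrough body and ties cleanup of the upstream httpx response to the response lifecycle: call_on_close() runs close_stream() when Werkzeug tears the response down, including when the client disconnects mid-transfer, which is what plugs the leak of unread, unclosed upstream responses. A self-contained sketch of that pattern with plain Flask and httpx (hypothetical route; the real searx code additionally validates the URL, filters headers with dict_subset and enforces a size limit):

    import httpx
    from flask import Flask, Response, request

    app = Flask(__name__)
    client = httpx.Client()

    @app.route('/image_proxy')
    def image_proxy():
        url = request.args.get('url')
        # Open the upstream response without reading the body yet.
        upstream = client.send(client.build_request('GET', url), stream=True)

        def close_stream():
            # Release the pooled connection; safe to call even if nothing was read.
            try:
                upstream.close()
            except httpx.HTTPError:
                pass

        try:
            response = Response(
                upstream.iter_bytes(),        # stream chunks to the client
                mimetype=upstream.headers.get('Content-Type', 'application/octet-stream'),
                direct_passthrough=True)      # don't buffer the body in Flask
            response.call_on_close(close_stream)  # always close the upstream response
            return response
        except httpx.HTTPError:
            close_stream()
            return '', 400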