#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import datetime
import subprocess
import sys
import threading
import time
from pathlib import Path
from queue import Queue
from typing import Callable, IO

from p1st.data_units import DataUnitConverter


def execute_consume_chunks(command: list[str],
                           handle_chunks: Callable[[Callable[[tuple[Path, bool]], None]], None],
                           ) -> int:
    """
    Execute `command` as a subprocess and feed locally saved chunks to its stdin.

    Local chunks are deleted after they are fed to the stdin of the subprocess.

    :param command: Command to execute as a subprocess.
    :param handle_chunks: Has one parameter, `queue_put`. `handle_chunks` must call
        `queue_put((chunk_path, last_chunk))` for each saved chunk, with `last_chunk=True`
        for the final chunk (see `_example_handle_chunks` below for an illustrative sketch).
    :return: Return code of the subprocess.
    """
    process = subprocess.Popen(
        command,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        close_fds=True,
    )

    q = Queue(maxsize=2)

    threads = [
        threading.Thread(
            target=_stdin_worker,
            args=(q.get,
                  process.stdin,
                  )),
        threading.Thread(
            target=handle_chunks,
            args=(q.put,
                  )),
        threading.Thread(
            target=_stderr_worker,
            args=(process.stderr,
                  )),
    ]

    for t in threads:
        t.daemon = True
        t.start()

    returncode: int = process.wait()
    for t in threads:
        t.join()

    return returncode
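

# Illustrative sketch (an assumption for this module, not part of the original
# code): a callback that satisfies the `handle_chunks` contract documented in
# `execute_consume_chunks`. It saves two small chunk files to a temporary
# directory and enqueues each one as `(chunk_path, last_chunk)`;
# `execute_consume_chunks` deletes each file after feeding it to the
# subprocess, but the temporary directory itself is left in place.
def _example_handle_chunks(queue_put: Callable[[tuple[Path, bool]], None]) -> None:
    import tempfile

    tmp_dir = Path(tempfile.mkdtemp(prefix='chunks-'))
    payloads = [b'first chunk\n', b'last chunk\n']
    for index, payload in enumerate(payloads):
        chunk_path = tmp_dir / f'chunk-{index}.bin'
        chunk_path.write_bytes(payload)
        # `last_chunk=True` on the final item lets `_stdin_worker` terminate.
        queue_put((chunk_path, index == len(payloads) - 1))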


def _stdin_worker(queue_get: Callable[[], tuple[Path, bool]],
                  binary_stdin: IO[bytes],
                  ):
    start_time = time.time()
    transferred_bytes = 0

    while True:
        chunk_path: Path
        chunk_path, last_chunk = queue_get()
        chunk = _read_chunk(chunk_path)
        chunk_path.unlink(missing_ok=False)
        binary_stdin.write(chunk)
        # binary_stdin.flush()  # TODO: is this required?

        current_time = time.time()
        elapsed_time = current_time - start_time
        transferred_bytes += len(chunk)
        bytes_per_second = round(transferred_bytes / elapsed_time)
        print(f'Elapsed time: {datetime.timedelta(seconds=elapsed_time)}\n'
              f'Transferred: {DataUnitConverter.to_unit_auto_str(transferred_bytes)}\n'
              f'Speed: {DataUnitConverter.to_unit_auto_str(bytes_per_second)}/s')

        if last_chunk:
            break

    binary_stdin.flush()

    # TODO: Does this have any effect? On stdin it probably does!
    binary_stdin.close()


def _read_chunk(chunk_path: Path) -> bytes:
    """
    Reads a chunk from the given path.
    """
    print(f'Reading chunk {chunk_path}')

    # Fails if the file does not exist.
    return chunk_path.read_bytes()


def _stderr_worker(binary_stderr: IO[bytes]):
    """
    Prints the stderr of the subprocess to sys.stderr.
    """
    b: bytes
    for b in binary_stderr:
        sys.stderr.write(f'[STDERR] {b.decode("UTF-8")}')

    # TODO: Does this have any effect?
    # binary_stderr.close()
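

# Minimal demo, a sketch under assumptions: `cat` must be available on PATH and
# `_example_handle_chunks` above is only an illustration. Running this module
# directly streams the example chunks through `cat`; nothing here runs on import.
if __name__ == '__main__':
    sys.exit(execute_consume_chunks(['cat'], _example_handle_chunks))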