Enforce a "log_limit" to limit stdout/stderr logs

Default "log_limit" of 1 GiB for stdout/stderr from building packages
output.
This commit is contained in:
Stephen Seo 2023-06-23 20:01:58 +09:00
parent 024c41f4a6
commit fd86336ee0
3 changed files with 89 additions and 46 deletions

View file

@ -30,6 +30,16 @@ the TOML config file in a "other\_deps" array for the package like so:
"mesa" "mesa"
] ]
## Package stdout/stderr size limit
The possible issue of output logs filling up disk space is addressed with a
"log_limit" config option. By default, if the output log file reaches the
limit, the compilation output is no longer logged to the file in the logs dir.
Change "log_limit" in the config to a value in bytes if the default of 1 GiB
is too little for your use case (i.e. if the size of your output logs somehow
extends past 1 GiB).
# Setting up the AUR Helper
The AUR Helper requires several things:

View file

@ -16,6 +16,8 @@ editor = "/usr/bin/vim"
is_timed = true
# if true, all output build logs are prepended with current time in UTC
is_log_timed = true
# Default log_limit is 1 GiB
log_limit = 1073741824
########## END OF MANDATORY VARIABLES
# Each [[entry]] needs a "name".

View file

@ -1083,7 +1083,25 @@ def cleanup_sccache(chroot: str):
) )
def limited_stream(handle, output_file, log_limit: int):
    """Copy lines from `handle` to `output_file`, up to `log_limit` bytes.

    Reads `handle` line-by-line and writes each line to `output_file`,
    flushing after every write so the log is visible immediately.  Once the
    cumulative size exceeds `log_limit`, a warning is written and further
    output is discarded — but `handle` keeps being drained so the producing
    subprocess cannot deadlock writing to a full pipe buffer.

    Args:
        handle: readable text stream (e.g. a subprocess stdout/stderr pipe).
        output_file: writable text stream receiving the log.
        log_limit: maximum number of characters to log before truncating.
    """
    log_count = 0
    limit_reached = False
    while True:
        line = handle.readline()
        if len(line) == 0:
            break
        if limit_reached:
            # Keep draining the pipe without logging; breaking out here
            # would let the child block once the OS pipe buffer fills.
            continue
        log_count += len(line)
        if log_count > log_limit:
            output_file.write(
                "\nWARNING: Reached log_limit! No longer logging to file!\n"
            )
            output_file.flush()
            limit_reached = True
            continue
        output_file.write(line)
        output_file.flush()
def prepend_timestamp_stream(handle, output_file, log_limit: int):
    """Copy lines from `handle` to `output_file`, prepending a UTC timestamp.

    Each line read from `handle` is written to `output_file` prefixed with
    the current UTC time ("%Y-%m-%d_%H-%M-%S_%Z "), flushing after every
    write.  Once the cumulative size (timestamps included) exceeds
    `log_limit`, a warning is written and further output is discarded — but
    `handle` keeps being drained so the producing subprocess cannot deadlock
    writing to a full pipe buffer.

    Args:
        handle: readable text stream (e.g. a subprocess stdout/stderr pipe).
        output_file: writable text stream receiving the log.
        log_limit: maximum number of characters to log before truncating.
    """
    log_count = 0
    limit_reached = False
    while True:
        line = handle.readline()
        if len(line) == 0:
            break
        if limit_reached:
            # Keep draining the pipe without logging; breaking out here
            # would let the child block once the OS pipe buffer fills.
            continue
        nowstring = datetime.datetime.now(datetime.timezone.utc).strftime(
            "%Y-%m-%d_%H-%M-%S_%Z "
        )
        log_count += len(nowstring) + len(line)
        if log_count > log_limit:
            output_file.write(
                "\nWARNING: Reached log_limit! No longer logging to file!\n"
            )
            output_file.flush()
            limit_reached = True
            continue
        output_file.write(nowstring + line)
        output_file.flush()
@ -1223,7 +1248,6 @@ def update_pkg_list(
encoding="utf-8", encoding="utf-8",
) as log_stderr: ) as log_stderr:
try: try:
if other_state["is_log_timed"]:
p1 = subprocess.Popen( p1 = subprocess.Popen(
command_list + post_command_list, command_list + post_command_list,
cwd=pkgdir, cwd=pkgdir,
@ -1232,12 +1256,16 @@ def update_pkg_list(
stderr=subprocess.PIPE, stderr=subprocess.PIPE,
) )
tout = threading.Thread( tout = threading.Thread(
target=prepend_timestamp_stream, target=prepend_timestamp_stream
args=[p1.stdout, log_stdout], if other_state["is_log_timed"]
else limited_stream,
args=[p1.stdout, log_stdout, other_state["log_limit"]],
) )
terr = threading.Thread( terr = threading.Thread(
target=prepend_timestamp_stream, target=prepend_timestamp_stream
args=[p1.stderr, log_stderr], if other_state["is_log_timed"]
else limited_stream,
args=[p1.stderr, log_stderr, other_state["log_limit"]],
) )
tout.start() tout.start()
@ -1247,30 +1275,17 @@ def update_pkg_list(
tout.join() tout.join()
terr.join() terr.join()
if ( if p1.returncode is None:
p1.returncode is None raise RuntimeError("pOpen process didn't finish")
or type(p1.returncode) is not int elif type(p1.returncode) is not int:
or p1.returncode != 0 raise RuntimeError("pOpen process non-integer returncode")
): elif p1.returncode != 0:
raise RuntimeError("pOpen process failed") raise RuntimeError(
else: f"pOpen process non-zero return code {p1.returncode}"
subprocess.run(
command_list + post_command_list,
check=True,
cwd=pkgdir,
stdout=log_stdout,
stderr=log_stderr,
) )
except subprocess.CalledProcessError:
log_print(
'ERROR: Failed to build pkg "{}" in chroot'.format(pkg),
other_state=other_state,
)
pkg_state[pkg]["build_status"] = "fail"
continue
except BaseException as e: except BaseException as e:
log_print( log_print(
'ERROR: Failed to build pkg "{}" in chroot (unknown Exception): {}'.format( 'ERROR: Failed to build pkg "{}" in chroot: {}'.format(
pkg, e pkg, e
), ),
other_state=other_state, other_state=other_state,
@ -1638,6 +1653,7 @@ if __name__ == "__main__":
pkg_state = {} pkg_state = {}
other_state = {} other_state = {}
other_state["logs_dir"] = None other_state["logs_dir"] = None
other_state["log_limit"] = 1024 * 1024 * 1024
if args.pkg and not args.config: if args.pkg and not args.config:
for pkg in args.pkg: for pkg in args.pkg:
pkg_state[pkg] = {} pkg_state[pkg] = {}
@ -1773,6 +1789,21 @@ if __name__ == "__main__":
other_state["is_log_timed"] = True other_state["is_log_timed"] = True
else: else:
other_state["is_log_timed"] = False other_state["is_log_timed"] = False
if (
"log_limit" in d
and type(d["log_limit"]) is int
and d["log_limit"] > 0
):
other_state["log_limit"] = d["log_limit"]
log_print('Set "log_limit" to {}'.format(d["log_limit"]))
else:
log_print(
'Using default "log_limit" of {}'.format(
other_state["log_limit"]
)
)
log_print(" {} KiB".format(other_state["log_limit"] / 1024))
log_print(" {} MiB".format(other_state["log_limit"] / 1024 / 1024))
else: else:
log_print( log_print(
'ERROR: At least "--config" or "--pkg" must be specified', 'ERROR: At least "--config" or "--pkg" must be specified',