]> git.seodisparate.com - AnotherAURHelper/commitdiff
Impl setting build to fail/continue on limit
authorStephen Seo <seo.disparate@gmail.com>
Sun, 25 Jun 2023 11:21:03 +0000 (20:21 +0900)
committerStephen Seo <seo.disparate@gmail.com>
Sun, 25 Jun 2023 11:21:22 +0000 (20:21 +0900)
If "error_on_limit" is set to "true" in the config, then the build will
fail if the "log_limit" is reached.

README.md
example_config.toml
update.py

index 9d400b20043124f787c4c6dccdbc38ce6a633ce4..b482994adb2fb436925c1f299f0b0b90601e6b89 100644 (file)
--- a/README.md
+++ b/README.md
@@ -40,6 +40,12 @@ Change "log\_limit" in the config to a value in bytes if the default of 1 GiB
 is too little for your use case (if the size of your output logs extend past 1
 GiB somehow).
 
+### Error when reaching limit
+
+"error\_on\_limit" can be set to true/false in the config. If set to true, then
+the build will fail if the limit is reached. If set to false, then the build
+will continue even if the limit is reached.
+
 # Setting up the AUR Helper
 
 The AUR Helper requires several things:
index 2246289de75ddbef025631e3d6d053ba48b115d7..08bf3133ebf2f62d2263dff7169e4c77c4835286 100644 (file)
@@ -18,6 +18,8 @@ is_timed = true
 is_log_timed = true
 # Default log_limit is 1 GiB
 log_limit = 1073741824
+# If true, then make the build fail if the limit is reached
+error_on_limit = false
 ########## END OF MANDATORY VARIABLES
 
 # Each [[entry]] needs a "name".
index 6c1be0f4146f881dbd3de8dabbec6196e6d7529c..4900498cd4d9d55c2d2079eae234d4fb3c5bbec5 100755 (executable)
--- a/update.py
+++ b/update.py
@@ -1083,41 +1083,44 @@ def cleanup_sccache(chroot: str):
         )
 
 
-def limited_stream(handle, output_file, log_limit: int):
+def handle_output_stream(handle, output_file, other_state):
     log_count = 0
+    limit_reached = False
     while True:
         line = handle.readline()
         if len(line) == 0:
             break
-        log_count += len(line)
-        if log_count > log_limit:
-            output_file.write(
-                "\nWARNING: Reached log_limit! No longer logging to file!\n"
-            )
-            output_file.flush()
-            break
-        output_file.write(line)
-        output_file.flush()
 
-
-def prepend_timestamp_stream(handle, output_file, log_limit: int):
-    log_count = 0
-    while True:
-        line = handle.readline()
-        if len(line) == 0:
-            break
-        nowstring = datetime.datetime.now(datetime.timezone.utc).strftime(
-            "%Y-%m-%d_%H-%M-%S_%Z "
-        )
-        log_count += len(nowstring) + len(line)
-        if log_count > log_limit:
-            output_file.write(
-                "\nWARNING: Reached log_limit! No longer logging to file!\n"
-            )
-            output_file.flush()
-            break
-        output_file.write(nowstring + line)
-        output_file.flush()
+        if not limit_reached:
+            if other_state["is_log_timed"]:
+                nowstring = datetime.datetime.now(
+                    datetime.timezone.utc
+                ).strftime("%Y-%m-%d_%H-%M-%S_%Z ")
+                line = nowstring + line
+            log_count += len(line)
+            if log_count > other_state["log_limit"]:
+                limit_reached = True
+                if other_state["error_on_limit"]:
+                    output_file.write(
+                        "\nERROR: Reached log_limit! No longer logging to file!\n"
+                    )
+                    output_file.flush()
+                    log_print(
+                        "ERROR: Reached log_limit! No longer logging to file!"
+                    )
+                    handle.close()
+                    break
+                else:
+                    output_file.write(
+                        "\nWARNING: Reached log_limit! No longer logging to file!\n"
+                    )
+                    output_file.flush()
+                    log_print(
+                        "WARNING: Reached log_limit! No longer logging to file!"
+                    )
+            else:
+                output_file.write(line)
+                output_file.flush()
 
 
 def update_pkg_list(
@@ -1256,16 +1259,12 @@ def update_pkg_list(
                     stderr=subprocess.PIPE,
                 )
                 tout = threading.Thread(
-                    target=prepend_timestamp_stream
-                    if other_state["is_log_timed"]
-                    else limited_stream,
-                    args=[p1.stdout, log_stdout, other_state["log_limit"]],
+                    target=handle_output_stream,
+                    args=[p1.stdout, log_stdout, other_state],
                 )
                 terr = threading.Thread(
-                    target=prepend_timestamp_stream
-                    if other_state["is_log_timed"]
-                    else limited_stream,
-                    args=[p1.stderr, log_stderr, other_state["log_limit"]],
+                    target=handle_output_stream,
+                    args=[p1.stderr, log_stderr, other_state],
                 )
 
                 tout.start()
@@ -1654,6 +1653,7 @@ if __name__ == "__main__":
     other_state = {}
     other_state["logs_dir"] = None
     other_state["log_limit"] = 1024 * 1024 * 1024
+    other_state["error_on_limit"] = False
     if args.pkg and not args.config:
         for pkg in args.pkg:
             pkg_state[pkg] = {}
@@ -1804,6 +1804,17 @@ if __name__ == "__main__":
             )
         log_print("  {} KiB".format(other_state["log_limit"] / 1024))
         log_print("  {} MiB".format(other_state["log_limit"] / 1024 / 1024))
+        if (
+            "error_on_limit" in d
+            and type(d["error_on_limit"]) is bool
+            and d["error_on_limit"]
+        ):
+            other_state["error_on_limit"] = True
+        log_print(
+            'Notice: "error_on_limit" is set to "{}"'.format(
+                other_state["error_on_limit"]
+            )
+        )
     else:
         log_print(
             'ERROR: At least "--config" or "--pkg" must be specified',