commit 2e0e1fa5d66f01effe07d30e53e5cf558494796f
parent 4200d5c107d236dcf71a38fc54887039079a4531
Author: Hiltjo Posthuma <hiltjo@codemadness.org>
Date: Sun, 30 Sep 2018 19:20:01 +0200
sfeed_update: add variable for max number of feeds to update concurrently
This adds a variable for the maximum number of feeds to update concurrently. A
system/user may have fork resource limits or want to set up some job limit.
Thanks leot for the idea and feedback!
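Since the config file is evaluated as a shellscript (see loadconfig in the diff
below), the new default should be overridable per user; a sketch, assuming an
illustrative value of 4 in sfeedrc:

	# sfeedrc: lower the concurrency limit, e.g. on a system with
	# strict fork resource limits (the value 4 is illustrative).
	maxjobs=4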
Diffstat:
1 file changed, 11 insertions(+), 0 deletions(-)
diff --git a/sfeed_update b/sfeed_update
@@ -5,6 +5,10 @@
# defaults
sfeedpath="$HOME/.sfeed/feeds"
+# used for processing feeds concurrently: wait until ${maxjobs} feeds are
+# finished at a time.
+maxjobs=8
+
# load config (evaluate shellscript).
# loadconfig(configfile)
loadconfig() {
@@ -71,6 +75,11 @@ fetchfeed() {
# fetch and parse feed.
# feed(name, feedurl, [basesiteurl], [encoding])
feed() {
+	# wait until ${maxjobs} are finished: throughput using this logic is
+	# non-optimal, but it is simple and portable.
+	test $((curjobs % maxjobs)) -eq 0 && wait
+	curjobs=$((curjobs + 1))
+
(name="$1"
filename="$(printf '%s' "$1" | tr '/' '_')"
feedurl="$2"
@@ -122,6 +131,8 @@ feeds() {
echo "See sfeedrc.example for an example." >&2
}
+# job counter.
+curjobs=0
# kill whole current process group on ^C (SIGINT).
isinterrupted="0"
# SIGTERM: signal to terminate parent.
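For illustration, a minimal standalone sketch of the batching logic added
above; fetchjob is a hypothetical stand-in for the real per-feed work:

	#!/bin/sh
	# sketch: start background jobs and wait(1) once every ${maxjobs}
	# jobs, so at most one batch of ${maxjobs} jobs runs at a time.
	maxjobs=8
	curjobs=0

	# stand-in for a long-running job (illustrative only).
	fetchjob() {
		sleep 1
	}

	for url in "$@"; do
		# every ${maxjobs} started jobs, wait for the whole batch.
		test $((curjobs % maxjobs)) -eq 0 && wait
		curjobs=$((curjobs + 1))
		fetchjob "$url" &
	done
	wait # wait for the remaining (possibly partial) batch.

As the comment in the patch notes, throughput is not optimal: a new batch only
starts after the slowest job of the previous one has finished, but the logic
needs nothing beyond POSIX wait(1), so it stays simple and portable.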