file=/tmp/log    # every collected URL gets appended here
export file
# Read URLs from stdin; echo each one and append it to $file.
addurls() {
  while IFS= read -r url; do
    echo "$url"
    echo "$url" >> "$file"
  done
}
export -f addurls
# Fetch $feed, keep the '<A HREF' lines from the descriptions,
# pull out the quoted URL, and hand the result to addurls.
techmeme() {
  xidel -s "$feed" --extract '//description' \
    | ack '^<A HREF' \
    | awk -F\" '{ print $2 }' \
    | addurls
}
export -f techmeme
feeds=(
  'https://www.techmeme.com/feed.xml'
  'https://www.memeorandum.com/feed.xml'
  'https://mediagazer.com/feed.xml'
)
for feed in "${feeds[@]}"; do
  #techmeme
  sem -j 1 "techmeme"
done
sem --wait
I'm trying to use sem to fetch these feeds in parallel, but it seems I can't use a pipe inside the command sem runs.
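
For reference, a minimal sketch of the variant I would expect to work, assuming two things are going wrong above: feed is set in the loop but never exported, so the child shell that sem spawns sees it empty, and -j 1 only allows one job at a time anyway. Passing the feed URL as an argument (via a hypothetical fetch_feed wrapper, untested) sidesteps the export problem entirely:

# fetch_feed is a hypothetical rename of techmeme that takes the URL as $1.
fetch_feed() {
  # The whole pipeline runs inside this exported function, so sem only
  # has to invoke "fetch_feed <url>"; no global $feed is needed.
  xidel -s "$1" --extract '//description' | ack '^<A HREF' | awk -F\" '{ print $2 }' | addurls
}
export -f fetch_feed

for feed in "${feeds[@]}"; do
  sem -j 3 "fetch_feed $feed"    # -j 3 allows three concurrent fetches
done
sem --wait

With plain parallel instead of sem, the loop collapses to parallel -j 3 fetch_feed ::: "${feeds[@]}". One caveat: once jobs overlap, appends to $file can interleave; each echo writes a single short line in append mode, which is usually harmless, but ordering across feeds is not guaranteed.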