file=/tmp/log
export file

addurls() {
  # echo each URL to stdout and append it to $file; IFS= read -r preserves the line verbatim
  while IFS= read -r url; do echo "$url"; echo "$url" >> "$file"; done
}
export -f addurls

techmeme() {
  xidel -s "$feed" --extract '//description' | ack '^<A HREF' | awk -F\" '{ print $2 }' \
    | addurls
}

export -f techmeme

feeds=(
  'https://www.techmeme.com/feed.xml'
  'https://www.memeorandum.com/feed.xml'
  'https://mediagazer.com/feed.xml'
)

for feed in "${feeds[@]}"; do
  #techmeme
  sem -j 1 "techmeme"
done
sem --wait

I'm trying to use `sem` to run these fetches in parallel, but the piped function doesn't seem to work when invoked through `sem`.

chovy
  • For an exported function to work, the function must run in bash (not `sh`), and the environment containing the exported function must be inherited by the child bash instance. I'm not familiar with `sem`, but you could try `sem -j 1 bash -c techmeme`. – dan Nov 18 '21 at 08:43 (see the sketch after these comments)
  • I just had to `export feed` – chovy Nov 18 '21 at 10:15
  • `xidel -se 'for $x in ("https://www.techmeme.com/feed.xml","https://www.memeorandum.com/feed.xml","https://mediagazer.com/feed.xml") return parse-xml(doc($x)//description)//a/@href'` ? – Reino Nov 18 '21 at 22:11
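A minimal sketch of the mechanism dan's comment describes, assuming GNU Parallel's `sem` (the names `greet` and `name` are hypothetical stand-ins): exported functions are reconstructed only by a child bash, and that child sees only variables that were exported.

greet() { echo "hello from $name"; }  # reads a variable from its environment
export -f greet    # export the function body
export name=world  # export every variable the function reads
sem -j 1 bash -c greet  # bash -c guarantees the child shell is bash
sem --wait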

1 Answer


Adding `export feed` inside the loop fixes it: `sem` runs the exported `techmeme` function in a child shell, and that child only sees variables that have been exported.
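With the fix applied, the driver loop from the question becomes:

for feed in "${feeds[@]}"; do
  export feed  # make $feed visible to the child shell that sem spawns
  sem -j 1 "techmeme"
done
sem --wait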

chovy