#!/bin/sh
# Download URLs from a queue file, keeping history.
# License: LGPLv2
#
# The file called "queue" must be in the same dir as this script
# (or pass an alternate path as $1). Lines starting with '#' are
# history/comments and are skipped. I schedule this to run every night.
#
# After a run, each successfully downloaded URL is commented out
# ('#url') so it is kept as history; failed URLs stay pending and
# are retried on the next run.

# Process one queue file: fetch every pending URL, then rewrite the
# file as history lines plus still-pending (failed) URLs.
#   $1 - queue file (default: queue)
# Returns non-zero if the queue file is missing or unreadable.
process_queue() {
  q=${1:-queue}
  [ -f "$q" ] || { printf '%s: no such file: %s\n' "process_queue" "$q" >&2; return 1; }

  # Work on a snapshot so a crash mid-run leaves the live queue intact.
  cp "$q" "$q.tmp" || return 1

  # Seed the rebuilt queue with the existing history (comment lines).
  # Anchored with ^ so URLs that merely CONTAIN '#' (e.g. fragments)
  # are not mistaken for comments (the old '/ *#/' matched anywhere).
  sed -n '/^ *#/p' "$q.tmp" > "$q.new"

  # Fetch each pending URL. IFS= and -r preserve the line verbatim.
  # NB: the loop body runs in a pipeline subshell; that is fine here
  # because it only appends to "$q.new", never sets shell variables.
  sed '/^ *#/d' "$q.tmp" | while IFS= read -r url; do
    [ -n "$url" ] || continue               # skip blank lines
    if wget -nv -- "$url"; then
      printf '#%s\n' "$url" >> "$q.new"     # success: keep as history
    else
      printf '%s\n' "$url" >> "$q.new"      # failure: retry next run
    fi
  done

  # Atomically-ish replace the queue, then drop the snapshot.
  mv "$q.new" "$q"
  rm -f "$q.tmp"
}

process_queue "$@"