#!/bin/sh
# Download every document listed in a TSV table into a local cache folder.
# Table format: header row, then one row per document with
#   column 1 = target filename, column 2 = source URL (tab-separated).

# constants
DOCUMENTS_FILE='documents.tsv'
CACHE='./cache'          # target folder for all downloads
TAB="$(printf '\t')"     # literal tab; $'\t' is a bashism, not POSIX sh

# create cache folder (quoted + '--' so odd CACHE values cannot break mkdir)
mkdir -p -- "${CACHE}"

# Strip the header line, then stream the rows straight into the loop.
# IFS=<tab> lets 'read' split the two columns itself, avoiding two awk
# subprocesses per row; '_rest' swallows any extra columns so URL stays
# exactly column 2. The '[ -n ... ]' guard still processes a final row
# that lacks a trailing newline.
sed '1d' "${DOCUMENTS_FILE}" | while IFS="${TAB}" read -r FILENAME URL _rest || [ -n "${FILENAME}" ]
do
  # actual download of one document
  printf 'Downloading %s/%s ...' "${CACHE}" "${FILENAME}"
  if wget -qO "${CACHE}/${FILENAME}" "${URL}" 2> /dev/null
  then
    printf " ok\n"       # wget returned with exit code 0
  else
    printf " error!\n"   # in any other case
  fi
done