#!/usr/bin/env bash
# Recursively crawl an Apache-style directory index with curl and download
# every file whose name matches $DownFileType, mirroring the URL path
# under /tmp.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH
clear

# Root of the directory index to crawl.
Url="Http://mirrors.cnnic.cn/apache/"
# Final list of downloadable file URLs collected by the crawl.
DownListFile="/tmp/downlist.txt"
# Work queue of URLs still waiting to be examined.
DownListTmpFile="/tmp/tmplist.txt"
# Extended regex selecting which file URLs get downloaded.
DownFileType="zip$|gz$"
DownList=""
UrlBack="$Url"

# Create both work files empty, truncating any previous contents.
# (The original `[ ! -f f ] && touch f || echo > f` left a stray blank
# line when the file already existed.)
: > "$DownListFile"
: > "$DownListTmpFile"
CURL_URLS(){
# Fetch the index page at $UrlBack and extract the href targets of its
# <a href="..."> links into $Urls (newline-separated, relies on later
# word-splitting). Blank entries, query links (?...), absolute http://
# links and fragment links (#...) are filtered out, so only relative
# entries of the directory listing remain.
# NOTE(review): the original line's quoting was garbled by extraction;
# this is the reconstructed pipeline with the quotes properly escaped.
Urls=$(curl -s "$UrlBack" \
    | awk -F 'a href="' '{printf "%s\n",$2}' \
    | awk -F '"' '{printf "%s\n",$1}' \
    | grep -vE '^$|^\?|^http://|^#')
}
URL_LIST(){
# Refresh $Urls for the current $UrlBack (via CURL_URLS), then append each
# discovered relative link, prefixed with $UrlBack, to the work-queue file.
# $Urls is deliberately unquoted: word-splitting turns it into the list.
CURL_URLS
for i in $Urls ;do
    echo "$UrlBack$i" >> "$DownListTmpFile"
done
}
RECURSIVE_SEARCH_URL(){
# Drain the work queue: an entry with a non-empty basename is a file URL
# and is appended to $DownListFile; an entry ending in '/' is a directory
# index that gets expanded (via URL_LIST) into more queue entries. Each
# processed entry is removed from the queue, then the function recurses
# until the queue is empty and the script exits.
UrlBackTmps=$(cat "$DownListTmpFile")
# Queue empty: crawl finished. Exit 0 — the original's `exit 1` wrongly
# reported normal completion as failure.
[[ "$UrlBackTmps" == "" ]] && echo "no more page for search" && exit 0
for j in $UrlBackTmps ;do
    if [[ "${j##*/}" != "" ]] ;then
        # Non-empty basename => a file URL; record it for download.
        echo "$j" >> "$DownListFile"
    else
        # Trailing slash => a directory index; crawl it for more links.
        UrlBack="$j"
        URL_LIST
    fi
    # Remove the entry we just handled. -Fx matches the line literally and
    # exactly; the original `grep -vE "$j$"` treated the URL as a regex,
    # where '.' and '?' are metacharacters.
    UrlTmps=$(grep -Fxv "$j" "$DownListTmpFile")
    echo "$UrlTmps" > "$DownListTmpFile"
    RECURSIVE_SEARCH_URL
done
}
DOWNLOAD_FILE(){
# Download every URL in $DownListFile whose name matches the extended
# regex $DownFileType, mirroring the URL's path under /tmp. Files that
# already exist locally are not re-fetched.
DownList=$(grep -E "$DownFileType" "$DownListFile")
for k in $DownList ;do
    # Strip the scheme ("http://") so the remote path maps under /tmp.
    FilePath="/tmp/${k#*//}"
    FileDir=$(dirname "$FilePath")   # hoisted: original ran dirname twice
    [ -d "$FileDir" ] || mkdir -p "$FileDir"
    if [ ! -f "$FilePath" ]; then
        # Subshell keeps the cd from leaking, and if cd fails we do not
        # curl into the wrong directory.
        ( cd "$FileDir" && curl -O "$k" )
    fi
done
}
# Seed the queue from the root index, then crawl it to completion.
# (The original passed `$Urls`, which is unset here and ignored by
# URL_LIST anyway.)
URL_LIST
# RECURSIVE_SEARCH_URL terminates the shell via `exit` when the queue is
# empty, so it runs in a subshell; otherwise DOWNLOAD_FILE below could
# never be reached — the original defined it but never called it, so the
# script crawled without ever downloading anything.
( RECURSIVE_SEARCH_URL )
DOWNLOAD_FILE
# --END--
# Source article: "使用curl递归下载软件脚本分享" (a shared script for
# recursively downloading software with curl)
# Link: https://lsjlt.com/news/17924.html (cite the source when reposting)
# Questions / submissions: 279061341@qq.com (QQ 279061341)