歡迎來到Linux教程網
Linux教程網
Linux教程網
Linux教程網
Linux教程網 >> Linux基礎 >> Linux教程 >> Linux 下使用 wget/aria2 進行離線迅雷批量下載

Linux 下使用 wget/aria2 進行離線迅雷批量下載

日期:2017/2/28 16:04:28   編輯:Linux教程

在Linux下資源下載的速度長期受限,ed2k,torrent什麼都木有速度,坑爹呀,自從購買了迅雷VIP的服務,可以直接以http形式來從迅雷服務器內下載自己托迅雷下載的東西,而且如果你這個資源別人下載過的話,你就不用再次下載了,迅雷馬上提示你這個任務已經完成了。

至於其他的,用過的人都知道了,也不再細說。如果windows平台配合迅雷客戶端用迅雷VIP的話,這個腳本也沒有啥意義了(因為客戶端更人性化^_^,當然占用資源也不少,嘿嘿),所以前提是你的OS要是Linux,然後使用迅雷離線的web界面。

由於firefox下載迅雷離線的東西存在這樣幾個問題,比如文件名中文亂碼,要自己改(暗罵編碼ing),不支持斷點續傳(我掛過幾次,不過無奈重新下載了T_T),迅雷在點擊下載的時候,響應慢死了,好久才跳出窗口。

出於這幾個原因,我就去研究了下PT醬的那個離線下載的腳本,然後根據自己的需要重新寫如下:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
#Time-stamp: <2011-10-25 21:36:28 Tuesday by roowe>
#File Name: thuner_xl_with_wget.py
#Author: [email protected]
#My Blog: www.iroowe.com

import re
import time
import os
import logging
import sys
from htmlentitydefs import entitydefs
import subprocess
# Where the script writes its log; also echoed to the console unless -q is given.
LOG_FILE = "/tmp/thuner_with_wget.log"
# Module-level logger; initialised by log_init() in the __main__ block before use.
log = None
def log_init(log_file, quiet=False):
    """Configure and return the root logger.

    All records at DEBUG and above are appended to *log_file*; unless
    *quiet* is true they are also echoed to the console.
    """
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    fmt = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    file_handler = logging.FileHandler(log_file)
    file_handler.setFormatter(fmt)
    root.addHandler(file_handler)
    if not quiet:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(fmt)
        root.addHandler(console_handler)
    return root

def handle_entitydef(matchobj):
    """re.sub callback: resolve an HTML entity reference like ``&amp;``.

    *matchobj* group 1 holds the entity name (the text between ``&`` and
    ``;``). Returns the entity's replacement text when the name is known,
    otherwise the original matched text unchanged.
    """
    key = matchobj.group(1)
    # BUG FIX: dict.has_key() is Python-2-only and non-idiomatic;
    # the `in` operator is equivalent and works on Python 2 and 3.
    if key in entitydefs:
        return entitydefs[key]
    return matchobj.group(0)
def collect_urls(html, only_bturls = False):
"""
collect urls
"""
urls = []
for name, url in re.findall(r"<a.+?name=['\"]bturls['\"] title=['\"](.+?)['\"].+?href=['\"](http.+?)['\"]>", html):
name = re.sub("&(.*?);", handle_entitydef, name)
url = re.sub("&(.*?);", handle_entitydef, url)
urls.append((name, url))
if not only_bturls:
for id, name in re.findall(r'<input id=[\'"]durl(\w+?)[\'"].+title=[\'"](.+?)[\'"].+', html):
result = re.search(r'<input id=[\'"]dl_url%s[\'"].+value=[\'"](http.*?)[\'"]' % id, html)
if result:
name = re.sub("&(.*?);", handle_entitydef, name)
url = result.group(1)
url = re.sub("&(.*?);", handle_entitydef, url)
urls.append((name, url))
log.info("Filter get %d links" % len(urls))
return urls
def choose_download(urls):
    """Interactively ask, for each (name, url) pair, whether to download it.

    Returns a dict mapping each name to True (download) or False (skip).
    An empty answer defaults to yes; anything other than y/n re-prompts.
    """
    choices = {}
    for name, _url in urls:
        while True:
            answer = raw_input("Download %s?[Y/n](default: Y) " % name)
            if len(answer) == 0 or answer.lower() == 'y':
                choices[name] = True
                break
            if answer.lower() == 'n':
                choices[name] = False
                break
            sys.stdout.write("please enter y or n!\n")
    return choices
def thuner_xl_with_wget(urls, output_dir, cookies_file, quiet=False):
    """Download each user-selected url with wget (resume, 5 tries).

    Entries with empty urls or deselected names are skipped. On failure,
    exit statuses 3 and 8 are treated as fatal for that item; any other
    failure re-appends the item to ``urls`` so this same loop retries it
    later. The 2-second pause happens only after a successful download.
    """
    download_list = choose_download(urls)
    for name, url in urls:
        if len(url) == 0:
            log.debug("Empty Link, Name: " + name)
            continue
        if not download_list[name]:
            continue
        cmd = ["wget", "--load-cookies", cookies_file, "-c", "-t", "5",
               "-O", os.path.join(output_dir, name), url]
        if quiet:
            cmd.insert(1, "-q")
        log.info("wget cmd: '%s'" % ' '.join(cmd))
        ret = subprocess.call(cmd)
        if ret != 0:
            log.debug("wget returned %d." % ret)
            if ret in (3, 8):
                log.error("Give up '%s', may be already finished download, or something wrong with disk." % name)
            else:
                urls.append((name, url))
                log.error("will retry for %s later." % name)
            continue
        log.info("Finished %s" % name)
        time.sleep(2)
def thuner_xl_with_aria2c(urls, output_dir, cookies_file, quiet=False):
    """Download each user-selected url with aria2c (resume, 5 retries,
    5 connections per file).

    Entries with empty urls or deselected names are skipped. aria2c exit
    status 13 (file already exists) is treated as fatal for that item;
    any other failure re-appends the item to ``urls`` so this same loop
    retries it later. The 2-second pause happens only after success.
    """
    download_list = choose_download(urls)
    for name, url in urls:
        if len(url) == 0:
            log.debug("Empty Link, Name: " + name)
            continue
        if not download_list[name]:
            continue
        cmd = ["aria2c", "--load-cookies", cookies_file, "-d", output_dir,
               "-c", "-m", "5", "-s", "5", "-o", name, url]
        if quiet:
            cmd.insert(1, "-q")
        # BUG FIX: log messages said "wget" in the aria2c code path
        # (copy-paste from thuner_xl_with_wget), which made logs misleading.
        log.info("aria2c cmd: '%s'" % ' '.join(cmd))
        ret = subprocess.call(cmd)
        if ret != 0:
            log.debug("aria2c returned %d." % ret)
            # BUG FIX: original `if ret in (13)` raised TypeError because
            # (13) is the int 13, not a one-element tuple.
            if ret == 13:
                log.error("Give up '%s', file already existed." % name)
            else:
                urls.append((name, url))
                log.error("the exit status number is %d, and then will retry for %s later." % (ret, name))
            continue
        log.info("Finished %s" % name)
        time.sleep(2)

if __name__ == "__main__":
    import argparse

    # Command-line interface: page/cookie/output paths plus mode switches.
    parser = argparse.ArgumentParser(
        description='Thuner li xian with wget',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-p', nargs='?', default="~/user_task.htm", help="load page file")
    parser.add_argument('-c', nargs='?', default="~/cookies.txt", help="load cookie file")
    parser.add_argument('-o', nargs='?', default="~/Downloads", help="output dir")
    parser.add_argument('-b', action='store_true', default=False, help="bt files only")
    parser.add_argument('-q', action="store_true", default=False, help="quiet, only log to file.")
    parser.add_argument('-a', action="store_true", default=False, help="download with aria2c")
    args = parser.parse_args()

    only_bturls = args.b
    quiet = args.q
    page_file = os.path.expanduser(args.p)
    cookies_file = os.path.realpath(os.path.expanduser(args.c))
    output_dir = os.path.expanduser(args.o)

    log = log_init(LOG_FILE, quiet=quiet)
    if not os.path.exists(cookies_file):
        log.info("please export cookies file")
        sys.exit(0)
    if not os.path.isdir(output_dir):
        log.info("No such %s", output_dir)
        sys.exit(0)

    with open(page_file) as f:
        page_html = f.read()
    urls = collect_urls(page_html, only_bturls)
    if args.a:
        thuner_xl_with_aria2c(urls, output_dir, cookies_file, quiet)
    else:
        thuner_xl_with_wget(urls, output_dir, cookies_file, quiet)

Copyright © Linux教程網 All Rights Reserved