forked from ShenXuGongZi/FanHaoSearch
-
Notifications
You must be signed in to change notification settings - Fork 0
/
FanHao_win.py
104 lines (87 loc) · 3.1 KB
/
FanHao_win.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
#-*- coding: cp936 -*
#cp936
#from pip.backwardcompat import raw_input
import urllib2
import re
import random
import webbrowser
import sys
print '#'*50
print '#'+'番号下载器'
print '#'*50
print '-'*20+'开始获取代理'+'-'*20
proxy_txt = open('proxy_list.txt','w')
proxy_tr = re.compile("(?isu)<tr[^>]*>(.*?)</tr>")
proxy_td = re.compile("(?isu)<td[^>]*>(.*?)</td>")
proxy_ua = {'User-Agent:':'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36'}
proxy_url = urllib2.Request(url='http://www.site-digger.com/html/articles/20110516/proxieslist.html',headers=proxy_ua)
try:
GetProxy = urllib2.urlopen(proxy_url)
HtmlRead = GetProxy.read()
except Exception:
print '-'*50
print '采集代理错误,请检查您的网络是否正常!'
print '-'*50
raw_input('按回车结束程序:')
else:
for row in proxy_tr.findall(HtmlRead):
for col in proxy_td.findall(row)[:1]:
proxy_txt.write(col+'\n')
proxy_txt.close()
print '-'*20+'获取代理完毕'+'-'*20
open_proxy = open('proxy_list.txt','r')
line0 = open_proxy.readlines()
open_proxy.close()
proxy_line = random.choice(line0)
proxy_handler = urllib2.ProxyHandler({'http://':'%s'%proxy_line})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
print '#'*50
print '##肾虚公子制作'
print '##项目主页: FanHao.miaowu.asia'
print '#'*50
Fanhao = raw_input('请输入番号:')
FanHao_style = open('FanHao.html','w')
FanHao_NR = '<link rel="stylesheet" href="http://cdn.bootcss.com/bootstrap/3.2.0/css/bootstrap.min.css">'+'\n'+'<center><h3>肾虚公子番号采集器</h3><hr/><p>本软件资料来源于网络,如有异议请联系作者</p><br/><table class="table table-striped table-hover"></center>'
FanHao_style.write(FanHao_NR)
FanHao_style.close()
Fanhao_html = open('FanHao.html','a')
ZhuaQ_LJ = re.compile('(?isu)<table class="torrent_name_tbl">(.*?)</table>')
proxy_ua = {'User-Agent:':'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36','Accept-Language:':'zh-CN,zh;q=0.8'}
proxy_url = urllib2.Request(url='https://btdigg.org/search?info_hash=&q='+Fanhao ,headers=proxy_ua)
GetProxy = urllib2.urlopen(proxy_url)
HtmlRead = GetProxy.read()
for LianJ in ZhuaQ_LJ.findall(HtmlRead):
type2 = sys.getfilesystemencoding()
xieru = LianJ.decode('utf-8').encode(type2)
Fanhao_html.write(xieru+'\n')
Fanhao_html.close()
#
FanHao_style_end = open('FanHao.html','a')
FanHao_NR_end = '</table>'
FanHao_style_end.write(FanHao_NR_end)
FanHao_style_end.close()
def tihuan(tiH, tiH2):
    """Replace every occurrence of the pattern *tiH* with *tiH2* inside
    FanHao.html, rewriting the file in place.

    tiH  -- regular-expression pattern (callers here pass plain literals
            with no regex metacharacters, so it acts as literal text)
    tiH2 -- replacement text
    Returns None.
    """
    TiHuan = 'FanHao.html'
    # Read once instead of readlines() + per-line re.sub: equivalent
    # because the callers' patterns never span line boundaries, and the
    # file is closed even if the substitution raises.
    fp = open(TiHuan, 'r')
    try:
        text = fp.read()
    finally:
        fp.close()
    fp = open(TiHuan, 'w')
    try:
        fp.write(re.sub(tiH, tiH2, text))
    finally:
        fp.close()
dl2 = tihuan('title="Add to BTCloud"','style="display:none;"')
dl3 = tihuan('class="attr_name"','style="display:none;"')
dl4 = tihuan('class="attr_val"','style="display:none;"')
dl5 = tihuan('magnet-link','磁力连接')
dl2
dl3
dl4
dl5
print '#'*50
print '>'*10+'番号获取成功'
print '#'*50
raw_input('按回车查看结果:')
open_url = webbrowser.open("FanHao.html")
open_url