-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: www.dy2018.com.py
More file actions
73 lines (63 loc) · 2.21 KB
/
www.dy2018.com.py
File metadata and controls
73 lines (63 loc) · 2.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
# www.dy2018.com test script (movie download-link scraper).
# Fetches listing pages, walks each movie's detail page, and stores
# (title, download URL) rows into the `url` table of the `dy2018` schema.
from urllib.request import urlopen, HTTPError, Request
import threading
import pymysql
from bs4 import BeautifulSoup
# Single shared connection/cursor used by every function below.
# NOTE(review): credentials are hard-coded; move to config before real use.
conn1 = pymysql.connect(host='127.0.0.1',
user='root',
passwd='root',
db='mysql', charset='utf8',
cursorclass=pymysql.cursors.DictCursor)
cur1 = conn1.cursor()
# Switch to the scraper's own schema after connecting to the default db.
cur1.execute("use dy2018")
def DownloadUrl(id, url):
    """Insert one (id, url) row into the `url` table and commit.

    Parameters:
        id:  movie title used as the row id.
        url: download link extracted from the detail page.

    Note: the parameter name `id` shadows the builtin; kept for
    interface compatibility with existing callers.
    """
    # Bug fix: the placeholders were wrapped in escaped double quotes
    # ("\"%s\""), but pymysql quotes parameter values itself, so the
    # stored rows contained literal quote characters. Bare %s lets the
    # driver quote and escape the values correctly (and safely).
    cur1.execute(
        "insert into url (id, url) values (%s, %s)", (id, url))
    conn1.commit()
def spideInformation(url):
    """Scrape one movie detail page and store its download links.

    Extracts the movie title (the text between 《 and 》 in the page
    heading) and every download-link table cell, then inserts one row
    per link via DownloadUrl. HTTP errors are logged and swallowed so
    one bad page does not abort the whole crawl.
    """
    try:
        html = urlopen(url).read()
        # Pages are GBK-encoded; re-encode to UTF-8 bytes for parsing.
        html = html.decode('gbk', 'ignore').encode('utf-8')
        bsObj = BeautifulSoup(html, "html.parser")
        # Bug fix: the original printed str(id) -- the *builtin* id
        # function, since no variable `id` exists in this scope.
        print("命中" + url)
        # Download links live in the pale-yellow (#fdfddf) table cells.
        urllist = bsObj.find_all("td", {"bgcolor": "#fdfddf"})
        urlName = bsObj.find_all("div", class_="title_all")
        for t in urlName:
            if t.find("h1") is None:
                continue
            # Heading format: ...《Movie Name》... -> keep the middle part.
            temp, urlName = t.find("h1").get_text().split('《')
            urlName, temp = urlName.split('》')
            break
        # Renamed loop variable (was `url`, shadowing the parameter).
        for cell in urllist:
            print(urlName)
            DownloadUrl(urlName, cell.find('a').get_text())
    except HTTPError as e:
        # Log the failing page and HTTP status, then continue.
        # Bug fixes: printed the builtin `id` before; also removed the
        # leftover LOCK/THREADCOUNT bookkeeping -- neither name is
        # defined anywhere in this file, so the old code raised
        # NameError/UnboundLocalError on every HTTPError.
        print(url)
        print(e.code)
def SpideUrl(id):
    """Scrape one listing page (index_<id>.html) of movie entries.

    Collects the detail-page href from every movie table on the page,
    then scrapes each detail page via spideInformation.

    Note: the parameter name `id` shadows the builtin; kept for
    interface compatibility with existing callers.
    """
    urlList = []
    html = urlopen("http://117.169.20.240:9090/4/index_" + str(id) + ".html").read()
    # Listing pages are GBK-encoded; re-encode to UTF-8 for parsing.
    html = html.decode('gbk', 'ignore').encode('utf-8')
    bsObj = BeautifulSoup(html, "html.parser")
    # One <table class="tbspan"> per movie entry on the listing page.
    tableList = bsObj.find_all("table", class_="tbspan")
    for table in tableList:
        # Hoisted the repeated find() -- the original searched the
        # same table twice for the identical anchor.
        href = table.find("a", title=True)['href']
        print("a")
        print(href)
        urlList.append(href)
    for u in urlList:
        # u looks like /html/.../NNNNN.html; prints the numeric page id.
        print(u[-10:-5])
        spideInformation("http://117.169.20.240:9090"+ u)
# (Removed a dead, triple-quoted-out single-page test entry point that
# previously lived here.)
if __name__ == '__main__':
    # Scrape listing page 35 only (range end is exclusive); widen the
    # range to crawl more pages.
    for x in range(35, 36):
        SpideUrl(x)
        print(x)
    conn1.close()