# 有感而发几篇博客:
# "While we teach we learn" 欢迎提问!
# 可以当包来用:
'''
1.
用户代理伪装浏览器使用爬虫爬取代理ip
2.
代理ip写入txt文件
'''
import time
from urllib import request
import re
import random
def useuaippools():
    """Scrape proxy IPs with a randomized User-Agent and install a proxy opener.

    Defines three helpers:
      - coolectipdaxiang(): scrapes proxy IPs from xiladaili.com pages and
        writes one text file per page. (Defined but not invoked here —
        presumably run manually beforehand to populate the IP files.)
      - addiptxt(): reads previously saved proxy IPs back from disk.
      - ipuapo(): picks a random UA + random stored proxy IP and installs
        them globally via ``request.install_opener``.

    Finally prompts the user; entering 1 installs a proxy opener from the
    stored IP file. Interactive and I/O-bound; returns None.
    """
    # NOTE(review): hard-coded Windows path — consider parameterizing.
    path = 'C:\\Users\\Administrator\\Desktop\\Python基础与应用\\爬虫\\爬到的东西\\'
    # Pool of User-Agent strings to rotate through (desktop + mobile).
    uapools = [
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763,',
        'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
        'Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) CriOS/31.0.1650.18 Mobile/11B554a Safari/8536.25',
        'Mozilla/5.0 (iPhone; CPU iPhone OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12F70 Safari/600.1.4'
    ]

    def coolectipdaxiang():
        """Scrape proxy IPs from xiladaili pages n1..n2; one output file per page."""
        n1 = int(input("请输入爬取开始页数:"))
        n2 = int(input("请输入爬取页数结束页数:"))
        for page in range(n1, n2 + 1):
            url = 'http://www.xiladaili.com/gaoni/' + str(page) + '/'
            data = request.urlopen(url).read().decode('utf-8')
            # First <td> of each table row holds the "ip:port" string.
            pet = '<tr>.*?<td>(.*?)</td>'
            found = re.compile(pet, re.S).findall(data)
            print(found)
            with open(path + '4.ip地址储存' + str(page) + '.txt', 'w+', encoding='utf-8') as f:
                for ip in found:
                    f.write(ip + '\n')
            # Fixed typo in original message: 写如 -> 写入
            print('写入文本文档成功!!!')

    def addiptxt():
        """Return the list of stored proxy-IP lines read from the save file."""
        path = 'C:\\Users\\Administrator\\Desktop\\Python基础与应用\\爬虫\\爬到的东西\\'
        with open(path + '4.ip地址储存.txt', 'r+', encoding='utf-8') as f:
            read = f.readlines()
        print('读取文本文档成功!!!')
        return read

    def ipuapo():
        """Install a global urllib opener with a random UA and random stored proxy."""
        ua = random.choice(uapools)
        head = ('User-Agent', ua)
        ippools = addiptxt()
        # readlines() keeps the trailing newline — strip it, otherwise the
        # proxy host string passed to ProxyHandler contains '\n'.
        thisip = random.choice(ippools).strip()
        proxy = request.ProxyHandler({"http": thisip})
        opener = request.build_opener(proxy, request.HTTPHandler)
        opener.addheaders = [head]
        request.install_opener(opener=opener)
        print("此次使用的UA为:{}此次使用的ip为:{}".format(head, thisip))

    if int(input("输入1直接使用文件中的ip地址")) == 1:
        ipuapo()
'''
Example driver (disabled):
for i in range(3):
    useuaippools()
    time.sleep(random.uniform(1.1, 1.5))
    url = 'https://useragent.buyaocha.com/'
    data = request.urlopen(url).read().decode('utf-8')
    print(data)
'''