Background
I recently needed to analyze a batch of APK files, and downloading and uploading them by hand was too time-consuming, so I wrote a small Python crawler that drives two websites, apk3.com and mogua.co, to automatically download, upload, and analyze mobile APKs (when an analysis finishes, it prints a link for viewing the results).
The crawler's two modules, downapk and upload_scan, are essentially independent, so the download and analysis features can each be used on their own (a usage sketch follows the full listing).
If you are interested in the parts that parse the apk3 site, you will need to study the site's HTML yourself; for the mogua side, the site's API documentation is the reference to consult.
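A quick way to study the page structure is to fetch a page with the same spider User-Agent the crawler uses and dump the prettified HTML. Below is a minimal sketch; the list-page URL is just one example, any apk3 page works the same way:

```python
# Minimal inspection sketch: fetch one apk3 list page and pretty-print its
# HTML so the div.ginfo structure used by the crawler can be eyeballed.
import requests
from bs4 import BeautifulSoup

header = {'User-Agent': 'Mozilla/5.0 (compatible; Baiduspider-render/2.0; '
                        '+http://www.baidu.com/search/spider.html)'}
html = requests.get('https://www.apk3.com/List/add-soft-1.html', headers=header).text
print(BeautifulSoup(html, "html.parser").prettify())
```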
Required tools
- Python 3 plus the libraries used: BeautifulSoup and requests (installable as the beautifulsoup4 and requests pip packages)
- APK download site: https://www.apk3.com
- APK analysis site: https://mogua.co
Implementation
from bs4 import BeautifulSoup
import requests
import re
import urllib.request  # urlretrieve lives in urllib.request, not the bare urllib module
import time
import json
import subprocess
def upload_scan(file0):
    filename = file0
    mgurl_upload = 'https://mogua.co/api/v1/upload'
    mgurl_scan = 'https://mogua.co/api/v1/scan'
    # curl -F uploads the file as multipart form data; apks are expected under /data/apk3/
    files = "file=@/data/apk3/" + filename
    print('begin to upload ' + files)
    # replace xxx with your mogua API key
    uploadresult = subprocess.check_output(
        'curl -F "{file}" {url} -H "Authorization:xxx"'.format(file=files, url=mgurl_upload),
        shell=True)
    print(uploadresult)
    djson = json.loads(uploadresult)
    if djson.get('status') == 'success':
        print('[+]upload apk success')
        filehash = djson.get('hash')
        fname = djson.get('file_name')
        # kick off static analysis for the file we just uploaded
        scanresult = subprocess.check_output(
            'curl -X POST --url {url} --data "scan_type=apk&file_name={filename}&hash={filehash}" -H "Authorization:xxx"'.format(url=mgurl_scan, filename=fname, filehash=filehash),
            shell=True)
        sjson = json.loads(scanresult)
        if sjson.get('title'):
            print('[+]scan success. view result: ' + 'https://mogua.co/static_analyzer/?name={filename}&{filehash}&type=apk'.format(filename=fname, filehash=filehash))
        else:
            print('[-]scan failed')
    else:
        print('[-]upload failed')
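For completeness, the same two API calls can also be made with requests instead of shelling out to curl. The sketch below is only inferred from the parameters the curl commands above pass; check it against mogua's API documentation before relying on it:

```python
# Hedged sketch: upload + scan via requests instead of curl. Endpoints, form
# fields and the Authorization header are taken from the curl commands above,
# not from mogua's API docs, so treat them as assumptions.
import requests

API_KEY = 'xxx'  # your mogua API key

def upload_scan_requests(filename):
    headers = {'Authorization': API_KEY}
    with open('/data/apk3/' + filename, 'rb') as f:
        up = requests.post('https://mogua.co/api/v1/upload',
                           files={'file': f}, headers=headers).json()
    if up.get('status') != 'success':
        print('[-]upload failed')
        return
    data = {'scan_type': 'apk',
            'file_name': up.get('file_name'),
            'hash': up.get('hash')}
    scan = requests.post('https://mogua.co/api/v1/scan',
                         data=data, headers=headers).json()
    print('[+]scan submitted' if scan.get('title') else '[-]scan failed')
```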
def downapk(dlink):
    # dlink is the relative path of an app's detail page on apk3.com
    apkurl = 'https://www.apk3.com' + dlink
    print('[i]current url: ' + apkurl)
    # pose as the Baidu spider; the site serves pages to this User-Agent
    header = {'User-Agent': 'Mozilla/5.0 (compatible; Baiduspider-render/2.0; +http://www.baidu.com/search/spider.html)'}
    r1 = requests.get(apkurl, headers=header)
    demo1 = r1.text
    try:
        # the real download link points at the site's CDN; pull it straight from the raw HTML
        apklink = re.findall(r"http://gyxza32\.yjruhu\.com.*?\.apk", demo1)[0]
        # name the local file after the numeric id in the detail-page path
        apkname = 'apk' + re.findall(r"\d+", dlink)[0] + '.apk'
        urllib.request.urlretrieve(apklink, apkname)
        print('[+]download success: ' + apkname)
        upload_scan(apkname)
    except Exception:
        # no download link found or the download failed; skip this apk
        return
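One caveat with urlretrieve is that it sends Python's default User-Agent rather than the spider one. If the CDN ever rejects that, a streamed requests download with the same header is a drop-in replacement; a sketch:

```python
# Hedged drop-in for the urlretrieve call above: stream the apk to disk with
# requests so the same spider User-Agent is sent on the download too.
import requests

def fetch_apk(apklink, apkname, header):
    with requests.get(apklink, headers=header, stream=True, timeout=60) as r:
        r.raise_for_status()
        with open(apkname, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
```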
count = 0
# walk the first 20 pages of apk3.com's "recently added" list
for i in range(1, 21):
    url = 'https://www.apk3.com/List/add-soft-' + str(i) + '.html'
    header = {'User-Agent': 'Mozilla/5.0 (compatible; Baiduspider-render/2.0; +http://www.baidu.com/search/spider.html)'}
    r = requests.get(url, headers=header)
    demo = r.text
    soup = BeautifulSoup(demo, "html.parser")
    # every app entry on a list page sits in a <div class="ginfo">
    ginfoList = soup.find_all(name='div', attrs={"class": "ginfo"})
    for j in ginfoList:
        alinks = j.select('a')
        alink1 = alinks[0]['href']  # first <a> links to the app's detail page
        # the second <i> tag holds the apk size; grab its first integer (presumably MB)
        asize = int(re.findall(r'\d+', str(j.find_all(name='i')[1]))[0])
        if asize > 10:
            continue  # skip apks larger than 10 to keep the batch quick
        count = count + 1
        downapk(alink1)
        time.sleep(1)  # be polite between downloads
print('done apks: ' + str(count))
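Since downapk and upload_scan are independent, either can also be called on its own. In the example below both arguments are hypothetical; substitute a real detail-page path from apk3.com, or an apk that actually exists under /data/apk3/:

```python
# Hypothetical standalone calls; neither argument refers to a real page/file.
downapk('/Soft/12345.html')   # fetch one detail page, download and scan its apk
upload_scan('apk12345.apk')   # or scan an apk that was downloaded earlier
```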