Testing the Baidu and JD homepage search features with Python + Selenium

The example follows the Page Object pattern: a BasePage class wraps the common Selenium actions, the JD and SerchPage classes hold the locators and search flow for jd.com and baidu.com, and a ddt-driven unittest class supplies the test data.
# baseobject/selenium_base.py
class BasePage(object):
    def __init__(self, driver):
        self.driver = driver

    def get_url(self, url):
        """Open the given URL."""
        self.driver.get(url)

    def locater_element(self, locater):
        """Locate an element from a (By, value) tuple."""
        return self.driver.find_element(*locater)

    def input_text(self, locater, text):
        """Type text into the located element."""
        self.locater_element(locater).send_keys(text)

    def ciclk_method(self, locater):
        """Click the located element."""
        self.locater_element(locater).click()

    def close_bowser(self):
        """Quit the browser."""
        self.driver.quit()
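find_element fails immediately if the element has not rendered yet. Below is a minimal hardening sketch using an explicit wait; the class name WaitingBasePage and the timeout value are my own additions, not part of the original code:

# Hypothetical extension of BasePage with an explicit wait (sketch, not in the original).
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


class WaitingBasePage(BasePage):
    def locater_element(self, locater, timeout=10):
        # Wait up to `timeout` seconds for the element to be present before returning it.
        return WebDriverWait(self.driver, timeout).until(
            EC.presence_of_element_located(locater)
        )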
# po/serch_jd.py
from selenium.webdriver.common.by import By

from baseobject.selenium_base import BasePage


class JD(BasePage):
    """Locators used by the JD home page."""
    jd_input_id = (By.XPATH, "//input[@aria-label='搜索']")
    # Search button, matched by its visible text.
    jd_ciclk_value = (By.XPATH, "//button[text()='搜索']")

    def serch_text(self, text):
        self.input_text(self.jd_input_id, text)

    def ciclk_m(self):
        self.ciclk_method(self.jd_ciclk_value)

    def check(self, url, text):
        self.get_url(url)
        self.serch_text(text)
        self.ciclk_m()
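For a quick manual check outside of unittest, the page object can also be driven directly. A minimal usage sketch, assuming a local Chrome/chromedriver setup:

# Ad-hoc usage sketch (not part of the original test suite).
from selenium import webdriver

from po.serch_jd import JD

driver = webdriver.Chrome()
jd_page = JD(driver)
jd_page.check("https://www.jd.com", "python")   # open JD, type the keyword, click search
jd_page.close_bowser()                          # quit the browser when done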
# po/serch_test_baidu.py
from selenium.webdriver.common.by import By

from baseobject.selenium_base import BasePage


class SerchPage(BasePage):
    """Locators used by the Baidu home page."""
    input_id = (By.ID, "kw")
    ciclk_value = (By.ID, "su")

    def serch_text(self, text):
        self.input_text(self.input_id, text)

    def ciclk_m(self):
        self.ciclk_method(self.ciclk_value)

    def check(self, url, text):
        self.get_url(url)
        self.serch_text(text)
        self.ciclk_m()
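check() only performs the search; it never verifies in the browser that the search actually happened. Below is a sketch of one possible UI-level check that waits for the keyword to show up in the page title; the helper name title_contains_keyword and the timeout are my own assumptions, not part of the original code:

# Hypothetical UI assertion helper (sketch, not in the original).
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


def title_contains_keyword(driver, keyword, timeout=10):
    # Waits until the browser title contains the keyword (e.g. after a search),
    # returning True on success and raising TimeoutException otherwise.
    return WebDriverWait(driver, timeout).until(EC.title_contains(keyword))

In the test class below, this could be asserted with self.assertTrue(title_contains_keyword(driver, text)) after calling check().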
Test cases
import time
import unittest

import requests
from ddt import ddt, data, unpack
from selenium import webdriver

from po.serch_jd import JD
from po.serch_test_baidu import SerchPage


@ddt
class Test_Case(unittest.TestCase):
    def setUp(self) -> None:
        driver = webdriver.Chrome()
        # Both page objects share the same browser session.
        self.sp = SerchPage(driver)
        self.jd = JD(driver)

    def tearDown(self) -> None:
        self.sp.close_bowser()

    @data(["https://www.baidu.com", "java"], ["https://www.baidu.com", "python"])
    @unpack
    def test_01(self, url, text):
        self.sp.check(url, text)
        response = requests.get(url)
        self.assertEqual(200, response.status_code)
        print(response.status_code)
        print(response.text)
        time.sleep(3)

    @data(["https://www.jd.com", "java"], ["https://www.jd.com", "python"])
    @unpack
    def test_02(self, url, text):
        self.jd.check(url, text)
        response = requests.get(url)
        self.assertEqual(200, response.status_code)
        print(response.status_code)
        print(response.text)
        time.sleep(3)
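To run the module directly (in addition to python -m unittest), the standard entry point can be appended at the bottom of the test file:

if __name__ == "__main__":
    # Discover and run the tests defined in this module.
    unittest.main()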