From 44645f177d504e00f9b224cee1e9304d43f6ae9a Mon Sep 17 00:00:00 2001 From: chaos <381810956@qq.com> Date: Fri, 19 Mar 2021 16:14:34 +0800 Subject: [PATCH 1/6] =?UTF-8?q?Create=20=E7=AC=AC=E5=8D=81=E4=BA=8C?= =?UTF-8?q?=E5=91=A8=5F=E7=AC=AC=E4=B8=80=E8=8A=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../.keep" | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 "\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/.keep" diff --git "a/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/.keep" "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/.keep" new file mode 100644 index 00000000..e69de29b -- Gitee From 64c46f201033b5cb46eff05221a8617d7a419802 Mon Sep 17 00:00:00 2001 From: chaos <381810956@qq.com> Date: Fri, 19 Mar 2021 16:15:04 +0800 Subject: [PATCH 2/6] =?UTF-8?q?Create=20=E7=AC=AC=E5=8D=81=E4=BA=8C?= =?UTF-8?q?=E5=91=A8=5F=E7=AC=AC=E4=BA=8C=E8=8A=82?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../.keep" | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 "\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\272\214\350\212\202/.keep" diff --git "a/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\272\214\350\212\202/.keep" 
"b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\272\214\350\212\202/.keep" new file mode 100644 index 00000000..e69de29b -- Gitee From 8e60c473430a0c176293942bf8fb18daf7f65ac6 Mon Sep 17 00:00:00 2001 From: chaos <381810956@qq.com> Date: Fri, 19 Mar 2021 16:17:56 +0800 Subject: [PATCH 3/6] scrapy redis practice --- .../items.py" | 17 +++ .../pipelines.py" | 26 +++++ .../producer.py" | 37 ++++++ .../settings.py" | 109 ++++++++++++++++++ .../stackoverflow_redis.py" | 32 +++++ 5 files changed, 221 insertions(+) create mode 100644 "\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/items.py" create mode 100644 "\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/pipelines.py" create mode 100644 "\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/producer.py" create mode 100644 "\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/settings.py" create mode 100644 "\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/stackoverflow_redis.py" diff --git "a/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/items.py" 
"b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/items.py" new file mode 100644 index 00000000..fc516987 --- /dev/null +++ "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/items.py" @@ -0,0 +1,17 @@ +# Define here the models for your scraped items +# +# See documentation in: +# https://docs.scrapy.org/en/latest/topics/items.html + +import scrapy + + +class StackoverflowSrItem(scrapy.Item): + # define the fields for your item here like: + # name = scrapy.Field() + id = scrapy.Field() + questions = scrapy.Field() + votes = scrapy.Field() + answers = scrapy.Field() + views = scrapy.Field() + links = scrapy.Field() \ No newline at end of file diff --git "a/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/pipelines.py" "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/pipelines.py" new file mode 100644 index 00000000..98b6a73d --- /dev/null +++ "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/pipelines.py" @@ -0,0 +1,26 @@ +# Define your item pipelines here +# +# Don't forget to add your pipeline to the ITEM_PIPELINES setting +# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html + + +# useful for handling different item types with a single interface +from itemadapter import ItemAdapter +import pymongo + + +class StackoverflowSrPipeline: + 
def __init__(self): + self.connection = pymongo.MongoClient('127.0.0.1', 27017) + self.db = self.connection.scrapy_redis + self.collection = self.db.stackoverflow + + def process_item(self, item, spider): + if not self.connection or not item: + return + self.collection.save(item) + + def __del__(self): + if self.connection: + self.connection.close() + diff --git "a/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/producer.py" "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/producer.py" new file mode 100644 index 00000000..39ed26c6 --- /dev/null +++ "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/producer.py" @@ -0,0 +1,37 @@ +import redis +import json +import time + + +class Producer(object): + + def __init__(self): + self.redis_conn = None + self.redis_host = '127.0.0.1' + self.redis_port = 6379 + self.redis_db = 5 + + def stackoverflow_producer(self): + if not self.redis_conn: + self.redis_conn = redis.Redis(host=self.redis_host, port=self.redis_port, db=self.redis_db) + + _url = "https://stackoverflow.com/questions/tagged/python?tab=newest&page={}&pagesize=15" + + for page in range(1, 10000): + url = _url.format(page) + meta = { + "sta_date": time.strftime("%Y-%m-%d"), + "page_num": page + } + task = json.dumps({ + "url": url, + "meta": meta, + "method": 'GET', + "body": '' + }) + self.redis_conn.lpush("stackoverflow:start_urls", task) + + +if __name__ == '__main__': + producer = Producer() + producer.stackoverflow_producer() diff --git 
"a/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/settings.py" "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/settings.py" new file mode 100644 index 00000000..3494a0bb --- /dev/null +++ "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/settings.py" @@ -0,0 +1,109 @@ +# Scrapy settings for stackoverflow_sr project +# +# For simplicity, this file contains only settings considered important or +# commonly used. You can find more settings consulting the documentation: +# +# https://docs.scrapy.org/en/latest/topics/settings.html +# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html +# https://docs.scrapy.org/en/latest/topics/spider-middleware.html + +BOT_NAME = 'stackoverflow_sr' + +SPIDER_MODULES = ['stackoverflow_sr.spiders'] +NEWSPIDER_MODULE = 'stackoverflow_sr.spiders' + + +# Crawl responsibly by identifying yourself (and your website) on the user-agent +#USER_AGENT = 'stackoverflow_sr (+http://www.yourdomain.com)' + +# Obey robots.txt rules +ROBOTSTXT_OBEY = False + +# Configure maximum concurrent requests performed by Scrapy (default: 16) +#CONCURRENT_REQUESTS = 32 + +# Configure a delay for requests for the same website (default: 0) +# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay +# See also autothrottle settings and docs +#DOWNLOAD_DELAY = 3 +# The download delay setting will honor only one of: +#CONCURRENT_REQUESTS_PER_DOMAIN = 16 +#CONCURRENT_REQUESTS_PER_IP = 16 + +# Disable cookies (enabled by default) +#COOKIES_ENABLED = False + +# Disable Telnet Console (enabled by default) 
+#TELNETCONSOLE_ENABLED = False + +# Override the default request headers: +#DEFAULT_REQUEST_HEADERS = { +# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', +# 'Accept-Language': 'en', +#} + +# Enable or disable spider middlewares +# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html +#SPIDER_MIDDLEWARES = { +# 'stackoverflow_sr.middlewares.StackoverflowSrSpiderMiddleware': 543, +#} + +# Enable or disable downloader middlewares +# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html +#DOWNLOADER_MIDDLEWARES = { +# 'stackoverflow_sr.middlewares.StackoverflowSrDownloaderMiddleware': 543, +#} + +# Enable or disable extensions +# See https://docs.scrapy.org/en/latest/topics/extensions.html +#EXTENSIONS = { +# 'scrapy.extensions.telnet.TelnetConsole': None, +#} + +# Configure item pipelines +# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html +#ITEM_PIPELINES = { +# 'stackoverflow_sr.pipelines.StackoverflowSrPipeline': 300, +#} + +# Enable and configure the AutoThrottle extension (disabled by default) +# See https://docs.scrapy.org/en/latest/topics/autothrottle.html +#AUTOTHROTTLE_ENABLED = True +# The initial download delay +#AUTOTHROTTLE_START_DELAY = 5 +# The maximum download delay to be set in case of high latencies +#AUTOTHROTTLE_MAX_DELAY = 60 +# The average number of requests Scrapy should be sending in parallel to +# each remote server +#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0 +# Enable showing throttling stats for every response received: +#AUTOTHROTTLE_DEBUG = False + +# Enable and configure HTTP caching (disabled by default) +# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings +#HTTPCACHE_ENABLED = True +#HTTPCACHE_EXPIRATION_SECS = 0 +#HTTPCACHE_DIR = 'httpcache' +#HTTPCACHE_IGNORE_HTTP_CODES = [] +#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage' + + +USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.82 Safari/537.36" + + +# scrapy redis configuration +DUPEFILTER_CLASS = "scrapy_redis.dupefilter.RFPDupeFilter" +SCHEDULER = "scrapy_redis.scheduler.Scheduler" +SCHEDULER_QUEUE_CLASS = 'scrapy_redis.queue.PriorityQueue' +SCHEDULER_PERSIST = True + +ITEM_PIPELINES = { + 'stackoverflow_sr.pipelines.StackoverflowSrPipeline': 300, + 'scrapy_redis.pipelines.RedisPipeline': 400 +} + +# Redis settings +REDIS_HOST = 'localhost' +REDIS_PORT = 6379 +# Redis parameters +REDIS_PARAMS = {"db": 5} diff --git "a/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/stackoverflow_redis.py" "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/stackoverflow_redis.py" new file mode 100644 index 00000000..1207055d --- /dev/null +++ "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\200\350\212\202/stackoverflow_redis.py" @@ -0,0 +1,32 @@ +from scrapy_redis.spiders import RedisSpider +import json +import scrapy +from stackoverflow_sr.items import StackoverflowSrItem + +class StackOverFlowRedis(RedisSpider): + + name = "stackoverflow_redis" + + redis_key = "stackoverflow:start_urls" + + def make_request_from_data(self, data): + task = json.loads(data.decode('utf-8')) + return scrapy.http.FormRequest(url=task['url'], + method=task['method'], + meta=task['meta'], + dont_filter=False, + callback=self.stackoverflow_parse + ) + + def stackoverflow_parse(self, response): + question_list = response.xpath('//*[@id="questions"]') + + for ques in question_list.xpath('./div/div'): + item = StackoverflowSrItem() + item['id'] = 
from selenium import webdriver
from selenium.webdriver.common.by import By
from time import sleep


class Demo(object):
    """Minimal Selenium demo: open Baidu, search for 'selenium', close."""

    def __init__(self):
        self.options = webdriver.ChromeOptions()
        # Hide the navigator.webdriver automation fingerprint.
        self.options.add_argument('disable-blink-features=AutomationControlled')
        self.driver = webdriver.Chrome(options=self.options)
        self.driver.get("https://www.baidu.com")
        self.driver.maximize_window()

    def test01(self):
        """Type a query into the search box, click search, then quit.

        BUG FIX: ``find_element_by_id`` was deprecated in Selenium 4 and
        removed in 4.3; use ``find_element(By.ID, ...)`` instead.
        """
        self.driver.find_element(By.ID, 'kw').send_keys('selenium')
        self.driver.find_element(By.ID, 'su').click()
        sleep(5)
        self.driver.close()


if __name__ == '__main__':
    demo = Demo()
    demo.test01()
"a/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\211\350\212\202/selenium_demo03.py" "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\211\350\212\202/selenium_demo03.py" new file mode 100644 index 00000000..fd687cc8 --- /dev/null +++ "b/\347\254\254\344\272\214\346\234\237\350\256\255\347\273\203\350\220\245/2\347\217\255/2\347\217\255_chaos/\347\254\254\345\215\201\344\272\214\345\221\250_\347\254\254\344\270\211\350\212\202/selenium_demo03.py" @@ -0,0 +1,68 @@ +from selenium import webdriver +from time import sleep +from selenium.webdriver.common.by import By +from selenium.webdriver.support.ui import WebDriverWait +from selenium.webdriver.support import expected_conditions as EC +from selenium.webdriver import ActionChains +from selenium.webdriver.common.keys import Keys + + +class TestSelenium03(): + + def __init__(self, proxy=None, user_agent=None, user_dir=None): + self.options = webdriver.ChromeOptions() + self.options.add_argument("disable-blink-features=AutomationControlled") + self.options.add_argument("--headless") + if not proxy: + self.options.add_argument(f'--proxy-server={proxy}') + if not user_agent: + self.options.add_argument(f'--user-agent={user_agent}') + if not user_dir: + self.options.add_argument(f'--user-data-dir={user_dir}') + # add extension + # self.driver.add_extension("extension path") + + self.driver = webdriver.Chrome() + self.driver.get("https://www.jd.com") + + def test_js(self): + self.driver.execute_script("return location.href") + self.driver.execute_cdp_cmd("Page.addScriptToEvaluateOnNewDocment", { + "source": """ + Object.defineProperty(navigator, 'webdriver', { + get: ()=> 'my_webdriver' + }) + """ + }) + sleep(3) + self.driver.close() + + def test_windows(self): + 
self.driver.maximize_window() + self.driver.set_window_size(width=1200, height=600) + self.driver.execute_script("window.open('http://www.baidu.com')") + self.driver.switch_to.window(self.driver.window_handles[-1]) + self.driver.switch_to.frame(self.driver.find_element_by_css_selector("iframe")) + self.driver.close() + + def test_webdriver_wait(self): + waiter = WebDriverWait(self.driver, 10) + waiter.until(EC.presence_of_all_elements_located((By.ID, "testElement"))) + waiter.until(EC.element_to_be_clickable((By.ID, "testClickElement"))) + self.driver.close() + + def test_action_chains(self): + ac = ActionChains(self.driver) + ac.move_to_element("element").click().perform() + ac.send_keys(Keys.ENTER).perform() + self.driver.close() + + +if __name__ == '__main__': + custom_proxy = '127.0.0.1:4080' + custom_user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36" + ts = TestSelenium03(custom_proxy, custom_user_agent) + ts.test_js() + # ts.test_windows() + # ts.test_webdriver_wait() + # ts.test_action_chains() -- Gitee