requests设置代理

import requests

# Proxy endpoint running locally (e.g. a local forwarder on port 9743).
proxy = '127.0.0.1:9743'
# requests picks the proxy by the scheme of the target URL.
proxies = {
    'http': 'http://' + proxy,
    'https': 'https://' + proxy,
}
try:
    # httpbin.org/get echoes the request origin, so the response shows
    # whether the proxy was actually used.
    response = requests.get('http://httpbin.org/get', proxies=proxies)
    print(response.text)
except requests.exceptions.ConnectionError as e:
    # Raised when the proxy itself is unreachable.
    print('Error', e.args)

selenium设置代理

from selenium import webdriver

# Proxy endpoint running locally (same forwarder as the requests example).
proxy = '127.0.0.1:9743'
chrome_options = webdriver.ChromeOptions()
# Chrome takes the proxy as a command-line switch.
chrome_options.add_argument('--proxy-server=http://' + proxy)
# Selenium 4 removed the deprecated `chrome_options=` keyword; `options=`
# works on both Selenium 3.8+ and 4.x.
browser = webdriver.Chrome(options=chrome_options)
browser.get('http://httpbin.org/get')

scrapy设置代理

class ProxyMiddleware(object):
    """Scrapy downloader middleware that attaches a random proxy to each request."""

    def get_random_proxy(self):
        """Fetch one proxy (``host:port`` text) from a locally maintained pool.

        Returns the proxy string on HTTP 200, otherwise ``None``.
        """
        try:
            resp = requests.get('http://127.0.0.1:5555/random')
            if resp.status_code == 200:
                return resp.text
        except requests.RequestException:
            # Pool service unreachable — fall back to a direct connection.
            return None
        return None

    def process_request(self, request, spider):
        """Set ``request.meta['proxy']`` when the pool yields a proxy."""
        proxy = self.get_random_proxy()
        if proxy:
            # Scrapy's HttpProxyMiddleware needs a full URL with scheme;
            # prefix it if the pool returned a bare host:port.
            if not proxy.startswith('http'):
                proxy = 'http://' + proxy
            request.meta['proxy'] = proxy

版权声明:本文为wangshx666原创文章,遵循 CC 4.0 BY-SA 版权协议,转载请附上原文出处链接和本声明。
本文链接:https://www.cnblogs.com/wangshx666/p/12838203.html