diff --git a/src/clover_lightnovel/wenku8.py b/src/clover_lightnovel/wenku8.py
index e01703a..5c20d7c 100644
--- a/src/clover_lightnovel/wenku8.py
+++ b/src/clover_lightnovel/wenku8.py
@@ -2,7 +2,9 @@ from os import getcwd
import requests
from bs4 import BeautifulSoup
-from src.configs.api_config import wenku8_username, wenku8_password
+from src.configs.api_config import wenku8_username, wenku8_password, proxy_api
+
+
# 登录页面的URL
login_url = 'https://www.wenku8.net/login.php?jumpurl=http%3A%2F%2Fwww.wenku8.net%2Findex.php'
@@ -11,7 +13,7 @@ index_url = 'https://www.wenku8.net/index.php'
headers = {
'Accept-Language': 'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
'Connection': 'close',
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 Edg/111.0.1661.41',
+ 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 Edg/111.0.1661.41'
}
# 登录表单数据
@@ -22,12 +24,26 @@ login_data = {
'action': 'login'
}
+def get_proxy(headers):
+    # proxy_api is a rotating-proxy API endpoint: each GET returns a fresh IP.
+    # The response body is plain text whose first line is "host:port".
+    response_text = requests.get(proxy_api, headers=headers, timeout=10).text
+    proxy_host = response_text.splitlines()[0].strip()
+    print('代理IP为:' + proxy_host)
+    # Both http and https traffic are tunnelled through the same HTTP proxy,
+    # matching the format expected by requests' `proxies` argument.
+    proxy_url = 'http://' + proxy_host
+    proxy = {
+        'http': proxy_url,
+        'https': proxy_url
+    }
+    return proxy
async def login():
# 发送登录请求
with requests.Session() as session:
+ proxy = get_proxy(headers)
# 注意:这里使用了Session对象来保持会话状态
- login_response = session.post(login_url, data=login_data, headers=headers)
+ login_response = session.post(login_url, data=login_data, headers=headers, proxies=proxy)
# 检查登录是否成功(根据实际需求调整)
if login_response.status_code == 200:
@@ -54,8 +70,8 @@ async def get_books():
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 Edg/111.0.1661.41',
'Cookie': cookie
}
-
- response = requests.get(index_url, headers=headers1)
+ proxy = get_proxy(headers1)
+ response = requests.get(index_url, headers=headers1, proxies=proxy)
print(response)
html = response.content.decode('gbk')
soup = BeautifulSoup(html, 'html.parser')
@@ -80,6 +96,7 @@ async def get_books():
"""
+ # print(orders[7].text)
with open(getcwd() + "/src/clover_lightnovel/output1.html", 'w', encoding='utf-8') as file:
file.write(head + str(orders[7]).replace('(查看 这本轻小说真厉害!2025 TOP榜单)', '') + str(orders[8]) + str(orders[9]))
diff --git a/src/configs/api_config_example.py b/src/configs/api_config_example.py
index 33fa7d3..52a2efb 100644
--- a/src/configs/api_config_example.py
+++ b/src/configs/api_config_example.py
@@ -27,4 +27,9 @@ deepseek_key= ""
Wenku8账号
"""
wenku8_username = ""
-wenku8_password = ""
\ No newline at end of file
+wenku8_password = ""
+
+"""
+多米HTTP代理api
+"""
+proxy_api = ""
\ No newline at end of file