Skip to content

Commit

Permalink
perf: add system proxy check
Browse files Browse the repository at this point in the history
  • Loading branch information
ihmily committed Nov 19, 2024
1 parent 645f2ea commit 14552af
Show file tree
Hide file tree
Showing 2 changed files with 105 additions and 8 deletions.
92 changes: 92 additions & 0 deletions douyinliverecorder/proxy.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
import os
import sys
from enum import Enum, auto
from dataclasses import dataclass, field
from .utils import logger


class ProxyType(Enum):
    """Kinds of proxy protocols the detector can describe."""
    # Explicit values match what auto() would assign (1, 2, 3).
    HTTP = 1
    HTTPS = 2
    SOCKS = 3


@dataclass(frozen=True)
class ProxyInfo:
    """Immutable host/port pair describing a proxy endpoint.

    Both fields default to empty strings, meaning "no proxy". They must be
    supplied together; the port must be a decimal number in 1-65535.
    """
    ip: str = field(default="", repr=True)
    port: str = field(default="", repr=True)

    def __post_init__(self):
        # ip and port must be given together (exactly one set is invalid).
        has_ip = bool(self.ip)
        has_port = bool(self.port)
        if has_ip != has_port:
            raise ValueError("IP or port cannot be empty")

        if has_ip and has_port:
            port_ok = self.port.isdigit() and 1 <= int(self.port) <= 65535
            if not port_ok:
                raise ValueError("Port must be a digit between 1 and 65535")


class ProxyDetector:
    """Detect whether the operating system has a proxy configured.

    On Windows the per-user Internet Settings registry key is consulted;
    on other platforms the standard ``http_proxy`` / ``https_proxy`` /
    ``ftp_proxy`` environment variables (lower- or upper-case) are read.
    """

    def __init__(self):
        # Cache the platform check once so every method agrees on it
        # (the original only assigned __is_windows in the non-Windows
        # branch and then never read it).
        self.__is_windows = sys.platform.startswith('win')
        if self.__is_windows:
            import winreg
            self.winreg = winreg
            self.__path = r'Software\Microsoft\Windows\CurrentVersion\Internet Settings'
            # KEY_READ is sufficient: only QueryValueEx is ever used.
            # KEY_ALL_ACCESS can raise PermissionError for non-admin users.
            with winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) as key_user:
                self.__INTERNET_SETTINGS = winreg.OpenKeyEx(key_user, self.__path, 0, winreg.KEY_READ)

    def get_proxy_info(self) -> "ProxyInfo":
        """Return the system proxy address as a ProxyInfo (empty when none)."""
        if self.__is_windows:
            ip, port = self._get_proxy_info_windows()
        else:
            ip, port = self._get_proxy_info_linux()
        return ProxyInfo(ip, port)

    def is_proxy_enabled(self) -> bool:
        """Return True when the OS reports an enabled proxy."""
        if self.__is_windows:
            return self._is_proxy_enabled_windows()
        return self._is_proxy_enabled_linux()

    def _get_proxy_info_windows(self) -> tuple[str, str]:
        """Read the registry ProxyServer value; returns ("", "") when unset."""
        ip, port = "", ""
        if self._is_proxy_enabled_windows():
            try:
                ip_port = self.winreg.QueryValueEx(self.__INTERNET_SETTINGS, "ProxyServer")[0]
                if ip_port:
                    if '=' in ip_port:
                        # Per-protocol form "http=ip:port;https=ip:port" —
                        # take the first entry's address.
                        ip_port = ip_port.split(';')[0].split('=', 1)[1]
                    # rsplit guards against values containing extra colons.
                    ip, port = ip_port.rsplit(":", 1)
            except FileNotFoundError as err:
                logger.warning("No proxy information found: " + str(err))
            except Exception as err:
                logger.error("An error occurred: " + str(err))
        else:
            logger.debug("No proxy is enabled on the system")
        return ip, port

    def _is_proxy_enabled_windows(self) -> bool:
        """Return True when the registry ProxyEnable flag equals 1."""
        try:
            if self.winreg.QueryValueEx(self.__INTERNET_SETTINGS, "ProxyEnable")[0] == 1:
                return True
        except FileNotFoundError as err:
            # Use the module logger; the original printed to stdout here,
            # inconsistently with the other methods.
            logger.warning("No proxy information found: " + str(err))
        except Exception as err:
            logger.error("An error occurred: " + str(err))
        return False

    @staticmethod
    def _get_proxy_info_linux() -> tuple[str, str]:
        """Parse the first set ``*_proxy`` environment variable into (host, port).

        Accepts both "host:port" and scheme-prefixed "http://host:port"
        forms; the original ``proxy.split(':')`` raised ValueError on the
        common scheme-prefixed form. Returns ("", "") when no parsable
        proxy variable is set.
        """
        for name in ('http_proxy', 'https_proxy', 'ftp_proxy'):
            # Environment variables may be lower- or upper-case.
            value = os.getenv(name) or os.getenv(name.upper())
            if not value:
                continue
            # Drop an optional "scheme://" prefix and a trailing slash,
            # then split host from port at the last colon.
            address = value.rpartition('://')[2].rstrip('/')
            host, sep, port = address.rpartition(':')
            if sep and host and port.isdigit():
                return host, port
        return "", ""

    def _is_proxy_enabled_linux(self) -> bool:
        """Return True when a parsable proxy environment variable is set."""
        ip, port = self._get_proxy_info_linux()
        return bool(ip and port)
21 changes: 13 additions & 8 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,8 @@
from urllib.error import URLError, HTTPError
from typing import Any
import configparser
from douyinliverecorder import spider
from douyinliverecorder import stream
from douyinliverecorder import spider, stream
from douyinliverecorder.proxy import ProxyDetector
from douyinliverecorder.utils import logger
from douyinliverecorder import utils
from msg_push import (
Expand Down Expand Up @@ -95,7 +95,10 @@ def display_info() -> None:
print(f"是否开启代理录制: {'是' if use_proxy else '否'}", end=" | ")
if split_video_by_time:
print(f"录制分段开启: {split_time}秒", end=" | ")
print(f"是否生成时间文件: {'是' if create_time_file else '否'}", end=" | ")
else:
print(f"录制分段开启: 否", end=" | ")
if create_time_file:
print(f"是否生成时间文件: 是", end=" | ")
print(f"录制视频质量为: {video_record_quality}", end=" | ")
print(f"录制视频格式为: {video_save_type}", end=" | ")
print(f"目前瞬时错误数为: {error_count}", end=" | ")
Expand Down Expand Up @@ -1514,6 +1517,10 @@ def read_config_value(config_parser: configparser.RawConfigParser, section: str,
response_g = urllib.request.urlopen("https://www.google.com/", timeout=15)
global_proxy = True
print('\r全局/规则网络代理已开启√')
pd = ProxyDetector()
if pd.is_proxy_enabled():
proxy_info = pd.get_proxy_info()
print("System Proxy: http://{}:{}".format(proxy_info.ip, proxy_info.port))
except HTTPError as err:
print(f"HTTP error occurred: {err.code} - {err.reason}")
except URLError as err:
Expand Down Expand Up @@ -1664,18 +1671,15 @@ def read_config_value(config_parser: configparser.RawConfigParser, section: str,
logger.warning(f"Disk space remaining is below {disk_space_limit} GB. "
f"Exiting program due to the disk space limit being reached.")
sys.exit(-1)

print("")

def contains_url(string: str) -> bool:
pattern = r"(https?://)?(www\.)?[a-zA-Z0-9-]+(\.[a-zA-Z0-9-]+)+(:\d+)?(/.*)?"
return re.search(pattern, string) is not None


try:
url_comments = []
line_list = []
url_line_list = []

url_comments, line_list, url_line_list = [[] for _ in range(3)]
with (open(url_config_file, "r", encoding=text_encoding, errors='ignore') as file):
for origin_line in file:
if origin_line in line_list:
Expand Down Expand Up @@ -1868,3 +1872,4 @@ def contains_url(string: str) -> bool:
first_run = False

time.sleep(3)

0 comments on commit 14552af

Please sign in to comment.