mirror of https://github.com/alantang1977/JunTV.git
synced 2024-12-05 00:33:10 +02:00

commit df6e6dac67 (parent ba155206e1)
refactor:log and get_speed

7 changed files with 104 additions and 104 deletions
.github/workflows/main.yml (vendored): 11 changes

@@ -96,14 +96,11 @@ jobs:
           if [[ -f "$final_m3u_file" ]]; then
             git add -f "$final_m3u_file"
           fi
-          if [[ -f "output/result_cache.pkl" ]]; then
-            git add -f "output/result_cache.pkl"
-          fi
-          if [[ -f "output/user_result.log" ]]; then
-            git add -f "output/user_result.log"
-          elif [[ -f "output/result.log" ]]; then
-            git add -f "output/result.log"
+          if [[ -f "output/cache.pkl" ]]; then
+            git add -f "output/cache.pkl"
+          fi
+          if [[ -f "output/sort.log" ]]; then
+            git add -f "output/sort.log"
           fi
           if [[ -f "updates/fofa/fofa_hotel_region_result.pkl" ]]; then
             git add -f "updates/fofa/fofa_hotel_region_result.pkl"
           fi
main.py: 22 changes

@@ -1,5 +1,6 @@
 import asyncio
 from utils.config import config
+import utils.constants as constants
 from service.app import run_service
 from utils.channel import (
     get_channel_items,
@@ -18,8 +19,6 @@ from utils.tools import (
     format_interval,
     check_ipv6_support,
     resource_path,
-    setup_logging,
-    cleanup_logging,
 )
 from updates.subscribe import get_channels_by_subscribe_urls
 from updates.multicast import get_channels_by_multicast
@@ -34,9 +33,6 @@ import pickle
 import copy
 
 
-atexit.register(cleanup_logging)
-
-
 class UpdateSource:
 
     def __init__(self):
@@ -109,7 +105,6 @@ class UpdateSource:
     async def main(self):
         try:
             if config.open_update:
-                setup_logging()
                 main_start_time = time()
                 self.channel_items = get_channel_items()
                 channel_names = [
@@ -143,7 +138,7 @@ class UpdateSource:
                     )
                     self.start_time = time()
                     self.pbar = tqdm(total=self.total, desc="Sorting")
-                    self.channel_data = process_sort_channel_list(
+                    self.channel_data = await process_sort_channel_list(
                         self.channel_data,
                         ipv6=ipv6_support,
                         callback=sort_callback,
@@ -160,24 +155,17 @@ class UpdateSource:
                     )
                     self.pbar.close()
                 user_final_file = config.final_file
-                update_file(user_final_file, "output/result_new.txt")
+                update_file(user_final_file, constants.result_path)
                 if config.open_use_old_result:
                     if open_sort:
                         get_channel_data_cache_with_compare(
                             channel_data_cache, self.channel_data
                         )
                     with open(
-                        resource_path("output/result_cache.pkl", persistent=True), "wb"
+                        resource_path(constants.cache_path, persistent=True),
+                        "wb",
                     ) as file:
                         pickle.dump(channel_data_cache, file)
-                if open_sort:
-                    user_log_file = "output/" + (
-                        "user_result.log"
-                        if os.path.exists("config/user_config.ini")
-                        else "result.log"
-                    )
-                    update_file(user_log_file, "output/result_new.log", copy=True)
-                cleanup_logging()
                 convert_to_m3u()
                 total_time = format_interval(time() - main_start_time)
                 print(
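Note on the sorting call: process_sort_channel_list is converted into a coroutine in utils/channel (below), so the call site shown above has to be awaited; a plain call would only create a coroutine object and never run the speed tests. A minimal standalone sketch of the same conversion, with hypothetical names:

import asyncio


async def sort_channels(data):  # previously a blocking function
    await asyncio.sleep(0)  # stands in for awaited speed tests
    return sorted(data)


class Updater:
    async def main(self):
        data = [3, 1, 2]
        data = await sort_channels(data)  # without await nothing would execute
        print(data)


asyncio.run(Updater().main())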
@@ -3,9 +3,8 @@ import sys
 
 sys.path.append(os.path.dirname(sys.path[0]))
 from flask import Flask, render_template_string
-from utils.tools import get_result_file_content, get_ip_address
+from utils.tools import get_result_file_content, get_ip_address, resource_path
 import utils.constants as constants
-from utils.config import config
 
 app = Flask(__name__)
 
@@ -32,11 +31,9 @@ def show_content():
 
 @app.route("/log")
 def show_log():
-    user_log_file = "output/" + (
-        "user_result.log" if os.path.exists("config/user_config.ini") else "result.log"
-    )
-    if os.path.exists(user_log_file):
-        with open(user_log_file, "r", encoding="utf-8") as file:
+    log_path = resource_path(constants.sort_log_path)
+    if os.path.exists(log_path):
+        with open(log_path, "r", encoding="utf-8") as file:
             content = file.read()
     else:
         content = constants.waiting_tip
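The /log route now serves the dedicated sort log (constants.sort_log_path) instead of choosing between user_result.log and result.log. A minimal standalone Flask sketch of the same read-or-fallback behaviour, with a hypothetical path and placeholder text:

import os

from flask import Flask

app = Flask(__name__)
LOG_PATH = "output/sort.log"  # assumption: resolved sort log path
WAITING_TIP = "Waiting for the sort log..."  # stand-in for constants.waiting_tip


@app.route("/log")
def show_log():
    if os.path.exists(LOG_PATH):
        with open(LOG_PATH, "r", encoding="utf-8") as file:
            return file.read()
    return WAITING_TIP


if __name__ == "__main__":
    app.run()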
@@ -8,6 +8,7 @@ from utils.tools import (
     remove_cache_info,
     resource_path,
     write_content_into_txt,
+    get_logger,
 )
 from utils.speed import (
     get_speed,
@@ -22,7 +23,8 @@ import base64
 import pickle
 import copy
 import datetime
-from concurrent.futures import ThreadPoolExecutor
+import asyncio
+from logging import INFO
 
 
 def get_name_url(content, pattern, multiline=False, check_url=True):
@@ -84,7 +86,7 @@ def get_channel_items():
         )
 
     if config.open_use_old_result:
-        result_cache_path = resource_path("output/result_cache.pkl")
+        result_cache_path = resource_path(constants.cache_path)
        if os.path.exists(result_cache_path):
            with open(result_cache_path, "rb") as file:
                old_result = pickle.load(file)
@@ -543,7 +545,7 @@ def append_total_data(
     )
 
 
-def process_sort_channel_list(data, ipv6=False, callback=None):
+async def process_sort_channel_list(data, ipv6=False, callback=None):
     """
     Processs the sort channel list
     """
@@ -551,22 +553,29 @@ def process_sort_channel_list(data, ipv6=False, callback=None):
     need_sort_data = copy.deepcopy(data)
     process_nested_dict(need_sort_data, seen=set(), flag=r"cache:(.*)", force_str="!")
     result = {}
-    with ThreadPoolExecutor(max_workers=30) as executor:
-        try:
-            for channel_obj in need_sort_data.values():
-                for info_list in channel_obj.values():
-                    for info in info_list:
-                        executor.submit(
-                            get_speed,
-                            info[0],
-                            ipv6_proxy=ipv6_proxy,
-                            callback=callback,
-                        )
-        except Exception as e:
-            print(f"Get speed Error: {e}")
+    semaphore = asyncio.Semaphore(10)
+
+    async def limited_get_speed(info, ipv6_proxy, callback):
+        async with semaphore:
+            return await get_speed(info[0], ipv6_proxy=ipv6_proxy, callback=callback)
+
+    tasks = [
+        asyncio.create_task(
+            limited_get_speed(
+                info,
+                ipv6_proxy=ipv6_proxy,
+                callback=callback,
+            )
+        )
+        for channel_obj in need_sort_data.values()
+        for info_list in channel_obj.values()
+        for info in info_list
+    ]
+    await asyncio.gather(*tasks)
+    logger = get_logger(constants.sort_log_path, level=INFO, init=True)
     for cate, obj in data.items():
         for name, info_list in obj.items():
-            info_list = sort_urls_by_speed_and_resolution(name, info_list)
+            info_list = sort_urls_by_speed_and_resolution(name, info_list, logger)
             append_data_to_info_data(
                 result,
                 cate,
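The thread-pool fan-out is replaced with an asyncio fan-out: one task per URL, an asyncio.Semaphore(10) caps how many get_speed coroutines run at the same time, and asyncio.gather waits for all of them before the results are sorted and logged. A minimal sketch of the pattern with a stand-in checker (hypothetical names, not the repository's get_speed):

import asyncio


async def check(url, semaphore):
    async with semaphore:  # at most `limit` checks in flight at once
        await asyncio.sleep(0.1)  # stand-in for the real speed test
        return url, 0.1


async def run_all(urls, limit=10):
    semaphore = asyncio.Semaphore(limit)
    tasks = [asyncio.create_task(check(url, semaphore)) for url in urls]
    return await asyncio.gather(*tasks)  # results come back in input order


print(asyncio.run(run_all([f"http://example.com/{i}" for i in range(25)])))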
@@ -2,9 +2,13 @@ import os
 
 output_dir = "output"
 
-log_file = "result_new.log"
+result_path = os.path.join(output_dir, "result_new.txt")
 
-log_path = os.path.join(output_dir, log_file)
+cache_path = os.path.join(output_dir, "cache.pkl")
+
+sort_log_path = os.path.join(output_dir, "sort.log")
+
+log_path = os.path.join(output_dir, "log.log")
 
 url_pattern = r"((https?):\/\/)?(\[[0-9a-fA-F:]+\]|([\w-]+\.)+[\w-]+)(:[0-9]{1,5})?(\/[^\s]*)?(\$[^\s]+)?"
 
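Hard-coded output paths ("output/result_cache.pkl", "output/result_new.txt", per-user log names) are replaced by constants built once with os.path.join, so a rename such as result_cache.pkl to cache.pkl only touches this module. A small usage sketch, assuming resource_path simply resolves the relative constant against the project root:

import os
import pickle

import utils.constants as constants
from utils.tools import resource_path

cache_file = resource_path(constants.cache_path)  # e.g. <project>/output/cache.pkl
if os.path.exists(cache_file):
    with open(cache_file, "rb") as file:
        old_result = pickle.load(file)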
@@ -3,39 +3,55 @@ from time import time
 import asyncio
 import re
 from utils.config import config
-from utils.tools import is_ipv6, remove_cache_info, get_resolution_value
+import utils.constants as constants
+from utils.tools import is_ipv6, remove_cache_info, get_resolution_value, get_logger
 import subprocess
 import yt_dlp
-import logging
+from concurrent.futures import ProcessPoolExecutor
+import functools
+
+logger = get_logger(constants.log_path)
 
 
-def get_speed_yt_dlp(url, timeout=config.sort_timeout):
+def get_info_yt_dlp(url, timeout=config.sort_timeout):
+    """
+    Get the url info by yt_dlp
+    """
+    ydl_opts = {
+        "socket_timeout": timeout,
+        "skip_download": True,
+        "quiet": True,
+        "no_warnings": True,
+        "format": "best",
+        "logger": logger,
+    }
+    with yt_dlp.YoutubeDL(ydl_opts) as ydl:
+        return ydl.sanitize_info(ydl.extract_info(url, download=False))
+
+
+async def get_speed_yt_dlp(url, timeout=config.sort_timeout):
     """
     Get the speed of the url by yt_dlp
     """
     try:
-        ydl_opts = {
-            "socket_timeout": timeout,
-            "skip_download": True,
-            "quiet": True,
-            "no_warnings": True,
-            "format": "best",
-            "logger": logging.getLogger(),
-        }
-        with yt_dlp.YoutubeDL(ydl_opts) as ydl:
+        async with asyncio.timeout(timeout + 2):
             start_time = time()
-            info = ydl.extract_info(url, download=False)
-            fps = info.get("fps", None) or (
-                int(round((time() - start_time) * 1000))
-                if "id" in info
-                else float("inf")
-            )
-            resolution = (
-                f"{info['width']}x{info['height']}"
-                if "width" in info and "height" in info
-                else None
-            )
-            return (fps, resolution)
+            loop = asyncio.get_running_loop()
+            with ProcessPoolExecutor() as exc:
+                info = await loop.run_in_executor(
+                    exc, functools.partial(get_info_yt_dlp, url, timeout)
+                )
+            fps = (
+                int(round((time() - start_time) * 1000))
+                if len(info)
+                else float("inf")
+            )
+            resolution = (
+                f"{info['width']}x{info['height']}"
+                if "width" in info and "height" in info
+                else None
+            )
+            return (fps, resolution)
     except:
         return (float("inf"), None)
 
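yt_dlp's extract_info call is blocking, so the async wrapper now offloads it: get_info_yt_dlp does the synchronous lookup, and get_speed_yt_dlp runs it in a worker process via loop.run_in_executor, with asyncio.timeout (Python 3.11+) bounding the whole probe. A minimal sketch of the same pattern with a stand-in blocking probe (hypothetical names):

import asyncio
import functools
import time
from concurrent.futures import ProcessPoolExecutor


def blocking_probe(url, timeout):  # stand-in for the yt_dlp info lookup
    time.sleep(0.2)
    return {"url": url, "width": 1920, "height": 1080}


async def probe(url, timeout=5):
    try:
        async with asyncio.timeout(timeout + 2):  # requires Python 3.11+
            loop = asyncio.get_running_loop()
            with ProcessPoolExecutor() as pool:
                info = await loop.run_in_executor(
                    pool, functools.partial(blocking_probe, url, timeout)
                )
            return f"{info['width']}x{info['height']}"
    except Exception:
        return None


if __name__ == "__main__":  # guard needed because ProcessPoolExecutor may spawn workers
    print(asyncio.run(probe("http://example.com/stream")))

Creating the pool inside the call keeps the sketch (and the commit's code) self-contained, at the cost of paying process start-up for every URL; a shared pool is an alternative if that overhead matters.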
@@ -146,7 +162,7 @@ async def check_stream_speed(url_info):
 speed_cache = {}
 
 
-def get_speed(url, ipv6_proxy=None, callback=None):
+async def get_speed(url, ipv6_proxy=None, callback=None):
     """
     Get the speed of the url
     """
@@ -163,7 +179,7 @@ def get_speed(url, ipv6_proxy=None, callback=None):
         if ipv6_proxy and url_is_ipv6:
             speed = 0
         else:
-            speed = get_speed_yt_dlp(url)
+            speed = await get_speed_yt_dlp(url)
         if cache_key and cache_key not in speed_cache:
             speed_cache[cache_key] = speed
         return speed
@@ -174,7 +190,7 @@ def get_speed(url, ipv6_proxy=None, callback=None):
             callback()
 
 
-def sort_urls_by_speed_and_resolution(name, data):
+def sort_urls_by_speed_and_resolution(name, data, logger=None):
     """
     Sort by speed and resolution
     """
@@ -192,9 +208,13 @@ def sort_urls_by_speed_and_resolution(name, data):
         resolution = cache_resolution or resolution
         if response_time != float("inf"):
             url = remove_cache_info(url)
-            logging.info(
-                f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time} ms"
-            )
+            try:
+                if logger:
+                    logger.info(
+                        f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time} ms"
+                    )
+            except Exception as e:
+                print(e)
         filter_data.append((url, date, resolution, origin))
 
     def combined_key(item):
@@ -15,35 +15,20 @@ import sys
 import logging
 from logging.handlers import RotatingFileHandler
 
-handler = None
-
 
-def setup_logging():
+def get_logger(path, level=logging.ERROR, init=False):
     """
-    Setup logging
+    get the logger
     """
-    global handler
     if not os.path.exists(constants.output_dir):
         os.makedirs(constants.output_dir)
-    handler = RotatingFileHandler(constants.log_path, encoding="utf-8")
-    logging.basicConfig(
-        handlers=[handler],
-        format="%(message)s",
-        level=logging.INFO,
-    )
-
-
-def cleanup_logging():
-    """
-    Cleanup logging
-    """
-    global handler
-    if handler:
-        for handler in logging.root.handlers[:]:
-            handler.close()
-            logging.root.removeHandler(handler)
-    if os.path.exists(constants.log_path):
-        os.remove(constants.log_path)
+    if init and os.path.exists(path):
+        os.remove(path)
+    handler = RotatingFileHandler(path, encoding="utf-8")
+    logger = logging.getLogger(path)
+    logger.addHandler(handler)
+    logger.setLevel(level)
+    return logger
 
 
 def format_interval(t):
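setup_logging/cleanup_logging configured the root logger for a single fixed file; get_logger instead builds one file-backed logger per path, and because logging.getLogger(name) keys loggers by name, the path doubles as a stable logger name. init=True wipes the previous file so each run starts fresh. A minimal standalone sketch mirroring the new helper, with a throwaway path:

import logging
import os
from logging.handlers import RotatingFileHandler


def get_logger(path, level=logging.ERROR, init=False):
    os.makedirs(os.path.dirname(path) or ".", exist_ok=True)
    if init and os.path.exists(path):
        os.remove(path)  # start the run with a fresh log file
    handler = RotatingFileHandler(path, encoding="utf-8")
    logger = logging.getLogger(path)  # one named logger per file path
    logger.addHandler(handler)
    logger.setLevel(level)
    return logger


log = get_logger("output/sort.log", level=logging.INFO, init=True)
log.info("Name: CCTV-1, Response Time: 120 ms")

Note that every call attaches a new handler to the same named logger, so the helper is intended to be called once per path per run.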