refactor: origin map

guorong.zheng 2024-11-05 14:05:34 +08:00
parent db57d7634b
commit 399300b53a
10 changed files with 46 additions and 32 deletions

View file

@@ -6,6 +6,7 @@ import updates.fofa.fofa_map as fofa_map
from driver.setup import setup_driver
import re
from utils.config import config
import utils.constants as constants
from utils.retry import retry_func
from utils.channel import format_channel_name
from utils.tools import merge_objects, get_pbar_remaining, add_url_info, resource_path
@@ -91,9 +92,10 @@ async def get_channels_by_fofa(urls=None, multicast=False, callback=None):
test_url = fofa_urls[0][0]
proxy = await get_proxy(test_url, best=True, with_test=True)
cancel_event = threading.Event()
hotel_name = constants.origin_map["hotel"]
def process_fofa_channels(fofa_info):
nonlocal proxy, fofa_urls_len, open_driver, open_sort, cancel_event
nonlocal proxy
if cancel_event.is_set():
return {}
fofa_url = fofa_info[0]
@@ -130,7 +132,11 @@ async def get_channels_by_fofa(urls=None, multicast=False, callback=None):
with ThreadPoolExecutor(max_workers=100) as executor:
futures = [
executor.submit(
process_fofa_json_url, url, fofa_info[1], open_sort
process_fofa_json_url,
url,
fofa_info[1],
open_sort,
hotel_name,
)
for url in urls
]
@@ -184,7 +190,7 @@ async def get_channels_by_fofa(urls=None, multicast=False, callback=None):
return fofa_results
def process_fofa_json_url(url, region, open_sort):
def process_fofa_json_url(url, region, open_sort, hotel_name="酒店源"):
"""
Process the FOFA json url
"""
@@ -208,11 +214,11 @@ def process_fofa_json_url(url, region, open_sort):
total_url = (
add_url_info(
f"{url}{item_url}",
f"{region}酒店源|cache:{url}",
f"{region}{hotel_name}|cache:{url}",
)
if open_sort
else add_url_info(
f"{url}{item_url}", f"{region}酒店源"
f"{url}{item_url}", f"{region}{hotel_name}"
)
)
if item_name not in channels:
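The hunk above hoists the `constants.origin_map["hotel"]` lookup out of the per-URL workers and threads it into `process_fofa_json_url` as a `hotel_name` parameter. A minimal sketch of that pattern, assuming the `$info` suffix convention implied by the `url.partition("$")` calls elsewhere in this commit; `process_json_url` is a hypothetical stand-in for `process_fofa_json_url`:

```python
# Minimal sketch: resolve the origin label once, then pass it to each worker
# instead of re-reading module state per task. origin_map mirrors the table
# added in utils/constants.
from concurrent.futures import ThreadPoolExecutor

origin_map = {"hotel": "酒店源"}

def process_json_url(url, region, hotel_name="酒店源"):
    # Tag the URL the way add_url_info appears to: a "$<info>" suffix.
    return f"{url}${region}{hotel_name}"

def run(urls, region):
    hotel_name = origin_map["hotel"]  # looked up once, outside the pool
    with ThreadPoolExecutor(max_workers=4) as executor:
        futures = [
            executor.submit(process_json_url, url, region, hotel_name)
            for url in urls
        ]
        return [future.result() for future in futures]

print(run(["http://1.2.3.4:8000"], "广东"))  # ['http://1.2.3.4:8000$广东酒店源']
```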

View file

@@ -41,7 +41,7 @@ async def get_channels_by_hotel(callback=None):
start_time = time()
def process_region_by_hotel(region):
nonlocal proxy, open_driver, page_num
nonlocal proxy
name = f"{region}"
info_list = []
driver = None

View file

@@ -53,7 +53,7 @@ async def get_channels_by_multicast(names, callback=None):
merge_objects(search_region_type_result, fofa_result)
def process_channel_by_multicast(region, type):
nonlocal proxy, open_driver, page_num, start_time
nonlocal proxy
name = f"{region}{type}"
info_list = []
driver = None
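This hunk and the hotel one above it shrink the `nonlocal` lists to just `proxy`. That works because Python closures may read enclosing names freely; `nonlocal` is only required when the inner function rebinds a name. A minimal sketch:

```python
# Sketch: a nested function may read enclosing variables without any
# declaration; nonlocal is required only to rebind, as done with `proxy`.
def outer():
    proxy = None
    open_driver = True  # only read by the closure

    def worker():
        nonlocal proxy       # rebound below, so nonlocal is required
        if open_driver:      # plain read: no declaration needed
            proxy = "http://127.0.0.1:8080"
        return proxy

    return worker()

print(outer())  # http://127.0.0.1:8080
```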

View file

@@ -1,5 +1,6 @@
from asyncio import create_task, gather
from utils.config import config
import utils.constants as constants
from utils.speed import get_speed
from utils.channel import (
format_channel_name,
@@ -11,6 +12,7 @@ from utils.tools import (
get_pbar_remaining,
get_soup,
format_url_with_cache,
add_url_info,
)
from updates.proxy import get_proxy, get_proxy_next
from time import time
@@ -61,9 +63,10 @@ async def get_channels_by_online_search(names, callback=None):
if open_proxy:
proxy = await get_proxy(pageUrl, best=True, with_test=True)
start_time = time()
online_search_name = constants.origin_map["online_search"]
def process_channel_by_online_search(name):
nonlocal proxy, open_proxy, open_driver, page_num
nonlocal proxy
info_list = []
driver = None
try:
@@ -166,6 +169,7 @@ async def get_channels_by_online_search(names, callback=None):
for result in results:
url, date, resolution = result
if url and check_url_by_patterns(url):
url = add_url_info(url, online_search_name)
url = format_url_with_cache(url)
info_list.append((url, date, resolution))
break
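Here the label is resolved once from `constants.origin_map["online_search"]` and applied to each verified URL before the cache formatting step. A hedged sketch with stand-ins for `add_url_info` and `check_url_by_patterns` (the real helpers live in `utils.tools` and `utils.channel`):

```python
# Hedged sketch of the tagging step: label first, cache formatting after.
origin_map = {"online_search": "关键字源"}

def add_url_info(url, info):
    # Stand-in for utils.tools.add_url_info, using the "$" suffix convention.
    return f"{url}${info}"

def check_url_by_patterns(url):
    # Hypothetical stub; the real check lives in utils.channel.
    return url.startswith("http")

online_search_name = origin_map["online_search"]
url = "http://example.com/live.m3u8"
if url and check_url_by_patterns(url):
    url = add_url_info(url, online_search_name)
print(url)  # http://example.com/live.m3u8$关键字源
```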

View file

@@ -28,7 +28,6 @@ def get_proxy_list(page_count=1):
pbar = tqdm(total=len(urls), desc="Getting proxy list")
def get_proxy(url):
nonlocal open_driver
proxys = []
try:
if open_driver:

View file

@@ -3,6 +3,7 @@ from tqdm.asyncio import tqdm_asyncio
from time import time
from requests import Session, exceptions
from utils.config import config
import utils.constants as constants
from utils.retry import retry_func
from utils.channel import get_name_url, format_channel_name
from utils.tools import (
@@ -40,6 +41,9 @@ async def get_channels_by_subscribe_urls(
0,
)
session = Session()
hotel_name = constants.origin_map["hotel"]
multicast_name = constants.origin_map["multicast"]
subscribe_name = constants.origin_map["subscribe"]
def process_subscribe_channels(subscribe_info):
if (multicast or hotel) and isinstance(subscribe_info, dict):
@@ -83,9 +87,13 @@ async def get_channels_by_subscribe_urls(
url = url.partition("$")[0]
if not multicast:
info = (
f"{region}酒店源"
f"{region}{hotel_name}"
if hotel
else "组播源" if "/rtp/" in url else "订阅源"
else (
f"{multicast_name}"
if "/rtp/" in url
else f"{subscribe_name}"
)
)
url = add_url_info(url, info)
url = format_url_with_cache(
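In this hunk the three hard-coded strings become table lookups, and the nested conditional picks the hotel, multicast (`/rtp/` URLs), or subscribe label. A minimal sketch of that selection logic:

```python
# Sketch of the label selection above, with the hard-coded strings replaced
# by origin_map lookups.
origin_map = {"hotel": "酒店源", "multicast": "组播源", "subscribe": "订阅源"}

def pick_info(url, region, hotel):
    hotel_name = origin_map["hotel"]
    multicast_name = origin_map["multicast"]
    subscribe_name = origin_map["subscribe"]
    return (
        f"{region}{hotel_name}"
        if hotel
        else (multicast_name if "/rtp/" in url else subscribe_name)
    )

print(pick_info("http://a/rtp/1.2.3.4", "广东", False))  # 组播源
print(pick_info("http://a/hls/1.m3u8", "广东", True))    # 广东酒店源
```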

View file

@@ -253,17 +253,19 @@ def get_channel_multicast_result(result, search_result):
Get the channel multicast info result by result and search result
"""
info_result = {}
multicast_name = constants.origin_map["multicast"]
for name, result_obj in result.items():
info_list = [
(
(
add_url_info(
f"http://{url}/rtp/{ip}",
f"{result_region}{result_type}组播源|cache:{url}",
f"{result_region}{result_type}{multicast_name}|cache:{url}",
)
if config.open_sort
else add_url_info(
f"http://{url}/rtp/{ip}", f"{result_region}{result_type}组播源"
f"http://{url}/rtp/{ip}",
f"{result_region}{result_type}{multicast_name}",
)
),
date,
@@ -614,8 +616,6 @@ async def sort_channel_list(
semaphore,
ffmpeg=False,
ipv6_proxy=None,
filter_resolution=False,
min_resolution=None,
callback=None,
):
"""
@@ -630,10 +630,6 @@
)
if sorted_data:
for (url, date, resolution, origin), response_time in sorted_data:
if resolution and filter_resolution:
resolution_value = get_resolution_value(resolution)
if resolution_value < min_resolution:
continue
logging.info(
f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time} ms"
)
@@ -670,8 +666,6 @@ async def process_sort_channel_list(data, ipv6=False, callback=None):
semaphore,
ffmpeg=is_ffmpeg,
ipv6_proxy=ipv6_proxy,
filter_resolution=config.open_filter_resolution,
min_resolution=config.min_resolution_value,
callback=callback,
)
)
@@ -718,12 +712,6 @@ async def process_sort_channel_list(data, ipv6=False, callback=None):
continue
response_time, resolution = cache
if response_time and response_time != float("inf"):
if resolution:
if config.open_filter_resolution:
resolution_value = get_resolution_value(resolution)
if resolution_value < config.min_resolution_value:
continue
url = add_url_info(url, resolution)
append_data_to_info_data(
sort_data,
cate,
@@ -845,6 +833,4 @@ def format_channel_url_info(data):
for url_info in obj.values():
for i, (url, date, resolution, origin) in enumerate(url_info):
url = remove_cache_info(url)
if resolution:
url = add_url_info(url, resolution)
url_info[i] = (url, date, resolution, origin)
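After this commit, `format_channel_url_info` only strips the cache marker and keeps resolution as a separate tuple field instead of folding it back into the URL. A sketch with a hypothetical `remove_cache_info` stand-in (the real helper is in `utils.tools`):

```python
import re

def remove_cache_info(url):
    # Hypothetical stand-in: drop the "cache:..." marker only, keeping the
    # rest of the "$" info segment intact.
    return re.sub(r"\|?cache:[^$|]+", "", url)

url_info = [("http://a/1$广东酒店源|cache:http://a", "2024-11-05", "1920x1080", "hotel")]
for i, (url, date, resolution, origin) in enumerate(url_info):
    url = remove_cache_info(url)
    url_info[i] = (url, date, resolution, origin)  # tuples are immutable; rebuild
print(url_info[0])  # ('http://a/1$广东酒店源', '2024-11-05', '1920x1080', 'hotel')
```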

View file

@@ -51,3 +51,10 @@ replace_dict = {
"CCTV17农业农村": "CCTV17",
"CCTV17农业": "CCTV17",
}
origin_map = {
"hotel": "酒店源",
"multicast": "组播源",
"subscribe": "订阅源",
"online_search": "关键字源",
}
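The new `origin_map` table centralizes the display labels that were previously hard-coded at each call site. A minimal usage sketch; the `.get()` fallback is a defensive assumption in this sketch, while the diff itself indexes the map directly:

```python
origin_map = {
    "hotel": "酒店源",
    "multicast": "组播源",
    "subscribe": "订阅源",
    "online_search": "关键字源",
}

def label_url(url, origin):
    # .get() is a defensive choice for this sketch; the diff indexes directly.
    origin_name = origin_map.get(origin)
    return f"{url}${origin_name}" if origin_name else url

print(label_url("http://x/1", "hotel"))    # http://x/1$酒店源
print(label_url("http://x/1", "unknown"))  # http://x/1
```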

View file

@@ -103,8 +103,6 @@ async def check_stream_speed(url_info):
frame, resolution = get_video_info(video_info)
if frame is None or frame == float("inf"):
return float("inf")
if resolution:
url_info[0] = add_url_info(url, resolution)
url_info[2] = resolution
return (url_info, frame)
except Exception as e:
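The speed check now records the measured resolution in `url_info[2]` only and leaves the URL in slot 0 untagged, deferring labeling to `get_total_urls_from_info_list`. A sketch of that narrowed contract:

```python
# Sketch: url_info is a mutable [url, date, resolution, ...] record; only the
# resolution slot is updated, and the URL in slot 0 is left untouched.
def record_resolution(url_info, resolution):
    url_info[2] = resolution  # update the field, not the URL
    return url_info

info = ["http://x/live", "2024-11-05", None, "hotel"]
print(record_resolution(info, "1280x720"))
# ['http://x/live', '2024-11-05', '1280x720', 'hotel']
```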

View file

@@ -6,6 +6,7 @@ import ipaddress
from urllib.parse import urlparse
import socket
from utils.config import config
import utils.constants as constants
import re
from bs4 import BeautifulSoup
from flask import render_template_string, send_file
@@ -158,12 +159,17 @@ def get_total_urls_from_info_list(infoList, ipv6=False):
pure_url, _, info = url.partition("$")
if not info:
url = add_url_info(pure_url, origin)
origin_name = constants.origin_map[origin]
if origin_name:
url = add_url_info(pure_url, origin_name)
url_is_ipv6 = is_ipv6(url)
if url_is_ipv6:
url = add_url_info(url, "IPv6")
if resolution:
url = add_url_info(url, resolution)
if url_is_ipv6:
categorized_urls[origin]["ipv6"].append(url)
else:
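The final hunk maps the stored origin key to its display label, but only when the URL carries no `$info` segment yet, then appends the IPv6 tag and resolution. A hedged, self-contained sketch with simplified stand-ins for `add_url_info` and `is_ipv6`:

```python
import ipaddress
from urllib.parse import urlparse

origin_map = {"hotel": "酒店源", "multicast": "组播源",
              "subscribe": "订阅源", "online_search": "关键字源"}

def add_url_info(url, info):
    # Stand-in using the "$" suffix convention.
    return f"{url}${info}"

def is_ipv6(url):
    # Simplified stand-in: true when the host parses as an IPv6 address.
    host = urlparse(url).hostname or ""
    try:
        return isinstance(ipaddress.ip_address(host), ipaddress.IPv6Address)
    except ValueError:
        return False

def decorate(url, origin, resolution=None):
    pure_url, _, info = url.partition("$")
    if not info:
        origin_name = origin_map[origin]
        if origin_name:
            url = add_url_info(pure_url, origin_name)
    if is_ipv6(url):
        url = add_url_info(url, "IPv6")
    if resolution:
        url = add_url_info(url, resolution)
    return url

print(decorate("http://[2001:db8::1]/live", "hotel", "1920x1080"))
# http://[2001:db8::1]/live$酒店源$IPv6$1920x1080
```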