mirror of https://github.com/Free-TV/IPTV
Improve make_playlist.py code quality and fix bugs
- Fix group name bug: replace underscores with spaces before title() so filenames like north_korea.md produce "North Korea" not "North_Korea"
- Fix resource leaks: use context managers for all file handles, including the EPG list and per-country playlist files
- Remove os.chdir() global side effect: use absolute paths derived from the script location instead
- Avoid calling to_m3u_line() twice per channel by caching the result
- Fix redundant trailing colon in the filename[:-3:] slice
- Consistent use of write() for headers instead of mixing print/write
- Strip blank lines from the EPG URL list when reading

pull/989/head
parent
1f72d975d3
commit
ff99c72b04
|
|
@ -3,7 +3,6 @@
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
|
||||||
EPG_LIST = open('epglist.txt',"r") # for a clean code
|
|
||||||
|
|
||||||
class Channel:
|
class Channel:
|
||||||
def __init__(self, group, md_line):
|
def __init__(self, group, md_line):
|
||||||
|
|
@ -29,32 +28,39 @@ class Channel:
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
dir_playlists = 'playlists'
|
base_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
if not (os.path.isdir(dir_playlists)):
|
lists_dir = os.path.join(base_dir, "lists")
|
||||||
|
dir_playlists = os.path.join(base_dir, "playlists")
|
||||||
|
|
||||||
|
if not os.path.isdir(dir_playlists):
|
||||||
os.mkdir(dir_playlists)
|
os.mkdir(dir_playlists)
|
||||||
with open("playlist.m3u8", "w", encoding='utf-8') as playlist:
|
|
||||||
processed_epg_list = ", ".join(EPG_LIST).replace('\n', '')
|
with open(os.path.join(base_dir, "epglist.txt"), encoding='utf-8') as epg_file:
|
||||||
head_playlist = f'#EXTM3U x-tvg-url="{processed_epg_list}"'
|
epg_urls = [line.strip() for line in epg_file if line.strip()]
|
||||||
print(f'#EXTM3U x-tvg-url="{processed_epg_list}"', file=playlist)
|
processed_epg_list = ", ".join(epg_urls)
|
||||||
os.chdir("lists")
|
head_playlist = f'#EXTM3U x-tvg-url="{processed_epg_list}"\n'
|
||||||
for filename in sorted(os.listdir(".")):
|
|
||||||
|
with open(os.path.join(base_dir, "playlist.m3u8"), "w", encoding='utf-8') as playlist:
|
||||||
|
playlist.write(head_playlist)
|
||||||
|
for filename in sorted(os.listdir(lists_dir)):
|
||||||
if filename == "README.md" or not filename.endswith(".md"):
|
if filename == "README.md" or not filename.endswith(".md"):
|
||||||
continue
|
continue
|
||||||
with open(filename, encoding='utf-8') as markup_file:
|
markup_path = os.path.join(lists_dir, filename)
|
||||||
file_country = os.path.join("..", dir_playlists, "playlist_" + filename[:-3:] + ".m3u8")
|
country_path = os.path.join(dir_playlists, "playlist_" + filename[:-3] + ".m3u8")
|
||||||
playlist_country = open(file_country, "w", encoding='utf-8')
|
group = filename[:-3].replace("_", " ").title()
|
||||||
playlist_country.write(head_playlist + "\n")
|
|
||||||
group = filename.replace(".md", "").title()
|
|
||||||
print(f"Generating {group}")
|
print(f"Generating {group}")
|
||||||
|
with open(markup_path, encoding='utf-8') as markup_file, \
|
||||||
|
open(country_path, "w", encoding='utf-8') as playlist_country:
|
||||||
|
playlist_country.write(head_playlist)
|
||||||
for line in markup_file:
|
for line in markup_file:
|
||||||
if "<h1>" in line.lower() and "</h1>" in line.lower():
|
if "<h1>" in line.lower() and "</h1>" in line.lower():
|
||||||
group = re.sub('<[^<>]+>', '', line.strip())
|
group = re.sub('<[^<>]+>', '', line.strip())
|
||||||
if not "[>]" in line:
|
if "[>]" not in line:
|
||||||
continue
|
continue
|
||||||
channel = Channel(group, line)
|
channel = Channel(group, line)
|
||||||
print(channel.to_m3u_line(), file=playlist)
|
m3u_line = channel.to_m3u_line()
|
||||||
print(channel.to_m3u_line(), file=playlist_country)
|
print(m3u_line, file=playlist)
|
||||||
playlist_country.close()
|
print(m3u_line, file=playlist_country)
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue