Mirror of https://github.com/doms9/iptv.git, synced 2025-12-10 20:39:03 +01:00

Commit 00000d99f4

38 changed files with 395070 additions and 0 deletions
M3U8/TV.m3u8 (new file, 1480 lines)
File diff suppressed because it is too large
M3U8/base.m3u8 (new file, 442 lines)
@@ -0,0 +1,442 @@
#EXTM3U url-tvg="https://raw.githubusercontent.com/doms9/iptv/refs/heads/default/EPG/TV.xml"
#EXTINF:-1 tvg-chno="1" tvg-id="A.and.E.HD.East.us2" tvg-name="A&E TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s51529_dark_360w_270h.png" group-title="TV",A&E TV
http://fl1.moveonjoy.com/ANE/index.m3u8
#EXTINF:-1 tvg-chno="2" tvg-id="WFTS-DT.us_locals1" tvg-name="ABC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10003_dark_360w_270h.png" group-title="TV",ABC
https://fl1.moveonjoy.com/FL_Tampa_ABC/index.m3u8
#EXTINF:-1 tvg-chno="3" tvg-id="ACC.Network.us2" tvg-name="ACC Network" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s111871_dark_360w_270h.png" group-title="TV",ACC Network
http://cord-cutter.net:8080/30550113/30550113/9273
#EXTINF:-1 tvg-chno="4" tvg-id="AdultSwim.com.Cartoon.Network.us2" tvg-name="Adult Swim" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16496_dark_360w_270h.png" group-title="TV",Adult Swim
https://turnerlive.warnermediacdn.com/hls/live/2023183/aseast/noslate/VIDEO_1_5128000.m3u8
#EXTINF:-1 tvg-chno="5" tvg-id="Altitude.Sports.us2" tvg-name="Altitude Sports" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s44263_dark_360w_270h.png" group-title="TV",Altitude Sports
http://cord-cutter.net:8080/30550113/30550113/79545
#EXTINF:-1 tvg-chno="6" tvg-id="AMC.HD.us2" tvg-name="AMC" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10021_dark_360w_270h.png" group-title="TV",AMC
http://cord-cutter.net:8080/30550113/30550113/18925
#EXTINF:-1 tvg-chno="7" tvg-id="Animal.Planet.HD.us2" tvg-name="Animal Planet" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16331_dark_360w_270h.png" group-title="TV",Animal Planet
http://fl1.moveonjoy.com/Animal_Planet/index.m3u8
#EXTINF:-1 tvg-chno="8" tvg-id="ASPiRE.HD.us2" tvg-name="Aspire" tvg-logo="https://i.gyazo.com/0dec42cc5ef48c489cc10db906dc5b9b.png" group-title="TV",Aspire
http://fl1.moveonjoy.com/Aspire/index.m3u8
#EXTINF:-1 tvg-chno="9" tvg-id="Astro.Premier.League.my" tvg-name="Astro Premier League" tvg-logo="https://1000logos.net/wp-content/uploads/2020/08/Astro-Logo-500x281.png" group-title="TV",Astro Premier League
http://stalker.klma2023.net/play/live.php?mac=00:1B:79:F8:59:0E&stream=1163984&extension=ts
#EXTINF:-1 tvg-chno="10" tvg-id="BBC.America.HD.us2" tvg-name="BBC America" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s64492_dark_360w_270h.png" group-title="TV",BBC America
http://cord-cutter.net:8080/30550113/30550113/20194
#EXTINF:-1 tvg-chno="11" tvg-id="BBC.News.(North.America).HD.us2" tvg-name="BBC World News" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s89542_dark_360w_270h.png" group-title="TV",BBC World News
http://cord-cutter.net:8080/30550113/30550113/139752
#EXTINF:-1 tvg-chno="12" tvg-id="BET.HD.us2" tvg-name="BET" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10051_dark_360w_270h.png" group-title="TV",BET
http://fl1.moveonjoy.com/BET_EAST/index.m3u8
#EXTINF:-1 tvg-chno="13" tvg-id="Big.Ten.Network.HD.us2" tvg-name="Big Ten Network" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s56783_dark_360w_270h.png" group-title="TV",Big Ten Network
http://cord-cutter.net:8080/30550113/30550113/9828
#EXTINF:-1 tvg-chno="14" tvg-id="Bloomberg.HD.us2" tvg-name="Bloomberg TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s71799_dark_360w_270h.png" group-title="TV",Bloomberg TV
http://cord-cutter.net:8080/30550113/30550113/15158
#EXTINF:-1 tvg-chno="15" tvg-id="Boomerang.us2" tvg-name="Boomerang" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s21883_dark_360w_270h.png" group-title="TV",Boomerang
http://cord-cutter.net:8080/30550113/30550113/14741
#EXTINF:-1 tvg-chno="16" tvg-id="Bounce.TV.us2" tvg-name="Bounce TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s73067_dark_360w_270h.png" group-title="TV",Bounce TV
http://cord-cutter.net:8080/30550113/30550113/48323
#EXTINF:-1 tvg-chno="17" tvg-id="Bravo.HD.us2" tvg-name="Bravo TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10057_dark_360w_270h.png" group-title="TV",Bravo TV
http://fl1.moveonjoy.com/BRAVO/index.m3u8
#EXTINF:-1 tvg-chno="18" tvg-id="BUZZR.Stream.us2" tvg-name="Buzzr" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s93430_dark_360w_270h.png" group-title="TV",Buzzr
https://buzzrota-web.amagi.tv/playlist.m3u8
#EXTINF:-1 tvg-chno="19" tvg-id="CSPAN.us2" tvg-name="C-SPAN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10161_dark_360w_270h.png" group-title="TV",C-SPAN
http://fl1.moveonjoy.com/C-SPAN/index.m3u8
#EXTINF:-1 tvg-chno="20" tvg-id="Cartoon.Network.HD.us2" tvg-name="Cartoon Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12131_dark_360w_270h.png" group-title="TV",Cartoon Network
http://cord-cutter.net:8080/30550113/30550113/46708
#EXTINF:-1 tvg-chno="21" tvg-id="WCBS-DT.us_locals1" tvg-name="CBS" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10098_dark_360w_270h.png" group-title="TV",CBS
http://cord-cutter.net:8080/30550113/30550113/120749
#EXTINF:-1 tvg-chno="22" tvg-id="plex.tv.CBS.Sports.Golazo.Network.plex" tvg-name="CBS Sports Golazo Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s133691_dark_360w_270h.png" group-title="TV",CBS Sports Golazo Network
https://dai.google.com/linear/hls/event/GxrCGmwST0ixsrc_QgB6qw/master.m3u8
#EXTINF:-1 tvg-chno="23" tvg-id="CBS.Sports.Network.HD.us2" tvg-name="CBS Sports Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16365_dark_360w_270h.png" group-title="TV",CBS Sports Network
http://cord-cutter.net:8080/30550113/30550113/10454
#EXTINF:-1 tvg-chno="24" tvg-id="CMT.HD.us2" tvg-name="CMT" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10138_dark_360w_270h.png" group-title="TV",CMT
https://fl1.moveonjoy.com/CMT/index.m3u8
#EXTINF:-1 tvg-chno="25" tvg-id="CNBC.HD.us2" tvg-name="CNBC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10139_dark_360w_270h.png" group-title="TV",CNBC
https://fl1.moveonjoy.com/CNBC/index.m3u8
#EXTINF:-1 tvg-chno="26" tvg-id="CNN.HD.us2" tvg-name="CNN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58646_dark_360w_270h.png" group-title="TV",CNN
https://turnerlive.warnermediacdn.com/hls/live/586495/cnngo/cnn_slate/VIDEO_0_3564000.m3u8
#EXTINF:-1 tvg-chno="27" tvg-id="Comedy.Central.HD.us2" tvg-name="Comedy Central" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10149_dark_360w_270h.png" group-title="TV",Comedy Central
http://cord-cutter.net:8080/30550113/30550113/7466
#EXTINF:-1 tvg-chno="28" tvg-id="Comedy.TV.HD.us2" tvg-name="Comedy TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s82470_dark_360w_270h.png" group-title="TV",Comedy TV
https://fl1.moveonjoy.com/Comedy_TV/index.m3u8
#EXTINF:-1 tvg-chno="29" tvg-id="Comet.us2" tvg-name="Comet TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s97051_dark_360w_270h.png" group-title="TV",Comet TV
http://cord-cutter.net:8080/30550113/30550113/125831
#EXTINF:-1 tvg-chno="30" tvg-id="Cooking.Channel.HD.us2" tvg-name="Cooking Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s30156_dark_360w_270h.png" group-title="TV",Cooking Channel
https://fl1.moveonjoy.com/COOKING_CHANNEL/index.m3u8
#EXTINF:-1 tvg-chno="31" tvg-id="Court.TV.us2" tvg-name="Court TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s117160_dark_360w_270h.png" group-title="TV",Court TV
http://cord-cutter.net:8080/30550113/30550113/21092
#EXTINF:-1 tvg-chno="32" tvg-id="COZI.TV.us2" tvg-name="Cozi TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s78851_dark_360w_270h.png" group-title="TV",Cozi TV
http://cord-cutter.net:8080/30550113/30550113/11868
#EXTINF:-1 tvg-chno="33" tvg-id="Crime.and.Investigation.Network.HD.us2" tvg-name="Crime & Investigation Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61469_dark_360w_270h.png" group-title="TV",Crime & Investigation Network
https://fl1.moveonjoy.com/Crime_and_Investigation_Network/index.m3u8
#EXTINF:-1 tvg-chno="34" tvg-id="WKCF-DT.us_locals1" tvg-name="CW" tvg-logo="https://i.gyazo.com/afd5b481b327d204087dfde6a7741f9d.png" group-title="TV",CW
http://cord-cutter.net:8080/30550113/30550113/120893
#EXTINF:-1 tvg-chno="35" tvg-id="Discovery.Channel.HD.us2" tvg-name="Discovery Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11150_dark_360w_270h.png" group-title="TV",Discovery Channel
http://cord-cutter.net:8080/30550113/30550113/46720
#EXTINF:-1 tvg-chno="36" tvg-id="Discovery.Family.Channel.HD.us2" tvg-name="Discovery Family Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16618_dark_360w_270h.png" group-title="TV",Discovery Family Channel
http://cord-cutter.net:8080/30550113/30550113/10538
#EXTINF:-1 tvg-chno="37" tvg-id="Discovery.Life.Channel.us2" tvg-name="Discovery Life" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16125_dark_360w_270h.png" group-title="TV",Discovery Life
https://fl1.moveonjoy.com/DISCOVERY_LIFE/index.m3u8
#EXTINF:-1 tvg-chno="38" tvg-id="Science.Channel.HD.us2" tvg-name="Discovery Science" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s24282_dark_360w_270h.png" group-title="TV",Discovery Science
https://fl1.moveonjoy.com/Discovery_Science/index.m3u8
#EXTINF:-1 tvg-chno="39" tvg-id="Disney.Channel.HD.us2" tvg-name="Disney" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10171_dark_360w_270h.png" group-title="TV",Disney
http://cord-cutter.net:8080/30550113/30550113/2206
#EXTINF:-1 tvg-chno="40" tvg-id="Disney.XD.HD.us2" tvg-name="Disney XD" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18279_dark_360w_270h.png" group-title="TV",Disney XD
http://cord-cutter.net:8080/30550113/30550113/75621
#EXTINF:-1 tvg-chno="41" tvg-id="E!.Entertainment.Television.HD.us2" tvg-name="E! Entertainment" tvg-logo="https://i.gyazo.com/f73b80e3eb56cec06df6705d00e2f422.png" group-title="TV",E! Entertainment
http://fl1.moveonjoy.com/E_ENTERTAINMENT_TELEVISION/index.m3u8
#EXTINF:-1 tvg-chno="42" tvg-id="ESPN.HD.us2" tvg-name="ESPN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10179_dark_360w_270h.png" group-title="TV",ESPN
http://cord-cutter.net:8080/30550113/30550113/14197
#EXTINF:-1 tvg-chno="43" tvg-id="ESPNEWS.HD.us2" tvg-name="ESPN News" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16485_dark_360w_270h.png" group-title="TV",ESPN News
http://cord-cutter.net:8080/30550113/30550113/17707
#EXTINF:-1 tvg-chno="44" tvg-id="ESPNU.HD.us2" tvg-name="ESPN U" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s45654_dark_360w_270h.png" group-title="TV",ESPN U
http://cord-cutter.net:8080/30550113/30550113/10255
#EXTINF:-1 tvg-chno="45" tvg-id="ESPN2.HD.us2" tvg-name="ESPN2" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12444_dark_360w_270h.png" group-title="TV",ESPN2
http://cord-cutter.net:8080/30550113/30550113/2210
#EXTINF:-1 tvg-chno="46" tvg-id="FanDuel.Sports.Network.Detroit.24/7.HDTV.us" tvg-name="FDSN Detroit" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s54286_dark_360w_270h.png" group-title="TV",FDSN Detroit
http://cord-cutter.net:8080/30550113/30550113/20936
#EXTINF:-1 tvg-chno="47" tvg-id="FanDuel.Sports.Network.Florida.HDTV.(Out.of.Market).us" tvg-name="FDSN Florida" tvg-logo="https://i.gyazo.com/fad701fbaaafe161b13b23ed9b50179b.png" group-title="TV",FDSN Florida
http://cord-cutter.net:8080/30550113/30550113/46794
#EXTINF:-1 tvg-chno="48" tvg-id="FanDuel.Sports.Network.Midwest.24/7.HDTV.us" tvg-name="FDSN Midwest" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11058_dark_360w_270h.png" group-title="TV",FDSN Midwest
http://cord-cutter.net:8080/30550113/30550113/66795
#EXTINF:-1 tvg-chno="49" tvg-id="FanDuel.Sports.Network.North.HDTV.us" tvg-name="FDSN North" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10977_dark_360w_270h.png" group-title="TV",FDSN North
http://cord-cutter.net:8080/30550113/30550113/58827
#EXTINF:-1 tvg-chno="50" tvg-id="FanDuel.Sports.Network.Ohio.(Cleveland).HDTV.us" tvg-name="FDSN Ohio" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49691_dark_360w_270h.png" group-title="TV",FDSN Ohio
http://cord-cutter.net:8080/30550113/30550113/17752
#EXTINF:-1 tvg-chno="51" tvg-id="FanDuel.Sports.Network.Oklahoma.24/7.HDTV.(Tulsa).us" tvg-name="FDSN Oklahoma" tvg-logo="https://i.gyazo.com/80ad6fd142cd67f06eef58d9ce5aa72b.png" group-title="TV",FDSN Oklahoma
http://cord-cutter.net:8080/30550113/30550113/20934
#EXTINF:-1 tvg-chno="52" tvg-id="FanDuel.Sports.Network.SoCal.HDTV.us" tvg-name="FDSN SoCal" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16743_dark_360w_270h.png" group-title="TV",FDSN SoCal
http://cord-cutter.net:8080/30550113/30550113/221151
#EXTINF:-1 tvg-chno="53" tvg-id="FanDuel.Sports.Network.Southeast.HDTV.(Mont./Birm./Dothan/Mobile.AL).us" tvg-name="FDSN Southeast" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s20789_dark_360w_270h.png" group-title="TV",FDSN Southeast
http://cord-cutter.net:8080/30550113/30550113/81111
#EXTINF:-1 tvg-chno="54" tvg-id="FanDuel.Sports.Network.Southwest.HDTV.24/7.(Main).us" tvg-name="FDSN Southwest" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59629_dark_360w_270h.png" group-title="TV",FDSN Southwest
http://cord-cutter.net:8080/30550113/30550113/21843
#EXTINF:-1 tvg-chno="55" tvg-id="FanDuel.Sports.Network.Sun.South.24/7.HDTV.(South.Marlins,.Rays,.Heat).us" tvg-name="FDSN Sun" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61084_dark_360w_270h.png" group-title="TV",FDSN Sun
http://cord-cutter.net:8080/30550113/30550113/104917
#EXTINF:-1 tvg-chno="56" tvg-id="FanDuel.Sports.Network.West.HDTV.us" tvg-name="FDSN West" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59627_dark_360w_270h.png" group-title="TV",FDSN West
http://cord-cutter.net:8080/30550113/30550113/20932
#EXTINF:-1 tvg-chno="57" tvg-id="FanDuel.Sports.Network.Wisconsin.24/7.HDTV.us" tvg-name="FDSN Wisconsin" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16348_dark_360w_270h.png" group-title="TV",FDSN Wisconsin
http://cord-cutter.net:8080/30550113/30550113/78599
#EXTINF:-1 tvg-chno="58" tvg-id="plex.tv.FIFA+.plex" tvg-name="FIFA+ TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s136235_dark_360w_270h.png" group-title="TV",FIFA+ TV
https://jmp2.uk/stvp-IN270000230
#EXTINF:-1 tvg-chno="59" tvg-id="Food.Network.HD.us2" tvg-name="Food Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s44718_dark_360w_270h.png" group-title="TV",Food Network
http://cord-cutter.net:8080/30550113/30550113/7323
#EXTINF:-1 tvg-chno="60" tvg-id="WFLX-DT.us_locals1" tvg-name="Fox" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s28719_dark_360w_270h.png" group-title="TV",Fox
http://cord-cutter.net:8080/30550113/30550113/121595
#EXTINF:-1 tvg-chno="61" tvg-id="Fox.Business.HD.us2" tvg-name="Fox Business" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58649_dark_360w_270h.png" group-title="TV",Fox Business
http://cord-cutter.net:8080/30550113/30550113/17639
#EXTINF:-1 tvg-chno="62" tvg-id="Fox.News.Channel.HD.us2" tvg-name="Fox News" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16374_dark_360w_270h.png" group-title="TV",Fox News
http://cord-cutter.net:8080/30550113/30550113/1818
#EXTINF:-1 tvg-chno="63" tvg-id="FS1.Fox.Sports.1.HD.us2" tvg-name="Fox Sports 1" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s82541_dark_360w_270h.png" group-title="TV",Fox Sports 1
http://cord-cutter.net:8080/30550113/30550113/1846
#EXTINF:-1 tvg-chno="64" tvg-id="FS2.Fox.Sports.2.HD.us2" tvg-name="Fox Sports 2" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s33178_dark_360w_270h.png" group-title="TV",Fox Sports 2
http://cord-cutter.net:8080/30550113/30550113/1847
#EXTINF:-1 tvg-chno="65" tvg-id="Freeform.HD.us2" tvg-name="Freeform TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10093_dark_360w_270h.png" group-title="TV",Freeform TV
http://cord-cutter.net:8080/30550113/30550113/13370
#EXTINF:-1 tvg-chno="66" tvg-id="Fuse.HD.us2" tvg-name="FUSE" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14929_dark_360w_270h.png" group-title="TV",FUSE
http://fl1.moveonjoy.com/FUSE/index.m3u8
#EXTINF:-1 tvg-chno="67" tvg-id="FX.HD.us2" tvg-name="FX" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14321_dark_360w_270h.png" group-title="TV",FX
http://cord-cutter.net:8080/30550113/30550113/46690
#EXTINF:-1 tvg-chno="68" tvg-id="FX.Movie.Channel.HD.us2" tvg-name="FX Movie Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s70253_dark_360w_270h.png" group-title="TV",FX Movie Channel
http://fl1.moveonjoy.com/FX_MOVIE/index.m3u8
#EXTINF:-1 tvg-chno="69" tvg-id="FXX.HD.us2" tvg-name="FXX" tvg-logo="https://raw.githubusercontent.com/tv-logo/tv-logos/refs/heads/main/countries/united-states/fxx-us.png" group-title="TV",FXX
http://cord-cutter.net:8080/30550113/30550113/46699
#EXTINF:-1 tvg-chno="70" tvg-id="FYI.Channel.HD.us2" tvg-name="FYI TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16834_dark_360w_270h.png" group-title="TV",FYI TV
http://fl1.moveonjoy.com/FYI/index.m3u8
#EXTINF:-1 tvg-chno="71" tvg-id="Game.Show.Network.HD.us2" tvg-name="Game Show Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14909_dark_360w_270h.png" group-title="TV",Game Show Network
http://cord-cutter.net:8080/30550113/30550113/120633
#EXTINF:-1 tvg-chno="72" tvg-id="get.us2" tvg-name="getTV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s82563_dark_360w_270h.png" group-title="TV",getTV
http://cord-cutter.net:8080/30550113/30550113/18366
#EXTINF:-1 tvg-chno="73" tvg-id="Golf.Channel.HD.us2" tvg-name="Golf Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14899_dark_360w_270h.png" group-title="TV",Golf Channel
https://fl1.moveonjoy.com/GOLF/index.m3u8
#EXTINF:-1 tvg-chno="74" tvg-id="Grit.us2" tvg-name="Grit TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s89922_dark_360w_270h.png" group-title="TV",Grit TV
http://fl1.moveonjoy.com/GRIT_TV/index.m3u8
#EXTINF:-1 tvg-chno="75" tvg-id="Hallmark.Channel.HD.us2" tvg-name="Hallmark Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11221_dark_360w_270h.png" group-title="TV",Hallmark Channel
http://fl1.moveonjoy.com/HALLMARK_CHANNEL/index.m3u8
#EXTINF:-1 tvg-chno="76" tvg-id="Hallmark.Family.us2" tvg-name="Hallmark Family" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s105723_dark_360w_270h.png" group-title="TV",Hallmark Family
https://fl1.moveonjoy.com/HALLMARK_DRAMA/index.m3u8
#EXTINF:-1 tvg-chno="77" tvg-id="Hallmark.Mystery.HD.us2" tvg-name="Hallmark Mystery" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61522_dark_360w_270h.png" group-title="TV",Hallmark Mystery
https://fl1.moveonjoy.com/HALLMARK_MOVIES_MYSTERIES/index.m3u8
#EXTINF:-1 tvg-chno="78" tvg-id="HBO.East.us2" tvg-name="HBO" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10240_dark_360w_270h.png" group-title="TV",HBO
http://cord-cutter.net:8080/30550113/30550113/46713
#EXTINF:-1 tvg-chno="79" tvg-id="HBO2.HD.us2" tvg-name="HBO 2" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s68140_dark_360w_270h.png" group-title="TV",HBO 2
http://fl1.moveonjoy.com/HBO_2/index.m3u8
#EXTINF:-1 tvg-chno="80" tvg-id="HBO.Comedy.HD.us2" tvg-name="HBO Comedy" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s59839_dark_360w_270h.png" group-title="TV",HBO Comedy
http://fl1.moveonjoy.com/HBO_COMEDY/index.m3u8
#EXTINF:-1 tvg-chno="81" tvg-id="HBO.Drama.us2" tvg-name="HBO Family" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s34879_dark_360w_270h.png" group-title="TV",HBO Family
https://fl1.moveonjoy.com/HBO_FAMILY/index.m3u8
#EXTINF:-1 tvg-chno="82" tvg-id="HBO.Zone.HD.us2" tvg-name="HBO Zone" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18431_dark_360w_270h.png" group-title="TV",HBO Zone
https://fl1.moveonjoy.com/HBO_ZONE/index.m3u8
#EXTINF:-1 tvg-chno="83" tvg-id="History.HD.us2" tvg-name="History Channel" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s87679_dark_360w_270h.png" group-title="TV",History Channel
http://cord-cutter.net:8080/30550113/30550113/15017
#EXTINF:-1 tvg-chno="84" tvg-id="HLN.HD.us2" tvg-name="HLN TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10145_dark_360w_270h.png" group-title="TV",HLN TV
https://turnerlive.warnermediacdn.com/hls/live/586496/cnngo/hln/VIDEO_0_3564000.m3u8
#EXTINF:-1 tvg-chno="85" tvg-id="Investigation.Discovery.HD.us2" tvg-name="Investigation Discovery" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16615_dark_360w_270h.png" group-title="TV",Investigation Discovery
https://fl1.moveonjoy.com/INVESTIGATION_DISCOVERY/index.m3u8
#EXTINF:-1 tvg-chno="86" tvg-id="ION.Television.HD.us2" tvg-name="ION TV" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18633_dark_360w_270h.png" group-title="TV",ION TV
http://cord-cutter.net:8080/30550113/30550113/9297
#EXTINF:-1 tvg-chno="87" tvg-id="Lifetime.HD.us2" tvg-name="Lifetime" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10918_dark_360w_270h.png" group-title="TV",Lifetime
http://fl1.moveonjoy.com/LIFETIME/index.m3u8
#EXTINF:-1 tvg-chno="88" tvg-id="LMN.HD.us2" tvg-name="Lifetime Movie Network" tvg-logo="https://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18480_dark_360w_270h.png" group-title="TV",Lifetime Movie Network
https://fl1.moveonjoy.com/LIFETIME_MOVIE_NETWORK/index.m3u8
#EXTINF:-1 tvg-chno="89" tvg-id="Marquee.Sports.Network.HD.us2" tvg-name="Marquee Sports Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s113768_dark_360w_270h.png" group-title="TV",Marquee Sports Network
http://cord-cutter.net:8080/30550113/30550113/13379
#EXTINF:-1 tvg-chno="90" tvg-id="MLB.Network.HD.us2" tvg-name="MLB Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s62081_dark_360w_270h.png" group-title="TV",MLB Network
https://fl1.moveonjoy.com/MLB_NETWORK/index.m3u8
#EXTINF:-1 tvg-chno="91" tvg-id="MOTORTREND.HD.us2" tvg-name="MotorTrend TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s31046_dark_360w_270h.png" group-title="TV",MotorTrend TV
http://cord-cutter.net:8080/30550113/30550113/10399
#EXTINF:-1 tvg-chno="92" tvg-id="MSG.National.us2" tvg-name="MSG" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10979_dark_360w_270h.png" group-title="TV",MSG
http://cord-cutter.net:8080/30550113/30550113/21090
#EXTINF:-1 tvg-chno="93" tvg-id="MSNBC.HD.us2" tvg-name="MSNBC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16300_dark_360w_270h.png" group-title="TV",MSNBC
https://fl1.moveonjoy.com/MSNBC/index.m3u8
#EXTINF:-1 tvg-chno="94" tvg-id="MTV.-.Music.Television.HD.us2" tvg-name="MTV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10986_dark_360w_270h.png" group-title="TV",MTV
http://fl1.moveonjoy.com/MTV/index.m3u8
#EXTINF:-1 tvg-chno="95" tvg-id="National.Geographic.HD.us2" tvg-name="National Geographic" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49438_dark_360w_270h.png" group-title="TV",National Geographic
http://fl1.moveonjoy.com/National_Geographic/index.m3u8
#EXTINF:-1 tvg-chno="96" tvg-id="NBA.TV.HD.us2" tvg-name="NBA TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s32281_dark_360w_270h.png" group-title="TV",NBA TV
http://fl1.moveonjoy.com/NBA_TV/index.m3u8
#EXTINF:-1 tvg-chno="97" tvg-id="WFLA-DT.us_locals1" tvg-name="NBC" tvg-logo="https://i.gyazo.com/39d42952500205d04e448247b7889256.png" group-title="TV",NBC
https://fl1.moveonjoy.com/FL_Tampa_NBC/index.m3u8
#EXTINF:-1 tvg-chno="98" tvg-id="NBC.Sports.Bay.Area.HD.us2" tvg-name="NBC Sports Bay Area" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s63138_dark_360w_270h.png" group-title="TV",NBC Sports Bay Area
http://cord-cutter.net:8080/30550113/30550113/9900
#EXTINF:-1 tvg-chno="99" tvg-id="NBC.Sports.Boston.HD.us2" tvg-name="NBC Sports Boston" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49198_dark_360w_270h.png" group-title="TV",NBC Sports Boston
http://cord-cutter.net:8080/30550113/30550113/20939
#EXTINF:-1 tvg-chno="100" tvg-id="NBC.Sports.California.SAT.us2" tvg-name="NBC Sports California" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s45540_dark_360w_270h.png" group-title="TV",NBC Sports California
http://cord-cutter.net:8080/30550113/30550113/20940
#EXTINF:-1 tvg-chno="101" tvg-id="a90a91570ce0536cbb22b591ad7e0da2" tvg-name="NBC Sports NOW" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s114140_dark_360w_270h.png" group-title="TV",NBC Sports NOW
https://jmp2.uk/plu-6549306c83595c000815a696.m3u8
#EXTINF:-1 tvg-chno="102" tvg-id="NBC.Sports.Philadelphia.HD.us2" tvg-name="NBC Sports Philadelphia" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s32571_dark_360w_270h.png" group-title="TV",NBC Sports Philadelphia
http://cord-cutter.net:8080/30550113/30550113/20943
#EXTINF:-1 tvg-chno="103" tvg-id="New.England.Sports.Network.HD.us2" tvg-name="NESN" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s35038_dark_360w_270h.png" group-title="TV",NESN
http://cord-cutter.net:8080/30550113/30550113/31637
#EXTINF:-1 tvg-chno="104" tvg-id="NewsNation.us2" tvg-name="NewsNation" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s91096_dark_360w_270h.png" group-title="TV",NewsNation
http://cord-cutter.net:8080/30550113/30550113/161450
#EXTINF:-1 tvg-chno="105" tvg-id="NFL.Network.HD.us2" tvg-name="NFL Network" tvg-logo="https://raw.githubusercontent.com/tv-logo/tv-logos/refs/heads/main/countries/united-states/nfl-network-hz-us.png" group-title="TV",NFL Network
http://cord-cutter.net:8080/30550113/30550113/159117
#EXTINF:-1 tvg-chno="106" tvg-id="NFL.RedZone.HD.us2" tvg-name="NFL RedZone" tvg-logo="https://raw.githubusercontent.com/tv-logo/tv-logos/refs/heads/main/countries/united-states/nfl-red-zone-hz-us.png" group-title="TV",NFL RedZone
http://cord-cutter.net:8080/30550113/30550113/208830
#EXTINF:-1 tvg-chno="107" tvg-id="NHL.Network.HD.us2" tvg-name="NHL Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58570_dark_360w_270h.png" group-title="TV",NHL Network
http://23.237.104.106:8080/USA_NHL_NETWORK/index.m3u8
#EXTINF:-1 tvg-chno="108" tvg-id="Nickelodeon.HD.us2" tvg-name="Nickelodeon" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11006_dark_360w_270h.png" group-title="TV",Nickelodeon
http://cord-cutter.net:8080/30550113/30550113/38
#EXTINF:-1 tvg-chno="109" tvg-id="Nicktoons.us2" tvg-name="Nicktoons" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s30420_dark_360w_270h.png" group-title="TV",Nicktoons
http://cord-cutter.net:8080/30550113/30550113/36
#EXTINF:-1 tvg-chno="110" tvg-id="Outdoor.Channel.HD.us2" tvg-name="Outdoor Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s14776_dark_360w_270h.png" group-title="TV",Outdoor Channel
http://fl1.moveonjoy.com/OUTDOOR_CHANNEL/index.m3u8
#EXTINF:-1 tvg-chno="111" tvg-id="Ovation.HD.us2" tvg-name="Ovation" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s15807_dark_360w_270h.png" group-title="TV",Ovation
https://fl1.moveonjoy.com/Ovation/index.m3u8
#EXTINF:-1 tvg-chno="112" tvg-id="Oxygen.True.Crime.HD.us2" tvg-name="Oxygen" tvg-logo="https://i.gyazo.com/68db5ec4dba40181592406e7c3128fc7.png" group-title="TV",Oxygen
https://fl1.moveonjoy.com/OXYGEN/index.m3u8
#EXTINF:-1 tvg-chno="113" tvg-id="Paramount.Network.HD.us2" tvg-name="Paramount Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11163_dark_360w_270h.png" group-title="TV",Paramount Network
https://fl1.moveonjoy.com/PARAMOUNT_NETWORK/index.m3u8
#EXTINF:-1 tvg-chno="114" tvg-id="POP.HD.us2" tvg-name="Pop TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16715_dark_360w_270h.png" group-title="TV",Pop TV
https://fl1.moveonjoy.com/Pop_TV/index.m3u8
#EXTINF:-1 tvg-chno="115" tvg-id="Premier.Sports.1.HD.uk" tvg-name="Premier Sports 1" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s79027_dark_360w_270h.png" group-title="TV",Premier Sports 1
http://c3921155.edmonst.net/iptv/ZQHGFQ9PRYN859UHYGWY674B/2160/index.m3u8
#EXTINF:-1 tvg-chno="116" tvg-id="ReelzChannel.HD.us2" tvg-name="Reelz Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s68385_dark_360w_270h.png" group-title="TV",Reelz Channel
http://cord-cutter.net:8080/30550113/30550113/10526
#EXTINF:-1 tvg-chno="117" tvg-id="ROOT.Sports.Northwest.HD.us2" tvg-name="Root Sports" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11062_dark_360w_270h.png" group-title="TV",Root Sports
http://cord-cutter.net:8080/30550113/30550113/85232
#EXTINF:-1 tvg-chno="118" tvg-id="SEC.Network.HD.us2" tvg-name="SEC Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s89535_dark_360w_270h.png" group-title="TV",SEC Network
http://cord-cutter.net:8080/30550113/30550113/17608
#EXTINF:-1 tvg-chno="119" tvg-id="Paramount+.with.Showtime.HD.us2" tvg-name="Showtime" tvg-logo="https://raw.githubusercontent.com/tv-logo/tv-logos/refs/heads/main/countries/united-states/showtime-us.png" group-title="TV",Showtime
http://fl1.moveonjoy.com/SHOWTIME/index.m3u8
#EXTINF:-1 tvg-chno="120" tvg-id="Smithsonian.HD.Network.us2" tvg-name="Smithsonian Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58532_dark_360w_270h.png" group-title="TV",Smithsonian Channel
http://fl1.moveonjoy.com/SMITHSONIAN_CHANNEL/index.m3u8
#EXTINF:-1 tvg-chno="121" tvg-id="Sony.Movie.Channel.HD.us2" tvg-name="Sony Movie Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s69130_dark_360w_270h.png" group-title="TV",Sony Movie Channel
http://fl1.moveonjoy.com/Sony_Movie_Channel/index.m3u8
#EXTINF:-1 tvg-chno="122" tvg-id="Space.City.Home.Network.HD.us2" tvg-name="Space City Home Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s77744_dark_360w_270h.png" group-title="TV",Space City Home Network
http://cord-cutter.net:8080/30550113/30550113/213668
#EXTINF:-1 tvg-chno="123" tvg-id="Spectrum.SportsNet.LA.Dodgers.HD.us2" tvg-name="Spectrum SportsNet LA Dodgers" tvg-logo="https://i.gyazo.com/765cce528ddda366695bb178d9dee6da.png" group-title="TV",Spectrum SportsNet LA Dodgers
http://cord-cutter.net:8080/30550113/30550113/31636
#EXTINF:-1 tvg-chno="124" tvg-id="Spectrum.SportsNet.Lakers.HD.us2" tvg-name="Spectrum SportsNet Lakers" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s77422_dark_360w_270h.png" group-title="TV",Spectrum SportsNet Lakers
http://cord-cutter.net:8080/30550113/30550113/20946
#EXTINF:-1 tvg-chno="125" tvg-id="Sportsnet.360.HD.ca2" tvg-name="Sportsnet 360" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s49952_dark_360w_270h.png" group-title="TV",Sportsnet 360
http://cord-cutter.net:8080/30550113/30550113/57299
#EXTINF:-1 tvg-chno="126" tvg-id="Sportsnet.East.ca2" tvg-name="Sportsnet East" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s62109_dark_360w_270h.png" group-title="TV",Sportsnet East
http://cord-cutter.net:8080/30550113/30550113/57298
#EXTINF:-1 tvg-chno="127" tvg-id="SNY.SportsNet.New.York.HD.us2" tvg-name="SportsNet New York" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s50038_dark_360w_270h.png" group-title="TV",SportsNet New York
http://cord-cutter.net:8080/30550113/30550113/20938
#EXTINF:-1 tvg-chno="128" tvg-id="Sportsnet.One.ca2" tvg-name="Sportsnet One" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s68859_dark_360w_270h.png" group-title="TV",Sportsnet One
http://cord-cutter.net:8080/30550113/30550113/10247
#EXTINF:-1 tvg-chno="129" tvg-id="Sportsnet.Ontario.HD.ca2" tvg-name="Sportsnet Ontario" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s62111_dark_360w_270h.png" group-title="TV",Sportsnet Ontario
http://cord-cutter.net:8080/30550113/30550113/11649
#EXTINF:-1 tvg-chno="130" tvg-id="SportsNet.Pittsburgh.HD.us2" tvg-name="SportsNet Pittsburgh" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s26028_dark_360w_270h.png" group-title="TV",SportsNet Pittsburgh
http://cord-cutter.net:8080/30550113/30550113/108178
#EXTINF:-1 tvg-chno="131" tvg-id="Starz.HD.us2" tvg-name="Starz" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12719_dark_360w_270h.png" group-title="TV",Starz
http://cord-cutter.net:8080/30550113/30550113/9299
#EXTINF:-1 tvg-chno="132" tvg-id="Syfy.HD.us2" tvg-name="Syfy" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11097_dark_360w_270h.png" group-title="TV",Syfy
http://cord-cutter.net:8080/30550113/30550113/46685
#EXTINF:-1 tvg-chno="133" tvg-id="TBS.HD.us2" tvg-name="TBS" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11867_dark_360w_270h.png" group-title="TV",TBS
https://turnerlive.warnermediacdn.com/hls/live/2023172/tbseast/slate/VIDEO_0_3564000.m3u8
#EXTINF:-1 tvg-chno="134" tvg-id="Tennis.Channel.HD.us2" tvg-name="Tennis Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s60316_dark_360w_270h.png" group-title="TV",Tennis Channel
https://fl1.moveonjoy.com/TENNIS_CHANNEL/index.m3u8
#EXTINF:-1 tvg-chno="135" tvg-id="The.Weather.Channel.HD.us2" tvg-name="The Weather Channel" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s58812_dark_360w_270h.png" group-title="TV",The Weather Channel
http://cord-cutter.net:8080/30550113/30550113/18926
#EXTINF:-1 tvg-chno="136" tvg-id="TLC.HD.(US).us2" tvg-name="TLC" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11158_dark_360w_270h.png" group-title="TV",TLC
http://cord-cutter.net:8080/30550113/30550113/12734
#EXTINF:-1 tvg-chno="137" tvg-id="TNT.HD.us2" tvg-name="TNT" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11164_dark_360w_270h.png" group-title="TV",TNT
https://turnerlive.warnermediacdn.com/hls/live/2023168/tnteast/slate/VIDEO_0_3564000.m3u8
#EXTINF:-1 tvg-chno="138" tvg-id="truTV.HD.us2" tvg-name="truTV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s10153_dark_360w_270h.png" group-title="TV",truTV
https://turnerlive.warnermediacdn.com/hls/live/2023176/trueast/slate/VIDEO_0_3564000.m3u8
#EXTINF:-1 tvg-chno="139" tvg-id="TSN.1.ca2" tvg-name="TSN1" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11182_dark_360w_270h.png" group-title="TV",TSN1
http://cord-cutter.net:8080/30550113/30550113/57292
#EXTINF:-1 tvg-chno="140" tvg-id="TSN.2.ca2" tvg-name="TSN2" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s61474_dark_360w_270h.png" group-title="TV",TSN2
http://cord-cutter.net:8080/30550113/30550113/47442
#EXTINF:-1 tvg-chno="141" tvg-id="Turner.Classic.Movies.HD.us2" tvg-name="Turner Classic Movies" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s12852_dark_360w_270h.png" group-title="TV",Turner Classic Movies
https://turnerlive.warnermediacdn.com/hls/live/2023186/tcmeast/noslate/VIDEO_1_5128000.m3u8
#EXTINF:-1 tvg-chno="142" tvg-id="TV.Land.HD.us2" tvg-name="TV Land" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s16123_dark_360w_270h.png" group-title="TV",TV Land
https://fl1.moveonjoy.com/TV_LAND/index.m3u8
#EXTINF:-1 tvg-chno="143" tvg-id="TV.ONE.HD.us2" tvg-name="TV One" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s35513_dark_360w_270h.png" group-title="TV",TV One
https://fl1.moveonjoy.com/TV_ONE/index.m3u8
#EXTINF:-1 tvg-chno="144" tvg-id="USA.Network.HD.us2" tvg-name="USA East" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s11207_dark_360w_270h.png" group-title="TV",USA East
http://cord-cutter.net:8080/30550113/30550113/10252
#EXTINF:-1 tvg-chno="145" tvg-id="Vice.HD.us2" tvg-name="Vice TV" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s18822_dark_360w_270h.png" group-title="TV",Vice TV
http://cord-cutter.net:8080/30550113/30550113/46697
#EXTINF:-1 tvg-chno="146" tvg-id="Willow.Cricket.HD.us2" tvg-name="Willow Cricket" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s68605_dark_360w_270h.png" group-title="TV",Willow Cricket
http://cord-cutter.net:8080/30550113/30550113/41979
#EXTINF:-1 tvg-chno="147" tvg-id="Yes.Network.us2" tvg-name="YES Network" tvg-logo="http://schedulesdirect-api20141201-logos.s3.dualstack.us-east-1.amazonaws.com/stationLogos/s30017_dark_360w_270h.png" group-title="TV",YES Network
https://fl1.moveonjoy.com/YES_NETWORK/index.m3u8
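A note on the format above: each channel in base.m3u8 is a two-line pair, an #EXTINF header whose tvg-* attributes (channel number, EPG id, name, logo, group) are plain key="value" tokens, followed by the stream URL. fetch.py below only extracts tvg-chno to find the highest channel number, but the same attribute syntax parses generically. A minimal sketch, where parse_attrs is a hypothetical helper and not part of this repo:

import re

# key="value" tokens inside an #EXTINF line
ATTR = re.compile(r'([\w-]+)="([^"]*)"')

def parse_attrs(extinf_line: str) -> dict[str, str]:
    return dict(ATTR.findall(extinf_line))

attrs = parse_attrs('#EXTINF:-1 tvg-chno="1" tvg-name="A&E TV" group-title="TV",A&E TV')
assert attrs["tvg-chno"] == "1" and attrs["group-title"] == "TV"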
M3U8/events.m3u8 (new file, 1039 lines)
File diff suppressed because it is too large
M3U8/fetch.py (new file, 138 lines)
@@ -0,0 +1,138 @@
#!/usr/bin/env python3
import asyncio
import re
from pathlib import Path

from scrapers import (
    fawa,
    lotus,
    pixel,
    ppv,
    roxie,
    shark,
    sport9,
    streambtw,
    streamcenter,
    streamfree,
    streamsgate,
    strmd,
    tvpass,
    watchfooty,
    webcast,
)
from scrapers.utils import get_logger, network

log = get_logger(__name__)

BASE_FILE = Path(__file__).parent / "base.m3u8"

EVENTS_FILE = Path(__file__).parent / "events.m3u8"

COMBINED_FILE = Path(__file__).parent / "TV.m3u8"


def load_base() -> tuple[list[str], int]:
    log.info("Fetching base M3U8")

    data = BASE_FILE.read_text(encoding="utf-8")

    pattern = re.compile(r'tvg-chno="(\d+)"')

    last_chnl_num = max(map(int, pattern.findall(data)), default=0)

    return data.splitlines(), last_chnl_num


async def main() -> None:
    base_m3u8, tvg_chno = load_base()

    tasks = [
        asyncio.create_task(fawa.scrape(network.client)),
        asyncio.create_task(lotus.scrape(network.client)),
        asyncio.create_task(pixel.scrape()),
        asyncio.create_task(ppv.scrape(network.client)),
        asyncio.create_task(roxie.scrape(network.client)),
        asyncio.create_task(shark.scrape(network.client)),
        asyncio.create_task(sport9.scrape(network.client)),
        asyncio.create_task(streambtw.scrape(network.client)),
        asyncio.create_task(streamcenter.scrape(network.client)),
        asyncio.create_task(streamfree.scrape(network.client)),
        asyncio.create_task(streamsgate.scrape(network.client)),
        asyncio.create_task(strmd.scrape(network.client)),
        asyncio.create_task(tvpass.scrape(network.client)),
        asyncio.create_task(watchfooty.scrape(network.client)),
        asyncio.create_task(webcast.scrape(network.client)),
    ]

    await asyncio.gather(*tasks)

    additions = (
        fawa.urls
        | lotus.urls
        | pixel.urls
        | ppv.urls
        | roxie.urls
        | shark.urls
        | sport9.urls
        | streambtw.urls
        | streamcenter.urls
        | strmd.urls
        | streamfree.urls
        | streamsgate.urls
        | tvpass.urls
        | watchfooty.urls
        | webcast.urls
    )

    live_events: list[str] = []

    combined_channels: list[str] = []

    for i, (event, info) in enumerate(
        sorted(additions.items()),
        start=1,
    ):
        extinf_all = (
            f'#EXTINF:-1 tvg-chno="{tvg_chno + i}" tvg-id="{info["id"]}" '
            f'tvg-name="{event}" tvg-logo="{info["logo"]}" group-title="Live Events",{event}'
        )

        extinf_live = (
            f'#EXTINF:-1 tvg-chno="{i}" tvg-id="{info["id"]}" '
            f'tvg-name="{event}" tvg-logo="{info["logo"]}" group-title="Live Events",{event}'
        )

        vlc_block = [
            f'#EXTVLCOPT:http-referrer={info["base"]}',
            f'#EXTVLCOPT:http-origin={info["base"]}',
            f"#EXTVLCOPT:http-user-agent={network.UA}",
            info["url"],
        ]

        combined_channels.extend(["\n" + extinf_all, *vlc_block])

        live_events.extend(["\n" + extinf_live, *vlc_block])

    COMBINED_FILE.write_text(
        "\n".join(base_m3u8 + combined_channels),
        encoding="utf-8",
    )

    log.info(f"Base + Events saved to {COMBINED_FILE.resolve()}")

    EVENTS_FILE.write_text(
        '#EXTM3U url-tvg="https://raw.githubusercontent.com/doms9/iptv/refs/heads/default/EPG/TV.xml"\n'
        + "\n".join(live_events),
        encoding="utf-8",
    )

    log.info(f"Events saved to {EVENTS_FILE.resolve()}")


if __name__ == "__main__":
    asyncio.run(main())

    try:
        asyncio.run(network.client.aclose())
    except Exception:
        pass
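For reference, each scraped event that main() collects becomes an #EXTINF header in the "Live Events" group followed by three #EXTVLCOPT lines (referrer, origin, and user agent, so players replay the headers the stream host expects) and the captured URL. An illustrative block with placeholder values, not real output:

#EXTINF:-1 tvg-chno="148" tvg-id="Live.Event.us" tvg-name="[NBA] Team A vs Team B (FAWA)" tvg-logo="https://example.com/logo.png" group-title="Live Events",[NBA] Team A vs Team B (FAWA)
#EXTVLCOPT:http-referrer=http://www.fawanews.sc/
#EXTVLCOPT:http-origin=http://www.fawanews.sc/
#EXTVLCOPT:http-user-agent=Mozilla/5.0 (placeholder)
https://example.com/live/stream.m3u8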
M3U8/scrapers/fawa.py (new file, 160 lines)
@@ -0,0 +1,160 @@
import re
from functools import partial
from urllib.parse import quote, urljoin

import httpx
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("fawa.json", exp=10_800)

BASE_URL = "http://www.fawanews.sc/"

TAG = "FAWA"


async def process_event(
    client: httpx.AsyncClient,
    url: str,
    url_num: int,
) -> str | None:

    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
        return

    valid_m3u8 = re.compile(
        r'var\s+(\w+)\s*=\s*\[["\']?(https?:\/\/[^"\'\s>]+\.m3u8(?:\?[^"\'\s>]*)?)["\']\]?',
        re.IGNORECASE,
    )

    if not (match := valid_m3u8.search(r.text)):
        log.info(f"URL {url_num}) No M3U8 found")
        return

    log.info(f"URL {url_num}) Captured M3U8")
    return match[2]


async def get_events(
    client: httpx.AsyncClient,
    cached_hrefs: set[str],
) -> list[dict[str, str]]:
    try:
        r = await client.get(BASE_URL)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{BASE_URL}": {e}')

        return []

    soup = HTMLParser(r.content)

    valid_event = re.compile(r"\d{1,2}:\d{1,2}")
    clean_event = re.compile(r"\s+-+\s+\w{1,4}")

    events = []

    for item in soup.css(".user-item"):
        text = item.css_first(".user-item__name")
        subtext = item.css_first(".user-item__playing")
        link = item.css_first("a[href]")

        if not (href := link.attributes.get("href")):
            continue

        href = quote(href)

        if cached_hrefs & {href}:
            continue

        if not (text and subtext):
            continue

        event_name, details = text.text(strip=True), subtext.text(strip=True)

        if not valid_event.search(details):
            continue

        sport = valid_event.split(details)[0].strip()

        events.append(
            {
                "sport": sport,
                "event": clean_event.sub("", event_name),
                "link": urljoin(BASE_URL, href),
                "href": href,
            }
        )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_hrefs = {entry["href"] for entry in cached_urls.values()}
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client, cached_hrefs)

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        now = Time.now().timestamp()

        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                client=client,
                url=ev["link"],
                url_num=i,
            )

            url = await network.safe_process(
                handler,
                url_num=i,
                log=log,
                timeout=10,
            )

            if url:
                sport, event, link = (
                    ev["sport"],
                    ev["event"],
                    ev["link"],
                )
                key = f"[{sport}] {event} ({TAG})"

                tvg_id, logo = leagues.get_tvg_info(sport, event)

                entry = {
                    "url": url,
                    "logo": logo,
                    "base": BASE_URL,
                    "timestamp": now,
                    "id": tvg_id or "Live.Event.us",
                    "href": ev["href"],
                    "link": link,
                }

                urls[key] = cached_urls[key] = entry

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
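fawa.py leans on the Cache helper from scrapers/utils, which this commit does not include. From its call sites (load() filtering per entry, load(per_entry=False) for the whole-file cache in lotus.py, write(), the exp argument in seconds, and the "timestamp" field stored on every entry) its shape is roughly the sketch below; the file location and JSON layout are assumptions:

import json
import time
from pathlib import Path


class Cache:
    def __init__(self, name: str, exp: int) -> None:
        self.path = Path(__file__).parent / ".cache" / name  # assumed location
        self.exp = exp

    def load(self, per_entry: bool = True) -> dict:
        if not self.path.exists():
            return {}
        data = json.loads(self.path.read_text(encoding="utf-8"))
        now = time.time()
        if per_entry:
            # drop entries older than exp seconds
            return {k: v for k, v in data.items() if now - v["timestamp"] < self.exp}
        # whole file carries a single timestamp
        return data if now - data.get("timestamp", 0) < self.exp else {}

    def write(self, data: dict) -> None:
        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.path.write_text(json.dumps(data), encoding="utf-8")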
M3U8/scrapers/lotus.py (new file, 168 lines)
|
|
@ -0,0 +1,168 @@
|
|||
from functools import partial
|
||||
|
||||
import httpx
|
||||
from playwright.async_api import async_playwright
|
||||
|
||||
from .utils import Cache, Time, get_logger, leagues, network
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
urls: dict[str, dict[str, str | float]] = {}
|
||||
|
||||
CACHE_FILE = Cache("lotus.json", exp=5_400)
|
||||
|
||||
API_CACHE = Cache("lotus-api.json", exp=28_800)
|
||||
|
||||
BASE_URL = "https://lotusgamehd.xyz/api-event.php"
|
||||
|
||||
TAG = "LOTUS"
|
||||
|
||||
|
||||
def fix_league(s: str) -> str:
|
||||
return " ".join(x.capitalize() for x in s.split()) if len(s) > 5 else s.upper()
|
||||
|
||||
|
||||
async def refresh_api_cache(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
ts: float,
|
||||
) -> dict[str, dict[str, str]]:
|
||||
log.info("Refreshing API cache")
|
||||
|
||||
try:
|
||||
r = await client.get(url)
|
||||
r.raise_for_status()
|
||||
except Exception as e:
|
||||
log.error(f'Failed to fetch "{url}": {e}')
|
||||
|
||||
return {}
|
||||
|
||||
data = r.json()
|
||||
|
||||
data["timestamp"] = ts
|
||||
|
||||
return data
|
||||
|
||||
|
||||
async def get_events(
|
||||
client: httpx.AsyncClient,
|
||||
url: str,
|
||||
cached_keys: set[str],
|
||||
) -> list[dict[str, str]]:
|
||||
now = Time.now()
|
||||
|
||||
if not (api_data := API_CACHE.load(per_entry=False)):
|
||||
api_data = await refresh_api_cache(
|
||||
client,
|
||||
url,
|
||||
now.timestamp(),
|
||||
)
|
||||
|
||||
API_CACHE.write(api_data)
|
||||
|
||||
events = []
|
||||
|
||||
for info in api_data.get("days", []):
|
||||
event_dt = Time.from_str(info["day_et"], timezone="ET")
|
||||
|
||||
if now.date() != event_dt.date():
|
||||
continue
|
||||
|
||||
for event in info["items"]:
|
||||
event_league = event["league"]
|
||||
|
||||
if event_league == "channel tv":
|
||||
continue
|
||||
|
||||
event_streams: list[dict[str, str]] = event["streams"]
|
||||
|
||||
if not (event_link := event_streams[0].get("link")):
|
||||
continue
|
||||
|
||||
sport = fix_league(event_league)
|
||||
event_name = event["title"]
|
||||
|
||||
key = f"[{sport}] {event_name} ({TAG})"
|
||||
|
||||
if cached_keys & {key}:
|
||||
continue
|
||||
|
||||
events.append(
|
||||
{
|
||||
"sport": sport,
|
||||
"event": event_name,
|
||||
"link": event_link,
|
||||
}
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
|
||||
async def scrape(client: httpx.AsyncClient) -> None:
|
||||
cached_urls = CACHE_FILE.load()
|
||||
cached_count = len(cached_urls)
|
||||
urls.update(cached_urls)
|
||||
|
||||
log.info(f"Loaded {cached_count} event(s) from cache")
|
||||
|
||||
log.info(f'Scraping from "{BASE_URL}"')
|
||||
|
||||
events = await get_events(
|
||||
client,
|
||||
BASE_URL,
|
||||
set(cached_urls.keys()),
|
||||
)
|
||||
|
||||
log.info(f"Processing {len(events)} new URL(s)")
|
||||
|
||||
if events:
|
||||
now = Time.now().timestamp()
|
||||
|
||||
async with async_playwright() as p:
|
||||
browser, context = await network.browser(p)
|
||||
|
||||
for i, ev in enumerate(events, start=1):
|
||||
handler = partial(
|
||||
network.process_event,
|
||||
url=ev["link"],
|
||||
url_num=i,
|
||||
context=context,
|
||||
log=log,
|
||||
)
|
||||
|
||||
url = await network.safe_process(
|
||||
handler,
|
||||
url_num=i,
|
||||
log=log,
|
||||
)
|
||||
|
||||
if url:
|
||||
sport, event, link = (
|
||||
ev["sport"],
|
||||
ev["event"],
|
||||
ev["link"],
|
||||
)
|
||||
|
||||
tvg_id, logo = leagues.get_tvg_info(sport, event)
|
||||
|
||||
key = f"[{sport}] {event} ({TAG})"
|
||||
|
||||
entry = {
|
||||
"url": url,
|
||||
"logo": logo,
|
||||
"base": "https://vividmosaica.com/",
|
||||
"timestamp": now,
|
||||
"id": tvg_id or "Live.Event.us",
|
||||
"link": link,
|
||||
}
|
||||
|
||||
urls[key] = cached_urls[key] = entry
|
||||
|
||||
await browser.close()
|
||||
|
||||
if new_count := len(cached_urls) - cached_count:
|
||||
log.info(f"Collected and cached {new_count} new event(s)")
|
||||
else:
|
||||
log.info("No new events found")
|
||||
|
||||
CACHE_FILE.write(cached_urls)
|
||||
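A quick sketch of how `fix_league` normalizes the API's league strings; the input values are illustrative, not from the API:

# Short strings become acronyms, longer ones title case.
assert fix_league("nba") == "NBA"
assert fix_league("premier league") == "Premier League"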
181  M3U8/scrapers/old/fstv.py  Normal file

@@ -0,0 +1,181 @@
from functools import partial
from urllib.parse import unquote, urljoin

import httpx
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("fstv.json", exp=10_800)

MIRRORS = ["https://fstv.zip", "https://fstv.space"]

TAG = "FSTV"


async def process_event(
    client: httpx.AsyncClient,
    url: str,
    url_num: int,
) -> tuple[str, str]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')

        return "", ""

    soup = HTMLParser(r.content)

    match_name = None

    if category_links := soup.css(".common-list-category .category-item a"):
        match_name = category_links[-1].text(strip=True)

    if not match_name or match_name.lower() == "vs":
        if og_title := soup.css_first("meta[property='og:title']"):
            match_name = (
                og_title.attributes.get("content", "").split(" start on")[0].strip()
            )

    if not (ifr := soup.css_first("iframe")):
        log.info(f"URL {url_num}) No M3U8 found")
        return "", ""

    if src := ifr.attributes.get("src"):
        log.info(f"URL {url_num}) Captured M3U8")
        return match_name or "", unquote(src).split("link=")[-1]

    # The iframe had no src attribute; return an empty result so the caller
    # can always unpack a (match_name, url) tuple.
    return "", ""


async def get_events(
    client: httpx.AsyncClient,
    url: str,
    cached_hrefs: set[str],
) -> list[dict[str, str]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return []

    soup = HTMLParser(r.content)

    events = []

    for wrpr in soup.css("div.fixtures-live-wrapper"):
        for league_block in wrpr.css(".match-table-item > .league-info-wrapper"):
            if not (
                league_name_el := league_block.css_first(".league-info a.league-name")
            ):
                continue

            full_text = league_name_el.text(strip=True)

            if "]" in full_text:
                event_name = full_text.split("]", 1)[1].strip()

            else:
                event_name = full_text

            parent_item = league_block.parent

            for game in parent_item.css(".common-table-row a[href*='/match/']"):
                if not (href := game.attributes.get("href")):
                    continue

                if cached_hrefs & {href}:
                    continue

                cached_hrefs.add(href)

                events.append(
                    {
                        "sport": event_name,
                        "link": urljoin(url, href),
                        "href": href,
                    }
                )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_hrefs = {entry["href"] for entry in cached_urls.values()}
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    if not (base_url := await network.get_base(MIRRORS)):
        log.warning("No working FSTV mirrors")
        CACHE_FILE.write(cached_urls)
        return

    log.info(f'Scraping from "{base_url}"')

    events = await get_events(
        client,
        base_url,
        cached_hrefs,
    )

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        now = Time.now().timestamp()

        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                client=client,
                url=ev["link"],
                url_num=i,
            )

            match_name, url = await network.safe_process(
                handler,
                url_num=i,
                log=log,
            )

            if url:
                sport = ev["sport"]

                key = (
                    f"[{sport}] {match_name} ({TAG})"
                    if match_name
                    else f"[{sport}] ({TAG})"
                )

                tvg_id, logo = leagues.info(sport)

                entry = {
                    "url": url,
                    "logo": logo,
                    "base": base_url,
                    "timestamp": now,
                    "id": tvg_id or "Live.Event.us",
                    "href": ev["href"],
                }

                urls[key] = cached_urls[key] = entry

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)


# cloudflare bot check added
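The M3U8 capture above just URL-decodes the iframe src and keeps whatever follows the last `link=` parameter; a minimal sketch with a made-up embed URL:

from urllib.parse import unquote

# Hypothetical iframe src of the shape this scraper expects.
src = "https://example-embed.test/player?link=https%3A%2F%2Fcdn.test%2Fstream%2Findex.m3u8"
print(unquote(src).split("link=")[-1])  # https://cdn.test/stream/index.m3u8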
176  M3U8/scrapers/old/streameast.py  Normal file

@@ -0,0 +1,176 @@
from functools import partial
from urllib.parse import urljoin

import httpx
from playwright.async_api import async_playwright
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("streameast.json", exp=10_800)

prefixes = {
    "ga": None,
    "ph": None,
    "sg": None,
    "ch": None,
    "ec": None,
    "fi": None,
    "ms": None,
    "ps": None,
    "cf": None,
    "sk": None,
    "co": "the",
    "fun": "the",
    "ru": "the",
    "su": "the",
}

MIRRORS = [
    *[f"https://streameast.{ext}" for ext in prefixes if not prefixes[ext]],
    *[f"https://thestreameast.{ext}" for ext in prefixes if prefixes[ext] == "the"],
]

TAG = "STRMEST"


async def get_events(
    client: httpx.AsyncClient,
    url: str,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return []

    soup = HTMLParser(r.content)

    events = []

    now = Time.clean(Time.now())
    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)

    for section in soup.css("div.se-sport-section"):
        if not (sport := section.attributes.get("data-sport-name", "").strip()):
            continue

        for a in section.css("a.uefa-card"):
            if not (href := a.attributes.get("href")):
                continue

            link = urljoin(url, href)

            team_spans = [t.text(strip=True) for t in a.css("span.uefa-name")]

            if len(team_spans) == 2:
                name = f"{team_spans[0]} vs {team_spans[1]}"

            elif len(team_spans) == 1:
                name = team_spans[0]

            else:
                continue

            if not (time_span := a.css_first(".uefa-time")):
                continue

            time_text = time_span.text(strip=True)

            timestamp = int(a.attributes.get("data-time", Time.default_8()))

            key = f"[{sport}] {name} ({TAG})"

            if cached_keys & {key}:
                continue

            event_dt = Time.from_ts(timestamp)

            if time_text == "LIVE" or (start_dt <= event_dt <= end_dt):
                events.append(
                    {
                        "sport": sport,
                        "event": name,
                        "link": link,
                        "timestamp": timestamp,
                    }
                )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    if not (base_url := await network.get_base(MIRRORS)):
        log.warning("No working Streameast mirrors")
        CACHE_FILE.write(cached_urls)
        return

    log.info(f'Scraping from "{base_url}"')

    events = await get_events(
        client,
        base_url,
        set(cached_urls.keys()),
    )

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p, browser="brave")

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    log=log,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                if url:
                    sport, event, ts = ev["sport"], ev["event"], ev["timestamp"]

                    tvg_id, logo = leagues.get_tvg_info(sport, event)

                    key = f"[{sport}] {event} ({TAG})"

                    entry = {
                        "url": url,
                        "logo": logo,
                        "base": "https://embedsports.top/",
                        "timestamp": ts,
                        "id": tvg_id or "Live.Event.us",
                    }

                    urls[key] = cached_urls[key] = entry

            await browser.close()

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
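The `prefixes` map drives mirror generation: `None` values become `streameast.<tld>` hosts and `"the"` values become `thestreameast.<tld>` hosts. The same expansion spelled out on a trimmed copy of the map:

prefixes = {"ga": None, "co": "the"}  # trimmed copy of the real map

mirrors = [
    *[f"https://streameast.{ext}" for ext in prefixes if not prefixes[ext]],
    *[f"https://thestreameast.{ext}" for ext in prefixes if prefixes[ext] == "the"],
]
print(mirrors)  # ['https://streameast.ga', 'https://thestreameast.co']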
100  M3U8/scrapers/pixel.py  Normal file

@@ -0,0 +1,100 @@
import json
import re

from playwright.async_api import async_playwright

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("pixel.json", exp=19_800)

BASE_URL = "https://pixelsport.tv/backend/livetv/events"

TAG = "PIXL"


async def get_api_data(url: str) -> dict[str, list[dict]]:
    async with async_playwright() as p:
        # Keep browser defined even if launching fails, so the finally
        # block can't hit an unbound name.
        browser = None

        try:
            browser, context = await network.browser(p)

            page = await context.new_page()

            await page.goto(
                url,
                wait_until="domcontentloaded",
                timeout=10_000,
            )

            raw_json = await page.locator("pre").inner_text(timeout=5_000)

        except Exception as e:
            log.error(f'Failed to fetch "{url}": {e}')

            return {}

        finally:
            if browser:
                await browser.close()

        return json.loads(raw_json)


async def get_events() -> dict[str, dict[str, str | float]]:
    now = Time.clean(Time.now())

    api_data = await get_api_data(BASE_URL)

    events = {}

    pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)

    for event in api_data.get("events", []):
        event_dt = Time.from_str(event["date"], timezone="UTC")

        if event_dt.date() != now.date():
            continue

        event_name = event["match_name"]
        channel_info: dict[str, str] = event["channel"]
        category: dict[str, str] = channel_info["TVCategory"]

        sport = category["name"]

        stream_urls = [(i, f"server{i}URL") for i in range(1, 4)]

        for z, stream_url in stream_urls:
            if stream_link := channel_info.get(stream_url):
                if pattern.search(stream_link):
                    key = f"[{sport}] {event_name} {z} ({TAG})"

                    tvg_id, logo = leagues.get_tvg_info(sport, event_name)

                    events[key] = {
                        "url": stream_link,
                        "logo": logo,
                        "base": "https://pixelsport.tv",
                        "timestamp": now.timestamp(),
                        "id": tvg_id or "Live.Event.us",
                    }

    return events


async def scrape() -> None:
    if cached := CACHE_FILE.load():
        urls.update(cached)
        log.info(f"Loaded {len(urls)} event(s) from cache")
        return

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events()

    urls.update(events)

    CACHE_FILE.write(urls)

    log.info(f"Collected and cached {len(urls)} new event(s)")
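The `pattern` above is what decides whether a `serverNURL` field counts as a playable stream; a quick check against made-up values:

import re

pattern = re.compile(r"https?://[^\s'\"]+?\.m3u8(?:\?[^\s'\"]*)?", re.IGNORECASE)

print(bool(pattern.search("https://cdn.test/live/index.m3u8?token=abc")))  # True
print(bool(pattern.search("https://cdn.test/live/index.mpd")))             # False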
177  M3U8/scrapers/ppv.py  Normal file

@@ -0,0 +1,177 @@
from functools import partial

import httpx
from playwright.async_api import async_playwright

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("ppv.json", exp=10_800)

API_FILE = Cache("ppv-api.json", exp=19_800)

API_MIRRORS = [
    "https://old.ppv.to/api/streams",
    "https://api.ppvs.su/api/streams",
    "https://api.ppv.to/api/streams",
]

BASE_MIRRORS = [
    "https://old.ppv.to",
    "https://ppvs.su",
    "https://ppv.to",
]

TAG = "PPV"


async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
) -> dict[str, dict[str, str]]:
    log.info("Refreshing API cache")

    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return {}

    return r.json()


async def get_events(
    client: httpx.AsyncClient,
    api_url: str,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    if not (api_data := API_FILE.load(per_entry=False)):
        api_data = await refresh_api_cache(client, api_url)

        API_FILE.write(api_data)

    events = []

    now = Time.clean(Time.now())
    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)

    for stream_group in api_data.get("streams", []):
        sport = stream_group["category"]

        if sport == "24/7 Streams":
            continue

        for event in stream_group.get("streams", []):
            name = event.get("name")
            start_ts = event.get("starts_at")
            logo = event.get("poster")
            iframe = event.get("iframe")

            if not (name and start_ts and iframe):
                continue

            key = f"[{sport}] {name} ({TAG})"

            if cached_keys & {key}:
                continue

            event_dt = Time.from_ts(start_ts)

            if not start_dt <= event_dt <= end_dt:
                continue

            events.append(
                {
                    "sport": sport,
                    "event": name,
                    "link": iframe,
                    "logo": logo,
                    "timestamp": event_dt.timestamp(),
                }
            )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    base_url = await network.get_base(BASE_MIRRORS)

    api_url = await network.get_base(API_MIRRORS)

    if not (base_url and api_url):
        log.warning("No working PPV mirrors")
        CACHE_FILE.write(cached_urls)
        return

    events = await get_events(
        client,
        api_url,
        set(cached_urls.keys()),
    )

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p, browser="brave")

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    timeout=6,
                    log=log,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                if url:
                    sport, event, logo, ts, link = (
                        ev["sport"],
                        ev["event"],
                        ev["logo"],
                        ev["timestamp"],
                        ev["link"],
                    )

                    key = f"[{sport}] {event} ({TAG})"

                    tvg_id, pic = leagues.get_tvg_info(sport, event)

                    entry = {
                        "url": url,
                        "logo": logo or pic,
                        "base": base_url,
                        "timestamp": ts,
                        "id": tvg_id or "Live.Event.us",
                        "link": link,
                    }

                    urls[key] = cached_urls[key] = entry

            await browser.close()

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
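Several scrapers here (PPV, Streameast, streamsgate, strmd) keep only events whose start time falls within ±30 minutes of now. The same windowing expressed with just the standard library, assuming a plain UNIX timestamp for the event:

from datetime import datetime, timedelta, timezone

now = datetime.now(timezone.utc).replace(second=0, microsecond=0)
start_dt, end_dt = now - timedelta(minutes=30), now + timedelta(minutes=30)

event_dt = datetime.fromtimestamp(1_700_000_000, tz=timezone.utc)  # made-up start time
print(start_dt <= event_dt <= end_dt)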
215  M3U8/scrapers/roxie.py  Normal file

@@ -0,0 +1,215 @@
import asyncio
import re
from functools import partial
from urllib.parse import urljoin

import httpx
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("roxie.json", exp=10_800)

HTML_CACHE = Cache("roxie-html.json", exp=19_800)

BASE_URL = "https://roxiestreams.live"

SPORT_ENDPOINTS = {
    "fighting": "Fighting",
    "mlb": "MLB",
    "motorsports": "Racing",
    "nba": "NBA",
    "nfl": "American Football",
    "soccer": "Soccer",
}

TAG = "ROXIE"


async def process_event(
    client: httpx.AsyncClient,
    url: str,
    url_num: int,
) -> str | None:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
        return

    valid_m3u8 = re.compile(
        r"showPlayer\(['\"]clappr['\"],\s*['\"]([^'\"]+?\.m3u8(?:\?[^'\"]*)?)['\"]\)",
        re.IGNORECASE,
    )

    if not (match := valid_m3u8.search(r.text)):
        log.info(f"URL {url_num}) No M3U8 found")
        return

    log.info(f"URL {url_num}) Captured M3U8")
    return match[1]


async def refresh_html_cache(
    client: httpx.AsyncClient,
    url: str,
    sport: str,
    now_ts: float,
) -> dict[str, dict[str, str | float]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return {}

    soup = HTMLParser(r.content)

    events = {}

    for row in soup.css("table#eventsTable tbody tr"):
        if not (a_tag := row.css_first("td a")):
            continue

        event = a_tag.text(strip=True)

        if not (href := a_tag.attributes.get("href")):
            continue

        if not (span := row.css_first("span.countdown-timer")):
            continue

        data_start = span.attributes["data-start"].rsplit(":", 1)[0]

        event_dt = Time.from_str(data_start, timezone="PST")

        event_sport = SPORT_ENDPOINTS[sport]

        key = f"[{event_sport}] {event} ({TAG})"

        events[key] = {
            "sport": event_sport,
            "event": event,
            "link": href,
            "event_ts": event_dt.timestamp(),
            "timestamp": now_ts,
        }

    return events


async def get_events(
    client: httpx.AsyncClient,
    sport_urls: dict[str, str],
    cached_keys: set[str],
) -> list[dict[str, str]]:
    now = Time.clean(Time.now())

    if not (events := HTML_CACHE.load()):
        log.info("Refreshing HTML cache")

        tasks = [
            refresh_html_cache(
                client,
                url,
                sport,
                now.timestamp(),
            )
            for sport, url in sport_urls.items()
        ]

        results = await asyncio.gather(*tasks)

        events = {k: v for data in results for k, v in data.items()}

        HTML_CACHE.write(events)

    live = []

    start_ts = now.delta(minutes=-30).timestamp()
    end_ts = now.delta(minutes=30).timestamp()

    for k, v in events.items():
        if cached_keys & {k}:
            continue

        if not start_ts <= v["event_ts"] <= end_ts:
            continue

        live.append({**v})

    return live


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    log.info(f'Scraping from "{BASE_URL}"')

    sport_urls = {sport: urljoin(BASE_URL, sport) for sport in SPORT_ENDPOINTS}

    events = await get_events(
        client,
        sport_urls,
        set(cached_urls.keys()),
    )

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                client=client,
                url=ev["link"],
                url_num=i,
            )

            url = await network.safe_process(
                handler,
                url_num=i,
                log=log,
            )

            if url:
                sport, event, ts, link = (
                    ev["sport"],
                    ev["event"],
                    ev["event_ts"],
                    ev["link"],
                )

                tvg_id, logo = leagues.get_tvg_info(sport, event)

                key = f"[{sport}] {event} ({TAG})"

                entry = {
                    "url": url,
                    "logo": logo,
                    "base": BASE_URL,
                    "timestamp": ts,
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }

                urls[key] = cached_urls[key] = entry

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
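`refresh_html_cache` strips the seconds field from the countdown timer's `data-start` attribute before parsing it; with a made-up attribute value:

data_start = "2025/01/04 19:30:00"  # hypothetical data-start attribute
print(data_start.rsplit(":", 1)[0])  # 2025/01/04 19:30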
192  M3U8/scrapers/shark.py  Normal file

@@ -0,0 +1,192 @@
import re
from functools import partial

import httpx
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("shark.json", exp=10_800)

HTML_CACHE = Cache("shark-html.json", exp=19_800)

BASE_URL = "https://sharkstreams.net"

TAG = "SHARK"


async def process_event(
    client: httpx.AsyncClient,
    url: str,
    url_num: int,
) -> str | None:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
        return

    data: dict[str, list[str]] = r.json()

    if not data.get("urls"):
        log.info(f"URL {url_num}) No M3U8 found")

        return

    log.info(f"URL {url_num}) Captured M3U8")

    return data["urls"][0]


async def refresh_html_cache(
    client: httpx.AsyncClient,
    url: str,
    now_ts: float,
) -> dict[str, dict[str, str | float]]:
    log.info("Refreshing HTML cache")

    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return {}

    pattern = re.compile(r"openEmbed\('([^']+)'\)", re.IGNORECASE)

    soup = HTMLParser(r.content)

    events = {}

    for row in soup.css(".row"):
        date_node = row.css_first(".ch-date")
        sport_node = row.css_first(".ch-category")
        name_node = row.css_first(".ch-name")

        if not (date_node and sport_node and name_node):
            continue

        event_dt = Time.from_str(date_node.text(strip=True), timezone="EST")
        sport = sport_node.text(strip=True)
        event_name = name_node.text(strip=True)

        embed_btn = row.css_first("a.hd-link.secondary")

        if not embed_btn or not (onclick := embed_btn.attributes.get("onclick")):
            continue

        if not (match := pattern.search(onclick)):
            continue

        link = match[1].replace("player.php", "get-stream.php")

        key = f"[{sport}] {event_name} ({TAG})"

        events[key] = {
            "sport": sport,
            "event": event_name,
            "link": link,
            "event_ts": event_dt.timestamp(),
            "timestamp": now_ts,
        }

    return events


async def get_events(
    client: httpx.AsyncClient,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    now = Time.clean(Time.now())

    if not (events := HTML_CACHE.load()):
        events = await refresh_html_cache(
            client,
            BASE_URL,
            now.timestamp(),
        )

        HTML_CACHE.write(events)

    live = []

    start_ts = now.delta(hours=-1).timestamp()
    end_ts = now.delta(minutes=10).timestamp()

    for k, v in events.items():
        if cached_keys & {k}:
            continue

        if not start_ts <= v["event_ts"] <= end_ts:
            continue

        live.append({**v})

    return live


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client, set(cached_urls.keys()))

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                client=client,
                url=ev["link"],
                url_num=i,
            )

            url = await network.safe_process(
                handler,
                url_num=i,
                log=log,
            )

            if url:
                sport, event, ts, link = (
                    ev["sport"],
                    ev["event"],
                    ev["event_ts"],
                    ev["link"],
                )

                tvg_id, logo = leagues.get_tvg_info(sport, event)

                key = f"[{sport}] {event} ({TAG})"

                entry = {
                    "url": url,
                    "logo": logo,
                    "base": BASE_URL,
                    "timestamp": ts,
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }

                urls[key] = cached_urls[key] = entry

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
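The embed link is pulled out of an inline `onclick` handler and redirected from the player page to the JSON endpoint; a sketch with a fabricated handler string:

import re

pattern = re.compile(r"openEmbed\('([^']+)'\)", re.IGNORECASE)

onclick = "openEmbed('https://sharkstreams.net/player.php?id=123')"  # made-up value
if match := pattern.search(onclick):
    print(match[1].replace("player.php", "get-stream.php"))
    # https://sharkstreams.net/get-stream.php?id=123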
170  M3U8/scrapers/sport9.py  Normal file

@@ -0,0 +1,170 @@
import asyncio
from functools import partial
from urllib.parse import urljoin

import httpx
from playwright.async_api import async_playwright
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("sport9.json", exp=3_600)

BASE_URL = "https://sport9.ru"

TAG = "SPRT9"


async def get_html(
    client: httpx.AsyncClient,
    url: str,
    date: str,
) -> bytes:
    try:
        r = await client.get(url, params={"date": date})
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return b""

    return r.content


async def get_events(
    client: httpx.AsyncClient,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    now = Time.now()

    tasks = [
        get_html(client, BASE_URL, str(d.date()))
        for d in [
            now.delta(days=-1),
            now,
            now.delta(days=1),
        ]
    ]

    results = await asyncio.gather(*tasks)

    soups = [HTMLParser(html) for html in results]

    events = []

    for soup in soups:
        for card in soup.css("a.match-card"):
            live_badge = card.css_first(".live-badge")

            if not live_badge or live_badge.text(strip=True) != "Live":
                continue

            if not (sport_node := card.css_first(".tournament-name")):
                continue

            sport = sport_node.text(strip=True)
            team_1_node = card.css_first(".team1 .team-name")
            team_2_node = card.css_first(".team2 .team-name")

            if team_1_node and team_2_node:
                event = (
                    f"{team_1_node.text(strip=True)} vs {team_2_node.text(strip=True)}"
                )

            elif team_1_node:
                event = team_1_node.text(strip=True)

            elif team_2_node:
                event = team_2_node.text(strip=True)

            else:
                continue

            if not (href := card.attributes.get("href")):
                continue

            key = f"[{sport}] {event} ({TAG})"

            if cached_keys & {key}:
                continue

            events.append(
                {
                    "sport": sport,
                    "event": event,
                    "link": urljoin(BASE_URL, href),
                }
            )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client, set(cached_urls.keys()))

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        now = Time.now().timestamp()

        async with async_playwright() as p:
            browser, context = await network.browser(p, browser="brave")

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    log=log,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                if url:
                    sport, event, link = (
                        ev["sport"],
                        ev["event"],
                        ev["link"],
                    )

                    key = f"[{sport}] {event} ({TAG})"

                    tvg_id, logo = leagues.get_tvg_info(sport, event)

                    entry = {
                        "url": url,
                        "logo": logo,
                        "base": "https://vividmosaica.com/",
                        "timestamp": now,
                        "id": tvg_id or "Live.Event.us",
                        "link": link,
                    }

                    urls[key] = cached_urls[key] = entry

            await browser.close()

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
141  M3U8/scrapers/streambtw.py  Normal file

@@ -0,0 +1,141 @@
import base64
import re
from functools import partial
from urllib.parse import urljoin

import httpx
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("streambtw.json", exp=19_800)

BASE_URL = "https://streambtw.com"

TAG = "STRMBTW"


def fix_league(s: str) -> str:
    pattern = re.compile(r"^\w*-\w*", re.IGNORECASE)

    return " ".join(s.split("-")) if pattern.search(s) else s


async def process_event(
    client: httpx.AsyncClient,
    url: str,
    url_num: int,
) -> str | None:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'URL {url_num}) Failed to fetch "{url}": {e}')
        return

    valid_m3u8 = re.compile(r'var\s+(\w+)\s*=\s*"([^"]*)"', re.IGNORECASE)

    if not (match := valid_m3u8.search(r.text)):
        log.info(f"URL {url_num}) No M3U8 found")
        return

    encoded = match[2][::-1]
    decoded = base64.b64decode(encoded[::-1]).decode("utf-8")
    log.info(f"URL {url_num}) Captured M3U8")
    return decoded


async def get_events(client: httpx.AsyncClient) -> list[dict[str, str]]:
    try:
        r = await client.get(BASE_URL)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{BASE_URL}": {e}')

        return []

    soup = HTMLParser(r.content)

    events = []

    for card in soup.css("div.container div.card"):
        # Guard against cards without a stream button before reading attributes.
        if not (link := card.css_first("a.btn.btn-primary")):
            continue

        if not (href := link.attributes.get("href")):
            continue

        league = card.css_first("h5.card-title").text(strip=True)

        name = card.css_first("p.card-text").text(strip=True)

        events.append(
            {
                "sport": fix_league(league),
                "event": name,
                "link": urljoin(BASE_URL, href),
            }
        )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    if cached := CACHE_FILE.load():
        urls.update(cached)
        log.info(f"Loaded {len(urls)} event(s) from cache")
        return

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client)

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        now = Time.now().timestamp()

        for i, ev in enumerate(events, start=1):
            handler = partial(
                process_event,
                client=client,
                url=ev["link"],
                url_num=i,
            )

            url = await network.safe_process(
                handler,
                url_num=i,
                log=log,
                timeout=10,
            )

            if url:
                sport, event, link = (
                    ev["sport"],
                    ev["event"],
                    ev["link"],
                )

                key = f"[{sport}] {event} ({TAG})"

                tvg_id, logo = leagues.get_tvg_info(sport, event)

                entry = {
                    "url": url,
                    "logo": logo,
                    "base": BASE_URL,
                    "timestamp": now,
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }

                urls[key] = entry

    log.info(f"Collected {len(urls)} event(s)")

    CACHE_FILE.write(urls)
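Note that `match[2][::-1]` followed by `encoded[::-1]` reverses the captured string twice, so the value is base64-decoded as-is; the decode step, with a fabricated payload:

import base64

payload = base64.b64encode(b"https://cdn.test/index.m3u8").decode()  # made-up value
encoded = payload[::-1]
print(base64.b64decode(encoded[::-1]).decode("utf-8"))  # https://cdn.test/index.m3u8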
176  M3U8/scrapers/streamcenter.py  Normal file

@@ -0,0 +1,176 @@
from functools import partial

import httpx
from playwright.async_api import async_playwright

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("streamcenter.json", exp=10_800)

API_FILE = Cache("streamcenter-api.json", exp=28_800)

BASE_URL = "https://backendstreamcenter.youshop.pro:488/api/Parties"

TAG = "STRMCNTR"

categories = {
    4: "Basketball",
    9: "Football",
    13: "Baseball",
    14: "American Football",
    15: "Motor Sport",
    16: "Hockey",
    17: "Fight MMA",
    18: "Boxing",
    19: "NCAA Sports",
    20: "WWE",
    21: "Tennis",
}


async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
    now_ts: float,
) -> list[dict[str, str | int]]:
    log.info("Refreshing API cache")

    try:
        r = await client.get(url, params={"pageNumber": 1, "pageSize": 500})
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return []

    data = r.json()

    data[-1]["timestamp"] = now_ts

    return data


async def get_events(
    client: httpx.AsyncClient,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    now = Time.clean(Time.now())

    if not (api_data := API_FILE.load(per_entry=False, index=-1)):
        api_data = await refresh_api_cache(
            client,
            BASE_URL,
            now.timestamp(),
        )

        API_FILE.write(api_data)

    events = []

    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)

    for stream_group in api_data:
        category_id: int = stream_group.get("categoryId")

        name: str = stream_group.get("gameName")

        iframe: str = stream_group.get("videoUrl")

        event_time: str = stream_group.get("beginPartie")

        if not (name and category_id and iframe and event_time):
            continue

        event_dt = Time.from_str(event_time, timezone="CET")

        if not start_dt <= event_dt <= end_dt:
            continue

        if not (sport := categories.get(category_id)):
            continue

        key = f"[{sport}] {name} ({TAG})"

        if cached_keys & {key}:
            continue

        events.append(
            {
                "sport": sport,
                "event": name,
                # str.replace takes its count positionally; count= would raise.
                "link": iframe.replace("<", "?", 1),
                "timestamp": event_dt.timestamp(),
            }
        )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    log.info('Scraping from "https://streamcenter.xyz"')

    events = await get_events(client, set(cached_urls.keys()))

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p, browser="brave")

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    log=log,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                if url:
                    sport, event, ts, link = (
                        ev["sport"],
                        ev["event"],
                        ev["timestamp"],
                        ev["link"],
                    )

                    key = f"[{sport}] {event} ({TAG})"

                    tvg_id, logo = leagues.get_tvg_info(sport, event)

                    entry = {
                        "url": url,
                        "logo": logo,
                        "base": "https://streamcenter.xyz",
                        "timestamp": ts,
                        "id": tvg_id or "Live.Event.us",
                        "link": link,
                    }

                    urls[key] = cached_urls[key] = entry

            await browser.close()

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
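The link rewrite above uses `iframe.replace("<", "?", 1)` because `str.replace` takes no keyword arguments (passing `count=` raises a `TypeError`). With a fabricated `videoUrl`:

iframe = "https://player.test/embed<id=42"  # made-up videoUrl shape
print(iframe.replace("<", "?", 1))  # https://player.test/embed?id=42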
87  M3U8/scrapers/streamfree.py  Normal file

@@ -0,0 +1,87 @@
from urllib.parse import urljoin

import httpx

from .utils import Cache, Time, get_logger, leagues

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("streamfree.json", exp=19_800)

BASE_URL = "https://streamfree.to"

TAG = "STRMFR"


async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
) -> dict[str, dict[str, list]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return {}

    return r.json()


async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
    api_data = await refresh_api_cache(client, urljoin(BASE_URL, "streams"))

    events = {}

    for streams in api_data.get("streams", {}).values():
        if not streams:
            continue

        for stream in streams:
            sport, name, stream_key = (
                stream.get("league"),
                stream.get("name"),
                stream.get("stream_key"),
            )

            if not (sport and name and stream_key):
                continue

            key = f"[{sport}] {name} ({TAG})"

            logo = (
                urljoin(BASE_URL, thumbnail)
                if (thumbnail := stream.get("thumbnail_url"))
                else None
            )

            tvg_id, pic = leagues.get_tvg_info(sport, name)

            events[key] = {
                "url": urljoin(BASE_URL, f"live/{stream_key}720p/index.m3u8"),
                "logo": logo or pic,
                "base": BASE_URL,
                "timestamp": Time.now().timestamp(),
                "id": tvg_id or "Live.Event.us",
            }

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    if cached := CACHE_FILE.load():
        urls.update(cached)
        log.info(f"Loaded {len(urls)} event(s) from cache")
        return

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client)

    urls.update(events)

    CACHE_FILE.write(urls)

    log.info(f"Collected and cached {len(urls)} new event(s)")
204  M3U8/scrapers/streamsgate.py  Normal file

@@ -0,0 +1,204 @@
import asyncio
from functools import partial
from itertools import chain
from typing import Any
from urllib.parse import urljoin

import httpx
from playwright.async_api import async_playwright

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("streamsgate.json", exp=10_800)

API_FILE = Cache("streamsgate-api.json", exp=28_800)

BASE_URL = "https://streamingon.org"

SPORT_ENDPOINTS = [
    "soccer",
    "nfl",
    "nba",
    "cfb",
    "mlb",
    "nhl",
    "ufc",
    "boxing",
    "f1",
]

TAG = "STRMSG8"


def get_event(t1: str, t2: str) -> str:
    match t1:
        case "RED ZONE":
            return "NFL RedZone"

        case "TBD":
            return "TBD"

        case _:
            return f"{t1.strip()} vs {t2.strip()}"


async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return []

    return r.json()


async def refresh_api_cache(
    client: httpx.AsyncClient, ts: float
) -> list[dict[str, Any]]:
    log.info("Refreshing API cache")

    tasks = [
        get_api_data(client, urljoin(BASE_URL, f"data/{sport}.json"))
        for sport in SPORT_ENDPOINTS
    ]

    results = await asyncio.gather(*tasks)

    data = list(chain(*results))

    for ev in data:
        ev["ts"] = ev.pop("timestamp")

    data[-1]["timestamp"] = ts

    return data


async def get_events(
    client: httpx.AsyncClient, cached_keys: set[str]
) -> list[dict[str, str]]:
    now = Time.clean(Time.now())

    if not (api_data := API_FILE.load(per_entry=False, index=-1)):
        api_data = await refresh_api_cache(client, now.timestamp())

        API_FILE.write(api_data)

    events = []

    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)

    for stream_group in api_data:
        event_ts = stream_group.get("ts")

        sport = stream_group.get("league")

        t1, t2 = stream_group.get("away"), stream_group.get("home")

        if not (event_ts and sport):
            continue

        event_dt = Time.from_ts(event_ts)

        if not start_dt <= event_dt <= end_dt:
            continue

        event = get_event(t1, t2)

        if not (streams := stream_group.get("streams")):
            continue

        if not (url := streams[0].get("url")):
            continue

        key = f"[{sport}] {event} ({TAG})"

        if cached_keys & {key}:
            continue

        events.append(
            {
                "sport": sport,
                "event": event,
                "link": url,
                "timestamp": event_dt.timestamp(),
            }
        )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
    valid_count = cached_count = len(valid_urls)
    urls.update(valid_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client, set(cached_urls.keys()))

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p)

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    log=log,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                sport, event, ts, link = (
                    ev["sport"],
                    ev["event"],
                    ev["timestamp"],
                    ev["link"],
                )

                key = f"[{sport}] {event} ({TAG})"

                tvg_id, logo = leagues.get_tvg_info(sport, event)

                entry = {
                    "url": url,
                    "logo": logo,
                    "base": BASE_URL,
                    "timestamp": ts,
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }

                cached_urls[key] = entry

                if url:
                    valid_count += 1
                    urls[key] = entry

            await browser.close()

    if new_count := valid_count - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
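`get_event` special-cases the two placeholder away-team names the feed uses; everything else becomes a "vs" matchup. Given the definition above:

print(get_event("RED ZONE", ""))     # NFL RedZone
print(get_event("TBD", ""))          # TBD
print(get_event("Jets ", " Bills"))  # Jets vs Bills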
215  M3U8/scrapers/strmd.py  Normal file

@@ -0,0 +1,215 @@
import asyncio
import re
from functools import partial
from typing import Any
from urllib.parse import urljoin

import httpx
from playwright.async_api import async_playwright

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("strmd.json", exp=10_800)

API_FILE = Cache("strmd-api.json", exp=28_800)

MIRRORS = [
    "https://streami.su",
    "https://streamed.st",
    "https://streamed.pk",
]

TAG = "STRMD"


def fix_sport(s: str) -> str:
    if "-" in s:
        return " ".join(i.capitalize() for i in s.split("-"))

    elif s == "fight":
        return "Fight (UFC/Boxing)"

    return s.capitalize() if len(s) >= 4 else s.upper()


async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
    ts: float,
) -> list[dict[str, Any]]:
    log.info("Refreshing API cache")

    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return []

    data = r.json()

    data[-1]["timestamp"] = ts

    return data


async def get_events(
    client: httpx.AsyncClient,
    url: str,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    now = Time.clean(Time.now())

    if not (api_data := API_FILE.load(per_entry=False, index=-1)):
        api_data = await refresh_api_cache(
            client,
            urljoin(url, "api/matches/all-today"),
            now.timestamp(),
        )

        API_FILE.write(api_data)

    events = []

    start_dt = now.delta(minutes=-30)
    end_dt = now.delta(minutes=30)
    pattern = re.compile(r"[\n\r]+|\s{2,}")

    for event in api_data:
        if (category := event.get("category")) == "other":
            continue

        if not (ts := event["date"]):
            continue

        start_ts = int(f"{ts}"[:-3])

        event_dt = Time.from_ts(start_ts)

        if not start_dt <= event_dt <= end_dt:
            continue

        sport = fix_sport(category)

        parts = pattern.split(event["title"].strip())
        name = " | ".join(p.strip() for p in parts if p.strip())

        logo = urljoin(url, poster) if (poster := event.get("poster")) else None

        key = f"[{sport}] {name} ({TAG})"

        if cached_keys & {key}:
            continue

        sources: list[dict[str, str]] = event["sources"]

        if not sources:
            continue

        skip_types = {"alpha", "bravo"}
        valid_sources = [d for d in sources if d.get("source") not in skip_types]

        if not valid_sources:
            continue

        srce = valid_sources[0]

        source_type = srce.get("source")
        stream_id = srce.get("id")

        if not (source_type and stream_id):
            continue

        events.append(
            {
                "sport": sport,
                "event": name,
                "link": f"https://embedsports.top/embed/{source_type}/{stream_id}/1",
                "logo": logo,
                "timestamp": event_dt.timestamp(),
            }
        )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    if not (base_url := await network.get_base(MIRRORS)):
        log.warning("No working STRMD mirrors")
        CACHE_FILE.write(cached_urls)
        return

    log.info(f'Scraping from "{base_url}"')

    events = await get_events(
        client,
        base_url,
        set(cached_urls.keys()),
    )

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p, browser="brave")

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    log=log,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                if url:
                    sport, event, logo, ts, link = (
                        ev["sport"],
                        ev["event"],
                        ev["logo"],
                        ev["timestamp"],
                        ev["link"],
                    )

                    key = f"[{sport}] {event} ({TAG})"

                    tvg_id, pic = leagues.get_tvg_info(sport, event)

                    entry = {
                        "url": url,
                        "logo": logo or pic,
                        "base": "https://embedsports.top/",
                        "timestamp": ts,
                        "id": tvg_id or "Live.Event.us",
                        "link": link,
                    }

                    urls[key] = cached_urls[key] = entry

            await browser.close()

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
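`fix_sport` title-cases hyphenated categories and upper-cases short codes, and the feed's millisecond timestamps are truncated to seconds by dropping the last three digits; both shown on made-up inputs:

print(fix_sport("american-football"))  # American Football
print(fix_sport("mlb"))                # MLB

ts = 1700000000000  # made-up millisecond timestamp from the API
print(int(f"{ts}"[:-3]))               # 1700000000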
82  M3U8/scrapers/tvpass.py  Normal file

@@ -0,0 +1,82 @@
import re

import httpx

from .utils import Cache, Time, get_logger, leagues

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("tvpass.json", exp=86_400)

BASE_URL = "https://tvpass.org/playlist/m3u"

TAG = "TVP"


async def get_data(client: httpx.AsyncClient) -> list[str]:
    try:
        r = await client.get(BASE_URL)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{BASE_URL}": {e}')

        return []

    return r.text.splitlines()


async def get_events(client: httpx.AsyncClient) -> dict[str, dict[str, str | float]]:
    now = Time.now().timestamp()

    events = {}

    data = await get_data(client)

    for i, line in enumerate(data, start=1):
        if line.startswith("#EXTINF"):
            tvg_id_match = re.search(r'tvg-id="([^"]*)"', line)
            tvg_name_match = re.search(r'tvg-name="([^"]*)"', line)
            group_title_match = re.search(r'group-title="([^"]*)"', line)

            tvg = tvg_id_match[1] if tvg_id_match else None

            if not tvg and (url := data[i]).endswith("/sd"):
                # Both attributes must be present before they are subscripted.
                if tvg_name_match and group_title_match and (tvg_name := tvg_name_match[1]):
                    sport = group_title_match[1].upper().strip()

                    event = "(".join(tvg_name.split("(")[:-1]).strip()

                    key = f"[{sport}] {event} ({TAG})"

                    channel = url.split("/")[-2]

                    tvg_id, logo = leagues.info(sport)

                    events[key] = {
                        "url": f"http://origin.thetvapp.to/hls/{channel}/mono.m3u8",
                        "logo": logo,
                        "id": tvg_id or "Live.Event.us",
                        "base": "https://tvpass.org",
                        "timestamp": now,
                    }

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    if cached := CACHE_FILE.load():
        urls.update(cached)
        log.info(f"Loaded {len(urls)} event(s) from cache")
        return

    log.info(f'Scraping from "{BASE_URL}"')

    events = await get_events(client)

    urls.update(events)

    CACHE_FILE.write(urls)

    log.info(f"Collected and cached {len(urls)} new event(s)")
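The EXTINF parsing above is plain regex work over playlist lines; a sketch with a fabricated entry showing how the trailing "(...)" group is stripped from the event name:

import re

line = '#EXTINF:-1 tvg-name="Team A vs Team B (NBA)" group-title="nba",Team A vs Team B'
tvg_name = re.search(r'tvg-name="([^"]*)"', line)[1]

print("(".join(tvg_name.split("(")[:-1]).strip())  # Team A vs Team B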
12  M3U8/scrapers/utils/__init__.py  Normal file

@@ -0,0 +1,12 @@
from .caching import Cache
from .config import Time, leagues
from .logger import get_logger
from .webwork import network

__all__ = [
    "Cache",
    "Time",
    "get_logger",
    "leagues",
    "network",
]
57
M3U8/scrapers/utils/caching.py
Normal file
57
M3U8/scrapers/utils/caching.py
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
import json
from pathlib import Path

from .config import Time


class Cache:
    def __init__(self, file: str, exp: int | float) -> None:
        self.file = Path(__file__).parent.parent / "caches" / file
        self.exp = exp
        self.now_ts = Time.now().timestamp()

    def is_fresh(self, entry: dict) -> bool:
        ts: float | int = entry.get("timestamp", Time.default_8())

        dt_ts = Time.clean(Time.from_ts(ts)).timestamp()

        return self.now_ts - dt_ts < self.exp

    def write(self, data: dict) -> None:
        self.file.parent.mkdir(parents=True, exist_ok=True)

        self.file.write_text(
            json.dumps(
                data,
                indent=2,
                ensure_ascii=False,
            ),
            encoding="utf-8",
        )

    def load(
        self,
        per_entry: bool = True,
        index: int | None = None,
    ) -> dict[str, dict[str, str | float]]:
        try:
            data: dict = json.loads(self.file.read_text(encoding="utf-8"))
        except (FileNotFoundError, json.JSONDecodeError):
            return {}

        if per_entry:
            return {k: v for k, v in data.items() if self.is_fresh(v)}

        # whole-file freshness: the timestamp lives on one entry (callers
        # pass index=-1 for list-shaped caches) or at the top level;
        # "is not None" keeps index=0 usable
        if index is not None:
            ts: float | int = data[index].get("timestamp", Time.default_8())
        else:
            ts: float | int = data.get("timestamp", Time.default_8())

        dt_ts = Time.clean(Time.from_ts(ts)).timestamp()

        return data if self.is_fresh({"timestamp": dt_ts}) else {}


__all__ = ["Cache"]
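A hedged sketch of the freshness contract (file name and payload invented; entries older than `exp` seconds are dropped on the next load()):

from M3U8.scrapers.utils import Cache, Time  # assumed package path

cache = Cache("demo.json", exp=3_600)

cache.write(
    {
        "fresh": {"timestamp": Time.now().timestamp()},
        "stale": {"timestamp": Time.now().delta(hours=-2).timestamp()},
    }
)

print(cache.load())  # only the "fresh" entry survives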
212
M3U8/scrapers/utils/config.py
Normal file
212
M3U8/scrapers/utils/config.py
Normal file
|
|
@ -0,0 +1,212 @@
|
|||
import json
import re
from datetime import date, datetime, timedelta, timezone
from pathlib import Path

import pytz


class Time(datetime):
    ZONES = {
        "CET": pytz.timezone("Europe/Berlin"),
        "ET": pytz.timezone("America/New_York"),
        "PST": pytz.timezone("America/Los_Angeles"),
        "UTC": timezone.utc,
    }

    ZONES["EDT"] = ZONES["EST"] = ZONES["ET"]

    TZ = ZONES["ET"]

    @classmethod
    def now(cls) -> "Time":
        return cls.from_ts(datetime.now(cls.TZ).timestamp())

    @classmethod
    def from_ts(cls, ts: int | float) -> "Time":
        return cls.fromtimestamp(ts, tz=cls.TZ)

    @classmethod
    def default_8(cls) -> float:
        # cls.now() is already correctly localized; passing tzinfo=cls.TZ
        # into replace() would reattach the raw pytz zone (LMT-offset pitfall)
        return (
            cls.now()
            .replace(hour=8, minute=0, second=0, microsecond=0)
            .timestamp()
        )

    def delta(self, **kwargs) -> "Time":
        return self.from_ts((self + timedelta(**kwargs)).timestamp())

    def clean(self) -> "Time":
        return self.__class__.fromtimestamp(
            self.replace(second=0, microsecond=0).timestamp(),
            tz=self.TZ,
        )

    def to_tz(self, tzone: str) -> "Time":
        dt = self.astimezone(self.ZONES[tzone])
        return self.__class__.fromtimestamp(dt.timestamp(), tz=self.ZONES[tzone])

    @classmethod
    def _to_class_tz(cls, dt) -> "Time":
        dt = dt.astimezone(cls.TZ)
        return cls.fromtimestamp(dt.timestamp(), tz=cls.TZ)

    @classmethod
    def _localize(cls, dt: datetime, tz) -> datetime:
        # pytz zones need localize(); datetime.timezone instances don't have it
        return tz.localize(dt) if hasattr(tz, "localize") else dt.replace(tzinfo=tz)

    @classmethod
    def from_only_time(cls, s: str, d: date, timezone: str) -> "Time":
        hour, minute = map(int, s.split(":"))

        tz = cls.ZONES.get(timezone, cls.TZ)

        dt = cls._localize(datetime(2000, 1, 1, hour, minute), tz)

        dt = dt.astimezone(cls.TZ)

        dt = datetime.combine(d, dt.timetz())

        return cls.fromtimestamp(dt.timestamp(), tz=cls.TZ)

    @classmethod
    def from_str(
        cls,
        s: str,
        fmt: str | None = None,
        timezone: str | None = None,
    ) -> "Time":
        tz = cls.ZONES.get(timezone, cls.TZ)

        if fmt:
            dt = datetime.strptime(s, fmt)

            dt = cls._localize(dt, tz)

        else:
            formats = [
                "%B %d, %Y %I:%M %p",
                "%B %d, %Y %I:%M:%S %p",
                "%m/%d/%Y %I:%M %p",
                "%B %d, %Y %H:%M",
                "%B %d, %Y %H:%M:%S",
                "%Y-%m-%d",
                "%Y-%m-%d %H:%M",
                "%Y-%m-%d %H:%M:%S",
                "%Y-%m-%d %H:%M %p",
                "%Y-%m-%d %I:%M %p",
                "%Y/%m/%d %H:%M",
                "%Y/%m/%d %H:%M:%S",
                "%m/%d/%Y %H:%M",
                "%m/%d/%Y %H:%M:%S",
                "%Y-%m-%dT%H:%M:%S",
                "%Y/%m/%dT%H:%M:%S.%fZ",
                "%Y-%m-%dT%H:%M:%S.%fZ",
                "%a, %d %b %Y %H:%M:%S %z",
            ]

            for frmt in formats:
                try:
                    dt = datetime.strptime(s, frmt)
                    break
                except ValueError:
                    continue
            else:
                # nothing matched: fall back to today at 8 AM Eastern
                return cls.from_ts(cls.default_8())

        if not dt.tzinfo:
            dt = cls._localize(dt, tz)

        return cls._to_class_tz(dt)


class Leagues:
    live_img = "https://i.gyazo.com/978f2eb4a199ca5b56b447aded0cb9e3.png"

    def __init__(self) -> None:
        self.data = json.loads(
            (Path(__file__).parent / "leagues.json").read_text(encoding="utf-8")
        )

    def teams(self, league: str) -> list[str]:
        return self.data["teams"].get(league, [])

    def info(self, name: str) -> tuple[str | None, str]:
        name = name.upper()

        if match := next(
            (
                (tvg_id, league_data.get("logo"))
                for tvg_id, league_entries in self.data["leagues"].items()
                for league_entry in league_entries
                for league_name, league_data in league_entry.items()
                if name == league_name or name in league_data.get("names", [])
            ),
            None,
        ):
            tvg_id, logo = match

            return (tvg_id, logo or self.live_img)

        return (None, self.live_img)

    def is_valid(
        self,
        event: str,
        league: str,
    ) -> bool:
        pattern = re.compile(r"\s+(?:-|vs\.?|at|@)\s+", flags=re.IGNORECASE)

        if pattern.search(event):
            # maxsplit=1 keeps the unpack safe when the event string
            # contains more than one separator
            t1, t2 = pattern.split(event, maxsplit=1)

            return any(t in self.teams(league) for t in (t1.strip(), t2.strip()))

        return event.lower() in {
            "nfl redzone",
            "redzone",
            "red zone",
            "college gameday",
        }

    def get_tvg_info(
        self,
        sport: str,
        event: str,
    ) -> tuple[str | None, str]:
        match sport:
            case "American Football" | "NFL":
                return (
                    self.info("NFL")
                    if self.is_valid(event, "NFL")
                    else self.info("NCAA")
                )

            case "Basketball" | "NBA":
                if self.is_valid(event, "NBA"):
                    return self.info("NBA")

                elif self.is_valid(event, "WNBA"):
                    return self.info("WNBA")

                else:
                    return self.info("Basketball")

            case "Ice Hockey" | "Hockey":
                return self.info("NHL")

            case _:
                return self.info(sport)


leagues = Leagues()

__all__ = ["leagues", "Time"]
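A quick sketch of the two exports, with illustrative inputs (the alias lookup is grounded in leagues.json below; import path assumed):

from M3U8.scrapers.utils import Time, leagues

# tries the format table, then localizes to Eastern time
kickoff = Time.from_str("October 12, 2025 01:00 PM", timezone="EST")
print(kickoff.isoformat())

# "EPL" is listed under PREMIER LEAGUE's "names", so the dummy id resolves
tvg_id, logo = leagues.info("EPL")
print(tvg_id)  # -> "Soccer.Dummy.us"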
893
M3U8/scrapers/utils/leagues.json
Normal file
893
M3U8/scrapers/utils/leagues.json
Normal file
|
|
@ -0,0 +1,893 @@
|
|||
{
  "leagues": {
    "Basketball.Dummy.us": [
      {
        "BASKETBALL": {
          "logo": "https://1000logos.net/wp-content/uploads/2024/04/Basketball-Emoji-1536x864.png",
          "names": []
        }
      }
    ],
    "Golf.Dummy.us": [
      {
        "GOLF": {
          "logo": "https://i.gyazo.com/14a883f22796f631e6f97c34dbeb6ada.png",
          "names": []
        }
      },
      {
        "PGA": {
          "logo": "https://1000logos.net/wp-content/uploads/2024/10/PGA-Tour-Logo-500x281.png",
          "names": ["PGA TOUR"]
        }
      }
    ],
    "MLB.Baseball.Dummy.us": [
      {
        "MLB": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/mlb.png",
          "names": ["BASEBALL", "MAJOR LEAGUE BASEBALL"]
        }
      }
    ],
    "NBA.Basketball.Dummy.us": [
      {
        "NBA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/nba.png",
          "names": [
            "NATIONAL BASKETBALL ASSOCIATION",
            "NBA BASKETBALL",
            "NBA PRESEASON"
          ]
        }
      }
    ],
    "NCAA.Sports.Dummy.us": [
      {
        "NCAA": {
          "logo": "https://1000logos.net/wp-content/uploads/2021/12/NCAA-Logo-500x281.png",
          "names": [
            "CBB",
            "CFB",
            "COLLEGE BASKETBALL",
            "COLLEGE FOOTBALL",
            "NCAA - BASKETBALL",
            "NCAA - FOOTBALL",
            "NCAA AMERICAN FOOTBALL",
            "NCAA BASKETBALL",
            "NCAA FOOTBALL",
            "NCAA SPORTS",
            "NCAAB",
            "NCAAB D",
            "NCAAB D-I",
            "NCAAF",
            "NCAAF D-I",
            "NCAAM",
            "NCAAW"
          ]
        }
      }
    ],
    "NFL.Dummy.us": [
      {
        "NFL": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/nfl.png",
          "names": [
            "AMERICAN FOOTBALL",
            "NATIONAL FOOTBALL LEAGUE",
            "NFL PRESEASON",
            "USA NFL"
          ]
        }
      }
    ],
    "NHL.Hockey.Dummy.us": [
      {
        "NHL": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/nhl.png",
          "names": [
            "HOCKEY",
            "NATIONAL HOCKEY LEAGUE",
            "NHL HOCKEY",
            "NHL PRESEASON"
          ]
        }
      }
    ],
    "PPV.EVENTS.Dummy.us": [
      {
        "PAY PER VIEW": {
          "logo": null,
          "names": ["PAY-PER-VIEW", "PAYPERVIEW", "PPV"]
        }
      },
      {
        "WRESTLING": {
          "logo": null,
          "names": ["AEW", "WWE"]
        }
      }
    ],
    "Racing.Dummy.us": [
      {
        "F1": {
          "logo": "https://1000logos.net/wp-content/uploads/2021/06/F1-logo-500x281.png",
          "names": [
            "FORMULA 1",
            "FORMULA 1 GP",
            "FORMULA ONE",
            "FORMULA ONE GP"
          ]
        }
      },
      {
        "MOTO GP": {
          "logo": "https://1000logos.net/wp-content/uploads/2021/03/MotoGP-Logo-500x281.png",
          "names": ["MOTOGP"]
        }
      },
      {
        "RACING": {
          "logo": null,
          "names": []
        }
      }
    ],
    "Soccer.Dummy.us": [
      {
        "2. BUNDESLIGA": {
          "logo": "https://i.gyazo.com/6c343e57acf501f4df3502d7ec646897.png",
          "names": ["GERMAN 2. BUNDESLIGA"]
        }
      },
      {
        "3. LIGA": {
          "logo": "https://i.gyazo.com/9f4f2e8370377b6214b4103003196de7.png",
          "names": []
        }
      },
      {
        "AFC CHAMPIONS LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2200.png&scale=crop&cquality=40&location=origin&w=500&h=500",
          "names": [
            "ACL",
            "ACL ELITE",
            "AFC CHAMPIONS LEAGUE ELITE",
            "ASIAN CHAMPIONS LEAGUE"
          ]
        }
      },
      {
        "AFRICA CUP OF NATIONS": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/76.png",
          "names": ["AFCON"]
        }
      },
      {
        "AUSTRIA 2 LIGA": {
          "logo": "https://i.gyazo.com/5d1464502b841fef6e5d78c8b0764b52.png",
          "names": ["ADMIRAL 2. LIGA"]
        }
      },
      {
        "AUSTRIA BUNDESLIGA": {
          "logo": "https://i.gyazo.com/83d851fb1110f1e395690403f9cf01bb.webp",
          "names": ["ADMIRAL BUNDESLIGA", "FEDERAL LEAGUE"]
        }
      },
      {
        "BUNDESLIGA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/10.png",
          "names": ["BUNDESLIG", "GERMAN BUNDESLIGA"]
        }
      },
      {
        "CAF CHAMPIONS LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2391.png",
          "names": []
        }
      },
      {
        "CANADIAN PREMIER LEAGUE": {
          "logo": "https://i.gyazo.com/f61986e2ccfbf88f7d753b4e7f2c9fdc.png",
          "names": ["CANPL", "CPL"]
        }
      },
      {
        "CHAMPIONSHIP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/24.png",
          "names": [
            "ENGLISH CHAMPIONSHIP",
            "ENGLISH FOOTBALL LEAGUE CHAMPIONSHIP",
            "ENGLISH LEAGUE CHAMPIONSHIP",
            "SKY BET CHAMPIONSHIP"
          ]
        }
      },
      {
        "CONCACAF CENTRAL AMERICAN CUP": {
          "logo": "https://b.fssta.com/uploads/application/soccer/competition-logos/CONCACAFCentralAmericanCup.png",
          "names": ["COPA CENTROAMERICANA DE CONCACAF"]
        }
      },
      {
        "CONCACAF CHAMPIONS LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2298.png",
          "names": ["CONCACAF CHAMPIONS CUP"]
        }
      },
      {
        "CONCACAF GOLD CUP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/59.png",
          "names": ["COPA ORO CONCACAF"]
        }
      },
      {
        "CONCACAF W CHAMPIONS CUP": {
          "logo": "https://i.gyazo.com/c1caff728e9a32711254b98d008194b2.png",
          "names": []
        }
      },
      {
        "CONCACAF W CHAMPIONSHIP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/18969.png",
          "names": []
        }
      },
      {
        "COPA AMÉRICA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/83.png",
          "names": [
            "CONMEBOL COPA AMERICA",
            "COPA AMERICA",
            "COPA LIBERTADORES DE AMÉRICA",
            "SOUTH AMERICAN FOOTBALL CHAMPIONSHIP"
          ]
        }
      },
      {
        "COPA LIBERTADORES": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/58.png",
          "names": [
            "CONMEBOL LIBERTADORES",
            "COPA LIBERTADORES DE AMERICA",
            "COPA LIBERTADORES DE AMÉRICA",
            "LIBERTADORES"
          ]
        }
      },
      {
        "COPA SUDAMERICANA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/1208.png",
          "names": ["CONMEBOL SUDAMERICANA", "COPA CONMEBOL SUDAMERICANA"]
        }
      },
      {
        "COPPA ITALIA": {
          "logo": "https://i.gyazo.com/8fd7660cca8f8b690f50979b72b295c3.png",
          "names": ["ITALIAN CUP"]
        }
      },
      {
        "EFL": {
          "logo": "https://i.gyazo.com/c8842fbcb2eeb6a53bc69fa6055b8b5d.png",
          "names": [
            "CARABAO CUP",
            "EFL CUP",
            "ENGLISH CARABAO CUP",
            "ENGLISH FOOTBALL LEAGUE CUP",
            "LEAGUE CUP"
          ]
        }
      },
      {
        "EFL LEAGUE ONE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/25.png",
          "names": [
            "ENGLISH FOOTBALL LEAGUE ONE",
            "LEAGUE ONE",
            "SKY BET LEAGUE ONE"
          ]
        }
      },
      {
        "EFL LEAGUE TWO": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/26.png",
          "names": [
            "ENGLISH FOOTBALL LEAGUE TWO",
            "LEAGUE TWO",
            "SKY BET LEAGUE TWO"
          ]
        }
      },
      {
        "EKSTRAKLASA": {
          "logo": "https://i.gyazo.com/362e31efdd0dad03b00858f4fb0901b5.png",
          "names": ["PKO BANK POLSKI EKSTRAKLASA", "POLAND EKSTRAKLASA"]
        }
      },
      {
        "EREDIVISIE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/11.png",
          "names": [
            "DUTCH EERSTE EREDIVISIE",
            "DUTCH EREDIVISIE",
            "NETHERLANDS EREDIVISIE",
            "VRIENDENLOTERIJ EREDIVISIE"
          ]
        }
      },
      {
        "FA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/40.png&w=500&h=500",
          "names": [
            "EMIRATES FA CUP",
            "ENGLISH FA CUP",
            "FA CUP",
            "FOOTBALL ASSOCIATION CHALLENGE CUP"
          ]
        }
      },
      {
        "FIFA CLUB WORLD CUP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/1932.png",
          "names": ["FIFA CWC"]
        }
      },
      {
        "FIFA WORLD CUP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/4.png",
          "names": ["FIFA WC", "WC"]
        }
      },
      {
        "FIFA'S WOMEN WORLD CUP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/60.png",
          "names": ["FIFA WOMEN WC"]
        }
      },
      {
        "FOOTBALL": {
          "logo": "https://i.gyazo.com/1c4aa937f5ea01b0f29bb27adb59884c.png",
          "names": []
        }
      },
      {
        "FRAUEN BUNDESLIGA": {
          "logo": "https://i.gyazo.com/d13d4c0330be96801aa4b2d8b83d3a8f.png",
          "names": ["GOOGLE PIXEL FRAUEN-BUNDESLIGA", "WOMEN'S FEDERAL LEAGUE"]
        }
      },
      {
        "GREECE CUP": {
          "logo": "https://i.gyazo.com/f80306df9b94a90f991b3cce386dc2b5.png",
          "names": ["BETSSON GREECE UP", "GREEK CUP", "GREEK FOOTBALL CUP"]
        }
      },
      {
        "J1 LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2199.png",
          "names": ["J.LEAGUE", "JAPANESE J.LEAGUE", "MEIJI YASUDA J1 LEAGUE"]
        }
      },
      {
        "K LEAGUE 1": {
          "logo": "https://i.gyazo.com/721eba6c954e2015d999ead7a0bd5c69.png",
          "names": []
        }
      },
      {
        "LA LIGA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/15.png",
          "names": [
            "CAMPEONATO NACIONAL DE LIGA DE PRIMERA DIVISION",
            "CAMPEONATO NACIONAL DE LIGA DE PRIMERA DIVISIÓN",
            "LA-LIGA",
            "LALIGA",
            "PRIMERA DIVISION",
            "PRIMERA DIVISIÓN",
            "SPANISH LA LIGA",
            "SPANISH LALIGA"
          ]
        }
      },
      {
        "LA LIGA 2": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/107.png",
          "names": [
            "CAMPEONATO NACIONAL DE LIGA DE SEGUNDA DIVISION",
            "CAMPEONATO NACIONAL DE LIGA DE SEGUNDA DIVISIÓN",
            "LALIGA 2",
            "SEGUNDA DIVISION",
            "SEGUNDA DIVISIÓN",
            "SPAIN SEGUNDA DIVISION",
            "SPANISH LA LIGA 2",
            "SPANISH LALIGA 2",
            "SPANISH SEGUNDA LIGA"
          ]
        }
      },
      {
        "LA PRIMERA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2244.png",
          "names": [
            "LA LIGA MAYOR",
            "PRIMERA DIVISION DE FUTBOL PROFESIONAL DE EL SALVADOR",
            "PRIMERA DIVISIÓN DE EL SALVADOR",
            "PRIMERA DIVISIÓN DE FÚTBOL PROFESIONAL DE EL SALVADOR",
            "SALVADORAN PRIMERA DIVISION"
          ]
        }
      },
      {
        "LEAGUES CUP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2410.png",
          "names": []
        }
      },
      {
        "LIGA DE EXPANSIÓN MX": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2306.png",
          "names": ["LIGA BBVA EXPANSIÓN MX"]
        }
      },
      {
        "LIGA FPD": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2245.png",
          "names": [
            "COSTA RICAN PRIMERA DIVISION",
            "LIGA DE FUTBOL DE PRIMERA DIVISION",
            "LIGA DE FÚTBOL DE PRIMERA DIVISIÓN",
            "LIGA PROMERICA",
            "PRIMERA DIVISION OF COSTA RICA",
            "PRIMERA DIVISIÓN OF COSTA RICA"
          ]
        }
      },
      {
        "LIGA GUATE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2248.png",
          "names": [
            "LIGA GUATE BANRURAL",
            "LIGA NACIONAL",
            "LIGA NACIONAL DE FUTBOL DE GUATEMALA",
            "LIGA NACIONAL DE FÚTBOL DE GUATEMALA"
          ]
        }
      },
      {
        "LIGA HONDUBET": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2247.png",
          "names": [
            "HONDURAN LIGA NACIONAL",
            "LIGA NACIONAL DE FUTBOL PROFESIONAL DE HONDURAS",
            "LIGA NACIONAL DE FÚTBOL PROFESIONAL DE HONDURAS"
          ]
        }
      },
      {
        "LIGA I": {
          "logo": "https://i.gyazo.com/3fd4b38d5263ca391e45850eb58d11e6.png",
          "names": [
            "ROMANIA LIGA 1",
            "ROMANIA LIGA I",
            "ROMANIAN LIGA 1",
            "ROMANIAN LIGA I",
            "SUPERLIGA"
          ]
        }
      },
      {
        "LIGA MX": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/22.png",
          "names": [
            "LIGA BBVA MX",
            "MEXICAN LIGA BBVA MX",
            "MEXICO LIGA MX",
            "PRIMERA DIVISION DE MEXICO",
            "PRIMERA DIVISIÓN DE MÉXICO"
          ]
        }
      },
      {
        "LIGA MX FEMENIL": {
          "logo": "https://i.gyazo.com/ee0e1ba5ea748951b7ec7f46fb411c4f.png",
          "names": ["LIGA BBVA MX FEMENIL", "MEXICO WOMEN LIGA MX"]
        }
      },
      {
        "LIGA PROFESIONAL ARGENTINA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/1.png",
          "names": [
            "ARGENTINE PRIMERA DIVISION",
            "ARGENTINE PRIMERA DIVISIÓN",
            "LIGA PROFESIONAL DE FUTBOL",
            "LIGA PROFESIONAL DE FÚTBOL",
            "PRIMERA DIVISION",
            "PRIMERA DIVISIÓN",
            "TORNEO BETANO"
          ]
        }
      },
      {
        "LIGUE 1": {
          "logo": "https://ligue1.com/images/Logo_Ligue_1.webp",
          "names": ["FRANCE LIGUE 1", "FRENCH LIGUE 1"]
        }
      },
      {
        "LIGUE 2": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/96.png",
          "names": ["FRANCE LIGUE 2", "FRENCH LIGUE 2"]
        }
      },
      {
        "MLS": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/19.png",
          "names": ["MAJOR LEAGUE SOCCER"]
        }
      },
      {
        "NORTHERN SUPER LEAGUE": {
          "logo": "https://i.gyazo.com/042f5bf51ab721bede2d9b56ce1818ae.png",
          "names": ["NSL"]
        }
      },
      {
        "NWSL": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2323.png",
          "names": ["NATIONAL WOMEN'S SOCCER LEAGUE", "NWSL WOMEN"]
        }
      },
      {
        "NWSL CHALLENGE CUP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2445.png",
          "names": []
        }
      },
      {
        "PREMIER LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/23.png",
          "names": ["ENGLISH PREMIER LEAGUE", "EPL"]
        }
      },
      {
        "PRIMEIRA LIGA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/14.png",
          "names": ["LIGA PORTUGAL", "PORTUGUESE PRIMEIRA LIGA"]
        }
      },
      {
        "PRIMERA A": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/1543.png",
          "names": ["COLOMBIA PRIMERA A", "COLOMBIAN PRIMERA A"]
        }
      },
      {
        "PRIMERA B": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2295.png",
          "names": ["COLOMBIA PRIMERA B", "COLOMBIAN PRIMERA B"]
        }
      },
      {
        "SCOTTISH PREMIERSHIP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/45.png",
          "names": ["PREMIERSHIP", "SPFL"]
        }
      },
      {
        "SERBIA SUPERLIGA": {
          "logo": "https://i.gyazo.com/0992f078dcacfef489477fc7bb1f5220.webp",
          "names": ["MOZZART SUPERLIGA", "SERBIAN SUPER LEAGUE"]
        }
      },
      {
        "SERIE A": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/12.png",
          "names": ["ITALIAN SERIE A", "ITALY SERIE A", "SERIE-A"]
        }
      },
      {
        "SERIE B": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/99.png",
          "names": ["ITALIAN SERIE B", "ITALY SERIE B", "SERIE-B"]
        }
      },
      {
        "SOCCER": {
          "logo": "https://i.gyazo.com/1c4aa937f5ea01b0f29bb27adb59884c.png",
          "names": []
        }
      },
      {
        "SUPER LEAGUE GREECE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/98.png",
          "names": [
            "A1 ETHNIKI KATIGORIA",
            "GREECE SUPER LEAGUE",
            "GREEK SUPER LEAGUE",
            "SUPER LEAGUE 1"
          ]
        }
      },
      {
        "SÜPER LIG": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/18.png",
          "names": [
            "SUPER LIG",
            "SUPERLIG",
            "SÜPERLIG",
            "TURKEY SUPER LIG",
            "TURKISH SUPER LIG"
          ]
        }
      },
      {
        "TURKEY 1 LIG": {
          "logo": "https://i.gyazo.com/730673f84223a85c9b9ae66123907bba.png",
          "names": ["TFF 1. LIG", "TRENDYOL 1. LIG"]
        }
      },
      {
        "U.S. OPEN CUP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/69.png",
          "names": ["LAMAR HUNT U.S. OPEN CUP", "US OPEN CUP", "USOC"]
        }
      },
      {
        "UEFA CHAMPIONS LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2.png",
          "names": ["CHAMPIONS LEAGUE", "UCL"]
        }
      },
      {
        "UEFA CONFERENCE LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/20296.png",
          "names": []
        }
      },
      {
        "UEFA EUROPA LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2310.png",
          "names": ["EUROPA LEAGUE"]
        }
      },
      {
        "UEFA EUROPEAN CHAMPIONSHIP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/74.png",
          "names": ["EUROS", "UEFA EUROS"]
        }
      },
      {
        "UEFA SUPER CUP": {
          "logo": "https://i.gyazo.com/3b786181aba130321b85c0e2f9604652.png",
          "names": ["EUROPEAN SUPER CUP"]
        }
      },
      {
        "UEFA WOMEN'S CHAMPIONS LEAGUE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2408.png",
          "names": ["UCL WOMEN", "UEFA WOMEN", "WOMEN'S CHAMPIONS LEAGUE"]
        }
      },
      {
        "USL CHAMPIONSHIP": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2292.png",
          "names": ["UNITED SOCCER LEAGUE CHAMPIONSHIP", "USLC"]
        }
      },
      {
        "USL LEAGUE ONE": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2452.png",
          "names": ["UNITED SOCCER LEAGUE LEAGUE ONE", "USL 1", "USL1"]
        }
      },
      {
        "WORLD CUP QUALIFIERS": {
          "logo": "https://i.gyazo.com/1c4aa937f5ea01b0f29bb27adb59884c.png",
          "names": []
        }
      },
      {
        "WSL": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/leaguelogos/soccer/500/2314.png",
          "names": [
            "BARCLAY'S WOMEN'S SUPER LEAGUE",
            "ENGLISH WOMEN'S SUPER LEAGUE",
            "FA WSL",
            "WOMEN'S SUPER LEAGUE"
          ]
        }
      }
    ],
    "Tennis.Dummy.us": [
      {
        "TENNIS": {
          "logo": "https://i.gyazo.com/b5e83afc3a75dacfb831abe975fd3821.png",
          "names": []
        }
      }
    ],
    "UFC.247.Dummy.us": [
      {
        "UFC": {
          "logo": "https://1000logos.net/wp-content/uploads/2017/06/Logo-UFC-500x313.png",
          "names": ["UFC FIGHT NIGHT"]
        }
      }
    ],
    "WNBA.dummy.us": [
      {
        "WNBA": {
          "logo": "https://a.espncdn.com/combiner/i?img=/i/teamlogos/leagues/500/wnba.png",
          "names": ["NBA W", "WOMEN'S NATIONAL BASKETBALL ASSOCIATION"]
        }
      }
    ]
  },
  "teams": {
    "NBA": [
      "76ers",
      "Atlanta Hawks",
      "Blazers",
      "Boston Celtics",
      "Brooklyn Nets",
      "Bucks",
      "Bulls",
      "Cavaliers",
      "Celtics",
      "Charlotte Hornets",
      "Chicago Bulls",
      "Cleveland Cavaliers",
      "Clippers",
      "Dallas Mavericks",
      "Denver Nuggets",
      "Detroit Pistons",
      "Golden State Warriors",
      "Grizzlies",
      "Hawks",
      "Heat",
      "Hornets",
      "Houston Rockets",
      "Indiana Pacers",
      "Jazz",
      "Kings",
      "Knicks",
      "Lakers",
      "Los Angeles Clippers",
      "Los Angeles Lakers",
      "Magic",
      "Mavericks",
      "Memphis Grizzlies",
      "Miami Heat",
      "Milwaukee Bucks",
      "Minnesota Timberwolves",
      "Nets",
      "New Orleans Pelicans",
      "New York Knicks",
      "Nuggets",
      "Oklahoma City Thunder",
      "Orlando Magic",
      "Pacers",
      "Pelicans",
      "Philadelphia 76ers",
      "Phoenix Suns",
      "Pistons",
      "Portland Trail Blazers",
      "Raptors",
      "Rockets",
      "Sacramento Kings",
      "San Antonio Spurs",
      "Sixers",
      "Spurs",
      "Suns",
      "Thunder",
      "Timberwolves",
      "Toronto Raptors",
      "Trail Blazers",
      "Utah Jazz",
      "Warriors",
      "Washington Wizards",
      "Wizards",
      "Wolves"
    ],
    "NFL": [
      "49ers",
      "9ers",
      "Arizona Cardinals",
      "Atlanta Falcons",
      "Baltimore Ravens",
      "Bears",
      "Bengals",
      "Bills",
      "Broncos",
      "Browns",
      "Buccaneers",
      "Buffalo Bills",
      "Cardinals",
      "Carolina Panthers",
      "Chargers",
      "Chicago Bears",
      "Chiefs",
      "Cincinnati Bengals",
      "Cleveland Browns",
      "Colts",
      "Commanders",
      "Cowboys",
      "Dallas Cowboys",
      "Denver Broncos",
      "Detroit Lions",
      "Dolphins",
      "Eagles",
      "Falcons",
      "Giants",
      "Green Bay Packers",
      "Houston Texans",
      "Indianapolis Colts",
      "Jacksonville Jaguars",
      "Jaguars",
      "Jets",
      "Kansas City Chiefs",
      "Las Vegas Raiders",
      "Lions",
      "Los Angeles Chargers",
      "Los Angeles Rams",
      "Miami Dolphins",
      "Minnesota Vikings",
      "New England Patriots",
      "New Orleans Saints",
      "New York Giants",
      "New York Jets",
      "Niners",
      "Packers",
      "Panthers",
      "Patriots",
      "Philadelphia Eagles",
      "Pittsburgh Steelers",
      "Raiders",
      "Rams",
      "Ravens",
      "Redskins",
      "Saints",
      "San Francisco 49ers",
      "Seahawks",
      "Seattle Seahawks",
      "Steelers",
      "Tampa Bay Buccaneers",
      "Tennessee Titans",
      "Texans",
      "Titans",
      "Vikings",
      "Washington Commanders",
      "Washington Redskins"
    ],
    "WNBA": [
      "Aces",
      "Atlanta Dream",
      "Chicago Sky",
      "Connecticut Sun",
      "Dallas Wings",
      "Dream",
      "Fever",
      "Golden State Valkyries",
      "Indiana Fever",
      "Las Vegas Aces",
      "Liberty",
      "Los Angeles Sparks",
      "Lynx",
      "Mercury",
      "Minnesota Lynx",
      "Mystics",
      "New York Liberty",
      "Phoenix Mercury",
      "Seattle Storm",
      "Sky",
      "Sparks",
      "Storm",
      "Sun",
      "Valkyries",
      "Washington Mystics",
      "Wings"
    ]
  }
}
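Every entry follows the same {NAME: {"logo": ..., "names": [...]}} shape; a small hedged validator for future edits (file path assumed, run from the repo root):

import json
from pathlib import Path

data = json.loads(
    Path("M3U8/scrapers/utils/leagues.json").read_text(encoding="utf-8")
)

for tvg_id, entries in data["leagues"].items():
    for entry in entries:
        for name, info in entry.items():
            assert set(info) == {"logo", "names"}, (tvg_id, name)
            assert isinstance(info["names"], list), (tvg_id, name)

print(f"{len(data['leagues'])} league groups, {len(data['teams'])} team lists OK")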
50
M3U8/scrapers/utils/logger.py
Normal file
50
M3U8/scrapers/utils/logger.py
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
import logging
from pathlib import Path

LOG_FMT = (
    "[%(asctime)s] "
    "%(levelname)-8s "
    "[%(name)s] "
    "%(message)-70s "
    "(%(filename)s:%(lineno)d)"
)

COLORS = {
    "DEBUG": "\033[36m",
    "INFO": "\033[32m",
    "WARNING": "\033[33m",
    "ERROR": "\033[31m",
    "CRITICAL": "\033[1;41m",
    "reset": "\033[0m",
}


class ColorFormatter(logging.Formatter):
    def format(self, record) -> str:
        color = COLORS.get(record.levelname, COLORS["reset"])
        levelname = record.levelname
        record.levelname = f"{color}{levelname:<8}{COLORS['reset']}"
        formatted = super().format(record)
        record.levelname = levelname

        return formatted


def get_logger(name: str | None = None) -> logging.Logger:
    if not name:
        name = Path(__file__).stem

    logger = logging.getLogger(name)

    if not logger.hasHandlers():
        handler = logging.StreamHandler()
        formatter = ColorFormatter(LOG_FMT, datefmt="%Y-%m-%d | %H:%M:%S")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)
        logger.propagate = False

    return logger


__all__ = ["get_logger", "ColorFormatter"]
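Usage is one call; the ANSI colors only render on capable terminals (import path assumed):

from M3U8.scrapers.utils import get_logger

log = get_logger("demo")
log.info("Collected and cached 3 new event(s)")    # green INFO tag
log.warning("URL 2) Timed out waiting for M3U8.")  # yellow WARNING tag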
255
M3U8/scrapers/utils/webwork.py
Normal file
255
M3U8/scrapers/utils/webwork.py
Normal file
|
|
@ -0,0 +1,255 @@
|
|||
import asyncio
import logging
import random
import re
from collections.abc import Awaitable, Callable
from functools import partial
from typing import TypeVar

import httpx
from playwright.async_api import Browser, BrowserContext, Playwright, Request

from .logger import get_logger

T = TypeVar("T")


class Network:
    UA = (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
        "AppleWebKit/537.36 (KHTML, like Gecko) "
        "Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0"
    )

    def __init__(self) -> None:
        self.client = httpx.AsyncClient(
            timeout=5,
            follow_redirects=True,
            headers={"User-Agent": Network.UA},
            http2=True,
        )

        self._logger = get_logger("network")

    async def check_status(self, url: str) -> bool:
        try:
            r = await self.client.get(url)
            r.raise_for_status()
            return r.status_code == 200
        except (httpx.HTTPError, httpx.TimeoutException) as e:
            self._logger.debug(f"Status check failed for {url}: {e}")
            return False

    async def get_base(self, mirrors: list[str]) -> str | None:
        random.shuffle(mirrors)

        tasks = [self.check_status(link) for link in mirrors]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        # gather() may hand back exception objects (which are truthy),
        # so only an explicit True marks a mirror as working
        working_mirrors = [
            mirror for mirror, success in zip(mirrors, results) if success is True
        ]

        return working_mirrors[0] if working_mirrors else None

    @staticmethod
    async def safe_process(
        fn: Callable[[], Awaitable[T]],
        url_num: int,
        timeout: int | float = 15,
        log: logging.Logger | None = None,
    ) -> T | None:
        if not log:
            log = logging.getLogger(__name__)

        task = asyncio.create_task(fn())

        try:
            return await asyncio.wait_for(task, timeout=timeout)
        except asyncio.TimeoutError:
            log.warning(f"URL {url_num}) Timed out after {timeout}s, skipping event")

            task.cancel()

            try:
                await task
            except asyncio.CancelledError:
                pass
            except Exception as e:
                log.debug(f"URL {url_num}) Ignored exception after timeout: {e}")

            return None
        except Exception as e:
            log.error(f"URL {url_num}) Unexpected error: {e}")
            return None

    @staticmethod
    def capture_req(
        req: Request,
        captured: list[str],
        got_one: asyncio.Event,
    ) -> None:
        # match any .m3u8 request whose URL does not contain a blocked CDN
        invalids = ["amazonaws", "knitcdn"]

        escaped = [re.escape(i) for i in invalids]

        pattern = re.compile(
            rf"^(?!.*({'|'.join(escaped)})).*\.m3u8",
            re.IGNORECASE,
        )

        if pattern.search(req.url):
            captured.append(req.url)
            got_one.set()

    async def process_event(
        self,
        url: str,
        url_num: int,
        context: BrowserContext,
        timeout: int | float = 10,
        log: logging.Logger | None = None,
    ) -> str | None:
        # fall back to the instance logger so the warnings below never
        # dereference None
        if not log:
            log = self._logger

        page = await context.new_page()

        captured: list[str] = []

        got_one = asyncio.Event()

        handler = partial(
            self.capture_req,
            captured=captured,
            got_one=got_one,
        )

        page.on("request", handler)

        try:
            await page.goto(
                url,
                wait_until="domcontentloaded",
                timeout=15_000,
            )

            wait_task = asyncio.create_task(got_one.wait())

            try:
                await asyncio.wait_for(wait_task, timeout=timeout)
            except asyncio.TimeoutError:
                log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
                return None
            finally:
                if not wait_task.done():
                    wait_task.cancel()

                    try:
                        await wait_task
                    except asyncio.CancelledError:
                        pass

            if captured:
                log.info(f"URL {url_num}) Captured M3U8")
                return captured[0]

            log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
            return None

        except Exception as e:
            log.warning(f"URL {url_num}) Exception while processing: {e}")
            return None

        finally:
            page.remove_listener("request", handler)
            await page.close()

    @staticmethod
    async def browser(
        playwright: Playwright,
        browser: str = "firefox",
        ignore_https_errors: bool = False,
    ) -> tuple[Browser, BrowserContext]:
        if browser == "brave":
            brwsr = await playwright.chromium.connect_over_cdp("http://localhost:9222")
            context = brwsr.contexts[0]
        else:
            brwsr = await playwright.firefox.launch(headless=True)

            context = await brwsr.new_context(
                user_agent=Network.UA,
                ignore_https_errors=ignore_https_errors,
                viewport={"width": 1366, "height": 768},
                device_scale_factor=1,
                locale="en-US",
                timezone_id="America/New_York",
                color_scheme="dark",
                permissions=["geolocation"],
                extra_http_headers={
                    "Accept-Language": "en-US,en;q=0.9",
                    "Upgrade-Insecure-Requests": "1",
                },
            )

        await context.add_init_script(
            """
            Object.defineProperty(navigator, "webdriver", { get: () => undefined });

            Object.defineProperty(navigator, "languages", {
                get: () => ["en-US", "en"],
            });

            Object.defineProperty(navigator, "plugins", {
                get: () => [1, 2, 3, 4],
            });

            const elementDescriptor = Object.getOwnPropertyDescriptor(
                HTMLElement.prototype,
                "offsetHeight"
            );

            Object.defineProperty(HTMLDivElement.prototype, "offsetHeight", {
                ...elementDescriptor,
                get: function () {
                    if (this.id === "modernizr") {
                        return 24;
                    }
                    return elementDescriptor.get.apply(this);
                },
            });

            Object.defineProperty(window.screen, "width", { get: () => 1366 });
            Object.defineProperty(window.screen, "height", { get: () => 768 });

            const getParameter = WebGLRenderingContext.prototype.getParameter;

            WebGLRenderingContext.prototype.getParameter = function (param) {
                if (param === 37445) return "Intel Inc."; // UNMASKED_VENDOR_WEBGL
                if (param === 37446) return "Intel Iris OpenGL Engine"; // UNMASKED_RENDERER_WEBGL
                return getParameter.apply(this, [param]);
            };

            const observer = new MutationObserver((mutations) => {
                mutations.forEach((mutation) => {
                    mutation.addedNodes.forEach((node) => {
                        if (node.tagName === "IFRAME" && node.hasAttribute("sandbox")) {
                            node.removeAttribute("sandbox");
                        }
                    });
                });
            });

            observer.observe(document.documentElement, { childList: true, subtree: true });
            """
        )

        return brwsr, context


network = Network()

__all__ = ["network"]
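How the mirror probe and the timeout wrapper compose, as a hedged sketch (the slow coroutine is invented; import path assumed):

import asyncio

from M3U8.scrapers.utils import network


async def main() -> None:
    base = await network.get_base(
        ["https://www.watchfooty.top", "https://www.watchfooty.st"]
    )
    print("first responsive mirror:", base)

    async def slow() -> str:  # stand-in for a page-processing coroutine
        await asyncio.sleep(30)
        return "never reached"

    # yields None after 5s instead of stalling the whole scrape
    print(await network.safe_process(slow, url_num=1, timeout=5))


asyncio.run(main())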
304
M3U8/scrapers/watchfooty.py
Normal file
304
M3U8/scrapers/watchfooty.py
Normal file
|
|
@ -0,0 +1,304 @@
|
|||
import asyncio
import re
from functools import partial
from itertools import chain
from typing import Any
from urllib.parse import urljoin

import httpx
from playwright.async_api import (
    BrowserContext,
    TimeoutError as PlaywrightTimeoutError,
    async_playwright,
)

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("watchfty.json", exp=10_800)

API_FILE = Cache("watchfty-api.json", exp=28_800)

API_MIRRORS = ["https://api.watchfooty.top", "https://api.watchfooty.st"]

BASE_MIRRORS = ["https://www.watchfooty.top", "https://www.watchfooty.st"]

SPORT_ENDPOINTS = [
    "american-football",
    # "australian-football",
    # "baseball",
    "basketball",
    # "cricket",
    # "darts",
    "fighting",
    "football",
    "golf",
    "hockey",
    "racing",
    # "rugby",
    # "tennis",
    # "volleyball",
]

TAG = "WFTY"


async def get_api_data(client: httpx.AsyncClient, url: str) -> list[dict[str, Any]]:
    try:
        r = await client.get(url, timeout=5)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return []

    return r.json()


async def refresh_api_cache(
    client: httpx.AsyncClient,
    url: str,
) -> list[dict[str, Any]]:
    log.info("Refreshing API cache")

    tasks = [
        get_api_data(client, urljoin(url, f"api/v1/matches/{sport}"))
        for sport in SPORT_ENDPOINTS
    ]

    results = await asyncio.gather(*tasks)

    data = list(chain(*results))

    # keep each event's kickoff under "ts" and stamp the cache itself on the
    # last element so Cache.load(per_entry=False, index=-1) can check freshness
    for ev in data:
        ev["ts"] = ev.pop("timestamp")

    if data:
        data[-1]["timestamp"] = Time.now().timestamp()

    return data


async def process_event(
    url: str,
    url_num: int,
    context: BrowserContext,
) -> str | None:
    page = await context.new_page()

    captured: list[str] = []

    got_one = asyncio.Event()

    handler = partial(
        network.capture_req,
        captured=captured,
        got_one=got_one,
    )

    page.on("request", handler)

    try:
        await page.goto(
            url,
            wait_until="domcontentloaded",
            timeout=15_000,
        )

        await page.wait_for_timeout(1_500)

        try:
            header = await page.wait_for_selector(
                "text=/Stream Links/i",
                timeout=5_000,
            )

            text = await header.inner_text()
        except PlaywrightTimeoutError:
            # playwright raises its own TimeoutError, not the builtin
            log.warning(f"URL {url_num}) Can't find stream links header.")
            return None

        match = re.search(r"\((\d+)\)", text)

        if not match or int(match[1]) == 0:
            log.warning(f"URL {url_num}) No available stream links.")
            return None

        first_available = await page.wait_for_selector(
            'a[href*="/stream/"]', timeout=3_000
        )

        await first_available.click()

        wait_task = asyncio.create_task(got_one.wait())

        try:
            await asyncio.wait_for(wait_task, timeout=6)
        except asyncio.TimeoutError:
            log.warning(f"URL {url_num}) Timed out waiting for M3U8.")
            return None
        finally:
            if not wait_task.done():
                wait_task.cancel()

                try:
                    await wait_task
                except asyncio.CancelledError:
                    pass

        if captured:
            log.info(f"URL {url_num}) Captured M3U8")
            return captured[-1]

        log.warning(f"URL {url_num}) No M3U8 captured after waiting.")
        return None

    except Exception as e:
        log.warning(f"URL {url_num}) Exception while processing: {e}")
        return None

    finally:
        page.remove_listener("request", handler)
        await page.close()


async def get_events(
    client: httpx.AsyncClient,
    api_url: str,
    base_url: str,
    cached_keys: set[str],
) -> list[dict[str, str]]:
    if not (api_data := API_FILE.load(per_entry=False, index=-1)):
        api_data = await refresh_api_cache(client, api_url)

        API_FILE.write(api_data)

    events = []

    now = Time.clean(Time.now())
    start_dt = now.delta(hours=-1)
    end_dt = now.delta(minutes=10)
    pattern = re.compile(r"\-+|\(")

    for event in api_data:
        match_id = event.get("matchId")
        name = event.get("title")
        league = event.get("league")

        if not (match_id and name and league):
            continue

        if not (ts := event.get("ts")):
            continue

        # API timestamps are in milliseconds
        start_ts = int(ts) // 1_000

        event_dt = Time.from_ts(start_ts)

        # keep events that kicked off within the last hour
        # or start within the next 10 minutes
        if not start_dt <= event_dt <= end_dt:
            continue

        sport = pattern.split(league, 1)[0].strip()

        logo = urljoin(api_url, poster) if (poster := event.get("poster")) else None

        key = f"[{sport}] {name} ({TAG})"

        if key in cached_keys:
            continue

        events.append(
            {
                "sport": sport,
                "event": name,
                "link": urljoin(base_url, f"stream/{match_id}"),
                "logo": logo,
                "timestamp": event_dt.timestamp(),
            }
        )

    return events


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    valid_urls = {k: v for k, v in cached_urls.items() if v["url"]}
    valid_count = cached_count = len(valid_urls)
    urls.update(valid_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    base_url = await network.get_base(BASE_MIRRORS)

    api_url = await network.get_base(API_MIRRORS)

    if not (base_url and api_url):
        log.warning("No working Watch Footy mirrors")
        CACHE_FILE.write(cached_urls)
        return

    log.info(f'Scraping from "{base_url}"')

    events = await get_events(
        client,
        api_url,
        base_url,
        set(cached_urls.keys()),
    )

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p)

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                sport, event, logo, ts, link = (
                    ev["sport"],
                    ev["event"],
                    ev["logo"],
                    ev["timestamp"],
                    ev["link"],
                )

                key = f"[{sport}] {event} ({TAG})"

                tvg_id, pic = leagues.get_tvg_info(sport, event)

                entry = {
                    "url": url,
                    "logo": logo or pic,
                    "base": base_url,
                    "timestamp": ts,
                    "id": tvg_id or "Live.Event.us",
                    "link": link,
                }

                cached_urls[key] = entry

                if url:
                    valid_count += 1
                    urls[key] = entry

            await browser.close()

    if new_count := valid_count - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
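The liveness window above keeps an event only if it kicked off within the last hour or starts within 10 minutes; the same filter in isolation (offsets invented, import path assumed):

from M3U8.scrapers.utils import Time

now = Time.clean(Time.now())
start_dt, end_dt = now.delta(hours=-1), now.delta(minutes=10)

for label, event_dt in [
    ("kicked off 30 min ago", now.delta(minutes=-30)),
    ("starts in 5 min", now.delta(minutes=5)),
    ("starts in 2 hours", now.delta(hours=2)),
]:
    print(label, "->", "scrape" if start_dt <= event_dt <= end_dt else "skip")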
185
M3U8/scrapers/webcast.py
Normal file
185
M3U8/scrapers/webcast.py
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
import asyncio
from functools import partial

import httpx
from playwright.async_api import async_playwright
from selectolax.parser import HTMLParser

from .utils import Cache, Time, get_logger, leagues, network

log = get_logger(__name__)

urls: dict[str, dict[str, str | float]] = {}

CACHE_FILE = Cache("webcast.json", exp=10_800)

HTML_CACHE = Cache("webcast-html.json", exp=86_400)

BASE_URLS = {"NFL": "https://nflwebcast.com", "NHL": "https://slapstreams.com"}

TAG = "WEBCST"


def fix_event(s: str) -> str:
    return " vs ".join(s.split("@"))


async def refresh_html_cache(
    client: httpx.AsyncClient, url: str
) -> dict[str, dict[str, str | float]]:
    try:
        r = await client.get(url)
        r.raise_for_status()
    except Exception as e:
        log.error(f'Failed to fetch "{url}": {e}')

        return {}

    now = Time.now()

    soup = HTMLParser(r.content)

    events = {}

    title = soup.css_first("title").text(strip=True)

    sport = "NFL" if "NFL" in title else "NHL"

    date_text = now.strftime("%B %d, %Y")

    if date_row := soup.css_first("tr.mdatetitle"):
        if mtdate_span := date_row.css_first("span.mtdate"):
            date_text = mtdate_span.text(strip=True)

    for row in soup.css("tr.singele_match_date"):
        if not (time_node := row.css_first("td.matchtime")):
            continue

        time = time_node.text(strip=True)

        if not (vs_node := row.css_first("td.teamvs a")):
            continue

        event_name = vs_node.text(strip=True)

        for span in vs_node.css("span.mtdate"):
            date = span.text(strip=True)

            event_name = event_name.replace(date, "").strip()

        if not (href := vs_node.attributes.get("href")):
            continue

        # listings show Eastern evening clock times, hence the appended "PM"
        event_dt = Time.from_str(f"{date_text} {time} PM", timezone="EST")

        event = fix_event(event_name)

        key = f"[{sport}] {event} ({TAG})"

        events[key] = {
            "sport": sport,
            "event": event,
            "link": href,
            "event_ts": event_dt.timestamp(),
            "timestamp": now.timestamp(),
        }

    return events


async def get_events(
    client: httpx.AsyncClient, cached_keys: set[str]
) -> list[dict[str, str]]:
    now = Time.clean(Time.now())

    if not (events := HTML_CACHE.load()):
        log.info("Refreshing HTML cache")

        tasks = [refresh_html_cache(client, url) for url in BASE_URLS.values()]

        results = await asyncio.gather(*tasks)

        events = {k: v for data in results for k, v in data.items()}

        HTML_CACHE.write(events)

    live = []

    # only games within half an hour of kickoff either way
    start_ts = now.delta(minutes=-30).timestamp()
    end_ts = now.delta(minutes=30).timestamp()

    for k, v in events.items():
        if k in cached_keys:
            continue

        if not start_ts <= v["event_ts"] <= end_ts:
            continue

        live.append({**v})

    return live


async def scrape(client: httpx.AsyncClient) -> None:
    cached_urls = CACHE_FILE.load()
    cached_count = len(cached_urls)
    urls.update(cached_urls)

    log.info(f"Loaded {cached_count} event(s) from cache")

    # building the string first avoids reusing quotes inside an f-string,
    # which is a syntax error before Python 3.12
    sources = " & ".join(BASE_URLS.values())

    log.info(f'Scraping from "{sources}"')

    events = await get_events(client, set(cached_urls.keys()))

    log.info(f"Processing {len(events)} new URL(s)")

    if events:
        async with async_playwright() as p:
            browser, context = await network.browser(p)

            for i, ev in enumerate(events, start=1):
                handler = partial(
                    network.process_event,
                    url=ev["link"],
                    url_num=i,
                    context=context,
                    log=log,
                )

                url = await network.safe_process(
                    handler,
                    url_num=i,
                    log=log,
                )

                if url:
                    sport, event, ts, link = (
                        ev["sport"],
                        ev["event"],
                        ev["event_ts"],
                        ev["link"],
                    )

                    key = f"[{sport}] {event} ({TAG})"

                    tvg_id, logo = leagues.get_tvg_info(sport, event)

                    entry = {
                        "url": url,
                        "logo": logo,
                        "base": BASE_URLS[sport],
                        "timestamp": ts,
                        "id": tvg_id or "Live.Event.us",
                        "link": link,
                    }

                    urls[key] = cached_urls[key] = entry

            await browser.close()

    if new_count := len(cached_urls) - cached_count:
        log.info(f"Collected and cached {new_count} new event(s)")
    else:
        log.info("No new events found")

    CACHE_FILE.write(cached_urls)
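fix_event and the schedule-time handling, exercised on a made-up row (import paths assumed; the site lists Eastern clock times, hence the appended "PM"):

from M3U8.scrapers.utils import Time
from M3U8.scrapers.webcast import fix_event

print(fix_event("Bears@Packers"))  # -> "Bears vs Packers"

dt = Time.from_str("October 12, 2025 8:15 PM", timezone="EST")
print(dt.isoformat())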