# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import print_function

import os
import os.path
import re
import sys
import warnings

import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from six.moves.urllib.parse import quote_plus

from ..seeker import SubtitlesDownloadError, SubtitlesErrors
from ..utilities import languageTranslate, log, getFileSize

# Silence the warning that requests emits for every verify=False call below.
warnings.simplefilter('ignore', InsecureRequestWarning)

# Python 2/3 compatibility: pull urlopen/Request and quote/unquote from the
# right stdlib location (the old fallbacks to urllib3 were incorrect; urllib3
# does not export these names).
PY3 = sys.version_info[0] == 3
if PY3:
    from urllib.request import urlopen, Request
    from urllib.parse import quote, unquote
else:
    from urllib2 import urlopen, Request
    from urllib import quote, unquote

HDR = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; rv:109.0) Gecko/20100101 Firefox/115.0',
       'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
       'Accept-Language': 'fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3',
       'Content-Type': 'text/html; charset=UTF-8',
       'Host': 'subs.ath.cx',
       'Referer': 'http://subs.ath.cx',
       'Upgrade-Insecure-Requests': '1',
       'Connection': 'keep-alive',
       'Accept-Encoding': 'gzip, deflate'}

s = requests.Session()

main_url = "http://subs.ath.cx"
debug_pretext = "subs.ath.cx"


def get_url(url, referer=None):
    # Fetch a page as text, presenting a browser-like User-Agent (and an
    # optional Referer) so the server does not reject the request.
    headers = {'User-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0'}
    if referer is not None:
        headers['Referer'] = referer
    req = Request(url, None, headers)
    response = urlopen(req)
    content = response.read().decode('utf-8')
    response.close()
    return content.replace('\n', '')
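
# Illustrative use (the path is hypothetical; the plugin itself fetches the
# listing through the requests session below):
#   page = get_url(main_url + '/subtitles', referer=main_url)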


def get_rating(downloads):
    # Map a download count onto a 1-10 star rating.
    rating = int(downloads)
    if rating < 50:
        rating = 1
    elif rating < 100:
        rating = 2
    elif rating < 150:
        rating = 3
    elif rating < 200:
        rating = 4
    elif rating < 250:
        rating = 5
    elif rating < 300:
        rating = 6
    elif rating < 350:
        rating = 7
    elif rating < 400:
        rating = 8
    elif rating < 450:
        rating = 9
    else:
        rating = 10
    return rating
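
# The ladder above amounts to one star per 50 downloads, capped at ten; an
# equivalent compact form (a sketch, not wired in anywhere):
#   min(int(downloads) // 50 + 1, 10)
# e.g. get_rating(0) == 1, get_rating(120) == 3, get_rating(500) == 10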


def search_subtitles(file_original_path, title, tvshow, year, season, episode, set_temp, rar, lang1, lang2, lang3, stack):  # standard input
    subtitles_list = []
    msg = ""
    # Normalise the title the way the site's file names expect before
    # building the query string.
    title = str(title).replace(':', '').replace('  ', ' ').replace(',', '').replace("'", "").replace("&", "and").replace("!", "").replace("?", "").replace("- ", "").replace(" III", " 3").replace(" II", " 2").title()
    if len(tvshow) == 0 and year:  # Movie
        searchstring = "%s (%s)" % (title, year)
    elif len(tvshow) > 0 and title == tvshow:  # Movie not in Library
        searchstring = "%s (%02d%02d)" % (tvshow, int(season), int(episode))
    elif len(tvshow) > 0:  # TV Show
        searchstring = "%s S%02dE%02d" % (tvshow, int(season), int(episode))
    else:
        searchstring = title
    log(__name__, "%s Search string = %s" % (debug_pretext, searchstring))
    get_subtitles_list(title, searchstring, "ar", "Arabic", subtitles_list)
    return subtitles_list, "", msg  # standard output
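
# Query shapes this produces, for illustration: a movie becomes
# "Title (2023)", an episode becomes "Show S01E02", and an episode matched
# only by title becomes "Show (0102)".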


def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, session_id):  # standard input
    language = subtitles_list[pos]["language_name"]
    subtitle_id = subtitles_list[pos]["id"]
    downloadlink = 'http://subs.ath.cx/subtitles/%s' % subtitle_id
    log(__name__, "%s Downloadlink: %s " % (debug_pretext, downloadlink))
    log(__name__, "%s Fetching subtitles from url '%s'" % (debug_pretext, downloadlink))
    # A plain GET on the direct link suffices; the ASP.NET-style form fields
    # an earlier revision posted here belonged to a different site and were
    # vestigial.
    response = s.get(downloadlink, headers=HDR, verify=False, allow_redirects=True)
    local_tmp_file = zip_subs
    # Initialise the outputs so a failed download cannot leave them undefined.
    packed = False
    typeid = "srt"
    subs_file = local_tmp_file
    try:
        log(__name__, "%s Saving subtitles to '%s'" % (debug_pretext, local_tmp_file))
        if not os.path.exists(tmp_sub_dir):
            os.makedirs(tmp_sub_dir)
        with open(local_tmp_file, 'wb') as local_file_handle:
            local_file_handle.write(response.content)
        # Check the archive type through the file header: RAR files start
        # with b'Rar!', ZIP files with b'PK'; anything else is treated as a
        # plain subtitle file. Compare raw bytes rather than decoding, since
        # the payload is binary.
        with open(local_tmp_file, 'rb') as myfile:
            header = myfile.read(1)
        if header == b'R':
            typeid = "rar"
            packed = True
            log(__name__, "Discovered RAR Archive")
        elif header == b'P':
            typeid = "zip"
            packed = True
            log(__name__, "Discovered ZIP Archive")
        else:
            typeid = "srt"
            packed = False
            subs_file = local_tmp_file
            log(__name__, "Discovered a non-archive file")
        log(__name__, "%s Saving to %s" % (debug_pretext, local_tmp_file))
    except Exception:
        log(__name__, "%s Failed to save subtitle to %s" % (debug_pretext, local_tmp_file))
    if packed:
        subs_file = typeid
    log(__name__, "%s Subtitles saved to '%s'" % (debug_pretext, local_tmp_file))
    return packed, language, subs_file  # standard output
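

# A self-contained restatement of the header sniffing above, handy for a
# quick manual check; the helper name is illustrative and nothing in the
# plugin calls it. RAR archives begin with b'Rar!', ZIP archives with b'PK'.
def _sniff_archive_type(path):
    with open(path, 'rb') as f:
        magic = f.read(4)
    if magic.startswith(b'R'):
        return 'rar'
    if magic.startswith(b'P'):
        return 'zip'
    return 'srt'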


def get_subtitles_list(title, searchstring, languageshort, languagelong, subtitles_list):
    url = '%s/subtitles' % main_url
    title = title.strip()
    # The directory listing is filtered with a regex, so turn the title into
    # a dot-separated pattern (spaces become dots in the file names).
    d = quote_plus(title).replace('+', '.')
    try:
        log(__name__, "%s Getting url: %s" % (debug_pretext, url))
        content = s.get(url, headers=HDR, verify=False, allow_redirects=True).text
    except Exception:
        log(__name__, "%s Failed to get url:%s" % (debug_pretext, url))
        return
    try:
        log(__name__, "%s Getting '%s' subs ..." % (debug_pretext, languageshort))
        subtitles = re.compile(r'(<td><a href.+?">' + d + r'.+?</a></td>)', re.IGNORECASE).findall(content)
    except Exception:
        log(__name__, "%s Failed to get subtitles" % debug_pretext)
        return
    for subtitle in subtitles:
        try:
            filename = re.compile('<td><a href=".+?">(.+?)</a></td>').findall(subtitle)[0]
            filename = filename.strip().replace('.srt', '')
            sub_id = re.compile('href="(.+?)"').findall(subtitle)[0]
            # Skip known non-subtitle rows (site/credit entries).
            if filename not in ('Εργαστήρι Υποτίτλων', 'subs4series'):
                log(__name__, "%s Subtitles found: %s (id = %s)" % (debug_pretext, filename, sub_id))
                subtitles_list.append({'no_files': 1, 'filename': filename, 'sync': True, 'id': sub_id, 'language_flag': 'flags/' + languageshort + '.gif', 'language_name': languagelong})
        except Exception:
            pass
    return
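
# For illustration, a hypothetical listing row that the regexes above match:
#   <td><a href="Some.Movie.2023.srt">Some.Movie.2023.srt</a></td>
# 'filename' captures the link text and 'sub_id' the href target.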