Commit 79e0971

Author: subssupport python bot
Commit message: PEP8 double aggressive W291 ~ W293 and W391
Parent: 2262db7
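
The codes in the commit message are pycodestyle whitespace checks: W291 (trailing whitespace), W293 (whitespace on a blank line) and W391 (blank line at end of file). "Double aggressive" suggests an automated formatter such as autopep8 run with two --aggressive flags and a --select limited to those codes, though the exact command is not recorded in the commit. Because the change is whitespace-only, every removed/added pair in the diffs below shows what looks like identical text; the difference is invisible trailing whitespace, plus one trailing blank line dropped at the end of two of the files. As a rough, hypothetical sketch of the transformation (not the tool that actually produced this commit), a small Python script doing the same cleanup could look like this:

    # strip_whitespace.py -- illustrative sketch only, not part of this commit.
    # Removes W291/W293 trailing whitespace and W391 trailing blank lines.
    import sys


    def strip_pep8_whitespace(text):
        # W291/W293: strip trailing whitespace from every line, blank or not.
        lines = [line.rstrip() for line in text.splitlines()]
        # W391: drop blank lines at the end of the file, keep one final newline.
        while lines and lines[-1] == "":
            lines.pop()
        return "\n".join(lines) + "\n"


    if __name__ == "__main__":
        for path in sys.argv[1:]:
            with open(path, encoding="utf-8") as f:
                original = f.read()
            cleaned = strip_pep8_whitespace(original)
            if cleaned != original:
                with open(path, "w", encoding="utf-8") as f:
                    f.write(cleaned)
                print("fixed", path)

Run over the plugin/seekers/ tree, a script like this would produce the same kind of whitespace-only diff as the commit below.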

File tree: 6 files changed, +55 -57 lines changed


plugin/seekers/Novalermora/service.py  (+12 -12)

@@ -45,7 +45,7 @@
 from urllib.parse import quote # Python 3
 else:
 from urllib import quote # Python 2
-from urllib3 import quote
+from urllib3 import quote


 try:
@@ -62,8 +62,8 @@
 'Upgrade-Insecure-Requests': '1',
 'Connection': 'keep-alive',
 'Accept-Encoding': 'gzip, deflate'} # , deflate'}
-
-s = requests.Session()
+
+s = requests.Session()

 main_url = "http://subs.ath.cx"
 debug_pretext = "subs.ath.cx"
@@ -76,7 +76,7 @@ def get_url(url, referer=None):
 headers = {'User-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0', 'Referer': referer}
 req = urllib.request.Request(url, None, headers)
 response = urllib.request.urlopen(req)
-content = response.read().decode('utf-8')
+content = response.read().decode('utf-8')
 response.close()
 content = content.replace('\n', '')
 return content
@@ -130,7 +130,7 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 #id = re.compile('(.+?.+?)/').findall(id)[-1]
 downloadlink = 'http://subs.ath.cx/subtitles/%s' % (id)
 #id = 'http://www.findsubtitles.eu/getp.php?id=%s' % (id)
-print(downloadlink)
+print(downloadlink)
 if downloadlink:
 log(__name__, "%s Downloadlink: %s " % (debug_pretext, downloadlink))
 viewstate = 0
@@ -146,7 +146,7 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 #my_urlopener.addheader('Referer', url)
 log(__name__, "%s Fetching subtitles using url with referer header '%s' and post parameters '%s'" % (debug_pretext, downloadlink, postparams))
 #response = my_urlopener.open(downloadlink, postparams)
-response = s.get(downloadlink, data=postparams, headers=HDR, verify=False, allow_redirects=True)
+response = s.get(downloadlink, data=postparams, headers=HDR, verify=False, allow_redirects=True)
 print(response.content)
 local_tmp_file = zip_subs
 try:
@@ -185,9 +185,9 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s


 def get_subtitles_list(title, searchstring, languageshort, languagelong, subtitles_list):
-url = '%s/subtitles' % (main_url)
+url = '%s/subtitles' % (main_url)
 title = title.strip()
-
+
 #url = 'https://archive.org/download/iptvworld-1/A/' quote_plus(title)
 d = quote_plus(title)
 d = d.replace('+', '.')
@@ -197,25 +197,25 @@ def get_subtitles_list(title, searchstring, languageshort, languagelong, subtitl
 try:
 log(__name__, "%s Getting url: %s" % (debug_pretext, url))
 content = s.get(url, headers=HDR, verify=False, allow_redirects=True).text
-#print(content)
+#print(content)
 except:
 pass
 log(__name__, "%s Failed to get url:%s" % (debug_pretext, url))
 return
 try:
 log(__name__, "%s Getting '%s' subs ..." % (debug_pretext, languageshort))
 subtitles = re.compile(r'(<td><a href.+?">' + d + '.+?</a></td>)', re.IGNORECASE).findall(content)
-#print(subtitles)
+#print(subtitles)
 except:
 log(__name__, "%s Failed to get subtitles" % (debug_pretext))
 return
 for subtitle in subtitles:
 try:
 filename = re.compile('<td><a href=".+?">(.+?)</a></td>').findall(subtitle)[0]
 filename = filename.strip().replace('.srt', '')
-#print(filename)
+#print(filename)
 id = re.compile('href="(.+?)"').findall(subtitle)[0]
-#print(id)
+#print(id)
 if not (filename == 'Εργαστήρι Υποτίτλων' or filename == 'subs4series'):
 log(__name__, "%s Subtitles found: %s (id = %s)" % (debug_pretext, filename, id))
 subtitles_list.append({'no_files': 1, 'filename': filename, 'sync': True, 'id': id, 'language_flag': 'flags/' + languageshort + '.gif', 'language_name': languagelong})

plugin/seekers/OpenSubtitlesMora/service.py  (+12 -13)

@@ -39,8 +39,8 @@
 root_url = 'https://www.opensubtitles.org/en/search/sublanguageid-ara/uploader-morafbi/idmovie-'
 main_url = "https://www.opensubtitles.org"
 main_download_url = 'https://www.opensubtitles.org/en/subtitleserve/sub/'
-
-s = requests.Session()
+
+s = requests.Session()
 debug_pretext = ""
 ses = requests.Session()

@@ -73,7 +73,7 @@
 # log(__name__, " Failed to get url:%s" % (url))
 # content = None
 # return(content)
-
+

 def getSearchTitle(title, year=None): # new Add
 title = prepare_search_string(title).replace('%26', '&')
@@ -94,7 +94,7 @@ def getSearchTitle(title, year=None): # new Add
 href = root_url + str(movie_id)
 print(("href", href))
 return movie_id
-
+
 except:
 break
 return movie_id
@@ -108,8 +108,8 @@ def find_movie(content, title, year):
 for matches in re.finditer(movie_season_pattern, content, re.IGNORECASE | re.DOTALL):
 print((tuple(matches.groups())))
 found_title = matches.group('title')
-found_title = html.unescape(found_title)
-print(("found_title", found_title))
+found_title = html.unescape(found_title)
+print(("found_title", found_title))
 log(__name__, "Found movie on search page: %s (%s)" % (found_title, matches.group('year')))
 if found_title.lower().find(title.lower()) > -1:
 if matches.group('year') == year:
@@ -129,7 +129,7 @@ def find_tv_show_season(content, tvshow, season):
 for matches in re.finditer(movie_season_pattern, content, re.IGNORECASE | re.DOTALL):
 found_title = matches.group('title')
 found_title = html.unescape(found_title)
-print(("found_title2", found_title))
+print(("found_title2", found_title))
 log(__name__, "Found tv show season on search page: %s" % found_title)
 s = difflib.SequenceMatcher(None, string.lower(found_title + ' ' + matches.group('year')), tvshow.lower())
 all_tvshows.append(matches.groups() + (s.ratio() * int(matches.group('numsubtitles')),))
@@ -149,8 +149,8 @@ def find_tv_show_season(content, tvshow, season):
 url_found = all_tvshows[0][0]
 log(__name__, "Selecting tv show with highest fuzzy string score: %s (score: %s subtitles: %s)" % (
 all_tvshows[0][1], all_tvshows[0][4], all_tvshows[0][3]))
-
-return url_found
+
+return url_found


 def getallsubs(content, allowed_languages, filename="", search_string=""):
@@ -163,7 +163,7 @@ def getallsubs(content, allowed_languages, filename="", search_string=""):
 soup = soup.find('form', method="post").find('table', id="search_results").tbody
 blocks1 = soup.findAll('tr', class_="change even expandable")
 blocks2 = soup.findAll('tr', class_="change odd expandable")
-blocks = blocks1 + blocks2
+blocks = blocks1 + blocks2
 i = 0
 subtitles = []
 if len(blocks) == 0:
@@ -276,7 +276,7 @@ def getallsubs(content, allowed_languages, filename="", search_string=""):
 def prepare_search_string(s):
 #s = s.strip()
 s = re.sub(r'\(\d\d\d\d\)$', '', s) # remove year from title
-
+
 s = quote_plus(s)
 return s

@@ -376,7 +376,7 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 filmid = 0
 postparams = {'__EVENTTARGET': 's$lc$bcr$downloadLink', '__EVENTARGUMENT': '', '__VIEWSTATE': viewstate, '__PREVIOUSPAGE': previouspage, 'subtitleId': subtitleid, 'typeId': typeid, 'filmId': filmid}
 log(__name__, "%s Fetching subtitles using url '%s' with referer header '%s' and post parameters '%s'" % (debug_pretext, downloadlink, url, postparams))
-response = requests.get(downloadlink, verify=False, allow_redirects=True)
+response = requests.get(downloadlink, verify=False, allow_redirects=True)
 local_tmp_file = zip_subs
 try:
 log(__name__, "%s Saving subtitles to '%s'" % (debug_pretext, local_tmp_file))
@@ -410,4 +410,3 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 subs_file = typeid
 log(__name__, "%s Subtitles saved to '%s'" % (debug_pretext, local_tmp_file))
 return packed, language, subs_file # standard output
-

plugin/seekers/PrijevodiOnline/po_utilities.py  (+6 -6)

@@ -68,7 +68,7 @@ def search_subtitles(self, name, tvshow, season, episode, lang, year):
 raw = re.findall('<td class="naziv"><a href="(.*?)" title="{0}">{0}</a></td>'.format(search_string), data)
 url = ''.join(raw)
 url = '{0}{1}'.format(api_url, url)
-
+
 if url != "":
 #IZVADI TOKEN
 response = requests.get(url)
@@ -78,8 +78,8 @@ def search_subtitles(self, name, tvshow, season, episode, lang, year):
 data = ' '.join(data.split())
 raw = re.findall("epizode.key = '(.*?)';", data)
 token = ''.join(raw)
-
-#BROJI SVE SEZONE
+
+#BROJI SVE SEZONE
 broj_sezona = re.findall('<h3 id="sezona-.*?">.*?</h3>', data)
 duzina = str(len(broj_sezona))
 #IZVADI TRAZENU SEZONU U BLOCK. ALI AKO JE ZADNJA SEZONA ONDA MORAS POSTAVITI REGEX DRUGACIJE
@@ -91,7 +91,7 @@ def search_subtitles(self, name, tvshow, season, episode, lang, year):
 if block == "":
 return subtitles_list

-#IZVADI SVE LINKOVE
+#IZVADI SVE LINKOVE
 raw = re.findall('<li class="broj">' + episode + '.</li> <li class="naziv"> <a class="open" rel="(.*?)"', block)
 url = ''.join(raw)
 if url == '':
@@ -104,7 +104,7 @@ def search_subtitles(self, name, tvshow, season, episode, lang, year):
 data = response.content
 if type(data) is bytes:
 data = data.decode("utf-8", "ignore")
-
+
 if response.status_code == requests.codes.ok:
 raw = re.findall('<a href="(.*?)"', data)
 for (url) in raw:
@@ -118,7 +118,7 @@ def search_subtitles(self, name, tvshow, season, episode, lang, year):
 else:
 lang_name = ""
 flag_image = ""
-
+
 format = "srt"

 subtitles_list.append({'filename': release,

plugin/seekers/PrijevodiOnline/service.py  (+1 -1)

@@ -26,4 +26,4 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 response = requests.get(url) #SKINI GA
 open(zip_subs, 'wb').write(response.content)

-return True, language_name, "" # standard output
+return True, language_name, "" # standard output

plugin/seekers/Subscenebest/service.py  (+14 -14)

@@ -38,8 +38,8 @@
 'Referer': 'https://sub-scene.com',
 'Connection': 'keep-alive',
 'Accept-Encoding': 'gzip, deflate'}
-
-s = requests.Session()
+
+s = requests.Session()
 main_url = "https://sub-scene.com"
 debug_pretext = ""
 ses = requests.Session()
@@ -74,7 +74,7 @@ def geturl(url):
 log(__name__, " Failed to get url:%s" % (url))
 content = None
 return (content)
-
+

 def getSearchTitle(title, year=None): # new Add
 url = 'https://sub-scene.com/search?query=%s' % quote_plus(title)
@@ -109,9 +109,9 @@ def getSearchTitle(title, year=None): # new Add
 if "/subscene/" in href:
 print(("href", href))
 return href
-
+
 except:
-break
+break
 return 'https://sub-scene.com/search?query=' + quote_plus(title)


@@ -121,8 +121,8 @@ def find_movie(content, title, year):
 for matches in re.finditer(movie_season_pattern, content, re.IGNORECASE | re.DOTALL):
 print((tuple(matches.groups())))
 found_title = matches.group('title')
-found_title = html.unescape(found_title)
-print(("found_title", found_title))
+found_title = html.unescape(found_title)
+print(("found_title", found_title))
 log(__name__, "Found movie on search page: %s (%s)" % (found_title, matches.group('year')))
 if found_title.lower().find(title.lower()) > -1:
 if matches.group('year') == year:
@@ -142,22 +142,22 @@ def find_tv_show_season(content, tvshow, season):
 for matches in re.finditer(season_pattern, content, re.IGNORECASE | re.DOTALL):
 found_title = matches.group('title')
 #found_title = html.unescape(found_title)
-print(("found_title2", found_title))
+print(("found_title2", found_title))
 log(__name__, "Found tv show season on search page: %s" % found_title)
 url_found = matches.group('link')
-
-return url_found
+
+return url_found


 def getallsubs(content, allowed_languages, filename="", search_string=""):
 soup = BeautifulSoup(content.text, 'html.parser')
 block = soup.find('tbody')
-
+
 # Check if block is None (no movies found)
 if block is None:
 log(__name__, "No movies found in the content.")
 return []
-
+
 movies = block.find_all("tr")
 i = 0
 subtitles = []
@@ -301,7 +301,7 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 log(__name__, "%s Downloadlink: %s " % (debug_pretext, downloadlink))

 response = requests.get(downloadlink, headers=HDR, verify=False, allow_redirects=True)
-
+
 # Sanitize the filename to remove slashes
 sanitized_filename = re.sub(r'[\\/]', '_', zip_subs)
 local_tmp_file = os.path.join(tmp_sub_dir, sanitized_filename)
@@ -353,4 +353,4 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 return packed, language, subs_file # standard output
 else:
 log(__name__, "%s No download link found" % (debug_pretext))
-raise SubtitlesDownloadError(SubtitlesErrors.UNKNOWN_ERROR, "No download link found")
+raise SubtitlesDownloadError(SubtitlesErrors.UNKNOWN_ERROR, "No download link found")

plugin/seekers/Subsource/service.py  (+10 -11)

@@ -51,8 +51,8 @@
 __download = __api + "downloadSub/"
 root_url = "https://subsource.net/subtitles/"
 main_url = "https://subsource.net"
-
-s = requests.Session()
+
+s = requests.Session()
 debug_pretext = ""
 ses = requests.Session()
 # Seasons as strings for searching </div>
@@ -87,7 +87,7 @@ def geturl(url):
 log(__name__, " Failed to get url:%s" % (url))
 content = None
 return (content)
-
+

 def getSearchTitle(title, year=None): # new Add
 url = __api + "searchMovie"
@@ -107,7 +107,7 @@ def getSearchTitle(title, year=None): # new Add
 href = root_url + linkName
 print(("href", href))
 return linkName
-
+
 except:
 break
 return linkName
@@ -121,8 +121,8 @@ def find_movie(content, title, year):
 for matches in re.finditer(movie_season_pattern, content, re.IGNORECASE | re.DOTALL):
 print((tuple(matches.groups())))
 found_title = matches.group('title')
-found_title = html.unescape(found_title)
-print(("found_title", found_title))
+found_title = html.unescape(found_title)
+print(("found_title", found_title))
 log(__name__, "Found movie on search page: %s (%s)" % (found_title, matches.group('year')))
 if found_title.lower().find(title.lower()) > -1:
 if matches.group('year') == year:
@@ -142,7 +142,7 @@ def find_tv_show_season(content, tvshow, season):
 for matches in re.finditer(movie_season_pattern, content, re.IGNORECASE | re.DOTALL):
 found_title = matches.group('title')
 found_title = html.unescape(found_title)
-print(("found_title2", found_title))
+print(("found_title2", found_title))
 log(__name__, "Found tv show season on search page: %s" % found_title)
 s = difflib.SequenceMatcher(None, string.lower(found_title + ' ' + matches.group('year')), tvshow.lower())
 all_tvshows.append(matches.groups() + (s.ratio() * int(matches.group('numsubtitles')),))
@@ -162,8 +162,8 @@ def find_tv_show_season(content, tvshow, season):
 url_found = all_tvshows[0][0]
 log(__name__, "Selecting tv show with highest fuzzy string score: %s (score: %s subtitles: %s)" % (
 all_tvshows[0][1], all_tvshows[0][4], all_tvshows[0][3]))
-
-return url_found
+
+return url_found


 def getallsubs(content, allowed_languages, filename="", search_string=""):
@@ -339,7 +339,7 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 #my_urlopener.addheader('Referer', url)
 log(__name__, "%s Fetching subtitles using url '%s' with referer header '%s' and post parameters '%s'" % (debug_pretext, downloadlink, main_url, postparams))
 #response = my_urlopener.open(downloadlink, postparams)
-response = requests.get(downloadlink, data=postparams, headers=HDRDL, verify=False, allow_redirects=True)
+response = requests.get(downloadlink, data=postparams, headers=HDRDL, verify=False, allow_redirects=True)
 local_tmp_file = zip_subs
 try:
 log(__name__, "%s Saving subtitles to '%s'" % (debug_pretext, local_tmp_file))
@@ -374,4 +374,3 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
 subs_file = typeid
 log(__name__, "%s Subtitles saved to '%s'" % (debug_pretext, local_tmp_file))
 return packed, language, subs_file # standard output
-
