3
3
from __future__ import print_function
4
4
5
5
6
- from six .moves .urllib .request import FancyURLopener
7
- from six .moves .urllib .parse import quote_plus , urlencode
8
- import requests
6
+ import os
7
+ import os .path
9
8
import re
9
+ import requests
10
10
import warnings
11
11
from requests .packages .urllib3 .exceptions import InsecureRequestWarning
12
12
warnings .simplefilter ('ignore' , InsecureRequestWarning )
13
- import os
14
- import os .path
15
- from six .moves .urllib .request import Request , urlopen
16
13
from .SubdlUtilities import get_language_info
17
- from ..utilities import languageTranslate , log , getFileSize
14
+ from ..utilities import log
18
15
19
- import re
20
16
from ..seeker import SubtitlesDownloadError , SubtitlesErrors
21
17
22
18
HDR = {'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0' ,
32
28
s = requests .Session ()
33
29
34
30
35
- main_url = "https://www. subdl.com"
31
+ main_url = "https://subdl.com"
36
32
debug_pretext = "subdl.com"
37
33
38
34
43
39
'Ukranian' : 'Ukrainian' ,
44
40
'Farsi/Persian' : 'Persian'
45
41
}
42
+ headers = {'User-agent' : 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0' }
46
43
47
44
48
45
def get_url(url, referer=None):
    """Fetch *url* and return the response body as text.

    A fixed Firefox User-Agent header is always sent; when *referer* is
    given it is additionally sent as the Referer header.

    :param url: absolute URL to fetch.
    :param referer: optional Referer header value (None to omit it).
    :returns: decoded response body (``requests`` handles charset decoding).
    """
    if referer is None:
        headers = {'User-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0'}
    else:
        headers = {'User-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0', 'Referer': referer}
    # BUG FIX: the previous call `requests.get(url, None, headers)` passed the
    # headers dict as a third positional argument, but requests.get() only
    # accepts (url, params) positionally -> TypeError. Headers must be passed
    # as a keyword argument.
    content = requests.get(url, headers=headers).text
    return content
59
52
60
53
61
54
def get_url2 (url , referer = None ):
62
- # from io import BytesIO
63
- # from zipfile import ZipFile
55
+ from urllib .request import urlopen , Request
64
56
req = Request (url )
65
57
response = urlopen (req )
66
58
content = response .read ().decode ('utf-8' )
@@ -87,7 +79,7 @@ def search_subtitles(file_original_path, title, tvshow, year, season, episode, s
87
79
elif len (tvshow ) > 0 : # TVShow
88
80
searchstring = "%s S%#02dE%#02d" % (tvshow , int (season ), int (episode ))
89
81
else :
90
- searchstring = title
82
+ searchstring = title . replace ( ' ' , '%20' ). lower ()
91
83
log (__name__ , "%s Search string = %s" % (debug_pretext , searchstring ))
92
84
get_subtitles_list (searchstring , title , language_info2 , language_info1 , subtitles_list )
93
85
return subtitles_list , "" , msg # standard output
@@ -99,23 +91,23 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
99
91
id = subtitles_list [pos ]["id" ]
100
92
url = 'https://dl.subdl.com/subtitle/%s' % (id )
101
93
downloadlink = 'https://dl.subdl.com/subtitle/%s' % (id )
102
- # print(downloadlink)
94
+ print (downloadlink )
103
95
if downloadlink :
104
96
log (__name__ , "%s Downloadlink: %s " % (debug_pretext , downloadlink ))
105
97
viewstate = 0
106
98
previouspage = 0
107
99
subtitleid = 0
108
100
typeid = "zip"
109
101
filmid = 0
110
- # postparams = { '__EVENTTARGET': 's$lc$bcr$downloadLink', '__EVENTARGUMENT': '' , '__VIEWSTATE': viewstate, '__PREVIOUSPAGE': previouspage, 'subtitleId': subtitleid, 'typeId': typeid, 'filmId': filmid}
111
- postparams = urlencode ({'__EVENTTARGET' : 's$lc$bcr$downloadLink' , '__EVENTARGUMENT' : '' , '__VIEWSTATE' : viewstate , '__PREVIOUSPAGE' : previouspage , 'subtitleId' : subtitleid , 'typeId' : typeid , 'filmId' : filmid })
102
+ postparams = {'__EVENTTARGET' : 's$lc$bcr$downloadLink' , '__EVENTARGUMENT' : '' , '__VIEWSTATE' : viewstate , '__PREVIOUSPAGE' : previouspage , 'subtitleId' : subtitleid , 'typeId' : typeid , 'filmId' : filmid }
103
+ # postparams = urllib3.request. urlencode({ '__EVENTTARGET': 's$lc$bcr$downloadLink', '__EVENTARGUMENT': '' , '__VIEWSTATE': viewstate, '__PREVIOUSPAGE': previouspage, 'subtitleId': subtitleid, 'typeId': typeid, 'filmId': filmid})
112
104
#class MyOpener(urllib.FancyURLopener):
113
105
#version = 'User-Agent=Mozilla/5.0 (Windows NT 6.1; rv:109.0) Gecko/20100101 Firefox/115.0'
114
106
#my_urlopener = MyOpener()
115
107
#my_urlopener.addheader('Referer', url)
116
108
log (__name__ , "%s Fetching subtitles using url '%s' with referer header '%s' and post parameters '%s'" % (debug_pretext , downloadlink , url , postparams ))
117
109
#response = my_urlopener.open(downloadlink, postparams)
118
- response = s .get (downloadlink , data = postparams , headers = HDR , verify = False , allow_redirects = True )
110
+ response = requests .get (downloadlink , data = postparams , headers = HDR , verify = False , allow_redirects = True )
119
111
#print(response.content)
120
112
local_tmp_file = zip_subs
121
113
try :
@@ -155,33 +147,36 @@ def download_subtitles(subtitles_list, pos, zip_subs, tmp_sub_dir, sub_folder, s
155
147
156
148
def get_subtitles_list (searchstring , title , languageshort , languagelong , subtitles_list ):
157
149
s = languagelong .lower ()
158
- url = '%s/search?query=%s' % (main_url , quote_plus (searchstring ))
150
+ url = '%s/search/%s' % (main_url , searchstring )
151
+ print (("url" , url ))
159
152
160
153
try :
161
154
log (__name__ , "%s Getting url: %s" % (debug_pretext , url ))
162
- content = get_url (url , referer = main_url )
163
- subtitles = re .compile ('(href="/subtitle/.+?<span)' ).findall (content )
155
+ content = requests .get (url , headers ).text
156
+ #print(("content", content))
157
+ subtitles = re .compile ('(href="/subtitle/.*?"><div)' ).findall (content )
158
+ #print(subtitles)
164
159
subtitles = " " .join (subtitles )
165
- regx = '<a.+ href="(.+ ?)">' + title + r'\s?< '
160
+ regx = 'href="(.* ?)"><div '
166
161
downloadlink = re .findall (regx , subtitles , re .M | re .I )[0 ]
167
162
#print(downloadlink)
168
163
link = '%s%s/%s' % (main_url , downloadlink , s )
169
- content = get_url (link , referer = main_url )
170
- print (content )
164
+ content = requests . get (link , headers ). text
165
+ # print((" content", content) )
171
166
subtitles = re .compile ('(language":"' + s + '".+?},)' ).findall (content )
172
- #print(subtitles)
167
+ #print((" subtitles", subtitles) )
173
168
except :
174
169
log (__name__ , "%s Failed to get subtitles" % (debug_pretext ))
175
170
return
176
171
for subtitle in subtitles :
177
172
try :
178
173
filename = re .compile ('"title":"(.+?)"' ).findall (subtitle )[0 ]
179
174
filename = filename .strip ()
180
- # print(filename)
175
+ print (filename )
181
176
182
177
try :
183
178
id = re .compile ('"link":"(.+?)"' ).findall (subtitle )[0 ]
184
- # print(id)
179
+ print (id )
185
180
except :
186
181
pass
187
182
0 commit comments