Mirror of https://github.com/correl/Transmission-XBMC.git
Merge pull request #33 from 4hr1m4n/master
Minor fixes; adds 1337x, YTS, LimeTorrents, and EZTV to the search.
Commit 10f319deb0
9 changed files with 250 additions and 12 deletions
@@ -97,6 +97,22 @@ msgctxt "#32204"
 msgid "Search KickassTorrents"
 msgstr ""
 
+msgctxt "#32205"
+msgid "Search 1337x"
+msgstr ""
+
+msgctxt "#32206"
+msgid "Search YTS"
+msgstr ""
+
+msgctxt "#32207"
+msgid "Search LimeTorrents"
+msgstr ""
+
+msgctxt "#32208"
+msgid "Search EZTV"
+msgstr ""
+
 msgctxt "#32290"
 msgid "Searching..."
 msgstr ""

@@ -111,7 +111,23 @@ msgctxt "#32204"
 msgid "Search KickassTorrents"
 msgstr ""
 
-#empty strings from id 32205 to 32289
+msgctxt "#32205"
+msgid "Search 1337x"
+msgstr ""
+
+msgctxt "#32206"
+msgid "Search YTS"
+msgstr ""
+
+msgctxt "#32207"
+msgid "Search LimeTorrents"
+msgstr ""
+
+msgctxt "#32208"
+msgid "Search EZTV"
+msgstr ""
+
+#empty strings from id 32209 to 32289
 
 msgctxt "#32290"
 msgid "Searching..."

@@ -110,7 +110,23 @@ msgctxt "#32204"
 msgid "Search KickassTorrents"
 msgstr "KickassTorrents durchsuchen"
 
-# empty strings from id 205 to 289
+msgctxt "#32205"
+msgid "Search 1337x"
+msgstr "1337x durchsuchen"
+
+msgctxt "#32206"
+msgid "Search YTS"
+msgstr "YTS durchsuchen"
+
+msgctxt "#32207"
+msgid "Search LimeTorrents"
+msgstr "LimeTorrents durchsuchen"
+
+msgctxt "#32208"
+msgid "Search EZTV"
+msgstr "EZTV durchsuchen"
+
+# empty strings from id 209 to 289
 msgctxt "#32290"
 msgid "Searching..."
 msgstr "Suche läuft..."

@@ -97,6 +97,22 @@ msgctxt "#32204"
 msgid "Search KickassTorrents"
 msgstr ""
 
+msgctxt "#32205"
+msgid "Search 1337x"
+msgstr ""
+
+msgctxt "#32206"
+msgid "Search YTS"
+msgstr ""
+
+msgctxt "#32207"
+msgid "Search LimeTorrents"
+msgstr ""
+
+msgctxt "#32208"
+msgid "Search EZTV"
+msgstr ""
+
 msgctxt "#32290"
 msgid "Searching..."
 msgstr ""

@@ -97,6 +97,22 @@ msgctxt "#32204"
 msgid "Search KickassTorrents"
 msgstr ""
 
+msgctxt "#32205"
+msgid "Search 1337x"
+msgstr ""
+
+msgctxt "#32206"
+msgid "Search YTS"
+msgstr ""
+
+msgctxt "#32207"
+msgid "Search LimeTorrents"
+msgstr ""
+
+msgctxt "#32208"
+msgid "Search EZTV"
+msgstr ""
+
 msgctxt "#32290"
 msgid "Searching..."
 msgstr ""

@@ -97,6 +97,22 @@ msgctxt "#32204"
 msgid "Search KickassTorrents"
 msgstr "Искать на KickassTorrents"
 
+msgctxt "#32205"
+msgid "Search 1337x"
+msgstr "Искать на 1337x"
+
+msgctxt "#32206"
+msgid "Search YTS"
+msgstr "Искать на YTS"
+
+msgctxt "#32207"
+msgid "Search LimeTorrents"
+msgstr "Искать на LimeTorrents"
+
+msgctxt "#32208"
+msgid "Search EZTV"
+msgstr "Искать на EZTV"
+
 msgctxt "#32290"
 msgid "Searching..."
 msgstr "Поиск..."

@@ -97,6 +97,22 @@ msgctxt "#32204"
 msgid "Search KickassTorrents"
 msgstr "Buscar en KickassTorrents"
 
+msgctxt "#32205"
+msgid "Search 1337x"
+msgstr "Buscar en 1337x"
+
+msgctxt "#32206"
+msgid "Search YTS"
+msgstr "Buscar en YTS"
+
+msgctxt "#32207"
+msgid "Search LimeTorrents"
+msgstr "Buscar en LimeTorrents"
+
+msgctxt "#32208"
+msgid "Search EZTV"
+msgstr "Buscar en EZTV"
+
 msgctxt "#32290"
 msgid "Searching..."
 msgstr "Buscando..."

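The seven language files above only carry the new labels; at runtime the add-on resolves the IDs through Kodi's localization API. A minimal sketch of that lookup, assuming the usual pattern of aliasing xbmcaddon.Addon().getLocalizedString to _ (the alias is an assumption; only the IDs 32205-32208 come from the diffs above):

import xbmcaddon

# Kodi picks the .po file matching the active interface language and falls
# back to the English msgid when a language's msgstr is still empty, as it is
# in several of the files above.
_ = xbmcaddon.Addon().getLocalizedString
for string_id in (32205, 32206, 32207, 32208):
    print _(string_id)   # e.g. "Search 1337x", or "1337x durchsuchen" on a German install
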
@@ -114,6 +114,10 @@ class TransmissionGUI(xbmcgui.WindowXMLDialog):
             (_(32202), search.TPB),
             (_(32203), search.Mininova),
             (_(32204), search.Kickass),
+            (_(32205), search.L337x),
+            (_(32206), search.YTS),
+            (_(32207), search.Lime),
+            (_(32208), search.EZTV),
         ]
         selected = xbmcgui.Dialog().select(_(32000), [i[0] for i in engines])
         if selected < 0:

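For reference, each entry in engines pairs a localized label with a class from the search module, and xbmcgui.Dialog().select returns the index of the chosen label (-1 if the dialog is cancelled). Outside Kodi the same mapping can be exercised directly; a rough sketch, assuming the module is importable as search, with the labels hard-coded because the _() helper is not available here:

import search

engines = [
    ('Search 1337x', search.L337x),
    ('Search YTS', search.YTS),
    ('Search LimeTorrents', search.Lime),
    ('Search EZTV', search.EZTV),
]
selected = 0  # stands in for the index Dialog().select() would return
label, engine_class = engines[selected]
results = engine_class().search('big buck bunny')   # list of dicts: url, name, seeds, leechers
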
@@ -1,6 +1,8 @@
+import sys
+import re
 import socket
-from urllib2 import urlopen, URLError
+from urllib2 import urlopen, Request, URLError, HTTPError
 from urllib import quote, quote_plus, urlencode
 from BeautifulSoup import BeautifulSoup, BeautifulStoneSoup
 
 socket.setdefaulttimeout(15)

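The wider urllib2 import and the module-level timeout set the pattern the new engines rely on: requests carry an explicit User-Agent, HTTP status errors are distinguishable from connection problems, and no call can hang longer than 15 seconds. A stand-alone sketch of that pattern (the URL is a placeholder):

import socket
from urllib2 import urlopen, Request, URLError, HTTPError

socket.setdefaulttimeout(15)          # applies to every urlopen() below

req = Request('http://example.com/search')
req.add_header('User-Agent', 'Mozilla/5.0')
try:
    body = urlopen(req).read()
except HTTPError as e:                # server answered with an error status
    print 'HTTP error %d' % e.code
except URLError as e:                 # DNS failure, refused connection, or timeout
    print 'connection failed: %s' % e.reason
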
@@ -16,7 +18,7 @@ class Mininova(Search):
         self.search_uri = 'http://www.mininova.org/rss/%s'
     def search(self, terms):
         torrents = []
-        url = self.search_uri % '+'.join(terms.split(' '))
+        url = self.search_uri % quote_plus(terms)
         f = urlopen(url)
         soup = BeautifulStoneSoup(f.read())
         for item in soup.findAll('item'):

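Replacing '+'.join(terms.split(' ')) with quote_plus(terms) changes more than the join: the old form only swapped spaces for plus signs and left reserved characters unescaped. A quick comparison (example input only):

from urllib import quote_plus

terms = 'foo & bar #1'
print '+'.join(terms.split(' '))   # foo+&+bar+#1     -> '&' and '#' survive and break the query
print quote_plus(terms)            # foo+%26+bar+%231 -> fully URL-encoded
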
@@ -30,14 +32,17 @@ class Mininova(Search):
         return torrents
 class TPB(Search):
     def __init__(self):
-        self.search_uris = ['http://thepiratebay.se/search/%s/',
+        self.user_agent = 'Mozilla/5.0'
+        self.search_uris = ['https://thepiratebay.se/search/%s/',
                             'http://pirateproxy.net/search/%s/']
     def search(self, terms):
         torrents = []
         f = None
-        for url in [u % '+'.join(terms.split(' ')) for u in self.search_uris]:
+        for url in [u % quote(terms) for u in self.search_uris]:
+            req = Request(url)
+            req.add_header('User-Agent', self.user_agent)
             try:
-                f = urlopen(url)
+                f = urlopen(req)
                 break
             except URLError:
                 continue

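The TPB change layers two things: a browser-like User-Agent on the request, and a mirror list that is tried in order until one responds. Note it uses quote rather than quote_plus because the term lands in a path segment, where spaces become %20. A condensed sketch of the fallback outside the class (mirror URLs copied from the hunk above; if every mirror raises URLError, f simply stays None):

from urllib import quote
from urllib2 import urlopen, Request, URLError

search_uris = ['https://thepiratebay.se/search/%s/',
               'http://pirateproxy.net/search/%s/']
f = None
for url in [u % quote('big buck bunny') for u in search_uris]:
    req = Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0')
    try:
        f = urlopen(req)
        break                     # first mirror that answers wins
    except URLError:
        continue                  # try the next mirror
if f is None:
    print 'all mirrors failed'
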
@@ -63,18 +68,135 @@ class Kickass(Search):
         self.search_uri = 'http://kickass.to/usearch/%s/?field=seeders&sorder=desc&rss=1'
     def search(self, terms):
         torrents = []
-        url = self.search_uri % '+'.join(terms.split(' '))
+        url = self.search_uri % quote_plus(terms)
+        try:
             f = urlopen(url)
             soup = BeautifulStoneSoup(f.read())
             for item in soup.findAll('item'):
                 torrents.append({
                     'url': item.enclosure['url'],
                     'name': item.title.text,
                     'seeds': int(item.find('torrent:seeds').text),
                     'leechers': int(item.find('torrent:peers').text),
                     })
+        except HTTPError as e:
+            if e.code == 404:
+                pass
+            else:
+                raise
         return torrents
+class L337x(Search):
+    def __init__(self):
+        self.uri_prefix = 'http://1337x.to'
+        self.search_uri = self.uri_prefix + '/sort-search/%s/seeders/desc/1/'
+    def search(self, terms):
+        torrents = []
+        url = self.search_uri % quote_plus(terms)
+        f = urlopen(url)
+        soup = BeautifulStoneSoup(f.read())
+        for details in soup.findAll('a', {'href': re.compile('^/torrent/')}):
+            div = details.findNext('div')
+            seeds = int(div.text)
+            div = div.findNext('div')
+            f_link = urlopen(self.uri_prefix + details['href'])
+            soup_link = BeautifulStoneSoup(f_link.read())
+            link = soup_link.find('a', {'href': re.compile('^magnet:')})
+            if not link:
+                continue
+            torrents.append({
+                'url': link['href'],
+                'name': details.text,
+                'seeds': seeds,
+                'leechers': int(div.text),
+                })
+        return torrents
+class YTS(Search):
+    def __init__(self):
+        self.search_uri = 'http://yts.to/rss/%s/all/all/0'
+    def search(self, terms):
+        torrents = []
+        url = self.search_uri % quote(terms, '')
+        f = urlopen(url)
+        soup = BeautifulStoneSoup(f.read())
+        for item in soup.findAll('item'):
+            item_quality = item.link.text.rpartition('_')[2]
+            item_f = urlopen(item.link.text)
+            item_soup = BeautifulStoneSoup(item_f.read())
+            qualities = [s.text.strip() for s in
+                         item_soup.findAll('span', {'class': re.compile('^tech-quality')})]
+            q_index = qualities.index(item_quality)
+            span = item_soup.findAll('span', {'title': 'Peers and Seeds'})[q_index]
+            ps_pos = len(span.parent.contents) - 1
+            ps = span.parent.contents[ps_pos].split('/')
+            torrents.append({
+                'url': item.enclosure['url'],
+                'name': item.title.text,
+                'seeds': int(ps[1]),
+                'leechers': int(ps[0])
+                })
+        return torrents
+class Lime(Search):
+    def __init__(self):
+        self.search_uri = 'https://www.limetorrents.cc/searchrss/%s/'
+    def search(self, terms):
+        torrents = []
+        url = self.search_uri % quote(terms)
+        f = urlopen(url)
+        soup = BeautifulStoneSoup(f.read())
+        for item in soup.findAll('item'):
+            (seeds, leechers) = re.findall('Seeds: (\d+) , Leechers (\d+)', item.description.text)[0]
+            torrents.append({
+                'url': item.enclosure['url'],
+                'name': item.title.text,
+                'seeds': int(seeds),
+                'leechers': int(leechers)
+                })
+        return torrents
+class EZTV(Search):
+    def __init__(self):
+        self.user_agent = 'Mozilla/5.0'
+        self.uri_prefix = 'https://eztv.ch'
+        self.search_uri = self.uri_prefix + '/search/'
+    def search(self, terms):
+        torrents = []
+        data = {'SearchString': '', 'SearchString1': terms, 'search': 'Search'}
+        req = Request(self.search_uri, urlencode(data))
+        req.add_header('User-Agent', self.user_agent)
+        f = urlopen(req)
+        soup = BeautifulStoneSoup(f.read())
+        for (c, item) in enumerate(soup.findAll('a', {'class': 'magnet'})):
+            if c == 30: break
+            info = item.findPrevious('a')
+            link = self.uri_prefix + info['href']
+            item_req = Request(link)
+            item_req.add_header('User-Agent', self.user_agent)
+            item_f = urlopen(item_req)
+            item_soup = BeautifulStoneSoup(item_f.read())
+            sp = item_soup.findAll('span', {'class': re.compile('^stat_')})
+            if sp:
+                sp = [int(i.text.replace(',', '')) for i in sp]
+            else:
+                sp = [0, 0]
+            torrents.append({
+                'url': item['href'],
+                'name': info.text,
+                'seeds': sp[0],
+                'leechers': sp[1]
+                })
+        return torrents
 
 if __name__ == '__main__':
-    s = TPB()
-    results = s.search('zettai')
+    sites = [Mininova(), TPB(), Kickass(), L337x(), YTS(), Lime(), EZTV()]
+    terms = 'transmission'
+    if len(sys.argv) > 1:
+        terms = sys.argv[1]
+    print 'Searching for "' + terms + '"'
+    for site in sites:
+        print site.__class__.__name__.center(79, '=')
+        torrents = site.search(terms)
+        print 'Total found = ' + str(len(torrents))
+        for counter, file in enumerate(torrents):
+            print '[{:3},{:3}] {:33} "{:33}"'.format(file['seeds'], file['leechers'],
+                                                     file['name'].encode('ascii', 'replace')[:33],
+                                                     file['url'][:33])
+            if counter == 9: break

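The new __main__ block turns the module into a small command-line smoke test: it searches every engine for a term (default 'transmission', or the first argument) and prints up to ten results per site with seed and leecher counts. It needs a Python 2 interpreter and the old BeautifulSoup 3 package, and results depend on which of these sites still respond. A minimal programmatic use of a single engine might look like this (the module name search is an assumption):

# command line:  python search.py "big buck bunny"
from search import Lime

for t in Lime().search('big buck bunny')[:3]:
    print '%4d seeds, %4d leechers  %s' % (t['seeds'], t['leechers'],
                                           t['name'].encode('ascii', 'replace'))
    print '    ' + t['url']
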