import re
import socket
from urllib2 import urlopen

from BeautifulSoup import BeautifulSoup, BeautifulStoneSoup

# Fail fast on unresponsive trackers instead of hanging indefinitely.
socket.setdefaulttimeout(15)


class Search:
    """Interface for torrent search backends; subclasses override both methods."""

    def __init__(self):
        raise NotImplementedError

    def search(self, terms):
        raise NotImplementedError
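
# Each backend's search() returns a list of dicts with the keys
# 'url', 'name', 'seeds' and 'leechers'.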

class Mininova(Search):
    def __init__(self):
        self.search_uri = 'http://www.mininova.org/rss/%s'

    def search(self, terms):
        torrents = []
        url = self.search_uri % '+'.join(terms.split(' '))
        f = urlopen(url)
        soup = BeautifulStoneSoup(f.read())
        for item in soup.findAll('item'):
            # Seed/leecher counts only appear in the item description,
            # e.g. "Ratio: 12 seeds, 34 leechers".
            (seeds, leechers) = re.findall(r'Ratio: (\d+) seeds, (\d+) leechers', item.description.text)[0]
            torrents.append({
                'url': item.enclosure['url'],
                'name': item.title.text,
                'seeds': int(seeds),
                'leechers': int(leechers),
            })
        return torrents

class TPB(Search):
    def __init__(self):
        self.search_uri = 'http://thepiratebay.se/search/%s/'

    def search(self, terms):
        torrents = []
        url = self.search_uri % '+'.join(terms.split(' '))
        f = urlopen(url)
        soup = BeautifulSoup(f.read())
        # Each result row has a details link, followed by the magnet link and
        # the seed/leecher cells, which findNext() walks in document order.
        for details in soup.findAll('a', {'class': 'detLink'}):
            name = details.text
            url = details.findNext('a', {'href': re.compile('^magnet:')})['href']
            td = details.findNext('td')
            seeds = int(td.text)
            td = td.findNext('td')
            leechers = int(td.text)
            torrents.append({
                'url': url,
                'name': name,
                'seeds': seeds,
                'leechers': leechers,
            })
        return torrents

class Kickass(Search):
    def __init__(self):
        self.search_uri = 'http://kickass.to/usearch/%s/?field=seeders&sorder=desc&rss=1'

    def search(self, terms):
        torrents = []
        url = self.search_uri % '+'.join(terms.split(' '))
        f = urlopen(url)
        soup = BeautifulStoneSoup(f.read())
        for item in soup.findAll('item'):
            # Kickass publishes counts in namespaced <torrent:seeds> and
            # <torrent:peers> RSS elements.
            torrents.append({
                'url': item.enclosure['url'],
                'name': item.title.text,
                'seeds': int(item.find('torrent:seeds').text),
                'leechers': int(item.find('torrent:peers').text),
            })
        return torrents


if __name__ == '__main__':
    s = TPB()
    results = s.search('zettai')
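    # Example: print each hit's name, seed/leecher counts and link.
    for t in results:
        print '%s  seeds=%d  leechers=%d  %s' % (t['name'], t['seeds'], t['leechers'], t['url'])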