Class: MechanizeManager

Inherits:
Object
Defined in:
lib/manager/mechanize_manager.rb

Overview

Manager responsible for scraping webpages.

Defined Under Namespace

Modules: Agent
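
The Agent module wraps the page fetching used by the search methods below; its implementation is not shown on this page. A minimal sketch of what Agent.get_web_page might look like, assuming a plain Mechanize client with a browser-like user-agent alias (the alias and all details here are assumptions, not the actual source):

require 'mechanize'

# Hypothetical sketch only; the real Agent module is defined in
# lib/manager/mechanize_manager.rb and may differ.
module Agent
  def self.get_web_page(url)
    agent = Mechanize.new
    agent.user_agent_alias = 'Mac Safari' # assumption: browser-like UA
    agent.get(url)                        # returns a Mechanize::Page
  end
end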

Instance Method Summary

Constructor Details

#initialize ⇒ MechanizeManager

Returns a new instance of MechanizeManager.



# File 'lib/manager/mechanize_manager.rb', line 12

def initialize
end

Instance Method Details

#extra_torrent_cloudflare_ddos_breaker(url) ⇒ Object



# File 'lib/manager/mechanize_manager.rb', line 83

def extra_torrent_cloudflare_ddos_breaker(url)
  # Fetch the challenge page, persisting cookies so the clearance cookie sticks.
  response = Typhoeus::Request.get(url, :cookiefile => '.typhoeus_cookies', :cookiejar => '.typhoeus_cookies')
  body = response.response_body
  # Pull the challenge token and the arithmetic expression out of the page's JavaScript.
  challenge = body.match(%r{name="jschl_vc"\s*value="([a-zA-Z0-9]+)"/\>}).captures[0]
  math = body.match(/a\.value\s*=\s*(.+?\d?);/).captures[0]
  domain = url.split('/')[2]
  # Cloudflare expects the evaluated expression plus the length of the hostname.
  answer = eval(math) + domain.length
  asd = '/cdn-cgi/l/chk_jschl?jschl_vc='
  answer_url = domain + asd + "#{challenge}&jschl_answer=#{answer}"
  puts answer_url
  # Submit the answer and follow the redirect back to the requested page.
  html = Typhoeus.get(answer_url, :followlocation => true)
  body = html.response_body
  puts 'body' + body
end
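
The answer mirrors the old Cloudflare IUAM check: the arithmetic embedded in the challenge page's JavaScript is evaluated and the length of the request hostname is added. A worked illustration (the expression and domain are invented for this example):

# Illustration only; the expression and domain are made up.
math   = '10 * 2 + 5'        # captured from "a.value = ...;"
domain = 'extratorrent.cc'   # url.split('/')[2]
answer = eval(math) + domain.length
# => 25 + 15 = 40, submitted as jschl_answer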

#search_extratorrent(search_term) ⇒ Object



# File 'lib/manager/mechanize_manager.rb', line 68

def search_extratorrent(search_term)
  white_space = '+'
  baseurl = ExtratorrentParser::Parser::BASEURL
  extratorrent_url = baseurl + '/search/?search='
  url = extratorrent_url + search_term.gsub(' ', white_space)

  # result = extra_torrent_cloudflare_ddos_breaker(url)
  begin
    page = Agent.get_web_page(url)
    ExtratorrentParser.new(page).main_divs
  rescue
    []
  end
end
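
A hypothetical call, assuming ExtratorrentParser#main_divs returns an array of parsed results (an empty array is returned when the request or parsing fails). The other search_* methods below follow the same pattern:

manager = MechanizeManager.new
results = manager.search_extratorrent('ubuntu 16.04') # illustrative query
results.each { |result| puts result }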

#search_isohunt(search_term) ⇒ Object



# File 'lib/manager/mechanize_manager.rb', line 54

def search_isohunt(search_term)
  white_space = '%20'
  baseurl = IsohuntParser::Parser::BASEURL
  isohunt_url = baseurl + '/torrents/?ihq='
  url = isohunt_url + search_term.gsub(' ', white_space)

  begin
    page = Agent.get_web_page(url)
    IsohuntParser.new(page).main_divs
  rescue
    []
  end
end

#search_kickass(search_term) ⇒ Object



# File 'lib/manager/mechanize_manager.rb', line 26

def search_kickass(search_term)
  white_space = '%20'
  baseurl = KickassParser::Parser::BASEURL
  kickass_url = baseurl + '/usearch/'
  url = kickass_url + search_term.gsub(' ', white_space)

  begin
    page = Agent.get_web_page(url)
    KickassParser.new(page).main_divs
  rescue
    []
  end
end

#search_piratebay(search_term) ⇒ Object



# File 'lib/manager/mechanize_manager.rb', line 40

def search_piratebay(search_term)
  white_space = '%20'
  baseurl = PirateBayParser::Parser::BASEURL
  pirate_url = baseurl + '/search/'
  url = pirate_url + search_term.gsub(' ', white_space)

  begin
    page = Agent.get_web_page(url)
    PirateBayParser.new(page).main_divs
  rescue
    []
  end
end