Class: Wiki2Go::PublicWikiConfig

Inherits:
Config
  • Object
show all
Defined in:
lib/Wiki2Go/PublicWikiConfig.rb

Overview

Base class for public wiki

Direct Known Subclasses

LocalConfig

Instance Attribute Summary

Attributes inherited from Config

#allow_dynamic_pages, #amazon_affiliate, #blacklist_when_no_checksum, #blog_style, #checksum_required, #checksum_salt, #debug, #default_page, #default_web, #delete_spam, #editor, #generate_html, #group, #maximum_urls, #multi_wiki, #pages_in_recent_changes, #pages_in_rss, #port, #root_directory, #server, #site_admin, #site_directory, #subsite, #user

Instance Method Summary collapse

Methods inherited from Config

#accept_edit?, #add_processor, #banned_urls, #banned_users, #chonqed_urls, #close, #commit_to_repository, #default_wiki, #enable_dot_graphics, #enable_syntax_highlighting, #errorlog, #greylist, #instant_commit_to_repository, #log, #logfile, #logger, #preprocess, #redirect_to_html?, #save, #static_web, #storage, #update_from_repository, #use_repository

Constructor Details

#initialize(directory) ⇒ PublicWikiConfig

Initialize with root directory of wiki By default, generates HTML and filters SPAM


14
15
16
17
18
19
# File 'lib/Wiki2Go/PublicWikiConfig.rb', line 14

# Initialize the public wiki configuration rooted at +directory+.
#
# By default a public wiki generates static HTML and deletes spam;
# a SpamFilter bound to this config is created last, after the flags
# are set (it receives +self+ and may read configuration state).
#
# @param directory [String] root directory of the wiki
def initialize(directory)
  super(directory)
  @generate_html = true 
  @delete_spam   = true
  @spamfilter = Wiki2Go::SpamFilter.new(self)
end

Instance Method Details

#accept_page?(web, content) ⇒ Boolean

Accept a page save if

* The edit is by an authenticated user
* OR the user is not on the blacklist, none of the URLs added by the edit are on the blacklist, and no more than the configured maximum number of URLs is added
* AND no hidden style in tags    
* AND page not erased
* AND Akismet (if configured) accepts the page

Returns:

  • (Boolean)

45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
# File 'lib/Wiki2Go/PublicWikiConfig.rb', line 45

# Decide whether a page save should be stored.
#
# Authenticated (secure) sessions are accepted unconditionally.
# All other edits pass through a cascade of spam checks; each failing
# check logs the incident, usually blacklists or greylists the author,
# stalls the response (tarpit) and rejects the save.
#
# @param web the wiki/web object for the current request (supplies
#   user, web name, current page, url and alias)
# @param content [String] the proposed new page content
# @return [Boolean] true when the save is accepted
def accept_page?(web,content)    

  # Authenticated users bypass all spam filtering.
  return true if web.secure?
  
  author   = web.user
  # Qualify the page name with the web name when inside a named sub-web.
  pagename = web.name.length > 0 ? "#{web.name}/#{web.current_page}" : web.current_page 
  
  # --- Content-only checks (no comparison with the stored page) ---
  if @spamfilter.hidden_text_in(content) then 
    # Hidden CSS/style text is a classic link-spam trick: ban the author.
    blacklist_user(author)
    
    log(content)
    errorlog("User used hidden style in tags of '#{pagename}': Blacklisted user #{author}")
    
    tarpit
    return false
  elsif @spamfilter.cleared_page?(content) then
    # Page blanking: treat as vandalism — greylist the author (no urls).
    @spamfilter.greylist_urls(author,[])
    log(content)
    errorlog("User erased page '#{pagename}': Greylisted user #{author}")
    
    tarpit
    return false
  elsif @spamfilter.empty_urls_in(content) then
    # Anchors with empty hrefs are another spam signature: ban the author.
    blacklist_user(author)
    
    log(content)
    errorlog("User used empty URL hrefs in '#{pagename}': Blacklisted user #{author}")
    
    tarpit
    return false
  end
  
  # --- URL checks: diff the edit against the stored version of the page ---
  current_page = storage.load_page(web.name,web.current_page)
  urls = @spamfilter.added_urls(current_page.content,content)
     
  if @spamfilter.edit_by_banned_user?(author) then 
    # Known spammer: every URL they try to add goes straight to the blacklist.
    @spamfilter.blacklist_urls(urls)
    
    log("User #{author} is blacklisted while editing '#{pagename}'.  Blacklisting #{urls.join(', ')}")
    
    # tarpit
    # NOTE(review): the tarpit is commented out on this branch — banned
    # users are rejected without the delay; confirm this is deliberate.
    return false
  elsif urls.length > @maximum_urls then 
    # Too many new URLs in one edit: ban author and blacklist the urls.
    blacklist_user(author)
    @spamfilter.blacklist_urls(urls)
    
    errorlog("User added too many URLS to '#{pagename}': #{urls.length}. Blacklisted user #{author} and #{urls.join(', ')}")
    
    tarpit
    return false
  elsif urls.length >0 && @spamfilter.edit_contains_banned_url?(urls) then
  
    # The edit adds at least one URL that is already blacklisted.
    blacklist_user(author)
    @spamfilter.blacklist_urls(urls)
    
    errorlog("Edit by user #{author} of page '#{pagename}' contains blacklisted url. Blacklisting #{urls.join(', ')}")
    
    tarpit
    return false
  elsif @spamfilter.stopped_by_spam_engine(web.url,content,author,web.alias) then
    # External spam engine (Akismet, when configured) rejected the edit.
    blacklist_user(author)
    @spamfilter.blacklist_urls(urls)
    
    errorlog("Edit by user #{author} of page '#{pagename}' caught by Akismet. Blacklisting #{urls.join(', ')}")
    errorlog(content)
    
    tarpit
    return false
    
  else
    # Accepted — but any newly added URLs are greylisted for review.
    if urls.length > 0 then
      @spamfilter.greylist_urls(author,urls)
      errorlog("Greylisted user #{author} while editing '#{pagename}' because of the following urls: #{urls.join(', ')}")
    end
    return true
  end
end

#accept_user?(web) ⇒ Boolean

Accept a call from the user if they are not blacklisted

Returns:

  • (Boolean)

22
23
24
25
26
27
28
29
30
31
32
# File 'lib/Wiki2Go/PublicWikiConfig.rb', line 22

# Accept a request from a user unless they are on the blacklist.
# Authenticated (secure) sessions are always allowed through.
#
# @param web the wiki/web object for the current request
# @return [Boolean] true when the caller may proceed
def accept_user?(web)
  return true if web.secure?

  editor = web.user
  page   = web.name.length > 0 ? "#{web.name}/#{web.current_page}" : web.current_page
  unless @spamfilter.edit_by_banned_user?(editor)
    return true
  end
  log("User #{editor} is blacklisted while editing '#{page}'.")
  false
end

#blacklist_user(spammer) ⇒ Object

Add the spammer IP address to the blacklist


35
36
37
# File 'lib/Wiki2Go/PublicWikiConfig.rb', line 35

# Put the spammer's identifier (their IP address) on the blacklist.
# Thin delegation to the underlying spam filter.
#
# @param spammer_ip [String] address of the offending user
def blacklist_user(spammer_ip)
  @spamfilter.blacklist_user(spammer_ip)
end

#editable?(web) ⇒ Boolean

A public wiki is always editable

Returns:

  • (Boolean)

129
130
131
# File 'lib/Wiki2Go/PublicWikiConfig.rb', line 129

# A public wiki never locks pages: every web is editable.
#
# @return [Boolean] always true
def editable?(_web)
  true
end

#redirect_url?(web, url) ⇒ Boolean

Redirect if the url is on the greylist, unless the user is authenticated

Returns:

  • (Boolean)

134
135
136
137
138
139
140
# File 'lib/Wiki2Go/PublicWikiConfig.rb', line 134

# Decide whether a link must be routed through the redirect page.
# Authenticated sessions always get direct links; for everyone else,
# URLs on the greylist are redirected (and the redirect is logged).
#
# @param web the wiki/web object for the current request
# @param url [String] the link target to check
# @return [Boolean] true when the url should be redirected
def redirect_url?(web,url)
  return false if web.secure?

  if @spamfilter.greylisted_url?(url)
    log("Redirect #{url}")
    true
  else
    false
  end
end

#tarpit ⇒ Object

What to do when we encounter a spammer? Delay him for 60 seconds


124
125
126
# File 'lib/Wiki2Go/PublicWikiConfig.rb', line 124

# Punish a detected spammer by stalling their request for a full minute.
def tarpit
  sleep 60
end

#use_akismet(api_key, blog) ⇒ Object


142
143
144
# File 'lib/Wiki2Go/PublicWikiConfig.rb', line 142

# Turn on Akismet spam checking on the underlying spam filter.
#
# @param api_key [String] Akismet API key
# @param blog the blog/site identifier registered with Akismet
def use_akismet(api_key,blog)
  @spamfilter.use_akismet(api_key, blog)
end