Skip to content

Commit 55092d2 — Browse files

Merge pull request CloudBotIRC#120 from linuxdaemon/gonzobot+remove-old-optout-system

Remove old optout lists in favor of the optout plugin.

2 parents: 27382f1 + a30cc4c · commit 55092d2

4 files changed

Lines changed: 6 additions & 22 deletions

File tree

plugins/cheer.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,5 @@ def cheer(chan, message):
     """
     :type chan: str
     """
-    if chan not in ["#yogscast"]:
-        shit = random.choice(cheers)
-        message(shit, chan)
+    shit = random.choice(cheers)
+    message(shit, chan)

plugins/dogpile.py

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -9,13 +9,9 @@
 
 HEADERS = {'User-Agent': 'Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19'}
 
-opt_out = []
-
 @hook.command("dpis", "gis")
-def dogpileimage(text, chan):
+def dogpileimage(text):
     """Uses the dogpile search engine to search for images."""
-    if chan in opt_out:
-        return
     image_url = search_url + "/images"
     params = {'q': " ".join(text.split())}
     r = requests.get(image_url, params=params, headers=HEADERS)
@@ -29,10 +25,8 @@ def dogpileimage(text, chan):
     return image
 
 @hook.command("dp", "g", "dogpile")
-def dogpile(text, chan):
+def dogpile(text):
     """Uses the dogpile search engine to find shit on the web."""
-    if chan in opt_out:
-        return
     web_url = search_url + "/web"
     params = {'q':" ".join(text.split())}
     r = requests.get(web_url, params=params, headers=HEADERS)

plugins/herald.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
 from cloudbot import hook
 from cloudbot.util import database
 
-opt_out = []
 delay = 10
 floodcheck = {}
 
@@ -80,9 +79,6 @@ def welcome(nick, message, db, bot, chan):
 
     grab = bot.plugin_manager.find_plugin("grab")
 
-    if chan in opt_out:
-        return
-
     if chan in floodcheck:
         if time.time() - floodcheck[chan] <= delay:
             return

plugins/link_announcer.py

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,6 @@
 # This will match any URL, blacklist removed and abstracted to a priority/halting system
 url_re = re.compile(r'https?://(?:[a-zA-Z]|[0-9]|[$-_@.&+~]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', re.I)
 
-opt_out = []
-
 HEADERS = {
     'Accept-Language': 'en-US,en;q=0.5',
     'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116 Safari/537.36'
@@ -21,10 +19,7 @@
 
 
 @hook.regex(url_re, priority=Priority.LOW, action=Action.HALTTYPE, only_no_match=True)
-def print_url_title(message, match, chan):
-    if chan in opt_out:
-        return
-
+def print_url_title(message, match):
     with closing(requests.get(match.group(), headers=HEADERS, stream=True, timeout=3)) as r:
         r.raise_for_status()
         if not r.encoding:
@@ -39,4 +34,4 @@ def print_url_title(message, match):
         html = BeautifulSoup(content, "lxml", from_encoding=encoding)
         title = " ".join(html.title.text.strip().splitlines())
         out = "Title: \x02{}\x02".format(title)
-        message(out, chan)
+        message(out)

0 commit comments

Comments (0)