# User:RonBot/9/Source1
# ("Appearance" was site chrome captured when this page was copied from Wikipedia)
from wikitools import *
import time
import datetime
import urllib
import json
import userpassbot  # Bot password
import warnings
import re
import mwparserfromhell
import sys
import Cconfig

# Connect to the English Wikipedia's API and log the bot in.
site = wiki.Wiki()
site.login(userpassbot.username, userpassbot.password)
# Output helper that copes with accented characters in filenames.
def pnt(s):
    """Print *s*; if the console encoding cannot represent it, fall back
    to printing the raw UTF-8 encoded bytes instead."""
    try:
        print(s)
    except UnicodeEncodeError:
        encoded = s.encode('utf-8')
        print(encoded)
def startAllowed():
    """Check the on-wiki kill switch.

    Returns "run" when [[User:RonBot/9/Run]] contains exactly "Run",
    otherwise "no" — editors can stop the bot by blanking that page.
    """
    content = page.Page(site, "User:RonBot/9/Run").getWikiText()
    return "run" if content == "Run" else "no"
def allow_bots(text, user):
    """Honour the {{bots}}/{{nobots}} exclusion templates in page *text*.

    text -- the page's wikitext
    user -- this bot's username (case-insensitive match)

    Returns True when the bot is allowed to edit the page, False when an
    exclusion template denies it.  Pages with no bots/nobots template are
    always allowed.
    """
    user = user.lower().strip()
    text = mwparserfromhell.parse(text)
    # Find the first {{bots}} or {{nobots}} template; if none exists the
    # for/else falls through and the page is editable.
    for tl in text.filter_templates():
        if tl.name.matches(['bots', 'nobots']):
            break
    else:
        return True
    print "template found"  # Have we found one
    for param in tl.params:
        # Parameter values are comma-separated bot names, e.g. allow=BotA,BotB.
        bots = [x.lower().strip() for x in param.value.split(",")]
        if param.name == 'allow':
            print "We have an ALLOW"  # allow found
            # allow=none denies everyone; otherwise allowed only if this
            # bot (or "all") is listed.
            if ''.join(bots) == 'none': return False
            for bot in bots:
                if bot in (user, 'all'):
                    return True
        elif param.name == 'deny':
            print "We have a DENY"  # deny found
            # deny=none is an explicit "deny nobody", i.e. allow.
            if ''.join(bots) == 'none':
                print "none - true"
                return True
            for bot in bots:
                if bot in (user, 'all'):
                    pnt(bot)
                    pnt(user)
                    print "all - false"
                    return False
    # Bare {{nobots}} with no parameters denies all bots.
    if (tl.name.matches('nobots') and len(tl.params) == 0):
        print "match - false"
        return False
    return True
def SearchReplace(search, size):
params = {'action':'query',
'list':'search',
'srsearch':search,
'srnamespace':2|3,
'srlimit':size
}
print search
#print "SR.params"
request = api.APIRequest(site, params) #Set the API request
#print "SR.request"
result = request.query(False)
print result
totalhits=result['query']['searchinfo']['totalhits']
print "search", search
print "TotalHits this search", totalhits
if size=="max":
size=totalhits
if totalhits>0:
for loopvar in range(0, size):
#print result
#print ""
pagetitle = result['query']['search'][loopvar]['title']
#pagetitle="User:Ronhjones/Sandbox2" # TEST PAGE
pagetitletext = pagetitle.encode('utf-8')
pnt(pagetitletext)
pagepage = page.Page(site, pagetitle)
print "pagepage"
pagetext = pagepage.getWikiText()
#Stop if there's nobots
stop = allow_bots(pagetext, "RonBot")
if not stop:
continue
print "Bot allowed to edit page"
#if Cconfig.tagged>=1:
# sys.exit('done for now') #Code to limit number of items
print "============================================TOP OF ORIG======================"
pnt(pagetext)
print "============================================BOTTOM OF ORIG======================"
pagetext=re.sub(r'(\<span style="font-size: 85%;"\>)\<center\>([\s\S]*?\<\/span\>)\<\/center\>',r'\1\2',pagetext)
print "++++++++++++++++++++++++++++++++++++++++++++TOP OF NEW+++++++++++++++++++++++++"
pnt(pagetext)
print "++++++++++++++++++++++++++++++++++++++++++++BOTTOM OF NEW++++++++++++++++++++"
print "End of Page"
try:
pagepage.edit(text=pagetext, minor=True, bot=True, summary="(Task 9 userpace trial - removing badly placed center tags") #(DO NOT UNCOMMENT UNTIL BOT IS APPROVED)
Cconfig.tagged += 1
print "writing changed page"
except:
print"Failed to write"
print "Pages done so far", Cconfig.tagged
print ""
else:
print "no pages to do"
print ""
return
def main():
    """Entry point: abort unless the on-wiki run page says "Run", then
    process every userspace page matching the search expression."""
    if startAllowed() == "no":
        sys.exit('Disabled Task')
    # Running count of pages edited this run.
    Cconfig.tagged = 0
    query = 'insource: "<center>" insource: "The Bugle" insource: "</span></center>"'
    SearchReplace(query, "max")
if __name__ == "__main__":
with warnings.catch_warnings():
warnings.simplefilter("ignore", FutureWarning)
main()