Jump to content

Wikipedia:Database reports/Recently created unreferenced biographies of living people/Configuration

From Wikipedia, the free encyclopedia

biobot.py

[edit]
#!/usr/bin/env python2.5

# Copyright 2009 bjweeks, MZMcBride

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import datetime
import MySQLdb
import re
import urlparse
import wikitools
import settings

# On-wiki title of the report page; settings.rootpage supplies the prefix.
report_title = settings.rootpage + 'Recently-created unreferenced biographies of living people'

# Wikitext skeleton for one day's report section.  The three %s slots are
# (section heading, table rows, "data as of" timestamp); %% escapes the
# literal percent signs used in the inline CSS widths.
report_template = u'''

== %s ==
{| class="wikitable" style="width:100%%; margin:auto;"
|- style="white-space:nowrap;"
! style="width:10%%;" | No.
! Biography
|-
%s
|}

''Data as of %s.''
'''

# Log in to the wiki API with credentials taken from the settings module.
wiki = wikitools.Wiki(settings.apiurl)
wiki.login(settings.username, settings.password)

# When True, leave {{Unreferenced BLP warning}} notices on the talk pages
# of the users who created the flagged articles.
nag_users = True

# Human-readable heading for yesterday (UTC); the regex strips zero-padding
# that strftime's %d inserts (e.g. 'March 03' -> 'March 3').
htime = re.sub(r'[ ]+0', ' ', datetime.datetime.strftime(datetime.datetime.utcnow() - datetime.timedelta(days=1), '%B %d'))
# Yesterday's date as YYYYMMDD, used as a rev_timestamp prefix in the SQL query.
qtime = datetime.datetime.strftime(datetime.datetime.utcnow() - datetime.timedelta(days=1), '%Y%m%d')

def pageCreator(page_title):
    """Return the username that made the first (oldest) revision of a page.

    Asks the MediaWiki API for the single earliest revision of *page_title*
    (rvdir=newer, rvlimit=1) and extracts its 'user' field.
    """
    query_params = {
        'action': 'query',
        'prop': 'revisions',
        'titles': '%s' % page_title,
        'rvprop': 'user',
        'rvdir': 'newer',
        'format': 'json',
        'rvlimit': '1',
    }
    api_request = wikitools.APIRequest(wiki, query_params)
    result = api_request.query(querycontinue=False)
    page_info = result['query']['pages'].values()[0]
    return page_info['revisions'][0]['user']

# Connect to the wiki database replica; login credentials come from ~/.my.cnf.
conn = MySQLdb.connect(host=settings.host, db=settings.dbname, read_default_file='~/.my.cnf')
cursor = conn.cursor()
# Select the mainspace, non-redirect articles in [[Category:Living_people]]
# that were newly created (rc_new = 1) with a first-revision timestamp on
# yesterday's date, together with any external links they carry.  The LEFT
# JOIN means el_index is NULL for pages that have no external links at all.
# NOTE(review): qtime is generated internally above (YYYYMMDD), so the plain
# string interpolation into the LIKE clause is not an injection risk here.
cursor.execute('''
/* biobot.py SLOW_OK */
SELECT
  page_title,
  el_index
FROM recentchanges
JOIN page
ON page_id = rc_cur_id
JOIN categorylinks
ON cl_from = rc_cur_id
LEFT JOIN externallinks
ON el_from = rc_cur_id
WHERE rc_namespace = 0
AND page_is_redirect = 0
AND cl_to = "Living_people"
AND rc_new = 1
AND (SELECT
       MIN(rev_timestamp)
     FROM revision
     WHERE rev_page = rc_cur_id) LIKE "%s%%";
''' % qtime)

# Map each article title to the domains of its external links; articles
# with no external links at all go straight onto the output list.
page_dict = {}
output_list = []
for row_title, el_index in cursor.fetchall():
    if not el_index:
        # LEFT JOIN yielded NULL: the page carries no external links.
        output_list.append(row_title)
    else:
        parsed = urlparse.urlparse(el_index)
        # el_index stores the host reversed with a trailing dot
        # (e.g. 'org.wikipedia.en.') -- drop that trailing dot.
        domain = parsed.netloc[:-1]
        page_dict.setdefault(row_title, []).append(domain)

# An article still counts as unreferenced when every one of its external
# links points back at en.wikipedia.org itself (hosts are stored reversed).
for title, domain_list in page_dict.iteritems():
    if all(domain == 'org.wikipedia.en' for domain in domain_list):
        output_list.append(title)

# Build the report's table rows and, when enabled, warn each article's creator.
i = 1
output = []
for item in output_list:
    # Titles arrive from MySQL as UTF-8 byte strings; decode to unicode.
    page_title = u'%s' % unicode(item, 'utf-8')
    page = wikitools.Page(wiki, page_title, followRedir=False)
    # Skip pages that already show any sign of sourcing: a references /
    # bibliography / external-links style section heading, a <ref> tag,
    # or a bare http:// URL anywhere in the wikitext.
    if not re.search(r'(==.*(further reading(s)?|bibliography|reference(s)?|external link(s)?).*==|<ref|http://)', page.getWikiText(), re.I|re.U):
        table_row = u'''! %d
| [[%s]]
|-''' % (i, page_title)
        output.append(table_row)
        i += 1
        # Display form of the title (underscores -> spaces) for messages.
        ftitle = re.sub('_', ' ', page_title)
        if nag_users:
            user = pageCreator(page_title)
            user_page = wikitools.Page(wiki, 'User talk:%s' % user, followRedir=True)
            if user_page.exists:
                # Don't warn the same user twice about the same article:
                # look for an existing section heading naming it.
                if not re.search(r'== \[\[(%s)\]\] ==' % re.escape(ftitle), user_page.getWikiText(), re.I|re.U):
                    user_page.edit(text='{{subst:Unreferenced BLP warning|1=%s}} --~~~~' % ftitle, summary='[[%s]]' % ftitle, section='new', bot=1, skipmd5=True)
            else:
                # Talk page does not exist yet; create it with the warning.
                user_page.edit(text='{{subst:Unreferenced BLP warning|1=%s}} --~~~~' % ftitle, summary='[[%s]]' % ftitle, section='new', bot=1, skipmd5=True)

# Estimate replication lag from the newest recentchanges entry so the
# "data as of" stamp reflects the replica's clock, not the current time.
cursor.execute('SELECT UNIX_TIMESTAMP() - UNIX_TIMESTAMP(rc_timestamp) FROM recentchanges ORDER BY rc_timestamp DESC LIMIT 1;')
rep_lag = cursor.fetchone()[0]
current_of = (datetime.datetime.utcnow() - datetime.timedelta(seconds=rep_lag)).strftime('%H:%M, %d %B %Y (UTC)')

# Append today's section to the report page, but only if a heading for this
# date is not already present (guards against double-posting on reruns).
report = wikitools.Page(wiki, report_title)
report_text = report_template % (htime, '\n'.join(output), current_of)
report_text = report_text.encode('utf-8')
if not re.search(r'==.*%s.*==' % htime, report.getWikiText(), re.I|re.U):
    report.edit(appendtext=report_text, summary=settings.editsumm, bot=1)

cursor.close()
conn.close()

crontab

[edit]
10 0 * * * python ~/scripts/biobot/biobot.py