Wikipedia:Database reports/Completely unreferenced biographies of living people (newest)/Configuration
compunrefbiosnewest.py
#!/usr/bin/env python2.5
# Copyright 2009 bjweeks, MZMcBride
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import MySQLdb
import re
import wikitools
import settings
report_title = settings.rootpage + 'Completely unreferenced biographies of living people (newest)'
report_template = u'''
Completely unreferenced [[WP:BLP|biographies of living people]] roughly ordered by \
date of page creation (descending); data as of <onlyinclude>%s</onlyinclude>.
{| class="wikitable sortable plainlinks" style="width:100%%; margin:auto;"
|- style="white-space:nowrap;"
! No.
! Biography
|-
%s
|}
'''
output_limit = 800
wiki = wikitools.Wiki(settings.apiurl)
wiki.setMaxlag(-1)
wiki.login(settings.username, settings.password)
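# Page IDs already checked and found to contain references; they are skipped below.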
f = open('%s/reviewed-bios-page-ids.txt' % settings.bios_ids_directory, 'r')
reviewed_pages = f.read()
reviewed_pages_split = reviewed_pages.split('\n')
f.close()
conn = MySQLdb.connect(host=settings.host, db=settings.dbname, read_default_file='~/.my.cnf')
cursor = conn.cursor()
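# Fetch all non-redirect mainspace pages in Category:Living_people, newest page IDs first.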
cursor.execute('''
/* compunrefbiosnewest.py SLOW_OK */
SELECT
page_id,
page_title
FROM page
JOIN categorylinks
ON cl_from = page_id
WHERE page_namespace = 0
AND page_is_redirect = 0
AND cl_to = 'Living_people'
ORDER BY page_id DESC;
''')
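# Build table rows for up to output_limit completely unreferenced biographies.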
i = 1
output = []
g = open('%s/reviewed-bios-page-ids.txt' % settings.bios_ids_directory, 'a')
for row in cursor.fetchall():
    if i > output_limit:
        break
    page_id = row[0]
    page_title = u'%s' % unicode(row[1], 'utf-8')
    # Skip pages already recorded as having references.
    if str(page_id) in reviewed_pages_split:
        continue
    else:
        page = wikitools.Page(wiki, page_title, followRedir=False)
        try:
            # Any references/external links/sources-style section, <ref> tag, URL,
            # or ISBN counts as evidence of sourcing.
            if not re.search(r'(==.*(further reading(s)?|bibliography|reference(s)?|external link(s)?|source(s)?).*==|<ref|http(s)?://|isbn)', page.getWikiText(), re.I|re.U):
                table_row = u'''| %d
| [[%s]]
|-''' % (i, page_title)
                output.append(table_row)
                print i
                i += 1
            else:
                # Page has some form of referencing; record its ID so it is not rechecked.
                g.write('%s\n' % page_id)
        except:
            # Page text could not be fetched; skip it.
            pass
g.close()
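# Approximate replication lag from the newest recentchanges entry.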
cursor.execute('SELECT UNIX_TIMESTAMP() - UNIX_TIMESTAMP(rc_timestamp) FROM recentchanges ORDER BY rc_timestamp DESC LIMIT 1;')
rep_lag = cursor.fetchone()[0]
current_of = (datetime.datetime.utcnow() - datetime.timedelta(seconds=rep_lag)).strftime('%H:%M, %d %B %Y (UTC)')
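# Publish the report.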
report = wikitools.Page(wiki, report_title)
report_text = report_template % (current_of, '\n'.join(output))
report_text = report_text.encode('utf-8')
report.edit(report_text, summary=settings.editsumm, bot=1)
cursor.close()
conn.close()
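The script reads its configuration from a companion settings module. The attribute names below are the ones the script actually references; the values are placeholders, not the real configuration — a minimal sketch only.

# settings.py (sketch; all values are placeholders)
apiurl = 'https://en.wikipedia.org/w/api.php'      # API endpoint of the target wiki
username = 'ExampleBot'                            # bot account credentials
password = 'example-password'
host = 'sql-server.example'                        # database replica host
dbname = 'enwiki_p'                                # database name
rootpage = 'Wikipedia:Database reports/'           # prefix for report pages
editsumm = 'updating database report'              # edit summary used by the bot
bios_ids_directory = '/home/example/scripts/biobot'  # where reviewed-bios-page-ids.txt lives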
crontab
45 1 * * * python ~/scripts/biobot/compunrefbiosnewest.py > /dev/null