User:RonBot/13/Source1
from wikitools import *
import time
import datetime
import urllib
import json
import userpassbot #Bot password
import warnings
import re
import mwparserfromhell
import sys
import IMconfig

site = wiki.Wiki() #Tell Python to use the en-wiki API
site.login(userpassbot.username, userpassbot.password) #login

#routine to autoswitch some of the output - as filenames have accented chars!
def pnt(s):
    try:
        print(s)
    except UnicodeEncodeError:
        print(s.encode('utf-8'))

def startAllowed():
    textpage = page.Page(site, "User:RonBot/13/Run").getWikiText()
    if textpage == "Run":
        return "run"
    else:
        return "no"

def remove_duplicates(l):
    return list(set(l))

def firstrevision(page):
    params = {'action':'query',
              'prop':'revisions',
              'titles':page,
              'rvlimit':'max'
              }
    req = api.APIRequest(site, params)
    res = req.query(False)
    #print res
    pageid = res['query']['pages'].keys()[0]
    #print len(res['query']['pages'][pageid]['revisions'])
    first = len(res['query']['pages'][pageid]['revisions'])-1 #revisions come newest first, so the oldest is last
    timestamp = str(res['query']['pages'][pageid]['revisions'][first]['timestamp'])
    #print
    m = re.search(r'(.*?)T', timestamp) #date part (YYYY-MM-DD) of the ISO timestamp
    datebit = m.group(1)
    print datebit
    return datebit

def SearchWiki(search2, size):
    processed = 0
    lastContinue = '0'
    print "============================================"
    searchstr = search2
    print "search = ", searchstr
    params = {'action':'query',
              'list':'search',
              'srsearch':searchstr,
              'srnamespace':6,
              'srlimit':5000,
              'sroffset':lastContinue,
              'srsort':'last_edit_desc'
              }
    #print searchstr
    print "SR.params"
    result = "" #clear out previous run
    request = api.APIRequest(site, params) #Set the API request
    print "SR.request"
    result = request.query(False)
    #print result
    totalhits = result['query']['searchinfo']['totalhits']
    #print "search", search
    print "TotalHits this search", totalhits
    size = 4999
    if totalhits > 0:
        for loopvar in range(0, size):
            #print loopvar,
            #print ""
            try:
                pagetitle = result['query']['search'][loopvar]['title']
            except:
                pagetitle = "Not Found" #ran past the end of the result list
            pnt(pagetitle)
            datepart = firstrevision(pagetitle)
            x = datetime.datetime.utcnow().date()
            print x
            timestamp = datetime.datetime.strptime(datepart, '%Y-%m-%d').date()
            print timestamp
            if timestamp < datetime.datetime.utcnow().date() - datetime.timedelta(days=90):
                print ">90"
            else:
                print "<90......"
                #only list files first uploaded within the last 90 days
                IMconfig.pagelist.append(datepart + pagetitle + "}}\n")
            print
    return

def writepage(title):
    galhead = '{{#tag:gallery|\n'
    galfoot = ''
    pagetitle = title
    pagepage = page.Page(site, pagetitle)
    pagetext = ''
    galdate = ''
    IMconfig.pagelist = remove_duplicates(IMconfig.pagelist) #keep the result - the function returns a new list
    IMconfig.pagelist.sort(reverse=True)
    for line in IMconfig.pagelist:
        pagedate = line[:10] #first 10 characters are the YYYY-MM-DD upload date
        if pagedate != galdate:
            #new date - close the previous gallery and start a dated section
            pagetext = pagetext + galfoot + "==" + pagedate + "==\n" + galhead
            galfoot = '}}\n'
            galdate = pagedate
        pagetext = pagetext + '{{IsLocal|' + line[15:] #strip the date and "File:" prefix
    pagetext = pagetext + galfoot
    print "writing page"
    pagepage.edit(text=pagetext, skipmd5=True, summary="(Task 13) update page")

def main():
    go = startAllowed() #Check if task is enabled
    if go == "no":
        sys.exit('Disabled Task')
    IMconfig.pagelist = list()
    #parameters for API request
    search = 'deepcat:"All free media" -deepcat:"Category:Copy to Wikimedia Commons" prefer-recent:1,1'
    SearchWiki(search, 5000)
    writepage("user:RonBot/NewImages")

if __name__ == "__main__":
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", FutureWarning)
        main()