User:Reza1615/BOT/move-pages.py

This bot works with Pywikipedia Library and uses move-log to update item's language link.

This code also updates the labels according to the rules that exist in MediaWiki:Gadget-slurpInterwiki.js (the popular JS tool for importing links and labels)

customizing edit

You can change these variables; if you don't, the bot will automatically check the move log of en.wikipedia for today's moves.

variables (in lines 64-68)
  • Our_wiki='en'

with Our_wiki you can set bot for your own wiki

  • day_before=0

With day_before you can tell the bot to check the move log for a number of days in the past: if it is set to 0 it means today, if it is set to 1 it means yesterday.

  • delta_day=1

With delta_day you can tell the bot how many days' worth of the move log to check: if it is set to 1 it means one day, if it is set to 7 it means one week (7 days).


  • Note:In my opinion it is much better to set bot to run every 1 hour to update moved pages' Interwikis and don't confuse local users after they moved a page :)

code edit

<source lang='python' line start='1'>

#!/usr/bin/python
# -*- coding: utf-8 -*-
# Reza(User:reza1615), 2013
# Distributed under the terms of the CC-BY-SA 3.0 .

# NOTE(review): the wiki extraction fused two statements onto each of the
# next lines; they are split back into valid Python here.
import query, wikipedia, time, codecs
from datetime import timedelta, datetime

# Disable the edit throttle: this bot is meant to run frequently (e.g. hourly)
# and make small batches of edits.
wikipedia.config.put_throttle = 0
wikipedia.put_throttle.setDelay()

def wiki_move_log(Our_wiki, timestart, timeend):
    """Return the titles of pages moved on *Our_wiki* in the given window.

    Queries the MediaWiki API move log (list=logevents) between
    *timestart* and *timeend* (MediaWiki timestamps; lestart is presumably
    the newer bound since logevents iterates newest-first -- confirm) and
    keeps only pages in the allowed namespaces.

    Returns a list of page titles, or False when nothing was found
    (kept as False rather than [] for backward compatibility).
    """
    # Namespaces the bot acts on (main, project, template, help, category
    # and the two common custom content namespaces 100/102).
    allowed_namespaces = frozenset([0, 4, 10, 12, 14, 100, 102])
    moved_pages = []
    params = {
        'action': 'query',
        'list': 'logevents',
        'letype': 'move',
        'lestart': timestart,
        'leend': timeend,
        'lelimit': 5000,
    }
    try:
        wiki_site = wikipedia.getSite(Our_wiki)
        wikilog = query.GetData(params, wiki_site)
        for item in wikilog[u'query'][u'logevents']:
            wikipedia.output(u'moved page is >' + item[u'title'])
            if item[u'ns'] in allowed_namespaces:
                moved_pages.append(item[u'title'])
    except Exception:
        # Best-effort: an API/network failure or an unexpected answer shape
        # is treated as "no moves found".  (Narrowed from a bare except so
        # KeyboardInterrupt/SystemExit are no longer swallowed.)
        pass
    if moved_pages:
        return moved_pages
    else:
        return False

def save_error(case):
    """Append *case* to the conflicts log, unless it is already recorded.

    The log file (zz-move-interwiki_conflicts.txt, in the working
    directory) collects moves the bot could not merge automatically so a
    human can review them later.  Each entry is written on its own line.
    """
    path = 'zz-move-interwiki_conflicts.txt'
    try:
        # Read the existing log so duplicate entries are not appended.
        # (Original leaked the read handle and had `file_text=u`, which is
        # a syntax error -- fixed to an empty unicode string.)
        with codecs.open(path, 'r', 'utf8') as existing:
            file_text = existing.read().strip()
    except IOError:
        file_text = u''  # first run: the log file does not exist yet
    if case not in file_text:
        with codecs.open(path, mode='a', encoding='utf8') as f:
            f.write(u'\n' + case)

def move_wikidata(moved_pages, Our_wiki):
    """Update Wikidata items for pages that were moved on *Our_wiki*.

    For every moved title: follow the redirect left behind by the move,
    re-point the item's sitelink to the new title, verify the edit, and
    finally update the item's label following the rules of the
    slurpInterwiki gadget.  Conflicts that cannot be resolved
    automatically are recorded via save_error().
    """
    wiki_site = wikipedia.getSite(Our_wiki)

    for m_page in moved_pages:
        wikipedia.output(u'-----------------------------------')
        wikipedia.output(u'working on ' + m_page)
        try:
            page = wikipedia.Page(wiki_site, m_page)
            old_data = wikipedia.DataPage(page)
            new_page = page.getRedirectTarget()
        except Exception:
            # Not a redirect (any more): the sitelink was already updated.
            wikipedia.output(m_page + u"'s changing language link is done before so it is passed!")
            continue
        Edit_summary = u"Bot:Updating " + Our_wiki + u"wiki's moved page: " + page.title() + u' > ' + new_page.title()

        if old_data.exists():
            New_data = wikipedia.DataPage(new_page)
            if New_data.exists():
                if len(New_data.interwiki()) == 1:
                    # The target already has an item with only this one
                    # sitelink: blank that sitelink so the items can be
                    # merged.  (Original had a syntax error here: the
                    # 'title' value was lost; an empty title removes the
                    # sitelink.)
                    new_item = {'type': u'sitelink', 'site': Our_wiki, 'title': u''}
                    New_data.setitem(summary="Bot:Empty singel link Item for merging", items=new_item)
                    wikipedia.output("\03{lightgreen}Bot:Empty singel link Item for merging\03{default}")
            new_label = new_page.title()

            try:
                # Re-point the item's sitelink to the new title ...
                new_item = {'type': u'sitelink', 'site': Our_wiki, 'title': new_label}
                old_data.setitem(summary=Edit_summary, items=new_item)
                # ... then re-fetch the item via the new title and verify
                # the sitelink really points there now.
                page = wikipedia.Page(wiki_site, new_label)
                old_data = wikipedia.DataPage(page)
                if new_label == old_data.get()[u'links'][Our_wiki + u'wiki']:
                    wikipedia.output("\03{lightgreen}Bot:update language link\03{default}")
                else:
                    save_error(u'' + Our_wiki + u':' + page.title() + u' > ' + Our_wiki + u':' + new_label + u' not merged!')
                    continue
            except Exception:
                save_error(u'' + Our_wiki + u':' + page.title() + u' > ' + Our_wiki + u':' + new_label + u'')
                continue
            try:
                # Label normalization rules mirroring
                # MediaWiki:Gadget-slurpInterwiki.js:
                if Our_wiki != 'fa':
                    new_label = new_label.split(u'(')[0].strip()  # drop "(disambiguator)"
                if Our_wiki == 'es' or Our_wiki == 'pt' or Our_wiki == 'pt-br':
                    new_label = new_label.replace(u"Anexo:", u"")
                if Our_wiki == 'cs':
                    new_label = new_label.replace(u"Príloha:", u"")
                if Our_wiki == 'de-ch':
                    new_label = new_label.replace(u"ß", u"ss")
                new_item = {'type': u'item', 'label': Our_wiki, 'value': new_label}
                time.sleep(2)  # brief pause between consecutive item edits
                old_data.setitem(summary=Edit_summary, items=new_item)
                time.sleep(3)
                wikipedia.output(u'\03{lightblue}' + Edit_summary + u'\03{default}')
            except Exception:
                # Someone edited the item meanwhile; skip the label update.
                wikipedia.output(m_page + u' was modified so it is passed!')

def main():
    """Entry point: build the move-log time window and process the moves.

    Edit Our_wiki / day_before / delta_day below to customize the bot.
    """
    #----------------------------------------your own data----------------------
    Our_wiki = 'en'
    day_before = 0  # checking the site for today
    delta_day = 1   # delta days that bot should check
    #---------------------------------------------------------------------------
    base_day = datetime.now() - timedelta(day_before) if day_before > 0 else datetime.now()
    oldest = base_day.strftime("%Y%m%d000000")
    newest = (base_day + timedelta(delta_day)).strftime("%Y%m%d000000")
    # lestart gets the newer bound -- presumably because logevents walks the
    # log newest-first; confirm against the API before changing.
    moved_pages = wiki_move_log(Our_wiki, newest, oldest)
    if moved_pages:
        move_wikidata(moved_pages, Our_wiki)

# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()