# Called from the Wikimedia extension: takes one job (article id,
# revision id) from the queue, updates the tree, calculates the
# covering, and saves the tree and covering to the database.
import sys

import ConfigParser
import MySQLdb

import TriesA5
import wiki2text_v2 as w2t
import job_processing_functions as jpf

## const globals
BASE_DIR = "/home/michael/science/wikipedia/code/"
INI_FILE = BASE_DIR + "config_data.ini"

## parse the ini file
ini_config = ConfigParser.ConfigParser()
# use a with-block so the handle is closed; a missing file still
# raises IOError, as the original readfp(open(...)) did
with open(INI_FILE) as ini_fh:
    ini_config.readfp(ini_fh)

## init constants
N = int(ini_config.get('constants', 'N'))
K_REV = int(ini_config.get('constants', 'K_REVISIONS'))
K_TIME = int(ini_config.get('constants', 'K_TIME'))

## init the DB
connection = MySQLdb.connect(host=ini_config.get('db', 'host'),
                             user=ini_config.get('db', 'user'),
                             passwd=ini_config.get('db', 'passwd'),
                             db=ini_config.get('db', 'db'))
curs = connection.cursor()

try:
    if len(sys.argv) > 2:
        article_id = sys.argv[1]
        revision_id = sys.argv[2]
        job = (article_id, revision_id)
        jsn = jpf.get_treejson(curs, article_id)
        jpf.process_a_job(curs, job, jsn, N, K_REV, K_TIME)
    else:
        # previously a missing-argument call exited silently; report it
        # (exit status kept at 0 for backward compatibility with callers)
        sys.stderr.write("usage: %s <article_id> <revision_id>\n"
                         % sys.argv[0])
finally:
    # release DB resources even if job processing raised
    curs.close()
    connection.close()