From 17d58ea759a020034dd66c2f7f6e58f745d7d90d Mon Sep 17 00:00:00 2001
From: Masin Al-Dujaili
Date: Thu, 18 Apr 2024 12:57:13 +0200
Subject: [PATCH] Init

---
 SkinAndExtendMediawiki.py | 149 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 149 insertions(+)
 create mode 100755 SkinAndExtendMediawiki.py

diff --git a/SkinAndExtendMediawiki.py b/SkinAndExtendMediawiki.py
new file mode 100755
index 0000000..a9b107f
--- /dev/null
+++ b/SkinAndExtendMediawiki.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python3
+
+import requests
+import argparse
+import json
+import os
+import re
+import shutil
+import tarfile
+
+MW_API_URL = 'https://www.mediawiki.org/w/api.php'
+DEBUG = False
+
+def get_filename_from_cd(cd):
+    """
+    Get the filename from a Content-Disposition header.
+    """
+    if not cd:
+        return None
+    fname = re.findall('filename=(.+)', cd)
+    if len(fname) == 0:
+        return None
+    return fname[0]
+
+def find_nearest_small_value(key, sorted_li):
+    return max(i for i in sorted_li if i <= key)
+
+def download(name, url, path):
+    if url is None:
+        return
+    if DEBUG:
+        print(f' ↪ Downloading { name } from { url } to "{ path }"')
+    else:
+        print(f' ↪ Downloading { name }')
+    r = requests.get(url, allow_redirects = True)
+    filename = get_filename_from_cd(r.headers.get('content-disposition'))
+    if filename is None and '/' in url:
+        filename = url.rsplit('/', 1)[1]
+    if DEBUG:
+        print( f' ↪ Writing to {os.path.join( path, filename )}' )
+    with open(os.path.join( path, filename ), 'wb') as f:
+        f.write(r.content)
+    if DEBUG:
+        print( f' ↪ Decompressing to {os.path.join( path, name )}' )
+    with tarfile.open( os.path.join( path, filename ) ) as tgz:
+        tgz.extractall( path )
+
+def get_url(release, item, degrade):
+    if release in item:
+        return item[release]
+    elif degrade:
+        prv = find_nearest_small_value(release, list(item.keys()))
+        return item[prv]
+    else:
+        return None
+
+def ensure_path(path):
+    if os.path.exists(path):
+        if os.path.isdir(path):
+            if os.access(path, os.W_OK):
+                return True
+            else:
+                return False
+        else:
+            return False
+    else:
+        os.mkdir( path, mode=0o770 )
+        if os.getuid() == 0:
+            shutil.chown(path, user='www-data', group='www-data')
+        return True
+
+def get_path(path):
+    if os.path.exists(path):
+        if os.path.isdir(path):
+            if os.access(path, os.W_OK):
+                return path
+            elif path != './':
+                print(f'"{path}" is not writable. Trying current directory.')
+                return get_path('./')
+            else:
+                print(f'Cannot write to {path}. Terminating.')
+                exit(3)
+        else:
+            print(f'"{path}" is not a directory. Trying current directory.')
+            return get_path('./')
+    else:
+        print(f'"{path}" does not exist. Trying current directory.')
+        return get_path('./')
+
+def init():
+    global DEBUG
+    parser = argparse.ArgumentParser(
+        prog="SkinAndExtendMediawiki",
+        description="It's all in the name: skinning and extending MediaWiki",
+        epilog="Skins and extensions will be downloaded into the 'skins' and 'extensions' subdirectories of the given -p/--path, respectively."
+    )
+    parser.add_argument("-p","--path", help="installation path\nExamples: -p /var/www/mediawiki/\n--path /srv/mediawiki", type=str, default='./')
+    parser.add_argument("-r","--release", help="MediaWiki release; use release 'list' to list the available release tags.", type=str, default="list")
+    parser.add_argument("-d","--degrade", help="fall back to an older MediaWiki release if no download exists for the selected release (source → master → REL… → REL-1 …); otherwise the item is skipped", action='store_true')
+    parser.add_argument("-s","--skins", help="install one or more MediaWiki skins", type=str, dest='skins', nargs='+')
+    parser.add_argument("-e","--extensions", help="install one or more MediaWiki extensions", type=str, dest='extensions', nargs='+')
+    parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
+
+    args = parser.parse_args()
+    if args.verbose:
+        DEBUG = True
+    if DEBUG:
+        print(args)
+    payload = { 'action': 'query', 'list': 'extdistbranches', 'format': 'json' }
+    r = requests.get( MW_API_URL, params=( payload | { 'edbskins': 'Vector' } ) )
+    if r.status_code == 200:
+        response = json.loads(r.text)
+        vector_keys = list( response['query']['extdistbranches']['skins']['Vector'].keys() )
+        list_message = f'\nAvailable release tags: { vector_keys }.'
+    else:
+        vector_keys = []
+        list_message = f'Cannot retrieve release tags, received HTTP { r.status_code }.'
+    if args.release == 'list':
+        parser.print_help()
+        print(list_message)
+        exit(1)
+    elif args.release in vector_keys:
+        print(f'Using release tag { args.release }.')
+    else:
+        print(f'Invalid release tag { args.release }.')
+        exit(2)
+
+    final_payload = payload.copy()
+    if args.skins is not None:
+        final_payload |= { 'edbskins': '|'.join(args.skins) }
+    if args.extensions is not None:
+        final_payload |= { 'edbexts': '|'.join(args.extensions) }
+
+    #print( json.dumps(final_payload, indent=4) )
+    r = requests.get( MW_API_URL, params=final_payload )
+    response = json.loads(r.text)
+    edb = response['query']['extdistbranches']
+    path = get_path( os.path.join( args.path, '' ) )
+    for dl_type in edb:
+        print(f'Processing { dl_type }.')
+        ensure_path( os.path.join( path, dl_type ) )
+        for item in edb[dl_type]:
+            if DEBUG:
+                print( f' ↪ {item}: { get_url( args.release, edb[dl_type][item], args.degrade )}' )
+            download(item, get_url( args.release, edb[dl_type][item], args.degrade ), os.path.join( path, dl_type ) )
+
+
+if __name__ == "__main__":
+    init()
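
For review, a minimal standalone sketch of the extdistbranches query the script is built around. It uses the same endpoint and parameters as the patch above; the printed mapping is the structure that get_url() later picks a download URL from. Nothing here is hard-coded beyond what already appears in the patch, and it assumes network access to www.mediawiki.org.

    #!/usr/bin/env python3
    # Sketch only: mirrors the release-tag lookup in SkinAndExtendMediawiki.py.
    import requests

    MW_API_URL = 'https://www.mediawiki.org/w/api.php'
    payload = {
        'action': 'query',
        'list': 'extdistbranches',
        'format': 'json',
        'edbskins': 'Vector',
    }
    r = requests.get(MW_API_URL, params=payload)
    r.raise_for_status()
    vector = r.json()['query']['extdistbranches']['skins']['Vector']
    # Keys are release tags (e.g. 'master', 'source', REL… branches), values
    # are tarball URLs -- the same mapping get_url()/download() operate on.
    for tag, url in vector.items():
        print(tag, url)

A typical invocation of the script itself would then look like the following (the path, release tag, and extension name are illustrative, not defaults):

    ./SkinAndExtendMediawiki.py --path /var/www/mediawiki --release REL1_41 --skins Vector --extensions Cite --degrade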