I wrote the following Bash script to upgrade core-only MediaWiki websites (no added extensions, skins, or images besides the logo image).
I already tested the essence of this script and it worked for me: MediaWiki was upgraded from 1.32.0 to 1.33.0 and I can use the site regularly. Still, I'd be glad for a review:
#!/bin/bash
date="$(date +%F-%T)"
db_user_name_and_db_name="SOME_IDENTICAL_NAMES" # DB user name and DB name must be the same;
war="SOME_WEB_APPLICATION_ROOT"
domain="SOME_DOMAIN" # Also used as the site's directory name;
target_url="LINK_TO_LATEST_MEDIA_WIKI_ARCHIVE_DOWNLOAD"
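# Hypothetical example values, just to illustrate the expected shape of these settings:
#   db_user_name_and_db_name="examplewiki"
#   war="/var/www"
#   domain="example.com"
#   target_url="https://releases.wikimedia.org/mediawiki/1.33/mediawiki-1.33.0.tar.gz"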
mkdir -p "${war}/mediawiki_general_backups"
mkdir -p "${war}/mediawiki_specific_backups"
zip -r "${war}/mediawiki_general_backups/${domain}-directory-backup-${date}.zip" "${war}/${domain}"
mysqldump -u "${db_user_name_and_db_name}" -p "${db_user_name_and_db_name}" > "${war}/mediawiki_general_backups/${domain}-database-backup-${date}.sql"
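# "-p" with no value attached makes mysqldump prompt for the password interactively;
# restoring this dump would look roughly like the following (hypothetical file name):
#   mysql -u "${db_user_name_and_db_name}" -p "${db_user_name_and_db_name}" < dump.sql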
cd "${war}/${domain}" || exit 1 # cd first so the globs below expand inside the site directory
files=(
LocalSettings.php
robots.txt
.htaccess*
"${domain}.png"
googlec69e044fede13fdc.html
)
cp "${files[@]}" "${war}/mediawiki_specific_backups"
cd "${war}" || exit 1 # step out before deleting the directory we were in
rm -rf "${war:?}/${domain:?}" # :? aborts if either variable is empty
mkdir "${war}/${domain}"
wget "${target_url}" -O - | tar -xzv --strip-components=1 -C "${war}/${domain}"
cp -a "${war}/mediawiki_specific_backups"/* "${war}/${domain}"
cd "${war}/${domain}"/maintenance
php update.php
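# update.php also accepts --quick to skip its start-up countdown, which may help if this script runs unattended.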
### Sitemap creation ###
mkdir -p "${war}/${domain}/sitemap"
php maintenance/generateSitemap.php \
--memory-limit=50M \
--fspath="${war}/${domain}/sitemap" \
--identifier="${domain}" \
--urlpath=/sitemap/ \
--server=https://"${domain}" \
--compress=yes
# The sitemap should be declared in robots.txt, in your preferred search engine's search console, or both; a robots.txt sketch follows.
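# For the robots.txt route, a minimal sketch; the index file name
# sitemap-index-<identifier>.xml is an assumption about what generateSitemap.php
# produces, so verify the actual name in the sitemap directory first:
grep -q '^Sitemap:' "${war}/${domain}/robots.txt" || \
echo "Sitemap: https://${domain}/sitemap/sitemap-index-${domain}.xml" >> "${war}/${domain}/robots.txt"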