Skip to content
Snippets Groups Projects
Commit 7a29bf10 authored by Jörg Sachse's avatar Jörg Sachse
Browse files

refactor for use with TSM backup client

parent 15f0bdc4
Branches
No related tags found
No related merge requests found
Pipeline #7069 passed
#!/usr/bin/env bash
# This script clones/updates/saves all Archivematica related GitHub repositories and issues.

# functions
get_cli_args(){
  # Parse the script's command-line arguments.
  #   -b|--backup_dir|--backup-dir|--backupdir <dir>
  #       target directory for the backups; must exist and be writable.
  # Sets the global: backup_dir
  # Exits 1 on an unknown option or an unusable backup directory
  # (previously an unusable directory was silently ignored and the
  # default was used, which hid user errors).
  while [[ $# -gt 0 ]]; do
    case ${1} in
      -b|--backup_dir|--backup-dir|--backupdir)
        if [[ -n "${2:-}" && -w "${2}" ]]; then
          backup_dir="${2}"
        else
          echo "'${2:-}' is not a writable backup directory. Exiting."
          exit 1
        fi
        shift 2
        ;;
      *)
        echo "'${1}' is not a valid parameter. Exiting."
        exit 1
        ;;
    esac
  done
}
print_fails(){
  # Report commands accumulated in the global 'fails' array.
  # No failures: reset the list and return 0 so the script can continue
  # with the next backup stage (the original unconditionally 'exit 0'-ed
  # here, which aborted the script after the repo stage and made the
  # trailing 'fails=()' reset unreachable).
  # Failures present: list them on stdout and exit 1.
  if [[ ${#fails[@]} -eq 0 ]]; then
    fails=() # reset list of failed commands
    return 0 # no error during execution
  fi
  echo "################"
  echo "FAILED COMMANDS:"
  for fail in "${fails[@]}"; do
    echo "${fail}" # log failed command
  done
  exit 1 # errors during execution
}
# defaults
orgnames="archivematica artefactual artefactual-labs"            # GitHub organisations
page=1                                                           # starting page
perpage=100                                                      # HINT: 100 already per page max
maxpage=3                                                        # HINT: should be future proof, currently <200 AM-related repos
orgs_base_url="https://api.github.com/orgs/"                     # base API URL for GitHub organisations
issue_base_url="https://github.com/archivematica/Issues/issues/" # base URL for GitHub issues
min_issue_count=1709                                             # as of 2024-09-05
fails=()                                                         # set of failed commands
RED="\\e[31m"
WHITE="\\e[0m"
ERROR="${RED}[ERROR]\t${WHITE}"
backup_dir=".."                                                  # default backup target (overridable via -b)

# get CLI arguments
get_cli_args "${@}"

# var init (after CLI parsing so -b takes effect)
local_repo_dir="${backup_dir}/archivematica_related_repos"   # local subdir for repos
local_issue_dir="${backup_dir}/archivematica_related_issues" # local subdir for issues

# list external required binaries here (space separated)
# NOTE: 'basename' added — the repo loop relies on it but it was never checked
REQUIREMENTS="bash basename curl git sed wget"
for REQUIREMENT in ${REQUIREMENTS}; do # intentional word-splitting on the list
  command -v "${REQUIREMENT}" >/dev/null 2>&1 || { echo >&2 "${ERROR} '${REQUIREMENT}' required but not installed. Aborting."; exit 1; }
done
### MAIN
# backup github repos
[[ ! -d ${local_repo_dir} ]] && mkdir -p "${local_repo_dir}"
pushd "${local_repo_dir}" || exit 1
until [[ ${page} -gt ${maxpage} ]]; do
    for org in ${orgnames}; do
        while read -r repo_url; do
            if [[ -n "${repo_url}" ]]; then
                repo_name=$( basename -s ".git" "${repo_url}" ) # get repo name
                echo "############"
                if [[ -d "./${repo_name}" ]]; then
                    echo "update repo: ${repo_name}"
                    cmd="git -C ./${repo_name} pull --recurse-submodules" # update local repo
                else
                    echo "clone repo : ${repo_name}"
                    cmd="git clone --recurse-submodules ${repo_url}" # create local repo
                fi
                ${cmd} || fails+=("${cmd}") # remember fails
            fi
        done < <(curl -sS "${orgs_base_url}${org}/repos?page=${page}&per_page=${perpage}" | grep -e 'clone_url.*' | cut -d \" -f 4 | xargs -L1 echo) # HINT: use process substitution to remember $fails
    done
    # NOTE(review): the page increment was elided in the diff view this was
    # recovered from — confirm against the upstream script.
    page=$(( page + 1 ))
done
popd || exit 1
print_fails
# backup github issues
[[ ! -d ${local_issue_dir} ]] && mkdir -p "${local_issue_dir}"
pushd "${local_issue_dir}" || exit 1
for n in {1..100000}; do
    url="${issue_base_url}${n}.html"
    # Beyond the last known issue number, probe before downloading and stop
    # at the first issue that does not exist.
    if [[ ${n} -gt ${min_issue_count} ]]; then
        if ! wget --spider "${url}" 2>/dev/null; then
            echo "stop: issue ${n} does not exist."
            break
        fi
    fi
    echo "save issue: ${n}"
    # BUGFIX: on failure the original appended the stale ${cmd} left over
    # from the repo loop; build and record the actual wget command instead.
    cmd="wget -q -N -E -K ${issue_base_url}${n}.html"
    ${cmd} || fails+=("${cmd}") # FIXME: broken layout
    # wget -q -N -E -K -k -p -H "${issue_base_url}${n}.html" # ALTERNATIVE: still broken layout but offline images
done
popd || exit 1
print_fails
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment