Digital Preservation / backup_archivematica_repos · Commit 7a29bf10

refactor for use with TSM backup client

Authored 9 months ago by Jörg Sachse
Parent: 15f0bdc4
Pipeline #7069 passed (stage: test), 9 months ago
No related tags found. No related merge requests found.

Showing 1 changed file: backup_am_github_repos_and_issues.sh (65 additions, 31 deletions)
backup_am_github_repos_and_issues.sh  +65 −31

-#!/bin/bash
-# This script clones/updates/saves all Archivematica related GitHub repositories and issues in the current directory.
+#!/usr/bin/env bash
+# This script clones/updates/saves all Archivematica related GitHub repositories and issues.
+
+# var init
+
+# functions
+get_cli_args(){
+    while [[ $# -gt 0 ]]; do
+        case ${1} in
+            -b | --backup_dir | --backup-dir | --backupdir)
+                [[ -w "${2}" ]] && backup_dir="${2}"
+                shift; shift;
+                ;;
+            *)
+                echo "'${1}' is not a valid parameter. Exiting."
+                exit 1
+                ;;
+        esac
+    done
+}
+
+print_fails(){
+    # print fails (if any) and exit
+    if [[ ${#fails[@]} -eq 0 ]]; then
+        exit 0    # no error during execution
+    else
+        echo "################"
+        echo "FAILED COMMANDS:"
+        for fail in "${fails[@]}"; do
+            echo "${fail}"    # log failed command
+        done
+        exit 1    # errors during execution
+    fi
+    fails=()    # reset list of failed commands
+}
+
+# defaults
 orgnames="archivematica artefactual artefactual-labs"    # GitHub organisations
 page=1         # starting page
 perpage=100    # HINT: 100 already per page max
 maxpage=3      # HINT: should be future proof, currently <200 AM-related repos
-local_repo_dir="../archivematica_related_repos"      # local subdir for repos
-local_issue_dir="../archivematica_related_issues"    # local subdir for issues
 orgs_base_url="https://api.github.com/orgs/"    # base API URL for GitHub organisations
 issue_base_url="https://github.com/archivematica/Issues/issues/"    # base URL for GitHub issues
-min_issue_count=1672    # as of 2024-04-02
+min_issue_count=1709    # as of 2024-09-05
 fails=()    # set of failed commands
+RED="\\e[31m"
+WHITE="\\e[0m"
+ERROR="${RED}[ERROR]\t${WHITE}"
+backup_dir=".."
+
+# get CLI arguments
+get_cli_args "${@}"
+
+# var init
+local_repo_dir="${backup_dir}/archivematica_related_repos"      # local subdir for repos
+local_issue_dir="${backup_dir}/archivematica_related_issues"    # local subdir for issues
+
+# list external required binaries here (space separated)
+REQUIREMENTS="bash curl git sed wget"
+for REQUIREMENT in ${REQUIREMENTS}; do
+    command -v "${REQUIREMENT}" > /dev/null 2>&1 || { echo >&2 "${ERROR}'${REQUIREMENT}' required but not installed. Aborting."; exit 1; }
+done
+
+### MAIN
 # backup github repos
 [[ ! -d ${local_repo_dir} ]] && mkdir -p ${local_repo_dir}
...
@@ -19,21 +69,17 @@ pushd ${local_repo_dir} || exit 1
 until [ ${page} -gt ${maxpage} ]; do
     for org in ${orgnames}; do
         while read -r repo_url; do
-            if [ -n "${repo_url}" ]; then
-                repo_name=$(echo "${repo_url}" | sed 's#^.*/\([^/]*\)\.git$#\1#g')    # get repo name
+            if [[ -n "${repo_url}" ]]; then
+                repo_name=$(basename -s ".git" "${repo_url}")    # get repo name
                 echo "############"
-                if [ -d "./${repo_name}" ]; then
+                if [[ -d "./${repo_name}" ]]; then
                     echo "update repo: ${repo_name}"
                     cmd="git -C ./${repo_name} pull --recurse-submodules"    # update local repo
                 else
                     echo "clone repo : ${repo_name}"
                     cmd="git clone --recurse-submodules ${repo_url}"    # create local repo
                 fi
-                $cmd    # run command
-                result=$?
-                if [ "${result}" -ne 0 ]; then
-                    fails+=("${cmd}")    # remember fails
-                fi
+                ${cmd} || fails+=("${cmd}")    # remember fails
             fi
         done < <(curl -sS "${orgs_base_url}${org}/repos?page=${page}&per_page=${perpage}" | grep -e 'clone_url.*' | cut -d\" -f4 | xargs -L1 echo)    # HINT: use process substitution to remember $fails
     done
...
@@ -41,35 +87,23 @@ until [ ${page} -gt ${maxpage} ]; do
 done
 popd || exit 1
+print_fails
 # backup github issues
 [[ ! -d ${local_issue_dir} ]] && mkdir -p ${local_issue_dir}
 pushd ${local_issue_dir} || exit 1
 for n in {1..100000}; do
     url="${issue_base_url}${n}.html"
-    if [ ${n} -gt ${min_issue_count} ]; then
+    if [[ ${n} -gt ${min_issue_count} ]]; then
         if ! wget --spider "${url}" 2>/dev/null; then
             echo "stop: issue ${n} does not exist."
             break
         fi
     fi
     echo "save issue: ${n}"
-    wget -q -N -E -K "${issue_base_url}${n}.html"    # FIXME: broken layout
+    wget -q -N -E -K "${issue_base_url}${n}.html" || fails+=("${cmd}")    # FIXME: broken layout
     # wget -q -N -E -K -k -p -H "${issue_base_url}${n}.html" # ALTERNATIVE: still broken layout but offline images
-    result=$?
-    if [ "${result}" -ne 0 ]; then
-        fails+=("${cmd}")    # remember fails
-    fi
 done
 popd || exit 1
-# print fails (if any) and exit
-if [ ${#fails[@]} -eq 0 ]; then
-    exit 0    # no error during execution
-else
-    echo "################"
-    echo "FAILED COMMANDS:"
-    for fail in "${fails[@]}"; do
-        echo "${fail}"    # log failed command
-    done
-    exit 1    # errors during execution
-fi
+print_fails
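As context for the refactor: the new get_cli_args function accepts -b, --backup_dir, --backup-dir, or --backupdir to point the repo and issue mirrors at a directory that the TSM backup client already covers, and it only honours the argument if that directory is writable, otherwise backup_dir keeps its ".." default. A minimal invocation sketch under those assumptions (the mount point below is purely hypothetical):

    # hypothetical path covered by the TSM client; must be writable,
    # otherwise backup_dir silently stays at its ".." default
    ./backup_am_github_repos_and_issues.sh --backup-dir /mnt/tsm/am_mirror

On the HINT comment in the repo loop: feeding "while read -r repo_url; do ... done" from "< <(curl ...)" rather than piping curl into the loop keeps the loop in the current shell, so the fails+=(...) appends survive until print_fails runs; a plain pipe would run the loop body in a subshell and the collected failures would be lost.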
Jens Steidl (@steidl) mentioned in issue #1 (closed) · 9 months ago