Draft: Resolve "fix: export all don't export projects"

Open · Christophe Chaudier requested to merge 39-fix-export-all-don-t-export-projects into master
+77 −56
@@ -101,7 +101,7 @@ _help() {
fi
}
-_check_dependencies(){
+_check_dependencies() {
_all_dependencies_are_installed=true
for dep in ${DEPENDENCIES}; do
@@ -125,11 +125,12 @@ _load_config() {
echo "Use : mygb.sh init"
_fail "File ${config_file} doesn't exist"
fi
backup_dir=${backup_dir:-/tmp/mygb}
mygb_tmp_dir=${backup_dir}/${mygb_date}
mygb_file="${backup_dir}/mygb_${mygb_date}.tgz"
mkdir -p "${mygb_tmp_dir}"
+projects_exported_list="${mygb_tmp_dir}/projects_exported_list.txt"
if [[ ${export_archived} == false ]]; then
archived_filter="&archived=false"
@@ -141,7 +142,7 @@ _load_config() {
_info
}
-_info(){
+_info() {
echo "
GitLab URL [${gitlab_url}]
+ Mode dry_run [${dry_run}]
@@ -194,7 +195,7 @@ _api() {
_nb_of_projects_from_group_page() {
_api GET "groups/${group_url_encoded}/projects?include_subgroups=true&per_page=${API_PER_PAGE}" --head \
-| grep 'x-total-pages'| cut -d ':' -f 2 | tr -d "[:space:]"
+| grep 'x-total-pages' | cut -d ':' -f 2 | tr -d "[:space:]"
}
_get_projects_from_group() {
@@ -204,6 +205,18 @@ _get_projects_from_group() {
done
}
+
+_nb_of_projects_page() {
+_api GET "projects?per_page=${API_PER_PAGE}${archived_filter}${owned_filter}" --head \
+| grep 'x-total-pages'| cut -d ':' -f 2 | tr -d "[:space:]"
+}
+
+_get_projects() {
+total_projects_pages=$(_nb_of_projects_page)
+for (( page=1; page<=total_projects_pages; page++ )); do
+_api GET "projects?per_page=${API_PER_PAGE}&page=${page}${archived_filter}${owned_filter}" | jq -r '.[].path_with_namespace'
+done
+}
_get_project_infos() {
project_infos="$(_get_export_filename project "${project}").json"
_api GET "projects/${project_url_encoded}" | jq > "${project_infos}"
@@ -217,15 +230,15 @@ _get_export_filename() {
echo "${mygb_tmp_dir}/${mygb_date}_${1}_${2//\//-}_export"
}
-_get_curent_user_id() {
+_get_current_user_id() {
_api GET "user" | jq -r '.id'
}
-_get_curent_user_email() {
+_get_current_user_email() {
_api GET "user" | jq -r '.email'
}
-_dry_run(){
+_dry_run() {
cmd="${*}"
if [[ ${dry_run} == false ]]; then
eval "${cmd}"
@@ -267,6 +280,7 @@ _export_project() {
_dry_run _schedule_project_export
_dry_run _wait_export_finished
_dry_run _download_project_export
+echo "${project}" >> "${projects_exported_list}"
((nb_projects_exported++))
fi
}
@@ -308,12 +322,10 @@ _export_group() {
_dry_run _download_group_export
((nb_groups_exported++))
-if [[ ${export_all} == false ]]; then
projects=$(_get_projects_from_group)
for project in ${projects}; do
_export_project
done
fi
-fi
}
@@ -329,31 +341,22 @@ _get_groups() {
done
}
-_nb_of_projects_page() {
-_api GET "projects?per_page=${API_PER_PAGE}${archived_filter}${owned_filter}" --head \
-| grep 'x-total-pages'| cut -d ':' -f 2 | tr -d "[:space:]"
-}
-_get_projects() {
-total_projects_pages=$(_nb_of_projects_page)
-for (( page=1; page<=total_projects_pages; page++ )); do
-_api GET "projects?per_page=${API_PER_PAGE}&page=${page}${archived_filter}${owned_filter}" | jq -r '.[].path'
-done
-}
-_export_all(){
+_export_all() {
if [[ ${export_all} == true ]]; then
echo "Export all my groups and projects"
for group in $(_get_groups); do
_export_group
done
-curent_user_id="$(_get_curent_user_id)"
+current_user_id="$(_get_current_user_id)"
+# on Gitlab.com we want only owned project
+if [[ "${gitlab_url}" == "https://gitlab.com/" ]]; then
+owned_filter="&owned=true"
+fi
for project in $(_get_projects); do
-_export_project
+# export the project only if is not already exported
+grep -q "${project}" "${projects_exported_list}" || _export_project
done
fi
}
@@ -374,7 +377,7 @@ _export() {
# ---[ Import ]---
-_import_all(){
+_import_all() {
local file_to_import="${1}"
if [[ -n $file_to_import ]]; then
@@ -401,8 +404,8 @@ _import_all(){
_import() {
_load_config
-curent_user_id="$(_get_curent_user_id)"
-curent_user_email="$(_get_curent_user_email)"
+current_user_id="$(_get_current_user_id)"
+current_user_email="$(_get_current_user_email)"
api_path_remove_list="${mygb_tmp_dir}/_api_path_remove_list.csv"
_import_group "${group}"
@@ -415,7 +418,7 @@ _import() {
# ---[ Import Group ]---
-_import_group(){
+_import_group() {
local file_to_import="${1}"
if [[ -n $file_to_import ]]; then
@@ -463,7 +466,7 @@ _schedule_group_import() {
# ---[ Import Project ]---
-_import_project(){
+_import_project() {
local file_to_import="${1}"
if [[ -n $file_to_import ]]; then
@@ -491,7 +494,6 @@ _import_project(){
echo -e "\nImporting projet : path [${project_path}] in namespace [${project_full_namespace}] ..."
_dry_run _schedule_projet_import
_dry_run _wait_project_import_finished
fi
}
@@ -531,20 +533,22 @@ _project_import_status() {
}
_clean_import() {
+if [[ -e ${api_path_remove_list} ]]; then
while IFS= read -r api_path; do
_dry_run _api DELETE "${api_path}" > /dev/null
done < "${api_path_remove_list}"
+fi
}
_check_export_current_user_exist() {
export_type=${1}
if [[ ${export_type} = "groups" ]]; then
-cat ./*/members.ndjson | jq -r '.user.public_email' | grep "${curent_user_email}" \
-|| echo "/${export_type}/${group_path}/members/${curent_user_id}" >> "${api_path_remove_list}"
+cat ./*/members.ndjson | jq -r '.user.public_email' | grep --silent "${current_user_email}" \
+|| echo "/${export_type}/${group_path}/members/${current_user_id}" >> "${api_path_remove_list}"
else
-jq -r '.user.public_email' < ./tree/project/project_members.ndjson | grep "${curent_user_email}" \
-|| echo "/projects/$(_get_url_encode "${project_full_namespace}/${project_path}")/members/${curent_user_id}" >> "${api_path_remove_list}"
+jq -r '.user.public_email' < ./tree/project/project_members.ndjson | grep --silent "${current_user_email}" \
+|| echo "/projects/$(_get_url_encode "${project_full_namespace}/${project_path}")/members/${current_user_id}" >> "${api_path_remove_list}"
fi
}
@@ -555,11 +559,11 @@ _report_all() {
local file_to_check="${backup_file}"
report_file="${backup_dir}/mygb_${mygb_date}_report_missing_users.csv"
-report_tmp_file="${mygb_tmp_dir}/mygb_${mygb_date}_report_users_to_check.csv"
+report_tmp_users_to_check_file="${mygb_tmp_dir}/mygb_${mygb_date}_report_users_to_check.csv"
-echo "username;error" > "${report_file}"
echo "Creating report of missing user(s) from file [${file_to_check}] ..."
+echo "username;error" > "${report_file}"
if [[ -n $file_to_check ]]; then
import_tmp_dir=$(mktemp -d -p "${mygb_tmp_dir}" tmp_report_all.XXXXX)
@@ -577,17 +581,28 @@ _report_all() {
cd "${import_tmp_dir}" || _fail "Can't move to ${import_tmp_dir}"
done
done
echo "Check users :"
while IFS= read -r line; do
username="$(echo "${line}" | cut -d ";" -f 1)"
public_email="$(echo "${line}" | cut -d ";" -f 2)"
-if ! _check_user_exist "${username}"; then
-echo "${username};user don't exist" >> "${report_file}"
+if [[ -z ${public_email} ]]; then
+echo "${username};source user without public_email" >> "${report_file}"
+elif ! _check_user_exist "${username}"; then
+dest_username=$(_get_user_with_public_email "${public_email}")
+if [[ -z ${dest_username} ]]; then
+echo "${username};user don't exist" >> "${report_file}"
+else
+echo "${username};user exist with the same source public_email but with the name ${dest_username}" >> "${report_file}"
+fi
elif ! _check_user_public_email "${username}" "${public_email}"; then
-echo "${username};as not the source public_email : ${public_email}" >> "${report_file}"
+dest_username=$(_get_user_with_public_email "${public_email}")
+if [[ -z ${dest_username} ]]; then
+echo "${username};exist but do not have the source public_email : ${public_email}" >> "${report_file}"
+else
+echo "${username};user exist with the same source public_email but with the name ${dest_username}" >> "${report_file}"
+fi
fi
-done < <(sort -u < "${report_tmp_file}")
+done < <(sort -u < "${report_tmp_users_to_check_file}")
fi
echo "----------------------[ Users report ]----------------------"
@@ -597,7 +612,7 @@ _report_all() {
_succed "Report done : ${report_file}"
}
-_list_users_group(){
+_list_users_group() {
local file_to_check="${1}"
if [[ -n $file_to_check ]]; then
import_group_tmp_dir=$(mktemp -d -p "${mygb_tmp_dir}" tmp_report_group.XXXXX)
@@ -612,11 +627,11 @@ _list_users_group(){
tar zxf "${file_to_extract}" || _fail "Can't extract file ${file_to_extract}"
# shellcheck disable=SC2002
-cat ./tree/groups/*/members.ndjson | jq -r '.user.username + ";" + .user.public_email' >> "${report_tmp_file}"
+cat ./tree/groups/*/members.ndjson | jq -r '.user.username + ";" + .user.public_email' >> "${report_tmp_users_to_check_file}"
fi
}
-_list_users_project(){
+_list_users_project() {
local file_to_check="${1}"
if [[ -n $file_to_check ]]; then
import_projet_tmp_dir=$(mktemp -d -p "${mygb_tmp_dir}" tmp_report_projet.XXXXX)
@@ -631,7 +646,7 @@ _list_users_project(){
tar zxf "${file_to_extract}" || _fail "Can't extract file ${file_to_extract}"
# shellcheck disable=SC2002
-cat ./tree/project/project_members.ndjson | jq -r '.user.username + ";" + .user.public_email' >> "${report_tmp_file}"
+cat ./tree/project/project_members.ndjson | jq -r '.user.username + ";" + .user.public_email' >>"${report_tmp_users_to_check_file}"
fi
}
@@ -652,6 +667,12 @@ _check_user_exist() {
test "$(_api GET "users?username=${username_to_check}" | jq -r '.[].id')"
}
+
+_get_user_with_public_email() {
+local public_email_to_search=${1}
+
+_api GET "users?search=${public_email_to_search}" | jq -r '.[].username'
+}
# ---[ Main ]---
_main() {