Compare commits
No commits in common. "aa67146dab4953066b85586110f8f37e78f73f83" and "4e0aae0f011d00421af7f56cc2f9231482a20eb2" have entirely different histories.
aa67146dab ... 4e0aae0f01
@@ -43,6 +43,9 @@ If you wish to override where pid files are stored to prevent duplicate module s
 ### LOGPATH
 If you wish to override where logs are stored, the default is `/var/log/mirror-sync` and the directory must have write access for the mirror user.
 
+### sync_timeout
+Timeout before a sync is cancelled; defaults to `timeout 1d`, which should work for most mirrors.
+
 ### max_errors
 How many errors before an email is sent regarding the issue. This allows you to ignore anomalies.
 
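
For illustration only (not part of this diff), here is a hedged sketch of what overriding the three settings documented above might look like, assuming they live in /etc/mirror-sync.conf, the file the script sources; the values are made up:

    # /etc/mirror-sync.conf -- example overrides, values are illustrative
    LOGPATH="/srv/log/mirror-sync"   # must be writable by the mirror user
    sync_timeout="timeout 2d"        # give slow mirrors two days per sync
    max_errors=5                     # mail only once more than 5 errors accumulate
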
@@ -26,6 +26,7 @@ ERRORFILE="" # To be filled by acquire_lock().
 error_count=0
 max_errors=3
 tmpDirBase="$HOME/tmp"
+sync_timeout="timeout 1d"
 # Do not check upstream unless it was updated in the last 5 hours.
 upstream_max_age=18000
 # Update anyway if last check was more than 24 hours ago.
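
A minimal sketch (not part of the diff) of how the new sync_timeout default behaves once it is eval'd in front of a sync command; the rsync source and destination are made up:

    sync_timeout="timeout 1d"
    # eval lets the variable expand into a command prefix, so this runs:
    #   timeout 1d rsync -avH rsync://mirror.example.org/module /srv/mirror/module
    eval "$sync_timeout" rsync -avH "rsync://mirror.example.org/module" "/srv/mirror/module"
    # Setting sync_timeout="" in the configuration would remove the time limit entirely.
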
@@ -50,7 +51,7 @@ fi
 
 # Load the required configuration file or quit.
 if [[ -f /etc/mirror-sync.conf ]]; then
-    # shellcheck disable=SC1090
+    # shellcheck source=/dev/null
     source /etc/mirror-sync.conf
 else
     echo "No configuration file defined, please setup a proper configuration file."
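
For context, a hedged sketch of how the two ShellCheck directives differ; the comments are mine, not from the script:

    # shellcheck disable=SC1090    # old: suppress the "can't follow source" warning code
    # shellcheck source=/dev/null  # new: point ShellCheck at /dev/null so it skips following the file
    source /etc/mirror-sync.conf
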
@@ -263,12 +264,12 @@ git_sync() {
             rm -f "$ERRORFILE"
         fi
     else
-        error_count=$((error_count+1))
-        if ((error_count>max_errors)); then
+        new_error_count=$((error_count+1))
+        if ((new_error_count>max_errors)); then
             mail_error "Unable to sync with git, check logs."
             rm -f "$ERRORFILE"
         fi
-        echo "$error_count" > "$ERRORFILE"
+        echo "$new_error_count" > "$ERRORFILE"
     fi
 )
 
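
A hedged sketch of the persist-and-threshold pattern this hunk adjusts, simplified and assuming error_count is loaded from ERRORFILE earlier in the script (that part is not shown in this diff); the path and the sync_ok stand-in are made up:

    ERRORFILE="/var/run/mirror-sync/git-module.error"   # illustrative path
    error_count=0
    [[ -f $ERRORFILE ]] && error_count=$(<"$ERRORFILE")

    if sync_ok; then
        rm -f "$ERRORFILE"                    # success clears the running count
    else
        new_error_count=$((error_count + 1))  # separate name, so the loaded value stays untouched
        if ((new_error_count > max_errors)); then
            mail_error "Unable to sync with git, check logs."
        fi
        echo "$new_error_count" > "$ERRORFILE"
    fi
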
@@ -303,7 +304,7 @@ aws_sync() {
     fi
 
     # Run AWS client to sync the S3 bucket.
-    eval timeout 1d aws s3 sync \
+    eval "$sync_timeout" aws s3 sync \
         --no-follow-symlinks \
         --delete \
         "$options" \
@@ -345,7 +346,7 @@ ftp_sync() {
     log_start_header
 
     # Run AWS client to sync the S3 bucket.
-    timeout 1d lftp <<< "mirror -v --delete --no-perms $options '${source:?}' '${repo:?}'"
+    $sync_timeout lftp <<< "mirror -v --delete --no-perms $options '${source:?}' '${repo:?}'"
     RT=${PIPESTATUS[0]}
     if (( RT == 0 )); then
         date +%s > "${timestamp:?}"
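
One detail worth noting about this replacement: $sync_timeout is left unquoted here on purpose, so the shell word-splits it into the timeout command and its argument. A tiny illustration with a made-up command:

    sync_timeout="timeout 1d"
    $sync_timeout sleep 5     # runs: timeout 1d sleep 5
    "$sync_timeout" sleep 5   # fails: no command literally named "timeout 1d"
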
@@ -397,7 +398,7 @@ wget_sync() {
     fi
 
     # Run wget with configured options.
-    eval timeout 1d wget "$options" "'${source:?}'"
+    eval "$sync_timeout" wget "$options" "'${source:?}'"
     RT=${PIPESTATUS[0]}
     if (( RT == 0 )); then
         date +%s > "${timestamp:?}"
@@ -405,12 +406,12 @@ wget_sync() {
             rm -f "$ERRORFILE"
         fi
     else
-        error_count=$((error_count+1))
-        if ((error_count>max_errors)); then
+        new_error_count=$((error_count+1))
+        if ((new_error_count>max_errors)); then
             mail_error "Unable to sync with lftp, check logs."
             rm -f "$ERRORFILE"
         fi
-        echo "$error_count" > "$ERRORFILE"
+        echo "$new_error_count" > "$ERRORFILE"
     fi
 )
 
@@ -441,7 +442,7 @@ debianMirror="file:${jigdo_pkg_repo:-}"
 nonusMirror="file:/tmp"
 include='.' # include all files,
 exclude='^$' # then exclude none
-jigdoDir=${jigdoDir}"
+jigdoDir=${jigdoDir}
 imageDir=${imageDir}
 tmpDir=${tmpDirBase:?}/${arch}.${s}
 #logfile=${LOGPATH}/${MODULE}-${arch}.${s}.log
@@ -526,7 +527,8 @@ build_trace_content() {
     # Total bytes synced per rsync stage.
     total=0
     if [[ -f $LOGFILE_SYNC ]]; then
-        for bytes in $(sed -Ene 's/(^|.* )sent ([0-9]+) bytes received ([0-9]+) bytes.*/\3/p' "$LOGFILE_SYNC"); do
+        all_bytes=$(sed -Ene 's/(^|.* )sent ([0-9]+) bytes received ([0-9]+) bytes.*/\3/p' "$LOGFILE_SYNC")
+        for bytes in $all_bytes; do
             total=$(( total + bytes ))
         done
     elif [[ -f $LOGFILE_STAGE1 ]]; then
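
For reference, a hedged sketch of what that sed expression extracts; the log line below is illustrative (spacing simplified to match the pattern) rather than copied from a real run:

    line="total: sent 1861 bytes received 1038412 bytes"
    echo "$line" | sed -Ene 's/(^|.* )sent ([0-9]+) bytes received ([0-9]+) bytes.*/\3/p'
    # prints 1038412 -- the "received" figure, i.e. capture group \3
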
@@ -634,6 +636,7 @@ rsync_sync() {
     fi
     eval post_hook="\$${MODULE}_post_hook"
     eval jigdo_pkg_repo="\$${MODULE}_jigdo_pkg_repo"
+    export jigdo_pkg_repo
     eval arch_configurations="\$${MODULE}_arch_configurations"
     eval repo_type="\$${MODULE}_type"
 
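
A hedged sketch of the variable-indirection pattern those eval lines use, and of what the new export adds; the module name and path are made up:

    MODULE="examplerepo"
    examplerepo_jigdo_pkg_repo="/srv/mirror/examplerepo"   # per-module setting, illustrative
    eval jigdo_pkg_repo="\$${MODULE}_jigdo_pkg_repo"       # eval sees: jigdo_pkg_repo=$examplerepo_jigdo_pkg_repo
    export jigdo_pkg_repo                                  # now visible to child processes spawned later
    echo "$jigdo_pkg_repo"                                 # /srv/mirror/examplerepo
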
@@ -693,7 +696,7 @@ rsync_sync() {
 
     # Run the rsync. Using eval here so extra_args expands and is used as arguments.
     stage1_started=$(date +%s)
-    eval timeout 1d rsync -avH \
+    eval "$sync_timeout" rsync -avH \
         --human-readable \
         --progress \
         --safe-links \
@@ -758,7 +761,7 @@ rsync_sync() {
 
     # Run the rsync. Using eval here so extra_args expands and is used as arguments.
     stage2_started=$(date +%s)
-    eval timeout 1d rsync -avH \
+    eval "$sync_timeout" rsync -avH \
         --human-readable \
         --progress \
         --safe-links \
@@ -894,7 +897,7 @@ EOF
 
     # Run the rsync. Using eval here so extra_args expands and is used as arguments.
     sync_started=$(date +%s)
-    eval timeout 1d "$QFM_BIN" \
+    eval "$sync_timeout" "$QFM_BIN" \
         -c "'$conf_path'" \
         "$extra_args" | tee -a "$LOGFILE_SYNC"
     RT=${PIPESTATUS[0]}
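
Unchanged by this hunk, but relevant to reading it: RT is taken from ${PIPESTATUS[0]} because the sync command is piped into tee, so $? alone would report tee's status. A minimal illustration with made-up commands:

    false | tee -a /tmp/example.log   # tee exits 0 even though false failed
    RT=${PIPESTATUS[0]}               # 1 -- exit status of the first command in the pipeline
    echo "$RT"
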