Jelle van der Waa
2018-08-26 12:52:59 UTC
Updating the archweb database is now handled by archweb itself and
deployed on the server using a systemd unit/service. These scripts are
no longer used.
Signed-off-by: Jelle van der Waa <***@vdwaa.nl>
---
README.md | 6 +--
cron-jobs/update-web-db | 78 -----------------------------------
cron-jobs/update-web-files-db | 1 -
3 files changed, 1 insertion(+), 84 deletions(-)
delete mode 100755 cron-jobs/update-web-db
delete mode 120000 cron-jobs/update-web-files-db
diff --git a/README.md b/README.md
index c672998..925cca7 100644
--- a/README.md
+++ b/README.md
@@ -11,9 +11,7 @@ The executables that you (might) care about are:
│ ├── devlist-mailer
│ ├── ftpdir-cleanup
│ ├── integrity-check
- │ ├── sourceballs
- │ ├── update-web-db
- │ └── update-web-files-db
+ │ └── sourceballs
├── db-move
├── db-remove
├── db-repo-add
@@ -56,8 +54,6 @@ Things that haven't been mentioned yet:
- `cron-jobs/devlist-mailer`
- `cron-jobs/sourceballs`
- - `cron-jobs/update-web-db`
- - `cron-jobs/update-web-files-db`
## Testing
* Install the `make` and `docker` packages. Start the docker daemon by issuing `systemctl start docker`.
* The test suite can now be run with `make test`.
diff --git a/cron-jobs/update-web-db b/cron-jobs/update-web-db
deleted file mode 100755
index 39ed765..0000000
--- a/cron-jobs/update-web-db
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/bin/bash
-
-. "$(dirname "$(readlink -e "$0")")/../config"
-. "$(dirname "$(readlink -e "$0")")/../db-functions"
-
-# setup paths
-SPATH="/srv/http/archweb"
-ENVPATH="/srv/http/archweb-env/bin/activate"
-
-# having "more important repos" last should make [core] trickle to the top of
-# the updates list each hour rather than being overwhelmed by big [extra] and
-# [community] updates
-REPOS=('community-testing' 'multilib-testing' 'multilib' 'community' 'extra' 'testing' 'core')
-LOGOUT="/tmp/archweb_update.log"
-
-# figure out what operation to perform
-cmd="${0##*/}"
-if [[ $cmd != "update-web-db" && $cmd != "update-web-files-db" ]]; then
- die "Invalid command name '%s' specified!" "$cmd"
-fi
-
-script_lock
-
-# run at nice 5. it can churn quite a bit of cpu after all.
-renice +5 -p $$ > /dev/null
-
-echo "%s: Updating DB at %s" "$cmd" "$(date)" >> "${LOGOUT}"
-
-# source our virtualenv if it exists
-if [[ -f "$ENVPATH" ]]; then
- . "$ENVPATH"
-fi
-
-case "$cmd" in
- update-web-db)
- dbfileext="${DBEXT}"
- flags=""
- ;;
- update-web-files-db)
- dbfileext="${FILESEXT}"
- flags="--filesonly"
- ;;
-esac
-
-# Lock the repos and get a copy of the db files to work on
-for repo in "${REPOS[@]}"; do
- for arch in "${ARCHES[@]}"; do
- repo_lock "${repo}" "${arch}" || exit 1
- dbfile="/srv/ftp/${repo}/os/${arch}/${repo}${dbfileext}"
- if [[ -f ${dbfile} ]]; then
- mkdir -p "${WORKDIR}/${repo}/${arch}"
- cp "${dbfile}" "${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
- fi
- repo_unlock "${repo}" "${arch}"
- done
-done
-
-# Run reporead on our db copy
-pushd "$SPATH" >/dev/null
-for repo in "${REPOS[@]}"; do
- for arch in "${ARCHES[@]}"; do
- dbcopy="${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
- if [[ -f ${dbcopy} ]]; then
- echo "Updating ${repo}-${arch}" >> "${LOGOUT}"
- ./manage.py reporead "${flags}" "${arch}" "${dbcopy}" >> "${LOGOUT}" 2>&1
- echo "" >> "${LOGOUT}"
- fi
- done
-done
-popd >/dev/null
-echo "" >> "${LOGOUT}"
-
-# rotate the file if it is getting big (> 10M), overwriting any old backup
-if [[ $(stat -c%s "${LOGOUT}") -gt 10485760 ]]; then
- mv "${LOGOUT}" "${LOGOUT}.old"
-fi
-
-script_unlock
diff --git a/cron-jobs/update-web-files-db b/cron-jobs/update-web-files-db
deleted file mode 120000
index 0c2c4fa..0000000
--- a/cron-jobs/update-web-files-db
+++ /dev/null
@@ -1 +0,0 @@
-update-web-db
\ No newline at end of file
-- 
2.18.0