123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151 |
- . paths.conf
function diff_db() {
  # Compare each repo index in REPO_STORE against its backed-up copy in
  # REPO_STORE2 (written by store_db) and print one report line per
  # changed package: "NEW -- <line>", "DEL -- <line>" or "MOD -- <line>".
  while read -r repo; do
    # Reset the per-repo scratch files in DL_CACHE.
    for i in added deleted modified; do
      rm -f ${DL_CACHE}/${i}
      touch ${DL_CACHE}/${i}
    done
    # diff -N <old> <new>: '<' lines exist only in the backup (deleted),
    # '>' lines only in the current index (added).
    diff -N ${REPO_STORE2}/$(basename ${repo}) ${repo} \
      | awk -v added="${DL_CACHE}/added" -v deleted="${DL_CACHE}/deleted" \
      '/^</ { print substr($0, 3) >> deleted } /^>/ { print substr($0, 3) >> added }'
    # A package name (field 1) that appears in both the added and the
    # deleted list was modified: after sorting, such entries are adjacent,
    # so compare each line's first field with the previous line's.
    cat "${DL_CACHE}/added" "${DL_CACHE}/deleted" | sort | \
      awk -v line="" 'line == $1 { print $1; line=$1; next } { line=$1 }' > "${DL_CACHE}/modified"
    # Remove the "modified" packages from the pure added/deleted lists.
    # NOTE(review): the names are used as unanchored regexes here; a name
    # containing regex metacharacters could over-match — confirm harmless.
    grep -vf "${DL_CACHE}/modified" "${DL_CACHE}/added" > "${DL_CACHE}/added.0"
    mv "${DL_CACHE}/added.0" "${DL_CACHE}/added"
    grep -vf "${DL_CACHE}/modified" "${DL_CACHE}/deleted" > "${DL_CACHE}/deleted.0"
    mv "${DL_CACHE}/deleted.0" "${DL_CACHE}/deleted"
    # Emit one tagged line per entry of each category.
    for option in "added" "deleted" "modified"; do
      while read -r line; do
        case $(basename ${option}) in
          'added')
            echo "NEW -- ${line}"
            ;;
          'modified')
            echo "MOD -- ${line}"
            ;;
          'deleted')
            echo "DEL -- ${line}"
            ;;
        esac
      done < "${DL_CACHE}/${option}"
    done
  done < <(find ${REPO_STORE} -regex '.*' -type f)
}
function store_db() {
  # Back up every repo index from REPO_STORE into REPO_STORE2 so that
  # diff_db can later compare the old and new indexes.
  # Returns: 0.
  _log info "Backing up database"
  # NUL-delimited find/read so paths containing spaces (or any other
  # IFS characters) survive; the original unquoted ${line} word-split.
  while IFS= read -r -d '' line; do
    cp -- "${line}" "${REPO_STORE2}/$(basename "${line}")"
  done < <(find "${REPO_STORE}" -type f -print0)
  return 0
}
function update_db() {
  # Refresh every repo index listed in ./repos.conf.
  # Each active line has the form "METHOD REPO URL"; lines whose first
  # non-blank character is '#' are comments.
  local method repo url rv
  while read -r method repo url _; do
    # Skip comments and blank lines ('read' strips leading whitespace,
    # so an indented '#' still lands at the start of ${method}).
    case "${method}" in ''|'#'*) continue ;; esac
    _log info "pulling ${url} using ${method}"
    rv=0
    case "${method}" in
      SCP)
        # '> /dev/null 2>&1': the original '2>&1 > /dev/null' silenced
        # only stdout and leaked scp's stderr to the terminal.
        scp "${url}/index" "${DL_CACHE}/${repo}" > /dev/null 2>&1 || rv=$?
        ;;
      *)
        # Originally an unknown method fell through and tested a stale $?.
        _log error "unknown method ${method} for ${repo}"
        continue
        ;;
    esac
    if [ "${rv}" -ne 0 ]; then
      _log error "Could not pull ${url}"
      continue   # don't unpack a missing/stale download
    fi
    gzip -dc "${DL_CACHE}/${repo}" 2>/dev/null | sort > "${REPO_STORE}/${repo}"
    # Check gzip's own status: the pipeline's $? is sort's, which is
    # almost always 0 even when decompression failed.
    if [ "${PIPESTATUS[0]}" -ne 0 ]; then
      _log error "cannot unpack ${repo}"
    fi
  done < ./repos.conf
}
function run_pkg() {
  # Execute package ${1}, appending its output to a log session.
  # $1 - package name as listed in the repo index
  # $2 - log-session directory name under LOG_STORE
  PKG_NAME=$(lookup_pkg "${1}" | cut -s -f2 -d' ')
  # The original passed \"${1}\" — literal quote characters reached
  # lookup_pkg, so the dependency lookup could never match.
  DEPS="$(lookup_pkg "${1}" | cut -s -f3 -d' ' | tr ',' ' ')"
  LOG="${LOG_STORE}/${2}/"
  # The original tested -d "${2}" (the bare session name, relative to
  # the cwd) instead of the actual log directory it creates.
  if ! [ -d "${LOG}" ]; then
    mkdir -p "${LOG}"
    touch "${LOG}/kickoff"
  fi
  printf 'TIME %s %s\n' "${PKG_NAME}" "$(date --rfc-3339=seconds | tr ' ' '_')" >> "${LOG}/kickoff"
  printf 'START %s %s\n' "${PKG_NAME}" "${1}" >> "${LOG}/kickoff"
  # '>> file 2>&1' (not '2>&1 >>') so stderr is captured in the log too.
  bash "${PKG_STORE}/${PKG_NAME}" >> "${LOG}/${PKG_NAME}" 2>&1
}
function lookup_pkg() {
  # Look up package ${1} across every repo index file in REPO_STORE.
  # Index line format appears to be "<pkg> <file> <deps,...>" (field 2 is
  # executed by run_pkg; field 3 is the comma-separated dependency list —
  # confirm against the repo index generator).
  # Output:  "<repo> <file> <deps>" on stdout when found exactly once.
  # Returns: 0 found, 1 ambiguous (in several repos), 3 not found.
  PKG_NAME="${1}"
  # -H prefixes each match with the index file path.  Spaces are swapped
  # to ';' so that the unquoted expansion below word-splits on newlines
  # (one word per matching repo), not on the fields inside a match.
  GREP_RESULT="$(grep -H -m1 "^${PKG_NAME} " ${REPO_STORE}/* | tr ' ' ';')"
  if [ "$(echo "${GREP_RESULT}" | wc -l)" -gt 1 ]; then
    # More than one repo matched: report every repo name and fail.
    REPOS=''
    for i in ${GREP_RESULT}; do
      REPOS="${REPOS} $(basename $(echo ${i} | cut -f1 -d':'))"
    done
    _log error "${PKG_NAME} exists in ${REPOS}" >&2
    return 1
  fi
  [ -z "${GREP_RESULT}" ] && return 3
  # Restore the spaces: "<path>:<pkg> <file> <deps>".
  GREP_RESULT="$(echo ${GREP_RESULT} | tr ';' ' ')"
  # Field 1 up to ':' is the index path (basename = repo name);
  # space-fields 2 and 3 are the package file and dependency list.
  printf '%s %s %s\n' "$(basename $(echo ${GREP_RESULT} | cut -f1 -d':'))" "$(echo ${GREP_RESULT} | cut -f2 -d' ')" "$(echo ${GREP_RESULT} | cut -s -f3 -d' ')"
  return $?
}
function depends_pkg() {
  # Print the dependencies of package ${1} as space-separated words:
  # field 3 onward of lookup_pkg's output with commas turned into spaces.
  # Unknown packages produce no output.  Always returns 0.
  PKG_NAME="${1}"
  lookup_pkg ${PKG_NAME} | cut -s -d' ' -f3- | tr ',' ' '
  return 0
}
function download_pkg() {
  # Fetch package ${2} from repository ${1} into ${DL_CACHE}/${2}.download,
  # using the METHOD configured for the repo in ./repos.conf
  # ("METHOD REPO URL").  SCP pulls return scp's status directly; the
  # HTTP path (currently disabled) splits the raw response into
  # <pkg>.header / <pkg>.body and succeeds only on status 200.
  # Returns: 0 success, 1 pull failed, 2 header parse failed.
  REPO_NAME="${1}"
  PKG_NAME="${2}"
  REPO_BUFF="$(grep "${REPO_NAME}" ./repos.conf)"
  METHOD="$(echo ${REPO_BUFF} | cut -f1 -d' ')"
  REPO_URL="$(echo ${REPO_BUFF} | cut -f3- -d' ')"
  _log info "downloading ${PKG_NAME} from ${REPO_NAME}"
  case ${METHOD} in
    SCP)
      # '> /dev/null 2>&1': the original '2>&1 > /dev/null' discarded
      # stdout but still leaked scp's stderr to the terminal.
      scp "${REPO_URL}/${PKG_NAME}" "${DL_CACHE}/${PKG_NAME}.download" > /dev/null 2>&1
      return
      ;;
    HTTP)
      _log error 'not supported'
      # curl -sLi -F action=GET_JOB -F file_name=${PKG_NAME} ${STORESERVER} > ${DL_CACHE}/${PKG_NAME}.download
      ;;
    #TODO: Add plugin support
  esac
  [ $? -ne 0 ] && return 1
  # Split the raw HTTP response: lines from an "HTTP/1" line up to the
  # blank separator go to .header, the rest to .body.  The original
  # called gsub($0, /\r\n/, "") with the arguments reversed, so the
  # trailing CR was never stripped; sub(/\r$/, "") is what was intended.
  awk -v header="${DL_CACHE}/${PKG_NAME}.header" -v body="${DL_CACHE}/${PKG_NAME}.body" -v bl=1 \
    'bl { bl=0; h=($0 ~ /HTTP\/1/) } /^\r?$/ { bl=1 } { sub(/\r$/, ""); print $0 > (h ? header : body) }' \
    "${DL_CACHE}/${PKG_NAME}.download"
  rm -f "${DL_CACHE}/${PKG_NAME}.download"
  # Reduce the header to the status code plus the Date line.
  awk '/HTTP/ { print "status:" $2 } /Date/ { print $0 }' "${DL_CACHE}/${PKG_NAME}.header" \
    > "${DL_CACHE}/${PKG_NAME}.header.tmp" 2>/dev/null \
    && mv "${DL_CACHE}/${PKG_NAME}.header.tmp" "${DL_CACHE}/${PKG_NAME}.header"
  [ $? -ne 0 ] && return 2
  # Drop empty (possibly CR-terminated) lines; sed works per line, so the
  # original's '\n*' could never match and is dropped here.
  sed -i '/^\r*$/d' "${DL_CACHE}/${PKG_NAME}.header" "${DL_CACHE}/${PKG_NAME}.body"
  # The original captured awk's (empty) stdout into HTTP_OK and passed it
  # to 'return', which only worked by accident; propagate awk's exit
  # status explicitly instead.
  awk -F':' '/^status/ { if ($2 == "200") { exit 0 } else { exit 1 } }' "${DL_CACHE}/${PKG_NAME}.header"
  return $?
}
function prepare_pkg() {
  # Unpack a downloaded package.  ${DL_CACHE}/<pkg>.download is base64
  # text whose decoded form is "<metadata>\n---\n<gzipped script>".
  # Metadata is written to ${PKG_META_STORE}/<pkg> and echoed to stdout;
  # the decompressed script is installed as ${PKG_STORE}/<pkg>.
  # Returns: 1 no download file, 2 base64 decode failed,
  #          3 payload would not decompress, 0 otherwise.
  PKG_NAME="${1}"
  if ! [ -f "${DL_CACHE}/${PKG_NAME}.download" ]; then
    return 1
  fi
  if ! base64 -d "${DL_CACHE}/${PKG_NAME}.download" > "${DL_CACHE}/${PKG_NAME}.stage1"; then
    return 2
  fi
  # Everything before the '---' marker is metadata; the remainder is the
  # gzip-compressed package body, streamed straight into gunzip.
  awk -v p=1 -v meta="${PKG_META_STORE}/${PKG_NAME}" \
    '/^---$/ { p=0; next } p { print $0 > meta } !p { print $0 }' \
    "${DL_CACHE}/${PKG_NAME}.stage1" \
    | gunzip 2>/dev/null > "${DL_CACHE}/${PKG_NAME}.stage2"
  # The original ignored gunzip failures and installed an empty script.
  # Exit status 1 is a hard error; 2 is only a warning (e.g. trailing
  # garbage after the gzip stream) and the output is still usable.
  if [ "${PIPESTATUS[1]}" -eq 1 ]; then
    return 3
  fi
  cp "${DL_CACHE}/${PKG_NAME}.stage2" "${PKG_STORE}/${PKG_NAME}"
  cat "${PKG_META_STORE}/${PKG_NAME}"
}
|