Removed obsolete version of fuel-createmirror

fuel-createmirror was replaced by fuel-mirror

Change-Id: I336fac39c8b1dd0176cbd1df044e9e1860ad8fb6
Related-Bug: #1487077
Bulat Gaifullin 2015-12-05 22:20:35 +03:00
parent 6534117233
commit 7d429bdd3f
19 changed files with 0 additions and 2082 deletions

View File

@@ -52,8 +52,3 @@ maintainers:
- name: Sergey Kulanov
email: skulanov@mirantis.com
IRC: SergK
- util/:
- name: Vitaly Parakhin
email: vparakhin@mirantis.com
IRC: brain461

View File

@@ -1,58 +0,0 @@
# Base directory for mirror
MIRROR_ROOT="/var/www/nailgun"
# Configure upstream mirrors
# NOTE: only rsync mirrors are supported by the script!
# Please refer to https://launchpad.net/ubuntu/+archivemirrors for
# official upstream Ubuntu mirrors list
#
# Format:
# MIRROR_{DISTRO}_HOST - mirror hostname, i.e. "archive.ubuntu.com"
# MIRROR_{DISTRO}_DIR - source directory (rsync module name), i.e. "ubuntu"
# MIRROR_{DISTRO}_HTTPDIR - HTTP URL of mirror (usually equals to MIRROR_{DISTRO}_DIR)
# {DISTRO} - could be one of: UBUNTU, MOS_UBUNTU
# Upstream Ubuntu packages
MIRROR_UBUNTU_HOST="archive.ubuntu.com"
MIRROR_UBUNTU_DIR="ubuntu"
MIRROR_UBUNTU_HTTPDIR=$MIRROR_UBUNTU_DIR
# MOS Ubuntu packages
MIRROR_MOS_UBUNTU_HOST="mirror.fuel-infra.org"
MIRROR_MOS_UBUNTU_DIR="mirror/mos-repos/ubuntu/8.0"
MIRROR_MOS_UBUNTU_HTTPDIR="mos-repos/ubuntu/8.0"
# Configure repository paths
# Format:
# {DISTRO}_PATH=/path
# {DISTRO} - could be one of: UBUNTU, MOS_UBUNTU
UBUNTU_PATH="${MIRROR_ROOT}/ubuntu-part"
MOS_UBUNTU_PATH="${MIRROR_ROOT}/mos-ubuntu"
# Configure upstream packages mirroring mode
# PARTIAL_UPSTREAM:
# 0 - script will mirror all packages from specified distributions
# and components. Upstream mirror structure will be preserved.
# 1 - (default) script will download only packages required for
# MOS. For Ubuntu packages script will create partial repository
# with the "main" component only, original mirror structure will
# not be preserved.
# NOTE: This setting affects upstream OS only (MOS mirror is always fetched as-is).
PARTIAL_UPSTREAM=1
# In case of working behind http proxy uncomment the following parameters
# Note that your proxy configuration must allow proxying to port 873
# RSYNC_PROXY=username:password@host:port
# export RSYNC_PROXY
# http_proxy=http://username:password@host:port/
# export http_proxy
# On the Fuel node (which runs CentOS) we use a dockerized Ubuntu to resolve dependencies.
# Set this to "true" only if Docker is up and running.
# If set to "false", generic apt will be used instead.
DOCKER_MODE=true
# Log dir
LOG_ROOT="/var/log/mirror-sync"

View File

@@ -1,33 +0,0 @@
# Auto-detect environment
if ! hash fuel2 2>/dev/null; then
# working outside of Fuel node, set Fuel parameters manually
FUEL_VERSION=8.0
FUEL_SERVER="<specify_Fuel_server_IP_here>"
# --------------------------------------
# DO NOT CHANGE ANYTHING BELOW THIS LINE
# --------------------------------------
else
# Running on Fuel node
# On the Fuel node (which runs CentOS) we use a dockerized Ubuntu to resolve dependencies.
# Set this to "true" only if Docker is up and running.
# If set to "false", generic apt will be used instead.
DOCKER_MODE=true
# autodetect Fuel settings
source <( dockerctl shell astute cat /etc/fuel/astute.yaml | $BINROOT/util/parse_yaml.py "FUEL" )
source <( dockerctl shell nailgun cat /etc/nailgun/version.yaml | $BINROOT/util/parse_yaml.py "FUEL" )
FUEL_VERSION=$FUEL_VERSION_release
FUEL_SERVER=$FUEL_ADMIN_NETWORK_ipaddress
FULL_RELEASE=$FUEL_VERSION_openstack_version
if [ -n "$FUEL_MASTER_PASS" ]; then
FUEL_ADMIN_PASS=$FUEL_MASTER_PASS
else
FUEL_ADMIN_PASS=$FUEL_FUEL_ACCESS_password
fi
fi

View File

@@ -1,40 +0,0 @@
# Source common config
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/common.cfg"
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/fuel.cfg"
# Sync source
UPSTREAM=$MIRROR_MOS_UBUNTU_HOST
UPSTREAM_DIR=$MIRROR_MOS_UBUNTU_DIR
UPSTREAM_DIR_HTTP=$MIRROR_MOS_UBUNTU_HTTPDIR
# Always sync full MOS mirror
PARTIAL_UPSTREAM=0
# Sync destination
LOCAL_DIR=$MOS_UBUNTU_PATH
# What parts to sync
I18Ns=( en ) # Translations; do not delete English, it might break the repo
ARCHs=( amd64 )
DISTs=()
DISTs+=( mos${FUEL_VERSION}-updates mos${FUEL_VERSION}-security mos${FUEL_VERSION}-holdback )
declare -A DIST_COMPONENTs
DIST_COMPONENTs["mos${FUEL_VERSION}-updates"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-security"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-holdback"]="main restricted"
# Optional fetch configuration
FETCH_I18N="no" # Fetch translations
FETCH_SOURCES="no" # Fetch packages sources
FETCH_CONTENTS="no" # Fetch 'Contents' file for distro
FETCH_INSTALLER="no" # Fetch separate 'debian-installer' component
FETCH_DIFF="no" # Fetch diffs of 'Packages' (speeds up client updates)
FETCH_INDICES="no" # Fetch indices
# Misc
DEBUG="no"
QUIET="no"
DISABLE_CLEANUP="no"
LOG_FILE="${LOG_ROOT:-"/var/log/mirror-sync"}/ubuntu-update.log"

View File

@@ -1,41 +0,0 @@
# Source common config
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/common.cfg"
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/fuel.cfg"
# Sync source
UPSTREAM=$MIRROR_MOS_UBUNTU_HOST
UPSTREAM_DIR=$MIRROR_MOS_UBUNTU_DIR
UPSTREAM_DIR_HTTP=$MIRROR_MOS_UBUNTU_HTTPDIR
# Always sync full MOS mirror
PARTIAL_UPSTREAM=0
# Sync destination
LOCAL_DIR=$MOS_UBUNTU_PATH
# What parts to sync
I18Ns=( en ) # Translations; do not delete English, it might break the repo
ARCHs=( amd64 )
DISTs=()
DISTs+=( mos${FUEL_VERSION} mos${FUEL_VERSION}-updates mos${FUEL_VERSION}-security mos${FUEL_VERSION}-holdback )
declare -A DIST_COMPONENTs
DIST_COMPONENTs["mos${FUEL_VERSION}"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-updates"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-security"]="main restricted"
DIST_COMPONENTs["mos${FUEL_VERSION}-holdback"]="main restricted"
# Optional fetch configuration
FETCH_I18N="no" # Fetch translations
FETCH_SOURCES="no" # Fetch packages sources
FETCH_CONTENTS="no" # Fetch 'Contents' file for distro
FETCH_INSTALLER="no" # Fetch separate 'debian-installer' component
FETCH_DIFF="no" # Fetch diffs of 'Packages' (speeds up client updates)
FETCH_INDICES="no" # Fetch indices
# Misc
DEBUG="no"
QUIET="no"
DISABLE_CLEANUP="no"
LOG_FILE="${LOG_ROOT:-"/var/log/mirror-sync"}/ubuntu-update.log"

View File

@@ -1,262 +0,0 @@
anacron
apache2
aptitude
atop
bc
biosdevname
bridge-utils
build-essential
ceilometer-agent-central
ceilometer-agent-compute
ceilometer-agent-notification
ceilometer-alarm-evaluator
ceilometer-alarm-notifier
ceilometer-api
ceilometer-collector
ceilometer-common
ceph
ceph-deploy
ceph-mds
cinder-api
cinder-backup
cinder-scheduler
cinder-volume
cirros-testvm
cirros-testvmware
cloud-init
conntrackd
corosync
cpu-checker
cpufrequtils
curl
daemonize
debconf-utils
debhelper
devscripts
discover
discover-data
disktype
dkms
dnsmasq-base
dnsmasq-utils
dstat
erlang-dev
ethtool
fence-agents
fuel-agent
galera
git
glance-api
glance-common
glance-registry
grub-pc
haproxy
heat-api
heat-api-cfn
heat-api-cloudwatch
heat-common
heat-docker
heat-engine
hicolor-icon-theme
htop
ifenslave
installation-report
intel-microcode
iperf
iperf3
ipmitool
iptables-persistent
kdump-tools
keystone
language-pack-en
language-pack-en-base
language-pack-gnome-en
language-pack-gnome-en-base
libapache2-mod-fastcgi
libapache2-mod-wsgi
libguestfs-tools
libnss3-tools
libssl0.9.8
linux-firmware
linux-firmware-nonfree
live-boot
live-boot-initramfs-tools
lksctp-tools
lldpad
mc
mcelog
mcollective
mcollective-client
mdadm
memcached
mongodb-clients
mongodb-server
monit
msmtp-mta
murano-api
murano-dashboard
murano-engine
mysql-client
mysql-server-wsrep-5.6
nailgun-agent
nailgun-mcagents
nailgun-net-check
neutron-common
neutron-dhcp-agent
neutron-l3-agent
neutron-lbaas-agent
neutron-metadata-agent
neutron-metering-agent
neutron-plugin-bigswitch
neutron-plugin-bigswitch-agent
neutron-plugin-brocade
neutron-plugin-cisco
neutron-plugin-ibm
neutron-plugin-ibm-agent
neutron-plugin-linuxbridge
neutron-plugin-linuxbridge-agent
neutron-plugin-metaplugin
neutron-plugin-metering-agent
neutron-plugin-midonet
neutron-plugin-ml2
neutron-plugin-mlnx
neutron-plugin-mlnx-agent
neutron-plugin-nec
neutron-plugin-nec-agent
neutron-plugin-nicira
neutron-plugin-nuage
neutron-plugin-oneconvergence
neutron-plugin-oneconvergence-agent
neutron-plugin-opencontrail
neutron-plugin-openvswitch
neutron-plugin-openvswitch-agent
neutron-plugin-plumgrid
neutron-plugin-sriov-agent
neutron-plugin-vmware
neutron-server
neutron-vpn-agent
nfs-common
nginx
nmap
nodejs
node-less
nova-api
nova-cert
nova-compute-kvm
nova-compute-qemu
nova-conductor
nova-consoleauth
nova-consoleproxy
nova-network
nova-objectstore
nova-scheduler
novnc
ntp
ohai
openhpid
openssh-client
openssh-server
openstack-dashboard
openvswitch-switch
os-prober
pacemaker
pcs
percona-toolkit
percona-xtrabackup
pssh
puppet
python-carrot
python-ceilometer
python-ceilometerclient
python-ceph
python-daemon
python-daemonize
python-dingus
python-ethtool
python-feedparser
python-gflags
python-glance
python-gobject
python-gobject-2
python-heatclient
python-keyring
python-keystone
python-ldap
python-libxml2
python-memcache
python-muranoclient
python-mysqldb
python-netifaces
python-neutron
python-neutron-fwaas
python-neutron-lbaas
python-neutron-vpnaas
python-nose
python-novnc
python-openid
python-openstack-auth
python-openstackclient
python-passlib
python-pastescript
python-pexpect
python-pycurl
python-pylibmc
python-pyudev
python-rabbit
python-sahara
python-saharaclient
python-scapy
python-scgi
python-setuptools-git
python-sqlalchemy-ext
python-stompy
python-swift
python-utidylib
python-zmq
qemu-kvm
qemu-utils
rabbitmq-server
radosgw
ruby
ruby-cstruct
ruby-deep-merge
ruby-dev
ruby-httpclient
ruby-i18n
ruby-ipaddress
ruby-json
ruby-netaddr
ruby-openstack
ruby-rethtool
ruby-stomp
sahara-api
sahara-common
sahara-engine
screen
sheepdog
socat
squashfs-tools
swift
swift-account
swift-container
swift-object
swift-plugin-s3
swift-proxy
sysfsutils
sysstat
tasksel
tcptraceroute
tmux
traceroute
ubuntu-minimal
ubuntu-standard
vgabios
vim
virt-what
vlan
xinetd
xz-utils
zabbix-agent
zabbix-frontend-php
zabbix-server-mysql
zabbix-server-pgsql

View File

@@ -1,44 +0,0 @@
# Source common config
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/common.cfg"
source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/fuel.cfg"
# Sync source
UPSTREAM=$MIRROR_UBUNTU_HOST
UPSTREAM_DIR=$MIRROR_UBUNTU_DIR
UPSTREAM_DIR_HTTP=$MIRROR_UBUNTU_HTTPDIR
PARTIAL_UPSTREAM=$PARTIAL_UPSTREAM
# Sync destination
if [ "$PARTIAL_UPSTREAM" == "1" ]; then
LOCAL_DIR="${MIRROR_ROOT:-"/var/www/nailgun"}/ubuntu-full"
PARTIAL_UPSTREAM_PATH=$UBUNTU_PATH
else
LOCAL_DIR=$UBUNTU_PATH
fi
# What parts to sync
I18Ns=( en ) # Translations; do not delete English, it might break the repo
ARCHs=( amd64 )
DISTs=()
DISTs+=( trusty trusty-updates trusty-security )
declare -A DIST_COMPONENTs
DIST_COMPONENTs["trusty"]="main multiverse universe"
DIST_COMPONENTs["trusty-updates"]="main multiverse universe"
DIST_COMPONENTs["trusty-security"]="main multiverse universe"
# Optional fetch configuration
FETCH_I18N="no" # Fetch translations
FETCH_SOURCES="no" # Fetch packages sources
FETCH_CONTENTS="no" # Fetch 'Contents' file for distro
FETCH_INSTALLER="yes" # Fetch separate 'debian-installer' component
FETCH_DIFF="no" # Fetch diffs of 'Packages' (speeds up client updates)
FETCH_INDICES="yes" # Fetch indices
# Misc
DEBUG="no"
QUIET="no"
DISABLE_CLEANUP="no"
LOG_FILE="${LOG_ROOT:-"/var/log/mirror-sync"}/ubuntu-update.log"

View File

@@ -1,638 +0,0 @@
#!/bin/bash
# Source separate config file if given
CONFIG_FILE="$1"
[[ -r "$CONFIG_FILE" ]] && . "$CONFIG_FILE"
# Sync source
UPSTREAM=${UPSTREAM:-""}
UPSTREAM_DIR=${UPSTREAM_DIR:-""}
# Sync destination
LOCAL_DIR=${LOCAL_DIR:-""}
#declare -A DIST_COMPONENTs
# Optional fetch configuration
FETCH_I18N=${FETCH_I18N:-"yes"} # Fetch translations
FETCH_SOURCES=${FETCH_SOURCES:-"no"} # Fetch packages sources
FETCH_CONTENTS=${FETCH_CONTENTS:-"no"} # Fetch 'Contents' file for distro
FETCH_INSTALLER=${FETCH_INSTALLER:-"no"} # Fetch separate 'debian-installer' component
FETCH_DIFF=${FETCH_DIFF:-"no"} # Fetch diffs of 'Packages'
FETCH_INDICES=${FETCH_INDICES:-"yes"} # Fetch indices
# Misc
ARCH_ALL_IS_MANDATORY=${ARCH_ALL_IS_MANDATORY:-"no"}
#------------------------------------------------------------------------------#
POSSIBLE_COMPRESSIONS=( gz bz2 xz lzma )
BINROOT=$(dirname `readlink -f "$0"`)
. $BINROOT/util/msgs.sh
. $BINROOT/util/rsync.sh
. $BINROOT/util/dpkg.sh
. $BINROOT/util/checksum.sh
[[ -n "$UPSTREAM" ]] || fatal "UPSTREAM is not defined in config"
[[ -n "$UPSTREAM_DIR" ]] || fatal "UPSTREAM_DIR is not defined in config"
[[ -n "$LOCAL_DIR" ]] || fatal "LOCAL_DIR is not defined in config"
[[ -n "${ARCHs[@]}" ]] || fatal "ARCHs is not defined in config"
[[ -n "${DISTs[@]}" ]] || fatal "DISTs is not defined in config"
#------------------------------------------------------------------------------#
# Checks if 'value' is contained within 'array'
# USAGE: contains 'what' 'where'
# $1 -- value to find in array
# $2 -- array to search
contains()
{
local e
for e in "${@:2}"; do [[ "$e" = "$1" ]] && return 0; done
return 1
}
on_SIGINT()
{
fatal "Got user interrupt, aborting"
exit 1
}
#------------------------------------------------------------------------------#
# MAIN()
#------------------------------------------------------------------------------#
# Trap user abort
trap "on_SIGINT" INT
info "Started $0 $*"
debug "Upstream source is: $UPSTREAM::$UPSTREAM_DIR"
debug "Local dir is: $LOCAL_DIR"
debug "Architectures to sync: ${ARCHs[@]}"
debug "Dists to sync: ${DISTs[@]}"
debug "FETCH_I18N: $FETCH_I18N "
debug "FETCH_SOURCES: $FETCH_SOURCES "
debug "FETCH_CONTENTS: $FETCH_CONTENTS "
debug "FETCH_INSTALLER: $FETCH_INSTALLER "
debug "FETCH_DIFF: $FETCH_DIFF "
debug "FETCH_INDICES: $FETCH_INDICES "
debug "ARCH_ALL_IS_MANDATORY: $ARCH_ALL_IS_MANDATORY"
debug "POSSIBLE_COMPRESSIONS: ${POSSIBLE_COMPRESSIONS[@]}"
debug "BINROOT: $BINROOT "
debug "PARTIAL_UPSTREAM: $PARTIAL_UPSTREAM "
debug "PARTIAL_UPSTREAM_PATH: $PARTIAL_UPSTREAM_PATH"
# Create dirs
mkdir -p $LOCAL_DIR/dists
mkdir -p $LOCAL_DIR/pool
# Arrays of Packages/Sources index files that contain package descriptions
packages_pool_files=()
sources_pool_files=()
if rsync_file_exists "."; then
info "Upstream mirror $UPSTREAM supports rsync protocol"
else
fatal "Upstream mirror $UPSTREAM does not support rsync protocol, aborting"
fi
debug_job_start "Checking if upstream mirror update is in progress..."
while rsync_file_exists "Archive-Update-in-Progress*"; do
info "'Archive-Update-in-Progress' file found on upstream mirror. Sleeping for 20 seconds"
sleep 20
done ; debug_job_ok
################################################################################
# Stage 1
# Download metainformation files
################################################################################
for dist in "${DISTs[@]}"; do
info "Fetching dist '$dist' lists"
############################################################################
# Phase 1: Check if we have aliased distro and create necessary symlinks
# aliases is specified after '@'-sign in dist name, separated by commas
# For example: 'wheezy@testing,somealias' means dist 'wheezy' with symlinks
# 'testing' and 'somealias' pointing to it
############################################################################
# TODO: get aliases from Release suite
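# Example (illustrative, reusing the sample above): an entry
#   DISTs+=( wheezy@testing,somealias )
# syncs the 'wheezy' dist once and creates the symlinks
# dists/testing -> wheezy and dists/somealias -> wheezy next to it.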
if [[ "$dist" == *@* ]]; then
normal_dist="${dist%%@*}"
for dist_alias in `echo ${dist#*@} | tr ',' ' '`; do
if [[ "$dist_alias" == "$normal_dist" ]]; then
continue
fi
if [ ! -L $LOCAL_DIR/dists/$dist_alias ]; then
debug "Creating dist alias '$dist_alias' -> '$normal_dist'"
ln -s "$normal_dist" "$LOCAL_DIR/dists/$dist_alias" || \
error "Error creating alias for $normal_dist ($dist_alias)"
fi
done
dist="$normal_dist"
unset normal_dist
fi
############################################################################
# Phase 2: Create distribution dir
############################################################################
mkdir -p $LOCAL_DIR/dists/$dist/
############################################################################
# Phase 3: Fetch Release files
# InRelease uses the new scheme of inline Release signing;
# the old scheme uses separate 'Release' and 'Release.gpg' files
############################################################################
debug "Fetching Release files"
for rel_file in InRelease Release Release.gpg; do
fetch "/dists/$dist/$rel_file" "$LOCAL_DIR/dists/$dist/"
done
release_file="$LOCAL_DIR/dists/$dist/InRelease"
# Check InRelease file
if [ -f "$release_file" ]; then
inrl_valid=$(date -d "$(grep Valid-Until "$release_file" | awk '{$1=""; print $0}')" +%s)
now=$(date +%s)
if [[ $(( $now - $inrl_valid )) -gt -86400 ]]; then
info "InRelease file will expire before the next update, removing it..."
rm -f "$release_file"
release_file="$LOCAL_DIR/dists/$dist/Release"
fi
else
release_file="$LOCAL_DIR/dists/$dist/Release"
fi
[ -f "$release_file" ] || fatal "Unable to find release file for dist $dist"
debug "Got Release file '$release_file'"
############################################################################
# Phase 4: check release signature
############################################################################
if [[ "$release_file" =~ InRelease$ ]]; then
gpg --verify "$release_file" || \
fatal "Failed to check signature for $release_file"
elif [[ "$release_file" =~ Release$ ]]; then
gpg --verify "${release_file}.gpg" "${release_file}" || \
fatal "Failed to check signature for $release_file"
fi
############################################################################
# Phase 5: Determine which components and arches to download
# Case A: If we have a user-specified component list and found none
# in the distro description, blindly use the user-given values
# Case B: If we have no user-specified component list, try to get one from
# the repository Release file; if that fails, bail out
# Case C: If we have both, calculate their intersection
############################################################################
debug "Calculating arches/components to fetch from dist"
components=`get_dist_components $release_file "${DIST_COMPONENTs[$dist]}"`
arches=`get_dist_architectures $release_file ${ARCHs[@]}`
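# Example (illustrative): with DIST_COMPONENTs[$dist]="main multiverse universe"
# and a Release file listing "Components: main universe", the intersection
# "main universe" is kept; if either list is empty, the other is used as-is.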
# Phase 6: Fork components into binary_components
# That allows us to add special sub-components specific to binary components
# such as 'debian-installer'
binary_components="$components"
############################################################################
# Phase 7: Check if we must fetch 'debian-installer' sub-components and add
# them to the binary_components list if needed
############################################################################
if [[ "$FETCH_INSTALLER" = "yes" ]]; then
for component in $components; do
if rsync_file_exists "dists/$dist/$component/debian-installer"; then
debug "Adding debian-installer '$component/debian-installer'"
binary_components="$binary_components $component/debian-installer"
else
error "debian-installer not found at '$component/debian-installer'"
fi
done
fi
############################################################################
# Phase 8: Fetch binary components 'Packages' indexes and diffs
############################################################################
debug "Will fetch binary components: $binary_components"
for component in $binary_components; do
info "Fetching component '$component' binary package lists"
# Component path
comp_path="dists/$dist/$component"
# Create component dir
mkdir -p "$LOCAL_DIR/$comp_path"
# First, fetch binary packages lists
for arch in $arches; do
arch_path="$comp_path/binary-$arch"
# Check if the remote dir exists
if ! rsync_file_exists "$arch_path"; then
# Missing 'all' architecture is a non-critical error
if [[ "$arch" = "all" ]] && [[ "$ARCH_ALL_IS_MANDATORY" != "yes" ]]; then
debug "Missing 'all' architecture in $dist/$component"
continue
fi
fatal "Arch '$arch' in '$dist/$component' doesn't exist"
fi
# Prepare component dir
mkdir -p "$LOCAL_DIR/$arch_path"
to_fetch=()
# List of files that we want to dl
to_fetch+=( "$arch_path/Release" )
to_fetch+=( "$arch_path/Packages" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$arch_path/Packages.$ext" )
done
# Check if we want the Packages.diff Index file too
if [[ "$FETCH_DIFF" = "yes" ]] && \
rsync_file_exists "$arch_path/Packages.diff/Index"; then
to_fetch+=( `rsync_ls "$arch_path/Packages.diff/*"` )
fi
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify all fetched files
for file in ${fetched_files[@]}; do
# Skip checking of diffs, they are mentioned in Index file
# Validate only Index file
if [[ "`dirname $file`" = "$LOCAL_DIR/$arch_path/Packages.diff" ]]; then
[[ "`basename $file`" != "Index" ]] && continue
fi
# Check file by Release file's checksum
debug_job_start "Checking file $file"
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
debug_job_ok
done
# Make sure that we have at least one valid packages list
packages_file=`guess_filename "$LOCAL_DIR/$arch_path/Packages"`
if [[ -z "$packages_file" ]]; then
fatal "Failed to find Packages file at $arch_path"
fi
# Check integrity of .diffs if we got them
diff_index=`guess_filename "$LOCAL_DIR/$arch_path/Packages.diff/Index"`
if [[ "$FETCH_DIFF" = "yes" ]] && [[ -n "$diff_index" ]]; then
diffs=`cat $diff_index | awk '/SHA1-Patches:/,0' | tail -n +2 | awk '{print $3}'`
for diff in $diffs; do
debug_job_start "Checking file $LOCAL_DIR/$arch_path/Packages.diff/$diff"
diff_exp_sha1=`cat $diff_index | awk '/SHA1-Patches:/,0' | grep "$diff" | awk '{print $1}'`
diff_real_sha1=`read_file "$LOCAL_DIR/$arch_path/Packages.diff/$diff" | sha1sum | awk '{print $1}'`
if [[ "$diff_exp_sha1" != "$diff_real_sha1" ]]; then
debug_job_err
error "Checksum failed on file $arch_path/Packages.diff/$diff, removing all diffs"
rm -rf "$LOCAL_DIR/$arch_path/Packages.diff"
break
fi
debug_job_ok
done
fi
# Parse package file and add packages from it to dl list
packages_pool_files+=( "$packages_file" )
done
done
############################################################################
# Phase 9: Fetch additional stuff for components, i18n, sources, 'Contents'
############################################################################
for component in $components; do
comp_path="dists/$dist/$component"
mkdir -p "$LOCAL_DIR/$comp_path"
# Second, the i18n packages
info "Fetching section '$component' i18n"
if [[ "$FETCH_I18N" = "yes" ]]; then
mkdir -p "$LOCAL_DIR/$comp_path/i18n/"
to_fetch=()
to_fetch+=( "$comp_path/i18n/Index" )
for i18n in ${I18Ns[@]}; do
to_fetch+=( "$comp_path/i18n/Translation-$i18n" )
# Translation files may have diffs too
to_fetch+=( "$comp_path/i18n/Translation-$i18n.diff/*" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/i18n/Translation-$i18n.$ext" )
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
# Skip checking of diffs, except the Index file
if [[ "`dirname $file`" = "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff" ]]; then
[[ "`basename $file`" != "Index" ]] && continue
fi
debug_job_start "Checking file $file"
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
debug_job_ok
done
# Check integrity of .diffs if we got them
diff_index=`guess_filename "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/Index"`
if [[ -n "$diff_index" ]]; then
diffs=`cat $diff_index | awk '/SHA1-Patches:/,0' | tail -n +2 | awk '{print $3}'`
for diff in $diffs; do
debug_job_start "Checking file $LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/$diff"
diff_exp_sha1=`cat $diff_index | awk '/SHA1-Patches:/,0' | grep "$diff" | awk '{print $1}'`
diff_real_sha1=`read_file "$LOCAL_DIR/$comp_path/i18n/Translation-$i18n.diff/$diff" | sha1sum | awk '{print $1}'`
if [[ "$diff_exp_sha1" != "$diff_real_sha1" ]]; then
debug_job_err
fatal "Checksum failed on file $comp_path/i18n/Translation-$i18n.diff/$diff"
fi
debug_job_ok
done
fi
done
fi
# Third is the Sources
if [[ "$FETCH_SOURCES" = "yes" ]]; then
info "Fetching component '$component' source package lists"
mkdir -p "$LOCAL_DIR/$comp_path/source/"
to_fetch=()
to_fetch+=( "$comp_path/source/Release" )
to_fetch+=( "$comp_path/source/Sources" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/source/Sources.$ext" )
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
done
sources_file=`guess_filename "$LOCAL_DIR/$comp_path/source/Sources"`
if [[ -z "$sources_file" ]]; then
fatal "Failed to find Sources file at $LOCAL_DIR/$comp_path/source"
fi
# Parse sources file and add packages from it to dl list
sources_pool_files+=( "$sources_file" )
fi
# Fetch the component contents packs
if [[ "$FETCH_CONTENTS" = "yes" ]]; then
info "Fetching component '$component' content lists"
to_fetch=()
for arch in $arches; do
to_fetch+=( "$comp_path/Contents-$arch" )
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
to_fetch+=( "$comp_path/Contents-$arch.$ext" )
done
done
# Download files in our wishlist and get names of actually
# downloaded files
fetched_files=`fetch_all "$LOCAL_DIR" ${to_fetch[@]}`
# Verify
for file in ${fetched_files[@]}; do
pkg_file_valid "$release_file" "${file#$LOCAL_DIR/dists/$dist/}" || \
fatal "Checksum check failed for $file"
done
# If our component is "main", make link in the root of distribution
if [[ "$component" = "main" ]]; then
for arch in $arches; do
if [[ -e "$LOCAL_DIR/dists/$dist/$component/Contents-$arch.gz" ]]; then
debug "Creating link to main/Contents-$arch.gz at $LOCAL_DIR/dists/$dist"
ln -sf main/Contents-$arch.gz $LOCAL_DIR/dists/$dist/Contents-$arch.gz
else
debug "Deleting link to main/Contents-$arch.gz at $LOCAL_DIR/dists/$dist"
rm -f "$LOCAL_DIR/dists/$dist/Contents-$arch.gz"
fi
done
fi
fi
done
done
# Get the indices
if [[ "$FETCH_INDICES" = "yes" ]]; then
info "Fetching indices"
mkdir -p "$LOCAL_DIR/indices/"
for dist in "${DISTs[@]}"; do
fetch "/indices/override.$dist.*" "$LOCAL_DIR/indices/"
done
fi
################################################################################
# Stage 2
# Download pool of packages
################################################################################
info "Parsing package and sources files:"
info "${packages_pool_files[@]}"
info "${sources_pool_files[@]}"
files_to_dl_list=`mktemp --suffix="-deb-mirror"`
# File that contains md5sums of deb pkgs
deb_md5=`mktemp --suffix="-deb-mirror-md5"`
rsync_log=`mktemp --suffix="-deb-mirror-rslog"`
sort_temp=`mktemp --suffix="-deb-mirror-sort"`
$BINROOT/util/parsePackages.py ${packages_pool_files[@]} > "$files_to_dl_list" \
2> "$deb_md5" && \
$BINROOT/util/parseSources.py ${sources_pool_files[@]} >> "$files_to_dl_list" || \
fatal "Unable to create list of packages to fetch"
sort -u "$files_to_dl_list" > "$sort_temp" ; mv "$sort_temp" "$files_to_dl_list"
sort -u -k 3,3 "$deb_md5" > "$sort_temp" ; mv "$sort_temp" "$deb_md5"
# If partial mirroring is enabled, get the list of partial mirror packages
if [[ "$PARTIAL_UPSTREAM" = "1" ]]; then
info "Resolving dependencies for partial mirror"
# Detect kernel version of debian-installer
export UBUNTU_RELEASE=trusty
export UBUNTU_NETBOOT_FLAVOR=netboot
export UBUNTU_KERNEL_FLAVOR=lts-trusty
export UBUNTU_ARCH=amd64
INITRD_DIR="/dists/${UBUNTU_RELEASE}-updates/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/"
mkdir -p "$LOCAL_DIR/$INITRD_DIR"
fetch "/$INITRD_DIR/initrd.gz" "$LOCAL_DIR/$INITRD_DIR"
export UBUNTU_INSTALLER_KERNEL_VERSION=`zcat "$LOCAL_DIR/$INITRD_DIR/initrd.gz" | cpio --list 'lib/modules/*/kernel' 2>/dev/null | cut -d"/" -f 3`
debug "Detected debian-installer kernel version: $UBUNTU_INSTALLER_KERNEL_VERSION"
# Generate list of MOS dependencies
export apt_altstate=`mktemp -d --suffix="-apt-altstate"`
export BINROOT
export FUEL_VERSION
if [[ "$DOCKER_MODE" = "true" ]]; then
( docker ps -a | grep fuel-createmirror ) && docker rm -f fuel-createmirror
# docker pull ubuntu:latest
docker -D run -d --name=fuel-createmirror --net=host -a stdout -a stderr -t \
-e UBUNTU_RELEASE=$UBUNTU_RELEASE -e UBUNTU_NETBOOT_FLAVOR=$UBUNTU_NETBOOT_FLAVOR \
-e UBUNTU_INSTALLER_KERNEL_VERSION=$UBUNTU_INSTALLER_KERNEL_VERSION -e UBUNTU_KERNEL_FLAVOR=$UBUNTU_KERNEL_FLAVOR \
-e RSYNC_PROXY=$RSYNC_PROXY -e FUEL_VERSION=$FUEL_VERSION -e http_proxy=$http_proxy \
-e UBUNTU_ARCH=$UBUNTU_ARCH -e BINROOT=$BINROOT \
-e apt_altstate=$apt_altstate -v $BINROOT:$BINROOT:rw -v $apt_altstate:$apt_altstate:rw ubuntu:latest \
|| fatal "Cannot run the docker container, please check connectivity to index.docker.io"
dockerctl shell fuel-createmirror $BINROOT/util/partial_ubuntu.sh || fatal "Cannot calculate list of dependencies"
# cleanup ubuntu container
docker rm -f fuel-createmirror
else
$BINROOT/util/partial_ubuntu.sh || fatal "Cannot calculate list of dependencies"
fi
# Create download lists for deb and udeb
awk 'FNR==NR {arr[$0];next} $3 in arr' $apt_altstate/deb "$deb_md5" > $apt_altstate/deb_md5
grep "\.udeb$" "$files_to_dl_list" | egrep -v "generic|virtual" > $apt_altstate/udeb_nonkernel
grep "\.udeb$" "$files_to_dl_list" | egrep "generic|virtual" | grep $UBUNTU_INSTALLER_KERNEL_VERSION > $apt_altstate/udeb_kernel
cat $apt_altstate/udeb_nonkernel $apt_altstate/udeb_kernel | sort -u > $apt_altstate/udeb
awk 'FNR==NR {arr[$0];next} $3 in arr' $apt_altstate/udeb "$deb_md5" > $apt_altstate/udeb_md5
cat $apt_altstate/netboot.list $apt_altstate/udeb $apt_altstate/deb > "$files_to_dl_list"
cat $apt_altstate/netboot_md5.list $apt_altstate/udeb_md5 $apt_altstate/deb_md5 > "$deb_md5"
rm -rf "$apt_altstate"
fi # "$PARTIAL_UPSTREAM" = "1"
info "Downloading pool files"
rsync --verbose --out-format="%i %n" --stats \
--recursive --perms --copy-links --times --hard-links --sparse --safe-links \
--exclude=".tmp/" --exclude=".temp/" --exclude=".~tmp~/" \
--files-from="$files_to_dl_list" \
--bwlimit=5192 \
"${UPSTREAM}::${UPSTREAM_DIR}/" "$LOCAL_DIR" | tee "$rsync_log"
# Check if rsync was ok ($? here would be tee's status, so use PIPESTATUS)
if [[ ${PIPESTATUS[0]} != 0 ]]; then
rm "$files_to_dl_list"
fatal "Failed to sync all package files, see log for details"
else
info "Primary sync successfully completed"
fi
# fix directory permissions for pool files
find "$LOCAL_DIR" -type d -exec chmod 755 {} \;
# Let's check new file MD5sums
fresh_files=`egrep "^>f......... .*" "$rsync_log" | awk '{print $2}'`
for fresh_file in $fresh_files; do
check_file "$deb_md5" "$LOCAL_DIR" "$fresh_file"
if [[ $? != 0 ]]; then
rm "$deb_md5"
rm "$rsync_log"
fatal "MD5sum check failed for file $LOCAL_DIR/$fresh_file"
fi
done
rm "$deb_md5"
rm "$rsync_log"
# Now iterate through all downloaded files, check whether any of them are
# symlinks, and download the files they point to if needed.
# Sometimes a section contains metainfo for a symlink to a file in a
# different section that no longer exists there, so that file would be
# wiped as unused.
wayback="`pwd`"
cd "$LOCAL_DIR/"
pool_current_files=`mktemp --suffix d-m_got`
pool_required_files=`mktemp --suffix d-m_req`
# Create lists of files that we got and that we need
find pool -type f -or -type l | sort -u > $pool_current_files
cat $files_to_dl_list | grep "^pool" | sort -u > $pool_required_files
cd "$wayback"
info "Cleaning up pool files"
# Clean obsolete files
obsolete_files=`comm -3 -2 "$pool_current_files" "$pool_required_files"`
for file in $obsolete_files; do
debug_job_start "Deleting '$LOCAL_DIR/$file'"
rm "$LOCAL_DIR/$file" && debug_job_ok || debug_job_err
done
info "Doublechecking that required pool files exists"
missing_files=`comm -3 -1 "$pool_current_files" "$pool_required_files"`
if [[ -n "$missing_files" ]]; then
error "Some files are missing after sync!!!:"
error "$missing_files"
fatal "Aborting due to missing files"
fi
rm "$files_to_dl_list"
rm "$pool_required_files"
rm "$pool_current_files"
# Timestamp
echo "Updated at: `date`" > $LOCAL_DIR/.lastupdate
# If partial mirroring is enabled, get the list of partial mirror packages
if [[ "$PARTIAL_UPSTREAM" = "1" ]]; then
# netboot images URI used by Nailgun differs from the one used in script
# see https://bugs.launchpad.net/bugs/1461927 for details
PARTIAL_INITRD_DIR="/dists/${UBUNTU_RELEASE}/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/"
# Prepare directory structure for partial repository
info "Generating partial mirror"
mkdir -p ${PARTIAL_UPSTREAM_PATH}/pool/debian-installer
mkdir -p ${PARTIAL_UPSTREAM_PATH}/pool/main
mkdir -p ${PARTIAL_UPSTREAM_PATH}/indices
mkdir -p ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/main/binary-amd64
mkdir -p ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/main/debian-installer/binary-amd64
mkdir -p ${PARTIAL_UPSTREAM_PATH}/${PARTIAL_INITRD_DIR}
temp_dir=`mktemp -d --suffix="-reposync"`
find $LOCAL_DIR/pool/ -name "*.deb" -type f -exec cp -vuni '{}' ${temp_dir} ";"
rsync -a --delete ${temp_dir}/ ${PARTIAL_UPSTREAM_PATH}/pool/main
rm -f ${temp_dir}/*
find ${LOCAL_DIR}/pool/ -name "*.udeb" -type f -exec cp -vuni '{}' ${temp_dir} ";"
rsync -a --delete ${temp_dir}/ ${PARTIAL_UPSTREAM_PATH}/pool/debian-installer
rm -rf ${temp_dir}
rsync -a --delete ${LOCAL_DIR}/${INITRD_DIR}/ ${PARTIAL_UPSTREAM_PATH}/${PARTIAL_INITRD_DIR}
find ${PARTIAL_UPSTREAM_PATH} -type d -print0 | xargs -0 chmod 755
# Generate "indices" folder
cat $LOCAL_DIR/indices/*extra* | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.extra.main
cat $LOCAL_DIR/indices/*.debian-installer | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.main.debian-installer
pushd $LOCAL_DIR/indices/
ls --ignore="*extra*" --ignore="*src" --ignore="*debian-installer" --quoting-style=shell | xargs cat | sort -u > ${PARTIAL_UPSTREAM_PATH}/indices/override.${UBUNTU_RELEASE}.main
popd
# Generate Release file
cat <<EOF > ${PARTIAL_UPSTREAM_PATH}/dists/${UBUNTU_RELEASE}/Release
Architectures: amd64
Codename: ${UBUNTU_RELEASE}
Components: main
Date: `date`
Description: Ubuntu ${UBUNTU_RELEASE} partial mirror
Label: Ubuntu
Origin: Ubuntu
Suite: ${UBUNTU_RELEASE}
EOF
# Build partial mirror
info "Generating metadata for partial mirror"
info "Applying fix for upstream dpkg-scanpackages"
patch -N /usr/bin/dpkg-scanpackages < $BINROOT/util/dpkg.patch
export BINROOT
$BINROOT/util/regenerate_ubuntu_repo ${PARTIAL_UPSTREAM_PATH} ${UBUNTU_RELEASE} || fatal "Failed to generate partial mirror"
rm -rf $LOCAL_DIR
fi # "$PARTIAL_UPSTREAM" = "1"
info "Done"

View File

@@ -1,232 +0,0 @@
#!/bin/bash
# This script helps you create and maintain local mirrors of MOS and/or
# Ubuntu. It can also be run as a cron job.
# Dependencies: rsync, gpg, docker + dpkg-dev (only for partial Ubuntu mirror)
usage() {
cat <<EOF
Usage: `basename $0` [options]
Create and update local mirrors of MOS and/or Ubuntu.
IMPORTANT!
If NO parameters are specified, this script will:
- Create/Update both MOS and Ubuntu local mirrors
- Set them as repositories for existing NEW environments in Fuel UI
- Set them as DEFAULT repositories for new environments
Options:
-h| --help This help screen.
-d| --no-default Don't change default repositories for new environments
-a| --no-apply Don't apply changes to Fuel environments
-M| --mos Create/Update MOS local mirror only
-U| --ubuntu Create/Update Ubuntu local mirror only
-N| --dry-run Show commands to execute instead of running them
-p| --password Fuel Master admin password (defaults to admin)
CUSTOMIZATION
-------------
The following configuration file could be used to modify the
script behavior:
/etc/fuel-createmirror/common.cfg
If you are behind a proxy, you should set both http_proxy and RSYNC_PROXY env vars.
Please refer to the description of parameters in that configuration file.
See more detailed description in the Fuel Operations Guide:
https://docs.mirantis.com/openstack/fuel/fuel-6.1/operations.html#external-ubuntu-ops
EOF
}
usage_short() {
echo Usage: `basename $0` [options]
echo
echo -e Try \``basename $0` --help\' for more options.
}
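# Example invocations (illustrative; every flag is documented in usage() above):
#   fuel-createmirror                  # update both MOS and Ubuntu mirrors
#   fuel-createmirror -U --no-default  # Ubuntu only, keep default repos untouched
#   fuel-createmirror -M -p s3cr3t     # MOS only; 's3cr3t' is a made-up password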
die() { echo "$@" 1>&2 ; exit 1; }
print_repositories_ubuntu() {
# $1 - directory name of local repository
echo -e " * INFO: In order to set up these repositories MANUALLY, you should"
echo -e " go to Fuel UI, choose your cluster and go to the 'Settings' tab"
if [ "$PARTIAL_UPSTREAM" == "0" ]; then
echo -e " Replace the URI value for the following repositories:"
for dist in "${DISTs[@]}"; do
distlabel=`echo "$dist" | sed "s/$FUEL_VERSION//"`
echo -e " Repository \"$distlabel\" URI=\"deb http://$FUEL_SERVER:8080/$1 $dist ${DIST_COMPONENTs[$dist]}\""
done
else
echo -e " Replace the URI value for the following repositories:"
echo
echo -e " Repository \"ubuntu\" new URI=\"deb http://$FUEL_SERVER:8080/$1 ${DISTs[0]} main\""
echo -e " Repository \"ubuntu-security\" new URI=\"deb http://$FUEL_SERVER:8080/$1 ${DISTs[0]} main\""
echo -e " Repository \"ubuntu-updates\" new URI=\"deb http://$FUEL_SERVER:8080/$1 ${DISTs[0]} main\""
fi
echo
}
add_repositories_to_nailgun() {
# parameters:
# $1 - operating_system from fuel env
# $2 - distro name in fuel-package-updates format
# $3 - directory name of local repository
echo " * INFO: Attempting to add created repositories to Nailgun..."
local release_id=`env http_proxy="" fuel --user=admin --password="$FUEL_ADMIN_PASS" release 2>/dev/null| awk -F"|" '{print $1" "$4" "$5}' | grep "$1" | grep "$FULL_RELEASE" | awk '{print $1}'`
local clearupstream=" --clear-upstream-repos "
local makedefault=" --make-default "
local apply=" --apply "
[ "$PARTIAL_UPSTREAM" == "0" ] && clearupstream=""
[ "$OPT_NO_APPLY" == "1" ] && apply=""
# find envs with status "new" and with given release_id
envs=`env http_proxy="" fuel --user=admin --password="$FUEL_ADMIN_PASS" env 2>&1 | grep -w new | awk -v release_id=$release_id -F'|' '$5 == release_id {print $1}'`
for env in ${envs}; do
$EXEC_PREFIX env http_proxy="" fuel-package-updates -d $2 -r $FULL_RELEASE --no-download $apply \
-s $FUEL_SERVER -p "$FUEL_ADMIN_PASS" -b http://$FUEL_SERVER:8080/$3 -e $env $clearupstream 2>/dev/null
EC_FPU=$?
if [[ "$EC_FPU" == "0" ]]; then
[ "$OPT_NO_APPLY" ] || echo " * INFO: environment id=$env updated successfully, no manual action is required"
else
echo " * INFO: Failed to add repositories for environment id=$env to Nailgun, please add them MANUALLY"
EC_ADD=1
fi
done
if [ "$OPT_NO_DEFAULT" ]; then
echo " * INFO: Default repositories for new environments were not modified"
else
$EXEC_PREFIX env http_proxy="" fuel-package-updates -d $2 -r $FULL_RELEASE --no-download --make-default \
-s $FUEL_SERVER -p "$FUEL_ADMIN_PASS" -b http://$FUEL_SERVER:8080/$3 $apply $clearupstream 2>/dev/null
EC_FPU=$?
if [[ "$EC_FPU" == "0" ]]; then
echo " * INFO: Created repositories were set as defaults for new environments"
else
echo " * WARN: Failed to set repositories as defaults for new environments"
fi
fi
[ "$EC_ADD" == "1" ] && print_repositories_ubuntu $3
}
### BEGIN
# Set defaults
OPT_MOS=1
OPT_UBUNTU=1
EXEC_PREFIX=""
# Parse options
OPTS=`getopt -o hdaMUNp: -l help,no-default,no-apply,mos,ubuntu,password:,dry-run -- "$@"`
if [ $? != 0 ]; then
usage_short
exit 1
fi
eval set -- "$OPTS"
while true ; do
case "$1" in
-h| --help ) usage ; exit 0;;
-d | --no-default ) OPT_NO_DEFAULT=1; shift;;
-a | --no-apply ) OPT_NO_APPLY=1; shift;;
-N | --dry-run ) EXEC_PREFIX="echo EXEC "; shift;;
-M | --mos ) unset OPT_UBUNTU; shift;;
-U | --ubuntu ) unset OPT_MOS; shift;;
-p | --password ) FUEL_MASTER_PASS="$2"; shift; shift;;
-- ) shift; break;;
* ) break;;
esac
done
if [[ "$@" != "" ]]; then
echo "Invalid option -- $@"
usage_short
exit 1
fi
if [ -z "$OPT_MOS" ] && [ -z "$OPT_UBUNTU" ]; then
echo "The --mos and --ubuntu options are mutually exclusive, aborting..."
usage_short
exit 1
fi
export BINROOT=$(dirname `readlink -f "$0"`)
. $BINROOT/config/common.cfg
. $BINROOT/config/fuel.cfg
# If running on Fuel node - check if we can connect to backend
if hash fuel2 2>/dev/null; then
echo " * INFO: Verifying connection to the Fuel backend"
if env http_proxy="" fuel --user=admin --password="$FUEL_ADMIN_PASS" release &>/dev/null; then
echo " * INFO: Fuel backend connection OK"
else
echo " * FATAL: Connection to the Fuel backend failed. Please verify that Fuel services are up and running."
echo " If services are OK, please make sure you have specified the correct Fuel Master admin password"
usage_short
exit 1
fi
fi
if [ -z "${RSYNC_PROXY+x}" ] && [ -n "$http_proxy" ]; then
export http_proxy
export RSYNC_PROXY=$http_proxy
fi
$EXEC_PREFIX mkdir -p ${MIRROR_ROOT} || die "Cannot create ${MIRROR_ROOT}, exiting."
$EXEC_PREFIX mkdir -p ${LOG_ROOT} || die "Cannot create ${LOG_ROOT}, exiting."
EC=0
if [[ $OPT_MOS ]]; then
$EXEC_PREFIX $BINROOT/deb-mirror $BINROOT/config/mos-ubuntu-updatesonly.cfg
EC_MOS=$?
fi
if [[ $OPT_UBUNTU ]]; then
$EXEC_PREFIX $BINROOT/deb-mirror $BINROOT/config/ubuntu.cfg
EC_UBUNTU=$?
fi
if [[ $OPT_MOS ]]; then
if [[ "$EC_MOS" == "0" ]]; then
. $BINROOT/config/mos-ubuntu-updatesonly.cfg
echo " * INFO: MOS mirror was created at: $LOCAL_DIR"
if [[ "$DOCKER_MODE" == "true" ]]; then
add_repositories_to_nailgun Ubuntu ubuntu ${LOCAL_DIR##*/}
else
print_repositories_ubuntu ${LOCAL_DIR##*/}
fi
else
echo " * FATAL: Creation of MOS mirror FAILED, check logs at $LOG_ROOT"
EC=1
fi
fi
if [[ $OPT_UBUNTU ]]; then
if [[ "$EC_UBUNTU" == "0" ]]; then
. $BINROOT/config/ubuntu.cfg
if [[ $PARTIAL_UPSTREAM = "1" ]]; then
echo " * INFO: Ubuntu partial mirror was created at: $PARTIAL_UPSTREAM_PATH"
if [[ "$DOCKER_MODE" == "true" ]]; then
add_repositories_to_nailgun Ubuntu ubuntu-baseos ${PARTIAL_UPSTREAM_PATH##*/}
else
print_repositories_ubuntu ${PARTIAL_UPSTREAM_PATH##*/}
fi
else
echo " * INFO: Ubuntu mirror was created at: $LOCAL_DIR"
if [[ "$DOCKER_MODE" == "true" ]]; then
add_repositories_to_nailgun Ubuntu ubuntu-baseos ${LOCAL_DIR##*/}
else
print_repositories_ubuntu ${LOCAL_DIR##*/}
fi
fi
else
echo " * FATAL: Creation of Ubuntu mirror FAILED, check logs at $LOG_ROOT"
EC=1
fi
fi
exit $EC

View File

@@ -1,41 +0,0 @@
check_file()
{
checksum_file=$1
root=$2
file_to_check=$3
debug_job_start "Checking checksum of file '$root/$file_to_check'"
file_records=`grep "$file_to_check" "$checksum_file" | sort -u`
IFS='
'
[[ -z "$file_records" ]] && echo -n "checksums not found..." \
&& debug_job_skip && return 0
for file_record in $file_records; do
expected_checksum_type=`echo $file_record | awk '{print $1}'`
expected_checksum=`echo $file_record | awk '{print $2}'`
shopt -s nocasematch
if [[ $expected_checksum_type == "MD5" ]]; then
echo -n "MD5..."
actual_checksum=`md5sum "$root/$file_to_check" | head -c 32`
elif [[ $expected_checksum_type == "SHA1" ]] || [[ $expected_checksum_type == "SHA" ]]; then
echo -n "SHA1..."
actual_checksum=`sha1sum "$root/$file_to_check" | head -c 40`
elif [[ $expected_checksum_type == "SHA256" ]]; then
echo -n "SHA256..."
actual_checksum=`sha256sum "$root/$file_to_check" | head -c 64`
elif [[ $expected_checksum_type == "NONE" ]]; then
echo -n "NONE..."
actual_checksum=$expected_checksum
fi
shopt -u nocasematch
[[ "$expected_checksum" != "$actual_checksum" ]] && debug_job_err && return 1
done
debug_job_ok
return 0
}
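# Usage sketch (hypothetical path; checksum-list lines look like
# "MD5 <sum> <path>", as emitted by util/parsePackages.py):
#   check_file "$deb_md5" "$LOCAL_DIR" "pool/main/a/apt/apt_1.0.1_amd64.deb" \
#       || echo "MD5sum check failed"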

View File

@@ -1,49 +0,0 @@
From 09efec567e6b0baa835c1921c222a5a967cf193a Mon Sep 17 00:00:00 2001
From: Dennis Dmitriev <ddmitriev@mirantis.com>
Date: Tue, 19 Aug 2014 18:31:33 +0300
Subject: [PATCH] Allow correctly parsing indices/override.* files
Allow parsing indices/override.* files when architecture
prefixes are used in package names there. This allows using
dpkg-scanpackages with Ubuntu repositories.
Originally, dpkg-scanpackages just skips the following lines because it
compares package names without any modification, and cannot find the
appropriate package because 'a2jmidid' != 'a2jmidid/amd64'
For example, several lines from override.precise.extra.main:
...
a2jmidid Bugs https://bugs.launchpad.net/ubuntu/+filebug
a2jmidid Origin Ubuntu
a2jmidid/amd64 Task ubuntustudio-generation, ubuntustudio-recording
a2jmidid/armel Task ubuntustudio-generation, ubuntustudio-recording
a2jmidid/armhf Task ubuntustudio-generation, ubuntustudio-recording
a2jmidid/i386 Task ubuntustudio-generation, ubuntustudio-recording
a2jmidid/powerpc Task ubuntustudio-generation, ubuntustudio-recording
...
Related-Bug: 1358785
---
scripts/dpkg-scanpackages.pl | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/scripts/dpkg-scanpackages.pl b/scripts/dpkg-scanpackages.pl
index 9a3d59c..3d6724b 100755
--- a/scripts/dpkg-scanpackages.pl
+++ b/scripts/dpkg-scanpackages.pl
@@ -143,7 +143,12 @@ sub load_override_extra
s/\s+$//;
next unless $_;
- my ($p, $field, $value) = split(/\s+/, $_, 3);
+ my ($pr, $field, $value) = split(/\s+/, $_, 3);
+
+ my ($p, $parch) = split(/\//, $pr, 2);
+ if (defined($options{arch}) and defined($parch)) {
+ next unless ($options{arch} eq $parch);
+ }
next unless defined($packages{$p});
--
1.8.5.5

View File

@@ -1,155 +0,0 @@
#!/bin/bash
# Guess filename based on POSSIBLE_COMPRESSIONS variable
# It will cycle through filenames (myname myname.gz myname.bz2 myname.xz etc...)
# and return first match that exists in the filesystem
# $1 -- base filename
guess_filename()
{
local to_return=""
local file="$1"
#debug "Guessing filename for $file"
if [[ ! -f "$file" ]]; then
for ext in ${POSSIBLE_COMPRESSIONS[@]}; do
if [[ -f "$file.$ext" ]]; then
#debug "Got match $file.$ext"
to_return="$file.$ext"
break
fi
done
else
to_return="$file"
fi
echo "$to_return"
}
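# Example (illustrative): with POSSIBLE_COMPRESSIONS=( gz bz2 xz lzma ) and only
# Packages.bz2 present on disk,
#   guess_filename "dists/trusty/main/binary-amd64/Packages"
# prints "dists/trusty/main/binary-amd64/Packages.bz2".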
# Determines if file is compressed, and uncompresses into stdout
# $1 -- file to cat
# $2=false -- Try to guess filename
read_file()
{
local file="$1"
local try_to_guess="${2:-false}"
if [[ ! -f "$file" ]]; then
if [[ "$try_to_guess" = "false" ]]; then
return
else
file=`guess_filename "$file"`
[[ -f "$file" ]] || return
fi
fi
case `file "$file"` in
*gzip*)
# We got a GZip
zcat "$file"
return;;
*bzip2*)
# We got a BZip2
bzcat "$file"
return;;
*XZ*)
# We got a XZ
xzcat "$file"
return;;
*text*)
# Must be a plain text
cat "$file"
return;;
esac
}
# Gets distro components from Release file
# $1 -- path to Release file
# $2 -- user component list
get_dist_components()
{
local dist_components=( `read_file "$1"| egrep "^Components: "| cut -d' ' -f'2-'` )
local user_components=${2:-""}
local to_return=""
if [[ -z "$user_components" ]]; then
echo "${dist_components[@]}"
elif [[ -z "${dist_components[*]}" ]]; then
echo "$user_components"
else
for ucomp in $user_components; do
if contains "$ucomp" "${dist_components[@]}"; then
to_return="${to_return} $ucomp"
fi
done
fi
echo $to_return
}
# Gets distro arches from Release file
# $1 -- path to Release file
# $2 -- user arch list
get_dist_architectures()
{
local dist_arches=( `read_file "$1"| egrep "^Architectures: "| cut -d' ' -f'2-'` )
local user_arches=( $* )
local to_return=""
# Filter out arches that not listed in 'ARCHs' global variable
for arch in ${user_arches[@]}; do
if contains "$arch" "${dist_arches[@]}"; then
to_return="${to_return} $arch"
fi
# Special-case 'all', an architecture that is not included in the Release arches list
if [[ "$arch" = "all" ]]; then
to_return="${to_return} $arch"
fi
done
echo $to_return
}
# Checks dist file validity
# $1 -- Full path to release file
# $2 -- Relative path to target file from the repository root
pkg_file_valid()
{
local release="$1"
local pkg="$2"
# Check if release file has an md5sum section, if not then just return OK
if ! egrep -i '^MD5Sum:\s*$' $release &> /dev/null; then
debug "Release file '$release' doesn't contain MD5 info"
return 0
fi
# Get distro basedir
local dist_base=`dirname "$release"`
local pkg_path="$dist_base/$pkg"
local pkg_line=`cat "$release" | egrep -i "^ [0-9a-f]{32}\s+[0-9]+\s+$pkg\s*$"`
# Check if we found the file's md5 line; if not, return OK
# TODO: make option to raise error on missing md5sum
if [[ -z "$pkg_line" ]]; then
error "Can't find md5sum for '$pkg' in '$release', skipping"
return 0
fi
# Get line with MD5SUM for current package
local expected_md5sum=`echo "$pkg_line" | awk '{print $1}'`
local expected_size=`echo "$pkg_line" | awk '{print $2}'`
# Check file validity; if the file is missing, these vars will just be empty
local size=`stat -c%s "$pkg_path"`
local md5sum=`md5sum "$pkg_path"| awk '{print $1}'`
if [[ -e $pkg_path ]] && \
[[ $size = $expected_size ]] && \
[[ $md5sum = $expected_md5sum ]]; then
debug "File '$pkg' checked by '$release' is OK"
return 0
fi
error "File '$pkg_path' checked by '$release' is BAD"
debug "File details:"
debug "size = $size, expected $expected_size"
debug "md5sum = $md5sum, expected $expected_md5sum"
return 1
}
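# For reference, the Release stanza matched above looks like this
# (hash and size are illustrative):
#   MD5Sum:
#    d41d8cd98f00b204e9800998ecf8427e 148325 main/binary-amd64/Packages.gz
# i.e. " <32-hex md5> <size> <path relative to the dist root>".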

View File

@@ -1,116 +0,0 @@
#!/bin/bash
# Generic message display and job-contol
DEBUG=${DEBUG:-"no"}
QUIET=${QUIET:-"no"}
# If no LOG_FILE set, discard log output
LOG_FILE=${LOG_FILE:-"/dev/null"}
################################################################################
# Magic FD manipulations
################################################################################
# Log file wrapper function, reads stdin line by line and timestamps each line,
# also filters terminal colors
_log()
{
while IFS='' read -r line; do
echo "$(date) $line" | sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g" >> "$LOG_FILE"
done
}
# Set FD 5 -- main output FD
# Split its output to the helper '_log' function and stdout
# If QUIET is set - suppress console output
if [[ "$QUIET" = "yes" ]]; then
exec 5> >(tee -a >(_log) > /dev/null)
else
exec 5> >(tee -a >(_log))
fi
# Suppress children's output if DEBUG is set to 'no', append to main FD otherwise
if [[ "$DEBUG" = "no" ]]; then
exec 1>/dev/null
exec 2>/dev/null
else
exec 1>&5
exec 2>&5
fi
# FD 3 -- Pretty messages FD
# Prettified messages for the user are sent here
# By default it sends its output to the main FD
exec 3>&5
################################################################################
# Simple messaging functions
################################################################################
msgs_errors=()
msg()
{
echo " * $*" 1>&3
}
debug()
{
[[ "$DEBUG" = "yes" ]] && msg "DEBUG: $*"
}
info()
{
msg "INFO: $*"
}
error()
{
msg "ERROR: $*"
msgs_errors+=( "$*" )
}
fatal()
{
msg "FATAL: $1"
exit "${2:-1}"
}
################################################################################
# Job control functions
################################################################################
msgs_jobname=""
job_start()
{
msgs_jobname="$1"
echo -ne "$msgs_jobname..." 1>&3
#logger -t "$TAG" "$msgs_jobname"
}
job_ok()
{
echo -e "\e[0;32mOK\e[0m" 1>&3
#logger -t "$TAG" "$msgs_jobname... OK !"
}
job_err()
{
echo -e "\e[0;31mFAIL!\e[0m" 1>&3
#logger -t "$TAG" "$msgs_jobname... FAILED !"
errors="${errors}$msgs_jobname has failed\n"
}
job_skip()
{
echo -e "\e[0;33mSKIPPED!!\e[0m" 1>&3
#logger -t "$TAG" "$msgs_jobname... SKIPPED !"
}
debug_job_start()
{
[[ "$DEBUG" = "yes" ]] && job_start "$*"
}
debug_job_ok()
{
[[ "$DEBUG" = "yes" ]] && job_ok
}
debug_job_err()
{
[[ "$DEBUG" = "yes" ]] && job_err
}
debug_job_skip()
{
[[ "$DEBUG" = "yes" ]] && job_skip
}
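# Usage sketch (illustrative; fetch comes from util/rsync.sh):
#   job_start "Fetching Release file"
#   fetch "/dists/trusty/Release" "$LOCAL_DIR/dists/trusty/" && job_ok || job_err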

View File

@@ -1,35 +0,0 @@
#!/usr/bin/python
# This script parses the contents of the given 'Packages' files, printing the
# file list to stdout (for rsync --files-from) and MD5 sums to stderr
import re
import sys
# Regex to parse
regex=re.compile("^(?P<param>[a-z0-9]+): (?P<value>.*)$", re.IGNORECASE)
for pkgfile in sys.argv[1:]:
if pkgfile.endswith(".gz"):
import gzip
file = gzip.open(pkgfile)
elif pkgfile.endswith(".bz2"):
import bz2
file = bz2.BZ2File(pkgfile)
else:
file = open(pkgfile)
# Current package
pkg={}
for line in file:
# A blank line means we're at a package separator:
# print the information about the current package and reset it
if line == "\n":
sys.stdout.write(pkg["filename"] + "\n")
if "md5sum" in pkg:
sys.stderr.write("MD5 " + pkg["md5sum"] + " " + pkg["filename"] + "\n")
pkg={}
m = regex.match(line)
if m:
pkg[m.group("param").lower()] = m.group("value")
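# Example stanza from a Packages file (values are illustrative):
#   Package: apt
#   Filename: pool/main/a/apt/apt_1.0.1ubuntu2_amd64.deb
#   MD5sum: 0f1e48e4a35e60ed0dcebbc7e4a1acfe
# stdout: pool/main/a/apt/apt_1.0.1ubuntu2_amd64.deb
# stderr: MD5 0f1e48e4a35e60ed0dcebbc7e4a1acfe pool/main/a/apt/apt_1.0.1ubuntu2_amd64.deb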

View File

@@ -1,44 +0,0 @@
#!/usr/bin/python
# This script parses the contents of the given 'Sources' files and prints the
# list of source files to fetch when synchronizing the mirror
import re
import sys
# Regex to parse
regex=re.compile("^(?P<param>[a-zA-Z0-9_-]+):\s?(?P<value>.*)$")
files_regex=re.compile("(?P<md5>[a-f0-9]{32}) [0-9]+ (?P<filename>.*)")
for pkgfile in sys.argv[1:]:
if pkgfile.endswith(".gz"):
import gzip
file = gzip.open(pkgfile)
elif pkgfile.endswith(".bz2"):
import bz2
file = bz2.BZ2File(pkgfile)
else:
file = open(pkgfile)
pkg={}
cur_param=""
for line in file:
if line == "\n":
#print("----------------------------------------------------")
basedir=pkg['directory']
files=files_regex.findall(pkg['files'])
for md5, file in files:
print basedir + "/" + file
pkg={}
continue
m = regex.match(line)
if m:
cur_param = m.group("param").lower()
pkg[cur_param] = m.group("value")
elif line.startswith(" "):
# We got a multi-line continuation
pkg[cur_param] += line.lstrip()
else:
print "IMMPOSSIBIRUUUU!!!!"
sys.exit(999)
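# Example stanza from a Sources file (sizes and sums are illustrative):
#   Directory: pool/main/a/apt
#   Files:
#    <32-hex md5> 1843 apt_1.0.1ubuntu2.dsc
#    <32-hex md5> 1328372 apt_1.0.1ubuntu2.tar.gz
# prints:
#   pool/main/a/apt/apt_1.0.1ubuntu2.dsc
#   pool/main/a/apt/apt_1.0.1ubuntu2.tar.gz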

View File

@@ -1,26 +0,0 @@
#!/usr/bin/python
# Enumerate YAML from stdin and produce prefixed output
import re
import sys
import yaml
prefix = sys.argv[1]
def serialize(value, name):
if value is None:
print('{0}=""'.format(name))
elif hasattr(value, 'items'):
for key, subvalue in value.items():
key = re.sub(r'[\W]', '_', key)
serialize(subvalue, name + '_' + key)
elif hasattr(value, '__iter__'):
print("{0}_len={1}".format(name, len(value)))
for i, v in enumerate(value):
serialize(v, name + '_' + str(i))
else:
print('{0}="{1}"'.format(name, value))
with sys.stdin as yaml_file:
data = yaml.safe_load(yaml_file)  # safe_load: never construct arbitrary objects
serialize(data, prefix)
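# Example (illustrative): fuel.cfg pipes astute.yaml through this script, so
#   echo 'ADMIN_NETWORK: {ipaddress: 10.20.0.2}' | parse_yaml.py FUEL
# prints
#   FUEL_ADMIN_NETWORK_ipaddress="10.20.0.2"
# which the calling shell then sources to get its FUEL_* variables.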

View File

@@ -1,100 +0,0 @@
#!/bin/bash
cp $BINROOT/config/requirements-deb.txt $apt_altstate
cat >> $apt_altstate/requirements-deb.txt << EOF
linux-image-${UBUNTU_INSTALLER_KERNEL_VERSION}
linux-headers-${UBUNTU_INSTALLER_KERNEL_VERSION}
linux-image-generic-${UBUNTU_KERNEL_FLAVOR}
linux-headers-generic-${UBUNTU_KERNEL_FLAVOR}
EOF
requirements_add_essential_pkgs () {
# All essential packages are already installed, so ask dpkg for a list
dpkg-query -W -f='${Package} ${Essential}\n' > /tmp/essential.pkgs
sed -i /tmp/essential.pkgs -n -e 's/\([^ ]\+\).*yes$/\1/p'
cat /tmp/essential.pkgs >> $apt_altstate/requirements-deb.txt
}
#apt_altstate=`mktemp -d --suffix="-apt-altstate"`
apt_lists_dir="$apt_altstate/var/lib/apt/lists"
apt_cache_dir="$apt_altstate/var/cache/apt"
null_dpkg_status="$apt_altstate/var/lib/dpkg/status"
apt_alt_etc="$apt_altstate/etc/apt"
mkdir -p "$apt_lists_dir"
mkdir -p "$apt_cache_dir"
mkdir -p "$apt_alt_etc/trusted.gpg.d/"
mkdir -p "$apt_alt_etc/preferences.d/"
mkdir -p "${null_dpkg_status%/*}"
touch "${null_dpkg_status}"
cp -a /usr/share/keyrings/ubuntu*.gpg "$apt_alt_etc/trusted.gpg.d/"
apt_altstate_opts="-o APT::Get::AllowUnauthenticated=1"
apt_altstate_opts="${apt_altstate_opts} -o Dir=${apt_altstate}"
apt_altstate_opts="${apt_altstate_opts} -o Dir::State::Lists=${apt_lists_dir}"
apt_altstate_opts="${apt_altstate_opts} -o Dir::State::status=${null_dpkg_status}"
apt_altstate_opts="${apt_altstate_opts} -o Dir::Cache=${apt_cache_dir}"
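# These options let a stock apt-get resolve dependencies against the alternate
# state without touching the host's package database, e.g. (illustrative):
#   apt-get $apt_altstate_opts --print-uris --yes install fuel-agent
# prints download URIs instead of installing anything, as done below.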
if ! source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/../config/ubuntu.cfg"; then
echo "`basename $0`: cannot read config for Ubuntu, please create one!"
exit 1
fi
for dist in ${DISTs[@]}; do
echo deb http://${UPSTREAM}/${UPSTREAM_DIR} $dist "${DIST_COMPONENTs[$dist]}" >> ${apt_alt_etc}/sources.list
done
if ! source "$(dirname $(readlink -f "${BASH_SOURCE[0]}"))/../config/mos-ubuntu.cfg"; then
echo "`basename $0`: cannot read config for MOS Ubuntu, please create one!"
exit 1
fi
for dist in ${DISTs[@]}; do
echo deb http://${UPSTREAM}/${UPSTREAM_DIR_HTTP} $dist "${DIST_COMPONENTs[$dist]}" >> ${apt_alt_etc}/sources.list
done
cat <<EOF > ${apt_alt_etc}/preferences
Package: *
Pin: release o=Mirantis
Pin-Priority: 1101
EOF
if ! apt-get $apt_altstate_opts update; then
echo "`basename $0`: failed to populate alt apt state!"
exit 1
fi
requirements_add_essential_pkgs
echo "Processing Fuel dependencies..."
has_apt_errors=''
while read pkg; do
downloads_list="$apt_altstate/downloads_${pkg}.list"
if ! apt-get $apt_altstate_opts --print-uris --yes -qq install $pkg >"${downloads_list}" 2>>"$apt_altstate/apt-errors.log"; then
echo "package $pkg can not be installed" >>$apt_altstate/apt-errors.log
# run apt-get once more to get a verbose error message
apt-get $apt_altstate_opts --print-uris --yes install $pkg >>$apt_altstate/apt-errors.log 2>&1 || true
has_apt_errors='yes'
fi
sed -i "${downloads_list}" -n -e "s/^'\([^']\+\)['].*$/\1/p"
done < $apt_altstate/requirements-deb.txt
if [ -n "$has_apt_errors" ]; then
echo "`basename $0`: some packages are not installable" >&2
cat < $apt_altstate/apt-errors.log >&2
exit 1
fi
# Prepare list of upstream packages to download
cat $apt_altstate/downloads_*.list | grep -v ${UPSTREAM} | perl -p -e 's/^.*?pool/pool/' | sort -u > $apt_altstate/deb
rm -f $apt_altstate/downloads_*.list
NETBOOT_FILES="linux initrd.gz"
for dload in $NETBOOT_FILES; do
echo dists/${UBUNTU_RELEASE}-updates/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/${dload} >> $apt_altstate/netboot.list
echo NONE NONE dists/${UBUNTU_RELEASE}-updates/main/installer-${UBUNTU_ARCH}/current/images/${UBUNTU_NETBOOT_FLAVOR}/ubuntu-installer/${UBUNTU_ARCH}/${dload} >> $apt_altstate/netboot_md5.list
done
exit 0

View File

@@ -1,89 +0,0 @@
#!/bin/bash
# Based on the method described here:
# http://troubleshootingrange.blogspot.com/2012/09/hosting-simple-apt-repository-on-centos.html
# Please install 'dpkg' and 'dpkg-devel' packages before use.
set -e
ARCH=amd64
REPO_PATH=$1
SUITE=$2
SECTION=main
BINDIR=${REPO_PATH}/dists/${SUITE}/${SECTION}
RELEASE="${REPO_PATH}/dists/${SUITE}/Release"
touch ${RELEASE}
release_header=`sed '/MD5Sum:/,$d' ${RELEASE}`
override_main="indices/override.${SUITE}.${SECTION}"
override_udeb="indices/override.${SUITE}.${SECTION}.debian-installer"
override_extra="indices/override.${SUITE}.extra.${SECTION}"
if [ -f "${REPO_PATH}/${override_main}" ]; then
binoverride="${override_main}"
else
binoverride=""
fi
if [ -f "${REPO_PATH}/${override_udeb}" ]; then
binoverride_udeb="${override_udeb}"
else
binoverride_udeb=""
fi
if [ -f "${REPO_PATH}/${override_extra}" ]; then
extraoverride="--extra-override ${override_extra}"
else
extraoverride=""
fi
package_deb=${BINDIR}/binary-${ARCH}/Packages
package_udeb=${BINDIR}/debian-installer/binary-${ARCH}/Packages
cd ${REPO_PATH}
# Scan *.deb packages
dpkg-scanpackages -m ${extraoverride} -a ${ARCH} pool/${SECTION} ${binoverride} > ${package_deb}.tmp 2>/dev/null
gzip -9c ${package_deb}.tmp > ${package_deb}.gz.tmp
bzip2 -ckz ${package_deb}.tmp > ${package_deb}.bz2.tmp
# Replace original files with new ones
mv --backup -f ${package_deb}.tmp ${package_deb}
mv --backup -f ${package_deb}.gz.tmp ${package_deb}.gz
mv --backup -f ${package_deb}.bz2.tmp ${package_deb}.bz2
# Scan *.udeb packages
if [ -d "${BINDIR}/debian-installer/binary-${ARCH}/" ]; then
dpkg-scanpackages -t udeb -m -a ${ARCH} pool/debian-installer ${binoverride_udeb} > ${package_udeb}.tmp 2>/dev/null
gzip -9c ${package_udeb}.tmp > ${package_udeb}.gz.tmp
bzip2 -ckz ${package_udeb}.tmp > ${package_udeb}.bz2.tmp
# Replace original files with new ones
mv --backup -f ${package_udeb}.tmp ${package_udeb}
mv --backup -f ${package_udeb}.gz.tmp ${package_udeb}.gz
mv --backup -f ${package_udeb}.bz2.tmp ${package_udeb}.bz2
fi
# Generate release file
cd ${REPO_PATH}/dists/${SUITE}
echo "$release_header" > Release.tmp
# Generate hashes
c1=(MD5Sum: SHA1: SHA256: SHA512:)
c2=(md5 sha1 sha256 sha512)
i=0
while [ $i -lt ${#c1[*]} ]; do
echo ${c1[i]}
for hashme in `find ${SECTION} -type f \( -not -name "*~" -name "Package*" -o -name "Release*" \)`; do
ohash=`openssl dgst -${c2[$i]} ${hashme}`
chash="${ohash##* }"
size=`stat -c %s ${hashme}`
echo " ${chash} ${size} ${hashme}"
done
i=$(( $i + 1));
done >> Release.tmp
mv --backup -f Release.tmp Release
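# Example invocation (as deb-mirror runs it for the partial mirror):
#   regenerate_ubuntu_repo /var/www/nailgun/ubuntu-part trusty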

View File

@@ -1,74 +0,0 @@
#!/bin/bash
# Checks if remote file/dir exists
rsync_file_exists()
{
/usr/bin/rsync --no-motd --list-only "${UPSTREAM}::${UPSTREAM_DIR}/$1" &> /dev/null
return $?
}
# Fetches list of files from remote rsync repo by given mask
# $1 -- file mask
rsync_ls()
{
local to_return=()
local mask="$1"
files=`/usr/bin/rsync --no-motd --list-only \
--relative --recursive --no-implied-dirs \
--perms --copy-links --times --hard-links --sparse --safe-links \
"${UPSTREAM}::${UPSTREAM_DIR}/$mask" | \
grep -v "^d" | sed -e "s/->.*//g" | awk '{$1=$2=$3=$4=""}1'`
for file in $files; do
to_return+=( "$file" )
done
echo "${to_return[@]}"
return 0
}
# Rsync wrapper function
fetch()
{
src_path="$1"
dst_path="$2"
shift; shift
opt_args=( $* )
# Create a dest dir if needed
dst_dir=`dirname $dst_path`
[[ -d "$dst_dir" ]] || mkdir -p "$dst_dir"
debug_job_start "Fetching '$src_path' to '$dst_path' with params '${opt_args[@]}'"
/usr/bin/rsync --no-motd --perms --copy-links --times --hard-links --sparse --safe-links \
${opt_args[@]} \
"${UPSTREAM}::${UPSTREAM_DIR}/$src_path" "$dst_path"
local rsync_ec="$?"
if [[ $rsync_ec = 0 ]]; then
debug_job_ok
else
debug_job_err
fi
return $rsync_ec
}
# Fetches all files to specified root
# $1 -- Local root, where all files will be stored by their relative paths
# $* -- Files to fetch
fetch_all()
{
local root="$1"; shift
local fetched=()
local rsync_out=""
rsync_out=` echo $* | tr ' ' '\n' | \
rsync --no-motd --relative --out-format='%n' --files-from=- \
--no-implied-dirs --no-motd \
--perms --copy-links --times --hard-links --sparse \
"${UPSTREAM}::${UPSTREAM_DIR}/" "$root" 2> /dev/null`
for line in $rsync_out; do
debug "Fetched file $LOCAL_DIR/$line"
fetched+=( "$LOCAL_DIR/$line" )
done
# Return the fetched file list on stdout so callers can capture it
echo "${fetched[@]}"
}
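# Usage sketch (illustrative): fetch a dist's Release files into $LOCAL_DIR and
# iterate over what actually came down:
#   got=`fetch_all "$LOCAL_DIR" dists/trusty/Release dists/trusty/Release.gpg`
#   for f in $got; do echo "have $f"; done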