update
@@ -1,97 +0,0 @@
ARG EE_BASE_IMAGE="docker.io/rockylinux:9.3"
ARG PYCMD="/usr/bin/python3"
ARG PKGMGR_PRESERVE_CACHE=""
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS="--pre"
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS=""
ARG ANSIBLE_INSTALL_REFS="ansible-core>=2.15.0rc2,<2.16 ansible-runner"
ARG PKGMGR="/usr/bin/dnf"

# Base build stage
FROM $EE_BASE_IMAGE as base
USER root
ARG EE_BASE_IMAGE
ARG PYCMD
ARG PKGMGR_PRESERVE_CACHE
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS
ARG ANSIBLE_INSTALL_REFS
ARG PKGMGR

RUN /usr/bin/python3 -m ensurepip
RUN /usr/bin/python3 -m pip install --upgrade pip
RUN $PYCMD -m ensurepip
RUN $PYCMD -m pip install --no-cache-dir $ANSIBLE_INSTALL_REFS
COPY _build/scripts/ /output/scripts/
COPY _build/scripts/entrypoint /opt/builder/bin/entrypoint
RUN $PYCMD -m pip install -U pip

# Galaxy build stage
FROM base as galaxy
ARG EE_BASE_IMAGE
ARG PYCMD
ARG PKGMGR_PRESERVE_CACHE
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS
ARG ANSIBLE_INSTALL_REFS
ARG PKGMGR

RUN /usr/bin/python3 -m pip install --upgrade pip cmake
COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg
RUN /output/scripts/check_galaxy
COPY _build /build
WORKDIR /build

RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r requirements.yml --roles-path "/usr/share/ansible/roles"
RUN ANSIBLE_GALAXY_DISABLE_GPG_VERIFY=1 ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r requirements.yml --collections-path "/usr/share/ansible/collections"

# Builder build stage
FROM base as builder
WORKDIR /build
ARG EE_BASE_IMAGE
ARG PYCMD
ARG PKGMGR_PRESERVE_CACHE
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS
ARG ANSIBLE_INSTALL_REFS
ARG PKGMGR

RUN $PYCMD -m pip install --no-cache-dir bindep pyyaml requirements-parser

COPY --from=galaxy /usr/share/ansible /usr/share/ansible

COPY _build/requirements.txt requirements.txt
COPY _build/bindep.txt bindep.txt
RUN $PYCMD /output/scripts/introspect.py introspect --sanitize --user-pip=requirements.txt --user-bindep=bindep.txt --write-bindep=/tmp/src/bindep.txt --write-pip=/tmp/src/requirements.txt
RUN /output/scripts/assemble

# Final build stage
FROM base as final
ARG EE_BASE_IMAGE
ARG PYCMD
ARG PKGMGR_PRESERVE_CACHE
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS
ARG ANSIBLE_INSTALL_REFS
ARG PKGMGR

RUN whoami
RUN cat /etc/os-release
RUN /output/scripts/check_ansible $PYCMD

COPY --from=galaxy /usr/share/ansible /usr/share/ansible

COPY --from=builder /output/ /output/
RUN /output/scripts/install-from-bindep && rm -rf /output/wheels
RUN chmod ug+rw /etc/passwd
RUN mkdir -p /runner && chgrp 0 /runner && chmod -R ug+rwx /runner
WORKDIR /runner
RUN $PYCMD -m pip install --no-cache-dir 'dumb-init==1.2.5'
RUN pip3 install --upgrade azure-identity azure-cli-core paramiko
COPY --from=quay.io/ansible/receptor:devel /usr/bin/receptor /usr/bin/receptor
RUN mkdir -p /var/run/receptor
RUN git lfs install --system
RUN rm -rf /output
LABEL ansible-execution-environment=true
USER 1000
ENTRYPOINT ["/opt/builder/bin/entrypoint", "dumb-init"]
CMD ["bash"]
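A Containerfile generated this way is normally built by ansible-builder itself, but it can also be built directly. A minimal sketch, assuming the file is saved as Containerfile next to its _build/ context and that the tag my-ee is arbitrary:

    # Build the execution environment image directly with podman.
    podman build -f Containerfile -t my-ee .
    # Smoke test: the entrypoint chain-execs dumb-init, then the command.
    podman run --rm my-ee ansible --version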
@@ -1,28 +0,0 @@
subversion [platform:rpm]
wget [platform:rpm]
unzip [platform:rpm]
gcc [platform:rpm]
python3-devel [platform:rpm]
cmake [platform:rpm]
gcc-c++ [platform:rpm]
make [platform:rpm]
openssl-devel [platform:rpm]
git-core [platform:rpm]
python3.9-devel [platform:rpm compile]
libcurl-devel [platform:rpm compile]
krb5-devel [platform:rpm compile]
krb5-workstation [platform:rpm]
subversion [platform:rpm]
subversion [platform:dpkg]
git-lfs [platform:rpm]
sshpass [platform:rpm]
rsync [platform:rpm]
epel-release [platform:rpm]
python-unversioned-command [platform:rpm]
unzip [platform:rpm]
podman-remote [platform:rpm]
cmake [platform:rpm compile]
gcc [platform:rpm compile]
gcc-c++ [platform:rpm compile]
make [platform:rpm compile]
openssl-devel [platform:rpm compile]
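The assemble script later in this commit consumes this file via bindep. To preview locally what it will resolve, a sketch, assuming bindep is installed and the shell is in the directory holding bindep.txt:

    # Runtime packages for the detected platform, one per line.
    bindep -l newline
    # Packages tagged with the 'compile' profile (build-time only).
    bindep -b compile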
@@ -1,10 +0,0 @@
[galaxy]
server_list = galaxy

[galaxy_server.galaxy]
url=https://galaxy.ansible.com/

[defaults]
NETWORK_GROUP_MODULES=arubaoss
host_key_checking = false
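To confirm the image actually picks these settings up, ansible-config can dump the effective values. A sketch, run inside the built container:

    # Show only settings that differ from Ansible's defaults.
    ANSIBLE_CONFIG=/etc/ansible/ansible.cfg ansible-config dump --only-changed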
@@ -1,51 +0,0 @@
ncclient==0.6.9
scp==0.13.3
textfsm==1.1.0
ipaddr==2.2.0
#az-cli

git+https://github.com/ansible/ansible-sign
ncclient
paramiko
pykerberos
pyOpenSSL
pypsrp[kerberos,credssp]
pywinrm[kerberos,credssp]
toml
pexpect>=4.5
python-daemon
pyyaml
six
receptorctl

#azure
packaging
requests[security]
xmltodict
msgraph-sdk==1.0.0
azure-cli-core==2.61.0
azure-common==1.1.11
azure-identity==1.16.1
azure-mgmt-authorization==2.0.0
azure-mgmt-apimanagement==3.0.0
azure-mgmt-batch==16.2.0
azure-mgmt-cdn==11.0.0
azure-mgmt-compute==30.6.0
azure-mgmt-containerinstance==9.0.0
azure-mgmt-core==1.4.0
azure-mgmt-containerregistry==9.1.0
azure-containerregistry==1.1.0
azure-mgmt-containerservice==20.0.0
azure-mgmt-datafactory==2.0.0
azure-mgmt-dns==8.0.0
azure-mgmt-marketplaceordering==1.1.0
azure-mgmt-monitor==3.0.0
azure-mgmt-managedservices==6.0.0
azure-mgmt-managementgroups==1.0.0
azure-mgmt-network==19.1.0
azure-mgmt-nspkg==2.0.0
azure-mgmt-privatedns==1.0.0
azure-mgmt-redis==13.0.0
azure-mgmt-resource==21.1.0
azure-mgmt-rdbms==10.2.0b12
azure-mgmt-search==8.0.0
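Several of these pins are old (for example azure-mgmt-network==19.1.0), so resolver conflicts are the most common failure in this file. A quick way to vet the set before a full image build, assuming pip >= 22.2 for the --dry-run flag:

    # Resolve without installing to surface version conflicts early.
    python3 -m pip install --dry-run -r requirements.txt
    # After a real install, verify nothing has broken dependencies.
    python3 -m pip check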
@@ -1,14 +0,0 @@
---
collections:
- azure.azcollection
- ansible.windows
- community.windows
- community.general
- tribe29.checkmk
- ansible.posix
- awx.awx
- cisco.ios
- microsoft.ad
- arubanetworks.aos_switch
- ansible.netcommon
- community.docker
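The galaxy stage resolves this list with the ansible-galaxy command shown in the Containerfile above. To verify what actually landed in the image afterwards, a sketch, run inside the built container:

    ansible-galaxy collection list -p /usr/share/ansible/collections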
@@ -1,171 +0,0 @@
#!/bin/bash
# Copyright (c) 2019 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Make a list of bindep dependencies and a collection of built binary
# wheels for the repo in question as well as its python dependencies.
# Install javascript tools as well to support python that needs javascript
# at build time.
set -ex

RELEASE=$(source /etc/os-release; echo $ID)

# NOTE(pabelanger): Allow users to force either microdnf or dnf as a package
# manager.
PKGMGR="${PKGMGR:-}"
PKGMGR_OPTS="${PKGMGR_OPTS:-}"
PKGMGR_PRESERVE_CACHE="${PKGMGR_PRESERVE_CACHE:-}"

PYCMD="${PYCMD:=/usr/bin/python3}"
PIPCMD="${PIPCMD:=$PYCMD -m pip}"

$PYCMD -m ensurepip

if [ -z $PKGMGR ]; then
    # Expect dnf to be installed, however if we find microdnf default to it.
    PKGMGR=/usr/bin/dnf
    if [ -f "/usr/bin/microdnf" ]; then
        PKGMGR=/usr/bin/microdnf
    fi
fi

if [ "$PKGMGR" = "/usr/bin/microdnf" ]
then
    if [ -z $PKGMGR_OPTS ]; then
        # NOTE(pabelanger): skip installing docs and weak dependencies to
        # make smaller images. Sadly, setting these in dnf.conf doesn't
        # appear to work.
        PKGMGR_OPTS="--nodocs --setopt install_weak_deps=0"
    fi
fi

# NOTE(pabelanger): Ensure all the directories we use exist regardless
# of whether the user created them first.
mkdir -p /output/bindep
mkdir -p /output/wheels
mkdir -p /tmp/src

cd /tmp/src

function install_bindep {
    # Protect against the bindep builder image's use of the assemble script
    # to produce a wheel. Note we append because we want all
    # sibling packages in here too
    if [ -f bindep.txt ] ; then
        bindep -l newline | sort >> /output/bindep/run.txt || true
        if [ "$RELEASE" == "centos" ] ; then
            bindep -l newline -b epel | sort >> /output/bindep/stage.txt || true
            grep -Fxvf /output/bindep/run.txt /output/bindep/stage.txt >> /output/bindep/epel.txt || true
            rm -rf /output/bindep/stage.txt
        fi
        compile_packages=$(bindep -b compile || true)
        if [ ! -z "$compile_packages" ] ; then
            $PKGMGR install -y $PKGMGR_OPTS ${compile_packages}
        fi
    fi
}

function install_wheels {
    # NOTE(pabelanger): If there are build requirements to install, do so.
    # However do not cache them as we do not want them in the final image.
    if [ -f /tmp/src/build-requirements.txt ] && [ ! -f /tmp/src/.build-requirements.txt ] ; then
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --no-cache -r /tmp/src/build-requirements.txt
        touch /tmp/src/.build-requirements.txt
    fi
    # Build a wheel so that we have an install target.
    # pip install . in the container context with the mounted
    # source dir gets ... exciting, if setup.py exists.
    # We run sdist first to trigger code generation steps such
    # as are found in zuul, since the sequencing otherwise
    # happens in a way that makes wheel content copying unhappy.
    # pip wheel isn't used here because it puts all of the output
    # in the output dir and not the wheel cache, so it's not
    # possible to tell what is the wheel for the project and
    # what is the wheel cache.
    if [ -f setup.py ] ; then
        $PYCMD setup.py sdist bdist_wheel -d /output/wheels
    fi

    # Install everything so that the wheel cache is populated with
    # transitive depends. If a requirements.txt file exists, install
    # it directly so that people can use git url syntax to do things
    # like pick up patched but unreleased versions of dependencies.
    # Only do this for the main package (i.e. only write requirements
    # once).
    if [ -f /tmp/src/requirements.txt ] && [ ! -f /output/requirements.txt ] ; then
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels -r /tmp/src/requirements.txt
        cp /tmp/src/requirements.txt /output/requirements.txt
    fi
    # If we didn't build wheels, we can skip trying to install them.
    if [ $(ls -1 /output/wheels/*whl 2>/dev/null | wc -l) -gt 0 ]; then
        $PIPCMD uninstall -y /output/wheels/*.whl
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels /output/wheels/*whl
    fi
}

PACKAGES=$*
PIP_OPTS="${PIP_OPTS-}"

# bindep the main package
install_bindep

# go through ZUUL_SIBLINGS, if any, and build those wheels too
for sibling in ${ZUUL_SIBLINGS:-}; do
    pushd .zuul-siblings/${sibling}
    install_bindep
    popd
done

# Use a clean virtualenv for install steps to prevent things from the
# current environment making us not build a wheel.
# NOTE(pabelanger): We allow users to install distro python packages of
# libraries. This is important for projects that eventually want to produce
# an RPM or offline install.
$PYCMD -m venv /tmp/venv --system-site-packages --without-pip
source /tmp/venv/bin/activate

# If there is an upper-constraints.txt file in the source tree,
# use it in the pip commands.
if [ -f /tmp/src/upper-constraints.txt ] ; then
    cp /tmp/src/upper-constraints.txt /output/upper-constraints.txt
    CONSTRAINTS="-c /tmp/src/upper-constraints.txt"
fi

# If we got a list of packages, install them, otherwise install the
# main package.
if [[ $PACKAGES ]] ; then
    $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels $PACKAGES
    for package in $PACKAGES ; do
        echo "$package" >> /output/packages.txt
    done
else
    install_wheels
fi

# go through ZUUL_SIBLINGS, if any, and build those wheels too
for sibling in ${ZUUL_SIBLINGS:-}; do
    pushd .zuul-siblings/${sibling}
    install_wheels
    popd
done

if [ -z $PKGMGR_PRESERVE_CACHE ]; then
    $PKGMGR clean all
    rm -rf /var/cache/{dnf,yum}
fi

rm -rf /var/lib/dnf/history.*
rm -rf /var/log/{dnf.*,hawkey.log}
rm -rf /tmp/venv
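In the builder stage, assemble runs with no arguments and reads the /tmp/src inputs prepared by introspect.py, but its environment knobs also make it usable by hand for debugging. A sketch, assuming the scripts sit at /output/scripts as in the Containerfile:

    # Force dnf, keep the package cache, and install an explicit package list
    # (positional packages are recorded in /output/packages.txt).
    PKGMGR=/usr/bin/dnf PKGMGR_PRESERVE_CACHE=1 /output/scripts/assemble bindep pyyaml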
@@ -1,110 +0,0 @@
#!/bin/bash
# Copyright (c) 2023 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#####################################################################
# Script to validate that Ansible and Ansible Runner are installed.
#
# Usage: check_ansible <PYCMD>
#
# Options:
#   PYCMD - The path to the python executable to use.
#####################################################################

set -x

PYCMD=$1

if [ -z "$PYCMD" ]
then
    echo "Usage: check_ansible <PYCMD>"
    exit 1
fi

if [ ! -x "$PYCMD" ]
then
    echo "$PYCMD is not an executable"
    exit 1
fi

ansible --version

if [ $? -ne 0 ]
then
    cat<<EOF
**********************************************************************
ERROR - Missing Ansible installation

An Ansible installation cannot be found in the final builder image.

Ansible must be installed in the final image. If you are using a
recent enough version of the execution environment file, you may
use the 'dependencies.ansible_core' configuration option to install
Ansible for you, or use 'additional_build_steps' to manually do
this yourself. Alternatively, use a base image with Ansible already
installed.
**********************************************************************
EOF
    exit 1
fi

ansible-runner --version

if [ $? -ne 0 ]
then
    cat<<EOF
**********************************************************************
ERROR - Missing Ansible Runner installation

An Ansible Runner installation cannot be found in the final builder
image.

Ansible Runner must be installed in the final image. If you are
using a recent enough version of the execution environment file, you
may use the 'dependencies.ansible_runner' configuration option to
install Ansible Runner for you, or use 'additional_build_steps' to
manually do this yourself. Alternatively, use a base image with
Ansible Runner already installed.
**********************************************************************
EOF
    exit 1
fi

$PYCMD -c 'import ansible ; import ansible_runner'

if [ $? -ne 0 ]
then
    cat<<EOF
**********************************************************************
ERROR - Missing Ansible or Ansible Runner for selected Python

An Ansible and/or Ansible Runner installation cannot be found in
the final builder image using the following Python interpreter:

    $PYCMD

Ansible and Ansible Runner must be installed in the final image and
available to the selected Python interpreter. If you are using a
recent enough version of the execution environment file, you may use
the 'dependencies.ansible_core' configuration option to install
Ansible and the 'dependencies.ansible_runner' configuration option
to install Ansible Runner. You can also use 'additional_build_steps'
to manually do this yourself. Alternatively, use a base image with
Ansible and Ansible Runner already installed.
**********************************************************************
EOF
    exit 1
fi
exit 0
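The final stage runs this as RUN /output/scripts/check_ansible $PYCMD during the build. The same check can be reproduced in a finished image, where the /output scripts have already been removed. A sketch, with my-ee as an arbitrary tag:

    # Equivalent sanity check inside a built image.
    podman run --rm my-ee /usr/bin/python3 -c 'import ansible, ansible_runner'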
@@ -1,46 +0,0 @@
#!/bin/bash
# Copyright (c) 2023 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#####################################################################
# Script to validate that Ansible Galaxy is installed on the system.
#####################################################################

set -x

ansible-galaxy --version

if [ $? -ne 0 ]
then
    cat<<EOF
**********************************************************************
ERROR - Missing Ansible installation

The 'ansible-galaxy' command is not found in the base image. This
image is used to create the intermediary image that performs the
Galaxy collection and role installation process.

Ansible must be installed in the base image. If you are using a
recent enough version of the execution environment file, you may
use the 'dependencies.ansible_core' configuration option to install
Ansible for you, or use 'additional_build_steps' to manually do
this yourself. Alternatively, use a base image with Ansible already
installed.
**********************************************************************
EOF
    exit 1
fi

exit 0
@@ -1,152 +0,0 @@
#!/usr/bin/env bash

# Copyright: (c) 2023, Ansible Project
# Apache License, Version 2.0 (see LICENSE.md or https://www.apache.org/licenses/LICENSE-2.0)

# This entrypoint script papers over a number of problems that manifest under different container runtimes when
# using ephemeral UIDs, then chain-execs to the requested init system and/or command. It is an implementation
# detail for the convenience of Ansible execution environments built by ansible-builder.
#
# If we're running as a legit user that has an entry in /etc/passwd and a valid and writeable homedir, we're all good.
#
# If the current uid is not in /etc/passwd, we'll attempt to add it, but /etc/passwd is often not writable by GID 0.
# `ansible-builder` makes /etc/passwd writable by GID 0 by default for maximum compatibility, but this is
# not guaranteed. Some runtimes/wrappers (eg podman, cri-o) already create an /etc/passwd entry on the fly as-needed,
# but they may set the homedir to something inaccessible (eg, `/`, WORKDIR).
#
# There are numerous cases where a missing or incorrect homedir in /etc/passwd is fatal. It breaks
# `async` in ansible-core, things like `echo ~someuid`, and numerous other software packages that assume a valid POSIX
# user configuration.
#
# If the homedir listed in /etc/passwd is not writeable by the current user (supposed to be primary GID0), we'll try
# to make it writeable (except `/`), or select another writeable home directory from `$HOME`, `/runner`, or `/tmp` and
# update $HOME (and /etc/passwd if possible) accordingly for the current process chain.
#
# This script is generally silent by default, but some likely-fatal cases will issue a brief warning to stderr. The
# envvars described below can be set before container init to cause faster failures and/or get tracing output.

# options:
# EP_BASH_DEBUG=1 (enable set -x)
# EP_DEBUG_TRACE=1 (enable debug trace to stderr)
# EP_ON_ERROR=ignore/warn/fail (default ignore)

set -eu

if (( "${EP_BASH_DEBUG:=0}" == 1 )); then
    set -x
fi

: "${EP_DEBUG_TRACE:=0}"
: "${EP_ON_ERROR:=warn}"
: "${HOME:=}"
CUR_UID=$(id -u)
CUR_USERNAME=$(id -u -n 2> /dev/null || true)  # whoami-free way to get current username, falls back to current uid

DEFAULT_HOME="/runner"
DEFAULT_SHELL="/bin/bash"

if (( "$EP_DEBUG_TRACE" == 1 )); then
    function log_debug() { echo "EP_DEBUG: $1" 1>&2; }
else
    function log_debug() { :; }
fi

log_debug "entrypoint.sh started"

case "$EP_ON_ERROR" in
    "fail")
        function maybe_fail() { echo "EP_FAIL: $1" 1>&2; exit 1; }
        ;;
    "warn")
        function maybe_fail() { echo "EP_WARN: $1" 1>&2; }
        ;;
    *)
        function maybe_fail() { log_debug "EP_FAIL (ignored): $1"; }
        ;;
esac

function is_dir_writable() {
    [ -d "$1" ] && [ -w "$1" ] && [ -x "$1" ]
}

function ensure_current_uid_in_passwd() {
    log_debug "is current uid ${CUR_UID} in /etc/passwd?"

    if ! getent passwd "${CUR_USERNAME}" &> /dev/null ; then
        if [ -w "/etc/passwd" ]; then
            log_debug "appending missing uid ${CUR_UID} into /etc/passwd"
            # use the default homedir; we may have to rewrite it to another value later if it's inaccessible
            echo "${CUR_UID}:x:${CUR_UID}:0:container user ${CUR_UID}:${DEFAULT_HOME}:${DEFAULT_SHELL}" >> /etc/passwd
        else
            maybe_fail "uid ${CUR_UID} is missing from /etc/passwd, which is not writable; this error is likely fatal"
        fi
    else
        log_debug "current uid is already in /etc/passwd"
    fi
}

function ensure_writeable_homedir() {
    if (is_dir_writable "${CANDIDATE_HOME}") ; then
        log_debug "candidate homedir ${CANDIDATE_HOME} is valid and writeable"
    else
        if [ "${CANDIDATE_HOME}" == "/" ]; then
            log_debug "skipping attempt to fix permissions on / as homedir"
            return 1
        fi

        log_debug "candidate homedir ${CANDIDATE_HOME} is missing or not writeable; attempt to fix"
        if ! (mkdir -p "${CANDIDATE_HOME}" >& /dev/null && chmod -R ug+rwx "${CANDIDATE_HOME}" >& /dev/null) ; then
            log_debug "candidate homedir ${CANDIDATE_HOME} cannot be made writeable"
            return 1
        else
            log_debug "candidate homedir ${CANDIDATE_HOME} was successfully made writeable"
        fi
    fi

    # this might work; export it even if we end up not being able to update /etc/passwd
    # this ensures the envvar matches current reality for this session; future sessions should set automatically if /etc/passwd is accurate
    export HOME=${CANDIDATE_HOME}

    if [ "${CANDIDATE_HOME}" == "${PASSWD_HOME}" ] ; then
        log_debug "candidate homedir ${CANDIDATE_HOME} matches /etc/passwd"
        return 0
    fi

    if ! [ -w /etc/passwd ]; then
        log_debug "candidate homedir ${CANDIDATE_HOME} is valid for ${CUR_USERNAME}, but /etc/passwd is not writable to update it"
        return 1
    fi

    log_debug "resetting homedir for user ${CUR_USERNAME} to ${CANDIDATE_HOME} in /etc/passwd"

    # sed -i wants to create a tempfile next to the original, which won't work with /etc permissions in many cases,
    # so just do it in memory and overwrite the existing file if we succeeded
    NEWPW=$(sed -r "s;(^${CUR_USERNAME}:(.*:){4})(.*:);\1${CANDIDATE_HOME}:;g" /etc/passwd)
    echo "${NEWPW}" > /etc/passwd
}

ensure_current_uid_in_passwd

log_debug "current value of HOME is ${HOME}"

PASSWD_HOME=$(getent passwd "${CUR_USERNAME}" | cut -d: -f6)
log_debug "user ${CUR_USERNAME} homedir from /etc/passwd is ${PASSWD_HOME}"

CANDIDATE_HOMES=("${PASSWD_HOME}" "${HOME}" "${DEFAULT_HOME}" "/tmp")

# we'll set this in the loop as soon as we find a writeable dir
unset HOME

for CANDIDATE_HOME in "${CANDIDATE_HOMES[@]}"; do
    if ensure_writeable_homedir ; then
        break
    fi
done

if ! [ -v HOME ] ; then
    maybe_fail "a valid homedir could not be set for ${CUR_USERNAME}; this is likely fatal"
fi

# chain exec whatever we were asked to run (ideally an init system) to keep any envvar state we've set
log_debug "chain exec-ing requested command $*"
exec "${@}"
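The EP_* switches make the homedir fixups observable. A sketch of exercising them under an arbitrary ephemeral UID (the image tag my-ee is an assumption):

    # Trace the entrypoint's decisions and fail fast instead of warning.
    podman run --rm -e EP_DEBUG_TRACE=1 -e EP_ON_ERROR=fail --user 4242:0 my-ee id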
@@ -1,107 +0,0 @@
#!/bin/bash
# Copyright (c) 2019 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -ex
# NOTE(pabelanger): Allow users to force either microdnf or dnf as a package
# manager.
PKGMGR="${PKGMGR:-}"
PKGMGR_OPTS="${PKGMGR_OPTS:-}"
PKGMGR_PRESERVE_CACHE="${PKGMGR_PRESERVE_CACHE:-}"

PYCMD="${PYCMD:=/usr/bin/python3}"
PIPCMD="${PIPCMD:=$PYCMD -m pip}"
PIP_OPTS="${PIP_OPTS-}"

$PYCMD -m ensurepip

if [ -z $PKGMGR ]; then
    # Expect dnf to be installed, however if we find microdnf default to it.
    PKGMGR=/usr/bin/dnf
    if [ -f "/usr/bin/microdnf" ]; then
        PKGMGR=/usr/bin/microdnf
    fi
fi

if [ "$PKGMGR" = "/usr/bin/microdnf" ]
then
    if [ -z $PKGMGR_OPTS ]; then
        # NOTE(pabelanger): skip installing docs and weak dependencies to
        # make smaller images. Sadly, setting these in dnf.conf doesn't
        # appear to work.
        PKGMGR_OPTS="--nodocs --setopt install_weak_deps=0"
    fi
fi

if [ -f /output/bindep/run.txt ] ; then
    PACKAGES=$(cat /output/bindep/run.txt)
    if [ ! -z "$PACKAGES" ]; then
        $PKGMGR install -y $PKGMGR_OPTS $PACKAGES
    fi
fi

if [ -f /output/bindep/epel.txt ] ; then
    EPEL_PACKAGES=$(cat /output/bindep/epel.txt)
    if [ ! -z "$EPEL_PACKAGES" ]; then
        $PKGMGR install -y $PKGMGR_OPTS --enablerepo epel $EPEL_PACKAGES
    fi
fi

# If there's a constraints file, use it.
if [ -f /output/upper-constraints.txt ] ; then
    CONSTRAINTS="-c /output/upper-constraints.txt"
fi

# If a requirements.txt file exists,
# install it directly so that people can use git url syntax
# to do things like pick up patched but unreleased versions
# of dependencies.
if [ -f /output/requirements.txt ] ; then
    $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels -r /output/requirements.txt
fi

# Add any requested extras to the list of things to install
EXTRAS=""
for extra in $* ; do
    EXTRAS="${EXTRAS} -r /output/$extra/requirements.txt"
done

if [ -f /output/packages.txt ] ; then
    # If a package list was passed to assemble, install that in the final
    # image.
    $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels -r /output/packages.txt $EXTRAS
else
    # Install the wheels. Uninstall any existing versions first, as siblings
    # may be built with the same version number as the latest release, but we
    # really want the speculatively built wheels installed over any
    # automatic dependencies.
    # NOTE(pabelanger): It is possible a project may not have a wheel, but does have requirements.txt
    if [ $(ls -1 /output/wheels/*whl 2>/dev/null | wc -l) -gt 0 ]; then
        $PIPCMD uninstall -y /output/wheels/*.whl
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels /output/wheels/*.whl $EXTRAS
    elif [ ! -z "$EXTRAS" ] ; then
        $PIPCMD uninstall -y $EXTRAS
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels $EXTRAS
    fi
fi

# clean up after ourselves, unless requested to keep the cache
if [[ "$PKGMGR_PRESERVE_CACHE" != always ]]; then
    $PKGMGR clean all
    rm -rf /var/cache/{dnf,yum}
fi

rm -rf /var/lib/dnf/history.*
rm -rf /var/log/{dnf.*,hawkey.log}
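The positional "extras" map to /output/<extra>/requirements.txt files laid down by earlier stages. A sketch of pulling in one such optional set (the extra name azure here is hypothetical):

    # Install the bindep/pip output plus /output/azure/requirements.txt.
    /output/scripts/install-from-bindep azure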
@@ -1,400 +0,0 @@
import argparse
import logging
import os
import sys
import yaml

import requirements
import importlib.metadata

base_collections_path = '/usr/share/ansible/collections'
default_file = 'execution-environment.yml'
logger = logging.getLogger(__name__)


def line_is_empty(line):
    return bool((not line.strip()) or line.startswith('#'))


def read_req_file(path):
    """Provide some minimal error and display handling for file reading"""
    if not os.path.exists(path):
        print('Expected requirements file not present at: {0}'.format(os.path.abspath(path)))
    with open(path, 'r') as f:
        return f.read()


def pip_file_data(path):
    pip_content = read_req_file(path)

    pip_lines = []
    for line in pip_content.split('\n'):
        if line_is_empty(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            _, new_filename = line.split(None, 1)
            new_path = os.path.join(os.path.dirname(path or '.'), new_filename)
            pip_lines.extend(pip_file_data(new_path))
        else:
            pip_lines.append(line)

    return pip_lines


def bindep_file_data(path):
    sys_content = read_req_file(path)

    sys_lines = []
    for line in sys_content.split('\n'):
        if line_is_empty(line):
            continue
        sys_lines.append(line)

    return sys_lines


def process_collection(path):
    """Return a tuple of (python_dependencies, system_dependencies) for the
    collection install path given.
    Both items returned are a list of dependencies.

    :param str path: root directory of collection (this would contain galaxy.yml file)
    """
    CD = CollectionDefinition(path)

    py_file = CD.get_dependency('python')
    pip_lines = []
    if py_file:
        pip_lines = pip_file_data(os.path.join(path, py_file))

    sys_file = CD.get_dependency('system')
    bindep_lines = []
    if sys_file:
        bindep_lines = bindep_file_data(os.path.join(path, sys_file))

    return (pip_lines, bindep_lines)


def process(data_dir=base_collections_path, user_pip=None, user_bindep=None):
    paths = []
    path_root = os.path.join(data_dir, 'ansible_collections')

    # build a list of all the valid collection paths
    if os.path.exists(path_root):
        for namespace in sorted(os.listdir(path_root)):
            if not os.path.isdir(os.path.join(path_root, namespace)):
                continue
            for name in sorted(os.listdir(os.path.join(path_root, namespace))):
                collection_dir = os.path.join(path_root, namespace, name)
                if not os.path.isdir(collection_dir):
                    continue
                files_list = os.listdir(collection_dir)
                if 'galaxy.yml' in files_list or 'MANIFEST.json' in files_list:
                    paths.append(collection_dir)

    # populate the requirements content
    py_req = {}
    sys_req = {}
    for path in paths:
        col_pip_lines, col_sys_lines = process_collection(path)
        CD = CollectionDefinition(path)
        namespace, name = CD.namespace_name()
        key = '{}.{}'.format(namespace, name)

        if col_pip_lines:
            py_req[key] = col_pip_lines

        if col_sys_lines:
            sys_req[key] = col_sys_lines

    # add on entries from user files, if they are given
    if user_pip:
        col_pip_lines = pip_file_data(user_pip)
        if col_pip_lines:
            py_req['user'] = col_pip_lines
    if user_bindep:
        col_sys_lines = bindep_file_data(user_bindep)
        if col_sys_lines:
            sys_req['user'] = col_sys_lines

    return {
        'python': py_req,
        'system': sys_req
    }


def has_content(candidate_file):
    """Beyond checking that the candidate exists, this also assures
    that the file has something other than whitespace,
    which can cause errors when given to pip.
    """
    if not os.path.exists(candidate_file):
        return False
    with open(candidate_file, 'r') as f:
        content = f.read()
    return bool(content.strip().strip('\n'))


class CollectionDefinition:
    """This class represents the dependency metadata for a collection;
    it should be replaced by logic that hits the Galaxy API, if that is made available.
    """

    def __init__(self, collection_path):
        self.reference_path = collection_path
        meta_file = os.path.join(collection_path, 'meta', default_file)
        if os.path.exists(meta_file):
            with open(meta_file, 'r') as f:
                self.raw = yaml.safe_load(f)
        else:
            self.raw = {'version': 1, 'dependencies': {}}
            # Automatically infer requirements for collection
            for entry, filename in [('python', 'requirements.txt'), ('system', 'bindep.txt')]:
                candidate_file = os.path.join(collection_path, filename)
                if has_content(candidate_file):
                    self.raw['dependencies'][entry] = filename

    def target_dir(self):
        namespace, name = self.namespace_name()
        return os.path.join(
            base_collections_path, 'ansible_collections',
            namespace, name
        )

    def namespace_name(self):
        "Returns 2-tuple of namespace and name"
        path_parts = [p for p in self.reference_path.split(os.path.sep) if p]
        return tuple(path_parts[-2:])

    def get_dependency(self, entry):
        """A collection is only allowed to reference a file by a relative path
        which is relative to the collection root
        """
        req_file = self.raw.get('dependencies', {}).get(entry)
        if req_file is None:
            return None
        elif os.path.isabs(req_file):
            raise RuntimeError(
                'Collections must specify relative paths for requirements files. '
                'The file {0} specified by {1} violates this.'.format(
                    req_file, self.reference_path
                )
            )

        return req_file


def simple_combine(reqs):
    """Given a dictionary of requirement lines keyed off collections,
    return a list with the most basic of de-duplication logic,
    and comments indicating the sources based off the collection keys
    """
    consolidated = []
    fancy_lines = []
    for collection, lines in reqs.items():
        for line in lines:
            if line_is_empty(line):
                continue

            base_line = line.split('#')[0].strip()
            if base_line in consolidated:
                i = consolidated.index(base_line)
                fancy_lines[i] += ', {}'.format(collection)
            else:
                fancy_line = base_line + ' # from collection {}'.format(collection)
                consolidated.append(base_line)
                fancy_lines.append(fancy_line)

    return fancy_lines


def parse_args(args=sys.argv[1:]):

    parser = argparse.ArgumentParser(
        prog='introspect',
        description=(
            'ansible-builder introspection; injected and used during execution environment build'
        )
    )

    subparsers = parser.add_subparsers(help='The command to invoke.', dest='action')
    subparsers.required = True

    create_introspect_parser(subparsers)

    args = parser.parse_args(args)

    return args


def run_introspect(args, logger):
    data = process(args.folder, user_pip=args.user_pip, user_bindep=args.user_bindep)
    if args.sanitize:
        logger.info('# Sanitized dependencies for %s', args.folder)
        data_for_write = data
        data['python'] = sanitize_requirements(data['python'])
        data['system'] = simple_combine(data['system'])
    else:
        logger.info('# Dependency data for %s', args.folder)
        data_for_write = data.copy()
        data_for_write['python'] = simple_combine(data['python'])
        data_for_write['system'] = simple_combine(data['system'])

    print('---')
    print(yaml.dump(data, default_flow_style=False))

    if args.write_pip and data.get('python'):
        write_file(args.write_pip, data_for_write.get('python') + [''])
    if args.write_bindep and data.get('system'):
        write_file(args.write_bindep, data_for_write.get('system') + [''])

    sys.exit(0)


def create_introspect_parser(parser):
    introspect_parser = parser.add_parser(
        'introspect',
        help='Introspects collections in folder.',
        description=(
            'Loops over collections in folder and returns data about dependencies. '
            'This is used internally and exposed here for verification. '
            'This is targeted toward collection authors and maintainers.'
        )
    )
    introspect_parser.add_argument('--sanitize', action='store_true',
                                   help=('Sanitize and de-duplicate requirements. '
                                         'This is normally done separately from the introspect script, but this '
                                         'option is given to more accurately test collection content.'))

    introspect_parser.add_argument(
        'folder', default=base_collections_path, nargs='?',
        help=(
            'Ansible collections path(s) to introspect. '
            'This should have a folder named ansible_collections inside of it.'
        )
    )
    # Combine user requirements and collection requirements into a single file;
    # in the future, we could look into passing multiple files to the
    # python-builder scripts instead of combining them here.
    introspect_parser.add_argument(
        '--user-pip', dest='user_pip',
        help='An additional file to combine with collection pip requirements.'
    )
    introspect_parser.add_argument(
        '--user-bindep', dest='user_bindep',
        help='An additional file to combine with collection bindep requirements.'
    )
    introspect_parser.add_argument(
        '--write-pip', dest='write_pip',
        help='Write the combined pip requirements file to this location.'
    )
    introspect_parser.add_argument(
        '--write-bindep', dest='write_bindep',
        help='Write the combined bindep requirements file to this location.'
    )

    return introspect_parser


EXCLUDE_REQUIREMENTS = frozenset((
    # obviously already satisfied or unwanted
    'ansible', 'ansible-base', 'python', 'ansible-core',
    # general python test requirements
    'tox', 'pycodestyle', 'yamllint', 'pylint',
    'flake8', 'pytest', 'pytest-xdist', 'coverage', 'mock', 'testinfra',
    # test requirements highly specific to Ansible testing
    'ansible-lint', 'molecule', 'galaxy-importer', 'voluptuous',
    # already present in image for py3 environments
    'yaml', 'pyyaml', 'json',
))


def sanitize_requirements(collection_py_reqs):
    """
    Clean up Python requirements by removing duplicates and excluded packages.

    The user requirements file will go through the deduplication process, but
    skips the special package exclusion process.

    :param dict collection_py_reqs: A dict of lists of Python requirements, keyed
        by fully qualified collection name. The special key `user` holds requirements
        from the user specified requirements file from the ``--user-pip`` CLI option.

    :returns: A finalized list of sanitized Python requirements.
    """
    # de-duplication
    consolidated = []
    seen_pkgs = set()

    for collection, lines in collection_py_reqs.items():
        try:
            for req in requirements.parse('\n'.join(lines)):
                if req.specifier:
                    req.name = importlib.metadata.Prepared(req.name).normalized
                req.collections = [collection]  # add backref for later
                if req.name is None:
                    consolidated.append(req)
                    continue
                if req.name in seen_pkgs:
                    for prior_req in consolidated:
                        if req.name == prior_req.name:
                            prior_req.specs.extend(req.specs)
                            prior_req.collections.append(collection)
                            break
                    continue
                consolidated.append(req)
                seen_pkgs.add(req.name)
        except Exception as e:
            logger.warning('Warning: failed to parse requirements from %s, error: %s', collection, e)

    # removal of unwanted packages
    sanitized = []
    for req in consolidated:
        # Exclude packages, unless present in the user-supplied requirements.
        if req.name and req.name.lower() in EXCLUDE_REQUIREMENTS and 'user' not in req.collections:
            logger.debug('# Excluding requirement %s from %s', req.name, req.collections)
            continue
        if req.vcs or req.uri:
            # Requirements like git+ or http(s) URLs are returned as-is
            new_line = req.line
        elif req.name:
            specs = ['{0}{1}'.format(cmp, ver) for cmp, ver in req.specs]
            new_line = req.name + ','.join(specs)
        else:
            raise RuntimeError('Could not process {0}'.format(req.line))

        sanitized.append(new_line + ' # from collection {}'.format(','.join(req.collections)))

    return sanitized


def write_file(filename: str, lines: list) -> bool:
    parent_dir = os.path.dirname(filename)
    if parent_dir and not os.path.exists(parent_dir):
        logger.warning('Creating parent directory for %s', filename)
        os.makedirs(parent_dir)
    new_text = '\n'.join(lines)
    if os.path.exists(filename):
        with open(filename, 'r') as f:
            if f.read() == new_text:
                logger.debug("File %s is already up-to-date.", filename)
                return False
            else:
                logger.warning('File %s had modifications and will be rewritten', filename)
    with open(filename, 'w') as f:
        f.write(new_text)
    return True


def main():
    args = parse_args()

    if args.action == 'introspect':
        run_introspect(args, logger)

    logger.error("An error has occurred.")
    sys.exit(1)


if __name__ == '__main__':
    main()
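The builder stage invokes this script exactly once with the RUN line shown in the Containerfile; the same invocation can be replayed against a local collections tree when debugging dependency resolution. A sketch, with /tmp/out as an arbitrary scratch directory:

    python3 introspect.py introspect --sanitize \
        --user-pip=requirements.txt --user-bindep=bindep.txt \
        --write-pip=/tmp/out/requirements.txt --write-bindep=/tmp/out/bindep.txt \
        /usr/share/ansible/collections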
@@ -1,97 +0,0 @@
ARG EE_BASE_IMAGE="quay.io/centos/centos:stream9"
ARG PYCMD="/usr/bin/python3"
ARG PKGMGR_PRESERVE_CACHE=""
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS="--pre"
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS=""
ARG ANSIBLE_INSTALL_REFS="ansible-core>=2.15.0rc2,<2.16 ansible-runner"
ARG PKGMGR="/usr/bin/dnf"

# Base build stage
FROM $EE_BASE_IMAGE as base
USER root
ARG EE_BASE_IMAGE
ARG PYCMD
ARG PKGMGR_PRESERVE_CACHE
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS
ARG ANSIBLE_INSTALL_REFS
ARG PKGMGR

RUN /usr/bin/python3 -m ensurepip
RUN /usr/bin/python3 -m pip install --upgrade pip
RUN $PYCMD -m ensurepip
RUN $PYCMD -m pip install --no-cache-dir $ANSIBLE_INSTALL_REFS
COPY _build/scripts/ /output/scripts/
COPY _build/scripts/entrypoint /opt/builder/bin/entrypoint
RUN $PYCMD -m pip install -U pip

# Galaxy build stage
FROM base as galaxy
ARG EE_BASE_IMAGE
ARG PYCMD
ARG PKGMGR_PRESERVE_CACHE
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS
ARG ANSIBLE_INSTALL_REFS
ARG PKGMGR

RUN /usr/bin/python3 -m pip install --upgrade pip cmake
COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg
RUN /output/scripts/check_galaxy
COPY _build /build
WORKDIR /build

RUN ansible-galaxy role install $ANSIBLE_GALAXY_CLI_ROLE_OPTS -r requirements.yml --roles-path "/usr/share/ansible/roles"
RUN ANSIBLE_GALAXY_DISABLE_GPG_VERIFY=1 ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r requirements.yml --collections-path "/usr/share/ansible/collections"

# Builder build stage
FROM base as builder
WORKDIR /build
ARG EE_BASE_IMAGE
ARG PYCMD
ARG PKGMGR_PRESERVE_CACHE
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS
ARG ANSIBLE_INSTALL_REFS
ARG PKGMGR

RUN $PYCMD -m pip install --no-cache-dir bindep pyyaml requirements-parser

COPY --from=galaxy /usr/share/ansible /usr/share/ansible

COPY _build/requirements.txt requirements.txt
COPY _build/bindep.txt bindep.txt
RUN $PYCMD /output/scripts/introspect.py introspect --sanitize --user-pip=requirements.txt --user-bindep=bindep.txt --write-bindep=/tmp/src/bindep.txt --write-pip=/tmp/src/requirements.txt
RUN /output/scripts/assemble

# Final build stage
FROM base as final
ARG EE_BASE_IMAGE
ARG PYCMD
ARG PKGMGR_PRESERVE_CACHE
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS
ARG ANSIBLE_GALAXY_CLI_ROLE_OPTS
ARG ANSIBLE_INSTALL_REFS
ARG PKGMGR

RUN whoami
RUN cat /etc/os-release
RUN /output/scripts/check_ansible $PYCMD

COPY --from=galaxy /usr/share/ansible /usr/share/ansible

COPY --from=builder /output/ /output/
RUN /output/scripts/install-from-bindep && rm -rf /output/wheels
RUN chmod ug+rw /etc/passwd
RUN mkdir -p /runner && chgrp 0 /runner && chmod -R ug+rwx /runner
WORKDIR /runner
RUN $PYCMD -m pip install --no-cache-dir 'dumb-init==1.2.5'
RUN pip3 install --upgrade azure-identity azure-cli-core paramiko
COPY --from=quay.io/ansible/receptor:devel /usr/bin/receptor /usr/bin/receptor
RUN mkdir -p /var/run/receptor
RUN git lfs install --system
RUN rm -rf /output
LABEL ansible-execution-environment=true
USER 1000
ENTRYPOINT ["/opt/builder/bin/entrypoint", "dumb-init"]
CMD ["bash"]
@@ -1,28 +0,0 @@
subversion [platform:rpm]
wget [platform:rpm]
unzip [platform:rpm]
gcc [platform:rpm]
python3-devel [platform:rpm]
cmake [platform:rpm]
gcc-c++ [platform:rpm]
make [platform:rpm]
openssl-devel [platform:rpm]
git-core [platform:rpm]
python3.9-devel [platform:rpm compile]
libcurl-devel [platform:rpm compile]
krb5-devel [platform:rpm compile]
krb5-workstation [platform:rpm]
subversion [platform:rpm]
subversion [platform:dpkg]
git-lfs [platform:rpm]
sshpass [platform:rpm]
rsync [platform:rpm]
epel-release [platform:rpm]
python-unversioned-command [platform:rpm]
unzip [platform:rpm]
podman-remote [platform:rpm]
cmake [platform:rpm compile]
gcc [platform:rpm compile]
gcc-c++ [platform:rpm compile]
make [platform:rpm compile]
openssl-devel [platform:rpm compile]
@@ -1,10 +0,0 @@
[galaxy]
server_list = galaxy

[galaxy_server.galaxy]
url=https://galaxy.ansible.com/

[defaults]
NETWORK_GROUP_MODULES=arubaoss
host_key_checking = false
@@ -1,51 +0,0 @@
ncclient==0.6.9
scp==0.13.3
textfsm==1.1.0
ipaddr==2.2.0
#az-cli

git+https://github.com/ansible/ansible-sign
ncclient
paramiko
pykerberos
pyOpenSSL
pypsrp[kerberos,credssp]
pywinrm[kerberos,credssp]
toml
pexpect>=4.5
python-daemon
pyyaml
six
receptorctl

#azure
packaging
requests[security]
xmltodict
msgraph-sdk==1.0.0
azure-cli-core==2.61.0
azure-common==1.1.11
azure-identity==1.16.1
azure-mgmt-authorization==2.0.0
azure-mgmt-apimanagement==3.0.0
azure-mgmt-batch==16.2.0
azure-mgmt-cdn==11.0.0
azure-mgmt-compute==30.6.0
azure-mgmt-containerinstance==9.0.0
azure-mgmt-core==1.4.0
azure-mgmt-containerregistry==9.1.0
azure-containerregistry==1.1.0
azure-mgmt-containerservice==20.0.0
azure-mgmt-datafactory==2.0.0
azure-mgmt-dns==8.0.0
azure-mgmt-marketplaceordering==1.1.0
azure-mgmt-monitor==3.0.0
azure-mgmt-managedservices==6.0.0
azure-mgmt-managementgroups==1.0.0
azure-mgmt-network==19.1.0
azure-mgmt-nspkg==2.0.0
azure-mgmt-privatedns==1.0.0
azure-mgmt-redis==13.0.0
azure-mgmt-resource==21.1.0
azure-mgmt-rdbms==10.2.0b12
azure-mgmt-search==8.0.0
@@ -1,14 +0,0 @@
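# Collections installed with ansible-galaxy at build time; any
# requirements.txt / bindep.txt they ship are merged by the introspect script.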
---
collections:
  - azure.azcollection
  - ansible.windows
  - community.windows
  - community.general
  - tribe29.checkmk
  - ansible.posix
  - awx.awx
  - cisco.ios
  - microsoft.ad
  - arubanetworks.aos_switch
  - ansible.netcommon
  - community.docker
@@ -1,171 +0,0 @@
#!/bin/bash
# Copyright (c) 2019 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Make a list of bindep dependencies and a collection of built binary
# wheels for the repo in question as well as its python dependencies.
# Install javascript tools as well to support python that needs javascript
# at build time.
set -ex
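
# Everything this script produces lands under /output: bindep package lists
# in /output/bindep and a pip wheel cache in /output/wheels, both consumed
# later by the install-from-bindep script.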
RELEASE=$(source /etc/os-release; echo $ID)

# NOTE(pabelanger): Allow users to force either microdnf or dnf as a package
# manager.
PKGMGR="${PKGMGR:-}"
PKGMGR_OPTS="${PKGMGR_OPTS:-}"
PKGMGR_PRESERVE_CACHE="${PKGMGR_PRESERVE_CACHE:-}"

PYCMD="${PYCMD:=/usr/bin/python3}"
PIPCMD="${PIPCMD:=$PYCMD -m pip}"

$PYCMD -m ensurepip

if [ -z "$PKGMGR" ]; then
    # Expect dnf to be installed, however if we find microdnf default to it.
    PKGMGR=/usr/bin/dnf
    if [ -f "/usr/bin/microdnf" ]; then
        PKGMGR=/usr/bin/microdnf
    fi
fi

if [ "$PKGMGR" = "/usr/bin/microdnf" ]
then
    if [ -z "$PKGMGR_OPTS" ]; then
        # NOTE(pabelanger): skip install docs and weak dependencies to
        # make smaller images. Sadly, setting these in dnf.conf doesn't
        # appear to work.
        PKGMGR_OPTS="--nodocs --setopt install_weak_deps=0"
    fi
fi

# NOTE(pabelanger): Ensure all the directories we use exist, regardless
# of whether the user created them first.
mkdir -p /output/bindep
mkdir -p /output/wheels
mkdir -p /tmp/src

cd /tmp/src

function install_bindep {
    # Protect from the bindep builder image use of the assemble script
    # to produce a wheel. Note we append because we want all
    # sibling packages in here too
    if [ -f bindep.txt ] ; then
        bindep -l newline | sort >> /output/bindep/run.txt || true
        if [ "$RELEASE" == "centos" ] ; then
            bindep -l newline -b epel | sort >> /output/bindep/stage.txt || true
            grep -Fxvf /output/bindep/run.txt /output/bindep/stage.txt >> /output/bindep/epel.txt || true
            rm -rf /output/bindep/stage.txt
        fi
        compile_packages=$(bindep -b compile || true)
        if [ ! -z "$compile_packages" ] ; then
            $PKGMGR install -y $PKGMGR_OPTS ${compile_packages}
        fi
    fi
}

function install_wheels {
    # NOTE(pabelanger): If there are build requirements to install, do so.
    # However do not cache them as we do not want them in the final image.
    if [ -f /tmp/src/build-requirements.txt ] && [ ! -f /tmp/src/.build-requirements.txt ] ; then
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --no-cache -r /tmp/src/build-requirements.txt
        touch /tmp/src/.build-requirements.txt
    fi
    # Build a wheel so that we have an install target.
    # pip install . in the container context with the mounted
    # source dir gets ... exciting, if setup.py exists.
    # We run sdist first to trigger code generation steps such
    # as are found in zuul, since the sequencing otherwise
    # happens in a way that makes wheel content copying unhappy.
    # pip wheel isn't used here because it puts all of the output
    # in the output dir and not the wheel cache, so it's not
    # possible to tell what is the wheel for the project and
    # what is the wheel cache.
    if [ -f setup.py ] ; then
        $PYCMD setup.py sdist bdist_wheel -d /output/wheels
    fi

    # Install everything so that the wheel cache is populated with
    # transitive depends. If a requirements.txt file exists, install
    # it directly so that people can use git url syntax to do things
    # like pick up patched but unreleased versions of dependencies.
    # Only do this for the main package (i.e. only write requirements
    # once).
    if [ -f /tmp/src/requirements.txt ] && [ ! -f /output/requirements.txt ] ; then
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels -r /tmp/src/requirements.txt
        cp /tmp/src/requirements.txt /output/requirements.txt
    fi
    # If we didn't build wheels, we can skip trying to install them.
    if [ $(ls -1 /output/wheels/*.whl 2>/dev/null | wc -l) -gt 0 ]; then
        $PIPCMD uninstall -y /output/wheels/*.whl
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels /output/wheels/*.whl
    fi
}

PACKAGES=$*
PIP_OPTS="${PIP_OPTS-}"

# bindep the main package
install_bindep

# go through ZUUL_SIBLINGS, if any, and build those wheels too
for sibling in ${ZUUL_SIBLINGS:-}; do
    pushd .zuul-siblings/${sibling}
    install_bindep
    popd
done

# Use a clean virtualenv for install steps to prevent things from the
# current environment making us not build a wheel.
# NOTE(pabelanger): We allow users to install distro python packages of
# libraries. This is important for projects that eventually want to produce
# an RPM or offline install.
$PYCMD -m venv /tmp/venv --system-site-packages --without-pip
source /tmp/venv/bin/activate

# If there is an upper-constraints.txt file in the source tree,
# use it in the pip commands.
if [ -f /tmp/src/upper-constraints.txt ] ; then
    cp /tmp/src/upper-constraints.txt /output/upper-constraints.txt
    CONSTRAINTS="-c /tmp/src/upper-constraints.txt"
fi

# If we got a list of packages, install them, otherwise install the
# main package.
if [[ $PACKAGES ]] ; then
    $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels $PACKAGES
    for package in $PACKAGES ; do
        echo "$package" >> /output/packages.txt
    done
else
    install_wheels
fi

# go through ZUUL_SIBLINGS, if any, and build those wheels too
for sibling in ${ZUUL_SIBLINGS:-}; do
    pushd .zuul-siblings/${sibling}
    install_wheels
    popd
done

if [ -z "$PKGMGR_PRESERVE_CACHE" ]; then
    $PKGMGR clean all
    rm -rf /var/cache/{dnf,yum}
fi

rm -rf /var/lib/dnf/history.*
rm -rf /var/log/{dnf.*,hawkey.log}
rm -rf /tmp/venv
@@ -1,110 +0,0 @@
#!/bin/bash
# Copyright (c) 2023 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#####################################################################
# Script to validate that Ansible and Ansible Runner are installed.
#
# Usage: check_ansible <PYCMD>
#
# Options:
#   PYCMD - The path to the python executable to use.
#####################################################################

set -x

PYCMD=$1

if [ -z "$PYCMD" ]
then
    echo "Usage: check_ansible <PYCMD>"
    exit 1
fi

if [ ! -x "$PYCMD" ]
then
    echo "$PYCMD is not an executable"
    exit 1
fi

ansible --version

if [ $? -ne 0 ]
then
    cat<<EOF
**********************************************************************
ERROR - Missing Ansible installation

An Ansible installation cannot be found in the final builder image.

Ansible must be installed in the final image. If you are using a
recent enough version of the execution environment file, you may
use the 'dependencies.ansible_core' configuration option to install
Ansible for you, or use 'additional_build_steps' to manually do
this yourself. Alternatively, use a base image with Ansible already
installed.
**********************************************************************
EOF
    exit 1
fi

ansible-runner --version

if [ $? -ne 0 ]
then
    cat<<EOF
**********************************************************************
ERROR - Missing Ansible Runner installation

An Ansible Runner installation cannot be found in the final builder
image.

Ansible Runner must be installed in the final image. If you are
using a recent enough version of the execution environment file, you
may use the 'dependencies.ansible_runner' configuration option to
install Ansible Runner for you, or use 'additional_build_steps' to
manually do this yourself. Alternatively, use a base image with
Ansible Runner already installed.
**********************************************************************
EOF
    exit 1
fi

$PYCMD -c 'import ansible ; import ansible_runner'

if [ $? -ne 0 ]
then
    cat<<EOF
**********************************************************************
ERROR - Missing Ansible or Ansible Runner for selected Python

An Ansible and/or Ansible Runner installation cannot be found in
the final builder image using the following Python interpreter:

$PYCMD

Ansible and Ansible Runner must be installed in the final image and
available to the selected Python interpreter. If you are using a
recent enough version of the execution environment file, you may use
the 'dependencies.ansible_core' configuration option to install
Ansible and the 'dependencies.ansible_runner' configuration option
to install Ansible Runner. You can also use 'additional_build_steps'
to manually do this yourself. Alternatively, use a base image with
Ansible and Ansible Runner already installed.
**********************************************************************
EOF
    exit 1
fi
exit 0
@@ -1,46 +0,0 @@
#!/bin/bash
# Copyright (c) 2023 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#####################################################################
# Script to validate that Ansible Galaxy is installed on the system.
#####################################################################

set -x

ansible-galaxy --version

if [ $? -ne 0 ]
then
    cat<<EOF
**********************************************************************
ERROR - Missing Ansible installation

The 'ansible-galaxy' command is not found in the base image. This
image is used to create the intermediary image that performs the
Galaxy collection and role installation process.

Ansible must be installed in the base image. If you are using a
recent enough version of the execution environment file, you may
use the 'dependencies.ansible_core' configuration option to install
Ansible for you, or use 'additional_build_steps' to manually do
this yourself. Alternatively, use a base image with Ansible already
installed.
**********************************************************************
EOF
    exit 1
fi

exit 0
@@ -1,152 +0,0 @@
#!/usr/bin/env bash

# Copyright: (c) 2023, Ansible Project
# Apache License, Version 2.0 (see LICENSE.md or https://www.apache.org/licenses/LICENSE-2.0)

# This entrypoint script papers over a number of problems that manifest under different container runtimes when
# using ephemeral UIDs, then chain-execs to the requested init system and/or command. It is an implementation
# detail for the convenience of Ansible execution environments built by ansible-builder.
#
# If we're running as a legit user that has an entry in /etc/passwd and a valid and writeable homedir, we're all good.
#
# If the current uid is not in /etc/passwd, we'll attempt to add it, but /etc/passwd is often not writable by GID 0.
# `ansible-builder` makes /etc/passwd writable by GID 0 by default for maximum compatibility, but this is
# not guaranteed. Some runtimes/wrappers (e.g. podman, cri-o) already create an /etc/passwd entry on the fly as needed,
# but they may set the homedir to something inaccessible (e.g. `/`, WORKDIR).
#
# There are numerous cases where a missing or incorrect homedir in /etc/passwd is fatal. It breaks
# `async` in ansible-core, things like `echo ~someuid`, and numerous other software packages that assume a valid POSIX
# user configuration.
#
# If the homedir listed in /etc/passwd is not writeable by the current user (supposed to be primary GID 0), we'll try
# to make it writeable (except `/`), or select another writeable home directory from `$HOME`, `/runner`, or `/tmp` and
# update $HOME (and /etc/passwd if possible) accordingly for the current process chain.
#
# This script is generally silent by default, but some likely-fatal cases will issue a brief warning to stderr. The
# envvars described below can be set before container init to cause faster failures and/or get tracing output.

# options:
#   EP_BASH_DEBUG=1 (enable set -x)
#   EP_DEBUG_TRACE=1 (enable debug trace to stderr)
#   EP_ON_ERROR=ignore/warn/fail (default warn)
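#
# Example (hypothetical invocation with an ephemeral, non-passwd UID):
#   podman run --rm --user 123456 -e EP_DEBUG_TRACE=1 -e EP_ON_ERROR=fail <ee-image> id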

set -eu

if (( "${EP_BASH_DEBUG:=0}" == 1 )); then
    set -x
fi

: "${EP_DEBUG_TRACE:=0}"
: "${EP_ON_ERROR:=warn}"
: "${HOME:=}"
CUR_UID=$(id -u)
CUR_USERNAME=$(id -u -n 2> /dev/null || true)  # whoami-free way to get current username, falls back to current uid

DEFAULT_HOME="/runner"
DEFAULT_SHELL="/bin/bash"

if (( "$EP_DEBUG_TRACE" == 1 )); then
    function log_debug() { echo "EP_DEBUG: $1" 1>&2; }
else
    function log_debug() { :; }
fi

log_debug "entrypoint.sh started"

case "$EP_ON_ERROR" in
    "fail")
        function maybe_fail() { echo "EP_FAIL: $1" 1>&2; exit 1; }
        ;;
    "warn")
        function maybe_fail() { echo "EP_WARN: $1" 1>&2; }
        ;;
    *)
        function maybe_fail() { log_debug "EP_FAIL (ignored): $1"; }
        ;;
esac

function is_dir_writable() {
    [ -d "$1" ] && [ -w "$1" ] && [ -x "$1" ]
}

function ensure_current_uid_in_passwd() {
    log_debug "is current uid ${CUR_UID} in /etc/passwd?"

    if ! getent passwd "${CUR_USERNAME}" &> /dev/null ; then
        if [ -w "/etc/passwd" ]; then
            log_debug "appending missing uid ${CUR_UID} into /etc/passwd"
            # use the default homedir; we may have to rewrite it to another value later if it's inaccessible
            echo "${CUR_UID}:x:${CUR_UID}:0:container user ${CUR_UID}:${DEFAULT_HOME}:${DEFAULT_SHELL}" >> /etc/passwd
        else
            maybe_fail "uid ${CUR_UID} is missing from /etc/passwd, which is not writable; this error is likely fatal"
        fi
    else
        log_debug "current uid is already in /etc/passwd"
    fi
}

function ensure_writeable_homedir() {
    if (is_dir_writable "${CANDIDATE_HOME}") ; then
        log_debug "candidate homedir ${CANDIDATE_HOME} is valid and writeable"
    else
        if [ "${CANDIDATE_HOME}" == "/" ]; then
            log_debug "skipping attempt to fix permissions on / as homedir"
            return 1
        fi

        log_debug "candidate homedir ${CANDIDATE_HOME} is missing or not writeable; attempt to fix"
        if ! (mkdir -p "${CANDIDATE_HOME}" >& /dev/null && chmod -R ug+rwx "${CANDIDATE_HOME}" >& /dev/null) ; then
            log_debug "candidate homedir ${CANDIDATE_HOME} cannot be made writeable"
            return 1
        else
            log_debug "candidate homedir ${CANDIDATE_HOME} was successfully made writeable"
        fi
    fi

    # this might work; export it even if we end up not being able to update /etc/passwd
    # this ensures the envvar matches current reality for this session; future sessions should set automatically if /etc/passwd is accurate
    export HOME=${CANDIDATE_HOME}

    if [ "${CANDIDATE_HOME}" == "${PASSWD_HOME}" ] ; then
        log_debug "candidate homedir ${CANDIDATE_HOME} matches /etc/passwd"
        return 0
    fi

    if ! [ -w /etc/passwd ]; then
        log_debug "candidate homedir ${CANDIDATE_HOME} is valid for ${CUR_USERNAME}, but /etc/passwd is not writable to update it"
        return 1
    fi

    log_debug "resetting homedir for user ${CUR_USERNAME} to ${CANDIDATE_HOME} in /etc/passwd"

    # sed -i wants to create a tempfile next to the original, which won't work with /etc permissions in many cases,
    # so just do it in memory and overwrite the existing file if we succeeded
    NEWPW=$(sed -r "s;(^${CUR_USERNAME}:(.*:){4})(.*:);\1${CANDIDATE_HOME}:;g" /etc/passwd)
    echo "${NEWPW}" > /etc/passwd
}

ensure_current_uid_in_passwd

log_debug "current value of HOME is ${HOME}"

PASSWD_HOME=$(getent passwd "${CUR_USERNAME}" | cut -d: -f6)
log_debug "user ${CUR_USERNAME} homedir from /etc/passwd is ${PASSWD_HOME}"

CANDIDATE_HOMES=("${PASSWD_HOME}" "${HOME}" "${DEFAULT_HOME}" "/tmp")

# we'll set this in the loop as soon as we find a writeable dir
unset HOME

for CANDIDATE_HOME in "${CANDIDATE_HOMES[@]}"; do
    if ensure_writeable_homedir ; then
        break
    fi
done

if ! [ -v HOME ] ; then
    maybe_fail "a valid homedir could not be set for ${CUR_USERNAME}; this is likely fatal"
fi

# chain exec whatever we were asked to run (ideally an init system) to keep any envvar state we've set
log_debug "chain exec-ing requested command $*"
exec "${@}"
@@ -1,107 +0,0 @@
#!/bin/bash
# Copyright (c) 2019 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -ex
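
# Consumes the artifacts the assemble script left under /output: bindep
# package lists in /output/bindep and the pip wheel cache in /output/wheels.
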
# NOTE(pabelanger): Allow users to force either microdnf or dnf as a package
# manager.
PKGMGR="${PKGMGR:-}"
PKGMGR_OPTS="${PKGMGR_OPTS:-}"
PKGMGR_PRESERVE_CACHE="${PKGMGR_PRESERVE_CACHE:-}"

PYCMD="${PYCMD:=/usr/bin/python3}"
PIPCMD="${PIPCMD:=$PYCMD -m pip}"
PIP_OPTS="${PIP_OPTS-}"

$PYCMD -m ensurepip

if [ -z "$PKGMGR" ]; then
    # Expect dnf to be installed, however if we find microdnf default to it.
    PKGMGR=/usr/bin/dnf
    if [ -f "/usr/bin/microdnf" ]; then
        PKGMGR=/usr/bin/microdnf
    fi
fi

if [ "$PKGMGR" = "/usr/bin/microdnf" ]
then
    if [ -z "$PKGMGR_OPTS" ]; then
        # NOTE(pabelanger): skip install docs and weak dependencies to
        # make smaller images. Sadly, setting these in dnf.conf doesn't
        # appear to work.
        PKGMGR_OPTS="--nodocs --setopt install_weak_deps=0"
    fi
fi

if [ -f /output/bindep/run.txt ] ; then
    PACKAGES=$(cat /output/bindep/run.txt)
    if [ ! -z "$PACKAGES" ]; then
        $PKGMGR install -y $PKGMGR_OPTS $PACKAGES
    fi
fi

if [ -f /output/bindep/epel.txt ] ; then
    EPEL_PACKAGES=$(cat /output/bindep/epel.txt)
    if [ ! -z "$EPEL_PACKAGES" ]; then
        $PKGMGR install -y $PKGMGR_OPTS --enablerepo epel $EPEL_PACKAGES
    fi
fi

# If there's a constraints file, use it.
if [ -f /output/upper-constraints.txt ] ; then
    CONSTRAINTS="-c /output/upper-constraints.txt"
fi

# If a requirements.txt file exists,
# install it directly so that people can use git url syntax
# to do things like pick up patched but unreleased versions
# of dependencies.
if [ -f /output/requirements.txt ] ; then
    $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels -r /output/requirements.txt
fi

# Add any requested extras to the list of things to install
EXTRAS=""
for extra in $* ; do
    EXTRAS="${EXTRAS} -r /output/$extra/requirements.txt"
done

if [ -f /output/packages.txt ] ; then
    # If a package list was passed to assemble, install that in the final
    # image.
    $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels -r /output/packages.txt $EXTRAS
else
    # Install the wheels. Uninstall any existing version as siblings may
    # be built with the same version number as the latest release, but we
    # really want the speculatively built wheels installed over any
    # automatic dependencies.
    # NOTE(pabelanger): It is possible a project may not have a wheel, but does have requirements.txt
    if [ $(ls -1 /output/wheels/*.whl 2>/dev/null | wc -l) -gt 0 ]; then
        $PIPCMD uninstall -y /output/wheels/*.whl
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels /output/wheels/*.whl $EXTRAS
    elif [ ! -z "$EXTRAS" ] ; then
        $PIPCMD uninstall -y $EXTRAS
        $PIPCMD install $CONSTRAINTS $PIP_OPTS --cache-dir=/output/wheels $EXTRAS
    fi
fi

# clean up after ourselves, unless requested to keep the cache
if [[ "$PKGMGR_PRESERVE_CACHE" != always ]]; then
    $PKGMGR clean all
    rm -rf /var/cache/{dnf,yum}
fi

rm -rf /var/lib/dnf/history.*
rm -rf /var/log/{dnf.*,hawkey.log}
@@ -1,400 +0,0 @@
import argparse
import logging
import os
import sys

import yaml

import requirements
import importlib.metadata

base_collections_path = '/usr/share/ansible/collections'
default_file = 'execution-environment.yml'
logger = logging.getLogger(__name__)


def line_is_empty(line):
    return bool((not line.strip()) or line.startswith('#'))


def read_req_file(path):
    """Provide some minimal error and display handling for file reading"""
    if not os.path.exists(path):
        print('Expected requirements file not present at: {0}'.format(os.path.abspath(path)))
        return ''  # treat a missing file as empty rather than crashing on open()
    with open(path, 'r') as f:
        return f.read()


def pip_file_data(path):
    pip_content = read_req_file(path)

    pip_lines = []
    for line in pip_content.split('\n'):
        if line_is_empty(line):
            continue
        if line.startswith('-r') or line.startswith('--requirement'):
            _, new_filename = line.split(None, 1)
            new_path = os.path.join(os.path.dirname(path or '.'), new_filename)
            pip_lines.extend(pip_file_data(new_path))
        else:
            pip_lines.append(line)

    return pip_lines


def bindep_file_data(path):
    sys_content = read_req_file(path)

    sys_lines = []
    for line in sys_content.split('\n'):
        if line_is_empty(line):
            continue
        sys_lines.append(line)

    return sys_lines


def process_collection(path):
    """Return a tuple of (python_dependencies, system_dependencies) for the
    collection install path given.
    Both items returned are a list of dependencies.

    :param str path: root directory of collection (this would contain galaxy.yml file)
    """
    CD = CollectionDefinition(path)

    py_file = CD.get_dependency('python')
    pip_lines = []
    if py_file:
        pip_lines = pip_file_data(os.path.join(path, py_file))

    sys_file = CD.get_dependency('system')
    bindep_lines = []
    if sys_file:
        bindep_lines = bindep_file_data(os.path.join(path, sys_file))

    return (pip_lines, bindep_lines)


def process(data_dir=base_collections_path, user_pip=None, user_bindep=None):
    paths = []
    path_root = os.path.join(data_dir, 'ansible_collections')

    # build a list of all the valid collection paths
    if os.path.exists(path_root):
        for namespace in sorted(os.listdir(path_root)):
            if not os.path.isdir(os.path.join(path_root, namespace)):
                continue
            for name in sorted(os.listdir(os.path.join(path_root, namespace))):
                collection_dir = os.path.join(path_root, namespace, name)
                if not os.path.isdir(collection_dir):
                    continue
                files_list = os.listdir(collection_dir)
                if 'galaxy.yml' in files_list or 'MANIFEST.json' in files_list:
                    paths.append(collection_dir)

    # populate the requirements content
    py_req = {}
    sys_req = {}
    for path in paths:
        col_pip_lines, col_sys_lines = process_collection(path)
        CD = CollectionDefinition(path)
        namespace, name = CD.namespace_name()
        key = '{}.{}'.format(namespace, name)

        if col_pip_lines:
            py_req[key] = col_pip_lines

        if col_sys_lines:
            sys_req[key] = col_sys_lines

    # add on entries from user files, if they are given
    if user_pip:
        col_pip_lines = pip_file_data(user_pip)
        if col_pip_lines:
            py_req['user'] = col_pip_lines
    if user_bindep:
        col_sys_lines = bindep_file_data(user_bindep)
        if col_sys_lines:
            sys_req['user'] = col_sys_lines

    return {
        'python': py_req,
        'system': sys_req
    }


def has_content(candidate_file):
    """Beyond checking that the candidate exists, this also assures
    that the file has something other than whitespace,
    which can cause errors when given to pip.
    """
    if not os.path.exists(candidate_file):
        return False
    with open(candidate_file, 'r') as f:
        content = f.read()
    return bool(content.strip().strip('\n'))


class CollectionDefinition:
    """This class represents the dependency metadata for a collection;
    it should be replaced by logic to hit the Galaxy API if that is made available.
    """

    def __init__(self, collection_path):
        self.reference_path = collection_path
        meta_file = os.path.join(collection_path, 'meta', default_file)
        if os.path.exists(meta_file):
            with open(meta_file, 'r') as f:
                self.raw = yaml.safe_load(f)
        else:
            self.raw = {'version': 1, 'dependencies': {}}
            # Automatically infer requirements for collection
            for entry, filename in [('python', 'requirements.txt'), ('system', 'bindep.txt')]:
                candidate_file = os.path.join(collection_path, filename)
                if has_content(candidate_file):
                    self.raw['dependencies'][entry] = filename
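
    # Example meta/execution-environment.yml consumed above (hypothetical contents):
    #   version: 1
    #   dependencies:
    #     python: requirements.txt
    #     system: bindep.txt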

    def target_dir(self):
        namespace, name = self.namespace_name()
        return os.path.join(
            base_collections_path, 'ansible_collections',
            namespace, name
        )

    def namespace_name(self):
        "Returns 2-tuple of namespace and name"
        path_parts = [p for p in self.reference_path.split(os.path.sep) if p]
        return tuple(path_parts[-2:])

    def get_dependency(self, entry):
        """A collection is only allowed to reference a file by a relative path
        which is relative to the collection root
        """
        req_file = self.raw.get('dependencies', {}).get(entry)
        if req_file is None:
            return None
        elif os.path.isabs(req_file):
            raise RuntimeError(
                'Collections must specify relative paths for requirements files. '
                'The file {0} specified by {1} violates this.'.format(
                    req_file, self.reference_path
                )
            )

        return req_file


def simple_combine(reqs):
    """Given a dictionary of requirement lines keyed off collections,
    return a list with the most basic of de-duplication logic,
    and comments indicating the sources based off the collection keys
    """
    consolidated = []
    fancy_lines = []
    for collection, lines in reqs.items():
        for line in lines:
            if line_is_empty(line):
                continue

            base_line = line.split('#')[0].strip()
            if base_line in consolidated:
                i = consolidated.index(base_line)
                fancy_lines[i] += ', {}'.format(collection)
            else:
                fancy_line = base_line + ' # from collection {}'.format(collection)
                consolidated.append(base_line)
                fancy_lines.append(fancy_line)

    return fancy_lines
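

# Worked example of simple_combine (hypothetical input):
#   simple_combine({'ns.a': ['pyyaml'], 'ns.b': ['pyyaml', 'six']})
#   -> ['pyyaml # from collection ns.a, ns.b', 'six # from collection ns.b']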


def parse_args(args=sys.argv[1:]):

    parser = argparse.ArgumentParser(
        prog='introspect',
        description=(
            'ansible-builder introspection; injected and used during execution environment build'
        )
    )

    subparsers = parser.add_subparsers(help='The command to invoke.', dest='action')
    subparsers.required = True

    create_introspect_parser(subparsers)

    args = parser.parse_args(args)

    return args


def run_introspect(args, logger):
    data = process(args.folder, user_pip=args.user_pip, user_bindep=args.user_bindep)
    if args.sanitize:
        logger.info('# Sanitized dependencies for %s', args.folder)
        data_for_write = data
        data['python'] = sanitize_requirements(data['python'])
        data['system'] = simple_combine(data['system'])
    else:
        logger.info('# Dependency data for %s', args.folder)
        data_for_write = data.copy()
        data_for_write['python'] = simple_combine(data['python'])
        data_for_write['system'] = simple_combine(data['system'])

    print('---')
    print(yaml.dump(data, default_flow_style=False))

    if args.write_pip and data.get('python'):
        write_file(args.write_pip, data_for_write.get('python') + [''])
    if args.write_bindep and data.get('system'):
        write_file(args.write_bindep, data_for_write.get('system') + [''])

    sys.exit(0)


def create_introspect_parser(parser):
    introspect_parser = parser.add_parser(
        'introspect',
        help='Introspects collections in folder.',
        description=(
            'Loops over collections in folder and returns data about dependencies. '
            'This is used internally and exposed here for verification. '
            'This is targeted toward collection authors and maintainers.'
        )
    )
    introspect_parser.add_argument('--sanitize', action='store_true',
                                   help=('Sanitize and de-duplicate requirements. '
                                         'This is normally done separately from the introspect script, but this '
                                         'option is given to more accurately test collection content.'))

    introspect_parser.add_argument(
        'folder', default=base_collections_path, nargs='?',
        help=(
            'Ansible collections path(s) to introspect. '
            'This should have a folder named ansible_collections inside of it.'
        )
    )
    # Combine user requirements and collection requirements into a single file;
    # in the future, could look into passing multiple files to the
    # python-builder scripts as opposed to this.
    introspect_parser.add_argument(
        '--user-pip', dest='user_pip',
        help='An additional file to combine with collection pip requirements.'
    )
    introspect_parser.add_argument(
        '--user-bindep', dest='user_bindep',
        help='An additional file to combine with collection bindep requirements.'
    )
    introspect_parser.add_argument(
        '--write-pip', dest='write_pip',
        help='Write the combined pip requirements file to this location.'
    )
    introspect_parser.add_argument(
        '--write-bindep', dest='write_bindep',
        help='Write the combined bindep requirements file to this location.'
    )

    return introspect_parser


EXCLUDE_REQUIREMENTS = frozenset((
    # obviously already satisfied or unwanted
    'ansible', 'ansible-base', 'python', 'ansible-core',
    # general python test requirements
    'tox', 'pycodestyle', 'yamllint', 'pylint',
    'flake8', 'pytest', 'pytest-xdist', 'coverage', 'mock', 'testinfra',
    # test requirements highly specific to Ansible testing
    'ansible-lint', 'molecule', 'galaxy-importer', 'voluptuous',
    # already present in image for py3 environments
    'yaml', 'pyyaml', 'json',
))


def sanitize_requirements(collection_py_reqs):
    """
    Cleanup Python requirements by removing duplicates and excluded packages.

    The user requirements file will go through the deduplication process, but
    skips the special package exclusion process.

    :param dict collection_py_reqs: A dict of lists of Python requirements, keyed
        by fully qualified collection name. The special key `user` holds requirements
        from the user specified requirements file from the ``--user-pip`` CLI option.

    :returns: A finalized list of sanitized Python requirements.
    """
    # de-duplication
    consolidated = []
    seen_pkgs = set()

    for collection, lines in collection_py_reqs.items():
        try:
            for req in requirements.parse('\n'.join(lines)):
                if req.specifier:
                    req.name = importlib.metadata.Prepared(req.name).normalized
                req.collections = [collection]  # add backref for later
                if req.name is None:
                    consolidated.append(req)
                    continue
                if req.name in seen_pkgs:
                    for prior_req in consolidated:
                        if req.name == prior_req.name:
                            prior_req.specs.extend(req.specs)
                            prior_req.collections.append(collection)
                            break
                    continue
                consolidated.append(req)
                seen_pkgs.add(req.name)
        except Exception as e:
            logger.warning('Warning: failed to parse requirements from %s, error: %s', collection, e)

    # removal of unwanted packages
    sanitized = []
    for req in consolidated:
        # Exclude packages, unless present in the user supplied requirements.
        if req.name and req.name.lower() in EXCLUDE_REQUIREMENTS and 'user' not in req.collections:
            logger.debug('# Excluding requirement %s from %s', req.name, req.collections)
            continue
        if req.vcs or req.uri:
            # Requirements like git+ or http(s) URLs are returned as-is
            new_line = req.line
        elif req.name:
            specs = ['{0}{1}'.format(cmp, ver) for cmp, ver in req.specs]
            new_line = req.name + ','.join(specs)
        else:
            raise RuntimeError('Could not process {0}'.format(req.line))

        sanitized.append(new_line + ' # from collection {}'.format(','.join(req.collections)))

    return sanitized
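

# Worked example of sanitize_requirements (hypothetical input):
#   sanitize_requirements({'ns.col': ['pytest', 'six>=1.0']})
#   -> ['six>=1.0 # from collection ns.col']
# ('pytest' is dropped via EXCLUDE_REQUIREMENTS; the same line under the
# special 'user' key would have been kept.)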


def write_file(filename: str, lines: list) -> bool:
    parent_dir = os.path.dirname(filename)
    if parent_dir and not os.path.exists(parent_dir):
        logger.warning('Creating parent directory for %s', filename)
        os.makedirs(parent_dir)
    new_text = '\n'.join(lines)
    if os.path.exists(filename):
        with open(filename, 'r') as f:
            if f.read() == new_text:
                logger.debug("File %s is already up-to-date.", filename)
                return False
            else:
                logger.warning('File %s had modifications and will be rewritten', filename)
    with open(filename, 'w') as f:
        f.write(new_text)
    return True


def main():
    args = parse_args()

    if args.action == 'introspect':
        run_introspect(args, logger)

    logger.error("An error has occurred.")
    sys.exit(1)


if __name__ == '__main__':
    main()