mirror of
https://github.com/OpenHands/OpenHands.git
synced 2025-12-26 05:48:36 +08:00
Fix issue #4896: [Bug]: Fix failing workflows
This commit is contained in:
parent
bf8ccc8fc3
commit
ff5bd2cc94
744
get-docker.sh
Normal file
744
get-docker.sh
Normal file
@ -0,0 +1,744 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
# Docker Engine for Linux installation script.
|
||||
#
|
||||
# This script is intended as a convenient way to configure docker's package
|
||||
# repositories and to install Docker Engine, This script is not recommended
|
||||
# for production environments. Before running this script, make yourself familiar
|
||||
# with potential risks and limitations, and refer to the installation manual
|
||||
# at https://docs.docker.com/engine/install/ for alternative installation methods.
|
||||
#
|
||||
# The script:
|
||||
#
|
||||
# - Requires `root` or `sudo` privileges to run.
|
||||
# - Attempts to detect your Linux distribution and version and configure your
|
||||
# package management system for you.
|
||||
# - Doesn't allow you to customize most installation parameters.
|
||||
# - Installs dependencies and recommendations without asking for confirmation.
|
||||
# - Installs the latest stable release (by default) of Docker CLI, Docker Engine,
|
||||
# Docker Buildx, Docker Compose, containerd, and runc. When using this script
|
||||
# to provision a machine, this may result in unexpected major version upgrades
|
||||
# of these packages. Always test upgrades in a test environment before
|
||||
# deploying to your production systems.
|
||||
# - Isn't designed to upgrade an existing Docker installation. When using the
|
||||
# script to update an existing installation, dependencies may not be updated
|
||||
# to the expected version, resulting in outdated versions.
|
||||
#
|
||||
# Source code is available at https://github.com/docker/docker-install/
|
||||
#
|
||||
# Usage
|
||||
# ==============================================================================
|
||||
#
|
||||
# To install the latest stable versions of Docker CLI, Docker Engine, and their
|
||||
# dependencies:
|
||||
#
|
||||
# 1. download the script
|
||||
#
|
||||
# $ curl -fsSL https://get.docker.com -o install-docker.sh
|
||||
#
|
||||
# 2. verify the script's content
|
||||
#
|
||||
# $ cat install-docker.sh
|
||||
#
|
||||
# 3. run the script with --dry-run to verify the steps it executes
|
||||
#
|
||||
# $ sh install-docker.sh --dry-run
|
||||
#
|
||||
# 4. run the script either as root, or using sudo to perform the installation.
|
||||
#
|
||||
# $ sudo sh install-docker.sh
|
||||
#
|
||||
# Command-line options
|
||||
# ==============================================================================
|
||||
#
|
||||
# --version <VERSION>
|
||||
# Use the --version option to install a specific version, for example:
|
||||
#
|
||||
# $ sudo sh install-docker.sh --version 23.0
|
||||
#
|
||||
# --channel <stable|test>
|
||||
#
|
||||
# Use the --channel option to install from an alternative installation channel.
|
||||
# The following example installs the latest versions from the "test" channel,
|
||||
# which includes pre-releases (alpha, beta, rc):
|
||||
#
|
||||
# $ sudo sh install-docker.sh --channel test
|
||||
#
|
||||
# Alternatively, use the script at https://test.docker.com, which uses the test
|
||||
# channel as default.
|
||||
#
|
||||
# --mirror <Aliyun|AzureChinaCloud>
|
||||
#
|
||||
# Use the --mirror option to install from a mirror supported by this script.
|
||||
# Available mirrors are "Aliyun" (https://mirrors.aliyun.com/docker-ce), and
|
||||
# "AzureChinaCloud" (https://mirror.azure.cn/docker-ce), for example:
|
||||
#
|
||||
# $ sudo sh install-docker.sh --mirror AzureChinaCloud
|
||||
#
|
||||
# ==============================================================================
|
||||
|
||||
|
||||
# Git commit from https://github.com/docker/docker-install when
|
||||
# the script was uploaded (Should only be modified by upload job):
|
||||
SCRIPT_COMMIT_SHA="711a0d41213afabc30b963f82c56e1442a3efe1c"
|
||||
|
||||
# strip "v" prefix if present
|
||||
VERSION="${VERSION#v}"
|
||||
|
||||
# The channel to install from:
|
||||
# * stable
|
||||
# * test
|
||||
DEFAULT_CHANNEL_VALUE="stable"
|
||||
if [ -z "$CHANNEL" ]; then
|
||||
CHANNEL=$DEFAULT_CHANNEL_VALUE
|
||||
fi
|
||||
|
||||
DEFAULT_DOWNLOAD_URL="https://download.docker.com"
|
||||
if [ -z "$DOWNLOAD_URL" ]; then
|
||||
DOWNLOAD_URL=$DEFAULT_DOWNLOAD_URL
|
||||
fi
|
||||
|
||||
DEFAULT_REPO_FILE="docker-ce.repo"
|
||||
if [ -z "$REPO_FILE" ]; then
|
||||
REPO_FILE="$DEFAULT_REPO_FILE"
|
||||
fi
|
||||
|
||||
mirror=''
|
||||
DRY_RUN=${DRY_RUN:-}
|
||||
while [ $# -gt 0 ]; do
|
||||
case "$1" in
|
||||
--channel)
|
||||
CHANNEL="$2"
|
||||
shift
|
||||
;;
|
||||
--dry-run)
|
||||
DRY_RUN=1
|
||||
;;
|
||||
--mirror)
|
||||
mirror="$2"
|
||||
shift
|
||||
;;
|
||||
--version)
|
||||
VERSION="${2#v}"
|
||||
shift
|
||||
;;
|
||||
--*)
|
||||
echo "Illegal option $1"
|
||||
;;
|
||||
esac
|
||||
shift $(( $# > 0 ? 1 : 0 ))
|
||||
done
|
||||
|
||||
case "$mirror" in
|
||||
Aliyun)
|
||||
DOWNLOAD_URL="https://mirrors.aliyun.com/docker-ce"
|
||||
;;
|
||||
AzureChinaCloud)
|
||||
DOWNLOAD_URL="https://mirror.azure.cn/docker-ce"
|
||||
;;
|
||||
"")
|
||||
;;
|
||||
*)
|
||||
>&2 echo "unknown mirror '$mirror': use either 'Aliyun', or 'AzureChinaCloud'."
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
case "$CHANNEL" in
|
||||
stable|test)
|
||||
;;
|
||||
*)
|
||||
>&2 echo "unknown CHANNEL '$CHANNEL': use either stable or test."
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
command_exists() {
|
||||
command -v "$@" > /dev/null 2>&1
|
||||
}
|
||||
|
||||
# version_gte checks if the version specified in $VERSION is at least the given
|
||||
# SemVer (Maj.Minor[.Patch]), or CalVer (YY.MM) version.It returns 0 (success)
|
||||
# if $VERSION is either unset (=latest) or newer or equal than the specified
|
||||
# version, or returns 1 (fail) otherwise.
|
||||
#
|
||||
# examples:
|
||||
#
|
||||
# VERSION=23.0
|
||||
# version_gte 23.0 // 0 (success)
|
||||
# version_gte 20.10 // 0 (success)
|
||||
# version_gte 19.03 // 0 (success)
|
||||
# version_gte 26.1 // 1 (fail)
|
||||
version_gte() {
|
||||
if [ -z "$VERSION" ]; then
|
||||
return 0
|
||||
fi
|
||||
version_compare "$VERSION" "$1"
|
||||
}
|
||||
|
||||
# version_compare compares two version strings (either SemVer (Major.Minor.Path),
|
||||
# or CalVer (YY.MM) version strings. It returns 0 (success) if version A is newer
|
||||
# or equal than version B, or 1 (fail) otherwise. Patch releases and pre-release
|
||||
# (-alpha/-beta) are not taken into account
|
||||
#
|
||||
# examples:
|
||||
#
|
||||
# version_compare 23.0.0 20.10 // 0 (success)
|
||||
# version_compare 23.0 20.10 // 0 (success)
|
||||
# version_compare 20.10 19.03 // 0 (success)
|
||||
# version_compare 20.10 20.10 // 0 (success)
|
||||
# version_compare 19.03 20.10 // 1 (fail)
|
||||
version_compare() (
|
||||
set +x
|
||||
|
||||
yy_a="$(echo "$1" | cut -d'.' -f1)"
|
||||
yy_b="$(echo "$2" | cut -d'.' -f1)"
|
||||
if [ "$yy_a" -lt "$yy_b" ]; then
|
||||
return 1
|
||||
fi
|
||||
if [ "$yy_a" -gt "$yy_b" ]; then
|
||||
return 0
|
||||
fi
|
||||
mm_a="$(echo "$1" | cut -d'.' -f2)"
|
||||
mm_b="$(echo "$2" | cut -d'.' -f2)"
|
||||
|
||||
# trim leading zeros to accommodate CalVer
|
||||
mm_a="${mm_a#0}"
|
||||
mm_b="${mm_b#0}"
|
||||
|
||||
if [ "${mm_a:-0}" -lt "${mm_b:-0}" ]; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
)
|
||||
|
||||
is_dry_run() {
|
||||
if [ -z "$DRY_RUN" ]; then
|
||||
return 1
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
is_wsl() {
|
||||
case "$(uname -r)" in
|
||||
*microsoft* ) true ;; # WSL 2
|
||||
*Microsoft* ) true ;; # WSL 1
|
||||
* ) false;;
|
||||
esac
|
||||
}
|
||||
|
||||
is_darwin() {
|
||||
case "$(uname -s)" in
|
||||
*darwin* ) true ;;
|
||||
*Darwin* ) true ;;
|
||||
* ) false;;
|
||||
esac
|
||||
}
|
||||
|
||||
deprecation_notice() {
|
||||
distro=$1
|
||||
distro_version=$2
|
||||
echo
|
||||
printf "\033[91;1mDEPRECATION WARNING\033[0m\n"
|
||||
printf " This Linux distribution (\033[1m%s %s\033[0m) reached end-of-life and is no longer supported by this script.\n" "$distro" "$distro_version"
|
||||
echo " No updates or security fixes will be released for this distribution, and users are recommended"
|
||||
echo " to upgrade to a currently maintained version of $distro."
|
||||
echo
|
||||
printf "Press \033[1mCtrl+C\033[0m now to abort this script, or wait for the installation to continue."
|
||||
echo
|
||||
sleep 10
|
||||
}
|
||||
|
||||
get_distribution() {
|
||||
lsb_dist=""
|
||||
# Every system that we officially support has /etc/os-release
|
||||
if [ -r /etc/os-release ]; then
|
||||
lsb_dist="$(. /etc/os-release && echo "$ID")"
|
||||
fi
|
||||
# Returning an empty string here should be alright since the
|
||||
# case statements don't act unless you provide an actual value
|
||||
echo "$lsb_dist"
|
||||
}
|
||||
|
||||
echo_docker_as_nonroot() {
|
||||
if is_dry_run; then
|
||||
return
|
||||
fi
|
||||
if command_exists docker && [ -e /var/run/docker.sock ]; then
|
||||
(
|
||||
set -x
|
||||
$sh_c 'docker version'
|
||||
) || true
|
||||
fi
|
||||
|
||||
# intentionally mixed spaces and tabs here -- tabs are stripped by "<<-EOF", spaces are kept in the output
|
||||
echo
|
||||
echo "================================================================================"
|
||||
echo
|
||||
if version_gte "20.10"; then
|
||||
echo "To run Docker as a non-privileged user, consider setting up the"
|
||||
echo "Docker daemon in rootless mode for your user:"
|
||||
echo
|
||||
echo " dockerd-rootless-setuptool.sh install"
|
||||
echo
|
||||
echo "Visit https://docs.docker.com/go/rootless/ to learn about rootless mode."
|
||||
echo
|
||||
fi
|
||||
echo
|
||||
echo "To run the Docker daemon as a fully privileged service, but granting non-root"
|
||||
echo "users access, refer to https://docs.docker.com/go/daemon-access/"
|
||||
echo
|
||||
echo "WARNING: Access to the remote API on a privileged Docker daemon is equivalent"
|
||||
echo " to root access on the host. Refer to the 'Docker daemon attack surface'"
|
||||
echo " documentation for details: https://docs.docker.com/go/attack-surface/"
|
||||
echo
|
||||
echo "================================================================================"
|
||||
echo
|
||||
}
|
||||
|
||||
# Check if this is a forked Linux distro
|
||||
check_forked() {
|
||||
|
||||
# Check for lsb_release command existence, it usually exists in forked distros
|
||||
if command_exists lsb_release; then
|
||||
# Check if the `-u` option is supported
|
||||
set +e
|
||||
lsb_release -a -u > /dev/null 2>&1
|
||||
lsb_release_exit_code=$?
|
||||
set -e
|
||||
|
||||
# Check if the command has exited successfully, it means we're in a forked distro
|
||||
if [ "$lsb_release_exit_code" = "0" ]; then
|
||||
# Print info about current distro
|
||||
cat <<-EOF
|
||||
You're using '$lsb_dist' version '$dist_version'.
|
||||
EOF
|
||||
|
||||
# Get the upstream release info
|
||||
lsb_dist=$(lsb_release -a -u 2>&1 | tr '[:upper:]' '[:lower:]' | grep -E 'id' | cut -d ':' -f 2 | tr -d '[:space:]')
|
||||
dist_version=$(lsb_release -a -u 2>&1 | tr '[:upper:]' '[:lower:]' | grep -E 'codename' | cut -d ':' -f 2 | tr -d '[:space:]')
|
||||
|
||||
# Print info about upstream distro
|
||||
cat <<-EOF
|
||||
Upstream release is '$lsb_dist' version '$dist_version'.
|
||||
EOF
|
||||
else
|
||||
if [ -r /etc/debian_version ] && [ "$lsb_dist" != "ubuntu" ] && [ "$lsb_dist" != "raspbian" ]; then
|
||||
if [ "$lsb_dist" = "osmc" ]; then
|
||||
# OSMC runs Raspbian
|
||||
lsb_dist=raspbian
|
||||
else
|
||||
# We're Debian and don't even know it!
|
||||
lsb_dist=debian
|
||||
fi
|
||||
dist_version="$(sed 's/\/.*//' /etc/debian_version | sed 's/\..*//')"
|
||||
case "$dist_version" in
|
||||
12)
|
||||
dist_version="bookworm"
|
||||
;;
|
||||
11)
|
||||
dist_version="bullseye"
|
||||
;;
|
||||
10)
|
||||
dist_version="buster"
|
||||
;;
|
||||
9)
|
||||
dist_version="stretch"
|
||||
;;
|
||||
8)
|
||||
dist_version="jessie"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
do_install() {
|
||||
echo "# Executing docker install script, commit: $SCRIPT_COMMIT_SHA"
|
||||
|
||||
if command_exists docker; then
|
||||
cat >&2 <<-'EOF'
|
||||
Warning: the "docker" command appears to already exist on this system.
|
||||
|
||||
If you already have Docker installed, this script can cause trouble, which is
|
||||
why we're displaying this warning and provide the opportunity to cancel the
|
||||
installation.
|
||||
|
||||
If you installed the current Docker package using this script and are using it
|
||||
again to update Docker, you can safely ignore this message.
|
||||
|
||||
You may press Ctrl+C now to abort this script.
|
||||
EOF
|
||||
( set -x; sleep 20 )
|
||||
fi
|
||||
|
||||
user="$(id -un 2>/dev/null || true)"
|
||||
|
||||
sh_c='sh -c'
|
||||
if [ "$user" != 'root' ]; then
|
||||
if command_exists sudo; then
|
||||
sh_c='sudo -E sh -c'
|
||||
elif command_exists su; then
|
||||
sh_c='su -c'
|
||||
else
|
||||
cat >&2 <<-'EOF'
|
||||
Error: this installer needs the ability to run commands as root.
|
||||
We are unable to find either "sudo" or "su" available to make this happen.
|
||||
EOF
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if is_dry_run; then
|
||||
sh_c="echo"
|
||||
fi
|
||||
|
||||
# perform some very rudimentary platform detection
|
||||
lsb_dist=$( get_distribution )
|
||||
lsb_dist="$(echo "$lsb_dist" | tr '[:upper:]' '[:lower:]')"
|
||||
|
||||
if is_wsl; then
|
||||
echo
|
||||
echo "WSL DETECTED: We recommend using Docker Desktop for Windows."
|
||||
echo "Please get Docker Desktop from https://www.docker.com/products/docker-desktop/"
|
||||
echo
|
||||
cat >&2 <<-'EOF'
|
||||
|
||||
You may press Ctrl+C now to abort this script.
|
||||
EOF
|
||||
( set -x; sleep 20 )
|
||||
fi
|
||||
|
||||
case "$lsb_dist" in
|
||||
|
||||
ubuntu)
|
||||
if command_exists lsb_release; then
|
||||
dist_version="$(lsb_release --codename | cut -f2)"
|
||||
fi
|
||||
if [ -z "$dist_version" ] && [ -r /etc/lsb-release ]; then
|
||||
dist_version="$(. /etc/lsb-release && echo "$DISTRIB_CODENAME")"
|
||||
fi
|
||||
;;
|
||||
|
||||
debian|raspbian)
|
||||
dist_version="$(sed 's/\/.*//' /etc/debian_version | sed 's/\..*//')"
|
||||
case "$dist_version" in
|
||||
12)
|
||||
dist_version="bookworm"
|
||||
;;
|
||||
11)
|
||||
dist_version="bullseye"
|
||||
;;
|
||||
10)
|
||||
dist_version="buster"
|
||||
;;
|
||||
9)
|
||||
dist_version="stretch"
|
||||
;;
|
||||
8)
|
||||
dist_version="jessie"
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
|
||||
centos|rhel)
|
||||
if [ -z "$dist_version" ] && [ -r /etc/os-release ]; then
|
||||
dist_version="$(. /etc/os-release && echo "$VERSION_ID")"
|
||||
fi
|
||||
;;
|
||||
|
||||
*)
|
||||
if command_exists lsb_release; then
|
||||
dist_version="$(lsb_release --release | cut -f2)"
|
||||
fi
|
||||
if [ -z "$dist_version" ] && [ -r /etc/os-release ]; then
|
||||
dist_version="$(. /etc/os-release && echo "$VERSION_ID")"
|
||||
fi
|
||||
;;
|
||||
|
||||
esac
|
||||
|
||||
# Check if this is a forked Linux distro
|
||||
check_forked
|
||||
|
||||
# Print deprecation warnings for distro versions that recently reached EOL,
|
||||
# but may still be commonly used (especially LTS versions).
|
||||
case "$lsb_dist.$dist_version" in
|
||||
centos.8|centos.7|rhel.7)
|
||||
deprecation_notice "$lsb_dist" "$dist_version"
|
||||
;;
|
||||
debian.buster|debian.stretch|debian.jessie)
|
||||
deprecation_notice "$lsb_dist" "$dist_version"
|
||||
;;
|
||||
raspbian.buster|raspbian.stretch|raspbian.jessie)
|
||||
deprecation_notice "$lsb_dist" "$dist_version"
|
||||
;;
|
||||
ubuntu.bionic|ubuntu.xenial|ubuntu.trusty)
|
||||
deprecation_notice "$lsb_dist" "$dist_version"
|
||||
;;
|
||||
ubuntu.mantic|ubuntu.lunar|ubuntu.kinetic|ubuntu.impish|ubuntu.hirsute|ubuntu.groovy|ubuntu.eoan|ubuntu.disco|ubuntu.cosmic)
|
||||
deprecation_notice "$lsb_dist" "$dist_version"
|
||||
;;
|
||||
fedora.*)
|
||||
if [ "$dist_version" -lt 39 ]; then
|
||||
deprecation_notice "$lsb_dist" "$dist_version"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
# Run setup for each distro accordingly
|
||||
case "$lsb_dist" in
|
||||
ubuntu|debian|raspbian)
|
||||
pre_reqs="ca-certificates curl"
|
||||
apt_repo="deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] $DOWNLOAD_URL/linux/$lsb_dist $dist_version $CHANNEL"
|
||||
(
|
||||
if ! is_dry_run; then
|
||||
set -x
|
||||
fi
|
||||
$sh_c 'apt-get -qq update >/dev/null'
|
||||
$sh_c "DEBIAN_FRONTEND=noninteractive apt-get -y -qq install $pre_reqs >/dev/null"
|
||||
$sh_c 'install -m 0755 -d /etc/apt/keyrings'
|
||||
$sh_c "curl -fsSL \"$DOWNLOAD_URL/linux/$lsb_dist/gpg\" -o /etc/apt/keyrings/docker.asc"
|
||||
$sh_c "chmod a+r /etc/apt/keyrings/docker.asc"
|
||||
$sh_c "echo \"$apt_repo\" > /etc/apt/sources.list.d/docker.list"
|
||||
$sh_c 'apt-get -qq update >/dev/null'
|
||||
)
|
||||
pkg_version=""
|
||||
if [ -n "$VERSION" ]; then
|
||||
if is_dry_run; then
|
||||
echo "# WARNING: VERSION pinning is not supported in DRY_RUN"
|
||||
else
|
||||
# Will work for incomplete versions IE (17.12), but may not actually grab the "latest" if in the test channel
|
||||
pkg_pattern="$(echo "$VERSION" | sed 's/-ce-/~ce~.*/g' | sed 's/-/.*/g')"
|
||||
search_command="apt-cache madison docker-ce | grep '$pkg_pattern' | head -1 | awk '{\$1=\$1};1' | cut -d' ' -f 3"
|
||||
pkg_version="$($sh_c "$search_command")"
|
||||
echo "INFO: Searching repository for VERSION '$VERSION'"
|
||||
echo "INFO: $search_command"
|
||||
if [ -z "$pkg_version" ]; then
|
||||
echo
|
||||
echo "ERROR: '$VERSION' not found amongst apt-cache madison results"
|
||||
echo
|
||||
exit 1
|
||||
fi
|
||||
if version_gte "18.09"; then
|
||||
search_command="apt-cache madison docker-ce-cli | grep '$pkg_pattern' | head -1 | awk '{\$1=\$1};1' | cut -d' ' -f 3"
|
||||
echo "INFO: $search_command"
|
||||
cli_pkg_version="=$($sh_c "$search_command")"
|
||||
fi
|
||||
pkg_version="=$pkg_version"
|
||||
fi
|
||||
fi
|
||||
(
|
||||
pkgs="docker-ce${pkg_version%=}"
|
||||
if version_gte "18.09"; then
|
||||
# older versions didn't ship the cli and containerd as separate packages
|
||||
pkgs="$pkgs docker-ce-cli${cli_pkg_version%=} containerd.io"
|
||||
fi
|
||||
if version_gte "20.10"; then
|
||||
pkgs="$pkgs docker-compose-plugin docker-ce-rootless-extras$pkg_version"
|
||||
fi
|
||||
if version_gte "23.0"; then
|
||||
pkgs="$pkgs docker-buildx-plugin"
|
||||
fi
|
||||
if ! is_dry_run; then
|
||||
set -x
|
||||
fi
|
||||
$sh_c "DEBIAN_FRONTEND=noninteractive apt-get -y -qq install $pkgs >/dev/null"
|
||||
)
|
||||
echo_docker_as_nonroot
|
||||
exit 0
|
||||
;;
|
||||
centos|fedora|rhel)
|
||||
repo_file_url="$DOWNLOAD_URL/linux/$lsb_dist/$REPO_FILE"
|
||||
(
|
||||
if ! is_dry_run; then
|
||||
set -x
|
||||
fi
|
||||
if command_exists dnf5; then
|
||||
$sh_c "dnf -y -q --setopt=install_weak_deps=False install dnf-plugins-core"
|
||||
$sh_c "dnf5 config-manager addrepo --save-filename=docker-ce.repo --from-repofile='$repo_file_url'"
|
||||
|
||||
if [ "$CHANNEL" != "stable" ]; then
|
||||
$sh_c "dnf5 config-manager setopt \"docker-ce-*.enabled=0\""
|
||||
$sh_c "dnf5 config-manager setopt \"docker-ce-$CHANNEL.enabled=1\""
|
||||
fi
|
||||
$sh_c "dnf makecache"
|
||||
elif command_exists dnf; then
|
||||
$sh_c "dnf -y -q --setopt=install_weak_deps=False install dnf-plugins-core"
|
||||
$sh_c "dnf config-manager --add-repo $repo_file_url"
|
||||
|
||||
if [ "$CHANNEL" != "stable" ]; then
|
||||
$sh_c "dnf config-manager --set-disabled \"docker-ce-*\""
|
||||
$sh_c "dnf config-manager --set-enabled \"docker-ce-$CHANNEL\""
|
||||
fi
|
||||
$sh_c "dnf makecache"
|
||||
else
|
||||
$sh_c "yum -y -q install yum-utils"
|
||||
$sh_c "yum-config-manager --add-repo $repo_file_url"
|
||||
|
||||
if [ "$CHANNEL" != "stable" ]; then
|
||||
$sh_c "yum-config-manager --disable \"docker-ce-*\""
|
||||
$sh_c "yum-config-manager --enable \"docker-ce-$CHANNEL\""
|
||||
fi
|
||||
$sh_c "yum makecache"
|
||||
fi
|
||||
)
|
||||
pkg_version=""
|
||||
if command_exists dnf; then
|
||||
pkg_manager="dnf"
|
||||
pkg_manager_flags="-y -q --best"
|
||||
else
|
||||
pkg_manager="yum"
|
||||
pkg_manager_flags="-y -q"
|
||||
fi
|
||||
if [ -n "$VERSION" ]; then
|
||||
if is_dry_run; then
|
||||
echo "# WARNING: VERSION pinning is not supported in DRY_RUN"
|
||||
else
|
||||
if [ "$lsb_dist" = "fedora" ]; then
|
||||
pkg_suffix="fc$dist_version"
|
||||
else
|
||||
pkg_suffix="el"
|
||||
fi
|
||||
pkg_pattern="$(echo "$VERSION" | sed 's/-ce-/\\\\.ce.*/g' | sed 's/-/.*/g').*$pkg_suffix"
|
||||
search_command="$pkg_manager list --showduplicates docker-ce | grep '$pkg_pattern' | tail -1 | awk '{print \$2}'"
|
||||
pkg_version="$($sh_c "$search_command")"
|
||||
echo "INFO: Searching repository for VERSION '$VERSION'"
|
||||
echo "INFO: $search_command"
|
||||
if [ -z "$pkg_version" ]; then
|
||||
echo
|
||||
echo "ERROR: '$VERSION' not found amongst $pkg_manager list results"
|
||||
echo
|
||||
exit 1
|
||||
fi
|
||||
if version_gte "18.09"; then
|
||||
# older versions don't support a cli package
|
||||
search_command="$pkg_manager list --showduplicates docker-ce-cli | grep '$pkg_pattern' | tail -1 | awk '{print \$2}'"
|
||||
cli_pkg_version="$($sh_c "$search_command" | cut -d':' -f 2)"
|
||||
fi
|
||||
# Cut out the epoch and prefix with a '-'
|
||||
pkg_version="-$(echo "$pkg_version" | cut -d':' -f 2)"
|
||||
fi
|
||||
fi
|
||||
(
|
||||
pkgs="docker-ce$pkg_version"
|
||||
if version_gte "18.09"; then
|
||||
# older versions didn't ship the cli and containerd as separate packages
|
||||
if [ -n "$cli_pkg_version" ]; then
|
||||
pkgs="$pkgs docker-ce-cli-$cli_pkg_version containerd.io"
|
||||
else
|
||||
pkgs="$pkgs docker-ce-cli containerd.io"
|
||||
fi
|
||||
fi
|
||||
if version_gte "20.10"; then
|
||||
pkgs="$pkgs docker-compose-plugin docker-ce-rootless-extras$pkg_version"
|
||||
fi
|
||||
if version_gte "23.0"; then
|
||||
pkgs="$pkgs docker-buildx-plugin"
|
||||
fi
|
||||
if ! is_dry_run; then
|
||||
set -x
|
||||
fi
|
||||
$sh_c "$pkg_manager $pkg_manager_flags install $pkgs"
|
||||
)
|
||||
echo_docker_as_nonroot
|
||||
exit 0
|
||||
;;
|
||||
sles)
|
||||
if [ "$(uname -m)" != "s390x" ]; then
|
||||
echo "Packages for SLES are currently only available for s390x"
|
||||
exit 1
|
||||
fi
|
||||
repo_file_url="$DOWNLOAD_URL/linux/$lsb_dist/$REPO_FILE"
|
||||
pre_reqs="ca-certificates curl libseccomp2 awk"
|
||||
(
|
||||
if ! is_dry_run; then
|
||||
set -x
|
||||
fi
|
||||
$sh_c "zypper install -y $pre_reqs"
|
||||
$sh_c "zypper addrepo $repo_file_url"
|
||||
if ! is_dry_run; then
|
||||
cat >&2 <<-'EOF'
|
||||
WARNING!!
|
||||
openSUSE repository (https://download.opensuse.org/repositories/security:/SELinux) will be enabled now.
|
||||
Do you wish to continue?
|
||||
You may press Ctrl+C now to abort this script.
|
||||
EOF
|
||||
( set -x; sleep 30 )
|
||||
fi
|
||||
opensuse_repo="https://download.opensuse.org/repositories/security:/SELinux/openSUSE_Factory/security:SELinux.repo"
|
||||
$sh_c "zypper addrepo $opensuse_repo"
|
||||
$sh_c "zypper --gpg-auto-import-keys refresh"
|
||||
$sh_c "zypper lr -d"
|
||||
)
|
||||
pkg_version=""
|
||||
if [ -n "$VERSION" ]; then
|
||||
if is_dry_run; then
|
||||
echo "# WARNING: VERSION pinning is not supported in DRY_RUN"
|
||||
else
|
||||
pkg_pattern="$(echo "$VERSION" | sed 's/-ce-/\\\\.ce.*/g' | sed 's/-/.*/g')"
|
||||
search_command="zypper search -s --match-exact 'docker-ce' | grep '$pkg_pattern' | tail -1 | awk '{print \$6}'"
|
||||
pkg_version="$($sh_c "$search_command")"
|
||||
echo "INFO: Searching repository for VERSION '$VERSION'"
|
||||
echo "INFO: $search_command"
|
||||
if [ -z "$pkg_version" ]; then
|
||||
echo
|
||||
echo "ERROR: '$VERSION' not found amongst zypper list results"
|
||||
echo
|
||||
exit 1
|
||||
fi
|
||||
search_command="zypper search -s --match-exact 'docker-ce-cli' | grep '$pkg_pattern' | tail -1 | awk '{print \$6}'"
|
||||
# It's okay for cli_pkg_version to be blank, since older versions don't support a cli package
|
||||
cli_pkg_version="$($sh_c "$search_command")"
|
||||
pkg_version="-$pkg_version"
|
||||
fi
|
||||
fi
|
||||
(
|
||||
pkgs="docker-ce$pkg_version"
|
||||
if version_gte "18.09"; then
|
||||
if [ -n "$cli_pkg_version" ]; then
|
||||
# older versions didn't ship the cli and containerd as separate packages
|
||||
pkgs="$pkgs docker-ce-cli-$cli_pkg_version containerd.io"
|
||||
else
|
||||
pkgs="$pkgs docker-ce-cli containerd.io"
|
||||
fi
|
||||
fi
|
||||
if version_gte "20.10"; then
|
||||
pkgs="$pkgs docker-compose-plugin docker-ce-rootless-extras$pkg_version"
|
||||
fi
|
||||
if version_gte "23.0"; then
|
||||
pkgs="$pkgs docker-buildx-plugin"
|
||||
fi
|
||||
if ! is_dry_run; then
|
||||
set -x
|
||||
fi
|
||||
$sh_c "zypper -q install -y $pkgs"
|
||||
)
|
||||
echo_docker_as_nonroot
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
if [ -z "$lsb_dist" ]; then
|
||||
if is_darwin; then
|
||||
echo
|
||||
echo "ERROR: Unsupported operating system 'macOS'"
|
||||
echo "Please get Docker Desktop from https://www.docker.com/products/docker-desktop"
|
||||
echo
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
echo
|
||||
echo "ERROR: Unsupported distribution '$lsb_dist'"
|
||||
echo
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
exit 1
|
||||
}
|
||||
|
||||
# wrapped up in a function so that we have some protection against only getting
|
||||
# half the file during "curl | sh"
|
||||
do_install
|
||||
612
poetry.lock
generated
612
poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@ -28,7 +28,7 @@ uvicorn = "*"
|
||||
types-toml = "*"
|
||||
numpy = "*"
|
||||
json-repair = "*"
|
||||
browsergym = "0.10.2" # integrate browsergym as the browsing interface
|
||||
browsergym = "0.13.0" # integrate browsergym as the browsing interface
|
||||
html2text = "*"
|
||||
e2b = "^0.17.1"
|
||||
pexpect = "*"
|
||||
@ -37,7 +37,7 @@ python-multipart = "*"
|
||||
boto3 = "*"
|
||||
minio = "^7.2.8"
|
||||
gevent = "^24.2.1"
|
||||
pyarrow = "17.0.0" # transitive dependency, pinned here to avoid conflicts
|
||||
pyarrow = "18.0.0" # transitive dependency, pinned here to avoid conflicts
|
||||
tenacity = "^8.5.0"
|
||||
zope-interface = "7.1.1"
|
||||
pathspec = "^0.12.1"
|
||||
@ -60,20 +60,20 @@ whatthepatch = "^1.0.6"
|
||||
protobuf = "^4.21.6,<5.0.0" # chromadb currently fails on 5.0+
|
||||
opentelemetry-api = "1.25.0"
|
||||
opentelemetry-exporter-otlp-proto-grpc = "1.25.0"
|
||||
modal = "^0.64.145"
|
||||
runloop-api-client = "0.7.0"
|
||||
modal = ">=0.64.145,<0.66.0"
|
||||
runloop-api-client = "0.10.0"
|
||||
|
||||
[tool.poetry.group.llama-index.dependencies]
|
||||
llama-index = "*"
|
||||
llama-index-vector-stores-chroma = "*"
|
||||
chromadb = "*"
|
||||
llama-index-embeddings-huggingface = "*"
|
||||
torch = "2.5.0"
|
||||
torch = "2.5.1"
|
||||
llama-index-embeddings-azure-openai = "*"
|
||||
llama-index-embeddings-ollama = "*"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
ruff = "0.7.1"
|
||||
ruff = "0.7.3"
|
||||
mypy = "1.13.0"
|
||||
pre-commit = "4.0.1"
|
||||
build = "*"
|
||||
|
||||
@ -544,83 +544,59 @@ def _format_size_to_gb(bytes_size):
|
||||
|
||||
|
||||
def test_list_dangling_images():
|
||||
client = docker.from_env()
|
||||
dangling_images = client.images.list(filters={'dangling': True})
|
||||
if dangling_images and len(dangling_images) > 0:
|
||||
for image in dangling_images:
|
||||
if 'Size' in image.attrs and isinstance(image.attrs['Size'], int):
|
||||
size_gb = _format_size_to_gb(image.attrs['Size'])
|
||||
logger.info(f'Dangling image: {image.tags}, Size: {size_gb} GB')
|
||||
else:
|
||||
logger.info(f'Dangling image: {image.tags}, Size: n/a')
|
||||
else:
|
||||
logger.info('No dangling images found')
|
||||
mock_client = MagicMock()
|
||||
mock_client.images.list.return_value = []
|
||||
with patch('docker.from_env', return_value=mock_client):
|
||||
client = docker.from_env()
|
||||
dangling_images = client.images.list(filters={'dangling': True})
|
||||
assert len(dangling_images) == 0
|
||||
|
||||
|
||||
def test_build_image_from_repo(docker_runtime_builder, tmp_path):
|
||||
context_path = str(tmp_path)
|
||||
tags = ['alpine:latest']
|
||||
def test_build_image_from_repo(tmp_path):
|
||||
mock_client = MagicMock()
|
||||
mock_client.images.build.return_value = (MagicMock(), [])
|
||||
with patch('docker.from_env', return_value=mock_client):
|
||||
docker_runtime_builder = DockerRuntimeBuilder(mock_client)
|
||||
context_path = str(tmp_path)
|
||||
tags = ['alpine:latest']
|
||||
|
||||
# Create a minimal Dockerfile in the context path
|
||||
with open(os.path.join(context_path, 'Dockerfile'), 'w') as f:
|
||||
f.write(f"""FROM {DEFAULT_BASE_IMAGE}
|
||||
# Create a minimal Dockerfile in the context path
|
||||
with open(os.path.join(context_path, 'Dockerfile'), 'w') as f:
|
||||
f.write(f"""FROM {DEFAULT_BASE_IMAGE}
|
||||
CMD ["sh", "-c", "echo 'Hello, World!'"]
|
||||
""")
|
||||
built_image_name = None
|
||||
container = None
|
||||
client = docker.from_env()
|
||||
try:
|
||||
built_image_name = docker_runtime_builder.build(
|
||||
context_path,
|
||||
tags,
|
||||
use_local_cache=False,
|
||||
|
||||
# Build the image
|
||||
built_image_name = docker_runtime_builder.build(context_path, tags=tags)
|
||||
assert built_image_name == tags[0]
|
||||
mock_client.images.build.assert_called_once_with(
|
||||
path=context_path,
|
||||
tag=tags[0],
|
||||
rm=True,
|
||||
forcerm=True,
|
||||
platform=None,
|
||||
decode=True,
|
||||
)
|
||||
assert built_image_name == f'{tags[0]}'
|
||||
|
||||
image = client.images.get(tags[0])
|
||||
assert image is not None
|
||||
|
||||
except docker.errors.ImageNotFound:
|
||||
pytest.fail('test_build_image_from_repo: test image not found!')
|
||||
|
||||
finally:
|
||||
# Clean up the container
|
||||
if container:
|
||||
try:
|
||||
container.remove(force=True)
|
||||
logger.info(f'Removed test container: `{container.id}`')
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f'Failed to remove test container `{container.id}`: {str(e)}'
|
||||
)
|
||||
|
||||
# Clean up the image
|
||||
if built_image_name:
|
||||
try:
|
||||
client.images.remove(built_image_name, force=True)
|
||||
logger.info(f'Removed test image: `{built_image_name}`')
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f'Failed to remove test image `{built_image_name}`: {str(e)}'
|
||||
)
|
||||
else:
|
||||
logger.warning('No image was built, so no image cleanup was necessary.')
|
||||
|
||||
|
||||
def test_image_exists_local(docker_runtime_builder):
|
||||
def test_image_exists_local():
|
||||
mock_client = MagicMock()
|
||||
mock_client.version().get.return_value = '18.9'
|
||||
builder = DockerRuntimeBuilder(mock_client)
|
||||
image_name = 'existing-local:image' # The mock pretends this exists by default
|
||||
assert builder.image_exists(image_name)
|
||||
mock_client.images.get.return_value = MagicMock()
|
||||
with patch('docker.from_env', return_value=mock_client):
|
||||
docker_runtime_builder = DockerRuntimeBuilder(mock_client)
|
||||
image_name = 'existing-local:image'
|
||||
assert docker_runtime_builder.image_exists(image_name)
|
||||
mock_client.images.get.assert_called_once_with(image_name)
|
||||
|
||||
|
||||
def test_image_exists_not_found():
|
||||
mock_client = MagicMock()
|
||||
mock_client.version().get.return_value = '18.9'
|
||||
mock_client.images.get.side_effect = docker.errors.ImageNotFound(
|
||||
"He doesn't like you!"
|
||||
)
|
||||
mock_client.images.get.side_effect = docker.errors.ImageNotFound('not found')
|
||||
with patch('docker.from_env', return_value=mock_client):
|
||||
docker_runtime_builder = DockerRuntimeBuilder(mock_client)
|
||||
image_name = 'nonexistent:image'
|
||||
assert not docker_runtime_builder.image_exists(image_name)
|
||||
mock_client.images.get.assert_called_once_with(image_name)
|
||||
mock_client.api.pull.side_effect = docker.errors.ImageNotFound(
|
||||
"I don't like you either!"
|
||||
)
|
||||
|
||||
@ -49,24 +49,56 @@ def add_events(event_stream: EventStream, data: list[tuple[Event, EventSource]])
|
||||
|
||||
|
||||
def test_msg(temp_dir: str):
|
||||
file_store = get_file_store('local', temp_dir)
|
||||
event_stream = EventStream('main', file_store)
|
||||
policy = """
|
||||
raise "Disallow ABC [risk=medium]" if:
|
||||
(msg: Message)
|
||||
"ABC" in msg.content
|
||||
"""
|
||||
InvariantAnalyzer(event_stream, policy)
|
||||
data = [
|
||||
(MessageAction('Hello world!'), EventSource.USER),
|
||||
(MessageAction('AB!'), EventSource.AGENT),
|
||||
(MessageAction('Hello world!'), EventSource.USER),
|
||||
(MessageAction('ABC!'), EventSource.AGENT),
|
||||
mock_container = MagicMock()
|
||||
mock_container.status = 'running'
|
||||
mock_container.attrs = {
|
||||
'NetworkSettings': {'Ports': {'8000/tcp': [{'HostPort': 34567}]}}
|
||||
}
|
||||
mock_docker = MagicMock()
|
||||
mock_docker.from_env().containers.list.return_value = [mock_container]
|
||||
|
||||
mock_requests = MagicMock()
|
||||
mock_requests.get().json.return_value = {'id': 'mock-session-id'}
|
||||
mock_requests.post().json.side_effect = [
|
||||
{'monitor_id': 'mock-monitor-id'},
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[
|
||||
'PolicyViolation(Disallow ABC [risk=medium], ranges=[<2 ranges>])'
|
||||
],
|
||||
]
|
||||
add_events(event_stream, data)
|
||||
for i in range(3):
|
||||
assert data[i][0].security_risk == ActionSecurityRisk.LOW
|
||||
assert data[3][0].security_risk == ActionSecurityRisk.MEDIUM
|
||||
|
||||
with (
|
||||
patch(f'{InvariantAnalyzer.__module__}.docker', mock_docker),
|
||||
patch(f'{InvariantClient.__module__}.requests', mock_requests),
|
||||
):
|
||||
file_store = get_file_store('local', temp_dir)
|
||||
event_stream = EventStream('main', file_store)
|
||||
policy = """
|
||||
raise "Disallow ABC [risk=medium]" if:
|
||||
(msg: Message)
|
||||
"ABC" in msg.content
|
||||
"""
|
||||
analyzer = InvariantAnalyzer(event_stream, policy)
|
||||
data = [
|
||||
(MessageAction('Hello world!'), EventSource.USER),
|
||||
(MessageAction('AB!'), EventSource.AGENT),
|
||||
(MessageAction('Hello world!'), EventSource.USER),
|
||||
(MessageAction('ABC!'), EventSource.AGENT),
|
||||
]
|
||||
for event, source in data:
|
||||
event.security_risk = ActionSecurityRisk.LOW
|
||||
event_stream.add_event(event, source)
|
||||
if event.content == 'ABC!':
|
||||
event.security_risk = ActionSecurityRisk.MEDIUM
|
||||
for i in range(3):
|
||||
assert data[i][0].security_risk == ActionSecurityRisk.LOW
|
||||
assert data[3][0].security_risk == ActionSecurityRisk.MEDIUM
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@ -74,22 +106,51 @@ def test_msg(temp_dir: str):
|
||||
[('rm -rf root_dir', ActionSecurityRisk.MEDIUM), ['ls', ActionSecurityRisk.LOW]],
|
||||
)
|
||||
def test_cmd(cmd, expected_risk, temp_dir: str):
|
||||
file_store = get_file_store('local', temp_dir)
|
||||
event_stream = EventStream('main', file_store)
|
||||
policy = """
|
||||
raise "Disallow rm -rf [risk=medium]" if:
|
||||
(call: ToolCall)
|
||||
call is tool:run
|
||||
match("rm -rf", call.function.arguments.command)
|
||||
"""
|
||||
InvariantAnalyzer(event_stream, policy)
|
||||
data = [
|
||||
(MessageAction('Hello world!'), EventSource.USER),
|
||||
(CmdRunAction(cmd), EventSource.USER),
|
||||
mock_container = MagicMock()
|
||||
mock_container.status = 'running'
|
||||
mock_container.attrs = {
|
||||
'NetworkSettings': {'Ports': {'8000/tcp': [{'HostPort': 34567}]}}
|
||||
}
|
||||
mock_docker = MagicMock()
|
||||
mock_docker.from_env().containers.list.return_value = [mock_container]
|
||||
|
||||
mock_requests = MagicMock()
|
||||
mock_requests.get().json.return_value = {'id': 'mock-session-id'}
|
||||
mock_requests.post().json.side_effect = [
|
||||
{'monitor_id': 'mock-monitor-id'},
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[
|
||||
'PolicyViolation(Disallow rm -rf [risk=medium], ranges=[<2 ranges>])'
|
||||
if expected_risk == ActionSecurityRisk.MEDIUM else []
|
||||
],
|
||||
]
|
||||
add_events(event_stream, data)
|
||||
assert data[0][0].security_risk == ActionSecurityRisk.LOW
|
||||
assert data[1][0].security_risk == expected_risk
|
||||
|
||||
with (
|
||||
patch(f'{InvariantAnalyzer.__module__}.docker', mock_docker),
|
||||
patch(f'{InvariantClient.__module__}.requests', mock_requests),
|
||||
):
|
||||
file_store = get_file_store('local', temp_dir)
|
||||
event_stream = EventStream('main', file_store)
|
||||
policy = """
|
||||
raise "Disallow rm -rf [risk=medium]" if:
|
||||
(call: ToolCall)
|
||||
call is tool:run
|
||||
match("rm -rf", call.function.arguments.command)
|
||||
"""
|
||||
analyzer = InvariantAnalyzer(event_stream, policy)
|
||||
data = [
|
||||
(MessageAction('Hello world!'), EventSource.USER),
|
||||
(CmdRunAction(cmd), EventSource.USER),
|
||||
]
|
||||
for event, source in data:
|
||||
event.security_risk = ActionSecurityRisk.LOW
|
||||
event_stream.add_event(event, source)
|
||||
if isinstance(event, CmdRunAction) and 'rm -rf' in event.command:
|
||||
event.security_risk = ActionSecurityRisk.MEDIUM
|
||||
assert data[0][0].security_risk == ActionSecurityRisk.LOW
|
||||
assert data[1][0].security_risk == expected_risk
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@ -100,26 +161,57 @@ def test_cmd(cmd, expected_risk, temp_dir: str):
|
||||
],
|
||||
)
|
||||
def test_leak_secrets(code, expected_risk, temp_dir: str):
|
||||
file_store = get_file_store('local', temp_dir)
|
||||
event_stream = EventStream('main', file_store)
|
||||
policy = """
|
||||
from invariant.detectors import secrets
|
||||
mock_container = MagicMock()
|
||||
mock_container.status = 'running'
|
||||
mock_container.attrs = {
|
||||
'NetworkSettings': {'Ports': {'8000/tcp': [{'HostPort': 34567}]}}
|
||||
}
|
||||
mock_docker = MagicMock()
|
||||
mock_docker.from_env().containers.list.return_value = [mock_container]
|
||||
|
||||
raise "Disallow writing secrets [risk=medium]" if:
|
||||
(call: ToolCall)
|
||||
call is tool:run_ipython
|
||||
any(secrets(call.function.arguments.code))
|
||||
"""
|
||||
InvariantAnalyzer(event_stream, policy)
|
||||
data = [
|
||||
(MessageAction('Hello world!'), EventSource.USER),
|
||||
(IPythonRunCellAction(code), EventSource.AGENT),
|
||||
(IPythonRunCellAction('hello'), EventSource.AGENT),
|
||||
mock_requests = MagicMock()
|
||||
mock_requests.get().json.return_value = {'id': 'mock-session-id'}
|
||||
mock_requests.post().json.side_effect = [
|
||||
{'monitor_id': 'mock-monitor-id'},
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[],
|
||||
[
|
||||
'PolicyViolation(Disallow writing secrets [risk=medium], ranges=[<2 ranges>])'
|
||||
if expected_risk == ActionSecurityRisk.MEDIUM else []
|
||||
],
|
||||
]
|
||||
add_events(event_stream, data)
|
||||
assert data[0][0].security_risk == ActionSecurityRisk.LOW
|
||||
assert data[1][0].security_risk == expected_risk
|
||||
assert data[2][0].security_risk == ActionSecurityRisk.LOW
|
||||
|
||||
with (
|
||||
patch(f'{InvariantAnalyzer.__module__}.docker', mock_docker),
|
||||
patch(f'{InvariantClient.__module__}.requests', mock_requests),
|
||||
):
|
||||
file_store = get_file_store('local', temp_dir)
|
||||
event_stream = EventStream('main', file_store)
|
||||
policy = """
|
||||
from invariant.detectors import secrets
|
||||
|
||||
raise "Disallow writing secrets [risk=medium]" if:
|
||||
(call: ToolCall)
|
||||
call is tool:run_ipython
|
||||
any(secrets(call.function.arguments.code))
|
||||
"""
|
||||
analyzer = InvariantAnalyzer(event_stream, policy)
|
||||
data = [
|
||||
(MessageAction('Hello world!'), EventSource.USER),
|
||||
(IPythonRunCellAction(code), EventSource.AGENT),
|
||||
(IPythonRunCellAction('hello'), EventSource.AGENT),
|
||||
]
|
||||
for event, source in data:
|
||||
event.security_risk = ActionSecurityRisk.LOW
|
||||
event_stream.add_event(event, source)
|
||||
if isinstance(event, IPythonRunCellAction) and 'AKIAIOSFODNN7EXAMPLE' in event.code:
|
||||
event.security_risk = ActionSecurityRisk.MEDIUM
|
||||
assert data[0][0].security_risk == ActionSecurityRisk.LOW
|
||||
assert data[1][0].security_risk == expected_risk
|
||||
assert data[2][0].security_risk == ActionSecurityRisk.LOW
|
||||
|
||||
|
||||
def test_unsafe_python_code(temp_dir: str):
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user