58 changes: 22 additions & 36 deletions Dockerfile
@@ -1,51 +1,37 @@
FROM alpine:3.14
FROM alpine:3.22

RUN set -x \
&& apk add --update bash findutils mongodb-tools gzip bzip2 lz4 xz unzip zip coreutils python3 py3-pip rsync curl \
&& ln -s /usr/bin/python3 /usr/bin/python \
&& apk add --update bash findutils mongodb-tools gzip bzip2 lz4 xz unzip zip coreutils python3 py3-pip rsync curl ca-certificates aws-cli py3-mongo \
&& rm -rf /var/cache/apk/* \
;

# Install Gcloud SDK (required for gsutil workload identity authentication)
ENV \
GCLOUD_VERSION=331.0.0 \
GCLOUD_CHECKSUM=f90c2df5bd0b3498d7e33112f17439eead8c94ae7d60a1cab0091de0eee62c16
GCLOUD_VERSION=542.0.0 \
GCLOUD_CHECKSUM_X86_64=6ac032650f507e61cf0b68a462be7e97edc9352cb3b95ce9a0d32cd8a4cfdfd5 \
GCLOUD_CHECKSUM_AARCH64=6b732c2e38da8d03395688fd4460b6d28a63a6d6d140836f4ecc1eee198be5e7

# Install Gcloud SDK
RUN set -x \
&& apk --no-cache add python3 \
&& curl -o /tmp/google-cloud-sdk-${GCLOUD_VERSION}-linux-x86_64.tar.gz -L https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${GCLOUD_VERSION}-linux-x86_64.tar.gz \
&& echo "${GCLOUD_CHECKSUM} google-cloud-sdk-${GCLOUD_VERSION}-linux-x86_64.tar.gz" > /tmp/SHA256SUM \
&& ( cd /tmp; sha256sum -c SHA256SUM || ( echo "Expected $(sha256sum google-cloud-sdk-${GCLOUD_VERSION}-linux-x86_64.tar.gz)"; exit 1; )) \
&& tar -C / -zxvf /tmp/google-cloud-sdk-${GCLOUD_VERSION}-linux-x86_64.tar.gz \
&& /google-cloud-sdk/install.sh --quiet \
&& ln -s /google-cloud-sdk/bin/gcloud /usr/local/bin/ \
&& ln -s /google-cloud-sdk/bin/gsutil /usr/local/bin/ \
&& rm -rf /tmp/* /root/.config/gcloud \
;

# Install AWS CLI
ENV \
PYTHONIOENCODING=UTF-8 \
PYTHONUNBUFFERED=0 \
PAGER=more \
AWS_CLI_VERSION=1.18.93 \
AWS_CLI_CHECKSUM=37eaa4d25cb1b9786af4ab6858cce7dfca154d264554934690d99994a7bbd7a5

RUN set -x \
&& apk add --no-cache ca-certificates wget \
&& cd /tmp \
&& wget -nv https://s3.amazonaws.com/aws-cli/awscli-bundle-${AWS_CLI_VERSION}.zip -O /tmp/awscli-bundle-${AWS_CLI_VERSION}.zip \
&& echo "${AWS_CLI_CHECKSUM} awscli-bundle-${AWS_CLI_VERSION}.zip" > /tmp/SHA256SUM \
&& sha256sum -c SHA256SUM \
&& unzip awscli-bundle-${AWS_CLI_VERSION}.zip \
&& /tmp/awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws \
&& apk del wget \
&& rm -rf /tmp/* \
;
&& if [ "$(uname -m)" = "x86_64" ] ; then \
GCLOUD_CHECKSUM="${GCLOUD_CHECKSUM_X86_64}"; \
ARCH="x86_64"; \
elif [ "$(uname -m)" = "aarch64" ]; then \
GCLOUD_CHECKSUM="${GCLOUD_CHECKSUM_AARCH64}"; \
ARCH="arm"; \
fi \
&& curl -o /tmp/google-cloud-sdk-${GCLOUD_VERSION}-linux-${ARCH}.tar.gz -L https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${GCLOUD_VERSION}-linux-${ARCH}.tar.gz \
&& echo "${GCLOUD_CHECKSUM} google-cloud-sdk-${GCLOUD_VERSION}-linux-${ARCH}.tar.gz" > /tmp/SHA256SUM \
&& ( cd /tmp; sha256sum -c SHA256SUM || ( echo "Expected $(sha256sum google-cloud-sdk-${GCLOUD_VERSION}-linux-${ARCH}.tar.gz)"; exit 1; )) \
&& tar -C / -zxvf /tmp/google-cloud-sdk-${GCLOUD_VERSION}-linux-${ARCH}.tar.gz \
&& /google-cloud-sdk/install.sh --quiet \
&& ln -s /google-cloud-sdk/bin/gcloud /usr/local/bin/ \
&& ln -s /google-cloud-sdk/bin/gsutil /usr/local/bin/ \
&& rm -rf /tmp/* /root/.config/gcloud \
;

# for list-databases
ENV PATH=${PATH}:/commands
RUN pip install pymongo;

COPY commands /commands

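The rewritten single RUN step picks the checksum and tarball suffix from `uname -m` before downloading the SDK. As a rough standalone sketch of the same download-and-verify flow (same `GCLOUD_*` values as above; the explicit failure branch for unsupported architectures is an extra guard, not part of the Dockerfile):

```bash
#!/bin/sh
# Sketch of the per-architecture download-and-verify step from the RUN block above.
set -eu

GCLOUD_VERSION=542.0.0
case "$(uname -m)" in
  x86_64)  ARCH=x86_64; GCLOUD_CHECKSUM=6ac032650f507e61cf0b68a462be7e97edc9352cb3b95ce9a0d32cd8a4cfdfd5 ;;
  aarch64) ARCH=arm;    GCLOUD_CHECKSUM=6b732c2e38da8d03395688fd4460b6d28a63a6d6d140836f4ecc1eee198be5e7 ;;
  *)       echo "unsupported architecture: $(uname -m)" >&2; exit 1 ;;  # extra guard, not in the Dockerfile
esac

TARBALL="google-cloud-sdk-${GCLOUD_VERSION}-linux-${ARCH}.tar.gz"
curl -fL -o "/tmp/${TARBALL}" "https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/${TARBALL}"
# Verify the download against the pinned checksum before unpacking.
echo "${GCLOUD_CHECKSUM}  /tmp/${TARBALL}" | sha256sum -c -
tar -C /tmp -xzf "/tmp/${TARBALL}"
```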
54 changes: 54 additions & 0 deletions commands/load.md
@@ -1 +1,55 @@
# Load Database

The `load` command restores databases from a backup. It can restore a single database or all databases contained in the most recent backup.

The command supports restoring from local files, as well as from S3 and Google Cloud Storage buckets.

## Usage

```bash
./load [GLOBAL_OPTIONS] [OPTIONS] DESTINATION [DATABASE...]
```

### Global Options

These options are passed to the `mongorestore` command.

* `-h, --host`: The hostname of the MongoDB server.
* `-P, --port`: The port of the MongoDB server.
* `-u, --username`: The username to authenticate with.
* `-p, --password`: The password to authenticate with.
* `--authenticationDatabase`: The database to authenticate against.
* `--ssl`: Use SSL to connect to the MongoDB server.

### Options

* `--compression`: The compression used for the backup. Can be `gzip`, `lz4`, `bz2`, or `none`. If not specified, the script will try to auto-detect the compression from the file extension.
* `--umask`: The umask to use when creating files. Defaults to `0077`.

### Arguments

* `DATABASE`: The name of the database to restore. If not specified, all databases in the backup will be restored.
* `DESTINATION`: The destination of the backup. This can be a local file path, an S3 URI (e.g., `s3://my-bucket/backup`), or a Google Cloud Storage URI (e.g., `gs://my-bucket/backup`).

### Environment Variables

The following environment variables can be used to configure the `load` command:

* `DATABASE_HOST`: The hostname of the MongoDB server.
* `DATABASE_PORT`: The port of the MongoDB server.
* `DATABASE_USERNAME`: The username to authenticate with.
* `DATABASE_PASSWORD`: The password to authenticate with.
* `DATABASE_PASSWORD_FILE`: A file containing the password to authenticate with.
* `DATABASE_AUTHENTICATIONDATABASE`: The database to authenticate against.
* `DATABASE_SSL`: Use SSL to connect to the MongoDB server.

* `SAVE_COMPRESSION`: The compression to use for the backup.
* `SAVE_UMASK`: The umask to use when creating files.

## Example

To restore the `my-database` database from the latest backup in the `s3://my-bucket/backup` bucket:

```bash
./load s3://my-bucket/backup my-database
```
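
A couple of additional, hypothetical invocations built only from the options and environment variables listed above (host names, paths, and secrets are placeholders):

```bash
# Restore every database from a local backup directory, forcing bzip2
# decompression instead of relying on extension auto-detection.
./load --compression bz2 /backups/mongodb

# Supply the connection settings through the environment instead of flags.
export DATABASE_HOST=mongodb.internal
export DATABASE_USERNAME=admin
export DATABASE_PASSWORD_FILE=/run/secrets/mongo-password
./load gs://my-bucket/backup my-database
```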
2 changes: 1 addition & 1 deletion commands/save
@@ -251,7 +251,7 @@ fi
for database in "${databases[@]}"; do
save_dest="${save_path}${database}${final_ext}"
echo ">> Saving ${database} to ${save_dest}"
echo "${password:-}" | mongodump "${connection[@]}" "${cmd_args[@]}" "--archive=-" "--db=${database}" | "${compression_cmd[@]}" | tee >(sha256sum > /tmp/sha.txt) > "${save_dest}";
echo "${password:-}" | mongodump "${connection[@]}" "${cmd_args[@]}" "--archive=-" "--db=${database}" --excludeCollection="system.sessions" | "${compression_cmd[@]}" | tee >(sha256sum > /tmp/sha.txt) > "${save_dest}";
awk "{print \$1 \" ${database}${final_ext}\"}" < /tmp/sha.txt | tee -a "${save_path}/CHECKSUM"
done

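Since each line appended to `CHECKSUM` follows the `sha256sum` check-file format (`<hash> <database><ext>`), a locally stored or downloaded backup directory can in principle be verified in one step; a hypothetical check (the backup path is a placeholder):

```bash
# Verify the per-database archives against the CHECKSUM file written by save.
cd /backups/mongodb-latest   # placeholder path containing the archives and CHECKSUM
sha256sum -c CHECKSUM
```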
54 changes: 54 additions & 0 deletions commands/save.md
@@ -1 +1,55 @@
# Save Databases

The `save` command creates a backup of one or more databases. It can save the backup to a local file, an S3 bucket, or a Google Cloud Storage bucket.

## Usage

```bash
./save [GLOBAL_OPTIONS] [OPTIONS] DESTINATION [DATABASE...]
```

### Global Options

These options are passed to the `mongodump` command.

* `-h, --host`: The hostname of the MongoDB server.
* `-P, --port`: The port of the MongoDB server.
* `-u, --username`: The username to authenticate with.
* `-D, --database`: The database to connect to.
* `-p, --password`: The password to authenticate with.
* `--authenticationDatabase`: The database to authenticate against.
* `--ssl`: Use SSL to connect to the MongoDB server.

### Options

* `--compression`: The compression to use for the backup. Can be `gzip`, `lz4`, `bz2`, or `none`. Defaults to `gzip`.
* `--umask`: The umask to use when creating files. Defaults to `0077`.

### Arguments

* `DATABASE`: The name of the database to save. If not specified, all databases will be saved.
* `DESTINATION`: The destination of the backup. This can be a local file path, an S3 URI (e.g., `s3://my-bucket/backup`), or a Google Cloud Storage URI (e.g., `gs://my-bucket/backup`).

### Environment Variables

The following environment variables can be used to configure the `save` command:

* `DATABASE_HOST`: The hostname of the MongoDB server.
* `DATABASE_PORT`: The port of the MongoDB server.
* `DATABASE_USERNAME`: The username to authenticate with.
* `DATABASE_PASSWORD`: The password to authenticate with.
* `DATABASE_PASSWORD_FILE`: A file containing the password to authenticate with.
* `DATABASE_AUTHENTICATIONDATABASE`: The database to authenticate against.
* `DATABASE_SSL`: Use SSL to connect to the MongoDB server.

* `SAVE_COMPRESSION`: The compression to use for the backup.
* `SAVE_UMASK`: The umask to use when creating files.
* `SAVE_SKIP_DATABASES`: A comma-separated list of databases to skip.

## Example

To save the `my-database` database to the `s3://my-bucket/backup` bucket:

```bash
./save s3://my-bucket/backup my-database
```
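
Another hypothetical invocation combining the environment variables documented above (host, bucket, and database names are placeholders):

```bash
# Back up everything except a few internal databases to Google Cloud Storage,
# compressed with lz4.
export DATABASE_HOST=mongodb.internal
export DATABASE_PASSWORD_FILE=/run/secrets/mongo-password
export SAVE_SKIP_DATABASES=admin,local
./save --compression lz4 gs://my-bucket/backup
```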
4 changes: 2 additions & 2 deletions tests/config.sh
@@ -6,5 +6,5 @@ MONGODB_IMAGE='mongo'
MONGODB_TAG='latest'
MINIO_IMAGE='minio/minio'
MINIO_TAG='latest'
DIND_IMAGE='jpetazzo/dind'
DIND_TAG='latest'
DIND_IMAGE='docker'
DIND_TAG='dind'
5 changes: 4 additions & 1 deletion tests/dind-runner.sh
@@ -15,16 +15,19 @@ echo ">> Using Temp Dockerfile: $DOCKERFILE"

cat << EOF > $DOCKERFILE
FROM ${DIND_IMAGE}:${DIND_TAG}
ENV DOCKER_HOST='unix:///var/run/docker.sock'
RUN apk add bash
ADD . /build/
WORKDIR /build
ENTRYPOINT ["/bin/bash"]
CMD ["/build/tests/runner.sh"]
EOF

echo ">> Building"
docker build -f $DOCKERFILE -t $TEST_CONTAINER .

echo ">> Running"
docker run --privileged -ti --rm $TEST_CONTAINER
docker run --platform ${PLATFORM} --privileged -ti --rm $TEST_CONTAINER

echo ">> Removing"
docker rmi $TEST_CONTAINER
28 changes: 21 additions & 7 deletions tests/functions.sh
@@ -8,6 +8,19 @@ CWD="$(dirname $0)/"

. ${CWD}config.sh

# Set docker platform
case $(uname -m) in
x86_64)
PLATFORM="linux/amd64"
;;
aarch64|arm64)
PLATFORM="linux/arm64/v8"
;;
*)
PLATFORM="linux/$(uname -m)"
;;
esac

function rm_container {
set +e
docker rm -fv "$@" > /dev/null 2>&1
@@ -50,10 +63,12 @@ function wait_on_port {

function start_docker {
echo "=> Starting docker"
if ! docker version > /dev/null 2>&1; then
wrapdocker > /dev/null 2>&1 &
sleep 5
fi
dockerd-entrypoint.sh dockerd > /dev/null 2>&1 &
while (! docker stats --no-stream &> /dev/null); do
echo "Waiting for Docker daemon..."
sleep 1
done
echo "Docker daemon is ready."
}

function check_docker {
@@ -62,9 +77,8 @@ function check_docker {
}

function check_environment {
echo "=> Testing environment"
docker version > /dev/null
which curl > /dev/null
echo "=> Checking environment"
docker version > /dev/null || { echo "docker bad"; exit 1; }
}

function build_image {
18 changes: 9 additions & 9 deletions tests/test.sh
@@ -9,20 +9,20 @@ CWD="$(dirname $0)/"
. ${CWD}functions.sh

echo "=> Test save command"
docker run -d --name mongodb -p 27017:27017 ${MONGODB_IMAGE}:${MONGODB_TAG} > /dev/null
docker run -d --name minio -p 9000:9000 ${MINIO_IMAGE}:${MINIO_TAG} server /data > /dev/null
docker run --rm -i --link minio -e MC_HOST_minio=http://minioadmin:minioadmin@minio:9000 minio/mc:latest --quiet mb minio/backup
docker run -i --name $TEST_NAME --link mongodb --link minio -e AWS_ACCESS_KEY_ID=minioadmin -e AWS_SECRET_ACCESS_KEY=minioadmin -e AWS_S3_ADDITIONAL_ARGS="--endpoint-url http://minio:9000" $TEST_CONTAINER save --host mongodb s3://backup
docker run --platform ${PLATFORM} -d --name mongodb -p 27017:27017 ${MONGODB_IMAGE}:${MONGODB_TAG} > /dev/null
docker run --platform ${PLATFORM} -d --name minio -p 9000:9000 ${MINIO_IMAGE}:${MINIO_TAG} server /data > /dev/null
docker run --platform ${PLATFORM} --rm -i --link minio -e MC_HOST_minio=http://minioadmin:minioadmin@minio:9000 minio/mc:latest --quiet mb minio/backup
docker run --platform ${PLATFORM} -i --name $TEST_NAME --link mongodb --link minio -e AWS_ACCESS_KEY_ID=minioadmin -e AWS_SECRET_ACCESS_KEY=minioadmin -e AWS_S3_ADDITIONAL_ARGS="--endpoint-url http://minio:9000" $TEST_CONTAINER save --host mongodb s3://backup
cleanup mongodb minio $TEST_NAME

echo "=> Test load command"
TMPDIR="/tmp/data.$$"
mkdir -p ${TMPDIR}
docker run -d --name mongodb -p 27017:27017 ${MONGODB_IMAGE}:${MONGODB_TAG} > /dev/null
docker run -d --name minio -p 9000:9000 ${MINIO_IMAGE}:${MINIO_TAG} server /data > /dev/null
docker run --rm -i --link minio -e MC_HOST_minio=http://minioadmin:minioadmin@minio:9000 minio/mc:latest --quiet mb minio/backup
docker run -i --name ${TEST_NAME}-save --link mongodb --link minio -e AWS_ACCESS_KEY_ID=minioadmin -e AWS_SECRET_ACCESS_KEY=minioadmin -e AWS_S3_ADDITIONAL_ARGS="--endpoint-url http://minio:9000" $TEST_CONTAINER save --host mongodb s3://backup
docker run -i --name ${TEST_NAME}-load --link mongodb --link minio -e AWS_ACCESS_KEY_ID=minioadmin -e AWS_SECRET_ACCESS_KEY=minioadmin -e AWS_S3_ADDITIONAL_ARGS="--endpoint-url http://minio:9000" $TEST_CONTAINER load --host mongodb s3://backup config newdb
docker run --platform ${PLATFORM} -d --name mongodb -p 27017:27017 ${MONGODB_IMAGE}:${MONGODB_TAG} > /dev/null
docker run --platform ${PLATFORM} -d --name minio -p 9000:9000 ${MINIO_IMAGE}:${MINIO_TAG} server /data > /dev/null
docker run --platform ${PLATFORM} --rm -i --link minio -e MC_HOST_minio=http://minioadmin:minioadmin@minio:9000 minio/mc:latest --quiet mb minio/backup
docker run --platform ${PLATFORM} -i --name ${TEST_NAME}-save --link mongodb --link minio -e AWS_ACCESS_KEY_ID=minioadmin -e AWS_SECRET_ACCESS_KEY=minioadmin -e AWS_S3_ADDITIONAL_ARGS="--endpoint-url http://minio:9000" $TEST_CONTAINER save --host mongodb s3://backup
docker run --platform ${PLATFORM} -i --name ${TEST_NAME}-load --link mongodb --link minio -e AWS_ACCESS_KEY_ID=minioadmin -e AWS_SECRET_ACCESS_KEY=minioadmin -e AWS_S3_ADDITIONAL_ARGS="--endpoint-url http://minio:9000" $TEST_CONTAINER load --host mongodb s3://backup config newdb
cleanup mongodb minio ${TEST_NAME}-save ${TEST_NAME}-load
rm -rf ${TMPDIR}
echo "=> Done"