Add support for s3 `GLACIER` storage class. When a GET for an archived key returns an error, the key must be restored first, which takes about 5 minutes per key, so restore can be slow. `GLACIER_IR` looks more robust; prefer it where possible. Fix #614
Slach committed Aug 8, 2023
1 parent 65af682 commit 7f316b6
Showing 6 changed files with 70 additions and 7 deletions.
1 change: 1 addition & 0 deletions ChangeLog.md
@@ -6,6 +6,7 @@ IMPROVEMENTS
- Backup/Restore RBAC related objects from Zookeeper via direct connection to zookeeper/keeper, fix [604](https://github.com/Altinity/clickhouse-backup/issues/604)
- Add `SHARDED_OPERATION_MODE` option, to easy create backup for sharded cluster, available values `none` (no sharding), `table` (table granularity), `database` (database granularity), `first-replica` (on the lexicographically sorted first active replica), thanks @mskwon, fix [639](https://github.com/Altinity/clickhouse-backup/issues/639), fix [648](https://github.com/Altinity/clickhouse-backup/pull/648)
- Add support for `compression_format: none` for upload and download backups created with `--rbac` / `--rbac-only` or `--configs` / `--configs-only` options, fix [713](https://github.com/Altinity/clickhouse-backup/issues/713)
- Add support for s3 `GLACIER` storage class. When GET returns an error for an archived key, the key must be restored first, which takes about 5 minutes per key, so restore can be slow; `GLACIER_IR` looks more robust, fix [614](https://github.com/Altinity/clickhouse-backup/issues/614)

BUG FIXES
- fix possible create backup failures during UNFREEZE of non-existent tables, affects versions 2.2.7+, fix [704](https://github.com/Altinity/clickhouse-backup/issues/704)
5 changes: 3 additions & 2 deletions pkg/storage/s3.go
@@ -144,7 +144,7 @@ func (s *S3) Connect(ctx context.Context) error {

if s.Config.Debug {
awsConfig.Logger = newS3Logger(s.Log)
awsConfig.ClientLogMode = aws.LogRetries | aws.LogRequest | aws.LogResponse
awsConfig.ClientLogMode = aws.LogRetries | aws.LogRequest | aws.LogResponseWithBody
}

httpTransport := http.DefaultTransport
@@ -630,7 +630,8 @@ func (s *S3) restoreObject(ctx context.Context, key string) error {
}

if res.Restore != nil && *res.Restore == "ongoing-request=\"true\"" {
s.Log.Debugf("%s still not restored, will wait %d seconds", key, i*5)
i += 1
s.Log.Warnf("%s still not restored, will wait %d seconds", key, i*5)
time.Sleep(time.Duration(i*5) * time.Second)
} else {
return nil
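For context on the hunk above: after a restore has been requested for an archived key, the code polls the object's restore status and sleeps i*5 seconds between attempts. Below is a minimal self-contained sketch of that polling pattern with aws-sdk-go-v2; the client, bucket, and key wiring is illustrative rather than the project's actual plumbing, and it assumes the restore was already requested via RestoreObject.

package example

import (
	"context"
	"fmt"
	"time"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/s3"
)

// waitForRestore polls HeadObject until a previously requested GLACIER restore
// finishes, sleeping i*5 seconds between attempts, mirroring the linear backoff
// in the restoreObject hunk above.
func waitForRestore(ctx context.Context, client *s3.Client, bucket, key string) error {
	for i := 1; ; i++ {
		head, err := client.HeadObject(ctx, &s3.HeadObjectInput{
			Bucket: aws.String(bucket),
			Key:    aws.String(key),
		})
		if err != nil {
			return err
		}
		// While the restore is in flight, S3 reports Restore: ongoing-request="true";
		// once it completes (or the object was never archived) the marker changes or disappears.
		if head.Restore == nil || *head.Restore != `ongoing-request="true"` {
			return nil
		}
		fmt.Printf("%s still not restored, waiting %d seconds\n", key, i*5)
		time.Sleep(time.Duration(i*5) * time.Second)
	}
}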
1 change: 0 additions & 1 deletion test/integration/config-s3-fips.yml
@@ -35,7 +35,6 @@ s3:
compression_format: tar
allow_multipart_download: true
concurrency: 3
storage_class: GLACIER
api:
listen: :7171
create_integration_tables: true
44 changes: 44 additions & 0 deletions test/integration/config-s3-glacier.yml
@@ -0,0 +1,44 @@
general:
disable_progress_bar: true
remote_storage: s3
upload_concurrency: 4
download_concurrency: 4
skip_tables:
- " system.*"
- "INFORMATION_SCHEMA.*"
- "information_schema.*"
- "_temporary_and_external_tables.*"
restore_schema_on_cluster: "{cluster}"
clickhouse:
host: clickhouse
port: 9440
username: backup
password: meow=& 123?*%# МЯУ
secure: true
skip_verify: true
sync_replicated_tables: true
timeout: 1h
restart_command: bash -c 'echo "FAKE RESTART"'
backup_mutations: true
# secrets are provided from `.env` or from GitHub Actions secrets
s3:
access_key: ${QA_AWS_ACCESS_KEY}
secret_key: ${QA_AWS_SECRET_KEY}
bucket: ${QA_AWS_BUCKET}
# endpoint: https://${QA_AWS_BUCKET}.s3-fips.${QA_AWS_REGION}.amazonaws.com/
region: ${QA_AWS_REGION}
acl: private
force_path_style: false
path: backup/{cluster}/{shard}
object_disk_path: object_disks/{cluster}/{shard}
disable_ssl: false
compression_format: tar
allow_multipart_download: false
concurrency: 4
# storage_class: GLACIER pushes the test execution to ~6000 seconds
storage_class: GLACIER_IR
api:
listen: :7171
create_integration_tables: true
integration_tables_host: "localhost"
allow_parallel: false
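A note on the storage_class choice above: GLACIER (Flexible Retrieval) rejects plain GET requests on archived objects until a restore completes, which is what pushes the GLACIER test run to roughly 6000 seconds, while GLACIER_IR (Instant Retrieval) serves GET directly. For illustration, a hedged sketch of how a storage class is attached to an upload with aws-sdk-go-v2; field and constant names come from the SDK, and the project's own upload path may differ.

package example

import (
	"context"
	"strings"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/s3"
	"github.com/aws/aws-sdk-go-v2/service/s3/types"
)

// putWithStorageClass uploads a small object with an explicit storage class,
// e.g. types.StorageClassGlacierIr ("GLACIER_IR") or types.StorageClassGlacier.
// Objects written with GLACIER need a RestoreObject call before they can be read.
func putWithStorageClass(ctx context.Context, client *s3.Client, bucket, key, body string, class types.StorageClass) error {
	_, err := client.PutObject(ctx, &s3.PutObjectInput{
		Bucket:       aws.String(bucket),
		Key:          aws.String(key),
		Body:         strings.NewReader(body),
		StorageClass: class,
	})
	return err
}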
22 changes: 19 additions & 3 deletions test/integration/integration_test.go
@@ -952,7 +952,7 @@ func TestFIPS(t *testing.T) {
r.NoError(dockerExec("clickhouse", "bash", "-xce", "openssl req -subj \"/CN=localhost\" -addext \"subjectAltName = DNS:localhost,DNS:*.cluster.local\" -new -key /etc/clickhouse-backup/server-key.pem -out /etc/clickhouse-backup/server-req.csr"))
r.NoError(dockerExec("clickhouse", "bash", "-xce", "openssl x509 -req -days 365000 -extensions SAN -extfile <(printf \"\\n[SAN]\\nsubjectAltName=DNS:localhost,DNS:*.cluster.local\") -in /etc/clickhouse-backup/server-req.csr -out /etc/clickhouse-backup/server-cert.pem -CA /etc/clickhouse-backup/ca-cert.pem -CAkey /etc/clickhouse-backup/ca-key.pem -CAcreateserial"))
}
r.NoError(dockerExec("clickhouse", "bash", "-c", "cat /etc/clickhouse-backup/config.yml.fips-template | envsubst > /etc/clickhouse-backup/config.yml"))
r.NoError(dockerExec("clickhouse", "bash", "-xec", "cat /etc/clickhouse-backup/config.yml.fips-template | envsubst > /etc/clickhouse-backup/config.yml"))

generateCerts("rsa", "4096", "")
createSQL := "CREATE TABLE default.fips_table (v UInt64) ENGINE=MergeTree() ORDER BY tuple()"
@@ -1158,6 +1158,20 @@ func TestDoRestoreConfigs(t *testing.T) {
ch.chbackend.Close()
}

func TestIntegrationS3Glacier(t *testing.T) {
if isTestShouldSkip("GLACIER_TESTS") {
t.Skip("Skipping GLACIER integration tests...")
return
}
r := require.New(t)
r.NoError(dockerCP("config-s3-glacier.yml", "clickhouse-backup:/etc/clickhouse-backup/config.yml.s3glacier-template"))
installDebIfNotExists(r, "clickhouse-backup", "curl", "gettext-base", "bsdmainutils", "dnsutils", "git", "ca-certificates")
r.NoError(dockerExec("clickhouse-backup", "bash", "-xec", "cat /etc/clickhouse-backup/config.yml.s3glacier-template | envsubst > /etc/clickhouse-backup/config.yml"))
dockerExecTimeout = 60 * time.Minute
runMainIntegrationScenario(t, "GLACIER")
dockerExecTimeout = 3 * time.Minute
}
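The new test raises the package-level dockerExecTimeout to 60 minutes for the long-running Glacier scenario and resets it afterwards. If the scenario aborts the test via require/FailNow, the reset on the last line is skipped for later tests in the same process; a defer would be a slightly safer variant. A sketch of that alternative (not the committed code):

	// sketch: restore the previous timeout even if the scenario aborts the test
	prevTimeout := dockerExecTimeout
	dockerExecTimeout = 60 * time.Minute
	defer func() { dockerExecTimeout = prevTimeout }()
	runMainIntegrationScenario(t, "GLACIER")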

func TestIntegrationS3(t *testing.T) {
r := require.New(t)
r.NoError(dockerCP("config-s3.yml", "clickhouse-backup:/etc/clickhouse-backup/config.yml"))
@@ -2412,6 +2426,8 @@ func (ch *TestClickHouse) queryWithNoError(r *require.Assertions, query string,
r.NoError(err)
}

var dockerExecTimeout = 180 * time.Second

func dockerExec(container string, cmd ...string) error {
out, err := dockerExecOut(container, cmd...)
log.Info(out)
@@ -2421,7 +2437,7 @@ func dockerExec(container string, cmd ...string) error {
func dockerExecOut(container string, cmd ...string) (string, error) {
dcmd := []string{"exec", container}
dcmd = append(dcmd, cmd...)
return utils.ExecCmdOut(context.Background(), 180*time.Second, "docker", dcmd...)
return utils.ExecCmdOut(context.Background(), dockerExecTimeout, "docker", dcmd...)
}

func dockerCP(src, dst string) error {
@@ -2490,7 +2506,7 @@ func installDebIfNotExists(r *require.Assertions, container string, pkgs ...stri
container,
"bash", "-xec",
fmt.Sprintf(
"export DEBIAN_FRONTEND=noniteractive; if [[ '%d' != $(dpkg -l | grep -c -E \"%s\" ) ]]; then rm -fv /etc/apt/sources.list.d/clickhouse.list; find /etc/apt/ -type f -exec sed -i 's/ru.archive.ubuntu.com/archive.ubuntu.com/g' {} +; apt-get -y update; apt-get install --no-install-recommends -y %s; fi",
"export DEBIAN_FRONTEND=noniteractive; if [[ '%d' != $(dpkg -l | grep -c -E \"%s\" ) ]]; then rm -fv /etc/apt/sources.list.d/clickhouse.list; find /etc/apt/ -type f -name *.list -exec sed -i 's/ru.archive.ubuntu.com/archive.ubuntu.com/g' {} +; apt-get -y update; apt-get install --no-install-recommends -y %s; fi",
len(pkgs), "^ii\\s+"+strings.Join(pkgs, "|^ii\\s+"), strings.Join(pkgs, " "),
),
))
4 changes: 3 additions & 1 deletion test/integration/run.sh
@@ -23,6 +23,8 @@ else
export GCS_TESTS=${GCS_TESTS:-}
fi

export GLACIER_TESTS=${GLACIER_TESTS:-}

export AZURE_TESTS=${AZURE_TESTS:-1}
export RUN_ADVANCED_TESTS=${RUN_ADVANCED_TESTS:-1}
export S3_DEBUG=${S3_DEBUG:-false}
@@ -44,5 +46,5 @@ make clean build-race-docker build-race-fips-docker
docker-compose -f ${CUR_DIR}/${COMPOSE_FILE} up -d
docker-compose -f ${CUR_DIR}/${COMPOSE_FILE} exec minio mc alias list

go test -timeout 30m -failfast -tags=integration -run "${RUN_TESTS:-.+}" -v ${CUR_DIR}/integration_test.go
go test -timeout ${TESTS_TIMEOUT:-30m} -failfast -tags=integration -run "${RUN_TESTS:-.+}" -v ${CUR_DIR}/integration_test.go
go tool covdata textfmt -i "${CUR_DIR}/_coverage_/" -o "${CUR_DIR}/_coverage_/coverage.out"
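With the GLACIER_TESTS and TESTS_TIMEOUT hooks above, the Glacier scenario can be run on its own with something like `GLACIER_TESTS=1 TESTS_TIMEOUT=120m RUN_TESTS=TestIntegrationS3Glacier ./run.sh` from test/integration. The variable names come from this script; the 120m budget is an assumption based on the ~6000 second note in config-s3-glacier.yml, and the QA_AWS_* credentials referenced there must be exported as well.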
