
feat: add flags to support aws as storage destination (#1372)
Signed-off-by: Jeffrey Tang <[email protected]>
JeffreyDallas authored Feb 12, 2025
1 parent 5b5e276 commit 5ca0e47
Showing 9 changed files with 303 additions and 138 deletions.
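
Based on the updated e2e test script in this commit, a minimal network deploy invocation using the new AWS flags might look like the sketch below. The endpoint mirrors the test script's use of GCS's S3-compatible API; the bucket, prefix, and credential values are illustrative placeholders, not values from this commit.

# Sketch only: deploy with the new aws_only storage type.
# AWS_ACCESS_KEY, AWS_SECRET_KEY, my-stream-bucket, my-backup-bucket, and ci-run-123
# are placeholders; the flag names come from the diff below.
npm run solo-test -- network deploy -i node1 --deployment solo-e2e \
  --storage-type aws_only \
  --aws-endpoint "https://storage.googleapis.com" \
  --aws-access-key "${AWS_ACCESS_KEY}" \
  --aws-secrets "${AWS_SECRET_KEY}" \
  --aws-bucket "my-stream-bucket" \
  --aws-bucket-prefix "ci-run-123" \
  --backup-bucket "my-backup-bucket" \
  --google-credential gcp_service_account.json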
4 changes: 3 additions & 1 deletion .github/workflows/flow-gcs-test.yaml
@@ -34,8 +34,9 @@ jobs:
timeout-minutes: 20
runs-on: solo-linux-large
strategy:
fail-fast: false
matrix:
storageType: ["gcs_only", "gcs_and_minio"]
storageType: ["gcs_only", "minio_only", "aws_only"]
steps:
- name: Harden Runner
uses: step-security/harden-runner@cb605e52c26070c328afc4562f0b4ada7618a84e # v2.10.4
@@ -116,6 +117,7 @@ jobs:
.github/workflows/script/gcs_test.sh
- name: Delete Test Directory after Test
if: matrix.storageType == 'gcs_only' || matrix.storageType == 'aws_only'
run: |
gcloud storage rm --recursive gs://${BUCKET_NAME}/${{ steps.jobs.outputs.job_id }} --project=${{ vars.GCP_S3_PROJECT_ID }}
gcloud storage rm --recursive gs://${BACKUP_BUCKET_NAME}/${{ steps.jobs.outputs.job_id }} --project=${{ vars.GCP_S3_PROJECT_ID }}
66 changes: 45 additions & 21 deletions .github/workflows/script/gcs_test.sh
@@ -26,7 +26,7 @@ else
fi

if [ -z "${STORAGE_TYPE}" ]; then
storageType="gcs_and_minio"
storageType="aws_only"
else
storageType=${STORAGE_TYPE}
fi
@@ -40,44 +40,68 @@ if [ -z "${PREFIX}" ]; then
echo "PREFIX is not set"
else
echo "Using PREFIX: ${PREFIX}"
BUCKET_PREFIX_OPTION="--storage-bucket-prefix ${PREFIX}"
if [ "${storageType}" == "aws_only" ]; then
STORAGE_OPTIONS=(
"--aws-endpoint" "https://storage.googleapis.com"
"--aws-access-key" "${GCS_ACCESS_KEY}"
"--aws-secrets" "${GCS_SECRET_KEY}"
"--aws-bucket" "${streamBucket}"
"--aws-bucket-prefix" "${PREFIX}"
)
elif [ "${storageType}" == "gcs_only" ]; then
STORAGE_OPTIONS=(
"--gcs-endpoint" "https://storage.googleapis.com"
"--gcs-access-key" "${GCS_ACCESS_KEY}"
"--gcs-secrets" "${GCS_SECRET_KEY}"
"--gcs-bucket" "${streamBucket}"
"--gcs-bucket-prefix" "${PREFIX}"
)
fi

if [ "${storageType}" == "aws_only" ] || [ "${storageType}" == "gcs_only" ]; then
MIRROR_STORAGE_OPTIONS=(
"--storage-endpoint" "https://storage.googleapis.com"
"--storage-access-key" "${GCS_ACCESS_KEY}"
"--storage-secrets" "${GCS_SECRET_KEY}"
"--storage-bucket" "${streamBucket}"
"--storage-bucket-prefix" "${PREFIX}"
)
fi
fi

echo "STORAGE_OPTIONS: " "${STORAGE_OPTIONS[@]}"
echo "MIRROR_STORAGE_OPTIONS: " "${MIRROR_STORAGE_OPTIONS[@]}"

echo "${GCP_SERVICE_ACCOUNT_TOKEN}" > gcp_service_account.json

echo "Using bucket name: ${streamBucket}"
echo "Test storage type: ${storageType}"

if [[ -z "${SOLO_TEST_CLUSTER}" && ${SOLO_CLUSTER_NAME} != "" ]]; then
SOLO_CLUSTER_NAME=solo-e2e
else
SOLO_CLUSTER_NAME=${SOLO_TEST_CLUSTER}
fi
SOLO_CLUSTER_NAME=solo-e2e
SOLO_NAMESPACE=solo-e2e
SOLO_CLUSTER_SETUP_NAMESPACE=solo-setup

SOLO_DEPLOYMENT=solo-e2e

kind delete cluster -n "${SOLO_CLUSTER_NAME}"
kind create cluster -n "${SOLO_CLUSTER_NAME}"
npm run solo-test -- init
npm run solo-test -- cluster setup \
-s "${SOLO_CLUSTER_SETUP_NAMESPACE}"
npm run solo-test -- node keys --gossip-keys --tls-keys -i node1
npm run solo-test -- network deploy -i node1 -n "${SOLO_NAMESPACE}" \
--storage-endpoint "https://storage.googleapis.com" \
--storage-access-key "${GCS_ACCESS_KEY}" --storage-secrets "${GCS_SECRET_KEY}" \
--storage-type "${storageType}" --storage-bucket "${streamBucket}" $BUCKET_PREFIX_OPTION \
npm run solo-test -- deployment create -n "${SOLO_NAMESPACE}" --context kind-"${SOLO_CLUSTER_NAME}" --email [email protected] --deployment-clusters kind-"${SOLO_CLUSTER_NAME}" --deployment "${SOLO_DEPLOYMENT}"
npm run solo-test -- network deploy -i node1 --deployment "${SOLO_DEPLOYMENT}" \
--storage-type "${storageType}" \
"${STORAGE_OPTIONS[@]}" \
--backup-bucket "${streamBackupBucket}" \
--google-credential gcp_service_account.json

npm run solo-test -- node setup -i node1 -n "${SOLO_NAMESPACE}"
npm run solo-test -- node start -i node1 -n "${SOLO_NAMESPACE}"
npm run solo-test -- mirror-node deploy --namespace "${SOLO_NAMESPACE}" \
--storage-endpoint "https://storage.googleapis.com" \
--storage-access-key "${GCS_ACCESS_KEY}" --storage-secrets "${GCS_SECRET_KEY}" \
--storage-type "${storageType}" --storage-bucket "${streamBucket}" $BUCKET_PREFIX_OPTION
npm run solo-test -- node setup -i node1 --deployment "${SOLO_DEPLOYMENT}"
npm run solo-test -- node start -i node1 --deployment "${SOLO_DEPLOYMENT}"
npm run solo-test -- mirror-node deploy --deployment "${SOLO_DEPLOYMENT}" \
--storage-type "${storageType}" \
"${MIRROR_STORAGE_OPTIONS[@]}" \

npm run solo-test -- explorer deploy -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}"
npm run solo-test -- explorer deploy -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" --deployment "${SOLO_DEPLOYMENT}"

kubectl port-forward -n "${SOLO_NAMESPACE}" svc/haproxy-node1-svc 50211:50211 > /dev/null 2>&1 &

@@ -88,7 +112,7 @@ cd ..; create_test_account ; cd -

node examples/create-topic.js

npm run solo-test -- node stop -i node1 -n "${SOLO_NAMESPACE}"
npm run solo-test -- node stop -i node1 --deployment "${SOLO_DEPLOYMENT}"

echo "Waiting for backup uploader to run"
# manually call script "backup.sh" from container backup-uploader since it only runs every 5 minutes
@@ -102,4 +126,4 @@ if grep -q \"error\" backup-uploader.log; then
exit 1
fi

npm run solo-test -- network destroy -n "${SOLO_NAMESPACE}" --force -q
npm run solo-test -- network destroy --deployment "${SOLO_DEPLOYMENT}" --force -q
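
A note on the design choice above: the script replaces the single scalar BUCKET_PREFIX_OPTION with bash arrays (STORAGE_OPTIONS, MIRROR_STORAGE_OPTIONS) that are expanded quoted. A small illustrative sketch of why the quoted array form is safer than an unquoted scalar (values below are made up):

# Quoted array expansion keeps each flag and value as its own argument,
# even if a value contains spaces.
OPTS=("--aws-bucket-prefix" "my prefix with spaces")
printf '[%s]\n' "${OPTS[@]}"     # 2 arguments: [--aws-bucket-prefix] [my prefix with spaces]

# An unquoted scalar is word-split on whitespace.
OPT_STRING="--aws-bucket-prefix my prefix with spaces"
printf '[%s]\n' $OPT_STRING      # 5 arguments: [--aws-bucket-prefix] [my] [prefix] [with] [spaces]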
170 changes: 149 additions & 21 deletions src/commands/flags.ts
@@ -1763,66 +1763,125 @@ export class Flags {
definition: {
defaultValue: constants.StorageType.MINIO_ONLY,
describe:
'storage type for saving stream files, available options are minio_only, gcs_and_minio, s3_only, gcs_only, s3_and_gcs',
'storage type for saving stream files, available options are minio_only, aws_only, gcs_only, aws_and_gcs',
type: 'StorageType',
},
prompt: undefined,
};

static readonly storageAccessKey: CommandFlag = {
constName: 'storageAccessKey',
name: 'storage-access-key',
static readonly gcsAccessKey: CommandFlag = {
constName: 'gcsAccessKey',
name: 'gcs-access-key',
definition: {
defaultValue: '',
describe: 'storage access key',
describe: 'gcs storage access key',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly storageSecrets: CommandFlag = {
constName: 'storageSecrets',
name: 'storage-secrets',
static readonly gcsSecrets: CommandFlag = {
constName: 'gcsSecrets',
name: 'gcs-secrets',
definition: {
defaultValue: '',
describe: 'storage secret key',
describe: 'gcs storage secret key',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly storageEndpoint: CommandFlag = {
constName: 'storageEndpoint',
name: 'storage-endpoint',
static readonly gcsEndpoint: CommandFlag = {
constName: 'gcsEndpoint',
name: 'gcs-endpoint',
definition: {
defaultValue: '',
describe: 'storage endpoint URL',
describe: 'gcs storage endpoint URL',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly storageBucket: CommandFlag = {
constName: 'storageBucket',
name: 'storage-bucket',
static readonly gcsBucket: CommandFlag = {
constName: 'gcsBucket',
name: 'gcs-bucket',
definition: {
defaultValue: '',
describe: 'name of storage bucket',
describe: 'name of gcs storage bucket',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly storageBucketPrefix: CommandFlag = {
constName: 'storageBucketPrefix',
name: 'storage-bucket-prefix',
static readonly gcsBucketPrefix: CommandFlag = {
constName: 'gcsBucketPrefix',
name: 'gcs-bucket-prefix',
definition: {
defaultValue: '',
describe: 'path prefix of google storage bucket',
type: 'string',
},
prompt: undefined,
};

static readonly awsAccessKey: CommandFlag = {
constName: 'awsAccessKey',
name: 'aws-access-key',
definition: {
defaultValue: '',
describe: 'aws storage access key',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly awsSecrets: CommandFlag = {
constName: 'awsSecrets',
name: 'aws-secrets',
definition: {
defaultValue: '',
describe: 'aws storage secret key',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly awsEndpoint: CommandFlag = {
constName: 'awsEndpoint',
name: 'aws-endpoint',
definition: {
defaultValue: '',
describe: 'aws storage endpoint URL',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly awsBucket: CommandFlag = {
constName: 'awsBucket',
name: 'aws-bucket',
definition: {
defaultValue: '',
describe: 'name of aws storage bucket',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly awsBucketPrefix: CommandFlag = {
constName: 'awsBucketPrefix',
name: 'aws-bucket-prefix',
definition: {
defaultValue: '',
describe: 'path prefix of storage bucket',
describe: 'path prefix of aws storage bucket',
type: 'string',
},
prompt: undefined,
@@ -1852,6 +1911,65 @@ export class Flags {
prompt: undefined,
};

static readonly storageAccessKey: CommandFlag = {
constName: 'storageAccessKey',
name: 'storage-access-key',
definition: {
defaultValue: '',
describe: 'storage access key for mirror node importer',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly storageSecrets: CommandFlag = {
constName: 'storageSecrets',
name: 'storage-secrets',
definition: {
defaultValue: '',
describe: 'storage secret key for mirror node importer',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly storageEndpoint: CommandFlag = {
constName: 'storageEndpoint',
name: 'storage-endpoint',
definition: {
defaultValue: '',
describe: 'storage endpoint URL for mirror node importer',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly storageBucket: CommandFlag = {
constName: 'storageBucket',
name: 'storage-bucket',
definition: {
defaultValue: '',
describe: 'name of storage bucket for mirror node importer',
type: 'string',
dataMask: constants.STANDARD_DATAMASK,
},
prompt: undefined,
};

static readonly storageBucketPrefix: CommandFlag = {
constName: 'storageBucketPrefix',
name: 'storage-bucket-prefix',
definition: {
defaultValue: '',
describe: 'path prefix of storage bucket for mirror node importer',
type: 'string',
},
prompt: undefined,
};

static readonly loadBalancerEnabled: CommandFlag = {
constName: 'loadBalancerEnabled',
name: 'load-balancer',
@@ -1945,6 +2063,16 @@ export class Flags {
Flags.stakeAmounts,
Flags.stateFile,
Flags.storageType,
Flags.gcsAccessKey,
Flags.gcsSecrets,
Flags.gcsEndpoint,
Flags.gcsBucket,
Flags.gcsBucketPrefix,
Flags.awsAccessKey,
Flags.awsSecrets,
Flags.awsEndpoint,
Flags.awsBucket,
Flags.awsBucketPrefix,
Flags.storageAccessKey,
Flags.storageSecrets,
Flags.storageEndpoint,
12 changes: 8 additions & 4 deletions src/commands/mirror_node.ts
@@ -131,14 +131,18 @@ export class MirrorNodeCommand extends BaseCommand {
}

let storageType = '';
if (config.storageType && config.storageAccessKey && config.storageSecrets && config.storageEndpoint) {
if (
config.storageType !== constants.StorageType.MINIO_ONLY &&
config.storageAccessKey &&
config.storageSecrets &&
config.storageEndpoint
) {
if (
config.storageType === constants.StorageType.GCS_ONLY ||
config.storageType === constants.StorageType.S3_AND_GCS ||
config.storageType === constants.StorageType.GCS_AND_MINIO
config.storageType === constants.StorageType.AWS_AND_GCS
) {
storageType = 'gcp';
} else if (config.storageType === constants.StorageType.S3_ONLY) {
} else if (config.storageType === constants.StorageType.AWS_ONLY) {
storageType = 's3';
} else {
throw new IllegalArgumentError(`Invalid cloud storage type: ${config.storageType}`);
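
In short, the mirror node importer now maps GCS_ONLY and AWS_AND_GCS to the 'gcp' provider and AWS_ONLY to 's3', and skips this block entirely when the type is MINIO_ONLY. The importer itself is still configured through the generic --storage-* flags, which this commit scopes to mirror-node use; based on the updated test script, a deploy call might look like the sketch below (bucket, prefix, and credential values are placeholders).

# Sketch only: mirror-node deploy using the importer-facing --storage-* flags.
# ACCESS_KEY, SECRET_KEY, my-stream-bucket, and ci-run-123 are placeholders.
npm run solo-test -- mirror-node deploy --deployment solo-e2e \
  --storage-type aws_only \
  --storage-endpoint "https://storage.googleapis.com" \
  --storage-access-key "${ACCESS_KEY}" \
  --storage-secrets "${SECRET_KEY}" \
  --storage-bucket "my-stream-bucket" \
  --storage-bucket-prefix "ci-run-123"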

