From 770f9f204a12cd2734efe043039c6c521e0e03bf Mon Sep 17 00:00:00 2001
From: Francesco Martini
Date: Thu, 28 Mar 2024 23:37:50 +0100
Subject: [PATCH] improved docs

---
 tools/python/copy-table/README.md |  4 ++--
 tools/python/copy-table/unload.py | 12 ++++++------
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/tools/python/copy-table/README.md b/tools/python/copy-table/README.md
index 893fdd0f..c1b44f57 100644
--- a/tools/python/copy-table/README.md
+++ b/tools/python/copy-table/README.md
@@ -53,14 +53,14 @@ Check the following guide to learn more: [Limits for UNLOAD from Timestream for
 - **from_time** [OPTIONAL]: Timestamp (inclusive) from which you want to select data to unload (e.g.: *2024-02-26 17:24:38.270000000*)
 - **end_time** [OPTIONAL]: Timestamp (exclusive) up to which you want to select data to unload (e.g.: *2024-03-15 19:26:31.304000000*)
 - **partition** [OPTIONAL]: Time partition you want to use (possible values: *day, month, year*)
-- **iam_role_arn_bucket_policy** [OPTIONAL]: {Applies for cross account migrations} Grants destination IAM Role access to S3 Bucket (e.g.: *arn:aws:iam::123456789123:role/BatchLoadRole*)
+- **iam_role_arn** [OPTIONAL]: {Applies to cross-account migrations} Grants the destination IAM Role access to the S3 bucket (e.g.: *arn:aws:iam::123456789123:role/BatchLoadRole*)
 
 ### Examples
 
 Example that unloads the Timestream table *myTable* in the database *sourcedb* to the folder *unload* in the *timestream-unload-sourcedb-mytable* S3 bucket. It also applies an S3 bucket policy that grants the IAM Role *BatchLoadRole* of account *123456789123* access for the copy, and it partitions the data by day.
 
 ```bash
-python3 unload.py --region eu-west-1 --s3_uri s3://timestream-unload-sourcedb-mytable/unload --database sourcedb --table myTable --iam_role_arn_bucket_policy arn:aws:iam::123456789123:role/BatchLoadRole --partition day
+python3 unload.py --region eu-west-1 --s3_uri s3://timestream-unload-sourcedb-mytable/unload --database sourcedb --table myTable --iam_role_arn arn:aws:iam::123456789123:role/BatchLoadRole --partition day
 ```
 
 ## Getting started with BATCH LOAD
diff --git a/tools/python/copy-table/unload.py b/tools/python/copy-table/unload.py
index aeca0efa..2996a269 100644
--- a/tools/python/copy-table/unload.py
+++ b/tools/python/copy-table/unload.py
@@ -7,7 +7,7 @@ from utils.logger_utils import create_logger
 from utils.s3_utils import s3Utility
 
 
-def main(logger, region, database, table, bucket_s3_uri, from_time, end_time, partition, iam_role_arn_bucket_policy):
+def main(logger, region, database, table, bucket_s3_uri, from_time, end_time, partition, iam_role_arn):
     session = boto3.Session()
 
     if (region is None or len(region) == 0):
@@ -29,7 +29,7 @@ def main(logger, region, database, table, bucket_s3_uri, from_time, end_time, pa
         bucket_s3_uri = s3_utility.create_s3_bucket(bucket_name)
 
     # Create bucket policy for accessing data if IAM Role is provided
-    if (iam_role_arn_bucket_policy):
+    if (iam_role_arn):
         bucket_name = bucket_s3_uri.split('s3://')[1]
         bucket_name = bucket_name.split('/')[0]
 
@@ -38,13 +38,13 @@ def main(logger, region, database, table, bucket_s3_uri, from_time, end_time, pa
             'Statement': [{
                 'Sid': 'PermissionS3CopyGetObj',
                 'Effect': 'Allow',
-                'Principal': {'AWS': f'{iam_role_arn_bucket_policy}'},
+                'Principal': {'AWS': f'{iam_role_arn}'},
                 'Action': ['s3:GetObject'],
                 'Resource': f'arn:aws:s3:::{bucket_name}/*'
             },{
                 'Sid': 'PermissionS3CopyListBucket',
                 'Effect': 'Allow',
-                'Principal': {'AWS': f'{iam_role_arn_bucket_policy}'},
+                'Principal': {'AWS': f'{iam_role_arn}'},
                 'Action': ['s3:ListBucket'],
                 'Resource': f'arn:aws:s3:::{bucket_name}'
             }
@@ -113,7 +113,7 @@ def run_query(logger, client, query):
     parser.add_argument("-f", "--from_time", help="Timestamp from which you want to unload data (included)", required=False)
     parser.add_argument("-e", "--end_time", help="Timestamp to which you want to unload data (not included)", required=False)
     parser.add_argument("-p", "--partition", help="Partition data by 'day', 'month' or 'year'", required=False, choices=['day', 'month', 'year'])
-    parser.add_argument("-i", "--iam_role_arn_bucket_policy", help="IAM Role ARN used in the S3 Bucket policy that is applied to the S3 Bucket where unload data is stored", required=False)
+    parser.add_argument("-i", "--iam_role_arn", help="IAM Role ARN used in the S3 bucket policy that is applied to the S3 bucket where the unloaded data is stored", required=False)
 
     #assign arguments to args variable
     args = parser.parse_args()
@@ -121,7 +121,7 @@
 
     #create logger
     logger = create_logger("Unload Logger")
 
-    main(logger, args.region, args.database, args.table, args.s3_uri, args.from_time, args.end_time, args.partition, args.iam_role_arn_bucket_policy)
+    main(logger, args.region, args.database, args.table, args.s3_uri, args.from_time, args.end_time, args.partition, args.iam_role_arn)
 
     logger.info("COMPLETED SUCCESSFULLY")
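
A note for readers of this patch: the hunks above show the renamed parameter flowing into the two-statement bucket policy, but the diff ends before the policy is attached to the bucket. The following is a minimal, self-contained sketch of how such a policy can be applied with boto3; it is not the repo's code. The bucket name and role ARN reuse the README's example values, the `'Version'` field is the standard IAM policy-language version rather than something shown in the diff, and the assumption is that the repo's `s3Utility` wraps a similar `put_bucket_policy` call.

```python
import json

import boto3

# Example values taken from the README; substitute your own.
bucket_name = "timestream-unload-sourcedb-mytable"
iam_role_arn = "arn:aws:iam::123456789123:role/BatchLoadRole"

# Same two-statement shape as the policy built in unload.py: the
# destination-account role gets object reads plus bucket listing.
bucket_policy = {
    'Version': '2012-10-17',  # standard IAM policy version; not shown in the diff
    'Statement': [{
        'Sid': 'PermissionS3CopyGetObj',
        'Effect': 'Allow',
        'Principal': {'AWS': iam_role_arn},
        'Action': ['s3:GetObject'],
        'Resource': f'arn:aws:s3:::{bucket_name}/*'
    }, {
        'Sid': 'PermissionS3CopyListBucket',
        'Effect': 'Allow',
        'Principal': {'AWS': iam_role_arn},
        'Action': ['s3:ListBucket'],
        'Resource': f'arn:aws:s3:::{bucket_name}'
    }]
}

# put_bucket_policy expects the policy document serialized as a JSON string.
s3_client = boto3.client('s3')
s3_client.put_bucket_policy(Bucket=bucket_name, Policy=json.dumps(bucket_policy))
```

Running `unload.py` with `--iam_role_arn` should produce the same end state; the sketch only makes the underlying S3 API call explicit.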