Creating a Redshift Cluster using the AWS Python SDK

An example of Infrastructure-as-code

In [1]:
import pandas as pd
import boto3
import json

STEP 0: Make sure you have an AWS secret and access key

  • Create a new IAM user in your AWS account
  • Give it AdministratorAccess, from the "Attach existing policies directly" tab
  • Take note of the access key and secret
  • Edit the file dwh.cfg in the same folder as this notebook and fill in (a full sample file follows this list):
    [AWS]
    KEY= YOUR_AWS_KEY
    SECRET= YOUR_AWS_SECRET
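
For reference, here is a complete sample dwh.cfg with every key this notebook reads; the [DWH] values are placeholders (they match the parameter table below) and should be adapted to your setup:

[AWS]
KEY=YOUR_AWS_KEY
SECRET=YOUR_AWS_SECRET

[DWH]
DWH_CLUSTER_TYPE=multi-node
DWH_NUM_NODES=4
DWH_NODE_TYPE=dc2.large
DWH_CLUSTER_IDENTIFIER=dwhCluster
DWH_DB=dwh
DWH_DB_USER=dwhuser
DWH_DB_PASSWORD=Passw0rd
DWH_PORT=5439
DWH_IAM_ROLE_NAME=dwhRole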

Load DWH Params from a file

In [2]:
import configparser
config = configparser.ConfigParser()
config.read_file(open('dwh.cfg'))

KEY                    = config.get('AWS','KEY')
SECRET                 = config.get('AWS','SECRET')

DWH_CLUSTER_TYPE       = config.get("DWH","DWH_CLUSTER_TYPE")
DWH_NUM_NODES          = config.get("DWH","DWH_NUM_NODES")
DWH_NODE_TYPE          = config.get("DWH","DWH_NODE_TYPE")

DWH_CLUSTER_IDENTIFIER = config.get("DWH","DWH_CLUSTER_IDENTIFIER")
DWH_DB                 = config.get("DWH","DWH_DB")
DWH_DB_USER            = config.get("DWH","DWH_DB_USER")
DWH_DB_PASSWORD        = config.get("DWH","DWH_DB_PASSWORD")
DWH_PORT               = config.get("DWH","DWH_PORT")

DWH_IAM_ROLE_NAME      = config.get("DWH", "DWH_IAM_ROLE_NAME")

pd.DataFrame({"Param":
                  ["DWH_CLUSTER_TYPE", "DWH_NUM_NODES", "DWH_NODE_TYPE", "DWH_CLUSTER_IDENTIFIER", "DWH_DB", "DWH_DB_USER", "DWH_DB_PASSWORD", "DWH_PORT", "DWH_IAM_ROLE_NAME"],
              "Value":
                  [DWH_CLUSTER_TYPE, DWH_NUM_NODES, DWH_NODE_TYPE, DWH_CLUSTER_IDENTIFIER, DWH_DB, DWH_DB_USER, DWH_DB_PASSWORD, DWH_PORT, DWH_IAM_ROLE_NAME]
             })
Out[2]:
   Param                   Value
0  DWH_CLUSTER_TYPE        multi-node
1  DWH_NUM_NODES           4
2  DWH_NODE_TYPE           dc2.large
3  DWH_CLUSTER_IDENTIFIER  dwhCluster
4  DWH_DB                  dwh
5  DWH_DB_USER             dwhuser
6  DWH_DB_PASSWORD         Passw0rd
7  DWH_PORT                5439
8  DWH_IAM_ROLE_NAME       dwhRole

Create clients for IAM, EC2, S3 and Redshift

In [3]:
import boto3

ec2 = boto3.resource('ec2',
                       region_name="us-west-2",
                       aws_access_key_id=KEY,
                       aws_secret_access_key=SECRET
                    )

s3 = boto3.resource('s3',
                       region_name="us-west-2",
                       aws_access_key_id=KEY,
                       aws_secret_access_key=SECRET
                   )

iam = boto3.client('iam',aws_access_key_id=KEY,
                     aws_secret_access_key=SECRET,
                     region_name='us-west-2'
                  )

redshift = boto3.client('redshift',
                       region_name="us-west-2",
                       aws_access_key_id=KEY,
                       aws_secret_access_key=SECRET
                       )

Check out the sample data sources on S3

In [4]:
sampleDbBucket =  s3.Bucket("awssampledbuswest2")
for obj in sampleDbBucket.objects.filter(Prefix="ssbgz"):
    print(obj)
# for obj in sampleDbBucket.objects.all():
#     print(obj)
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/customer0002_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/dwdate.tbl.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/lineorder0000_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/lineorder0001_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/lineorder0002_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/lineorder0003_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/lineorder0004_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/lineorder0005_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/lineorder0006_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/lineorder0007_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/part0000_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/part0001_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/part0002_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/part0003_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/supplier.tbl_0000_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/supplier0001_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/supplier0002_part_00.gz')
s3.ObjectSummary(bucket_name='awssampledbuswest2', key='ssbgz/supplier0003_part_00.gz')
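
The samples are gzipped text files. As a quick sanity check, here is a short sketch (assuming the public bucket is still readable) that stream-decompresses the first few lines of the small dwdate table without downloading the whole file:

import gzip

obj = s3.Object('awssampledbuswest2', 'ssbgz/dwdate.tbl.gz')
# obj.get()['Body'] is a streaming file-like object; gzip can wrap it directly
with gzip.GzipFile(fileobj=obj.get()['Body']) as f:
    for _ in range(3):
        print(f.readline().decode('utf-8', errors='replace').rstrip())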

STEP 1: IAM Role

  • Create an IAM role that allows Redshift to access the S3 bucket (read-only)
In [5]:
from botocore.exceptions import ClientError

# 1.1 Create the role
try:
    print("1.1 Creating a new IAM Role") 
    dwhRole = iam.create_role(
        Path='/',
        RoleName=DWH_IAM_ROLE_NAME,
        Description = "Allows Redshift clusters to call AWS services on your behalf.",
        AssumeRolePolicyDocument=json.dumps(
            {'Statement': [{'Action': 'sts:AssumeRole',
               'Effect': 'Allow',
               'Principal': {'Service': 'redshift.amazonaws.com'}}],
             'Version': '2012-10-17'})
    )    
except Exception as e:
    print(e)
    
    
print("1.2 Attaching Policy")

iam.attach_role_policy(RoleName=DWH_IAM_ROLE_NAME,
                       PolicyArn="arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess"
                      )['ResponseMetadata']['HTTPStatusCode']

print("1.3 Get the IAM role ARN")
roleArn = iam.get_role(RoleName=DWH_IAM_ROLE_NAME)['Role']['Arn']

print(roleArn)
1.1 Creating a new IAM Role
An error occurred (AccessDenied) when calling the CreateRole operation: User: arn:aws:iam::034612322877:user/airflow_redshift_user is not authorized to perform: iam:CreateRole on resource: arn:aws:iam::034612322877:role/dwhRole
1.2 Attaching Policy
---------------------------------------------------------------------------
ClientError                               Traceback (most recent call last)
<ipython-input-5-3596c08f35af> in <module>()
     21 
     22 iam.attach_role_policy(RoleName=DWH_IAM_ROLE_NAME,
---> 23                        PolicyArn="arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess"
     24                       )['ResponseMetadata']['HTTPStatusCode']
     25 

/opt/conda/lib/python3.6/site-packages/botocore/client.py in _api_call(self, *args, **kwargs)
    355                     "%s() only accepts keyword arguments." % py_operation_name)
    356             # The "self" in this scope is referring to the BaseClient.
--> 357             return self._make_api_call(operation_name, kwargs)
    358 
    359         _api_call.__name__ = str(py_operation_name)

/opt/conda/lib/python3.6/site-packages/botocore/client.py in _make_api_call(self, operation_name, api_params)
    659             error_code = parsed_response.get("Error", {}).get("Code")
    660             error_class = self.exceptions.from_code(error_code)
--> 661             raise error_class(parsed_response, operation_name)
    662         else:
    663             return parsed_response

ClientError: An error occurred (AccessDenied) when calling the AttachRolePolicy operation: User: arn:aws:iam::034612322877:user/airflow_redshift_user is not authorized to perform: iam:AttachRolePolicy on resource: role dwhRole

(This cell was run with an IAM user that lacks the iam:CreateRole and iam:AttachRolePolicy permissions, hence the AccessDenied errors; the successful runs later in the notebook used different, sufficiently privileged credentials, which is why the account ID changes from 034612322877 to 988332130976.)

STEP 2: Redshift Cluster

  • Create a RedShift Cluster
  • For the complete list of create_cluster arguments, see the boto3 docs
In [83]:
try:
    response = redshift.create_cluster(        
        #HW
        ClusterType=DWH_CLUSTER_TYPE,
        NodeType=DWH_NODE_TYPE,
        NumberOfNodes=int(DWH_NUM_NODES),

        #Identifiers & Credentials
        DBName=DWH_DB,
        ClusterIdentifier=DWH_CLUSTER_IDENTIFIER,
        MasterUsername=DWH_DB_USER,
        MasterUserPassword=DWH_DB_PASSWORD,
        
        #Roles (for s3 access)
        IamRoles=[roleArn]  
    )
except Exception as e:
    print(e)
An error occurred (ClusterAlreadyExists) when calling the CreateCluster operation: Cluster already exists
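
The bare except above swallows every failure. A more careful sketch (same parameters) treats ClusterAlreadyExists as benign and re-raises anything else, using the error code that botocore attaches to the exception:

from botocore.exceptions import ClientError

try:
    response = redshift.create_cluster(
        ClusterType=DWH_CLUSTER_TYPE,
        NodeType=DWH_NODE_TYPE,
        NumberOfNodes=int(DWH_NUM_NODES),
        DBName=DWH_DB,
        ClusterIdentifier=DWH_CLUSTER_IDENTIFIER,
        MasterUsername=DWH_DB_USER,
        MasterUserPassword=DWH_DB_PASSWORD,
        IamRoles=[roleArn]
    )
except ClientError as e:
    if e.response['Error']['Code'] == 'ClusterAlreadyExists':
        print('Cluster already exists; reusing it.')  # safe to continue
    else:
        raise  # any other error is a real problem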

2.1 Describe the cluster to see its status

  • run this block several times until the cluster status becomes Available (or use the waiter sketch shown after the output below)
In [77]:
def prettyRedshiftProps(props):
    pd.set_option('display.max_colwidth', -1)  # -1 = no truncation (deprecated in newer pandas; use None there)
    keysToShow = ["ClusterIdentifier", "NodeType", "ClusterStatus", "MasterUsername", "DBName", "Endpoint", "NumberOfNodes", 'VpcId']
    x = [(k, v) for k,v in props.items() if k in keysToShow]
    return pd.DataFrame(data=x, columns=["Key", "Value"])

myClusterProps = redshift.describe_clusters(ClusterIdentifier=DWH_CLUSTER_IDENTIFIER)['Clusters'][0]
prettyRedshiftProps(myClusterProps)
Out[77]:
   Key                Value
0  ClusterIdentifier  dwhcluster
1  NodeType           dc2.large
2  ClusterStatus      available
3  MasterUsername     dwhuser
4  DBName             dwh
5  Endpoint           {'Address': 'dwhcluster.csmamz5zxmle.us-west-2.redshift.amazonaws.com', 'Port': 5439}
6  VpcId              vpc-54d40a2c
7  NumberOfNodes      4
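
Instead of re-running the cell by hand, boto3 also provides a waiter that blocks until the cluster is available; a minimal sketch:

waiter = redshift.get_waiter('cluster_available')
waiter.wait(
    ClusterIdentifier=DWH_CLUSTER_IDENTIFIER,
    WaiterConfig={'Delay': 30, 'MaxAttempts': 40}  # poll every 30 s, up to 20 minutes
)
print('Cluster is available')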

2.2 Take note of the cluster endpoint and role ARN

DO NOT RUN THIS unless the cluster status becomes "Available"

In [78]:
DWH_ENDPOINT = myClusterProps['Endpoint']['Address']
DWH_ROLE_ARN = myClusterProps['IamRoles'][0]['IamRoleArn']
print("DWH_ENDPOINT :: ", endpoint)
print("DWH_ROLE_ARN :: ", roleArn)
DWH_ENDPOINT ::  dwhcluster.csmamz5zxmle.us-west-2.redshift.amazonaws.com
DWH_ROLE_ARN ::  arn:aws:iam::988332130976:role/dwhRole

STEP 3: Open an incoming TCP port to access the cluster endpoint

In [84]:
try:
    vpc = ec2.Vpc(id=myClusterProps['VpcId'])
    defaultSg = list(vpc.security_groups.all())[0]
    print(defaultSg)
    defaultSg.authorize_ingress(
        GroupName=defaultSg.group_name,
        CidrIp='0.0.0.0/0',
        IpProtocol='TCP',
        FromPort=int(DWH_PORT),
        ToPort=int(DWH_PORT)
    )
except Exception as e:
    print(e)
ec2.SecurityGroup(id='sg-d6161da0')
An error occurred (InvalidPermission.Duplicate) when calling the AuthorizeSecurityGroupIngress operation: the specified rule "peer: 0.0.0.0/0, TCP, from port: 5439, to port: 5439, ALLOW" already exists
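
Note that 0.0.0.0/0 opens port 5439 to the entire internet, which is acceptable only for a short-lived exercise. A safer sketch restricts ingress to your current public IP, looked up via AWS's checkip service (assuming your machine has a stable public address):

import urllib.request

# ask AWS's checkip service for this machine's public IP
my_ip = urllib.request.urlopen('https://checkip.amazonaws.com').read().decode().strip()

defaultSg.authorize_ingress(
    GroupName=defaultSg.group_name,
    CidrIp='{}/32'.format(my_ip),  # just this machine, instead of 0.0.0.0/0
    IpProtocol='TCP',
    FromPort=int(DWH_PORT),
    ToPort=int(DWH_PORT)
)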

STEP 4: Make sure you can connect to the cluster

In [80]:
%load_ext sql
The sql extension is already loaded. To reload it, use:
  %reload_ext sql
In [81]:
conn_string="postgresql://{}:{}@{}:{}/{}".format(DWH_DB_USER, DWH_DB_PASSWORD, DWH_ENDPOINT, DWH_PORT,DWH_DB)
print(conn_string)
%sql $conn_string
postgresql://dwhuser:Passw0rd@dwhcluster.csmamz5zxmle.us-west-2.redshift.amazonaws.com:5439/dwh
Out[81]:
'Connected: dwhuser@dwh'
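
If you prefer a plain DB-API connection over the ipython-sql magic, a minimal sketch using psycopg2 (assuming it is installed):

import psycopg2

conn = psycopg2.connect(
    host=DWH_ENDPOINT,
    port=DWH_PORT,
    dbname=DWH_DB,
    user=DWH_DB_USER,
    password=DWH_DB_PASSWORD
)
cur = conn.cursor()
cur.execute('SELECT current_user, current_database();')
print(cur.fetchone())  # e.g. ('dwhuser', 'dwh')
conn.close()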

STEP 5: Clean up your resources

DO NOT RUN THIS UNLESS YOU ARE SURE
We will be using these resources in the next exercises

In [85]:
#### CAREFUL!!
#-- Uncomment & run to delete the created resources
redshift.delete_cluster( ClusterIdentifier=DWH_CLUSTER_IDENTIFIER,  SkipFinalClusterSnapshot=True)
#### CAREFUL!!
Out[85]:
{'Cluster': {'AllowVersionUpgrade': True,
  'AutomatedSnapshotRetentionPeriod': 1,
  'AvailabilityZone': 'us-west-2b',
  'ClusterCreateTime': datetime.datetime(2019, 2, 16, 6, 21, 30, 630000, tzinfo=tzutc()),
  'ClusterIdentifier': 'dwhcluster',
  'ClusterParameterGroups': [{'ParameterApplyStatus': 'in-sync',
    'ParameterGroupName': 'default.redshift-1.0'}],
  'ClusterSecurityGroups': [],
  'ClusterStatus': 'deleting',
  'ClusterSubnetGroupName': 'default',
  'ClusterVersion': '1.0',
  'DBName': 'dwh',
  'Encrypted': False,
  'Endpoint': {'Address': 'dwhcluster.csmamz5zxmle.us-west-2.redshift.amazonaws.com',
   'Port': 5439},
  'EnhancedVpcRouting': False,
  'IamRoles': [{'ApplyStatus': 'in-sync',
    'IamRoleArn': 'arn:aws:iam::988332130976:role/dwhRole'}],
  'MasterUsername': 'dwhuser',
  'NodeType': 'dc2.large',
  'NumberOfNodes': 4,
  'PendingModifiedValues': {},
  'PreferredMaintenanceWindow': 'fri:10:30-fri:11:00',
  'PubliclyAccessible': True,
  'Tags': [],
  'VpcId': 'vpc-54d40a2c',
  'VpcSecurityGroups': []},
 'ResponseMetadata': {'HTTPHeaders': {'content-length': '2041',
   'content-type': 'text/xml',
   'date': 'Sat, 16 Feb 2019 07:13:32 GMT',
   'x-amzn-requestid': '5e58b2d8-31ba-11e9-b19b-0945d449b0a9'},
  'HTTPStatusCode': 200,
  'RequestId': '5e58b2d8-31ba-11e9-b19b-0945d449b0a9',
  'RetryAttempts': 0}}
  • run this block several times until the cluster is really deleted (once it is gone, describe_clusters raises a ClusterNotFound error); a waiter-based alternative is sketched after the output below
In [86]:
myClusterProps = redshift.describe_clusters(ClusterIdentifier=DWH_CLUSTER_IDENTIFIER)['Clusters'][0]
prettyRedshiftProps(myClusterProps)
Out[86]:
   Key                Value
0  ClusterIdentifier  dwhcluster
1  NodeType           dc2.large
2  ClusterStatus      deleting
3  MasterUsername     dwhuser
4  DBName             dwh
5  Endpoint           {'Address': 'dwhcluster.csmamz5zxmle.us-west-2.redshift.amazonaws.com', 'Port': 5439}
6  VpcId              vpc-54d40a2c
7  NumberOfNodes      4
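
As with creation, a waiter can replace the manual re-running; this sketch blocks until the cluster is fully gone:

redshift.get_waiter('cluster_deleted').wait(
    ClusterIdentifier=DWH_CLUSTER_IDENTIFIER,
    WaiterConfig={'Delay': 30, 'MaxAttempts': 40}  # poll every 30 s, up to 20 minutes
)
print('Cluster deleted')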
In [87]:
#### CAREFUL!!
#-- Uncomment & run to delete the created resources
iam.detach_role_policy(RoleName=DWH_IAM_ROLE_NAME, PolicyArn="arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess")
iam.delete_role(RoleName=DWH_IAM_ROLE_NAME)
#### CAREFUL!!
Out[87]:
{'ResponseMetadata': {'HTTPHeaders': {'content-length': '200',
   'content-type': 'text/xml',
   'date': 'Sat, 16 Feb 2019 07:13:50 GMT',
   'x-amzn-requestid': '694f8d91-31ba-11e9-9438-d3ce9c613ef8'},
  'HTTPStatusCode': 200,
  'RequestId': '694f8d91-31ba-11e9-9438-d3ce9c613ef8',
  'RetryAttempts': 0}}