Examples of aiobotocore usage

Below is a list of examples from aiobotocore/examples

Every example is a correct, self-contained Python program.

Basic Usage

Simple put, get, delete example for S3 service:

import asyncio
import aiobotocore

AWS_ACCESS_KEY_ID = "xxx"
AWS_SECRET_ACCESS_KEY = "xxx"


async def go():
    """Upload an object to S3, read back its ACL, then delete it.

    Credentials come from the module-level AWS_* constants; the target
    bucket must already exist in us-west-2.
    """
    bucket = 'dataintake'
    filename = 'dummy.bin'
    folder = 'aiobotocore'
    # Bug fix: the object key must join the folder with the file name
    # (the original text had a garbled placeholder here).
    key = f'{folder}/{filename}'

    session = aiobotocore.get_session()
    async with session.create_client(
            's3', region_name='us-west-2',
            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
            aws_access_key_id=AWS_ACCESS_KEY_ID) as client:
        # upload object to amazon s3
        data = b'\x01' * 1024
        resp = await client.put_object(Bucket=bucket,
                                       Key=key,
                                       Body=data)
        print(resp)

        # getting s3 object properties of file we just uploaded
        resp = await client.get_object_acl(Bucket=bucket, Key=key)
        print(resp)

        # delete object from s3
        resp = await client.delete_object(Bucket=bucket, Key=key)
        print(resp)


# asyncio.run() creates, runs, and closes the event loop in one call;
# asyncio.get_event_loop() outside a running loop is deprecated since
# Python 3.10.
asyncio.run(go())

SQS

Queue Create

This snippet creates a queue, lists the queues, then deletes the queue.

# Boto should get credentials from ~/.aws/credentials or the environment
import asyncio

import aiobotocore


async def go():
    """Create an SQS queue, list all queue URLs, then delete the queue."""
    session = aiobotocore.get_session()
    async with session.create_client('sqs', region_name='us-west-2') as client:

        print('Creating test_queue1')
        created = await client.create_queue(QueueName='test_queue1')
        queue_url = created['QueueUrl']

        listing = await client.list_queues()

        print('Queue URLs:')
        for url in listing.get('QueueUrls', []):
            print(f' {url}')

        print(f'Deleting queue {queue_url}')
        await client.delete_queue(QueueUrl=queue_url)

        print('Done')


def main():
    """Entry point: run the async example, swallowing Ctrl-C."""
    try:
        # asyncio.run() replaces the deprecated get_event_loop() /
        # run_until_complete() pattern and always closes the loop.
        asyncio.run(go())
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()

Producer Consumer

Here is a quick and simple producer/consumer example. The producer will put messages on the queue with a delay of up to 4 seconds between each put. The consumer will read off any messages on the queue, waiting up to 2 seconds for messages to appear before returning.

#!/usr/bin/env python3
"""
aiobotocore SQS Producer Example
"""
import asyncio
import random
import sys

import aiobotocore
import botocore.exceptions

QUEUE_NAME = 'test_queue12'


async def go():
    """Push numbered messages onto QUEUE_NAME with a random delay."""
    # Boto should get credentials from ~/.aws/credentials or the environment
    session = aiobotocore.get_session()
    async with session.create_client('sqs', region_name='us-west-2') as client:
        try:
            response = await client.get_queue_url(QueueName=QUEUE_NAME)
        except botocore.exceptions.ClientError as err:
            # Guard clause: anything other than "queue missing" is re-raised.
            error_code = err.response['Error']['Code']
            if error_code != 'AWS.SimpleQueueService.NonExistentQueue':
                raise
            print(f"Queue {QUEUE_NAME} does not exist")
            sys.exit(1)

        queue_url = response['QueueUrl']

        print('Putting messages on the queue')

        counter = 1
        while True:
            try:
                body = f'Message #{counter}'
                await client.send_message(QueueUrl=queue_url,
                                          MessageBody=body)
                counter += 1

                print(f'Pushed "{body}" to queue')

                # Wait 1-4 seconds between sends.
                await asyncio.sleep(random.randint(1, 4))
            except KeyboardInterrupt:
                break

        print('Finished')


def main():
    """Entry point: run the producer coroutine, swallowing Ctrl-C."""
    try:
        # asyncio.run() replaces the deprecated get_event_loop() /
        # run_until_complete() pattern and always closes the loop.
        asyncio.run(go())
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()
#!/usr/bin/env python3
"""
aiobotocore SQS Consumer Example
"""
import asyncio
import sys

import aiobotocore
import botocore.exceptions

QUEUE_NAME = 'test_queue12'


async def go():
    """Poll QUEUE_NAME for messages, printing and deleting each one."""
    # Boto should get credentials from ~/.aws/credentials or the environment
    session = aiobotocore.get_session()
    async with session.create_client('sqs', region_name='us-west-2') as client:
        try:
            response = await client.get_queue_url(QueueName=QUEUE_NAME)
        except botocore.exceptions.ClientError as err:
            if err.response['Error']['Code'] == \
                    'AWS.SimpleQueueService.NonExistentQueue':
                # Consistency: f-string, matching the producer example.
                print(f"Queue {QUEUE_NAME} does not exist")
                sys.exit(1)
            else:
                raise

        queue_url = response['QueueUrl']

        print('Pulling messages off the queue')

        while True:
            try:
                # This loop won't spin really fast as there is
                # essentially a sleep in the receive_message call
                response = await client.receive_message(
                    QueueUrl=queue_url,
                    WaitTimeSeconds=2,
                )

                if 'Messages' in response:
                    for msg in response['Messages']:
                        print(f'Got msg "{msg["Body"]}"')
                        # Need to remove msg from queue or else it'll reappear
                        await client.delete_message(
                            QueueUrl=queue_url,
                            ReceiptHandle=msg['ReceiptHandle']
                        )
                else:
                    print('No messages in queue')
            except KeyboardInterrupt:
                break

        print('Finished')


def main():
    """Entry point: run the consumer coroutine, swallowing Ctrl-C."""
    try:
        # asyncio.run() replaces the deprecated get_event_loop() /
        # run_until_complete() pattern and always closes the loop.
        asyncio.run(go())
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()

DynamoDB

Table Creation

When you create a DynamoDB table, it can take quite a while (especially if you add a few secondary indexes). Instead of polling describe_table yourself, boto3 came up with “waiters” that will do all the polling for you. The following snippet shows how to wait for a DynamoDB table to be created in an async way.

# Boto should get credentials from ~/.aws/credentials or the environment
import uuid
import asyncio

import aiobotocore


async def go():
    """Create a randomly-named DynamoDB table and wait until it exists."""
    session = aiobotocore.get_session()
    async with session.create_client('dynamodb', region_name='us-west-2') as client:
        # Create random table name
        table_name = f'aiobotocore-{uuid.uuid4()}'

        print('Requesting table creation...')
        attribute_definitions = [
            {'AttributeName': 'testKey', 'AttributeType': 'S'},
        ]
        key_schema = [
            {'AttributeName': 'testKey', 'KeyType': 'HASH'},
        ]
        throughput = {'ReadCapacityUnits': 10, 'WriteCapacityUnits': 10}
        await client.create_table(
            TableName=table_name,
            AttributeDefinitions=attribute_definitions,
            KeySchema=key_schema,
            ProvisionedThroughput=throughput
        )

        print("Waiting for table to be created...")
        # The 'table_exists' waiter polls describe_table for us.
        waiter = client.get_waiter('table_exists')
        await waiter.wait(TableName=table_name)
        print(f"Table {table_name} created")


def main():
    """Entry point: run the table-creation coroutine, swallowing Ctrl-C."""
    try:
        # asyncio.run() replaces the deprecated get_event_loop() /
        # run_until_complete() pattern and always closes the loop.
        asyncio.run(go())
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()

Batch Insertion

Now if you have a massive amount of data to insert into Dynamo, I would suggest using an EMR data pipeline (there's even an example for exactly this). But if you are stubborn, here is an example of inserting lots of items into Dynamo (it’s not really that complicated once you’ve read it).

What the code does is generates items (e.g. item0, item1, item2…) and writes them to a table “test” against a primary partition key called “pk” (with 5 read and 5 write units, no auto-scaling).

The batch_write_item method only takes a max of 25 items at a time, so the script computes 25 items, writes them, then does it all over again.

After Dynamo has had enough, it will start throttling you and return any items that have not been written in the response. Once the script is being throttled, it will start sleeping for 5 seconds until the failed items have been successfully written, after that it will exit.

# Boto should get credentials from ~/.aws/credentials or the environment
import asyncio

import aiobotocore


def get_items(start_num, num_items):
    """
    Generate a sequence of dynamo items

    :param start_num: Start index
    :type start_num: int
    :param num_items: Number of items
    :type num_items: int
    :return: List of dictionaries
    :rtype: list of dict
    """
    # Comprehension + f-string: more idiomatic than the append loop,
    # and consistent with the f-strings used by the other examples.
    return [{'pk': {'S': f'item{i}'}}
            for i in range(start_num, start_num + num_items)]


def create_batch_write_structure(table_name, start_num, num_items):
    """
    Create item structure for passing to batch_write_item

    :param table_name: DynamoDB table name
    :type table_name: str
    :param start_num: Start index
    :type start_num: int
    :param num_items: Number of items
    :type num_items: int
    :return: dictionary of tables to write to
    :rtype: dict
    """
    # Wrap each generated item in the PutRequest envelope expected by
    # batch_write_item, keyed by the destination table.
    put_requests = [{'PutRequest': {'Item': entry}}
                    for entry in get_items(start_num, num_items)]
    return {table_name: put_requests}


async def go():
    """Batch-write items into the 'test' table 25 at a time until the
    provisioned write capacity throttles us, then drain the leftover
    unprocessed items and verify the final item landed.

    NOTE(review): the outer loop deliberately breaks after the first
    throttled batch has been drained, so the script stops writing at
    that point rather than continuing indefinitely.
    """
    session = aiobotocore.get_session()
    async with session.create_client('dynamodb', region_name='us-west-2') as client:
        # Assumes a table named 'test' with partition key 'pk' already
        # exists in us-west-2 — TODO confirm before running.
        table_name = 'test'

        print('Writing to dynamo')
        start = 0
        while True:
            # Loop adding 25 items to dynamo at a time
            # (25 is the batch_write_item per-request maximum).
            request_items = create_batch_write_structure(table_name, start, 25)
            response = await client.batch_write_item(
                RequestItems=request_items
            )
            if len(response['UnprocessedItems']) == 0:
                print('Wrote 25 items to dynamo')
            else:
                # Hit the provisioned write limit
                print('Hit write limit, backing off then retrying')
                await asyncio.sleep(5)

                # Items left over that haven't been inserted
                # ('UnprocessedItems' has the same shape as RequestItems,
                # so it can be resubmitted as-is).
                unprocessed_items = response['UnprocessedItems']
                print('Resubmitting items')
                # Loop until unprocessed items are written
                while len(unprocessed_items) > 0:
                    response = await client.batch_write_item(
                        RequestItems=unprocessed_items
                    )
                    # If any items are still left over, add them to the
                    # list to be written
                    unprocessed_items = response['UnprocessedItems']

                    # If there are items left over, we could do with
                    # sleeping some more
                    if len(unprocessed_items) > 0:
                        print('Backing off for 5 seconds')
                        await asyncio.sleep(5)

                # Inserted all the unprocessed items, exit loop
                print('Unprocessed items successfully inserted')
                break

            start += 25

        # See if DynamoDB has the last item we inserted
        # (start + 24 is the last index of the final 25-item batch).
        final_item = 'item' + str(start + 24)
        print(f'Item "{final_item}" should exist')

        response = await client.get_item(
            TableName=table_name,
            Key={'pk': {'S': final_item}}
        )
        print(f'Response: {response["Item"]}')


def main():
    """Entry point: run the batch-insertion coroutine, swallowing Ctrl-C."""
    try:
        # asyncio.run() replaces the deprecated get_event_loop() /
        # run_until_complete() pattern and always closes the loop.
        asyncio.run(go())
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()