Skip to content

Commit

Permalink
Python3
Browse files Browse the repository at this point in the history
  • Loading branch information
pinkeen committed Dec 5, 2019
1 parent 737af1f commit f0e5095
Show file tree
Hide file tree
Showing 5 changed files with 31 additions and 29 deletions.
6 changes: 1 addition & 5 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,6 @@ env:
- LAMBDA_RUNTIME=python2
- LAMBDA_RUNTIME=python3

jobs:
allow_failures:
- env: LAMBDA_RUNTIME=python3

script: >
docker run \
--rm \
Expand All @@ -30,4 +26,4 @@ deploy:
skip_cleanup: true
cleanup: false
on:
tags: true
tags: true
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# MageOps AWS Lambdas for Handling Autoscaling

These Lambdas perform various tasks related to managing ASGs in response
to various events and conditions. They are strongly coupled to
to various events and conditions. They are strongly coupled to
[MageOps infrastructure setup](https://github.com/mageops/ansible-workflow).

## Single Deploy Package
Expand All @@ -16,7 +16,7 @@ the entrypoint will change based on which one is used.
### Generating deploy package

```bash
docker run --rm --tty --volume "$(pwd):/var/app" mageops/aws-lambda-build python2 autoscaling-lambdas-deploy-package
docker run --rm --tty --volume "$(pwd):/var/app" mageops/aws-lambda-build python3 autoscaling-lambdas-deploy-package
```

#### Docker image for building lambdas
Expand Down
28 changes: 15 additions & 13 deletions handle_autoscaling_event.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from __future__ import print_function

import boto3
import os
import json
Expand Down Expand Up @@ -41,21 +43,21 @@ def update_backends(exclude_backend_instance_ids=None, wait_for_finish=False):
update_lambda_name = os.environ['UPDATE_LAMBDA_NAME']

if exclude_backend_instance_ids:
print "Ignoring backend instances %s" % ', '.join(exclude_backend_instance_ids)
print("Ignoring backend instances %s" % ', '.join(exclude_backend_instance_ids))

varnish_hosts = get_ec2_hosts(varnish_instance_filter)
backend_hosts = get_ec2_hosts(backend_instance_filter, exclude_backend_instance_ids)
extra_hosts = get_ec2_hosts(extra_instance_filter, exclude_backend_instance_ids)

print "Varnish hosts to be updated: %s, found using filter %s" % (varnish_hosts, varnish_instance_filter)
print "New backend hosts: %s, found using filter %s" % (backend_hosts, backend_instance_filter)
print "New extra hosts: %s, found using filter %s" % (extra_hosts, extra_instance_filter)
print("Varnish hosts to be updated: %s, found using filter %s" % (varnish_hosts, varnish_instance_filter))
print("New backend hosts: %s, found using filter %s" % (backend_hosts, backend_instance_filter))
print("New extra hosts: %s, found using filter %s" % (extra_hosts, extra_instance_filter))

s3_client = boto3.client('s3')
varnish_key_object = s3_client.get_object(Bucket=varnish_key_bucket, Key=varnish_key_name)
varnish_key = varnish_key_object['Body'].read()

print "Downloaded varnish ssh key from %s/%s" % (varnish_key_bucket, varnish_key_name)
print("Downloaded varnish ssh key from %s/%s" % (varnish_key_bucket, varnish_key_name))

payload = json.dumps({
'varnish_ssh_key': varnish_key,
Expand All @@ -66,10 +68,10 @@ def update_backends(exclude_backend_instance_ids=None, wait_for_finish=False):
})

if wait_for_finish:
print 'Invoking update lambda with wait'
print('Invoking update lambda with wait')
invocation_type = 'RequestResponse'
else:
print 'Invoking update lambda asynchronously'
print('Invoking update lambda asynchronously')
invocation_type = 'Event'

boto3.client('lambda').invoke(
Expand All @@ -80,7 +82,7 @@ def update_backends(exclude_backend_instance_ids=None, wait_for_finish=False):
)

if wait_for_finish:
print 'Update lambda finished'
print('Update lambda finished')


def complete_lifecycle_action(hook, asg, instance_id, action=LIFECYCLE_ACTION_CONTINUE):
Expand All @@ -93,14 +95,14 @@ def complete_lifecycle_action(hook, asg, instance_id, action=LIFECYCLE_ACTION_CO
InstanceId=instance_id
)

print "Asg continue response: %s" % response
print("Asg continue response: %s" % response)


def handle_plain_event(event_type, event_data):
    """Handle a plain (non-lifecycle) autoscaling notification.

    Only a successful instance launch triggers a backend update; any
    other plain event type is logged and ignored.

    :param event_type: the SNS/autoscaling event type string.
    :param event_data: the event payload (unused for launch events,
        kept for a uniform handler signature).
    """
    print('Handling plain event "%s"' % event_type)

    if event_type != LAUNCH_SUCCESSFUL_EVENT:
        print('Unsupported event type, doing nothing')
        return

    # Fire-and-forget: a fresh launch needs no instances excluded and
    # no wait for the update to finish.
    update_backends()
Expand All @@ -112,13 +114,13 @@ def handle_lifecycle_event(event_type, event_data):
asg_name = event_data[KEY_ASG_NAME]
instance_id = event_data[KEY_EC2_INSTANCE_ID]

print 'Handling lifecycle event "%s" / hook "%s"' % (event_type, current_hook)
print('Handling lifecycle event "%s" / hook "%s"' % (event_type, current_hook))

if current_hook == terminate_hook:
update_backends([instance_id], True)
complete_lifecycle_action(current_hook, asg_name, instance_id, LIFECYCLE_ACTION_CONTINUE)
else:
print 'Unsupported lifecycle hook, doing nothing'
print('Unsupported lifecycle hook, doing nothing')


def handle(event, context):
Expand Down
10 changes: 6 additions & 4 deletions import_scaling_handler.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from __future__ import print_function

import boto3
import os
import json
Expand All @@ -7,7 +9,7 @@


def ensure_running(asg_name):
print 'Setting desired capacity of %s to 1' % asg_name
print('Setting desired capacity of %s to 1' % asg_name)
asg_client.set_desired_capacity(
AutoScalingGroupName=asg_name,
DesiredCapacity=1,
Expand All @@ -16,7 +18,7 @@ def ensure_running(asg_name):


def ensure_notrunning(asg_name):
print 'Setting desired capacity of %s to 0' % asg_name
print('Setting desired capacity of %s to 0' % asg_name)
asg_client.set_desired_capacity(
AutoScalingGroupName=asg_name,
DesiredCapacity=0,
Expand All @@ -41,8 +43,8 @@ def handle(event, context):
# if we'd like to check current ASG status that's additional call anyway
# not worth it
if is_import_needed(check_endpoint):
print 'Import instance needed'
print('Import instance needed')
ensure_running(asg_name)
else:
print 'Import not needed'
print('Import not needed')
ensure_notrunning(asg_name)
12 changes: 7 additions & 5 deletions update_varnish_backends.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from __future__ import print_function

import boto3
import os
import json
Expand All @@ -8,9 +10,9 @@
from datetime import datetime

def execute_command(ssh_client, command):
    """Run *command* on the remote host through *ssh_client* and echo output.

    Reads both stdout and stderr to completion so the remote command's
    output lands in the Lambda's CloudWatch logs. Reading the channels
    also blocks until the command finishes.

    :param ssh_client: connected paramiko-style client exposing
        ``exec_command`` — assumed already authenticated (TODO confirm).
    :param command: shell command string to execute remotely.
    """
    print('Executing %s' % command)
    # exec_command returns the (stdin, stdout, stderr) channel triple.
    stdin, stdout, stderr = ssh_client.exec_command(command)
    print(stdout.read(), stderr.read())


def handle(event, context):
Expand All @@ -34,14 +36,14 @@ def handle(event, context):
ssh_client = paramiko.SSHClient()
ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

print 'Connecting to %s...' % host
print('Connecting to %s...' % host)
ssh_client.connect(hostname = host, username = ssh_username, pkey = ssh_key)

print 'Copying new backends vcl to %s ...' % new_vcl_name
print('Copying new backends vcl to %s ...' % new_vcl_name)
sftp = ssh_client.open_sftp()
sftp.putfo(StringIO.StringIO(backend_vcl), new_vcl_name)

print 'Updating vcls...'
print('Updating vcls...')
execute_command(ssh_client, 'sudo /usr/bin/varnish_update_backends %s' % new_vcl_name)

ssh_client.close()
Expand Down

0 comments on commit f0e5095

Please sign in to comment.