This repository has been archived by the owner on Nov 3, 2021. It is now read-only.

Merge pull request #1342 from mozilla/python_3_upgrade
Python 3 upgrade
pwnbus authored Jul 8, 2019
2 parents 5bb7f4b + ccea6f7 commit 7e3d139
Showing 144 changed files with 1,607 additions and 1,617 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -1,6 +1,6 @@
language: python
python:
- "2.7"
- "3.6"
sudo: required
services:
- docker
12 changes: 11 additions & 1 deletion CHANGELOG
@@ -5,6 +5,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)

## [Unreleased]

## [v3.0.0] - 2019-07-08
### Added
- Support for Python3

### Removed
- Support for Python2
- Usage of boto (boto3 now preferred)


## [v2.0.1] - 2019-07-08
### Fixed
- Ensure all print statements use parenthesis
@@ -123,7 +132,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Added checks on sending SQS messages to only accept intra-account messages
- Improved docker performance and disk space requirements

[Unreleased]: https://github.com/mozilla/MozDef/compare/v2.0.1...HEAD
[Unreleased]: https://github.com/mozilla/MozDef/compare/v3.0.0...HEAD
[v3.0.0]: https://github.com/mozilla/MozDef/compare/v2.0.1...v3.0.0
[v2.0.1]: https://github.com/mozilla/MozDef/compare/v2.0.0...v2.0.1
[v2.0.0]: https://github.com/mozilla/MozDef/compare/v1.40.0...v2.0.0
[v1.40.0]: https://github.com/mozilla/MozDef/compare/v1.40.0...v1.39.0
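The v3.0.0 entry above also notes that legacy boto usage was dropped in favor of boto3. As a rough sketch of what that swap looks like for SQS — not code from this commit; the queue name and region are made up — the equivalent send looks roughly like this:

# Legacy boto (Python 2 era), shown only for contrast
# import boto.sqs
# from boto.sqs.message import Message
# conn = boto.sqs.connect_to_region('us-west-2')
# queue = conn.get_queue('mozdef-alerts')
# queue.write(Message(body='{"summary": "test"}'))

# boto3 equivalent on Python 3
import boto3

sqs = boto3.resource('sqs', region_name='us-west-2')
queue = sqs.get_queue_by_name(QueueName='mozdef-alerts')   # hypothetical queue name
queue.send_message(MessageBody='{"summary": "test"}')
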
2 changes: 2 additions & 0 deletions Makefile
@@ -66,6 +66,7 @@ build: build-from-cwd

.PHONY: build-from-cwd
build-from-cwd: ## Build local MozDef images (use make NO_CACHE=--no-cache build to disable caching)
docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(BUILD_MODE) $(PARALLEL) $(NO_CACHE) base
docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(BUILD_MODE) $(PARALLEL) $(NO_CACHE)

.PHONY: build-from-github
@@ -76,6 +77,7 @@ build-from-github: ## Build local MozDef images from the github branch (use mak

.PHONY: build-tests
build-tests: ## Build end-to-end test environment only
docker-compose -f docker/compose/docker-compose-tests.yml -p test-$(NAME) $(NO_CACHE) $(BUILD_MODE) base
docker-compose -f docker/compose/docker-compose-tests.yml -p test-$(NAME) $(NO_CACHE) $(BUILD_MODE)

.PHONY: stop
20 changes: 10 additions & 10 deletions alerts/actions/dashboard_geomodel.py
@@ -85,30 +85,30 @@ def onMessage(self, message):
whois = IPWhois(source_ip).lookup_whois()
whois_str = whois['nets'][0]['description']
source_ip_isp = whois_str.replace('\n', ', ').replace('\r', '')
new_ip_info = u'{} ({})'.format(source_ip, source_ip_isp)
new_ip_info = '{} ({})'.format(source_ip, source_ip_isp)
except Exception:
new_ip_info = u'{}'.format(source_ip)
new_ip_info = '{}'.format(source_ip)

new_location_str = u""
new_location_str = ""
if city.lower() == 'unknown':
new_location_str += u'{0}'.format(country)
new_location_str += '{0}'.format(country)
else:
new_location_str += u'{0}, {1}'.format(city, country)
new_location_str += '{0}, {1}'.format(city, country)

event_timestamp = toUTC(message['events'][0]['documentsource']['details']['event_time'])
event_day = event_timestamp.strftime('%B %d, %Y')
summary = u'On {0} (UTC), did you login from {1} ({2})?'.format(event_day, new_location_str, source_ip)
summary = 'On {0} (UTC), did you login from {1} ({2})?'.format(event_day, new_location_str, source_ip)

previous_city = message['details']['previous_locality_details']['city']
previous_country = message['details']['previous_locality_details']['country']
if previous_city.lower() == 'unknown':
previous_location_str = u'{0}'.format(previous_country)
previous_location_str = '{0}'.format(previous_country)
else:
previous_location_str = u'{0}, {1}'.format(previous_city, previous_country)
previous_location_str = '{0}, {1}'.format(previous_city, previous_country)

alert_record = {
'alert_id': b2a_hex(os.urandom(15)),
'alert_code': b2a_hex(self.alert_classname),
'alert_id': b2a_hex(os.urandom(15)).decode(),
'alert_code': b2a_hex(self.alert_classname.encode()).decode(),
'user_id': auth_full_username,
'risk': self.config['risk'],
'summary': summary,
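The `b2a_hex` edits above are a bytes/str fix rather than a cosmetic one: on Python 3 `binascii.b2a_hex` takes and returns `bytes`, so the classname has to be encoded first and the result decoded before it can go into a JSON-serializable alert record. A minimal sketch of the pattern, separate from MozDef's actual record format (the classname below is hypothetical):

from binascii import b2a_hex
import os

alert_id = b2a_hex(os.urandom(15)).decode()               # bytes -> str, e.g. '9f2c...'
alert_code = b2a_hex('AlertGeoModel'.encode()).decode()   # str input must be encoded first
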
2 changes: 1 addition & 1 deletion alerts/alert_actions_worker.py
@@ -47,7 +47,7 @@ def on_message(self, body, message):
# just to be safe..check what we were sent.
if isinstance(body, dict):
bodyDict = body
elif isinstance(body, str) or isinstance(body, unicode):
elif isinstance(body, str):
try:
bodyDict = json.loads(body) # lets assume it's json
except ValueError as e:
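The `isinstance` change above works because Python 3 has no separate `unicode` type: `str` is already text, and the old second check would raise `NameError`. A small sketch of the same dispatch, assuming the body is either a dict or a JSON string (`parse_body` is an illustrative helper, not MozDef code):

import json

def parse_body(body):
    # On Python 3 a single str check covers what str + unicode covered on Python 2.
    if isinstance(body, dict):
        return body
    if isinstance(body, str):
        try:
            return json.loads(body)
        except ValueError:
            return {'raw': body}
    return {}

print(parse_body('{"summary": "test event"}'))   # {'summary': 'test event'}
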
2 changes: 1 addition & 1 deletion alerts/celeryconfig.py
@@ -72,7 +72,7 @@
alert_class = getattr(alert_module, alert_classname)
app.register_task(alert_class())
except ImportError as e:
print("Error importing {}").format(alert_namespace)
print("Error importing {}".format(alert_namespace))
print(e)
pass
except Exception as e:
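The one-line change above fixes a real bug on Python 3, where `print` is a function: the old form applied `.format()` to `print`'s return value (`None`), so the placeholder was never filled and an `AttributeError` followed. Moving the format call inside the parentheses, as a quick illustration (the module name is made up):

alert_namespace = 'alerts.example_alert'   # hypothetical value

# Broken on Python 3: print() returns None, then None.format(...) raises AttributeError
# print("Error importing {}").format(alert_namespace)

# Fixed: build the string first, then print it
print("Error importing {}".format(alert_namespace))
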
4 changes: 2 additions & 2 deletions alerts/feedback_events.py
@@ -31,9 +31,9 @@ def onEvent(self, event):
user = event['_source']['details']['alert_information']['user_id']
event_summary = event['_source']['summary']
event_date = event['_source']['details']['alert_information']['date']
summary = u"{} escalated alert within single-sign on (SSO) dashboard. Event Date: {} Summary: \"{}\"".format(user, event_date, event_summary)
summary = "{} escalated alert within single-sign on (SSO) dashboard. Event Date: {} Summary: \"{}\"".format(user, event_date, event_summary)

for alert_code, tag in self._config.iteritems():
for alert_code, tag in self._config.items():
if event['_source']['details']['alert_information']['alert_code'] == alert_code:
tags.append(tag)

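`dict.iteritems()` no longer exists on Python 3; `dict.items()` returns a view and is the drop-in replacement used here and in several files below. A tiny sketch with a made-up alert_code-to-tag mapping:

config = {'geomodel': 'geo', 'duo_bypass': 'duo'}   # hypothetical mapping

for alert_code, tag in config.items():   # .iteritems() would raise AttributeError on Python 3
    print(alert_code, tag)
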
2 changes: 1 addition & 1 deletion alerts/lib/alert_plugin_set.py
@@ -6,7 +6,7 @@ class AlertPluginSet(PluginSet):

def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)

return plugin_class.onMessage(message), metadata
23 changes: 11 additions & 12 deletions alerts/lib/alerttask.py
@@ -18,12 +18,14 @@
from collections import Counter
from celery import Task
from celery.utils.log import get_task_logger
from config import RABBITMQ, ES, ALERT_PLUGINS

from mozdef_util.utilities.toUTC import toUTC
from mozdef_util.elasticsearch_client import ElasticsearchClient
from mozdef_util.query_models import TermMatch, ExistsMatch

sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../"))
from lib.config import RABBITMQ, ES, ALERT_PLUGINS

sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from lib.alert_plugin_set import AlertPluginSet

@@ -35,7 +37,7 @@ def keypaths(nested):
""" return a list of nested dict key paths
like: [u'_source', u'details', u'program']
"""
for key, value in nested.iteritems():
for key, value in nested.items():
if isinstance(value, collections.Mapping):
for subkey, subvalue in keypaths(value):
yield [key] + subkey, subvalue
@@ -123,6 +125,9 @@ def parse_config(self, config_filename, config_keys):
temp_value = getConfig(config_key, "", config_filename)
setattr(self.config, config_key, temp_value)

def close_connections(self):
self.mqConn.release()

def _discover_task_exchange(self):
"""Use configuration information to understand the message queue protocol.
return: amqp, sqs
@@ -344,11 +349,7 @@ def searchEventsAggregated(self, aggregationPath, samplesLimit=5):
for i in Counter(aggregationValues).most_common():
idict = {"value": i[0], "count": i[1], "events": [], "allevents": []}
for r in results:
if (
getValueByPath(r["_source"], aggregationPath).encode(
"ascii", "ignore"
) == i[0]
):
if getValueByPath(r["_source"], aggregationPath) == i[0]:
# copy events detail into this aggregation up to our samples limit
if len(idict["events"]) < samplesLimit:
idict["events"].append(r)
@@ -510,11 +511,9 @@ def tagEventsAlert(self, events, alertResultES):
event["_source"]["alert_names"] = []
event["_source"]["alert_names"].append(self.determine_alert_classname())

self.es.save_event(
index=event["_index"], body=event["_source"], doc_id=event["_id"]
)
# We refresh here to ensure our changes to the events will show up for the next search query results
self.es.refresh(event["_index"])
self.es.save_event(index=event["_index"], body=event["_source"], doc_id=event["_id"])
# We refresh here to ensure our changes to the events will show up for the next search query results
self.es.refresh(event["_index"])
except Exception as e:
self.log.error("Error while updating events in ES: {0}".format(e))

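Dropping `.encode("ascii", "ignore")` in `searchEventsAggregated` above matters for correctness, not just brevity: on Python 3 `encode()` produces `bytes`, and `bytes` never compare equal to `str`, so keeping the call would have silently broken the aggregation match. Comparing the strings directly, roughly (the values below are hypothetical):

aggregation_value = 'user@example.com'   # hypothetical value read from an event
bucket_key = 'user@example.com'

print(aggregation_value.encode('ascii', 'ignore') == bucket_key)   # False on Python 3: bytes vs str
print(aggregation_value == bucket_key)                             # True
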
5 changes: 3 additions & 2 deletions alerts/lib/deadman_alerttask.py
@@ -1,9 +1,10 @@
from alerttask import AlertTask
from .alerttask import AlertTask


class DeadmanAlertTask(AlertTask):

def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
results = self.main_query.execute(self.es, indices=self.event_indices, size=1)
return results
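The `from .alerttask import AlertTask` change follows PEP 328: Python 3 dropped implicit relative imports, so a module inside the alerts/lib package has to name its sibling explicitly. Roughly, as a sketch:

# alerts/lib/deadman_alerttask.py (sketch)
# Python 2 tolerated:  from alerttask import AlertTask   (implicit relative import)
# Python 3 requires an explicit form, e.g.:
from .alerttask import AlertTask   # explicit relative import
# or an absolute import such as `from lib.alerttask import AlertTask`, if that path is importable
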
8 changes: 5 additions & 3 deletions alerts/plugins/ip_source_enrichment.py
@@ -8,6 +8,7 @@
import os
import re

import functools
import netaddr


@@ -23,10 +24,11 @@ def _find_ip_addresses(string):
ipv6_rx = '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'

ipv4 = re.findall(ipv4_rx, string)
ipv6 = map(
ipv6_map = map(
lambda match: match[0] if isinstance(match, tuple) else match,
re.findall(ipv6_rx, string))

ipv6 = [x for x in ipv6_map]
return ipv4 + ipv6


@@ -42,11 +44,11 @@ def find_ips(value):

if isinstance(value, list) or isinstance(value, tuple):
found = [find_ips(item) for item in value]
return reduce(add, found, [])
return functools.reduce(add, found, [])

if isinstance(value, dict):
found = [find_ips(item) for item in value.values()]
return reduce(add, found, [])
return functools.reduce(add, found, [])

return []

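Two Python 3 differences drive the edits above: `map()` now returns a lazy iterator instead of a list (hence materializing it before concatenating with the IPv4 matches), and `reduce()` moved out of the builtins into `functools`. A condensed illustration with made-up inputs:

import functools
from operator import add

matches = map(str.strip, [' 10.0.0.1 ', ' 2001:db8::1 '])   # an iterator on Python 3, not a list
ips = [m for m in matches]                                   # materialize before list concatenation

nested = [['10.0.0.1'], ['2001:db8::1'], []]
flat = functools.reduce(add, nested, [])                      # reduce() is no longer a builtin
print(ips, flat)
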
2 changes: 1 addition & 1 deletion alerts/ssh_access_signreleng.py
@@ -28,7 +28,7 @@ def main(self):

for exclusion in self.config['exclusions']:
exclusion_query = None
for key, value in exclusion.iteritems():
for key, value in exclusion.items():
phrase_exclusion = PhraseMatch(key, value)
if exclusion_query is None:
exclusion_query = phrase_exclusion
2 changes: 1 addition & 1 deletion benchmarking/workers/json2Mozdef.py
@@ -15,7 +15,7 @@
from requests_futures.sessions import FuturesSession
from multiprocessing import Process, Queue
import logging
from Queue import Empty
from queue import Empty
from requests.packages.urllib3.exceptions import ClosedPoolError
import time

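The import fix above follows the stdlib renames of PEP 3108: Python 2's `Queue` module is spelled `queue` on Python 3, while the classes and exceptions keep their names. For example:

from queue import Empty, Queue   # Python 2 spelling was: from Queue import Empty

q = Queue()
try:
    q.get(timeout=0.1)
except Empty:
    print('queue was empty')
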
8 changes: 4 additions & 4 deletions bot/irc/mozdefbot.py
@@ -147,7 +147,7 @@ def formatAlert(jsonDictIn):
return colorify('{0}: {1} {2}'.format(
severity,
colors['blue'] + category + colors['normal'],
summary.encode('ascii', 'replace')
summary
))


@@ -219,7 +219,7 @@ def priv_handler(client, actor, recipient, message):
ip = netaddr.IPNetwork(field)[0]
if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
whois = IPWhois(ip).lookup_whois()
description = whois['nets'][0]['description'].encode('string_escape')
description = whois['nets'][0]['description']
self.client.msg(
recipient, "{0} description: {1}".format(field, description))
else:
@@ -290,7 +290,7 @@ def on_message(self, body, message):
# just to be safe..check what we were sent.
if isinstance(body, dict):
bodyDict = body
elif isinstance(body, str) or isinstance(body, unicode):
elif isinstance(body, str):
try:
bodyDict = json.loads(body) # lets assume it's json
except ValueError as e:
@@ -402,7 +402,7 @@ def initConfig():

# Our config parser stomps out the '#' so we gotta readd
channelkeys = {}
for key, value in options.channelkeys.iteritems():
for key, value in options.channelkeys.items():
if not key.startswith('#'):
key = '#{0}'.format(key)
channelkeys[key] = value
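Removing `.encode('string_escape')` above is required rather than stylistic: the `string_escape` codec only exists on Python 2, so the call raises `LookupError` on Python 3, and the bot now keeps the raw whois text. If escaping were ever still wanted, `unicode_escape` is the closest analogue — a sketch with a hypothetical description:

description = 'Example ISP\nAS64496'   # hypothetical whois description containing a newline

escaped = description.encode('unicode_escape').decode()
print(escaped)   # Example ISP\nAS64496
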
3 changes: 2 additions & 1 deletion bot/slack/bot_plugin_set.py
@@ -1,5 +1,6 @@
import os
import pynsive
import importlib

from mozdef_util.utilities.logger import logger

@@ -32,7 +33,7 @@ def identify_plugins(self, enabled_plugins):
continue

module_obj = pynsive.import_module(found_module)
reload(module_obj)
importlib.reload(module_obj)
plugin_class_obj = module_obj.Command()
logger.info('Plugin {0} registered to receive command with {1}'.format(module_name, plugin_class_obj.command_name))
plugins.append(
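`reload()` stopped being a builtin on Python 3, which is why the plugin loader above now imports `importlib` and calls `importlib.reload()`. A minimal sketch:

import importlib
import json

json = importlib.reload(json)   # a bare reload(json) would raise NameError on Python 3
print(json.dumps({'reloaded': True}))
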
2 changes: 1 addition & 1 deletion bot/slack/commands/ip_whois.py
@@ -16,7 +16,7 @@ def handle_command(self, parameters):
ip = netaddr.IPNetwork(ip_token)[0]
if (not ip.is_loopback() and not ip.is_private() and not ip.is_reserved()):
whois = IPWhois(ip).lookup_whois()
description = str(whois['nets'][0]['description']).encode('string_escape')
description = whois['nets'][0]['description']
response += "{0} description: {1}\n".format(ip_token, description)
else:
response += "{0}: hrm...loopback? private ip?\n".format(ip_token)
2 changes: 1 addition & 1 deletion bot/slack/mozdefbot.py
@@ -49,7 +49,7 @@ def on_message(self, body, message):
# just to be safe..check what we were sent.
if isinstance(body, dict):
body_dict = body
elif isinstance(body, str) or isinstance(body, unicode):
elif isinstance(body, str):
try:
body_dict = json.loads(body) # lets assume it's json
except ValueError as e:
2 changes: 1 addition & 1 deletion bot/slack/slack_bot.py
@@ -50,7 +50,7 @@ def delegate_command(self, message_text):

if command == '!help':
response = "\nHelp is on it's way...try these:\n"
for command_name, plugin in self.plugins.iteritems():
for command_name, plugin in self.plugins.items():
response += "\n{0} -- {1}".format(
command_name,
plugin['help_text']
18 changes: 9 additions & 9 deletions cloudy_mozdef/cloudformation/mozdef-alert-developer.yml
@@ -23,7 +23,7 @@ Resources:
S3Bucket: public.us-west-2.security.allizom.org
S3Key: mozdef-lambda-layer/layer-latest.zip
CompatibleRuntimes:
- python2.7
- python3.6
LicenseInfo: 'MPL 2.0'
LambdalertIAMRole:
Type: AWS::IAM::Role
@@ -36,16 +36,16 @@
Service: lambda.amazonaws.com
Action: sts:AssumeRole
ManagedPolicyArns:
- arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole
AlertWritersEnv:
- arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole
AlertWritersEnv:
Type: "AWS::Lambda::Function"
Properties:
Properties:
Handler: "lambdalert.handle"
Role:
Fn::GetAtt:
Role:
Fn::GetAtt:
- "LambdalertIAMRole"
- "Arn"
Code:
Code:
S3Bucket: public.us-west-2.security.allizom.org
S3Key: mozdef-lambda-layer/function-latest.zip
Layers:
@@ -55,9 +55,9 @@
OPTIONS_ESSERVERS: !Ref ESUrl
OPTIONS_MQPROTOCOL: sqs
VpcConfig:
SecurityGroupIds:
SecurityGroupIds:
- !Ref MozDefSecurityGroup
SubnetIds: !Ref PublicSubnetIds
ReservedConcurrentExecutions: 1
Runtime: "python2.7"
Runtime: "python3.6"
Timeout: 120