
Commit

Merge pull request #2154 from fedspendingtransparency/staging
Sprint 95 Production Deploy
tony-sappe authored Dec 2, 2019
2 parents e27ad14 + 18424ca commit 6887dc7
Showing 129 changed files with 1,676 additions and 2,869 deletions.
7 changes: 7 additions & 0 deletions .env
@@ -20,3 +20,10 @@ ES_HOSTNAME=http://usaspending-es:9200

# ES_CLUSTER_DIR should point to a path outside, or it will be mounted w/Docker
ES_CLUSTER_DIR=../docker_es

# Configuration values for a connection string to a Broker database
# Only necessary for some management commands
BROKER_USER=root
BROKER_PASSWORD=password
BROKER_HOST=broker-db
BROKER_PORT=5432
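
These variables are only needed for certain management commands that reach a Broker database; elsewhere in this diff, docker-compose.yaml assembles them into a single `DATA_BROKER_DATABASE_URL` connection string pointing at a `data_broker` database. A minimal sketch of that composition, assuming the values above are exported in the environment:

```python
import os

# Defaults mirror the sample values added to .env above; real deployments override them.
user = os.environ.get("BROKER_USER", "root")
password = os.environ.get("BROKER_PASSWORD", "password")
host = os.environ.get("BROKER_HOST", "broker-db")
port = os.environ.get("BROKER_PORT", "5432")

# Same shape as the DATA_BROKER_DATABASE_URL defined in docker-compose.yaml below
data_broker_database_url = f"postgresql://{user}:{password}@{host}:{port}/data_broker"
print(data_broker_database_url)
```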
5 changes: 2 additions & 3 deletions .travis.yml
@@ -3,7 +3,6 @@ language: python
cache: pip

python:
- '3.5'
- '3.7'

services:
@@ -41,7 +40,7 @@ before_install:
- npm install [email protected] --global

install:
- if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then travis_retry pip install -r requirements/requirements_3-7.txt; else travis_retry pip install -r requirements/requirements.txt; fi
- travis_retry pip install -r requirements/requirements.txt
- pip install coveralls
# Checkout dependent broker code used to spin up a broker integration test db. Put it in its own folder alongside this repo's code
- echo "Using ${BROKER_REPO_BRANCH} branch from ${BROKER_REPO_URL}"
@@ -73,7 +72,7 @@ before_script:
script:
- cd ${TRAVIS_BUILD_DIR} # run build script out of repo dir
- flake8
- if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check --diff . ; fi
- black --check --diff .
- python manage.py check_for_endpoint_documentation
- pytest --ignore-glob='**/tests/integration/*' --cov=usaspending_api --cov-report= --reuse-db -rsx
- pytest --override-ini=python_files='**/tests/integration/test_*.py **/tests/integration/*_test.py' --cov=usaspending_api --cov-append --cov-report term --cov-report xml:coverage.xml --reuse-db -rsx
55 changes: 29 additions & 26 deletions Dockerfile
@@ -1,44 +1,47 @@
# Basic Dockerfile for the USASpendingAPI

## 0) Add your DATABASE_URL on the ENV line below. Use host.docker.internal instead of localhost (overridden with Docker compose)
##
## 1) Init/run order w/Docker compose:
## docker-compose up usaspending-db (leave running)
## docker-compose up usaspending-db-migrate
## docker-compose up usaspending-db-sql
## docker-compose up usaspending-db-init
## docker-compose up usaspending-es (leave running, elasticsearch)
## Then run/re-run using the db you just created (may need to wait for the DB to be up and listening):
## docker-compose up usaspending-api
##
## Add your DATABASE_URL on the ENV line below. Use host.docker.internal instead of localhost (overridden with Docker compose)

## Optional) Run ad-hoc commands:
# docker build . -t usaspendingapi
# docker run -p 127.0.0.1:8000:8000 usaspendingapi <command>

# Rebuild and run when code in /usaspending-api changes

FROM python:3.5

WORKDIR /dockermount

# For "Wrong sources.list entry or malformed file" re: main/binary-amd64/Packages, revisit
RUN printf "deb http://archive.debian.org/debian/ jessie main\ndeb-src http://archive.debian.org/debian/ jessie main\ndeb http://security.debian.org jessie/updates main\ndeb-src http://security.debian.org jessie/updates main" > /etc/apt/sources.list
# See README.md for docker-compose information

# Install postgres client to access psql for database downloads
RUN printf "deb http://apt.postgresql.org/pub/repos/apt/ jessie-pgdg main" > /etc/apt/sources.list.d/pgdg.list
RUN wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
RUN apt-get update && apt-get install -y postgresql-client-10
FROM centos:7

RUN apt-get update -y
WORKDIR /dockermount

COPY requirements/requirements.txt /dockermount/requirements/requirements.txt
RUN pip install -r requirements/requirements.txt
RUN yum -y update && yum clean all
RUN yum -y install wget gcc openssl-devel bzip2-devel libffi libffi-devel zlib-devel
RUN yum -y groupinstall "Development Tools"

##### Install PostgreSQL 10 client (psql)
RUN yum -y install https://download.postgresql.org/pub/repos/yum/10/redhat/rhel-7-x86_64/pgdg-redhat10-10-2.noarch.rpm
RUN yum -y install postgresql10

##### Building python 3.7
WORKDIR /usr/src
RUN wget --quiet https://www.python.org/ftp/python/3.7.3/Python-3.7.3.tgz
RUN tar xzf Python-3.7.3.tgz
WORKDIR /usr/src/Python-3.7.3
RUN ./configure --enable-optimizations
RUN make altinstall
RUN ln -sf /usr/local/bin/python3.7 /usr/bin/python3
RUN echo "$(python3 --version)"

##### Copy python packages
WORKDIR /dockermount
COPY requirements/ /dockermount/requirements/
RUN python3 -m pip install -r requirements/requirements.txt

##### Copy the rest of the project files into the container
COPY . /dockermount

# Compose overrides DATABASE_URL
##### Compose overrides DATABASE_URL
ENV DATABASE_URL postgres://[email protected]:5432/data_store_api

ENV PYTHONUNBUFFERED=0

EXPOSE 8000
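
The `DATABASE_URL` baked into the image (and overridden by Compose) is the single knob for pointing the API at a Postgres instance. As a rough illustration of how such a URL is typically consumed in a Django project — the repo pins `dj-database-url` in its requirements, though the actual settings code isn't shown in this diff:

```python
import dj_database_url  # dj-database-url==0.5.0 is pinned in the requirements

# Sketch only: parse DATABASE_URL (set by the Dockerfile ENV line or by docker-compose)
# into Django's DATABASES setting.
DATABASES = {
    "default": dj_database_url.config(env="DATABASE_URL", conn_max_age=600)  # conn_max_age is illustrative
}
```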
50 changes: 27 additions & 23 deletions README.md
@@ -11,18 +11,23 @@ _This API is utilized by USAspending.gov to obtain all federal spending data whi
Ensure the following dependencies are installed and working prior to continuing:

### Requirements
You can install and run all USAspending components with just [Docker](https://docs.docker.com/install/) and [Docker Compose](https://docs.docker.com/compose/). To develop, you will require:
- [`python3`](https://docs.python-guide.org/starting/installation/#python-3-installation-guides)
- [`pyenv`](https://github.com/pyenv/pyenv/#installation) using Python 3.5.x
- _NOTE: Read full install docs. `brew install` needs to be followed by additional steps to modify and source your `~/.bash_profile`_
- [`PostgreSQL`](https://www.postgresql.org/download/) 10.x (with a dedicated `data_store_api` database)
- [`direnv`](https://github.com/direnv/direnv#install)
- For Mac OS, be sure to put the hook in your `~/.bash_profile`, not `~/.bashrc`
- [`Docker`](https://docs.docker.com/install/) which will handle the other application dependencies.
- `Bash` or another Unix Shell equivalent
- Bash is available on Windows as [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/install-win10)
- Bash is available on Windows as [Windows Subsystem for Linux](https://docs.microsoft.com/en-us/windows/wsl/install-win10)
- [`Git`](https://git-scm.com/downloads)

#### If not using Docker:
> Using Docker is recommended since it provides a clean environment. Setting up your own local environment requires some technical abilities and experience with modern software tools.
- Command line package manager
- Windows' WSL bash uses `apt-get`
- Mac OS users will use [`Homebrew`](https://brew.sh/)
- Windows' WSL bash uses `apt-get`
- MacOS users will use [`Homebrew`](https://brew.sh/)
- Linux users already know their package manager (yum, apt, pacman, etc.)
- [`PostgreSQL`](https://www.postgresql.org/download/) version 10.x (with a dedicated `data_store_api` database)
- [`Elasticsearch`](https://www.elastic.co/downloads/elasticsearch) version 6.3
- Python 3.7 environment
- Highly recommended to use a virtual environment. There are various tools and associated instructions depending on preferences


### Cloning the Repository
Now, navigate to the base file directory where you will store the USAspending repositories
@@ -40,16 +45,15 @@ There are three documented options for setting up a local database in order to r

#### Option 1: Using a Locally Hosted Postgres Database
Create a Local postgres database called 'data_store_api' and either create a new username and password for the database or use all the defaults. For help, consult:
- ['Postgres Setup Help'](https://medium.com/coding-blocks/creating-user-database-and-adding-access-on-postgresql-8bfcd2f4a91e)
- [Postgres Setup Help](https://medium.com/coding-blocks/creating-user-database-and-adding-access-on-postgresql-8bfcd2f4a91e)

Make sure to grant whatever user you created for the data_store api database superuser permissions or some scripts will not work:

postgres=# ALTER ROLE <<role/user you created>> WITH SUPERUSER;
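
If you would rather script that grant than type it at the `psql` prompt, a small sketch using `psycopg2` (pinned in the project requirements) could look like the following; the role name and superuser connection string are placeholders for whatever you created above:

```python
import psycopg2  # psycopg2-binary is pinned in requirements/requirements.txt

ROLE_NAME = "api_user"  # hypothetical: substitute the role you created for data_store_api

# Connect with an account that already has superuser rights (placeholder credentials).
conn = psycopg2.connect("postgresql://postgres@localhost:5432/data_store_api")
conn.autocommit = True
with conn.cursor() as cur:
    cur.execute(f'ALTER ROLE "{ROLE_NAME}" WITH SUPERUSER;')
conn.close()
```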

#### Option 2: Using the Docker Compose Postgres Database
See below for basic setup instructions. For help with Docker Compose:
- [Docker Installation](https://docs.docker.com/install/)
- [Docker Compose](https://docs.docker.com/compose/)
- [Docker Compose](https://docs.docker.com/compose/)


##### Database Setup and Initialization with Docker Compose
@@ -58,13 +62,13 @@ See below for basic setup instructions. For help with Docker Compose:

- **If you run a local database**, set `POSTGRES_HOST` in `.env` to `host.docker.internal`. `POSTGRES_PORT` should be changed if it isn't 5432.

- `docker-compose up usaspending-db` will create and run a Postgres database in the `POSTGRES_CLUSTER_DIR` specified in the `.env` configuration file. We recommend using a folder *outside* of the usaspending-api project directory so it does not get copied to other containers in subsequent steps.
- `docker-compose up usaspending-db` will create and run a Postgres database in the `POSTGRES_CLUSTER_DIR` specified in the `.env` configuration file. We recommend using a folder *outside* of the usaspending-api project directory so it does not get copied to other containers in subsequent steps.

- `docker-compose run usaspending-manage python -u manage.py migrate` will run Django migrations: [https://docs.djangoproject.com/en/2.2/topics/migrations/](https://docs.djangoproject.com/en/2.2/topics/migrations/).
- `docker-compose run usaspending-manage python3 -u manage.py migrate` will run Django migrations: [https://docs.djangoproject.com/en/2.2/topics/migrations/](https://docs.djangoproject.com/en/2.2/topics/migrations/).

- `docker-compose run usaspending-manage python -u manage.py load_reference_data` will load essential reference data (agencies, program activity codes, CFDA program data, country codes, and others).
- `docker-compose run usaspending-manage python3 -u manage.py load_reference_data` will load essential reference data (agencies, program activity codes, CFDA program data, country codes, and others).

- `docker-compose up usaspending-db-sql`, then `docker-compose up usaspending-db-init` will provision the custom materialized views which are required by certain API endpoints.
- `docker-compose run usaspending-manage python3 -u manage.py matview_runner --dependencies` will provision the materialized views which are required by certain API endpoints.

##### Manual Database Setup
- `docker-compose.yaml` contains the shell commands necessary to set up the database manually, if you prefer to have a more custom environment.
@@ -118,17 +122,17 @@ Once these are satisfied, simply run:
(usaspending-api) $ pytest

#### Required Python Libraries
Create and activate the virtual environment using `venv`, and ensure the right version of Python 3.5.x is being used (the latest RHEL package available for `python35u`: _`3.5.6` as of this writing_)
Create and activate the virtual environment using `venv`, and ensure the right version of Python 3.7.x is being used (the latest RHEL package available for `python36u`: _as of this writing_)

$ pyenv install 3.5.6
$ pyenv local 3.5.6
$ pyenv install 3.7.2
$ pyenv local 3.7.2
$ python -m venv .venv/usaspending-api
$ source .venv/usaspending-api/bin/activate


Your prompt should then look as below to show you are _in_ the virtual environment named `usaspending-api` (_to exit that virtual environment, simply type `deactivate` at the prompt_).

(usaspending-api) $
(usaspending-api) $

[`pip`](https://pip.pypa.io/en/stable/installing/) `install` application dependencies

@@ -155,12 +159,12 @@ To satisfy these dependencies and include execution of these tests, do the follo
1. Ensure you have [`Docker`](https://docs.docker.com/install/) installed and running on your machine
1. Ensure the `Broker` source code is checked out alongside this repo at `../data-act-broker-backend`
1. Ensure you have the `DATA_BROKER_DATABASE_URL` environment variable set, and pointing to a live PostgreSQL server (no database required)
1. Ensure you have built the `Broker` backend Docker image by running:
1. Ensure you have built the `Broker` backend Docker image by running:

```shell
(usaspending-api) $ docker build -t dataact-broker-backend ../data-act-broker-backend
```

_NOTE: Broker source code should be re-fetched and image rebuilt to ensure latest integration is tested_

Re-running the test suite using `pytest -rs` with these dependencies satisfied should yield no more skips of the broker integration tests.
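
For context, broker-dependent tests are generally skipped when these prerequisites are missing, which is why `pytest -rs` reports them. A hypothetical sketch of such a guard (the marker and test below are illustrative, not code from this repo):

```python
import os
import pytest

requires_broker = pytest.mark.skipif(
    not os.environ.get("DATA_BROKER_DATABASE_URL"),
    reason="DATA_BROKER_DATABASE_URL is not set; broker integration tests are skipped",
)

@requires_broker
def test_broker_database_url_looks_valid():
    # Illustrative only: a real integration test would exercise broker data loads here.
    assert os.environ["DATA_BROKER_DATABASE_URL"].startswith("postgresql://")
```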
28 changes: 3 additions & 25 deletions docker-compose.yaml
@@ -22,41 +22,19 @@ services:
build: .
volumes:
- .:/dockermount
command: /bin/sh -c "python -u manage.py shell"
command: python3 -u manage.py shell
environment:
DATABASE_URL: postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/data_store_api
ES_HOSTNAME: ${ES_HOSTNAME}

usaspending-db-sql:
build: .
volumes:
- .:/dockermount
command: >
/bin/sh -c "mkdir $$MATVIEW_SQL_DIR | true &&
python -u usaspending_api/database_scripts/matview_generator/matview_sql_generator.py --dest=$$MATVIEW_SQL_DIR"
environment:
MATVIEW_SQL_DIR: ${MATVIEW_SQL_DIR}

usaspending-db-init:
image: postgres:10.6-alpine
volumes:
- .:/dockermount
command: >
/bin/sh -c "cd /dockermount &&
psql $$DATABASE_URL -c 'CREATE ROLE readonly' || true &&
psql $$DATABASE_URL -f usaspending_api/database_scripts/matviews/functions_and_enums.sql &&
cat $$MATVIEW_SQL_DIR/*.sql | psql $$DATABASE_URL -f -"
environment:
MATVIEW_SQL_DIR: ${MATVIEW_SQL_DIR}
DATABASE_URL: postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/data_store_api
DATA_BROKER_DATABASE_URL: postgresql://${BROKER_USER}:${BROKER_PASSWORD}@${BROKER_HOST}:${BROKER_PORT}/data_broker

usaspending-api:
build: .
volumes:
- .:/dockermount
ports:
- 8000:8000
command: /bin/sh -c "python -u manage.py runserver 0.0.0.0:8000"
command: python3 -u manage.py runserver 0.0.0.0:8000
environment:
DB_SOURCE: postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/data_store_api
DB_R1: postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/data_store_api
6 changes: 1 addition & 5 deletions requirements/caching_requirements.txt
@@ -1,5 +1 @@
django-elasticache==1.0.2
django-redis==4.8.0
pylibmc==1.5.2
pyparsing==2.2.0
packaging==16.8
-r requirements-server.txt
@@ -2,12 +2,10 @@ appdirs==1.4.3
asyncpg==0.18.3
atomicwrites==1.3.0
attrs==19.1.0
black==19.3b0
boto3==1.9.224
botocore==1.12.224
certifi==2019.6.16
chardet==3.0.4
coverage==4.5.4
dj-database-url==0.5.0
django-cors-headers==1.2.2
django-debug-toolbar==1.7
@@ -19,41 +17,31 @@ django-simple-history==1.8.2
django-spaghetti-and-meatballs==0.2.2
Django>=1.11.0,<2.0
djangorestframework==3.9.4
docker==4.1.0
docutils==0.15.2
dredd-hooks==0.2.0
drf-extensions==0.3.1
drf-tracking==1.4.0
elasticsearch==6.3.0
entrypoints==0.3
et-xmlfile==1.0.1
filechunkio==1.8
fiscalyear==0.1.0
flake8==3.7.8
git+https://github.com/fedspendingtransparency/django-mock-queries#egg=django-mock-queries
idna==2.8
importlib-metadata==0.20
jdcal==1.4.1
jmespath==0.9.4
jsonpickle==1.2
Markdown==3.1.1
mccabe==0.6.1
mock==3.0.5
model-mommy==1.6.0
Markdown<3.0
more-itertools==7.2.0
numpy==1.17.2
openpyxl==2.4.7
pandas==0.25.1
pip>=19.2
pluggy==0.12.0
psycopg2-binary==2.7.5
py-gfm==0.1.3
py-gfm==0.1.4
py==1.8.0
pycodestyle==2.5.0
pyflakes==2.1.1
pytest-cov==2.8.1
pytest-django==3.5.1
pytest==4.3.1
python-dateutil==2.7.3
python-json-logger==0.1.9
pytz==2019.2
14 changes: 14 additions & 0 deletions requirements/requirements-contrib.txt
@@ -0,0 +1,14 @@
black==19.10b0
coverage==4.5.4
docker==4.1.0
dredd-hooks==0.2.0
flake8==3.7.8
git+https://github.com/fedspendingtransparency/django-mock-queries#egg=django-mock-queries
mccabe==0.6.1
mock==3.0.5
model-mommy==1.6.0
pycodestyle==2.5.0
pyflakes==2.1.1
pytest-cov==2.8.1
pytest-django==3.5.1
pytest==4.3.1
7 changes: 7 additions & 0 deletions requirements/requirements-server.txt
@@ -0,0 +1,7 @@
django-elasticache==1.0.2
django-redis==4.8.0
packaging==16.8
pylibmc==1.5.2
pyparsing==2.2.0
supervisor==4.0.2
uwsgi==2.0.17.1
