Merge pull request #2862 from fedspendingtransparency/qat
Sprint 120 Production Deploy
tony-sappe authored Nov 30, 2020
2 parents f21f9aa + 1e76b85 commit b2346ba
Showing 68 changed files with 2,849 additions and 599 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,6 +1,7 @@
 *.pyc
 *.swp
 *.env
+.env
 .cache
 .coverage
 .DS_Store
@@ -25,8 +25,7 @@ Returns loan spending details of Federal Accounts receiving supplemental funding
 
 {
     "filter": {
-        "def_codes": ["L", "M", "N", "O", "P"],
-        "award_type_codes": ["07", "08"]
+        "def_codes": ["L", "M", "N", "O", "P"]
     },
     "pagination": {
         "limit": 10,
@@ -25,8 +25,7 @@ Returns loan spending details of Object Classes receiving supplemental funding b
 
 {
     "filter": {
-        "def_codes": ["L", "M", "N", "O", "P"],
-        "award_type_codes": ["07", "08"]
+        "def_codes": ["L", "M", "N", "O", "P"]
     },
     "pagination": {
         "limit": 10,
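For context, a minimal client-side sketch of the request body documented in the two loan endpoint contracts above, after `award_type_codes` was dropped from the example filter. The endpoint path shown here (a disaster loans route) is an assumption for illustration only; it does not appear in this diff.

```python
# Hypothetical call illustrating the revised request body; the URL path is assumed,
# not taken from this diff.
import requests

payload = {
    "filter": {"def_codes": ["L", "M", "N", "O", "P"]},  # award_type_codes no longer included
    "pagination": {"limit": 10},
}
response = requests.post(
    "https://api.usaspending.gov/api/v2/disaster/federal_account/loans/",  # assumed path
    json=payload,
    timeout=30,
)
response.raise_for_status()
print(response.json())
```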
@@ -0,0 +1,21 @@
FORMAT: 1A
HOST: https://api.usaspending.gov

# Placeholder [/api/v2/reporting/placeholder/]

Description of the endpoint as a whole not taking into account the different HTTP methods.

## GET

Description of the endpoint using the above HTTP method.


+ Response 200 (application/json)
+ Attributes
+ `status` (required, string)

+ Body

{
"status": "success"
}
1 change: 1 addition & 0 deletions usaspending_api/api_docs/markdown/endpoints.md
@@ -140,6 +140,7 @@ The currently available endpoints are listed in the following table.
 |[/api/v2/references/naics/](/api/v2/references/naics/)|GET| Returns all Tier 1 (2-digit) NAICS and related, relevant data. |
 |[/api/v2/references/submission_periods/](/api/v2/references/submission_periods/)|GET| Returns a list of all available submission periods with essential information about start and end dates. |
 |[/api/v2/references/toptier_agencies/](/api/v2/references/toptier_agencies/)|GET| Returns all toptier agencies and related, relevant data. |
+|[/api/v2/reporting/placeholder/](/api/v2/reporting/placeholder/)|POST| Temp Placeholder. Ignore and rmove |
 |[/api/v2/search/new_awards_over_time/](/api/v2/search/new_awards_over_time/)|POST| Returns a list of time periods with the new awards in the appropriate period within the provided time range |
 |[/api/v2/search/spending_by_award/](/api/v2/search/spending_by_award/)|POST| Returns the fields of the filtered awards |
 |[/api/v2/search/spending_by_award_count/](/api/v2/search/spending_by_award_count/)|POST| Returns the number of awards in each award type (Contracts, IDV, Loans, Direct Payments, Grants, and Other) |
4 changes: 4 additions & 0 deletions usaspending_api/common/elasticsearch/search_wrappers.py
@@ -100,3 +100,7 @@ class TransactionSearch(_Search):
 
 class AwardSearch(_Search):
     _index_name = f"{settings.ES_AWARDS_QUERY_ALIAS_PREFIX}*"
+
+
+class AccountSearch(_Search):
+    _index_name = f"{settings.ES_COVID19_FABA_QUERY_ALIAS_PREFIX}*"
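A rough sketch of the wrapper pattern this file uses, assuming `_Search` extends `elasticsearch_dsl.Search` and binds the class-level alias as the default index; the alias string and field name below are invented for illustration.

```python
# Simplified stand-in for the project's _Search base class, not the actual implementation.
from elasticsearch_dsl import Search


class _Search(Search):
    _index_name = None  # subclasses bind a wildcard query alias

    def __init__(self, **kwargs):
        kwargs.setdefault("index", self._index_name)
        super().__init__(**kwargs)


class AccountSearch(_Search):
    _index_name = "covid19-faba-query-*"  # assumed alias pattern


# Usage sketch: build a filtered query and inspect the generated request body.
search = AccountSearch().filter("term", disaster_emergency_fund_code="L")  # field name assumed
print(search.to_dict())
```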
13 changes: 13 additions & 0 deletions usaspending_api/common/helpers/fiscal_year_helpers.py
@@ -111,6 +111,19 @@ def generate_fiscal_date_range(min_date: datetime, max_date: datetime, frequency
         )
         current_date = current_date + relativedelta(months=interval)
 
+    # check if max_date is in new period
+    final_period = {
+        "fiscal_year": generate_fiscal_year(max_date),
+        "fiscal_quarter": generate_fiscal_quarter(max_date),
+        "fiscal_month": generate_fiscal_month(max_date),
+    }
+    if final_period["fiscal_year"] > date_range[-1]["fiscal_year"]:
+        date_range.append(final_period)
+    elif interval == 3 and final_period["fiscal_quarter"] != date_range[-1]["fiscal_quarter"]:
+        date_range.append(final_period)
+    elif interval == 1 and final_period != date_range[-1]:
+        date_range.append(final_period)
+
     return date_range
 
 
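The added block guards against the loop ending one period short of `max_date`. Below is a self-contained sketch of that check, with assumed helper implementations that follow the federal fiscal calendar (fiscal year starts October 1); these stand-ins are illustrations, not the project's exact functions.

```python
from datetime import datetime


# Assumed helper implementations for illustration (federal fiscal year begins Oct 1).
def generate_fiscal_year(date: datetime) -> int:
    return date.year + 1 if date.month >= 10 else date.year


def generate_fiscal_quarter(date: datetime) -> int:
    return ((date.month - 10) % 12) // 3 + 1  # Oct-Dec = Q1, ..., Jul-Sep = Q4


def generate_fiscal_month(date: datetime) -> int:
    return (date.month - 10) % 12 + 1  # Oct = 1, ..., Sep = 12


# Example: a monthly loop that stopped at April 2020 (FY2020, Q3, month 7)
# would otherwise miss a max_date of May 2, 2020.
last_generated = {"fiscal_year": 2020, "fiscal_quarter": 3, "fiscal_month": 7}
max_date = datetime(2020, 5, 2)
final_period = {
    "fiscal_year": generate_fiscal_year(max_date),
    "fiscal_quarter": generate_fiscal_quarter(max_date),
    "fiscal_month": generate_fiscal_month(max_date),
}
interval = 1  # monthly frequency
if final_period["fiscal_year"] > last_generated["fiscal_year"]:
    print("append: max_date falls in a later fiscal year")
elif interval == 3 and final_period["fiscal_quarter"] != last_generated["fiscal_quarter"]:
    print("append: max_date falls in a new fiscal quarter")
elif interval == 1 and final_period != last_generated:
    print("append: max_date falls in a new fiscal month")  # fires for this example
```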
2 changes: 1 addition & 1 deletion usaspending_api/common/helpers/s3_helpers.py
@@ -54,4 +54,4 @@ def multipart_upload(bucketname, regionname, source_path, keyname):
     bytes_per_chunk = max(int(math.sqrt(5242880) * math.sqrt(source_size)), 5242880)
     config = boto3.s3.transfer.TransferConfig(multipart_chunksize=bytes_per_chunk)
     transfer = boto3.s3.transfer.S3Transfer(s3client, config)
-    transfer.upload_file(source_path, bucketname, Path(keyname).name)
+    transfer.upload_file(source_path, bucketname, Path(keyname).name, extra_args={"ACL": "bucket-owner-full-control"})
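For reference, a standalone sketch of the same boto3 transfer-layer call with the new ACL. The bucket, key, region, and file names are placeholders; `extra_args` is the keyword accepted by `S3Transfer.upload_file`.

```python
# Sketch of a multipart upload that grants the bucket owner full control,
# mirroring the change above. Bucket, key, and region values are placeholders.
import math
import os
from pathlib import Path

import boto3
from boto3.s3.transfer import S3Transfer, TransferConfig

source_path = "/tmp/example_download.zip"  # placeholder local file
bucketname = "example-bulk-download-bucket"  # placeholder bucket
keyname = "generated/example_download.zip"  # placeholder key

s3client = boto3.client("s3", region_name="us-east-1")
source_size = os.stat(source_path).st_size
bytes_per_chunk = max(int(math.sqrt(5242880) * math.sqrt(source_size)), 5242880)

config = TransferConfig(multipart_chunksize=bytes_per_chunk)
transfer = S3Transfer(s3client, config)
transfer.upload_file(
    source_path,
    bucketname,
    Path(keyname).name,
    extra_args={"ACL": "bucket-owner-full-control"},
)
```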
30 changes: 19 additions & 11 deletions usaspending_api/common/management/commands/matview_runner.py
@@ -83,17 +83,17 @@ def handle(self, *args, **options):
         if not self.no_cleanup:
             self.cleanup()
 
+    @staticmethod
+    def clean_or_create_dir(dir_path):
+        if dir_path.exists():
+            logger.warning(f"Clearing dir {dir_path}")
+            recursive_delete(dir_path)
+        dir_path.mkdir()
+
     def generate_matview_sql(self):
         """Convert JSON definition files to SQL"""
-        if self.matview_dir.exists():
-            logger.warning("Clearing dir {}".format(self.matview_dir))
-            recursive_delete(self.matview_dir)
-        self.matview_dir.mkdir()
-
-        if self.matview_chunked_dir.exists():
-            logger.warning("Clearing dir {}".format(self.matview_chunked_dir))
-            recursive_delete(self.matview_chunked_dir)
-        self.matview_chunked_dir.mkdir()
+        self.clean_or_create_dir(self.matview_dir)
+        self.clean_or_create_dir(self.matview_chunked_dir)
 
         # IF using this for operations, DO NOT LEAVE hardcoded `python3` in the command
         # Create main list of Matview SQL files
@@ -102,7 +102,12 @@ def generate_matview_sql(self):
 
         # Create SQL files for Chunked Universal Transaction Matviews
         for matview, config in self.chunked_matviews.items():
-            exec_str = f"python3 {CHUNKED_MATVIEW_GENERATOR_FILE} --quiet --file {config['json_filepath']} --chunk-count {self.chunk_count}"
+            exec_str = (
+                f"python3 {CHUNKED_MATVIEW_GENERATOR_FILE} --quiet"
+                f" --file {config['json_filepath']}"
+                f" --chunk-count {self.chunk_count}"
+                f" --dest={self.matview_chunked_dir}"
+            )
             subprocess.call(exec_str, shell=True)
 
     def cleanup(self):
@@ -133,7 +138,10 @@ def create_views(self):
         if "universal_transaction_matview" in self.chunked_matviews:
             logger.info("Inserting data from universal_transaction_matview chunks into single table.")
             call_command(
-                "combine_universal_transaction_matview_chunks", chunk_count=self.chunk_count, index_concurrency=20,
+                "combine_universal_transaction_matview_chunks",
+                chunk_count=self.chunk_count,
+                index_concurrency=20,
+                matview_dir=self.matview_chunked_dir,
             )
 
         for view in OVERLAY_VIEWS:
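A standalone sketch of the directory-reset helper factored out in this file, with `shutil.rmtree` standing in for the project's `recursive_delete` utility and placeholder directory names.

```python
# Simplified stand-in for Command.clean_or_create_dir; shutil.rmtree replaces
# the project's recursive_delete helper.
import logging
import shutil
from pathlib import Path

logger = logging.getLogger(__name__)


def clean_or_create_dir(dir_path: Path) -> None:
    """Delete the directory if it exists, then recreate it empty."""
    if dir_path.exists():
        logger.warning(f"Clearing dir {dir_path}")
        shutil.rmtree(dir_path)
    dir_path.mkdir()


# Usage mirroring generate_matview_sql(); directory names are placeholders.
clean_or_create_dir(Path("matviews"))
clean_or_create_dir(Path("chunked_matviews"))
```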