Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[1pt] PR: Update FIM pipeline to process in Alaska #1106

Merged
merged 19 commits into from
Apr 17, 2024
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion data/wbd/generate_pre_clip_fim_huc8.py
Original file line number Diff line number Diff line change
Expand Up @@ -427,7 +427,7 @@ def huc_level_clip_vectors_to_wbd(args):
'the output directory specified as the <outputs_dir> argument.',
usage='''
./generate_pre_clip_fim_huc8.py
-n /data/inputs/pre_clip_huc8/2024_3_20
-n /data/inputs/pre_clip_huc8/24_3_20
-u /data/inputs/huc_lists/included_huc8.lst
-j 6
-o
Expand Down
2 changes: 1 addition & 1 deletion src/bash_variables.env
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ export input_nwm_lakes_Alaska=${inputsDir}/nwm_hydrofabric/nwm_
export input_WBD_gdb=${inputsDir}/wbd/WBD_National_EPSG_5070_WBDHU8_clip_dem_domain.gpkg
export input_WBD_gdb_Alaska=${inputsDir}/wbd/WBD_National_South_Alaska.gpkg
export input_calib_points_dir=${inputsDir}/rating_curve/water_edge_database/calibration_points/
export pre_clip_huc_dir=${inputsDir}/pre_clip_huc8/23_10_17
export pre_clip_huc_dir=${inputsDir}/pre_clip_huc8/24_4_3 # was 23_10_17
export bathymetry_file=${inputsDir}/bathymetry/bathymetry_adjustment_data.gpkg

# input file location with nwm feature_id and recurrence flow values
Expand Down
4 changes: 3 additions & 1 deletion src/check_huc_inputs.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@ def __read_included_files(parent_dir_path):
# I have just replaced the pattern, but later we might want to clean this up.

# filename_patterns = glob(os.path.join(parent_dir_path, 'included_huc*.lst'))
filename_patterns = glob(os.path.join(parent_dir_path, 'included_huc8.lst'))

included_huc_list = 'included_huc8_withAlaska.lst' # previous: 'included_huc8.lst'
filename_patterns = glob(os.path.join(parent_dir_path, included_huc_list))

accepted_hucs_set = set()
for filename in filename_patterns:
Expand Down
24 changes: 18 additions & 6 deletions src/derive_level_paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@

from stream_branches import StreamNetwork
from utils.fim_enums import FIM_exit_codes
from utils.shared_variables import HIGH_STREAM_DENSITY_HUCS, MEDIUM_HIGH_STREAM_DENSITY_HUCS


gpd.options.io_engine = "pyogrio"
Expand All @@ -19,8 +20,8 @@ def Derive_level_paths(
buffer_wbd_streams,
out_stream_network,
branch_id_attribute,
huc_id,
out_stream_network_dissolved=None,
huc_id=None,
headwaters_outfile=None,
catchments=None,
waterbodies=None,
Expand Down Expand Up @@ -57,11 +58,22 @@ def Derive_level_paths(
print("Sorry, no streams exist and processing can not continue. This could be an empty file.")
sys.exit(FIM_exit_codes.UNIT_NO_BRANCHES.value) # will send a 60 back

# values_excluded of 1 and 2 mean that we are dropping stream orders 1 and 2. We are leaving those
# for branch zero.
stream_network = stream_network.exclude_attribute_values(
branch_id_attribute="order_", values_excluded=[1, 2]
)
if huc_id in HIGH_STREAM_DENSITY_HUCS:
print('HUC is in high density HUC list... removing additional stream segments.')
stream_network = stream_network.exclude_attribute_values(
branch_id_attribute="order_", values_excluded=[1, 2, 3, 4]
)
elif huc_id in MEDIUM_HIGH_STREAM_DENSITY_HUCS:
print('HUC is in medium-high density HUC list... removing additional stream segments.')
stream_network = stream_network.exclude_attribute_values(
branch_id_attribute="order_", values_excluded=[1, 2, 3]
)
else:
# values_excluded of 1 and 2 mean that we are dropping stream orders 1 and 2. We are leaving those
# for branch zero.
stream_network = stream_network.exclude_attribute_values(
branch_id_attribute="order_", values_excluded=[1, 2]
)

# if there are no reaches at this point (due to filtering)
if len(stream_network) == 0:
Expand Down
15 changes: 11 additions & 4 deletions src/run_by_branch.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,13 @@ hucUnitLength=${#hucNumber}
huc4Identifier=${hucNumber:0:4}
huc2Identifier=${hucNumber:0:2}

## SET CRS
# Alaska HUCs (HUC2 code 19) are processed in their own projection; all other
# HUCs use the default FIM projection.
# Quote the variable and compare as a string: the original unquoted
# `[ $huc2Identifier -eq 19 ]` raises a `[` error (and aborts under `set -e`)
# if the variable is ever empty or non-numeric.
if [ "$huc2Identifier" = "19" ]; then
    huc_CRS=$ALASKA_CRS
else
    huc_CRS=$DEFAULT_FIM_PROJECTION_CRS
fi

# Skip branch zero
if [ $current_branch_id = $branch_zero_id ]; then
exit 0
Expand All @@ -39,19 +46,19 @@ date -u
## SUBSET VECTORS
echo -e $startDiv"Subsetting vectors to branches $hucNumber $current_branch_id"
echo -e "Querying NWM streams ..."
ogr2ogr -f GPKG -t_srs $DEFAULT_FIM_PROJECTION_CRS -where $branch_id_attribute="$current_branch_id" \
ogr2ogr -f GPKG -t_srs $huc_CRS -where $branch_id_attribute="$current_branch_id" \
$tempCurrentBranchDataDir/nwm_subset_streams_levelPaths_$current_branch_id.gpkg \
$tempHucDataDir/nwm_subset_streams_levelPaths.gpkg
echo -e "Querying NWM catchments ..."
ogr2ogr -f GPKG -t_srs $DEFAULT_FIM_PROJECTION_CRS -where $branch_id_attribute="$current_branch_id" \
ogr2ogr -f GPKG -t_srs $huc_CRS -where $branch_id_attribute="$current_branch_id" \
$tempCurrentBranchDataDir/nwm_catchments_proj_subset_levelPaths_$current_branch_id.gpkg \
$tempHucDataDir/nwm_catchments_proj_subset_levelPaths.gpkg
echo -e "Querying NWM Dissolved Levelpaths headwaters ..."
ogr2ogr -f GPKG -t_srs $DEFAULT_FIM_PROJECTION_CRS -where $branch_id_attribute="$current_branch_id" \
ogr2ogr -f GPKG -t_srs $huc_CRS -where $branch_id_attribute="$current_branch_id" \
$tempCurrentBranchDataDir/nwm_subset_streams_levelPaths_dissolved_headwaters_$current_branch_id.gpkg \
$tempHucDataDir/nwm_subset_streams_levelPaths_dissolved_headwaters.gpkg
#echo -e "Querying NWM headwaters ..."
# ogr2ogr -f GPKG -t_srs $DEFAULT_FIM_PROJECTION_CRS -where $branch_id_attribute="$current_branch_id" \
# ogr2ogr -f GPKG -t_srs $huc_CRS -where $branch_id_attribute="$current_branch_id" \
# $tempCurrentBranchDataDir/nwm_headwaters_$current_branch_id.gpkg \
# $tempHucDataDir/nwm_headwaters.gpkg

Expand Down
32 changes: 27 additions & 5 deletions src/run_unit_wb.sh
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,25 @@ branch_list_lst_file=$tempHucDataDir/branch_ids.lst

branchSummaryLogFile=$outputDestDir/logs/branch/"$hucNumber"_summary_branch.log

## INITIALIZE TOTAL UNIT AND ITS BRANCHES TIMER ##
huc2Identifier=${hucNumber:0:2}


## SET CRS and input DEM domain
# Alaska HUCs (HUC2 code 19) use their own projection, DEM domain input, and
# DEM-domain filename; all other HUCs use the CONUS defaults.
# Quote the variable and compare as a string: the original unquoted
# `[ $huc2Identifier -eq 19 ]` raises a `[` error (and aborts under `set -e`)
# if the variable is ever empty or non-numeric.
if [ "$huc2Identifier" = "19" ]; then
    huc_CRS=$ALASKA_CRS
    huc_input_DEM_domain=$input_DEM_domain_Alaska
    dem_domain_filename=DEM_Domain.gpkg
else
    huc_CRS=$DEFAULT_FIM_PROJECTION_CRS
    huc_input_DEM_domain=$input_DEM_domain
    dem_domain_filename=HUC6_dem_domain.gpkg
fi

echo -e $startDiv"Using CRS: $huc_CRS" ## debug

## INITIALIZE TOTAL TIME TIMER ##
T_total_start
huc_start_time=`date +%s`
date -u
Expand All @@ -28,7 +46,7 @@ cp -a $pre_clip_huc_dir/$hucNumber/. $tempHucDataDir

# Copy necessary files from $inputsDir into $tempHucDataDir to avoid File System Collisions
# For buffer_stream_branches.py
cp $input_DEM_domain $tempHucDataDir
cp $huc_input_DEM_domain $tempHucDataDir
# For usgs_gage_unit_setup.py
cp $inputsDir/usgs_gages/usgs_gages.gpkg $tempHucDataDir
cp $ras_rating_curve_points_gpkg $tempHucDataDir
Expand All @@ -47,7 +65,9 @@ $srcDir/derive_level_paths.py -i $tempHucDataDir/nwm_subset_streams.gpkg \
-t $tempHucDataDir/nwm_catchments_proj_subset_levelPaths.gpkg \
-n $tempHucDataDir/nwm_subset_streams_levelPaths_dissolved_headwaters.gpkg \
-w $tempHucDataDir/nwm_lakes_proj_subset.gpkg \
-wbd $tempHucDataDir/wbd.gpkg
-wbd $tempHucDataDir/wbd.gpkg \
-u $hucNumber


# test if we received a non-zero code back from derive_level_paths.py
#subscript_exit_code=$?
Expand All @@ -74,7 +94,7 @@ python3 $srcDir/associate_levelpaths_with_levees.py -nld $tempHucDataDir/nld_sub

## STREAM BRANCH POLYGONS
echo -e $startDiv"Generating Stream Branch Polygons for $hucNumber"
$srcDir/buffer_stream_branches.py -a $tempHucDataDir/HUC6_dem_domain.gpkg \
$srcDir/buffer_stream_branches.py -a $tempHucDataDir/$dem_domain_filename \
-s $tempHucDataDir/nwm_subset_streams_levelPaths_dissolved.gpkg \
-i $branch_id_attribute \
-d $branch_buffer_distance_meters \
Expand All @@ -99,7 +119,9 @@ echo -e $startDiv"Clipping rasters to branches $hucNumber $branch_zero_id"
[ ! -f $tempCurrentBranchDataDir/dem_meters.tif ] && \
gdalwarp -cutline $tempHucDataDir/wbd_buffered.gpkg -crop_to_cutline -ot Float32 -r bilinear -of "GTiff" \
-overwrite -co "BLOCKXSIZE=512" -co "BLOCKYSIZE=512" -co "TILED=YES" -co "COMPRESS=LZW" \
-co "BIGTIFF=YES" -t_srs $DEFAULT_FIM_PROJECTION_CRS $input_DEM $tempHucDataDir/dem_meters.tif -q
-co "BIGTIFF=YES" -t_srs $huc_CRS $input_DEM $tempHucDataDir/dem_meters.tif

Tcount

## GET RASTER METADATA
echo -e $startDiv"Get DEM Metadata $hucNumber $branch_zero_id"
Expand Down
20 changes: 20 additions & 0 deletions src/utils/shared_variables.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
'Wyoming."],BBOX[24.41,-124.79,49.38,-66.91]],ID["EPSG",5070]]'
)
DEFAULT_FIM_PROJECTION_CRS = os.getenv('DEFAULT_FIM_PROJECTION_CRS')
ALASKA_CRS = os.getenv('ALASKA_CRS')
PREP_CRS = CRS(PREP_PROJECTION)
VIZ_PROJECTION = (
'PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",'
Expand Down Expand Up @@ -79,6 +80,25 @@
# -- Field Names -- #
FIM_ID = 'fimid'

# -- High stream density HUCs that require mitigation (currently just in Alaska) -- #
# HUCs with a stream density of 1.5+. derive_level_paths.py drops stream
# orders 1-4 (instead of the usual 1-2) for these.
HIGH_STREAM_DENSITY_HUCS = {'19020104', '19020402', '19020503', '19020602'}

# HUCs with a stream density between 0.5 and 1.5. derive_level_paths.py drops
# stream orders 1-3 for these. (The 1.0-1.5 vs 0.5-1.0 sub-bands are not
# distinguished here; both receive the same treatment.)
MEDIUM_HIGH_STREAM_DENSITY_HUCS = {
    '19020101',
    '19020102',
    '19020103',
    '19020202',
    '19020301',
    '19020302',
    '19020401',
    '19020501',
    '19020502',
    '19020504',
    '19020505',
    '19020601',
}


# -- Other -- #
CONUS_STATE_LIST = {
"AL",
Expand Down
Loading