Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

**Not to be merged but for sharing idea** Diffusive prototype for coastal boundary condition and streamflow DA #544

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
220 changes: 184 additions & 36 deletions src/kernel/diffusive/diffusive.f90

Large diffs are not rendered by default.

2,602 changes: 2,602 additions & 0 deletions src/kernel/diffusive/diffusive_test1.f90

Large diffs are not rendered by default.

7,441 changes: 7,441 additions & 0 deletions src/kernel/diffusive/input/tide_matagorda_city_31d.txt

Large diffs are not rendered by default.

11 changes: 6 additions & 5 deletions src/troute-nwm/src/nwm_routing/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ def nwm_route(
waterbody_types_df,
waterbody_type_specified,
)

if diffusive_network_data: # run diffusive side of a hybrid simulation

LOG.debug("MC computation complete in %s seconds." % (time.time() - start_time_mc))
Expand Down Expand Up @@ -332,7 +332,7 @@ def main_v03(argv):
segment_index = segment_index.append(
pd.Index(diffusive_network_data[tw]['mainstem_segs'])
)

# TODO: This function modifies one of its arguments (waterbodies_df), which is somewhat poor practice given its otherwise functional nature. Consider refactoring
waterbodies_df, q0, t0, lastobs_df, da_parameter_dict = nwm_initial_warmstate_preprocess(
break_network_at_waterbodies,
Expand Down Expand Up @@ -438,11 +438,11 @@ def main_v03(argv):
if showtiming:
route_end_time = time.time()
task_times['route_time'] += route_end_time - route_start_time

# create initial conditions for next loop iteration
q0 = new_nwm_q0(run_results)
waterbodies_df = get_waterbody_water_elevation(waterbodies_df, q0)

# TODO move the conditional call to write_lite_restart to nwm_output_generator.
if "lite_restart" in output_parameters:
nhd_io.write_lite_restart(
Expand All @@ -451,7 +451,7 @@ def main_v03(argv):
t0 + timedelta(seconds = dt * nts),
output_parameters['lite_restart']
)

# No forcing to prepare for the last loop
if run_set_iterator < len(run_sets) - 1:
(
Expand Down Expand Up @@ -895,6 +895,7 @@ async def main_v03_async(argv):
help="Use version 2 or 3 of the input format. Default 3",
)
v_args = v_parser.parse_known_args()

if v_args[0].input_version == 4:
LOG.info("Running main v03 - async looping")
coroutine = main_v03_async(v_args[1])
Expand Down
5 changes: 3 additions & 2 deletions src/troute-nwm/src/nwm_routing/input.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def _input_handler_v03(args):
parity_parameters,
data_assimilation_parameters
)

return (
log_parameters,
preprocessing_parameters,
Expand Down Expand Up @@ -260,12 +260,13 @@ def check_inputs(
preprocessing_parameters['preprocess_source_file']
)


#-----------------------------------------------------------------
# Check that geo_file_path is provided and exists
#-----------------------------------------------------------------
_does_file_exist('geo_file_path',
supernetwork_parameters['geo_file_path'])

#-----------------------------------------------------------------
# Check if mask file is provided. If so, make sure the file exists
# If no mask is provided, write a logging message warning the user
Expand Down
26 changes: 22 additions & 4 deletions src/troute-nwm/src/nwm_routing/preprocess.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,16 +76,17 @@ def nwm_network_preprocess(

# read diffusive domain dictionary from yaml or json
diffusive_domain = nhd_io.read_diffusive_domain(domain_file)

if topobathy_file and use_topobathy:

LOG.debug('Natural cross section data are provided.')

# read topobathy domain netcdf file, set index to 'comid'
# TODO: replace 'comid' with a user-specified indexing variable name.
# ... if for whatever reason there is not a `comid` variable in the
# ... dataframe returned from read_netcdf, then the code would break here.
topobathy_data = (nhd_io.read_netcdf(topobathy_file).set_index('comid'))
# ... dataframe returned from read_netcdf, then the code would break here.
#topobathy_data = (nhd_io.read_netcdf(topobathy_file).set_index('comid'))
topobathy_data = (nhd_io.read_netcdf(topobathy_file).set_index('link'))

# TODO: Request GID make comID variable an integer in their product, so
# we do not need to change variable types, here.
Expand Down Expand Up @@ -126,6 +127,24 @@ def nwm_network_preprocess(
"break_network_at_gages", False
)

# temporary code to build diffusive_domain using given IDs of head segment and tailwater segment of mainstem.
'''
headlink_mainstem = 5790106 #3765742
twlink_mainstem = 3766334

uslink_mainstem = headlink_mainstem
dslink_mainstem = 1 # initial value
mainstem_list =[headlink_mainstem]
while dslink_mainstem != twlink_mainstem:
dslink_mainstem = connections[uslink_mainstem][0]
mainstem_list.append(dslink_mainstem)
uslink_mainstem = dslink_mainstem
diffusive_domain={}
diffusive_domain[twlink_mainstem] = mainstem_list
import pdb; pdb.set_trace()
'''


if not wbody_conn:
# Turn off any further reservoir processing if the network contains no
# waterbodies
Expand Down Expand Up @@ -529,7 +548,6 @@ def nwm_initial_warmstate_preprocess(
# Assemble channel initial states (flow and depth)
# also establish simulation initialization timestamp
#----------------------------------------------------------------------------

start_time = time.time()
LOG.info("setting channel initial states ...")

Expand Down
3 changes: 2 additions & 1 deletion src/troute-routing/troute/routing/compute.py
Original file line number Diff line number Diff line change
Expand Up @@ -1001,6 +1001,7 @@ def compute_diffusive_routing(

# run the simulation
out_q, out_elv = diffusive.compute_diffusive(diffusive_inputs)
#import pdb; pdb.set_trace()

# unpack results
rch_list, dat_all = diff_utils.unpack_output(
Expand All @@ -1009,7 +1010,7 @@ def compute_diffusive_routing(
out_q,
out_elv
)

# mask segments for which we already have MC solution
x = np.in1d(rch_list, diffusive_network_data[tw]['tributary_segments'])

Expand Down
148 changes: 135 additions & 13 deletions src/troute-routing/troute/routing/diffusive_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -472,7 +472,7 @@ def fp_naturalxsec_map(

else:
seg_idx = segID

# how many stations are in the node cross section?
nstations = len(topobathy_data_bytw.loc[seg_idx])

Expand All @@ -483,7 +483,7 @@ def fp_naturalxsec_map(
x_bathy_g[0:nstations, seg, frj] = topobathy_data_bytw.loc[seg_idx].xid_d
z_bathy_g[0:nstations, seg, frj] = topobathy_data_bytw.loc[seg_idx].z
mann_bathy_g[0:nstations, seg, frj] = topobathy_data_bytw.loc[seg_idx].n

# if terminal node of the network, then adjust the cross section z data using
# channel slope and length data
if segID == dbfksegID:
Expand Down Expand Up @@ -547,7 +547,7 @@ def diffusive_input_data_v02(
# upstream boundary condition timestep (sec)
dt_ub_g = dt
# downstream boundary condition timestep (sec)
dt_db_g = dt * qts_subdivisions
dt_db_g = 360.0 # dt * qts_subdivisions
# tributary inflow timestep (sec)
dt_qtrib_g = dt
# time interval at which flow and depth simulations are written out by Tulane diffusive model
Expand All @@ -559,7 +559,7 @@ def diffusive_input_data_v02(
dtini_g = dt
t0_g = 0.0 # simulation start hr **set to zero for Fortran computation
tfin_g = (dt * nsteps)/60/60

# package timestep variables into single array
timestep_ar_g = np.zeros(8)
timestep_ar_g[0]= dtini_g
Expand Down Expand Up @@ -719,6 +719,14 @@ def diffusive_input_data_v02(
# covert data type from integer to float for frnw
dfrnw_g = frnw_g.astype('float')

np.savetxt("./output/frnw_ar.txt", frnw_g, fmt='%10i')
frj= -1
with open("./output/pynw.txt",'a') as pynwtxt:
for x in range(mx_jorder,-1,-1):
for head_segment, reach in ordered_reaches[x]:
frj= frj+1
pynwtxt.write("%s %s\n" %\
(str(frj+1), str(pynw[frj])))
# ---------------------------------------------------------------------------------
# Step 0-5
# Prepare channel geometry data
Expand All @@ -741,7 +749,17 @@ def diffusive_input_data_v02(
mxncomp_g,
nrch_g,
)


np.savetxt("./output/z_ar.txt", z_ar_g, fmt='%15.5f')
np.savetxt("./output/bo_ar.txt", bo_ar_g, fmt='%15.5f')
np.savetxt("./output/traps_ar.txt", traps_ar_g, fmt='%15.5f')
np.savetxt("./output/tw_ar.txt", tw_ar_g, fmt='%15.5f')
np.savetxt("./output/twcc_ar.txt", twcc_ar_g, fmt='%15.5f')
np.savetxt("./output/mann_ar.txt", mann_ar_g, fmt='%15.5f')
np.savetxt("./output/manncc_ar.txt", manncc_ar_g, fmt='%15.5f')
np.savetxt("./output/so_ar.txt", so_ar_g, fmt='%15.6f')
np.savetxt("./output/dx_ar.txt", dx_ar_g, fmt='%15.5f')

# ---------------------------------------------------------------------------------
# Step 0-6
# Prepare initial conditions data
Expand All @@ -765,15 +783,29 @@ def diffusive_input_data_v02(
# set lower limit on initial flow condition
if iniq[seg, frj]<0.0001:
iniq[seg, frj]=0.0001

frj= -1
with open("./output/iniq_ar.txt",'a') as pyqini:
for x in range(mx_jorder,-1,-1):
for head_segment, reach in ordered_reaches[x]:
seg_list= reach["segments_list"]
ncomp= reach["number_segments"]
frj= frj+1
for seg in range(0,ncomp):
segID= seg_list[seg]
dmy1= iniq[seg, frj]
pyqini.write("%s %s %s %s\n" %\
(str(segID), str(frj+1), str(seg+1), str(dmy1)) )
# <- +1 is added to frj for accommodating Fortran indexing.

# ---------------------------------------------------------------------------------
# Step 0-7

# Prepare lateral inflow data
# ---------------------------------------------------------------------------------
nts_ql_g = (
int((tfin_g - t0_g) * 3600.0 / dt_ql_g)
) # the number of the entire time steps of lateral flow data
int((tfin_g - t0_g) * 3600.0 / dt_ql_g)
) # the number of the entire time steps of lateral flow data

qlat_g = np.zeros((nts_ql_g, mxncomp_g, nrch_g))

Expand All @@ -785,17 +817,49 @@ def diffusive_input_data_v02(
qlat,
qlat_g,
)


frj= -1
with open("./output/qlat_ar.txt",'a') as pyql:
#if 1==1:
for x in range(mx_jorder,-1,-1):
for head_segment, reach in ordered_reaches[x]:
seg_list= reach["segments_list"]
ncomp= reach["number_segments"]
frj= frj+1
for seg in range(0,ncomp):
segID= seg_list[seg]
for tsi in range (0,nts_ql_g):
t_min= dt_ql_g/60.0*float(tsi)
dmy1= qlat_g[tsi, seg, frj]
pyql.write("%s %s %s %s\n" %\
(str(frj+1), str(seg+1), str(t_min), str(dmy1)) )
# <- +1 is added to frj for accommodating Fortran indexing.

# ---------------------------------------------------------------------------------
# Step 0-8

# Prepare upstream boundary (top segments of head basin reaches) data
# ---------------------------------------------------------------------------------
ds_seg = []
upstream_flow_array = np.zeros((len(ds_seg), nsteps+1)) # <------ this is a place holder until we figure out what to do with upstream boundary
nts_ub_g = int((tfin_g - t0_g) * 3600.0 / dt_ub_g)
nts_ub_g = int((tfin_g - t0_g) * 3600.0 / dt_ub_g)
ubcd_g = fp_ubcd_map(frnw_g, pynw, nts_ub_g, nrch_g, ds_seg, upstream_flow_array)

frj= -1
with open("./output/ubcd_ar.txt",'a') as ubcd:
#if 1==1:
for x in range(mx_jorder,-1,-1):
for head_segment, reach in ordered_reaches[x]:
seg_list= reach["segments_list"]
ncomp= reach["number_segments"]
frj= frj+1
for tsi in range (0,nts_ub_g):
t_min = dt_ub_g/60.0*float(tsi)
dmy1 = ubcd_g[tsi, frj]
ubcd.write("%s %s %s %s\n" %\
(str(frj+1), str(tsi+1), str(t_min), str(dmy1)) )
# <- +1 is added to frj for accommodating Fortran indexing.

# ---------------------------------------------------------------------------------
# Step 0-9

Expand All @@ -804,8 +868,19 @@ def diffusive_input_data_v02(

# this is a place holder that uses normal depth and the lower boundary.
# we will need to revisit this
nts_db_g = 1
dbcd_g = -np.ones(nts_db_g)
#nts_db_g = 1
#dbcd_g = -np.ones(nts_db_g)
nts_db_g = int((tfin_g - t0_g) * 3600.0 / dt_db_g)
dbcd_g = np.zeros(nts_db_g)

frj= -1
with open("./output/dbcd_ar.txt",'a') as dbcd:
for tsi in range (0, nts_db_g):
t_min = dt_db_g/60.0*float(tsi)
dmy1 = dbcd_g[tsi]
dbcd.write("%s %s\n" %\
(str(t_min), str(dmy1)) )
# <- +1 is added to frj for accommodating Fortran indexing.

# ---------------------------------------------------------------------------------------------
# Step 0-9-2
Expand All @@ -825,7 +900,22 @@ def diffusive_input_data_v02(
# TODO - if one of the tributary segments is a waterbody, it's initial conditions
# will not be in the initial_conditions array, but rather will be in the waterbodies_df array
qtrib_g[0,frj] = initial_conditions.loc[head_segment, 'qu0']


frj= -1
#with open(os.path.join(output_path,"qtrib_ar.txt"),'a') as pyqtrib:
with open("./output/qtrib_ar.txt",'a') as pyqtrib:
for x in range(mx_jorder,-1,-1):
for head_segment, reach in ordered_reaches[x]:
seg_list= reach["segments_list"]
ncomp= reach["number_segments"]
frj= frj+1
for tsi in range (0, nts_qtrib_g):
t_min= dt_qtrib_g/60.0*float(tsi)
dmy1= qtrib_g[tsi, frj]
pyqtrib.write("%s %s %s\n" %\
(str(frj+1), str(t_min), str(dmy1)) )
# <- +1 is added to frj for accommodating Fortran indexing.

# ---------------------------------------------------------------------------------
# Step 0-10

Expand All @@ -840,7 +930,39 @@ def diffusive_input_data_v02(
mxncomp_g,
nrch_g,
dbfksegID)

'''
frj= -1
#with open(os.path.join(output_path,"bathy_data.txt"),'a') as pybathy:
with open("./output/bathy_data.txt",'a') as pybathy:
for x in range(mx_jorder,-1,-1):
for head_segment, reach in ordered_reaches[x]:
seg_list= reach["segments_list"]
ncomp= reach["number_segments"]
frj= frj+1
headsegID= seg_list[0]
if mainstem_seg_list.count(headsegID) > 0:
for seg in range(0,ncomp):
for idp in range(0, size_bathy_g[seg, frj]):
dmy1 = x_bathy_g[idp, seg, frj]
dmy2 = z_bathy_g[idp, seg, frj]
dmy3 = mann_bathy_g[idp, seg, frj]
pybathy.write("%s %s %s %s %s %s\n" %\
(str(idp+1), str(seg+1), str(frj+1), str(dmy1), str(dmy2), str(dmy3)))
frj= -1
#with open(os.path.join(output_path,"bathy_data.txt"),'a') as pybathy:
with open("./output/size_bathy_data.txt",'a') as pysizebathy:
for x in range(mx_jorder,-1,-1):
for head_segment, reach in ordered_reaches[x]:
seg_list= reach["segments_list"]
ncomp= reach["number_segments"]
frj= frj+1
headsegID= seg_list[0]
if mainstem_seg_list.count(headsegID) > 0:
for seg in range(0,ncomp):
dmy1 = size_bathy_g[seg, frj]
pysizebathy.write("%s %s %s\n" %\
(str(seg+1), str(frj+1), str(dmy1)))
'''

# TODO: Call uniform flow lookup table creation kernel
# ---------------------------------------------------------------------------------
Expand Down
Binary file not shown.
Binary file not shown.
Loading