
Commit 56d14b0

Use all survey data, added conditional macros
TCallaghan2 committed Aug 20, 2024
1 parent c116f47 commit 56d14b0
Showing 9 changed files with 124 additions and 69 deletions.
2 changes: 1 addition & 1 deletion PreProcess/HabCamData5mmbin.m
@@ -139,7 +139,7 @@ function HabCamData5mmbin(refYear, domain, appendResults)
% accumulate 15 to 18
% sum n(k+24) - n(k+30) into k=25
region = GetRegion(isOctave, lat_t(k,:), lon_t(k,:), stratum_t(k,:));
if region>0
if region >= 0 % TODO: filtering does not match the recruit processing, which applies no region filter; use all regions
if isOctave
k25 = sum((M(n(k)+24:n(k)+30, svCol)));
density = [(M(n(k):n(k)+23, svCol));k25];
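
A rough Python sketch (not from the repository) of the effect of relaxing the test from region > 0 to region >= 0; the record layout and region codes are hypothetical and only illustrate why the change uses all survey data:

    # Hypothetical survey records tagged with a region code by something like GetRegion.
    records = [
        {"tow": "A1", "region": 0},   # previously dropped by "region > 0"
        {"tow": "B2", "region": 2},
        {"tow": "C3", "region": 3},
    ]

    old_keep = [r for r in records if r["region"] > 0]    # 2 records kept
    new_keep = [r for r in records if r["region"] >= 0]   # 3 records kept: all survey data is used
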
8 changes: 4 additions & 4 deletions PreProcess/ProcessRecruitData.m
@@ -81,7 +81,7 @@ function ProcessRecruitData(yrStart, yrEnd, domain)
flnm='Data/RecruitsUnadjusted.csv';
header='"decimal year", "latitude", "longitude", "UTM x", "UTM y", "bottom depth(m)","recruits per m^2"';
fprintf('Writing to %s\n\n', flnm)
writecsv(M,flnm,['%g, %g, %g, %g, %g, %g, %e'],header);
writecsv(M,flnm,['%f, %f, %f, %f, %f, %f, %e'],header);


%XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
@@ -157,7 +157,7 @@ function ProcessRecruitData(yrStart, yrEnd, domain)
M=M(j,:);
flnm='Data/RecruitsRockStrataAdjustment.csv';
header='"decimal year", "latitude", "longitude", "UTM x", "UTM y","bottom depth(m)","recruits per sq m raw","Is Rock Strata","recruits per sq m adjusted"';
writecsv(M,flnm,['%g, %g, %g, %g, %g, %g, %e, %i ,%e' ],header);
writecsv(M,flnm,['%f, %f, %f, %f, %f, %f, %e, %i ,%e' ],header);
fprintf('Writing to %s\n\n', flnm)

close all;
@@ -220,7 +220,7 @@ function ProcessRecruitData(yrStart, yrEnd, domain)
flnm=['Data/Recruits', domain, '.csv'];
header='"decimal year", "utm x", "utm y", "bottom depth(m)","recruits per sq m"';
fprintf('Writing to %s\n', flnm)
writecsv(M,flnm,'%g, %f, %f, %f, %e',header);
writecsv(M,flnm,'%f, %f, %f, %f, %e',header);

fprintf('Reading from %s\n', flnm);
if isOctave
@@ -242,7 +242,7 @@ function ProcessRecruitData(yrStart, yrEnd, domain)
end
flnm=['Data/Recruits',int2str(yr),domain,'.csv'];
header='"decimal year", "utm x", "utm y", "bottom depth(m)","recruits per sq m"';
writecsv(M,flnm,'%g, %f, %f, %f, %e',header);
writecsv(M,flnm,'%f, %f, %f, %f, %e',header);
fprintf('Writing to %s. Number of records %d\n', flnm, size(M,1))
end
else
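
The format-specifier change above matters because %g keeps only six significant digits by default, which truncates decimal years and coordinate values, while %f keeps six digits after the decimal point. A small Python check; printf-style formatting follows the same C rules as MATLAB/Octave fprintf, and the values are made up:

    decimal_year = 2011.456789
    utm_x = 412345.678

    print("%g, %g" % (decimal_year, utm_x))   # 2011.46, 412346        (6 significant digits)
    print("%f, %f" % (decimal_year, utm_x))   # 2011.456789, 412345.678000
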
2 changes: 1 addition & 1 deletion PythonScripts/GUI/GeoSAM/GeoSams.py
@@ -74,7 +74,7 @@
# arg# 1 2 3 4 5
def Interpolate(ukCfgFile, domainName, obsFile, gridFile, zArg, procID, retDict):
ex = os.path.join('UKsrc', 'UK')
outputFile = 'proc_' + obsFile + '_' + str(procID) +'.txt'
outputFile = 'proc_' + obsFile + '_' + str(procID).zfill(3) +'.txt'
cmd = [ex, ukCfgFile, domainName, obsFile + '.csv', gridFile, zArg]
with open(outputFile, "w") as outfile:
result = subprocess.run(cmd, stdout=outfile)
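
Zero-padding the process ID with zfill(3) keeps the per-process output names fixed-width so they sort cleanly; a quick sketch, with a hypothetical observation-file stem (the GUI passes the real one):

    obs_file = "X_Y_EBMS_MA"   # hypothetical stem for illustration only
    for proc_id in (7, 42, 103):
        old_name = 'proc_' + obs_file + '_' + str(proc_id) + '.txt'
        new_name = 'proc_' + obs_file + '_' + str(proc_id).zfill(3) + '.txt'
        print(old_name, '->', new_name)
    # proc_X_Y_EBMS_MA_7.txt   -> proc_X_Y_EBMS_MA_007.txt
    # proc_X_Y_EBMS_MA_42.txt  -> proc_X_Y_EBMS_MA_042.txt
    # proc_X_Y_EBMS_MA_103.txt -> proc_X_Y_EBMS_MA_103.txt
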
2 changes: 1 addition & 1 deletion PythonScripts/GUI/GeoSAM/MainInputFrame.py
@@ -86,7 +86,7 @@ def __init__(self, container, friend, tsPerYear, selectedOutputs, maxYears):
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
survDataFrame = ttk.LabelFrame(self, text='Survey Data Files, Environment Variables', style='SAMS.TFrame')
self.dredgeDataFile = SubFrameElement(self, survDataFrame, 'Dredge Survey Data File', 'dredgetowbysize7917', 0, 0, 1, width=35)
self.habCamDataFile = SubFrameElement(self, survDataFrame, 'HabCam Survey Data File', 'Habcam_BySegment_2000_2011-2023',1, 0, 1, width=35)
self.habCamDataFile = SubFrameElement(self, survDataFrame, 'HabCam Survey Data File', 'Habcam_BySegment_2000_2011-2023_v2',1, 0, 1, width=35)
#-------------------------------------------------------------------------------------------
self.setDredgeDataButton = ttk.Button(survDataFrame, text='Set DredgeData', style="BtnBluGrn.TLabel", command=self.SetDredgeFileName)
self.setDredgeDataButton.grid(row=0, column=3)
37 changes: 0 additions & 37 deletions SRC/IORoutines.f90
@@ -303,43 +303,6 @@ subroutine Write_Column_CSV(n,f,header,file_name,append)

endsubroutine Write_Column_CSV

!--------------------------------------------------------------------------------------------------
!--------------------------------------------------------------------------------------------------
!subroutine Write_CSV_H(n,m,f,file_name,nndim)
! Purpose: Write values of a matrix (f) to a csv file in exponential format.
! Inputs:
! n (integer) number of rows in f
! m (integer) number of columns in f
! f (real(dp)) values to write to csv file
! file_name (character(72)) filename to write f to in csv format
!--------------------------------------------------------------------------------------------------
! Keston Smith (IBSS corp) June-July 2021
!--------------------------------------------------------------------------------------------------
subroutine Write_CSV_H(n,m,f,file_name,nndim,header)
use globals
implicit none
integer, intent(in):: n,m,nndim
real(dp), intent(in):: f(nndim,*)
character(*), intent(in)::file_name,header
integer k
character(fname_len) buf,fmtstr
k=m-1
write(buf,'(I6)')k
!
! format for exponential format
!
fmtstr='('//trim(adjustl(buf))//'(ES14.7 : ", ") (ES14.7 : ))'
!fmtstr='('//trim(adjustl(buf))//'(E : ", ") (E : ))'
open(write_dev,file=file_name)
write(write_dev,'(A)') trim(adjustl( header))
do k=1,n
!write(write_dev,trim(fmtstr)) f(k,1:m)
write(write_dev,fmtstr) f(k,1:m)
enddo
close(write_dev)
endsubroutine Write_CSV_H
!--------------------------------------------------------------------------------------------------


!--------------------------------------------------------------------------------------------------
! subroutine Read_CSV
95 changes: 78 additions & 17 deletions SRC/ScallopMortality.f90
@@ -134,8 +134,13 @@ module Mortality_Mod
real(dp), PRIVATE, allocatable :: F_mort(:)
real(dp), PRIVATE, allocatable :: landings_by_num(:)
real(dp), PRIVATE, allocatable :: landings_wgt_grams(:)
real(dp), PRIVATE, allocatable :: lpue(:) !, dredge_time(:), dredge_area(:)
real(dp), PRIVATE, allocatable :: lpue(:)
real(dp), PRIVATE, allocatable :: fishing_effort(:)
#ifdef ACCUM_LANDINGS
real(dp), PRIVATE, allocatable :: landings_accum(:)
real(dp), PRIVATE, allocatable :: landings_wgt_accum(:)
real(dp), PRIVATE, allocatable :: lpue_accum(:)
#endif

real(dp), PRIVATE :: expl_scallops_psqm_at_size(num_size_classes)
real(dp), PRIVATE :: landings_at_size(num_size_classes)
@@ -161,8 +166,11 @@ subroutine Destructor()

deallocate(landings_by_num)
deallocate(landings_wgt_grams)
deallocate(lpue) !, dredge_time, dredge_area)
deallocate(lpue)
deallocate(fishing_effort)
#ifdef ACCUM_LANDINGS
deallocate(landings_accum, landings_wgt_accum, lpue_accum)
#endif
endsubroutine Destructor

!==================================================================================================================
@@ -232,12 +240,25 @@ subroutine Set_Mortality(mortality, grid, shell_lengths, dom_name, dom_area, num
allocate(expl_scallops_psqm(num_grids))
allocate(expl_num(num_grids))
allocate(F_mort(num_grids))
allocate(lpue(num_grids)) !, dredge_time(num_grids), dredge_area(num_grids))
allocate(lpue(num_grids))
allocate(fishing_effort(num_grids))

allocate(landings_by_num(num_grids))
allocate(landings_wgt_grams(num_grids))
#ifdef ACCUM_LANDINGS
allocate(landings_wgt_accum(num_grids), lpue_accum(num_grids), landings_accum(num_grids))

landings_accum(:) = 0.D0
landings_wgt_accum(:) = 0.D0
lpue_accum(:) = 0.D0
#endif


#ifdef ACCUM_LANDINGS
write(*,*) term_yel, 'Mortality: Accumulating landing values', term_blk
#else
write(*,*) term_yel, 'Mortality: Landing values at timestep', term_blk
#endif
!---------------------------------------------------
! Set mortality parameters
!---------------------------------------------------
@@ -425,32 +446,32 @@ function Set_Fishing_Effort(year, ts, state_mat, weight_grams, mortality, grid)

!call Calc_LPUE(expl_biomass_gpsqm, expl_scallops_psqm, lpue, dredge_time, dredge_area)
lpue = Calc_LPUE(expl_biomass_gpsqm, expl_scallops_psqm)
#ifdef ACCUM_LANDINGS
lpue_accum(:) = lpue_accum(:) + lpue(:)
#endif

c_mort = fishing_mort * sum(expl_num(:)) / dot_product(expl_num(:), lpue(:)**alpha_mort)
c_mort = fishing_mort * sum(expl_biomass_gpsqm(:)) / dot_product(expl_biomass_gpsqm(:), lpue(:)**alpha_mort)
F_mort(:) = c_mort * lpue(:)**alpha_mort
F_mort(:) = Set_Fishing_Mortality(grid(1:num_grids), year, .true., F_mort(:))

do loc = 1, num_grids

! (1._dp - exp(-F * delta_time)) * state * grid_area_sqm * selectivity
landings_at_size(:) = (1._dp - exp(-F_mort(loc) * delta_time)) &
& * state_mat(loc, 1:num_size_classes) * grid_area_sqm&
& * mortality(loc)%selectivity(1:num_size_classes)
& * state_mat(loc, :) * grid_area_sqm * mortality(loc)%selectivity(:)

! (1._dp - exp(-F * delta_time))
! * dot_product(selectivity * state * grid_area_sqm, weight)
landings_wgt_grams(loc) =&
& dot_product(landings_at_size(:), weight_grams(loc,1:num_size_classes))

landings_wgt_grams(loc) = dot_product(landings_at_size(:), weight_grams(loc,:))

! (1._dp - exp(-F * delta_time)) * selectivity * state * grid_area_sqm
landings_by_num(loc) = sum(landings_at_size(:))
enddo

!=============================================================
! originally in metric tons, Set_Fishing_Effort_Weight_xx modified to accept grams
total_catch = sum(landings_wgt_grams)
!=============================================================

#ifdef ACCUM_LANDINGS
landings_wgt_accum(loc) = landings_wgt_accum(loc) + landings_wgt_grams(loc)
landings_accum(loc) = landings_accum(loc) + landings_by_num(loc)
#endif
enddo

! Report current timestep results
call Mortality_Write_At_Timestep(year, ts, state_mat, weight_grams, mortality, grid)
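
The change to c_mort above switches the normalisation from exploitable numbers to exploitable biomass; with this form, and before any regional adjustment by Set_Fishing_Mortality, the biomass-weighted mean of F_mort recovers the target fishing mortality. A small Python sketch with invented values:

    fishing_mort = 0.4                          # target fleet-wide fishing mortality
    alpha_mort = 1.0                            # assumed LPUE exponent
    expl_biomass_gpsqm = [120.0, 80.0, 30.0]    # exploitable biomass per grid cell (made up)
    lpue = [25.0, 15.0, 5.0]                    # landings per unit effort per grid cell (made up)

    c_mort = fishing_mort * sum(expl_biomass_gpsqm) / sum(
        b * l ** alpha_mort for b, l in zip(expl_biomass_gpsqm, lpue))
    f_mort = [c_mort * l ** alpha_mort for l in lpue]

    # Biomass-weighted mean F equals the target fishing mortality:
    check = sum(f * b for f, b in zip(f_mort, expl_biomass_gpsqm)) / sum(expl_biomass_gpsqm)
    print(round(check, 6))   # 0.4
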
Expand All @@ -467,6 +488,7 @@ function Set_Fishing_Effort(year, ts, state_mat, weight_grams, mortality, grid)
endif
enddo
!
total_catch = sum(landings_wgt_grams)
if (expl_scallops_psqm(loc) .EQ. 0._DP) then
! if expl_scallops_psqm(n) is 0 then it stands to reason that there would
! be zero biomass as well and thus no fishing effort.
@@ -838,18 +860,34 @@ subroutine Mortality_Write_At_Timestep(year, ts, state_mat, weight_grams, mortal
& call Write_Column_CSV(num_grids, F_mort(1:num_grids), 'FMORT',&
& output_dir//'Lat_Lon_Surv_FMOR_'//domain_name//'.csv',.true.)

! landings_accum or landings_by_num
! landings_wgt_accum or landings_wgt_grams
! lpue_accum or lpue
#ifdef ACCUM_LANDINGS
if (data_select%plot_LAND) &
& call Write_Column_CSV(num_grids, landings_accum(:), 'Landings', &
& output_dir//'Lat_Lon_Surv_LAND_'//domain_name//'.csv',.true.)

if (data_select%plot_LNDW) &
& call Write_Column_CSV(num_grids, landings_wgt_accum(:), 'LNDW', &
& output_dir//'Lat_Lon_Surv_LNDW_'//domain_name//'.csv',.true.)

if (data_select%plot_LPUE) &
& call Write_Column_CSV(num_grids, lpue_accum(:), 'LPUE',&
& output_dir//'Lat_Lon_Surv_LPUE_'//domain_name//'.csv',.true.)
#else
if (data_select%plot_LAND) &
& call Write_Column_CSV(num_grids, landings_by_num(:), 'Landings', &
& output_dir//'Lat_Lon_Surv_LAND_'//domain_name//'.csv',.true.)

if (data_select%plot_LNDW) &
if (data_select%plot_LNDW) &
& call Write_Column_CSV(num_grids, landings_wgt_grams(:), 'LNDW', &
& output_dir//'Lat_Lon_Surv_LNDW_'//domain_name//'.csv',.true.)

if (data_select%plot_LPUE) &
& call Write_Column_CSV(num_grids, lpue(:), 'LPUE',&
& output_dir//'Lat_Lon_Surv_LPUE_'//domain_name//'.csv',.true.)

#endif
! write annual results, i.e. every ts_per_year
! This data is later interpolated to MA or GB Grid
if (mod(ts, ts_per_year) .eq. 1) then
Expand Down Expand Up @@ -879,6 +917,28 @@ subroutine Mortality_Write_At_Timestep(year, ts, state_mat, weight_grams, mortal
& call Write_Column_CSV_By_Region(num_grids, F_mort(:), grid(1:num_grids)%lon, 'FMOR', &
& data_dir//'X_Y_FMOR_'//domain_name//trim(buf), .true.)

! landings_accum or landings_by_num
! landings_wgt_accum or landings_wgt_grams
! lpue_accum or lpue
#ifdef ACCUM_LANDINGS
if (data_select%plot_LAND) then
call Write_Column_CSV_By_Region(num_grids, landings_accum(:), grid(1:num_grids)%lon, 'LAND', &
& data_dir//'X_Y_LAND_'//domain_name//trim(buf), .true.)
landings_accum(:) = 0.D0
endif

if (data_select%plot_LNDW) then
call Write_Column_CSV_By_Region(num_grids, landings_wgt_accum(:), grid(1:num_grids)%lon, 'LNDW', &
& data_dir//'X_Y_LNDW_'//domain_name//trim(buf), .true.)
landings_wgt_accum(:) = 0.D0
endif

if (data_select%plot_LPUE) then
call Write_Column_CSV_By_Region(num_grids, lpue_accum(:), grid(1:num_grids)%lon, 'LPUE', &
& data_dir//'X_Y_LPUE_'//domain_name//trim(buf), .true.)
lpue_accum(:) = 0.D0
endif
#else
if (data_select%plot_LAND) &
& call Write_Column_CSV_By_Region(num_grids, landings_by_num(:), grid(1:num_grids)%lon, 'LAND', &
& data_dir//'X_Y_LAND_'//domain_name//trim(buf), .true.)
Expand All @@ -890,6 +950,7 @@ subroutine Mortality_Write_At_Timestep(year, ts, state_mat, weight_grams, mortal
if (data_select%plot_LPUE) &
& call Write_Column_CSV_By_Region(num_grids, lpue(:), grid(1:num_grids)%lon, 'LPUE', &
& data_dir//'X_Y_LPUE_'//domain_name//trim(buf), .true.)
#endif

endif ! if(mod()) = 1

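
Taken together, the ACCUM_LANDINGS additions in this file sum landings, landed weight, and LPUE across timesteps and reset the accumulators after each annual regional write; without the macro, only the current timestep's values are written. A condensed Python sketch of that accumulate, write, reset pattern, with assumed timestep counts and placeholder values:

    ts_per_year = 13            # assumed timesteps per year
    num_grids = 4               # assumed number of grid cells
    landings_accum = [0.0] * num_grids

    for ts in range(1, 2 * ts_per_year + 1):
        landings_by_num = [0.1 * ts] * num_grids     # stand-in for the per-timestep model output

        # With ACCUM_LANDINGS defined: sum into the accumulator every timestep.
        landings_accum = [a + x for a, x in zip(landings_accum, landings_by_num)]

        if ts % ts_per_year == 1:                    # mirrors mod(ts, ts_per_year) == 1
            annual = landings_accum                  # ACCUM_LANDINGS: write the accumulated values
            # ... Write_Column_CSV_By_Region(annual, ...) happens here in the Fortran ...
            landings_accum = [0.0] * num_grids       # reset after the annual write
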
18 changes: 18 additions & 0 deletions SRC/ScallopRecruit.f90
@@ -85,6 +85,15 @@ subroutine Set_Recruitment(recruit, n_grids, dom_name, dom_area, recr_yr_strt, r
character(fname_len) fname
logical exists

#ifdef USE_FIXED_RAND
! Run with the same "random data" files to produce same results for evaluating output data
! Assumes working with
! * Ten years in duration, e.g. 2022 - 2031
! * Recruit Yr 2012 - 2023
! * HabCam Data Habcam_BySegment_2000_2011-2023 (could also pick Habcam_BySegment_2000_2011-2023_v2)
integer, parameter :: rand_idx_debug(10) = (/4, 12, 8, 3, 8, 10, 12, 10, 4, 2/)
#endif

! Used to define weighting
!
! |
Expand All @@ -102,6 +111,11 @@ subroutine Set_Recruitment(recruit, n_grids, dom_name, dom_area, recr_yr_strt, r
real(dp) sill
integer rand_idx

#ifdef USE_FIXED_RAND
write(*,*) term_yel, 'Recruit: Using fixed "random" index', term_blk
#else
write(*,*) term_yel, 'Recruit: Using random "random" index', term_blk
#endif
call Read_Configuration()
!
sim_start_year = yr_start
@@ -143,7 +157,11 @@ subroutine Set_Recruitment(recruit, n_grids, dom_name, dom_area, recr_yr_strt, r
do year = sim_start_year, sim_stop_year
year_index = year_index + 1

#ifdef USE_FIXED_RAND
rand_idx = rand_idx_debug(year_index)
#else
rand_idx = random_index()
#endif
random_year = recruit_yr_strt - 1 + rand_idx

write(buf,'(I4)')random_year
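
The USE_FIXED_RAND additions replace the random recruit-year index with the fixed sequence declared above, so repeated runs reproduce the same output. A minimal Python sketch of the selection logic; the year ranges and index list come from the comments in the diff, and everything else is assumed for illustration:

    import random

    recruit_yr_strt = 2012                               # recruit years 2012 - 2023
    sim_years = range(2022, 2032)                        # ten-year simulation, 2022 - 2031
    rand_idx_debug = [4, 12, 8, 3, 8, 10, 12, 10, 4, 2]  # fixed indices from the diff
    USE_FIXED_RAND = True                                # a compile-time macro in the Fortran

    for year_index, year in enumerate(sim_years):
        if USE_FIXED_RAND:
            rand_idx = rand_idx_debug[year_index]
        else:
            rand_idx = random.randint(1, 12)             # stand-in for random_index()
        random_year = recruit_yr_strt - 1 + rand_idx     # recruit year actually sampled
        print(year, '->', random_year)
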
13 changes: 11 additions & 2 deletions SRC/makefile
@@ -20,7 +20,16 @@ FC = gfortran
#-fno-automatic
# -fcheck=all -fcheck-array-temporaries-fno-automatic -fmax-stack-var-size=100000
#FFLAGS = -O3 -mcmodel=medium -funroll-all-loops -ffast-math -msse -msse2 -m3dnow -m64
FFLAGS = -g -Wall -std=f95 -fall-intrinsics

# -cpp runs the C preprocessor so the source can use conditional-compilation macros, i.e.
FFLAGS = -cpp
# #ifdef USE_FIXED_RAND uses the same fixed index, rather than a random one, on each run to produce consistent output
# #ifdef ACCUM_LANDINGS accumulates landing values over the year rather than keeping only the last timestep
#
FFLAGS += -DUSE_FIXED_RAND -UACCUM_LANDINGS
# other
FFLAGS += -g -Wall -std=f95 -fall-intrinsics

SRC_DIRS = .

# Find all the f90 files we want to compile
@@ -29,7 +38,7 @@ SRCS = Globals.f90 GridManager.f90 ScallopMortality.f90 ScallopRecruit.f90 Scall
#SRCS += $(shell find $(SRC_DIRS) -name '*.f90' | grep -v 'Globals\|DataPoint\|ScallopGrowth\|ScallopRecruit')
SRCS += ScallopPopDensity.f90 IORoutines.f90

#LINUX, WIN?
#LINUX, WIN? removed need for lapack and blas
#LIBS = -L/usr/lib/x86_64-linux-gnu -llapack -lblas
#MAC
#LIBS = -L/usr/lib -llapack -lblas
Expand Down