From 59bde4c64cb627db4e84cdcd4d61fc02b7e8c1ca Mon Sep 17 00:00:00 2001
From: Ana Ordonez
Date: Mon, 1 Oct 2018 16:33:09 -0600
Subject: [PATCH] updates

---
 area_vs_extent_plots.py                |  83 ++++-
 cyclone_composite_LENS.py              | 428 +++++++++++++++++++---
 grid1togrid2.py                        |  93 +++--
 make_LENS_cyclone_climo_plots.py       |  38 +-
 make_seasonal_storm_composite_plots.py | 486 +++++++++++++++++++++----
 plot_LENS_storm_track.py               | 135 +++++--
 save_stereo_vars.py                    |  15 +-
 7 files changed, 1036 insertions(+), 242 deletions(-)

diff --git a/area_vs_extent_plots.py b/area_vs_extent_plots.py
index e49a57b..f41bae0 100644
--- a/area_vs_extent_plots.py
+++ b/area_vs_extent_plots.py
@@ -9,23 +9,39 @@
 import matplotlib.pyplot as plt
 import matplotlib.cm as cm
 import matplotlib.gridspec as gridspec
+import matplotlib.lines as mlines
 
 area = read_area_ice()
-num_list = [str(x) for x in range(1,31)]
-ratio = np.zeros((30,121))
+num_list = [str(x) for x in range(1,36)]
+ratio = np.zeros((35,121))
+ratio2 = np.zeros((35,121))
+areamean = np.zeros((35,121))
+extentmean = np.zeros((35,121))
+volmean = np.zeros((35,121))
 for ind,model_num in enumerate(num_list):
+    print(model_num)
     aice = read_ice_data_monthly('aice',model_num)
+    hi = read_ice_data_monthly('hi',model_num)
     time = aice.shape[0]
     aice = aice[range(8,time,12),:,:]
+    hi = hi[range(8,time,12),:,:]
     # remove fill values and mask
     aice[aice > 110] = 0
+    hi[hi > 100] = 0
     aice = aice.data
+    hi = hi.data
     aice = aice / 100.
     aice[aice < 0.15] = 0
+    hi[aice < 0.15] = 0
     ice_area = np.nansum(np.nansum(aice * area, axis = 2),axis = 1)
     ice_extent = np.select([aice>=0.15],[1])
     ice_extent = np.nansum(np.nansum(ice_extent * area, axis = 2),axis = 1)
     ratio[ind,:] = ice_area / ice_extent
+    ice_vol = np.nansum(np.nansum(hi*area, axis = 2), axis = 1)
+    ratio2[ind] = ice_area / ice_vol
+    areamean[ind,:] = ice_area
+    extentmean[ind,:] = ice_extent
+    volmean[ind,:] = ice_vol
 
 # plot annual ratios with +- 2 std (based on 1980-1999)
 """ratio_ann = np.reshape(ratio[:,0:1452],(30,121,12))
@@ -34,31 +50,78 @@
 ratio_mean = np.nanmean(ratio_ann[:,0:30].flatten())
 ratio_ann = np.transpose(ratio_ann)
 """
+# get standard deviation and mean from 1980-2009
 ratio_ann = ratio[:,0:120]
 ratio_std = np.std(ratio_ann[:,0:30].flatten()) * 2
 ratio_mean = np.nanmean(ratio_ann[:,0:30].flatten())
 ratio_ann = np.transpose(ratio_ann)
 
-years = np.transpose(np.tile(np.arange(1980,2100),(30,1)))
+ratio_ann2 = ratio2[:,0:120]
+ratio_ann2 = np.transpose(ratio_ann2)
+
+areamean = np.transpose(np.nanmean(areamean[:,0:120],axis = 0))
+extentmean = np.transpose(np.nanmean(extentmean[:,0:120],axis = 0))
+volmean = np.transpose(np.nanmean(volmean[:,0:120],axis = 0))
+
+obsfile = open("area_extent_ratio_obs_111978-122015_nasateam.txt",'r')
+obs = []
+for line in obsfile:
+    data = line.split()
+    if data == []:
+        continue
+    obs.append(float(data[0]))
+obsfile.close()
+obs = np.array(obs)
+# get September values for 1980-2015 (monthly series starts November 1978,
+# so the first September plotted against the 1980-based axis is index 22)
+obs = obs[22::12]
+
+years = np.transpose(np.tile(np.arange(1980,2100),(35,1)))
 
 #1980-2100 subplot
 gs = gridspec.GridSpec(2,2)
 ax1 = plt.subplot(gs[0,:])
 #ax1.set_autoscale_on(False)
 ax1.fill_between(years[:,0],ratio_mean + ratio_std,ratio_mean - ratio_std, color = [0.7,0.7,0.7])
-colors = iter(cm.rainbow(np.linspace(0,1,30)))
-for y in range(0,30):
+colors = iter(cm.rainbow(np.linspace(0,1,35)))
+for y in range(0,35):
     ax1.scatter(years[:,y],ratio_ann[:,y],color = next(colors))
-ax1.set_title('1980-2100')
+ax1.set_title('area:extent')
 ax1.axis([1980,2100,0.0,1.0])
 
+"""ax2= plt.subplot(gs[1,:])
+ax3 = ax2.twinx()
+a = ax2.plot(years[:,0],areamean,color = 'b',linewidth = 2)
+b = ax2.plot(years[:,0],extentmean,color = 'r',linewidth = 2)
+c = ax3.plot(years[:,0],volmean,color = 'g',linewidth = 2)
+ax2.set_title('Ensemble mean')
+ax2.set_xlim([1980,2100])
+ax2.set_ylabel('area m**2')
+ax3.set_ylabel('volume m**3')
+aline = mlines.Line2D([],[],color = 'b',label = 'area')
+bline = mlines.Line2D([],[],color = 'r',label = 'extent')
+cline = mlines.Line2D([],[],color = 'g',label = 'volume')
+mylines = [aline,bline,cline]
+ax2.legend(handles = mylines,labels = [h.get_label() for h in mylines])"""

+"""#1980-2100 area/volume subplot
+gs = gridspec.GridSpec(2,2)
+ax2 = plt.subplot(gs[1,:])
+#ax1.set_autoscale_on(False)
+#ax1.fill_between(years[:,0],ratio_mean + ratio_std,ratio_mean - ratio_std, color = [0.7,0.7,0.7])
+colors = iter(cm.rainbow(np.linspace(0,1,35)))
+for y in range(0,35):
+    ax2.scatter(years[:,y],ratio_ann2[:,y],color = next(colors))
+ax2.set_title('area:volume')
+ax2.set_xlim([1980,2100])"""
+
 #1980-2030 subplot
 ax2 = plt.subplot(gs[1,:])
 ax2.set_autoscale_on(False)
 ax2.fill_between(years[:,0],ratio_mean + ratio_std,ratio_mean - ratio_std, color = [0.7,0.7,0.7])
-colors = iter(cm.rainbow(np.linspace(0,1,30)))
-for y in range(0,30):
+colors = iter(cm.rainbow(np.linspace(0,1,35)))
+for y in range(0,35):
     ax2.scatter(years[0:50,y],ratio_ann[0:50,y],color = next(colors))
+print(years.shape, obs.shape)
+ax2.scatter(years[0:obs.shape[0],y],obs,color = 'k')
 ax2.set_title('1980-2030')
 ax2.axis([1980,2030,0.25,0.95])
 
@@ -72,5 +135,5 @@
 #ax3.set_title('2060-2100')
 #ax3.axis([2060,2100,0.6,3.1])
 
-plt.suptitle('Ratio of September ice area:extent in 30 LENS members')
-plt.savefig('LENS_area_extent_ratio_21stcentury_sept.png')
+plt.suptitle('September area:extent ratio in 35 LENS members',fontsize = 14)
+plt.savefig('LENS35_area_extent_ratio_21stcentury_sept_withobs.png')
diff --git a/cyclone_composite_LENS.py b/cyclone_composite_LENS.py
index 845e9cd..b365d6d 100644
--- a/cyclone_composite_LENS.py
+++ b/cyclone_composite_LENS.py
@@ -49,12 +49,11 @@ def detect_local_minima(arr):
     detected_minima = local_min - eroded_background
     return detected_minima
 
-def buffer_coast(pdata,buf = (5,5), edgedif = 90000.):
+def buffer_coast(pdata,buf = 1, mask = np.array([0])):
     """buffer_coast()
 
-    Returns a mask that can be used to select only those
-    grid cells which are not adjacent to land within
-    a distance given by 'buf'
+    Returns an array that has been buffered to remove
+    ocean data close to coasts.
 
     Parameters:
     --------------------
@@ -63,17 +62,37 @@
-    edgedif: expected difference between water and land values
-    mask: numpy array. 0 at coast, 1 away from coast
+    buf: int, number of binary dilation iterations used to widen
+        the coastal buffer
+    mask: numpy 2D land mask, 1 over land and 0 over ocean; the
+        default loads landmask_stereo.npy
     """
-    edge = morphology.morphological_gradient(pdata,\
-            size = buf)
-    edge[edge < edgedif] = 1.
-    edge[edge >= edgedif] = 0.
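+    # approach: dilate the land mask by 'buf' cells and set every ocean
+    # grid cell inside that buffer to NaN, so coastal noise from the
+    # stereo regridding drops out of later composites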
+ pdata[np.isnan(pdata)] = 0 + if len(mask.shape) <= 1: + print("buffer_coast: loading default mask") + mask = np.load('/glade/scratch/aordonez/landmask_stereo.npy') + elif len(mask.shape) > 2: + mask = mask[0,:,:] + if mask.shape != pdata.shape: + print("buffer_coast: mask must be same shape as data") + return bi = morphology.generate_binary_structure(2,2) - mask = morphology.binary_dilation(edge,\ - structure = bi, iterations = 1) - return mask.astype(int) - + mask = morphology.binary_dilation(mask,\ + structure = bi, iterations = buf) + newmask = 1-mask.astype(int) + newmask = newmask.astype(float) + newmask[newmask < 1] = np.nan + pdata = pdata * newmask + return pdata + +def buffer_points(data,buf = 2): + """ + buffer_points(data,buf = (5,5)) + returns an array of buffered data points + data: binary array, where 1 is the value to buffer + buf: the number of iterations for dilation + """ + bi = morphology.generate_binary_structure(2,2) + mask = morphology.binary_dilation(data,\ + structure = bi, iterations = buf) + return mask.astype('int') -def find_cyclone_center(psl,icefrac,lat,pmax,pmin): +def find_cyclone_center(psl,icefrac,laplacian,pmax,pmin): """ find_cyclone_center @@ -99,22 +118,114 @@ def find_cyclone_center(psl,icefrac,lat,pmax,pmin): """ time,rows,cols = psl.shape lows = np.zeros((psl.shape)) + landmask = np.load('/glade/scratch/aordonez/landmask_stereo.npy') # find the lows for n in range(0,psl.shape[0]): - lap = filters.laplace(psl[n,:,:]) + # smooth the pressure field? + psl1 = filters.gaussian_filter(psl[n,:,:],3) + #psl1 = buffer_coast(psl1,buf=1,mask=landmask) + #psl1 = psl[n,:,:] + lap = filters.laplace(psl1) + #lap = laplacian[n,:,:] + #lap = filters.gaussian_filter(lap,3) + #lap = buffer_coast(lap,buf=1,mask=landmask) lapmax = detect_local_minima(lap*-1.) - coast = buffer_coast(psl[n,:,:],buf = (5,5)) - ptmp = psl[n,:,:] * coast - low_n = detect_local_minima(ptmp) + # include cells immediately surrounding maxes + lapmax = buffer_points(lapmax,buf=1) + #ptmp = buffer_coast(psl[n,:,:],buf = 1, mask = landmask) + low_n = detect_local_minima(psl1) + lows[n,:,:] = np.select([(low_n == True) & + (icefrac[n,:,:] > 0.15) & + (psl[n,:, :] <= pmax) & + (psl[n,:,:] >= pmin) & + (lapmax ==1)],[low_n]) + + return lows + + +def find_anticyclone_center(psl,icefrac,pmax,pmin): + """ + find_cyclone_center + + Returns a matrix (time x lon x lat). Cells with + a "1" indicate a low pressure center; cells equal "0" + otherwise. + + For a pixel to be counted as a high, it must meet these criteria: + There must be a local maxima in the sea level pressure (SLP), + a local minima in the laplacian of SLP, greater than 15% ice + cover, and the SLP must be between the bounds 'pmin' and 'pmax'. + Grid cells near the coast are not included due to noise from + the stereo regridding and rotation done in get_boxes() + + Parameters: + -------------------- + psl: numpy array of sea level pressure. 
land areas masked with 0 + icefrac: numpy array of sea ice concentration on atmosphere grid, + max = 1 + pmax: numeric, maximum allowed value of central pressure + pmin: numeric, minimum allowed value of central pressure + lows: numpy array + """ + time,rows,cols = psl.shape + lows = np.zeros((psl.shape)) + landmask = np.load('/glade/scratch/aordonez/landmask_stereo.npy') + # find the lows + for n in range(0,psl.shape[0]): + lap = filters.laplace(psl[n,:,:]) + lapmax = detect_local_minima(lap) + # include cells immediately surrounding mins + lapmax = buffer_points(lapmax,buf=2) + ptmp = buffer_coast(psl[n,:,:],buf = 1, mask = landmask) + low_n = detect_local_minima(ptmp * -1.) lows[n,:,:] = np.select([(low_n == True) & (icefrac[n,:,:] > 0.15) & - (lat > 65) & + #(lat > 66) & (psl[n,:, :] <= pmax) & (psl[n,:,:] >= pmin) & (lapmax ==1) & - (coast == 1)],[low_n]) + (coast == 0)],[low_n]) return lows +def remove_parked_lows(lows): + """ID_parked_lows + + Returns the locations of low pressure systems + which stay in one location over the coarse of many days. + + Parameters: + ------------------- + lows: a binary array where '1' is the location of a low + lat: an array of latitudes for the lows grid + lon: an array of longitudes for the lows grid + """ + """ + for t in times: + buffer around each low + find buffers that overlap + delete these for now; just include moving storms + """ + lows_copy = np.copy(lows) + time = lows.shape[0] + for day in range(1,time): + bi = morphology.generate_binary_structure(2,2) + buffered_lows_new = morphology.binary_dilation(lows[day,:,:],structure = bi,iterations = 1).astype(int) + buffered_lows_old = morphology.binary_dilation(lows[day-1,:,:],structure = bi,iterations = 1).astype(int) + storms, _ = ndimage.label(buffered_lows_new) + # id which storms are overlapping + lowsum = buffered_lows_new + buffered_lows_old + k = np.where(lowsum == 2) + label_list = np.unique(storms[k]) + # delete those storms + l = np.select([lows[day,:,:] == 1],[storms]) + lnew = np.in1d(l,label_list).astype(int) + lnew = np.reshape(lnew,(lows.shape[1],lows.shape[2])) + l[lnew == 1] = 0 + l[l > 0] = 1 + lows_copy[day,:,:] = l + return lows_copy + + def find_cyclone_center_SH(psl,icefrac,lat,pmax,pmin): """ find_cyclone_center @@ -145,8 +256,7 @@ def find_cyclone_center_SH(psl,icefrac,lat,pmax,pmin): for n in range(0,psl.shape[0]): lap = filters.laplace(psl[n,:,:]) lapmax = detect_local_minima(lap*-1.) 
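+        # detect_local_minima(lap*-1.) finds local maxima of the Laplacian;
+        # SLP minima with strong curvature show up as Laplacian maxima,
+        # which screens out broad, weak lows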
- coast = buffer_coast(psl[n,:,:],buf = (5,5)) - ptmp = psl[n,:,:] * coast + ptmp = buffer_coast(psl[n,:,:],buf = (5,5)) low_n = detect_local_minima(ptmp) lows[n,:,:] = np.select([(low_n == True) & (icefrac[n,:,:] > 0.15) & @@ -155,11 +265,11 @@ def find_cyclone_center_SH(psl,icefrac,lat,pmax,pmin): (psl[n,:, :] <= pmax) & (psl[n,:,:] >= pmin) & (lapmax ==1) & - (coast == 1)],[low_n]) + (coast == 0)],[low_n]) return lows -def get_boxes(lows,data,size,lat,lon,edgedif): +def get_boxes(lows,data,size,lat,lon,landmask): """ box = get_boxes(lows, data, size) @@ -185,10 +295,12 @@ def get_boxes(lows,data,size,lat,lon,edgedif): lat_box = np.zeros(data_box.shape) lon_box = np.zeros(data_box.shape) (tmax, ymax, xmax) = data.shape + if len(landmask.shape) == 3: + landmask = landmask[0,:,:] # get lon where north is up - lon0 = lon[0,(xmax/2)-1] + lon0 = lon[0,(int(xmax/2))-1] count = 0 - + indlist = np.zeros((nlows)) for ind in range(0,nlows): time = mylow[0][ind] lowrow = mylow[1][ind] @@ -203,31 +315,101 @@ def get_boxes(lows,data,size,lat,lon,edgedif): deg = mylon - lon0 elif lon0 >= mylon: deg = (360 + mylon) - lon0 - low_rotated = interpolation.rotate(low_mask,deg) + low_rotated = interpolation.rotate(low_mask, deg, order = 2) # because of interpolation, lows != 1 ynew,xnew = np.where(low_rotated == low_rotated.max()) - data_rotated = interpolation.rotate(data[time,:,:],deg) + if len(ynew.shape) > 1: + print("get_boxes: problem with rotation: too many indices for max") + print("get_boxes: exiting script") + #return + data_rotated = interpolation.rotate(data[time,:,:], deg, order =2) + # try to ignore data outside map + data_rotated[data_rotated == 0.0] = np.nan # take out noisy grid cells near coast - coast = buffer_coast(data_rotated, buf = (8,8), edgedif = edgedif) - data_rotated = data_rotated * coast - #ynew,xnew = lowrow,lowcol + landmask_rot = interpolation.rotate(landmask, deg, order = 2) + landmask_rot[landmask_rot < 0.5] = 0 + landmask_rot[landmask_rot >= 0.5] = 1 + data_rotated = buffer_coast(data_rotated, buf = 1, mask = landmask_rot) # ----------------- # extracting box # ----------------- - y1 = ynew - size - y2 = ynew + size + 1 - x1 = xnew - size - x2 = xnew + size + 1 + y1 = int(ynew - size) + y2 = int(ynew + size + 1) + x1 = int(xnew - size) + x2 = int(xnew + size + 1) if (y1 < 0) | (x1 < 0) | (y2 > ymax) | (x2 > xmax): # too close to edge of map continue else: data_box[count,:,:] = data_rotated[y1:y2,x1:x2] #data_box[count,:,:] = data[ind,y1:y2,x1:x2] + #lat_box[count,:,:] = lat[y1:y2,x1:x2] + #lon_box[count,:,:] = lon[y1:y2,x1:x2] + indlist[count] = ind + count += 1 + return data_box[0:count,:,:], indlist[0:count] #, lon_box[0:count,:,:] + +def get_boxes_no_rotation(lows,data,size,lat,lon,edgedif): + """ + box = get_boxes_no_rotation(lows, data, size) + + Clips a square of length(2 x size) + 1 around each low + pressure center in lows and returns an array with all the + boxes. Like get_boxes, but does not rotate the box + relative to north. + + Parameters: + -------------------- + lows: binary matrix where 1 = low pressure center + data: numpy array, land masked with 0 + size: numeric, half the length of the 2D subset box + edgedif: numeric, roughly the difference + in value between data and land grid cells + box: numpy array of data around low pressure centers + """ + lon[lon < 0.] = lon[lon < 0.] + 360. 
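+    # longitudes are wrapped to 0-360 above; each low then becomes one
+    # (2*size + 1)^2 box cut straight from the grid with no rotation,
+    # and matching lat/lon boxes are filled in for georeferencing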
+ + long_size = ((size *2) + 1) + mylow = np.where(lows == 1) + nlows = mylow[0].shape[0] + data_box = np.zeros((nlows,long_size,long_size)) + lat_box = np.zeros(data_box.shape) + lon_box = np.zeros(data_box.shape) + (tmax, ymax, xmax) = data.shape + # get lon where north is up + lon0 = lon[0,(xmax/2)-1] + count = 0 + indlist = np.zeros((nlows)) + for ind in range(0,nlows): + time = mylow[0][ind] + lowrow = mylow[1][ind] + lowcol = mylow[2][ind] + # ----------------- + # buffer out coast + # ----------------- + # take out noisy grid cells near coast + coast = buffer_coast(data[time,:,:], buf = 1, edgedif = edgedif) + data_buffered = data[time,:,:] * coast + #ynew,xnew = lowrow,lowcol + # ----------------- + # extracting box + # ----------------- + y1 = lowrow - size + y2 = lowrow + size + 1 + x1 = lowcol - size + x2 = lowcol + size + 1 + if (y1 < 0) | (x1 < 0) | (y2 > ymax) | (x2 > xmax): + # too close to edge of map + continue + else: + data_box[count,:,:] = data_buffered[y1:y2,x1:x2] + #data_box[count,:,:] = data[ind,y1:y2,x1:x2] lat_box[count,:,:] = lat[y1:y2,x1:x2] lon_box[count,:,:] = lon[y1:y2,x1:x2] + indlist[count] = ind count += 1 - return data_box[0:count,:,:], lat_box[0:count,:,:], lon_box[0:count,:,:] + return data_box[0:count,:,:], indlist[0:count-1] #, lon_box[0:count,:,:] + def regrid_to_conic(lat,lon,lat_ref,lon_ref,lat_stnd1,lat_stnd2): # regrid to conformal conic @@ -282,7 +464,7 @@ def get_conic_boxes(lows,data,types,latlist,lonlist): count = 0 for ind in range(0,nlows): if ind % 10 == 0: - print ind + print(ind) time = mylow[0][ind] lowrow = mylow[1][ind] lowcol = mylow[2][ind] @@ -353,6 +535,53 @@ def plot_mean(data,cmin = 90000,cmax = 101000): f.colorbar(h,ax = axs) f.show() +def get_anomaly_from_ma(data,wgts): + n = len(wgts) + half = n + datama = np.zeros(data.shape) + for ind in range(half,data.shape[0]-half): + n0 = ind - half + nf = ind + datama[ind,:,:] = np.average(data[n0:nf,:,:],axis = 0,weights = wgts) + datama = data- datama + datama[0:half,:,:] = 0 + datama[(datama.shape[0]-half):,:,:] = 0 + return datama + +def get_nday_trend(data,ndays,b = False): + """Finds the trend over the previous ndays + number of days at each gridcell + """ + s = data.shape + data = np.reshape(data,(s[0],s[1]*s[2])) + datatrend = np.zeros(data.shape) + if b: + datab = np.zeros(data.shape) + for ind in range(ndays,data.shape[0]-ndays): + n0 = ind - ndays + nf = ind + tmp = np.polyfit(range(0,ndays),data[n0:nf,:],1) + datatrend[ind,:] = tmp[0,:] + if b: + datab[ind,:] = tmp[1,:] + datatrend = np.reshape(datatrend,s) + datatrend[0:ndays,:,:] = 0 + if b: + return datatrend, np.reshape(datab,s) + else: + return datatrend + +def trend_predict(data,ndays,day = 0): + """Uses the linear trend over the past + ndays number of days to predict what + the value of data is on a given day + day: day at which prediction is desired + """ + m,b = get_nday_trend(data,ndays,b = True) + predict = np.zeros(data.shape) + predict = m * (ndays+day) + b + return predict + def get_mam(data): """Pulls out a timeseries only containing days in @@ -375,7 +604,7 @@ def get_jja(data): """Pulls out a timeseries only containing days in June, July, and August """ - nyrs = data.shape[0] / 365 + nyrs = int(data.shape[0] / 365) jja = len(range(151,243)) data_jja = np.zeros((nyrs*jja,data.shape[1],data.shape[2])) for yr in range(0,nyrs): @@ -387,6 +616,87 @@ def get_jja(data): last_ind = last_ind + jja return data_jja +def get_jj(data): + """Pulls out a timeseries only containing days in + June, July + """ + nyrs = 
int(data.shape[0] / 365)
+    jja = len(range(151,212))
+    data_jja = np.zeros((nyrs*jja,data.shape[1],data.shape[2]))
+    for yr in range(0,nyrs):
+        if yr == 0:
+            data_jja[0:jja,:,:] = data[151:212,:,:]
+            last_ind = jja
+        else:
+            data_jja[last_ind:last_ind + jja,:,:] = data[151+(yr*365):212+(yr*365),:,:]
+            last_ind = last_ind + jja
+    return data_jja
+
+def get_jun(data):
+    """Pulls out a timeseries only containing days in
+    June
+    """
+    nyrs = int(data.shape[0] / 365)
+    jja = 30
+    data_jja = np.zeros((nyrs*jja,data.shape[1],data.shape[2]))
+    for yr in range(0,nyrs):
+        if yr == 0:
+            data_jja[0:jja,:,:] = data[151:181,:,:]
+            last_ind = jja
+        else:
+            data_jja[last_ind:last_ind + jja,:,:] = data[151+(yr*365):181+(yr*365),:,:]
+            last_ind = last_ind + jja
+    return data_jja
+
+def get_aug(data):
+    """Pulls out a timeseries only containing days in
+    August
+    """
+    nyrs = int(data.shape[0] / 365)
+    jja = 31
+    data_jja = np.zeros((nyrs*jja,data.shape[1],data.shape[2]))
+    for yr in range(0,nyrs):
+        if yr == 0:
+            data_jja[0:jja,:,:] = data[212:243,:,:]
+            last_ind = jja
+        else:
+            data_jja[last_ind:last_ind + jja,:,:] = data[212+(yr*365):243+(yr*365),:,:]
+            last_ind = last_ind + jja
+    return data_jja
+
+
+def get_sep(data):
+    """Pulls out a timeseries only containing days in
+    September
+    """
+    nyrs = int(data.shape[0] / 365)
+    ndays = 30
+    data_sep = np.zeros((nyrs*ndays,data.shape[1],data.shape[2]))
+    for yr in range(0,nyrs):
+        if yr == 0:
+            data_sep[0:ndays,:,:] = data[243:243+ndays,:,:]
+            last_ind = ndays
+        else:
+            data_sep[last_ind:last_ind + ndays,:,:] = data[243+(yr*365):243+ndays+(yr*365),:,:]
+            last_ind = last_ind + ndays
+    return data_sep
+
+def get_aso(data):
+    """Pulls out a timeseries only containing days in
+    August, September, and October
+    """
+    nyrs = int(data.shape[0] / 365)
+    son = len(range(212,304))
+    data_son = np.zeros((nyrs*son,data.shape[1],data.shape[2]))
+    for yr in range(0,nyrs):
+        if yr == 0:
+            data_son[0:son,:,:] = data[212:304,:,:]
+            last_ind = son
+        else:
+            data_son[last_ind:last_ind + son,:,:] = data[212+(yr*365):304+(yr*365),:,:]
+            last_ind = last_ind + son
+    return data_son
+
 def get_son(data):
     """Pulls out a timeseries only containing days in
     September, October, and November
@@ -407,21 +717,53 @@ def get_djf(data):
     """Pulls out a timeseries only containing days in
     December, January, and February
     """
-    nyrs = data.shape[0] / 365
-    jf = len(range(0,60))
+    nyrs = int(data.shape[0] / 365)
+    jf = len(range(0,59))
     d = len(range(334,365))
     data_djf = np.zeros((nyrs*(d+jf),data.shape[1],data.shape[2]))
     for yr in range(0,nyrs):
         if yr == 0:
-            data_djf[0:60,:,:] = data[0:60,:,:]
-            data_djf[60:60+d,:,:] =data[334:365,:,:]
-            last_ind = 60+d
+            data_djf[0:59,:,:] = data[0:59,:,:]
+            data_djf[59:59+d,:,:] = data[334:365,:,:]
+            last_ind = 59+d
         else:
-            data_djf[last_ind:last_ind + jf,:,:] = data[0+(yr*365):60+(yr*365),:]
+            data_djf[last_ind:last_ind + jf,:,:] = data[0+(yr*365):59+(yr*365),:]
             data_djf[last_ind + jf:last_ind + d + jf,:,:] = data[334+(yr*365):365+(yr*365),:]
             last_ind = last_ind + d + jf
     return data_djf
 
+def get_jf(data):
+    """Pulls out a timeseries only containing days in
+    January and February
+    """
+    nyrs = int(data.shape[0] / 365)
+    jf = len(range(0,59))
+    data_djf = np.zeros((nyrs*(jf),data.shape[1],data.shape[2]))
+    for yr in range(0,nyrs):
+        if yr == 0:
+            data_djf[0:59,:,:] = data[0:59,:,:]
+            last_ind = jf
+        else:
+            data_djf[last_ind:last_ind + jf,:,:] = data[0+(yr*365):59+(yr*365),:]
+            last_ind = last_ind + jf
+    return data_djf
+
+def 
get_monthly_data(data,month): + nyrs = data.shape[0] / 365 + month_len = [31,28,31,30,31,30,31,31,30,31,30,31] + mlen = month_len[month] + start_day = [0,31,59,90,120,151,181,212,243,273,304,334] + start = start_day[month] + data_month = np.zeros((nyrs*mlen,data.shape[1],data.shape[2])) + for yr in range(0,nyrs): + if yr == 0: + data_month[0:mlen,:,:] = data[start:start+mlen,:,:] + last_ind = mlen + else: + span = yr*365 + data_month[last_ind:last_ind+mlen,:,:] = data[start+span:start+mlen+span,:,:] + last_ind = last_ind + mlen + return data_month def get_difference_from_mean(icedata,vardata,tarea): """Calculates the daily anomalies relative to values diff --git a/grid1togrid2.py b/grid1togrid2.py index ca23093..0a1f771 100644 --- a/grid1togrid2.py +++ b/grid1togrid2.py @@ -1,29 +1,30 @@ -import numpy as np from netCDF4 import Dataset +import numpy as np -def grid1togrid2(ongrid1,ncfile): - """grid2togrid2.py - - Regrids data from grid1 to grid2 using weights provided in ncfile - - ongrid1: numpy array - array of data to be regridded, dims: time x rows x cols - ncfile: string - name of the file containing the regridding weights (can - be generated by EMSF Scrip) - ongrid2: numpy array - data on new grid +def test_grid1togrid2_atm(): + from import_LE_data import read_atm_data + psl_on_latlon = read_atm_data('PSL','001') + # SCRIP regridding file with weights + ncfile = '/glade/p/work/aordonez/cesm_mapping/map_fv0.9x1.25_TO_stereo25km_blin.170209.nc' + psl_on_stereo = grid1togrid2(psl_on_latlon, ncfile) - atm to stereo weights: - ncfile = '/glade/p/work/aordonez/cesm_mapping/map_gx1v6NH_TO_stereo25km_blin.161123.nc' +def test_grid1togrid2_ice(): + from import_LE_data import read_ice_data + aice_on_latlon = read_ice_data('aice','001')[0:4,:,:] + # SCRIP regridding file with weights + ncfile = '/glade/p/work/aordonez/cesm_mapping/map_gx1v6_TO_stereo25km_blin.170210.nc' + aice_on_stereo = grid1togrid2(aice_on_latlon, ncfile) - ocn to stereo weights: - ncfile = '/glade/p/work/aordonez/cesm_mapping/map_fv0.9x1.25_TO_stereo25km_blin.161123.nc' +def grid1togrid2(ongrid1,ncfile): + # based on matlab script by Cecilia Bitz - """ + if len(ongrid1.shape) < 3: + ongrid1 = np.tile(ongrid1,(1,1,1)) + time,dim1,dim2 = ongrid1.shape griddata = Dataset(ncfile) N=griddata.variables['dst_grid_dims'][:] + Ntot=N[0]*N[1] # nasa stereo gridcell location dst=griddata.variables['row'][:] @@ -31,42 +32,34 @@ def grid1togrid2(ongrid1,ncfile): src=griddata.variables['col'][:] # map weight S=griddata.variables['S'][:] - S = np.reshape(S,(len(S),1)) - # convert to python 0-indexing - dst = dst - 1 - src = src - 1 + # convert to 0 based index + dst -= 1 + src -= 1 + + totwts=np.zeros((Ntot)) + totwts[dst]=totwts[dst]+S - Ntot=N[0]*N[1] NN=ongrid1.shape - - if len(NN) < 3: - tmp = ongrid1.flatten() - tmp = np.reshape(tmp,(len(tmp),1)) - ongrid2=np.zeros((Ntot,1)) - # multiply each element of the matrix (not matrix multiplication) - for ind in np.unique(dst): - k = np.where(dst == ind) - totwt = np.sum(S[k]) - ongrid2[ind]=np.sum(S[k]*tmp[src[k]]) / totwt - ongrid2=np.reshape(ongrid2,(N[1],N[0])) - elif len(NN) == 3: - time,y,x = ongrid1.shape - tmp = np.reshape(ongrid1,(time,y*x)) - tmp = np.transpose(tmp,(1,0)) - ongrid2 = np.zeros((Ntot,time)) - for ind in np.unique(dst): - k = np.where(dst == ind) - totwt = np.sum(S[k]) - ongrid2[ind,:] = np.sum(S[k] * tmp[src[k],:],axis = 0) / totwt - ongrid2 = np.reshape(ongrid2,(N[1],N[0],time)) - else: - print "data to regrid must have rank 3 or less" + + tmp = 
np.reshape(ongrid1,(time,dim1*dim2)) + ongrid2=np.zeros((time,Ntot)) + + # get unique destination points + dunique,dindices = np.unique(dst,return_index = True) + # get the last valid index + indN = np.where(dst == dunique[-1])[0][-1] + # multiply each element of the matrix (not matrix multiplication) + for n in range(0,len(dindices)): + ind1 = dindices[n] + if n == len(dindices)-1: + ind2 = indN + 1 + else: + ind2 = dindices[n+1] + ongrid2[:,dunique[n]] = np.nansum(S[ind1:ind2]*tmp[:,src[ind1:ind2]],axis = 1) + + ongrid2 = np.reshape(ongrid2,(time,N[1],N[0])) return ongrid2 - - - - diff --git a/make_LENS_cyclone_climo_plots.py b/make_LENS_cyclone_climo_plots.py index e138baa..917f080 100644 --- a/make_LENS_cyclone_climo_plots.py +++ b/make_LENS_cyclone_climo_plots.py @@ -8,13 +8,13 @@ # lance bosart - papers on big storm fueled by sea ice loss NAAS workshop video # True: Northern Hemisphere, False: Southern Hemisphere -nh = True +nh = False print 'loading data' #psl = read_atm_data('PSL','001') #icefrac = read_atm_data('ICEFRAC','001') -pproj = np.load('/glade/scratch/aordonez/PSL_001_proj.npy') -iproj = np.load('/glade/scratch/aordonez/ICEFRAC_001_proj.npy') +pproj = np.load('/glade/scratch/aordonez/PSL_001_proj_test_global_blin.npy') +iproj = np.load('/glade/scratch/aordonez/ICEFRAC_001_test_proj_global_blin.npy') if nh: ice_func = read_ice_data ocn_func = read_ocn_data @@ -89,32 +89,18 @@ axs[0].set_title('Northern Hemisphere') axs[0].legend(loc = 'upper left')""" -#axs1 = axs[1].hist(psh1, bins = 20, histtype = 'stepfilled', color = 'r', label = '1980-1999', normed = True) -#axs2 = axs[1].hist(psh2, bins = 20, histtype = 'stepfilled', alpha = 0.5, color = 'b', label = '2060-2079', normed = True) -#axs[1].set_ylabel('Probability') -#axs[1].set_xlabel('Central pressure (mb)') -#axs[1].set_title('Southern Hemisphere') +lowsum = np.nansum(lows1, axis = 0) +k = np.where(lowsum == 1) +newx = np.arange(-30,-90,-0.5) +newy = np.arange(0,360,0.5) +X,Y = np.meshgrid(newx,newy) +s = interpolate.griddata((lon[k],lat[k]),lowsum[k],(X,Y),method = 'linear') -#f3.tight_layout() -#f3.show() -#f3.savefig('cyclone_central_pressure_hist.png') -#D**2 P Intensity at center - - -#data = {'psl':psl[0:365,:,:],'ice':icefrac[0:365,:,:]} -#latlist = {'psl':lat,'ice':lat} -#lonlist = {'psl':lon,'ice':lon} -#types = {'psl':'atm','ice':'ice'} -#boxes,ind = get_conic_boxes(lows1[0:365,:,:],data,types,latlist,lonlist) -#ind = ind.astype('int') -#mylow = np.where(lows1 == 1) -#time = mylow[0][ind] -#lowrow = mylow[1][ind] -#lowcol = mylow[2][ind] time = kx lowrow = ky lowcol = kz lows = lows1 + #k=np.where(lows_edit == 1.) latplot = np.tile(lat,(lows.shape[0],1,1)) lonplot = np.tile(lon,(lows.shape[0],1,1)) @@ -125,11 +111,11 @@ #lonplot = np.select([lows1 == 1.][lonplot]) proj = 'npstere' f,axs = plt.subplots(1,1) -map = Basemap(projection = proj, lat_0 = 90, lon_0 = 180, boundinglat = 40, round = True, ax = axs) +map = Basemap(projection = proj, lat_0 = -90, lon_0 = 180, boundinglat = -40, round = True, ax = axs) map.drawcoastlines() m = map.plot(lonplot,latplot,'bo',markersize = 1, latlon = True) plt.show() -f.savefig('nh_cyclone_count.png') +f.savefig('sh_cyclone_count.png') diff --git a/make_seasonal_storm_composite_plots.py b/make_seasonal_storm_composite_plots.py index 8c5d492..4710ee5 100644 --- a/make_seasonal_storm_composite_plots.py +++ b/make_seasonal_storm_composite_plots.py @@ -2,6 +2,9 @@ Makes storm composite plots for DJF,MAM,JJA, and SON in the Arctic. 
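+Composites are built by locating lows with find_cyclone_center and averaging
+101x101 boxes extracted around each low with get_boxes.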
Should double check low ID criteria in cyclone_composite_LENS to make sure it is compositing over the right areas before running.
+
+TODO: filter out storms that appear to be the same system as in a previous
+time step (parked storms) - we want the day before the storm arrived and the
+day after it left.
 """
 import matplotlib
 matplotlib.use('pdf')
@@ -9,36 +12,77 @@
 import matplotlib.colors as colors
 from import_LE_data import *
 from cyclone_composite_LENS import *
+from SH_plots import get_difference, remove_daily_mean
+import scipy.ndimage.filters as filters
+
+
+tmpmask, names = read_region_mask_arctic()
+rmask = np.zeros(tmpmask.shape)
+rmask[(tmpmask > 10) & (tmpmask < 14)] = 1 #Beaufort, Chukchi, E Siberian
+rmask[(tmpmask > 5) & (tmpmask < 9)] = 2 #Baffin, Greenland, Barents
+rmask[(tmpmask == 15)] = 3 # central Arctic
+rmask = np.reshape(rmask,(1,rmask.shape[0],rmask.shape[1]))
+reg = ['Chukchi','NA','Central']
+rnum = 1
+
 print 'loading data'
-pproj = np.load('/glade/scratch/aordonez/PSL_001_proj.npy')
-iproj = np.load('/glade/scratch/aordonez/ICEFRAC_001_proj.npy')
-tproj = np.load('/glade/scratch/aordonez/TS_001_proj.npy')
-uproj = np.load('/glade/scratch/aordonez/TAUX_001_proj.npy') * -1
-vproj = np.load('/glade/scratch/aordonez/TAUY_001_proj.npy') * -1
-dtdproj = np.load('/glade/scratch/aordonez/daidtd_001_proj.npy')
-dttproj = np.load('/glade/scratch/aordonez/daidtt_001_proj.npy')
-mproj = np.load('/glade/scratch/aordonez/meltb_001_proj.npy') #bottom melt
+pproj = np.load('/glade/scratch/aordonez/PSL_034_proj_test_global_blin.npy')
+iproj = np.load('/glade/scratch/aordonez/ICEFRAC_034_proj_test_global_blin.npy')
+tproj = np.load('/glade/scratch/aordonez/TS_034_proj_test_global_blin.npy')
+uproj = np.load('/glade/scratch/aordonez/TAUX_034_proj_test_global_blin.npy') * -1
+vproj = np.load('/glade/scratch/aordonez/TAUY_034_proj_test_global_blin.npy') * -1
+shflx = np.load('/glade/scratch/aordonez/SHFLX_034_proj_test_global_blin.npy')
+lhflx = np.load('/glade/scratch/aordonez/LHFLX_034_proj_test_global_blin.npy')
+fsns = np.load('/glade/scratch/aordonez/FSNS_034_proj_test_global_blin.npy')
+dtdproj = np.load('/glade/scratch/aordonez/daidtd_034_proj_blin.npy')
+dttproj = np.load('/glade/scratch/aordonez/daidtt_034_proj_blin.npy')
+dvtproj = np.load('/glade/scratch/aordonez/dvidtt_034_proj_blin.npy')
+dvdproj = np.load('/glade/scratch/aordonez/dvidtd_034_proj_blin.npy')
+mproj = np.load('/glade/scratch/aordonez/meltt_034_proj_blin.npy')
+fproj = np.load('/glade/scratch/aordonez/frazil_034_proj_blin.npy')
+cproj = np.load('/glade/scratch/aordonez/congel_034_proj_blin.npy')
+hblt = np.load('/glade/scratch/aordonez/HBLT_2_034_proj_blin.npy')
+sproj = np.load('/glade/scratch/aordonez/SST_005_proj_blin.npy')
+#dvtproj = 0 * hblt
+#dvdproj = 0 * hblt
+#dtdproj = 0*hblt
+#dttproj = 0*hblt
+#mproj = 0 * hblt
 lat,lon = read_stereo_lat_lon()
+
+print 'Calculating anomalies'
 # get ice as anomaly from 5-day mean
 wgts = [1./5,1./5,1./5,1./5,1./5,]
 def get_anomaly_from_ma(data,wgts):
     n = len(wgts)
-    half = n/2
+    half = n
     datama = np.zeros(data.shape)
     for ind in range(half,data.shape[0]-half):
         n0 = ind - half
-        nf = ind + half + 1
+        nf = ind
         datama[ind,:,:] = np.average(data[n0:nf,:,:],axis = 0,weights = wgts)
-    datama = datama - data
+    datama = data - datama
-    data[0:half,:,:] = 0
-    data[(data.shape[0]-half):,:,:] = 0
+    datama[0:half,:,:] = 0
+    datama[(datama.shape[0]-half):,:,:] = 0
     return datama
 
-dtdma = get_anomaly_from_ma(dtdproj,wgts)
-dttma = get_anomaly_from_ma(dttproj,wgts)
-mltma = get_anomaly_from_ma(mproj,wgts)
-iaprojma = get_anomaly_from_ma(iproj,wgts)
+#dtdma = get_anomaly_from_ma(dtdproj,wgts)
+#dttma = get_anomaly_from_ma(dttproj,wgts)
+#dvdma = get_anomaly_from_ma(dvdproj,wgts)
+#dvtma = get_anomaly_from_ma(dvtproj,wgts)
+#mltma = get_anomaly_from_ma(mproj,wgts)
+#iaprojma = get_anomaly_from_ma(iproj,wgts)
+dtdma = (dtdproj)
+dttma = (dttproj)
+dvdma = (dvdproj)
+dvtma = (dvtproj)
+mltma = (mproj)
+iaprojma = get_difference(iproj)
+#tma = get_difference(tproj)
+#hbltma = get_difference(hblt)
+tma = tproj
+hbltma = get_difference(hblt)
 
 seasons = ['djf','mam','jja','son']
 season_functions = {'djf':get_djf,'mam':get_mam,'jja':get_jja,'son':get_son}
@@ -51,65 +95,161 @@ def get_anomaly_from_ma(data,wgts):
     tprojn = tproj[start:end,:,:]
     iprojn = iproj[start:end,:,:]
-    iaprojn = iaprojma[start:end,:,:]
+    iaprojn = iaprojma[start:end,:,:] - get_nday_trend(iprojn,5)
+    print 'got iceanom'
     pprojn = pproj[start:end,:,:]
+    #panom = pproj[start:end,:,:] - trend_predict(pproj[start:end,:,:],5)
     uprojn = uproj[start:end,:,:]
     vprojn = vproj[start:end,:,:]
-    dtdn = dtdma[start:end,:,:]
-    dttn = dttma[start:end,:,:]
-    mltn = mltma[start:end,:,:]
+    shn = remove_daily_mean(shflx[start:end,:,:])
+    lhn = remove_daily_mean(lhflx[start:end,:,:])
+    fsn = remove_daily_mean(fsns[start:end,:,:])
+    print 'fsn'
+    dtdn = dtdma[start:end,:,:] - trend_predict(dtdma[start:end,:,:],5)
+    dttn = dttma[start:end,:,:] - trend_predict(dttma[start:end,:,:],5)
+    dvdn = dvdma[start:end,:,:] - trend_predict(dvdma[start:end,:,:],5)
+    dvtn = dvtma[start:end,:,:] - trend_predict(dvtma[start:end,:,:],5)
+    fn = fproj[start:end,:,:] - trend_predict(fproj[start:end,:,:],5)
+    cn = cproj[start:end,:,:] - trend_predict(cproj[start:end,:,:],5)
+    print 'cn'
+    mltn = mltma[start:end,:,:] - trend_predict(mltma[start:end,:,:],5)
+    print 1
+    tn = tma[start:end,:,:] - trend_predict(tma[start:end,:,:],5)
+    print 2
+    #hbltn = hbltma[start:end,:,:] - trend_predict(hbltma[start:end,:,:],5)
+    print 3
+    sn = sproj[start:end,:,:] - trend_predict(sproj[start:end,:,:],5)
+    print 4
+    #iaprojn = iaprojma[start:end,:,:]
+    #dtdn = dtdma[start:end,:,:]
+    #dttn = dttma[start:end,:,:]
+    #dvdn = dvdma[start:end,:,:]
+    #dvtn = dvtma[start:end,:,:]
+    #mltn = mltma[start:end,:,:]
+    #tn = tma[start:end,:,:]
+    #hbltn = hbltma[start:end,:,:]
 
-    for season in seasons:
+    for season in ['jja']:
+        print season
         get_season = season_functions[season]
         pprojseas = get_season(pprojn)
+        #panomseas = get_season(panom)
         iprojseas = get_season(iprojn)
         iaprojseas = get_season(iaprojn)
         tprojseas = get_season(tprojn)
         uprojseas = get_season(uprojn)
         vprojseas = get_season(vprojn)
+        shseas = get_season(shn)
+        lhseas = get_season(lhn)
+        fsseas = get_season(fsn)
         dttseas = get_season(dttn)
         dtdseas = get_season(dtdn)
+        dvtseas = get_season(dvtn)
+        dvdseas = get_season(dvdn)
+        fseas = get_season(fn)
+        cseas = get_season(cn)
         mseas = get_season(mltn)
-        lows = find_cyclone_center(pprojseas,iprojseas,lat,104000,90000)
-        print lows.shape
-        if (len(lows) < 1):
+        tdifseas = get_season(tn)
+        #hbltseas = get_season(hbltn)
+        sstseas = get_season(sn)
+
+        lowss = find_cyclone_center(pprojseas,iprojseas,None,107000,90000) # laplacian arg unused
+        lowss = remove_parked_lows(lowss)
+        #lowss[panomseas > -5] = 0
+        print lowss.shape
+
+        if (len(lowss) < 1):
             continue
-        elif (np.max(lows) == 1.0):
-            box,_,_ = get_boxes(lows,pprojseas,50,lat,lon,90000)
+        elif (np.max(lowss) == 1.0):
+            #k = np.where(rmask == rnum)
+            #mask = np.zeros(rmask.shape)
+            #mask[k] = 1
+            #lows = lowss * mask
+            lows = lowss
+
+            pprojseas[np.isnan(pprojseas)]= -1000
+            tprojseas[np.isnan(tprojseas)] = 
-1000 + iprojseas[np.isnan(iprojseas)]= -1000 + iaprojseas[np.isnan(iaprojseas)] = -1000 + uprojseas[np.isnan(uprojseas)] = -1000 + vprojseas[np.isnan(vprojseas)] = -1000 + shseas[np.isnan(shseas)]= -1000 + lhseas[np.isnan(lhseas)] = -1000 + fsseas[np.isnan(fsseas)] = -1000 + dtdseas[np.isnan(dtdseas)]= -1000 + dttseas[np.isnan(dttseas)] = -1000 + dvdseas[np.isnan(dvdseas)] = -1000 + dvtseas[np.isnan(dvtseas)] = -1000 + fseas[np.isnan(fseas)] = -1000 + cseas[np.isnan(cseas)] = -1000 + mseas[np.isnan(mseas)] = -1000 + #hbltseas[np.isnan(hbltseas)] = -1000 + tdifseas[np.isnan(tdifseas)] = -1000 + sstseas[np.isnan(sstseas)] = -1000 + + box,_ = get_boxes(lows,pprojseas,50,lat,lon,90000) # replace no data and absurd values with nan: - box[box == 0.0] = np.nan + #box[box == 0.0] = np.nan box[box > 108000] = np.nan box[box < 90000] = np.nan - tbox,_,_=get_boxes(lows,tprojseas,50,lat,lon,90000) - tbox[tbox == 0.0] = np.nan + tbox,_=get_boxes(lows,tprojseas,50,lat,lon,900) + #tbox[tbox == 0.0] = np.nan tbox[tbox < 180.] = np.nan tbox[tbox > 360.] = np.nan - ibox,_,_ = get_boxes(lows,iprojseas,50,lat,lon,10) + ibox,_ = get_boxes(lows,iprojseas,50,lat,lon,900) ibox[ibox <= 0.0] = np.nan ibox[ibox > 110] = np.nan + ibox[ibox > 100] = 100. - iabox,_,_ = get_boxes(lows,iaprojseas,50,lat,lon,10) - iabox[iabox > 100] = np.nan - iabox[iabox == 0.0] = np.nan - - dttbox,_,_ = get_boxes(lows,dttseas,50,lat,lon,5) - dttbox[dttbox == 0.0] = np.nan - dttbox[abs(dttbox) > 100] = np.nan - - dtdbox,_,_ = get_boxes(lows,dtdseas,50,lat,lon,5) - dtdbox[dtdbox == 0.0] = np.nan - dtdbox[abs(dtdbox) > 100] = np.nan - - mbox,_,_ = get_boxes(lows,mseas,50,lat,lon,5) - mbox[mbox == 0.0] = np.nan - mbox[abs(mbox) > 200] = np.nan - - ubox,_,_ = get_boxes(lows,uprojseas,50,lat,lon,500) - vbox,_,_ = get_boxes(lows,vprojseas,50,lat,lon,500) - ubox[ubox == 0.0] = np.nan - vbox[vbox == 0.0] = np.nan + iabox,_ = get_boxes(lows,iaprojseas,50,lat,lon,900) + iabox[iabox >= 100] = np.nan + iabox[iabox <= -100] = np.nan + + shbox,_ = get_boxes(lows,shseas,50,lat,lon,500) + shbox[shbox > 500] = np.nan + shbox[shbox < -500] = np.nan + lhbox,_ = get_boxes(lows,lhseas,50,lat,lon,500) + lhbox[lhbox > 500] = np.nan + lhbox[lhbox < -500] = np.nan + fsbox,_ = get_boxes(lows,fsseas,50,lat,lon,500) + fsbox[fsbox > 500] = np.nan + fsbox[fsbox < -500] = np.nan + dttbox,_ = get_boxes(lows,dttseas,50,lat,lon,500) + dttbox[abs(dttbox) > 500] = np.nan + + dtdbox,_ = get_boxes(lows,dtdseas,50,lat,lon,500) + dtdbox[abs(dtdbox) > 500] = np.nan + + dvdbox,_ = get_boxes(lows,dvdseas,50,lat,lon,500) + dvdbox[abs(dvdbox) > 500] = np.nan + + dvtbox,_ = get_boxes(lows,dvtseas,50,lat,lon,500) + dvtbox[abs(dvtbox) > 500] = np.nan + + fbox,_ = get_boxes(lows,fseas,50,lat,lon,500) + fbox[abs(fbox) > 500] = np.nan + + cbox,_= get_boxes(lows,cseas,50,lat,lon,500) + cbox[abs(cbox) > 500] = np.nan + + mbox,_ = get_boxes(lows,mseas,50,lat,lon,500) + mbox[abs(mbox) > 500] = np.nan + + #hbox,_ = get_boxes(lows,hbltseas,50,lat,lon,500) + #hbox[abs(hbox) > 900] = np.nan + + sstbox,_ = get_boxes(lows,sstseas,50,lat,lon,500) + sstbox[abs(sstbox) > 100] = np.nan + + tdifbox,_=get_boxes(lows,tdifseas,50,lat,lon,5000) + tdifbox[abs(tdifbox) > 360.] 
= np.nan + + ubox,_ = get_boxes(lows,uprojseas,50,lat,lon,500) + vbox,_ = get_boxes(lows,vprojseas,50,lat,lon,500) + #ubox[ubox == 0.0] = np.nan + #vbox[vbox == 0.0] = np.nan ubox[abs(ubox) > 500] = np.nan vbox[abs(vbox) > 500] = np.nan @@ -123,72 +263,272 @@ def get_anomaly_from_ma(data,wgts): iabox = iabox[k[0],:,:] ubox = ubox[k[0],:,:] vbox = vbox[k[0],:,:] + shbox = shbox[k[0],:,:] + lhbox = lhbox[k[0],:,:] + fsbox = fsbox[k[0],:,:] dtdbox = dtdbox[k[0],:,:] dttbox = dttbox[k[0],:,:] + dvtbox = dvtbox[k[0],:,:] + dvdbox = dvdbox[k[0],:,:] + fbox = fbox[k[0],:,:] + cbox = cbox[k[0],:,:] mbox = mbox[k[0],:,:] - - X,Y = np.meshgrid(range(0,101),range(0,101)) + sstbox = sstbox[k[0],:,:] + #hbox[k[0],:,:] + tboxdif = tdifbox[k[0],:,:] ubox = np.nanmean(ubox, axis = 0) vbox = np.nanmean(vbox, axis = 0) + tbox_filt = filters.gaussian_filter(np.nanmean(tbox,axis = 0),3) + pbox_filt = filters.gaussian_filter(np.nanmean(box, axis = 0),3) + + X,Y = np.meshgrid(range(0,101),range(0,101)) + + #plt.close('all') + # ice concentration + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(ibox*100,axis = 0),cmap='PuBu_r', + vmin = 0,vmax = 100) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_ice_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + + # ice concentration anomaly + cmax = np.nanmax(abs(np.nanmean(iabox*100, axis = 0))) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(iabox*100,axis = 0),cmap='PuOr', + vmin = -1*cmax,vmax = cmax) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_iceanom_' + str(n) + '_' + season + '_diff_nopark_detrend.png') + + # mean zonal wind stress + cmax = np.nanmax(abs(ubox)) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,ubox,cmap='PuOr',vmin = -1*cmax,vmax = cmax) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_taux_' + str(n) + '_' + season + '_mean_nopark_detrend.png') + + + # mean meridional wind stress + cmax = np.nanmax(abs(vbox)) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,vbox,cmap='PuOr',vmin = -1*cmax,vmax = cmax) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_tauy_' + str(n) + '_' + season + '_mean_nopark_detrend.png') + + + # sensible heat flux anomaly + cmax = np.nanmax(abs(np.nanmean(shbox, axis = 0))) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(shbox,axis = 0),cmap='PuOr', + vmin = -1*cmax,vmax = cmax) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + 
cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_shflx_' + str(n) + '_' + season + '_diff_nopark_detrend.png') + + # latent heat flux anomaly + cmax = np.nanmax(abs(np.nanmean(lhbox, axis = 0))) f,axs = plt.subplots(1,1) - h = axs.pcolormesh(X,Y,np.nanmean(ibox,axis = 0),cmap='PuBu_r', - vmin = 0,vmax = 1) + h = axs.pcolormesh(X,Y,np.nanmean(lhbox,axis = 0),cmap='PuOr', + vmin = -1*cmax,vmax = cmax) axs.streamplot(X,Y,ubox,vbox,linewidth = 1) - cs = axs.contour(X,Y,np.nanmean(tbox,axis = 0),range(240,300,5),colors = 'r') + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') plt.clabel(cs,inline = 1,fontsize = 12) - cs = axs.contour(X,Y,np.nanmean(box,axis = 0),range(90000,107000,100),colors = 'k') + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') plt.clabel(cs,inline = 1,fontsize = 12) f.colorbar(h,ax = axs) - f.savefig('all_001_ice_' + str(n) + '_' + season + 'png') + f.savefig('NH_034_lhflx_' + str(n) + '_' + season + '_diff_nopark_detrend.png') + # total heat flux anomaly + cmax = np.nanmax(abs(np.nanmean(shbox+lhbox, axis = 0))) f,axs = plt.subplots(1,1) - h = axs.pcolormesh(X,Y,np.nanmean(iabox,axis = 0),cmap='PuOr', - vmin = -0.002,vmax = 0.002) + h = axs.pcolormesh(X,Y,np.nanmean(shbox+lhbox,axis = 0),cmap='PuOr', + vmin = -1*cmax,vmax = cmax) axs.streamplot(X,Y,ubox,vbox,linewidth = 1) - cs = axs.contour(X,Y,np.nanmean(tbox,axis = 0),range(240,300,5),colors = 'r') + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') plt.clabel(cs,inline = 1,fontsize = 12) - cs = axs.contour(X,Y,np.nanmean(box,axis = 0),range(90000,107000,100),colors = 'k') + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') plt.clabel(cs,inline = 1,fontsize = 12) f.colorbar(h,ax = axs) - f.savefig('all_001_iceanom_' + str(n) + '_' + season + 'png') + f.savefig('NH_034_sfcflx_' + str(n) + '_' + season + '_diff_nopark_detrend.png') + # net surface shortwave flux anomaly + cmax = np.nanmax(abs(np.nanmean(fsbox, axis = 0))) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(fsbox,axis = 0),cmap='PuOr', + vmin = -1*cmax,vmax = cmax) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_fsns_' + str(n) + '_' + season + '_diff_nopark_detrend.png') + + """# mixed layer depth anomaly + cmax = np.nanmax(abs(np.nanmean(hbox / 100, axis = 0))) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(hbox/100,axis = 0),cmap='PuOr', + vmin = -1*cmax,vmax = cmax) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_hblt_' + str(n) + '_' + season + '_diff_nopark_detrend.png')""" + + # dynamic area tendency anomaly + cmax = np.nanmax(abs(np.nanmean(dtdbox, axis = 0))) f,axs = plt.subplots(1,1) h = axs.pcolormesh(X,Y,np.nanmean(dtdbox,axis = 0),cmap='PuOr', norm=colors.SymLogNorm(linthresh=0.01, - linscale=0.01,vmin=-0.5,vmax=0.5)) + linscale=0.1,vmin=-1*cmax,vmax=cmax)) axs.streamplot(X,Y,ubox,vbox,linewidth = 1) - cs = 
axs.contour(X,Y,np.nanmean(tbox,axis = 0),range(240,300,5),colors = 'r') + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') plt.clabel(cs,inline = 1,fontsize = 12) - cs = axs.contour(X,Y,np.nanmean(box,axis = 0),range(90000,107000,100),colors = 'k') + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') plt.clabel(cs,inline = 1,fontsize = 12) f.colorbar(h,ax = axs) - f.savefig('all_001_dtd_' + str(n) + '_' + season + 'png') + f.savefig('NH_034_dtd_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + # thermodynamic area tendency anomaly + cmax = np.nanmax(abs(np.nanmean(dttbox, axis = 0))) f,axs = plt.subplots(1,1) h = axs.pcolormesh(X,Y,np.nanmean(dttbox,axis = 0),cmap='PuOr', norm=colors.SymLogNorm(linthresh=0.01, - linscale=0.01,vmin=-0.5,vmax=0.5)) + linscale=0.1,vmin=-1*cmax,vmax=cmax)) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_dtt_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + + # dynamic volume tendency anomaly + cmax = np.nanmax(abs(np.nanmean(dvdbox, axis = 0))) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(dvdbox,axis = 0),cmap='PuOr', + norm=colors.SymLogNorm(linthresh=0.01, + linscale=0.1,vmin=-1*cmax,vmax=cmax)) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_dvidtd_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + + # thermodynamic volume tendency anomaly + cmax = np.nanmax(abs(np.nanmean(dvtbox, axis = 0))) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(dvtbox,axis = 0),cmap='PuOr', + norm=colors.SymLogNorm(linthresh=0.01, + linscale=0.1,vmin=-1*cmax,vmax=cmax)) axs.streamplot(X,Y,ubox,vbox,linewidth = 1) - cs = axs.contour(X,Y,np.nanmean(tbox,axis = 0),range(240,300,5),colors = 'r') + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') plt.clabel(cs,inline = 1,fontsize = 12) - cs = axs.contour(X,Y,np.nanmean(box,axis = 0),range(90000,107000,100),colors = 'k') + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') plt.clabel(cs,inline = 1,fontsize = 12) f.colorbar(h,ax = axs) - f.savefig('all_001_dtt_' + str(n) + '_' + season + 'png') + f.savefig('NH_034_dvidtt_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + # bottom melt anomaly + cmax = np.nanmax(abs(np.nanmean(mbox, axis = 0))) f,axs = plt.subplots(1,1) h = axs.pcolormesh(X,Y,np.nanmean(mbox,axis = 0),cmap='PuOr', norm=colors.SymLogNorm(linthresh=0.01, - linscale=0.01,vmin=-0.5,vmax=0.5)) + linscale=0.1,vmin=-1*cmax,vmax=1*cmax)) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,5),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_meltt_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + + # frazil anomaly + cmax = np.nanmax(abs(np.nanmean(fbox, axis = 0))) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(fbox,axis = 
0),cmap='PuOr', + norm=colors.SymLogNorm(linthresh=0.01, + linscale=0.1,vmin=-1*cmax,vmax=1*cmax)) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,5),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_frazil_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + + # congelation anomaly + cmax = np.nanmax(abs(np.nanmean(cbox, axis = 0))) + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(cbox,axis = 0),cmap='PuOr', + norm=colors.SymLogNorm(linthresh=0.01, + linscale=0.1,vmin=-1*cmax,vmax=1*cmax)) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,5),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_congel_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + + + #surface temperature anomaly + f,axs = plt.subplots(1,1) + h = axs.pcolormesh(X,Y,np.nanmean(tdifbox,axis = 0),cmap='PuBu_r', + vmin = -5,vmax = 5) + axs.streamplot(X,Y,ubox,vbox,linewidth = 1) + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') + plt.clabel(cs,inline = 1,fontsize = 12) + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') + plt.clabel(cs,inline = 1,fontsize = 12) + f.colorbar(h,ax = axs) + f.savefig('NH_034_ts_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + + + #surface temperature anomaly + f,axs = plt.subplots(1,1) + cmax = np.nanmax(abs(np.nanmean(sstbox, axis = 0))) + h = axs.pcolormesh(X,Y,np.nanmean(sstbox,axis = 0),cmap='PuBu_r', + vmin = -1*cmax,vmax = cmax) axs.streamplot(X,Y,ubox,vbox,linewidth = 1) - cs = axs.contour(X,Y,np.nanmean(tbox,axis = 0),range(240,300,5),colors = 'r') + cs = axs.contour(X,Y,tbox_filt,range(240,300,1),colors = 'r') plt.clabel(cs,inline = 1,fontsize = 12) - cs = axs.contour(X,Y,np.nanmean(box,axis = 0),range(90000,107000,100),colors = 'k') + cs = axs.contour(X,Y,pbox_filt,range(90000,107000,100),colors = 'k') plt.clabel(cs,inline = 1,fontsize = 12) f.colorbar(h,ax = axs) - f.savefig('all_001_meltb_' + str(n) + '_' + season + 'png') + f.savefig('NH_005_sst_' + str(n) + '_' + season + '_dailyanom_nopark_detrend.png') + + plt.close('all') diff --git a/plot_LENS_storm_track.py b/plot_LENS_storm_track.py index bb1ea17..5040fd7 100644 --- a/plot_LENS_storm_track.py +++ b/plot_LENS_storm_track.py @@ -13,6 +13,7 @@ from import_LE_data import * from cyclone_composite_LENS import * from scipy.signal import butter, lfilter +from hyb2pres import * def calculate_g(height): @@ -33,21 +34,23 @@ def convert_geopotential_to_meters(Z): z = -1 * ( (Z*9.81 /(G*m) - 1./re)**(-1) + re) return z -def calculate_eady(lat,Z500,T500,TS): +def calculate_eady(lat,U,Z,T): # eady_growth_rate = 0.3098*g*abs(f)*abs(du/dz)/brunt_vaisala # def from NCL page # estimated at 500 mb - time,rown,coln = TS.shape + time,lev,rown,coln = T.shape g = 9.81 w = 2. * np.pi / (60.*60.*24.) - f = 2. * w * np.sin(lat[:,1:,:] * 2. * np.pi / 180.) - z500 = convert_geopotential_to_meters(Z500) - N = np.sqrt(g / TS[:,1:,:] * (TS[:,1:,:] - T500[:,1:,:])/z500[:,1:,:]) + f = 2. * w * np.sin(lat[:,1:,:,:] * 2. * np.pi / 180.) 
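+    # Eady growth rate (NCL definition): sigma = 0.3098 * g * |f| * |dU/dz| / N,
+    # with N**2 = (g/T) * dT/dz; both vertical derivatives below are finite
+    # differences between adjacent levels of the T, U, and Z inputs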
+ #z500 = convert_geopotential_to_meters(Z500) + dT_dz = (T[:,0:lev-1,:,:]-T[:,1:lev,:,:]) / (Z[:,0:lev-1,:,:]-Z[:,1:lev,:,:]) + N = np.sqrt(g / T[:,1:lev,:,:] * dT_dz) + du_dz = (U[:,0:lev-1,:,:] - U[:,1:lev,:,:]) / (Z[:,0:lev-1,:,:]-Z[:,1:lev,:,:]) - dT = TS[:,1:rown,:] - TS[:,0:rown-1,:] - dy = (lat[:,1:rown,:] - lat[:,0:rown-1,:]) * 110000 - dT_dy = dT / dy - du_dz = -1 * g/(f*TS[:,1:,:]) * dT_dy #(approximate) + #dT = TS[:,1:rown,:] - TS[:,0:rown-1,:] + #dy = (lat[:,1:rown,:] - lat[:,0:rown-1,:]) * 110000 + #dT_dy = dT / dy + #du_dz = -1 * g/(f*TS[:,1:,:]) * dT_dy #(approximate) eady = 0.3098 * g * abs(f) * abs(du_dz) / N eady[eady > 1e100] = np.nan return eady * (60 * 60 * 24) #per day @@ -63,7 +66,7 @@ def calculate_eady_surface(lat,num): f = 2. * w * np.sin(lat[:,1:,:] * 2. * np.pi / 180.) #Z = convert_geopotential_to_meters(dZ) #N = np.sqrt(g / T1[:,1:,:] * (T1[:,1:,:] - T2[:,1:,:])/Z[:,1:,:]) - N = np.sqrt(g / T[:,1,:,:] * dT_dZ + N = np.sqrt(g / T[:,1,:,:] * dT_dz) dT = T1[:,1,1:rown,:] - T1[:,1,0:rown-1,:] dy = (lat[:,1:rown,:] - lat[:,0:rown-1,:]) * 110000 dT_dy = dT / dy @@ -90,27 +93,61 @@ def butter_bandpass_filter(data, lowcut, highcut, fs, order=5): return np.reshape(y,(u,v,w)) def get_jja_monthly(data): - nmnths,x,y = data.shape - nyrs = nmnths / 12. - data = np.reshape(data,(12,nyrs,x,y)) - data = data[6:9,:,:,:] - return np.reshape(data,(3*nyrs,x,y)) + if len(data.shape) == 3: + nmnths,x,y = data.shape + nyrs = nmnths / 12. + data = np.reshape(data,(12,nyrs,x,y)) + data = data[6:9,:,:,:] + data = np.reshape(data,(3*nyrs,x,y)) + elif len(data.shape) == 4: + nmnths,lev,x,y = data.shape + nyrs = nmnths / 12. + data = np.reshape(data,(12,nyrs,lev,x,y)) + data = data[6:9,:,:,:,:] + data = np.reshape(data,(3*nyrs,lev,x,y)) + return data def read_T_and_Z(num): - ncfile = '/glade/scratch/aordonez/LENS_T_850_' + str(num) + '_1920-2005.nc' + ncfile = '/glade/scratch/aordonez/LENS_T_850_' + str(num).zfill(3) + '_1920-2005.nc' data = Dataset(ncfile) - T = data.variables['T'][:] - dT = abs(T[2]-T[0]) - - Z = data.variables['Z'][:] - h1 = convert_geopotential_to_meters(Z[0]) - h2 = convert_geopotential_to_meters(Z[2]) + if num == 1: + t1 = ((1980-1850)*12) - 1 + T = data.variables['T850'][t1:(t1+12*26),:,:,:] + Z = data.variables['Z850'][t1:(t1+12*26),:,:,:] + else: + t1 = ((1980-1920)*12) - 1 + T = data.variables['T850'][t1:(t1+12*26),:,:,:] + Z = data.variables['Z850'][t1:(t1+12*26),:,:,:] + ncfile = '/glade/scratch/aordonez/LENS_T_850_' + str(num).zfill(3) + '_2006-2080.nc' + data = Dataset(ncfile) + T2 = data.variables['T850'][:] + Z2 = data.variables['Z850'][:] + print T2.shape + print Z2.shape + T = np.concatenate((T,T2), axis = 0) + Z = np.concatenate((Z,Z2), axis = 0) + print T.shape + print Z.shape + T[T>400] = np.nan + Z[Z > 10000] = np.nan + T = get_jja_monthly(T) + Z = get_jja_monthly(Z) + + dT = abs(T[:,2,:,:]-T[:,0,:,:]) + h1 = convert_geopotential_to_meters(Z[:,0,:,:]) + h2 = convert_geopotential_to_meters(Z[:,2,:,:]) dH = abs(h2-h1) dT_dz = dT / dH return T,dT_dz,dH +def potential_temperature(T,plev,P0): + l = len(plev) + plev = np.reshape(plev,(1,l,1,1)) * np.ones(T.shape) + theta = T * (P0 / plev) ** (0.286) + return theta -nlens = 30 + +nlens = 35 pstd = np.zeros((nlens,192,288)) pstd2 = np.zeros((nlens,192,288)) icemean = np.zeros((nlens,192,288)) @@ -126,12 +163,28 @@ def read_T_and_Z(num): psl = read_atm_data('PSL',test_num) / 100. 
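+    # note PSL is converted from Pa to hPa above; the rest of the loop builds
+    # the storm-track metrics for each member: JJA variance of bandpass-filtered
+    # PSL and Z500, mean ice fraction, and the JJA-mean Eady growth rate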
icefrac = read_atm_data('ICEFRAC',test_num) Z500 = read_atm_data('Z500',test_num) + lattile = np.tile(lat,(20*12,1,1)) + """ + This part was for trying to get the maximum + eady growth rate: + P0, hyam, hybm = read_atm_hyb_coord_monthly(test_num) + P0 = P0 / 100 T = read_atm_data_monthly('T',test_num) - T1 = T[:,28,:,:] - T2 = T[:,29,:,:] + PS = read_atm_data_monthly('PS',test_num) / 100 Z = read_atm_data_monthly('Z3',test_num) - dZ = Z[:,29,:,:]-Z[:,28,:,:] - lattile = np.tile(lat,(dZ.shape[0],1,1)) + U = read_atm_data_monthly('U',test_num) + pnew = np.arange(1000,200,-5) + Tp = np.zeros((T.shape[0],len(pnew),T.shape[2],T.shape[3])) + Zp = np.zeros(Tp.shape) + Up = np.zeros(Tp.shape) + for t in range(0,10): + Tp[t,:,:,:] = hyb2pres(T[t,:,:,:],PS[t,:,:],P0,hyam,hybm,pnew) + Zp[t,:,:,:] = hyb2pres(Z[t,:,:,:],PS[t,:,:],P0,hyam,hybm,pnew) + Up[t,:,:,:] = hyb2pres(U[t,:,:,:],PS[t,:,:],P0,hyam,hybm,pnew) + theta = potential_temperature(Tp,pnew,P0) + h = convert_geopotential_to_meters(Zp) + lattile = np.tile(lat,(20*3,len(pnew),1,1)) + """ start = 0 end = 365*20 @@ -146,10 +199,12 @@ def read_T_and_Z(num): ztmp_filt = get_jja(ztmp_filt) zstd[num-1,:,:,] = np.var(ztmp_filt,axis = 0) icemean[num-1,:,:] = np.nanmean(icefrac[start:end,:,:],axis = 0) - etmp = calculate_eady_surface(get_jja_monthly(lattile[start:end1,:,:]), - get_jja_monthly(dZ[start:end1,:,:]), - get_jja_monthly(T1[start:end1,:,:]), - get_jja_monthly(T2[start:end1,:,:])) + print lattile.shape + print psl.shape + etmp = calculate_eady_surface(get_jja_monthly(lattile[start:end,:,:]),num) + print np.nanmean(etmp) + print np.nanmax(etmp) + print np.nanmin(etmp) etmp = np.nanmean(etmp,axis = 0) eady1[num-1,:,:] = etmp @@ -167,10 +222,7 @@ def read_T_and_Z(num): ztmp2_filt = get_jja(ztmp2_filt) zstd2[num-1,:,:,] = np.var(ztmp2_filt,axis = 0) icemean2[num-1,:,:] = np.nanmean(icefrac[start:end,:,:],axis = 0) - etmp = calculate_eady_surface(get_jja_monthly(lattile[start2:end2,:,:]), - get_jja_monthly(dZ[start2:end2,:,:]), - get_jja_monthly(T1[start2:end2,:,:]), - get_jja_monthly(T2[start2:end2,:,:])) + etmp = calculate_eady_surface(get_jja_monthly(lattile[start:end,:,:]),num) etmp = np.nanmean(etmp,axis = 0) eady2[num-1,:,:] = etmp @@ -185,6 +237,18 @@ def read_T_and_Z(num): zstd = np.nanmean(zstd,axis = 0) zstd2 = np.nanmean(zstd2,axis = 0) + +np.save('/glade/scratch/aordonez/eady1.npy',eady1) +np.save('/glade/scratch/aordonez/eady2.npy',eady2) +np.save('/glade/scratch/aordonez/pstd.npy',pstd) +np.save('/glade/scratch/aordonez/pstd2.npy',pstd2) +np.save('/glade/scratch/aordonez/zstd.npy',zstd) +np.save('/glade/scratch/aordonez/zstd2.npy',zstd2) +np.save('/glade/scratch/aordonez/icemeaneady.npy',icemean) +np.save('/glade/scratch/aordonez/icemeaneady2.npy',icemean2) + +quit() + proj = 'npstere' #---------------------- @@ -260,7 +324,6 @@ def read_T_and_Z(num): #---------------------- # Eady #---------------------- - f,axs=plt.subplots(1,3) map = Basemap(projection = proj, lat_0 = 90, lon_0 = 180, boundinglat = 40, round = True, ax = axs[0]) x,y = map(lon,lat) diff --git a/save_stereo_vars.py b/save_stereo_vars.py index 98894eb..b36b2b4 100644 --- a/save_stereo_vars.py +++ b/save_stereo_vars.py @@ -5,10 +5,17 @@ """ from import_LE_data import * -num = '011' -save_atm_vars_as_stereo(num) -save_ice_vars_as_stereo(num) -save_ocn_vars_as_stereo(num) + +num = ['034','035'] +#num = ['005'] +#num = ['001','002','003','004','005','006','007','008','009','010'] +#num = ['011','012','013','014','015','016','017','018','019','020'] +#num = 
['021','022','023','024','025','026','027','028','029','030'] +for n in num: + save_psl_laplacian_as_stereo(n) + #save_atm_vars_as_stereo(n) + #save_ice_vars_as_stereo(n) + #save_ocn_vars_as_stereo(n)
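
A minimal sketch of the downstream compositing step these scripts feed. The
file paths are copied from the scripts above; importing read_stereo_lat_lon
from import_LE_data (the scripts use star imports) and passing None for
find_cyclone_center's unused laplacian argument are assumptions, so treat this
as an illustration rather than a tested recipe:

import numpy as np
from import_LE_data import read_stereo_lat_lon
from cyclone_composite_LENS import find_cyclone_center, get_boxes, get_jja

# daily fields on the 25 km stereo grid, written by save_stereo_vars.py
psl = np.load('/glade/scratch/aordonez/PSL_034_proj_test_global_blin.npy')
ice = np.load('/glade/scratch/aordonez/ICEFRAC_034_proj_test_global_blin.npy')
landmask = np.load('/glade/scratch/aordonez/landmask_stereo.npy')
lat,lon = read_stereo_lat_lon()

# June-August days only
psl_jja = get_jja(psl)
ice_jja = get_jja(ice)

# lows must sit over >15% ice cover with central pressure between
# 900 and 1070 hPa, and coincide with a local max of the SLP Laplacian
lows = find_cyclone_center(psl_jja,ice_jja,None,107000,90000)

# 101x101 boxes rotated so north is up, coast-buffered with the land mask
boxes,inds = get_boxes(lows,psl_jja,50,lat,lon,landmask)
composite = np.nanmean(boxes,axis = 0)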