In [ ]:
%load_ext autoreload
%autoreload 2
%run imports.py

Young Sound

Map

Young Sound bathymetry data is courtesy of T. Vang & J. Bendtsen. We were not able to make this file available here.

In [ ]:
ds = xr.load_dataset('../data/youngsound2015/YS2015_FN.nc')
stations = gv.Points((ds.lon, ds.lat)).relabel('Co-located MSS/SUNA station')

fname_bathymetry = '../data/youngsound2015/ys_bathy_v3.0_raw.nc'
if os.path.exists(fname_bathymetry):
    ds = xr.load_dataset(fname_bathymetry)
    ds = ds.isel(Longitude=slice(None, None, 5), Latitude=slice(None, None, 5))  # coarsen the grid 5x for faster plotting
    bathymetry = gv.QuadMesh(ds.Bathymetry, ['Longitude', 'Latitude'], crs=ccrs.PlateCarree())

    # Create coastline by buffering the union of land-mask (Landmask==1) grid points
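    # (points are reprojected to the metric EPSG:3413 grid so that the 400 m buffer
    # is in meters; the resulting outline is projected back to geographic coordinates)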

    platecarre_eps = from_epsg(4326)
    nps_eps = from_epsg(3413)

    df = ds.to_dataframe().reset_index()
    df = df.loc[df.Landmask==1]
    gdf = gpd.GeoDataFrame(
        df,
        geometry=[Point(x, y) for x, y in zip(df.Longitude, df.Latitude)],
        crs=platecarre_eps
    ).to_crs(nps_eps)

    coast_shp = gdf.unary_union.buffer(400).boundary.geoms[1]  # .geoms is required with shapely >= 2.0
    coast_gs = gpd.GeoSeries([coast_shp], crs=nps_eps).to_crs(platecarre_eps)[0]
    coast = gv.Shape(coast_gs.simplify(0.005))
else:
    coast = gv.Points([0, 0]).relabel('Coast data not available')
    bathymetry = gv.Points([0, 0]).relabel('Bathymetry data not available')
In [ ]:
proj = ccrs.LambertConformal(central_longitude=-21.)

data_unavailable_kwarg = dict(backend='matplotlib', color='w', apply_extents=False, apply_ranges=False)
options = [
    opts.Feature(backend='matplotlib', projection=proj),
    opts.Points(
        backend='matplotlib', projection=proj, color='orange', s=50, ec='k', padding=.1, 
        show_legend=True, #legend_position='left'
    ),
    opts.Points('Coast data not available', **data_unavailable_kwarg),
    opts.Points('Bathymetry data not available', **data_unavailable_kwarg),
    opts.Shape(backend='matplotlib', linewidth=2),
    opts.QuadMesh(backend='matplotlib', cmap=cmocean.cm.tempo_r, colorbar=True)
]

l = bathymetry.clone() * coast.clone() * stations

def adjust(fig):
    ax = fig.axes[0]
    fig.set_size_inches(10, 6)

    from utils.mpl import set_cartopy_grid
    set_cartopy_grid(ax, lons=np.arange(-22.4, -19.4, .4), lats=np.arange(74.0, 75, 0.1))
    
    transform = ccrs.PlateCarree()._as_mpl_transform(ax)
    arrowprops = dict(
        facecolor='gray', arrowstyle='simple',
        connectionstyle='arc3,rad=-0.2',
        alpha=0.5
    )
    kwargs = dict(
        xycoords=transform, fontsize=15,
        arrowprops=arrowprops, ha='center', va='center'
    )
    ax.annotate(
        'Outer sill to\nGreenland Sea', xy=(-20.13, 74.27), xytext=(-20.4, 74.53), 
        **kwargs
    )
    ax.annotate(
        'Inner sill', xy=(-21.8, 74.44), xytext=(-21.9, 74.3), 
        **kwargs
    )
 
fname = '../nb_fig/FIGURE_FN-NEW_YOUNGSOUND_map'
fig = mplrender(l.opts(*options), fname=fname, hooks=[adjust])
fig
In [ ]:
data_unavailable_kwarg = dict(color='w', apply_extents=False, apply_ranges=False)
options = [
    # opts.Feature(projection=proj),
    opts.Points(
        frame_width=500,
        # projection=proj, 
        color='orange', size=5, line_color='k', padding=.1, 
        show_legend=True, #legend_position='left'
    ),
    opts.Points('Coast data not available', **data_unavailable_kwarg),
    opts.Points('Bathymetry data not available', **data_unavailable_kwarg),
    opts.Shape(line_width=2),
    opts.QuadMesh(cmap=cmocean.cm.tempo_r, colorbar=True, tools=['hover'])
]

l = l.opts(*options).redim(
    Latitude=hv.Dimension('Latitude', unit='°N'),
    Longitude=hv.Dimension('Longitude', unit='°E'),
)

l = (
    l 
    * gv.Text(-20.1, 74.3, 'Outer sill') 
    * gv.Text(-21.75, 74.46, 'Inner sill')
)
hv.save(l, fname, fmt='html')
panelA = l.clone().opts(title='A')

Hydrographic transect

In [ ]:
ds = xr.load_dataset('../data/youngsound2015/YS2015_CTD.nc')

def add_transect_distances(ctd):
    """
    For dataframe ctd with columns 'lon', 'lat', 'station', 
    define cumulative distances along a transect from west to east.
    """
    ctd = ctd.dropna(subset=['lat'])
    df = ctd.copy()
    ctd = gpd.GeoDataFrame(ctd.groupby('station').mean())
    ctd['geometry'] = [Point(x, y) for x, y in zip(ctd.lon, ctd.lat)]
    ctd.crs = from_epsg(4326)
    ctd = ctd.to_crs(from_epsg(3413))

    s = gpd.GeoSeries(ctd.sort_values(by='lon', ascending=True).geometry)

    dist = (
        np.sqrt(s.centroid.x.diff()**2 + s.centroid.y.diff()**2)
        .fillna(value=0).cumsum().rename('dist') / 1e3
    )  # in km

    ctd = df.merge(dist, on='station', how='outer')
    ctd = ctd.sort_values(by='dist', ascending=True)
    return ctd

df = ds[['lon', 'lat', 'station']].to_dataframe()
# remove a station not part of the transect
df = df.drop(df.loc[df.station=='Lerbugten'].index)
# average nearby SILL stations; their small separations are negligible for a transect spanning the entire fjord
df.loc[df.station.str.startswith('SILL'), 'station'] = 'SILL'
station2dist = add_transect_distances(df).groupby('station').dist.mean()
In [ ]:
#CTD 

ds = xr.load_dataset('../data/youngsound2015/YS2015_CTD.nc')
ds['dist'] = ('cast', station2dist.reindex(ds.station).values)
ds = ds.to_dataframe().groupby(['P', 'dist']).mean(numeric_only=True).to_xarray()
sal_label = 'B: Salinity [g kg-1]'
# S = ds.S.hvplot.quadmesh().relabel(sal_label)
S = ds.S.hvplot.contourf(levels=50).relabel(sal_label)

# SUNA

ds = xr.load_dataset('../data/youngsound2015/YS2015_SUNA.nc')
ds['dist'] = ('cast', station2dist.reindex(ds.station).values)
ds = ds.to_dataframe().groupby(['P', 'dist']).mean(numeric_only=True).to_xarray()
ntr_label = 'C: Nitrate conc [µM]'
ntr = ds.nitrate.hvplot.contourf(levels=20).relabel(ntr_label)

# MSS

ds = xr.load_dataset('../data/youngsound2015/YS2015_MSS.nc')
ds['dist'] = ('set', station2dist.reindex(ds.station).values)
df = ds.to_dataframe().reset_index()
df.P = pd.cut(df.P, bins=np.arange(5, 100, 2), labels=np.arange(6, 100, 2))

ds = df.groupby(['P', 'dist']).median(numeric_only=True).to_xarray()
eps_label = 'D: Dissipation of TKE [log(W kg-1)]'
eps = np.log10(ds.eps).hvplot.contourf().relabel(eps_label)
In [ ]:
dims = dict(
    dist=hv.Dimension('dist', label='Distance', unit='km', range=(0, 90)),
    P=hv.Dimension('P', label='Pressure', unit='dbar', range=(0, 100)),
)

options = [
    opts.Polygons(invert_yaxis=True), 
    opts.Polygons(sal_label, frame_height=150, cmap=cmocean.cm.dense, xaxis='bare', color_levels=50),
    opts.Polygons(ntr_label, frame_height=150, xaxis='bare', clim=(0, 5), line_color='k'),
    opts.Polygons(eps_label, frame_height=150, cmap=cmocean.cm.rain, color_levels=50),
]

l = (S + ntr + eps).cols(1).redim(**dims).opts(*options)

panelB = l.clone()

l
In [ ]:
fname = '../nb_fig/FIGURE_FN-NEW_YOUNGSOUND_transect'
hv.save(l, fname, fmt='html')
hv.save(l.opts(toolbar=None), fname, fmt='png')

New/regenerated production incubations

In [ ]:
def load_newP():
    df = pd.read_csv('../data/youngsound2015/YS2015_N15_inc.csv')
    df = df.rename(columns={
        'newProd_uM_N_d-1': 'newP', 'regProd_uM_N_d-1': 'regP', 
        'depth_m': 'depth', 'NOx_init_uM': 'NOx',
    })
    df['fratio'] = df.newP / (df.newP + df.regP)
    
    # It is difficult to estimate integrated new production from measurements
    # at only two depths. Here we give a very rough and conservative estimate
    # by assuming the 5 m value holds over [0, 5] m and the 20 m value over [5, 20] m.
    newPint = (
        df.set_index(['depth', 'station']).newP.to_xarray()
        * xr.DataArray(data=[5, 15], coords=dict(depth=[5, 20]), dims=['depth'])
    ).sum(dim='depth').rename('newPint').to_dataframe()
    return df.merge(newPint, on='station')

df = load_newP()
df['dist'] = station2dist.reindex(df.station).values

# jitter distance by a small amount to accommodate both incubations at a single station
dx = 1
df['dist_jitter'] = df.dist + df.depth.apply(lambda s: {5: -dx, 20: dx}[s])

dims = dict(
    newP=hv.Dimension('newP', label='New Production', unit='µM d⁻¹', range=(.001, 0.2)),
)

options = [
    opts.Spikes(
        padding=.1,
        line_width=10,
        logy=True,
        position=1e-3,
        frame_width=500, 
        color=hv.Cycle(['blue', 'orange']), show_legend=True
    ),
    opts.NdOverlay(legend_position='top_left')
]

l = hv.Dataset(df, ['dist_jitter', 'depth'], 'newP').to(hv.Spikes).overlay()
l = l.redim(**dims)
l.opts(*options)
In [ ]:
l = hv.Spikes(np.log10(df.newPint.to_frame(name='newPint'))).redim(value='log_newPint').relabel('New production')
newPint_hv = l
newPint_hv

Flux

In [ ]:
df = xr.load_dataset('../data/youngsound2015/YS2015_FN.nc').to_dataframe().assign(station_type='')

sill = ['GH01', 'YS3.02', 'Tyro07']

on_sill = df.station.str.contains('SILL') | df.station.isin(sill)
df.loc[on_sill, 'station_type'] = 'Sills'
df.loc[~on_sill, 'station_type'] = 'Interior'

display(HTML(df[['FN']].describe().to_html()))
display(HTML(df.groupby('station_type').FN.describe().to_html()))
display(HTML(df.groupby('station_type').mean(numeric_only=True).to_html()))

options = [
    opts.Distribution(
        padding=(0, (0, 0.05)),
        fill_color=hv.Cycle(['blue', 'orange']),
        # fill_color=hv.Cycle(['#1b9e77','#d95f02','#7570b3']),
        xticks=[(logx, 10**logx) for logx in range(-3, 3, 1)]
    ),
    opts.Spikes(spike_length=0.18, line_width=7, tools=['hover']),
    opts.NdOverlay(legend_position='top_right'),
]
dims = dict(
    logFN=hv.Dimension('logFN', label='Nitrate flux', unit='mmol m⁻² d⁻¹', range=(-3.5, 2.5)),
    Density='Probability density',
)

l = df.assign(logFN=np.log10(df.FN)).hvplot.kde('logFN', by='station_type') * newPint_hv
l = l.opts(*options).redim(**dims)

l = l * hv.Text(-1.45, 0.55, 'Median:\n0.04') * hv.Text(-0.2, 0.3, 'Median:\n0.33')

panelC = l.clone().opts(title='E')
l
In [ ]:
fname = '../nb_fig/FIGURE_FN-NEW_YOUNGSOUND_fn'
hv.save(l, fname, fmt='html')
hv.save(l.opts(toolbar=None), fname, fmt='png')

Merge figures

In [ ]:
fname = '../nb_fig/FIGURE_FN-NEW_YOUNGSOUND'
l = (panelA + panelB + panelC).cols(1)
hv.save(l, fname, fmt='html')
l = l.opts(toolbar=None)
hv.save(l, fname, fmt='png')

# '-append' stacks the three panel images vertically into the merged figure
subprocess.run([
    'convert',
    '../nb_fig/FIGURE_FN-NEW_YOUNGSOUND_map.png',
    '../nb_fig/FIGURE_FN-NEW_YOUNGSOUND_transect.png',
    '../nb_fig/FIGURE_FN-NEW_YOUNGSOUND_fn.png',
    '-append', '../nb_fig/FIGURE_FN-NEW_YOUNGSOUND.png',
])
# remove the per-panel images only after they have been merged
os.system('rm ../nb_fig/FIGURE_FN-NEW_YOUNGSOUND_*.png')

Laptev

In [ ]:
dims = dict(
    Nitrate=hv.Dimension('Nitrate', range=(0, None), unit='µM'),
    eps=hv.Dimension('eps', label='Dissipation', unit='W kg⁻¹'),
    Depth=hv.Dimension('Depth', unit='m'),
)

Winter/summer NO3 profiles from 2008/2011

In [ ]:
try:
    # Only executed when there is access to the raw data files.
    # keys (including the mis-encoded degree signs) are copied verbatim
    # from the raw Excel headers
    renamedict = {
        'Lon [ーE]':'lon',
        'Lat [ーN]': 'lat',
        'Nitrate [umol/l]': 'nitrate',
        'bottle Salinity [psu]': 'sal',
        'Cruise': 'cruise',
        'mon/day/yr': 'date',
        'Temperature [ーC]': 'temp',
        'Depth [m]': 'depth',
        'Station': 'station'
    }

    sel = ['date','lat','lon','nitrate','depth','temp','sal','station','cruise']

    kwargs = dict(parse_dates=['mon/day/yr'], dtype={'Station': str}, date_parser=lambda x: dt.datetime.strptime(x, '%m/%d/%Y'))
    dfs = pd.read_excel('../data/laptev/nitrate_2011/LaptevData.xlsx', **kwargs)
    dfw = pd.read_excel('../data/laptev/nitrate_2008/LaptevData2.xlsx', **kwargs)
    df = pd.concat([dfw, dfs], sort=False)
    df = df.rename(columns=renamedict).dropna(how='all', subset=['nitrate'])[sel]

    df = df.assign(month=lambda d: d.date.dt.month)
    df.depth = pd.to_numeric(pd.cut(df.depth, bins=np.arange(0, 80, 5), labels=np.arange(2.5, 75, 5)))


    # extract data in publishable format
    metadata_dims = ['lon', 'lat', 'date', 'month', 'station']
    #
    df_winter = df.loc[df.month<=6]
    df_winter_mean = df_winter.groupby('depth').mean(numeric_only=True).dropna(how='all')
    df_winter_meta = df_winter[metadata_dims].groupby('station', as_index=False).first()
    #
    closest_summer_stations = np.array([4, 5, 20, 25]).astype(str)
    df_summer = df.loc[df.station.isin(closest_summer_stations)]
    df_summer_mean = df_summer.groupby('depth').mean(numeric_only=True).dropna(how='all')
    df_summer_meta = df_summer[metadata_dims].groupby('station', as_index=False).first()

    # save
    df_winter_mean.to_csv('../data/laptev/nitrate_2008/mean_profile.csv')
    df_summer_mean.to_csv('../data/laptev/nitrate_2011/mean_profile.csv')
    df_winter_meta.to_csv('../data/laptev/nitrate_2008/metadata.csv')
    df_summer_meta.to_csv('../data/laptev/nitrate_2011/metadata.csv')

except Exception:
    # Otherwise (e.g. when the raw files are not available), use the previously exported files
    df_winter_mean, df_summer_mean, df_winter_meta, df_summer_meta = (
      pd.read_csv('../data/laptev/nitrate_2008/mean_profile.csv'),
      pd.read_csv('../data/laptev/nitrate_2011/mean_profile.csv'),
      pd.read_csv('../data/laptev/nitrate_2008/metadata.csv'),
      pd.read_csv('../data/laptev/nitrate_2011/metadata.csv'),
    )

Define holoviews data objects

In [ ]:
label_winter = 'Winter 2008 mean'

ntr_winter2008 = hv.Curve(df_winter_mean, 'depth', 'nitrate', label=label_winter) 
sal_winter2008 = hv.Curve(df_winter_mean, 'depth', 'sal', label=label_winter)
loc_winter2008 = gv.Points(df_winter_meta, ['lon', 'lat'], ['date', 'month', 'station'], 
                        crs=ccrs.PlateCarree(), label='Winter 2008')


label_summer = 'Summer 2011 mean'

ntr_summer2011 = hv.Curve(df_summer_mean, 'depth', 'nitrate', label=label_summer)
sal_summer2011 = hv.Curve(df_summer_mean, 'depth', 'sal', label=label_summer)
loc_summer2011 = gv.Points(df_summer_meta, ['lon', 'lat'], ['date', 'month', 'station'], 
                        crs=ccrs.PlateCarree(), label='Summer 2011')

Nitrate fluxes

Nitrate data

Combining CTD cast number 59 with MSS profile number TBD

In [ ]:
def load_nitrate2018(fname):
    column_names = [
        'CTD time',
        'Pressure',
        'Depth',
        'Primary temperature',
        'Secondary temperature',
        'Primary conductivity',
        'Secondary conductivity',
        'Primary salinity',
        'Secondary salinity',
        'Dissolved oxygen [mL L-1]',
        'Oxygen voltage',
        'CDOM',
        'CHL',
        'Turbidity',
        'Beam-c',
        'PAR',
        'Transmission',
        'Nitrate_uncorrected',
        'Nitrate',
    ]

    df = pd.read_csv('../data/laptev/nabos_2018/suna_data/'+fname, sep='\t', names=column_names)
    
    df['sigma0'] = gsw.sigma0(df['Primary salinity'], df['Primary temperature'])
    return df

Flux estimate no. 1: NABOS Cast 59 + 2018 MSS

CTD

In [ ]:
label = '2018 cast 59'
df = load_nitrate2018('AT18_59.txt')
ntr_2018_59 = df.hvplot('Depth', 'Nitrate').relabel(label)
sal_2018_59 = df.hvplot('Depth', 'Primary salinity').relabel(label)
loc_2018_59 = gv.Points((125.857, 77.011), crs=ccrs.PlateCarree(), label='2018 MSS+SUNA station')
In [ ]:
def load_mss2018():
    a = loadmat('../data/laptev/nabos_2018/MSS2018.mat', squeeze_me=True)

    a.pop('__header__')
    a.pop('__version__')
    a.pop('__globals__')

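    # assume all casts share the same pressure grid; take the first cast's P vector as the coordinate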
    ds = xr.Dataset(coords=dict(P=a['P_2018'][0, :], cast=np.arange(a['P_2018'].shape[0])))
    a.pop('P_2018')

    # rename variables from *_2018 to just *
    for var, vals in a.items():
        ds[var[:-5]] = (['cast', 'P'], vals)

    ds['eps'] = 10**ds.eps

    ds['sigma'] = (['cast', 'P'], gsw.sigma0(ds.S, ds.T))

    def calc_N2(P, sigma):
        # sort sigma to enforce a statically stable profile, then take the linear
        # slope as dsigma/dz; N2 = g / rho * dsigma/dz
        dsigma_dz = np.polyfit(P, sigma.sort_values(), 1)[0]
        return gsw.grav(75, np.nanmean(P)) / (1e3 + np.nanmean(sigma)) * dsigma_dz

    df = ds.to_dataframe().reset_index()

    df['P_bin'] = pd.cut(df.P, bins=np.arange(0, 300, 5))

    df = df.merge(
        df.groupby(['P_bin', 'cast'])
        .apply(lambda g: calc_N2(g.P, g.sigma))
        .rename('N2'),
        right_index=True,
        left_on=['P_bin', 'cast'],
        how='outer'
    ).dropna(subset=['P', 'N2', 'eps'])

    # Osborn (1980) parameterization with mixing efficiency 0.2: Krho = 0.2 * eps / N2
    df['Krho'] = 0.2 * df.eps / df.N2

    df = df.groupby('P', as_index=False).median(numeric_only=True)
    return df

eps_2018_59 = load_mss2018().hvplot('P', 'eps').relabel(group='eps', label='2018').opts()

Viz + calculate

In [ ]:
fn_viz_options = [
    opts.Curve(
        title='',
        invert_axes=True, invert_yaxis=True,
        frame_width=200, height=400, xaxis='top',
        line_color='blue',
    ),
    opts.Curve(
        'eps', 
        logx=True, 
        #xticks=[(1e-9, '10⁻⁹'), (1e-7, '10⁻⁷'), (1e-5, '10⁻⁵')],
        yaxis='bare'
    ),
]
In [ ]:
l = ntr_2018_59 + eps_2018_59
l = l.opts().opts(*fn_viz_options)
hv.output(l) # the nitracline is between 10 and 25 meters depth

# clear options
l = l.opts().opts(opts.Curve('eps'))
df_ntr = ntr_2018_59.data
df_mss = eps_2018_59.data

df_nitracline = df_ntr.loc[df_ntr.Depth.between(10, 25)]
print('Nitrate flux [mmol/m2/d]:')
(
    np.polyfit(df_nitracline.Depth, df_nitracline.Nitrate, 1)[0] 
    * df_mss.loc[df_mss.P.between(10, 25)].median().Krho * 86400
)

Flux estimate no. 2: NABOS Cast 62 + 2014 MSS

MSS

In [ ]:
def load_mss2014():
    df_list = []
    for i in range(7, 12):
        d = loadmat(f'../data/laptev/mss_2014/msp_____{i:04d}.mat', squeeze_me=True)['ppd']
        extract_vars = ['press', 'epsilon', 'asal']#, 'krho', 'bvf2', 'Re_b']
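        # 'ppd' is a MATLAB struct; each column of its 'data' field is named
        # by the corresponding entry of its 'sensors' field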

        df = pd.DataFrame(columns=extract_vars)
        for var in extract_vars:
            df[var] = d['data'][()][:, d['sensors'][()] == var].squeeze()
        df.epsilon = 10**df.epsilon
        df_list.append(df)

    df = (
        pd.concat(df_list)
        .groupby('press', as_index=False).median()
        .rename(columns={'asal': 'sal'})
    )
    df['depth'] = -gsw.z_from_p(df.press, 76.)
    df = df.rename(columns=dict(press='P', epsilon='eps'))
    return df
In [ ]:
df = load_mss2014()

loc_mss2014 = gv.Points(
    (125.98045, 76.30767), ['lon', 'lat'],
    crs=ccrs.PlateCarree(), label='MSS station 2014'
)

sal_2014 = hv.Curve(df, 'depth', 'sal', label='MSS Station 2014')
eps_2014 = hv.Curve(df, 'depth', 'eps', label='2014', group='eps')

SUNA

In [ ]:
label = '2018 cast 62'
df = load_nitrate2018('AT18_62.txt')
ntr_2018_62 = df.hvplot('Depth', 'Nitrate').relabel(label)
sal_2018_62 = df.hvplot('Depth', 'Primary salinity').relabel(label)
loc_2018_62 = gv.Points((125.122, 76.351), crs=ccrs.PlateCarree(), label='2018 SUNA cast 62')

Viz + calculate

In [ ]:
l = ntr_2018_62 + eps_2014
l = l.opts().opts(*fn_viz_options)#.redim(**dims).opts(toolbar=None)
hv.output(l) # from this Figure, we read off the nitracline between 15 and 20 meters depth.

# clear options
l = l.opts().opts(opts.Curve('eps'))
df_ntr = ntr_2018_62.data
df_eps = eps_2014.data

df_ntr_nitracline = df_ntr.loc[df_ntr.Depth.between(15, 20)]
df_eps_nitracline = df_eps.loc[df_eps.P.between(15, 20)]

print('Nitrate flux [mmol/m2/d]:')
dNTR_dz = np.polyfit(df_ntr_nitracline.Depth, df_ntr_nitracline.Nitrate, 1)[0]
# N2 = g / rho * d(sigma0)/dz, from a linear fit over the nitracline
N2 = 9.81 / (1e3 + df_ntr_nitracline.sigma0.mean()) * np.polyfit(df_ntr_nitracline.Depth, df_ntr_nitracline.sigma0, 1)[0]
eps = df_eps_nitracline.median().eps

# calculate nitrate flux using Osborn 1980 approximation (mixing efficiency 0.2)
dNTR_dz * 0.2 * eps / N2 * 86400

Average of both flux estimates

With one estimate being 0.014 and the other 0.017 (see above), we can comfortably use 0.015 mmol m-2 d-1 as the upward nitrate flux through the seasonal halocline in summer in the Laptev Sea.
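
A minimal sketch of that arithmetic:

In [ ]:
np.mean([0.014, 0.017])  # 0.0155, quoted conservatively (rounded down) as 0.015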

Viz: Vertical profiles

In [ ]:
dims = dict(
    depth=hv.Dimension('depth', label='Depth', unit='m', range=(0, 55)),
    nitrate=hv.Dimension('nitrate', label='NO₃ conc.', unit='µM', range=(0, 7)),
    sal=hv.Dimension('sal', label='Salinity', unit='g kg⁻¹'),
    eps=hv.Dimension('eps', label='TKE Dissipation', unit='W kg⁻¹', range=(5e-10, 5e-4)),
)
In [ ]:
hv.extension('bokeh')

options = [
    opts.Curve(
        title='{label}',
        invert_axes=True, invert_yaxis=True, xaxis='top',
        show_grid=True, show_legend=True,
        tools=['hover'],
        frame_width=200, frame_height=400,
        padding=.05,
        line_width=3,
    ),
    opts.Curve('eps', logx=True, xticks=[(1e-9, '10⁻⁹'), (1e-7, '10⁻⁷'), (1e-5, '10⁻⁵')]),
    opts.Overlay(legend_position='bottom_left', title='{label}', show_title=False),
    opts.NdLayout(title='{label}'),
]

# options = translate_options(options, bokeh2mpl)

panels = [
    hv.Overlay([sal_winter2008, sal_summer2011, sal_2018_62, sal_2018_59, ], label='A'),
    hv.Overlay([ntr_winter2008, ntr_summer2011, ntr_2018_62, ntr_2018_59], label='B'),
    hv.Overlay([eps_2014, eps_2018_59], label='C'),
]


# apply options to all panels first, to avoid matplotlib/bokeh option conflicts before linking the styles
_ = hv.Layout(panels).opts().opts(opts.Curve('eps', xticks=None))
sl = StyleLink(
    [
        [ntr_winter2008, sal_winter2008], 
        [ntr_summer2011, sal_summer2011], 
        [ntr_2018_59, sal_2018_59, eps_2018_59], 
        [sal_2014, eps_2014, ntr_2018_62, sal_2018_62], 
    ],
    {
        'line_color': hv.Cycle(cmap_to_list(colorcet.cm.glasbey_bw_minc_20_maxl_70)),
        'line_dash': hv.Cycle(['dashdot', 'dashed', 'dotted', 'solid'])
    }
)
# l = hv.Layout(panels).redim(**dims).opts(*options)
l = hv.NdLayout({k: panel for k, panel in enumerate(panels)}).redim(**dims).opts(*options)
laptev_profiles = l.clone()
hv.output(l)
fname = '../nb_fig/FIGURE_FN-NEW_LAPTEV_profiles'
hv.save(l, fname, fmt='html')
hv.save(l.opts(toolbar=None), fname, fmt='png')
save_bokeh_svg_multipanel(l, fname)

Vertically integrated NO3 difference from summer to winter

In [ ]:
deltaN = (df_winter_mean.nitrate - df_summer_mean.nitrate).sum()*5 # delta_nitrate * delta_z (5 m for each bin)

If mixing extends over 4 months and if (!) we can neglect cycling processes in the water column/benthos, the flux would be:

In [ ]:
deltaN / 4 / 30  # mmol m-2 d-1

However, given the shapes of the winter/summer profiles, it appears likely that benthic processes and/or advection have affected the nitrate concentrations. Hence this estimate may not be worth much.

Viz: Map of all stations

Lenn et al., 2011, "Intermittent Intense Turbulent Mixing under Ice in the Laptev Sea Continental Shelf": NLS station (drift) and CLS station (mooring)

In [ ]:
lenn2011 = (
    gv.Points((144+56.13/60, 79+14.81/60), ['lon', 'lat'],
              label='Lenn et al. 2011, turbulence obs.')
    * gv.Points((125+55.3/60, 76+44./60), ['lon', 'lat'],
                label='Lenn et al. 2011, mooring')
)
In [ ]:
hv.extension('bokeh')

proj = ccrs.LambertConformal(central_longitude=110, central_latitude=75)

print('PROJ4 string:')
print(proj.proj4_init)

def visaxes(plot, element):
    plot.state.axis.visible = True
    
options = [
    opts.Points(
        color=hv.Cycle(cmap_to_list(colorcet.cm.glasbey_light)),
        marker=hv.Cycle(['circle', 'triangle', 'diamond', 'square']),
        jitter=True, tools=['hover'], 
        frame_height=500, aspect=1,
        size=15, projection=proj,
        hooks=[visaxes],
        xlabel='x (m)', ylabel='y (m)'
    ),
    opts.Points('MSS station 2014', marker='square', size=20, padding=.1),
    opts.Shape(
        'Bathymetry',
        show_legend=True, line_width=2,
        #line_color=hv.Cycle(cmap_to_list(colorcet.cm.glasbey_dark)), 
        line_color='black',
        line_dash=hv.Cycle(['solid', 'dashed', 'dotted']),
    ),
    opts.Shape(
        'Land',
        fill_color='wheat', line_color='black'
    ),
    opts.Overlay(show_legend=True, legend_position='right'),
]


# lb = gv.Labels(df, ['lon', 'lat'], 'station')

# geography
extents = (90, 60, 160, 90)
bbox = box(*extents)
land_feature = gv.feature.land.data.with_scale('10m')
land = gv.Shape(unary_union([geom.intersection(bbox) for geom in land_feature.geometries()])).relabel(group='Land')

# 'bathy' (isobath contour shapes) is assumed to be provided by imports.py
bathymetry = hv.Overlay([
    gv.Shape(bathy.loc[depth].intersection(bbox), group='Bathymetry', label=f'{depth} m isobath')
    for depth in [50, 200, 1000]
])

l = (
    bathymetry
    * loc_winter2008
    * loc_summer2011
    * loc_2018_59
    * loc_mss2014
    * loc_2018_62
    * lenn2011
    * land
    # * gv.feature.rivers.opts(line_width=3, color='blue', scale='110m')
)
    
l = l.opts(*options).redim.range(lon=(100, 150), lat=(71, 80))

laptev_map = l.clone()
fname = '../nb_fig/FIGURE_FN-NEW_LAPTEV_map'
hv.save(l, fname+'.html')
hv.save(l.opts(toolbar=None, clone=True), fname+'.png')
l

Merge profiles and map

In [ ]:
l = (laptev_profiles + laptev_map.opts(title='B')).cols(1)
fname = '../nb_fig/FIGURE_FN-NEW_LAPTEV'
hv.save(l, fname, fmt='html')
hv.save(l.opts(toolbar=None, clone=True), fname, fmt='png')
os.system('rm ../nb_fig/FIGURE_FN-NEW_LAPTEV_*.png')  # remove the per-panel images; the merged PNG is kept
l

Bio-Argo floats

In [ ]:
dims = {
    'Depth': hv.Dimension('Depth', unit='m', range=(0, None)),
    'CorNO3': hv.Dimension('CorNO3', label='NO₃', unit='µM', range=(0, None)),
    'NO3int': hv.Dimension('NO3int', label='NO₃ deficit', unit='mmol m⁻²', range=(0, None)),
    'Date': hv.Dimension('Date', range=(pd.Timestamp('2017-8-1'), pd.Timestamp('2018-8-1')))
}
In [ ]:
fname = '../data/baffin/FLOATS_by_date_derived.nc'

if os.path.exists(fname):

    def load_floats(fname, bin_average=False):
        ds = xr.open_dataset(fname)

        if bin_average:
            date_bins = pd.date_range(ds.Date[0].values, ds.Date[-1].values, freq='MS')
            date_labels = date_bins[:-1]

            depth_bins = np.arange(0., 200, 4)
            depth_labels = depth_bins[:-1] + 2

            df = ds.to_dataframe().reset_index()
            df.Date = pd.cut(df.Date, bins=date_bins, labels=date_labels)
            df.Depth = pd.to_numeric(pd.cut(df.Depth, bins=depth_bins, labels=depth_labels))

            ds = df.groupby(['Date', 'Depth']).mean(numeric_only=True).to_xarray()

        return ds

    # monthly averages of NO3 and MLD
    ds = load_floats(fname, bin_average=True)
    ds = ds.sel(dict(Depth=ds.Depth[ds.Depth<=100.]))
    ds['mld'] = ds.mld.reduce(get_unique, dim='Depth')

    ds = ds[['Date', 'Depth', 'CorNO3', 'mld']]
    
    ds.to_netcdf('../data/baffin/baffin_monthly.nc')

    # Surface layer NO3 deficit
    zref=60
    ds = load_floats(fname)
    ds = ds.sel(Depth=slice(0, zref))

    # NO3 deficit relative to the deepest retained bin [mmol/m2]
    NO3int = (ds.CorNO3.isel(dict(Depth=-1)) - ds.CorNO3).sum(
        dim='Depth', min_count=1).rename('NO3int')

    # daily change of the deficit [mmol/m2/d] (computed for reference)
    dNO3 = NO3int.diff(dim='Date').rename('dNO3')

    df_dno3 = NO3int.mean(dim='Float').to_dataframe()
    df_dno3.to_csv('../data/baffin/no3_deficit_daily.csv')
    
ds = xr.load_dataset('../data/baffin/baffin_monthly.nc')
df_dno3 = pd.read_csv('../data/baffin/no3_deficit_daily.csv', index_col='Date', parse_dates=['Date'])
df_dno3_avg = df_dno3.NO3int.rolling(15, center=True).mean().dropna()  # 15-day (15-sample) centered running mean of the daily deficit
In [ ]:
df_dno3
In [ ]:
ntr = ds.hvplot.quadmesh('Date', 'Depth', 'CorNO3').opts()
mld = ds.mld.hvplot(label='Mixed Layer Depth')

def adjust(plot, element):
    p = plot.state
    c = p.select(dict(type=ColorBar))[0]
    
    c.title = element.vdims[0].pprint_label
    c.location = (0, -10)
    c.height = 220
        
# Panel 1 
p1 = (ntr*mld).opts(
    opts.QuadMesh(
        width=500, height=310,
        hooks=[adjust], tools=['hover'],
        invert_yaxis=True, 
        cmap=cmocean.cm.turbid, colorbar=True,
    ),
    opts.Curve(
        color='#12d9e3', line_width=3,
        show_legend=True
    ),
    opts.Overlay(legend_position='bottom_left')
)

# Panel 2

p2 = (
    df_dno3.reset_index().hvplot.scatter('Date', 'NO3int', color='grey').opts(yticks=[0, 100, 200, 300])
    * df_dno3_avg.hvplot(color='k', line_width=3)
).opts(show_legend=False)
In [ ]:
hv.renderer('bokeh').theme = theme

l = p1.relabel('A') + p2.relabel('B')
l = l.redim(**dims).opts(
    opts.Overlay(width=600, xrotation=30, xlabel='',)
).cols(1)

fname = '../nb_fig/FIGURE_FN-NEW_BAFFIN'
hv.save(l, fname+'.html')
hv.save(l.opts(toolbar=None), fname+'.png')
save_bokeh_svg_multipanel(l, fname, 'v')
l

We read off from Panel B that the nitrate deficit decreased by 200 mmol m-2 over the course of four months. The upward nitrate flux at 60 m depth is hence approximately:

In [ ]:
200/4/30

Retrieve the center coordinate of the BGC Argo float observations

coords = df[['Longitude', 'Latitude']].dropna()
print(
    MultiPoint(
        [Point(x, y) for x, y in zip(coords.Longitude, coords.Latitude)]
    ).convex_hull.centroid.wkt
)

POINT (-65.36189062114981 72.22652198050153)

Chukchi Sea: Calculation based on nishino2015nutrient

In [ ]:
def load_nishino_etal_2015():
    # the unusual column names (e.g. 'Nitrate [~$m~#mol/kg]') reproduce
    # formatting codes carried over verbatim from the source CSV
    return pd.read_csv(
        '../data/fn-compilation-database/nishino2015nutrient/Data_from_Mirai2013.csv',
        na_values=-999, parse_dates=['yyyy-mm-ddThh:mm:ss.sss'],
    )
df = load_nishino_etal_2015()
df.head(3)
df['logFN'] = np.abs(np.log10(df['Flux_N1 [mmol/m~^2~#/s]']))
In [ ]:
options = [
    opts.Curve(invert_yaxis=True, invert_axes=True),
]
no3 = hv.Curve(df, 'Depth [m]', ['Nitrate [~$m~#mol/kg]', 'Station','yyyy-mm-ddThh:mm:ss.sss'])
fn = hv.Curve(df, 'Depth [m]', ['Flux_N1 [mmol/m~^2~#/s]', 'Station'])
l = no3.groupby('Station').overlay() + fn.groupby('Station').overlay()
l.options(*options)

The data comprises three wind events, supposedly leading to enhanced mixing:

  1. 14-15 Sep
  2. 19-21 Sep
  3. 24-25 Sep

First, we'll naively average the provided depth-dependent nitrate fluxes, which were originally obtained for each depth interval from the dissipation of TKE, the buoyancy frequency, and the nitrate gradient via the Osborn (1980) relation (sketched below).
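
A minimal sketch of that per-depth construction, with purely hypothetical values (the dataset's Flux_N columns come precomputed):

In [ ]:
# Osborn (1980) relation with mixing efficiency 0.2: Krho = 0.2 * eps / N2
eps_hyp = 1e-8       # TKE dissipation [W kg-1] (hypothetical)
N2_hyp = 1e-4        # buoyancy frequency squared [s-2] (hypothetical)
dNO3_dz_hyp = 0.2    # nitrate gradient [mmol m-3 per m] (hypothetical)
Krho_hyp = 0.2 * eps_hyp / N2_hyp  # eddy diffusivity [m2 s-1]
Krho_hyp * dNO3_dz_hyp * 86400     # upward nitrate flux [mmol m-2 d-1]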

In [ ]:
time = df['yyyy-mm-ddThh:mm:ss.sss']

z_interval = [25, 40]

iz = (df['Depth [m]'] >= z_interval[0]) & (df['Depth [m]'] <= z_interval[1])
ii1 = (
    (time <= dt.datetime(2013,9,15,23,59,59)) & 
    (time >= dt.datetime(2013,9,14,0,0,0))
)
ii2 = (
    (time <= dt.datetime(2013,9,21,23,59,59)) & 
    (time >= dt.datetime(2013,9,19,0,0,0))
)
ii3 = (
    (time <= dt.datetime(2013,9,25,23,59,59)) & 
    (time >= dt.datetime(2013,9,24,0,0,0))
)

for the_vars in [
    ['Flux_N1 [mmol/m~^2~#/s]','Flux_N2 [mmol/m~^2~#/s]'],
    ['Krho1 [m~^2~#/s]', 'Krho2 [m~^2~#/s]']
]:
    if the_vars[0].startswith('Krho'):
        print('Krho in m2/d:')
    elif the_vars[0].startswith('Flux_N'):
        print('Nitrate flux in mmol/m2/d:')

    print('Each wind event:')
    for ii in [ii1,ii2,ii3]:
        print(df.loc[ii & iz, the_vars].mean().mean()*86400)

    print('All wind events:')
    print(df.loc[(ii1 | ii2 | ii3) & iz,the_vars].mean().mean()*86400)

    print('Entire time series:')
    var_mean = df.loc[iz, the_vars].mean().mean()*86400
    print(var_mean)
    if the_vars[0].startswith('Krho'):
        Krho = var_mean
    elif the_vars[0].startswith('Flux_N'):
        FN = var_mean
    print()

Second, as a more reliable means of averaging the turbulent flux over the (non-turbulent) background field, we determine the nitrate gradient over the same depth interval and multiply that by the average eddy diffusivity:

In [ ]:
df = df.groupby(['Depth [m]']).mean(numeric_only=True)
df = df.loc[(df.index >= z_interval[0]) & (df.index <= z_interval[1]), 'Nitrate [~$m~#mol/kg]']
print('dNO3/dz in uM/m:')
no3_slope = np.polyfit(df.index, df.values, 1)[0]
no3_slope

The above first estimate is consistent with this second, arguably more accurate, averaging method:

In [ ]:
Krho * no3_slope

Surface NO3 concentration

In [ ]:
df = load_nishino_etal_2015()
df.loc[df['Depth [m]']<=10, 'Nitrate [~$m~#mol/kg]'].mean()

Averaged station coordinates

In [ ]:
d = df.groupby('Station').first()
LineString(zip(d['Longitude [degrees_east]'],d['Latitude [degrees_north]'])).centroid.wkt

Converted to conventional longitude (negative values are degrees West):

In [ ]:
191.75-360