import os
from datetime import datetime

import numpy as np
import pandas as pd
import seaborn as sns
import xarray as xr
from netCDF4 import Dataset

import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.patches as mpatch
import matplotlib.patheffects as PathEffects
import matplotlib.ticker as mticker

import cartopy.crs as ccrs
import cartopy.feature as cfeature
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER

from scipy import stats
from sklearn.cross_decomposition import CCA

import cdo
import geocat.datafiles as gdf
import geocat.viz as gv
import geocat.viz.util as gvutil
from geocat.viz import cmaps as gvcmaps
from geocat.comp import eofunc_eofs, eofunc_pcs
Source of the SST data: https://psl.noaa.gov/data/gridded/data.noaa.ersst.v5.html. Spatial resolution: 2° × 2° horizontal grid. Temporal coverage: January 1854 to December 2021, monthly.
Source of the SIC data: https://nsidc.org/data/g10010/versions/2. Parameter: sea ice concentration. Temporal coverage: January 1850 to 31 December 2017. Temporal resolution: 1 month. Spatial resolution: 1/4 degree × 1/4 degree.
#download SST data
cmd="wget https://downloads.psl.noaa.gov//Datasets/noaa.ersst.v5/sst.mnmean.nc >/dev/null 2>&1"
os.system(cmd)
#download SIC data
cmd="wget ftp://sidads.colorado.edu/pub/DATASETS/NOAA/G10010_V2/G10010_SIBT1850_V2.zip >/dev/null 2>&1 "
os.system(cmd)
#unzip SIC data
cmd="unzip -o G10010_SIBT1850_V2.zip >/dev/null 2>&1"
os.system(cmd)
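os.system swallows failures silently; a slightly more defensive variant of the download/unzip step using subprocess would look like this (a sketch; same URLs and tools as above):
import subprocess
for url in ["https://downloads.psl.noaa.gov/Datasets/noaa.ersst.v5/sst.mnmean.nc",
            "ftp://sidads.colorado.edu/pub/DATASETS/NOAA/G10010_V2/G10010_SIBT1850_V2.zip"]:
    subprocess.run(["wget", "-q", url], check=True)  # check=True raises CalledProcessError on a non-zero exit
subprocess.run(["unzip", "-o", "-q", "G10010_SIBT1850_V2.zip"], check=True)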
#open SST file
fnct = 'sst.mnmean.nc'
dst= xr.open_dataset(fnct)
#print(dst)
#CDO 2.0.5 is used to remove the annual cycle (compute anomalies), calculate annual means, and select the Atlantic basin
from cdo import *
cdo = Cdo()
#select variable and 1854 - 2021 period
cdo.selvar("sst",input="sst.mnmean.nc", output="sst.ersst.185401202206.nc")
cdo.selyear("1854/2021",input="sst.ersst.185401202206.nc", output="sst.mnmean.Ersstv5.18542021.nc")
#calculate anomalies relative to the 1980 - 2010 period
cdo.selyear("1980/2010",input="sst.ersst.185401202206.nc", output="sst.mnmean.Ersstv5.19802010.nc")
cdo.ymonmean(input="sst.mnmean.Ersstv5.19802010.nc", output="annual_cycle_Ersstv5.19802010.nc")
cdo.sub(input="sst.mnmean.Ersstv5.18542021.nc annual_cycle_Ersstv5.19802010.nc", output="anom.sst.Ersstv5.18542021.nc")
#calculate annual means
cdo.yearmonmean(input="anom.sst.Ersstv5.18542021.nc", output="anm.anom.sst.Ersstv5.18542021.nc")
#select region of interest
cdo.sellonlatbox('285,375,-80,80', input="anm.anom.sst.Ersstv5.18542021.nc", output="ATL.anm.anom.sst.Ersstv5.18542021.nc")
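For reference, the same select/anomaly/annual-mean pipeline can be expressed in pure xarray (a minimal sketch assuming the ERSSTv5 monthly layout; cdo remains the tool actually used here, and note that sellonlatbox 285,375 wraps across the Greenwich meridian, which a single xarray slice cannot do):
dsx = xr.open_dataset("sst.mnmean.nc").sel(time=slice("1854", "2021"))
clim = dsx.sst.sel(time=slice("1980", "2010")).groupby("time.month").mean("time")  # 1980-2010 annual cycle
anomx = dsx.sst.groupby("time.month") - clim            # monthly anomalies
anomx_yr = anomx.groupby("time.year").mean("time")      # annual means
atl = anomx_yr.sel(lat=slice(80, -80), lon=slice(285, 360))  # ERSST lat runs N to S; 0-15°E would need a second slice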
# remove intermediate files that are no longer needed
os.system("rm " + "sst.ersst.185401202206.nc")
os.system("rm " + "sst.mnmean.Ersstv5.18542021.nc")
os.system("rm " + "sst.mnmean.Ersstv5.19802010.nc")
os.system("rm " + "annual_cycle_Ersstv5.19802010.nc")
os.system("rm " + "anom.sst.Ersstv5.18542021.nc")
os.system("rm " + "sst.mnmean.nc")
os.system("rm " + "G10010_SIBT1850_V2.zip")
# CDO VERSION 2.0.5 is used to remove the SST warming trend
#calculate global average
cdo.fldmean(input="anm.anom.sst.Ersstv5.18542021.nc", output="Fldmean.anm.anom.sst.Ersstv5.18542021.nc")
map_file="ATL.anm.anom.sst.Ersstv5.18542021.nc"
index_file="Fldmean.anm.anom.sst.Ersstv5.18542021.nc"
index_txt='Fldmean.anm.anom.sst.Ersstv5.18542021.txt'
#list the years present in the file
os.system('cdo showyear ' + index_file)
#write the field-mean series to a plain text file (one value per line; cdo's status line goes to stderr, so no trailing line needs removing)
os.system('cdo -s output ' + index_file + ' > ' + index_txt)
index_data=pd.read_csv(index_txt,header=None).squeeze()
#np.shape(index_data)
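The text-file detour can also be skipped entirely by reading the field-mean series with xarray (a sketch; the variable in the fldmean file keeps the name sst):
idx = xr.open_dataset(index_file)['sst'].squeeze()  # dims: (time,)
index_data = pd.Series(idx.values)                  # same content as the csv read above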
#create map data
dataset=Dataset(map_file)
map_data=dataset.variables['sst'][:].squeeze()
model_lon=dataset.variables['lon'][:].squeeze()
model_lat=dataset.variables['lat'][:].squeeze()
#np.shape(map_data)
#subtract the average from each point of the grid
map_data_normalized=np.zeros(np.shape(map_data))
for x in range(np.size(model_lon)):
for y in range(np.size(model_lat)):
for t in range(np.shape(map_data)[0]):
map_data_normalized[t,y,x]=map_data[t,y,x]-index_data[t]
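The triple loop is equivalent to one broadcast subtraction, which is much faster and avoids the per-element masked-to-NaN warning (a sketch using the arrays defined above; map_data_normalized_fast is a hypothetical name for the same result):
# index_data has shape (time,); two trailing axes let it broadcast over (time, lat, lon)
map_data_normalized_fast = map_data - index_data.values[:, None, None]  # masked array, mask preserved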
#creates a copy of the original NetCDF file to which the modified data will be written
map_file_export="g.ATL.anm.anom.SST.Ersst.1854.2021.mon.nc"
os.system("cp "+map_file+" "+map_file_export)
#write map_data_normalized to netcdf file
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#ncfile.variables['sst'][:]=np.zeros(np.shape(map_data_normalized)) #put zeros to verify that the data actually has been written
ncfile.variables['sst'][:]=map_data_normalized[:,:,:] #"real" data write
ncfile.close()
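The copy-then-patch step could equally be done with xarray, which writes a fresh file in one go (a sketch; same file and variable names as above):
out = xr.open_dataset(map_file)
out['sst'].values[:] = np.ma.filled(map_data_normalized, np.nan)  # masked points become NaN
out.to_netcdf(map_file_export)
out.close()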
# remove intermediate files that are no longer needed
#os.system("rm " + "Fldmean.anm.anom.sst.Ersstv5.18542021.nc")
#os.system("rm " + "anm.anom.sst.Ersstv5.18542021.nc")
#os.system("rm " + "ATL.anm.anom.sst.Ersstv5.18542021.nc")
Output: cdo showyear lists the years 1854 through 2021 (168 timesteps).
#perform EOF analysis on detrended Atlantic SST annual anomalies
#open the detrended SST data
fnc = 'g.ATL.anm.anom.SST.Ersst.1854.2021.mon.nc'
dsg = xr.open_dataset(fnc)
#print(dsg)
#Select period and region
ystr = 1854
yend = 2021
latS = -80.
latN = 80.
lonW = -74.
lonE = 14.
neof = 8
anm=dsg.sst
#print(anm)
# put the data in the format required by geocat.comp's EOF functions; the sqrt(cos(lat)) weighting below makes each grid cell's contribution to the covariance matrix proportional to its area, and this layout also makes it easy to change the selected region/period
anmD = anm.sortby("lat", ascending=True)
clat = anmD['lat'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))
wanm = anmD * clat
wanm.attrs = anmD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['lon']
if ( ((lonW < 0) or (lonE < 0 )) and (lon.values.min() > -1) ):
wanm=wanm.assign_coords(lon=( (lon + 180) % 360 - 180) )
wanm = wanm.sortby("lon")
print(' change longitude ')
#print(wanm)
xw = wanm.sel(lat=slice(latS, latN), lon=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'lat', 'lon')
#print(xw_anm)
# -- Perform EOF analysis
eofs_sst = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs_sst = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
sdev_sst = pcs_sst.std(dim='time')
pcs_norm_sst = pcs_sst / sdev_sst
pcs_norm_sst['time']=anmD['time']
pcs_sst['time']=anmD['time']
pcs_sst.attrs['varianceFraction'] = eofs_sst.attrs['varianceFraction']
pcs_norm_sst.attrs['varianceFraction'] = eofs_sst.attrs['varianceFraction']
#print(pcs)
evec_sst = xr.DataArray(data=eofs_sst, dims=('eof','lat','lon'),
coords = {'eof': np.arange(0,neof), 'lat': xw['lat'], 'lon': xw['lon']} )
#print(evec)
# Plot the first three EOFs and their associated PCs (S1 Fig)
def makefig(dat, ieof, grid_space):
# Fix the artifact of not-shown-data around 0 and 360-degree longitudes
# Generate axes using Cartopy to draw coastlines
ax = fig.add_subplot(grid_space,
projection=ccrs.Robinson(central_longitude=-60))
# projection=ccrs.Robinson(central_longitude=210))
ax.coastlines(linewidth=0.5, alpha=0.6)
gl = ax.gridlines(crs=ccrs.PlateCarree(),
draw_labels=True,
dms=False,
x_inline=False,
y_inline=False,
linewidth=1,
linestyle='dotted',
color="black",
alpha=0.3)
gl.top_labels = False
gl.right_labels = False
gl.rotate_labels = False
gvutil.add_major_minor_ticks(ax, labelsize=16)
gvutil.add_lat_lon_ticklabels(ax)
newcmp = gvcmaps.BlueYellowRed
index = [5, 20, 35, 50, 65, 85, 95, 110, 125, 0, 0, 135, 150, 165, 180, 200, 210, 220, 235, 250 ]
color_list = [newcmp[i].colors for i in index]
color_list[9]=[ 1., 1., 1.]
color_list[10]=[ 1., 1., 1.]
kwargs = dict(
vmin = -0.06,
vmax = 0.06,
levels =19,
colors=color_list,
add_colorbar=False, # allow for colorbar specification later
transform=ccrs.PlateCarree(), # ds projection
)
fillplot = dat[ieof,:,:].plot.contourf(ax=ax, **kwargs)
#ax.add_feature(cfeature.LAND, facecolor='lightgray', zorder=1)
ax.add_feature(cfeature.COASTLINE, edgecolor='gray', linewidth=0.5, zorder=1)
gvutil.set_titles_and_labels(ax,
lefttitle=f'EOF{ieof+1} pattern',
lefttitlefontsize=12,
righttitle='',
righttitlefontsize=12,
maintitle='',
xlabel="",
ylabel="")
return ax, fillplot
def make_bar_plot(dataset, ieof, grid_space):
years = list(dataset.time.dt.year)
values = list(dataset[ieof,:].values)
colors = ['blue' if val < 0 else 'red' for val in values]
ax = fig.add_subplot(grid_space)
ax.bar(years,
values,
color=colors,
width=1.0,
edgecolor='black',
linewidth=0.5)
gvutil.add_major_minor_ticks(ax,
x_minor_per_major=5,
y_minor_per_major=5,
labelsize=12)
gvutil.set_axes_limits_and_ticks(ax,
xticks=np.linspace(1800, 2020, 5),
xlim=[1854.5, 2025.5],
ylim=[-3.0, 3.5])
pct = dataset.attrs['varianceFraction'].values[ieof] * 100
print(pct)
gvutil.set_titles_and_labels(ax,
lefttitle=f'PC{ieof+1} (normalized)',
lefttitlefontsize=12,
righttitle=f'{pct:.1f}%',
righttitlefontsize=12,
xlabel="Year",
ylabel="",
labelfontsize=10 )
return ax
fig = plt.figure(figsize=(14, 9))
grid = fig.add_gridspec(ncols=3, nrows=3, hspace=0.14)
ax1, fill1 = makefig(evec_sst,0, grid[0:2,0])
ax2, fill2 = makefig(-evec_sst,1, grid[0:2,1])
ax3, fill3 = makefig(evec_sst,2, grid[0:2,2])
fig.colorbar(fill2,
ax=[ax1,ax2,ax3],
ticks=np.linspace(-0.06, 0.06, 5),
drawedges=True,
label='Eigenvector',
orientation='horizontal',
shrink=0.3,
pad=0.09,
extendfrac='auto',
extendrect=True)
ax1 = make_bar_plot(pcs_norm_sst,0,grid[2,0])
ax2 = make_bar_plot(-pcs_norm_sst,1,grid[2,1])
ax3 = make_bar_plot(pcs_norm_sst,2,grid[2,2])
#fig.suptitle('EOF for SST (DJF)', fontsize=16, y=0.9)
plt.draw()
plt.savefig("Supp.Fig.1.EOF123.SST.1854.2021.pdf",format="pdf", bbox_inches = 'tight',dpi=500)
plt.savefig("Supp.Fig.1.EOF123.SST.1854.2021.jpg",format="jpg", bbox_inches = 'tight',dpi=500)
Output: 18.96, 12.98, 8.99 (variance fractions of EOF1-3, %).
Add the first two EOFs (FIG 1a) and their associated PCs (FIG 1)
# Add the first two EOFs (FIG 1a) and their associated PCs (FIG 1)
eofs_added_sst=eofs_sst.copy() #create a copy of eofs
eofs_added_sst[0,:,:]=eofs_sst[0,:,:]-eofs_sst[1,:,:] #add two eofs
#the minus sign is because the second EOF (S1 Fig) was plotted multiplying both the map and the PC by -1
evec_sst_added = xr.DataArray(data=eofs_added_sst, dims=('eof','lat','lon'),
coords = {'eof': np.arange(0,neof), 'lat': xw['lat'], 'lon': xw['lon']} )
#print(evec1)
# Add, multiply, or apply other transformations to the PCs
pcs_added_sst=pcs_sst.copy()
pcs_added_sst[0]=pcs_norm_sst[0]-pcs_norm_sst[1]
#the minus sign is because the second EOF (S1 Fig) was plotted multiplying both the map and the PC by -1
pcs_added_sst['time']=anmD['time']
#print(pcs_added)
#calculate the standard deviation of the (not normalized) added PCs
pcs_added_sst[2]=pcs_sst[0]-pcs_sst[1]
sdev_pc_added=pcs_added_sst[2].std (dim='time')
#print(sdev_pc_added)
## Plot the superposition of EOF1 and EOF2
def makefig(dat, ieof, grid_space):
# Fix the artifact of not-shown-data around 0 and 360-degree longitudes
# Generate axes using Cartopy to draw coastlines
ax = fig.add_subplot(grid_space,
projection=ccrs.Robinson(central_longitude=-60))
# projection=ccrs.Robinson(central_longitude=210))
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=17)
gl = ax.gridlines(crs=ccrs.PlateCarree(),
draw_labels=True,
dms=False,
x_inline=False,
y_inline=False,
linewidth=1,
linestyle='dotted',
color="black",
alpha=0.3)
gl.top_labels = False
gl.right_labels = False
gl.rotate_labels = False
gvutil.add_major_minor_ticks(ax, labelsize=16)
gvutil.add_lat_lon_ticklabels(ax)
newcmp = gvcmaps.BlueYellowRed
index = [5, 20, 35, 50, 65, 85, 95, 110, 125, 0, 0, 135, 150, 165, 180, 200, 210, 220, 235, 250 ]
color_list = [newcmp[i].colors for i in index]
color_list[9]=[ 1., 1., 1.]
color_list[10]=[ 1., 1., 1.]
kwargs = dict(
vmin = -0.6,
vmax = 0.6,
levels = 19,
colors=color_list,
add_colorbar=False, # allow for colorbar specification later
transform=ccrs.PlateCarree(), # ds projection
)
fillplot = dat[ieof,:,:].plot.contourf(ax=ax, **kwargs)
#ax.add_feature(cfeature.LAND, facecolor='lightgray', zorder=1)
ax.add_feature(cfeature.COASTLINE, edgecolor='gray', linewidth=0.5, zorder=1)
gvutil.set_titles_and_labels(ax,
lefttitle='SST (EOF1 + EOF2) ',
lefttitlefontsize=14,
righttitle='',
righttitlefontsize=14,
maintitle='',
xlabel="",
ylabel="")
return ax, fillplot
def make_bar_plot(dataset, ieof, grid_space):
years = list(dataset.time.dt.year)
values = list(dataset[ieof,:].values)
colors = ['cyan' if val < 0 else 'gold' for val in values]
ax = fig.add_subplot(grid_space)
ax.bar(years,
values,
color=colors,
width=1.0,
edgecolor='black',
linewidth=0.5)
gvutil.add_major_minor_ticks(ax,
x_minor_per_major=5,
y_minor_per_major=5,
labelsize=13)
gvutil.set_axes_limits_and_ticks(ax,
xticks=np.linspace(1800, 2020, 5),
xlim=[1850.5, 2021.5],
ylim=[-5.5, 5])
pct = dataset.attrs['varianceFraction'].values[ieof] * 100
print(pct)
gvutil.set_titles_and_labels(ax,
lefttitle='PC1 + PC2 1854 - 2021',
lefttitlefontsize=13,
righttitle='30%',
righttitlefontsize=13,
xlabel="Year",
ylabel="Std.dev",
labelfontsize=13 )
return ax
#Plot the added EOFs multiplied by the standard deviation of the added PC
fig = plt.figure(figsize=(16, 13))
grid = fig.add_gridspec(ncols=3, nrows=3, hspace=0.04)
ax1, fill1 = makefig(sdev_pc_added*evec_sst_added,0, grid[0:2,0])
#ax2, fill2 = makefig(cor, grid[0:2,1])
#ax3, fill3 = makefig(-evec1,2, grid[0:2,2])
fig.colorbar(fill1,
ax=[ax1],
ticks=np.linspace(-0.6, 0.6, 5),
drawedges=True,
label='(°C)',
orientation='horizontal',
shrink=1,
pad=0.09,
extendfrac='auto',
extendrect=True)
ax1 = make_bar_plot(pcs_added_sst,0,grid[2,0])
#ax2 = make_bar_plot(-pcs_added,1,grid[2,1])
#ax3 = make_bar_plot(-pcs_added,2,grid[2,2])
plt.savefig("Fig1ac_EOFamoc.SST.1854.2021.pdf",format="pdf", bbox_inches = 'tight', dpi=500)
plt.savefig("Fig1ac_EOFamoc.SST.1854.202.jpg",format="jpg", bbox_inches = 'tight', dpi=500)
Output: 18.96 (variance fraction of EOF1, %).
#export the PCs in a format usable for CCA
df=pcs_norm_sst.to_dataframe().unstack().transpose()
df.to_csv('RezEOF.PC.sst.obs.18542021.8PCs.normalized.txt')
#select the 1854 - 2017 period
pcs_sst_18542017=pcs_norm_sst.sel(time=slice("1854-06-16", "2017-06-16"))
df1=pcs_sst_18542017.to_dataframe().unstack().transpose()
df1.to_csv('RezEOF.PC.sst.obs.18542017.8PCs.normalized.txt')
#export the superposition of PC1+PC2
df=pcs_added_sst.to_dataframe().unstack().transpose()
df.to_csv('RezEOF.PC1+PC2.Added.normalized.txt')
#pcs_sst_18572017 = pcs_norm_sst.sel(time=pcs_norm_sst.time.dt.year.isin([range (1854,2018,1)]))
Reconstruct the Atlantic SST field based on a limited number of EOFs (8).
map_file_export="RezEOF.sst.EOFS_x_PCs.export_to_netcdf.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1xPC1 " + fnc + " -chname,sst,EOF2xPC2 " + fnc + " -chname,sst,EOF3xPC3 " + fnc + " -chname,sst,EOF4xPC4 " + fnc + " -chname,sst,EOF5xPC5 " + fnc + " -chname,sst,EOF6xPC6 " + fnc + " -chname,sst,EOF7xPC7 " + fnc + " -chname,sst,EOF8xPC8 " + fnc + " " + map_file_export)
# write the EOFs maps to netcdf file
map_file_export="REZeof.EOFmaps.sst.obs.1850.2017.nc"
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1 -seltimestep,1 " + fnc + " -chname,sst,EOF2 -seltimestep,1 " + fnc + " -chname,sst,EOF3 -seltimestep,1 " + fnc + " -chname,sst,EOF4 -seltimestep,1 " + fnc + " -chname,sst,EOF5 -seltimestep,1 " + fnc + " -chname,sst,EOF6 -seltimestep,1 " + fnc + " -chname,sst,EOF7 -seltimestep,1 "+ fnc + " -chname,sst,EOF8 -seltimestep,1 "+ fnc + " " + map_file_export)
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
map_data_normalized=ncfile.variables['EOF1'][:]
#print(np.shape(map_data_normalized))
evec_export=evec_sst.reindex(lat=evec_sst.lat[::-1])[:,:,:] #the latitude axis in the file runs in the opposite direction to the EOF output, so reindex before writing
#sdev=pcs.std(dim="time")
#vec_export=[]
#evec_export = xr.concat(evec_export, range(8)).rename({"concat_dim": "EOF"})
#ds_concat
#print(np.shape(evec))
#print(np.shape(evec_export))
ncfile.variables['EOF1'][:]=evec_export[0,:,:] #write eof1 to dedicated variable
ncfile.variables['EOF2'][:]=evec_export[1,:,:] #write eof2 to dedicated variable
ncfile.variables['EOF3'][:]=evec_export[2,:,:] #write eof3 to dedicated variable
ncfile.variables['EOF4'][:]=evec_export[3,:,:] #write eof4 to dedicated variable
ncfile.variables['EOF5'][:]=evec_export[4,:,:] #write eof5 to dedicated variable
ncfile.variables['EOF6'][:]=evec_export[5,:,:] #write eof6 to dedicated variable
ncfile.variables['EOF7'][:]=evec_export[6,:,:] #write eof7 to dedicated variable
ncfile.variables['EOF8'][:]=evec_export[7,:,:] #write eof8 to dedicated variable
ncfile.close()
# Multiply each EOF by its associated PC and write the products to a netcdf file
eof_pcs=[]
for i in range(8):
#print(i)
eof_pcs.append(pcs_sst[i,:]*evec_export[i,:,:])
map_file_export="REzEOF.sst.EOFS_x_PCs.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1xPC1 " + fnc + " -chname,sst,EOF2xPC2 " + fnc + " -chname,sst,EOF3xPC3 " + fnc + " -chname,sst,EOF4xPC4 " + fnc + " -chname,sst,EOF5xPC5 " + fnc + " -chname,sst,EOF6xPC6 " + fnc + " -chname,sst,EOF7xPC7 "+ fnc + " -chname,sst,EOF8xPC8 "+ fnc + " " + map_file_export)
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
for i in range(8):
varname="EOF"+str(i+1)+"xPC"+str(i+1)
eof_pcs_export=eof_pcs[i]#.reindex(lat=eof_pcs[i].lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
#print(np.shape(eof_pcs_export))
ncfile.variables[varname][:]=eof_pcs_export #write eof to dedicated variable
ncfile.close()
#print(np.shape(eof_pcs))
#print(type(eof_pcs[0]))
#reconstruct the initial field based on the first 8 EOFs
eof_pcs_sum=eof_pcs[0]+eof_pcs[1]+eof_pcs[2]+eof_pcs[3]+eof_pcs[4]+eof_pcs[5]+eof_pcs[6]+eof_pcs[7]
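This eight-term sum is the rank-8 reconstruction sst(t, lat, lon) ≈ sum over k of PC_k(t) * EOF_k(lat, lon); with the DataArrays above it collapses to one line (a sketch, assuming eofunc_pcs names its leading dimension 'pc'):
rec = (pcs_sst.rename({'pc': 'eof'}) * evec_export).sum(dim='eof')  # same values as eof_pcs_sum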
map_file_export="sst_ersst_rec8eof_18542021.anm.g.anom.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo chname,sst,sst " + fnc + " " + map_file_export)
#write sum of products of eofs and pcs to netcdf file
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
eof_pcs_sum_export=eof_pcs_sum #.reindex(lat=eof_pcs_sum.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
ncfile.variables["sst"][:]=eof_pcs_sum_export #write eof to dedicated variable
ncfile.close()
#remove the files created that are not of use for the current analyses
os.remove('RezEOF.sst.EOFS_x_PCs.export_to_netcdf.nc')
os.remove('REZeof.EOFmaps.sst.obs.1850.2017.nc')
os.remove('REzEOF.sst.EOFS_x_PCs.nc')
Perform EOF on Arctic detrended SIC annual anomalies
#open NSIDC SIC data
fnc = 'G10010_sibt1850_v2.0.nc'
ds = xr.open_dataset(fnc)
#print(ds)
#transform the calendar from Julian to Gregorian and select variable
ds_cal=ds.convert_calendar('gregorian', dim='time', align_on=None, missing=None, use_cftime=None)
ds_sic=ds_cal.seaice_conc
ds_sic.to_netcdf('sic_walsh_18502017.nc')
os.remove('G10010_sibt1850_v2.0.nc')
from cdo import *
cdo = Cdo()
#detrend the data
cdo.detrend(input="sic_walsh_18502017.nc", output="det.sic_walsh_18502017.nc")
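For reference, the per-grid-cell linear detrend can also be done in xarray (a sketch, assuming a recent xarray with DataArray.polyfit; cdo detrend remains what is actually used here):
sic = xr.open_dataset("sic_walsh_18502017.nc")["seaice_conc"]
fit = sic.polyfit(dim="time", deg=1)                       # least-squares line per grid cell
trend = xr.polyval(sic["time"], fit.polyfit_coefficients)  # evaluate the fitted line
sic_det = sic - trend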
#calculate anomalies relative to the 1980 - 2010 period
cdo.selyear("1980/2010",input="det.sic_walsh_18502017.nc", output="det.sic_walsh_19802017.nc")
cdo.ymonmean(input="det.sic_walsh_19802017.nc", output="annual_cycle_SIC.19802010.nc")
cdo.sub(input="det.sic_walsh_18502017.nc annual_cycle_SIC.19802010.nc", output="anom.det.sic.walsh.18502017.nc")
os.remove('det.sic_walsh_18502017.nc')
os.remove('annual_cycle_SIC.19802010.nc')
#calculate annual means
cdo.yearmonmean(input="anom.det.sic.walsh.18502017.nc", output="anm.anom.det.sic.walsh.18502017.nc")
os.remove('anom.det.sic.walsh.18502017.nc')
#select region of interest
cdo.sellonlatbox('0,360,55,90', input="anm.anom.det.sic.walsh.18502017.nc", output="anm.5590.anom.det.sic.walsh.18502017.nc")
os.remove('anm.anom.det.sic.walsh.18502017.nc')
#Perform EOF on annual detrended SIC anomalies and plot EOF 1
fnc = 'anm.5590.anom.det.sic.walsh.18502017.nc'
ds1_sic = xr.open_dataset(fnc)
#print(ds1_sic)
# ----- Period and area setting ------
ystr = 1850
yend = 2017
latS = 55.
latN = 90.
lonW = 0.
lonE = 360.
neof = 8
anm=ds1_sic.seaice_conc
# -- EOF --
anmnD = anm.sortby("latitude", ascending=True)
clat = anmnD['latitude'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))
wanm = anmnD * clat
wanm.attrs = anmnD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['longitude']
if ( ((lonW < 0) or (lonE < 0 )) and (lon.values.min() > -1) ):
    wanm=wanm.assign_coords(longitude=( (lon + 180) % 360 - 180) )
    wanm = wanm.sortby("longitude")
print(' change longitude ')
#print(wanm)
xw = wanm.sel(latitude=slice(latS, latN), longitude=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'latitude', 'longitude')
#print(xw_anm)
eofs_sic = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs_sic = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
pcs_norm_sic = pcs_sic / pcs_sic.std(dim='time')
pcs_sic['time']=anmnD['time']
pcs_norm_sic['time']=anmnD['time']
pcs_sic.attrs['varianceFraction'] = eofs_sic.attrs['varianceFraction']
pcs_norm_sic.attrs['varianceFraction'] = eofs_sic.attrs['varianceFraction']
#print(pcs_norm_sic)
sdev_pc1=pcs_sic[0].std (dim='time')
#print(sdev_pc1)
evec_sic = xr.DataArray(data=eofs_sic, dims=('eof','latitude','longitude'),
coords = {'eof': np.arange(0,neof), 'latitude': xw['latitude'], 'longitude': xw['longitude']} )
#print(evec_sic)
def makefig(dat, ieof, grid_space):
# Fix the artifact of not-shown-data around 0 and 360-degree longitudes
# Generate axes using Cartopy to draw coastlines
ax = fig.add_subplot(grid_space,
projection=ccrs.NorthPolarStereo(central_longitude=-30))
# projection=ccrs.Robinson(central_longitude=210))
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=17)
gl = ax.gridlines(crs=ccrs.PlateCarree(),
draw_labels=True,
dms=False,
x_inline=False,
y_inline=False,
linewidth=1,
linestyle='dotted',
color="black",
alpha=0.3)
gl.top_labels = False
gl.right_labels = False
gl.rotate_labels = False
gvutil.add_major_minor_ticks(ax, labelsize=16)
gvutil.add_lat_lon_ticklabels(ax)
newcmp = gvcmaps.BlueYellowRed
index = [5, 20, 35, 50, 65, 85, 95, 110, 125, 0, 0, 135, 150, 165, 180, 200, 210, 220, 235, 250 ]
color_list = [newcmp[i].colors for i in index]
color_list[9]=[ 1., 1., 1.]
color_list[10]=[ 1., 1., 1.]
kwargs = dict(
vmin = -4,
vmax = 4,
levels = 19,
colors=color_list,
add_colorbar=False, # allow for colorbar specification later
transform=ccrs.PlateCarree(), # ds projection
)
fillplot = dat[ieof,:,:].plot.contourf(ax=ax, **kwargs)
#ax.add_feature(cfeature.LAND, facecolor='lightgray', zorder=1)
ax.add_feature(cfeature.COASTLINE, edgecolor='gray', linewidth=0.5, zorder=1)
gvutil.set_titles_and_labels(ax,
lefttitle='SIC (EOF1) ',
lefttitlefontsize=14,
righttitle='',
righttitlefontsize=14,
maintitle='',
xlabel="",
ylabel="")
return ax, fillplot
def make_bar_plot(dataset, ieof, grid_space):
years = list(dataset.time.dt.year)
values = list(dataset[ieof,:].values)
colors = ['cyan' if val < 0 else 'gold' for val in values]
ax = fig.add_subplot(grid_space)
ax.bar(years,
values,
color=colors,
width=1.0,
edgecolor='black',
linewidth=0.5)
gvutil.add_major_minor_ticks(ax,
x_minor_per_major=5,
y_minor_per_major=5,
labelsize=13)
gvutil.set_axes_limits_and_ticks(ax,
xticks=np.linspace(1800, 2020, 5),
xlim=[1850.5, 2021.5],
ylim=[-5.5, 5])
pct = dataset.attrs['varianceFraction'].values[ieof] * 100
print(pct)
gvutil.set_titles_and_labels(ax,
lefttitle='PC1 1850 - 2017',
lefttitlefontsize=13,
righttitle='25%',
righttitlefontsize=13,
xlabel="Year",
ylabel="Std.dev",
labelfontsize=13 )
return ax
#Plot EOF1 (sign-flipped) multiplied by the standard deviation of PC1
fig = plt.figure(figsize=(16, 13))
grid = fig.add_gridspec(ncols=3, nrows=3, hspace=0.04)
ax1, fill1 = makefig(sdev_pc1*-evec_sic,0, grid[0:2,0])
#ax2, fill2 = makefig(cor, grid[0:2,1])
#ax3, fill3 = makefig(-evec1,2, grid[0:2,2])
fig.colorbar(fill1,
ax=[ax1],
ticks=np.linspace(-4, 4, 5),
drawedges=True,
label='(%)',
orientation='horizontal',
shrink=1,
pad=0.09,
extendfrac='auto',
extendrect=True)
ax1 = make_bar_plot(-pcs_norm_sic,0,grid[2,0])
#ax2 = make_bar_plot(-pcs_added,1,grid[2,1])
#ax3 = make_bar_plot(-pcs_added,2,grid[2,2])
plt.savefig("Fig1bd_EOFamoc.SIC.1854.2021.pdf",format="pdf", bbox_inches = 'tight',dpi=500)
plt.savefig("Fig1bd_EOFamoc.SIC.1854.2021.jpg",format="jpg", bbox_inches = 'tight',dpi=500)
Output: 25.26 (variance fraction of PC1, %).
RECONSTRUCT INITIAL FIELD BASED ON 8 EOFs
#RECONSTRUCT INITIAL FIELD BASED ON 8 EOFs
map_file_export="REZeof.EOFmaps.sic.obs.1850.2017.nc"
os.system("rm " + map_file_export + "; cdo merge -chname,seaice_conc,EOF1 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF2 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF3 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF4 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF5 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF6 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF7 -seltimestep,1 "+ fnc + " -chname,seaice_conc,EOF8 -seltimestep,1 "+ fnc + " " + map_file_export)
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
map_data_normalized=ncfile.variables['EOF1'][:]
#print(np.shape(map_data_normalized))
evec_export=evec_sic.reindex(latitude=evec_sic.latitude[::-1])[:,:,:] #the latitude axis in the file runs in the opposite direction to the EOF output, so reindex before writing
#ds_concat
#print(np.shape(evec))
#print(np.shape(evec_export))
ncfile.variables['EOF1'][:]=evec_export[0,:,:] #write eof1 to dedicated variable
ncfile.variables['EOF2'][:]=evec_export[1,:,:] #write eof2 to dedicated variable
ncfile.variables['EOF3'][:]=evec_export[2,:,:] #write eof3 to dedicated variable
ncfile.variables['EOF4'][:]=evec_export[3,:,:] #write eof4 to dedicated variable
ncfile.variables['EOF5'][:]=evec_export[4,:,:] #write eof5 to dedicated variable
ncfile.variables['EOF6'][:]=evec_export[5,:,:] #write eof6 to dedicated variable
ncfile.variables['EOF7'][:]=evec_export[6,:,:] #write eof7 to dedicated variable
ncfile.variables['EOF8'][:]=evec_export[7,:,:] #write eof8 to dedicated variable
ncfile.close()
# Multiply each EOF by its associated PC and write the products to a netcdf file
eof_pcs=[]
for i in range(8):
#print(i)
eof_pcs.append(pcs_sic[i,:]*evec_export[i,:,:])
map_file_export="REzEOF.sic.EOFS_x_PCs.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
#note: only the first six EOFxPC products are merged and written out here, while the in-memory reconstruction below still uses all eight
os.system("rm " + map_file_export + "; cdo merge -chname,seaice_conc,EOF1xPC1 " + fnc + " -chname,seaice_conc,EOF2xPC2 " + fnc + " -chname,seaice_conc,EOF3xPC3 " + fnc + " -chname,seaice_conc,EOF4xPC4 " + fnc + " -chname,seaice_conc,EOF5xPC5 " + fnc + " -chname,seaice_conc,EOF6xPC6 " + fnc + " " + map_file_export)
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
for i in range(6):
varname="EOF"+str(i+1)+"xPC"+str(i+1)
eof_pcs_export=eof_pcs[i]#.reindex(latitude=eof_pcs[i].latitude[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
#print(np.shape(eof_pcs_export))
ncfile.variables[varname][:]=eof_pcs_export #write eof to dedicated variable
ncfile.close()
#reconstruct the initial field based on the first 8 EOFs
eof_pcs_sum=eof_pcs[0]+eof_pcs[1]+eof_pcs[2]+eof_pcs[3]+eof_pcs[4]+eof_pcs[5]+eof_pcs[6]+eof_pcs[7]
map_file_export="SIC_walsh_rec8eof_1850_2017.anm.det.anom.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo chname,seaice_conc,sic " + fnc + " " + map_file_export)
#write sum of products of eofs and pcs to netcdf file
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
eof_pcs_sum_export=eof_pcs_sum #.reindex(latitude=eof_pcs_sum.latitude[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
ncfile.variables["sic"][:]=eof_pcs_sum_export #write eof to dedicated variable
ncfile.close()
#remove the files created that are not of use for the current analyses
os.remove('REZeof.EOFmaps.sic.obs.1850.2017.nc')
os.remove('REzEOF.sic.EOFS_x_PCs.nc')
#export the PCs in a format usable for CCA
df=pcs_norm_sic.to_dataframe().unstack().transpose()
df.to_csv('RezEOF.sic.obs.18502017.8PCs.normalized.VF.txt')
#select 1854-2017 period
pcs_sic_18572017 = pcs_norm_sic.sel(time=slice("1854-06-30", "2017-06-30"))
df1=pcs_sic_18572017.to_dataframe().unstack().transpose()
df1.to_csv('RezEOF.PC.sic.obs.18542017.8PCs.normalized.txt')
from cdo import *
cdo = Cdo()
#calculate anomalies and annual means
cdo.selyear("1980/2010",input="sic_walsh_18502017.nc", output="sic_walsh_19802010.nc")
cdo.ymonmean(input="sic_walsh_19802010.nc", output="annual_cycle_SIC.19802010.nc")
cdo.sub(input="sic_walsh_18502017.nc annual_cycle_SIC.19802010.nc", output="anom.sic.walsh.18502017.nc")
os.remove('sic_walsh_18502017.nc')
os.remove('sic_walsh_19802010.nc')
os.remove('annual_cycle_SIC.19802010.nc')
cdo.yearmonmean(input="anom.sic.walsh.18502017.nc", output="anm.anom.sic.walsh.18502017.nc")
cdo.sellonlatbox('0,360,55,90', input="anm.anom.sic.walsh.18502017.nc", output="anm.5590.anom.sic.walsh.18502017.nc")
os.remove('anom.sic.walsh.18502017.nc')
os.remove('anm.anom.sic.walsh.18502017.nc')
#Perform EOF
fnc = 'anm.5590.anom.sic.walsh.18502017.nc'
dsq_sic = xr.open_dataset(fnc)
#print(ds1_sic)
# ----- Period and area setting ------
ystr = 1850
yend = 2017
latS = 55.
latN = 90.
lonW = 0.
lonE = 360
neof = 8
anm=dsq_sic.seaice_conc
# -- EOF --
anmnD = anm.sortby("latitude", ascending=True)
clat = anmnD['latitude'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))
wanm = anmnD * clat
wanm.attrs = anmnD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['longitude']
if ( ((lonW < 0) or (lonE < 0 )) and (lon.values.min() > -1) ):
    wanm=wanm.assign_coords(longitude=( (lon + 180) % 360 - 180) )
    wanm = wanm.sortby("longitude")
print(' change longitude ')
#print(wanm)
xw = wanm.sel(latitude=slice(latS, latN), longitude=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'latitude', 'longitude')
#print(xw_anm)
eofs_sicD = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs_sicD = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
pcs_norm_sicD = pcs_sicD / pcs_sicD.std(dim='time')
pcs_sicD['time']=anmnD['time']
pcs_norm_sicD['time']=anmnD['time']
pcs_sicD.attrs['varianceFraction'] = eofs_sicD.attrs['varianceFraction']
pcs_norm_sicD.attrs['varianceFraction'] = eofs_sicD.attrs['varianceFraction']
#print(pcs_norm_sic)
evec_sicD = xr.DataArray(data=eofs_sicD, dims=('eof','latitude','longitude'),
coords = {'eof': np.arange(0,neof), 'latitude': xw['latitude'], 'longitude': xw['longitude']} )
#print(evec_sic)
# Plot the first three EOFs (Supplementary Figure 3)
def makefig(dat, ieof, grid_space):
# Fix the artifact of not-shown-data around 0 and 360-degree longitudes
# Generate axes using Cartopy to draw coastlines
ax = fig.add_subplot(grid_space,
projection=ccrs.NorthPolarStereo(central_longitude=-30.0, globe=None))
# projection=ccrs.Stereographic(central_latitude=0.0, central_longitude=0.0, false_easting=0.0, false_northing=0.0, true_scale_latitude=None, globe=None))
# projection=ccrs.Robinson(central_longitude=210))
ax.coastlines(linewidth=0.5, alpha=0.6)
gl = ax.gridlines(crs=ccrs.PlateCarree(),
draw_labels=True,
dms=False,
x_inline=False,
y_inline=False,
linewidth=1,
linestyle='dotted',
color="black",
alpha=0.3)
gl.top_labels = False
gl.right_labels = False
gl.rotate_labels = False
# Use geocat.viz.util convenience function to add minor and major tick lines
gvutil.add_major_minor_ticks(ax, labelsize=10)
# Use geocat.viz.util convenience function to make latitude, longitude tick labels
gvutil.add_lat_lon_ticklabels(ax)
# Import the default color map
newcmp = gvcmaps.BlueYellowRed
index = [5, 20, 35, 50, 65, 85, 95, 110, 125, 0, 0, 135, 150, 165, 180, 200, 210, 220, 235, 250 ]
color_list = [newcmp[i].colors for i in index]
#-- Change to white
color_list[9]=[ 1., 1., 1.]
color_list[10]=[ 1., 1., 1.]
# Define dictionary for kwargs
kwargs = dict(
vmin = -0.006,
vmax = 0.006,
levels = 19,
colors=color_list,
add_colorbar=False, # allow for colorbar specification later
transform=ccrs.PlateCarree(), # ds projection
)
# Contourf-plot the EOF pattern (filled contours)
fillplot = dat[ieof,:,:].plot.contourf(ax=ax, **kwargs)
# Draw map features on top of filled contour
ax.add_feature(cfeature.LAND, facecolor='lightgray', zorder=1)
ax.add_feature(cfeature.COASTLINE, edgecolor='gray', linewidth=0.5, zorder=1)
# Use geocat.viz.util convenience function to add titles to left and right of the plot axis.
gvutil.set_titles_and_labels(ax,
lefttitle=f'EOF{ieof+1} pattern',
lefttitlefontsize=12,
righttitle='',
righttitlefontsize=12,
maintitle='',
xlabel="",
ylabel="")
return ax, fillplot
def make_bar_plot(dataset, ieof, grid_space):
years = list(dataset.time.dt.year)
values = list(dataset[ieof,:].values)
colors = ['blue' if val < 0 else 'red' for val in values]
ax = fig.add_subplot(grid_space)
ax.bar(years,
values,
color=colors,
width=1.0,
edgecolor='black',
linewidth=0.5)
# Use geocat.viz.util convenience function to add minor and major tick lines
gvutil.add_major_minor_ticks(ax,
x_minor_per_major=5,
y_minor_per_major=5,
labelsize=10)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_axes_limits_and_ticks(ax,
xticks=np.linspace(1800, 2020, 5),
xlim=[1850, 2020],
ylim=[-3.0, 3.5])
pct = dataset.attrs['varianceFraction'].values[ieof] * 100
print(pct)
gvutil.set_titles_and_labels(ax,
lefttitle=f'PC{ieof+1} (normalized)',
lefttitlefontsize=12,
righttitle=f'{pct:.1f}%',
righttitlefontsize=12,
xlabel="Year",
ylabel="",
labelfontsize=10 )
return ax
# Show the plot
fig = plt.figure(figsize=(14, 6))
grid = fig.add_gridspec(ncols=3, nrows=3, hspace=0.4)
ax1, fill1 = makefig(evec_sicD,0, grid[0:2,0])
ax2, fill2 = makefig(evec_sicD,1, grid[0:2,1])
ax3, fill3 = makefig(evec_sicD,2, grid[0:2,2])
fig.colorbar(fill2,
ax=[ax1,ax2,ax3],
ticks=np.linspace(-0.006, 0.006, 5),
drawedges=True,
label='Eigenvector',
orientation='horizontal',
shrink=0.3,
pad=0.08,
extendfrac='auto',
extendrect=True)
ax1 = make_bar_plot(pcs_norm_sicD,0,grid[2,0])
ax2 = make_bar_plot(pcs_norm_sicD,1,grid[2,1])
ax3 = make_bar_plot(pcs_norm_sicD,2,grid[2,2])
#fig.suptitle('EOF for SST (DJF)', fontsize=16, y=0.9)
plt.draw()
plt.savefig("Supp.Fig.3.without.detrend.EOF123.SIC.1850.2017.pdf",format="pdf", bbox_inches = 'tight',dpi=500)
plt.savefig("Supp.Fig.3.EOF123.SIC.without.detrend.1850.2017.jpg",format="jpg", bbox_inches = 'tight',dpi=500)
Output: 33.53, 11.77, 7.08 (variance fractions of EOF1-3, %).
# select PC1 over the 1854 - 2017 period (this will be used later to correlate with the PCs from the 1st CCA pair)
PC1_SIC_EOF_NO_DET=pcs_norm_sicD.sel(time=slice("1854-06-30", "2017-06-30"))
# select the same period for both data sets (and pcs)
from cdo import *
cdo = Cdo()
cdo.selyear("1854/2017",input="sst_ersst_rec8eof_18542021.anm.g.anom.nc", output="sst_ersst_rec8eof_18542017.anm.g.anoma.nc")
cdo.selyear("1854/2017",input="SIC_walsh_rec8eof_1850_2017.anm.det.anom.nc", output="SIC_walsh_rec8eof_1854_2017.anm.det.anoma.nc")
# open the two datasets, reconstructed based on 8 EOFs and their associated PCs
field1 = 'sst_ersst_rec8eof_18542017.anm.g.anoma.nc'
ds1 = xr.open_dataset(field1)
#print(ds1)
field2 = 'SIC_walsh_rec8eof_1854_2017.anm.det.anoma.nc'
ds2= xr.open_dataset(field2)
#print(ds2)
PCsV1=pd.read_csv('RezEOF.PC.sst.obs.18542017.8PCs.normalized.txt',sep=',')
PCsV1_xr=PCsV1.to_xarray().to_array()
PCsSST=PCsV1[['0','1','2','3','4','5','6','7']]
PCsV2=pd.read_csv('RezEOF.PC.sic.obs.18542017.8PCs.normalized.txt',sep=',')
PCsV2_xr=PCsV2.to_xarray().to_array()
PCsSIC=PCsV2[['0','1','2','3','4','5','6','7']]
#X2
#Normalize the PCs (if necessary)
X1_mc = (PCsSST-PCsSST.mean())/(PCsSST.std())
#X1_mc.head()
X2_mc = (PCsSIC-PCsSIC.mean())/(PCsSIC.std())
#X2_mc.head()
#Perform CCA between the 8 PCs obtained through EOF analysis of the detrended SST and SIC annual anomalies
ca = CCA(n_components=8)
ca.fit(X1_mc, X2_mc)
X_c, Y_c = ca.transform(X1_mc, X2_mc)
#export the pairs of maximally correlated PCs obtained from CCA
cc_res = pd.DataFrame({"CCX_1":X_c[:, 0],
"CCY_1":Y_c[:, 0],
"CCX_2":X_c[:, 1],
"CCY_2":Y_c[:, 1],
"CCX_3":X_c[:, 2],
"CCY_3":Y_c[:, 2],
"CCX_4":X_c[:, 3],
"CCY_4":Y_c[:, 3],
"CCX_5":X_c[:, 4],
"CCY_5":Y_c[:, 4],
"CCX_6":X_c[:, 5],
"CCY_6":Y_c[:, 5],
"CCX_7":X_c[:, 6],
"CCY_7":Y_c[:, 6],
"CCX_8":X_c[:, 7],
"CCY_8":Y_c[:, 7],
#"Species":df.species.tolist(),
})
# cc_res.head()
#print and store the correlation coefficients between the paired canonical variates
CCAresultPccorr=[]
for p in range (np.shape(X_c)[1]):
    r = np.corrcoef(X_c[:, p], Y_c[:, p])[0, 1]
    CCAresultPccorr.append(r)
    print(r)
Output: canonical correlations r = 0.759, 0.707, 0.573, 0.423, 0.293, 0.188, 0.117, 0.035.
#select the first 4 individual pcs
rezcca=cc_res.to_xarray()
PC1sst=rezcca['CCX_1']
PC1sic=rezcca['CCY_1']
PC2sst=rezcca['CCX_2']
PC2sic=rezcca['CCY_2']
PC3sst=rezcca['CCX_3']
PC3sic=rezcca['CCY_3']
PC4sst=rezcca['CCX_4']
PC4sic=rezcca['CCY_4']
#reindex the time axis in the PCs
PC1sst= PC1sst.rename({"index": "time"})
PC2sst= PC2sst.rename({"index": "time"})
PC3sst= PC3sst.rename({"index": "time"})
PC4sst= PC4sst.rename({"index": "time"})
PC1sic= PC1sic.rename({"index": "time"})
PC2sic= PC2sic.rename({"index": "time"})
PC3sic= PC3sic.rename({"index": "time"})
PC4sic= PC4sic.rename({"index": "time"})
PC1sst['time'] = ds1.time
PC2sst['time'] = ds1.time
PC3sst['time'] = ds1.time
PC4sst['time'] = ds1.time
PC1sic['time'] = ds2.time
PC2sic['time'] = ds2.time
PC3sic['time'] = ds2.time
PC4sic['time'] = ds2.time
#Obtain the associated spatial structures through linear regression of the reconstructed SST/SIC fields on the CCA time series from each pair
regSST1 = xr.cov(PC1sst.load(), ds1.sst.load(), dim="time")/PC1sst.var(dim='time',skipna=True).values
regSST2 = xr.cov(PC2sst.load(), ds1.sst.load(), dim="time")/PC2sst.var(dim='time',skipna=True).values
regSST3 = xr.cov(PC3sst.load(), ds1.sst.load(), dim="time")/PC3sst.var(dim='time',skipna=True).values
regSST4 = xr.cov(PC4sst.load(), ds1.sst.load(), dim="time")/PC4sst.var(dim='time',skipna=True).values
regSIC1 = xr.cov(PC1sic.load(), ds2.sic.load(), dim="time")/PC1sic.var(dim='time',skipna=True).values
regSIC2 = xr.cov(PC2sic.load(), ds2.sic.load(), dim="time")/PC2sic.var(dim='time',skipna=True).values
regSIC3 = xr.cov(PC3sic.load(), ds2.sic.load(), dim="time")/PC3sic.var(dim='time',skipna=True).values
regSIC4 = xr.cov(PC4sic.load(), ds2.sic.load(), dim="time")/PC4sic.var(dim='time',skipna=True).values
# Obtain the percentage of variance explained by each spatial structure (as the average of r squared), through correlation between the CCA time series from each pair and the SST/SIC fields reconstructed from the first 8 EOFs
cor_SST1 = xr.corr(PC1sst.load(), ds1.sst.load(), dim="time")
r1_squared = cor_SST1*cor_SST1
var_sst1=(r1_squared.mean()*100)
var_sst1_round=var_sst1.round(decimals=1)
print(var_sst1_round)
cor_SST2 = xr.corr(PC2sst.load(), ds1.sst.load(), dim="time")
r2_squared = cor_SST2*cor_SST2
var_sst2=r2_squared.mean()*100
var_sst2_round=var_sst2.round(decimals=1)
print(var_sst2_round)
cor_SST3 = xr.corr(PC3sst.load(), ds1.sst.load(), dim="time")
r3_squared = cor_SST3*cor_SST3
var_sst3=r3_squared.mean()*100
var_sst3_round=var_sst3.round(decimals=1)
print(var_sst3_round)
cor_SIC1 = xr.corr(PC1sic.load(), ds2.sic.load(), dim="time")
r1_squared_sic = cor_SIC1*cor_SIC1
var_sic1=r1_squared_sic.mean()*100
var_sic1_round=var_sic1.round(decimals=1)
print(var_sic1_round)
cor_SIC2 = xr.corr(PC2sic.load(), ds2.sic.load(), dim="time")
r2_squared_sic = cor_SIC2*cor_SIC2
var_sic2=r2_squared_sic.mean()*100
var_sic2_round=var_sic2.round(decimals=1)
print(var_sic2_round)
cor_SIC3 = xr.corr(PC3sic.load(), ds2.sic.load(), dim="time")
r3_squared_sic = cor_SIC3*cor_SIC3
var_sic3=r3_squared_sic.mean()*100
var_sic3_round=var_sic3.round(decimals=2)
print(var_sic3_round)
Output: mean explained variance is 17.6%, 9.5%, 12.6% for SST CCA1-3 and 35.5%, 12.3%, 15.21% for SIC CCA1-3.
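scipy.stats is imported at the top but never used; a natural use here is a significance map for these correlation fields (a sketch, assuming n = 164 independent annual means, which overstates the effective sample size if the series are autocorrelated):
n = ds1.sizes['time']                                     # 164 annual means (1854-2017)
t_stat = cor_SST1 * np.sqrt((n - 2) / (1 - cor_SST1**2))  # t statistic of the correlation
p_val = xr.apply_ufunc(lambda t: 2 * stats.t.sf(np.abs(t), df=n - 2), t_stat)
sig_mask = p_val < 0.05                                   # grid cells significant at the 5% level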
### Plot the spatial structures and the time series of each pair, individually
Plotting the two spatial structures from the 1st CCA pair (Figure 2 a, c and e)
# Plotting the two spatial structures from the 1st CCA pair (Figure 2 a, c and e)
#In this plot, the PCs and the regression maps are multiplied by -1, to reflect a positive change of the AMOC
PC1sstok=PC1sst*-1
PC1sicok=PC1sic*-1
fig, ax = plt.subplots(figsize=(8, 5),dpi=300, subplot_kw={"projection":ccrs.Robinson(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-75, 15], [-80, 80], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = False
gl.left_labels = True
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, -45, 0, 45, 180])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 3, 'color': 'gray'}
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-0.4
level_max=0.4
stepsize=0.04
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("vlag", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap = 'seismic'
cf=ax.contourf(ds1['lon'], ds1['lat'], -regSST1, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='vertical',
label=r"(°C)",pad=0.025,shrink=0.5,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA1 SST ',
lefttitlefontsize=15,
righttitle=f'{var_sst1_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21 )
plt.savefig("Fig1a.CCAsstSIC.obs.18542017.SST1.8p.pdf",format="pdf", bbox_inches = 'tight',dpi= 600)
plt.savefig("Fig1a.CCAsstSIC.obs.18542017.SST1.8p.jpg",format="jpg", bbox_inches = 'tight',dpi= 600)
plt.tight_layout()
plt.show()
fig1, ax= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.NorthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-180, 180], [55, 90], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([ 60])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax.contourf(ds2['longitude'], ds2['latitude'], -regSIC1, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA1 SIC ',
lefttitlefontsize=15,
righttitle=f'{var_sic1_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21)
plt.savefig("Fig1e.CCAsstSIC.obs.18542017.SIC8p.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("Fib1e.CCAsstSIC.obs.18542017.SIC8p.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
ax.coastlines()
plt.tight_layout()
plt.show()
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f'(PC1) 1854 - 2017',
lefttitlefontsize=29,
righttitle=f'r = 0.77',
righttitlefontsize=29,
xlabel="",
ylabel="Stdandard deviation",
labelfontsize=29)
gv.add_major_minor_ticks(ax,
x_minor_per_major=1,
y_minor_per_major=5,
labelsize=23)
# Plot data
ax.plot(ds1.time, PC1sstok, color='black', linewidth=3, label='SST')
ax.plot(ds1.time, PC1sicok, marker='o', markerfacecolor='red', markersize=2.8, color='red', linewidth=1.75,label='SIC')
#ax.legend(prop={"size":6})
ax.fill_between(ds1.time,PC1sicok, where=PC1sicok > 0, color='gold')
ax.fill_between(ds1.time,PC1sicok, where=PC1sicok < 0, color='cyan')
plt.legend(prop={"size":23})
plt.savefig("CCAsstSIC.obs.18542017.PC110p.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("CCAsstSIC.obs.18542017.PC110p.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
plt.tight_layout()
plt.show()
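Each CCA pair below repeats three nearly identical figure cells; only the field, contour levels and titles change. A condensed sketch of a reusable helper (the function name, the simplified styling and the seismic default are ours, not the exact figure settings used above):
import numpy as np
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import cartopy.feature as cfeature

def plot_cca_map(lon, lat, field, levels, cbar_label, left, right, polar=False, fname=None):
    """Contour a CCA regression map on the Atlantic Robinson view or a North Polar view."""
    proj = ccrs.NorthPolarStereo(central_longitude=-35) if polar else ccrs.Robinson(central_longitude=-35)
    fig, ax = plt.subplots(figsize=(8, 5), dpi=300, subplot_kw={"projection": proj})
    ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
    ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
    cf = ax.contourf(lon, lat, field, transform=ccrs.PlateCarree(),
                     levels=levels, cmap='seismic', extend='both')
    plt.colorbar(cf, orientation='vertical', label=cbar_label, shrink=0.5)
    ax.set_title(left, loc='left', fontsize=15)
    ax.set_title(right, loc='right', fontsize=15)
    if fname:
        plt.savefig(fname, bbox_inches='tight', dpi=600)
    plt.show()

# e.g. plot_cca_map(ds1['lon'], ds1['lat'], -regSST1, np.arange(-0.44, 0.49, 0.04),
#                   "(°C)", "CCA1 SST", f"{var_sst1_round.values} %")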
# SECOND CCA PAIR (Suplementary Figure 2/S2 FIG)
fig, ax = plt.subplots(figsize=(8, 5),dpi=300, subplot_kw={"projection":ccrs.Robinson(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-75, 15], [-80, 80], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = False
gl.left_labels = True
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, -45, 0, 45, 180])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-0.4
level_max=0.4
stepsize=0.04
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("vlag", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap = 'seismic'
cf=ax.contourf(ds1['lon'], ds1['lat'], regSST2, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='vertical',
label=r"(°C)",pad=0.025,shrink=0.5,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA2 SST ',
lefttitlefontsize=15,
righttitle=f'{var_sst2_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21 )
plt.savefig("CCAsstSIC.obs.18542017.SST2.8p.pdf",format="pdf", bbox_inches = 'tight',dpi= 600)
plt.savefig("CCAsstSIC.obs.18542017.SST2.8p.jpg",format="jpg", bbox_inches = 'tight',dpi= 600)
plt.tight_layout()
plt.show()
fig1, ax= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.NorthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-180, 180], [55, 90], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([ 60])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax.contourf(ds2['longitude'], ds2['latitude'], regSIC2, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA2 SIC ',
lefttitlefontsize=15,
righttitle=f'{var_sic2_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21)
plt.savefig("CCAsstSIC.obs.18542017.SIC8p.222.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("CCAsstSIC.obs.18542017.SIC8p.222.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
ax.coastlines()
plt.tight_layout()
plt.show()
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f'(PC2) 1854 - 2017',
lefttitlefontsize=29,
righttitle=f'r = 0.77',
righttitlefontsize=29,
xlabel="",
ylabel="Stdandard deviation",
labelfontsize=29)
gv.add_major_minor_ticks(ax,
x_minor_per_major=1,
y_minor_per_major=5,
labelsize=23)
# Plot data
ax.plot(ds1.time, PC2sst, color='black', linewidth=3, label='SST')
ax.plot(ds1.time, PC2sic, marker='o', markerfacecolor='red', markersize=2.8, color='red', linewidth=1.75,label='SIC')
#ax.legend(prop={"size":6})
ax.fill_between(ds1.time,PC2sic, where=PC2sic > 0, color='gold')
ax.fill_between(ds1.time,PC2sic, where=PC2sic < 0, color='cyan')
plt.legend(prop={"size":23})
plt.savefig("CCAsstSIC.obs.18542017.PC28p.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("CCAsstSIC.obs.18542017.PC28p.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
plt.tight_layout()
plt.show()
Plot the spatial structures from the third CCA pair (FIG 2 b and d)
#In order to plot the NAO in its positive phase, the PCs and the regression maps are multiplied by -1
PC3sstok=PC3sst*-1
PC3sicok=PC3sic*-1
fig, ax = plt.subplots(figsize=(8, 5),dpi=300, subplot_kw={"projection":ccrs.Robinson(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=16)
gv.set_map_boundary(ax, [-75, 15], [-80, 80], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = False
gl.left_labels = True
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, -45, 0, 45, 180])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-0.4
level_max=0.4
stepsize=0.04
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("vlag", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap = 'seismic'
cf=ax.contourf(ds1['lon'], ds1['lat'], regSST3, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='vertical',
label=r"(°C)",pad=0.025,shrink=0.5,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA3 SST ',
lefttitlefontsize=15,
righttitle=f'{var_sst3_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21 )
plt.savefig("Fig.2b.CCAsstSIC.obs.18542017.SST38p.pdf",format="pdf", bbox_inches = 'tight',dpi= 600)
plt.savefig("Fig.2b.CCAsstSIC.obs.18542017.SST38p.jpg",format="jpg", bbox_inches = 'tight',dpi= 600)
plt.tight_layout()
plt.show()
fig1, ax= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.NorthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=16)
gv.set_map_boundary(ax, [-180, 180], [55, 90], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([ 60])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax.contourf(ds2['longitude'], ds2['latitude'], regSIC3, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA3 SIC ',
lefttitlefontsize=15,
righttitle=f'{var_sic3_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21)
plt.savefig("Fig2f.CCAsstSIC.obs.18542017.SIC38p.pdf",format="pdf", bbox_inches = 'tight',dpi= 600)
plt.savefig("Fig.2f.CCAsstSIC.obs.18542017.SIC38p.jpg",format="jpg", bbox_inches = 'tight',dpi= 600)
ax.coastlines()
plt.tight_layout()
plt.show()
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f'(PC3) 1854 - 2017',
lefttitlefontsize=29,
righttitle=f'r = 0.57',
righttitlefontsize=29,
xlabel="",
ylabel="Stdandard deviation",
labelfontsize=29)
gv.add_major_minor_ticks(ax,
x_minor_per_major=1,
y_minor_per_major=5,
labelsize=23)
# Plot data
ax.plot(ds1.time, PC3sst, color='black', linewidth=3, label='SST')
ax.plot(ds1.time, PC3sic, marker='o', markerfacecolor='red', markersize=3.2, color='red', linewidth=1.75,label='SIC')
ax.fill_between(ds1.time,PC3sic, where=PC3sic > 0, color='gold')
ax.fill_between(ds1.time,PC3sic, where=PC3sic < 0, color='cyan')
plt.legend(prop={"size":23})
plt.savefig("CCAsstSIC.obs.18542017.PC38p.pdf",format="pdf", bbox_inches = 'tight',dpi= 600)
plt.savefig("CCAsstSIC.obs.18542017.PC38p.jpg",format="jpg", bbox_inches = 'tight',dpi= 600)
plt.tight_layout()
plt.show()
In order to obtain the graph below (Figure 2c), one has to download the two proxy-based AMOC reconstructions used. The RAPID index is also used further down in the script.
Sources of the AMOC indices used in this study:
1) RAPID measurements / ocean state estimation at 26.5°N, 2005 - 2019: https://rapid.ac.uk/
2) Reconstruction using sea surface temperature observations, 1871 - 2016:
http://www.pik-potsdam.de/~caesar/AMOC_slowdown/sg_index_hadisst.txt
https://www.nature.com/articles/s41586-018-0006-5
3) Reconstruction based on temperature and salinity, 1900 - 2020:
https://github.com/njfraser/Bernoulli_inverse
https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2021GL093893
4) Monthly/basin-mean Ocean Heat Content (OHC, Atlantic) time series for 0-700 m and 700-2000 m, 1955 - 2019: https://www.science.org/doi/10.1126/sciadv.1601545
Plot the time series from the 1st CCA pair together with two AMOC indices
# Open the two AMOC proxy-based reconstructions
#open the Caesar et al. (2018) AMOC Index reconstruction
caesar=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/AMOC.INDEX.CAESAR.18712016.dat.txt',header=None, delim_whitespace=True)
dscaesar=ds1.sel(time=slice('1871-06-16', '2016-06-16'))
caesar_anom_n = (caesar[1]-caesar[1].mean())/caesar[1].std()
#open Fraser et al. (2021) AMOC Index reconstruction
fieldf = '/isibhv/projects/paleo_work/petruv/Data/ErsstV5_18542021/ATL.anm.anom.sst.Ersstv5.18542021.nc'
dsf = xr.open_dataset(fieldf)
#print(ds1)
dsfraser=dsf.sel(time=slice('1900-06-16', '2019-06-16'))
I1=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/OK4.anm.AMOC.Index.Fraser.csv',header=None,delim_whitespace=True)
I1ok=I1[1]
I1mean=np.mean(I1[1])
I1std=np.std(I1[1])
AMOC_fraser_anom=(I1ok-I1mean)/I1std
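Both reconstructions are standardized with the same pattern; a one-line helper (our naming) would keep it in one place:
def standardize(series):
    """Return a z-scored copy of a pandas Series (zero mean, unit standard deviation)."""
    return (series - series.mean()) / series.std()

# equivalently: caesar_anom_n = standardize(caesar[1]); AMOC_fraser_anom = standardize(I1[1])
# note: the Fraser cell above uses np.std (ddof=0), whereas pandas .std defaults to ddof=1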
#Plot the time series of the 1st CCA pair together with the two reconstructions (FIG 2c)
plt.figure(figsize=(12, 6))
ax = plt.gca()
ax2 = ax.twinx() #twin axis; its limits and ticks are set together with ax below
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f' 1854 - 2017',
lefttitlefontsize=29,
righttitle=f'',
righttitlefontsize=29,
xlabel="",
ylabel="Stdandard deviation",
labelfontsize=23)
ax2.set_ylim([-5,5])
ax.set_ylim([-5,5])
ax.set_xlim([-39500,17500])
ax2.set_xlim([-39500,17500])
gv.add_major_minor_ticks(ax,
x_minor_per_major=1,
y_minor_per_major=1,
labelsize=23)
gv.add_major_minor_ticks(ax2,
x_minor_per_major=1,
y_minor_per_major=1,
labelsize=23)
ax.plot(dscaesar.time, caesar_anom_n, color='blue', linewidth=0.5)
ax.plot(dscaesar.time, caesar_anom_n.rolling(11, center=False).mean(), color='blue', linewidth=4.75,label='AMOC proxy-based reconstruction (Caesar et al. 2018)')
ax.plot(dsfraser.time, AMOC_fraser_anom, color='skyblue', linewidth=0.5)
ax.plot(dsfraser.time, AMOC_fraser_anom.rolling(11, center=False).mean(), color='skyblue', linewidth=4.75,label='AMOC proxy-based reconstruction (Fraser and Cunningham 2021)')
ax.plot(ds1.time, PC1sstok, color='black', linewidth=0.5,)
ax.plot(ds1.time, PC1sstok.rolling(time=11, center=False).mean(), color='black',linestyle='dashed', linewidth=5.8, label='PC1 SST')
ax.plot(ds1.time, PC1sicok,color='red', linewidth=0.5,)
ax.plot(ds1.time, PC1sicok.rolling(time=11, center=False).mean(), marker='o', markerfacecolor='purple', markersize=4.5, color='red', linewidth=5.8,label='PC1 SIC')
#ax.legend(prop={"size":6})
ax.legend(loc=2,prop={"size":15})
#ax.fill_between(ds1.time,PC1sicok, where=PC1sicok > 0, color='gold')
#ax.fill_between(ds1.time,PC1sicok, where=PC1sicok < 0, color='cyan')
#plt.legend(loc=4,prop={"size":15})
plt.savefig("Fig2c.PC1.CCAsstSIC.obs.18542017.PC18p.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("Fig2c.PC1.CCAsstSIC.obs.18542017.PC18p.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
plt.tight_layout()
plt.show()
Plot the time series from the 3rd CCA pair together with the NAO index
#Plot the time series of the 3rd CCA pair together with the NAO Index (FIG 2d)
NAO_Index=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/NAO.Index.1854.2017.txt',header=None, delim_whitespace=True)
dsNAO=dsf.sel(time=slice('1854-06-16', '2017-06-16'))
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f' 1854 - 2017',
lefttitlefontsize=29,
righttitle=f'',
righttitlefontsize=29,
xlabel="",
ylabel="Stdandard deviation",
labelfontsize=23)
gv.add_major_minor_ticks(ax,
x_minor_per_major=1,
y_minor_per_major=1,
labelsize=23)
ax.set_ylim([-5,5])
ax.set_xlim([-39500,17500])
# Plot data
ax.plot(ds1.time, PC3sst, color='black', linewidth=3.7, linestyle='dashed',label='PC3 SST')
ax.plot(ds1.time, PC3sic, color='red', marker='o', markerfacecolor='purple', markersize=4.5,linewidth=3.7,label='PC3 SIC')
ax.plot(dsNAO.time, -NAO_Index[1], color='blue', linewidth=3.7, label='NAO Index ')
#ax.fill_between(ds1.time,PC3sicok, where=PC1sicok > 0, color='gold')
#ax.fill_between(ds1.time,PC3sicok, where=PC1sicok < 0, color='cyan')
ax.legend(loc=2,prop={"size":15})
#plt.legend(prop={"size":23})
plt.savefig("Fig2d.PC3.CCAsstSIC.obs.18542017.PC38p.pdf",format="pdf", bbox_inches = 'tight',dpi= 600)
plt.savefig("Fig2d.PC3.CCAsstSIC.obs.18542017.PC38p.jpg",format="jpg", bbox_inches = 'tight',dpi= 600)
plt.tight_layout()
plt.show()
Correlate the CCA PC1 with the EOF PC1 of Arctic SIC (obtained without detrending the data) in order to calculate the variance that the AMOC-linked pair explains in the dominant mode of Arctic SIC
#Calculate the correlation
cor=stats.pearsonr(PC1_SIC_EOF_NO_DET[0],PC1sicok)[0]
print(cor)
# Test the significance accounting for the autocorrelation of the two series, using Neff = N (1 - r1*r2) / (1 + r1*r2), where N is the number of values in each time series and r1, r2 are the lag-one autocorrelations of the two records
auto_corA = xr.corr(PC1sicok, PC1sicok.shift(time=-1), dim="time")
auto_corB = xr.corr(PC1_SIC_EOF_NO_DET[0], PC1_SIC_EOF_NO_DET[0].shift(time=-1), dim="time")
n=163
r1=auto_corA
r2=auto_corB
a=r1*r2
nreal=n*((1-a)/(1+a))
print(nreal)
dist =stats.beta(nreal/2 - 1, nreal/2 - 1, loc=-1, scale=2)
p= 2*dist.cdf(-abs(cor))
print(p)
if p < 0.05:
    print("Significant 95%")
else:
    print("Not significant")
r_squared=cor*cor
print(r_squared)
Output: cor = 0.628, Neff ≈ 24.78, p = 0.00083 (Significant 95%), r² = 0.394
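The same effective-sample-size test reappears further down the script; a sketch factoring it into two helpers that mirror the cell above (the function names are ours):
from scipy import stats
import xarray as xr

def effective_sample_size(x, y, n):
    """Neff = N (1 - r1*r2) / (1 + r1*r2), with r1, r2 the lag-1 autocorrelations of x and y."""
    r1 = float(xr.corr(x, x.shift(time=-1), dim="time"))
    r2 = float(xr.corr(y, y.shift(time=-1), dim="time"))
    a = r1 * r2
    return n * (1 - a) / (1 + a)

def correlation_p_value(r, neff):
    """Two-sided p-value of a correlation r under neff degrees of freedom (beta distribution)."""
    dist = stats.beta(neff / 2 - 1, neff / 2 - 1, loc=-1, scale=2)
    return 2 * dist.cdf(-abs(r))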
Source (location) of the data : https://psl.noaa.gov/data/gridded/data.20thC_ReanV3.html
Spatial Resolution: 1° × 1° horizontal grid, Time Frame: January 1836 - 2015
As for the observations, the data are deseasonalized and detrended in an initial step (in the same manner as the SIC data), and the 1854 - 2015 period is selected.
#Open PC1 from CCA
#open index in xarray format
idx=pd.read_csv('PC1.CCA.OBS.1854.2015.txt',header=None,delim_whitespace=True)
idx_xr=idx.to_xarray().to_array()
#open SAT field
f1 = 'anm.det.anom.SAT.NCEP.1854.2015.nc'
ds = xr.open_dataset(f1)
#print(ds)
sat=ds.air
lon=ds.lon
lat=ds.lat
#print(sat)
#Open TPR field
fnc11 = 'anm.det.anom.prate.ncep20th.1854.2015.nc'
ds1 = xr.open_dataset(fnc11)
#print(ds1)
#convert to mm/day
ds1.prate.data = ds1.prate.data * 86400
ds1.prate.attrs['units'] = 'mm/day'
tpr=ds1.prate
lon=ds1.lon
lat=ds1.lat
#print(tpr)
#Obtain the effective number of degrees of freedom for temperature (SAT)
n=162
lag1_autocorrelation_SAT=0.81
lag1_autocorrelation_PC1CCA=0.80
a=0.81*0.8
nreal=n*((1-a)/(1+a))
#print(nreal)
#Obtain the effective number of degrees of freedom for precipitation (TPR)
n=162
lag1_autocorrelation_TPR=0.13
r2=0.80
a=0.16*0.80
nreal=n*((1-a)/(1+a))
print(nreal)
125.23404255319147
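The hard-coded 35 and 126 in the significance tests below come from these Neff values; a quick check of the arithmetic:
n = 162
neff_sat = n * (1 - 0.81 * 0.80) / (1 + 0.81 * 0.80)   # ≈ 34.6, rounded to the 35 used for SAT
neff_tpr = n * (1 - 0.16 * 0.80) / (1 + 0.16 * 0.80)   # ≈ 125.2, rounded to the 126 used for TPR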
Obtain and plot SAT correlation map (Fig 3 a and b)
#Obtain the correlation map for SAT
corr=np.zeros((np.shape(sat)[1],np.shape(sat)[2]))
p=np.zeros((np.shape(sat)[1],np.shape(sat)[2]))
sign=np.zeros((np.shape(sat)[1],np.shape(sat)[2]))
sign_threshold=0.05
for y in range(np.shape(sat)[1]):
    print("computing correlation for latitude "+str(y)+" ...")
    for x in range(np.shape(sat)[2]):
        corr[y,x]=stats.pearsonr(idx_xr[1],sat[:,y,x])[0]
        dist = stats.beta(35/2 - 1, 35/2 - 1, loc=-1, scale=2) # 35 ~ Neff for SAT computed above
        p[y,x] = 2*dist.cdf(-abs(corr[y,x]))
        if p[y,x] < sign_threshold:
            sign[y,x] = 1
        else:
            sign[y,x] = 0
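The nested loop can be vectorized with xr.corr, assuming the index can be aligned on sat's time axis (idx_da is our name, not from the original):
# vectorized alternative producing the same corr / p / sign fields
idx_da = xr.DataArray(idx_xr[1].values, coords={"time": sat["time"]}, dims="time")
corr_v = xr.corr(idx_da, sat, dim="time")
dist = stats.beta(35/2 - 1, 35/2 - 1, loc=-1, scale=2)
p_v = 2 * dist.cdf(-np.abs(corr_v))
sign_v = (p_v < sign_threshold).astype(int)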
#Plot the correlation map for SAT
fig, ax = plt.subplots(1, 1, dpi=300, subplot_kw={"projection": ccrs.Robinson(central_longitude=-60)})
ax.set_global()
level_min=-1
level_max=1
stepsize=0.01
levels=np.arange(level_min-stepsize/2,level_max+stepsize/2+stepsize,stepsize)
cmap=colors.LinearSegmentedColormap.from_list('mycmap',
['indigo','blue','cornflowerblue','skyblue','aquamarine', 'white', 'white','yellow', 'orange', 'orangered', 'red','mediumvioletred'],
(level_max-level_min)/stepsize+1) #+1 adds one level for the center
cf=ax.contourf(lon, lat, corr, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='horizontal',
label=r"",pad=0.05,shrink=1.25,aspect=40)
ax.coastlines()
mask = np.ma.masked_not_equal(sign, 1)
cfh=ax.contourf(lon,lat,mask, transform=ccrs.PlateCarree(),colors='none',extend='both', hatches=['...',None])
plt.savefig('2mT.cor.MAP.PC1.cca.obs.1854.2015.jpg')
plt.savefig('2mT.cor.MAP.PC1.cca.obs.1854.2015.eps')
Correlation map of TPR (S4 Fig)
#Obtain the correlation map for precipitation
corr1=np.zeros((np.shape(tpr)[1],np.shape(tpr)[2]))
p1=np.zeros((np.shape(tpr)[1],np.shape(tpr)[2]))
sign1=np.zeros((np.shape(tpr)[1],np.shape(tpr)[2]))
sign1_threshold=0.05
for y in range(np.shape(tpr)[1]):
    print("computing correlation for latitude "+str(y)+" ...")
    for x in range(np.shape(tpr)[2]):
        #correlation over time between the index (column 1; column 0 is the year) and tpr at (y, x)
        corr1[y,x]=stats.pearsonr(idx_xr[1],tpr[:,y,x])[0]
        dist = stats.beta(126/2 - 1, 126/2 - 1, loc=-1, scale=2) # 126 ~ Neff for TPR computed above
        p1[y,x] = 2*dist.cdf(-abs(corr1[y,x]))
        if p1[y,x] < sign1_threshold:
            sign1[y,x] = 1
        else:
            sign1[y,x] = 0
#plot the TPR correlation map
fig1, ax = plt.subplots(1, 1, dpi=300, subplot_kw={"projection": ccrs.Robinson(central_longitude=-60)})
ax.coastlines()
ax.set_global()
#ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=False,
#linewidth=0.5, color='gray', alpha=0.5, linestyle='-')
level_min=-1
level_max=1
stepsize=0.01
levels=np.arange(level_min-stepsize/2,level_max+stepsize/2+stepsize,stepsize)
#cmap='seismic_r'
cmap=colors.LinearSegmentedColormap.from_list('mycmap',
['saddlebrown','peru','goldenrod','darkkhaki','khaki', 'white', 'white','cornflowerblue', 'royalblue', 'mediumblue','darkblue','navy'],
(level_max-level_min)/stepsize+1) #+1 adds one level for the center
cf=ax.contourf(lon, lat, corr1, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='horizontal',
label=r"",pad=0.05,shrink=1.25,aspect=40)
#plt.title("Corr.map TPR (NCEP20thCR) CCA.1stPairPC1 1854 - 2015")
mask = np.ma.masked_not_equal(sign1, 1)
cfh=ax.contourf(lon,lat,mask, transform=ccrs.PlateCarree(),colors='none',extend='both', hatches=['....',None])
ax.coastlines(linewidth=0.53,color='dimgray')
#txt="In hatched regions the r value is NOT significant at the 95 % confidence interval based on t test."
#plt.figtext(0.5, 0.01, txt, wrap=True, horizontalalignment='center', fontsize=8)
#plt.show()
plt.savefig('Pre.cor.MAP.PC1.cca.obs.1854.2015.jpg')
plt.savefig('Pre.cor.MAP.PC1.cca.obs.1854.2015.eps')
Source (location) of the data :
https://www.ecmwf.int/en/forecasts/datasets/reanalysis-datasets/era5
Spatial Resolution: 0.75° × 0.75° horizontal grid, Time Frame: January 1959 - 2021
# Open the detrended ERSSTV5 Atlantic annual anomalies (used in Fig2) and select 1959 - 2021 period
from cdo import *
cdo = Cdo()
cdo.selyear("1959/2021",input="g.ATL.anm.anom.SST.Ersst.1854.2021.mon.nc", output="g.ATL.anm.anom.SST.Ersst.19592021.nc")
'g.ATL.anm.anom.SST.Ersst.19592021.nc'
#Perform EOF on the detrended ERSSTV5 Atlantic annual anomalies over the 1959 - 2021 period
fnc = 'g.ATL.anm.anom.SST.Ersst.19592021.nc'
dsi1 = xr.open_dataset(fnc)
#print(dsi1)
#specify the period and area for the EOF
ystr = 1959
yend = 2021
latS = -80.
latN = 80.
lonW = -84.
lonE = 14.
neof = 10
anm1=dsi1.sst
# put the data in the format required by geocat's eofunc routines; this layout also makes it easy to change the selected region
anmD = anm1.sortby("lat", ascending=True)
clat = anmD['lat'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))
wanm = anmD * clat
wanm.attrs = anmD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['lon']
if ((lonW < 0) or (lonE < 0)) and (lon.values.min() > -1):
    wanm = wanm.assign_coords(lon=((lon + 180) % 360 - 180))
    wanm = wanm.sortby("lon")
    print(' change longitude ')
#print(wanm)
xw = wanm.sel(lat=slice(latS, latN), lon=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'lat', 'lon')
#print(xw_anm)
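The modulo expression wraps longitudes from the 0 - 360 convention onto -180 to 180; two spot checks:
assert (350 + 180) % 360 - 180 == -10   # 350°E becomes 10°W
assert (45 + 180) % 360 - 180 == 45     # longitudes east of Greenwich are unchanged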
# -- Perform EOF analysis
eofs1 = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs1 = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
sdev1=pcs1.std(dim='time')
pcs_norm1 = pcs1 / pcs1.std(dim='time')
pcs_norm1['time']=anmD['time']
pcs1['time']=anmD['time']
pcs1.attrs['varianceFraction'] = eofs1.attrs['varianceFraction']
pcs_norm1.attrs['varianceFraction'] = eofs1.attrs['varianceFraction']
#print(pcs)
evec_sst1950 = xr.DataArray(data=eofs1, dims=('eof1','lat','lon'),
coords = {'eof1': np.arange(0,neof), 'lat': xw['lat'], 'lon': xw['lon']} )
#print(evec_sst1950)
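eofunc_eofs (with meta=True) attaches the explained variance of each mode as the varianceFraction attribute, copied onto the PCs above; a quick way to inspect it (the exact units, fraction or percent, depend on the geocat-comp version):
for k, frac in enumerate(np.asarray(eofs1.attrs['varianceFraction'])[:neof]):
    print(f"EOF{k + 1}: variance fraction = {float(frac):.3f}")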
#export the PCs in a format usable for CCA
#trial and error until the order is as expected
dfi1=pcs_norm1.to_dataframe().unstack().transpose()
dfi1.to_csv('RezEOF.PC.sst.obs.19592021.10PCs.normalized.txt')
#reconstruct the initial SST field based on the first 10 EOFs
map_file_export="RezEOF.sst.EOFS_x_PCs.1950.export_to_netcdf.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1xPC1 " + fnc + " -chname,sst,EOF2xPC2 " + fnc + " -chname,sst,EOF3xPC3 "
+ fnc + " -chname,sst,EOF4xPC4 " + fnc + " -chname,sst,EOF5xPC5 " + fnc + " -chname,sst,EOF6xPC6 "
+ fnc + " -chname,sst,EOF7xPC7 " + fnc + " -chname,sst,EOF8xPC8 " + fnc + " -chname,sst,EOF9xPC9 " + fnc
+ " -chname,sst,EOF10xPC10 " + fnc + " " + map_file_export)
map_file_export="TEST1.REZeof.EOFmaps.sst.obs.1950.2017.nc"
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1 -seltimestep,1 " + fnc + " -chname,sst,EOF2 -seltimestep,1 "
+ fnc + " -chname,sst,EOF3 -seltimestep,1 " + fnc + " -chname,sst,EOF4 -seltimestep,1 "
+ fnc + " -chname,sst,EOF5 -seltimestep,1 " + fnc + " -chname,sst,EOF6 -seltimestep,1 "
+ fnc + " -chname,sst,EOF7 -seltimestep,1 " + fnc + " -chname,sst,EOF8 -seltimestep,1 "
+ fnc + " -chname,sst,EOF9 -seltimestep,1 " + fnc + " -chname,sst,EOF10 -seltimestep,1 "
+ fnc + " " + map_file_export)
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
map_data_normalized=ncfile.variables['EOF1'][:]
evec_export=evec_sst1950.reindex(lat=evec_sst1950.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if I do not reindex them here
ncfile.variables['EOF1'][:]=evec_export[0,:,:] #write eof1 to dedicated variable
ncfile.variables['EOF2'][:]=evec_export[1,:,:] #write eof2 to dedicated variable
ncfile.variables['EOF3'][:]=evec_export[2,:,:] #write eof3 to dedicated variable
ncfile.variables['EOF4'][:]=evec_export[3,:,:] #write eof4 to dedicated variable
ncfile.variables['EOF5'][:]=evec_export[4,:,:] #write eof5 to dedicated variable
ncfile.variables['EOF6'][:]=evec_export[5,:,:] #write eof6 to dedicated variable
ncfile.variables['EOF7'][:]=evec_export[6,:,:] #write eof7 to dedicated variable
ncfile.variables['EOF8'][:]=evec_export[7,:,:] #write eof8 to dedicated variable
ncfile.variables['EOF9'][:]=evec_export[8,:,:] #write eof9 to dedicated variable
ncfile.variables['EOF10'][:]=evec_export[9,:,:] #write eof10 to dedicated variable
ncfile.close()
# Multiply each EOF by its associated PC and write the products to a netcdf file
eof_pcs1=[]
for i in range(10):
    #multiply each EOF pattern by its PC time series
    eof_pcs1.append(pcs1[i,:]*evec_export[i,:,:])
map_file_export="REzEOF.sst.EOFS_x_PCs.1950.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1xPC1 " + fnc + " -chname,sst,EOF2xPC2 " + fnc + " -chname,sst,EOF3xPC3 " + fnc + " -chname,sst,EOF4xPC4 " + fnc + " -chname,sst,EOF5xPC5 " + fnc + " -chname,sst,EOF6xPC6 " + fnc + " -chname,sst,EOF7xPC7 "+ fnc + " -chname,sst,EOF8xPC8 "+ fnc + " -chname,sst,EOF9xPC9 "+ fnc + " -chname,sst,EOF10xPC10 "+ fnc + " -chname,sst,EOF11xPC11 "+ fnc + " -chname,sst,EOF12xPC12 "+ fnc + " " + map_file_export)
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
for i in range(10):
    varname="EOF"+str(i+1)+"xPC"+str(i+1)
    eof_pcs_export=eof_pcs1[i] #no latitude reindexing is needed here, unlike for the EOF maps above
    #print(np.shape(eof_pcs_export))
    ncfile.variables[varname][:]=eof_pcs_export #write each EOFxPC product to its dedicated variable
ncfile.close()
#print(np.shape(eof_pcs))
#print(type(eof_pcs[0]))
#reconstruct the initial field as the sum of the first 10 EOF x PC products
eof_pcs_sum1 = sum(eof_pcs1)
map_file_export="g_anmanom_sst_ersst_rec10eof_1959_2021.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo chname,sst,sst " + fnc + " " + map_file_export)
#write sum of products of eofs and pcs to netcdf file
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
eof_pcs_sum_export=eof_pcs_sum1 #.reindex(lat=eof_pcs_sum.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
ncfile.variables["sst"][:]=eof_pcs_sum_export #write eof to dedicated variable
ncfile.close()
#remove the files generated above that are not needed for the subsequent analyses
os.remove('RezEOF.sst.EOFS_x_PCs.1950.export_to_netcdf.nc')
os.remove('REzEOF.sst.EOFS_x_PCs.1950.nc')
os.remove('TEST1.REZeof.EOFmaps.sst.obs.1950.2017.nc')
(cdo chname/merge run logs omitted; the "rm: cannot remove ... No such file or directory" messages are harmless and only mean the target files did not exist yet.)
Source (location) of the data: https://www.ecmwf.int/en/forecasts/datasets/reanalysis-datasets/era5 Spatial Resolution: 0.75° × 0.75° horizontal grid, Time Frame: January 1959 - 2021
***In order to access the data one has to use or create an ECMWF account
#CDO 2.0.5 is used to remove the annual cycle (i.e., to compute anomalies) and to calculate annual means
from cdo import *
cdo = Cdo()
#calculate anomalies relative to the 1980 - 2010 period
cdo.selyear("1980/2010",input="/isibhv/projects/paleo_work/petruv/Data/ERA5.SIC.19592021/ERA5.sic1959.2021.nc", output="ERA5.sic19802010.nc")
cdo.ymonmean(input="ERA5.sic19802010.nc", output="annual_cycle_SIC.19802010.nc")
cdo.sub(input="/isibhv/projects/paleo_work/petruv/Data/ERA5.SIC.19592021/ERA5.sic1959.2021.nc annual_cycle_SIC.19802010.nc", output="anom.ERA5.sic1959.2021.nc", options='-b F64', returnCdf=True)
#calculate annual means
cdo.yearmonmean(input="anom.ERA5.sic1959.2021.nc", output="anm.anom.ERA5.sic1959.2021.nc")
#detrend the Seaice data
cdo.detrend(input="anm.anom.ERA5.sic1959.2021.nc", output="det.anm.anom.ERA5.sic1959.2021.nc")
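# What cdo.detrend does, in miniature (a sketch, not part of the pipeline): remove the
# least-squares linear trend from each grid-point time series. A 1-D illustration with scipy;
# the t_demo/series_demo names are ours, purely for illustration:
from scipy.signal import detrend
t_demo = np.arange(10, dtype=float)
series_demo = 0.3 * t_demo + np.sin(t_demo)   # linear trend + signal
print(detrend(series_demo, type='linear'))    # trend removed, signal retained (approximately)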
#remove the files created that are no longer needed for the current analyses
os.remove('ERA5.sic19802010.nc')
os.remove('annual_cycle_SIC.19802010.nc')
os.remove('anom.ERA5.sic1959.2021.nc')
os.remove('anm.anom.ERA5.sic1959.2021.nc')
fnc = '/isibhv/projects/paleo_work/petruv/Data/ERA5.SIC.19592021/det.anm.anom.ERA5.sic1959.2021.nc'
ds1_sic = xr.open_dataset(fnc)
#print(ds1_sic)
#have a short look at the data
#import hvplot.xarray
#sic=ds1_sic.seaice_conc
#sic.isel(time=0).hvplot()
# ----- Period and area setting ------
ystr = 1959
yend = 2021
latS = -90.
latN = 90.
lonW = 0.
lonE = 360
neof = 12
anm=ds1_sic.siconc
# -- EOF --
anmnD = anm.sortby("latitude", ascending=True)
clat = anmnD['latitude'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))  # sqrt(cos(lat)) area weighting for the EOF
wanm = anmnD * clat
wanm.attrs = anmnD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['longitude']
if ( ((lonW < 0) or (lonE < 0 )) and (lon.values.min() > -1) ):
    wanm=wanm.assign_coords(lon=( (lon + 180) % 360 - 180) )
    wanm = wanm.sortby("lon")
    print(' change longitude ')
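# Example of the wrap above: longitude 200°E maps to ((200 + 180) % 360) - 180 = -160.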
#print(wanm)
xw = wanm.sel(latitude=slice(latS, latN), longitude=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'latitude', 'longitude')
#print(xw_anm)
eofs_sic = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs_sic = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
pcs_norm_sic = pcs_sic / pcs_sic.std(dim='time')
pcs_sic['time']=anmnD['time']
pcs_norm_sic['time']=anmnD['time']
pcs_sic.attrs['varianceFraction'] = eofs_sic.attrs['varianceFraction']
pcs_norm_sic.attrs['varianceFraction'] = eofs_sic.attrs['varianceFraction']
#print(pcs_norm_sic)
evec_sic = xr.DataArray(data=eofs_sic, dims=('eof','latitude','longitude'),
coords = {'eof': np.arange(0,neof), 'latitude': xw['latitude'], 'longitude': xw['longitude']} )
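# Quick sanity check (sketch): list the variance fraction that eofunc_eofs attaches to each
# mode (assuming 'varianceFraction' holds one value per EOF, as geocat provides).
vf = np.asarray(eofs_sic.attrs['varianceFraction'])
for k in range(neof):
    print(f"EOF{k + 1}: {vf[k] * 100:.1f} % of variance")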
#print(evec_sic)
#reconstruct the initial SIC field based on the first 10 EOFs
map_file_export="REZeof.EOFmaps.sic.obs.1959.2021.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
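# (A tidier alternative to the quick-and-dirty rm below, sketched here: test for the file first.)
if os.path.exists(map_file_export):
    os.remove(map_file_export)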
os.system("rm " + map_file_export + "; cdo merge -chname,siconc,EOF1xPC1 " + fnc + " -chname,siconc,EOF2xPC2 " + fnc + " -chname,siconc,EOF3xPC3 "
+ fnc + " -chname,siconc,EOF4xPC4 " + fnc + " -chname,siconc,EOF5xPC5 " + fnc + " -chname,siconc,EOF6xPC6 " + fnc + " -chname,siconc,EOF7xPC7 "
+ fnc + " -chname,siconc,EOF8xPC8 " + fnc + " -chname,siconc,EOF9xPC9 " + fnc + " -chname,siconc,EOF10xPC10 "
+ fnc +" " + map_file_export)
map_file_export="TEST1.REZeof.EOFmaps.sic.obs.1959.2021.nc"
os.system("rm " + map_file_export + "; cdo merge -chname,siconc,EOF1 -seltimestep,1 " + fnc + " -chname,siconc,EOF2 -seltimestep,1 "
+ fnc + " -chname,siconc,EOF3 -seltimestep,1 " + fnc + " -chname,siconc,EOF4 -seltimestep,1 "
+ fnc + " -chname,siconc,EOF5 -seltimestep,1 " + fnc + " -chname,siconc,EOF6 -seltimestep,1 "
+ fnc + " -chname,siconc,EOF7 -seltimestep,1 " + fnc + " -chname,siconc,EOF8 -seltimestep,1 " + fnc + " -chname,siconc,EOF9 -seltimestep,1 "
+ fnc + " -chname,siconc,EOF10 -seltimestep,1 " + fnc + " " + map_file_export)
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
map_data_normalized=ncfile.variables['EOF1'][:]
#print(np.shape(map_data_normalized))
evec_export=evec_sic.reindex(latitude=evec_sic.latitude[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if I do not reindex them here
#sdev=pcs.std(dim="time")
#vec_export=[]
#evec_export = xr.concat(evec_export, range(8)).rename({"concat_dim": "EOF"})
#ds_concat
#print(np.shape(evec))
#print(np.shape(evec_export))
ncfile.variables['EOF1'][:]=evec_export[0,:,:] #write eof1 to dedicated variable
ncfile.variables['EOF2'][:]=evec_export[1,:,:] #write eof2 to dedicated variable
ncfile.variables['EOF3'][:]=evec_export[2,:,:] #write eof3 to dedicated variable
ncfile.variables['EOF4'][:]=evec_export[3,:,:] #write eof4 to dedicated variable
ncfile.variables['EOF5'][:]=evec_export[4,:,:] #write eof5 to dedicated variable
ncfile.variables['EOF6'][:]=evec_export[5,:,:] #write eof6 to dedicated variable
ncfile.variables['EOF7'][:]=evec_export[6,:,:] #write eof7 to dedicated variable
ncfile.variables['EOF8'][:]=evec_export[7,:,:] #write eof8 to dedicated variable
ncfile.variables['EOF9'][:]=evec_export[8,:,:] #write eof9 to dedicated variable
ncfile.variables['EOF10'][:]=evec_export[9,:,:] #write eof10 to dedicated variable
ncfile.close()
# Multiply each EOF with its associated PC and write the products to a netcdf file
eof_pcs2=[]
for i in range(10):
    #print(i)
    eof_pcs2.append(pcs_sic[i,:]*evec_export[i,:,:])
map_file_export="REzEOF.sic.EOFS_x_PCs.1959.2021.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,siconc,EOF1xPC1 " + fnc + " -chname,siconc,EOF2xPC2 " + fnc + " -chname,siconc,EOF3xPC3 "
+ fnc + " -chname,siconc,EOF4xPC4 " + fnc + " -chname,siconc,EOF5xPC5 " + fnc + " -chname,siconc,EOF6xPC6 " + fnc + " -chname,siconc,EOF7xPC7 "
+ fnc + " -chname,siconc,EOF8xPC8 "+ fnc + " -chname,siconc,EOF9xPC9 "+ fnc + " -chname,siconc,EOF10xPC10 "
+ fnc + " " + map_file_export)
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
for i in range(10):
    varname="EOF"+str(i+1)+"xPC"+str(i+1)
    eof_pcs_export=eof_pcs2[i]#.reindex(lat=eof_pcs[i].lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
    #print(np.shape(eof_pcs_export))
    ncfile.variables[varname][:]=eof_pcs_export #write eof to dedicated variable
ncfile.close()
#print(np.shape(eof_pcs))
#print(type(eof_pcs[0]))
#reconstruct the initial field based on the first 10 EOFs
eof_pcs_sum2=eof_pcs2[0]+eof_pcs2[1]+eof_pcs2[2]+eof_pcs2[3]+eof_pcs2[4]+eof_pcs2[5]+eof_pcs2[6]+eof_pcs2[7]+eof_pcs2[8]+eof_pcs2[9]
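# Optional check (a sketch, not part of the pipeline): the RMS of the residual between the
# 10-mode sum and the latitude-weighted anomaly field gives a one-number measure of how
# much the truncation discards.
resid = xw_anm - eof_pcs_sum2
print(float(np.sqrt((resid ** 2).mean())))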
map_file_export="det.anm.SIC.ERA5_rec10eof_1959_2021.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo chname,siconc,sic " + fnc + " " + map_file_export)
#write sum of products of eofs and pcs to netcdf file
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
eof_pcs_sum_export=eof_pcs_sum2 #.reindex(lat=eof_pcs_sum.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
ncfile.variables["sic"][:]=eof_pcs_sum_export #write eof to dedicated variable
ncfile.close()
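# Alternative export (sketch): xarray can write the reconstruction directly, without the cdo
# template file; the output name here is illustrative only.
eof_pcs_sum2.rename('sic').to_dataset().to_netcdf('xr.det.SIC.rec10eof.1959.2021.nc')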
#remove the files created that are no longer needed for the current analyses
os.remove('REZeof.EOFmaps.sic.obs.1959.2021.nc')
os.remove('TEST1.REZeof.EOFmaps.sic.obs.1959.2021.nc')
os.remove('REzEOF.sic.EOFS_x_PCs.1959.2021.nc')
#export the PCs in a format usable for CCA
#(the unstack/transpose order below was found by trial and error)
df3=pcs_norm_sic.to_dataframe().unstack().transpose()
df3.to_csv('RezEOF.sic.ERA5R.19592021.10PCs.normalized.VF.txt')
#df3
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
import cartopy.crs as ccrs
#Perform CCA
# open the two datasets
field1 = 'g_anmanom_sst_ersst_rec10eof_1959_2021.nc'
ds1 = xr.open_dataset(field1)
#print(ds1)
field2 = 'det.anm.SIC.ERA5_rec10eof_1959_2021.nc'
ds2= xr.open_dataset(field2)
#print(ds2)
PCsV1=pd.read_csv('RezEOF.PC.sst.obs.19592021.10PCs.normalized.txt', sep=',')
PCsV1_xr=PCsV1.to_xarray().to_array()
PCsSST=PCsV1[['0','1','2','3','4','5','6','7','8','9']]
PCsV2=pd.read_csv('RezEOF.sic.ERA5R.19592021.10PCs.normalized.VF.txt', sep=',')
PCsV2_xr=PCsV2.to_xarray().to_array()
PCsSIC=PCsV2[['0','1','2','3','4','5','6','7','8','9']]
#X2
#Normalize the PCs (if necessary) and perform CCA between the 10 PCs associated with each field
X1_mc = (PCsSST-PCsSST.mean())/(PCsSST.std())
#X1_mc.head()
X2_mc = (PCsSIC-PCsSIC.mean())/(PCsSIC.std())
#X2_mc.head()
ca = CCA(n_components=10)
ca.fit(X1_mc, X2_mc)
X_c, Y_c = ca.transform(X1_mc, X2_mc)
#export the pairs of maximally correlated PCs obtained from CCA
cc_res = pd.DataFrame({"CCX_1":X_c[:, 0],
"CCY_1":Y_c[:, 0],
"CCX_2":X_c[:, 1],
"CCY_2":Y_c[:, 1],
"CCX_3":X_c[:, 2],
"CCY_3":Y_c[:, 2],
"CCX_4":X_c[:, 3],
"CCY_4":Y_c[:, 3],
"CCX_5":X_c[:, 4],
"CCY_5":Y_c[:, 4],
"CCX_6":X_c[:, 5],
"CCY_6":Y_c[:, 5],
#"Species":df.species.tolist(),
})
#cc_res.head()
#Obtain the correlation coefficient between the time series of each canonical pair
CCAresultPccorr=[]
for p in range(np.shape(X_c)[1]):
    r = np.corrcoef(X_c[:, p], Y_c[:, p])[0, 1]
    CCAresultPccorr.append(r)
    print(np.round(r, 4))
0.9068 0.7935 0.7497 0.6656 0.6043 0.4697 0.3982 0.252 0.1638 0.0776
#Obtain the spatial structures and the percentage of variance explained
#select the individual pcs
rezcca=cc_res.to_xarray()
PC1sst=rezcca['CCX_1']
PC1sic=rezcca['CCY_1']
PC2sst=rezcca['CCX_2']
PC2sic=rezcca['CCY_2']
PC3sst=rezcca['CCX_3']
PC3sic=rezcca['CCY_3']
PC4sst=rezcca['CCX_4']
PC4sic=rezcca['CCY_4']
#rename the index dimension to time and attach the proper time coordinates to the PCs
PC1sst= PC1sst.rename({"index": "time"})
PC2sst= PC2sst.rename({"index": "time"})
PC3sst= PC3sst.rename({"index": "time"})
PC4sst= PC4sst.rename({"index": "time"})
PC1sic= PC1sic.rename({"index": "time"})
PC2sic= PC2sic.rename({"index": "time"})
PC3sic= PC3sic.rename({"index": "time"})
PC4sic= PC4sic.rename({"index": "time"})
PC1sst['time'] = ds1.time
PC2sst['time'] = ds1.time
PC3sst['time'] = ds1.time
PC4sst['time'] = ds1.time
PC1sic['time'] = ds2.time
PC2sic['time'] = ds2.time
PC3sic['time'] = ds2.time
PC4sic['time'] = ds2.time
#perform regression analysis in order to obtain the SST/SIC spatial structures
regSST1 = xr.cov(PC1sst.load(), ds1.sst.load(), dim="time")/PC1sst.var(dim='time',skipna=True).values
regSST2 = xr.cov(PC2sst.load(), ds1.sst.load(), dim="time")/PC2sst.var(dim='time',skipna=True).values
regSST3 = xr.cov(PC3sst.load(), ds1.sst.load(), dim="time")/PC3sst.var(dim='time',skipna=True).values
regSST4 = xr.cov(PC4sst.load(), ds1.sst.load(), dim="time")/PC4sst.var(dim='time',skipna=True).values
regSIC1 = xr.cov(PC1sic.load(), ds2.sic.load(), dim="time")/PC1sic.var(dim='time',skipna=True).values
regSIC2 = xr.cov(PC2sic.load(), ds2.sic.load(), dim="time")/PC2sic.var(dim='time',skipna=True).values
regSIC3 = xr.cov(PC3sic.load(), ds2.sic.load(), dim="time")/PC3sic.var(dim='time',skipna=True).values
regSIC4 = xr.cov(PC4sic.load(), ds2.sic.load(), dim="time")/PC4sic.var(dim='time',skipna=True).values
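# The eight regressions above all apply the same OLS slope formula, cov(pc, field)/var(pc).
# A small helper (sketch; the name regress_on is ours) makes that explicit:
def regress_on(pc, field):
    # slope of 'field' regressed on 'pc' at every grid point
    return xr.cov(pc, field, dim="time") / pc.var(dim="time", skipna=True).values
# e.g. regress_on(PC1sst.load(), ds1.sst.load()) reproduces regSST1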
#compute correlations in order to obtain the percentage of variance explained, as the spatial average of r squared
cor_SST1 = xr.corr(PC1sst.load(), ds1.sst.load(), dim="time")
r1_squared = cor_SST1*cor_SST1
var_sst1=(r1_squared.mean()*100)
var_sst1_round=var_sst1.round(decimals=1)
print(var_sst1_round)
cor_SST2 = xr.corr(PC2sst.load(), ds1.sst.load(), dim="time")
r2_squared = cor_SST2*cor_SST2
var_sst2=r2_squared.mean()*100
var_sst2_round=var_sst2.round(decimals=1)
print(var_sst2_round)
cor_SST3 = xr.corr(PC3sst.load(), ds1.sst.load(), dim="time")
r3_squared = cor_SST3*cor_SST3
var_sst3=r3_squared.mean()*100
var_sst3_round=var_sst3.round(decimals=1)
print(var_sst3_round)
cor_SIC1 = xr.corr(PC1sic.load(), ds2.sic.load(), dim="time")
r1_squared_sic = cor_SIC1*cor_SIC1
var_sic1=r1_squared_sic.mean()*100
var_sic1_round=var_sic1.round(decimals=1)
print(var_sic1_round)
cor_SIC2 =xr.corr(PC2sic.load(), ds2.sic.load(), dim="time")
r2_squared_sic = cor_SIC2*cor_SIC2
var_sic2=r2_squared_sic.mean()*100
var_sic2_round=var_sic2.round(decimals=1)
print(var_sic2_round)
cor_SIC3 = xr.corr(PC3sic.load(), ds2.sic.load(),dim="time")
r3_squared_sic = cor_SIC3*cor_SIC3
var_sic3=r3_squared_sic.mean()*100
var_sic3_round=var_sic3.round(decimals=2)
print(var_sic3_round)
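# The six blocks above repeat one computation; a helper (sketch; the name
# variance_explained is ours) would fold each block into a single call:
def variance_explained(pc, field, decimals=1):
    r = xr.corr(pc, field, dim="time")
    return ((r * r).mean() * 100).round(decimals=decimals)
# e.g. variance_explained(PC1sst.load(), ds1.sst.load()) reproduces var_sst1_round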
Explained variance (%): SST CCA1 17.2, CCA2 8.1, CCA3 9.2; SIC CCA1 20.0, CCA2 9.5, CCA3 10.24
Plot the 1st CCA pair (Fig. 4 a, b, c and d)
# Plotting is done panel by panel; the individual panels are assembled into the final figure afterwards
fig, ax = plt.subplots(figsize=(8, 5),dpi=300, subplot_kw={"projection":ccrs.Robinson(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=16)
gv.set_map_boundary(ax, [-75, 15], [-80, 80], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = False
gl.left_labels = True
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, -45, 0, 45, 180])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-0.4
level_max=0.4
stepsize=0.04
levels=np.arange(level_min-stepsize, level_max+2*stepsize, stepsize)
#cmap = sns.color_palette("vlag", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap = 'seismic'
cf=ax.contourf(ds1['lon'], ds1['lat'], regSST1, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='vertical',
label=r"(°C)",pad=0.025,shrink=0.39,aspect=30,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA1 SST ',
lefttitlefontsize=13,
righttitle=f'{var_sst1_round.values} %',
righttitlefontsize=13,
xlabel="Year",
ylabel="",
labelfontsize=21 )
plt.savefig("CCAsstSIC.ERA5.18502014.SST.global.sic.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("CCAsstSIC.ERA5.18502014.SST.globalsic.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
ax.coastlines()
plt.tight_layout()
plt.show()
fig1, ax= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.NorthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-180, 180], [55, 90], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([ 60])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize, level_max+2*stepsize, stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax.contourf(ds2['longitude'], ds2['latitude'], regSIC1*100, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=26,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA1 ERA5 SIC (Arctic) ',
lefttitlefontsize=13,
righttitle=f' ',
righttitlefontsize=13,
xlabel="Year",
ylabel="",
labelfontsize=21)
plt.savefig("CCAsstSIC.ERA5.19592021.GLOBAL.SIC1b.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("ERA5.19592021.GLOBAL.SIC1b.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
ax.coastlines()
plt.tight_layout()
plt.show()
fig3, ax3= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.SouthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax3.set_extent([0, 360, -90, -55], crs=ccrs.PlateCarree())
ax3.coastlines(resolution='110m')
ax3.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax3.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax3.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax3)
gvutil.add_major_minor_ticks(ax3, labelsize=20)
#gv.set_map_boundary(ax3, [-180, 180], [-90, -50], south_pad=1)
gl = ax3.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([-60]) #gridline at 60°S for the Antarctic panel
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize, level_max+2*stepsize, stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
#cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax3.contourf(ds2['longitude'], ds2['latitude'], regSIC1*100, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=20,)
gvutil.set_titles_and_labels(ax3,
lefttitle='CCA1 ERA5 SIC (Antarctic)',
lefttitlefontsize=13,
righttitle=f'{var_sic1_round.values} %',
righttitlefontsize=13,
xlabel="Year",
ylabel="",
labelfontsize=21)
plt.savefig("CCAsstSIC.ERA5.19592021.GLOBAL.SIC1a.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("CCAsstSIC.ERA5.19592021.GLOBAL.SIC1a.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
ax3.coastlines()
plt.tight_layout()
plt.show()
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f'(PC1) 1959 - 2021',
lefttitlefontsize=29,
righttitle=f'r = {np.corrcoef(X_c[:, 0], Y_c[:, 0])[0, 1]:.2f}',
righttitlefontsize=29,
xlabel="",
ylabel="Standard deviation",
labelfontsize=29)
gv.add_major_minor_ticks(ax,
x_minor_per_major=1,
y_minor_per_major=5,
labelsize=23)
# Plot data
ax.plot(ds1.time, PC1sst, color='black', linewidth=3, label='SST')
ax.plot(ds1.time, PC1sic, marker='o', markerfacecolor='red', markersize=2.8, color='red', linewidth=1.75,label='SIC')
#ax.legend(prop={"size":6})
ax.fill_between(ds1.time,PC1sic, where=PC1sic > 0, color='gold')
ax.fill_between(ds1.time,PC1sic, where=PC1sic < 0, color='cyan')
plt.legend(prop={"size":23})
plt.savefig("CCAsstSIC.AWIESM21.19592021.PC1.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("CCAsstSIC.AWIESM21.19592021.PC1.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
plt.tight_layout()
plt.show()
#perform least-squares regression between the (linearly fitted) RAPID index and PC1CCA over the 2005-2019 period
PC1CCA_fit=pd.read_csv('PC1.CCA.FIT.2005.2019.dat',header=None, delim_whitespace=True)
rapid_fit=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/OK1.AMOC.rapid.20052019.fit.dat',header=None, delim_whitespace=True)
PC1CCA=((PC1sst+PC1sic)/2)
# generate x and y
x = rapid_fit[1]
y = PC1CCA_fit[1]
# assemble matrix A
A = np.vstack([x, np.ones(len(x))]).T
# turn y into a column vector (as a numpy array, to avoid the pandas multi-dimensional indexing deprecation)
y = y.to_numpy()[:, np.newaxis]
# Direct least square regression
alpha = np.dot((np.dot(np.linalg.inv(np.dot(A.T,A)),A.T)),y)
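# The normal-equations product above is the least-squares solution; numpy's lstsq computes
# the same thing more stably (sketch) and should reproduce alpha.
alpha_check, *_ = np.linalg.lstsq(A, y, rcond=None)
#print(alpha_check)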
#print(alpha)
#reconstruct PC1 relative to the RAPID AMOC anomaly
PC1CCA_amoc_anom=alpha[0]*PC1CCA + alpha[1]
#PC1CCA_amoc_anom
#obtain the Caesar et al. (2018) index relative to the RAPID AMOC anomaly
caesar=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/AMOC.INDEX.CAESAR.19592016.dat.txt',header=None, delim_whitespace=True)
rapid_fit_2016=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/RAPID.anm.AMOC.Index.20052016.AVG.FIT.txt',header=None, delim_whitespace=True)
Caesar_fit=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/AMOC.INDEX.CAESAR.20052016.FIT.txt',header=None, delim_whitespace=True)
#caesar_anom_n = (caesar[1]-caesar[1].mean())/caesar[1].std()
# generate x and y
x = rapid_fit_2016[1]
y = Caesar_fit[1]
# assemble matrix A
A = np.vstack([x, np.ones(len(x))]).T
# turn y into a column vector (as a numpy array, to avoid the pandas multi-dimensional indexing deprecation)
y = y.to_numpy()[:, np.newaxis]
# Direct least square regression
alpha = np.dot((np.dot(np.linalg.inv(np.dot(A.T,A)),A.T)),y)
#print(alpha)
#reconstruct the Caesar index relative to the RAPID AMOC anomaly
Caesar_anom=alpha[0]*caesar[1] + alpha[1]
Caesar_anom=(Caesar_anom-Caesar_anom.mean())/Caesar_anom.std()
#obtain the Fraser (2021) index relative to the RAPID AMOC anomaly
fraser=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/OK4-Copy1.anm.AMOC.Index.Fraser.1959.2019.csv',header=None, delim_whitespace=True)
fraser_fit=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/OK4-Copy1.anm.AMOC.Index.Fraser.2005.2019.FIT.txt',header=None, delim_whitespace=True)
fraser_xr=fraser.to_xarray().to_array()
# generate x and y
x1 = rapid_fit[1]
y1 = fraser_fit[1]
# assemble matrix A
A1 = np.vstack([x1, np.ones(len(x1))]).T
# turn y1 into a column vector (as a numpy array, to avoid the pandas multi-dimensional indexing deprecation)
y1 = y1.to_numpy()[:, np.newaxis]
# Direct least square regression
alpha1 = np.dot((np.dot(np.linalg.inv(np.dot(A1.T,A1)),A1.T)),y1)
#print(alpha1)
fraser_amoc=alpha1[0]*fraser[1] + alpha1[1]
fraser_amoc_anom=(fraser_amoc-fraser_amoc.mean())/fraser_amoc.std()
# Plot FIGURE 5
rapid_reg=pd.read_csv('/home/csys/pevaidea/AMOC.Indices/OK1.anm.AMOC.Index.2005.2019.Rapid.anom.dat',header=None,delim_whitespace=True)
PC1sstxr=rapid_reg.to_xarray().to_array()
rapid_rega=PC1sstxr[1]
#rapid_reg[1]
#generate time axes matching each index
time_for_RAPID = '/isibhv/projects/paleo_work/petruv/Data/ErsstV5_18542021/sstv2forqq.mnmean.Ersstv5.20052019.nc'
dsrapid= xr.open_dataset(time_for_RAPID)
#print(dsrapid)
time_for_PC1 = 'g_anmanom_sst_ersst_rec10eof_1959_2021.nc'
dspc1 = xr.open_dataset(time_for_PC1)
#print(ds1)
ds_caesar=dspc1.sel(time=slice('1959-06-16', '2016-06-16'))
ds_fraser=dspc1.sel(time=slice('1959-06-16', '2019-06-16'))
ds_CCAlong=dspc1.sel(time=slice('1959-06-16', '2017-06-16'))
# PLOT FIGURE 5
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f'1959 - 2021',
lefttitlefontsize=23,
righttitle=f'AMOC',
righttitlefontsize=23,
xlabel="",
ylabel="(Sv)",
labelfontsize=25)
gv.add_major_minor_ticks(ax,
x_minor_per_major=2,
y_minor_per_major=1,
labelsize=23)
# Plot data
ax.set_ylim([-4,4])
ax.set_xlim([-4000,18900])
#ax.plot(dsrapid.time, rapid_reg[1].rolling(3).mean(), color='blue', linewidth=0.75,label='RAPID')
ax.plot(dsrapid.time, rapid_reg[1], color='black', linestyle = 'dashed',linewidth=4.1,label='RAPID AMOC')
ax.plot(ds_caesar.time,Caesar_anom.rolling(7,center=False).mean(), color='cyan', linestyle="-.",linewidth=4, label='Caesar et al. (2018) AMOC')
ax.plot(ds_fraser.time,fraser_amoc_anom.rolling(7,center=False).mean(), color='gold', linewidth=4, label='Fraser et al. (2021) AMOC')
ax.plot(dspc1.time,PC1CCA_amoc_anom, marker='o', markerfacecolor='purple', color='red', linewidth=4.5, label='CCA PC1 AMOC (Fig 4c)')
plt.legend(loc=2,prop={"size":14})
plt.savefig("Fig5.AMOC_Index.V222T.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
plt.tight_layout()
plt.show()
Source (location) of the data (Christian S. Dropbox):
https://1drv.ms/u/s!AnZSDMNwdkDMgeNBhGZk6PxWTXd8Rg?e=Xglhwg PASSWORD: 8slitUbAubiv*
Spatial Resolution: 1° × 1° horizontal grid, Time Frame: January 1850-2014
#open SST data
fnct = 'SST.AWIESM21.Historical.18502014.nc'
dst= xr.open_dataset(fnct)
#print(dst)
#calculate anomalies relative to the 1980 - 2010 period
from cdo import *
cdo = Cdo()
cdo.selyear("1980/2010",input="SST.AWIESM21.Historical.18502014.nc", output="remap.sst.fesom.merged19802010.nc")
cdo.ymonmean(input="remap.sst.fesom.merged19802010.nc", output="annual_cycle_SST.19802010.nc")
cdo.sub(input="SST.AWIESM21.Historical.18502014.nc annual_cycle_SST.19802010.nc", output="anom.remap.sst.fesom.merged18502014.nc")
#calculate annual means
cdo.yearmonmean(input="anom.remap.sst.fesom.merged18502014.nc", output="anm.anom.remap.sst.fesom.merged18502014.nc")
#select region of interest
cdo.sellonlatbox('285,375,-80,80', input="anm.anom.remap.sst.fesom.merged18502014.nc", output="ATLa.anm.anom.remap.sst.fesom.merged18502014.nc")
os.system("rm " + "remap.sst.fesom.merged19802010.nc")
os.system("rm " + "annual_cycle_SST.19802010.nc")
os.system("rm " + "anom.remap.sst.fesom.merged18502014.nc")
os.system("rm " + "anm.anom.remap.sst.fesom.merged18502014.nc")
# As for the observations, CDO is used to remove the field mean from each point of the grid
cdo.fldmean(input="ATLa.anm.anom.remap.sst.fesom.merged18502014.nc", output="Fldmean.ATL.anm.anom.remap.sst.fesom.merged18502014.nc")
map_file="ATLa.anm.anom.remap.sst.fesom.merged18502014.nc"
index_file="Fldmean.ATL.anm.anom.remap.sst.fesom.merged18502014.nc"
index_txt='Fldmean.ATL.anm.anom.remap.sst.fesom.merged18502014.txt'
os.system('cdo showyear ' + index_file)
#dump the field-mean time series to a text file (one value per timestep)
os.system('cdo -s output ' + index_file + ' > ' + index_txt)
index_data=pd.read_csv(index_txt,header=None).squeeze()
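# Alternative (sketch): the field-mean series can also be read straight from the NetCDF file
# with xarray, avoiding the text round trip.
index_data_xr = xr.open_dataset(index_file)['sst'].squeeze()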
#np.shape(index_data)
#create map data
dataset=Dataset(map_file)
map_data=dataset.variables['sst'][:].squeeze()
model_lon=dataset.variables['lon'][:].squeeze()
model_lat=dataset.variables['lat'][:].squeeze()
#np.shape(map_data)
map_data_normalized=np.zeros(np.shape(map_data))
for x in range(np.size(model_lon)):
    for y in range(np.size(model_lat)):
        for t in range(np.shape(map_data)[0]):
            map_data_normalized[t,y,x]=map_data[t,y,x]-index_data[t]
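# Equivalent, loop-free version (sketch): broadcast the field-mean series over the two
# spatial dimensions; masked points are filled with NaN explicitly here.
map_data_normalized = np.ma.filled(
    map_data - index_data.to_numpy()[:, np.newaxis, np.newaxis], np.nan)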
os.system("rm " + "g.ATLa.anm.anom.remap.sst.fesom.merged18502014.nc")
#creates a copy of the original NetCDF file to which the modified data will be written
map_file_export="g.ATLa.anm.anom.remap.sst.fesom.merged18502014.nc"
os.system("cp "+map_file+" "+map_file_export)
#write map_data_normalized to netcdf file
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#ncfile.variables['sst'][:]=np.zeros(np.shape(map_data_normalized)) #put zeros to verify that the data actually has been written
ncfile.variables['sst'][:]=map_data_normalized[:,:,:] #"real" data write
ncfile.close()
os.system("rm " + "ATLa.anm.anom.remap.sst.fesom.merged18502014.nc")
os.system("rm " + "Fldmean.ATL.anm.anom.remap.sst.fesom.merged18502014.nc")
1850 … 2014 (all 165 consecutive years) cdo showyear: Processed 1 variable over 165 timesteps.
#Perform EOF analysis on the historical SST
fnc = '/isibhv/projects/paleo_work/petruv/Data/AWIESM.Hist/g.ATLa.anm.anom.remap.sst.fesom.merged18502014.nc'
ds1_sst = xr.open_dataset(fnc)
#print(ds1_sst)
#have a short look at the data
#import hvplot.xarray
#sst=ds1_sst.sst
#sst.isel(time=0).hvplot()
# ----- Period and area setting ------
ystr = 1850
yend = 2014
latS = -80.
latN = 80.
lonW = -75.
lonE = 15
neof = 6
anm=ds1_sst.sst
# -- EOF --
anmnD = anm.sortby("lat", ascending=True)
clat = anmnD['lat'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))  # sqrt(cos(lat)) area weighting for the EOF
wanm = anmnD * clat
wanm.attrs = anmnD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['lon']
if ( ((lonW < 0) or (lonE < 0 )) and (lon.values.min() > -1) ):
    wanm=wanm.assign_coords(lon=( (lon + 180) % 360 - 180) )
    wanm = wanm.sortby("lon")
    print(' change lon ')
#print(wanm)
xw = wanm.sel(lat=slice(latS, latN), lon=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'lat', 'lon')
#print(xw_anm)
eofs_sst = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs_sst = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
pcs_norm_sst = pcs_sst / pcs_sst.std(dim='time')
pcs_sst['time']=anmnD['time']
pcs_norm_sst['time']=anmnD['time']
pcs_sst.attrs['varianceFraction'] = eofs_sst.attrs['varianceFraction']
pcs_norm_sst.attrs['varianceFraction'] = eofs_sst.attrs['varianceFraction']
#print(pcs_norm_sst)
evec_sst = xr.DataArray(data=eofs_sst, dims=('eof','lat','lon'),
coords = {'eof': np.arange(0,neof), 'lat': xw['lat'], 'lon': xw['lon']} )
#print(evec_sst)
#export the PCs in a format usable for CCA
#(the unstack/transpose order below was found by trial and error)
df=pcs_norm_sst.to_dataframe().unstack().transpose()
df.to_csv('RezEOF.PC.sst.awiesm21.19592021.6PCs.normalized.txt')
#reconstruct the initial SST field based on the first 6 EOFs
map_file_export="REZeof.EOFmaps.sst.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1xPC1 " + fnc + " -chname,sst,EOF2xPC2 " + fnc + " -chname,sst,EOF3xPC3 " + fnc + " -chname,sst,EOF4xPC4 " + fnc + " -chname,sst,EOF5xPC5 " + fnc + " -chname,sst,EOF6xPC6 " + fnc + " " + map_file_export)
map_file_export="TEST1.REZeof.EOFmaps.sst.awiesm.18502014.nc"
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1 -seltimestep,1 " + fnc + " -chname,sst,EOF2 -seltimestep,1 " + fnc + " -chname,sst,EOF3 -seltimestep,1 " + fnc + " -chname,sst,EOF4 -seltimestep,1 " + fnc + " -chname,sst,EOF5 -seltimestep,1 " + fnc + " -chname,sst,EOF6 -seltimestep,1 " + fnc + " " + map_file_export)
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
map_data_normalized=ncfile.variables['EOF1'][:]
#print(np.shape(map_data_normalized))
evec_export=evec_sst.reindex(lat=evec_sst.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if I do not reindex them here
ncfile.variables['EOF1'][:]=evec_export[0,:,:] #write eof1 to dedicated variable
ncfile.variables['EOF2'][:]=evec_export[1,:,:] #write eof2 to dedicated variable
ncfile.variables['EOF3'][:]=evec_export[2,:,:] #write eof3 to dedicated variable
ncfile.variables['EOF4'][:]=evec_export[3,:,:] #write eof4 to dedicated variable
ncfile.variables['EOF5'][:]=evec_export[4,:,:] #write eof5 to dedicated variable
ncfile.variables['EOF6'][:]=evec_export[5,:,:] #write eof6 to dedicated variable
ncfile.close()
# Multiply each EOF with its associated PC and write the products to a netcdf file
eof_pcs2=[]
for i in range(6):
    #print(i)
    eof_pcs2.append(pcs_sst[i,:]*evec_export[i,:,:])
map_file_export="awREzEOF.sst.EOFS_x_PCs.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1xPC1 " + fnc + " -chname,sst,EOF2xPC2 " + fnc + " -chname,sst,EOF3xPC3 " + fnc + " -chname,sst,EOF4xPC4 " + fnc + " -chname,sst,EOF5xPC5 " + fnc + " -chname,sst,EOF6xPC6 " + fnc + " " + map_file_export)
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
for i in range(6):
    varname="EOF"+str(i+1)+"xPC"+str(i+1)
    eof_pcs_export=eof_pcs2[i].reindex(lat=eof_pcs2[i].lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
    #print(np.shape(eof_pcs_export))
    ncfile.variables[varname][:]=eof_pcs_export #write eof to dedicated variable
ncfile.close()
#print(np.shape(eof_pcs))
#print(type(eof_pcs[0]))
#reconstruct the initial field based on the first 6 EOFs
eof_pcs_sum2=eof_pcs2[0]+eof_pcs2[1]+eof_pcs2[2]+eof_pcs2[3]+eof_pcs2[4]+eof_pcs2[5]
map_file_export="anm.anom.detrend.remap.sst.fesom.merged18502014_rec6EOF.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo chname,sst,sst " + fnc + " " + map_file_export)
#write sum of products of eofs and pcs to netcdf file
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
eof_pcs_sum_export=eof_pcs_sum2.reindex(lat=eof_pcs_sum2.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
ncfile.variables["sst"][:]=eof_pcs_sum_export #write eof to dedicated variable
ncfile.close()
#remove the files created that are no longer needed for the current analyses
os.remove('REZeof.EOFmaps.sst.nc')
os.remove('TEST1.REZeof.EOFmaps.sst.awiesm.18502014.nc')
os.remove('awREzEOF.sst.EOFS_x_PCs.nc')
#Open and detrend AWIESM2.1 Historical SIC data
from cdo import *
cdo = Cdo()
#detrend the data
cdo.detrend(input="SIC.AWIESM21.Historical.18502014.nc", output="detrend.remap.a_ice.fesom.merged18502014.nc")
#calculate anomalies relative to the 1980 - 2010 period
cdo.selyear("1980/2010",input="detrend.remap.a_ice.fesom.merged18502014.nc", output="detrend.remap.a_ice.fesom.merged19802010.nc")
cdo.ymonmean(input="detrend.remap.a_ice.fesom.merged19802010.nc", output="annual_cycle_SIC.awiesm.19802010.nc")
cdo.sub(input="detrend.remap.a_ice.fesom.merged18502014.nc annual_cycle_SIC.awiesm.19802010.nc", output="anom.detrend.remap.a_ice.fesom.merged18502014.nc")
#calculate annual means
cdo.yearmonmean(input="anom.detrend.remap.a_ice.fesom.merged18502014.nc", output="anm.anom.detrend.remap.a_ice.fesom.merged18502014.nc")
#select region of interest
cdo.sellonlatbox('0,360,55,90', input="anm.anom.detrend.remap.a_ice.fesom.merged18502014.nc", output="anm.anom.detrend.remap.a_ice5590.fesom.merged18502014.nc")
#remove the files created that are no longer needed for the current analyses
os.remove('detrend.remap.a_ice.fesom.merged18502014.nc')
os.remove('detrend.remap.a_ice.fesom.merged19802010.nc')
os.remove('anom.detrend.remap.a_ice.fesom.merged18502014.nc')
os.remove('anm.anom.detrend.remap.a_ice.fesom.merged18502014.nc')
# Perform EOF on Arctic SIC AWIESM2.1 Historical detrended anomalies
fnc = '/isibhv/projects/paleo_work/petruv/Data/AWIESM.Hist/anm.anom.detrend.remap.a_ice5590.fesom.merged18502014.nc'
ds1_sic = xr.open_dataset(fnc)
#print(ds1_sic)
# ----- Period and area setting ------
ystr = 1850
yend = 2014
latS = 55.
latN = 90.
lonW = 0.
lonE = 360
neof = 6
anm=ds1_sic.a_ice
# -- EOF --
anmnD = anm.sortby("lat", ascending=True)
clat = anmnD['lat'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))  # sqrt(cos(lat)) area weighting for the EOF
wanm = anmnD * clat
wanm.attrs = anmnD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['lon']
if ( ((lonW < 0) or (lonE < 0 )) and (lon.values.min() > -1) ):
    wanm=wanm.assign_coords(lon=( (lon + 180) % 360 - 180) )
    wanm = wanm.sortby("lon")
    print(' change lon ')
#print(wanm)
xw = wanm.sel(lat=slice(latS, latN), lon=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'lat', 'lon')
#print(xw_anm)
eofs_sic = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs_sic = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
pcs_norm_sic = pcs_sic / pcs_sic.std(dim='time')
pcs_sic['time']=anmnD['time']
pcs_norm_sic['time']=anmnD['time']
pcs_sic.attrs['varianceFraction'] = eofs_sic.attrs['varianceFraction']
pcs_norm_sic.attrs['varianceFraction'] = eofs_sic.attrs['varianceFraction']
#print(pcs_norm_sic)
evec_sic = xr.DataArray(data=eofs_sic, dims=('eof','lat','lon'),
coords = {'eof': np.arange(0,neof), 'lat': xw['lat'], 'lon': xw['lon']} )
#print(evec_sic)
#export the PCs in a format usable for CCA
#(the unstack/transpose order below was found by trial and error)
df=pcs_norm_sic.to_dataframe().unstack().transpose()
df.to_csv('RezEOF.sic.awiesm21.19592021.6PCs.normalized.txt')
#reconstruct the initial SIC field based on the first 6 EOFs
map_file_export="REZeof.EOFmaps.sic.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,a_ice,EOF1xPC1 " + fnc + " -chname,a_ice,EOF2xPC2 " + fnc + " -chname,a_ice,EOF3xPC3 " + fnc + " -chname,a_ice,EOF4xPC4 " + fnc + " -chname,a_ice,EOF5xPC5 " + fnc + " -chname,a_ice,EOF6xPC6 " + fnc + " " + map_file_export)
map_file_export="TEST1.REZeof.EOFmaps.sic.awiesm.18502014.nc"
os.system("rm " + map_file_export + "; cdo merge -chname,a_ice,EOF1 -seltimestep,1 " + fnc + " -chname,a_ice,EOF2 -seltimestep,1 " + fnc + " -chname,a_ice,EOF3 -seltimestep,1 " + fnc + " -chname,a_ice,EOF4 -seltimestep,1 " + fnc + " -chname,a_ice,EOF5 -seltimestep,1 " + fnc + " -chname,a_ice,EOF6 -seltimestep,1 " + fnc + " " + map_file_export)
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
map_data_normalized=ncfile.variables['EOF1'][:]
#print(np.shape(map_data_normalized))
evec_export=evec_sic.reindex(lat=evec_sic.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if I do not reindex them here
#evec_export=evec.reindex(latitude=evec.latitude[::-1])[:,:,:]
ncfile.variables['EOF1'][:]=evec_export[0,:,:] #write eof1 to dedicated variable
ncfile.variables['EOF2'][:]=evec_export[1,:,:] #write eof2 to dedicated variable
ncfile.variables['EOF3'][:]=evec_export[2,:,:] #write eof3 to dedicated variable
ncfile.variables['EOF4'][:]=evec_export[3,:,:] #write eof4 to dedicated variable
ncfile.variables['EOF5'][:]=evec_export[4,:,:] #write eof5 to dedicated variable
ncfile.variables['EOF6'][:]=evec_export[5,:,:] #write eof6 to dedicated variable
ncfile.close()
# Multiply each EOF with its associated PC and write the products to a netcdf file
eof_pcs2=[]
for i in range(6):
    #print(i)
    eof_pcs2.append(pcs_sic[i,:]*evec_export[i,:,:])
map_file_export="awREzEOF.sic.EOFS_x_PCs.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,a_ice,EOF1xPC1 " + fnc + " -chname,a_ice,EOF2xPC2 " + fnc + " -chname,a_ice,EOF3xPC3 " + fnc + " -chname,a_ice,EOF4xPC4 " + fnc + " -chname,a_ice,EOF5xPC5 " + fnc + " -chname,a_ice,EOF6xPC6 " + fnc + " " + map_file_export)
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
for i in range(6):
    varname="EOF"+str(i+1)+"xPC"+str(i+1)
    eof_pcs_export=eof_pcs2[i].reindex(lat=eof_pcs2[i].lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
    #print(np.shape(eof_pcs_export))
    ncfile.variables[varname][:]=eof_pcs_export #write eof to dedicated variable
ncfile.close()
#print(np.shape(eof_pcs))
#print(type(eof_pcs[0]))
#reconstruct the initial field based on the first 6 EOFs
eof_pcs_sum2=eof_pcs2[0]+eof_pcs2[1]+eof_pcs2[2]+eof_pcs2[3]+eof_pcs2[4]+eof_pcs2[5]
map_file_export="anm.anom.detrend.remap.a_ice5590.fesom.merged18502014_rec6EOF.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo chname,a_ice,sic " + fnc + " " + map_file_export)
#write sum of products of eofs and pcs to netcdf file
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
eof_pcs_sum_export=eof_pcs_sum2.reindex(lat=eof_pcs_sum2.lat[::-1])[:,:,:]
#eof_pcs_sum_export=eof_pcs_sum_export.reindex(lat=eof_pcs_sum_export.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
ncfile.variables["sic"][:]=eof_pcs_sum_export #write eof to dedicated variable
ncfile.close()
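#optional sanity check (a sketch, not part of the original workflow): correlate the
#6-EOF reconstruction written above with the original anomaly field, to see how much
#of the signal the truncation retains; rec_check/orig_check are new names
rec_check = xr.open_dataset(map_file_export)['sic']
orig_check = xr.open_dataset(fnc)['a_ice']
print(float(xr.corr(orig_check, rec_check, dim='time').mean()))  #domain-mean temporal correlation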
#remove the files created that are not of use for the current analyses
os.remove('REZeof.EOFmaps.sic.nc')
os.remove('awREzEOF.sic.EOFS_x_PCs.nc')
os.remove('TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc')
rm: cannot remove 'REZeof.EOFmaps.sic.nc': No such file or directory
cdo(1) chname: Process started cdo(2) chname: Process started cdo(3) chname: Process started cdo(4) chname: Process started cdo(5) chname: Process started cdo(6) chname: Process started cdo(1) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(2) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(3) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(4) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(5) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(6) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo merge: Processed 12785850 values from 6 variables over 990 timesteps [2.06s 110MB].
rm: cannot remove 'TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc': No such file or directory
cdo(1) chname: Process started cdo(2) seltimestep: Process started cdo(3) chname: Process started cdo(4) seltimestep: Process started cdo(5) chname: Process started cdo(6) seltimestep: Process started cdo(7) chname: Process started cdo(8) seltimestep: Process started cdo(9) chname: Process started cdo(10) seltimestep: Process started cdo(11) chname: Process started cdo(12) seltimestep: Process started cdo(2) seltimestep: Processed 12915 values from 1 variable over 2 timesteps. cdo(4) seltimestep: Processed 12915 values from 1 variable over 2 timesteps. cdo(6) seltimestep: Processed 12915 values from 1 variable over 2 timesteps. cdo(8) seltimestep: Processed 12915 values from 1 variable over 2 timesteps. cdo(12) seltimestep: Processed 12915 values from 1 variable over 2 timesteps. cdo(10) seltimestep: Processed 12915 values from 1 variable over 2 timesteps. cdo(1) chname: Processed 12915 values from 1 variable over 1 timestep. cdo(3) chname: Processed 12915 values from 1 variable over 1 timestep. cdo(5) chname: Processed 12915 values from 1 variable over 1 timestep. cdo(7) chname: Processed 12915 values from 1 variable over 1 timestep. cdo(9) chname: Processed 12915 values from 1 variable over 1 timestep. cdo(11) chname: Processed 12915 values from 1 variable over 1 timestep. cdo merge: Processed 77490 values from 6 variables over 6 timesteps [0.11s 51MB].
rm: cannot remove 'awREzEOF.sic.EOFS_x_PCs.nc': No such file or directory
cdo(1) chname: Process started cdo(2) chname: Process started cdo(3) chname: Process started cdo(4) chname: Process started cdo(5) chname: Process started cdo(6) chname: Process started cdo(1) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(2) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(3) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(4) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(5) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo(6) chname: Processed 2130975 values from 1 variable over 165 timesteps. cdo merge: Processed 12785850 values from 6 variables over 990 timesteps [1.88s 117MB].
rm: cannot remove 'anm.anom.detrend.remap.a_ice5590.fesom.merged18502014_rec6EOF.nc': No such file or directory
cdo chname: Processed 2130975 values from 1 variable over 165 timesteps [0.21s 57MB].
# open the two datasets
field1 = 'anm.anom.detrend.remap.sst.fesom.merged18502014_rec6EOF.nc'
ds1 = xr.open_dataset(field1)
#print(ds1)
field2 = 'anm.anom.detrend.remap.a_ice5590.fesom.merged18502014_rec6EOF.nc'
ds2= xr.open_dataset(field2)
#print(ds2)
PCsV1=pd.read_csv('RezEOF.PC.sst.awiesm21.19592021.6PCs.normalized.txt',sep=',')
PCsV1_xr=PCsV1.to_xarray().to_array()
PCsSST=PCsV1[['0','1','2','3','4','5']]
PCsV2=pd.read_csv('RezEOF.sic.awiesm21.19592021.6PCs.normalized.txt',sep=',')
PCsV2_xr=PCsV2.to_xarray().to_array()
PCsSIC=PCsV2[['0','1','2','3','4','5']]
#X2
#Normalize the PCs (if necessary) and perform CCA between the 6 PCs associated with each field
X1_mc = (PCsSST-PCsSST.mean())/(PCsSST.std())
#X1_mc.head()
X2_mc = (PCsSIC-PCsSIC.mean())/(PCsSIC.std())
#X2_mc.head()
ca = CCA(n_components=6)
ca.fit(X1_mc, X2_mc)
X_c, Y_c = ca.transform(X1_mc, X2_mc)
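#X_c and Y_c hold the canonical variates: column k of X_c is the linear combination
#of the six SST PCs that is maximally correlated with column k of Y_c, the matching
#combination of the six SIC PCs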
#export the pairs of maximally correlated PCs obtained from CCA
cc_res = pd.DataFrame({"CCX_1":X_c[:, 0],
"CCY_1":Y_c[:, 0],
"CCX_2":X_c[:, 1],
"CCY_2":Y_c[:, 1],
"CCX_3":X_c[:, 2],
"CCY_3":Y_c[:, 2],
"CCX_4":X_c[:, 3],
"CCY_4":Y_c[:, 3],
"CCX_5":X_c[:, 4],
"CCY_5":Y_c[:, 4],
"CCX_6":X_c[:, 5],
"CCY_6":Y_c[:, 5],
#"Species":df.species.tolist(),
})
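#the same DataFrame can also be built programmatically (a sketch using the same X_c/Y_c arrays):
cc_res = pd.DataFrame({f"CC{xy}_{k+1}": arr[:, k]
                       for k in range(X_c.shape[1])
                       for xy, arr in (("X", X_c), ("Y", Y_c))})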
#cc_res.head()
#loop over the component pairs and print their canonical correlations
CCAresultPccorr=[]
for p in range(np.shape(X_c)[1]):
    CCAresultPccorr.append(np.corrcoef(X_c[:, p], Y_c[:, p])[0, 1])  #store the canonical correlation
    print(np.corrcoef(X_c[:, p], Y_c[:, p]))
[[1. 0.86436332] [0.86436332 1. ]]
[[1. 0.56660967] [0.56660967 1. ]]
[[1. 0.33551253] [0.33551253 1. ]]
[[1. 0.25399801] [0.25399801 1. ]]
[[1. 0.12689708] [0.12689708 1. ]]
[[1. 0.06579858] [0.06579858 1. ]]
#select the individual pcs
rezcca=cc_res.to_xarray()
PC1sst=rezcca['CCX_1']
PC1sic=rezcca['CCY_1']
PC2sst=rezcca['CCX_2']
PC2sic=rezcca['CCY_2']
PC3sst=rezcca['CCX_3']
PC3sic=rezcca['CCY_3']
PC4sst=rezcca['CCX_4']
PC4sic=rezcca['CCY_4']
#reindex the time axis in the PCs
PC1sst= PC1sst.rename({"index": "time"})
PC2sst= PC2sst.rename({"index": "time"})
PC3sst= PC3sst.rename({"index": "time"})
PC4sst= PC4sst.rename({"index": "time"})
PC1sic= PC1sic.rename({"index": "time"})
PC2sic= PC2sic.rename({"index": "time"})
PC3sic= PC3sic.rename({"index": "time"})
PC4sic= PC4sic.rename({"index": "time"})
PC1sst['time'] = ds1.time
PC2sst['time'] = ds1.time
PC3sst['time'] = ds1.time
PC4sst['time'] = ds1.time
PC1sic['time'] = ds2.time
PC2sic['time'] = ds2.time
PC3sic['time'] = ds2.time
PC4sic['time'] = ds2.time
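#the renaming/alignment above can be written more compactly (a sketch; the cca_pcs dict is new):
cca_pcs = {}
for i in range(1, 5):
    cca_pcs[f"PC{i}sst"] = rezcca[f"CCX_{i}"].rename({"index": "time"}).assign_coords(time=ds1.time)
    cca_pcs[f"PC{i}sic"] = rezcca[f"CCY_{i}"].rename({"index": "time"}).assign_coords(time=ds2.time)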
#perform regression analysis in order to obtain the SST/SIC spatial structures
regSST1 = xr.cov(PC1sst.load(), ds1.sst.load(), dim="time")/PC1sst.var(dim='time',skipna=True).values
regSST2 = xr.cov(PC2sst.load(), ds1.sst.load(), dim="time")/PC2sst.var(dim='time',skipna=True).values
regSST3 = xr.cov(PC3sst.load(), ds1.sst.load(), dim="time")/PC3sst.var(dim='time',skipna=True).values
regSST4 = xr.cov(PC4sst.load(), ds1.sst.load(), dim="time")/PC4sst.var(dim='time',skipna=True).values
regSIC1 = xr.cov(PC1sic.load(), ds2.sic.load(), dim="time")/PC1sic.var(dim='time',skipna=True).values
regSIC2 = xr.cov(PC2sic.load(), ds2.sic.load(), dim="time")/PC2sic.var(dim='time',skipna=True).values
regSIC3 = xr.cov(PC3sic.load(), ds2.sic.load(), dim="time")/PC3sic.var(dim='time',skipna=True).values
regSIC4 = xr.cov(PC4sic.load(), ds2.sic.load(), dim="time")/PC4sic.var(dim='time',skipna=True).values
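#cov(PC, field)/var(PC) is the least-squares regression slope, i.e. the field anomaly
#per one unit of the PC; a small helper avoids the repetition above (a sketch):
def regress_on_pc(pc, field):
    return xr.cov(pc.load(), field.load(), dim="time") / pc.var(dim="time", skipna=True).values
#e.g. regSST1 = regress_on_pc(PC1sst, ds1.sst)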
#perform correlations in order to obtain the percentage of variance explained as the average of r squared
cor_SST1 = xr.corr(PC1sst.load(), ds1.sst.load(), dim="time")
r1_squared = cor_SST1*cor_SST1
var_sst1=(r1_squared.mean()*100)
var_sst1_round=var_sst1.round(decimals=1)
print(var_sst1_round)
cor_SST2 = xr.corr(PC2sst.load(), ds1.sst.load(), dim="time")
r2_squared = cor_SST2*cor_SST2
var_sst2=r2_squared.mean()*100
var_sst2_round=var_sst2.round(decimals=1)
print(var_sst2_round)
cor_SST3 = xr.corr(PC3sst.load(), ds1.sst.load(), dim="time")
r3_squared = cor_SST3*cor_SST3
var_sst3=r3_squared.mean()*100
var_sst3_round=var_sst3.round(decimals=1)
print(var_sst3_round)
cor_SIC1 = xr.corr(PC1sic.load(), ds2.sic.load(), dim="time")
r1_squared_sic = cor_SIC1*cor_SIC1
var_sic1=r1_squared_sic.mean()*100
var_sic1_round=var_sic1.round(decimals=1)
print(var_sic1_round)
cor_SIC2 = xr.corr(PC2sic.load(), ds2.sic.load(), dim="time")
r2_squared_sic = cor_SIC2*cor_SIC2
var_sic2=r2_squared_sic.mean()*100
var_sic2_round=var_sic2.round(decimals=1)
print(var_sic2_round)
cor_SIC3 = xr.corr(PC3sic.load(), ds2.sic.load(), dim="time")
r3_squared_sic = cor_SIC3*cor_SIC3
var_sic3=r3_squared_sic.mean()*100
var_sic3_round=var_sic3.round(decimals=2)
print(var_sic3_round)
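#the six near-identical blocks above collapse into one helper (a sketch):
def explained_variance(pc, field, decimals=1):
    #domain-mean r squared (in %) between a PC time series and a field
    r = xr.corr(pc.load(), field.load(), dim="time")
    return round(float((r*r).mean()*100), decimals)
#e.g. print(explained_variance(PC1sst, ds1.sst))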
<xarray.DataArray ()> array(19.8)
<xarray.DataArray ()> array(18.7)
<xarray.DataArray ()> array(9.5)
<xarray.DataArray ()> array(11.3)
<xarray.DataArray ()> array(0.4)
<xarray.DataArray ()> array(0.)
# Plotting is done individually.
PC1sicok=PC1sic*-1
PC1sstcok=PC1sst*-1
fig, ax = plt.subplots(figsize=(8, 5),dpi=300, subplot_kw={"projection":ccrs.Robinson(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
#gv.set_map_boundary(ax, [-75, 15], [-80, 80], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = False
gl.left_labels = True
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, -45, 0, 45, 180])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-0.4
level_max=0.4
stepsize=0.04
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("vlag", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap = 'seismic'
cf=ax.contourf(ds1['lon'], ds1['lat'], -regSST1, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='vertical',
label=r"(°C)",pad=0.025,shrink=0.5,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA1 SST ',
lefttitlefontsize=15,
righttitle=f'{var_sst1_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21 )
plt.savefig("CCASIC.AWIESM21.18502014.SST1.pdf",format="pdf", bbox_inches = 'tight',dpi= 600)
plt.savefig("CCASIC.AWIESM21.18502014.SST1.jpg",format="jpg", bbox_inches = 'tight',dpi= 600)
#plt.savefig('asdsa', ...define type)
plt.tight_layout()
plt.show()
fig1, ax= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.NorthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-180, 180], [55, 90], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([ 60])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax.contourf(ds2['lon'], ds2['lat'], -regSIC1*100, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA1 SIC ',
lefttitlefontsize=15,
righttitle=f' {var_sic1_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21)
plt.savefig("CCASIC.AWIESM21.18502014.SIC1.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("CCASIC.AWIESM21.18502014.SIC1.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
ax.coastlines()
plt.tight_layout()
plt.show()
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f'(PC1) 1850 - 2014',
lefttitlefontsize=29,
righttitle=f'r = 0.86',
righttitlefontsize=29,
xlabel="",
ylabel="Stdandard deviation",
labelfontsize=29)
gv.add_major_minor_ticks(ax,
x_minor_per_major=1,
y_minor_per_major=5,
labelsize=23)
# Plot data
ax.plot(ds1_sst.time, PC1sstcok, color='black', linewidth=3, label='SST')
ax.plot(ds1_sst.time, PC1sicok, marker='o', markerfacecolor='red', markersize=2.8, color='red', linewidth=1.75,label='SIC')
#ax.legend(prop={"size":6})
ax.fill_between(ds1_sst.time,PC1sicok, where=PC1sicok > 0, color='gold')
ax.fill_between(ds1_sst.time,PC1sicok, where=PC1sicok < 0, color='cyan')
plt.legend(prop={"size":23})
plt.savefig("CCASIC.AWIESM21.18502014.PC1.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("CCASIC.AWIESM21.18502014.PC1.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
plt.tight_layout()
plt.show()
# Plotting is done individually; the panels are plotted one by one and then assembled into the final figure.
PC2sstcok=PC2sst*-1
PC2siccok=PC2sic*-1
fig, ax = plt.subplots(figsize=(8, 5),dpi=300, subplot_kw={"projection":ccrs.Robinson(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-75, 15], [-80, 80], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = False
gl.left_labels = True
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, -45, 0, 45, 180])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-0.4
level_max=0.4
stepsize=0.04
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("vlag", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap = 'seismic'
cf=ax.contourf(ds1['lon'], ds1['lat'], regSST2, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='vertical',
label=r"(°C)",pad=0.025,shrink=0.5,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA2 SST ',
lefttitlefontsize=15,
righttitle=f'{var_sst3_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21 )
plt.savefig("CCAsstSIC.AWIESM21.18502014.SST3.pdf",format="pdf", bbox_inches = 'tight',dpi= 600)
plt.savefig("CCAsstSIC.AWIESM21.18502014.SST3.jpg",format="jpg", bbox_inches = 'tight',dpi= 600)
#plt.savefig('asdsa', ...define type)
plt.tight_layout()
plt.show()
fig1, ax= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.NorthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-180, 180], [55, 90], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([ 60])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize/1,level_max+stepsize/1+stepsize,stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax.contourf(ds2['lon'], ds2['lat'], regSIC2*100, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=20,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA2 SIC ',
lefttitlefontsize=15,
righttitle=f' 2.7 %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=23)
plt.savefig("CCAsstSIC.AWIESM21.18502014.SIC3.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("CCAsstSIC.AWIESM21.18502014.SIC3.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
ax.coastlines()
plt.tight_layout()
plt.show()
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
lefttitle=f'(PC2) 1850 - 2014',
lefttitlefontsize=29,
righttitle=f'r = 0.57',
righttitlefontsize=29,
xlabel="",
ylabel="Stdandard deviation",
labelfontsize=23)
gv.set_axes_limits_and_ticks(
ax,
xlim=(-44000, 16500),
ylim=(-4.5, 4.5))
gv.add_major_minor_ticks(ax,
x_minor_per_major=2,
y_minor_per_major=1,
labelsize=23)
# Plot data
ax.plot(ds1.time, PC2sst, color='black', linewidth=3, label='SST')
ax.plot(ds1.time, PC2sic, marker='o', markerfacecolor='red', markersize=2.8, color='red', linewidth=1.75,label='SIC')
ax.legend(loc=3,prop={"size":15})
#ax.fill_between(ds1.time,PC2sic, where=PC2sicok > 0, color='gold')
#ax.fill_between(ds1.time,PC2sic, where=PC2sicok < 0, color='cyan')
plt.legend(prop={"size":23})
plt.savefig("CCAsstSIC.AWIESM21.18502014.PC3.V2.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("CCAsstSIC.AWIESM21.18502014.PC3.V2.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
plt.tight_layout()
plt.show()
#Plot the two PCs from CCA1 together with the simulated AMOC index
#open AMOC index produced with AWIES2.1 Historical simulation
PCT1=pd.read_csv('AMOC_idx_26.5N.AWIESM21.HIST.csv',sep=',')
AMOCindexAWIES21=PCT1.to_xarray().to_array()
AMOCAWI=AMOCindexAWIES21[1]
AMOCAWImean = (AMOCAWI-AMOCAWI.mean())
#Plot PC1 with AMOC Index
plt.figure(figsize=(12, 6))
ax1 = plt.gca()
ax2 = ax1.twinx()
#ax2.set_ylabel('Y2-axis')
# Plot reference line
ax1.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax1,
lefttitle=f'(PC1) 1850 - 2014',
lefttitlefontsize=26,
righttitle=f'',
righttitlefontsize=26,
xlabel="",
ylabel="Standard deviation",
labelfontsize=23)
gv.set_axes_limits_and_ticks(
ax1,
xlim=(-44000, 18000),
ylim=(-4.5, 4.5))
gvutil.set_titles_and_labels(ax2,
lefttitle=f'',
lefttitlefontsize=26,
righttitle=f'',
righttitlefontsize=26,
xlabel="",
ylabel="AMOC anomaly (Sv)",
labelfontsize=23)
gv.set_axes_limits_and_ticks(
ax2,
xlim=(-44000, 16500),
ylim=(-4.5, 4.5))
gv.add_major_minor_ticks(ax1,
x_minor_per_major=2,
y_minor_per_major=1,
labelsize=23)
gv.add_major_minor_ticks(ax2,
x_minor_per_major=2,
y_minor_per_major=1,
labelsize=23)
#plt.ylim([-3, 2])
# Plot data
#plt.legend()
#plt.legend(ax1.plot,prop={"size":23})
ax1.plot(ds1.time, PC1sstcok, color='black', linewidth=0.5,)
ax1.plot(ds1.time, PC1sstcok.rolling(time=11, center=True).mean(), color='black', linewidth=5, label='SST')
ax1.plot(ds1.time, PC1sicok, color='red', linewidth=0.5)
ax2.plot(ds1.time, AMOCAWImean, marker='o', markerfacecolor='blue', markersize=0.8, color='blue', linewidth=0.5)
ax2.plot(ds1.time, AMOCAWImean.rolling(index=11, center=True).mean(), marker='o', markerfacecolor='blue', markersize=2.8, color='blue', linewidth=5,label='AMOC 26.5 N')
ax1.plot(ds1.time, PC1sicok.rolling(time=11, center=True).mean(), color='red', linewidth=5, label='SIC')
#plt.ylim((25,250))
ax1.legend(loc=3,prop={"size":17})
ax2.legend(loc=1,prop={"size":17})
plt.savefig("CCAsstSIC.AWIESM21.18502014.PC1.V5.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("CCAsstSIC.AWIESM21.18502014.PC1.V5.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
#Calculate the correlation between CCA PC1 and the simulated AMOC index
cor=stats.pearsonr(PC1sstcok,AMOCAWImean)[0]
print(cor)
# Test the significance accounting for the autocorrelation of the two series: the sample size is
# reduced to n_eff = n*(1 - r1*r2)/(1 + r1*r2), with r1 and r2 the lag-1 autocorrelations
auto_corA = xr.corr(PC1sst, PC1sst.shift(time=-1), dim="time")
auto_corB = xr.corr(AMOCAWImean, AMOCAWImean.shift(index=-1), dim="index")
n=164
r1=auto_corA
r2=auto_corB
a=r1*r2
nreal=n*((1-a)/(1+a))
print(nreal)
dist =stats.beta(nreal/2 - 1, nreal/2 - 1, loc=-1, scale=2)
p= 2*dist.cdf(-abs(cor))
print(p)
if p<0.05:
    print("Significant 95%")
else:
    print("Not significant")
0.44360154849773725
<xarray.DataArray ()> array(76.61328703) Coordinates: variable <U17 'AMOC_index@26.5°N'
5.5721558239550666e-05
Significant 95%
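#the test above wraps into a helper for reuse (a sketch; r1 and r2 are the lag-1
#autocorrelations of the two series, stats comes from scipy as imported earlier):
def corr_significance(cor, n, r1, r2):
    #p-value of a Pearson correlation, with the sample size reduced for autocorrelation
    neff = n*((1 - r1*r2)/(1 + r1*r2))
    dist = stats.beta(neff/2 - 1, neff/2 - 1, loc=-1, scale=2)
    return 2*dist.cdf(-abs(cor))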
Source (location) of the data (Christian S. Dropbox):
https://1drv.ms/u/s!AnZSDMNwdkDMgeNBhGZk6PxWTXd8Rg?e=Xglhwg PASSWORD: 8slitUbAubiv*
Spatial Resolution: 1° × 1° horizontal grid, Time Frame: 100 Years
#Prepare PI sst data
from cdo import *
cdo = Cdo()
#select the last 101 years (2051-2151) of the run
cdo.selyear("2051/2151",input="SST.AWIESM21.preindustrial.20512151.nc", output="sst.remap.awiesm21.pi.20512151.nc")
#calculate anomalies relative to the monthly climatology of the selected period
cdo.ymonmean(input="sst.remap.awiesm21.pi.20512151.nc", output="annual_cycle_SST.19852010.nc")
cdo.sub(input="sst.remap.awiesm21.pi.20512151.nc annual_cycle_SST.19852010.nc", output="anom.sst.remap.awiesm21.pi.20512151.nc")
#calculate annual means
cdo.yearmonmean(input="anom.sst.remap.awiesm21.pi.20512151.nc", output="anm.anom.sst.remap.awiesm21.pi.20512151.nc")
#os.system("rm " + "/isibhv/projects/paleo_work/petruv/Data/AWIESM2_1_PI_Fer/Atl.anm.anom.sst.remap.awiesm21.pi.19002014.nc")
#select region of interest
cdo.sellonlatbox('285,375,-80,80', input="anm.anom.sst.remap.awiesm21.pi.20512151.nc", output="Atl.anm.anom.sst.remap.awiesm21.pi.20512151.nc")
os.remove('sst.remap.awiesm21.pi.20512151.nc')
os.remove('annual_cycle_SST.19852010.nc')
os.remove('anom.sst.remap.awiesm21.pi.20512151.nc')
os.remove('anm.anom.sst.remap.awiesm21.pi.20512151.nc')
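#the same preprocessing can be chained into a single cdo call, avoiding the temporary
#files (a commented sketch with the file names used above; it would have to run before
#the os.remove calls):
#cdo.sellonlatbox('285,375,-80,80',
#                 input="-yearmonmean -sub sst.remap.awiesm21.pi.20512151.nc -ymonmean sst.remap.awiesm21.pi.20512151.nc",
#                 output="Atl.anm.anom.sst.remap.awiesm21.pi.20512151.nc")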
# CDO is used to compute the domain (Atlantic box) mean, which is then removed from each grid point
cdo.fldmean(input="Atl.anm.anom.sst.remap.awiesm21.pi.20512151.nc", output="FldmAtl.anm.anom.sst.remap.awiesm21.pi.20512151.nc")
map_file="Atl.anm.anom.sst.remap.awiesm21.pi.20512151.nc"
index_file="FldmAtl.anm.anom.sst.remap.awiesm21.pi.20512151.nc"
index_txt='FldmAtl.anm.anom.sst.remap.awiesm21.pi.20512151.txt'
os.system ('cdo showyear ' + index_file)
#dump the field-mean time series to a text file
os.system('cdo -s output ' + index_file + ' > ' + index_txt)
index_data=pd.read_csv(index_txt,header=None).squeeze()
#np.shape(index_data)
#create map data
dataset=Dataset(map_file)
map_data=dataset.variables['sst'][:].squeeze()
model_lon=dataset.variables['lon'][:].squeeze()
model_lat=dataset.variables['lat'][:].squeeze()
#np.shape(map_data)
map_data_normalized=np.zeros(np.shape(map_data))
for x in range(np.size(model_lon)):
    for y in range(np.size(model_lat)):
        for t in range(np.shape(map_data)[0]):
            map_data_normalized[t,y,x]=map_data[t,y,x]-index_data[t]
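#equivalent vectorized form of the triple loop above (a sketch; the *_vec name is new,
#numpy broadcasting replaces the per-point subtraction and masked values become nan):
map_data_normalized_vec = np.ma.filled(map_data - index_data.values[:, None, None], np.nan)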
#creates a copy of the original NetCDF file to which the modified data will be written
map_file_export="gg.Atl.anm.anom.sst.remap.awiesm21.pi.20512151.nc"
os.system("cp "+map_file+" "+map_file_export)
#write map_data_normalized to netcdf file
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#ncfile.variables['sst'][:]=np.zeros(np.shape(map_data_normalized)) #put zeros to verify that the data actually has been written
ncfile.variables['sst'][:]=map_data_normalized[:,:,:] #"real" data write
ncfile.close()
2051 2052 2053 2054 2055 2056 2057 2058 2059 2060 2061 2062 2063 2064 2065 2066 2067 2068 2069 2070 2071 2072 2073 2074 2075 2076 2077 2078 2079 2080 2081 2082 2083 2084 2085 2086 2087 2088 2089 2090 2091 2092 2093 2094 2095 2096 2097 2098 2099 2100 2101 2102 2103 2104 2105 2106 2107 2108 2109 2110 2111 2112 2113 2114 2115 2116 2117 2118 2119 2120 2121 2122 2123 2124 2125 2126 2127 2128 2129 2130 2131 2132 2133 2134 2135 2136 2137 2138 2139 2140 2141 2142 2143 2144 2145 2146 2147 2148 2149 2150 2151 cdo showyear: Processed 1 variable over 101 timesteps [0.01s 36MB].
/tmp/ipykernel_11322/1357512860.py:27: UserWarning: Warning: converting a masked element to nan. map_data_normalized[t,y,x]=map_data[t,y,x]-index_data[t]
#Perform EOF on SST PI annual anomalies
fnc = 'gg.Atl.anm.anom.sst.remap.awiesm21.pi.20512151.nc'
ds1_sst = xr.open_dataset(fnc)
#print(ds1_sst)
#have a short look at the data
#import hvplot.xarray
#sic=ds1_sic.seaice_conc
#sic.isel(time=0).hvplot()
# ----- Period and area setting ------
ystr = 2051
yend = 2151
latS = -80.
latN = 80.
lonW = -75.
lonE = 15
neof = 6
anm=ds1_sst.sst
# -- EOF --
anmnD = anm.sortby("lat", ascending=True)
clat = anmnD['lat'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))  #sqrt(cos(lat)) weights, so each point contributes to the covariance in proportion to its grid-cell area
wanm = anmnD * clat
wanm.attrs = anmnD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['lon']
if ((lonW < 0) or (lonE < 0)) and (lon.values.min() > -1):
    #map [0,360) longitudes to [-180,180), e.g. 285 becomes -75, so the lon slice below works
    wanm=wanm.assign_coords(lon=( (lon + 180) % 360 - 180) )
    wanm = wanm.sortby("lon")
    print(' change lon ')
#print(wanm)
xw = wanm.sel(lat=slice(latS, latN), lon=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'lat', 'lon')
#print(xw_anm)
eofs_sst = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs_sst = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
pcs_norm_sst = pcs_sst / pcs_sst.std(dim='time')
pcs_sst['time']=anmnD['time']
pcs_norm_sst['time']=anmnD['time']
pcs_sst.attrs['varianceFraction'] = eofs_sst.attrs['varianceFraction']
pcs_norm_sst.attrs['varianceFraction'] = eofs_sst.attrs['varianceFraction']
#print(pcs_norm_sst)
evec_sst = xr.DataArray(data=eofs_sst, dims=('eof','lat','lon'),
coords = {'eof': np.arange(0,neof), 'lat': xw['lat'], 'lon': xw['lon']} )
#print(evec_sst)
#export the PCs in a format usable for CCA
df=pcs_norm_sst.to_dataframe().unstack().transpose()
df.to_csv('RezEOF.PC.sst.awiesm21.PI20512151.6PCs.normalized.txt')
#reconstruct the initial SST field based on the first 6 EOFs
map_file_export="REZeof.EOFmaps.sst.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1xPC1 " + fnc + " -chname,sst,EOF2xPC2 " + fnc + " -chname,sst,EOF3xPC3 " + fnc + " -chname,sst,EOF4xPC4 " + fnc + " -chname,sst,EOF5xPC5 " + fnc + " -chname,sst,EOF6xPC6 " + fnc + " " + map_file_export)
map_file_export="TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc"
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1 -seltimestep,1 " + fnc + " -chname,sst,EOF2 -seltimestep,1 " + fnc + " -chname,sst,EOF3 -seltimestep,1 " + fnc + " -chname,sst,EOF4 -seltimestep,1 " + fnc + " -chname,sst,EOF5 -seltimestep,1 " + fnc + " -chname,sst,EOF6 -seltimestep,1 " + fnc + " " + map_file_export)
#os.system("rm " + map_file_export + "; cdo merge -chname,seaice_conc,EOF1 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF2 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF3 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF4 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF5 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF6 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF7 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF8 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF9 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF10 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF11 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF12 -seltimestep,1 " + fnc + " " + map_file_export)
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
map_data_normalized=ncfile.variables['EOF1'][:]
#print(np.shape(map_data_normalized))
evec_export=evec_sst.reindex(lat=evec_sst.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if I do not reindex them here
#sdev=pcs.std(dim="time")
#vec_export=[]
#evec_export = xr.concat(evec_export, range(8)).rename({"concat_dim": "EOF"})
#ds_concat
#print(np.shape(evec))
#print(np.shape(evec_export))
ncfile.variables['EOF1'][:]=evec_export[0,:,:] #write eof1 to dedicated variable
ncfile.variables['EOF2'][:]=evec_export[1,:,:] #write eof2 to dedicated variable
ncfile.variables['EOF3'][:]=evec_export[2,:,:] #write eof3 to dedicated variable
ncfile.variables['EOF4'][:]=evec_export[3,:,:] #write eof4 to dedicated variable
ncfile.variables['EOF5'][:]=evec_export[4,:,:] #write eof5 to dedicated variable
ncfile.variables['EOF6'][:]=evec_export[5,:,:] #write eof6 to dedicated variable
ncfile.close()
# Multiply each EOF with its associated PC and write the products to a netcdf file
eof_pcs2=[]
for i in range(6):
    #print(i)
    eof_pcs2.append(pcs_sst[i,:]*evec_export[i,:,:])
map_file_export="awREzEOF.sst.EOFS_x_PCs.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,sst,EOF1xPC1 " + fnc + " -chname,sst,EOF2xPC2 " + fnc + " -chname,sst,EOF3xPC3 " + fnc + " -chname,sst,EOF4xPC4 " + fnc + " -chname,sst,EOF5xPC5 " + fnc + " -chname,sst,EOF6xPC6 " + fnc + " " + map_file_export)
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
for i in range(6):
    varname="EOF"+str(i+1)+"xPC"+str(i+1)
    eof_pcs_export=eof_pcs2[i].reindex(lat=eof_pcs2[i].lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
    #print(np.shape(eof_pcs_export))
    ncfile.variables[varname][:]=eof_pcs_export #write eof to dedicated variable
ncfile.close()
#print(np.shape(eof_pcs))
#print(type(eof_pcs[0]))
#reconstruct the initial field based on the first 6 EOFs
eof_pcs_sum2=eof_pcs2[0]+eof_pcs2[1]+eof_pcs2[2]+eof_pcs2[3]+eof_pcs2[4]+eof_pcs2[5]
map_file_export="g.sst.fesom.PIfer.20512151_rec6EOF.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo chname,sst,sst " + fnc + " " + map_file_export)
#write sum of products of eofs and pcs to netcdf file
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
eof_pcs_sum_export=eof_pcs_sum2.reindex(lat=eof_pcs_sum2.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
ncfile.variables["sst"][:]=eof_pcs_sum_export #write eof to dedicated variable
ncfile.close()
#remove the files created that are not of use for the current analyses
os.remove('REZeof.EOFmaps.sst.nc')
os.remove('TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc')
os.remove('awREzEOF.sst.EOFS_x_PCs.nc')
rm: cannot remove 'REZeof.EOFmaps.sst.nc': No such file or directory
cdo(1) chname: Process started cdo(2) chname: Process started cdo(3) chname: Process started cdo(4) chname: Process started cdo(5) chname: Process started cdo(6) chname: Process started cdo(1) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(2) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(3) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(4) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(5) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(6) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo merge: Processed 8823360 values from 6 variables over 606 timesteps [3.09s 93MB].
rm: cannot remove 'TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc': No such file or directory
cdo(1) chname: Process started cdo(2) seltimestep: Process started cdo(3) chname: Process started cdo(4) seltimestep: Process started cdo(5) chname: Process started cdo(6) seltimestep: Process started cdo(7) chname: Process started cdo(8) seltimestep: Process started cdo(9) chname: Process started cdo(10) seltimestep: Process started cdo(11) chname: Process started cdo(12) seltimestep: Process started cdo(2) seltimestep: Processed 14560 values from 1 variable over 2 timesteps. cdo(4) seltimestep: Processed 14560 values from 1 variable over 2 timesteps. cdo(6) seltimestep: Processed 14560 values from 1 variable over 2 timesteps. cdo(10) seltimestep: Processed 14560 values from 1 variable over 2 timesteps. cdo(8) seltimestep: Processed 14560 values from 1 variable over 2 timesteps. cdo(12) seltimestep: Processed 14560 values from 1 variable over 2 timesteps. cdo(1) chname: Processed 14560 values from 1 variable over 1 timestep. cdo(3) chname: Processed 14560 values from 1 variable over 1 timestep. cdo(5) chname: Processed 14560 values from 1 variable over 1 timestep. cdo(7) chname: Processed 14560 values from 1 variable over 1 timestep. cdo(9) chname: Processed 14560 values from 1 variable over 1 timestep. cdo(11) chname: Processed 14560 values from 1 variable over 1 timestep. cdo merge: Processed 87360 values from 6 variables over 6 timesteps [0.12s 52MB].
rm: cannot remove 'awREzEOF.sst.EOFS_x_PCs.nc': No such file or directory
cdo(1) chname: Process started cdo(2) chname: Process started cdo(3) chname: Process started cdo(4) chname: Process started cdo(5) chname: Process started cdo(6) chname: Process started cdo(1) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(2) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(3) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(4) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(5) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo(6) chname: Processed 1470560 values from 1 variable over 101 timesteps. cdo merge: Processed 8823360 values from 6 variables over 606 timesteps [2.98s 92MB].
rm: cannot remove 'g.sst.fesom.PIfer.20512151_rec6EOF.nc': No such file or directory
cdo chname: Processed 1470560 values from 1 variable over 101 timesteps [0.34s 56MB].
#OPEN PI SIC DATA
from cdo import *
cdo = Cdo()
#select last 101 years
cdo.selyear("2051/2151",input="SIC.AWIESM21.preindustrial.20512151.nc", output="sic.remap.awiesm21.pi.20512151.nc")
#calculate anomalies relative to the whole 101y period
cdo.ymonmean(input="sic.remap.awiesm21.pi.20512151.nc", output="annual_cycle_SICPI.nc")
cdo.sub(input="sic.remap.awiesm21.pi.20512151.nc annual_cycle_SICPI.nc", output="anom.sic.remap.awiesm21.pi.20512151.nc")
#calculate annual means
cdo.yearmonmean(input="anom.sic.remap.awiesm21.pi.20512151.nc", output="anm.anom.sic.remap.awiesm21.pi.20512151.nc")
#remove the files created that are not of use for the current analyses
os.remove('sic.remap.awiesm21.pi.20512151.nc')
os.remove('anom.sic.remap.awiesm21.pi.20512151.nc')
Perform EOF on annual PI SIC anomalies
fnc = 'anm.anom.sic.remap.awiesm21.pi.20512151.nc'
ds1_sic = xr.open_dataset(fnc)
#print(ds1_sic)
# ----- Period and area setting ------
ystr = 2051
yend = 2151
latS = -90.
latN = 90.
lonW = 0.
lonE = 360
neof = 6
anm=ds1_sic.a_ice
# -- EOF --
anmnD = anm.sortby("lat", ascending=True)
clat = anmnD['lat'].astype(np.float64)
clat = np.sqrt(np.cos(np.deg2rad(clat)))  #sqrt(cos(lat)) weights, so each point contributes to the covariance in proportion to its grid-cell area
wanm = anmnD * clat
wanm.attrs = anmnD.attrs
wanm.attrs['long_name'] = 'Wgt: ' + wanm.attrs['long_name']
#print(wanm)
lon = wanm['lon']
if ((lonW < 0) or (lonE < 0)) and (lon.values.min() > -1):
    #map [0,360) longitudes to [-180,180), e.g. 285 becomes -75, so the lon slice below works
    wanm=wanm.assign_coords(lon=( (lon + 180) % 360 - 180) )
    wanm = wanm.sortby("lon")
    print(' change lon ')
#print(wanm)
xw = wanm.sel(lat=slice(latS, latN), lon=slice(lonW, lonE))
xw_anm = xw.transpose('time', 'lat', 'lon')
#print(xw_anm)
eofs_sic = eofunc_eofs(xw_anm.data, neofs=neof, meta=True)
pcs_sic = eofunc_pcs(xw_anm.data, npcs=neof, meta=True)
pcs_norm_sic = pcs_sic / pcs_sic.std(dim='time')
pcs_sic['time']=anmnD['time']
pcs_norm_sic['time']=anmnD['time']
pcs_sic.attrs['varianceFraction'] = eofs_sic.attrs['varianceFraction']
pcs_norm_sic.attrs['varianceFraction'] = eofs_sic.attrs['varianceFraction']
#print(pcs_norm_sic)
evec_sic = xr.DataArray(data=eofs_sic, dims=('eof','lat','lon'),
coords = {'eof': np.arange(0,neof), 'lat': xw['lat'], 'lon': xw['lon']} )
#print(evec_sic)
#export the PCs in a format usable for CCA
#unstack/transpose so the column order matches what the CCA step expects
df=pcs_norm_sic.to_dataframe().unstack().transpose()
df.to_csv('RezEOF.sic.Global.awiesm21.P120512151.6PCs.normalized.txt')
#reconstruct the initial SIC field based on the first 6 EOFs
map_file_export="REZeof.EOFmaps.sic.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,a_ice,EOF1xPC1 " + fnc + " -chname,a_ice,EOF2xPC2 " + fnc + " -chname,a_ice,EOF3xPC3 " + fnc + " -chname,a_ice,EOF4xPC4 " + fnc + " -chname,a_ice,EOF5xPC5 " + fnc + " -chname,a_ice,EOF6xPC6 " + fnc + " " + map_file_export)
map_file_export="TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc"
os.system("rm " + map_file_export + "; cdo merge -chname,a_ice,EOF1 -seltimestep,1 " + fnc + " -chname,a_ice,EOF2 -seltimestep,1 " + fnc + " -chname,a_ice,EOF3 -seltimestep,1 " + fnc + " -chname,a_ice,EOF4 -seltimestep,1 " + fnc + " -chname,a_ice,EOF5 -seltimestep,1 " + fnc + " -chname,a_ice,EOF6 -seltimestep,1 " + fnc + " " + map_file_export)
#map_file_export="TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc"
#os.system("rm " + map_file_export + "; cdo merge -chname,seaice_conc,EOF1 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF2 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF3 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF4 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF5 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF6 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF7 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF8 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF9 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF10 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF11 -seltimestep,1 " + fnc + " -chname,seaice_conc,EOF12 -seltimestep,1 " + fnc + " " + map_file_export)
import netCDF4
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
map_data_normalized=ncfile.variables['EOF1'][:]
#print(np.shape(map_data_normalized))
evec_export=evec_sic.reindex(lat=evec_sic.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if I do not reindex them here
#evec_export=evec.reindex(latitude=evec.latitude[::-1])[:,:,:]
ncfile.variables['EOF1'][:]=evec_export[0,:,:] #write eof1 to dedicated variable
ncfile.variables['EOF2'][:]=evec_export[1,:,:] #write eof2 to dedicated variable
ncfile.variables['EOF3'][:]=evec_export[2,:,:] #write eof3 to dedicated variable
ncfile.variables['EOF4'][:]=evec_export[3,:,:] #write eof4 to dedicated variable
ncfile.variables['EOF5'][:]=evec_export[4,:,:] #write eof5 to dedicated variable
ncfile.variables['EOF6'][:]=evec_export[5,:,:] #write eof6 to dedicated variable
ncfile.close()
# Multiply each EOF with its associated PC and write the products to a netcdf file
eof_pcs2=[]
for i in range(6):
    #print(i)
    eof_pcs2.append(pcs_sic[i,:]*evec_export[i,:,:])
map_file_export="awREzEOF.sic.EOFS_x_PCs.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo merge -chname,a_ice,EOF1xPC1 " + fnc + " -chname,a_ice,EOF2xPC2 " + fnc + " -chname,a_ice,EOF3xPC3 " + fnc + " -chname,a_ice,EOF4xPC4 " + fnc + " -chname,a_ice,EOF5xPC5 " + fnc + " -chname,a_ice,EOF6xPC6 " + fnc + " " + map_file_export)
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
for i in range(6):
    varname="EOF"+str(i+1)+"xPC"+str(i+1)
    eof_pcs_export=eof_pcs2[i].reindex(lat=eof_pcs2[i].lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
    #print(np.shape(eof_pcs_export))
    ncfile.variables[varname][:]=eof_pcs_export #write eof to dedicated variable
ncfile.close()
#print(np.shape(eof_pcs))
#print(type(eof_pcs[0]))
#reconstruct the initial field based on the first 6 EOFs
eof_pcs_sum2=eof_pcs2[0]+eof_pcs2[1]+eof_pcs2[2]+eof_pcs2[3]+eof_pcs2[4]+eof_pcs2[5]
map_file_export="anm.anom.remap.sic.Global.PI20512151_rec6EOF.nc"
#if the file exists, we need to delete it for cdo merge to work - for simplicity, we make a quick-and-dirty rm without actually testing for presence of the file
os.system("rm " + map_file_export + "; cdo chname,a_ice,sic " + fnc + " " + map_file_export)
#write sum of products of eofs and pcs to netcdf file
ncfile=netCDF4.Dataset(map_file_export,mode='r+')
#map_data_normalized=ncfile.variables['EOF1xPC1'][:]
#print(np.shape(map_data_normalized))
eof_pcs_sum_export=eof_pcs_sum2.reindex(lat=eof_pcs_sum2.lat[::-1])[:,:,:]
#eof_pcs_sum_export=eof_pcs_sum_export.reindex(lat=eof_pcs_sum_export.lat[::-1])[:,:,:] #somehow the latitudes are inverted in netcdf if we do not reindex them here
ncfile.variables["sic"][:]=eof_pcs_sum_export #write eof to dedicated variable
ncfile.close()
#remove the files created that are not of use for the current analyses
os.remove('REZeof.EOFmaps.sic.nc')
os.remove('awREzEOF.sic.EOFS_x_PCs.nc')
os.remove('TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc')
rm: cannot remove 'REZeof.EOFmaps.sic.nc': No such file or directory
cdo(1) chname: Process started cdo(2) chname: Process started cdo(3) chname: Process started cdo(4) chname: Process started cdo(5) chname: Process started cdo(6) chname: Process started cdo(1) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(2) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(3) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(4) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(5) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(6) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo merge: Processed 39268800 values from 6 variables over 606 timesteps [4.10s 123MB].
rm: cannot remove 'TEST1.REZeof.EOFmaps.sic.obs.1950.2017.nc': No such file or directory
cdo(1) chname: Process started cdo(2) seltimestep: Process started cdo(3) chname: Process started cdo(4) seltimestep: Process started cdo(5) chname: Process started cdo(6) seltimestep: Process started cdo(7) chname: Process started cdo(8) seltimestep: Process started cdo(9) chname: Process started cdo(10) seltimestep: Process started cdo(11) chname: Process started cdo(12) seltimestep: Process started cdo(2) seltimestep: Processed 64800 values from 1 variable over 2 timesteps. cdo(4) seltimestep: Processed 64800 values from 1 variable over 2 timesteps. cdo(6) seltimestep: Processed 64800 values from 1 variable over 2 timesteps. cdo(8) seltimestep: Processed 64800 values from 1 variable over 2 timesteps. cdo(10) seltimestep: Processed 64800 values from 1 variable over 2 timesteps. cdo(12) seltimestep: Processed 64800 values from 1 variable over 2 timesteps. cdo(1) chname: Processed 64800 values from 1 variable over 1 timestep. cdo(3) chname: Processed 64800 values from 1 variable over 1 timestep. cdo(5) chname: Processed 64800 values from 1 variable over 1 timestep. cdo(7) chname: Processed 64800 values from 1 variable over 1 timestep. cdo(9) chname: Processed 64800 values from 1 variable over 1 timestep. cdo(11) chname: Processed 64800 values from 1 variable over 1 timestep. cdo merge: Processed 388800 values from 6 variables over 6 timesteps [0.19s 56MB].
rm: cannot remove 'awREzEOF.sic.EOFS_x_PCs.nc': No such file or directory
cdo(1) chname: Process started cdo(2) chname: Process started cdo(3) chname: Process started cdo(4) chname: Process started cdo(5) chname: Process started cdo(6) chname: Process started cdo(1) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(2) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(3) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(4) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(5) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo(6) chname: Processed 6544800 values from 1 variable over 101 timesteps. cdo merge: Processed 39268800 values from 6 variables over 606 timesteps [4.26s 123MB].
rm: cannot remove 'anm.anom.remap.sic.Global.PI20512151_rec6EOF.nc': No such file or directory
cdo chname: Processed 6544800 values from 1 variable over 101 timesteps [1.26s 66MB].
# open the two datasets
field1 = 'g.sst.fesom.PIfer.20512151_rec6EOF.nc'
ds1 = xr.open_dataset(field1)
#print(ds1)
field2 = 'anm.anom.remap.sic.Global.PI20512151_rec6EOF.nc'
ds2= xr.open_dataset(field2)
#print(ds2)
PCsV1=pd.read_csv('RezEOF.PC.sst.awiesm21.PI20512151.6PCs.normalized.txt', sep=',')
PCsV1_xr=PCsV1.to_xarray().to_array()
PCsSST=PCsV1[['0','1','2','3','4','5']]
PCsV2=pd.read_csv('RezEOF.sic.Global.awiesm21.P120512151.6PCs.normalized.txt', sep=',')
PCsV2_xr=PCsV2.to_xarray().to_array()
PCsSIC=PCsV2[['0','1','2','3','4','5']]
#X2
#Normalize the PCs (if necessary) and perform CCA between the 6 PCs associated with each field
X1_mc = (PCsSST-PCsSST.mean())/(PCsSST.std())
#X1_mc.head()
X2_mc = (PCsSIC-PCsSIC.mean())/(PCsSIC.std())
#X2_mc.head()
ca = CCA(n_components=6)
ca.fit(X1_mc, X2_mc)
X_c, Y_c = ca.transform(X1_mc, X2_mc)
#export the pairs of maximally correlated PCs obtained from CCA
cc_res = pd.DataFrame({"CCX_1":X_c[:, 0],
"CCY_1":Y_c[:, 0],
"CCX_2":X_c[:, 1],
"CCY_2":Y_c[:, 1],
"CCX_3":X_c[:, 2],
"CCY_3":Y_c[:, 2],
# "CCX_4":X_c[:, 3],
# "CCY_4":Y_c[:, 3],
# "CCX_5":X_c[:, 4],
# "CCY_5":Y_c[:, 4],
# "CCX_6":X_c[:, 5],
# "CCY_6":Y_c[:, 5],
#"Species":df.species.tolist(),
})
cc_res.head()
#loop over the component pairs and print their canonical correlations
CCAresultPccorr=[]
for p in range(np.shape(X_c)[1]):
    CCAresultPccorr.append(np.corrcoef(X_c[:, p], Y_c[:, p])[0, 1])  #store the canonical correlation
    print(np.corrcoef(X_c[:, p], Y_c[:, p]))
[[1. 0.92757243] [0.92757243 1. ]]
[[1. 0.76206922] [0.76206922 1. ]]
[[1. 0.61779336] [0.61779336 1. ]]
[[1. 0.25836666] [0.25836666 1. ]]
[[1. 0.16442244] [0.16442244 1. ]]
[[1. 0.05003127] [0.05003127 1. ]]
#select the individual pcs
rezcca=cc_res.to_xarray()
PC1sst=rezcca['CCX_1']
PC1sic=rezcca['CCY_1']
PC2sst=rezcca['CCX_2']
PC2sic=rezcca['CCY_2']
PC3sst=rezcca['CCX_3']
PC3sic=rezcca['CCY_3']
#PC4sst=rezcca['CCX_4']
#PC4sic=rezcca['CCY_4']
#reindex the time axis in the PCs
PC1sst= PC1sst.rename({"index": "time"})
PC2sst= PC2sst.rename({"index": "time"})
PC3sst= PC3sst.rename({"index": "time"})
#PC4sst= PC4sst.rename({"index": "time"})
PC1sic= PC1sic.rename({"index": "time"})
PC2sic= PC2sic.rename({"index": "time"})
PC3sic= PC3sic.rename({"index": "time"})
#PC4sic= PC4sic.rename({"index": "time"})
PC1sst['time'] = ds1.time
PC2sst['time'] = ds1.time
PC3sst['time'] = ds1.time
#PC4sst['time'] = ds1.time
PC1sic['time'] = ds2.time
PC2sic['time'] = ds2.time
PC3sic['time'] = ds2.time
#PC4sic['time'] = ds2.time
#perform regression analysis in order to obtain the SST/SIC spatial structures
regSST1 = xr.cov(PC1sst.load(), ds1.sst.load(), dim="time")/PC1sst.var(dim='time',skipna=True).values
regSST2 = xr.cov(PC2sst.load(), ds1.sst.load(), dim="time")/PC2sst.var(dim='time',skipna=True).values
regSST3 = xr.cov(PC3sst.load(), ds1.sst.load(), dim="time")/PC3sst.var(dim='time',skipna=True).values
#regSST4 = xr.cov(PC4sst.load(), ds1.sst.load(), dim="time")/PC4sst.var(dim='time',skipna=True).values
regSIC1 = xr.cov(PC1sic.load(), ds2.sic.load(), dim="time")/PC1sic.var(dim='time',skipna=True).values
regSIC2 = xr.cov(PC2sic.load(), ds2.sic.load(), dim="time")/PC2sic.var(dim='time',skipna=True).values
regSIC3 = xr.cov(PC3sic.load(), ds2.sic.load(), dim="time")/PC3sic.var(dim='time',skipna=True).values
#regSIC4 = xr.cov(PC4sic.load(), ds2.sic.load(), dim="time")/PC4sic.var(dim='time',skipna=True).values
#perform correlations in order to obtain the percentage of variance explained as the average of r squared
cor_SST1 = xr.corr(PC1sst.load(), ds1.sst.load(),dim="time")
r1_squared = cor_SST1*cor_SST1
var_sst1=(r1_squared.mean()*100)
var_sst1_round=var_sst1.round(decimals=1)
print(var_sst1_round)
cor_SST2 = xr.corr(PC2sst.load(), ds1.sst.load(), dim="time")
r2_squared = cor_SST2*cor_SST2
var_sst2=r2_squared.mean()*100
var_sst2_round=var_sst2.round(decimals=1)
print(var_sst2_round)
cor_SST3 = xr.corr(PC3sst.load(), ds1.sst.load(), dim="time")
r3_squared = cor_SST3*cor_SST3
var_sst3=r3_squared.mean()*100
var_sst3_round=var_sst3.round(decimals=1)
print(var_sst3_round)
cor_SIC1 = xr.corr(PC1sic.load(), ds2.sic.load(),dim="time")
r1_squared_sic = cor_SIC1*cor_SIC1
var_sic1=r1_squared_sic.mean()*100
var_sic1_round=var_sic1.round(decimals=1)
print(var_sic1_round)
cor_SIC2 = xr.corr(PC2sic.load(), ds2.sic.load(), dim="time")
r2_squared_sic = cor_SIC2*cor_SIC2
var_sic2=r2_squared_sic.mean()*100
var_sic2_round=var_sic2.round(decimals=1)
print(var_sic2_round)
cor_SIC3 = xr.corr(PC3sic.load(), ds2.sic.load(), dim="time")
r3_squared_sic = cor_SIC3*cor_SIC3
var_sic3=r3_squared_sic.mean()*100
var_sic3_round=var_sic3.round(decimals=2)
print(var_sic3_round)
<xarray.DataArray ()> array(16.9)
<xarray.DataArray ()> array(0.)
<xarray.DataArray ()> array(0.)
<xarray.DataArray ()> array(24.6)
<xarray.DataArray ()> array(0.6)
<xarray.DataArray ()> array(0.02)
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
import cartopy.crs as ccrs
PC1sstok=PC1sst*-1
PC1sicok=PC1sic*-1
# Plotting is done individually; the panels are plotted one by one and then assembled into the final figure.
fig, ax = plt.subplots(figsize=(8, 5),dpi=300, subplot_kw={"projection":ccrs.Robinson(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=16)
gv.set_map_boundary(ax, [-75, 15], [-80, 80], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = False
gl.left_labels = True
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, -45, 0, 45, 180])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-0.4
level_max=0.4
stepsize=0.04
levels=np.arange(level_min-stepsize,level_max+2*stepsize,stepsize)
#cmap = sns.color_palette("vlag", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap = 'seismic'
cf=ax.contourf(ds1['lon'], ds1['lat'], regSST1, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+0.1,level_max,0.2), drawedges=True, orientation='vertical',
label=r"(°C)",pad=0.025,shrink=0.39,aspect=30,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA1 SST ',
lefttitlefontsize=12,
righttitle=f' {var_sst1_round.values} %',
righttitlefontsize=12,
xlabel="Year",
ylabel="",
labelfontsize=21 )
plt.savefig("CCAsstSIC.AWIESM21.PI.SST.global.sic.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("CCAsstSIC.AWIESM21.PI.SST.globalsic.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
plt.tight_layout()
plt.show()
fig1, ax= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.NorthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
#ax.set_extent([0, 360, 50, 90], crs=ccrs.PlateCarree())
ax.coastlines(resolution='110m')
ax.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax)
gvutil.add_major_minor_ticks(ax, labelsize=20)
gv.set_map_boundary(ax, [-180, 180], [55, 90], south_pad=1)
gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([ 60])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize,level_max+2*stepsize,stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax.contourf(ds2['lon'], ds2['lat'], regSIC1*100, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=26,)
gvutil.set_titles_and_labels(ax,
lefttitle='CCA1 SIC (Arctic) ',
lefttitlefontsize=13,
righttitle=f' {var_sic1_round.values} %',
righttitlefontsize=13,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21)
plt.savefig("CCAsstSIC.AWIESM21.PC1.GLOBAL.SIC1b.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("CCAsstSIC.AWIESM21.PC1.GLOBAL.SIC1b.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
plt.tight_layout()
plt.show()
fig3, ax3= plt.subplots(figsize=(8, 4), dpi=300, subplot_kw={"projection":ccrs.SouthPolarStereo(central_longitude=-35),})
#NorthPolarStereo(central_longitude=-30.0,latitude=40, globe=None)})
ax3.set_extent([0, 360, -90, -55], crs=ccrs.PlateCarree())
ax3.coastlines(resolution='110m')
ax3.add_feature(cfeature.LAND, facecolor='silver', zorder=3)
ax3.add_feature(cfeature.COASTLINE, linewidth=0.2, zorder=3)
ax3.add_feature(cfeature.LAKES,
edgecolor='black',
linewidth=0.2,
facecolor='white',
zorder=4)
gvutil.add_lat_lon_ticklabels(ax3)
gvutil.add_major_minor_ticks(ax3, labelsize=20)
#gv.set_map_boundary(ax3, [-180, 180], [-90, -50], south_pad=1)
gl = ax3.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=1, color='gray', alpha=0.3, linestyle='--')
gl.top_labels = True
gl.left_labels = False
gl.right_labels = False
gl.xlines = True
gl.xlocator = mticker.FixedLocator([-180, 90, 0, -90, 180])
gl.ylocator = mticker.FixedLocator([-60])
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 6.5,'color': 'black', 'weight': 'bold'}
gl.ylabel_style = {'size': 6.5,'color': 'black','weight': 'bold'}
level_min=-5
level_max=5
stepsize=0.6
levels=np.arange(level_min-stepsize,level_max+2*stepsize,stepsize)
#cmap = sns.color_palette("coolwarm", as_cmap=True)
#cmap = gvcmaps.BlueDarkRed18
#newcmp = gvcmaps.BlueDarkRed18
#cmap=sns.diverging_palette(660, 30, s=60, as_cmap=True)
cmap='seismic'
cf=ax3.contourf(ds2['lon'], ds2['lat'], regSIC1*100, transform=ccrs.PlateCarree(),
cmap=cmap, levels=levels,
vmin=level_min-.05, vmax=level_max+.05,
extend='both')
cax=plt.colorbar(cf, ticks=np.arange(level_min+1,level_max,2), drawedges=True, orientation='vertical',
label=r"(%)",pad=0.025,shrink=0.55,aspect=20,)
gvutil.set_titles_and_labels(ax3,
lefttitle='CCA1 SIC (Antarctic)',
lefttitlefontsize=15,
righttitle=f' {var_sic1_round.values} %',
righttitlefontsize=15,
xlabel="Year",
ylabel="sdaasd",
labelfontsize=21)
plt.savefig("CCAsstSIC.AWIESM21.18502014.PC1.SIC1a.pdf",format="pdf", bbox_inches = 'tight',dpi=600)
plt.savefig("CCAsstSIC.AWIESM21.18502014.PC1.SIC1a.jpg",format="jpg", bbox_inches = 'tight',dpi=600)
plt.tight_layout()
plt.show()
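The three panels above are written to separate files and assembled externally. If a single composite figure is preferred, matplotlib allows mixed projections within one figure. A minimal sketch reusing regSST1, regSIC1, ds1 and ds2 from the cells above; figure size and colormap here are illustrative:
#hedged sketch: one figure holding the global, Arctic and Antarctic panels
fig = plt.figure(figsize=(14, 5), dpi=300)
ax_a = fig.add_subplot(1, 3, 1, projection=ccrs.Robinson(central_longitude=-35))
ax_b = fig.add_subplot(1, 3, 2, projection=ccrs.NorthPolarStereo(central_longitude=-35))
ax_c = fig.add_subplot(1, 3, 3, projection=ccrs.SouthPolarStereo(central_longitude=-35))
ax_b.set_extent([0, 360, 55, 90], crs=ccrs.PlateCarree())
ax_c.set_extent([0, 360, -90, -55], crs=ccrs.PlateCarree())
ax_a.contourf(ds1['lon'], ds1['lat'], regSST1, transform=ccrs.PlateCarree(), cmap='seismic', extend='both')
for a in (ax_b, ax_c):
    a.contourf(ds2['lon'], ds2['lat'], regSIC1*100, transform=ccrs.PlateCarree(), cmap='seismic', extend='both')
for a in (ax_a, ax_b, ax_c):
    a.coastlines(resolution='110m')
plt.tight_layout()
plt.show()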
plt.figure(figsize=(12, 6))
ax = plt.gca()
# Plot reference line
ax.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax,
                             lefttitle='(PC1) PI year 51 - 151',
                             lefttitlefontsize=29,
                             righttitle='r = 0.9',  #entered by hand; see the sketch after this cell for computing it
                             righttitlefontsize=29,
                             xlabel="",
                             ylabel="Standard deviation",
                             labelfontsize=29)
gv.add_major_minor_ticks(ax,
x_minor_per_major=1,
y_minor_per_major=5,
labelsize=23)
# Plot data
ax.plot(ds1.time, PC1sst, color='black', linewidth=3, label='SST')
ax.plot(ds1.time, PC1sic, marker='o', markerfacecolor='red', markersize=2.8, color='red', linewidth=1.75,label='SIC')
#ax.legend(prop={"size":6})
ax.fill_between(ds1.time,PC1sic, where=PC1sic > 0, color='gold')
ax.fill_between(ds1.time,PC1sic, where=PC1sic < 0, color='cyan')
plt.legend(prop={"size":23})
plt.savefig("CCAsstSIC.AWIESM21.PI.PC1.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("CCAsstSIC.AWIESM21.PI.PC1.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)
plt.tight_layout()
plt.show()
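The r = 0.9 in the title above is typed in by hand; it can be computed directly from the two canonical series. A minimal sketch, assuming PC1sst and PC1sic carry the shared time coordinate set earlier:
r_pc1 = float(xr.corr(PC1sst, PC1sic, dim="time"))
print(f"r = {r_pc1:.2f}")  #should reproduce the hardcoded value in the title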
#open the AMOC index at 26.5N produced with the AWI-ESM2.1 PI simulation
PCT1=pd.read_csv('AMOC_idx_26.5N.AWIESM21.PI.20512151.csv',sep=',')
AMOCindexAWIES21=PCT1.to_xarray().to_array()
AMOCAWI=AMOCindexAWIES21[1]  #the second variable holds the AMOC values; the first is the CSV index column
AMOCAWImean = AMOCAWI-AMOCAWI.mean()  #anomaly relative to the full-period mean
#Plot PC1 with AMOC Index
plt.figure(figsize=(12, 6))
ax1 = plt.gca()
ax2 = ax1.twinx()
#ax2.set_ylabel('Y2-axis')
# Plot reference line
ax1.axhline(y=0, color='grey', linewidth=0.75)
# Use geocat.viz.util convenience function to set axes tick values
gvutil.set_titles_and_labels(ax1,
lefttitle='(PC1) PI run',
lefttitlefontsize=29,
righttitle=f'',
righttitlefontsize=29,
xlabel="Year",
ylabel="Standard deviation",
labelfontsize=23)
gv.set_axes_limits_and_ticks(
ax1,
xlim=(0, 100),
ylim=(-4.2, 4.2))
gvutil.set_titles_and_labels(ax2,
lefttitle=f'',
lefttitlefontsize=29,
righttitle=f'',
righttitlefontsize=29,
xlabel="Year",
ylabel="AMOC anomaly (Sv)",
labelfontsize=23)
gv.set_axes_limits_and_ticks(
ax2,
xlim=(0, 100),
ylim=(-4.2, 4.2))
gv.add_major_minor_ticks(ax1,
x_minor_per_major=1,
y_minor_per_major=1,
labelsize=23)
gv.add_major_minor_ticks(ax2,
x_minor_per_major=1,
y_minor_per_major=1,
labelsize=23)
#plt.ylim([-3, 2])
# Plot data
#plt.legend()
#plt.legend(ax1.plot,prop={"size":23})
ax1.plot(PC1sst, color='black', linewidth=0.5,)
ax1.plot(PC1sst.rolling(time=11, center=True).mean(), color='black', linewidth=5, label='SST')
ax1.plot(PC1sic, color='red', linewidth=0.5)
ax1.plot(PC1sic.rolling(time=11, center=True).mean(), color='red', linewidth=5, label='SIC')
ax2.plot(AMOCAWImean, marker='o', markerfacecolor='blue', markersize=0.8, color='blue', linewidth=0.5,label='')
#the pandas index became a dimension named 'index' after to_xarray, hence rolling(index=11)
ax2.plot(AMOCAWImean.rolling(index=11, center=True).mean(), marker='o', markerfacecolor='blue', markersize=2.8, color='blue', linewidth=5,label='AMOC 26.5N')
#plt.ylim((25,250))
ax1.legend(loc=3,prop={"size":15})
ax2.legend(prop={"size":15})
plt.savefig("CCAsstSIC.AWIESM21.PI.PC1.V5.pdf",format="pdf", bbox_inches = 'tight', dpi= 600)
plt.savefig("CCAsstSIC.AWIESM21.PI.PC1.V5.jpg",format="jpg", bbox_inches = 'tight', dpi= 600)