import numpy as np
import pandas as pd
import math as m
import matplotlib
import statistics as stats
import scipy.stats as scistats
import matplotlib.pyplot as plt
import matplotlib.colors as color
import os
import sys
import netCDF4 as nc
import statsmodels.api as sm
import shapely
import statistics
import warnings
warnings.filterwarnings("ignore")
from matplotlib import cm as cm1
from scipy import optimize as opt
from pandas import read_table
from netCDF4 import Dataset as ds
from netCDF4 import num2date
from datetime import datetime, date, timedelta
from matplotlib.dates import date2num, num2date
from matplotlib.colors import LinearSegmentedColormap
from mpl_toolkits.basemap import Basemap, cm, shiftgrid, addcyclic
from matplotlib.patches import Polygon
from scipy.ndimage import gaussian_filter
from sklearn.linear_model import LinearRegression
# Style dictionary to standardize the plotting scheme
# across different Python scripts
styledict = {'xtick.labelsize':21,
'xtick.major.size':9,
'xtick.major.width':1,
'ytick.labelsize':21,
'ytick.major.size':9,
'ytick.major.width':1,
'legend.framealpha':0.0,
'legend.fontsize':15,
'axes.labelsize':22,
'axes.titlesize':22,
'axes.linewidth':2,
'figure.figsize':(12,8),
'savefig.format':'jpg'}
plt.rcParams.update(**styledict)
# define skewness function
def skewness(data):
    mu = stats.mean(data)
    sigmaInv = 1/stats.stdev(data)
    skew = ((data - mu)*sigmaInv)**3
    return stats.mean(skew)
# define kurtosis function
def kurtosis(data):
    mu = stats.mean(data)
    sigmaInv = 1/stats.stdev(data)
    kurt = ((data - mu)*sigmaInv)**4
    return stats.mean(kurt) - 3
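Since scipy.stats is already imported as scistats, a quick hedged cross-check of these two functions can be added; note that scipy's skew and kurtosis use the biased (1/n) standard deviation by default, while statistics.stdev is the unbiased (1/(n-1)) estimate, so small differences are expected for a sample of this size. The helper name check_moments is introduced here for illustration.
# optional sanity check of the two functions above against scipy.stats
def check_moments(data):
    # scipy uses the biased (1/n) standard deviation by default, so the
    # values differ slightly from the statistics-based functions above
    print("skewness:", skewness(data), "vs scipy:", scistats.skew(data))
    print("kurtosis:", kurtosis(data), "vs scipy:",
          scistats.kurtosis(data, fisher=True))
# e.g. check_moments(temp2018) once the data are loaded below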
# go to your working directory
os.chdir("/Users/sshen/climstats")
# read the data file from the folder named "data"
NOAAtemp = read_table(\
"data/aravg.ann.land_ocean.90S.90N.v4.0.1.201907.txt",
header = None, delimiter = "\s+")
# We need to specify the name of the file, whether there is a header row
# to skip (here there is none), and the delimiter for the text file,
# which in this case is a regular expression matching any run of
# consecutive spaces. The data set 'NOAAtemp' should be 140 rows by 6 columns.
# check the data matrix dimension
print("The dimension of our data table is:", NOAAtemp.shape)
# For the next couple of graphs, we'll need the information from the
# first and second columns up to the data from the 139th row
x = np.array(NOAAtemp.loc[0:138, 0])
print(x.shape)
y = np.array(NOAAtemp.loc[0:138, 1])
print(y.shape)
The dimension of our data table is: (140, 6)
(139,)
(139,)
# plot figure
plt.plot(x, y, 'brown', marker = 'o', linewidth = 3);
plt.title("Global Land-Ocean Average Annual Mean \n \
Surface Temperature Anomalies: 1880 - 2018", pad = 15)
plt.xlabel("Year", labelpad = 20)
plt.ylabel("Temperature Anomaly [$\degree$C]",
labelpad = 20)
# display on screen
plt.show()
# Plot Fig. 1.2: Staircase chart of data
#keyword arguments
kwargs = {'drawstyle' : 'steps'}
plt.plot(x, y, 'black', linewidth = 3, **kwargs);
plt.title("Global Land-Ocean Average Annual Mean \n \
Surface Temperature Anomalies: 1880 - 2018", pad = 15)
plt.xlabel("Year", labelpad = 20)
plt.ylabel("Temperature Anomaly [$\degree$C]",
labelpad = 20)
plt.show()
# Plot Fig. 1.3: Color bar chart of data
# define an array z of ones with the same length as y
z = np.ones(y.size)
z[0] = -99
z[1] = -99
# compute the 5-point moving average
for i in range(2, z.size - 2):
    z[i] = np.mean(y[i-2:i+3])
z[z.size - 2] = -99
z[z.size - 1] = -99
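The loop above is easy to read; as a hedged alternative, the same centered 5-point moving average can be computed in one vectorized step with np.convolve (z_alt is a name introduced here for illustration).
# vectorized 5-point moving average; endpoints keep the -99 sentinel
z_alt = np.full(y.size, -99.0)
z_alt[2:-2] = np.convolve(y, np.ones(5)/5, mode='valid')
# z_alt[2:-2] should agree with z[2:-2] from the loop above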
# define variables based on range
y1 = [y[i] for i in range(y.size) if y[i] >= 0]
x1 = [x[i] for i in range(y.size) if y[i] >= 0]
y2 = [y[i] for i in range(y.size) if y[i] < 0]
x2 = [x[i] for i in range(y.size) if y[i] < 0]
y3 = z[2:x.size-2]
x3 = x[2:x.size-2]
# plot the moving average data
plt.plot(x3, y3, 'black', linewidth = 3)
plt.title("Global Land-Ocean Average Annual Mean \n \
Surface Temperature Anomalies: 1880 - 2018", pad = 20)
# create bar chart
plt.bar(x1, y1, color = 'red');
plt.bar(x2, y2, color = 'blue');
plt.xlabel("Year", labelpad = 20)
plt.ylabel("Temperature Anomaly [$\degree$C]",
labelpad = 16)
plt.show()
# annual temperature anomaly series from 1880 to 2018
temp2018 = np.array(NOAAtemp.loc[0:138, 1])
print("The first six elements (or head) of the data is:\n",
temp2018[0:6], "\n")
# arithmetic average of the data
print("The Mean is %f.\n" % stats.mean(temp2018))
# standard deviation of the data
print("The Standard Deviation is %f.\n" %
stats.stdev(temp2018))
# variance of the data
print("The Variance is %f.\n" % stats.variance(temp2018))
# skewness of the data
print("The Skewness is %f.\n" % skewness(temp2018))
# kurtosis of the data
print("The Kurtosis is %f.\n" % kurtosis(temp2018))
# median of the data
print("The Median is %f.\n" % stats.median(temp2018))
# percentiles
print("The 5th, 25th, 75th and 95th percentiles are:")
probs = [5, 25, 75, 95]
print([round(np.percentile(temp2018, p),5) for p in probs])
print()
The first six elements (or head) of the data is:
 [-0.370221 -0.319993 -0.320088 -0.396044 -0.458355 -0.470374]
The Mean is -0.185863.
The Standard Deviation is 0.324757.
The Variance is 0.105467.
The Skewness is 0.774270.
The Kurtosis is -0.261913.
The Median is -0.274434.
The 5th, 25th, 75th and 95th percentiles are:
[-0.57649, -0.41198, 0.01552, 0.41324]
We use $x = \{x_1, x_2, \cdots, x_n\}$ to denote the time series data. Symbolically, the above statistical indices are defined as:
\begin{align*}
\text{Mean: } \mu(x) &= \frac{1}{n}\sum_{k=1}^n x_k\\
\text{Variance (unbiased estimate): } \sigma^2(x) &= \frac{1}{n-1}\sum_{k=1}^n \left(x_k - \mu(x)\right)^2\\
\text{Standard Deviation: } \sigma(x) &= \sqrt{\sigma^2(x)}\\
\text{Skewness: } \gamma_3(x) &= \frac{1}{n}\sum_{k=1}^n \left(\frac{x_k - \mu(x)}{\sigma(x)}\right)^3\\
\text{Kurtosis: } \gamma_4(x) &= \frac{1}{n}\sum_{k=1}^n \left(\frac{x_k - \mu(x)}{\sigma(x)}\right)^4 - 3
\end{align*}
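To connect these formulas with the printed output above, the same indices can also be computed directly from the definitions in NumPy; a minimal sketch (the variable names n, mu, var, sd, g3 and g4 are introduced here for illustration).
# compute the statistical indices directly from the formulas above
n = temp2018.size
mu = np.sum(temp2018)/n                          # mean
var = np.sum((temp2018 - mu)**2)/(n - 1)         # unbiased variance
sd = np.sqrt(var)                                # standard deviation
g3 = np.mean(((temp2018 - mu)/sd)**3)            # skewness
g4 = np.mean(((temp2018 - mu)/sd)**4) - 3        # excess kurtosis
print(mu, sd, var, g3, g4)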
# plot figure
mu, std = scistats.norm.fit(y)
# create an evenly spaced sequence of 100 points in [-1,1]
points = np.linspace(-1, 1, 100)
plt.hist(y, bins = 20, range=(-1,1), color ='white',
edgecolor = 'k', density = True);
plt.plot(points, scistats.norm.pdf(points, mu, std),
color = 'b', linewidth = 3)
plt.title("Histogram of 1880 - 2018 Temperature Anomalies",
pad = 20)
plt.xlabel("Temperature Anomalies [$\degree$C]",
labelpad = 20)
plt.ylabel("Frequency", labelpad = 20)
plt.xticks([-1.0, -0.5, 0.0, 0.5, 1.0])
plt.show()
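As a numerical complement to the histogram (and to the Q-Q plot below), one hedged addition is a formal normality test from scipy.stats, for example the Shapiro-Wilk test; this is a sketch, not part of the original figure set.
# Shapiro-Wilk test of normality for the annual anomalies
W, pval = scistats.shapiro(y)
print("Shapiro-Wilk W = %.4f, p-value = %.4f" % (W, pval))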
plt.figure(figsize=(5,8))
# plot boxplot
medianprops = dict(linestyle='-',
linewidth=2.5, color='k')
whiskerprops = dict(linestyle='--',
linewidth=2.0, color='k')
plt.boxplot(y, medianprops = medianprops,
whiskerprops = whiskerprops);
#plt.title("Boxplot of 1880 - 2018\n\
#Temperature Anomalies",
# pad = 20)
plt.ylabel("Temperature Anomalies [$\degree$C]",
labelpad = 23)
y_ticks = np.linspace(-0.5,0.5,3)
plt.yticks(y_ticks, size = 23)
x = np.arange(1)
plt.xticks(x, " ")
plt.show()
NOAAtemp = read_table("data/aravg.ann.land_ocean.90S.90N.v4.0.1.201907.txt",
header = None, delimiter = "\s+")
# As before, we specify the file name, that there is no header row to skip,
# and the whitespace delimiter; 'NOAAtemp' is again 140 rows by 6 columns.
# For the Q-Q plot we need the second column (the anomalies) for the
# first 139 rows.
temp2018 = np.array(NOAAtemp.loc[0:138, 1])
#line = np.linspace(-3, 3, temp2018.size)
tstand = np.sort((temp2018 - np.mean(temp2018))/statistics.stdev(temp2018))
# simulate 139 points by N(0,1)
qn = np.random.normal(size=y.size)
# sort the points
qns = np.sort(qn)
# set up figure
fig = plt.figure(figsize=(12,8))
ax = fig.add_subplot(111)
# plot figure
qq2 = scistats.probplot(qns, dist = "norm", plot = plt)
ax.get_lines()[0].set_color('purple')
ax.get_lines()[1].set_linewidth(3)
ax1 = ax.twinx()
qt = scistats.probplot(tstand, dist = "norm", plot = plt)
ax1.get_lines()[1].set_linewidth(0.01)
ax1.get_lines()[0].set_color('black')
ax.tick_params(length=6, width=2, labelsize=20)
ax.set_title("Q-Q plot for the Standardized Global \n\
Temperature Anomalies vs N(0,1)", pad = 20)
ax1.set_title("")
ax.set_xlabel("Quantile of N(0,1)", labelpad = 20)
ax.set_ylabel("Quantile of Temperature Anomalies", labelpad = 20)
ax1.set_ylabel("")
ax1.tick_params(length=0.01, width=0.01, color = 'white', labelcolor = 'b')
ax1.set_yticks([])
ax.set_xlim(-3,3)
ax.set_ylim(-3,3)
plt.show()
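probplot does the quantile matching internally; to make the construction explicit, here is a hedged hand-made version that plots the empirical quantiles of the standardized anomalies against N(0,1) quantiles, using the plotting positions (k - 0.5)/n (probplot uses a slightly different formula, so the points may differ marginally).
# hand-made Q-Q comparison against the standard normal
n = tstand.size
pp = (np.arange(1, n + 1) - 0.5)/n         # plotting positions
theo = scistats.norm.ppf(pp)               # theoretical N(0,1) quantiles
plt.plot(theo, tstand, 'ko', markersize=4) # tstand is already sorted
plt.plot(theo, theo, 'b-', linewidth=2)    # reference line y = x
plt.xlabel("Quantile of N(0,1)")
plt.ylabel("Quantile of Temperature Anomalies")
plt.show()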
# first column with rows 1-139
x = np.array(NOAAtemp.loc[0:138, 0])
# second column with rows 1-139
y = np.array(NOAAtemp.loc[0:138, 1])
# create trend line
trend = np.array(np.polyfit(x, y, 1))
abline = trend[1] + x*trend[0]
# plot figure
plt.plot(x, y, 'k-',
color = 'brown', linewidth = 3);
# plot trend line
plt.plot(x, abline, 'k-', color = 'b', linewidth = 3);
plt.title("Global Land-Ocean Average Annual Mean \n \
Surface Temperature Anomalies: 1880 - 2018", pad = 20);
plt.text(1880, 0.5, r"Linear trend: 0.7023$\degree$C/100a",
color= 'b', size = 22)
plt.xlabel("Year",labelpad = 20)
plt.ylabel("Temperature Anomaly [$\degree$C]",
labelpad = 20)
plt.yticks([-0.6, -0.2, 0.2, 0.6])
plt.show()
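np.polyfit gives only the point estimates of the trend; since statsmodels is already imported as sm, a hedged alternative is an ordinary least squares fit, which also returns the standard error of the slope and should reproduce the 0.7023 deg C per century quoted in the figure.
# linear trend by ordinary least squares, with its standard error
X = sm.add_constant(x)                 # design matrix: intercept and year
ols = sm.OLS(y, X).fit()
slope, se = ols.params[1], ols.bse[1]  # deg C per year
print("Trend: %.4f degC/100a (s.e. %.4f)" % (100*slope, 100*se))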
import netCDF4 as nc
# read a .nc file from the folder named "data"
# open the dataset (use a name that does not shadow the nc module)
ncd = nc.Dataset('data/air.mon.anom.nc')
# get the detailed description of the dataset
print(ncd)
# extract latitude, longitude, time and temperature
lon = ncd.variables['lon'][:]
lat = ncd.variables['lat'][:]
time = ncd.variables['time'][:]
air = ncd.variables['air']
# convert the netCDF time values to calendar dates using the time units
units = ncd.variables['time'].units
dtime = nc.num2date(time, units)
lat1=np.linspace(-87.5, 87.5, 36).tolist()
lon1=np.linspace(2.5, 357.5, 72).tolist()
Lat = sorted(lat1 * 72)
Lon = lon1 * 36
# 1674 months from Jan 1880 to June 2019
print(np.shape(air))
<class 'netCDF4._netCDF4.Dataset'>
root group (NETCDF4_CLASSIC data model, file format HDF5):
    Conventions: CF-1.0
    Source: ftp://ftp.ncdc.noaa.gov/pub/data/noaaglobaltemp/operational/
    dataset_title: NOAA Global Surface Temperature (NOAAGlobalTemp)
    References: https://www.esrl.noaa.gov/psd/data/gridded/data.noaaglobaltemp.html
    keywords_vocabulary: Climate and Forecast (CF) Standard Name Table (Version 46, 25 July 2017)
    keywords: Earth Science > Oceans > Ocean Temperature > Sea Surface Temperature, Earth Science > Atmosphere > Atmospheric Temperature > Surface Temperature > Air Temperature
    cdm_data_type: Grid
    dataset_citation_url: https://doi.org/10.25921/9qth-2p70
    references: Vose, R. S., et al., 2012: NOAA's merged land-ocean surface temperature analysis. Bulletin of the American Meteorological Society, 93, 1677-1685. doi: 10.1175/BAMS-D-11-00241.1. Huang, B., Peter W. Thorne, et. al, 2017: Extended Reconstructed Sea Surface Temperature version 5 (ERSSTv5), Upgrades, validations, and intercomparisons. J. Climate, 30, 8179-8205. doi: 10.1175/JCLI-D-16-0836.1
    climatology: Climatology is based on 1971-2000 monthly climatology
    license: These data are available for use without restriction.
    source: https://www.ncdc.noaa.gov/noaa-merged-land-ocean-global-surface-temperature-analysis-noaaglobaltemp-v5
    platform: Ships, moored buoys, surface drifting buoys, Argo floats, and weather stations
    instrument: Conventional thermometers
    creator_email: Boyin.Huang@noaa.gov, Xungang.Yin@noaa.gov
    comment: Merged land ocean surface temperature anomalies. Version 5.0.0, blending ERSST V5 and GHCN-M V4
    source_institution: DOC/NOAA/NESDIS/National Centers for Environmental Information(NCEI)
    history: Created 08/2019 using new V5 data from NCEI
    version: V5
    geospatial_bounds: POLYGON ((2.5 -87.5, 2.5 87.5, 357.5 87.5, 357.5 -87.5, 2.5 -87.5))
    title: NOAA Merged Land Ocean Global Surface Temperature Analysis (NOAAGlobalTemp)
    data_modified: 2019-08-02
    dimensions(sizes): time(1674), lat(36), lon(72), nbnds(2)
    variables(dimensions): float32 lat(lat), float32 lon(lon), float64 time(time), float64 time_bnds(time, nbnds), float32 air(time, lat, lon)
    groups:
(1674, 36, 72)
# NOAAgridT = np.reshape(air, (1679,36,72))
NOAAgridT = air
mapmat = NOAAgridT[1631,:,:]
mapmat=np.maximum(np.minimum(mapmat ,6),-6)
print(np.all(np.isnan(mapmat)))
print(mapmat.shape)
False
(36, 72)
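The time index 1631 for December 2015 is hard-coded above; since dtime holds calendar dates, the index can also be looked up programmatically. A hedged sketch, assuming dtime was built with nc.num2date(time, units) as above (idx is a name introduced here).
# locate December 2015 in the time axis instead of hard-coding 1631
idx = [i for i, d in enumerate(dtime)
       if d.year == 2015 and d.month == 12][0]
print(idx)   # expected to print 1631, matching the slice above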
def color_pop(colMat, colVec, diff):
    if(colMat.shape[0] < 2*diff and colMat.shape[0] > diff):
        colMat[0] = color.to_rgba(colVec[0])
    if(colMat.shape[0] < diff):
        return colMat
    else:
        colMat[-1] = color.to_rgba(colVec[-1])
        return np.concatenate((color_pop(colMat[:colMat.shape[0] - diff],
                                         colVec[:-1], diff),
                               colMat[colMat.shape[0] - diff:]))

def color_ave(colMat):
    indices = np.array([j for j in range(0, colMat.shape[0]) if sum(colMat[j]) > 0])
    for i in range(indices.size - 1):
        diff = indices[i + 1] - indices[i]
        off = (colMat[indices[i + 1]] - colMat[indices[i]])
        colMat[indices[i]:indices[i + 1]] = np.array([colMat[indices[i]] + (off/diff)*j
                                                      for j in range(diff)])
    return colMat
cols = np.array(['black','blue','darkgreen',
'green','yellow','pink','red','maroon'])
values = np.zeros((256,4))
diff = int(values.shape[0]/(cols.size - 1))
clev = np.linspace(mapmat.min(), mapmat.max(), 81)
myColMap = LinearSegmentedColormap.from_list(name='my_list',
colors=['black','blue','darkgreen',
'green','lime','yellow',
'pink','red','maroon'], N=100)
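The helper functions color_pop and color_ave defined above are never called in this script; they appear intended to place the anchor colors from cols into the 256-row RGBA table values and interpolate linearly between them, an alternative to LinearSegmentedColormap. A hedged usage sketch (handmadeCmap is a hypothetical name).
# fill the anchor rows of 'values' with 'cols', interpolate between them,
# and wrap the result as a colormap (not used for the figures below)
rgba = color_ave(color_pop(values, cols, diff))
handmadeCmap = color.ListedColormap(rgba)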
# Plot Fig. 1.8
dpi = 100
fig = plt.figure(figsize = (1100/dpi, 1100/dpi), dpi = dpi)
ax = fig.add_axes([0.1, 0.1, 0.8, 0.9])
# create map
dmap = Basemap(projection = 'cyl', llcrnrlat = min(lat),
urcrnrlat = max(lat), resolution = 'c',
llcrnrlon = min(lon), urcrnrlon = max(lon))
# draw coastlines, state and country boundaries, edge of map
dmap.drawcoastlines()
dmap.drawstates()
dmap.drawcountries()
# convert latitude/longitude values to plot x/y values
x, y = dmap(*np.meshgrid(lon, lat))
# contour levels
#print(x.shape, y.shape, mapmat.shape)
# draw filled contours
cnplot = dmap.contourf(x, y, mapmat, clev, cmap=myColMap)
# tick marks
ax.set_xticks([0, 50, 100, 150, 200, 250, 300, 350])
ax.set_yticks([-50,0,50])
ax.tick_params(length=6, width=2, labelsize=20)
# add colorbar
# pad: distance between map and colorbar
cbar = dmap.colorbar(cnplot, pad = "4%",drawedges=True,
shrink=0.55, ticks = [-6,-4,-2,0,2,4,6])
# add colorbar title
cbar.ax.set_title('[$\degree$C]', size= 17, pad = 10)
cbar.ax.tick_params(labelsize = 15)
# add plot title
plt.title('NOAAGlobalTemp Anomalies Dec 2015', pad = 15)
# label x and y
plt.xlabel('Longitude', labelpad = 20)
plt.ylabel('Latitude', labelpad = 10)
# display on screen
plt.show()
import netCDF4 as nc
# open netcdf file
ncd = nc.Dataset('data/air.mon.anom.nc')
# read variables
air = ncd.variables['air']
time = ncd.variables['time']
lat = ncd.variables['lat']
lon = ncd.variables['lon']
# define mapmat
NOAAgridT = air
mapmat = NOAAgridT[1631,:,:]
mapmat=np.maximum(np.minimum(mapmat ,6),-6)
print(np.all(np.isnan(mapmat)))
print(mapmat.shape)
False
(36, 72)
# define color map
myColMap = LinearSegmentedColormap.from_list(name='my_list',
colors=['blue','lightskyblue',
'lightskyblue','yellow',
'red','maroon'], N=100)
# plot figure
plt.figure(figsize=(15, 10))
# robin map
dmap = Basemap(projection='robin',lon_0=180,resolution='l')
# add continent outlines, parallel lines, meridian lines
dmap.drawcoastlines(linewidth=1,color='black',zorder=40)
dmap.fillcontinents(color='none',zorder=40)
parallels = np.arange(-90,90,15.)
dmap.drawparallels(parallels,labels=[1,0,0,0],fontsize=0,linewidth=1)
meridians = np.arange(0,360,15.)
dmap.drawmeridians(meridians,labels=[0,0,0,1],fontsize=0,linewidth=1)
# plot figure
lats, lons = np.meshgrid(lat, lon)
x, y = dmap(lons, lats)
dmap.contourf(x,y,mapmat.T,np.arange(-6,6,0.1),
cmap=myColMap,extend='both')
cbar=dmap.colorbar(drawedges=True,location='bottom',
size="4%",pad="7%",fraction=0.6)
cbar.ax.tick_params(labelsize=20)
cbar.set_label('Surface Air Temperature Monthly Anomaly($\degree$C)',
fontsize=20, labelpad = 15)
cbar.outline.set_edgecolor('black')
cbar.outline.set_linewidth(1)
cbar.dividers.set_color('k')
cbar.dividers.set_linewidth(1)
ax=plt.gca()
ax.spines['bottom'].set_linewidth(3)
ax.spines['left'].set_linewidth(3)
ax.spines['right'].set_linewidth(3)
ax.spines['top'].set_linewidth(3)
plt.title('NOAA GlobalTemp Monthly Anomalies: Dec 2015')
plt.show()
WARNING: x coordinate not montonically increasing - contour plot may not be what you expect. If it looks odd, your can either adjust the map projection region to be consistent with your data, or (if your data is on a global lat/lon grid) use the shiftgrid function to adjust the data to be consistent with the map projection region (see examples/contour_demo.py).
# Plot Fig. 1.10: Hovmoller diagram
mapmat2 = NOAAgridT[1308:1668,11:23,29]
# clip the anomalies to the range [-2, 2]
mapmat2 = np.maximum(np.minimum(mapmat2, 2), -2)
# transpose so rows are latitudes and columns are times
mapmat2 = np.transpose(mapmat2)
print(mapmat2.shape)
lat3 = np.linspace(-30,30, 12)
print(lat3.shape)
time = np.linspace(1989, 2018, 360)
print(time.shape)
(12, 360)
(12,)
(360,)
# define color map
myColMap = LinearSegmentedColormap.from_list(
name='my_list',
colors=['black','blue','darkgreen','green','lime',
'yellow', 'pink','red','maroon'], N=100)
clev2 = np.linspace(mapmat2.min(), mapmat2.max(), 501)
# plot figure
plt.figure(figsize=(14, 8))
contf = plt.contourf(time, lat3, mapmat2,
clev2, cmap=myColMap);
plt.text(2019, 32,
"[$\degree$C]", color='black', size = 23)
plt.title("Hovmoller diagram of the \n \
NOAAGlobalTemp Anomalies", pad = 15)
plt.xlabel("Time", labelpad = 20)
plt.ylabel("Latitude", labelpad = 12)
# add color bar
colbar = plt.colorbar(contf, drawedges=False,
ticks = [-2,-1,0,1,2])
# open the GODAS 2015 potential temperature file (read-only)
nc1 = ds("data/pottmp.2015.nc", "r")
print(nc1)
godasT = nc1.variables['pottmp'][:]
print(godasT.shape)
godasT[11,1,208:210, 245:250]
<class 'netCDF4._netCDF4.Dataset'>
root group (NETCDF4_CLASSIC data model, file format HDF5):
    creation_date: Mon Dec 23 12:15:32 MST 2013
    sfcHeatFlux: Note that the net surface heat flux are the total surface heat flux from the NCEP reanalysis 2 plus the relaxation terms.
    time_comment: The internal time stamp indicates the FIRST day of the averaging period.
    Conventions: COARDS
    grib_file: godas.M.2014*
    html_REFERENCES: http://www.cpc.ncep.noaa.gov/products/GODAS/background.shtml
    html_BACKGROUND: http://www.cpc.ncep.noaa.gov/products/GODAS/background.shtml
    html_GODAS: www.cpc.ncep.noaa.gov/products/GODAS
    comment: NOTE: THESE ARE THE BIAS CORRECTED GODAS FILES.
    title: GODAS: Global Ocean Data Assimilation System
    References: https://www.psl.noaa.gov/data/gridded/data.godas.html
    dataset_title: NCEP Global Ocean Data Assimilation System (GODAS)
    history: Created 2013/12 by Hoop Converted to chunked, deflated non-packed NetCDF4 2020/08
    dimensions(sizes): level(40), lon(360), lat(418), time(12)
    variables(dimensions): float32 level(level), float32 lon(lon), float32 lat(lat), int32 date(time), float32 timePlot(time), float64 time(time), float32 pottmp(time, level, lat, lon)
    groups:
(12, 40, 418, 360)
masked_array(
  data=[[300.0654602050781, 299.9830627441406, 299.8793029785156,
         299.7770690917969, 299.6641540527344],
        [300.1844787597656, 300.1005554199219, 299.9998474121094,
         299.9006652832031, 299.8045349121094]],
  mask=[[False, False, False, False, False],
        [False, False, False, False, False]],
  fill_value=-9.96921e+36,
  dtype=float32)
# define climmat: the 2015 annual mean temperature at a single depth level (index 20)
climmat = np.zeros((360, 418))
for i in range(360):
    for j in range(418):
        climmat[i,j] = np.mean(godasT[:, 20, j, i])
climmat = np.transpose(climmat)
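The double loop above is straightforward but slow in pure Python; for reference, a hedged vectorized equivalent averages over the 12 months at level index 20 in one step (climmat_alt is a name introduced here; over ocean points it matches the transposed loop result).
# vectorized annual mean at level index 20; shape is already (lat, lon)
climmat_alt = godasT[:, 20, :, :].mean(axis=0)   # masked mean over 12 months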
# define latitude and longitude
Lat = np.linspace(-75, 65, 418)
Lon = np.linspace(0, 360, 360)
myColMap = LinearSegmentedColormap.from_list(name='my_list',
colors=['black','blue','darkgreen',
'green','lime','yellow',
'pink','red','maroon'], N=100)
plt.figure(figsize=(14,5.32));
# define the number and positions of the contour levels (in kelvin)
ints = np.linspace(273, 298, 81)
# plot figure
contf = plt.contourf(Lon, Lat, climmat, levels = ints, cmap = myColMap);
plt.text(380, 68, "[K]", fontsize=20, color='black')
plt.tick_params(length=6, width=2, labelsize=18)
# define style and parameters
m = Basemap(projection='cyl', llcrnrlon=0, urcrnrlon=360,
resolution='l', fix_aspect=False,
suppress_ticks=False, llcrnrlat=-75,
urcrnrlat=65)
m.drawcoastlines(linewidth=1)
plt.title("GODAS 2015 Annual Mean Temperature at \n\
195 [m] Depth Level", pad = 15)
plt.xlabel("Longitude", labelpad = 20)
plt.ylabel("Latitude", labelpad = 15)
plt.tick_params(length=6, width=2, labelsize=18)
# add color bar
colbar = plt.colorbar(contf, drawedges=False, ticks = [275,280,285,290,295])
# calculate ratio for map figsize
14*.38
5.32