After enabling SSL with FTPS:
sudo apt install vsftpd
sudo vi /etc/vsftpd.conf
ssl_enable=YES
sudo service vsftpd restart
import ftplib
from tqdm import tqdm
import os

# Connect over explicit FTPS and secure the data channel.
# NOTE(review): 'server', 'user', 'passwd' must be defined beforehand.
session = ftplib.FTP_TLS(server)
session.login(user=user, passwd=passwd)
session.prot_p()        # encrypt the data connection, not just the control one
session.set_pasv(True)  # passive mode: the client opens the data connection
session.cwd("data/imerg/gis/2023/02")

files = session.nlst()
print('Found {} files'.format(len(files)))  # was misspelled 'Fround'
print(files[:10])
print('')

# Keep only the 3-hourly GeoTIFFs for 2023-02-05.
files1 = [x for x in files if '20230205' in x and x.endswith("3hr.tif")]
print(files1)

for filename in tqdm(files1):
    print(filename)
    if not os.path.exists(filename):  # skip files already downloaded
        # 'with' closes the local file even if the transfer fails
        # (the original open(...).write leaked the file handle)
        with open(filename, "wb") as fout:
            session.retrbinary("RETR " + filename, fout.write)
session.quit()
print('Finished.')
def download_files_from_s3_regions(remote_prefix, local_dir,
                                   bucketpattern='output-from-amps-{region}',
                                   regions=('us-east-1', 'us-east-2', 'us-west-1', 'us-west-2'),
                                   aws_access_key_id=None, aws_secret_access_key=None):
    """Download every object under *remote_prefix* from one bucket per region.

    Objects with the same basename appearing in several buckets are only
    downloaded once (first bucket in *regions* order wins).

    Parameters
    ----------
    remote_prefix : str
        Key prefix to list in each bucket.
    local_dir : str
        Destination directory, created if missing.
    bucketpattern : str
        Bucket name template; '{region}' is substituted per region.
    regions : iterable of str
        Regions to build bucket names from (tuple default avoids the
        mutable-default-argument pitfall).
    aws_access_key_id, aws_secret_access_key : str or None
        Credentials passed straight to boto3 (None = default chain).

    Returns
    -------
    list of str
        Basenames of the files downloaded, in download order.
    """
    import boto3, os
    bucketnames = [bucketpattern.format(region=region) for region in regions]
    print(bucketnames)
    s3res = boto3.resource('s3', aws_access_key_id=aws_access_key_id,
                           aws_secret_access_key=aws_secret_access_key)
    buckets = {name: s3res.Bucket(name) for name in bucketnames}
    if not os.path.exists(local_dir):
        print('Creating local dir {}'.format(local_dir))
    # exist_ok avoids a race between the existence check and the mkdir
    os.makedirs(local_dir, exist_ok=True)
    remote_files = {}
    for bucketname in bucketnames:
        remote_files[bucketname] = [obj.key for obj in buckets[bucketname].objects.filter(Prefix=remote_prefix)]
    print('Remote files:')
    for k, v in remote_files.items():
        print('{}: {}'.format(k, v))
    print('')
    local_files = []
    seen = set()  # O(1) duplicate check instead of scanning the list
    for bucketname in bucketnames:
        for remote_file in remote_files[bucketname]:
            local_file = os.path.basename(remote_file)
            if local_file in seen:
                continue
            local_filepath = os.path.join(local_dir, local_file)
            print('Downloading {}'.format(local_filepath))
            buckets[bucketname].download_file(Key=remote_file, Filename=local_filepath)
            seen.add(local_file)
            local_files.append(local_file)
    return local_files
All logging (parent and child modules) has the same log level:
# basicConfig configures the root logger, so every module logger that
# propagates to it is emitted at INFO and above.
logging.basicConfig(stream=sys.stdout,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger('main')
Just outer script has debug level:
# No level passed to basicConfig, so the root logger keeps its default
# (WARNING); only the 'main' logger is lowered to DEBUG.
logging.basicConfig(stream=sys.stdout,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('main')
logger.setLevel(logging.DEBUG)
def partial_format(string1, dict1=None):
    """Substitute only the '{...}' fields of *string1* that appear in *dict1*.

    Unlike str.format, fields with no matching key (e.g. '{prog:03d}' when
    'prog' is absent) are left untouched instead of raising KeyError; extra
    keys in *dict1* are ignored.

    Parameters
    ----------
    string1 : str
        Template containing '{name}' or '{name:spec}' fields.
    dict1 : dict or None
        Replacement values; None means "no substitutions" (avoids a mutable
        default argument).

    Returns
    -------
    str
        Template with the known fields formatted in.
    """
    if dict1 is None:
        dict1 = {}
    format_fields = re.findall(r'\{(.*?)\}', string1)  # field bodies, specs included
    for k, v in dict1.items():  # .iteritems() was Python 2 only
        for field in format_fields:
            # split off the format spec; there may be extra ':' like hour:min:sec
            fk = field.split(':')[0]
            if k == fk:
                placeholder = '{' + field + '}'
                value = placeholder.format(**{k: v})
                string1 = string1.replace(placeholder, value)
                break
    return string1
# Example: 'cycle' and 'xpto' are substituted, '{prog:03d}' is left for later,
# and the extra key 'qqq' is silently ignored.
basefile = 'aaa{cycle:%Y%m}_{prog:03d}_{xpto:.3f}'
partial_format(basefile, dict(cycle=datetime.datetime.now(), xpto=3, qqq=3))
Execute a system command, passing STDOUT and STDERR to logger, while the command runs.
def execute(cmd, **kwargs):
    """Execute a system command, streaming STDOUT and STDERR to the logger.

    Parameters
    ----------
    cmd : str
        Command line; split with shlex, so no shell is involved.
    **kwargs
        Passed through to subprocess.Popen.

    Raises
    ------
    subprocess.CalledProcessError
        If the command exits with a non-zero return code.

    Source: https://stackoverflow.com/a/4417735/2063031
    """
    logger.info("cmd: '%s'", cmd)
    proc = subprocess.Popen(
        shlex.split(cmd),
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,   # merge stderr into the stdout stream
        universal_newlines=True,
        **kwargs)
    try:
        # readline returns '' only at EOF (text mode), so this loops per line
        for line in iter(proc.stdout.readline, ""):
            logger.debug(line.strip())
    finally:
        proc.stdout.close()  # close the pipe even if logging raises
    return_code = proc.wait()
    logger.debug('return_code = %i', return_code)  # lazy %-args, not eager %
    if return_code:
        raise subprocess.CalledProcessError(return_code, cmd)
Matplotlib 2.0.2 has fantastic colours but doesn't work with basemap 1.10 — something about the axes is different and I couldn't figure out what in the time available. So I downgraded to 1.4.3 but still want to use the new beautiful colours:
import matplotlib.pyplot as plt
from cycler import cycler
# The matplotlib 2.x default palette ("the new beautiful colours" above),
# applied to an older matplotlib.
new_colors = ['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728',
'#9467bd', '#8c564b', '#e377c2', '#7f7f7f',
'#bcbd22', '#17becf']
# NOTE(review): 'color_cycle' is the pre-1.5 rc key; mpl >= 1.5 would use
# plt.rc('axes', prop_cycle=cycler(color=new_colors)) — the cycler import
# above is unused in this old-mpl form.
plt.rc('axes', color_cycle=new_colors)
Discrete colormap with extremes (to plot the last category):
import matplotlib as mpl
import matplotlib.pyplot as plt
# Category boundary values; the value 16 ("water") gets a fixed cyan colour.
vals = np.array([1,3,5,16,21])
iswater = 16
# colormap from rgb
# Sample len(vals)+1 colours; the last sample becomes the 'over' colour.
rgbs = mpl.cm.cubehelix(np.linspace(0,1,len(vals)+1))
idx = np.where(vals==iswater)[0][0]
rgbs[idx,:] = list(mpl.colors.to_rgb('cyan')) + [1]  # RGB + alpha=1
cmap = mpl.colors.ListedColormap(rgbs[:-1]).with_extremes(over=rgbs[-1])
# or from list of colors
# 1 3 5 16 this one doesnt show! 21
cmap = mpl.colors.ListedColormap(['red', 'green', 'blue', 'cyan', 'magenta']).with_extremes(over='yellow')
# extend='max' maps values above the last boundary to the 'over' colour
norm = mpl.colors.BoundaryNorm(vals,cmap.N, extend='max')
plt.imshow(lu30s, cmap=cmap, norm=norm); plt.colorbar()  # NOTE(review): 'lu30s' defined elsewhere
# Demo data: one row per category value, to check the mapping visually.
data = np.zeros((5,5))
for i, val in enumerate(vals):
    data[i,:] = val
plt.imshow(data, cmap=cmap, norm=norm)
plt.colorbar()
old example not really the solution:
# Offset the boundaries by 0.5 so each integer value sits in its own colour bin.
plt.scatter(np.arange(10), np.arange(10), c=np.arange(10), cmap='tab10', s=5)
plt.colorbar(boundaries=np.arange(11)-0.5).set_ticks(np.arange(10))
also works with imshow
import numpy as np
import matplotlib.pyplot as plt
# One colorbar shared by several subplots: draw each mesh with the same
# vmin/vmax, then attach a single colorbar on a dedicated axes.
x = np.arange(-5,5)
y = np.arange(-6,4)
fig, axes = plt.subplots(nrows=2, ncols=2)
for ax in axes.flat:
    im = ax.pcolormesh(x,y, np.random.random((10,10)), vmin=0, vmax=1)
fig.subplots_adjust(right=0.8)  # leave room on the right for the colorbar
# [x0,y0,width,height]
cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])
fig.colorbar(im, cax=cbar_ax)
# Facet two model runs side by side by concatenating them along a new dimension.
fg= xr.concat([
ldasout_wrf['ACCPRCP'].isel(time=-1),
ldasout_sup['ACCPRCP'].isel(time=-1)],
dim='new_dim').plot(x='ix', y='iy', col='new_dim', cmap='gist_ncar')
titles = ['wrf', 'sup']
for i,ax in enumerate(fg.axs.flatten()):
    ax.axis(zoom_land)  # NOTE(review): 'zoom_land' (axis limits) defined elsewhere
    ax.set_title(titles[i])
Example from https://ocefpaf.github.io/python4oceanographers/blog/2013/09/23/cartopy/
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
fig = plt.figure(figsize=(10,5))
ax = plt.axes(projection=ccrs.PlateCarree())
ax.coastlines()
gl = ax.gridlines(draw_labels=True)
# NOTE(review): xlabels_top/ylabels_right are the pre-0.18 cartopy attribute
# names (newer cartopy uses gl.top_labels / gl.right_labels).
gl.xlabels_top = False
gl.ylabels_right = False
fig, axs= plt.subplots(1,2, subplot_kw={'projection': ccrs.PlateCarree()})
fig = plt.figure(figsize=(10,5))
ax = plt.axes(projection=ccrs.PlateCarree())
# 'transform' declares the coordinate system of the data itself
im = plt.pcolormesh(lon, lat, var2d, vmin=vlims[0], vmax=vlims[1], transform=ccrs.PlateCarree())
ax.coastlines()
ax.set_title(filein)
gl = ax.gridlines(draw_labels=True)
gl.xlabels_top = False
gl.ylabels_right = False
plt.colorbar(im)#, cax=pos_cax)
Gridlines don't work across dateline. Have to specify manually.
import cartopy.crs as ccrs
# central_longitude=180 puts the dateline in the middle of the plot
fig, ax = plt.subplots(1,1, figsize=(6,6), subplot_kw={'projection': ccrs.PlateCarree(180)})
ax.pcolormesh(lon, lat, var2d, transform=ccrs.PlateCarree())
ax.coastlines()
gl = ax.gridlines(draw_labels=True, xlocs=np.arange(160,190,5))
or better yet, example from https://stackoverflow.com/questions/47335851/issue-w-image-crossing-dateline-in-imshow-cartopy:
import numpy as np
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
# demo raster data
n1 = 300
m1 = 0.4
lat = np.arange(n1)*m1 + (-59.99)
lon = np.arange(n1)*m1 + (85.01)
dat = np.reshape(np.arange(n1*n1), [n1,n1])
cm_lon=180 # for central meridian
tran = ccrs.PlateCarree(central_longitude = cm_lon)
proj = tran
plt.figure(figsize=(8,8))
ax = plt.axes(projection=proj)
# shift the extent into the rotated frame by subtracting the central meridian
ext = [lon[0]-cm_lon, lon[-1]-cm_lon, lat[0], lat[-1]]
#print(ext)
ax.imshow(dat, extent=ext, \
transform=tran, interpolation='nearest')
ax.coastlines(resolution='110m', color='black', linewidth=0.5, zorder=10)
# this draws grid lines only, must go beyond E/W extents
ax.gridlines(draw_labels=False, xlocs=[80,100,120,140,160,180,-180,-160,-140])
# this draws labels only, exclude those outside E/W extents
ax.gridlines(draw_labels=True, xlocs=[100,120,140,160,180,-160])
plt.show()
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
def plot_decor(ax, xlocs=None):
    """Decorate a cartopy GeoAxes: white coastlines plus labelled gridlines.

    Parameters
    ----------
    ax : cartopy GeoAxes
    xlocs : sequence of gridline longitudes, or None for automatic placement
    """
    ax.coastlines('50m', color='white')
    # ax.add_feature(OCEAN, zorder=-1)#, facecolor='0.8', zorder=-1)
    gl = ax.gridlines(draw_labels=True,xlocs=xlocs)
    # pre-0.18 cartopy attribute names (newer: top_labels / right_labels)
    gl.xlabels_top = False
    gl.ylabels_right = False
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER
    gl.xlabel_style = {'size': 'small'}#, 'color': 'gray'}
    gl.ylabel_style = {'size': 'small'}#, 'color': 'gray'}
def plot_decor(ax, xlocs=None, dx=None, xlims=None):
    """Decorate a cartopy GeoAxes, handling gridlines across the dateline.

    Extended variant of plot_decor (shadows the simpler definition when both
    are executed). Longitudes above 180 are drawn as unlabelled gridlines,
    then relabelled in the [-180, 180] frame.

    Parameters
    ----------
    ax : cartopy GeoAxes
    xlocs : sequence of gridline longitudes (may exceed 180), or None
    dx, xlims : spacing and (min, max) to build xlocs from instead
        (both must be given together — xlims is dereferenced when dx is set)
    """
    ax.coastlines('50m', color='white')
    if dx is not None:
        xlocs = np.arange(xlims[0], xlims[1] + dx, dx)
    if xlocs is not None:
        # BUGFIX: normalize to a plain list — with the np.arange above,
        # 'xlocs + [360]' was elementwise array addition, not concatenation
        xlocs = list(xlocs)
    xlocs2 = xlocs
    if xlocs is not None:
        if xlocs[-1] > 180:
            # this draws grid lines only, must go beyond E/W extents
            ax.gridlines(draw_labels=False, xlocs=xlocs + [360])
            # wrap >180 longitudes into [-180, 180] and drop the duplicate end
            xlocs2 = [x - 360. if x > 180 else x for x in xlocs]
            xlocs2 = xlocs2[:-1]
    gl = ax.gridlines(draw_labels=True, xlocs=xlocs2)
    gl.xlabels_top = False
    gl.ylabels_right = False
Example from http://xarray.pydata.org/en/stable/examples/multidimensional-coords.html
import cartopy.crs as ccrs
ax = plt.axes(projection=ccrs.PlateCarree(180))
print(d01.coords)  # was Python-2 'print d01.coords' (SyntaxError on py3)
# Use correct Time coordinate
# NOTE(review): the inplace=True kwarg was removed from xarray; reassign instead
ds = ds.swap_dims({'Time': 'Times'})
d01['T2'].isel(Times=0).plot.pcolormesh(ax=ax, transform=ccrs.PlateCarree(), x='XLONG', y='XLAT')
ax.coastlines()
# Attach 2-D lat/lon coords (and a time coord) from a WRF geogrid file.
ds = xr.open_dataset('geo_em.d02.nc')
ds.coords['lat_m'] = (['south_north', 'west_east'], ds['XLAT_M'].isel(Time=0))
ds.coords['lon_m'] = (['south_north', 'west_east'], ds['XLONG_M'].isel(Time=0))
ds.coords['time'] = (['Time'], ds['Times'])#, {'units': 'days since 1970-01-01'})
ax = plt.axes(projection=ccrs.PlateCarree(180))
ds['HGT_M'].isel(Time=0).plot(ax=ax, transform=ccrs.PlateCarree(), x='lon_m', y='lat_m', cmap='Spectral_r')
ax.set_title('')
plot_decor(ax, xlocs=range(165,190,5))
Example from http://xarray.pydata.org/en/stable/auto_gallery/plot_cartopy_facetgrid.html
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import xarray as xr
# Load the data
ds = xr.tutorial.load_dataset('air_temperature')
air = ds.air.isel(time=[0, 724]) - 273.15
# This is the map projection we want to plot *onto*
map_proj = ccrs.LambertConformal(central_longitude=-95, central_latitude=45)
p = air.plot(transform=ccrs.PlateCarree(), # the data's projection
col='time', col_wrap=1, # multiplot settings
aspect=ds.dims['lon'] / ds.dims['lat'], # for a sensible figsize
subplot_kws={'projection': map_proj}) # the plot's projection
# We have to set the map's options on all four axes
# NOTE(review): newer xarray exposes the facet axes as p.axs, not p.axes
for ax in p.axes.flat:
    ax.coastlines()
    ax.set_extent([-160, -30, 5, 75])
    # Without this aspect attributes the maps will look chaotic and the
    # "extent" attribute above will be ignored
    # NOTE(review): 'box-forced' was removed in matplotlib 2.2 (use 'box')
    ax.set_aspect('equal', 'box-forced')
plt.show()
ds = xr.open_mfdataset(pattern, combine='nested', concat_dim='Time')
# --- positional (isel) vs label-based (sel/loc) indexing cheat sheet ---
ds.isel(time=0)
ds[dict(time=0)]  # dict form, same as isel
ds[vname].isel(time=slice(10,12))
ds[vname].isel(lon=1, lat=2)
ds.isel(lon=slice(None,None,10), lat=slice(None,None,10))  # every 10th point
ds.loc[dict(space='IA')]  # label-based
ds[vname].sel(time='2017-06-29T12:00:00',lon=0.125)
In new versions: figsize = (aspect * size, size)
fig,ax = plt.subplots(1,1, figsize=(8,2))
ds[vname].isel(lon=1, lat=2).plot(ax=ax)
ds[vname].plot(col='time', col_wrap=5, vmin=0,vmax=0.01)  # facet by time
gfs['ugrd10m'].isel(time=(0,-1)).plot(x='longitude', y='latitude', col='time', col_wrap=2)#, vmin=-30, vmax=30)
Method 1 (1 var, 1d lat/lon):
# Build an xarray Dataset from a cdms2 variable ('f' and 'vname' defined above).
var = f[vname]
time = var.getTime()
time = netCDF4.num2date(time, units=time.units)
lon = var.getLongitude()[:]
lat = var.getLatitude()[:]
print(var.shape, time.shape, lat.shape, lon.shape)  # was a Python-2 print statement
ds = xr.Dataset({vname: (['time', 'lat', 'lon'], var)}, coords={'lon': lon, 'lat': lat, 'time': time})
Method 2 (2d lat/lon):
import xarray as xr
import numpy as np
# Dataset with 2-D (curvilinear) lat/lon coordinates on dims (y, x).
data = np.random.random(300).reshape(15,20)
print(data.shape)  # was a Python-2 print statement
v2d, lat, lon = data, data, data  # same dummy array reused for field and coords
print(v2d.shape, lat.shape, lon.shape)
time = np.array([0.])
v2d = np.expand_dims(v2d, axis=0) # add time dimension
print(v2d.shape)
dvars = {}
dvars['u10'] = (['time','y', 'x'], v2d)
ds = xr.Dataset(dvars)
# 2-D coords share the (y, x) dims instead of indexing them
ds.coords['lat'] = (['y', 'x'], lat)
ds.coords['lon'] = (['y', 'x'], lon)
ds.coords['time'] = (['time'], time, {'units': 'days since 1970-01-01' })
ds.to_netcdf('test_irregular.nc')
print(ds)
Output:
Dimensions: (time: 1, x: 20, y: 15) Coordinates: lat (y, x) float64 0.9063 0.599 0.6727 0.9662 0.3724 0.9094 0.6241 ... lon (y, x) float64 0.9063 0.599 0.6727 0.9662 0.3724 0.9094 0.6241 ... * time (time) float64 0.0 Dimensions without coordinates: x, y Data variables: u10 (time, y, x) float64 0.9063 0.599 0.6727 0.9662 0.3724 0.9094 ...
ncdump -h test_irregular.nc
netcdf test_irregular { dimensions: time = 1 ; y = 15 ; x = 20 ; variables: double u10(time, y, x) ; u10:_FillValue = NaN ; u10:coordinates = "lat lon" ; double lat(y, x) ; lat:_FillValue = NaN ; double lon(y, x) ; lon:_FillValue = NaN ; double time(time) ; time:_FillValue = NaN ; time:units = "days since 1970-01-01" ; }
Method 3 (from another DataSet):
# Subset an existing Dataset to a few variables and write it back out.
ds = xr.open_dataset('some netcdf or open dap ctl')
variables_of_interest = ['ugrd10m', 'vgrd10m', 'tmp2m', 'vissfc', 'rh2m', 'prmslmsl']
# dict comprehension replaces the explicit accumulation loop
vardict = {name: ds.data_vars[name] for name in variables_of_interest}
ds = xr.Dataset(data_vars=vardict, coords=ds.coords, attrs=ds.attrs, compat='broadcast_equals')
ds.to_netcdf('/tmp/out.nc')
Method 4 (from cdms2 ctl file):
def ctl2xarray(filein, vnames = ['ugrd10m', 'vgrd10m', 'tmp2m', 'tmpsfc']):
f = cdms2.open(filein)
print(f.listvariables())
data_vars = {}
first_time = True
for vname in vnames:
var = f[vname]
if first_time:
time = var.getTime()
time = netCDF4.num2date(time, units=time.units)
lon = var.getLongitude()[:]
lat = var.getLatitude()[:]
print var.shape, time.shape, lat.shape, lon.shape
first_time = False
# var[var>=1e20] = np.nan
data_vars[vname] = (['time', 'lat', 'lon'], var.getValue())
ds = xr.Dataset(data_vars, coords={'lon': lon, 'lat': lat, 'time': time})
return ds
# --- pandas MultiIndex selection examples ('df', 'sites', 'vname' defined elsewhere) ---
df.loc[(sites, 'nzra','bias'),:]  # tuple indexing on a 3-level row index
df.xs('rmse', level=2).xs('cfsr2', level=1)  # chained cross-sections by level
df[vname].isel(site=0, dum1=0)
df.to_dataframe().reset_index(level=0, drop=True).reset_index(level=0, drop=True)
ts = df.xs(0).xs(0)
# Build a datetime column from separate Year/Month/Day/Hour columns, then index by it.
ts['time'] = pd.to_datetime(dict(year=ts['Year'], month=ts['Month'], day=ts['Day'], hour=ts['H[UTC]']))
ts = ts.set_index(pd.DatetimeIndex(ts['time']))
inner only gives you common time records (default option).
Series (join)
# join aligns the two Series on their index
both = a.to_frame().join(b.to_frame())
DataFrame (merge)
# inner merge on the indices; suffixes disambiguate shared column names
both = pd.merge(obs,mod, how='inner', right_index=True, left_index=True, suffixes=('_obs', '_mod'))
More than 2 DataFrames: use concat (slightly faster than merging 2 by 2)
frames = [ xr.open_dataset(fname)[[vname]].to_dataframe() for fname in obsfiles ]
ds = pd.concat(frames,axis=1,join='outer')
# subtract a reference column from every column, aligned on the row index
err = both[cols].sub(both['obs'], axis=0)
dif = df.subtract(df['obs'], axis='index')
df.dropna() #drop all rows that have any NaN values
df.dropna(how='all') #drop only if ALL columns are NaN
https://stackoverflow.com/questions/13035764/remove-pandas-rows-with-duplicate-indices
>>> %timeit df3.reset_index().drop_duplicates(subset='index', keep='first').set_index('index')
1000 loops, best of 3: 1.54 ms per loop
>>> %timeit df3.groupby(df3.index).first()
1000 loops, best of 3: 580 µs per loop
>>> %timeit df3[~df3.index.duplicated(keep='first')]
1000 loops, best of 3: 307 µs per loop
# One subplot per month of the time-indexed DataFrame 'df' (defined elsewhere).
# pd.TimeGrouper was deprecated/removed; pd.Grouper is the replacement.
grouped = df.groupby(pd.Grouper(freq='M'))
fig, axs = plt.subplots(12, 1, sharey=True, figsize=(12, 12))
# enumerate replaces the original manual i = 0 / i += 1 counter
for i, (gname, dfm) in enumerate(grouped):
    ax = axs[i]
    dfm.plot(ax=ax, legend=False)
# Two ways to fill gaps in a time series:
ds.interpolate().plot() # but also extrapolates
ds.resample('1H').interpolate(method='slinear').plot() # doesn't extrapolate; time integrity
slinear is from scipy and doesn't extrapolate. We ensure time integrity by resampling. See here. Defaults from pandas 0.22 are:
closed : {‘right’, ‘left’} Which side of bin interval is closed. The default is ‘left’ for all frequency offsets except for ‘M’, ‘A’, ‘Q’, ‘BM’, ‘BA’, ‘BQ’, and ‘W’ which all have a default of ‘right’.
label : {‘right’, ‘left’} Which bin edge label to label bucket with. The default is ‘left’ for all frequency offsets except for ‘M’, ‘A’, ‘Q’, ‘BM’, ‘BA’, ‘BQ’, and ‘W’ which all have a default of ‘right’.
Example: closed = left, label = right is 00:00 is average from [00:00, 01:00[
# framealpha=0 makes the legend background fully transparent
ax.legend(loc=2,ncol=2, fontsize='small', framealpha=0)
# pandas.tools.plotting was removed; scatter_matrix now lives in pandas.plotting
from pandas.plotting import scatter_matrix
_ = scatter_matrix(df, alpha=0.2, figsize=(10, 10))
# plot_date wants real datetime objects rather than a pandas index
ax.plot_date(ts1.index.to_pydatetime(), ts1[vname])
Method 1:
_ = plt.xticks(rotation=45)
Method 2:
for tick in ax.get_xticklabels():
    tick.set_rotation(45)
Method 3:
plt.setp(ax.xaxis.get_majorticklabels(), rotation=45)
Method 4:
Rotates and right aligns the x labels, and moves the bottom of the axes up to make room for them
fig.autofmt_xdate()
The default values are a rotation angle 30 deg and horizontal alignment "right". But they can be changed in the function call. The additional bottom argument is equivalent to setting:
plt.subplots_adjust(bottom=bottom)
fig.autofmt_xdate(bottom=0.2, rotation=30, ha='right')
Method 5 (pandas DataFrame)
ds.plot(rot=0)
# Explicit date tick locators/formatters: daily major ticks, 6-hourly minor ticks.
import matplotlib.dates as dates
ax.xaxis.set_major_locator(dates.DayLocator(interval=1))
ax.xaxis.set_minor_locator(dates.HourLocator(byhour=[0,6,12,18]))
ax.xaxis.set_major_formatter(dates.DateFormatter('%Y-%m-%d'))
fig.autofmt_xdate()
with PyNIO + xarray
ds = xr.open_dataset(filein, engine='pynio')
with cfgrib + xarray
# filter_by_keys avoids mixing incompatible GRIB level types in one dataset
ds = xr.open_dataset(filein, engine='cfgrib', backend_kwargs={'filter_by_keys': {'typeOfLevel': 'surface'}})
ds = ds.swap_dims({'step': 'valid_time'})  # index on valid time instead of lead time
with pygrib:
lats, lons = rec.latlons()
data = grb.values
or
data, lats, lons = rec.data() #(lat1=20,lat2=70,lon1=220,lon2=320)
print(lons.shape, lons.min(), lons.max())
print(lats.shape, lats.min(), lats.max())
print(data.shape, data.min(), data.max())
# Plot every record of a GRIB file on a grid of maps.
f = pygrib.open('gepqpf.t18z.pgrb2a.0p50.24hf024')
recs = f.select()  # all records
print(len(recs))   # was Python-2 'print len(recs)' (SyntaxError on py3)
fig, axs = plt.subplots(5,3, figsize=(15,15), sharex=True, sharey=True,
                        subplot_kw={'projection': ccrs.PlateCarree()})
for i, rec in enumerate(recs):
    data, lats, lons = rec.data()
    ax = axs.flatten()[i]
    p1 = ax.pcolormesh(lons, lats, data, transform=ccrs.PlateCarree())
    plt.colorbar(p1, ax=ax)
    ax.set_title(i)
    ax.coastlines(color='white')
def uv2theta(u, v):
    """Wind components (u, v) -> meteorological direction in degrees [0, 360)."""
    # arctan2 gives the mathematical angle; 270 - deg(angle) converts to the
    # "direction the wind blows FROM" convention, wrapped into [0, 360)
    math_angle = np.arctan2(v, u)
    return np.mod(270. - 180./np.pi * math_angle, 360.)
def theta2uv(theta):
    """Meteorological direction (degrees) -> unit wind components (u, v)."""
    # negative signs: direction is where the wind comes FROM
    rad = theta * np.pi/180.
    return (-1. * np.sin(rad), -1. * np.cos(rad))
# Time series on the left, wind rose on the right (windrose package;
# 'dfd' DataFrame defined elsewhere).
fig = plt.figure(figsize=(14,5))
ax1 = plt.subplot(121)
dfd[['Wind Speed', 'Wind Gust']].plot(ax=ax1)
ax2 = plt.subplot(122, projection='windrose')
# normed=True: frequencies in percent; opening<1 leaves gaps between bars
ax2.bar(dfd['Wind Direction'], dfd['Wind Speed'], normed=True, opening=0.8, edgecolor='white')
ax2.legend(loc=(1.1,0), fontsize='small')
# Two wind roses with a common radial axis: first measure each rose's
# maximum frequency on throwaway axes, then redraw both with a shared rmax.
# NOTE(review): 'bins' and 'WindroseAxes' must be defined/imported beforehand.
n = 500
ws = np.random.random(n) * 6
wd = np.random.random(n) * 360
df1 = pd.DataFrame({'ws':ws, 'wd': wd})
ws = np.random.random(n) * 6
wd = np.random.random(n) * 360
df2 = pd.DataFrame({'ws':ws, 'wd': wd})
df = pd.concat({'x': df1, 'y': df2}, axis=1)  # two-level columns: (x|y, ws|wd)
colnames = ['x', 'y']
thetavar = 'wd'
modvar = 'ws'
rmaxs = []
for vname in colnames:
    dfi = df.xs(vname, axis=1)
    ax = WindroseAxes.from_ax()
    # BUGFIX: was df[thetavar] / dfi[modwar] — 'modwar' is a NameError and
    # df[thetavar] fails on the two-level columns; both come from the slice dfi
    ax.bar(dfi[thetavar], dfi[modvar], normed=True, bins=bins)
    rmaxs += [np.max(np.sum(ax._info["table"], axis=0))]
    plt.close()
rmax = np.ceil(np.max(rmaxs)/10)*10  # round up to the next multiple of 10
print(rmax)  # was a Python-2 print statement
fig, axs = plt.subplots(1,2, figsize=(8,4),
                        subplot_kw={'projection': 'windrose',
                                    'rmax': rmax})
for i,vname in enumerate(colnames):
    ax = axs[i]
    dfi = df.xs(vname, axis=1)
    ax.bar(dfi[thetavar], dfi[modvar], normed=True, bins=bins)
    ax.set_title(vname, y=1.1)
    ax.legend(loc=(1.1,0), fontsize='small')#bbox_to_anchor=(1.1, 1.5))
def get_ij(lons, lats, plon, plat):
    """Grid indices of the point nearest to (plon, plat) in 2-D lon/lat arrays.

    Nearness is Euclidean distance in degrees (fine for small regions, not a
    great-circle distance). Prints the achieved lon/lat errors for inspection.

    Parameters
    ----------
    lons, lats : 2-D arrays of grid coordinates (same shape)
    plon, plat : target longitude and latitude

    Returns
    -------
    (i, j) : column index and row index of the nearest grid point
    """
    dist = np.sqrt((lons - plon)**2 + (lats - plat)**2)
    # argmin + unravel_index replaces np.where(dist == dist.min()); both
    # pick the first minimum in row-major order
    j, i = np.unravel_index(np.argmin(dist), dist.shape)
    print(plon, lons[j, i], plon - lons[j, i])
    print(plat, lats[j, i], plat - lats[j, i])
    return (i, j)
import argparse
# ArgumentDefaultsHelpFormatter plus %(default)s in help shows defaults in --help
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
# NOTE(review): without type=int, a value given on the CLI arrives as str
# while the default stays int
parser.add_argument("-x", "--xxxx", help="type (default: %(default)s)", default=2)
args = parser.parse_args()
print(args.__dict__)
t0=datetime.datetime(2017,1,31,3)
GOOD (736361.125):
dt2ncep(t0)
netCDF4.date2num(t0, units='days since 0001-01-01 00:00:00', calendar='standard')
BAD:
# NOTE(review): these give different numbers than the NCEP value above —
# epoch and calendar conventions differ between the libraries
t0 - datetime.datetime(1,1,1) # datetime.timedelta(736359, 10800)
matplotlib.dates.date2num(t0) # 736360.125
Hycom example: http://www.unidata.ucar.edu/blogs/developer/entry/accessing_netcdf_data_by_coordinates
# Merge TPAR boundary data and table output into one CSV
# ('filetpar', 'filetab', 'fileout' defined elsewhere).
tpar = numpy.loadtxt(filetpar, skiprows=1)
tab = numpy.loadtxt(filetab, skiprows=7)
header = ['BC_hs[m]','BC_tp[s]','BC_dp[deg]','St_hs[m]','St_tp[s]', 'St_dm[deg]','St_dp[deg]']
# columns 1:4 from tpar, reordered columns (1,4,2,3) from tab
datout = numpy.c_[tpar[:,1:4], tab[:,(1,4,2,3)]]
# 'with' closes the file even if savetxt raises (original leaked on error)
with open(fileout, 'w') as f:
    f.write(",".join(header)+"\n")
    numpy.savetxt(f, datout, fmt='%.5f', delimiter=",")
Export google calendar to csv with https://google-calendar-hours.com
import pandas as pd
# Timesheet: total hours per task (rows) per day (columns).
ts = pd.read_csv('Work_July_2019_(20190712141253).csv')
ts['date'] = pd.to_datetime(ts['End'])
ts = ts.set_index('date')
ts['day'] = ts.index.floor('d')  # truncate timestamps to the day
# margins=True adds an 'All' row/column with the grand totals
ts.pivot_table(values='Hours', index='Title', columns='day', aggfunc='sum', margins=True)