Basically I'm doing something like the following to turn some old 800 m datasets into 400 m facsimiles:
Load a low-resolution xarray zarr dataset and interpolate it to the higher resolution.
import s3fs
import xarray as xr
import rioxarray
import rasterio
from dask.distributed import worker_client

# lowRes, exploracorn_hiRes, access_key, secret_key and session are defined
# elsewhere (derived from creds); details elided here.

def simplezarr3(creds):
    with worker_client() as client:
        for lr in lowRes:
            print("loading test lores:", lr)
            # open the existing 800 m zarr store
            s3_path = 's3://bananasplits.zarr/'
            client_kwargs = {'region_name': 'us-west-2'}
            s3 = s3fs.S3FileSystem(anon=False, key=access_key, secret=secret_key,
                                   client_kwargs=client_kwargs)
            store = s3fs.S3Map(root=s3_path, s3=s3, check=False)
            loRes = xr.open_zarr(store=store)
            with rasterio.Env(session):
                for index, basedir in enumerate(exploracorn_hiRes):
                    # build the path to the high-resolution template COG
                    s3dir = 's3://hiresSample/' + 'cog/results-' + basedir + '/'
                    basefile = 'Fleagle.tif'
                    s3file = s3dir + basefile
                    print(s3file)
                    commod = rioxarray.open_rasterio(s3file, chunks=(1, 4096, 4096))
                    commod = commod.expand_dims(time=[basedir])
                    # lazily interpolate the 800 m data onto the 400 m grid
                    hiRes = loRes.interp_like(commod)
                    # write the interpolated result out to a new zarr store
                    s3_path_write = 's3://bananasplits/hires/' + lr + '.zarr/'
                    s3 = s3fs.S3FileSystem(anon=False, key=access_key, secret=secret_key,
                                           client_kwargs=client_kwargs)
                    writestore = s3fs.S3Map(root=s3_path_write, s3=s3, check=False)
                    hiRes.to_zarr(store=writestore, mode='w')
    return True
....
future = client.submit(simplezarr3, creds)
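For completeness, I wait on the future roughly like this (just a sketch), which should surface any exception raised on the worker:

    # block until the task finishes; a worker-side exception is re-raised here,
    # so a clean return of True means the function ran to completion
    result = future.result()
    print("simplezarr3 returned:", result)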
However, running this only seems to write the first chunk of the zarr array for each variable. Is this pattern just wrong (i.e. everything is being written out from a single worker), or does something odd happen with dask/zarr when xarray does the interpolation?
The metadata is written fine, but each variable only gets its 0.0.0 chunk, when there should be several.
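For reference, this is the kind of check I'm planning to add just before the write to see where the chunks go missing. It's a minimal sketch, and the compute=False / explicit compute() pattern is just my assumption about how to force every chunk write to be scheduled through the worker_client rather than relying on to_zarr's default compute path:

    # inside the inner loop, just before the write
    print(hiRes.chunks)    # expect several chunks per dimension, not just one
    print(hiRes.nbytes)    # sanity-check the full interpolated size

    # make the write explicitly lazy, then force it, so all chunk writes
    # are computed rather than only the metadata plus the first chunk
    delayed_write = hiRes.to_zarr(store=writestore, mode='w', compute=False)
    delayed_write.compute()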