-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
14 changed files
with
2,805 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,47 @@ | ||
#!/home/x_guimo/.conda/envs/mpi/bin/python
#SBATCH --export=LUMIA_SCRATCH=/scratch/local

import sys
from argparse import ArgumentParser
import lumia
from lumia.formatters import lagrange
from lumia.Tools.logging_tools import logger
from lumia.interfaces.footprint_flexRes import Interface
from lumia.obsdb.InversionDb import obsdb

p = ArgumentParser()
p.add_argument('action', type=str, choices=['var4d', 'gradtest', 'adjtest'])
p.add_argument('--rc', type=str, help='Configuration file ("rc-file")')
p.add_argument('--verbosity', '-v', default='INFO')
args = p.parse_args(sys.argv[1:])

logger.setLevel(args.verbosity)

rcf = lumia.rc(args.rc)

# All three actions share the exact same setup (observation database,
# prior emissions, transport model, sensitivity map, control vector and
# optimizer); only the optimizer method invoked at the end differs.
# Build everything once, then dispatch on the requested action.
obs = obsdb(rcf, setupUncertainties=rcf.get('obs.uncertainty.setup', default=True))
emis = lagrange.Emissions(rcf, obs.start, obs.end)
model = lumia.transport(rcf, obs=obs, formatter=lagrange)
sensi = model.calcSensitivityMap()
control = Interface(rcf, ancilliary={'sensi_map': sensi}, emis=emis.data)
opt = lumia.optimizer.Optimizer(rcf, model, control)

# Map each CLI action to its optimizer entry point.  `args.action` is
# guaranteed by argparse `choices` to be one of these keys.
actions = {
    'var4d': opt.Var4D,            # full 4D-Var inversion
    'gradtest': opt.GradientTest,  # gradient (Taylor) test
    'adjtest': opt.AdjointTest,    # adjoint consistency test
}
actions[args.action]()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,146 @@ | ||
#!/usr/bin/env python | ||
|
||
from .regions import region | ||
from numpy import expand_dims, array, squeeze, repeat, zeros | ||
import logging | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
class Region(region):
    """A `region` grid whose definition is read from an rc-file.

    The rc-file is expected to provide the keys `region` (the grid name)
    and `region.lat0/lat1/dlat/lon0/lon1/dlon` (bounds and resolutions).
    """

    def __init__(self, rcf):
        rname = rcf.get('region')
        # Read the six grid-definition keys in the same order as before:
        keys = ('lat0', 'lat1', 'dlat', 'lon0', 'lon1', 'dlon')
        settings = {key: rcf.get(f'region.{key}') for key in keys}
        region.__init__(self, name=rname, **settings)
|
||
class GriddedData:
    """A gridded (lev, lat, lon) data array tied to a regular lat/lon grid.

    Supports padding the data to a global grid and regridding it onto a
    compatible destination grid (cropping, refining or coarsening).
    """

    def __init__(self, data, reg, padding=None):
        """
        Args:
            data: 2D (lat, lon) or 3D (lev, lat, lon) array.
            reg: grid (a `region` instance) on which `data` is defined.
            padding: if not None, immediately pad the data to a global grid,
                filling the new cells with this value.
        """
        self.data = data.copy()
        self.region = reg
        if padding is not None :
            self.Pad(padding)
        if len(self.data.shape) == 2 :
            # Add a dummy vertical dimension if we deal with surface data
            self.data = expand_dims(self.data, 0)

    def regrid(self, newreg, weigh_by_area=False):
        """Return the data regridded on `newreg` (which must be contained in
        the current grid).

        `weigh_by_area` can be True, False, or the strings
        'refine'/'coarsen', which enable the area weighting only for that
        specific operation.
        """
        assert newreg <= self.region

        if self.region.dlon == newreg.dlon and self.region.dlat == newreg.dlat :
            data_out = self.crop(newreg)
        elif self.region.dlon > newreg.dlon and self.region.dlat > newreg.dlat :
            data_out = self.refine(newreg, weigh_by_area=weigh_by_area in [True, 'refine'])
        elif self.region.dlon < newreg.dlon and self.region.dlat < newreg.dlat :
            data_out = self.coarsen(newreg, weigh_by_area=weigh_by_area in [True, 'coarsen'])
        else :
            # In theory, there are other cases (e.g. refining along one axis
            # while coarsening along the other), but since they are unlikely,
            # they are not implemented.
            logger.error("Requested regridding is not implemented (yet?)")
            raise NotImplementedError

        return data_out

    def Pad(self, padding):
        """Embed the data in a global grid of the same resolution; cells
        outside the original domain are filled with `padding`."""
        logger.debug(f"Pad with {padding:.1f}")

        # Create a global grid with the same resolution as the original grid:
        reg_out = region(lon0=-180, lon1=180, dlon=self.region.dlon,
                         lat0=-90, lat1=90, dlat=self.region.dlat)

        data_out = zeros((self.data.shape[0], reg_out.nlat, reg_out.nlon)) + padding
        # Locate the original domain inside the global grid:
        lat0 = reg_out.lat0.tolist().index(self.region.latmin)
        lon0 = reg_out.lon0.tolist().index(self.region.lonmin)
        data_out[:, lat0:lat0+self.region.nlat, lon0:lon0+self.region.nlon] = self.data
        self.data = data_out
        self.region = reg_out

    def refine(self, newreg, weigh_by_area):
        """Subdivide the grid cells to the resolution of `newreg`, then crop
        to its domain.

        If `weigh_by_area` is True, values are treated as per-cell totals:
        they are converted to per-m2 densities before the subdivision and
        back to per-cell values afterwards (so that totals are conserved).
        """
        logger.debug("Refine grid, %s weigh by area" % (["don't", "do"][weigh_by_area]))

        # Create an intermediate grid, with the same resolution as the new
        # grid and the same boundaries as the original one:
        regX = region(lat0=self.region.latmin, lat1=self.region.latmax, dlat=newreg.dlat,
                      lon0=self.region.lonmin, lon1=self.region.lonmax, dlon=newreg.dlon
        )

        # Make sure that the new data can be generated just by dividing the
        # original cells into an integer number of sub-cells:
        assert self.region.dlat % newreg.dlat == 0
        assert self.region.dlon % newreg.dlon == 0
        assert newreg.latmin in regX.lat0
        assert newreg.lonmin in regX.lon0

        ratio_lats = self.region.dlat / newreg.dlat
        ratio_lons = self.region.dlon / newreg.dlon

        # Work on a copy of the data
        data = self.data.copy()

        # convert to unit/m2 for the conversion
        if weigh_by_area:
            data /= self.region.area[None, :, :]

        # refine (each coarse cell is repeated over the finer cells it covers):
        data = repeat(data, int(ratio_lats), axis=1)
        data = repeat(data, int(ratio_lons), axis=2)

        # Convert back to the original unit
        if weigh_by_area:
            data *= regX.area[None, :, :]

        # "data" is on a different grid than "self.data", so we need to
        # instantiate a new "GriddedData" object
        return GriddedData(data, regX).crop(newreg)

    def coarsen(self, newreg, weigh_by_area):
        """
        Coarsen a 3D array by aggregating pixels along the lat and lon axis.

        If `weigh_by_area` is True, values are treated as per-m2 densities:
        they are converted to per-cell totals before the aggregation and
        back to densities afterwards.
        """
        # Fixed: the "do"/"don't" labels were swapped relative to `refine`,
        # so the message reported the opposite of the actual setting.
        logger.debug("Coarsen grid, %s weigh by area" % (["don't", "do"][weigh_by_area]))

        # Create an intermediate grid, with the same resolution as the new
        # grid and the same boundaries as the original one:
        regX = region(lat0=self.region.latmin, lat1=self.region.latmax, dlat=newreg.dlat,
                      lon0=self.region.lonmin, lon1=self.region.lonmax, dlon=newreg.dlon
        )

        # Make sure that the new data can be generated just by aggregating an
        # integer number of original cells:
        assert newreg.dlat % self.region.dlat == 0
        assert newreg.dlon % self.region.dlon == 0
        assert newreg.latmin in regX.lat0
        assert newreg.lonmin in regX.lon0

        ratio_lats = int(newreg.dlat / self.region.dlat)
        ratio_lons = int(newreg.dlon / self.region.dlon)

        # work on a copy of the data
        data = self.data.copy()

        # convert to per-cell totals (value * m2) so that summing cells
        # aggregates correctly:
        if weigh_by_area:
            data *= self.region.area[None, :, :]
        nlev = data.shape[0]

        # Coarsen: fold the fine cells of each coarse cell onto an extra axis
        # and sum it, first along lon, then along lat:
        data = data.reshape(nlev, self.region.nlat, regX.nlon, ratio_lons).sum(3)
        data = data.reshape(nlev, regX.nlat, ratio_lats, regX.nlon).sum(2)

        # Convert back to the original unit:
        if weigh_by_area:
            data /= regX.area[None, :, :]

        # "data" is on a different grid than "self.data", so we need to
        # instantiate a new "GriddedData" object
        return GriddedData(data, regX).crop(newreg)

    def crop(self, newreg):
        """Crop the data to the domain of `newreg` (same resolution) and
        return the resulting array, with the dummy vertical dimension
        squeezed out if possible."""
        logger.debug("crop grid")

        # ensure that the new grid is a subset of the old one
        assert all([l in self.region.lats for l in newreg.lats])
        assert all([l in self.region.lons for l in newreg.lons])

        # boolean masks selecting the rows/columns to keep:
        slat = array([l in newreg.lats for l in self.region.lats])
        slon = array([l in newreg.lons for l in self.region.lons])

        # return. Remove the dummy vertical dimension if possible
        return squeeze(self.data[:, slat, :][:, :, slon])
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,106 @@ | ||
#!/usr/bin/env python | ||
import logging | ||
from lumia import tqdm | ||
import shutil | ||
columns = shutil.get_terminal_size().columns | ||
|
||
|
||
def colorize(msg, color=None):
    """Translate pseudo-HTML markup in *msg* into ANSI escape sequences.

    If *color* is given, the whole message is first wrapped in the
    corresponding tag pair (e.g. ``color='r'`` wraps it in ``<r>...</r>``).

    Supported tags: k/r/g/y/b/m/c/w (foreground colours), ybg (yellow
    background — open tag only, it has no closing reset), s (bold),
    i (italic), u (underline).
    """
    if color is not None :
        msg = f'<{color}>{msg}</{color}>'

    reset = '\x1b[0m'
    # One table instead of thirty hand-written .replace() lines.  No tag is a
    # substring of another, so the replacement order is irrelevant.
    tags = {
        '<k>': '\x1b[0;30m', '</k>': reset,    # grey
        '<r>': '\x1b[0;31m', '</r>': reset,    # red
        '<g>': '\x1b[0;32m', '</g>': reset,    # green
        '<y>': '\x1b[0;33m', '</y>': reset,    # yellow
        '<ybg>': '\x1b[0;43m',                 # yellow background (no close tag)
        '<b>': '\x1b[0;34m', '</b>': reset,    # blue
        '<m>': '\x1b[0;35m', '</m>': reset,    # magenta
        '<c>': '\x1b[0;36m', '</c>': reset,    # cyan
        '<w>': '\x1b[0;37m', '</w>': reset,    # white
        '<s>': '\x1b[1m', '</s>': '\x1b[22m',  # bold
        '<i>': '\x1b[3m', '</i>': '\x1b[23m',  # italic
        '<u>': '\x1b[4m', '</u>': '\x1b[24m',  # underlined
    }
    for tag, code in tags.items():
        msg = msg.replace(tag, code)
    return msg
|
||
|
||
# Choose the log formatter and handler base class: prefer `colorlog` for
# coloured output when it is installed, otherwise fall back to the plain
# stdlib formatter with the same layout.
try :
    import colorlog
except ModuleNotFoundError :
    base_handler = logging.StreamHandler
    formatter = logging.Formatter(
        "%(name)30s | %(levelname)-8s (line %(lineno)d) | %(message)s",
        datefmt=None
    )
else :
    base_handler = colorlog.StreamHandler
    formatter = colorlog.ColoredFormatter(
        "%(message_log_color)s %(name)30s | %(reset)s %(log_color)s %(levelname)-8s (line %(lineno)d) | %(reset)s %(message)s",
        datefmt=None,
        reset=True,
        # Colours for the "levelname" part of the line:
        log_colors={
            'DEBUG': 'purple',
            'INFO': 'cyan',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'white,bg_red'},
        # Colours for the leading logger-name part of the line:
        secondary_log_colors={'message': {
            'DEBUG': 'bold_blue',
            'INFO': 'bold_cyan',
            'WARNING': 'bold_yellow',
            'ERROR': 'red',
            'CRITICAL': 'white,bg_red'}},
    )
|
||
|
||
class TqdmHandler(base_handler):
    """Log handler that writes through `tqdm.write`, so that log lines do not
    garble any active tqdm progress bar, and that expands the pseudo-HTML
    colour tags understood by `colorize`.

    (The former explicit `__init__` only called `super().__init__()` and was
    therefore redundant; the inherited constructor is used instead.)
    """

    def emit(self, record):
        """Format, colorize and print `record`; formatting failures are
        delegated to the standard `logging` error handling."""
        try:
            msg = colorize(self.format(record))
            tqdm.write(msg)
        except (KeyboardInterrupt, SystemExit):
            # Never swallow interpreter-exit signals.
            raise
        except Exception:
            self.handleError(record)
|
||
# Install the tqdm-aware handler on the root logger so that every logger in
# the application inherits it.  (Dead, commented-out wiring experiments —
# an old handler instantiation and a file handler — were removed.)
handler = TqdmHandler()
handler.setFormatter(formatter)

logger = logging.getLogger()
logger.addHandler(handler)
Oops, something went wrong.