# import earthaccess
import os
import warnings
import requests
import pandas as pd
os.environ['USE_PYGEOS'] = '0'
import geopandas
import folium
import folium.plugins
import seaborn as sns
import glob
import numpy as np
import netCDF4 as nc
import matplotlib.pyplot as plt
import branca.colormap as cm
from folium import Map, TileLayer
from branca.element import Figure
from pystac_client import Client
from pyproj import Geod
from shapely import wkt
from datetime import datetime
Interactive Session: Complementing anthropogenic GHG emissions with natural GHG emissions and fluxes
About the Data
Methane (CH₄) emissions from wetlands are estimated to be the largest natural source of methane in the global CH₄ budget, contributing roughly one third of the combined total of natural and anthropogenic emissions. Wetland CH₄ is produced by microbes breaking down organic matter in the oxygen-deprived environment of inundated soils. Due to limited data availability, the role of wetland CH₄ emissions has thus far been underrepresented. Using the Wald Schnee und Landschaft version (LPJ-wsl) of the Lund-Potsdam-Jena Dynamic Global Vegetation Model (LPJ-DGVM), global CH₄ emissions from wetlands are estimated at 0.5 x 0.5 degree resolution by simulating wetland extent and using characteristics of these inundated areas, such as soil moisture, temperature, and carbon content, to estimate the CH₄ quantities emitted into the atmosphere. Highlighted areas displayed in this dataset show concentrated methane sources from tropical and high-latitude ecosystems. The LPJ-wsl Wetland Methane Emissions data product presented here consists of global daily and monthly model estimates of terrestrial wetland CH₄ emissions from 1980 to 2021. These data are regularly used in conjunction with the National Aeronautics and Space Administration's (NASA) Goddard Earth Observing System (GEOS) model to simulate the impact of wetlands and other methane sources on atmospheric methane concentrations, to compare against satellite and airborne data, and to improve understanding and prediction of wetland emissions.
Requirements
- Set up Python Environment - See setup_instructions.md in the /setup/ folder
Learning Objectives
- How to use the U.S. GHG Center STAC Catalog to access Wetland Methane Emissions, LPJ-wsl Model data
- How to use earthaccess to find MERRA-2 data
- How to visualize datasets using folium and perform zonal statistics
- How to plot time series for MERRA-2 variables and Wetland Methane Emissions, LPJ-wsl Model data and analyze the results
Approach
- Identify available dates and temporal frequency of observations for the given collection using the GHGC API /stac endpoint. The collection processed in this notebook is the Wetland Methane Emissions, LPJ-wsl Model data product
- Pass the STAC item into the Raster API /stac/tilejson.json endpoint
- Access the MERRA-2 data for different variables (precipitation rate, surface soil moisture)
- Define the spatial region of interest
- Use plugins from folium to visualize two tiles side by side, allowing time point comparison
- After the visualization, perform zonal statistics for a given polygon
- Plot monthly time series for the LPJ-wsl wetland emissions and the different MERRA-2 datasets and analyze the results
Data
- Monthly LPJ Wetland CH4 Emissions (U.S. GHG Center STAC)
- Monthly MERRA-2 Precipitation Rate - Dataset: MERRA2_400.tavgM_2d_flx_Nx, Variable: PRECTOT, https://disc.gsfc.nasa.gov/datasets/M2TMNXFLX_5.12.4/summary
- Monthly MERRA-2 Surface Soil Moisture - Dataset: MERRA2_400.tavgM_2d_lnd_Nx, Variable: SFMC (long-term mean variable: GWETTOP), https://disc.gsfc.nasa.gov/datasets/M2TMNXLND_5.12.4/summary
- Monthly MERRA-2 T2M - Dataset: MERRA2_400.instM_2d_asm_Nx, Variable: T2M, https://disc.gsfc.nasa.gov/datasets/M2IMNXASM_5.12.4/summary
- MERRA-2 Long-Term Means - Dataset: MERRA2.tavgC_2d_ltm_Nx, https://disc.gsfc.nasa.gov/datasets/M2TCNXLTM_1/summary
Setup
Import the required Python libraries by running the import cell at the top of this notebook.
NASA Earthdata Login Credentials
To download or stream NASA data you will need an Earthdata account; you can create one at https://urs.earthdata.nasa.gov/home. We will use the login function from the earthaccess library for authentication before downloading at the end of the notebook. This function can also be used to create a local .netrc file if it doesn't exist, or to add your login info to an existing .netrc file. If no Earthdata Login credentials are found in the .netrc, you'll be prompted for them. This step is not necessary to conduct searches, but it is needed to download or stream data.
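For reference, a minimal sketch of that login step might look like the cell below; the commented-out cell that follows documents an alternative netrc/S3 setup. Passing persist=True simply saves your credentials to the local .netrc file.
import earthaccess

# Prompt for (or reuse) NASA Earthdata Login credentials and store them in ~/.netrc
auth = earthaccess.login(persist=True)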
# from netrc import netrc
# from subprocess import Popen
# from platform import system
# from getpass import getpass
# import os
# import requests
# import xarray as xr
# import s3fs
# import boto3
# import cartopy.crs as ccrs
# import cartopy.feature as cfeature
# import matplotlib.pyplot as plt
# import warnings
# from IPython.display import display, Markdown
# if (boto3.client('s3').meta.region_name == 'us-west-2'):
#     display(Markdown('### us-west-2 Region Check: ✅'))
# else:
#     display(Markdown('### us-west-2 Region Check: ❌'))
#     raise ValueError('Your notebook is not running inside the AWS us-west-2 region, and will not be able to directly access NASA Earthdata S3 buckets')
# urs = 'urs.earthdata.nasa.gov' # Earthdata URL endpoint for authentication
# prompts = ['Enter NASA Earthdata Login Username: ',
#            'Enter NASA Earthdata Login Password: ']
# netrc_name = ".netrc"
# # Determine if netrc file exists, and if so, if it includes NASA Earthdata Login Credentials
# try:
#     netrcDir = os.path.expanduser(f"~/{netrc_name}")
#     # Check credentials against URS, and if username exists
#     netrc(netrcDir).authenticators(urs)[0]
# # Below, create a netrc file and prompt user for NASA Earthdata Login Username and Password
# except FileNotFoundError:
#     homeDir = os.path.expanduser("~")
#     Popen('touch {0}{2} | echo machine {1} >> {0}{2}'.format(homeDir + os.sep, urs, netrc_name), shell=True)
#     Popen('echo login {} >> {}{}'.format(getpass(prompt=prompts[0]), homeDir + os.sep, netrc_name), shell=True)
#     Popen('echo \'password {} \'>> {}{}'.format(getpass(prompt=prompts[1]), homeDir + os.sep, netrc_name), shell=True)
#     # Set restrictive permissions
#     Popen('chmod 0600 {0}{1}'.format(homeDir + os.sep, netrc_name), shell=True)
# gesdisc_s3 = "https://data.gesdisc.earthdata.nasa.gov/s3credentials"
# # Define a function for S3 access credentials
# def begin_s3_direct_access(url: str=gesdisc_s3):
#     response = requests.get(url).json()
#     return s3fs.S3FileSystem(key=response['accessKeyId'],
#                              secret=response['secretAccessKey'],
#                              token=response['sessionToken'],
#                              client_kwargs={'region_name':'us-west-2'})
# fs = begin_s3_direct_access()
# fn = 's3://gesdisc-cumulus-prod-protected/MERRA2/M2T1NXSLV.5.12.4/2019/03/MERRA2_400.tavg1_2d_slv_Nx.20190313.nc4'
# fs.info(fn)
# fs.ls('s3://gesdisc-cumulus-prod-protected/MERRA2/')
Querying the STAC API
Search for the LPJ Wetland Methane Emissions Data using the Raster API and its STAC collection name!
# Provide the STAC and RASTER API endpoints
STAC_API_URL = "http://ghg.center/api/stac"
RASTER_API_URL = "https://ghg.center/api/raster"

# Use the collection name as it appears in the STAC catalog
# Name of the collection for wetland methane monthly emissions
collection_name = "lpjwsl-wetlandch4-monthgrid-v1"

# Fetch the collection from the STAC catalog using the appropriate endpoint
# The 'requests' library allows us to make HTTP requests
collection = requests.get(f"{STAC_API_URL}/collections/{collection_name}").json()
collection
Here we are examining the contents of our collection under summaries. We notice the data is available from January 1980 to May 2021. By looking at dashboard:time density, we can see that these observations are collected monthly.
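As a quick programmatic check, you can read these values straight from the collection response. The snippet below is only a sketch: the temporal interval is a standard STAC field, while the time-density property name is an assumption based on the GHG Center dashboard metadata and may differ.
# Temporal extent reported by the collection (standard STAC field)
print(collection["extent"]["temporal"]["interval"])

# Cadence of the observations (assumed dashboard property name)
print(collection.get("dashboard:time_density"))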
# Create a function that counts the number of items in the above data collection in the STAC API
def get_item_count(collection_id):
    count = 0
    items_url = f"{STAC_API_URL}/collections/{collection_id}/items"

    while True:
        response = requests.get(items_url)

        if not response.ok:
            print("error getting items")
            exit()

        stac = response.json()
        count += int(stac["context"].get("returned", 0))
        next = [link for link in stac["links"] if link["rel"] == "next"]

        if not next:
            break
        items_url = next[0]["href"]

    return count
# Check total number of items available
number_of_items = get_item_count(collection_name)
items = requests.get(f"{STAC_API_URL}/collections/{collection_name}/items?limit={number_of_items}").json()["features"]
print(f"Found {len(items)} items")
# Examining the first item in the collection
# Keep in mind that a list starts from 0, 1, 2,... therefore ‘[0]’ is referring to the first item in the list/collection
items[0]
Below, we enter minimum and maximum values to provide our upper and lower bounds in ‘rescale_values.’
Search for MERRA-2 Data
= f"{os.getenv('HOME')}/shared/data/use-case-2"
data_dir =f'{data_dir}/merra_t2m_dir/'
merra_t2m_dir= f'{data_dir}/merra_soil_moisture_dir/'
merra_soil_moisture_dir = f'{data_dir}/merra_precip_rate_dir/'
merra_precip_rate_dir = f'{data_dir}/merra_t2m_clim_dir/'
merra_t2m_clim_dir
= merra_t2m_clim_dir
merra_precip_rate_clim_dir = merra_t2m_clim_dir merra_soil_moisture_clim_dir
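The MERRA-2 granules used below are assumed to be pre-staged under data_dir. If you need to retrieve them yourself, a hedged sketch using earthaccess (see the Learning Objectives) might look like the following; the short name M2TMNXFLX corresponds to the monthly flux dataset linked in the Data section above, and the temporal range and download directory are placeholders you should adjust.
import earthaccess

earthaccess.login()

# Search the monthly MERRA-2 flux collection (contains PRECTOT) for 2020-2021
results = earthaccess.search_data(
    short_name="M2TMNXFLX",
    temporal=("2020-01-01", "2021-12-31"),
)

# Download the matching granules into the precipitation-rate directory
earthaccess.download(results, local_path=merra_precip_rate_dir)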
Define the Spatial Region of Interest
For this example, our spatial region of interest (ROI) will be a rectangular area in the state of Louisiana (LA). The state of Louisiana encompasses a diverse range of non-tidal and tidal freshwater wetlands, including palustrine, lacustrine, riverine, estuarine, and marine wetlands. These ecosystems cover roughly one-third of the state according to the U.S. Fish and Wildlife Service, making Louisiana an ideal site for monitoring natural sources of methane emissions.
# We create an area of interest (polygon) which will be used later
boundaries = {
    'Global': [-180, 180, -90, 90],
    'Louisiana': [-95.9, -87.50, 28.7, 33.5],
    'CONUS': [-127.08, -63.87, 23.55, 49.19],  # Continental United States
    'Florida': [-84.07, -79.14, 24.85, 30.5],
    'Northeast': [-74.88, -69.81, 40.48, 42.88]
}
focus = 'Louisiana'

aoi = boundaries[focus]
louisiana_aoi = {
    "type": "Feature",
    "properties": {},
    "geometry": {
        "coordinates": [
            [
                [aoi[0], aoi[2]],  # SW Bounding Coordinate
                [aoi[0], aoi[3]],  # NW Bounding Coordinate
                [aoi[1], aoi[3]],  # NE Bounding Coordinate
                [aoi[1], aoi[2]],  # SE Bounding Coordinate
                [aoi[0], aoi[2]]   # Closing the polygon at the SW Bounding Coordinate
            ]
        ],
        "type": "Polygon",
    },
}
Opening and Exploring Wetland Methane Emissions Data Using the Raster API
In this notebook, we will explore the temporal impacts of methane emissions. We will visualize the outputs on a map using folium.
# To access the year value from each item more easily, this will let us query more explicitly by year and month (e.g., 2020-02)
items = {item["properties"]["datetime"][:7]: item for item in items}

# Set the asset value to the appropriate parameter
asset_name = 'ch4-wetlands-emissions'

# Set the minimum and maximum values to provide our upper and lower bounds
rescale_values = {'max': 2.0, 'min': 0.0}
Now, we will pass the item id, collection name, and rescale values (rescale_values) to the Raster API endpoint. We will do this twice, once for May 2020 and again for May 2021, so we can visualize each time point independently.
# Set a colormap for the granule
# Please refer to matplotlib library if you'd prefer choosing a different color ramp (https://matplotlib.org/stable/users/explain/colors/colormaps.html)
color_map = "magma"

date1, date2 = '2020-05', '2021-05'

# May 2020 tile
tile_1 = requests.get(
    f"{RASTER_API_URL}/stac/tilejson.json?collection={items[date1]['collection']}&item={items[date1]['id']}"
    "&assets=ch4-wetlands-emissions"
    f"&color_formula=gamma+r+1.05&colormap_name={color_map}"
    f"&rescale={rescale_values['min']},{rescale_values['max']}"
).json()
tile_1

# May 2021 tile
tile_2 = requests.get(
    f"{RASTER_API_URL}/stac/tilejson.json?collection={items[date2]['collection']}&item={items[date2]['id']}"
    "&assets=ch4-wetlands-emissions"
    f"&color_formula=gamma+r+1.05&colormap_name={color_map}"
    f"&rescale={rescale_values['min']},{rescale_values['max']}",
).json()
tile_2
Visualizing CH₄ Emissions
# We will import folium to map and folium.plugins to allow side-by-side mapping
# Set initial zoom and center of map for CH₄ Layer
# Centre of map [latitude, longitude]
from folium.plugins import MousePosition
map_ = folium.Map(location=(30, -90), zoom_start=6)

# Use 'TileLayer' to display the raster layers and adjust the transparency of the layers on the map
# May 2020
vis_tile_1 = TileLayer(
    tiles=tile_1["tiles"][0],
    attr="GHG",
    opacity=0.5,
)

# May 2021
vis_tile_2 = TileLayer(
    tiles=tile_2["tiles"][0],
    attr="GHG",
    opacity=0.5,
)

# Use the SideBySideLayers plugin to compare two layers on the same map
sbs = folium.plugins.SideBySideLayers(layer_left=vis_tile_1, layer_right=vis_tile_2)

vis_tile_1.add_to(map_)
vis_tile_2.add_to(map_)

# Add the GeoJSON polygon representing the state of Louisiana
folium.GeoJson(louisiana_aoi, name="louisiana, USA").add_to(map_)

sbs.add_to(map_)
MousePosition().add_to(map_)

# Visualizing the map
map_
# We will import folium to map and folium.plugins to add multiple tiles with layer control

# Defining the breaks in the colormap
colormap = cm.LinearColormap(colors=['#2c115f', '#721f81', '#b73779', '#f1605d', '#feb078'], vmin=0, vmax=2)

# Add an appropriate caption, in this case it would be grams of methane per meter squared per day
colormap.caption = 'g CH₄/m²/day'

new_date1, new_date2, new_date3 = '2021-05', '2021-06', '2021-07'

# Reading the tiles from the Raster API
tile_date_1 = requests.get(
    f"{RASTER_API_URL}/stac/tilejson.json?collection={items[new_date1]['collection']}&item={items[new_date1]['id']}"
    "&assets=ch4-wetlands-emissions"
    f"&color_formula=gamma+r+1.05&colormap_name={color_map}"
    f"&rescale={rescale_values['min']},{rescale_values['max']}",
).json()
tile_date_1

tile_date_2 = requests.get(
    f"{RASTER_API_URL}/stac/tilejson.json?collection={items[new_date2]['collection']}&item={items[new_date2]['id']}"
    "&assets=ch4-wetlands-emissions"
    f"&color_formula=gamma+r+1.05&colormap_name={color_map}"
    f"&rescale={rescale_values['min']},{rescale_values['max']}",
).json()
tile_date_2

tile_date_3 = requests.get(
    f"{RASTER_API_URL}/stac/tilejson.json?collection={items[new_date3]['collection']}&item={items[new_date3]['id']}"
    "&assets=ch4-wetlands-emissions"
    f"&color_formula=gamma+r+1.05&colormap_name={color_map}"
    f"&rescale={rescale_values['min']},{rescale_values['max']}",
).json()
tile_date_3

# Interactive visualization
map_ = folium.Map(location=(30, -90), zoom_start=5)

# May 2021
tile1 = TileLayer(
    tiles=tile_date_1["tiles"][0],
    attr="GHG",
    opacity=0.8,
    name=new_date1
)
tile1.add_to(map_)

# June 2021
tile2 = TileLayer(
    tiles=tile_date_2["tiles"][0],
    attr="GHG",
    opacity=0.8,
    name=new_date2
)
tile2.add_to(map_)

# July 2021
tile3 = TileLayer(
    tiles=tile_date_3["tiles"][0],
    attr="GHG",
    opacity=0.8,
    name=new_date3
)
tile3.add_to(map_)

folium.GeoJson(louisiana_aoi, name="louisiana, USA").add_to(map_)
folium.LayerControl(collapsed=False, position='bottomleft').add_to(map_)

svg_style = '<style>svg#legend {font-size: 14px; background-color: white;}</style>'
map_.get_root().header.add_child(folium.Element(svg_style))
map_.add_child(colormap)

# Visualizing the map
map_
Opening and Exploring MERRA-2 Data
In this notebook, we will explore the MERRA-2 data. We will visualize the outputs on a map using folium.
# Re-fetch the items in the wetland CH4 collection (the 'items' variable was converted to a dictionary above)
items = requests.get(f"{STAC_API_URL}/collections/{collection_name}/items?limit={number_of_items}").json()["features"]
## visualize MERRA-2
# [-95.9, -87.50, 28.7, 33.5]

# Insert the path to the NetCDF file
file_path = f'{data_dir}/merra_t2m_dir/MERRA2_100.instM_2d_asm_Nx.199101.nc4'  # Replace with the path to your NetCDF file
dataset = nc.Dataset(file_path)

# Access coordinates (if needed)
latitude = dataset.variables['lat'][:]
longitude = dataset.variables['lon'][:]

# Access variables
variable_name = 'T2M'  # Replace with the variable you want to plot
# dataset.variables['T2M'][0, 237:248, 135:150]
variable_data = dataset.variables['T2M'][0, :, :]

# Close the NetCDF file
# dataset.close()

# Plot the data
plt.imshow(variable_data, origin='lower', cmap='viridis', extent=(longitude.min(), longitude.max(), latitude.min(), latitude.max()))
plt.colorbar(label='Variable Unit')
plt.xlabel('Longitude')
plt.ylabel('Latitude')
plt.title(f'Plot of {variable_name}')
plt.show()

print('variables provided in this netcdf are:', dataset.variables)
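The dataset.variables dump above can be long. If you only need a quick overview, a short sketch like this prints each variable's name, shape, and units instead:
# Print a compact summary of the variables in the NetCDF file
for name, var in dataset.variables.items():
    print(name, var.shape, getattr(var, 'units', 'n/a'))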
Generate Statistics and Time Series Line Plots for the Methane Emissions Based on Observations Collected in 2020 and 2021
# The bounding box should be passed to the geojson param as a geojson Feature or FeatureCollection
# The following function generates statistics for the assigned parameter
def generate_stats(item, geojson):
    result = requests.post(
        f"{RASTER_API_URL}/cog/statistics",
        params={"url": item["assets"]["ch4-wetlands-emissions"]["href"]},
        json=geojson,
    ).json()
    return {
        **result["properties"],
        "datetime": item["properties"]["datetime"],
    }
With the function above, we can generate the statistics for the area of interest.
# Default value is to fetch the data for the past 2 years (2020, 2021). You can change the indices to fetch the values for years beyond that.
stats = [generate_stats(item, louisiana_aoi) for item in items[:24]]
stats
# Manipulating and cleaning the stats output from previous cell
def clean_stats(stats_json) -> pd.DataFrame:
    df = pd.json_normalize(stats_json)
    df.columns = [col.replace("statistics.b1.", "") for col in df.columns]
    df["date"] = pd.to_datetime(df["datetime"])
    return df

df = clean_stats(stats)
df.head(5)
# Filtering the stats for years 2020 and 2021
df_2020_2021 = df[(df['date'].dt.year == 2020) | (df['date'].dt.year == 2021)]
df_2020_2021['year'] = pd.to_datetime(df_2020_2021['datetime']).dt.year
df_2020_2021['month'] = pd.to_datetime(df_2020_2021['datetime']).dt.month
df_2020_2021
Visualizing the Data as a Time Series
We can now plot the time-series of the wetland methane emissions for the state of Louisiana for the 2020-2021 (January - December) period.
= {item["properties"]["datetime"][:7]: item for item in items}
items = plt.figure(figsize=(20, 10))
fig
# Set the plot elements
sns.lineplot(
df_2020_2021,= 'month',
x = 'sum',
y = 'year',
hue='flare'
palette
)
# Set the labels for the X and Y axis and assign a title for the plot
# plt.legend()
"months")
plt.xlabel("CH4 emissions g/m2")
plt.ylabel("CH4 emission Values for Louisiana for 2020 and 2021") plt.title(
Generate Statistics and Time Series Line Plots for MERRA-2 Data in Years 2020, 2021
# Configuration for each MERRA-2 parameter: variable name, data directory, and long-term mean (climatology) variable
params = {
    'MERRA-2 T2M':
        {'var': 'T2M',
         'cmap': '#000000',
         'dir': merra_t2m_dir,
         'nickname': 'merra2_t2m',
         'climdir': merra_t2m_clim_dir,
         'climvar': 'T2MMEAN'},
    'MERRA-2 Surface Soil Moisture':
        {'var': 'GWETTOP',
         'cmap': '#000000',
         'dir': merra_soil_moisture_dir,
         'nickname': 'merra2_sm',
         'climdir': merra_soil_moisture_clim_dir,
         'climvar': 'GWETTOP'},
    'MERRA-2 Precipitation Rate':
        {'var': 'PRECTOT',
         'cmap': '#000000',
         'dir': merra_precip_rate_dir,
         'nickname': 'merra2_pr',
         'climdir': merra_precip_rate_clim_dir,
         'climvar': 'PRECTOT'}
}

# Year of MERRA-2 data to read; set anomaly = 1 to difference each month against the long-term mean
year = 1991
anomaly = 1
param = ['MERRA-2 Precipitation Rate', 'MERRA-2 Surface Soil Moisture', 'MERRA-2 T2M']
def get_merra2_timeseries(year, focus, p, anomaly):
    files = glob.glob(params[p]['dir'] + '*.nc4')  # change the year for which you want to read the files
    if anomaly:
        try:
            clim_files = glob.glob(params[p]['climdir'] + '*.nc4')
        except:
            print('Climatological mean files (climdir) not found for specified parameter.')
            breakpoint()

    month_labels = []
    box_totals = []
    month_field = []
    dt = []
    for i, f in enumerate(files):
        data = nc.Dataset(f)

        # Get bounding box
        wlat = np.logical_and(
            data['lat'][:] < boundaries[focus][3],
            data['lat'][:] > boundaries[focus][2]
        )
        wlon = np.logical_and(
            data['lon'][:] < boundaries[focus][1],
            data['lon'][:] > boundaries[focus][0]
        )

        datestamp = f.split('.')[-2]
        month = int(datestamp[-2::])

        dt.append(datetime(year, month, 1))
        month_labels.append(datetime(year, month, 1).strftime('%B'))

        if anomaly:
            # Make sure you read the climatology for the right month (whichfile)
            whichfile = [datetime(1991, month, 1).strftime('%y%m')[1:] in f for f in clim_files]  # Change the year for the data being used.
            climdata = nc.Dataset(np.array(clim_files)[whichfile][0])

            # Calculate sum (emissions) or mean (met params) over your bounding box
            if 'LPJ' in p:
                clim_box_total = np.nansum(climdata[params[p]['climvar']][0, wlat, wlon])
                now_box_total = np.nansum(data[params[p]['var']][0, wlat, wlon])
            elif 'MERRA' in p:
                clim_box_total = np.nanmean(climdata[params[p]['climvar']][0, wlat, wlon])
                now_box_total = np.nanmean(data[params[p]['var']][0, wlat, wlon])

            # Replace fill values with NaN
            # Otherwise differencing might give wild results? (Just be safe)
            wfillclim = np.where(climdata[params[p]['climvar']][0, :, :] == climdata[params[p]['climvar']]._FillValue)
            climfield = climdata[params[p]['climvar']][0, :, :]
            climfield[wfillclim] = np.nan
            wfillnow = np.where(data[params[p]['var']][0, :, :] == data[params[p]['var']]._FillValue)
            nowfield = data[params[p]['var']][0, :, :]
            nowfield[wfillnow] = np.nan

            # And finally, difference current month and long-term mean
            box_totals.append(now_box_total - clim_box_total)
            month_field.append(nowfield - climfield)

            climdata.close()
        else:
            if 'LPJ' in p:
                box_totals.append(np.nansum(data[params[p]['var']][0, wlat, wlon]))
            elif 'MERRA' in p:
                box_totals.append(np.nanmean(data[params[p]['var']][0, wlat, wlon]))
            # Replace fill values with NaN (otherwise maps are hard to read)
            month_field.append(data[params[p]['var']][0, :, :])
            wfill = np.where(month_field[-1] == data[params[p]['var']]._FillValue)
            month_field[-1][wfill] = np.nan
            #breakpoint()

    # Sort in case months are out of order
    dti = np.argsort(dt)
    month_labels = np.array(month_labels)[dti]
    box_totals = np.array(box_totals)[dti]
    month_field = np.array(month_field)[dti]

    # print('mean ', np.nanmean(month_field))
    # print('std ', np.nanstd(month_field))

    data_return = {
        'month_labels': month_labels,
        'box_totals': box_totals,
        'month_fields': month_field,
        'units': data[params[p]['var']].units,
        'lat': data['lat'][:],
        'lon': data['lon'][:],
        'mean': np.nanmean(month_field),
        'std': np.nanstd(month_field)
    }
    data.close()
    return data_return
# List of color codes assigned
colors = ['#1B8FB5', '#16B573', '#C76B21']
for i, p in enumerate(param):
    ts = get_merra2_timeseries(year, focus, p, anomaly)
    print(f'Mean of variable {p} is {ts["mean"]}')
    print(f'Standard deviation of variable {p} is {ts["std"]}')
    # if i == 0:
    fig = plt.figure(figsize=(6, 3))
    ax = fig.add_subplot(1, 1, 1)

    #breakpoint()
    try:
        ax.plot(list(range(0, 12)),
                ts['box_totals'],
                linestyle='-',
                linewidth=2,
                color=colors[i],
                markersize=4,
                marker='o',
                label=p
                )
    except ValueError:
        print('Double check that you have all twelve months of MERRA-2 data downloaded!')
        print(params[p]['dir'])
        breakpoint()

    # Construct plot title
    title = '%s\n%s Mean Monthly %s' % (focus, year, p)
    if anomaly:
        title += ' Anomaly'
    if 'LPJ' in p:
        title = title.replace('Mean', 'Total')

    plt.title(title)
    plt.xticks(list(range(0, 12)))
    ax.set_xticklabels(ts['month_labels'], rotation=40, ha='right')

    ax.legend(loc='best')
    nickname = params[p]['nickname']
    savename = 'box_summed_%s_%s_%s.png' % (nickname, year, focus)
    if anomaly:
        ax.plot(list(range(-1, 13)), np.zeros(14), linewidth=0.4)
        savename = savename.replace('.png', '_Anomaly.png')
    ax.set_xlim(-1, 12)
    ax.set_ylim(min(ts['box_totals']), max(ts['box_totals']))  # manual per parameter
    # Save the figure before calling plt.show() so the saved image is not blank
    plt.savefig(savename.split('/')[-1], dpi=300, bbox_inches='tight')
    plt.show()