Accessing PACE OCI L3M chlorophyll data with earthaccess (no granules found)
Posted: Tue Apr 22, 2025 12:15 am America/New_York
Hi everyone,
I'm trying to access Level-3 chlorophyll-a data from NASA's PACE mission (PACE_OCI_L3M_CHL_NRT) using the earthaccess Python package, but the search returns no granules.
Details:
App: Python-based ingestion pipeline using earthaccess, xarray, pandas, and pymysql
Data Product: PACE_OCI_L3M_CHL_NRT
Dataset: Level-3 Chlorophyll-a concentration
Data Variable: chlor_a (automatically parsed from NetCDF)
Access Method: earthaccess.search_data() with these parameters:
short_name="PACE_OCI_L3M_CHL_NRT"
temporal=("2024-12-15", "2025-01-07")
bounding_box=(-61.5, -53.2, -57.5, -50.9) (Falkland Islands region)
cloud_cover=(0, 50)
Platform: Accessing via NASA Earthdata Cloud (earthaccess)
Operating System: Windows 11
Earthaccess Version: (latest pip install)
Would really appreciate guidance — is the PACE_OCI_L3M_CHL_NRT dataset not available yet for this region/date, or am I structuring the request wrong?
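For what it's worth, here is the minimal check I plan to run to narrow this down. The search_datasets call is just to confirm that the short name resolves to a collection at all, and the second search drops the cloud_cover filter, since I'm not sure that parameter even applies to a Level-3 mapped product:

import earthaccess

earthaccess.login(persist=True)

# 1. Does the short name resolve to any collection?
datasets = earthaccess.search_datasets(short_name="PACE_OCI_L3M_CHL_NRT")
print(f"Collections found: {len(datasets)}")

# 2. Same granule search as below, minus the cloud_cover filter,
#    in case that is what zeroes out the results
results = earthaccess.search_data(
    short_name="PACE_OCI_L3M_CHL_NRT",
    temporal=("2024-12-15", "2025-01-07"),
    bounding_box=(-61.5, -53.2, -57.5, -50.9),
)
print(f"Granules found: {len(results)}")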
Current code:
# 📁 config.py
import pathlib
import earthaccess

# earthaccess handles authentication; login() prompts once and
# stores the Earthdata credentials in ~/.netrc
auth = earthaccess.login(persist=True)
# Region of interest: Falkland Islands area (lon_min, lat_min, lon_max, lat_max)
bounding_box = (-61.5, -53.2, -57.5, -50.9)
start_date = "2024-12-15"
end_date = "2025-01-07"
iron_release_date = "2024-12-30"
# Specific OCI L3M chlorophyll product
product_list = ["PACE_OCI_L3M_CHL_NRT"]
# Output directory for downloads
download_dir = pathlib.Path("downloads")
download_dir.mkdir(exist_ok=True)
# Cloud cover range (optional filter)
cloud_cover = (0, 50)
# MySQL DB config (DigitalOcean); actual connection details omitted here
db_config = {}
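(Side note: my understanding is that earthaccess.login can also read credentials from EARTHDATA_USERNAME / EARTHDATA_PASSWORD environment variables instead of prompting, roughly like this, with placeholder values:)

# Sketch: non-interactive login via environment variables
# (values below are placeholders, not real credentials)
import os
os.environ["EARTHDATA_USERNAME"] = "your_username"
os.environ["EARTHDATA_PASSWORD"] = "your_password"
auth = earthaccess.login(strategy="environment")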
# 📁 ingest.py
import os
import pathlib

import earthaccess
import pandas as pd
import xarray as xr
from config import (product_list, bounding_box, cloud_cover, download_dir,
                    iron_release_date, start_date, end_date)
def fetch_and_process():
    all_metrics = []  # store per-variable stats here
    for product in product_list:
        print(f"🔍 Searching for granules for: {product}")
        # Search with earthaccess
        results = earthaccess.search_data(
            short_name=product,
            temporal=(start_date, end_date),
            bounding_box=bounding_box,
            cloud_cover=cloud_cover,
        )
        if not results:
            print(f"⚠️ No granules found for {product}")
            continue
        print(f"📥 Downloading {len(results)} granules...")
        paths = earthaccess.download(results, download_dir)  # download locally
        for file_path in paths:
            file_path = pathlib.Path(file_path)  # download() may return plain strings
            try:
                # close the file before os.remove(); an open handle makes
                # the delete fail with PermissionError on Windows
                with xr.open_dataset(file_path) as ds:
                    date = file_path.name.split(".")[1][:8]  # date token (YYYYMMDD) from filename
                    period = "before" if date < iron_release_date.replace("-", "") else "after"
                    for var in ds.data_vars:
                        data = ds[var]
                        all_metrics.append({
                            "product": product,
                            "variable": var,
                            "filename": file_path.name,
                            "date": pd.to_datetime(date),
                            "period": period,
                            "mean": float(data.mean(skipna=True).values),
                            "max": float(data.max(skipna=True).values),
                        })
            except Exception as e:
                print(f"⚠️ Failed to read {file_path.name}: {e}")
            os.remove(file_path)  # clean up the downloaded file
    return pd.DataFrame(all_metrics)
if __name__ == "__main__":
    df = fetch_and_process()
    print(df.head())  # optional: preview results
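For context, the metrics DataFrame is eventually written to MySQL with pymysql. A rough sketch of that step (the table name chl_metrics and the contents of db_config are placeholders, not my real setup):

# 📁 load.py (sketch; table name and db_config contents are placeholders)
import pymysql
from config import db_config
from ingest import fetch_and_process

def write_metrics(df):
    # db_config is expected to hold host/user/password/database for pymysql.connect
    conn = pymysql.connect(**db_config)
    try:
        with conn.cursor() as cur:
            for _, row in df.iterrows():
                cur.execute(
                    "INSERT INTO chl_metrics "
                    "(product, variable, filename, date, period, `mean`, `max`) "
                    "VALUES (%s, %s, %s, %s, %s, %s, %s)",
                    (row["product"], row["variable"], row["filename"],
                     row["date"].to_pydatetime(), row["period"],
                     row["mean"], row["max"]),
                )
        conn.commit()
    finally:
        conn.close()

if __name__ == "__main__":
    write_metrics(fetch_and_process())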
Thanks in advance!
— Will