Hello,
I've been trying to download EPIC L1B data, and I had already succeeded in downloading some EPIC data.
But since yesterday (13 April 2025, UTC), I have been unable to download any data.
The following are the methods I have already tried:
(in linux server, 1st method)
#!/bin/bash
# Recursively mirror an EPIC L1B directory using an Earthdata bearer token.
# NOTE(review): the token is passed on the command line, so it is visible in
# `ps` output; consider putting the header in a private wgetrc file instead.
URL="https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/08/"
# Bug fix: TOKEN=(my token) is a bash *array* assignment, so "$TOKEN" expanded
# to just the first word. The token must be a plain (quoted) string.
TOKEN="(my token)"   # replace with your Earthdata Login bearer token
wget --header "Authorization: Bearer $TOKEN" \
  --recursive --no-parent --reject "index.html*" \
  --execute robots=off "$URL"
(in linux server, 2nd method)
# Private scratch files for the session cookie jar and netrc credentials.
cookiejar=$(mktemp cookies.XXXXXXXXXX)
netrc=$(mktemp netrc.XXXXXXXXXX)
chmod 0600 "$cookiejar" "$netrc"

# Remove the scratch files on any exit path (normal exit, error, or signal).
finish() {
  rm -rf "$cookiejar" "$netrc"
}
trap finish EXIT
# NOTE(review): $wgetrc is never assigned anywhere in this script, so this
# sets WGETRC to an empty string, and it is not exported to child processes —
# presumably a leftover from the generated template; confirm before relying on it.
WGETRC="$wgetrc"
prompt_credentials() {
  # Prompt for Earthdata Login credentials and append a netrc entry to the
  # temporary netrc file created by the caller (global $netrc).
  echo "Enter your Earthdata Login or other provider supplied credentials"
  # -r keeps backslashes in the typed input literal
  read -r -p "Username (sehyeon.park): " username
  username=${username:-sehyeon.park}
  read -r -s -p "Password: " password
  # printf (not echo) so passwords containing backslashes survive intact;
  # quote "$netrc" so the redirection target is never word-split
  printf 'machine urs.earthdata.nasa.gov login %s password %s\n' "$username" "$password" >> "$netrc"
  echo
}
exit_with_error() {
  # Print a failure banner, the caller-supplied reason ($1) and the sample
  # granule URL to retry manually, then abort the whole script with status 1.
  echo
  echo "Unable to Retrieve Data"
  echo
  # Quote "$1" so the message is never word-split or glob-expanded
  printf '%s\n' "$1"
  echo
  echo "https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/03/epic_1b_20190331221307_03.h5"
  echo
  exit 1
}
# Collect Earthdata credentials up front so $netrc is populated before any probe.
prompt_credentials
detect_app_approval() {
  # Fetch a sample protected granule with the netrc credentials; any HTTP
  # status other than 200/301/302 is treated as "the ASDC application has not
  # been authorized in Earthdata Login (URS)".
  # $( ) replaces the deprecated backtick command substitution.
  approved=$(curl -s -b "$cookiejar" -c "$cookiejar" -L --max-redirs 5 --netrc-file "$netrc" https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/03/epic_1b_20190331221307_03.h5 -w '\n%{http_code}' | tail -1)
  if [ "$approved" -ne "200" ] && [ "$approved" -ne "301" ] && [ "$approved" -ne "302" ]; then
    # User didn't approve the app. Direct users to approve the app in URS
    exit_with_error "Please ensure that you have authorized the remote application by visiting the link below "
  fi
}
setup_auth_curl() {
  # Probe the sample granule first; 200 (OK) or 304 (not modified) means no
  # URS authentication is needed. Anything else: check application approval.
  status=$(curl -s -z "$(date)" -w '\n%{http_code}' https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/03/epic_1b_20190331221307_03.h5 | tail -1)
  case "$status" in
    200|304) : ;;               # already accessible — nothing to do
    *) detect_app_approval ;;   # URS auth required: verify the app is approved
  esac
}
setup_auth_wget() {
  # wget authenticates via ~/.netrc. Note: there's no checking or feedback
  # if login is unsuccessful.
  touch ~/.netrc
  chmod 0600 ~/.netrc
  # -F matches the machine name literally (in the original unescaped regex the
  # dots matched any character); -q uses the exit status instead of capturing
  # the matching line into a throwaway variable.
  if ! grep -qF 'machine urs.earthdata.nasa.gov' ~/.netrc; then
    cat "$netrc" >> ~/.netrc
  fi
}
fetch_urls() {
  # Read download URLs from stdin (one per line) and fetch each one,
  # preferring curl; falls back to wget when curl is unavailable.
  if command -v curl >/dev/null 2>&1; then
    setup_auth_curl
    while read -r line; do
      # Get everything after the last '/'
      filename="${line##*/}"
      # Strip everything after '?'
      stripped_query_params="${filename%%\?*}"
      # Quote the output name and URL so spaces, '&' and '?' survive intact
      curl -f -b "$cookiejar" -c "$cookiejar" -L --netrc-file "$netrc" -g -o "$stripped_query_params" -- "$line" && echo || exit_with_error "Command failed with error. Please retrieve the data manually."
    done
  elif command -v wget >/dev/null 2>&1; then
    # We can't use wget to poke provider server to get info whether or not URS was integrated without download at least one of the files.
    echo
    echo "WARNING: Can't find curl, use wget instead."
    echo "WARNING: Script may not correctly identify Earthdata Login integrations."
    echo
    setup_auth_wget
    while read -r line; do
      # Get everything after the last '/'
      filename="${line##*/}"
      # Strip everything after '?'
      stripped_query_params="${filename%%\?*}"
      # Quote the output name and URL here as well
      wget --load-cookies "$cookiejar" --save-cookies "$cookiejar" --output-document "$stripped_query_params" --keep-session-cookies -- "$line" && echo || exit_with_error "Command failed with error. Please retrieve the data manually."
    done
  else
    exit_with_error "Error: Could not find a command-line downloader. Please install curl or wget"
  fi
}
# Feed the URL list to fetch_urls on stdin. The quoted delimiter prevents any
# shell expansion inside the here-doc. Bug fix: the here-document was never
# terminated (the closing EDSCEOF line was missing), so the script would
# swallow everything that followed; the terminator is restored here.
fetch_urls <<'EDSCEOF'
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/03/epic_1b_20190331221307_03.h5
EDSCEOF
Please help me find a solution.
Error when downloading EPIC L1B
-
- Posts: 1
- Joined: Wed Jun 19, 2024 1:49 am America/New_York
-
- Subject Matter Expert
- Posts: 16
- Joined: Wed Mar 29, 2023 8:41 am America/New_York
Re: Error when downloading EPIC L1B
Hello @sehyeon.park,
Below is a script that worked for me just a few minutes ago. The list of links at the very bottom needs to be replaced with the ones you need. To get the links, go to https://search.earthdata.nasa.gov/ and search for the collection you need (in this particular case DSCOVR_EPIC_L1B_3), select your timeframe of interest, and click the "Download All" button. You will be taken to a download option selection page; select "Download all data", click "Done" at the bottom right, then "Download Data" at the bottom left. You will be taken to a page that creates a list of download links. Compiling the list may take some time — be patient. Copy the list of links into your script.
#!/bin/bash
# Clear GREP_OPTIONS so user-level grep defaults cannot affect the grep calls
# below. NOTE(review): the variable is not exported, so it only shadows an
# inherited value within this shell — confirm that is the intent.
GREP_OPTIONS=''
# Private scratch files for the session cookie jar and netrc credentials.
cookiejar=$(mktemp cookies.XXXXXXXXXX)
netrc=$(mktemp netrc.XXXXXXXXXX)
chmod 0600 "$cookiejar" "$netrc"

# Remove the scratch files on any exit path (normal exit, error, or signal).
finish() {
  rm -rf "$cookiejar" "$netrc"
}
trap finish EXIT
# NOTE(review): $wgetrc is never assigned anywhere in this script, so this
# sets WGETRC to an empty string, and it is not exported to child processes —
# presumably a leftover from the generated template; confirm before relying on it.
WGETRC="$wgetrc"
prompt_credentials() {
  # Prompt for Earthdata Login credentials and append a netrc entry to the
  # temporary netrc file created by the caller (global $netrc).
  echo "Enter your Earthdata Login or other provider supplied credentials"
  # -r keeps backslashes in the typed input literal
  read -r -p "Username: " username
  read -r -s -p "Password: " password
  # printf (not echo) so passwords containing backslashes survive intact;
  # quote "$netrc" so the redirection target is never word-split
  printf 'machine urs.earthdata.nasa.gov login %s password %s\n' "$username" "$password" >> "$netrc"
  echo
}
exit_with_error() {
  # Print a failure banner, the caller-supplied reason ($1) and the sample
  # granule URL to retry manually, then abort the whole script with status 1.
  echo
  echo "Unable to Retrieve Data"
  echo
  # Quote "$1" so the message is never word-split or glob-expanded
  printf '%s\n' "$1"
  echo
  echo "https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L4_TrO3_01/2022/12/DSCOVR_EPIC_L4_TrO3_01_20221230130329_03.h5"
  echo
  exit 1
}
# Collect Earthdata credentials up front so $netrc is populated before any probe.
prompt_credentials
detect_app_approval() {
  # Fetch a sample protected granule with the netrc credentials; any HTTP
  # status other than 200/301/302 is treated as "the ASDC application has not
  # been authorized in Earthdata Login (URS)".
  # $( ) replaces the deprecated backtick command substitution.
  approved=$(curl -s -b "$cookiejar" -c "$cookiejar" -L --max-redirs 5 --netrc-file "$netrc" https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L4_TrO3_01/2022/12/DSCOVR_EPIC_L4_TrO3_01_20221230130329_03.h5 -w '\n%{http_code}' | tail -1)
  if [ "$approved" -ne "200" ] && [ "$approved" -ne "301" ] && [ "$approved" -ne "302" ]; then
    # User didn't approve the app. Direct users to approve the app in URS
    exit_with_error "Please ensure that you have authorized the remote application by visiting the link below "
  fi
}
setup_auth_curl() {
  # Probe the sample granule first; 200 (OK) or 304 (not modified) means no
  # URS authentication is needed. Anything else: check application approval.
  status=$(curl -s -z "$(date)" -w '\n%{http_code}' https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L4_TrO3_01/2022/12/DSCOVR_EPIC_L4_TrO3_01_20221230130329_03.h5 | tail -1)
  case "$status" in
    200|304) : ;;               # already accessible — nothing to do
    *) detect_app_approval ;;   # URS auth required: verify the app is approved
  esac
}
setup_auth_wget() {
  # wget authenticates via ~/.netrc. Note: there's no checking or feedback
  # if login is unsuccessful.
  touch ~/.netrc
  chmod 0600 ~/.netrc
  # -F matches the machine name literally (in the original unescaped regex the
  # dots matched any character); -q uses the exit status instead of capturing
  # the matching line into a throwaway variable.
  if ! grep -qF 'machine urs.earthdata.nasa.gov' ~/.netrc; then
    cat "$netrc" >> ~/.netrc
  fi
}
fetch_urls() {
  # Read download URLs from stdin (one per line) and fetch each one,
  # preferring curl; falls back to wget when curl is unavailable.
  if command -v curl >/dev/null 2>&1; then
    setup_auth_curl
    while read -r line; do
      # Get everything after the last '/'
      filename="${line##*/}"
      # Strip everything after '?'
      stripped_query_params="${filename%%\?*}"
      # Quote the output name and URL so spaces, '&' and '?' survive intact
      curl -f -b "$cookiejar" -c "$cookiejar" -L --netrc-file "$netrc" -g -o "$stripped_query_params" -- "$line" && echo || exit_with_error "Command failed with error. Please retrieve the data manually."
    done
  elif command -v wget >/dev/null 2>&1; then
    # We can't use wget to poke provider server to get info whether or not URS was integrated without download at least one of the files.
    echo
    echo "WARNING: Can't find curl, use wget instead."
    echo "WARNING: Script may not correctly identify Earthdata Login integrations."
    echo
    setup_auth_wget
    while read -r line; do
      # Get everything after the last '/'
      filename="${line##*/}"
      # Strip everything after '?'
      stripped_query_params="${filename%%\?*}"
      # Quote the output name and URL here as well
      wget --load-cookies "$cookiejar" --save-cookies "$cookiejar" --output-document "$stripped_query_params" --keep-session-cookies -- "$line" && echo || exit_with_error "Command failed with error. Please retrieve the data manually."
    done
  else
    exit_with_error "Error: Could not find a command-line downloader. Please install curl or wget"
  fi
}
# Feed the URL list (one per line) to fetch_urls on stdin; the quoted EDSCEOF
# delimiter keeps the URLs free of any shell expansion.
fetch_urls <<'EDSCEOF'
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/03/epic_1b_20190328023357_03.h5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/03/epic_1b_20190328004554_03.h5
EDSCEOF
Below is a script that worked for me just a few minutes ago. The list of links at the very bottom needs to be replaced with the ones you need. To get the links, go to https://search.earthdata.nasa.gov/ and search for the collection you need (in this particular case DSCOVR_EPIC_L1B_3), select your timeframe of interest, and click the "Download All" button. You will be taken to a download option selection page; select "Download all data", click "Done" at the bottom right, then "Download Data" at the bottom left. You will be taken to a page that creates a list of download links. Compiling the list may take some time — be patient. Copy the list of links into your script.
#!/bin/bash
# Clear GREP_OPTIONS so user-level grep defaults cannot affect the grep calls
# below. NOTE(review): the variable is not exported, so it only shadows an
# inherited value within this shell — confirm that is the intent.
GREP_OPTIONS=''
# Private scratch files for the session cookie jar and netrc credentials.
cookiejar=$(mktemp cookies.XXXXXXXXXX)
netrc=$(mktemp netrc.XXXXXXXXXX)
chmod 0600 "$cookiejar" "$netrc"

# Remove the scratch files on any exit path (normal exit, error, or signal).
finish() {
  rm -rf "$cookiejar" "$netrc"
}
trap finish EXIT
# NOTE(review): $wgetrc is never assigned anywhere in this script, so this
# sets WGETRC to an empty string, and it is not exported to child processes —
# presumably a leftover from the generated template; confirm before relying on it.
WGETRC="$wgetrc"
prompt_credentials() {
  # Prompt for Earthdata Login credentials and append a netrc entry to the
  # temporary netrc file created by the caller (global $netrc).
  echo "Enter your Earthdata Login or other provider supplied credentials"
  # -r keeps backslashes in the typed input literal
  read -r -p "Username: " username
  read -r -s -p "Password: " password
  # printf (not echo) so passwords containing backslashes survive intact;
  # quote "$netrc" so the redirection target is never word-split
  printf 'machine urs.earthdata.nasa.gov login %s password %s\n' "$username" "$password" >> "$netrc"
  echo
}
exit_with_error() {
  # Print a failure banner, the caller-supplied reason ($1) and the sample
  # granule URL to retry manually, then abort the whole script with status 1.
  echo
  echo "Unable to Retrieve Data"
  echo
  # Quote "$1" so the message is never word-split or glob-expanded
  printf '%s\n' "$1"
  echo
  echo "https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L4_TrO3_01/2022/12/DSCOVR_EPIC_L4_TrO3_01_20221230130329_03.h5"
  echo
  exit 1
}
# Collect Earthdata credentials up front so $netrc is populated before any probe.
prompt_credentials
detect_app_approval() {
  # Fetch a sample protected granule with the netrc credentials; any HTTP
  # status other than 200/301/302 is treated as "the ASDC application has not
  # been authorized in Earthdata Login (URS)".
  # $( ) replaces the deprecated backtick command substitution.
  approved=$(curl -s -b "$cookiejar" -c "$cookiejar" -L --max-redirs 5 --netrc-file "$netrc" https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L4_TrO3_01/2022/12/DSCOVR_EPIC_L4_TrO3_01_20221230130329_03.h5 -w '\n%{http_code}' | tail -1)
  if [ "$approved" -ne "200" ] && [ "$approved" -ne "301" ] && [ "$approved" -ne "302" ]; then
    # User didn't approve the app. Direct users to approve the app in URS
    exit_with_error "Please ensure that you have authorized the remote application by visiting the link below "
  fi
}
setup_auth_curl() {
  # Probe the sample granule first; 200 (OK) or 304 (not modified) means no
  # URS authentication is needed. Anything else: check application approval.
  status=$(curl -s -z "$(date)" -w '\n%{http_code}' https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L4_TrO3_01/2022/12/DSCOVR_EPIC_L4_TrO3_01_20221230130329_03.h5 | tail -1)
  case "$status" in
    200|304) : ;;               # already accessible — nothing to do
    *) detect_app_approval ;;   # URS auth required: verify the app is approved
  esac
}
setup_auth_wget() {
  # wget authenticates via ~/.netrc. Note: there's no checking or feedback
  # if login is unsuccessful.
  touch ~/.netrc
  chmod 0600 ~/.netrc
  # -F matches the machine name literally (in the original unescaped regex the
  # dots matched any character); -q uses the exit status instead of capturing
  # the matching line into a throwaway variable.
  if ! grep -qF 'machine urs.earthdata.nasa.gov' ~/.netrc; then
    cat "$netrc" >> ~/.netrc
  fi
}
fetch_urls() {
  # Read download URLs from stdin (one per line) and fetch each one,
  # preferring curl; falls back to wget when curl is unavailable.
  if command -v curl >/dev/null 2>&1; then
    setup_auth_curl
    while read -r line; do
      # Get everything after the last '/'
      filename="${line##*/}"
      # Strip everything after '?'
      stripped_query_params="${filename%%\?*}"
      # Quote the output name and URL so spaces, '&' and '?' survive intact
      curl -f -b "$cookiejar" -c "$cookiejar" -L --netrc-file "$netrc" -g -o "$stripped_query_params" -- "$line" && echo || exit_with_error "Command failed with error. Please retrieve the data manually."
    done
  elif command -v wget >/dev/null 2>&1; then
    # We can't use wget to poke provider server to get info whether or not URS was integrated without download at least one of the files.
    echo
    echo "WARNING: Can't find curl, use wget instead."
    echo "WARNING: Script may not correctly identify Earthdata Login integrations."
    echo
    setup_auth_wget
    while read -r line; do
      # Get everything after the last '/'
      filename="${line##*/}"
      # Strip everything after '?'
      stripped_query_params="${filename%%\?*}"
      # Quote the output name and URL here as well
      wget --load-cookies "$cookiejar" --save-cookies "$cookiejar" --output-document "$stripped_query_params" --keep-session-cookies -- "$line" && echo || exit_with_error "Command failed with error. Please retrieve the data manually."
    done
  else
    exit_with_error "Error: Could not find a command-line downloader. Please install curl or wget"
  fi
}
# Feed the URL list (one per line) to fetch_urls on stdin; the quoted EDSCEOF
# delimiter keeps the URLs free of any shell expansion.
fetch_urls <<'EDSCEOF'
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/03/epic_1b_20190328023357_03.h5
https://asdc.larc.nasa.gov/data/DSCOVR/EPIC/L1B/2019/03/epic_1b_20190328004554_03.h5
EDSCEOF