# Query and download reduced data (Phase 3) from the ESO Science Archive.
#!/bin/bash
#***********************************************************************
#*                  ESO Science Archive Facility
#*                     Programmatic Access
#*
#* Script:       eso_access_phase3.sh
#* Shell:        bash
#* Date:         15-Jul-2015
#* Contact:      archive@eso.org
#* Description:  Script to query and download reduced data (Phase 3)
#* Usage:        Run the script without arguments to see its Usage
#* !!!
#* !!! The script can be modified and adapted according to your needs
#* !!! but please read the ESO Data Access Policy before using it for
#* !!! downloading data:
#* !!! http://archive.eso.org/cms/eso-data-access-policy.html
#* !!!
#***********************************************************************
# NOTE: the shebang must be bash (not /bin/sh): the script relies on
# bash-only features ('declare -a', 'function', '[[ ]]', '==', arrays).

# Keep a copy of the command line for the final report (quoted join).
cmdline="$*"
# Default output format for the metadata query (see -format below).
wdbo=csv
# Cleared as soon as any search constraint is supplied on the command line.
noinput=1
# Default search box: 2d09m00s, the VIRCAM field of view.
box=02+09+00
################################################################
# Defining some useful functions to purge temporary files
# when program exits or is interrupted
################################################################

# Commands (one command string per element) to be run at exit time.
declare -a on_exit_items

# Run every registered cleanup command, in registration order.
function on_exit()
{
    for i in "${on_exit_items[@]}"
    do
        echo "on_exit: $i"
        # Quoted: prevents glob expansion / word splitting of the stored
        # command string before eval re-parses it.
        eval "$i"
    done
}
# Register one cleanup command (all arguments joined into a single string);
# the exit trap is installed the first time anything is registered.
function add_on_exit()
{
    local count=${#on_exit_items[@]}
    on_exit_items+=("$*")
    if [[ $count -eq 0 ]]; then
        echo "Setting trap"
        trap on_exit EXIT INT HUP TERM
    fi
}
#####################################
# Get the command line option values
#####################################
server="archive.eso.org"
# Every value-taking option consumes the following word; options that
# constrain the search also clear the "no input given" flag.
while [ $# -gt 0 ]; do
    case "$1" in
        -User       ) shift; user=$1 ;;
        -target     ) shift; targetname=$1; noinput=0 ;;
        -ra         ) shift; ra=$1;         noinput=0 ;;
        -dec        ) shift; dec=$1;        noinput=0 ;;
        -box        ) shift; box=$1 ;;
        -collection ) shift; collection=$1; noinput=0 ;;
        -listcoll   )        listcoll="show" ;;
        -pi         ) shift; piname=$1;     noinput=0 ;;
        -runid      ) shift; pid=$1;        noinput=0 ;;
        -object     ) shift; obj=$1;        noinput=0 ;;
        -inst       ) shift; inst=$1;       noinput=0 ;;
        -filter     ) shift; filter=$1 ;;
        -lambda     ) shift; lambda=$1 ;;
        -start      ) shift; date1=$1;      noinput=0 ;;
        -end        ) shift; date2=$1;      noinput=0 ;;
        -category   ) shift; prodcat=$1 ;;
        -format     ) shift; format=$1 ;;
        -outputdir  ) shift; outputdir=$1 ;;
        -top        ) shift; top=$1 ;;
        -server     ) shift; server=$1 ;;
        -debug      ) shift; debug=$1 ;;
    esac
    shift
done
# With -debug set, echo back every parsed input value.
if [ -n "$debug" ]; then
    echo "RECEIVED INPUT:"
    echo " user=$user"
    echo " target=$targetname"
    echo " ra=$ra"
    echo " dec=$dec"
    echo " box=$box"
    echo " collection=$collection"
    echo " pi=$piname"
    echo " runid=$pid"
    echo " object=$obj"
    echo " inst=$inst"
    echo " filter=$filter"
    echo " lambda=$lambda"
    echo " start=$date1"
    echo " end=$date2"
    echo " category=$prodcat"
    echo " format=$format"
    echo " outputdir=$outputdir"
    echo " top=$top"
    echo " server=$server"
fi
# -listcoll: just print the available Phase 3 collection names and quit.
if [ -n "$listcoll" ]; then
    echo "The names of the collections you can use with the -collection option are:"
    wget -O - "http://$server/wdb/wdb/adp/collections/query?wdbo=ascii" 2> /dev/null \
        | awk '/^\------/,/^\[EOD\]/{print $1}' \
        | egrep -v '^\----|^\[EOD\]' \
        | sort -u
    exit
fi
# Echo back the effective query parameters before running it.
printf '%s\n' \
    "Your query:" \
    " target=$targetname; ra=$ra; dec=$dec; box=$box;" \
    " collection=$collection; pi=$piname; runid=$pid; object=$obj; inst=$inst;" \
    " filter=$filter; lambda=$lambda; start=$date1; end=$date2;" \
    " category=$prodcat; format=$format; outputdir=$outputdir; top=$top;" \
    ""
################################################################
# Display the usage, if credentials are missing exit gracefully
################################################################
# A single quoted here-doc replaces the long run of echo statements;
# the quoted delimiter guarantees no expansion happens inside the text.
if [ "$noinput" = "1" ]; then
    cat <<'USAGE_EOF'
 USAGE: ./eso_access_phase3.sh [-ParamName ParamValue]
 where the supported ParamNames are:
 -User <your archive username>
 -target <Target name> to be resolved by SIMBAD into its coordinates for a coordinate box search (see -box)
 -ra <Right Ascension> either expressed in decimal degrees or is sexagesimal hours.
 -dec <Declination> in decimal or sexagesimal degrees
 -box <box_size> in decimal or sexagesimal degrees [default: 2d9m0s - VIRCAM field of view
 -object <object name> as specified by the PI
 -pi <PI Name>, the name of the PI
 -runid <Run ID>, actually a run Id like 188.D-3003(T), or a programme Id like 188.D-3003
 -inst <INSTRUMENT>, a valid ESO instrument name
 -filter <FILTER>, a valid filter name
 -lambda <a wavelength condition>, e.g. a single number (in nanometers) to require that such wavelength is included in the spectrum
 -start <Start-date> specified as YYYY-MM-DD or YYYY-MM-DDThh:mm:ss
 -end <End-date> specified as YYYY-MM-DD or YYYY-MM-DDThh:mm:ss
 -collection The name of one <Phase3 Collection>, e.g.: VMC, or PESSTO, or VVV, etc.
 Please see http://archive.eso.org/wdb/wdb/adp/collections/query to get all existing collection names and their content
 or simply run this script with the -listcoll option to retrieve the list of available collection names.
 -listcoll To get the list of Phase3 Collection names available
 -category <Category>, e.g.: SCIENCE.IMAGE, SCIENCE.SPECTRUM, SCIENCE.MEFIMAGE, SCIENCE.SRCTBL, etc.
 -format <Output type>, the query result will be shown using one of the following formats: csv [default], ascii (fixed length), html.
 -outputdir <Output directory>, the query result and all downloads will happen in the user's specified -outputdir (default being the current directory: ./
 NOTE: If -format is specified, the script stops at showing query results and does NOT continue into downloading data.
 -top <Maximum number of products returned> [Default: 200 shown both for displaying and for downloading]
 The actual number of files downloaded could be higher given that each dataset is composed of one or more files.
 
 and where at least one of target, ra, dec, collection, pi, runid, object, inst, start, or end, is provided.
 If no -User is provided, query results are shown but not downloaded.
 
 Dependencies: wget must be installed on your machine as well as GNU awk version 4 or higher.
 
 Examples:
 =========
 The following example will only display (not download) 12 1d-spectra belonging to the specified run:
 ./eso_access_phase3.sh -runid '081.B-0258(A)' -category SCIENCE.SPECTRUM -top 12
 
 Search and retrieve by date, limit to 4 files
 ./eso_access_phase3.sh -User <your username> -collection VMC -start '2009-11-17' -end '2009-11-19' -top 4
 
 Search by object, category, and filter (no download)
 ./eso_access_phase3.sh -collection VMC -object LMC -category SCIENCE.IMAGE -filter Y
 
 Search by coordinates (in a box)
 ./eso_access_phase3.sh -ra 167.081667 -dec -77.654720 -box 00:00:01
 
 Specific PESSTO object with He I rest-frame wavelength included in the spectrum, observed by EFOSC (Note: any constraint on lambda restricts results to spectra only)
 ./eso_access_phase3.sh -collection PESSTO -object SN2012fr -lambda 587.6 -inst EFOSC

USAGE_EOF
    exit
fi
# Please do not change the user_agent here below:
user_agent="ESO_PHASE3_PROGRAMMATIC_SCRIPT($(uname))"
######################################################
# If successful this routine will be invoked at the end.
######################################################
# Final report: $1 = number of products found,
# $2 = archive request number (optional), $3 = number of files downloaded
# (only meaningful when $2 is given).
success() {
    local noun="products"
    if [ "$1" == "1" ]; then
        noun="product"
    fi
    echo ""
    echo "Successful execution."
    echo "INPUT:"
    echo " $0 $cmdline"
    echo "OUTPUT:"
    echo " - METADATA ($1 $noun) $resultsfullpath # This is the query result file with info like Object,RA,DEC,Instrument,band,etc."
    if [ "$2" != "" ]; then
        echo " - ESO ARCHIVE DATA REQUEST NUMBER: $2"
        echo " REQUEST DESCRIPTION: $reqdescr"
        echo " - Data files (num=$3) downloaded in directory: $outputdir"
    fi
    echo ""
    if [ "$format" == "csv" ]; then
        echo ' To visualise the metadata file when in csv format you could use e.g. excel, numbers, or topcat (with "-f csv" option).'
        echo " To properly use topcat, please remove the first line of the $resultsfullpath file"
    fi
    echo ""
}
########################################################################
# Check if the wget is installed
########################################################################
check_wget() {
    wget_status=0
    # 'command -v' is the POSIX way to test for a tool in PATH ('which'
    # is non-standard); '>/dev/null 2>&1' replaces the csh-style '>&'
    # redirection that is not valid under a plain /bin/sh.
    command -v wget > /dev/null 2>&1
    wget_status=$?
    if [ "$wget_status" -ne "0" ]; then
        echo "Sorry, you either don't have wget installed, or it is not in your path (check your \$PATH environment variable)."
        exit $wget_status
    fi
}
check_wget
########################################################################
# Check if the correct gawk version is installed (version 4 and higher)
########################################################################
check_gawk() {
    gawk_status=1
    # POSIX 'command -v' instead of non-standard 'which'; explicit
    # redirection instead of the csh-style '>&'.
    command -v gawk > /dev/null 2>&1
    if [ "$?" -eq "0" ]; then
        # Check if this version of gawk supports FPAT (gawk >= 4): it must
        # split this CSV sample line into exactly 2 fields.
        tmpfile=/tmp/test_gawk_$$
        echo '"A,SeparatedField",Another' > $tmpfile
        add_on_exit rm -f $tmpfile
        # Pass the file directly instead of piping it through cat.
        numfields=$(gawk '{print NF}' FPAT='([^,]+)|("[^"]+")' $tmpfile)
        if [ "$numfields" -eq "2" ]; then
            gawk_status=0
        fi
    fi
}
check_gawk
if [ "$gawk_status" -eq "1" ]; then
    echo "Sorry, you either don't have gawk installed, or it is installed but it does not support the FPAT construct (version 4 and higher)."
    echo "You could get it from: http://ftp.gnu.org/gnu/gawk/ (e.g. gawk-4.1.2.tar.gz)"
    exit $gawk_status
fi
# Default the output directory to the current one, and verify that it is
# writable before doing any work.
if [ "$outputdir" == "" ]; then
    outputdir="."
fi
# Quoted: an -outputdir containing spaces used to break this test.
if [ ! -w "$outputdir" ]; then
    echo "Sorry, the -outputdir you provided is not writable ($outputdir)."
    exit 1
fi
####################################################################
# Before to query the archive force capitalise: instrument and pi
####################################################################
inst=$(echo $inst | tr '[a-z]' '[A-Z]')
piname=$(echo $piname | tr '[a-z]' '[A-Z]')
# Validate -format; specifying it explicitly switches to metadata-only mode.
do_not_download=0
if [ "$format" != "csv" ] && [ "$format" != "ascii" ] && [ "$format" != "html" ] && [ "$format" != "" ] ; then
    # (typo "unknwon" fixed)
    echo "ERROR: Supplied -format format ($format) unknown"
    echo "Supported -format formats are: csv, ascii, html."
    exit 1
else
    if [ "$format" == "" ]; then
        # No -format given: default to csv and continue into the download phase.
        format="csv"
    else
        # An explicit -format means "show metadata only, do not download".
        do_not_download=1
    fi
    wdbo=$format
fi
#########################################################################################################
# Handle -start and -end inputs
#########################################################################################################
# Build the "start..end" observation-date range, filling an open end with a
# far-future date and an open start with a far-past date.
if [ -n "$date1" ] && [ -n "$date2" ]; then
    date_obs="${date1}..${date2}"
elif [ -n "$date1" ]; then
    date_obs="${date1}..2099-DEC-31"
elif [ -n "$date2" ]; then
    date_obs="1980-JAN-01..${date2}"
else
    date_obs=""
fi
# Run the metadata query against the phase3_main WDB service; the raw
# response goes to a .tmp file, the cleaned table to $resultsfullpath.
resultsfullpathtmp="${outputdir}/output_query_$$.$format.tmp"
resultsfullpath="${outputdir}/output_query_$$.$format"
# Build the query URL once so the debug echo and the real call cannot drift apart.
query_url="http://$server/wdb/wdb/adp/phase3_main/query?tab_dp_id=on&tab_object=on&target=$targetname&resolver=simbad&object=$obj&coord_sys=eq&tab_coord1=1&coord1=$ra&tab_coord2=1&coord2=$dec&box=$box&dcoord_sys=eq&format=sexagesimal&tab_username=on&username=$piname&tab_prog_id=on&prog_id=$pid&tab_ins_id=on&ins_id=$inst&filter=$filter&wavelength=$lambda&date_obs=$date_obs&tab_dataproduct_type=1&dataproduct_type=$prodcat&phase3_collection=$collection&tab_dataset_id=1&force_tabular_mode=1&top=$top&wdbo=$wdbo&order_main=mjd_obs%20desc"
if [ -n "$debug" ]; then
    echo wget -O ${resultsfullpathtmp} --user-agent="${user_agent}" "$query_url"
fi
wget -O ${resultsfullpathtmp} --user-agent="${user_agent}" "$query_url" > /dev/null 2>&1
# "# A total of N records ..." header line carries the record count in field 5.
numRecords=$(grep '^# A ' ${resultsfullpathtmp} | awk '{print $5}')
# Strip blank and comment lines to obtain the bare result table.
egrep -v '^$|^#' ${resultsfullpathtmp} > ${resultsfullpath}
######################################################################################
# Check if there is any record matching the provided criteria, if not exit gracefully
######################################################################################
checkifempty=$(grep "A total of 0" ${resultsfullpathtmp})
rm -f ${resultsfullpathtmp}
if [ -n "$checkifempty" ]; then
    echo "A total of 0 records were found matching the provided criteria. Exiting."
    rm $resultsfullpath
    exit
fi
echo "Metadata stored in: $resultsfullpath (This is the query result output, with info like Object,RA,DEC,Instrument,band,etc.)"
# Explicit -format means metadata-only: optionally page the results, then stop.
if [ "$do_not_download" == "1" ]; then
    echo "MODE: METADATA ONLY (Data are not going to be downloaded because option -format was specified by you)."
    printf "%s " "Display $numRecords returned results? [y-yes, else quit]:"
    read continue
    [ "$continue" == "y" ] && cat $resultsfullpath | more
    success $numRecords
    exit 0
fi
####################################################################################################
# Create a list of file_ids out of the output csv file to get submitted as a request to the Archive.
# The list of files to get submitted must be in the right format.
####################################################################################################
# Locate the 'Dataset ID' and 'ARCFILE' columns from the header line, then
# turn every SCIENCE row into a PHASE3+<dataset>+<arcfile> token ('+' is %2B),
# comma-separated, as expected by the request-submission endpoint.
header_line=$(grep 'Dataset ID' $resultsfullpath)
dataset_col_nr=$(echo "$header_line" | tr ',' '\012' | grep -n 'Dataset ID' | awk -F: '{print $1}')
arcfile_col_nr=$(echo "$header_line" | tr ',' '\012' | grep -n 'ARCFILE' | awk -F: '{print $1}')
filelist=$(grep "SCIENCE\." $resultsfullpath | gawk -v dataset=$dataset_col_nr -v arcfile=$arcfile_col_nr -F, '{print "PHASE3%2B"$dataset"%2B"$arcfile}' FPAT="([^,]*)|(\"[^\"]*\")" | tr '\012' ',')
if [ -n "$debug" ]; then
    echo "DEBUG: $resultsfullpath"
    echo "DEBUG: dataset_col_nr = $dataset_col_nr"
    echo "DEBUG: arcfile_col_nr = $arcfile_col_nr"
    echo "DEBUG: filelist={$filelist}"
fi
#########################################################################
# Defining a function to prompt the user's password (if no netrc is used)
#########################################################################
# Ctrl-C handler: report the interrupt and abort the whole script.
on_ctrl_c() {
    printf "\rSIGINT caught "
    exit 1
}
trap 'on_ctrl_c' INT
# Set up wget authentication for dataportal.eso.org: use an existing
# $HOME/.netrc entry when one matches $user, otherwise prompt for the
# password (optionally storing it in .netrc); as a last resort write the
# credentials to a private temporary WGETRC file that is removed on exit.
function set_wgetrc {
    prompt=
    ############################################
    # Set the username and password or use netrc
    ############################################
    netrc_in_place="unknown yet"
    if [ "$prompt" != "yes" ]; then
        # take password (and user) from netrc if no -p option
        if test -f "$HOME/.netrc" -a -r "$HOME/.netrc"
        then
            # combine multi-line netrc declaration then split using = as a special separator
            # if the = is in the paswd of the user it is no problem because the relevant part is the machine name not the paswd (splitting the pswd is not a problem)
            # pipe sequence explained:
            #
            #   cat .netrc
            #   protect any = already present, then append a = to each original .netrc line
            #   substitute new line with a blank (entire .netrc in a single line now, with = separators added)
            #   substitute any = with a newline
            #   restore the protected = characters
            #   grep each line for dataportal.eso.org followed by the username
            # The last grep command will be successful if an entry for dataportal + user is found, it will fail otherwise.
            cat $HOME/.netrc | sed 's+=+ESO_EqUaL+g' | sed 's+$+=+g' | tr '\012' ' ' | tr '=' '\012' | sed 's+ESO_EqUaL+=+g' | grep -i "dataportal.eso.org.*${user}" > /dev/null
            # if the last grep was not successful, then report to the user what it might want to do to set up a proper .netrc.
            if [ $? -ne 0 ]; then
                # no entry for dataportal.eso.org, user is prompted for password
                echo "*** WARNING: A .netrc is available but there is no entry for dataportal.eso.org for this user; "
                echo "*** if you do not want to be asked for your password again please ensure that"
                # (message fix: "your$HOME" was missing a space)
                echo "*** a single entry for dataportal.eso.org exists in your $HOME/.netrc and that"
                echo "*** it looks like this: "
                echo "*** machine dataportal.eso.org login $user password _yourpassword_"
                prompt="yes"
            else
                netrc_in_place="yes"
            fi
        else
            # (typo fix: "passowrd" -> "password")
            echo "*** WARNING: To avoid to be prompted for the password, store the following line in your $HOME/.netrc file:"
            echo "*** machine dataportal.eso.org login $user password _yourpassword_"
            prompt="yes"
        fi
    fi
    if test -n "$prompt"
    then
        # Read the password without echoing it to the terminal; restore the
        # terminal state even when interrupted.
        trap 'stty echo 2>/dev/null; echo "Cancelled."; exit 1' INT HUP TERM
        stty -echo 2>/dev/null
        printf 'Password: '
        read password
        echo ''
        stty echo 2>/dev/null
        write_netrc="n"
        trap - INT HUP TERM
        printf '*** Do you want to permanently store your ESO credentials in your $HOME/.netrc file? [y|n]: '
        read write_netrc
        if [ "$write_netrc" == "y" ]; then
            if test -e "$HOME/.netrc"
            then
                # Check if a dataportal.eso.org entry already exists:
                found=0
                found=`grep -c dataportal.eso.org $HOME/.netrc`
                if [ "$found" == "0" ]; then
                    chmod 600 $HOME/.netrc
                    echo "" >> $HOME/.netrc
                    echo "machine dataportal.eso.org login $user password $password" >> $HOME/.netrc
                    echo "" >> $HOME/.netrc
                    if [ "$?" != "0" ]; then
                        echo " *** WARNING: It was not possible to store your credentials in the $HOME/.netrc file; please check your permissions."
                    else
                        echo "Successful: ESO credentials for user $user on dataportal.eso.org have been stored in your $HOME/.netrc file."
                        netrc_in_place="yes"
                    fi
                else
                    # The netrc only supports a single entry for a given machine! The second entry is not going to be considered at all!
                    echo "*** WARNING: Could not store your credentials in your $HOME/.netrc file because"
                    echo "*** an entry for dataportal.eso.org already exists, but for a different user."
                    echo "*** Please edit your .netrc to use the $user instead."
                fi
            else
                # Creating a $HOME/.netrc file (umask 077 keeps it private)
                (umask 077 && : > $HOME/.netrc)
                if [ "$?" != "0" ]; then
                    echo " *** WARNING: It was not possible to create the $HOME/.netrc file; please check your permissions."
                else
                    echo "machine dataportal.eso.org login $user password $password" >> $HOME/.netrc
                    echo "Successful: ESO credentials for user $user on dataportal.eso.org have been stored in your $HOME/.netrc file."
                    echo " You won't be prompted for your password again."
                    netrc_in_place="yes"
                fi
            fi
        else
            echo "Your ESO credentials will not be stored, you might get prompted for your password again."
        fi
    fi
    if [ "$netrc_in_place" != "yes" ]; then
        # use a tempfile to which only user has access
        tempfile=`mktemp /tmp/dl.XXXXXXXX 2>/dev/null`
        test "$tempfile" -a -f $tempfile || {
            tempfile=/tmp/dl.$$
            ( umask 077 && : >$tempfile )
        }
        # BUGFIX: announce the tempfile only AFTER it has been created; the
        # original printed this message while $tempfile was still empty.
        echo "*** Warning: Using a temp wgetrc file ($tempfile) containing your password."
        add_on_exit rm -f $tempfile
        echo "auth_no_challenge=on" > $tempfile
        echo "--http-user=$user" >> $tempfile
        echo "--http-password=$password" >> $tempfile
        # Ensure that the netrc is not used (it could be conflicting if the
        # netrc entry for dataportal is for a different user)
        echo "netrc=off" >> $tempfile
        WGETRC=$tempfile; export WGETRC
        unset password
    fi
}
#################################################
# Submit the request using the file list created
#################################################
# Without -User we can only show metadata; otherwise prepare credentials.
if [ -z "$user" ]; then
    echo ""
    echo "MODE: METADATA ONLY (Data are not going to be retrieved given that user's credentials were not provided; use -User if you want to retrieve the data)."
    echo ""
    printf "%s " "Display $numRecords returned results? [y-yes, else quit]:"
    read continue
    [ "$continue" == "y" ] && cat $resultsfullpath | more
    success $numRecords
    exit 0
fi
echo "Using account $user to submit an ESO archive data request"
set_wgetrc
# Move into the output directory: the submission record, the download
# script, and the data files are all created there.
cd $outputdir || { echo "ERROR: Could not cd $outputdir."; exit 1; }
# Asking user a request description (optional) that will be shown in the ESO request handler
printf 'Please provide a request description (optional, up to 255 chars) or hit return to continue:'
read reqdescr
# POST the dataset list; the 3>&1 1>&2 2>&3 3>&- shuffle swaps stdout and
# stderr so wget's progress chatter can be filtered while the server reply
# lands in submission_$$.
wget -O submission_$$ --user-agent="${user_agent}" --auth-no-challenge --no-check-certificate --post-data="requestDescription=programmatic access:$reqdescr&requestCommand=SELECTIVE_HOTFLY&dataset=$filelist" --header="Accept:text/plain" https://dataportal.eso.org/rh/api/requests/$user/submission 3>&1 1>&2 2>&3 3>&- | egrep -v 'WARNING|Self-signed certificate encountered' > /dev/null
add_on_exit rm -f submission_$$
#####################################################################
# Get the request number of the request that has just been submitted
#####################################################################
reqnum=$(gawk '{print $1}' submission_$$)
if [ -z "$reqnum" ]; then
    echo "FATAL ERROR: No request could be submitted (maybe your credentials are not correct). Aborting."
    exit 1
fi
echo "reqnum=" $reqnum
#####################################################################
# Before downloading the data make sure that the request is complete
# so that also the download.sh script is complete
#####################################################################
# Poll the request state endpoint every 5 seconds until it is COMPLETE.
wget -O state_$$ --auth-no-challenge --user-agent="${user_agent}" --no-check-certificate https://dataportal.eso.org/rh/api/requests/$user/$reqnum/state 3>&1 1>&2 2>&3 3>&- | egrep -v 'WARNING|Self-signed certificate encountered' > /dev/null
add_on_exit rm -f state_$$
requeststate=$(tail -1 state_$$ | cut -c1-9)
echo "request state is" $requeststate
while [ "$requeststate" != "COMPLETE" ]; do
    \rm state_$$
    wget -O state_$$ --user-agent="${user_agent}" --auth-no-challenge --no-check-certificate https://dataportal.eso.org/rh/api/requests/$user/$reqnum/state 3>&1 1>&2 2>&3 3>&- | egrep -v 'WARNING|Self-signed certificate encountered' > /dev/null
    requeststate=$(tail -1 state_$$ | cut -c1-9)
    echo "request state is now" $requeststate
    sleep 5
done
###################################################################
# Download the download script from the user's request (dataportal)
###################################################################
wget -O downloadRequest_$reqnum.sh --user-agent="${user_agent}" --auth-no-challenge --no-check-certificate https://dataportal.eso.org/rh/api/requests/$user/$reqnum/script 3>&1 1>&2 2>&3 3>&- | egrep -v 'WARNING|Self-signed certificate encountered' > /dev/null
###################################################################################
# Download the data, first by making the download script executable.
# Files will be downloaded with 2 parallel threads, 5 being the maximum allowed,
# just like with the download manager applet.
###################################################################################
# Count the dataportal URLs in the generated script (quoted or not).
numExpectedFiles=$(egrep -c '^"https://dataportal.eso.org|^https://dataportal.eso.org' downloadRequest_$reqnum.sh)
echo "Downloading $numExpectedFiles files (including ancillary files if any, and the readme file): please wait... "
# SECURITY FIX: 'chmod 777' made the downloaded script world-writable and
# world-executable; owner-executable is all that is needed here.
chmod u+x downloadRequest_$reqnum.sh
# Spaces would break the user-agent when passed through the -d option.
user_agent=$(echo $user_agent | tr ' ' '_')
echo "DOWNLOAD SCRIPT RUNS NOW: ./downloadRequest_$reqnum.sh " -X "-L 1 -P 2" -d "--no-check-certificate --user-agent=${user_agent}"
./downloadRequest_$reqnum.sh -X "-L 1 -P 2" -d "--no-check-certificate --user-agent=${user_agent}"
status=$?
if [ "$status" == "0" ]; then
    echo "Downloading successfully completed."
else
    echo "Download failed with error code = $status"
    exit 1
fi
# On success report what happened and where the files are.
success $numRecords $reqnum $numExpectedFiles
# End of script.