To download multiple ZIP files, use this Bash script with curl:
# Download multiple files using curl
URL=https://eye2sky.de/data/asi/imgs
# Start and end of period (start inclusive, end exclusive)
START_DATE='2022-04-01'
END_DATE='2022-04-10'
# List of all sites (Do not change)
ALL_SITES="AURIC BARSE BAZWI DOERP EMSTE ESENS ESREF LEEER OLBFE OLCLO OLDLR OLDON OLEMS OLETZ OLFLE OLHOL OLJET OLMAR OLTIR OLTWE OLUOL PAPEN PVAMM PVNOR PVRAS SOEGE VAREL WESTE WITTM"
# Sites selected (space-separated list; word splitting below is intentional)
SITES=AURIC
while [[ "$START_DATE" != "$END_DATE" ]]; do
  for SITE in ${SITES}; do
    # Build the remote path from the CURRENT date — the original added
    # "+ 1 day" here a second time, so the downloaded file was one day
    # ahead of the date printed to the user.
    DIRNAME=$(date --date "$START_DATE" +"%Y/%m/%d")
    FILENAME="ASI_$(date --date "$START_DATE" +"%Y%m%d")_${SITE}.zip"
    TARGET_URL="${URL}/${DIRNAME}/${FILENAME}"
    echo "Downloading StationID ${SITE} and date ${START_DATE} from ${TARGET_URL}"
    # -f: fail on HTTP errors instead of saving an error page as a .zip
    curl -fO "$TARGET_URL"
  done
  # Advance AFTER downloading so the period is start-inclusive,
  # end-exclusive — matching the Python script below.
  START_DATE=$(date --date "$START_DATE + 1 day" +"%Y-%m-%d")
done
or this Python script using urllib:
import os
import urllib.error
import urllib.request
import warnings
from datetime import datetime, timedelta
# Download multiple files using python
# Start and end of period (start inclusive, end exclusive)
START_DATE = "2022-04-01"
END_DATE = "2022-04-10"
# List of all sites (Do not change)
ALL_SITES = [
    "AURIC",
    "BARSE",
    "BAZWI",
    "DOERP",
    "EMSTE",
    "ESENS",
    "ESREF",
    "LEEER",
    "OLBFE",
    "OLCLO",
    "OLDLR",
    "OLDON",
    "OLEMS",
    "OLETZ",
    "OLFLE",
    "OLHOL",
    "OLJET",
    "OLMAR",
    "OLTIR",
    "OLTWE",
    "OLUOL",
    "PAPEN",
    "PVAMM",
    "PVNOR",
    "PVRAS",
    "SOEGE",
    "VAREL",
    "WESTE",
    "WITTM",
]
# Sites selected (Provide list of sites)
SITES = ["AURIC", "BARSE"]
URL = "https://eye2sky.de/data/asi/imgs"
DIRNAME = "{dt:%Y/%m/%d}"
# Remote address template. URLs always use '/' as the separator, so we must
# NOT use os.path.join here — on Windows it inserts backslashes and breaks
# the URL.
TARGET_URL = f"{URL}/{DIRNAME}/ASI_{{dt:%Y%m%d}}_{{stn}}.zip"
# How to save the file locally (local paths DO use os.path.join)
LOCAL_FILENAME = os.path.join("Eye2Sky", DIRNAME, "ASI_{dt:%Y%m%d}_{stn}.zip")


def date_range(start, end):
    """Return the list of datetimes from ``start`` (inclusive) to ``end``
    (exclusive), one per day.

    Both arguments are ``YYYY-MM-DD`` strings. An empty list is returned
    when ``end`` is on or before ``start``.
    """
    start_dt = datetime.strptime(start, "%Y-%m-%d")
    end_dt = datetime.strptime(end, "%Y-%m-%d")
    return [start_dt + timedelta(days=x) for x in range((end_dt - start_dt).days)]


def download_all(dates, sites):
    """Download the ZIP archive for every (date, site) pair.

    dates: iterable of datetime objects.
    sites: list of station IDs, or a single station ID string.
    Missing remote files (HTTP 404) produce a warning and are skipped;
    any other HTTP error is re-raised.
    """
    # Accept a bare string for convenience; normalize once, outside the loop.
    if not isinstance(sites, list):
        sites = [sites]
    for dt in dates:
        for site in sites:
            # Generate URL to the ZIP archive
            url = TARGET_URL.format(stn=site, dt=dt)
            print(
                f"Downloading images for Station_ID {site} and date {dt:%Y-%m-%d} from {url}"
            )
            try:
                # Context manager guarantees the connection is closed.
                with urllib.request.urlopen(url) as remotezip:
                    zipinmemory = remotezip.read()
            except urllib.error.HTTPError as e:
                # Check the status code, not the reason phrase — servers are
                # free to word the 404 message however they like.
                if e.code == 404:
                    warnings.warn(f"File {url} was not found.")
                    continue
                raise
            # Save to file
            fname = LOCAL_FILENAME.format(stn=site, dt=dt)
            os.makedirs(os.path.dirname(fname), exist_ok=True)
            with open(fname, "wb") as download:
                download.write(zipinmemory)


if __name__ == "__main__":
    download_all(date_range(START_DATE, END_DATE), SITES)