diff --git a/Dockerfile b/Dockerfile
index 11b415f..1e18818 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM continuumio/miniconda3:4.6.14
+FROM continuumio/miniconda3:22.11.1
 
 ENV PYTHONDONTWRITEBYTECODE=true
 
@@ -9,7 +9,8 @@ WORKDIR /home/firedpy
 RUN conda update conda --yes \
     && conda config --add channels conda-forge \
     && conda config --set channel_priority strict \
-    && conda env create -f environment.yaml
+    && conda env create -f environment.yaml \
+    && echo "conda activate firedpy" >> ~/.bashrc
 
 RUN conda clean --all --yes --force-pkgs-dirs \
     && find /opt/conda/ -follow -type f -name '*.a' -delete \
@@ -18,10 +19,18 @@ RUN conda clean --all --yes --force-pkgs-dirs \
     && conda list
 
 RUN apt-get update \
-    && apt-get install -y --no-install-recommends \
-        awscli \
-        htop
-
+    && apt-get install -y htop curl unzip
+
+# Download AWS CLI v2 and install it
+RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \
+    && unzip awscliv2.zip \
+    && ./aws/install
+
+# Clean up the downloaded files and temporary packages
+RUN rm -rf awscliv2.zip ./aws \
+    && apt-get remove -y curl unzip \
+    && apt-get clean
+
 # The following line of code solved a problem that apparently is now not happening, and now this creates its own problem.
 # If one is trying to do a docker build, and gets an error involving libffi.so.7, uncomment the following lines.
 # RUN ln -s /opt/conda/envs/firedpy/lib/libffi.so.6 /opt/conda/envs/firedpy/lib/libffi.so.7 \
@@ -29,4 +38,4 @@ RUN apt-get update \
 
 SHELL ["conda", "run", "-n", "firedpy", "/bin/bash", "-c"]
 
-RUN python setup.py install
+RUN python setup.py install
diff --git a/environment.yaml b/environment.yaml
index 3a13546..2146a5a 100644
--- a/environment.yaml
+++ b/environment.yaml
@@ -2,23 +2,23 @@ name: firedpy
 channels:
   - defaults
 dependencies:
-  - beautifulsoup4
-  - dask
-  - descartes
-  - gdal
-  - geopandas
-  - fiona
-  - lxml
-  - matplotlib
-  - netcdf4
-  - numpy
-  - pandas
-  - pycurl
-  - pyyaml
-  - rasterio
-  - requests
-  - toolz
-  - tqdm
-  - xarray
-  - paramiko
-  - pytest-shutil
+  - beautifulsoup4=4.11.1
+  - dask=2022.7.0
+  - descartes=1.1.0
+  - gdal=3.0.2
+  - geopandas=0.9.0
+  - fiona=1.8.13.post1
+  - lxml=4.9.1
+  - matplotlib=3.5.2
+  - netcdf4=1.5.7
+  - numpy=1.23.1
+  - pandas=1.4.3
+  - pycurl=7.45.1
+  - pyyaml=6.0
+  - rasterio=1.2.10
+  - requests=2.28.1
+  - toolz=0.11.2
+  - tqdm=4.64.0
+  - xarray=0.20.1
+  - paramiko=2.8.1
+  - pytest-shutil=1.7.0
diff --git a/firedpy/__main__.py b/firedpy/__main__.py
index 2da6789..a61faf4 100644
--- a/firedpy/__main__.py
+++ b/firedpy/__main__.py
@@ -343,7 +343,7 @@ def main():
     if landcover_type:
         # Earthdata Login
         #test url for correct user/password
-        url = "https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.006/2019.01.01/MCD12Q1.A2019001.h13v12.006.2020212130349.hdf"
+        url = "https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.061/2019.01.01/MCD12Q1.A2019001.h13v12.061.2022169161130.hdf"
         password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
         password_manager.add_password(None, "https://urs.earthdata.nasa.gov",
                                       username, password)
diff --git a/firedpy/functions.py b/firedpy/functions.py
index f7080e1..2494a47 100644
--- a/firedpy/functions.py
+++ b/firedpy/functions.py
@@ -64,7 +64,7 @@ def convertDates(array, year):
 
 def convertDate(julien_day, year):
     base = dt.datetime(1970, 1, 1)
-    date = dt.datetime(year, 1, 1) + dt.timedelta(int(julien_day))
+    date = dt.datetime(year, 1, 1) + dt.timedelta(int(julien_day) - 1)
     days = date - base
     return days.days
 
@@ -87,7 +87,7 @@ def dateRange(perimeter):
     if len(perimeter.coords) > 0:
         base = dt.datetime(1970, 1, 1)
         days = [p[2] for p in perimeter.coords]
-        day1 = (base + dt.timedelta(days=int(min(days)))).strftime("%Y-%m-%d")
+        day1 = (base + dt.timedelta(days=int(min(days)) - 1)).strftime("%Y-%m-%d")
     else:
         day1 = "N/A"
     return day1
@@ -380,7 +380,7 @@ def getBurns(self):
         if self.tiles[0].lower() != "all":
             tiles = self.tiles
         else:
-            sftp_client.chdir('/data/MODIS/C6/MCD64A1/HDF')
+            sftp_client.chdir('/data/MODIS/C61/MCD64A1/HDF')
             dirs = sftp_client.listdir()
             tiles = dirs
 
@@ -389,7 +389,7 @@ def getBurns(self):
         # Download the available files and catch failed downloads
         for tile in tiles:
             # Find remote folder for the tile
-            sftp_folder = '/data/MODIS/C6/MCD64A1/HDF/' + tile
+            sftp_folder = '/data/MODIS/C61/MCD64A1/HDF/' + tile
 
             # Check if remote folder exists and if not, continue to next tile
             try:
@@ -418,23 +418,24 @@ def getBurns(self):
                     for yr in yrs:
                         tile_range.append("MCD64A1.A"+str(yr))
                     # Attempt file download
-                    try:
-                        for h in tqdm(hdfs):
-                            remote = sftp_folder+"/"+h
-                            for name in tile_range:
-                                if name in h:
-                                    os.chdir(folder)
+                    for h in tqdm(hdfs):
+                        remote = sftp_folder+"/"+h
+                        for name in tile_range:
+                            if name in h:
+                                os.chdir(folder)
+                                try:
                                     sftp_client.get(remote, h)
-                    except Exception as e:
-                        print(e)
+                                except Exception as e:
+                                    print(e)
                 elif self.start_yr==None and self.end_yr==None:
-                    try:
-                        for h in tqdm(hdfs):
-                            remote = sftp_folder+"/"+h
-                            os.chdir(folder)
+
+                    for h in tqdm(hdfs):
+                        remote = sftp_folder+"/"+h
+                        os.chdir(folder)
+                        try:
                             sftp_client.get(remote, h)
-                    except Exception as e:
-                        print(e)
+                        except Exception as e:
+                            print(e)
 
             except Exception:
                 print("No MCD64A1 Product for tile: "+str(tile)+", skipping...")
@@ -485,20 +486,21 @@ def getBurns(self):
         for m in missings:
             tile = m.split("/")[-2]
-            sftp_folder = "/MCD64A1/C6/HDF/" + tile
+            sftp_folder = "/MCD64A1/C61/HDF/" + tile
             sftp_client.chdir(sftp_folder)
 
             file = os.path.basename(m)
             localpath = os.path.join(self.hdf_path, tile)
             trgt = os.path.join(self.hdf_path, tile, file)
 
             # Attempt re-download
-            try:
-                for h in tqdm(hdfs):
-                    remote = sftp_folder+"/"+h
-                    os.chdir(localpath)
+
+            for h in tqdm(hdfs):
+                remote = sftp_folder+"/"+h
+                os.chdir(localpath)
+                try:
                     sftp_client.get(remote, localpath)
-            except Exception as e:
-                print(e)
+                except Exception as e:
+                    print(e)
 
             # Check the downloaded file
             try:
@@ -711,7 +713,7 @@ def rebuild_auth(self, prepared_request, response):
         print("Connected to 'fuoco.geog.umd.edu' ...")
         # Open the connection to the SFTP
         sftp_client = ssh_client.open_sftp()
-        sftp_client.chdir('/data/MODIS/C6/MCD64A1/HDF')
+        sftp_client.chdir('/data/MODIS/C61/MCD64A1/HDF')
         tiles = sftp_client.listdir()
         ssh_client.close()
         sftp_client.close()
@@ -725,7 +727,7 @@ def rebuild_auth(self, prepared_request, response):
         lc_type = "type" + str(landcover_type)
 
         # Get available years
-        r = requestIO("https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.006/")
+        r = requestIO("https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.061/")
         soup = BeautifulSoup(r, 'html.parser')
         links = [link["href"] for link in soup.find_all("a", href=True)]
         years = [ll[:4] for ll in links if '01.01' in ll]
@@ -789,7 +791,7 @@ def fileCheck(landcover_path, year, file):
             year_tiles = needed_tiles[yr]
 
             # Retrieve list of links to hdf files
-            url = ("https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.006/" + yr + ".01.01/")
+            url = ("https://e4ftl01.cr.usgs.gov/MOTA/MCD12Q1.061/" + yr + ".01.01/")
             r = requestIO(url)
             soup = BeautifulSoup(r, 'html.parser')
             names = [link["href"] for link in soup.find_all("a", href=True)]