diff --git a/.gitignore b/.gitignore
index de7b05c4..057bf5b2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,4 +24,5 @@ ceres-solver.tar.gz
 *.pyc
 opencv.zip
 settings.yaml
-.setupdevenv
\ No newline at end of file
+.setupdevenv
+__pycache__
diff --git a/Dockerfile b/Dockerfile
index 7e294e61..ca9bb7db 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,61 +1,10 @@
-FROM phusion/baseimage:0.10.2 as base
+FROM ubuntu:18.04

 # Env variables
 ENV DEBIAN_FRONTEND noninteractive
-
-#Install dependencies and required requisites
-RUN add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable \
-    && add-apt-repository -y ppa:george-edison55/cmake-3.x \
-    && apt-get update -y \
-    && apt-get install --no-install-recommends -y \
-    build-essential \
-    cmake \
-    gdal-bin \
-    git \
-    libatlas-base-dev \
-    libavcodec-dev \
-    libavformat-dev \
-    libboost-date-time-dev \
-    libboost-filesystem-dev \
-    libboost-iostreams-dev \
-    libboost-log-dev \
-    libboost-python-dev \
-    libboost-regex-dev \
-    libboost-thread-dev \
-    libeigen3-dev \
-    libflann-dev \
-    libgdal-dev \
-    libgeotiff-dev \
-    libgoogle-glog-dev \
-    libgtk2.0-dev \
-    libjasper-dev \
-    libjpeg-dev \
-    libjsoncpp-dev \
-    liblapack-dev \
-    liblas-bin \
-    libpng-dev \
-    libproj-dev \
-    libsuitesparse-dev \
-    libswscale-dev \
-    libtbb2 \
-    libtbb-dev \
-    libtiff-dev \
-    libvtk6-dev \
-    libxext-dev \
-    python-dev \
-    python-gdal \
-    python-matplotlib \
-    python-pip \
-    python-software-properties \
-    python-wheel \
-    software-properties-common \
-    swig2.0 \
-    grass-core \
-    libssl-dev \
-    && apt-get remove libdc1394-22-dev \
-    && pip install --upgrade pip \
-    && pip install setuptools
-
+ENV PYTHONPATH "$PYTHONPATH:/code/SuperBuild/install/lib/python3.6/dist-packages"
+ENV PYTHONPATH "$PYTHONPATH:/code/SuperBuild/src/opensfm"
+ENV LD_LIBRARY_PATH "$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"

 # Prepare directories
 WORKDIR /code

@@ -63,24 +12,7 @@ WORKDIR /code
 # Copy everything
 COPY . ./

-RUN pip install -r requirements.txt
-
-ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages"
-ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm"
-ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib"
-
-# Compile code in SuperBuild and root directories
-RUN rm -fr docker \
-    && cd SuperBuild \
-    && mkdir build \
-    && cd build \
-    && cmake .. \
-    && make -j$(nproc) \
-    && cd ../.. \
-    && mkdir build \
-    && cd build \
-    && cmake .. \
-    && make -j$(nproc)
+RUN bash configure.sh install

 # Cleanup APT
 RUN apt-get clean \
@@ -98,4 +30,4 @@ RUN rm -rf \
     /code/SuperBuild/src/pdal

 # Entry point
-ENTRYPOINT ["python", "/code/run.py"]
\ No newline at end of file
+ENTRYPOINT ["python3", "/code/run.py"]
\ No newline at end of file
diff --git a/README.md b/README.md
index 1b1925cb..e4993064 100644
--- a/README.md
+++ b/README.md
@@ -75,9 +75,9 @@ See http://docs.opendronemap.org for tutorials and more guides.

 We have a vibrant [community forum](https://community.opendronemap.org/). You can [search it](https://community.opendronemap.org/search?expanded=true) for issues you might be having with ODM and you can post questions there. We encourage users of ODM to participate in the forum and to engage with fellow drone mapping users.

-## Native Install (Ubuntu 16.04)
+## Native Install (Ubuntu 18.04)

-You can run ODM natively on Ubuntu 16.04 LTS (although we don't recommend it):
+You can run ODM natively on Ubuntu 18.04 LTS (although we don't recommend it):

 1. Download the source from [here](https://github.com/OpenDroneMap/ODM/archive/master.zip)
 2.
Run `bash configure.sh install` diff --git a/SuperBuild/CMakeLists.txt b/SuperBuild/CMakeLists.txt index adfc53f2..d3180c9d 100644 --- a/SuperBuild/CMakeLists.txt +++ b/SuperBuild/CMakeLists.txt @@ -129,7 +129,7 @@ endforeach() externalproject_add(mve GIT_REPOSITORY https://github.com/OpenDroneMap/mve.git - GIT_TAG 099 + GIT_TAG 200 UPDATE_COMMAND "" SOURCE_DIR ${SB_SOURCE_DIR}/elibs/mve CONFIGURE_COMMAND "" diff --git a/SuperBuild/cmake/External-MvsTexturing.cmake b/SuperBuild/cmake/External-MvsTexturing.cmake index 21721e9a..fff13360 100644 --- a/SuperBuild/cmake/External-MvsTexturing.cmake +++ b/SuperBuild/cmake/External-MvsTexturing.cmake @@ -9,7 +9,7 @@ ExternalProject_Add(${_proj_name} #--Download step-------------- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name} GIT_REPOSITORY https://github.com/OpenDroneMap/mvs-texturing - GIT_TAG 101 + GIT_TAG 200 #--Update/Patch step---------- UPDATE_COMMAND "" #--Configure step------------- diff --git a/SuperBuild/cmake/External-OpenGV.cmake b/SuperBuild/cmake/External-OpenGV.cmake index 70dc52a3..7e056402 100644 --- a/SuperBuild/cmake/External-OpenGV.cmake +++ b/SuperBuild/cmake/External-OpenGV.cmake @@ -16,7 +16,7 @@ ExternalProject_Add(${_proj_name} CMAKE_ARGS -DBUILD_TESTS=OFF -DBUILD_PYTHON=ON - -DPYBIND11_PYTHON_VERSION=2.7 + -DPYBIND11_PYTHON_VERSION=3.6 -DCMAKE_INSTALL_PREFIX:PATH=${SB_INSTALL_DIR} #--Build step----------------- BINARY_DIR ${_SB_BINARY_DIR} diff --git a/SuperBuild/cmake/External-OpenSfM.cmake b/SuperBuild/cmake/External-OpenSfM.cmake index b19d7620..45f3a604 100644 --- a/SuperBuild/cmake/External-OpenSfM.cmake +++ b/SuperBuild/cmake/External-OpenSfM.cmake @@ -9,7 +9,7 @@ ExternalProject_Add(${_proj_name} #--Download step-------------- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} GIT_REPOSITORY https://github.com/OpenDroneMap/OpenSfM/ - GIT_TAG 100 + GIT_TAG 200 #--Update/Patch step---------- UPDATE_COMMAND git submodule update --init --recursive #--Configure step------------- @@ -18,7 +18,7 @@ ExternalProject_Add(${_proj_name} -DCERES_ROOT_DIR=${SB_INSTALL_DIR} -DOpenCV_DIR=${SB_INSTALL_DIR}/share/OpenCV -DOPENSFM_BUILD_TESTS=off - -DPYTHON_EXECUTABLE=/usr/bin/python + -DPYTHON_EXECUTABLE=/usr/bin/python3 #--Build step----------------- BINARY_DIR ${_SB_BINARY_DIR} #--Install step--------------- diff --git a/SuperBuild/cmake/External-PDAL.cmake b/SuperBuild/cmake/External-PDAL.cmake index 869721af..879826c0 100644 --- a/SuperBuild/cmake/External-PDAL.cmake +++ b/SuperBuild/cmake/External-PDAL.cmake @@ -8,7 +8,7 @@ ExternalProject_Add(${_proj_name} STAMP_DIR ${_SB_BINARY_DIR}/stamp #--Download step-------------- DOWNLOAD_DIR ${SB_DOWNLOAD_DIR} - URL https://github.com/PDAL/PDAL/archive/2.1.0.zip + URL https://github.com/PDAL/PDAL/archive/2.2.0.zip #--Update/Patch step---------- UPDATE_COMMAND "" #--Configure step------------- diff --git a/VERSION b/VERSION index 6d7de6e6..227cea21 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.0.2 +2.0.0 diff --git a/configure.sh b/configure.sh old mode 100755 new mode 100644 index 94f9a653..3111edb0 --- a/configure.sh +++ b/configure.sh @@ -1,5 +1,20 @@ #!/bin/bash +check_version(){ + UBUNTU_VERSION=$(lsb_release -r) + if [[ $UBUNTU_VERSION = *"18.04"* ]]; then + echo "Ubuntu: $UBUNTU_VERSION, good!" + elif [[ $UBUNTU_VERSION = *"16.04" ]]; then + echo "ODM 2.0 has upgraded to Ubuntu 18.04, but you're on 16.04" + echo "The last version of ODM that supports Ubuntu 16.04 is v1.0.2. We recommend you upgrade to Ubuntu 18.04, or better yet, use docker." 
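+            # `lsb_release -r` prints e.g. "Release:<TAB>18.04", hence the loose
+            # substring matches above. A stricter check (hypothetical, not part
+            # of this patch) could compare the bare value from `lsb_release -rs`
+            # instead:  [ "$(lsb_release -rs)" = "18.04" ]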
+ exit 1 + else + echo "You are not on Ubuntu 18.04 (detected: $UBUNTU_VERSION)" + echo "It might be possible to run ODM on a newer version of Ubuntu, however, you cannot rely on this script." + exit 1 + fi +} + if [[ $2 =~ ^[0-9]+$ ]] ; then processes=$2 else @@ -7,63 +22,73 @@ else fi install() { + cd /code + ## Set up library paths - export PYTHONPATH=$RUNPATH/SuperBuild/install/lib/python2.7/dist-packages:$RUNPATH/SuperBuild/src/opensfm:$PYTHONPATH + export DEBIAN_FRONTEND=noninteractive export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$RUNPATH/SuperBuild/install/lib - ## Before installing + ## Before installing echo "Updating the system" - add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable - apt-get update - + if ! command -v sudo &> /dev/null + then + echo "Installing sudo" + apt-get update && apt-get install -y sudo + fi + sudo apt-get update && sudo apt-get install software-properties-common lsb-release tzdata -y --no-install-recommends + + # Check version + check_version + + sudo add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable + sudo apt-get update + echo "Installing Required Requisites" - apt-get install -y -qq build-essential \ + sudo apt-get install -y -qq --no-install-recommends \ + build-essential \ git \ cmake \ - python-pip \ + python3-pip \ libgdal-dev \ gdal-bin \ libgeotiff-dev \ pkg-config \ libjsoncpp-dev \ - python-gdal \ + python3-gdal \ + python3-setuptools \ grass-core \ libssl-dev \ - liblas-bin \ - swig2.0 \ - python-wheel \ + swig3.0 \ + python3-wheel \ libboost-log-dev + sudo pip3 install -U pip - echo "Getting CMake 3.1 for MVS-Texturing" - apt-get install -y software-properties-common python-software-properties - add-apt-repository -y ppa:george-edison55/cmake-3.x - apt-get update -y - apt-get install -y --only-upgrade cmake echo "Installing OpenCV Dependencies" - apt-get install -y -qq libgtk2.0-dev \ + sudo apt-get install -y -qq --no-install-recommends libgtk2.0-dev \ libavcodec-dev \ libavformat-dev \ libswscale-dev \ - python-dev \ + python3-dev \ libtbb2 \ libtbb-dev \ libjpeg-dev \ libpng-dev \ libtiff-dev \ - libjasper-dev \ libflann-dev \ libproj-dev \ libxext-dev \ liblapack-dev \ libeigen3-dev \ libvtk6-dev - - echo "Removing libdc1394-22-dev due to python opencv issue" - apt-get remove libdc1394-22-dev - + + sudo add-apt-repository "deb http://security.ubuntu.com/ubuntu xenial-security main" + sudo apt-get update + sudo apt-get install -y -qq --no-install-recommends libjasper1 \ + libjasper-dev + echo "Installing OpenSfM Dependencies" - apt-get install -y -qq libgoogle-glog-dev \ + sudo apt-get install -y -qq --no-install-recommends libgoogle-glog-dev \ libsuitesparse-dev \ libboost-filesystem-dev \ libboost-iostreams-dev \ @@ -72,12 +97,22 @@ install() { libboost-date-time-dev \ libboost-thread-dev - pip install -r "${RUNPATH}/requirements.txt" + pip install -r requirements.txt - # Fix: /usr/local/lib/python2.7/dist-packages/requests/__init__.py:83: RequestsDependencyWarning: Old version of cryptography ([1, 2, 3]) may cause slowdown. - pip install --upgrade cryptography - python -m easy_install --upgrade pyOpenSSL + if [ ! -z "$PORTABLE_INSTALL" ]; then + echo "Replacing g++ and gcc with our scripts for portability..." + if [ ! -e /usr/bin/gcc_real ]; then + sudo mv -v /usr/bin/gcc /usr/bin/gcc_real + sudo cp -v ./docker/gcc /usr/bin/gcc + fi + if [ ! 
-e /usr/bin/g++_real ]; then + sudo mv -v /usr/bin/g++ /usr/bin/g++_real + sudo cp -v ./docker/g++ /usr/bin/g++ + fi + fi + set -eo pipefail + echo "Compiling SuperBuild" cd ${RUNPATH}/SuperBuild mkdir -p build && cd build @@ -87,11 +122,13 @@ install() { cd ${RUNPATH} mkdir -p build && cd build cmake .. && make -j$processes - + echo "Configuration Finished" } uninstall() { + check_version + echo "Removing SuperBuild and build directories" cd ${RUNPATH}/SuperBuild rm -rfv build src download install @@ -100,11 +137,12 @@ uninstall() { } reinstall() { + check_version + echo "Reinstalling ODM modules" uninstall install } - usage() { echo "Usage:" echo "bash configure.sh [nproc]" @@ -120,7 +158,7 @@ usage() { echo "[nproc] is an optional argument that can set the number of processes for the make -j tag. By default it uses $(nproc)" } -if [[ $1 =~ ^(install|reinstall|uninstall|usage)$ ]]; then +if [[ $1 =~ ^(install|reinstall|uninstall)$ ]]; then RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" "$1" else diff --git a/configure_18_04.sh b/configure_18_04.sh deleted file mode 100644 index 6da0e58a..00000000 --- a/configure_18_04.sh +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/bash - -if [[ $2 =~ ^[0-9]+$ ]] ; then - processes=$2 -else - processes=$(nproc) -fi - -install() { - -cd /code - ## Set up library paths - export PYTHONPATH=$RUNPATH/SuperBuild/install/lib/python2.7/dist-packages:$RUNPATH/SuperBuild/src/opensfm:$PYTHONPATH - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$RUNPATH/SuperBuild/install/lib - - - ## Before installing - echo "Updating the system" - sudo add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable - - - echo "Installing Required Requisites" - sudo apt-get install -y -qq build-essential \ - git \ - cmake \ - python-pip \ - libgdal-dev \ - gdal-bin \ - libgeotiff-dev \ - pkg-config \ - libjsoncpp-dev \ - python-gdal \ - grass-core \ - libssl-dev \ - liblas-bin \ - swig3.0 \ - python-wheel \ - libboost-log-dev - - - echo "Installing OpenCV Dependencies" - sudo apt-get install -y -qq libgtk2.0-dev \ - libavcodec-dev \ - libavformat-dev \ - libswscale-dev \ - python-dev \ - libtbb2 \ - libtbb-dev \ - libjpeg-dev \ - libpng-dev \ - libtiff-dev \ - libflann-dev \ - libproj-dev \ - libxext-dev \ - liblapack-dev \ - libeigen3-dev \ - libvtk6-dev - - sudo add-apt-repository "deb http://security.ubuntu.com/ubuntu xenial-security main" - sudo apt-get update - sudo apt-get install -y -qq libjasper1 \ - libjasper-dev - - echo "Installing OpenSfM Dependencies" - sudo apt-get install -y -qq libgoogle-glog-dev \ - libsuitesparse-dev \ - libboost-filesystem-dev \ - libboost-iostreams-dev \ - libboost-regex-dev \ - libboost-python-dev \ - libboost-date-time-dev \ - libboost-thread-dev - - pip install -r "/code/requirements.txt" - - # Fix: /usr/local/lib/python2.7/dist-packages/requests/__init__.py:83: RequestsDependencyWarning: Old version of cryptography ([1, 2, 3]) may cause slowdown. - pip install --upgrade cryptography - python -m easy_install --upgrade pyOpenSSL - - echo "Compiling SuperBuild" - cd ${RUNPATH}/SuperBuild - mkdir -p build && cd build - cmake .. && make -j$processes - - echo "Compiling build" - cd ${RUNPATH} - mkdir -p build && cd build - cmake .. 
&& make -j$processes - - echo "Configuration Finished" -} - -uninstall() { - echo "Removing SuperBuild and build directories" - cd ${RUNPATH}/SuperBuild - rm -rfv build src download install - cd ../ - rm -rfv build -} - -reinstall() { - echo "Reinstalling ODM modules" - uninstall - install -} -usage() { - echo "Usage:" - echo "bash configure.sh [nproc]" - echo "Subcommands:" - echo " install" - echo " Installs all dependencies and modules for running OpenDroneMap" - echo " reinstall" - echo " Removes SuperBuild and build modules, then re-installs them. Note this does not update OpenDroneMap to the latest version. " - echo " uninstall" - echo " Removes SuperBuild and build modules. Does not uninstall dependencies" - echo " help" - echo " Displays this message" - echo "[nproc] is an optional argument that can set the number of processes for the make -j tag. By default it uses $(nproc)" -} - -if [[ $1 =~ ^(install|reinstall|uninstall)$ ]]; then - RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - "$1" -else - echo "Invalid instructions." >&2 - usage - exit 1 -fi diff --git a/hooks/pre-commit b/hooks/pre-commit deleted file mode 100644 index 0a85afe4..00000000 --- a/hooks/pre-commit +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/sh -# -# An example hook script to verify what is about to be committed. -# Called by "git commit" with no arguments. The hook should -# exit with non-zero status after issuing an appropriate message if -# it wants to stop the commit. -# -# To enable this hook, rename this file to "pre-commit". - -exec 1>&2 - -echo "RUNNING PRE-COMMIT" -EXIT_CODE=0 -# Get list of files about to be committed -if git diff --cached --name-only --diff-filter=ACM | grep 'ccd_defs.json'; then - echo "We changed ccd_defs.json" - GIT_ROOT=$(git rev-parse --show-toplevel) - python $GIT_ROOT/ccd_defs_check.py - EXIT_CODE=$(echo $?) 
-fi - -# non-zero exit fails the commit -exit $EXIT_CODE diff --git a/img/bellus_map.png.REMOVED.git-id b/img/bellus_map.png.REMOVED.git-id deleted file mode 100644 index 12a7e075..00000000 --- a/img/bellus_map.png.REMOVED.git-id +++ /dev/null @@ -1 +0,0 @@ -305acb70d8d2c350a8374fbb5028d914facf3fa4 \ No newline at end of file diff --git a/img/odm_image.png b/img/odm_image.png deleted file mode 100644 index 23959f59..00000000 Binary files a/img/odm_image.png and /dev/null differ diff --git a/img/tol_ptcloud.png.REMOVED.git-id b/img/tol_ptcloud.png.REMOVED.git-id deleted file mode 100644 index 0a8dea4c..00000000 --- a/img/tol_ptcloud.png.REMOVED.git-id +++ /dev/null @@ -1 +0,0 @@ -de3d398070f83430c950ae84845fd85b504b5452 \ No newline at end of file diff --git a/opendm/concurrency.py b/opendm/concurrency.py index 53dea176..910cf6be 100644 --- a/opendm/concurrency.py +++ b/opendm/concurrency.py @@ -63,8 +63,10 @@ def parallel_map(func, items, max_workers=1): t.start() threads.append(t) + i = 1 for t in items: pq.put((i, t.copy())) + i += 1 def stop_workers(): for i in range(len(threads)): diff --git a/opendm/config.py b/opendm/config.py index 8681c104..903a62c6 100755 --- a/opendm/config.py +++ b/opendm/config.py @@ -60,16 +60,18 @@ class StoreValue(argparse.Action): setattr(namespace, self.dest, values) setattr(namespace, self.dest + '_is_set', True) -parser = SettingsParser(description='OpenDroneMap', - usage='%(prog)s [options] ', - yaml_file=open(context.settings_path)) args = None -def config(argv=None): +def config(argv=None, parser=None): global args if args is not None and argv is None: return args + + if parser is None: + parser = SettingsParser(description='ODM', + usage='%(prog)s [options] ', + yaml_file=open(context.settings_path)) parser.add_argument('--project-path', metavar='', @@ -89,9 +91,9 @@ def config(argv=None): action=StoreValue, default=2048, type=int, - help='Resizes images by the largest side for feature extraction purposes only. ' + help='Legacy option (use --feature-quality instead). Resizes images by the largest side for feature extraction purposes only. ' 'Set to -1 to disable. This does not affect the final orthophoto ' - ' resolution quality and will not resize the original images. Default: %(default)s') + 'resolution quality and will not resize the original images. Default: %(default)s') parser.add_argument('--end-with', '-e', metavar='', @@ -143,7 +145,16 @@ def config(argv=None): default='sift', choices=['sift', 'hahog'], help=('Choose the algorithm for extracting keypoints and computing descriptors. ' - 'Can be one of: [sift, hahog]. Default: ' + 'Can be one of: %(choices)s. Default: ' + '%(default)s')) + + parser.add_argument('--feature-quality', + metavar='', + action=StoreValue, + default='high', + choices=['ultra', 'high', 'medium', 'low', 'lowest'], + help=('Set feature extraction quality. Higher quality generates better features, but requires more memory and takes longer. ' + 'Can be one of: %(choices)s. Default: ' '%(default)s')) parser.add_argument('--matcher-neighbors', @@ -194,7 +205,7 @@ def config(argv=None): help=('Set a camera projection type. Manually setting a value ' 'can help improve geometric undistortion. By default the application ' 'tries to determine a lens type from the images metadata. Can be ' - 'set to one of: [auto, perspective, brown, fisheye, spherical]. Default: ' + 'set to one of: %(choices)s. 
Default: ' '%(default)s')) parser.add_argument('--radiometric-calibration', @@ -207,7 +218,7 @@ def config(argv=None): 'to obtain reflectance values (otherwise you will get digital number values). ' '[camera] applies black level, vignetting, row gradient gain/exposure compensation (if appropriate EXIF tags are found). ' '[camera+sun] is experimental, applies all the corrections of [camera], plus compensates for spectral radiance registered via a downwelling light sensor (DLS) taking in consideration the angle of the sun. ' - 'Can be set to one of: [none, camera, camera+sun]. Default: ' + 'Can be set to one of: %(choices)s. Default: ' '%(default)s')) parser.add_argument('--max-concurrency', @@ -512,11 +523,25 @@ def config(argv=None): metavar='', action=StoreValue, default=None, - help=('path to the file containing the ground control ' + help=('Path to the file containing the ground control ' 'points used for georeferencing. Default: ' '%(default)s. The file needs to ' - 'be on the following line format: \neasting ' - 'northing height pixelrow pixelcol imagename')) + 'use the following format: \n' + 'EPSG: or <+proj definition>\n' + 'geo_x geo_y geo_z im_x im_y image_name [gcp_name] [extra1] [extra2]')) + + parser.add_argument('--geo', + metavar='', + action=StoreValue, + default=None, + help=('Path to the image geolocation file containing the camera center coordinates used for georeferencing. ' + 'Note that omega/phi/kappa are currently not supported (you can set them to 0). ' + 'Default: ' + '%(default)s. The file needs to ' + 'use the following format: \n' + 'EPSG: or <+proj definition>\n' + 'image_name geo_x geo_y geo_z [omega (degrees)] [phi (degrees)] [kappa (degrees)] [horz accuracy (meters)] [vert accuracy (meters)]' + '')) parser.add_argument('--use-exif', action=StoreTrue, @@ -606,9 +631,7 @@ def config(argv=None): type=str, choices=['JPEG', 'LZW', 'PACKBITS', 'DEFLATE', 'LZMA', 'NONE'], default='DEFLATE', - help='Set the compression to use. Note that this could ' - 'break gdal_translate if you don\'t know what you ' - 'are doing. Options: %(choices)s.\nDefault: %(default)s') + help='Set the compression to use for orthophotos. Options: %(choices)s.\nDefault: %(default)s') parser.add_argument('--orthophoto-cutline', action=StoreTrue, @@ -620,6 +643,14 @@ def config(argv=None): 'Default: ' '%(default)s') + parser.add_argument('--tiles', + action=StoreTrue, + nargs=0, + default=False, + help='Generate static tiles for orthophotos and DEMs that are ' + 'suitable for viewers like Leaflet or OpenLayers. ' + 'Default: %(default)s') + parser.add_argument('--build-overviews', action=StoreTrue, nargs=0, @@ -649,7 +680,7 @@ def config(argv=None): parser.add_argument('--version', action='version', - version='OpenDroneMap {0}'.format(__version__), + version='ODM {0}'.format(__version__), help='Displays version number and exits. ') parser.add_argument('--split', @@ -742,7 +773,7 @@ def config(argv=None): if not args.project_path: log.ODM_ERROR('You need to set the project path in the ' 'settings.yaml file before you can run ODM, ' - 'or use `--project-path `. Run `python ' + 'or use `--project-path `. Run `python3 ' 'run.py --help` for more information. 
') sys.exit(1) diff --git a/opendm/context.py b/opendm/context.py index 457bcfc4..c188938f 100644 --- a/opendm/context.py +++ b/opendm/context.py @@ -1,6 +1,5 @@ import os import sys -from opendm import io import multiprocessing # Define some needed locations @@ -12,9 +11,15 @@ superbuild_bin_path = os.path.join(superbuild_path, 'install', 'bin') tests_path = os.path.join(root_path, 'tests') tests_data_path = os.path.join(root_path, 'tests/test_data') -# add opencv to python path -pyopencv_path = os.path.join(superbuild_path, 'install/lib/python2.7/dist-packages') -sys.path.append(pyopencv_path) +# add opencv,opensfm to python path +python_packages_paths = [os.path.join(superbuild_path, p) for p in [ + 'install/lib/python3.6/dist-packages', + 'install/lib/python3/dist-packages', + 'src/opensfm' +]] +for p in python_packages_paths: + sys.path.append(p) + # define opensfm path opensfm_path = os.path.join(superbuild_path, "src/opensfm") @@ -46,7 +51,12 @@ odm_modules_src_path = os.path.join(root_path, "modules") settings_path = os.path.join(root_path, 'settings.yaml') # Define supported image extensions -supported_extensions = {'.jpg','.jpeg','.png', '.tif', '.tiff'} +supported_extensions = {'.jpg','.jpeg','.png', '.tif', '.tiff', '.bmp'} # Define the number of cores num_cores = multiprocessing.cpu_count() + + +# Print python paths if invoked as a script +if __name__ == "__main__": + print("export PYTHONPATH=" + ":".join(python_packages_paths)) \ No newline at end of file diff --git a/opendm/cropper.py b/opendm/cropper.py index b372842a..f18a6c84 100644 --- a/opendm/cropper.py +++ b/opendm/cropper.py @@ -60,7 +60,7 @@ class Cropper: os.remove(original_geotiff) except Exception as e: - log.ODM_WARNING('Something went wrong while cropping: {}'.format(e.message)) + log.ODM_WARNING('Something went wrong while cropping: {}'.format(e)) # Revert rename os.rename(original_geotiff, geotiff_path) @@ -189,8 +189,14 @@ class Cropper: BUFFER_SMOOTH_DISTANCE = 3 if buffer_distance > 0: - convexhull = convexhull.Buffer(-(buffer_distance + BUFFER_SMOOTH_DISTANCE)) - convexhull = convexhull.Buffer(BUFFER_SMOOTH_DISTANCE) + # For small areas, check that buffering doesn't obliterate + # our hull + tmp = convexhull.Buffer(-(buffer_distance + BUFFER_SMOOTH_DISTANCE)) + tmp = tmp.Buffer(BUFFER_SMOOTH_DISTANCE) + if tmp.Area() > 0: + convexhull = tmp + else: + log.ODM_WARNING("Very small crop area detected, we will not smooth it.") # Save to a new file bounds_geojson_path = self.path('bounds.geojson') diff --git a/opendm/dem/commands.py b/opendm/dem/commands.py index 0d4683d6..de1a0a26 100755 --- a/opendm/dem/commands.py +++ b/opendm/dem/commands.py @@ -9,7 +9,7 @@ from opendm.system import run from opendm import point_cloud from opendm import io from opendm import system -from opendm.concurrency import get_max_memory +from opendm.concurrency import get_max_memory, parallel_map from scipy import ndimage from datetime import datetime from opendm import log @@ -81,8 +81,6 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'] apply_smoothing=True): """ Create DEM from multiple radii, and optionally gapfill """ - # TODO: refactor to use concurrency.parallel_map - global error error = None @@ -164,7 +162,7 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'] # Sort tiles by increasing radius tiles.sort(key=lambda t: float(t['radius']), reverse=True) - def process_one(q): + def process_tile(q): log.ODM_INFO("Generating %s (%s, radius: %s, resolution: %s)" % 
(q['filename'], output_type, q['radius'], resolution)) d = pdal.json_gdal_base(q['filename'], output_type, q['radius'], resolution, q['bounds']) @@ -178,63 +176,7 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'] pdal.json_add_readers(d, [input_point_cloud]) pdal.run_pipeline(d, verbose=verbose) - def worker(): - global error - - while True: - (num, q) = pq.get() - if q is None or error is not None: - pq.task_done() - break - - try: - process_one(q) - except Exception as e: - error = e - finally: - pq.task_done() - - if max_workers > 1: - use_single_thread = False - pq = queue.PriorityQueue() - threads = [] - for i in range(max_workers): - t = threading.Thread(target=worker) - t.start() - threads.append(t) - - for t in tiles: - pq.put((i, t.copy())) - - def stop_workers(): - for i in range(len(threads)): - pq.put((-1, None)) - for t in threads: - t.join() - - # block until all tasks are done - try: - while pq.unfinished_tasks > 0: - time.sleep(0.5) - except KeyboardInterrupt: - print("CTRL+C terminating...") - stop_workers() - sys.exit(1) - - stop_workers() - - if error is not None: - # Try to reprocess using a single thread - # in case this was a memory error - log.ODM_WARNING("DEM processing failed with multiple threads, let's retry with a single thread...") - use_single_thread = True - else: - use_single_thread = True - - if use_single_thread: - # Boring, single thread processing - for q in tiles: - process_one(q) + parallel_map(process_tile, tiles, max_workers) output_file = "%s.tif" % dem_type output_path = os.path.abspath(os.path.join(outdir, output_file)) @@ -294,11 +236,15 @@ def create_dem(input_point_cloud, dem_type, output_type='max', radiuses=['0.56'] run('gdalbuildvrt -resolution highest -r bilinear "%s" "%s" "%s"' % (merged_vrt_path, geotiff_small_filled_path, geotiff_tmp_path)) run('gdal_translate ' '-co NUM_THREADS={threads} ' + '-co TILED=YES ' + '-co COMPRESS=DEFLATE ' '--config GDAL_CACHEMAX {max_memory}% ' '{merged_vrt} {geotiff}'.format(**kwargs)) else: run('gdal_translate ' '-co NUM_THREADS={threads} ' + '-co TILED=YES ' + '-co COMPRESS=DEFLATE ' '--config GDAL_CACHEMAX {max_memory}% ' '{tiles_vrt} {geotiff}'.format(**kwargs)) diff --git a/opendm/dem/pdal.py b/opendm/dem/pdal.py index 11d9d31e..cea98c86 100644 --- a/opendm/dem/pdal.py +++ b/opendm/dem/pdal.py @@ -148,7 +148,7 @@ def run_pipeline(json, verbose=False): f, jsonfile = tempfile.mkstemp(suffix='.json') if verbose: log.ODM_INFO('Pipeline file: %s' % jsonfile) - os.write(f, jsonlib.dumps(json)) + os.write(f, jsonlib.dumps(json).encode('utf8')) os.close(f) cmd = [ diff --git a/opendm/dem/utils.py b/opendm/dem/utils.py index dbcc625f..9fb383a9 100644 --- a/opendm/dem/utils.py +++ b/opendm/dem/utils.py @@ -2,7 +2,7 @@ def get_dem_vars(args): return { 'TILED': 'YES', - 'COMPRESS': 'LZW', + 'COMPRESS': 'DEFLATE', 'BLOCKXSIZE': 512, 'BLOCKYSIZE': 512, 'BIGTIFF': 'IF_SAFER', diff --git a/opendm/gcp.py b/opendm/gcp.py index 57769e42..cd5ae998 100644 --- a/opendm/gcp.py +++ b/opendm/gcp.py @@ -15,9 +15,9 @@ class GCPFile: def read(self): if self.exists(): with open(self.gcp_path, 'r') as f: - contents = f.read().decode('utf-8-sig').encode('utf-8').strip() + contents = f.read().strip() - lines = map(str.strip, contents.split('\n')) + lines = list(map(str.strip, contents.split('\n'))) if lines: self.raw_srs = lines[0] # SRS self.srs = location.parse_srs_header(self.raw_srs) @@ -104,7 +104,7 @@ class GCPFile: if os.path.exists(gcp_file_output): os.remove(gcp_file_output) - files = 
map(os.path.basename, glob.glob(os.path.join(images_dir, "*")))
+        files = list(map(os.path.basename, glob.glob(os.path.join(images_dir, "*"))))

         output = [self.raw_srs]
         files_found = 0
diff --git a/opendm/geo.py b/opendm/geo.py
new file mode 100644
index 00000000..a1dd50b6
--- /dev/null
+++ b/opendm/geo.py
@@ -0,0 +1,80 @@
+import os
+from opendm import log
+from opendm import location
+from pyproj import CRS
+
+class GeoFile:
+    def __init__(self, geo_path):
+        self.geo_path = geo_path
+        self.entries = {}
+        self.srs = None
+
+        with open(self.geo_path, 'r') as f:
+            contents = f.read().strip()
+
+        lines = list(map(str.strip, contents.split('\n')))
+        if lines:
+            self.raw_srs = lines[0] # SRS
+            self.srs = location.parse_srs_header(self.raw_srs)
+            longlat = CRS.from_epsg("4326")
+
+            for line in lines[1:]:
+                if line != "" and line[0] != "#":
+                    parts = line.split()
+                    if len(parts) >= 3:
+                        i = 3  # index of the first extra (unparsed) column
+                        filename = parts[0]
+                        x, y = [float(p) for p in parts[1:3]]
+                        z = float(parts[3]) if len(parts) >= 4 else None
+
+                        # Always convert coordinates to WGS84
+                        if z is not None:
+                            x, y, z = location.transform3(self.srs, longlat, x, y, z)
+                        else:
+                            x, y = location.transform2(self.srs, longlat, x, y)
+
+                        omega = phi = kappa = None
+
+                        if len(parts) >= 7:
+                            omega, phi, kappa = [float(p) for p in parts[4:7]]
+                            i = 7
+
+                        horizontal_accuracy = vertical_accuracy = None
+                        if len(parts) >= 9:
+                            horizontal_accuracy,vertical_accuracy = [float(p) for p in parts[7:9]]
+                            i = 9
+
+                        extras = " ".join(parts[i:])
+                        self.entries[filename] = GeoEntry(filename, x, y, z,
+                                                          omega, phi, kappa,
+                                                          horizontal_accuracy, vertical_accuracy,
+                                                          extras)
+                    else:
+                        log.ODM_WARNING("Malformed geo line: %s" % line)
+
+    def get_entry(self, filename):
+        return self.entries.get(filename)
+
+
+class GeoEntry:
+    def __init__(self, filename, x, y, z, omega=None, phi=None, kappa=None, horizontal_accuracy=None, vertical_accuracy=None, extras=None):
+        self.filename = filename
+        self.x = x
+        self.y = y
+        self.z = z
+        self.omega = omega
+        self.phi = phi
+        self.kappa = kappa
+        self.horizontal_accuracy = horizontal_accuracy
+        self.vertical_accuracy = vertical_accuracy
+        self.extras = extras
+
+    def __str__(self):
+        return "{} ({} {} {}) ({} {} {}) ({} {}) {}".format(self.filename,
+                        self.x, self.y, self.z,
+                        self.omega, self.phi, self.kappa,
+                        self.horizontal_accuracy, self.vertical_accuracy,
+                        self.extras).rstrip()
+
+    def position_string(self):
+        return "{} {} {}".format(self.x, self.y, self.z)
diff --git a/opendm/grass/addons/i.cutlinesmod.py b/opendm/grass/addons/i.cutlinesmod.py
index 4ae946fd..bd9f9d46 100755
--- a/opendm/grass/addons/i.cutlinesmod.py
+++ b/opendm/grass/addons/i.cutlinesmod.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 ############################################################################
 #
diff --git a/opendm/grass/compute_cutline.grass b/opendm/grass/compute_cutline.grass
index 299411c9..2a062ef2 100644
--- a/opendm/grass/compute_cutline.grass
+++ b/opendm/grass/compute_cutline.grass
@@ -23,7 +23,7 @@ i.cutlinesmod.py --overwrite input=ortho output=cutline number_lines=${number_li
 v.select ainput=cutline binput=crop_area output=result operator=within

 # Export
-v.out.ogr input=result output="result.gpkg" format=GPKG
+v.out.ogr input=result output="result.gpkg" format=GPKG --overwrite

 # Merge all geometries, select only the largest one (remove islands)
 ogr2ogr -f GPKG -overwrite -explodecollections -dialect SQLite -sql "SELECT ST_Union(geom) FROM result ORDER BY ST_AREA(geom) DESC LIMIT 1" cutline.gpkg
result.gpkg diff --git a/opendm/grass_engine.py b/opendm/grass_engine.py index cb2a6669..1f9b48e3 100644 --- a/opendm/grass_engine.py +++ b/opendm/grass_engine.py @@ -6,6 +6,7 @@ import sys import time from opendm import log from opendm import system +import locale from string import Template @@ -94,18 +95,19 @@ class GrassContext: log.ODM_INFO("Executing grass script from {}: {} --tmp-location {} --exec bash script.sh".format(self.get_cwd(), self.grass_binary, self.location)) env = os.environ.copy() env["GRASS_ADDON_PATH"] = env.get("GRASS_ADDON_PATH", "") + os.path.abspath(os.path.join("opendm/grass/addons")) - + env["LC_ALL"] = "C.UTF-8" + filename = os.path.join(self.get_cwd(), 'output.log') with open(filename, 'wb') as writer, open(filename, 'rb', 1) as reader: p = subprocess.Popen([self.grass_binary, '--tmp-location', self.location, '--exec', 'bash', 'script.sh'], cwd=self.get_cwd(), stdout=subprocess.PIPE, stderr=writer, env=env) while p.poll() is None: - sys.stdout.write(reader.read()) + sys.stdout.write(reader.read().decode('utf8')) time.sleep(0.5) # Read the remaining - sys.stdout.write(reader.read()) + sys.stdout.write(reader.read().decode('utf8')) out, err = p.communicate() out = out.decode('utf-8').strip() diff --git a/opendm/io.py b/opendm/io.py index 66ee5f98..bb23a1ed 100644 --- a/opendm/io.py +++ b/opendm/io.py @@ -2,19 +2,10 @@ import os import shutil, errno import json -def get_files_list(path_dir): - return os.listdir(path_dir) - - def absolute_path_file(path_file): return os.path.abspath(path_file) -def extract_file_from_path_file(path_file): - path, file = os.path.split(path_file) - return file - - def extract_path_from_file(file): path_file = os.path.abspath(os.path.dirname(file)) path, file = os.path.split(path_file) diff --git a/opendm/location.py b/opendm/location.py index c697964d..99277eb8 100644 --- a/opendm/location.py +++ b/opendm/location.py @@ -20,15 +20,16 @@ def extract_utm_coords(photos, images_path, output_coords_file): coords = [] reference_photo = None for photo in photos: - if photo.latitude is None or photo.longitude is None or photo.altitude is None: - log.ODM_ERROR("Failed parsing GPS position for %s, skipping" % photo.filename) + if photo.latitude is None or photo.longitude is None: + log.ODM_WARNING("GPS position not available for %s" % photo.filename) continue if utm_zone is None: utm_zone, hemisphere = get_utm_zone_and_hemisphere_from(photo.longitude, photo.latitude) try: - coord = convert_to_utm(photo.longitude, photo.latitude, photo.altitude, utm_zone, hemisphere) + alt = photo.altitude if photo.altitude is not None else 0 + coord = convert_to_utm(photo.longitude, photo.latitude, alt, utm_zone, hemisphere) except: raise Exception("Failed to convert GPS position to UTM for %s" % photo.filename) @@ -74,6 +75,8 @@ def proj_srs_convert(srs): proj4 = srs.to_proj4() res.ImportFromProj4(proj4) + res.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) + return res def transformer(from_srs, to_srs): diff --git a/opendm/mesh.py b/opendm/mesh.py index d08b8ed3..f8bb51da 100644 --- a/opendm/mesh.py +++ b/opendm/mesh.py @@ -4,7 +4,7 @@ from opendm.dem import commands from opendm import system from opendm import log from opendm import context -from scipy import signal, ndimage +from scipy import signal import numpy as np def create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05, depth=8, samples=1, maxVertexCount=100000, verbose=False, available_cores=None, method='gridded', smooth_dsm=True): @@ -26,7 +26,7 @@ def 
create_25dmesh(inPointCloud, outMesh, dsm_radius=0.07, dsm_resolution=0.05, inPointCloud, 'mesh_dsm', output_type='max', - radiuses=map(str, radius_steps), + radiuses=list(map(str, radius_steps)), gapfill=True, outdir=tmp_directory, resolution=dsm_resolution, diff --git a/opendm/orthophoto.py b/opendm/orthophoto.py index b4c635fd..0c193259 100644 --- a/opendm/orthophoto.py +++ b/opendm/orthophoto.py @@ -7,10 +7,11 @@ import math import numpy as np import rasterio import fiona -from scipy import ndimage +from edt import edt from rasterio.transform import Affine, rowcol from rasterio.mask import mask from opendm import io +from opendm.tiles.tiler import generate_orthophoto_tiles def get_orthophoto_vars(args): return { @@ -42,7 +43,7 @@ def generate_png(orthophoto_file): '--config GDAL_CACHEMAX %s%% ' % (orthophoto_file, orthophoto_png, get_max_memory())) -def post_orthophoto_steps(args, bounds_file_path, orthophoto_file): +def post_orthophoto_steps(args, bounds_file_path, orthophoto_file, orthophoto_tiles_dir): if args.crop > 0: Cropper.crop(bounds_file_path, orthophoto_file, get_orthophoto_vars(args), keep_original=not args.optimize_disk_space, warp_options=['-dstalpha']) @@ -52,6 +53,9 @@ def post_orthophoto_steps(args, bounds_file_path, orthophoto_file): if args.orthophoto_png: generate_png(orthophoto_file) + if args.tiles: + generate_orthophoto_tiles(orthophoto_file, orthophoto_tiles_dir, args.max_concurrency) + def compute_mask_raster(input_raster, vector_mask, output_raster, blend_distance=20, only_max_coords_feature=False): if not os.path.exists(input_raster): @@ -87,7 +91,7 @@ def compute_mask_raster(input_raster, vector_mask, output_raster, blend_distance if out_image.shape[0] >= 4: # alpha_band = rast.dataset_mask() alpha_band = out_image[-1] - dist_t = ndimage.distance_transform_edt(alpha_band) + dist_t = edt(alpha_band, black_border=True, parallel=0) dist_t[dist_t <= blend_distance] /= blend_distance dist_t[dist_t > blend_distance] = 1 np.multiply(alpha_band, dist_t, out=alpha_band, casting="unsafe") @@ -112,7 +116,7 @@ def feather_raster(input_raster, output_raster, blend_distance=20): if blend_distance > 0: if out_image.shape[0] >= 4: alpha_band = out_image[-1] - dist_t = ndimage.distance_transform_edt(alpha_band) + dist_t = edt(alpha_band, black_border=True, parallel=0) dist_t[dist_t <= blend_distance] /= blend_distance dist_t[dist_t > blend_distance] = 1 np.multiply(alpha_band, dist_t, out=alpha_band, casting="unsafe") diff --git a/opendm/osfm.py b/opendm/osfm.py index abc26f02..a6802c81 100644 --- a/opendm/osfm.py +++ b/opendm/osfm.py @@ -18,8 +18,7 @@ class OSFMContext: self.opensfm_project_path = opensfm_project_path def run(self, command): - # Use Python 2.x by default, otherwise OpenSfM uses Python 3.x - system.run('/usr/bin/env python2 %s/bin/opensfm %s "%s"' % + system.run('/usr/bin/env python3 %s/bin/opensfm %s "%s"' % (context.opensfm_path, command, self.opensfm_project_path)) def is_reconstruction_done(self): @@ -104,39 +103,77 @@ class OSFMContext: use_bow = True # GPSDOP override if we have GPS accuracy information (such as RTK) - override_gps_dop = 'gps_accuracy_is_set' in args - for p in photos: - if p.get_gps_dop() is not None: - override_gps_dop = True - break + if 'gps_accuracy_is_set' in args: + log.ODM_INFO("Forcing GPS DOP to %s for all images" % args.gps_accuracy) - if override_gps_dop: + log.ODM_INFO("Writing exif overrides") + + exif_overrides = {} + for p in photos: if 'gps_accuracy_is_set' in args: - log.ODM_INFO("Forcing GPS DOP to %s for all images" 
% args.gps_accuracy) + dop = args.gps_accuracy + elif p.get_gps_dop() is not None: + dop = p.get_gps_dop() else: - log.ODM_INFO("Looks like we have RTK accuracy info for some photos. Good! We'll use it.") + dop = args.gps_accuracy # default value - exif_overrides = {} - for p in photos: - dop = args.gps_accuracy if 'gps_accuracy_is_set' in args else p.get_gps_dop() - if dop is not None and p.latitude is not None and p.longitude is not None: - exif_overrides[p.filename] = { - 'gps': { - 'latitude': p.latitude, - 'longitude': p.longitude, - 'altitude': p.altitude if p.altitude is not None else 0, - 'dop': dop, - } + if p.latitude is not None and p.longitude is not None: + exif_overrides[p.filename] = { + 'gps': { + 'latitude': p.latitude, + 'longitude': p.longitude, + 'altitude': p.altitude if p.altitude is not None else 0, + 'dop': dop, } + } - with open(os.path.join(self.opensfm_project_path, "exif_overrides.json"), 'w') as f: - f.write(json.dumps(exif_overrides)) + with open(os.path.join(self.opensfm_project_path, "exif_overrides.json"), 'w') as f: + f.write(json.dumps(exif_overrides)) + + # Check image masks + masks = [] + for p in photos: + if p.mask is not None: + masks.append((p.filename, os.path.join(images_path, p.mask))) + + if masks: + log.ODM_INFO("Found %s image masks" % len(masks)) + with open(os.path.join(self.opensfm_project_path, "mask_list.txt"), 'w') as f: + for fname, mask in masks: + f.write("{} {}\n".format(fname, mask)) + + # Compute feature_process_size + feature_process_size = 2048 # default + if 'resize_to_is_set' in args: + # Legacy + log.ODM_WARNING("Legacy option --resize-to (this might be removed in a future version). Use --feature-quality instead.") + feature_process_size = int(args.resize_to) + else: + feature_quality_scale = { + 'ultra': 1, + 'high': 0.5, + 'medium': 0.25, + 'low': 0.125, + 'lowest': 0.0675, + } + # Find largest photo dimension + max_dim = 0 + for p in photos: + if p.width is None: + continue + max_dim = max(max_dim, max(p.width, p.height)) + + if max_dim > 0: + log.ODM_INFO("Maximum photo dimensions: %spx" % str(max_dim)) + feature_process_size = int(max_dim * feature_quality_scale[args.feature_quality]) + else: + log.ODM_WARNING("Cannot compute max image dimensions, going with defaults") # create config file for OpenSfM config = [ "use_exif_size: no", "flann_algorithm: KDTREE", # more stable, faster than KMEANS - "feature_process_size: %s" % args.resize_to, + "feature_process_size: %s" % feature_process_size, "feature_min_frames: %s" % args.min_num_features, "processes: %s" % args.max_concurrency, "matching_gps_neighbors: %s" % matcher_neighbors, @@ -317,7 +354,7 @@ def get_submodel_argv(args, submodels_path = None, submodel_name = None): :return the same as argv, but removing references to --split, setting/replacing --project-path and name removing --rerun-from, --rerun, --rerun-all, --sm-cluster - removing --pc-las, --pc-csv, --pc-ept flags (processing these is wasteful) + removing --pc-las, --pc-csv, --pc-ept, --tiles flags (processing these is wasteful) adding --orthophoto-cutline adding --dem-euclidean-map adding --skip-3dmodel (split-merge does not support 3D model merging) @@ -326,7 +363,7 @@ def get_submodel_argv(args, submodels_path = None, submodel_name = None): reading the contents of --cameras """ assure_always = ['orthophoto_cutline', 'dem_euclidean_map', 'skip_3dmodel'] - remove_always = ['split', 'split_overlap', 'rerun_from', 'rerun', 'gcp', 'end_with', 'sm_cluster', 'rerun_all', 'pc_csv', 'pc_las', 'pc_ept'] + remove_always 
= ['split', 'split_overlap', 'rerun_from', 'rerun', 'gcp', 'end_with', 'sm_cluster', 'rerun_all', 'pc_csv', 'pc_las', 'pc_ept', 'tiles'] read_json_always = ['cameras'] argv = sys.argv diff --git a/opendm/photo.py b/opendm/photo.py index 6831c301..16a46fed 100644 --- a/opendm/photo.py +++ b/opendm/photo.py @@ -1,6 +1,6 @@ -import io import logging import re +import os import exifread import numpy as np @@ -8,19 +8,21 @@ from six import string_types from datetime import datetime, timedelta import pytz -import log -import system +from opendm import io +from opendm import log +from opendm import system import xmltodict as x2d from opendm import get_image_size from xml.parsers.expat import ExpatError class ODM_Photo: - """ ODMPhoto - a class for ODMPhotos - """ + """ODMPhoto - a class for ODMPhotos""" def __init__(self, path_file): + self.filename = os.path.basename(path_file) + self.mask = None + # Standard tags (virtually all photos have these) - self.filename = io.extract_file_from_path_file(path_file) self.width = None self.height = None self.camera_make = '' @@ -76,6 +78,19 @@ class ODM_Photo: self.filename, self.camera_make, self.camera_model, self.width, self.height, self.latitude, self.longitude, self.altitude, self.band_name, self.band_index) + def set_mask(self, mask): + self.mask = mask + + def update_with_geo_entry(self, geo_entry): + self.latitude = geo_entry.y + self.longitude = geo_entry.x + self.altitude = geo_entry.z + self.dls_yaw = geo_entry.omega + self.dls_pitch = geo_entry.phi + self.dls_roll = geo_entry.kappa + self.gps_xy_stddev = geo_entry.horizontal_accuracy + self.gps_z_stddev = geo_entry.vertical_accuracy + def parse_exif_values(self, _path_file): # Disable exifread log logging.getLogger('exifread').setLevel(logging.CRITICAL) @@ -85,13 +100,13 @@ class ODM_Photo: try: if 'Image Make' in tags: try: - self.camera_make = tags['Image Make'].values.encode('utf8') + self.camera_make = tags['Image Make'].values except UnicodeDecodeError: log.ODM_WARNING("EXIF Image Make might be corrupted") self.camera_make = "unknown" if 'Image Model' in tags: try: - self.camera_model = tags['Image Model'].values.encode('utf8') + self.camera_model = tags['Image Model'].values except UnicodeDecodeError: log.ODM_WARNING("EXIF Image Model might be corrupted") self.camera_model = "unknown" @@ -129,7 +144,7 @@ class ODM_Photo: if 'Image BitsPerSample' in tags: self.bits_per_sample = self.int_value(tags['Image BitsPerSample']) if 'EXIF DateTimeOriginal' in tags: - str_time = tags['EXIF DateTimeOriginal'].values.encode('utf8') + str_time = tags['EXIF DateTimeOriginal'].values utc_time = datetime.strptime(str_time, "%Y:%m:%d %H:%M:%S") subsec = 0 if 'EXIF SubSecTime' in tags: @@ -146,7 +161,7 @@ class ODM_Photo: epoch = timezone.localize(datetime.utcfromtimestamp(0)) self.utc_time = (timezone.localize(utc_time) - epoch).total_seconds() * 1000.0 except Exception as e: - log.ODM_WARNING("Cannot read extended EXIF tags for %s: %s" % (_path_file, e.message)) + log.ODM_WARNING("Cannot read extended EXIF tags for %s: %s" % (_path_file, str(e))) # Extract XMP tags @@ -262,15 +277,15 @@ class ODM_Photo: # From https://github.com/mapillary/OpenSfM/blob/master/opensfm/exif.py def get_xmp(self, file): - img_str = str(file.read()) - xmp_start = img_str.find(' 100: @@ -275,7 +275,7 @@ def fast_merge_ply(input_point_cloud_files, output_file): with open(ipc, "rb") as fin: # Skip header line = fin.readline() - while line.strip().lower() != "end_header": + while line.strip().lower() != b"end_header": line = 
fin.readline() i += 1 diff --git a/opendm/progress.py b/opendm/progress.py index 3114b4f9..264db82d 100644 --- a/opendm/progress.py +++ b/opendm/progress.py @@ -32,8 +32,9 @@ class Broadcaster: global_progress = 100 try: - sock.sendto("PGUP/{}/{}/{}".format(self.pid, self.project_name, float(global_progress)).encode('utf-8'), (UDP_IP, self.port)) - except: - log.ODM_WARNING("Failed to broadcast progress update on UDP port %s" % str(self.port)) + sock.sendto("PGUP/{}/{}/{}".format(self.pid, self.project_name, float(global_progress)).encode('utf8'), + (UDP_IP, self.port)) + except Exception as e: + log.ODM_WARNING("Failed to broadcast progress update on UDP port %s (%s)" % (str(self.port), str(e))) progressbc = Broadcaster(PROGRESS_BROADCAST_PORT) \ No newline at end of file diff --git a/opendm/pseudogeo.py b/opendm/pseudogeo.py index 18d41ee8..9e6fcb20 100644 --- a/opendm/pseudogeo.py +++ b/opendm/pseudogeo.py @@ -20,6 +20,7 @@ def add_pseudo_georeferencing(geotiff): dst_ds = gdal.Open(geotiff, GA_Update) srs = osr.SpatialReference() + srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) srs.ImportFromProj4(get_pseudogeo_utm()) dst_ds.SetProjection( srs.ExportToWkt() ) dst_ds.SetGeoTransform( [ 0.0, get_pseudogeo_scale(), 0.0, 0.0, 0.0, -get_pseudogeo_scale() ] ) diff --git a/opendm/remote.py b/opendm/remote.py index 931f5cf7..d520f653 100644 --- a/opendm/remote.py +++ b/opendm/remote.py @@ -12,7 +12,7 @@ from opendm import config from pyodm import Node, exceptions from pyodm.utils import AtomicCounter from pyodm.types import TaskStatus -from osfm import OSFMContext, get_submodel_args_dict, get_submodel_argv +from opendm.osfm import OSFMContext, get_submodel_args_dict, get_submodel_argv from pipes import quote try: @@ -474,7 +474,7 @@ class ToolchainTask(Task): argv = get_submodel_argv(config.config(), submodels_path, submodel_name) # Re-run the ODM toolchain on the submodel - system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy()) + system.run(" ".join(map(quote, map(str, argv))), env_vars=os.environ.copy()) # This will only get executed if the command above succeeds self.touch(completed_file) diff --git a/opendm/system.py b/opendm/system.py index f5512ae7..b183b062 100644 --- a/opendm/system.py +++ b/opendm/system.py @@ -53,7 +53,7 @@ def sighandler(signum, frame): signal.signal(signal.SIGINT, sighandler) signal.signal(signal.SIGTERM, sighandler) -def run(cmd, env_paths=[context.superbuild_bin_path], env_vars={}): +def run(cmd, env_paths=[context.superbuild_bin_path], env_vars={}, packages_paths=context.python_packages_paths): """Run a system command""" global running_subprocesses @@ -63,6 +63,9 @@ def run(cmd, env_paths=[context.superbuild_bin_path], env_vars={}): if len(env_paths) > 0: env["PATH"] = env["PATH"] + ":" + ":".join(env_paths) + if len(packages_paths) > 0: + env["PYTHONPATH"] = env.get("PYTHONPATH", "") + ":" + ":".join(packages_paths) + for k in env_vars: env[k] = str(env_vars[k]) diff --git a/opendm/tiles/color_relief.txt b/opendm/tiles/color_relief.txt new file mode 100644 index 00000000..86d03ae5 --- /dev/null +++ b/opendm/tiles/color_relief.txt @@ -0,0 +1,12 @@ +0% 255 0 255 +10% 128 0 255 +20% 0 0 255 +30% 0 128 255 +40% 0 255 255 +50% 0 255 128 +60% 0 255 0 +70% 128 255 0 +80% 255 255 0 +90% 255 128 0 +100% 255 0 0 +nv 0 0 0 0 \ No newline at end of file diff --git a/opendm/tiles/gdal2tiles.py b/opendm/tiles/gdal2tiles.py new file mode 100644 index 00000000..7ffe7f9a --- /dev/null +++ b/opendm/tiles/gdal2tiles.py @@ -0,0 +1,2949 @@ +#!/usr/bin/env 
python +# -*- coding: utf-8 -*- +# ****************************************************************************** +# $Id$ +# +# Project: Google Summer of Code 2007, 2008 (http://code.google.com/soc/) +# Support: BRGM (http://www.brgm.fr) +# Purpose: Convert a raster into TMS (Tile Map Service) tiles in a directory. +# - generate Google Earth metadata (KML SuperOverlay) +# - generate simple HTML viewer based on Google Maps and OpenLayers +# - support of global tiles (Spherical Mercator) for compatibility +# with interactive web maps a la Google Maps +# Author: Klokan Petr Pridal, klokan at klokan dot cz +# Web: http://www.klokan.cz/projects/gdal2tiles/ +# GUI: http://www.maptiler.org/ +# +############################################################################### +# Copyright (c) 2008, Klokan Petr Pridal +# Copyright (c) 2010-2013, Even Rouault +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. +# ****************************************************************************** + +from __future__ import print_function, division + +import math +from multiprocessing import Pipe, Pool, Process, Manager +import os +import tempfile +import shutil +import sys +from uuid import uuid4 +from xml.etree import ElementTree + +from osgeo import gdal +from osgeo import osr + +try: + from PIL import Image + import numpy + import osgeo.gdal_array as gdalarray +except Exception: + # 'antialias' resampling is not available + pass + +__version__ = "$Id$" + +resampling_list = ('average', 'near', 'bilinear', 'cubic', 'cubicspline', 'lanczos', 'antialias') +profile_list = ('mercator', 'geodetic', 'raster') +webviewer_list = ('all', 'google', 'openlayers', 'leaflet', 'none') + +# ============================================================================= +# ============================================================================= +# ============================================================================= + +__doc__globalmaptiles = """ +globalmaptiles.py + +Global Map Tiles as defined in Tile Map Service (TMS) Profiles +============================================================== + +Functions necessary for generation of global tiles used on the web. 
+It contains classes implementing coordinate conversions for: + + - GlobalMercator (based on EPSG:3857) + for Google Maps, Yahoo Maps, Bing Maps compatible tiles + - GlobalGeodetic (based on EPSG:4326) + for OpenLayers Base Map and Google Earth compatible tiles + +More info at: + +http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification +http://wiki.osgeo.org/wiki/WMS_Tiling_Client_Recommendation +http://msdn.microsoft.com/en-us/library/bb259689.aspx +http://code.google.com/apis/maps/documentation/overlays.html#Google_Maps_Coordinates + +Created by Klokan Petr Pridal on 2008-07-03. +Google Summer of Code 2008, project GDAL2Tiles for OSGEO. + +In case you use this class in your product, translate it to another language +or find it useful for your project please let me know. +My email: klokan at klokan dot cz. +I would like to know where it was used. + +Class is available under the open-source GDAL license (www.gdal.org). +""" + +MAXZOOMLEVEL = 32 + + +class GlobalMercator(object): + r""" + TMS Global Mercator Profile + --------------------------- + + Functions necessary for generation of tiles in Spherical Mercator projection, + EPSG:3857. + + Such tiles are compatible with Google Maps, Bing Maps, Yahoo Maps, + UK Ordnance Survey OpenSpace API, ... + and you can overlay them on top of base maps of those web mapping applications. + + Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left). + + What coordinate conversions do we need for TMS Global Mercator tiles:: + + LatLon <-> Meters <-> Pixels <-> Tile + + WGS84 coordinates Spherical Mercator Pixels in pyramid Tiles in pyramid + lat/lon XY in meters XY pixels Z zoom XYZ from TMS + EPSG:4326 EPSG:387 + .----. --------- -- TMS + / \ <-> | | <-> /----/ <-> Google + \ / | | /--------/ QuadTree + ----- --------- /------------/ + KML, public WebMapService Web Clients TileMapService + + What is the coordinate extent of Earth in EPSG:3857? + + [-20037508.342789244, -20037508.342789244, 20037508.342789244, 20037508.342789244] + Constant 20037508.342789244 comes from the circumference of the Earth in meters, + which is 40 thousand kilometers, the coordinate origin is in the middle of extent. + In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0 + $ echo 180 85 | gdaltransform -s_srs EPSG:4326 -t_srs EPSG:3857 + Polar areas with abs(latitude) bigger then 85.05112878 are clipped off. + + What are zoom level constants (pixels/meter) for pyramid with EPSG:3857? + + whole region is on top of pyramid (zoom=0) covered by 256x256 pixels tile, + every lower zoom level resolution is always divided by two + initialResolution = 20037508.342789244 * 2 / 256 = 156543.03392804062 + + What is the difference between TMS and Google Maps/QuadTree tile name convention? + + The tile raster itself is the same (equal extent, projection, pixel size), + there is just different identification of the same raster tile. + Tiles in TMS are counted from [0,0] in the bottom-left corner, id is XYZ. + Google placed the origin [0,0] to the top-left corner, reference is XYZ. + Microsoft is referencing tiles by a QuadTree name, defined on the website: + http://msdn2.microsoft.com/en-us/library/bb259689.aspx + + The lat/lon coordinates are using WGS84 datum, yes? + + Yes, all lat/lon we are mentioning should use WGS84 Geodetic Datum. + Well, the web clients like Google Maps are projecting those coordinates by + Spherical Mercator, so in fact lat/lon coordinates on sphere are treated as if + the were on the WGS84 ellipsoid. 
+
+      From MSDN documentation:
+        To simplify the calculations, we use the spherical form of projection, not
+        the ellipsoidal form. Since the projection is used only for map display,
+        and not for displaying numeric coordinates, we don't need the extra precision
+        of an ellipsoidal projection. The spherical projection causes approximately
+        0.33 percent scale distortion in the Y direction, which is not visually
+        noticeable.
+
+    How do I create a raster in EPSG:3857 and convert coordinates with PROJ.4?
+
+      You can use standard GIS tools like gdalwarp, cs2cs or gdaltransform.
+      All of these tools support -t_srs 'epsg:3857'.
+
+      For other GIS programs check the exact definition of the projection:
+      More info at http://spatialreference.org/ref/user/google-projection/
+      The same projection is designated as EPSG:3857. WKT definition is in the
+      official EPSG database.
+
+      Proj4 Text:
+        +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0
+        +k=1.0 +units=m +nadgrids=@null +no_defs
+
+      Human readable WKT format of EPSG:3857:
+         PROJCS["Google Maps Global Mercator",
+             GEOGCS["WGS 84",
+                 DATUM["WGS_1984",
+                     SPHEROID["WGS 84",6378137,298.257223563,
+                         AUTHORITY["EPSG","7030"]],
+                     AUTHORITY["EPSG","6326"]],
+                 PRIMEM["Greenwich",0],
+                 UNIT["degree",0.0174532925199433],
+                 AUTHORITY["EPSG","4326"]],
+             PROJECTION["Mercator_1SP"],
+             PARAMETER["central_meridian",0],
+             PARAMETER["scale_factor",1],
+             PARAMETER["false_easting",0],
+             PARAMETER["false_northing",0],
+             UNIT["metre",1,
+                 AUTHORITY["EPSG","9001"]]]
+    """
+
+    def __init__(self, tileSize=256):
+        "Initialize the TMS Global Mercator pyramid"
+        self.tileSize = tileSize
+        self.initialResolution = 2 * math.pi * 6378137 / self.tileSize
+        # 156543.03392804062 for tileSize 256 pixels
+        self.originShift = 2 * math.pi * 6378137 / 2.0
+        # 20037508.342789244
+
+    def LatLonToMeters(self, lat, lon):
+        "Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:3857"
+
+        mx = lon * self.originShift / 180.0
+        my = math.log(math.tan((90 + lat) * math.pi / 360.0)) / (math.pi / 180.0)
+
+        my = my * self.originShift / 180.0
+        return mx, my
+
+    def MetersToLatLon(self, mx, my):
+        "Converts XY point from Spherical Mercator EPSG:3857 to lat/lon in WGS84 Datum"
+
+        lon = (mx / self.originShift) * 180.0
+        lat = (my / self.originShift) * 180.0
+
+        lat = 180 / math.pi * (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0)
+        return lat, lon
+
+    def PixelsToMeters(self, px, py, zoom):
+        "Converts pixel coordinates in given zoom level of pyramid to EPSG:3857"
+
+        res = self.Resolution(zoom)
+        mx = px * res - self.originShift
+        my = py * res - self.originShift
+        return mx, my
+
+    def MetersToPixels(self, mx, my, zoom):
+        "Converts EPSG:3857 to pyramid pixel coordinates in given zoom level"
+
+        res = self.Resolution(zoom)
+        px = (mx + self.originShift) / res
+        py = (my + self.originShift) / res
+        return px, py
+
+    def PixelsToTile(self, px, py):
+        "Returns a tile covering region in given pixel coordinates"
+
+        tx = int(math.ceil(px / float(self.tileSize)) - 1)
+        ty = int(math.ceil(py / float(self.tileSize)) - 1)
+        return tx, ty
+
+    def PixelsToRaster(self, px, py, zoom):
+        "Move the origin of pixel coordinates to top-left corner"
+
+        mapSize = self.tileSize << zoom
+        return px, mapSize - py
+
+    def MetersToTile(self, mx, my, zoom):
+        "Returns tile for given mercator coordinates"
+
+        px, py = self.MetersToPixels(mx, my, zoom)
+        return self.PixelsToTile(px, py)
+
+    def TileBounds(self, tx, ty, zoom):
+        "Returns bounds of the given tile in EPSG:3857 coordinates"
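+        # For illustration (hand-computed example): TileBounds(1, 0, 1) returns
+        # the south-east quadrant of the world,
+        # (0.0, -20037508.342789244, 20037508.342789244, 0.0),
+        # because zoom 1 splits the EPSG:3857 extent into 2x2 tiles counted
+        # from the bottom-left corner.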
+
+        minx, miny = self.PixelsToMeters(tx*self.tileSize, ty*self.tileSize, zoom)
+        maxx, maxy = self.PixelsToMeters((tx+1)*self.tileSize, (ty+1)*self.tileSize, zoom)
+        return (minx, miny, maxx, maxy)
+
+    def TileLatLonBounds(self, tx, ty, zoom):
+        "Returns bounds of the given tile in latitude/longitude using WGS84 datum"
+
+        bounds = self.TileBounds(tx, ty, zoom)
+        minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1])
+        maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3])
+
+        return (minLat, minLon, maxLat, maxLon)
+
+    def Resolution(self, zoom):
+        "Resolution (meters/pixel) for given zoom level (measured at Equator)"
+
+        # return (2 * math.pi * 6378137) / (self.tileSize * 2**zoom)
+        return self.initialResolution / (2**zoom)
+
+    def ZoomForPixelSize(self, pixelSize):
+        "Maximal scaledown zoom of the pyramid closest to the pixelSize."
+
+        for i in range(MAXZOOMLEVEL):
+            if pixelSize > self.Resolution(i):
+                if i != 0:
+                    return i-1
+                else:
+                    return 0    # We don't want to scale up
+
+    def GoogleTile(self, tx, ty, zoom):
+        "Converts TMS tile coordinates to Google Tile coordinates"
+
+        # coordinate origin is moved from bottom-left to top-left corner of the extent
+        return tx, (2**zoom - 1) - ty
+
+    def QuadTree(self, tx, ty, zoom):
+        "Converts TMS tile coordinates to Microsoft QuadTree"
+
+        quadKey = ""
+        ty = (2**zoom - 1) - ty
+        for i in range(zoom, 0, -1):
+            digit = 0
+            mask = 1 << (i-1)
+            if (tx & mask) != 0:
+                digit += 1
+            if (ty & mask) != 0:
+                digit += 2
+            quadKey += str(digit)
+
+        return quadKey
+
+
+class GlobalGeodetic(object):
+    r"""
+    TMS Global Geodetic Profile
+    ---------------------------
+
+    Functions necessary for generation of global tiles in Plate Carre projection,
+    EPSG:4326, "unprojected profile".
+
+    Such tiles are compatible with Google Earth (as any other EPSG:4326 rasters)
+    and you can overlay the tiles on top of OpenLayers base map.
+
+    Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left).
+
+    What coordinate conversions do we need for TMS Global Geodetic tiles?
+
+      Global Geodetic tiles are using geodetic coordinates (latitude,longitude)
+      directly as planar coordinates XY (it is also called Unprojected or Plate
+      Carre). We need only scaling to pixel pyramid and cutting to tiles.
+      The pyramid has two tiles at its top level, so it is a rectangle rather
+      than a square. Area [-180,-90,180,90] is scaled to 512x256 pixels.
+      TMS has coordinate origin (for pixels and tiles) in bottom-left corner.
+      Rasters are in EPSG:4326 and therefore are compatible with Google Earth.
+
+         LatLon      <->      Pixels      <->       Tiles
+
+     WGS84 coordinates   Pixels in pyramid   Tiles in pyramid
+         lat/lon         XY pixels Z zoom      XYZ from TMS
+        EPSG:4326
+         .----.                 ----
+        /      \     <->    /--------/    <->         TMS
+        \      /         /--------------/
+         -----        /--------------------/
+       WMS, KML    Web Clients, Google Earth    TileMapService
+    """
+
+    def __init__(self, tmscompatible, tileSize=256):
+        self.tileSize = tileSize
+        if tmscompatible is not None:
+            # Defaults the resolution factor to 0.703125 (2 tiles @ level 0)
+            # Adheres to the OSGeo TMS spec:
+            # http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification#global-geodetic
+            self.resFact = 180.0 / self.tileSize
+        else:
+            # Defaults the resolution factor to 1.40625 (1 tile @ level 0)
+            # Adheres to the OpenLayers, MapProxy, etc. default resolution for WMTS
+            self.resFact = 360.0 / self.tileSize
+
+    def LonLatToPixels(self, lon, lat, zoom):
+        "Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid"
+
+        res = self.resFact / 2**zoom
+        px = (180 + lon) / res
+        py = (90 + lat) / res
+        return px, py
+
+    def PixelsToTile(self, px, py):
+        "Returns coordinates of the tile covering region in pixel coordinates"
+
+        tx = int(math.ceil(px / float(self.tileSize)) - 1)
+        ty = int(math.ceil(py / float(self.tileSize)) - 1)
+        return tx, ty
+
+    def LonLatToTile(self, lon, lat, zoom):
+        "Returns the tile for zoom which covers given lon/lat coordinates"
+
+        px, py = self.LonLatToPixels(lon, lat, zoom)
+        return self.PixelsToTile(px, py)
+
+    def Resolution(self, zoom):
+        "Resolution (arc/pixel) for given zoom level (measured at Equator)"
+
+        return self.resFact / 2**zoom
+
+    def ZoomForPixelSize(self, pixelSize):
+        "Maximal scaledown zoom of the pyramid closest to the pixelSize."
+
+        for i in range(MAXZOOMLEVEL):
+            if pixelSize > self.Resolution(i):
+                if i != 0:
+                    return i-1
+                else:
+                    return 0    # We don't want to scale up
+
+    def TileBounds(self, tx, ty, zoom):
+        "Returns bounds of the given tile"
+        res = self.resFact / 2**zoom
+        return (
+            tx*self.tileSize*res - 180,
+            ty*self.tileSize*res - 90,
+            (tx+1)*self.tileSize*res - 180,
+            (ty+1)*self.tileSize*res - 90
+        )
+
+    def TileLatLonBounds(self, tx, ty, zoom):
+        "Returns bounds of the given tile in the SWNE form"
+        b = self.TileBounds(tx, ty, zoom)
+        return (b[1], b[0], b[3], b[2])
+
+
+class Zoomify(object):
+    """
+    Tiles compatible with the Zoomify viewer
+    ----------------------------------------
+    """
+
+    def __init__(self, width, height, tilesize=256, tileformat='jpg'):
+        """Initialization of the Zoomify tile tree"""
+
+        self.tilesize = tilesize
+        self.tileformat = tileformat
+        imagesize = (width, height)
+        tiles = (math.ceil(width / tilesize), math.ceil(height / tilesize))
+
+        # Size (in tiles) for each tier of pyramid.
+        self.tierSizeInTiles = []
+        self.tierSizeInTiles.append(tiles)
+
+        # Image size in pixels for each pyramid tier
+        self.tierImageSize = []
+        self.tierImageSize.append(imagesize)
+
+        while (imagesize[0] > tilesize or imagesize[1] > tilesize):
+            imagesize = (math.floor(imagesize[0] / 2), math.floor(imagesize[1] / 2))
+            tiles = (math.ceil(imagesize[0] / tilesize), math.ceil(imagesize[1] / tilesize))
+            self.tierSizeInTiles.append(tiles)
+            self.tierImageSize.append(imagesize)
+
+        self.tierSizeInTiles.reverse()
+        self.tierImageSize.reverse()
+
+        # Depth of the Zoomify pyramid, number of tiers (zoom levels)
+        self.numberOfTiers = len(self.tierSizeInTiles)
+
+        # Number of tiles up to the given tier of pyramid.
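+        # For illustration (hand-computed example): a 1024x1024 source with
+        # 256 px tiles yields tierSizeInTiles == [(1, 1), (2, 2), (4, 4)], so
+        # the cumulative count below becomes [0, 1, 5, 21] and a tier-2 tile
+        # (x, y) gets index x + 4*y + 5 within the TileGroup numbering.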
+        self.tileCountUpToTier = [0]
+        for i in range(1, self.numberOfTiers+1):
+            self.tileCountUpToTier.append(
+                self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] +
+                self.tileCountUpToTier[i-1]
+            )
+
+    def tilefilename(self, x, y, z):
+        """Returns filename for tile with given coordinates"""
+
+        tileIndex = x + y * self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z]
+        return os.path.join("TileGroup%.0f" % math.floor(tileIndex / 256),
+                            "%s-%s-%s.%s" % (z, x, y, self.tileformat))
+
+
+class GDALError(Exception):
+    pass
+
+
+# PID of the main process; worker processes use it to signal fatal errors
+# back to the parent (see exit_with_error below).
+main_pid = os.getpid()
+
+
+def exit_with_error(message, details=""):
+    # Message printing and exit code kept from the way it worked using the OptionParser (in case
+    # someone parses the error output)
+    sys.stderr.write("Usage: gdal2tiles.py [options] input_file [output]\n\n")
+    sys.stderr.write("gdal2tiles.py: error: %s\n" % message)
+    if details:
+        sys.stderr.write("\n\n%s\n" % details)
+
+    import signal
+    os.kill(main_pid, signal.SIGINT)
+    sys.exit(2)
+
+
+def generate_kml(tx, ty, tz, tileext, tilesize, tileswne, options, children=None, **args):
+    """
+    Template for the KML. Returns filled string.
+    """
+    if not children:
+        children = []
+
+    args['tx'], args['ty'], args['tz'] = tx, ty, tz
+    args['tileformat'] = tileext
+    if 'tilesize' not in args:
+        args['tilesize'] = tilesize
+
+    if 'minlodpixels' not in args:
+        args['minlodpixels'] = int(args['tilesize'] / 2)
+    if 'maxlodpixels' not in args:
+        args['maxlodpixels'] = int(args['tilesize'] * 8)
+    if children == []:
+        args['maxlodpixels'] = -1
+
+    if tx is None:
+        tilekml = False
+        args['title'] = options.title
+    else:
+        tilekml = True
+        args['title'] = "%d/%d/%d.kml" % (tz, tx, ty)
+        args['south'], args['west'], args['north'], args['east'] = tileswne(tx, ty, tz)
+
+    if tx == 0:
+        args['drawOrder'] = 2 * tz + 1
+    elif tx is not None:
+        args['drawOrder'] = 2 * tz
+    else:
+        args['drawOrder'] = 0
+
+    url = options.url
+    if not url:
+        if tilekml:
+            url = "../../"
+        else:
+            url = ""
+
+    s = """<?xml version="1.0" encoding="utf-8"?>
+<kml xmlns="http://www.opengis.net/kml/2.2">
+  <Document>
+    <name>%(title)s</name>
+    <description></description>
+    <Style>
+      <ListStyle id="hideChildren">
+        <listItemType>checkHideChildren</listItemType>
+      </ListStyle>
+    </Style>""" % args
+    if tilekml:
+        s += """
+    <Region>
+      <LatLonAltBox>
+        <north>%(north).14f</north>
+        <south>%(south).14f</south>
+        <east>%(east).14f</east>
+        <west>%(west).14f</west>
+      </LatLonAltBox>
+      <Lod>
+        <minLodPixels>%(minlodpixels)d</minLodPixels>
+        <maxLodPixels>%(maxlodpixels)d</maxLodPixels>
+      </Lod>
+    </Region>
+    <GroundOverlay>
+      <drawOrder>%(drawOrder)d</drawOrder>
+      <Icon>
+        <href>%(ty)d.%(tileformat)s</href>
+      </Icon>
+      <LatLonBox>
+        <north>%(north).14f</north>
+        <south>%(south).14f</south>
+        <east>%(east).14f</east>
+        <west>%(west).14f</west>
+      </LatLonBox>
+    </GroundOverlay>
+""" % args
+
+    for cx, cy, cz in children:
+        csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz)
+        s += """
+    <NetworkLink>
+      <name>%d/%d/%d.%s</name>
+      <Region>
+        <LatLonAltBox>
+          <north>%.14f</north>
+          <south>%.14f</south>
+          <east>%.14f</east>
+          <west>%.14f</west>
+        </LatLonAltBox>
+        <Lod>
+          <minLodPixels>%d</minLodPixels>
+          <maxLodPixels>-1</maxLodPixels>
+        </Lod>
+      </Region>
+      <Link>
+        <href>%s%d/%d/%d.kml</href>
+        <viewRefreshMode>onRegion</viewRefreshMode>
+        <viewFormat/>
+      </Link>
+    </NetworkLink>
+    """ % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest,
+           args['minlodpixels'], url, cz, cx, cy)
+
+    s += """      </Document>
+</kml>
+    """
+    return s
+
+
+def scale_query_to_tile(dsquery, dstile, tiledriver, options, tilefilename=''):
+    """Scales down query dataset to the tile dataset"""
+
+    querysize = dsquery.RasterXSize
+    tilesize = dstile.RasterXSize
+    tilebands = dstile.RasterCount
+
+    if options.resampling == 'average':
+
+        # Function: gdal.RegenerateOverview()
+        for i in range(1, tilebands+1):
+            # Black border around NODATA
+            res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i),
+                                          'average')
+            if res != 0:
+                exit_with_error("RegenerateOverview() failed on %s, error %d" % (
+                    tilefilename, res))
+
+    elif options.resampling == 'antialias':
+
+        # Scaling by PIL (Python Imaging Library) - improved Lanczos
+        array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8)
+        for i in range(tilebands):
+            array[:, :, i] = gdalarray.BandReadAsArray(dsquery.GetRasterBand(i+1),
+                                                       0, 0,
querysize, querysize) + im = Image.fromarray(array, 'RGBA') # Always four bands + im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS) + if os.path.exists(tilefilename): + im0 = Image.open(tilefilename) + im1 = Image.composite(im1, im0, im1) + im1.save(tilefilename, tiledriver) + + else: + + if options.resampling == 'near': + gdal_resampling = gdal.GRA_NearestNeighbour + + elif options.resampling == 'bilinear': + gdal_resampling = gdal.GRA_Bilinear + + elif options.resampling == 'cubic': + gdal_resampling = gdal.GRA_Cubic + + elif options.resampling == 'cubicspline': + gdal_resampling = gdal.GRA_CubicSpline + + elif options.resampling == 'lanczos': + gdal_resampling = gdal.GRA_Lanczos + + # Other algorithms are implemented by gdal.ReprojectImage(). + dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0, + tilesize / float(querysize))) + dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0)) + + res = gdal.ReprojectImage(dsquery, dstile, None, None, gdal_resampling) + if res != 0: + exit_with_error("ReprojectImage() failed on %s, error %d" % (tilefilename, res)) + + +def setup_no_data_values(input_dataset, options): + """ + Extract the NODATA values from the dataset or use the passed arguments as override if any + """ + in_nodata = [] + if options.srcnodata: + nds = list(map(float, options.srcnodata.split(','))) + if len(nds) < input_dataset.RasterCount: + in_nodata = (nds * input_dataset.RasterCount)[:input_dataset.RasterCount] + else: + in_nodata = nds + else: + for i in range(1, input_dataset.RasterCount+1): + raster_no_data = input_dataset.GetRasterBand(i).GetNoDataValue() + if raster_no_data is not None: + in_nodata.append(raster_no_data) + + if options.verbose: + print("NODATA: %s" % in_nodata) + + return in_nodata + + +def setup_input_srs(input_dataset, options): + """ + Determines and returns the Input Spatial Reference System (SRS) as an osr object and as a + WKT representation + + Uses in priority the one passed in the command line arguments. If None, tries to extract them + from the input dataset + """ + + input_srs = None + input_srs_wkt = None + + if options.s_srs: + input_srs = osr.SpatialReference() + input_srs.SetFromUserInput(options.s_srs) + input_srs_wkt = input_srs.ExportToWkt() + else: + input_srs_wkt = input_dataset.GetProjection() + if not input_srs_wkt and input_dataset.GetGCPCount() != 0: + input_srs_wkt = input_dataset.GetGCPProjection() + if input_srs_wkt: + input_srs = osr.SpatialReference() + input_srs.ImportFromWkt(input_srs_wkt) + + return input_srs, input_srs_wkt + + +def setup_output_srs(input_srs, options): + """ + Setup the desired SRS (based on options) + """ + output_srs = osr.SpatialReference() + + if options.profile == 'mercator': + output_srs.ImportFromEPSG(3857) + elif options.profile == 'geodetic': + output_srs.ImportFromEPSG(4326) + else: + output_srs = input_srs + + return output_srs + + +def has_georeference(dataset): + return (dataset.GetGeoTransform() != (0.0, 1.0, 0.0, 0.0, 0.0, 1.0) or + dataset.GetGCPCount() != 0) + + +def reproject_dataset(from_dataset, from_srs, to_srs, options=None): + """ + Returns the input dataset in the expected "destination" SRS. 
+ If the dataset is already in the correct SRS, returns it unmodified + """ + if not from_srs or not to_srs: + raise GDALError("from and to SRS must be defined to reproject the dataset") + + if (from_srs.ExportToProj4() != to_srs.ExportToProj4()) or (from_dataset.GetGCPCount() != 0): + to_dataset = gdal.AutoCreateWarpedVRT(from_dataset, + from_srs.ExportToWkt(), to_srs.ExportToWkt()) + + if options and options.verbose: + print("Warping of the raster by AutoCreateWarpedVRT (result saved into 'tiles.vrt')") + to_dataset.GetDriver().CreateCopy("tiles.vrt", to_dataset) + + return to_dataset + else: + return from_dataset + + +def add_gdal_warp_options_to_string(vrt_string, warp_options): + if not warp_options: + return vrt_string + + vrt_root = ElementTree.fromstring(vrt_string) + options = vrt_root.find("GDALWarpOptions") + + if options is None: + return vrt_string + + for key, value in warp_options.items(): + tb = ElementTree.TreeBuilder() + tb.start("Option", {"name": key}) + tb.data(value) + tb.end("Option") + elem = tb.close() + options.insert(0, elem) + + return ElementTree.tostring(vrt_root).decode() + + +def update_no_data_values(warped_vrt_dataset, nodata_values, options=None): + """ + Takes an array of NODATA values and forces them on the WarpedVRT file dataset passed + """ + # TODO: gbataille - Seems that I forgot tests there + if nodata_values != []: + temp_file = gettempfilename('-gdal2tiles.vrt') + warped_vrt_dataset.GetDriver().CreateCopy(temp_file, warped_vrt_dataset) + with open(temp_file, 'r') as f: + vrt_string = f.read() + + vrt_string = add_gdal_warp_options_to_string( + vrt_string, {"INIT_DEST": "NO_DATA", "UNIFIED_SRC_NODATA": "YES"}) + +# TODO: gbataille - check the need for this replacement. Seems to work without +# # replace BandMapping tag for NODATA bands.... 
+# for i in range(len(nodata_values)): +# s = s.replace( +# '' % ((i+1), (i+1)), +# """ +# +# %i +# 0 +# %i +# 0 +# +# """ % ((i+1), (i+1), nodata_values[i], nodata_values[i])) + + # save the corrected VRT + with open(temp_file, 'w') as f: + f.write(vrt_string) + + corrected_dataset = gdal.Open(temp_file) + os.unlink(temp_file) + + # set NODATA_VALUE metadata + corrected_dataset.SetMetadataItem( + 'NODATA_VALUES', ' '.join([str(i) for i in nodata_values])) + + if options and options.verbose: + print("Modified warping result saved into 'tiles1.vrt'") + # TODO: gbataille - test replacing that with a gdal write of the dataset (more + # accurately what's used, even if should be the same + with open("tiles1.vrt", "w") as f: + f.write(vrt_string) + + return corrected_dataset + + +def add_alpha_band_to_string_vrt(vrt_string): + # TODO: gbataille - Old code speak of this being equivalent to gdalwarp -dstalpha + # To be checked + + vrt_root = ElementTree.fromstring(vrt_string) + + index = 0 + nb_bands = 0 + for subelem in list(vrt_root): + if subelem.tag == "VRTRasterBand": + nb_bands += 1 + color_node = subelem.find("./ColorInterp") + if color_node is not None and color_node.text == "Alpha": + raise Exception("Alpha band already present") + else: + if nb_bands: + # This means that we are one element after the Band definitions + break + + index += 1 + + tb = ElementTree.TreeBuilder() + tb.start("VRTRasterBand", + {'dataType': "Byte", "band": str(nb_bands + 1), "subClass": "VRTWarpedRasterBand"}) + tb.start("ColorInterp", {}) + tb.data("Alpha") + tb.end("ColorInterp") + tb.end("VRTRasterBand") + elem = tb.close() + + vrt_root.insert(index, elem) + + warp_options = vrt_root.find(".//GDALWarpOptions") + tb = ElementTree.TreeBuilder() + tb.start("DstAlphaBand", {}) + tb.data(str(nb_bands + 1)) + tb.end("DstAlphaBand") + elem = tb.close() + warp_options.append(elem) + + # TODO: gbataille - this is a GDALWarpOptions. Why put it in a specific place? + tb = ElementTree.TreeBuilder() + tb.start("Option", {"name": "INIT_DEST"}) + tb.data("0") + tb.end("Option") + elem = tb.close() + warp_options.append(elem) + + return ElementTree.tostring(vrt_root).decode() + + +def update_alpha_value_for_non_alpha_inputs(warped_vrt_dataset, options=None): + """ + Handles dataset with 1 or 3 bands, i.e. 
without alpha channel, in the case the nodata value has
+    not been forced by options
+    """
+    if warped_vrt_dataset.RasterCount in [1, 3]:
+        tempfilename = gettempfilename('-gdal2tiles.vrt')
+        warped_vrt_dataset.GetDriver().CreateCopy(tempfilename, warped_vrt_dataset)
+        with open(tempfilename) as f:
+            orig_data = f.read()
+        alpha_data = add_alpha_band_to_string_vrt(orig_data)
+        with open(tempfilename, 'w') as f:
+            f.write(alpha_data)
+
+        warped_vrt_dataset = gdal.Open(tempfilename)
+        os.unlink(tempfilename)
+
+        if options and options.verbose:
+            print("Modified -dstalpha warping result saved into 'tiles1.vrt'")
+            # TODO: gbataille - test replacing that with a gdal write of the dataset (more
+            # accurately what's used, even if should be the same
+            with open("tiles1.vrt", "w") as f:
+                f.write(alpha_data)
+
+    return warped_vrt_dataset
+
+
+def nb_data_bands(dataset):
+    """
+    Return the number of data (non-alpha) bands of a gdal dataset
+    """
+    alphaband = dataset.GetRasterBand(1).GetMaskBand()
+    if ((alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or
+            dataset.RasterCount == 4 or
+            dataset.RasterCount == 2):
+        return dataset.RasterCount - 1
+    else:
+        return dataset.RasterCount
+
+
+def gettempfilename(suffix):
+    """Returns a temporary filename"""
+    if '_' in os.environ:
+        # tempfile.mktemp() crashes on some Wine versions (the one of Ubuntu 12.04 particularly)
+        if os.environ['_'].find('wine') >= 0:
+            tmpdir = '.'
+            if 'TMP' in os.environ:
+                tmpdir = os.environ['TMP']
+            import time
+            import random
+            random.seed(time.time())
+            random_part = 'file%d' % random.randint(0, 1000000000)
+            return os.path.join(tmpdir, random_part + suffix)
+
+    return tempfile.mktemp(suffix)
+
+
+def create_base_tile(tile_job_info, tile_detail, queue=None):
+    gdal.AllRegister()
+
+    dataBandsCount = tile_job_info.nb_data_bands
+    output = tile_job_info.output_file_path
+    tileext = tile_job_info.tile_extension
+    tilesize = tile_job_info.tile_size
+    options = tile_job_info.options
+
+    tilebands = dataBandsCount + 1
+    ds = gdal.Open(tile_job_info.src_file, gdal.GA_ReadOnly)
+    mem_drv = gdal.GetDriverByName('MEM')
+    out_drv = gdal.GetDriverByName(tile_job_info.tile_driver)
+    alphaband = ds.GetRasterBand(1).GetMaskBand()
+
+    tx = tile_detail.tx
+    ty = tile_detail.ty
+    tz = tile_detail.tz
+    rx = tile_detail.rx
+    ry = tile_detail.ry
+    rxsize = tile_detail.rxsize
+    rysize = tile_detail.rysize
+    wx = tile_detail.wx
+    wy = tile_detail.wy
+    wxsize = tile_detail.wxsize
+    wysize = tile_detail.wysize
+    querysize = tile_detail.querysize
+
+    # Tile dataset in memory
+    tilefilename = os.path.join(
+        output, str(tz), str(tx), "%s.%s" % (ty, tileext))
+    dstile = mem_drv.Create('', tilesize, tilesize, tilebands)
+
+    data = alpha = None
+
+    if options.verbose:
+        print("\tReadRaster Extent: ",
+              (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize))
+
+    # The query is read with 'nearest neighbour' sampling and can be bigger than
+    # the tilesize; we scale it down to the tilesize with the chosen resampling algorithm.
+
+    if rxsize != 0 and rysize != 0 and wxsize != 0 and wysize != 0:
+        data = ds.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize,
+                             band_list=list(range(1, dataBandsCount+1)))
+        alpha = alphaband.ReadRaster(rx, ry, rxsize, rysize, wxsize, wysize)
+
+    # The tile in memory is a transparent file by default. 
Write pixel values into it if + # any + if data: + if tilesize == querysize: + # Use the ReadRaster result directly in tiles ('nearest neighbour' query) + dstile.WriteRaster(wx, wy, wxsize, wysize, data, + band_list=list(range(1, dataBandsCount+1))) + dstile.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) + + # Note: For source drivers based on WaveLet compression (JPEG2000, ECW, + # MrSID) the ReadRaster function returns high-quality raster (not ugly + # nearest neighbour) + # TODO: Use directly 'near' for WaveLet files + else: + # Big ReadRaster query in memory scaled to the tilesize - all but 'near' + # algo + dsquery = mem_drv.Create('', querysize, querysize, tilebands) + # TODO: fill the null value in case a tile without alpha is produced (now + # only png tiles are supported) + dsquery.WriteRaster(wx, wy, wxsize, wysize, data, + band_list=list(range(1, dataBandsCount+1))) + dsquery.WriteRaster(wx, wy, wxsize, wysize, alpha, band_list=[tilebands]) + + scale_query_to_tile(dsquery, dstile, tile_job_info.tile_driver, options, + tilefilename=tilefilename) + del dsquery + + # Force freeing the memory to make sure the C++ destructor is called and the memory as well as + # the file locks are released + del ds + del data + + if options.resampling != 'antialias': + # Write a copy of tile to png/jpg + out_drv.CreateCopy(tilefilename, dstile, strict=0) + + del dstile + + # Create a KML file for this tile. + if tile_job_info.kml: + kmlfilename = os.path.join(output, str(tz), str(tx), '%d.kml' % ty) + if not options.resume or not os.path.exists(kmlfilename): + with open(kmlfilename, 'wb') as f: + f.write(generate_kml( + tx, ty, tz, tile_job_info.tile_extension, tile_job_info.tile_size, + tile_job_info.tile_swne, tile_job_info.options + ).encode('utf-8')) + + if queue: + queue.put("tile %s %s %s" % (tx, ty, tz)) + + +def create_overview_tiles(tile_job_info, output_folder, options): + """Generation of the overview tiles (higher in the pyramid) based on existing tiles""" + mem_driver = gdal.GetDriverByName('MEM') + tile_driver = tile_job_info.tile_driver + out_driver = gdal.GetDriverByName(tile_driver) + + tilebands = tile_job_info.nb_data_bands + 1 + + # Usage of existing tiles: from 4 underlying tiles generate one as overview. 
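+    # For illustration (hand-computed example): the overview tile (tx=3, ty=5)
+    # at zoom tz is assembled from the base tiles (6, 10), (7, 10), (6, 11) and
+    # (7, 11) at zoom tz+1 (those that exist within tminmax[tz + 1]); they are
+    # pasted into a query raster of twice the tile size and then shrunk by
+    # scale_query_to_tile().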
+ + tcount = 0 + for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1): + tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz] + tcount += (1 + abs(tmaxx-tminx)) * (1 + abs(tmaxy-tminy)) + + ti = 0 + + if tcount == 0: + return + + if not options.quiet: + print("Generating Overview Tiles:") + + progress_bar = ProgressBar(tcount) + progress_bar.start() + + for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1): + tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz] + for ty in range(tmaxy, tminy - 1, -1): + for tx in range(tminx, tmaxx + 1): + + ti += 1 + tilefilename = os.path.join(output_folder, + str(tz), + str(tx), + "%s.%s" % (ty, tile_job_info.tile_extension)) + + if options.verbose: + print(ti, '/', tcount, tilefilename) + + if options.resume and os.path.exists(tilefilename): + if options.verbose: + print("Tile generation skipped because of --resume") + else: + progress_bar.log_progress() + continue + + # Create directories for the tile + if not os.path.exists(os.path.dirname(tilefilename)): + os.makedirs(os.path.dirname(tilefilename)) + + dsquery = mem_driver.Create('', 2 * tile_job_info.tile_size, + 2 * tile_job_info.tile_size, tilebands) + # TODO: fill the null value + dstile = mem_driver.Create('', tile_job_info.tile_size, tile_job_info.tile_size, + tilebands) + + # TODO: Implement more clever walking on the tiles with cache functionality + # probably walk should start with reading of four tiles from top left corner + # Hilbert curve + + children = [] + # Read the tiles and write them to query window + for y in range(2 * ty, 2 * ty + 2): + for x in range(2 * tx, 2 * tx + 2): + minx, miny, maxx, maxy = tile_job_info.tminmax[tz + 1] + if x >= minx and x <= maxx and y >= miny and y <= maxy: + dsquerytile = gdal.Open( + os.path.join(output_folder, str(tz + 1), str(x), + "%s.%s" % (y, tile_job_info.tile_extension)), + gdal.GA_ReadOnly) + if (ty == 0 and y == 1) or (ty != 0 and (y % (2 * ty)) != 0): + tileposy = 0 + else: + tileposy = tile_job_info.tile_size + if tx: + tileposx = x % (2 * tx) * tile_job_info.tile_size + elif tx == 0 and x == 1: + tileposx = tile_job_info.tile_size + else: + tileposx = 0 + dsquery.WriteRaster( + tileposx, tileposy, tile_job_info.tile_size, + tile_job_info.tile_size, + dsquerytile.ReadRaster(0, 0, + tile_job_info.tile_size, + tile_job_info.tile_size), + band_list=list(range(1, tilebands + 1))) + children.append([x, y, tz + 1]) + + scale_query_to_tile(dsquery, dstile, tile_driver, options, + tilefilename=tilefilename) + # Write a copy of tile to png/jpg + if options.resampling != 'antialias': + # Write a copy of tile to png/jpg + out_driver.CreateCopy(tilefilename, dstile, strict=0) + + if options.verbose: + print("\tbuild from zoom", tz + 1, + " tiles:", (2 * tx, 2 * ty), (2 * tx + 1, 2 * ty), + (2 * tx, 2 * ty + 1), (2 * tx + 1, 2 * ty + 1)) + + # Create a KML file for this tile. 
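+                    # The KML written here links this tile to its children via
+                    # Region/NetworkLink elements, so Google Earth only fetches
+                    # deeper zoom levels when the viewer zooms in (the
+                    # SuperOverlay pattern produced by generate_kml()).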
+ if tile_job_info.kml: + with open(os.path.join( + output_folder, + '%d/%d/%d.kml' % (tz, tx, ty) + ), 'wb') as f: + f.write(generate_kml( + tx, ty, tz, tile_job_info.tile_extension, tile_job_info.tile_size, + get_tile_swne(tile_job_info, options), options, children + ).encode('utf-8')) + + if not options.verbose and not options.quiet: + progress_bar.log_progress() + + +def optparse_init(): + """Prepare the option parser for input (argv)""" + + from optparse import OptionParser, OptionGroup + usage = "Usage: %prog [options] input_file [output]" + p = OptionParser(usage, version="%prog " + __version__) + p.add_option("-p", "--profile", dest='profile', + type='choice', choices=profile_list, + help=("Tile cutting profile (%s) - default 'mercator' " + "(Google Maps compatible)" % ",".join(profile_list))) + p.add_option("-r", "--resampling", dest="resampling", + type='choice', choices=resampling_list, + help="Resampling method (%s) - default 'average'" % ",".join(resampling_list)) + p.add_option('-s', '--s_srs', dest="s_srs", metavar="SRS", + help="The spatial reference system used for the source input data") + p.add_option('-z', '--zoom', dest="zoom", + help="Zoom levels to render (format:'2-5' or '10').") + p.add_option('-e', '--resume', dest="resume", action="store_true", + help="Resume mode. Generate only missing files.") + p.add_option('-a', '--srcnodata', dest="srcnodata", metavar="NODATA", + help="NODATA transparency value to assign to the input data") + p.add_option('-d', '--tmscompatible', dest="tmscompatible", action="store_true", + help=("When using the geodetic profile, specifies the base resolution " + "as 0.703125 or 2 tiles at zoom level 0.")) + p.add_option("-v", "--verbose", + action="store_true", dest="verbose", + help="Print status messages to stdout") + p.add_option("-q", "--quiet", + action="store_true", dest="quiet", + help="Disable messages and status to stdout") + p.add_option("--processes", + dest="nb_processes", + type='int', + help="Number of processes to use for tiling") + + # KML options + g = OptionGroup(p, "KML (Google Earth) options", + "Options for generated Google Earth SuperOverlay metadata") + g.add_option("-k", "--force-kml", dest='kml', action="store_true", + help=("Generate KML for Google Earth - default for 'geodetic' profile and " + "'raster' in EPSG:4326. 
For a dataset with different projection use " + "with caution!")) + g.add_option("-n", "--no-kml", dest='kml', action="store_false", + help="Avoid automatic generation of KML files for EPSG:4326") + g.add_option("-u", "--url", dest='url', + help="URL address where the generated tiles are going to be published") + p.add_option_group(g) + + # HTML options + g = OptionGroup(p, "Web viewer options", + "Options for generated HTML viewers a la Google Maps") + g.add_option("-w", "--webviewer", dest='webviewer', type='choice', choices=webviewer_list, + help="Web viewer to generate (%s) - default 'all'" % ",".join(webviewer_list)) + g.add_option("-t", "--title", dest='title', + help="Title of the map") + g.add_option("-c", "--copyright", dest='copyright', + help="Copyright for the map") + g.add_option("-g", "--googlekey", dest='googlekey', + help="Google Maps API key from http://code.google.com/apis/maps/signup.html") + g.add_option("-b", "--bingkey", dest='bingkey', + help="Bing Maps API key from https://www.bingmapsportal.com/") + p.add_option_group(g) + + p.set_defaults(verbose=False, profile="mercator", kml=False, url='', + webviewer='all', copyright='', resampling='average', resume=False, + googlekey='INSERT_YOUR_KEY_HERE', bingkey='INSERT_YOUR_KEY_HERE', + processes=1) + + return p + + +def process_args(argv): + parser = optparse_init() + options, args = parser.parse_args(args=argv) + + # Args should be either an input file OR an input file and an output folder + if (len(args) == 0): + exit_with_error("You need to specify at least an input file as argument to the script") + if (len(args) > 2): + exit_with_error("Processing of several input files is not supported.", + "Please first use a tool like gdal_vrtmerge.py or gdal_merge.py on the " + "files: gdal_vrtmerge.py -o merged.vrt %s" % " ".join(args)) + + input_file = args[0] + if not os.path.isfile(input_file): + exit_with_error("The provided input file %s does not exist or is not a file" % input_file) + + if len(args) == 2: + output_folder = args[1] + else: + output_folder = os.path.basename(input_file) + + options = options_post_processing(options, input_file, output_folder) + + return input_file, output_folder, options + + +def options_post_processing(options, input_file, output_folder): + if not options.title: + options.title = os.path.basename(input_file) + + if options.url and not options.url.endswith('/'): + options.url += '/' + if options.url: + out_path = output_folder + if out_path.endswith("/"): + out_path = out_path[:-1] + options.url += os.path.basename(out_path) + '/' + + # Supported options + if options.resampling == 'average': + try: + if gdal.RegenerateOverview: + pass + except Exception: + exit_with_error("'average' resampling algorithm is not available.", + "Please use -r 'near' argument or upgrade to newer version of GDAL.") + + elif options.resampling == 'antialias': + try: + if numpy: # pylint:disable=W0125 + pass + except Exception: + exit_with_error("'antialias' resampling algorithm is not available.", + "Install PIL (Python Imaging Library) and numpy.") + + try: + os.path.basename(input_file).encode('ascii') + except UnicodeEncodeError: + full_ascii = False + else: + full_ascii = True + + # LC_CTYPE check + if not full_ascii and 'UTF-8' not in os.environ.get("LC_CTYPE", ""): + if not options.quiet: + print("\nWARNING: " + "You are running gdal2tiles.py with a LC_CTYPE environment variable that is " + "not UTF-8 compatible, and your input file contains non-ascii characters. 
" + "The generated sample googlemaps, openlayers or " + "leaflet files might contain some invalid characters as a result\n") + + # Output the results + if options.verbose: + print("Options:", options) + print("Input:", input_file) + print("Output:", output_folder) + print("Cache: %s MB" % (gdal.GetCacheMax() / 1024 / 1024)) + print('') + + return options + + +class TileDetail(object): + tx = 0 + ty = 0 + tz = 0 + rx = 0 + ry = 0 + rxsize = 0 + rysize = 0 + wx = 0 + wy = 0 + wxsize = 0 + wysize = 0 + querysize = 0 + + def __init__(self, **kwargs): + for key in kwargs: + if hasattr(self, key): + setattr(self, key, kwargs[key]) + + def __unicode__(self): + return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz) + + def __str__(self): + return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz) + + def __repr__(self): + return "TileDetail %s\n%s\n%s\n" % (self.tx, self.ty, self.tz) + + +class TileJobInfo(object): + """ + Plain object to hold tile job configuration for a dataset + """ + src_file = "" + nb_data_bands = 0 + output_file_path = "" + tile_extension = "" + tile_size = 0 + tile_driver = None + kml = False + tminmax = [] + tminz = 0 + tmaxz = 0 + in_srs_wkt = 0 + out_geo_trans = [] + ominy = 0 + is_epsg_4326 = False + options = None + + def __init__(self, **kwargs): + for key in kwargs: + if hasattr(self, key): + setattr(self, key, kwargs[key]) + + def __unicode__(self): + return "TileJobInfo %s\n" % (self.src_file) + + def __str__(self): + return "TileJobInfo %s\n" % (self.src_file) + + def __repr__(self): + return "TileJobInfo %s\n" % (self.src_file) + + +class Gdal2TilesError(Exception): + pass + + +class GDAL2Tiles(object): + + def __init__(self, input_file, output_folder, options): + """Constructor function - initialization""" + self.out_drv = None + self.mem_drv = None + self.warped_input_dataset = None + self.out_srs = None + self.nativezoom = None + self.tminmax = None + self.tsize = None + self.mercator = None + self.geodetic = None + self.alphaband = None + self.dataBandsCount = None + self.out_gt = None + self.tileswne = None + self.swne = None + self.ominx = None + self.omaxx = None + self.omaxy = None + self.ominy = None + + self.input_file = None + self.output_folder = None + + # Tile format + self.tilesize = 256 + self.tiledriver = 'PNG' + self.tileext = 'png' + self.tmp_dir = tempfile.mkdtemp() + self.tmp_vrt_filename = os.path.join(self.tmp_dir, str(uuid4()) + '.vrt') + + # Should we read bigger window of the input raster and scale it down? + # Note: Modified later by open_input() + # Not for 'near' resampling + # Not for Wavelet based drivers (JPEG2000, ECW, MrSID) + # Not for 'raster' profile + self.scaledquery = True + # How big should be query window be for scaling down + # Later on reset according the chosen resampling algorightm + self.querysize = 4 * self.tilesize + + # Should we use Read on the input file for generating overview tiles? 
+        # Note: Modified later by open_input()
+        # Otherwise the overview tiles are generated from existing underlying tiles
+        self.overviewquery = False
+
+        self.input_file = input_file
+        self.output_folder = output_folder
+        self.options = options
+
+        if self.options.resampling == 'near':
+            self.querysize = self.tilesize
+
+        elif self.options.resampling == 'bilinear':
+            self.querysize = self.tilesize * 2
+
+        # User specified zoom levels
+        self.tminz = None
+        self.tmaxz = None
+        if self.options.zoom:
+            minmax = self.options.zoom.split('-', 1)
+            minmax.extend([''])
+            zoom_min, zoom_max = minmax[:2]
+            self.tminz = int(zoom_min)
+            if zoom_max:
+                self.tmaxz = int(zoom_max)
+            else:
+                self.tmaxz = int(zoom_min)
+
+        # KML generation
+        self.kml = self.options.kml
+
+    # -------------------------------------------------------------------------
+    def open_input(self):
+        """Initialization of the input raster, reprojection if necessary"""
+        gdal.AllRegister()
+
+        self.out_drv = gdal.GetDriverByName(self.tiledriver)
+        self.mem_drv = gdal.GetDriverByName('MEM')
+
+        if not self.out_drv:
+            raise Exception("The '%s' driver was not found, is it available in this GDAL build?"
+                            % self.tiledriver)
+        if not self.mem_drv:
+            raise Exception("The 'MEM' driver was not found, is it available in this GDAL build?")
+
+        # Open the input file
+
+        if self.input_file:
+            input_dataset = gdal.Open(self.input_file, gdal.GA_ReadOnly)
+        else:
+            raise Exception("No input file was specified")
+
+        if not input_dataset:
+            # Note: GDAL prints the ERROR message too
+            exit_with_error("It is not possible to open the input file '%s'." % self.input_file)
+
+        if self.options.verbose:
+            print("Input file:",
+                  "( %sP x %sL - %s bands)" % (input_dataset.RasterXSize,
+                                               input_dataset.RasterYSize,
+                                               input_dataset.RasterCount))
+
+        # Read metadata from the input file
+        if input_dataset.RasterCount == 0:
+            exit_with_error("Input file '%s' has no raster band" % self.input_file)
+
+        if input_dataset.GetRasterBand(1).GetRasterColorTable():
+            exit_with_error(
+                "Please convert this file to RGB/RGBA and run gdal2tiles on the result.",
+                "From paletted file you can create RGBA file (temp.vrt) by:\n"
+                "gdal_translate -of vrt -expand rgba %s temp.vrt\n"
+                "then run:\n"
+                "gdal2tiles temp.vrt" % self.input_file
+            )
+
+        in_nodata = setup_no_data_values(input_dataset, self.options)
+
+        if self.options.verbose:
+            print("Preprocessed file:",
+                  "( %sP x %sL - %s bands)" % (input_dataset.RasterXSize,
+                                               input_dataset.RasterYSize,
+                                               input_dataset.RasterCount))
+
+        in_srs, self.in_srs_wkt = setup_input_srs(input_dataset, self.options)
+
+        self.out_srs = setup_output_srs(in_srs, self.options)
+
+        # If input and output reference systems are different, we reproject the input dataset into
+        # the output reference system for easier manipulation
+
+        self.warped_input_dataset = None
+
+        if self.options.profile in ('mercator', 'geodetic'):
+
+            if not in_srs:
+                exit_with_error(
+                    "Input file has unknown SRS.",
+                    "Use --s_srs EPSG:xyz (or similar) to provide source reference system.")
+
+            if not has_georeference(input_dataset):
+                exit_with_error(
+                    "There is no georeference - neither affine transformation (worldfile) "
+                    "nor GCPs. You can generate only 'raster' profile tiles.",
+                    "Either gdal2tiles with parameter -p 'raster' or use another GIS "
+                    "software for georeference e.g. 
gdal_transform -gcp / -a_ullr / -a_srs" + ) + + if ((in_srs.ExportToProj4() != self.out_srs.ExportToProj4()) or + (input_dataset.GetGCPCount() != 0)): + self.warped_input_dataset = reproject_dataset( + input_dataset, in_srs, self.out_srs) + + if in_nodata: + self.warped_input_dataset = update_no_data_values( + self.warped_input_dataset, in_nodata, options=self.options) + else: + self.warped_input_dataset = update_alpha_value_for_non_alpha_inputs( + self.warped_input_dataset, options=self.options) + + if self.warped_input_dataset and self.options.verbose: + print("Projected file:", "tiles.vrt", "( %sP x %sL - %s bands)" % ( + self.warped_input_dataset.RasterXSize, + self.warped_input_dataset.RasterYSize, + self.warped_input_dataset.RasterCount)) + + if not self.warped_input_dataset: + self.warped_input_dataset = input_dataset + + self.warped_input_dataset.GetDriver().CreateCopy(self.tmp_vrt_filename, + self.warped_input_dataset) + + # Get alpha band (either directly or from NODATA value) + self.alphaband = self.warped_input_dataset.GetRasterBand(1).GetMaskBand() + self.dataBandsCount = nb_data_bands(self.warped_input_dataset) + + # KML test + self.isepsg4326 = False + srs4326 = osr.SpatialReference() + srs4326.ImportFromEPSG(4326) + if self.out_srs and srs4326.ExportToProj4() == self.out_srs.ExportToProj4(): + self.kml = True + self.isepsg4326 = True + if self.options.verbose: + print("KML autotest OK!") + + # Read the georeference + self.out_gt = self.warped_input_dataset.GetGeoTransform() + + # Test the size of the pixel + + # Report error in case rotation/skew is in geotransform (possible only in 'raster' profile) + if (self.out_gt[2], self.out_gt[4]) != (0, 0): + exit_with_error("Georeference of the raster contains rotation or skew. " + "Such raster is not supported. Please use gdalwarp first.") + + # Here we expect: pixel is square, no rotation on the raster + + # Output Bounds - coordinates in the output SRS + self.ominx = self.out_gt[0] + self.omaxx = self.out_gt[0] + self.warped_input_dataset.RasterXSize * self.out_gt[1] + self.omaxy = self.out_gt[3] + self.ominy = self.out_gt[3] - self.warped_input_dataset.RasterYSize * self.out_gt[1] + # Note: maybe round(x, 14) to avoid the gdal_translate behaviour, when 0 becomes -1e-15 + + if self.options.verbose: + print("Bounds (output srs):", round(self.ominx, 13), self.ominy, self.omaxx, self.omaxy) + + # Calculating ranges for tiles in different zoom levels + if self.options.profile == 'mercator': + + self.mercator = GlobalMercator() + + # Function which generates SWNE in LatLong for given tile + self.tileswne = self.mercator.TileLatLonBounds + + # Generate table with min max tile coordinates for all zoomlevels + self.tminmax = list(range(0, 32)) + for tz in range(0, 32): + tminx, tminy = self.mercator.MetersToTile(self.ominx, self.ominy, tz) + tmaxx, tmaxy = self.mercator.MetersToTile(self.omaxx, self.omaxy, tz) + # crop tiles extending world limits (+-180,+-90) + tminx, tminy = max(0, tminx), max(0, tminy) + tmaxx, tmaxy = min(2**tz-1, tmaxx), min(2**tz-1, tmaxy) + self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy) + + # TODO: Maps crossing 180E (Alaska?) 
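+            # For illustration (hand-computed example): with a 0.10 m/px
+            # geotransform, ZoomForPixelSize(0.10) returns 20, since
+            # Resolution(20) ~= 0.149 m/px is the deepest zoom that does not
+            # upsample the source, while tminz is picked so the whole extent
+            # fits roughly one 256 px tile.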
+
+            # Get the minimal zoom level (map covers area equivalent to one tile)
+            if self.tminz is None:
+                self.tminz = self.mercator.ZoomForPixelSize(
+                    self.out_gt[1] *
+                    max(self.warped_input_dataset.RasterXSize,
+                        self.warped_input_dataset.RasterYSize) /
+                    float(self.tilesize))
+
+            # Get the maximal zoom level
+            # (closest possible zoom level up on the resolution of raster)
+            if self.tmaxz is None:
+                self.tmaxz = self.mercator.ZoomForPixelSize(self.out_gt[1])
+
+            if self.options.verbose:
+                print("Bounds (latlong):",
+                      self.mercator.MetersToLatLon(self.ominx, self.ominy),
+                      self.mercator.MetersToLatLon(self.omaxx, self.omaxy))
+                print('MinZoomLevel:', self.tminz)
+                print("MaxZoomLevel:",
+                      self.tmaxz,
+                      "(",
+                      self.mercator.Resolution(self.tmaxz),
+                      ")")
+
+        if self.options.profile == 'geodetic':
+
+            self.geodetic = GlobalGeodetic(self.options.tmscompatible)
+
+            # Function which generates SWNE in LatLong for given tile
+            self.tileswne = self.geodetic.TileLatLonBounds
+
+            # Generate table with min max tile coordinates for all zoomlevels
+            self.tminmax = list(range(0, 32))
+            for tz in range(0, 32):
+                tminx, tminy = self.geodetic.LonLatToTile(self.ominx, self.ominy, tz)
+                tmaxx, tmaxy = self.geodetic.LonLatToTile(self.omaxx, self.omaxy, tz)
+                # crop tiles extending world limits (+-180,+-90)
+                tminx, tminy = max(0, tminx), max(0, tminy)
+                tmaxx, tmaxy = min(2**(tz+1)-1, tmaxx), min(2**tz-1, tmaxy)
+                self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy)
+
+            # TODO: Maps crossing 180E (Alaska?)
+
+            # Get the minimal zoom level (map covers area equivalent to one tile)
+            if self.tminz is None:
+                self.tminz = self.geodetic.ZoomForPixelSize(
+                    self.out_gt[1] *
+                    max(self.warped_input_dataset.RasterXSize,
+                        self.warped_input_dataset.RasterYSize) /
+                    float(self.tilesize))
+
+            # Get the maximal zoom level
+            # (closest possible zoom level up on the resolution of raster)
+            if self.tmaxz is None:
+                self.tmaxz = self.geodetic.ZoomForPixelSize(self.out_gt[1])
+
+            if self.options.verbose:
+                print("Bounds (latlong):", self.ominx, self.ominy, self.omaxx, self.omaxy)
+
+        if self.options.profile == 'raster':
+
+            def log2(x):
+                return math.log10(x) / math.log10(2)
+
+            self.nativezoom = int(
+                max(math.ceil(log2(self.warped_input_dataset.RasterXSize/float(self.tilesize))),
+                    math.ceil(log2(self.warped_input_dataset.RasterYSize/float(self.tilesize)))))
+
+            if self.options.verbose:
+                print("Native zoom of the raster:", self.nativezoom)
+
+            # Get the minimal zoom level (whole raster in one tile)
+            if self.tminz is None:
+                self.tminz = 0
+
+            # Get the maximal zoom level (native resolution of the raster)
+            if self.tmaxz is None:
+                self.tmaxz = self.nativezoom
+
+            # Generate table with min max tile coordinates for all zoomlevels
+            self.tminmax = list(range(0, self.tmaxz+1))
+            self.tsize = list(range(0, self.tmaxz+1))
+            for tz in range(0, self.tmaxz+1):
+                tsize = 2.0**(self.nativezoom-tz)*self.tilesize
+                tminx, tminy = 0, 0
+                tmaxx = int(math.ceil(self.warped_input_dataset.RasterXSize / tsize)) - 1
+                tmaxy = int(math.ceil(self.warped_input_dataset.RasterYSize / tsize)) - 1
+                self.tsize[tz] = math.ceil(tsize)
+                self.tminmax[tz] = (tminx, tminy, tmaxx, tmaxy)
+
+            # Function which generates SWNE in LatLong for given tile
+            if self.kml and self.in_srs_wkt:
+                ct = osr.CoordinateTransformation(in_srs, srs4326)
+
+                def rastertileswne(x, y, z):
+                    pixelsizex = (2**(self.tmaxz-z) * self.out_gt[1])  # X-pixel size in level
+                    west = self.out_gt[0] + x*self.tilesize*pixelsizex
+                    east = west + self.tilesize*pixelsizex
south = self.ominy + y*self.tilesize*pixelsizex + north = south + self.tilesize*pixelsizex + if not self.isepsg4326: + # Transformation to EPSG:4326 (WGS84 datum) + west, south = ct.TransformPoint(west, south)[:2] + east, north = ct.TransformPoint(east, north)[:2] + return south, west, north, east + + self.tileswne = rastertileswne + else: + self.tileswne = lambda x, y, z: (0, 0, 0, 0) # noqa + + def generate_metadata(self): + """ + Generation of main metadata files and HTML viewers (metadata related to particular + tiles are generated during the tile processing). + """ + + if not os.path.exists(self.output_folder): + os.makedirs(self.output_folder) + + if self.options.profile == 'mercator': + + south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy) + north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy) + south, west = max(-85.05112878, south), max(-180.0, west) + north, east = min(85.05112878, north), min(180.0, east) + self.swne = (south, west, north, east) + + # Generate googlemaps.html + if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator': + if (not self.options.resume or not + os.path.exists(os.path.join(self.output_folder, 'googlemaps.html'))): + with open(os.path.join(self.output_folder, 'googlemaps.html'), 'wb') as f: + f.write(self.generate_googlemaps().encode('utf-8')) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))): + with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f: + f.write(self.generate_openlayers().encode('utf-8')) + + # Generate leaflet.html + if self.options.webviewer in ('all', 'leaflet'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output_folder, 'leaflet.html'))): + with open(os.path.join(self.output_folder, 'leaflet.html'), 'wb') as f: + f.write(self.generate_leaflet().encode('utf-8')) + + elif self.options.profile == 'geodetic': + + west, south = self.ominx, self.ominy + east, north = self.omaxx, self.omaxy + south, west = max(-90.0, south), max(-180.0, west) + north, east = min(90.0, north), min(180.0, east) + self.swne = (south, west, north, east) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))): + with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f: + f.write(self.generate_openlayers().encode('utf-8')) + + elif self.options.profile == 'raster': + + west, south = self.ominx, self.ominy + east, north = self.omaxx, self.omaxy + + self.swne = (south, west, north, east) + + # Generate openlayers.html + if self.options.webviewer in ('all', 'openlayers'): + if (not self.options.resume or not + os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))): + with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f: + f.write(self.generate_openlayers().encode('utf-8')) + + # Generate tilemapresource.xml. 
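+        # tilemapresource.xml is the TMS metadata document for the generated
+        # tree: it records the SRS, bounding box, tile size/format and one
+        # TileSet (with its units-per-pixel) per zoom level, following the
+        # OSGeo TMS specification, so generic TMS clients can consume the
+        # output without guessing its layout.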
+ if not self.options.resume or not os.path.exists(os.path.join(self.output_folder, 'tilemapresource.xml')): + with open(os.path.join(self.output_folder, 'tilemapresource.xml'), 'wb') as f: + f.write(self.generate_tilemapresource().encode('utf-8')) + + if self.kml: + # TODO: Maybe problem for not automatically generated tminz + # The root KML should contain links to all tiles in the tminz level + children = [] + xmin, ymin, xmax, ymax = self.tminmax[self.tminz] + for x in range(xmin, xmax+1): + for y in range(ymin, ymax+1): + children.append([x, y, self.tminz]) + # Generate Root KML + if self.kml: + if (not self.options.resume or not + os.path.exists(os.path.join(self.output_folder, 'doc.kml'))): + with open(os.path.join(self.output_folder, 'doc.kml'), 'wb') as f: + f.write(generate_kml( + None, None, None, self.tileext, self.tilesize, self.tileswne, + self.options, children + ).encode('utf-8')) + + def generate_base_tiles(self): + """ + Generation of the base tiles (the lowest in the pyramid) directly from the input raster + """ + + if not self.options.quiet: + print("Generating Base Tiles:") + + if self.options.verbose: + print('') + print("Tiles generated from the max zoom level:") + print("----------------------------------------") + print('') + + # Set the bounds + tminx, tminy, tmaxx, tmaxy = self.tminmax[self.tmaxz] + + ds = self.warped_input_dataset + tilebands = self.dataBandsCount + 1 + querysize = self.querysize + + if self.options.verbose: + print("dataBandsCount: ", self.dataBandsCount) + print("tilebands: ", tilebands) + + tcount = (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy)) + ti = 0 + + tile_details = [] + + tz = self.tmaxz + for ty in range(tmaxy, tminy-1, -1): + for tx in range(tminx, tmaxx+1): + + ti += 1 + tilefilename = os.path.join( + self.output_folder, str(tz), str(tx), "%s.%s" % (ty, self.tileext)) + if self.options.verbose: + print(ti, '/', tcount, tilefilename) + + if self.options.resume and os.path.exists(tilefilename): + if self.options.verbose: + print("Tile generation skipped because of --resume") + continue + + # Create directories for the tile + if not os.path.exists(os.path.dirname(tilefilename)): + os.makedirs(os.path.dirname(tilefilename)) + + if self.options.profile == 'mercator': + # Tile bounds in EPSG:3857 + b = self.mercator.TileBounds(tx, ty, tz) + elif self.options.profile == 'geodetic': + b = self.geodetic.TileBounds(tx, ty, tz) + + # Don't scale up by nearest neighbour, better change the querysize + # to the native resolution (and return smaller query tile) for scaling + + if self.options.profile in ('mercator', 'geodetic'): + rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1]) + + # Pixel size in the raster covering query geo extent + nativesize = wb[0] + wb[2] + if self.options.verbose: + print("\tNative Extent (querysize", nativesize, "): ", rb, wb) + + # Tile bounds in raster coordinates for ReadRaster query + rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1], querysize=querysize) + + rx, ry, rxsize, rysize = rb + wx, wy, wxsize, wysize = wb + + else: # 'raster' profile: + + tsize = int(self.tsize[tz]) # tilesize in raster coordinates for actual zoom + xsize = self.warped_input_dataset.RasterXSize # size of the raster in pixels + ysize = self.warped_input_dataset.RasterYSize + if tz >= self.nativezoom: + querysize = self.tilesize + + rx = (tx) * tsize + rxsize = 0 + if tx == tmaxx: + rxsize = xsize % tsize + if rxsize == 0: + rxsize = tsize + + rysize = 0 + if ty == tmaxy: + rysize = ysize % tsize + if rysize == 0: + rysize = tsize + ry = 
ysize - (ty * tsize) - rysize + + wx, wy = 0, 0 + wxsize = int(rxsize/float(tsize) * self.tilesize) + wysize = int(rysize/float(tsize) * self.tilesize) + if wysize != self.tilesize: + wy = self.tilesize - wysize + + # Read the source raster if anything is going inside the tile as per the computed + # geo_query + tile_details.append( + TileDetail( + tx=tx, ty=ty, tz=tz, rx=rx, ry=ry, rxsize=rxsize, rysize=rysize, wx=wx, + wy=wy, wxsize=wxsize, wysize=wysize, querysize=querysize, + ) + ) + + conf = TileJobInfo( + src_file=self.tmp_vrt_filename, + nb_data_bands=self.dataBandsCount, + output_file_path=self.output_folder, + tile_extension=self.tileext, + tile_driver=self.tiledriver, + tile_size=self.tilesize, + kml=self.kml, + tminmax=self.tminmax, + tminz=self.tminz, + tmaxz=self.tmaxz, + in_srs_wkt=self.in_srs_wkt, + out_geo_trans=self.out_gt, + ominy=self.ominy, + is_epsg_4326=self.isepsg4326, + options=self.options, + ) + + return conf, tile_details + + def geo_query(self, ds, ulx, uly, lrx, lry, querysize=0): + """ + For given dataset and query in cartographic coordinates returns parameters for ReadRaster() + in raster coordinates and x/y shifts (for border tiles). If the querysize is not given, the + extent is returned in the native resolution of dataset ds. + + raises Gdal2TilesError if the dataset does not contain anything inside this geo_query + """ + geotran = ds.GetGeoTransform() + rx = int((ulx - geotran[0]) / geotran[1] + 0.001) + ry = int((uly - geotran[3]) / geotran[5] + 0.001) + rxsize = int((lrx - ulx) / geotran[1] + 0.5) + rysize = int((lry - uly) / geotran[5] + 0.5) + + if not querysize: + wxsize, wysize = rxsize, rysize + else: + wxsize, wysize = querysize, querysize + + # Coordinates should not go out of the bounds of the raster + wx = 0 + if rx < 0: + rxshift = abs(rx) + wx = int(wxsize * (float(rxshift) / rxsize)) + wxsize = wxsize - wx + rxsize = rxsize - int(rxsize * (float(rxshift) / rxsize)) + rx = 0 + if rx+rxsize > ds.RasterXSize: + wxsize = int(wxsize * (float(ds.RasterXSize - rx) / rxsize)) + rxsize = ds.RasterXSize - rx + + wy = 0 + if ry < 0: + ryshift = abs(ry) + wy = int(wysize * (float(ryshift) / rysize)) + wysize = wysize - wy + rysize = rysize - int(rysize * (float(ryshift) / rysize)) + ry = 0 + if ry+rysize > ds.RasterYSize: + wysize = int(wysize * (float(ds.RasterYSize - ry) / rysize)) + rysize = ds.RasterYSize - ry + + return (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize) + + def generate_tilemapresource(self): + """ + Template for tilemapresource.xml. Returns filled string. 
+    def generate_tilemapresource(self):
+        """
+        Template for tilemapresource.xml. Returns filled string. Expected variables:
+        title, north, south, east, west, isepsg4326, projection, publishurl,
+        zoompixels, tilesize, tileformat, profile
+        """
+
+        args = {}
+        args['title'] = self.options.title
+        args['south'], args['west'], args['north'], args['east'] = self.swne
+        args['tilesize'] = self.tilesize
+        args['tileformat'] = self.tileext
+        args['publishurl'] = self.options.url
+        args['profile'] = self.options.profile
+
+        if self.options.profile == 'mercator':
+            args['srs'] = "EPSG:3857"
+        elif self.options.profile == 'geodetic':
+            args['srs'] = "EPSG:4326"
+        elif self.options.s_srs:
+            args['srs'] = self.options.s_srs
+        elif self.out_srs:
+            args['srs'] = self.out_srs.ExportToWkt()
+        else:
+            args['srs'] = ""
+
+        s = """<?xml version="1.0" encoding="utf-8"?>
+    <TileMap version="1.0.0" tilemapservice="http://tms.osgeo.org/1.0.0">
+      <Title>%(title)s</Title>
+      <Abstract></Abstract>
+      <SRS>%(srs)s</SRS>
+      <BoundingBox minx="%(west).14f" miny="%(south).14f" maxx="%(east).14f" maxy="%(north).14f"/>
+      <Origin x="%(west).14f" y="%(south).14f"/>
+      <TileFormat width="%(tilesize)d" height="%(tilesize)d" mime-type="image/%(tileformat)s" extension="%(tileformat)s"/>
+      <TileSets profile="%(profile)s">
+""" % args  # noqa
+        for z in range(self.tminz, self.tmaxz+1):
+            if self.options.profile == 'raster':
+                s += """        <TileSet href="%s%d" units-per-pixel="%.14f" order="%d"/>\n""" % (
+                    args['publishurl'], z, (2**(self.nativezoom-z) * self.out_gt[1]), z)
+            elif self.options.profile == 'mercator':
+                s += """        <TileSet href="%s%d" units-per-pixel="%.14f" order="%d"/>\n""" % (
+                    args['publishurl'], z, 156543.0339/2**z, z)
+            elif self.options.profile == 'geodetic':
+                s += """        <TileSet href="%s%d" units-per-pixel="%.14f" order="%d"/>\n""" % (
+                    args['publishurl'], z, 0.703125/2**z, z)
+        s += """      </TileSets>
+    </TileMap>
+    """
+        return s
+
+    def generate_googlemaps(self):
+        """
+        Template for googlemaps.html implementing Overlay of tiles for 'mercator' profile.
+        It returns filled string. Expected variables:
+        title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat,
+        publishurl
+        """
+        args = {}
+        args['title'] = self.options.title
+        args['googlemapskey'] = self.options.googlekey
+        args['south'], args['west'], args['north'], args['east'] = self.swne
+        args['minzoom'] = self.tminz
+        args['maxzoom'] = self.tmaxz
+        args['tilesize'] = self.tilesize
+        args['tileformat'] = self.tileext
+        args['publishurl'] = self.options.url
+        args['copyright'] = self.options.copyright
+
+        s = r"""<!-- [googlemaps.html markup and JavaScript stripped from this copy during
+        extraction; only this attribution survived:
+        Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC] -->
+        """ % args  # noqa
+
+        return s
+
+    def generate_leaflet(self):
+        """
+        Template for leaflet.html implementing overlay of tiles for 'mercator' profile.
+        It returns filled string. Expected variables:
+        title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl
+        """
+
+        args = {}
+        args['title'] = self.options.title.replace('"', '\\"')
+        args['htmltitle'] = self.options.title
+        args['south'], args['west'], args['north'], args['east'] = self.swne
+        args['centerlon'] = (args['north'] + args['south']) / 2.
+        args['centerlat'] = (args['west'] + args['east']) / 2.
+        args['minzoom'] = self.tminz
+        args['maxzoom'] = self.tmaxz
+        args['beginzoom'] = self.tmaxz
+        args['tilesize'] = self.tilesize  # not used
+        args['tileformat'] = self.tileext
+        args['publishurl'] = self.options.url  # not used
+        args['copyright'] = self.options.copyright.replace('"', '\\"')
+
+        s = """<!-- [leaflet.html markup and JavaScript stripped from this copy during
+        extraction] -->
+        """ % args  # noqa
+
+        return s
+
+    def generate_openlayers(self):
+        """
+        Template for openlayers.html implementing overlay of available Spherical Mercator layers.
+
+        It returns filled string. Expected variables:
+        title, bingkey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl
+        """
+
+        args = {}
+        args['title'] = self.options.title
+        args['bingkey'] = self.options.bingkey
+        args['south'], args['west'], args['north'], args['east'] = self.swne
+        args['minzoom'] = self.tminz
+        args['maxzoom'] = self.tmaxz
+        args['tilesize'] = self.tilesize
+        args['tileformat'] = self.tileext
+        args['publishurl'] = self.options.url
+        args['copyright'] = self.options.copyright
+        if self.options.tmscompatible:
+            args['tmsoffset'] = "-1"
+        else:
+            args['tmsoffset'] = ""
+        if self.options.profile == 'raster':
+            args['rasterzoomlevels'] = self.tmaxz+1
+            args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1]
+
+        s = r"""<!-- [openlayers.html head markup stripped from this copy during extraction;
+        it rendered the page title below] -->
+    %(title)s
+    """ % args  # noqa
+
+        if self.options.profile == 'mercator':
+            s += """<!-- [mercator-only script include stripped during extraction] -->
+            """ % args
+
+        s += """<!-- [remaining OpenLayers page body stripped from this copy during
+        extraction; only this attribution survived:
+        Generated by GDAL2Tiles, Copyright © 2008 Klokan Petr Pridal, GDAL & OSGeo GSoC] -->
+        """ % args  # noqa
+
+        return s
+
+
+def worker_tile_details(input_file, output_folder, options, send_pipe=None):
+    try:
+        gdal2tiles = GDAL2Tiles(input_file, output_folder, options)
+        gdal2tiles.open_input()
+        gdal2tiles.generate_metadata()
+        tile_job_info, tile_details = gdal2tiles.generate_base_tiles()
+        return_data = (tile_job_info, tile_details)
+        if send_pipe:
+            send_pipe.send(return_data)
+
+        return return_data
+    except Exception as e:
+        print("worker_tile_details failed ", str(e))
+
+
+def progress_printer_thread(queue, nb_jobs):
+    pb = ProgressBar(nb_jobs)
+    pb.start()
+    for _ in range(nb_jobs):
+        queue.get()
+        pb.log_progress()
+        queue.task_done()
+
+
+class ProgressBar(object):
+
+    def __init__(self, total_items):
+        self.total_items = total_items
+        self.nb_items_done = 0
+        self.current_progress = 0
+        self.STEP = 2.5
+
+    def start(self):
+        sys.stdout.write("0")
+
+    def log_progress(self, nb_items=1):
+        self.nb_items_done += nb_items
+        progress = float(self.nb_items_done) / self.total_items * 100
+        if progress >= self.current_progress + self.STEP:
+            done = False
+            while not done:
+                if self.current_progress + self.STEP <= progress:
+                    self.current_progress += self.STEP
+                    if self.current_progress % 10 == 0:
+                        sys.stdout.write(str(int(self.current_progress)))
+                        if self.current_progress == 100:
+                            sys.stdout.write("\n")
+                    else:
+                        sys.stdout.write(".")
+                else:
+                    done = True
+        sys.stdout.flush()
+
+
+def get_tile_swne(tile_job_info, options):
+    if options.profile == 'mercator':
+        mercator = GlobalMercator()
+        tile_swne = mercator.TileLatLonBounds
+    elif options.profile == 'geodetic':
+        geodetic = GlobalGeodetic(options.tmscompatible)
+        tile_swne = geodetic.TileLatLonBounds
+    elif options.profile == 'raster':
+        srs4326 = osr.SpatialReference()
+        srs4326.ImportFromEPSG(4326)
+        if tile_job_info.kml and tile_job_info.in_srs_wkt:
+            in_srs = osr.SpatialReference()
+            in_srs.ImportFromWkt(tile_job_info.in_srs_wkt)
+            ct = osr.CoordinateTransformation(in_srs, srs4326)
+
+            def rastertileswne(x, y, z):
+                pixelsizex = (2 ** (tile_job_info.tmaxz - z) * tile_job_info.out_geo_trans[1])
+                west = tile_job_info.out_geo_trans[0] + x * tile_job_info.tilesize * pixelsizex
+                east = west + tile_job_info.tilesize * pixelsizex
+                south = tile_job_info.ominy + y * tile_job_info.tilesize * pixelsizex
+                north = south + tile_job_info.tilesize * pixelsizex
+                if not tile_job_info.is_epsg_4326:
+                    # Transformation to EPSG:4326 (WGS84 datum)
+                    west, south = ct.TransformPoint(west, south)[:2]
+                    east, north = ct.TransformPoint(east, north)[:2]
+                return south, west, north, east
+
+            tile_swne = rastertileswne
+        else:
+            tile_swne = lambda x, y, z: (0, 0, 0, 0)  # noqa
+    else:
+        tile_swne = lambda x, y, z: (0, 0, 0, 0)  # noqa
+
+    return tile_swne
+
+
+def single_threaded_tiling(input_file, output_folder, options):
+    """
+    Single-threaded tiling that avoids multiprocessing entirely, for platforms that do
+    not support it
+    """
+    if options.verbose:
+        print("Begin tiles details calc")
+    conf, tile_details = worker_tile_details(input_file, output_folder, options)
+
+    if options.verbose:
+        print("Tiles details calc complete.")
+
+    if not options.verbose and not options.quiet:
+        progress_bar = ProgressBar(len(tile_details))
+        progress_bar.start()
+
+    for tile_detail in tile_details:
+        create_base_tile(conf, tile_detail)
+
+        if not options.verbose and not options.quiet:
+            progress_bar.log_progress()
+
+    create_overview_tiles(conf, output_folder, options)
+
+    shutil.rmtree(os.path.dirname(conf.src_file))
+
+
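+# Illustrative sketch, not from the original patch: multi_threaded_tiling() below
+# hinges on draining the Pipe before join(). A payload larger than the pipe buffer
+# cannot be sent in one go, so joining first would deadlock the sender. Minimal form
+# of the pattern, using the multiprocessing names this module already imports:
+def _pipe_send_example(conn, payload):
+    conn.send(payload)
+
+
+def _pipe_roundtrip_example(payload):
+    receiver, sender = Pipe(False)  # one-way pipe: child sends, parent receives
+    p = Process(target=_pipe_send_example, args=(sender, payload))
+    p.start()
+    result = receiver.recv()  # drain the pipe first...
+    p.join()                  # ...then join the sender
+    return result
+
+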
+def multi_threaded_tiling(input_file, output_folder, options):
+    nb_processes = options.nb_processes or 1
+    (conf_receiver, conf_sender) = Pipe(False)
+
+    if options.verbose:
+        print("Begin tiles details calc")
+    p = Process(target=worker_tile_details,
+                args=[input_file, output_folder, options],
+                kwargs={"send_pipe": conf_sender})
+    p.start()
+    # Make sure to consume the pipe before joining: if the payload is too big it cannot be
+    # sent in one go, and the sending process would block forever waiting for buffer space
+    conf, tile_details = conf_receiver.recv()
+    p.join()
+    if options.verbose:
+        print("Tiles details calc complete.")
+    # Have to create the Queue through a multiprocessing.Manager to get a Queue proxy,
+    # otherwise it can't be passed as a param to the method invoked by the pool
+    manager = Manager()
+    queue = manager.Queue()
+    pool = Pool(processes=nb_processes)
+    # TODO: gbataille - check the confs for which each element is an array... one useless level?
+    # TODO: gbataille - assign an ID to each job for print in verbose mode "ReadRaster Extent ..."
+    # TODO: gbataille - check memory footprint and time on big image. are they opened x times
+    for tile_detail in tile_details:
+        pool.apply_async(create_base_tile, (conf, tile_detail), {"queue": queue})
+
+    if not options.verbose and not options.quiet:
+        p = Process(target=progress_printer_thread, args=[queue, len(tile_details)])
+        p.start()
+
+    pool.close()
+    pool.join()      # Jobs finished
+    if not options.verbose and not options.quiet:
+        p.join()     # Traces done
+
+    create_overview_tiles(conf, output_folder, options)
+
+    shutil.rmtree(os.path.dirname(conf.src_file))
+
+
+def main():
+    # TODO: gbataille - use mkdtemp to work in a temp directory
+    # TODO: gbataille - debug intermediate tiles.vrt not produced anymore?
+    # TODO: gbataille - Refactor generate overview tiles to not depend on self variables
+    argv = gdal.GeneralCmdLineProcessor(sys.argv)
+    input_file, output_folder, options = process_args(argv[1:])
+    nb_processes = options.nb_processes or 1
+
+    if nb_processes == 1:
+        single_threaded_tiling(input_file, output_folder, options)
+    else:
+        multi_threaded_tiling(input_file, output_folder, options)
+
+
+if __name__ == '__main__':
+    main()
+
+# vim: set tabstop=4 shiftwidth=4 expandtab:
diff --git a/opendm/tiles/hsv_merge.py b/opendm/tiles/hsv_merge.py
new file mode 100644
index 00000000..0d2e0c2a
--- /dev/null
+++ b/opendm/tiles/hsv_merge.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+#******************************************************************************
+#  $Id$
+#
+#  Project:  GDAL Python Interface
+#  Purpose:  Script to merge greyscale as intensity into an RGB(A) image, for
+#            instance to apply hillshading to a dem colour relief.
+# Author: Frank Warmerdam, warmerdam@pobox.com +# Trent Hare (USGS) +# +#****************************************************************************** +# Copyright (c) 2009, Frank Warmerdam +# Copyright (c) 2010, Even Rouault +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), +# to deal in the Software without restriction, including without limitation +# the rights to use, copy, modify, merge, publish, distribute, sublicense, +# and/or sell copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +# DEALINGS IN THE SOFTWARE. +#****************************************************************************** + +import sys + +import numpy +from osgeo import gdal + +# ============================================================================= +# rgb_to_hsv() +# +# rgb comes in as [r,g,b] with values in the range [0,255]. The returned +# hsv values will be with hue and saturation in the range [0,1] and value +# in the range [0,255] +# +def rgb_to_hsv( r,g,b ): + + maxc = numpy.maximum(r,numpy.maximum(g,b)) + minc = numpy.minimum(r,numpy.minimum(g,b)) + + v = maxc + + minc_eq_maxc = numpy.equal(minc,maxc) + + # compute the difference, but reset zeros to ones to avoid divide by zeros later. + ones = numpy.ones((r.shape[0],r.shape[1])) + maxc_minus_minc = numpy.choose( minc_eq_maxc, (maxc-minc,ones) ) + + s = (maxc-minc) / numpy.maximum(ones,maxc) + rc = (maxc-r) / maxc_minus_minc + gc = (maxc-g) / maxc_minus_minc + bc = (maxc-b) / maxc_minus_minc + + maxc_is_r = numpy.equal(maxc,r) + maxc_is_g = numpy.equal(maxc,g) + maxc_is_b = numpy.equal(maxc,b) + + h = numpy.zeros((r.shape[0],r.shape[1])) + h = numpy.choose( maxc_is_b, (h,4.0+gc-rc) ) + h = numpy.choose( maxc_is_g, (h,2.0+rc-bc) ) + h = numpy.choose( maxc_is_r, (h,bc-gc) ) + + h = numpy.mod(h/6.0,1.0) + + hsv = numpy.asarray([h,s,v]) + + return hsv + +# ============================================================================= +# hsv_to_rgb() +# +# hsv comes in as [h,s,v] with hue and saturation in the range [0,1], +# but value in the range [0,255]. + +def hsv_to_rgb( hsv ): + + h = hsv[0] + s = hsv[1] + v = hsv[2] + + #if s == 0.0: return v, v, v + i = (h*6.0).astype(int) + f = (h*6.0) - i + p = v*(1.0 - s) + q = v*(1.0 - s*f) + t = v*(1.0 - s*(1.0-f)) + + r = i.choose( v, q, p, p, t, v ) + g = i.choose( t, v, v, q, p, p ) + b = i.choose( p, p, t, v, v, q ) + + rgb = numpy.asarray([r,g,b]).astype(numpy.uint8) + + return rgb + +# ============================================================================= +# Usage() + +def Usage(): + print("""Usage: hsv_merge.py [-q] [-of format] src_color src_greyscale dst_color + +where src_color is a RGB or RGBA dataset, + src_greyscale is a greyscale dataset (e.g. 
the result of gdaldem hillshade) + dst_color will be a RGB or RGBA dataset using the greyscale as the + intensity for the color dataset. +""") + sys.exit(1) + +# ============================================================================= +# Mainline +# ============================================================================= + +argv = gdal.GeneralCmdLineProcessor( sys.argv ) +if argv is None: + sys.exit( 0 ) + +format = 'GTiff' +src_color_filename = None +src_greyscale_filename = None +dst_color_filename = None +quiet = False + +# Parse command line arguments. +i = 1 +while i < len(argv): + arg = argv[i] + + if arg == '-of': + i = i + 1 + format = argv[i] + + elif arg == '-q' or arg == '-quiet': + quiet = True + + elif src_color_filename is None: + src_color_filename = argv[i] + + elif src_greyscale_filename is None: + src_greyscale_filename = argv[i] + + elif dst_color_filename is None: + dst_color_filename = argv[i] + else: + Usage() + + i = i + 1 + +if dst_color_filename is None: + Usage() + +datatype = gdal.GDT_Byte + +hilldataset = gdal.Open( src_greyscale_filename, gdal.GA_ReadOnly ) +colordataset = gdal.Open( src_color_filename, gdal.GA_ReadOnly ) + +#check for 3 or 4 bands in the color file +if (colordataset.RasterCount != 3 and colordataset.RasterCount != 4): + print('Source image does not appear to have three or four bands as required.') + sys.exit(1) + +#define output format, name, size, type and set projection +out_driver = gdal.GetDriverByName(format) +outdataset = out_driver.Create(dst_color_filename, colordataset.RasterXSize, \ + colordataset.RasterYSize, colordataset.RasterCount, datatype) +outdataset.SetProjection(hilldataset.GetProjection()) +outdataset.SetGeoTransform(hilldataset.GetGeoTransform()) + +#assign RGB and hillshade bands +rBand = colordataset.GetRasterBand(1) +gBand = colordataset.GetRasterBand(2) +bBand = colordataset.GetRasterBand(3) +if colordataset.RasterCount == 4: + aBand = colordataset.GetRasterBand(4) +else: + aBand = None + +hillband = hilldataset.GetRasterBand(1) +hillbandnodatavalue = hillband.GetNoDataValue() + +#check for same file size +if ((rBand.YSize != hillband.YSize) or (rBand.XSize != hillband.XSize)): + print('Color and hillshade must be the same size in pixels.') + sys.exit(1) + +#loop over lines to apply hillshade +for i in range(hillband.YSize): + #load RGB and Hillshade arrays + rScanline = rBand.ReadAsArray(0, i, hillband.XSize, 1, hillband.XSize, 1) + gScanline = gBand.ReadAsArray(0, i, hillband.XSize, 1, hillband.XSize, 1) + bScanline = bBand.ReadAsArray(0, i, hillband.XSize, 1, hillband.XSize, 1) + hillScanline = hillband.ReadAsArray(0, i, hillband.XSize, 1, hillband.XSize, 1) + + #convert to HSV + hsv = rgb_to_hsv( rScanline, gScanline, bScanline ) + + # if there's nodata on the hillband, use the v value from the color + # dataset instead of the hillshade value. 
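+    # (Editor's note: numpy.choose below selects per pixel by mask value - index 0
+    # keeps hillScanline where the hillshade is valid, index 1 falls back to the
+    # colour dataset's value channel hsv[2] where the hillshade equals nodata.)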
+ if hillbandnodatavalue is not None: + equal_to_nodata = numpy.equal(hillScanline, hillbandnodatavalue) + v = numpy.choose(equal_to_nodata,(hillScanline,hsv[2])) + else: + v = hillScanline + + #replace v with hillshade + hsv_adjusted = numpy.asarray( [hsv[0], hsv[1], v] ) + + #convert back to RGB + dst_color = hsv_to_rgb( hsv_adjusted ) + + #write out new RGB bands to output one band at a time + outband = outdataset.GetRasterBand(1) + outband.WriteArray(dst_color[0], 0, i) + outband = outdataset.GetRasterBand(2) + outband.WriteArray(dst_color[1], 0, i) + outband = outdataset.GetRasterBand(3) + outband.WriteArray(dst_color[2], 0, i) + if aBand is not None: + aScanline = aBand.ReadAsArray(0, i, hillband.XSize, 1, hillband.XSize, 1) + outband = outdataset.GetRasterBand(4) + outband.WriteArray(aScanline, 0, i) + + #update progress line + if not quiet: + gdal.TermProgress_nocb( (float(i+1) / hillband.YSize) ) diff --git a/opendm/tiles/tiler.py b/opendm/tiles/tiler.py new file mode 100644 index 00000000..d7801ab5 --- /dev/null +++ b/opendm/tiles/tiler.py @@ -0,0 +1,34 @@ +import os +from opendm import log +from opendm import system +from opendm import io + +def generate_tiles(geotiff, output_dir, max_concurrency): + gdal2tiles = os.path.join(os.path.dirname(__file__), "gdal2tiles.py") + system.run('python3 "%s" --processes %s -z 5-21 -n -w none "%s" "%s"' % (gdal2tiles, max_concurrency, geotiff, output_dir)) + +def generate_orthophoto_tiles(geotiff, output_dir, max_concurrency): + try: + generate_tiles(geotiff, output_dir, max_concurrency) + except Exception as e: + log.ODM_WARNING("Cannot generate orthophoto tiles: %s" % str(e)) + +def generate_dem_tiles(geotiff, output_dir, max_concurrency): + relief_file = os.path.join(os.path.dirname(__file__), "color_relief.txt") + hsv_merge_script = os.path.join(os.path.dirname(__file__), "hsv_merge.py") + colored_dem = io.related_file_path(geotiff, postfix="color") + hillshade_dem = io.related_file_path(geotiff, postfix="hillshade") + colored_hillshade_dem = io.related_file_path(geotiff, postfix="colored_hillshade") + + try: + system.run('gdaldem color-relief "%s" "%s" "%s" -alpha -co ALPHA=YES' % (geotiff, relief_file, colored_dem)) + system.run('gdaldem hillshade "%s" "%s" -z 1.0 -s 1.0 -az 315.0 -alt 45.0' % (geotiff, hillshade_dem)) + system.run('python3 "%s" "%s" "%s" "%s"' % (hsv_merge_script, colored_dem, hillshade_dem, colored_hillshade_dem)) + generate_tiles(colored_hillshade_dem, output_dir, max_concurrency) + + # Cleanup + for f in [colored_dem, hillshade_dem, colored_hillshade_dem]: + if os.path.isfile(f): + os.remove(f) + except Exception as e: + log.ODM_WARNING("Cannot generate DEM tiles: %s" % str(e)) diff --git a/opendm/types.py b/opendm/types.py index 08ae1bf7..341aa5ab 100644 --- a/opendm/types.py +++ b/opendm/types.py @@ -8,11 +8,11 @@ from pyproj import CRS import xmltodict as x2d from six import string_types -import log -import io -import system -import context -import logging +from opendm import log +from opendm import io +from opendm import system +from opendm import context + from opendm.progress import progressbc from opendm.photo import ODM_Photo @@ -22,6 +22,7 @@ class ODM_Reconstruction(object): self.photos = photos self.georef = None self.gcp = None + self.geo_file = None self.multi_camera = self.detect_multi_camera() def detect_multi_camera(self): @@ -200,7 +201,7 @@ class ODM_GeoRef(object): class ODM_Tree(object): - def __init__(self, root_path, gcp_file = None): + def __init__(self, root_path, gcp_file = None, geo_file = 
None): # root path to the project self.root_path = io.absolute_path_file(root_path) self.input_images = io.join_paths(self.root_path, 'images') @@ -265,6 +266,8 @@ class ODM_Tree(object): self.odm_georeferencing, 'coords.txt') self.odm_georeferencing_gcp = gcp_file or io.find('gcp_list.txt', self.root_path) self.odm_georeferencing_gcp_utm = io.join_paths(self.odm_georeferencing, 'gcp_list_utm.txt') + self.odm_geo_file = geo_file or io.find('geo.txt', self.root_path) + self.odm_georeferencing_utm_log = io.join_paths( self.odm_georeferencing, 'odm_georeferencing_utm_log.txt') self.odm_georeferencing_log = 'odm_georeferencing_log.txt' @@ -290,6 +293,9 @@ class ODM_Tree(object): self.odm_orthophoto_log = io.join_paths(self.odm_orthophoto, 'odm_orthophoto_log.txt') self.odm_orthophoto_tif_log = io.join_paths(self.odm_orthophoto, 'gdal_translate_log.txt') + # tiles + self.orthophoto_tiles = io.join_paths(self.root_path, "orthophoto_tiles") + # Split-merge self.submodels_path = io.join_paths(self.root_path, 'submodels') diff --git a/portable.Dockerfile b/portable.Dockerfile index 8be5cfd3..b279dc81 100644 --- a/portable.Dockerfile +++ b/portable.Dockerfile @@ -1,93 +1,18 @@ -FROM phusion/baseimage:0.10.2 as base +FROM ubuntu:18.04 # Env variables ENV DEBIAN_FRONTEND noninteractive +ENV PYTHONPATH "$PYTHONPATH:/code/SuperBuild/install/lib/python3.6/dist-packages" +ENV PYTHONPATH "$PYTHONPATH:/code/SuperBuild/src/opensfm" +ENV LD_LIBRARY_PATH "$LD_LIBRARY_PATH:/code/SuperBuild/install/lib" -#Install dependencies and required requisites -RUN add-apt-repository -y ppa:ubuntugis/ubuntugis-unstable \ - && add-apt-repository -y ppa:george-edison55/cmake-3.x \ - && apt-get update -y \ - && apt-get install --no-install-recommends -y \ - build-essential \ - cmake \ - gdal-bin \ - git \ - libatlas-base-dev \ - libavcodec-dev \ - libavformat-dev \ - libboost-date-time-dev \ - libboost-filesystem-dev \ - libboost-iostreams-dev \ - libboost-log-dev \ - libboost-python-dev \ - libboost-regex-dev \ - libboost-thread-dev \ - libeigen3-dev \ - libflann-dev \ - libgdal-dev \ - libgeotiff-dev \ - libgoogle-glog-dev \ - libgtk2.0-dev \ - libjasper-dev \ - libjpeg-dev \ - libjsoncpp-dev \ - liblapack-dev \ - liblas-bin \ - libpng-dev \ - libproj-dev \ - libsuitesparse-dev \ - libswscale-dev \ - libtbb2 \ - libtbb-dev \ - libtiff-dev \ - libvtk6-dev \ - libxext-dev \ - python-dev \ - python-gdal \ - python-matplotlib \ - python-pip \ - python-software-properties \ - python-wheel \ - software-properties-common \ - swig2.0 \ - grass-core \ - libssl-dev \ - && apt-get remove libdc1394-22-dev \ - && pip install --upgrade pip \ - && pip install setuptools - # Prepare directories WORKDIR /code # Copy everything COPY . ./ -RUN pip install -r requirements.txt \ - && pip install --upgrade cryptography \ - && python -m easy_install --upgrade pyOpenSSL - -ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages" -ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm" -ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/code/SuperBuild/install/lib" - -# Replace g++ and gcc with our own scripts -COPY /docker/ /code/docker/ -RUN mv -v /usr/bin/gcc /usr/bin/gcc_real \ - && mv -v /usr/bin/g++ /usr/bin/g++_real \ - && cp -v /code/docker/gcc /usr/bin/gcc \ - && cp -v /code/docker/g++ /usr/bin/g++ - -# Compile code in SuperBuild and root directories -RUN cd SuperBuild \ - && mkdir build \ - && cd build \ - && cmake .. \ - && make -j$(nproc) \ - && cd ../.. \ - && mkdir build \ - && cd build \ - && cmake .. 
\
-    && make -j$(nproc)
+RUN PORTABLE_INSTALL=YES bash configure.sh install

 # Cleanup APT
 RUN apt-get clean \
@@ -105,5 +30,5 @@ RUN rm -rf \
     /code/SuperBuild/src/pdal

 # Entry point
-ENTRYPOINT ["python", "/code/run.py"]
+ENTRYPOINT ["python3", "/code/run.py"]
diff --git a/requirements.txt b/requirements.txt
index ac04e761..d01f0a89 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,7 +13,7 @@ pyodm==1.5.4
 Pillow==6.1.0
 networkx==2.2
 scipy==1.2.1
-numpy==1.15.4
+numpy==1.19.2
 pyproj==2.2.2
 Pysolar==0.6
 psutil==5.6.3
@@ -25,3 +25,5 @@ scikit-learn==0.20
 laspy==1.6.0
 beautifulsoup4==4.9.1
 lxml==4.5.1
+matplotlib==1.5.1
+edt==2.0.1
\ No newline at end of file
diff --git a/run.py b/run.py
index 3950983f..df0dab22 100755
--- a/run.py
+++ b/run.py
@@ -1,4 +1,10 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+
+# Basic check
+import sys
+if sys.version_info.major < 3:
+    print("Oops! ODM needs to run with Python 3. It seems you launched it with Python 2. Try using: python3 run.py ...")
+    sys.exit(1)

 from opendm import log
 from opendm import config
@@ -104,4 +110,4 @@ if __name__ == '__main__':
     log.ODM_INFO('MMMMMMMMMMMN- smNm/ +MMm :NNdo` .mMM` oMM+/yMM/ MMMMMMMMMMMM')
     log.ODM_INFO('MMMMMMMMMMMMNo- `:yMMMm `:sNMMM` sMMMMMMM+ NMMMMMMMMMMM')
     log.ODM_INFO('MMMMMMMMMMMMMMMNmmNMMMMMMMNmmmmNMMMMMMMNNMMMMMMMMMNNMMMMMMMMMMMM')
-    log.ODM_INFO('OpenDroneMap app finished - %s' % system.now())
+    log.ODM_INFO('ODM app finished - %s' % system.now())
diff --git a/run.sh b/run.sh
index 3b511725..a743b6de 100755
--- a/run.sh
+++ b/run.sh
@@ -1,7 +1,6 @@
 #!/bin/bash

 RUNPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-export PYTHONPATH=$RUNPATH/SuperBuild/install/lib/python2.7/dist-packages:$RUNPATH/SuperBuild/src/opensfm:$PYTHONPATH
 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$RUNPATH/SuperBuild/install/lib

-python $RUNPATH/run.py "$@"
+python3 $RUNPATH/run.py "$@"
diff --git a/stages/dataset.py b/stages/dataset.py
index 93b7f651..0c3ed086 100644
--- a/stages/dataset.py
+++ b/stages/dataset.py
@@ -6,13 +6,14 @@ from opendm import io
 from opendm import types
 from opendm import log
 from opendm import system
+from opendm.geo import GeoFile
 from shutil import copyfile

 from opendm import progress


 def save_images_database(photos, database_file):
     with open(database_file, 'w') as f:
-        f.write(json.dumps(map(lambda p: p.__dict__, photos)))
+        f.write(json.dumps([p.__dict__ for p in photos]))

     log.ODM_INFO("Wrote images database: %s" % database_file)

@@ -39,7 +40,7 @@ def load_images_database(database_file):

 class ODMLoadDatasetStage(types.ODM_Stage):
     def process(self, args, outputs):
-        tree = types.ODM_Tree(args.project_path, args.gcp)
+        tree = types.ODM_Tree(args.project_path, args.gcp, args.geo)
         outputs['tree'] = tree

         if args.time and io.file_exists(tree.benchmarking):
@@ -48,26 +49,38 @@ class ODMLoadDatasetStage(types.ODM_Stage):
             with open(tree.benchmarking, 'a') as b:
                 b.write('ODM Benchmarking file created %s\nNumber of Cores: %s\n\n' % (system.now(), context.num_cores))

-        # check if the extension is supported
-        def supported_extension(file_name):
-            (pathfn, ext) = os.path.splitext(file_name)
-            return ext.lower() in context.supported_extensions
+        # check if the image filename is supported
+        def valid_image_filename(filename):
+            (pathfn, ext) = os.path.splitext(filename)
+            return ext.lower() in context.supported_extensions and pathfn[-5:] != "_mask"

         # Get supported images from dir
         def get_images(in_dir):
-            # filter images for its extension type
             log.ODM_DEBUG(in_dir)
-            return [f for f in io.get_files_list(in_dir) 
if supported_extension(f)] + entries = os.listdir(in_dir) + valid, rejects = [], [] + for f in entries: + if valid_image_filename(f): + valid.append(f) + else: + rejects.append(f) + return valid, rejects + + def find_mask(photo_path, masks): + (pathfn, ext) = os.path.splitext(os.path.basename(photo_path)) + k = "{}_mask".format(pathfn) + + mask = masks.get(k) + if mask: + # Spaces are not supported due to OpenSfM's mask_list.txt format reqs + if not " " in mask: + return mask + else: + log.ODM_WARNING("Image mask {} has a space. Spaces are currently not supported for image masks.".format(mask)) # get images directory - input_dir = tree.input_images images_dir = tree.dataset_raw - if not io.dir_exists(images_dir): - log.ODM_INFO("Project directory %s doesn't exist. Creating it now. " % images_dir) - system.mkdir_p(images_dir) - copied = [copyfile(io.join_paths(input_dir, f), io.join_paths(images_dir, f)) for f in get_images(input_dir)] - # define paths and create working directories system.mkdir_p(tree.odm_georeferencing) if not args.use_3dmesh: system.mkdir_p(tree.odm_25dgeoreferencing) @@ -77,18 +90,39 @@ class ODMLoadDatasetStage(types.ODM_Stage): # check if we rerun cell or not images_database_file = io.join_paths(tree.root_path, 'images.json') if not io.file_exists(images_database_file) or self.rerun(): - files = get_images(images_dir) + files, rejects = get_images(images_dir) if files: # create ODMPhoto list path_files = [io.join_paths(images_dir, f) for f in files] + # Lookup table for masks + masks = {} + for r in rejects: + (p, ext) = os.path.splitext(r) + if p[-5:] == "_mask" and ext.lower() in context.supported_extensions: + masks[p] = r + photos = [] with open(tree.dataset_list, 'w') as dataset_list: log.ODM_INFO("Loading %s images" % len(path_files)) for f in path_files: - photos += [types.ODM_Photo(f)] + p = types.ODM_Photo(f) + p.set_mask(find_mask(f, masks)) + photos += [p] dataset_list.write(photos[-1].filename + '\n') + # Check if a geo file is available + if tree.odm_geo_file is not None and os.path.exists(tree.odm_geo_file): + log.ODM_INFO("Found image geolocation file") + gf = GeoFile(tree.odm_geo_file) + updated = 0 + for p in photos: + entry = gf.get_entry(p.filename) + if entry: + p.update_with_geo_entry(entry) + updated += 1 + log.ODM_INFO("Updated %s image positions" % updated) + # Save image database for faster restart save_images_database(photos, images_database_file) else: diff --git a/stages/mve.py b/stages/mve.py index 9828d848..d775cd45 100644 --- a/stages/mve.py +++ b/stages/mve.py @@ -77,7 +77,8 @@ class ODMMveStage(types.ODM_Stage): self.update_progress(90) scene2pset_config = [ - "-F%s" % mve_output_scale + "-F%s" % mve_output_scale, + '-mmask' ] # run scene2pset diff --git a/stages/odm_app.py b/stages/odm_app.py index 84ac753b..f8822ca1 100644 --- a/stages/odm_app.py +++ b/stages/odm_app.py @@ -6,19 +6,19 @@ from opendm import io from opendm import system from opendm import log -from dataset import ODMLoadDatasetStage -from run_opensfm import ODMOpenSfMStage -from mve import ODMMveStage -from odm_slam import ODMSlamStage -from odm_meshing import ODMeshingStage -from mvstex import ODMMvsTexStage -from odm_georeferencing import ODMGeoreferencingStage -from odm_orthophoto import ODMOrthoPhotoStage -from odm_dem import ODMDEMStage -from odm_filterpoints import ODMFilterPoints -from splitmerge import ODMSplitStage, ODMMergeStage -from odm_report import ODMReport +from stages.dataset import ODMLoadDatasetStage +from stages.run_opensfm import ODMOpenSfMStage 
+from stages.mve import ODMMveStage +from stages.odm_slam import ODMSlamStage +from stages.odm_meshing import ODMeshingStage +from stages.mvstex import ODMMvsTexStage +from stages.odm_georeferencing import ODMGeoreferencingStage +from stages.odm_orthophoto import ODMOrthoPhotoStage +from stages.odm_dem import ODMDEMStage +from stages.odm_filterpoints import ODMFilterPoints +from stages.splitmerge import ODMSplitStage, ODMMergeStage +from stages.odm_report import ODMReport class ODMApp: def __init__(self, args): diff --git a/stages/odm_dem.py b/stages/odm_dem.py index af5f338d..7d6883f8 100755 --- a/stages/odm_dem.py +++ b/stages/odm_dem.py @@ -10,6 +10,7 @@ from opendm import gsd from opendm.dem import commands, utils from opendm.cropper import Cropper from opendm import pseudogeo +from opendm.tiles.tiler import generate_dem_tiles class ODMDEMStage(types.ODM_Stage): def process(self, args, outputs): @@ -101,7 +102,7 @@ class ODMDEMStage(types.ODM_Stage): dem_input, product, output_type='idw' if product == 'dtm' else 'max', - radiuses=map(str, radius_steps), + radiuses=list(map(str, radius_steps)), gapfill=args.dem_gapfill_steps > 0, outdir=odm_dem_root, resolution=resolution / 100.0, @@ -128,9 +129,12 @@ class ODMDEMStage(types.ODM_Stage): commands.compute_euclidean_map(unfilled_dem_path, io.related_file_path(dem_geotiff_path, postfix=".euclideand"), overwrite=True) - + if pseudo_georeference: pseudogeo.add_pseudo_georeferencing(dem_geotiff_path) + + if args.tiles: + generate_dem_tiles(dem_geotiff_path, tree.path("%s_tiles" % product), args.max_concurrency) progress += 30 self.update_progress(progress) diff --git a/stages/odm_georeferencing.py b/stages/odm_georeferencing.py index a889a362..bfff1e34 100644 --- a/stages/odm_georeferencing.py +++ b/stages/odm_georeferencing.py @@ -120,7 +120,12 @@ class ODMGeoreferencingStage(types.ODM_Stage): log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud") cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model') - decimation_step = 40 if args.fast_orthophoto or args.use_opensfm_dense else 90 + if args.fast_orthophoto: + decimation_step = 10 + elif args.use_opensfm_dense: + decimation_step = 40 + else: + decimation_step = 90 # More aggressive decimation for large datasets if not args.fast_orthophoto: diff --git a/stages/odm_orthophoto.py b/stages/odm_orthophoto.py index 0be25aa7..1546a5b0 100644 --- a/stages/odm_orthophoto.py +++ b/stages/odm_orthophoto.py @@ -148,7 +148,7 @@ class ODMOrthoPhotoStage(types.ODM_Stage): os.path.join(tree.odm_orthophoto, "odm_orthophoto_cut.tif"), blend_distance=20, only_max_coords_feature=True) - orthophoto.post_orthophoto_steps(args, bounds_file_path, tree.odm_orthophoto_tif) + orthophoto.post_orthophoto_steps(args, bounds_file_path, tree.odm_orthophoto_tif, tree.orthophoto_tiles) # Generate feathered orthophoto also if args.orthophoto_cutline: diff --git a/stages/splitmerge.py b/stages/splitmerge.py index 03173dd4..1414f329 100644 --- a/stages/splitmerge.py +++ b/stages/splitmerge.py @@ -18,7 +18,7 @@ from opendm.remote import LocalRemoteExecutor from opendm.shots import merge_geojson_shots from opendm import point_cloud from pipes import quote - +from opendm.tiles.tiler import generate_dem_tiles class ODMSplitStage(types.ODM_Stage): def process(self, args, outputs): @@ -157,9 +157,9 @@ class ODMSplitStage(types.ODM_Stage): #Create image lists with open(path+"/opensfm/image_list.txt", "w") as o: - o.writelines(map(lambda x: "../images/"+x+'\n', v["shots"].keys())) + 
o.writelines(list(map(lambda x: "../images/"+x+'\n', v["shots"].keys()))) with open(path+"/img_list.txt", "w") as o: - o.writelines(map(lambda x: x+'\n', v["shots"].keys())) + o.writelines(list(map(lambda x: x+'\n', v["shots"].keys()))) i+=1 os.rename(octx.path("../submodels"), octx.path("../unaligned_submodels")) @@ -216,7 +216,7 @@ class ODMSplitStage(types.ODM_Stage): argv = get_submodel_argv(args, tree.submodels_path, sp_octx.name()) # Re-run the ODM toolchain on the submodel - system.run(" ".join(map(quote, argv)), env_vars=os.environ.copy()) + system.run(" ".join(map(quote, map(str, argv))), env_vars=os.environ.copy()) else: lre.set_projects([os.path.abspath(os.path.join(p, "..")) for p in submodel_paths]) lre.run_toolchain() @@ -293,7 +293,7 @@ class ODMMergeStage(types.ODM_Stage): orthophoto_vars = orthophoto.get_orthophoto_vars(args) orthophoto.merge(all_orthos_and_ortho_cuts, tree.odm_orthophoto_tif, orthophoto_vars) - orthophoto.post_orthophoto_steps(args, merged_bounds_file, tree.odm_orthophoto_tif) + orthophoto.post_orthophoto_steps(args, merged_bounds_file, tree.odm_orthophoto_tif, tree.orthophoto_tiles) elif len(all_orthos_and_ortho_cuts) == 1: # Simply copy log.ODM_WARNING("A single orthophoto/cutline pair was found between all submodels.") @@ -331,8 +331,12 @@ class ODMMergeStage(types.ODM_Stage): if args.crop > 0: Cropper.crop(merged_bounds_file, dem_file, dem_vars, keep_original=not args.optimize_disk_space) log.ODM_INFO("Created %s" % dem_file) + + if args.tiles: + generate_dem_tiles(dem_file, tree.path("%s_tiles" % human_name.lower()), args.max_concurrency) else: log.ODM_WARNING("Cannot merge %s, %s was not created" % (human_name, dem_file)) + else: log.ODM_WARNING("Found merged %s in %s" % (human_name, dem_filename)) diff --git a/start-dev-env.sh b/start-dev-env.sh index 69fa4ad6..fd747c4c 100755 --- a/start-dev-env.sh +++ b/start-dev-env.sh @@ -11,8 +11,9 @@ if [ "$1" = "--setup" ]; then #bash configure.sh reinstall touch .setupdevenv - apt install -y vim - chown -R $3:$4 /code /var/www + apt update && apt install -y vim + chown -R $3:$4 /code + chown -R $3:$4 /var/www fi echo "Adding $2 to /etc/passwd" @@ -44,7 +45,13 @@ if [ "$1" = "--setup" ]; then # Colors echo "alias ls='ls --color=auto'" >> $HOME/.bashrc - su -c bash $2 + # Python paths + echo $(python /code/opendm/context.py) >> $HOME/.bashrc + + # Misc aliases + echo "alias pdal=/code/SuperBuild/install/bin/pdal" >> $HOME/.bashrc + + su -c bash $2 exit 0 fi @@ -75,6 +82,7 @@ fi export PORT="${PORT:=3000}" export QTC="${QTC:=NO}" +export IMAGE="${IMAGE:=opendronemap/nodeodm}" if [ -z "$DATA" ]; then echo "Usage: DATA=/path/to/datasets [VARS] $0" @@ -82,6 +90,7 @@ if [ -z "$DATA" ]; then echo "VARS:" echo " DATA Path to directory that contains datasets for testing. The directory will be mounted in /datasets. If you don't have any, simply set it to a folder outside the ODM repository." echo " PORT Port to expose for NodeODM (default: $PORT)" + echo " IMAGE Docker image to use (default: $IMAGE)" echo " QTC When set to YES, installs QT Creator for C++ development (default: $QTC)" exit 1 fi @@ -89,8 +98,9 @@ fi echo "Starting development environment..." echo "Datasets path: $DATA" -echo "NodeODM port: $PORT" +echo "Expose port: $PORT" echo "QT Creator: $QTC" +echo "Image: $IMAGE" if [ ! 
-e "$HOME"/.odm-dev-home ]; then mkdir -p "$HOME"/.odm-dev-home @@ -100,5 +110,5 @@ USER_ID=$(id -u) GROUP_ID=$(id -g) USER=$(id -un) xhost + -docker run -ti --entrypoint bash --name odmdev -v $(pwd):/code -v "$DATA":/datasets -p $PORT:3000 --privileged -e DISPLAY -e LANG=C.UTF-8 -e LC_ALL=C.UTF-8 -v="/tmp/.X11-unix:/tmp/.X11-unix:rw" -v="$HOME/.odm-dev-home:/home/$USER" opendronemap/nodeodm -c "/code/start-dev-env.sh --setup $USER $USER_ID $GROUP_ID $QTC" -exit 0 \ No newline at end of file +docker run -ti --entrypoint bash --name odmdev -v $(pwd):/code -v "$DATA":/datasets -p $PORT:3000 --privileged -e DISPLAY -e LANG=C.UTF-8 -e LC_ALL=C.UTF-8 -v="/tmp/.X11-unix:/tmp/.X11-unix:rw" -v="$HOME/.odm-dev-home:/home/$USER" $IMAGE -c "/code/start-dev-env.sh --setup $USER $USER_ID $GROUP_ID $QTC" +exit 0 diff --git a/test.sh b/test.sh index 91fe5153..fb79218c 100755 --- a/test.sh +++ b/test.sh @@ -1,5 +1,5 @@ if [ ! -z "$1" ]; then - python -m unittest discover tests "test_$1.py" + python3 -m unittest discover tests "test_$1.py" else - python -m unittest discover tests "test_*.py" + python3 -m unittest discover tests "test_*.py" fi diff --git a/tests/test_camera.py b/tests/test_camera.py index fb0045f2..8c1b539b 100644 --- a/tests/test_camera.py +++ b/tests/test_camera.py @@ -15,7 +15,7 @@ class TestCamera(unittest.TestCase): def test_camera(self): c = camera.get_cameras_from_opensfm("tests/assets/reconstruction.json") self.assertEqual(len(c.keys()), 1) - camera_id = c.keys()[0] + camera_id = list(c.keys())[0] self.assertTrue('v2 ' not in camera_id) self.assertRaises(RuntimeError, camera.get_cameras_from_opensfm, 'tests/assets/nonexistant.json') @@ -27,7 +27,7 @@ class TestCamera(unittest.TestCase): osfm_c = camera.get_opensfm_camera_models(c) self.assertEqual(len(osfm_c.keys()), 1) - c1 = osfm_c[osfm_c.keys()[0]] + c1 = osfm_c[list(osfm_c.keys())[0]] self.assertTrue('k1_prior' in c1) self.assertTrue('k2_prior' in c1) self.assertFalse('test' in c1) diff --git a/tests/test_gcp.py b/tests/test_gcp.py index 061faf57..154ac4fc 100644 --- a/tests/test_gcp.py +++ b/tests/test_gcp.py @@ -33,8 +33,8 @@ class TestGcp(unittest.TestCase): copy = GCPFile(gcp.create_utm_copy("tests/assets/output/gcp_utm.txt")) self.assertTrue(copy.exists()) self.assertEqual(copy.raw_srs, "WGS84 UTM 16N") - self.assertEqual(copy.get_entry(0).x, 609865.707705) - self.assertEqual(copy.get_entry(0).y, 4950688.36182) + self.assertEqual(copy.get_entry(0).x, 609865.7077054137) + self.assertEqual(copy.get_entry(0).y, 4950688.361817497) def test_utm_conversion_feet(self): gcp = GCPFile("tests/assets/gcp_michigan_feet_valid.txt") @@ -43,7 +43,7 @@ class TestGcp(unittest.TestCase): self.assertEqual(copy.raw_srs, "WGS84 UTM 16N") self.assertEqual(round(copy.get_entry(0).x, 3), 609925.818) self.assertEqual(round(copy.get_entry(0).y, 3), 4950688.772) - self.assertEqual(round(copy.get_entry(0).z, 3), 171.663) + self.assertEqual(round(copy.get_entry(0).z, 3), 563.199) def test_filtered_copy(self): gcp = GCPFile('tests/assets/gcp_latlon_valid.txt')