resolve merge conflict from master

Former-commit-id: 2b83086c20
pull/1161/head
Stephen Mather 2019-03-27 14:02:35 +00:00
commit e1ebc79827
34 zmienionych plików z 2185 dodań i 417 usunięć

Wyświetl plik

@ -13,15 +13,15 @@ RUN apt-get update -y
# All packages (Will install much faster)
RUN apt-get install --no-install-recommends -y git cmake python-pip build-essential software-properties-common python-software-properties libgdal-dev gdal-bin libgeotiff-dev \
libgtk2.0-dev libavcodec-dev libavformat-dev libswscale-dev python-dev python-numpy libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev libjasper-dev libflann-dev \
libgtk2.0-dev libavcodec-dev libavformat-dev libswscale-dev python-dev libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev libjasper-dev libflann-dev \
libproj-dev libxext-dev liblapack-dev libeigen3-dev libvtk6-dev python-networkx libgoogle-glog-dev libsuitesparse-dev libboost-filesystem-dev libboost-iostreams-dev \
libboost-regex-dev libboost-python-dev libboost-date-time-dev libboost-thread-dev python-pyproj python-empy python-nose python-pyside python-scipy \
libboost-regex-dev libboost-python-dev libboost-date-time-dev libboost-thread-dev python-pyproj python-empy python-nose python-pyside \
liblas-bin python-matplotlib libatlas-base-dev swig2.0 python-wheel libboost-log-dev libjsoncpp-dev python-gdal
RUN apt-get remove libdc1394-22-dev
RUN pip install --upgrade pip
RUN pip install setuptools
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/gipit/gippy/archive/1.0.0.zip loky shapely numpy==1.15.4 pyproj psutil repoze.lru && pip install -U scipy --ignore-installed
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/OpenDroneMap/gippy/archive/numpyfix.zip loky shapely scipy numpy==1.15.4 pyproj psutil repoze.lru
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages"
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm"
@ -33,7 +33,6 @@ RUN mkdir /code
WORKDIR /code
# Copy repository files
COPY ccd_defs_check.py /code/ccd_defs_check.py
COPY CMakeLists.txt /code/CMakeLists.txt
COPY configure.sh /code/configure.sh
COPY /modules/ /code/modules/

Wyświetl plik

@ -38,7 +38,7 @@ docker run -it --rm \
-v "$(pwd)/images:/code/images" \
-v "$(pwd)/odm_orthophoto:/code/odm_orthophoto" \
-v "$(pwd)/odm_texturing:/code/odm_texturing" \
opendronemap/opendronemap
opendronemap/odm
```
### Native Install (Ubuntu 16.04)
@ -167,7 +167,7 @@ instructions through "Create a Docker group". Once Docker is installed, the fast
-v "$(pwd)/images:/code/images" \
-v "$(pwd)/odm_orthophoto:/code/odm_orthophoto" \
-v "$(pwd)/odm_texturing:/code/odm_texturing" \
opendronemap/opendronemap
opendronemap/odm
If you want to build your own Docker image from sources, type:
@ -200,7 +200,7 @@ If you want to get all intermediate outputs, run the following command:
-v "$(pwd)/odm_texturing:/code/odm_texturing" \
-v "$(pwd)/opensfm:/code/opensfm" \
-v "$(pwd)/mve:/code/mve" \
opendronemap/opendronemap
opendronemap/odm
To pass in custom parameters to the run.py script, simply pass it as arguments to the `docker run` command. For example:
@ -208,7 +208,7 @@ To pass in custom parameters to the run.py script, simply pass it as arguments t
-v "$(pwd)/images:/code/images" \
-v "$(pwd)/odm_orthophoto:/code/odm_orthophoto" \
-v "$(pwd)/odm_texturing:/code/odm_texturing" \
opendronemap/opendronemap --resize-to 1800 --force-ccd 6.16
opendronemap/odm --resize-to 1800
If you want to pass in custom parameters using the settings.yaml file, you can pass it as a -v volume binding:
@ -217,7 +217,7 @@ If you want to pass in custom parameters using the settings.yaml file, you can p
-v "$(pwd)/odm_orthophoto:/code/odm_orthophoto" \
-v "$(pwd)/odm_texturing:/code/odm_texturing" \
-v "$(pwd)/settings.yaml:/code/settings.yaml" \
opendronemap/opendronemap
opendronemap/odm
When building your own Docker image, if image size is of importance to you, you should use the ```--squash``` flag, like so:

Wyświetl plik

@ -71,7 +71,7 @@ SETUP_EXTERNAL_PROJECT(OpenCV ${ODM_OpenCV_Version} ${ODM_BUILD_OpenCV})
# ---------------------------------------------------------------------------------------------
# Point Cloud Library (PCL)
#
set(ODM_PCL_Version 1.7.2)
set(ODM_PCL_Version 1.8.0)
option(ODM_BUILD_PCL "Force to build PCL library" OFF)
SETUP_EXTERNAL_PROJECT(PCL ${ODM_PCL_Version} ${ODM_BUILD_PCL})
@ -132,7 +132,7 @@ endforeach()
externalproject_add(mve
GIT_REPOSITORY https://github.com/simonfuhrmann/mve.git
GIT_TAG 97c5b741bebcb5b74976db679344acefab320e70
GIT_TAG fb942b4458dbf8490c9a4c6b81b9b9f57c593c0f
UPDATE_COMMAND ""
SOURCE_DIR ${SB_SOURCE_DIR}/elibs/mve
CONFIGURE_COMMAND ""

Wyświetl plik

@ -6,7 +6,7 @@ ExternalProject_Add(${_proj_name}
TMP_DIR ${_SB_BINARY_DIR}/tmp
STAMP_DIR ${_SB_BINARY_DIR}/stamp
DOWNLOAD_DIR ${SB_DOWNLOAD_DIR}/${_proj_name}
URL http://www.exiv2.org/builds/exiv2-0.27.0-Source.tar.gz
URL https://github.com/Exiv2/exiv2/archive/0.27.tar.gz
SOURCE_DIR ${SB_SOURCE_DIR}/${_proj_name}
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX=${SB_INSTALL_DIR}

Wyświetl plik

@ -1,25 +0,0 @@
#!/usr/bin/python
import sys
import os
import json
BIN_PATH_ABS = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
def get_ccd_widths():
"""Return the CCD Width of the camera listed in the JSON defs file."""
with open(BIN_PATH_ABS + '/data/ccd_defs.json') as jsonFile:
return json.load(jsonFile)
try:
ccd_defs = get_ccd_widths()
print "CCD_DEFS compiles OK"
print "Definitions in file: {0}".format(len(ccd_defs))
exit_code=0
except IOError as e:
print "I/O error with CCD_DEFS file: {0}".format(e.strerror)
exit_code=255
except:
print "Error with CCD_DEFS file: {0}".format(sys.exc_info()[1])
exit_code=255
sys.exit(exit_code)

Wyświetl plik

@ -96,10 +96,7 @@ install() {
libboost-log-dev
echo "Installing split-merge Dependencies"
pip install -U scipy shapely numpy==1.15.4 pyproj
pip install -U https://github.com/gipit/gippy/archive/1.0.0.zip psutil
pip install -U scipy numpy==1.15.4 shapely pyproj https://github.com/OpenDroneMap/gippy/archive/numpyfix.zip psutil
echo "Compiling SuperBuild"
cd ${RUNPATH}/SuperBuild

Wyświetl plik

@ -11,15 +11,15 @@ RUN apt-get update -y
# All packages (Will install much faster)
RUN apt-get install --no-install-recommends -y git cmake python-pip build-essential software-properties-common python-software-properties libgdal-dev gdal-bin libgeotiff-dev \
libgtk2.0-dev libavcodec-dev libavformat-dev libswscale-dev python-dev python-numpy libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev libjasper-dev libflann-dev \
libgtk2.0-dev libavcodec-dev libavformat-dev libswscale-dev python-dev libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev libjasper-dev libflann-dev \
libproj-dev libxext-dev liblapack-dev libeigen3-dev libvtk6-dev python-networkx libgoogle-glog-dev libsuitesparse-dev libboost-filesystem-dev libboost-iostreams-dev \
libboost-regex-dev libboost-python-dev libboost-date-time-dev libboost-thread-dev python-pyproj python-empy python-nose python-pyside python-scipy \
libboost-regex-dev libboost-python-dev libboost-date-time-dev libboost-thread-dev python-pyproj python-empy python-nose python-pyside \
liblas-bin python-matplotlib libatlas-base-dev swig2.0 python-wheel libboost-log-dev libjsoncpp-dev python-gdal
RUN apt-get remove libdc1394-22-dev
RUN pip install --upgrade pip
RUN pip install setuptools
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/gipit/gippy/archive/1.0.0.zip loky shapely numpy==1.15.4 pyproj psutil repoze.lru && pip install -U scipy --ignore-installed
RUN pip install -U PyYAML exifread gpxpy xmltodict catkin-pkg appsettings https://github.com/OpenDroneMap/gippy/archive/numpyfix.zip loky shapely scipy numpy==1.15.4 pyproj psutil repoze.lru
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/install/lib/python2.7/dist-packages"
ENV PYTHONPATH="$PYTHONPATH:/code/SuperBuild/src/opensfm"
@ -31,7 +31,6 @@ RUN mkdir /code
WORKDIR /code
# Copy repository files
COPY ccd_defs_check.py /code/ccd_defs_check.py
COPY CMakeLists.txt /code/CMakeLists.txt
COPY configure.sh /code/configure.sh
COPY /modules/ /code/modules/

Wyświetl plik

@ -8,6 +8,8 @@ add_subdirectory(odm_extract_utm)
add_subdirectory(odm_georef)
add_subdirectory(odm_orthophoto)
add_subdirectory(odm_cleanmesh)
add_subdirectory(odm_filterpoints)
if (ODM_BUILD_SLAM)
add_subdirectory(odm_slam)
endif ()

Wyświetl plik

@ -0,0 +1,21 @@
project(odm_filterpoints)
cmake_minimum_required(VERSION 2.8)
# Add compiler options.
add_definitions(-Wall -Wextra -Wconversion -pedantic -std=c++11)
# PDAL and jsoncpp
find_package(PDAL REQUIRED CONFIG)
include_directories(${PDAL_INCLUDE_DIRS})
include_directories("${PROJECT_SOURCE_DIR}/../../SuperBuild/src/pdal/vendor/jsoncpp/dist")
link_directories(${PDAL_LIBRARY_DIRS})
add_definitions(${PDAL_DEFINITIONS})
# Add source directory
aux_source_directory("./src" SRC_LIST)
# Add exectuteable
add_executable(${PROJECT_NAME} ${SRC_LIST})
# Link
target_link_libraries(${PROJECT_NAME} jsoncpp ${PDAL_LIBRARIES})

Wyświetl plik

@ -0,0 +1,106 @@
/*
Copyright (c) 2006, Michael Kazhdan and Matthew Bolitho
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer. Redistributions in binary form must reproduce
the above copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the distribution.
Neither the name of the Johns Hopkins University nor the names of its contributors
may be used to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
*/
#ifndef CMD_LINE_PARSER_INCLUDED
#define CMD_LINE_PARSER_INCLUDED
#include <stdarg.h>
#include <cstring>
#include <cstdlib>
#include <string>
#include <vector>
#ifdef WIN32
int strcasecmp( const char* c1 , const char* c2 );
#endif // WIN32
class cmdLineReadable
{
public:
bool set;
char *name;
cmdLineReadable( const char *name );
virtual ~cmdLineReadable( void );
virtual int read( char** argv , int argc );
virtual void writeValue( char* str ) const;
};
template< class Type > void cmdLineWriteValue( Type t , char* str );
template< class Type > void cmdLineCleanUp( Type* t );
template< class Type > Type cmdLineInitialize( void );
template< class Type > Type cmdLineCopy( Type t );
template< class Type > Type cmdLineStringToType( const char* str );
template< class Type >
class cmdLineParameter : public cmdLineReadable
{
public:
Type value;
cmdLineParameter( const char *name );
cmdLineParameter( const char *name , Type v );
~cmdLineParameter( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
template< class Type , int Dim >
class cmdLineParameterArray : public cmdLineReadable
{
public:
Type values[Dim];
cmdLineParameterArray( const char *name, const Type* v=NULL );
~cmdLineParameterArray( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
template< class Type >
class cmdLineParameters : public cmdLineReadable
{
public:
int count;
Type *values;
cmdLineParameters( const char* name );
~cmdLineParameters( void );
int read( char** argv , int argc );
void writeValue( char* str ) const;
bool expectsArg( void ) const { return true; }
};
void cmdLineParse( int argc , char **argv, cmdLineReadable** params );
char* FileExtension( char* fileName );
char* LocalFileName( char* fileName );
char* DirectoryName( char* fileName );
char* GetFileExtension( const char* fileName );
char* GetLocalFileName( const char* fileName );
char** ReadWords( const char* fileName , int& cnt );
#include "CmdLineParser.inl"
#endif // CMD_LINE_PARSER_INCLUDED

Wyświetl plik

@ -0,0 +1,300 @@
/* -*- C++ -*-
Copyright (c) 2006, Michael Kazhdan and Matthew Bolitho
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of
conditions and the following disclaimer. Redistributions in binary form must reproduce
the above copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the distribution.
Neither the name of the Johns Hopkins University nor the names of its contributors
may be used to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
*/
#include <cassert>
#include <string.h>
#if defined( WIN32 ) || defined( _WIN64 )
inline int strcasecmp( const char* c1 , const char* c2 ){ return _stricmp( c1 , c2 ); }
#endif // WIN32 || _WIN64
template< > void cmdLineCleanUp< int >( int* t ){ *t = 0; }
template< > void cmdLineCleanUp< float >( float* t ){ *t = 0; }
template< > void cmdLineCleanUp< double >( double* t ){ *t = 0; }
template< > void cmdLineCleanUp< char* >( char** t ){ if( *t ) free( *t ) ; *t = NULL; }
template< > int cmdLineInitialize< int >( void ){ return 0; }
template< > float cmdLineInitialize< float >( void ){ return 0.f; }
template< > double cmdLineInitialize< double >( void ){ return 0.; }
template< > char* cmdLineInitialize< char* >( void ){ return NULL; }
template< > void cmdLineWriteValue< int >( int t , char* str ){ sprintf( str , "%d" , t ); }
template< > void cmdLineWriteValue< float >( float t , char* str ){ sprintf( str , "%f" , t ); }
template< > void cmdLineWriteValue< double >( double t , char* str ){ sprintf( str , "%f" , t ); }
template< > void cmdLineWriteValue< char* >( char* t , char* str ){ if( t ) sprintf( str , "%s" , t ) ; else str[0]=0; }
template< > int cmdLineCopy( int t ){ return t; }
template< > float cmdLineCopy( float t ){ return t; }
template< > double cmdLineCopy( double t ){ return t; }
#if defined( WIN32 ) || defined( _WIN64 )
template< > char* cmdLineCopy( char* t ){ return _strdup( t ); }
#else // !WIN32 && !_WIN64
template< > char* cmdLineCopy( char* t ){ return strdup( t ); }
#endif // WIN32 || _WIN64
template< > int cmdLineStringToType( const char* str ){ return atoi( str ); }
template< > float cmdLineStringToType( const char* str ){ return float( atof( str ) ); }
template< > double cmdLineStringToType( const char* str ){ return double( atof( str ) ); }
#if defined( WIN32 ) || defined( _WIN64 )
template< > char* cmdLineStringToType( const char* str ){ return _strdup( str ); }
#else // !WIN32 && !_WIN64
template< > char* cmdLineStringToType( const char* str ){ return strdup( str ); }
#endif // WIN32 || _WIN64
/////////////////////
// cmdLineReadable //
/////////////////////
#if defined( WIN32 ) || defined( _WIN64 )
inline cmdLineReadable::cmdLineReadable( const char *name ) : set(false) { this->name = _strdup( name ); }
#else // !WIN32 && !_WIN64
inline cmdLineReadable::cmdLineReadable( const char *name ) : set(false) { this->name = strdup( name ); }
#endif // WIN32 || _WIN64
inline cmdLineReadable::~cmdLineReadable( void ){ if( name ) free( name ) ; name = NULL; }
inline int cmdLineReadable::read( char** , int ){ set = true ; return 0; }
inline void cmdLineReadable::writeValue( char* str ) const { str[0] = 0; }
//////////////////////
// cmdLineParameter //
//////////////////////
template< class Type > cmdLineParameter< Type >::~cmdLineParameter( void ) { cmdLineCleanUp( &value ); }
template< class Type > cmdLineParameter< Type >::cmdLineParameter( const char *name ) : cmdLineReadable( name ){ value = cmdLineInitialize< Type >(); }
template< class Type > cmdLineParameter< Type >::cmdLineParameter( const char *name , Type v ) : cmdLineReadable( name ){ value = cmdLineCopy< Type >( v ); }
template< class Type >
int cmdLineParameter< Type >::read( char** argv , int argc )
{
if( argc>0 )
{
cmdLineCleanUp< Type >( &value ) , value = cmdLineStringToType< Type >( argv[0] );
set = true;
return 1;
}
else return 0;
}
template< class Type >
void cmdLineParameter< Type >::writeValue( char* str ) const { cmdLineWriteValue< Type >( value , str ); }
///////////////////////////
// cmdLineParameterArray //
///////////////////////////
template< class Type , int Dim >
cmdLineParameterArray< Type , Dim >::cmdLineParameterArray( const char *name , const Type* v ) : cmdLineReadable( name )
{
if( v ) for( int i=0 ; i<Dim ; i++ ) values[i] = cmdLineCopy< Type >( v[i] );
else for( int i=0 ; i<Dim ; i++ ) values[i] = cmdLineInitialize< Type >();
}
template< class Type , int Dim >
cmdLineParameterArray< Type , Dim >::~cmdLineParameterArray( void ){ for( int i=0 ; i<Dim ; i++ ) cmdLineCleanUp< Type >( values+i ); }
template< class Type , int Dim >
int cmdLineParameterArray< Type , Dim >::read( char** argv , int argc )
{
if( argc>=Dim )
{
for( int i=0 ; i<Dim ; i++ ) cmdLineCleanUp< Type >( values+i ) , values[i] = cmdLineStringToType< Type >( argv[i] );
set = true;
return Dim;
}
else return 0;
}
template< class Type , int Dim >
void cmdLineParameterArray< Type , Dim >::writeValue( char* str ) const
{
char* temp=str;
for( int i=0 ; i<Dim ; i++ )
{
cmdLineWriteValue< Type >( values[i] , temp );
temp = str+strlen( str );
}
}
///////////////////////
// cmdLineParameters //
///////////////////////
template< class Type >
cmdLineParameters< Type >::cmdLineParameters( const char* name ) : cmdLineReadable( name ) , values(NULL) , count(0) { }
template< class Type >
cmdLineParameters< Type >::~cmdLineParameters( void )
{
if( values ) delete[] values;
values = NULL;
count = 0;
}
template< class Type >
int cmdLineParameters< Type >::read( char** argv , int argc )
{
if( values ) delete[] values;
values = NULL;
if( argc>0 )
{
count = atoi(argv[0]);
if( count <= 0 || argc <= count ) return 1;
values = new Type[count];
if( !values ) return 0;
for( int i=0 ; i<count ; i++ ) values[i] = cmdLineStringToType< Type >( argv[i+1] );
set = true;
return count+1;
}
else return 0;
}
template< class Type >
void cmdLineParameters< Type >::writeValue( char* str ) const
{
char* temp=str;
for( int i=0 ; i<count ; i++ )
{
cmdLineWriteValue< Type >( values[i] , temp );
temp = str+strlen( str );
}
}
inline char* FileExtension( char* fileName )
{
char* temp = fileName;
for( unsigned int i=0 ; i<strlen(fileName) ; i++ ) if( fileName[i]=='.' ) temp = &fileName[i+1];
return temp;
}
inline char* GetFileExtension( const char* fileName )
{
char* fileNameCopy;
char* ext=NULL;
char* temp;
fileNameCopy=new char[strlen(fileName)+1];
assert(fileNameCopy);
strcpy(fileNameCopy,fileName);
temp=strtok(fileNameCopy,".");
while(temp!=NULL)
{
if(ext!=NULL){delete[] ext;}
ext=new char[strlen(temp)+1];
assert(ext);
strcpy(ext,temp);
temp=strtok(NULL,".");
}
delete[] fileNameCopy;
return ext;
}
inline char* GetLocalFileName( const char* fileName )
{
char* fileNameCopy;
char* name=NULL;
char* temp;
fileNameCopy=new char[strlen(fileName)+1];
assert(fileNameCopy);
strcpy(fileNameCopy,fileName);
temp=strtok(fileNameCopy,"\\");
while(temp!=NULL){
if(name!=NULL){delete[] name;}
name=new char[strlen(temp)+1];
assert(name);
strcpy(name,temp);
temp=strtok(NULL,"\\");
}
delete[] fileNameCopy;
return name;
}
inline char* LocalFileName( char* fileName )
{
char* temp = fileName;
for( int i=0 ; i<(int)strlen(fileName) ; i++ ) if( fileName[i] =='\\' ) temp = &fileName[i+1];
return temp;
}
inline char* DirectoryName( char* fileName )
{
for( int i=int( strlen(fileName) )-1 ; i>=0 ; i-- )
if( fileName[i] =='\\' )
{
fileName[i] = 0;
return fileName;
}
fileName[0] = 0;
return fileName;
}
inline void cmdLineParse( int argc , char **argv , cmdLineReadable** params )
{
while( argc>0 )
{
if( argv[0][0]=='-' )
{
cmdLineReadable* readable=NULL;
for( int i=0 ; params[i]!=NULL && readable==NULL ; i++ ) if( !strcasecmp( params[i]->name , argv[0]+1 ) ) readable = params[i];
if( readable )
{
int j = readable->read( argv+1 , argc-1 );
argv += j , argc -= j;
}
else
{
fprintf( stderr , "[WARNING] Invalid option: %s\n" , argv[0] );
for( int i=0 ; params[i]!=NULL ; i++ ) printf( "\t-%s\n" , params[i]->name );
}
}
else fprintf( stderr , "[WARNING] Parameter name should be of the form -<name>: %s\n" , argv[0] );
++argv , --argc;
}
}
inline char** ReadWords(const char* fileName,int& cnt)
{
char** names;
char temp[500];
FILE* fp;
fp=fopen(fileName,"r");
if(!fp){return NULL;}
cnt=0;
while(fscanf(fp," %s ",temp)==1){cnt++;}
fclose(fp);
names=new char*[cnt];
if(!names){return NULL;}
fp=fopen(fileName,"r");
if(!fp){
delete[] names;
cnt=0;
return NULL;
}
cnt=0;
while(fscanf(fp," %s ",temp)==1){
names[cnt]=new char[strlen(temp)+1];
if(!names){
for(int j=0;j<cnt;j++){delete[] names[j];}
delete[] names;
cnt=0;
fclose(fp);
return NULL;
}
strcpy(names[cnt],temp);
cnt++;
}
fclose(fp);
return names;
}

Wyświetl plik

@ -0,0 +1,454 @@
/******************************************************************************
* Copyright (c) 2015, Peter J. Gadomski <pete.gadomski@gmail.com>
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
****************************************************************************/
// Modified to not cast to double and to use certain type identifier ("float" vs "float32")
#include "FloatPlyReader.hpp"
#include <sstream>
#include <pdal/PDALUtils.hpp>
#include <pdal/PointView.hpp>
#include <pdal/util/IStream.hpp>
namespace pdal
{
FloatPlyReader::FloatPlyReader() : m_vertexElt(nullptr)
{}
std::string FloatPlyReader::readLine()
{
m_line.clear();
if (m_lines.size())
{
m_line = m_lines.top();
m_lines.pop();
}
else
{
do
{
std::getline(*m_stream, m_line);
} while (m_line.empty() && m_stream->good());
}
Utils::trimTrailing(m_line);
m_linePos = Utils::extract(m_line, 0,
[](char c){ return !std::isspace(c); });
return std::string(m_line, 0, m_linePos);
}
void FloatPlyReader::pushLine()
{
m_lines.push(m_line);
}
std::string FloatPlyReader::nextWord()
{
std::string s;
std::string::size_type cnt = Utils::extractSpaces(m_line, m_linePos);
m_linePos += cnt;
if (m_linePos == m_line.size())
return s;
cnt = Utils::extract(m_line, m_linePos,
[](char c){ return !std::isspace(c); });
s = std::string(m_line, m_linePos, cnt);
m_linePos += cnt;
return s;
}
void FloatPlyReader::extractMagic()
{
std::string first = readLine();
if (first != "ply")
throwError("File isn't a PLY file. 'ply' not found.");
if (m_linePos != m_line.size())
throwError("Text found following 'ply' keyword.");
}
void FloatPlyReader::extractEnd()
{
std::string first = readLine();
if (first != "end_header")
throwError("'end_header' expected but found line beginning with '" +
first + "' instead.");
if (m_linePos != m_line.size())
throwError("Text found following 'end_header' keyword.");
}
void FloatPlyReader::extractFormat()
{
std::string word = readLine();
if (word != "format")
throwError("Expected format line not found in PLY file.");
word = nextWord();
if (word == "ascii")
m_format = Format::Ascii;
else if (word == "binary_big_endian")
m_format = Format::BinaryBe;
else if (word == "binary_little_endian")
m_format = Format::BinaryLe;
else
throwError("Unrecognized PLY format: '" + word + "'.");
word = nextWord();
if (word != "1.0")
throwError("Unsupported PLY version: '" + word + "'.");
}
Dimension::Type FloatPlyReader::getType(const std::string& name)
{
static std::map<std::string, Dimension::Type> types =
{
{ "int8", Dimension::Type::Signed8 },
{ "uint8", Dimension::Type::Unsigned8 },
{ "int16", Dimension::Type::Signed16 },
{ "uint16", Dimension::Type::Unsigned16 },
{ "int32", Dimension::Type::Signed32 },
{ "uint32", Dimension::Type::Unsigned32 },
{ "float32", Dimension::Type::Float },
{ "float64", Dimension::Type::Double },
{ "char", Dimension::Type::Signed8 },
{ "uchar", Dimension::Type::Unsigned8 },
{ "short", Dimension::Type::Signed16 },
{ "ushort", Dimension::Type::Unsigned16 },
{ "int", Dimension::Type::Signed32 },
{ "uint", Dimension::Type::Unsigned32 },
{ "float", Dimension::Type::Float },
{ "double", Dimension::Type::Double }
};
try
{
return types.at(name);
}
catch (std::out_of_range&)
{}
return Dimension::Type::None;
}
void FloatPlyReader::extractProperty(Element& element)
{
std::string word = nextWord();
Dimension::Type type = getType(word);
if (type != Dimension::Type::None)
{
std::string name = nextWord();
if (name.empty())
throwError("No name for property of element '" +
element.m_name + "'.");
element.m_properties.push_back(
std::unique_ptr<Property>(new SimpleProperty(name, type)));
}
else if (word == "list")
{
if (element.m_name == "vertex")
throwError("List properties are not supported for the 'vertex' "
"element.");
word = nextWord();
Dimension::Type countType = getType(word);
if (countType == Dimension::Type::None)
throwError("No valid count type for list property of element '" +
element.m_name + "'.");
word = nextWord();
Dimension::Type listType = getType(word);
if (listType == Dimension::Type::None)
throwError("No valid list type for list property of element '" +
element.m_name + "'.");
std::string name = nextWord();
if (name.empty())
throwError("No name for property of element '" +
element.m_name + "'.");
element.m_properties.push_back(
std::unique_ptr<Property>(new ListProperty(name, countType,
listType)));
}
else
throwError("Invalid property type '" + word + "'.");
}
void FloatPlyReader::extractProperties(Element& element)
{
while (true)
{
std::string word = readLine();
if (word == "comment" || word == "obj_info")
continue;
else if (word == "property")
extractProperty(element);
else
{
pushLine();
break;
}
}
}
bool FloatPlyReader::extractElement()
{
std::string word = readLine();
if (word == "comment" || word == "obj_info")
return true;
else if (word == "end_header")
{
pushLine();
return false;
}
else if (word == "element")
{
std::string name = nextWord();
if (name.empty())
throwError("Missing element name.");
long count = std::stol(nextWord());
if (count < 0)
throwError("Invalid count for element '" + name + "'.");
m_elements.emplace_back(name, count);
extractProperties(m_elements.back());
return true;
}
throwError("Invalid keyword '" + word + "' when expecting an element.");
return false; // quiet compiler
}
void FloatPlyReader::extractHeader()
{
m_elements.clear();
extractMagic();
extractFormat();
while (extractElement())
;
extractEnd();
m_dataPos = m_stream->tellg();
for (Element& elt : m_elements)
if (elt.m_name == "vertex")
m_vertexElt = &elt;
if (!m_vertexElt)
throwError("Can't read PLY file without a 'vertex' element.");
}
std::string FloatPlyReader::getName() const
{
return "FloatPlyReader";
}
void FloatPlyReader::initialize()
{
m_stream = Utils::openFile(m_filename, true);
if (!m_stream)
throwError("Couldn't open '" + m_filename + "'.");
extractHeader();
Utils::closeFile(m_stream);
m_stream = nullptr;
}
void FloatPlyReader::addDimensions(PointLayoutPtr layout)
{
// Override XYZ
// layout->registerDim(Dimension::Id::X);
// layout->registerDim(Dimension::Id::Y);
// layout->registerDim(Dimension::Id::Z);
for (auto& elt : m_elements)
{
if (elt.m_name == "vertex")
{
for (auto& prop : elt.m_properties)
{
auto vprop = static_cast<SimpleProperty *>(prop.get());
layout->registerOrAssignDim(vprop->m_name, vprop->m_type);
vprop->setDim(
layout->registerOrAssignDim(vprop->m_name, vprop->m_type));
}
return;
}
}
throwError("No 'vertex' element in header.");
}
bool FloatPlyReader::readProperty(Property *prop, PointRef& point)
{
if (!m_stream->good())
return false;
prop->read(m_stream, m_format, point);
return true;
}
void FloatPlyReader::SimpleProperty::read(std::istream *stream,
FloatPlyReader::Format format, PointRef& point)
{
if (format == Format::Ascii)
{
double d;
*stream >> d;
point.setField(m_dim, d);
}
else if (format == Format::BinaryLe)
{
ILeStream in(stream);
Everything e = Utils::extractDim(in, m_type);
point.setField(m_dim, m_type, &e);
}
else if (format == Format::BinaryBe)
{
IBeStream in(stream);
Everything e = Utils::extractDim(in, m_type);
point.setField(m_dim, m_type, &e);
}
}
// Right now we don't support list properties for point data. We just
// read the data and throw it away.
void FloatPlyReader::ListProperty::read(std::istream *stream,
FloatPlyReader::Format format, PointRef& point)
{
if (format == Format::Ascii)
{
size_t cnt;
*stream >> cnt;
double d;
while (cnt--)
*stream >> d;
}
else if (format == Format::BinaryLe)
{
ILeStream istream(stream);
Everything e = Utils::extractDim(istream, m_countType);
size_t cnt = (size_t)Utils::toDouble(e, m_countType);
cnt *= Dimension::size(m_listType);
istream.seek(cnt, std::ios_base::cur);
}
else if (format == Format::BinaryBe)
{
IBeStream istream(stream);
Everything e = Utils::extractDim(istream, m_countType);
size_t cnt = (size_t)Utils::toDouble(e, m_countType);
cnt *= Dimension::size(m_listType);
istream.seek(cnt, std::ios_base::cur);
}
}
void FloatPlyReader::readElement(Element& elt, PointRef& point)
{
for (auto& prop : elt.m_properties)
if (!readProperty(prop.get(), point))
throwError("Error reading data for point/element " +
std::to_string(point.pointId()) + ".");
}
// Open the input and skip any elements that precede the vertex element,
// leaving the stream positioned at the first vertex.
void FloatPlyReader::ready(PointTableRef table)
{
    m_stream = Utils::openFile(m_filename, true);
    if (m_stream)
        m_stream->seekg(m_dataPos);  // m_dataPos was recorded after header parsing
    for (Element& elt : m_elements)
    {
        if (&elt == m_vertexElt)
            break;
        // We read an element into point 0. Since the element's properties
        // weren't registered as dimensions, we'll try to write the data
        // to a NULL dimension, which is a noop.
        // This essentially just gets us to the vertex element.
        // In binary mode, this is all silliness, since we should be able
        // to seek right where we want to go, but in text mode, you've got
        // to go through the data.
        PointRef point(table, 0);
        for (PointId idx = 0; idx < elt.m_count; ++idx)
            readElement(elt, point);
    }
    m_index = 0;  // next vertex to hand out via processOne()
}
// Produce the next vertex, if any remain; false signals end of data.
bool FloatPlyReader::processOne(PointRef& point)
{
    if (m_index >= m_vertexElt->m_count)
        return false;
    readElement(*m_vertexElt, point);
    ++m_index;
    return true;
}
// Bulk read: fill the view with up to 'num' vertices.  Only the vertex
// element is exposed as points.
point_count_t FloatPlyReader::read(PointViewPtr view, point_count_t num)
{
    point_count_t numRead = 0;
    PointRef point(view->point(0));
    PointId idx = 0;
    while (idx < m_vertexElt->m_count && idx < num)
    {
        point.setPointId(idx);
        processOne(point);
        ++numRead;
        ++idx;
    }
    return numRead;
}
// Release the input stream when the read pass completes.
void FloatPlyReader::done(PointTableRef table)
{
    Utils::closeFile(m_stream);
}
} // namespace pdal

Wyświetl plik

@ -0,0 +1,156 @@
/******************************************************************************
* Copyright (c) 2015, Peter J. Gadomski <pete.gadomski@gmail.com>
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
****************************************************************************/
#pragma once
#include <pdal/io/PlyReader.hpp>
#include <pdal/PointTable.hpp>
#include <pdal/PointView.hpp>
#include <pdal/Options.hpp>
#include <pdal/Filter.hpp>
#include <stack>
#include <pdal/Dimension.hpp>
#include <pdal/Reader.hpp>
#include <pdal/StageFactory.hpp>
namespace pdal
{
// PLY point-cloud reader.  Parses the PLY header, registers each scalar
// vertex property as a PDAL dimension, and streams the vertex element as
// points.  List properties are read and discarded.
class PDAL_DLL FloatPlyReader : public Reader
{
public:
    std::string getName() const;

    typedef std::map<std::string, Dimension::Id> DimensionMap;

    FloatPlyReader();

private:
    // Storage modes a PLY file may declare in its header.
    enum class Format
    {
        Ascii,
        BinaryLe,
        BinaryBe
    };

    // Base class for a single PLY property of an element.
    struct Property
    {
        Property(const std::string& name) : m_name(name)
        {}
        virtual ~Property()
        {}

        std::string m_name;

        // Associate this property with a PDAL dimension (no-op by default).
        virtual void setDim(Dimension::Id id)
        {}
        // Consume this property's value(s) from the stream for one point.
        virtual void read(std::istream *stream, FloatPlyReader::Format format,
            PointRef& point) = 0;
    };

    // Scalar property mapped to one PDAL dimension.
    struct SimpleProperty : public Property
    {
        SimpleProperty(const std::string& name, Dimension::Type type) :
            Property(name), m_type(type), m_dim(Dimension::Id::Unknown)
        {}

        Dimension::Type m_type;
        Dimension::Id m_dim;

        virtual void read(std::istream *stream, FloatPlyReader::Format format,
            PointRef& point) override;
        virtual void setDim(Dimension::Id id) override
        { m_dim = id; }
    };

    // List property; data is skipped since lists aren't supported as dims.
    struct ListProperty : public Property
    {
        ListProperty(const std::string& name, Dimension::Type countType,
            Dimension::Type listType) : Property(name), m_countType(countType),
            m_listType(listType)
        {}

        Dimension::Type m_countType;
        Dimension::Type m_listType;

        virtual void read(std::istream *stream, FloatPlyReader::Format format,
            PointRef& point) override;
    };

    // One PLY element (e.g. "vertex", "face") with its property list.
    struct Element
    {
        Element(const std::string name, size_t count) :
            m_name(name), m_count(count)
        {}

        std::string m_name;
        size_t m_count;
        std::vector<std::unique_ptr<Property>> m_properties;
    };

    Format m_format;                    // storage mode from the header
    std::string m_line;                 // current header line being parsed
    std::string::size_type m_linePos;   // cursor within m_line
    std::stack<std::string> m_lines;    // pushed-back header lines
    std::istream *m_stream;             // input stream
    std::istream::streampos m_dataPos;  // offset where point data begins
    std::vector<Element> m_elements;    // all elements from the header
    PointId m_index;                    // index of the next vertex to read
    Element *m_vertexElt;               // pointer to the "vertex" element

    // Stage lifecycle overrides.
    virtual void initialize();
    virtual void addDimensions(PointLayoutPtr layout);
    virtual void ready(PointTableRef table);
    virtual point_count_t read(PointViewPtr view, point_count_t num);
    virtual void done(PointTableRef table);
    virtual bool processOne(PointRef& point);

    // Header-parsing helpers.
    std::string readLine();
    void pushLine();
    std::string nextWord();
    void extractMagic();
    void extractEnd();
    void extractFormat();
    Dimension::Type getType(const std::string& name);
    void extractProperty(Element& element);
    void extractProperties(Element& element);
    bool extractElement();
    void extractHeader();
    // Data-reading helpers.
    void readElement(Element& elt, PointRef& point);
    bool readProperty(Property *prop, PointRef& point);
};
} // namespace pdal

Wyświetl plik

@ -0,0 +1,33 @@
#include <cstdio>
#include <cstdarg>
#include "CmdLineParser.h"
// Minimal printf-style logger.  Optionally appends each message to a file
// and/or echoes it to stdout; both sinks are disabled by default.
struct Logger{
    bool verbose;            // when true, also print to stdout
    const char* outputFile;  // when non-NULL, append messages to this path

    Logger(){
        this->verbose = false;
        this->outputFile = NULL;
    }

    // Log a printf-formatted message to the configured sinks.
    void operator() ( const char* format , ... )
    {
        if( outputFile )
        {
            FILE* fp = fopen( outputFile , "a" );
            // Fix: fopen can fail (bad path, permissions); previously the
            // NULL FILE* was passed straight to vfprintf and crashed.
            if( fp )
            {
                va_list args;
                va_start( args , format );
                vfprintf( fp , format , args );
                va_end( args );
                fclose( fp );
            }
        }
        if( verbose )
        {
            va_list args;
            va_start( args , format );
            vprintf( format , args );
            va_end( args );
        }
    }
};

Wyświetl plik

@ -0,0 +1,276 @@
/******************************************************************************
* Copyright (c) 2015, Peter J. Gadomski <pete.gadomski@gmail.com>
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
****************************************************************************/
// Modified to output certain property names in normalized format ("nx", "ny", ... instead of "normalx", "normaly", etc.)
#include "ModifiedPlyWriter.hpp"
#include <limits>
#include <sstream>
#include <pdal/util/OStream.hpp>
#include <pdal/util/ProgramArgs.hpp>
namespace pdal
{
// Stage name reported to the PDAL pipeline machinery.
std::string ModifiedPlyWriter::getName() const { return "ModifiedPlyWriter"; }

ModifiedPlyWriter::ModifiedPlyWriter()
{}
// Register the writer's pipeline/command-line options.
void ModifiedPlyWriter::addArgs(ProgramArgs& args)
{
    args.add("filename", "Output filename", m_filename).setPositional();
    args.add("storage_mode", "PLY Storage Mode", m_format, Format::Ascii);
    args.add("dims", "Dimension names", m_dimNames);
    args.add("faces", "Write faces", m_faces);
    // Keep the Arg handle so prepared() can tell whether 'precision'
    // was explicitly set by the user.
    m_precisionArg = &args.add("precision", "Output precision", m_precision, 3);
}
// Validate options and resolve the dimension list once the layout is known.
void ModifiedPlyWriter::prepared(PointTableRef table)
{
    // 'precision' only makes sense for ASCII output.
    // Fix: error message was garbled ("can only be set of the").
    if (m_precisionArg->set() && m_format != Format::Ascii)
        throwError("Option 'precision' can only be set if the 'storage_mode' "
            "is ascii.");
    if (m_dimNames.size())
    {
        // User supplied an explicit dimension list; every name must exist.
        for (auto& name : m_dimNames)
        {
            auto id = table.layout()->findDim(name);
            if (id == Dimension::Id::Unknown)
                throwError("Unknown dimension '" + name + "' in provided "
                    "dimension list.");
            m_dims.push_back(id);
        }
    }
    else
    {
        // Default: write every layout dimension, with lower-cased names.
        m_dims = table.layout()->dims();
        for (auto dim : m_dims)
            m_dimNames.push_back(Utils::tolower(table.layout()->dimName(dim)));
    }
}
std::string ModifiedPlyWriter::getType(Dimension::Type type) const
{
static std::map<Dimension::Type, std::string> types =
{
{ Dimension::Type::Signed8, "char" },
{ Dimension::Type::Unsigned8, "uchar" },
{ Dimension::Type::Signed16, "short" },
{ Dimension::Type::Unsigned16, "ushort" },
{ Dimension::Type::Signed32, "int" },
{ Dimension::Type::Unsigned32, "uint" },
{ Dimension::Type::Float, "float" },
{ Dimension::Type::Double, "double" }
};
try
{
return types.at(type);
}
catch (std::out_of_range&)
{
throwError("Can't write dimension of type '" +
Dimension::interpretationName(type) + "'.");
}
return "";
}
// Emit the PLY header, normalizing certain property names on the way out
// ("normalx" -> "nx", "diffuse_red" -> "red", ...).
void ModifiedPlyWriter::writeHeader(PointLayoutPtr layout) const
{
    *m_stream << "ply" << std::endl;
    *m_stream << "format " << m_format << " 1.0" << std::endl;
    *m_stream << "comment Generated by odm_filterpoints" << std::endl;
    *m_stream << "element vertex " << pointCount() << std::endl;

    // Aliases rewritten to their canonical PLY property names.
    static const std::map<std::string, std::string> aliases =
    {
        { "normalx", "nx" }, { "normal_x", "nx" },
        { "normaly", "ny" }, { "normal_y", "ny" },
        { "normalz", "nz" }, { "normal_z", "nz" },
        { "diffuse_red", "red" },
        { "diffuse_green", "green" },
        { "diffuse_blue", "blue" }
    };

    auto ni = m_dimNames.begin();
    for (auto dim : m_dims)
    {
        std::string name = *ni++;
        std::string typeString = getType(layout->dimType(dim));
        auto alias = aliases.find(name);
        if (alias != aliases.end())
            name = alias->second;
        *m_stream << "property " << typeString << " " << name << std::endl;
    }
    if (m_faces)
    {
        *m_stream << "element face " << faceCount() << std::endl;
        *m_stream << "property list uchar uint vertex_indices" << std::endl;
    }
    *m_stream << "end_header" << std::endl;
}
// Open the output file and write the header.  Point count is capped at
// uint32 max because face vertex indices are written as uint.
void ModifiedPlyWriter::ready(PointTableRef table)
{
    if (pointCount() > (std::numeric_limits<uint32_t>::max)())
        throwError("Can't write PLY file. Only " +
            std::to_string((std::numeric_limits<uint32_t>::max)()) +
            " points supported.");
    m_stream = Utils::createFile(m_filename, true);
    // Fixed precision is only meaningful for ASCII output.
    if (m_format == Format::Ascii && m_precisionArg->set())
    {
        *m_stream << std::fixed;
        m_stream->precision(m_precision);
    }
    writeHeader(table.layout());
}
// Buffer the view; actual output happens in done() so that points and
// faces from multiple views can be written in one pass each.
void ModifiedPlyWriter::write(const PointViewPtr data)
{
    m_views.push_back(data);
}
// Write one dimension value for a point in the configured storage mode.
void ModifiedPlyWriter::writeValue(PointRef& point, Dimension::Id dim,
    Dimension::Type type)
{
    switch (m_format)
    {
    case Format::Ascii:
        *m_stream << point.getFieldAs<double>(dim);
        break;
    case Format::BinaryLe:
    {
        OLeStream out(m_stream);
        Everything e;
        point.getField((char *)&e, dim, type);
        Utils::insertDim(out, type, e);
        break;
    }
    case Format::BinaryBe:
    {
        OBeStream out(m_stream);
        Everything e;
        point.getField((char *)&e, dim, type);
        Utils::insertDim(out, type, e);
        break;
    }
    }
}
// Write every selected dimension of one point; ASCII output separates
// values with spaces and terminates the record with a newline.
void ModifiedPlyWriter::writePoint(PointRef& point, PointLayoutPtr layout)
{
    const bool ascii = (m_format == Format::Ascii);
    const size_t numDims = m_dims.size();
    for (size_t i = 0; i < numDims; ++i)
    {
        Dimension::Id dim = m_dims[i];
        writeValue(point, dim, layout->dimType(dim));
        if (ascii && i + 1 < numDims)
            *m_stream << " ";
    }
    if (ascii)
        *m_stream << std::endl;
}
void ModifiedPlyWriter::writeTriangle(const Triangle& t, size_t offset)
{
if (m_format == Format::Ascii)
{
*m_stream << "3 " << (t.m_a + offset) << " " <<
(t.m_b + offset) << " " << (t.m_c + offset) << std::endl;
}
else if (m_format == Format::BinaryLe)
{
OLeStream out(m_stream);
unsigned char count = 3;
uint32_t a = (uint32_t)(t.m_a + offset);
uint32_t b = (uint32_t)(t.m_b + offset);
uint32_t c = (uint32_t)(t.m_c + offset);
out << count << a << b << c;
}
else if (m_format == Format::BinaryBe)
{
OBeStream out(m_stream);
unsigned char count = 3;
uint32_t a = (uint32_t)(t.m_a + offset);
uint32_t b = (uint32_t)(t.m_b + offset);
uint32_t c = (uint32_t)(t.m_c + offset);
out << count << a << b << c;
}
}
// Deferring write until this time allows both points and faces from multiple
// point views to be written.
void ModifiedPlyWriter::done(PointTableRef table)
{
    // Pass 1: all points, view by view.
    for (auto& v : m_views)
    {
        PointRef point(*v, 0);
        for (PointId idx = 0; idx < v->size(); ++idx)
        {
            point.setPointId(idx);
            writePoint(point, table.layout());
        }
    }
    // Pass 2: faces, with indices offset by the points of preceding views.
    if (m_faces)
    {
        PointId offset = 0;
        for (auto& v : m_views)
        {
            TriangularMesh *mesh = v->mesh();
            if (mesh)
            {
                for (size_t id = 0; id < mesh->size(); ++id)
                {
                    const Triangle& t = (*mesh)[id];
                    writeTriangle(t, offset);
                }
            }
            offset += v->size();
        }
    }
    Utils::closeFile(m_stream);
    m_stream = nullptr;
    getMetadata().addList("filename", m_filename);
}
} // namespace pdal

Wyświetl plik

@ -0,0 +1,116 @@
/******************************************************************************
* Copyright (c) 2015, Peter J. Gadomski <pete.gadomski@gmail.com>
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
****************************************************************************/
#include <pdal/PointView.hpp>
#include <pdal/Writer.hpp>
namespace pdal
{
class Triangle;
// PLY writer that normalizes certain property names on output
// (e.g. "normalx" -> "nx"); otherwise mirrors PDAL's stock PLY writer.
class PDAL_DLL ModifiedPlyWriter : public Writer
{
public:
    // Supported PLY storage modes.
    enum class Format
    {
        Ascii,
        BinaryLe,
        BinaryBe
    };

    std::string getName() const;
    ModifiedPlyWriter();

private:
    // Writer stage lifecycle overrides.
    virtual void addArgs(ProgramArgs& args);
    virtual void prepared(PointTableRef table);
    virtual void ready(PointTableRef table);
    virtual void write(const PointViewPtr data);
    virtual void done(PointTableRef table);

    // Output helpers.
    std::string getType(Dimension::Type type) const;
    void writeHeader(PointLayoutPtr layout) const;
    void writeValue(PointRef& point, Dimension::Id dim, Dimension::Type type);
    void writePoint(PointRef& point, PointLayoutPtr layout);
    void writeTriangle(const Triangle& t, size_t offset);

    std::ostream *m_stream;          // output stream
    std::string m_filename;          // output path
    Format m_format;                 // storage mode ('storage_mode' option)
    bool m_faces;                    // whether to emit the face element
    StringList m_dimNames;           // dimension names to write
    Dimension::IdList m_dims;        // resolved dimension ids
    int m_precision;                 // ASCII output precision
    Arg *m_precisionArg;             // handle to detect explicit 'precision'
    std::vector<PointViewPtr> m_views;  // buffered views; written in done()
};
// Parse a storage-mode name (as used by the 'storage_mode' option) into a
// Format value; sets failbit on unrecognized input.
inline std::istream& operator>>(std::istream& in, ModifiedPlyWriter::Format& f)
{
    std::string token;
    std::getline(in, token);
    Utils::trim(token);
    Utils::tolower(token);
    if (token == "ascii" || token == "default")
        f = ModifiedPlyWriter::Format::Ascii;
    else if (token == "little endian" || token == "binary_little_endian")
        f = ModifiedPlyWriter::Format::BinaryLe;
    else if (token == "big endian" || token == "binary_big_endian")
        f = ModifiedPlyWriter::Format::BinaryBe;
    else
        in.setstate(std::ios_base::failbit);
    return in;
}
// Render a Format value as the keyword used in the PLY header.
inline std::ostream& operator<<(std::ostream& out, const ModifiedPlyWriter::Format& f)
{
    if (f == ModifiedPlyWriter::Format::Ascii)
        out << "ascii";
    else if (f == ModifiedPlyWriter::Format::BinaryLe)
        out << "binary_little_endian";
    else if (f == ModifiedPlyWriter::Format::BinaryBe)
        out << "binary_big_endian";
    return out;
}
}

Wyświetl plik

@ -0,0 +1,95 @@
#include <iostream>
#include <pdal/filters/OutlierFilter.hpp>
#include <pdal/filters/RangeFilter.hpp>
#include "CmdLineParser.h"
#include "Logger.h"
#include "FloatPlyReader.hpp"
#include "ModifiedPlyWriter.hpp"
Logger logWriter;
cmdLineParameter< char* >
InputFile( "inputFile" ) ,
OutputFile( "outputFile" );
cmdLineParameter< float >
StandardDeviation( "sd" ) ,
MeanK ( "meank" );
cmdLineReadable
Verbose( "verbose" );
cmdLineReadable* params[] = {
&InputFile , &OutputFile , &StandardDeviation, &MeanK, &Verbose ,
NULL
};
// Print usage (built from the global parameter descriptors) and exit
// with failure.
void help(char *ex){
    std::cout << "Usage: " << ex << std::endl
        << "\t -" << InputFile.name << " <input PLY point cloud>" << std::endl
        << "\t -" << OutputFile.name << " <output PLY point cloud>" << std::endl
        << "\t [-" << StandardDeviation.name << " <standard deviation threshold>]" << std::endl
        << "\t [-" << MeanK.name << " <mean number of neighbors >]" << std::endl
        << "\t [-" << Verbose.name << "]" << std::endl;
    exit(EXIT_FAILURE);
}
// Echo every parameter that was set on the command line to the logger.
void logArgs(cmdLineReadable* params[], Logger& logWriter){
    logWriter("Running with parameters:\n");
    // NOTE(review): assumes writeValue() never emits more than 1024 bytes
    // for any parameter value - confirm against CmdLineParser.
    char str[1024];
    for( int i=0 ; params[i] ; i++ ){
        if( params[i]->set ){
            params[i]->writeValue( str );
            if( strlen( str ) ) logWriter( "\t--%s %s\n" , params[i]->name , str );
            else logWriter( "\t--%s\n" , params[i]->name );
        }
    }
}
// Entry point: read a PLY point cloud, run PDAL's statistical outlier
// filter, drop the flagged points, and write the result as binary
// little-endian PLY.
int main(int argc, char **argv) {
    cmdLineParse( argc-1 , &argv[1] , params );

    // Input and output paths are mandatory; the rest have defaults.
    if( !InputFile.set || !OutputFile.set ) help(argv[0]);
    if( !StandardDeviation.set ) StandardDeviation.value = 2.0;
    if( !MeanK.set ) MeanK.value = 8;

    logWriter.verbose = Verbose.set;
    logArgs(params, logWriter);
    logWriter("Filtering point cloud...\n");

    pdal::Options inPlyOpts;
    inPlyOpts.add("filename", InputFile.value);

    pdal::PointTable table;
    pdal::FloatPlyReader plyReader;
    plyReader.setOptions(inPlyOpts);

    // Statistical outlier detection based on mean distance to neighbors.
    pdal::Options outlierOpts;
    outlierOpts.add("method", "statistical");
    outlierOpts.add("mean_k", MeanK.value);
    outlierOpts.add("multiplier", StandardDeviation.value);

    pdal::OutlierFilter outlierFilter;
    outlierFilter.setInput(plyReader);
    outlierFilter.setOptions(outlierOpts);

    // NOTE(review): presumably filters.outlier tags outliers with
    // classification 7, which this range filter then excludes - confirm
    // against PDAL docs.
    pdal::Options rangeOpts;
    rangeOpts.add("limits", "Classification![7:7]"); // Remove outliers

    pdal::RangeFilter rangeFilter;
    rangeFilter.setInput(outlierFilter);
    rangeFilter.setOptions(rangeOpts);

    pdal::Options outPlyOpts;
    outPlyOpts.add("storage_mode", "little endian");
    outPlyOpts.add("filename", OutputFile.value);

    pdal::ModifiedPlyWriter plyWriter;
    plyWriter.setOptions(outPlyOpts);
    plyWriter.setInput(rangeFilter);
    plyWriter.prepare(table);
    plyWriter.execute(table);

    logWriter("Done!\n");
}

Wyświetl plik

@ -244,6 +244,7 @@ Georef::Georef() : log_(false)
transformFilename_ = "";
exportCoordinateFile_ = false;
exportGeorefSystem_ = false;
useTransform_ = false;
}
Georef::~Georef()
@ -972,7 +973,7 @@ void Georef::createGeoreferencedModelFromGCPData()
void Georef::createGeoreferencedModelFromExifData()
{
readCameras();
// Read coords from coord file generated by extract_utm tool
std::ifstream coordStream(inputCoordFilename_.c_str());
if (!coordStream.good())
@ -1330,8 +1331,13 @@ void Georef::performFinalTransform(Mat4 &transMat, pcl::TextureMesh &mesh, pcl::
double transX = static_cast<double>(transMat.r1c4_);
double transY = static_cast<double>(transMat.r2c4_);
transform(0, 3) = static_cast<double>(0.0f);
transform(1, 3) = static_cast<double>(0.0f);
if (addUTM){
transform(0, 3) = transX;
transform(1, 3) = transY;
}else{
transform(0, 3) = 0.0f;
transform(1, 3) = 0.0f;
}
transform(2, 3) = static_cast<double>(transMat.r3c4_);
transform(3, 3) = static_cast<double>(transMat.r4c4_);
@ -1367,20 +1373,17 @@ void Georef::performFinalTransform(Mat4 &transMat, pcl::TextureMesh &mesh, pcl::
log_ << "Successfully saved model.\n";
}
transform(0, 3) = transX;
transform(1, 3) = transY;
// GCPs and EXIF modes includes a translation
// but not UTM offsets. We want our point cloud
// and odm_georeferencing_model_geo.txt file
// to include the UTM offset.
// OpenSfM already has UTM offsets
if (addUTM){
georefSystem_.eastingOffset_ += transX;
georefSystem_.northingOffset_ += transY;
transform(0, 3) = georefSystem_.eastingOffset_;
transform(1, 3) = georefSystem_.northingOffset_;
transform(0, 3) = georefSystem_.eastingOffset_ + transX;
transform(1, 3) = georefSystem_.northingOffset_ + transY;
}else{
transform(0, 3) = transX;
transform(1, 3) = transY;
}
printFinalTransform(transform);

Wyświetl plik

@ -53,12 +53,6 @@ def config():
help='resizes images by the largest side for opensfm. '
'Set to -1 to disable. Default: %(default)s')
parser.add_argument('--start-with', '-s',
metavar='<string>',
default='resize',
choices=processopts,
help=('Can be one of: ' + ' | '.join(processopts)))
parser.add_argument('--end-with', '-e',
metavar='<string>',
default='odm_orthophoto',
@ -91,21 +85,10 @@ def config():
metavar='<string>',
help='Path to config file for orb-slam')
parser.add_argument('--force-focal',
metavar='<positive float>',
type=float,
help=('Override the focal length information for the '
'images'))
parser.add_argument('--proj',
metavar='<PROJ4 string>',
help='Projection used to transform the model into geographic coordinates')
parser.add_argument('--force-ccd',
metavar='<positive float>',
type=float,
help='Override the ccd width information for the images')
parser.add_argument('--min-num-features',
metavar='<integer>',
default=8000,
@ -272,13 +255,10 @@ def config():
'Default: %(default)s'))
parser.add_argument('--pc-classify',
metavar='<string>',
default='none',
choices=['none', 'smrf', 'pmf'],
help='Classify the .LAS point cloud output using either '
'a Simple Morphological Filter or a Progressive Morphological Filter. '
'If --dtm is set this parameter defaults to smrf. '
'You can control the behavior of both smrf and pmf by tweaking the --dem-* parameters. '
action='store_true',
default=False,
help='Classify the point cloud outputs using a Simple Morphological Filter. '
'You can control the behavior of this option by tweaking the --dem-* parameters. '
'Default: '
'%(default)s')
@ -286,6 +266,19 @@ def config():
action='store_true',
default=False,
help='Export the georeferenced point cloud in CSV format. Default: %(default)s')
parser.add_argument('--pc-las',
action='store_true',
default=False,
help='Export the georeferenced point cloud in LAS format. Default: %(default)s')
parser.add_argument('--pc-filter',
metavar='<positive float>',
type=float,
default=2.5,
help='Filters the point cloud by removing points that deviate more than N standard deviations from the local mean. Set to 0 to disable filtering.'
'\nDefault: '
'%(default)s')
parser.add_argument('--texturing-data-term',
metavar='<string>',
@ -393,39 +386,6 @@ def config():
help='DSM/DTM resolution in cm / pixel.'
'\nDefault: %(default)s')
parser.add_argument('--dem-maxangle',
metavar='<positive float>',
type=float,
default=20,
help='Points that are more than maxangle degrees off-nadir are discarded. '
'\nDefault: '
'%(default)s')
parser.add_argument('--dem-maxsd',
metavar='<positive float>',
type=float,
default=2.5,
help='Points that deviate more than maxsd standard deviations from the local mean '
'are discarded. \nDefault: '
'%(default)s')
parser.add_argument('--dem-initial-distance',
metavar='<positive float>',
type=float,
default=0.15,
help='Used to classify ground vs non-ground points. Set this value to account for Z noise in meters. '
'If you have an uncertainty of around 15 cm, set this value large enough to not exclude these points. '
'Too small of a value will exclude valid ground points, while too large of a value will misclassify non-ground points for ground ones. '
'\nDefault: '
'%(default)s')
parser.add_argument('--dem-approximate',
action='store_true',
default=False,
help='Use this tag use the approximate progressive '
'morphological filter, which computes DEMs faster '
'but is not as accurate.')
parser.add_argument('--dem-decimation',
metavar='<positive integer>',
default=1,
@ -434,17 +394,6 @@ def config():
'100 decimates ~99%% of the points. Useful for speeding up '
'generation.\nDefault=%(default)s')
parser.add_argument('--dem-terrain-type',
metavar='<string>',
choices=['FlatNonForest', 'FlatForest', 'ComplexNonForest', 'ComplexForest'],
default='ComplexForest',
help='One of: %(choices)s. Specifies the type of terrain. This mainly helps reduce processing time. '
'\nFlatNonForest: Relatively flat region with little to no vegetation'
'\nFlatForest: Relatively flat region that is forested'
'\nComplexNonForest: Varied terrain with little to no vegetation'
'\nComplexForest: Varied terrain that is forested'
'\nDefault=%(default)s')
parser.add_argument('--orthophoto-resolution',
metavar='<float > 0.0>',
default=5,
@ -452,14 +401,6 @@ def config():
help=('Orthophoto resolution in cm / pixel.\n'
'Default: %(default)s'))
parser.add_argument('--orthophoto-target-srs',
metavar="<EPSG:XXXX>",
type=str,
default=None,
help='Target spatial reference for orthophoto creation. '
'Not implemented yet.\n'
'Default: %(default)s')
parser.add_argument('--orthophoto-no-tiled',
action='store_true',
default=False,
@ -490,11 +431,6 @@ def config():
default=False,
help='Build orthophoto overviews using gdaladdo.')
parser.add_argument('--zip-results',
action='store_true',
default=False,
help='compress the results using gunzip')
parser.add_argument('--verbose', '-v',
action='store_true',
default=False,
@ -526,12 +462,12 @@ def config():
log.ODM_INFO('Fast orthophoto is turned on, automatically setting --skip-3dmodel')
args.skip_3dmodel = True
if args.dtm and args.pc_classify == 'none':
if args.dtm and not args.pc_classify:
log.ODM_INFO("DTM is turned on, automatically turning on point cloud classification")
args.pc_classify = "smrf"
args.pc_classify = True
if args.skip_3dmodel and args.use_3dmesh:
log.ODM_WARNING('--skip-3dmodel is set, but so is --use-3dmesh. You can\'t have both!')
sys.exit(1)
log.ODM_WARNING('--skip-3dmodel is set, but so is --use-3dmesh. --use_3dmesh will be ignored.')
args.use_3dmesh = False
return args

Wyświetl plik

@ -18,7 +18,6 @@ sys.path.append(pyopencv_path)
# define opensfm path
opensfm_path = os.path.join(superbuild_path, "src/opensfm")
ccd_widths_path = os.path.join(opensfm_path, 'opensfm/data/sensor_data.json')
# define orb_slam2 path
orb_slam2_path = os.path.join(superbuild_path, "src/orb_slam2")

Wyświetl plik

@ -9,15 +9,11 @@ from functools import partial
from . import pdal
def classify(lasFile, smrf=False, slope=1, cellsize=3, maxWindowSize=10, maxDistance=1,
approximate=False, initialDistance=0.7, verbose=False):
def classify(lasFile, slope=0.15, cellsize=1, maxWindowSize=18, verbose=False):
start = datetime.now()
try:
if smrf:
pdal.run_pdaltranslate_smrf(lasFile, lasFile, slope, cellsize, maxWindowSize, verbose)
else:
pdal.run_pdalground(lasFile, lasFile, slope, cellsize, maxWindowSize, maxDistance, approximate=approximate, initialDistance=initialDistance, verbose=verbose)
pdal.run_pdaltranslate_smrf(lasFile, lasFile, slope, cellsize, maxWindowSize, verbose)
except:
raise Exception("Error creating classified file %s" % fout)
@ -60,7 +56,6 @@ def create_dems(filenames, demtype, radius=['0.56'], gapfill=False,
def create_dem(filenames, demtype, radius, decimation=None,
maxsd=None, maxz=None, maxangle=None, returnnum=None,
products=['idw'], outdir='', suffix='', verbose=False, resolution=0.1):
""" Create DEM from collection of LAS files """
start = datetime.now()
@ -75,10 +70,6 @@ def create_dem(filenames, demtype, radius, decimation=None,
# JSON pipeline
json = pdal.json_gdal_base(bname, products, radius, resolution)
# A DSM for meshing does not use additional filters
if demtype != 'mesh_dsm':
json = pdal.json_add_filters(json, maxsd, maxz, maxangle, returnnum)
if demtype == 'dsm':
json = pdal.json_add_classification_filter(json, 2, equality='max')
elif demtype == 'dtm':

Wyświetl plik

@ -102,75 +102,6 @@ def json_add_classification_filter(json, classification, equality="equals"):
return json
def json_add_maxsd_filter(json, meank=20, thresh=3.0):
    """ Add outlier Filter element and return """
    stage = {
        'type': 'filters.outlier',
        'method': 'statistical',
        'mean_k': meank,
        'multiplier': thresh,
    }
    json['pipeline'].insert(0, stage)
    return json
def json_add_maxz_filter(json, maxz):
    """ Add max elevation Filter element and return """
    stage = {'type': 'filters.range', 'limits': 'Z[:%s]' % maxz}
    json['pipeline'].insert(0, stage)
    return json
def json_add_maxangle_filter(json, maxabsangle):
    """ Add scan angle Filter element and return """
    low = str(-float(maxabsangle))
    stage = {
        'type': 'filters.range',
        'limits': 'ScanAngleRank[{}:{}]'.format(low, maxabsangle),
    }
    json['pipeline'].insert(0, stage)
    return json
def json_add_scanedge_filter(json, value):
    """ Add EdgeOfFlightLine Filter element and return """
    limits = 'EdgeOfFlightLine[{0}:{0}]'.format(value)
    json['pipeline'].insert(0, {'type': 'filters.range', 'limits': limits})
    return json
def json_add_returnnum_filter(json, value):
    """ Add ReturnNum Filter element and return """
    limits = 'ReturnNum[{0}:{0}]'.format(value)
    json['pipeline'].insert(0, {'type': 'filters.range', 'limits': limits})
    return json
def json_add_filters(json, maxsd=None, maxz=None, maxangle=None, returnnum=None):
    """Apply each requested filter (in fixed order) to the pipeline."""
    steps = (
        (maxsd, lambda j: json_add_maxsd_filter(j, thresh=maxsd)),
        (maxz, lambda j: json_add_maxz_filter(j, maxz)),
        (maxangle, lambda j: json_add_maxangle_filter(j, maxangle)),
        (returnnum, lambda j: json_add_returnnum_filter(j, returnnum)),
    )
    for value, add_filter in steps:
        if value is not None:
            json = add_filter(json)
    return json
def json_add_crop_filter(json, wkt):
    """ Add cropping polygon as Filter Element and return """
    crop_stage = {'type': 'filters.crop', 'polygon': wkt}
    json['pipeline'].insert(0, crop_stage)
    return json
def is_ply_file(filename):
    """Return True when *filename* has a .ply extension (case-insensitive)."""
    return os.path.splitext(filename)[1].lower() == '.ply'
@ -233,33 +164,6 @@ def run_pipeline(json, verbose=False):
os.remove(jsonfile)
def run_pdalground(fin, fout, slope, cellsize, maxWindowSize, maxDistance, approximate=False, initialDistance=0.7, verbose=False):
    """ Run PDAL ground """
    # Build the `pdal ground` command line; optional limits are appended
    # only when the caller supplied them.
    cmd = [
        'pdal',
        'ground',
        '-i %s' % fin,
        '-o %s' % fout,
        '--slope %s' % slope,
        '--cell_size %s' % cellsize,
        '--initial_distance %s' % initialDistance
    ]
    if maxWindowSize is not None:
        cmd.append('--max_window_size %s' %maxWindowSize)
    if maxDistance is not None:
        cmd.append('--max_distance %s' %maxDistance)
    if approximate:
        cmd.append('--approximate')
    if verbose:
        cmd.append('--developer-debug')
        print ' '.join(cmd)
    # NOTE(review): this second print of the same command looks like a
    # merge/diff artifact - confirm which one was intended.
    print ' '.join(cmd)
    out = system.run(' '.join(cmd))
    if verbose:
        print out
def run_pdaltranslate_smrf(fin, fout, slope, cellsize, maxWindowSize, verbose=False):
""" Run PDAL translate """
cmd = [

Wyświetl plik

@ -0,0 +1,440 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
"""
get_image_size.py
====================
:Name: get_image_size
:Purpose: extract image dimensions given a file path
:Author: Paulo Scardine (based on code from Emmanuel VAÏSSE)
:Created: 26/09/2013
:Copyright: (c) Paulo Scardine 2013
:Licence: MIT
"""
import collections
import json
import os
import io
import struct
FILE_UNKNOWN = "Sorry, don't know how to get size for this file."
class UnknownImageFormat(Exception):
    """Raised when a file's header bytes match no supported image format."""
    pass
# Registry of supported image-format identifiers; each module-level constant
# (BMP, GIF, ...) aliases its own string value stored in the ordered mapping.
types = collections.OrderedDict()
BMP = types['BMP'] = 'BMP'
GIF = types['GIF'] = 'GIF'
ICO = types['ICO'] = 'ICO'
JPEG = types['JPEG'] = 'JPEG'
PNG = types['PNG'] = 'PNG'
TIFF = types['TIFF'] = 'TIFF'
# Field order defines both tuple layout and JSON key order.
image_fields = ['path', 'type', 'file_size', 'width', 'height']


class Image(collections.namedtuple('Image', image_fields)):
    """Immutable record describing one image file's metadata."""

    def to_str_row(self):
        """Render as a tab-separated row: width, height, size, type, path."""
        safe_path = self.path.replace('\t', '\\t')
        return ("%d\t%d\t%d\t%s\t%s" % (
            self.width,
            self.height,
            self.file_size,
            self.type,
            safe_path,
        ))

    def to_str_row_verbose(self):
        """Like to_str_row, with the full tuple repr appended after '##'."""
        safe_path = self.path.replace('\t', '\\t')
        return ("%d\t%d\t%d\t%s\t%s\t##%s" % (
            self.width,
            self.height,
            self.file_size,
            self.type,
            safe_path,
            self))

    def to_str_json(self, indent=None):
        """Serialize the fields to JSON, preserving image_fields order."""
        return json.dumps(self._asdict(), indent=indent)
def get_image_size(file_path):
    """
    Return (width, height) for the image at *file_path*.

    Uses only the os/struct builtin modules (no external imaging libraries).
    """
    metadata = get_image_metadata(file_path)
    return (metadata.width, metadata.height)
def get_image_size_from_bytesio(input, size):
    """
    Return (width, height) for image data readable from *input*.

    Args:
        input (io.IOBase): io object supporting read & seek
        size (int): size of the buffer in bytes
    """
    metadata = get_image_metadata_from_bytesio(input, size)
    return (metadata.width, metadata.height)
def get_image_metadata(file_path):
    """
    Return an `Image` metadata tuple for the file at *file_path*.

    Args:
        file_path (str): path to an image file

    Returns:
        Image: (path, type, file_size, width, height)
    """
    file_size = os.path.getsize(file_path)
    # Binary mode is mandatory: the parser reads raw header bytes.
    with io.open(file_path, "rb") as stream:
        return get_image_metadata_from_bytesio(stream, file_size, file_path)
def get_image_metadata_from_bytesio(input, size, file_path=None):
    """
    Return an `Image` object for a given img file content - no external
    dependencies except the os and struct builtin modules

    The format is detected from magic bytes in the first 26 bytes; JPEG,
    TIFF and ICO perform additional targeted reads/seeks on *input*.

    Args:
        input (io.IOBase): io object support read & seek
        size (int): size of buffer in byte
        file_path (str): path to an image file

    Returns:
        Image: (path, type, file_size, width, height)

    Raises:
        UnknownImageFormat: if the header matches no supported format or
            the dimensions cannot be decoded.
    """
    height = -1
    width = -1
    data = input.read(26)
    msg = " raised while trying to decode as JPEG."

    if (size >= 10) and data[:6] in (b'GIF87a', b'GIF89a'):
        # GIFs
        imgtype = GIF
        w, h = struct.unpack("<HH", data[6:10])
        width = int(w)
        height = int(h)
    elif ((size >= 24) and data.startswith(b'\211PNG\r\n\032\n')
          and (data[12:16] == b'IHDR')):
        # PNGs
        imgtype = PNG
        w, h = struct.unpack(">LL", data[16:24])
        width = int(w)
        height = int(h)
    elif (size >= 16) and data.startswith(b'\211PNG\r\n\032\n'):
        # older PNGs
        imgtype = PNG
        w, h = struct.unpack(">LL", data[8:16])
        width = int(w)
        height = int(h)
    elif (size >= 2) and data.startswith(b'\377\330'):
        # JPEG: walk marker segments until a SOF0..SOF3 frame header,
        # which carries the image dimensions.
        imgtype = JPEG
        input.seek(0)
        input.read(2)
        b = input.read(1)
        try:
            while (b and ord(b) != 0xDA):
                while (ord(b) != 0xFF):
                    b = input.read(1)
                while (ord(b) == 0xFF):
                    b = input.read(1)
                if (ord(b) >= 0xC0 and ord(b) <= 0xC3):
                    input.read(3)
                    h, w = struct.unpack(">HH", input.read(4))
                    break
                else:
                    # Skip this segment; its length field includes itself.
                    input.read(
                        int(struct.unpack(">H", input.read(2))[0]) - 2)
                b = input.read(1)
            width = int(w)
            height = int(h)
        except struct.error:
            raise UnknownImageFormat("StructError" + msg)
        except ValueError:
            raise UnknownImageFormat("ValueError" + msg)
        except Exception as e:
            raise UnknownImageFormat(e.__class__.__name__ + msg)
    elif (size >= 26) and data.startswith(b'BM'):
        # BMP
        imgtype = 'BMP'
        headersize = struct.unpack("<I", data[14:18])[0]
        if headersize == 12:
            w, h = struct.unpack("<HH", data[18:22])
            width = int(w)
            height = int(h)
        elif headersize >= 40:
            w, h = struct.unpack("<ii", data[18:26])
            width = int(w)
            # as h is negative when stored upside down
            height = abs(int(h))
        else:
            raise UnknownImageFormat(
                "Unkown DIB header size:" +
                str(headersize))
    elif (size >= 8) and data[:4] in (b"II\052\000", b"MM\000\052"):
        # Standard TIFF, big- or little-endian
        # BigTIFF and other different but TIFF-like formats are not
        # supported currently
        imgtype = TIFF
        byteOrder = data[:2]
        # BUGFIX: compare against a bytes literal. On Python 3,
        # b"MM" == "MM" is always False, which silently parsed
        # big-endian TIFFs with little-endian byte order.
        boChar = ">" if byteOrder == b"MM" else "<"
        # maps TIFF type id to size (in bytes)
        # and python format char for struct
        tiffTypes = {
            1: (1, boChar + "B"),  # BYTE
            2: (1, boChar + "c"),  # ASCII
            3: (2, boChar + "H"),  # SHORT
            4: (4, boChar + "L"),  # LONG
            5: (8, boChar + "LL"),  # RATIONAL
            6: (1, boChar + "b"),  # SBYTE
            7: (1, boChar + "c"),  # UNDEFINED
            8: (2, boChar + "h"),  # SSHORT
            9: (4, boChar + "l"),  # SLONG
            10: (8, boChar + "ll"),  # SRATIONAL
            11: (4, boChar + "f"),  # FLOAT
            12: (8, boChar + "d")   # DOUBLE
        }
        ifdOffset = struct.unpack(boChar + "L", data[4:8])[0]
        try:
            countSize = 2
            input.seek(ifdOffset)
            ec = input.read(countSize)
            ifdEntryCount = struct.unpack(boChar + "H", ec)[0]
            # 2 bytes: TagId + 2 bytes: type + 4 bytes: count of values + 4
            # bytes: value offset
            ifdEntrySize = 12
            for i in range(ifdEntryCount):
                entryOffset = ifdOffset + countSize + i * ifdEntrySize
                input.seek(entryOffset)
                tag = input.read(2)
                tag = struct.unpack(boChar + "H", tag)[0]
                # Tags 256/257 are ImageWidth/ImageLength.
                if(tag == 256 or tag == 257):
                    # if type indicates that value fits into 4 bytes, value
                    # offset is not an offset but value itself
                    type = input.read(2)
                    type = struct.unpack(boChar + "H", type)[0]
                    if type not in tiffTypes:
                        raise UnknownImageFormat(
                            "Unkown TIFF field type:" +
                            str(type))
                    typeSize = tiffTypes[type][0]
                    typeChar = tiffTypes[type][1]
                    input.seek(entryOffset + 8)
                    value = input.read(typeSize)
                    value = int(struct.unpack(typeChar, value)[0])
                    if tag == 256:
                        width = value
                    else:
                        height = value
                if width > -1 and height > -1:
                    break
        except Exception as e:
            raise UnknownImageFormat(str(e))
    elif size >= 2:
        # see http://en.wikipedia.org/wiki/ICO_(file_format)
        imgtype = 'ICO'
        input.seek(0)
        reserved = input.read(2)
        if 0 != struct.unpack("<H", reserved)[0]:
            raise UnknownImageFormat(FILE_UNKNOWN)
        format = input.read(2)
        assert 1 == struct.unpack("<H", format)[0]
        num = input.read(2)
        num = struct.unpack("<H", num)[0]
        if num > 1:
            import warnings
            warnings.warn("ICO File contains more than one image")
        # http://msdn.microsoft.com/en-us/library/ms997538.aspx
        w = input.read(1)
        h = input.read(1)
        width = ord(w)
        height = ord(h)
    else:
        raise UnknownImageFormat(FILE_UNKNOWN)

    return Image(path=file_path,
                 type=imgtype,
                 file_size=size,
                 width=width,
                 height=height)
import unittest
class Test_get_image_size(unittest.TestCase):
    """Tests for the header-based size readers.

    NOTE(review): these tests assume fixture files 'lookmanodeps.png' and
    'README.rst' exist in the current working directory -- confirm before
    running.
    """
    # Expected metadata for the PNG fixture image.
    data = [{
        'path': 'lookmanodeps.png',
        'width': 251,
        'height': 208,
        'file_size': 22228,
        'type': 'PNG'}]

    def setUp(self):
        pass

    def test_get_image_size_from_bytesio(self):
        # Read the fixture into memory and parse it via a BytesIO wrapper.
        img = self.data[0]
        p = img['path']
        with io.open(p, 'rb') as fp:
            b = fp.read()
        fp = io.BytesIO(b)
        sz = len(b)
        output = get_image_size_from_bytesio(fp, sz)
        self.assertTrue(output)
        self.assertEqual(output,
                         (img['width'],
                          img['height']))

    def test_get_image_metadata_from_bytesio(self):
        img = self.data[0]
        p = img['path']
        with io.open(p, 'rb') as fp:
            b = fp.read()
        fp = io.BytesIO(b)
        sz = len(b)
        output = get_image_metadata_from_bytesio(fp, sz)
        self.assertTrue(output)
        # path is None because no file_path was passed to the parser.
        for field in image_fields:
            self.assertEqual(getattr(output, field), None if field == 'path' else img[field])

    def test_get_image_metadata(self):
        img = self.data[0]
        output = get_image_metadata(img['path'])
        self.assertTrue(output)
        for field in image_fields:
            self.assertEqual(getattr(output, field), img[field])

    def test_get_image_metadata__ENOENT_OSError(self):
        # Missing file surfaces as OSError from os.path.getsize.
        with self.assertRaises(OSError):
            get_image_metadata('THIS_DOES_NOT_EXIST')

    def test_get_image_metadata__not_an_image_UnknownImageFormat(self):
        # A non-image file must raise UnknownImageFormat.
        with self.assertRaises(UnknownImageFormat):
            get_image_metadata('README.rst')

    def test_get_image_size(self):
        img = self.data[0]
        output = get_image_size(img['path'])
        self.assertTrue(output)
        self.assertEqual(output,
                         (img['width'],
                          img['height']))

    def tearDown(self):
        pass
def main(argv=None):
    """
    Print image metadata fields for the given file path.

    Keyword Arguments:
        argv (list): commandline arguments (e.g. sys.argv[1:])
    Returns:
        int: zero for OK, 2 when any path failed to parse
    """
    import logging
    import optparse
    import sys

    prs = optparse.OptionParser(
        usage="%prog [-v|--verbose] [--json|--json-indent] <path0> [<pathN>]",
        description="Print metadata for the given image paths "
                    "(without image library bindings).")

    prs.add_option('--json',
                   dest='json',
                   action='store_true')
    prs.add_option('--json-indent',
                   dest='json_indent',
                   action='store_true')

    prs.add_option('-v', '--verbose',
                   dest='verbose',
                   action='store_true',)
    prs.add_option('-q', '--quiet',
                   dest='quiet',
                   action='store_true',)

    prs.add_option('-t', '--test',
                   dest='run_tests',
                   action='store_true',)

    argv = list(argv) if argv is not None else sys.argv[1:]
    (opts, args) = prs.parse_args(args=argv)

    loglevel = logging.INFO
    if opts.verbose:
        loglevel = logging.DEBUG
    elif opts.quiet:
        loglevel = logging.ERROR
    logging.basicConfig(level=loglevel)
    log = logging.getLogger()
    log.debug('argv: %r', argv)
    log.debug('opts: %r', opts)
    log.debug('args: %r', args)

    if opts.run_tests:
        # Hand the remaining args to the module-level unittest runner.
        # (sys is already imported above; unittest at module level --
        # the originals re-imported both redundantly here.)
        sys.argv = [sys.argv[0]] + args
        return unittest.main()

    # Pick the row formatter: json-indent > json > verbose > plain row.
    output_func = Image.to_str_row
    if opts.json_indent:
        import functools
        output_func = functools.partial(Image.to_str_json, indent=2)
    elif opts.json:
        output_func = Image.to_str_json
    elif opts.verbose:
        output_func = Image.to_str_row_verbose

    EX_OK = 0
    EX_NOT_OK = 2

    if len(args) < 1:
        prs.print_help()
        print('')
        prs.error("You must specify one or more paths to image files")

    errors = []
    for path_arg in args:
        try:
            img = get_image_metadata(path_arg)
            print(output_func(img))
        except KeyboardInterrupt:
            raise
        except OSError as e:
            log.error((path_arg, e))
            errors.append((path_arg, e))
        except Exception as e:
            log.exception(e)
            errors.append((path_arg, e))
    if len(errors):
        import pprint
        print("ERRORS", file=sys.stderr)
        print("======", file=sys.stderr)
        print(pprint.pformat(errors, indent=2), file=sys.stderr)
        return EX_NOT_OK
    return EX_OK

Wyświetl plik

@ -0,0 +1,54 @@
import os, sys
from opendm import system
from opendm import log
from opendm import context
def filter(pointCloudPath, standard_deviation=2.5, meank=16, verbose=False):
    """
    Filters a point cloud in place (it will replace the input file with the filtered result).

    :param pointCloudPath: path to the point cloud to filter (overwritten on success)
    :param standard_deviation: statistical outlier threshold; <= 0 disables filtering
    :param meank: neighbor count for the mean-distance estimate; <= 0 disables filtering
    :param verbose: when True, pass --verbose to odm_filterpoints
    """
    if standard_deviation <= 0 or meank <= 0:
        log.ODM_INFO("Skipping point cloud filtering")
        return

    log.ODM_INFO("Filtering point cloud (statistical, meanK {}, standard deviation {})".format(meank, standard_deviation))

    if not os.path.exists(pointCloudPath):
        log.ODM_ERROR("{} does not exist, cannot filter point cloud. The program will now exit.".format(pointCloudPath))
        sys.exit(1)

    filter_program = os.path.join(context.odm_modules_path, 'odm_filterpoints')
    if not os.path.exists(filter_program):
        # BUGFIX: .format(filter_program) was missing, so the warning logged a literal "{}".
        log.ODM_WARNING("{} program not found. Will skip filtering, but this installation should be fixed.".format(filter_program))
        return

    pc_path, pc_filename = os.path.split(pointCloudPath)
    # pc_path = path/to
    # pc_filename = pointcloud.ply
    basename, ext = os.path.splitext(pc_filename)
    # basename = pointcloud
    # ext = .ply

    # Filter into a sibling temp file, then swap it over the input on success.
    tmpPointCloud = os.path.join(pc_path, "{}.tmp{}".format(basename, ext))

    filterArgs = {
      'bin': filter_program,
      'inputFile': pointCloudPath,
      'outputFile': tmpPointCloud,
      'sd': standard_deviation,
      'meank': meank,
      'verbose': '--verbose' if verbose else '',
    }

    system.run('{bin} -inputFile {inputFile} '
               '-outputFile {outputFile} '
               '-sd {sd} '
               '-meank {meank} {verbose} '.format(**filterArgs))

    # Remove input file, swap temp file
    if os.path.exists(tmpPointCloud):
        os.remove(pointCloudPath)
        os.rename(tmpPointCloud, pointCloudPath)
    else:
        log.ODM_WARNING("{} not found, filtering has failed.".format(tmpPointCloud))

Wyświetl plik

@ -3,6 +3,7 @@ import exifread
import re
from fractions import Fraction
from opensfm.exif import sensor_string
from opendm import get_image_size
from pyproj import Proj
import log
@ -15,15 +16,11 @@ class ODM_Photo:
""" ODMPhoto - a class for ODMPhotos
"""
def __init__(self, path_file, force_focal, force_ccd):
def __init__(self, path_file):
# general purpose
self.filename = io.extract_file_from_path_file(path_file)
# useful attibutes
self.width = None
self.height = None
self.ccd_width = None
self.focal_length = None
self.focal_length_px = None
# other attributes
self.camera_make = ''
self.camera_model = ''
@ -32,33 +29,17 @@ class ODM_Photo:
self.longitude = None
self.altitude = None
# parse values from metadata
self.parse_exif_values(path_file, force_focal, force_ccd)
# compute focal length into pixels
self.update_focal()
self.parse_exif_values(path_file)
# print log message
log.ODM_DEBUG('Loaded {}'.format(self))
def __str__(self):
return '{} | camera: {} | dimensions: {} x {} | focal: {} | ccd: {} | lat: {} | lon: {} | alt: {}'.format(
self.filename, self.make_model, self.width, self.height, self.focal_length,
self.ccd_width, self.latitude, self.longitude, self.altitude)
return '{} | camera: {} | dimensions: {} x {} | lat: {} | lon: {} | alt: {}'.format(
self.filename, self.make_model, self.width, self.height, self.latitude, self.longitude, self.altitude)
def update_focal(self):
# compute focal length in pixels
if self.focal_length and self.ccd_width:
# take width or height as reference
if self.width > self.height:
# f(px) = w(px) * f(mm) / ccd(mm)
self.focal_length_px = \
self.width * (self.focal_length / self.ccd_width)
else:
# f(px) = h(px) * f(mm) / ccd(mm)
self.focal_length_px = \
self.height * (self.focal_length / self.ccd_width)
def parse_exif_values(self, _path_file, _force_focal, _force_ccd):
def parse_exif_values(self, _path_file):
# Disable exifread log
logging.getLogger('exifread').setLevel(logging.CRITICAL)
@ -70,8 +51,6 @@ class ODM_Photo:
self.camera_make = tags['Image Make'].values.encode('utf8')
if 'Image Model' in tags:
self.camera_model = tags['Image Model'].values.encode('utf8')
if 'EXIF FocalLength' in tags:
self.focal_length = self.float_values(tags['EXIF FocalLength'])[0]
if 'GPS GPSAltitude' in tags:
self.altitude = self.float_values(tags['GPS GPSAltitude'])[0]
if 'GPS GPSAltitudeRef' in tags and self.int_values(tags['GPS GPSAltitudeRef'])[0] > 0:
@ -87,28 +66,13 @@ class ODM_Photo:
self.make_model = sensor_string(self.camera_make, self.camera_model)
# needed to do that since sometimes metadata contains wrong data
img = cv2.imread(_path_file)
self.width = img.shape[1]
self.height = img.shape[0]
# force focal and ccd_width with user parameter
if _force_focal:
self.focal_length = _force_focal
if _force_ccd:
self.ccd_width = _force_ccd
# find ccd_width from file if needed
if self.ccd_width is None and self.camera_model is not None:
# load ccd_widths from file
ccd_widths = system.get_ccd_widths()
# search ccd by camera model
key = [x for x in ccd_widths.keys() if self.make_model in x]
# convert to float if found
if key:
self.ccd_width = float(ccd_widths[key[0]])
else:
log.ODM_WARNING('Could not find ccd_width in file. Use --force-ccd or edit the sensor_data.json '
'file to manually input ccd width')
try:
self.width, self.height = get_image_size.get_image_size(_path_file)
except get_image_size.UnknownImageFormat:
# Fallback to slower cv2
img = cv2.imread(_path_file)
self.width = img.shape[1]
self.height = img.shape[0]
def dms_to_decimal(self, dms, sign):
"""Converts dms coords to decimal degrees"""
@ -126,7 +90,7 @@ class ODM_Photo:
def int_values(self, tag):
return map(int, tag.values)
# TODO: finish this class
class ODM_Reconstruction(object):
"""docstring for ODMReconstruction"""
@ -163,7 +127,16 @@ class ODM_Reconstruction(object):
utm_pole = (ref[2][len(ref[2]) - 1]).upper()
utm_zone = int(ref[2][:len(ref[2]) - 1])
return Proj(proj="utm", zone=utm_zone, south=utm_pole == 'S', datum=datum, no_defs=True)
proj_args = {
'proj': "utm",
'zone': utm_zone,
'datum': datum,
'no_defs': True
}
if utm_pole == 'S':
proj_args['south'] = True
return Proj(**proj_args)
elif '+proj' in line:
return Proj(line.strip('\''))
elif 'epsg' in line.lower():
@ -188,15 +161,6 @@ class ODM_Reconstruction(object):
log.ODM_EXCEPTION('Could not set projection. Please use a proj4 string')
class ODM_GCPoint(object):
    """A 3D point (x, y, z) used as a ground control point."""

    def __init__(self, x, y, z):
        # Coordinates are stored as given; no unit or CRS conversion here.
        self.x = x
        self.y = y
        self.z = z
class ODM_GeoRef(object):
"""docstring for ODMUtmZone"""
@ -249,24 +213,6 @@ class ODM_GeoRef(object):
self.utm_east_offset = float(offsets[0])
self.utm_north_offset = float(offsets[1])
    def create_gcps(self, _file):
        """Parse a GCP file and append ODM_GCPoint entries to self.gcps.

        The first two lines of the file are skipped as headers; each
        remaining line holds 'x y z' (or 'x y', with z defaulting to 0).
        """
        if not io.file_exists(_file):
            log.ODM_ERROR('Could not find file %s' % _file)
            return

        with open(_file) as f:
            # parse coordinates
            lines = f.readlines()[2:]
            for l in lines:
                xyz = l.split(' ')
                if len(xyz) == 3:
                    x, y, z = xyz[:3]
                elif len(xyz) == 2:
                    x, y = xyz[:2]
                    z = 0
                # NOTE(review): lines with any other token count silently
                # reuse the previous iteration's x/y/z -- confirm intended.
                self.gcps.append(ODM_GCPoint(float(x), float(y), float(z)))
# Write to json file
def parse_transformation_matrix(self, _file):
if not io.file_exists(_file):
log.ODM_ERROR('Could not find file %s' % _file)
@ -365,6 +311,8 @@ class ODM_Tree(object):
self.odm_georeferencing, 'las.json')
self.odm_georeferencing_model_laz = io.join_paths(
self.odm_georeferencing, 'odm_georeferenced_model.laz')
self.odm_georeferencing_model_las = io.join_paths(
self.odm_georeferencing, 'odm_georeferenced_model.las')
self.odm_georeferencing_dem = io.join_paths(
self.odm_georeferencing, 'odm_georeferencing_model_dem.tif')

Wyświetl plik

@ -9,12 +9,6 @@ from opendm import log
from opendm import system
from shutil import copyfile
def make_odm_photo(force_focal, force_ccd, path_file):
return types.ODM_Photo(path_file,
force_focal,
force_ccd)
def save_images_database(photos, database_file):
with open(database_file, 'w') as f:
f.write(json.dumps(map(lambda p: p.__dict__, photos)))
@ -45,10 +39,6 @@ def load_images_database(database_file):
class ODMLoadDatasetCell(ecto.Cell):
def declare_params(self, params):
params.declare("force_focal", 'Override the focal length information for the '
'images', None)
params.declare("force_ccd", 'Override the ccd width information for the '
'images', None)
params.declare("verbose", 'indicate verbosity', False)
params.declare("proj", 'Geographic projection', None)
@ -106,8 +96,8 @@ class ODMLoadDatasetCell(ecto.Cell):
photos = []
with open(tree.dataset_list, 'w') as dataset_list:
for files in path_files:
photos += [make_odm_photo(self.params.force_focal, self.params.force_ccd, files)]
for f in path_files:
photos += [types.ODM_Photo(f)]
dataset_list.write(photos[-1].filename + '\n')
# Save image database for faster restart

Wyświetl plik

@ -4,6 +4,7 @@ from opendm import log
from opendm import io
from opendm import system
from opendm import context
from opendm import point_cloud
class ODMMveCell(ecto.Cell):
@ -111,6 +112,9 @@ class ODMMveCell(ecto.Cell):
old_file = mve_files[-1]
if not (io.rename_file(old_file, tree.mve_model)):
log.ODM_WARNING("File %s does not exist, cannot be renamed. " % old_file)
# Filter
point_cloud.filter(tree.smvs_model, standard_deviation=args.pc_filter, verbose=args.verbose)
else:
log.ODM_WARNING("Cannot find a valid point cloud (mve-XX.ply) in %s. Check the console output for errors." % tree.mve)
else:

Wyświetl plik

@ -36,9 +36,7 @@ class ODMApp(ecto.BlackBox):
Only cells from which something is forwarded have to be declared
"""
cells = {'args': ecto.Constant(value=p.args),
'dataset': ODMLoadDatasetCell(force_focal=p.args.force_focal,
force_ccd=p.args.force_ccd,
verbose=p.args.verbose,
'dataset': ODMLoadDatasetCell(verbose=p.args.verbose,
proj=p.args.proj),
'opensfm': ODMOpenSfMCell(use_exif_size=False,
feature_process_size=p.args.resize_to,
@ -73,7 +71,6 @@ class ODMApp(ecto.BlackBox):
'dem': ODMDEMCell(max_concurrency=p.args.max_concurrency,
verbose=p.args.verbose),
'orthophoto': ODMOrthoPhotoCell(resolution=p.args.orthophoto_resolution,
t_srs=p.args.orthophoto_target_srs,
no_tiled=p.args.orthophoto_no_tiled,
compress=p.args.orthophoto_compression,
bigtiff=p.args.orthophoto_bigtiff,

Wyświetl plik

@ -39,45 +39,33 @@ class ODMDEMCell(ecto.Cell):
(args.rerun_from is not None and
'odm_dem' in args.rerun_from)
log.ODM_INFO('Classify: ' + str(args.pc_classify != "none"))
log.ODM_INFO('Classify: ' + str(args.pc_classify))
log.ODM_INFO('Create DSM: ' + str(args.dsm))
log.ODM_INFO('Create DTM: ' + str(args.dtm))
log.ODM_INFO('DEM input file {0} found: {1}'.format(tree.odm_georeferencing_model_laz, str(las_model_found)))
# Setup terrain parameters
terrain_params_map = {
'flatnonforest': (1, 3),
'flatforest': (1, 2),
'complexnonforest': (5, 2),
'complexforest': (10, 2)
}
terrain_params = terrain_params_map[args.dem_terrain_type.lower()]
slope, cellsize = terrain_params
slope, cellsize = (0.15, 1)
# define paths and create working directories
odm_dem_root = tree.path('odm_dem')
if not io.dir_exists(odm_dem_root):
system.mkdir_p(odm_dem_root)
if args.pc_classify != "none" and las_model_found:
if args.pc_classify and las_model_found:
pc_classify_marker = os.path.join(odm_dem_root, 'pc_classify_done.txt')
if not io.file_exists(pc_classify_marker) or rerun_cell:
log.ODM_INFO("Classifying {} using {}".format(tree.odm_georeferencing_model_laz, args.pc_classify))
log.ODM_INFO("Classifying {} using Simple Morphological Filter".format(tree.odm_georeferencing_model_laz))
commands.classify(tree.odm_georeferencing_model_laz,
args.pc_classify == "smrf",
slope,
cellsize,
approximate=args.dem_approximate,
initialDistance=args.dem_initial_distance,
verbose=args.verbose
)
with open(pc_classify_marker, 'w') as f:
f.write('Classify: {}\n'.format(args.pc_classify))
f.write('Classify: smrf\n')
f.write('Slope: {}\n'.format(slope))
f.write('Cellsize: {}\n'.format(cellsize))
f.write('Approximate: {}\n'.format(args.dem_approximate))
f.write('InitialDistance: {}\n'.format(args.dem_initial_distance))
# Do we need to process anything here?
if (args.dsm or args.dtm) and las_model_found:
@ -105,8 +93,6 @@ class ODMDEMCell(ecto.Cell):
gapfill=True,
outdir=odm_dem_root,
resolution=resolution / 100.0,
maxsd=args.dem_maxsd,
maxangle=args.dem_maxangle,
decimation=args.dem_decimation,
verbose=args.verbose,
max_workers=get_max_concurrency_for_dem(args.max_concurrency,tree.odm_georeferencing_model_laz)

Wyświetl plik

@ -9,6 +9,7 @@ from opendm import types
from opendm import system
from opendm import context
from opendm.cropper import Cropper
from opendm import point_cloud
class ODMGeoreferencingCell(ecto.Cell):
@ -60,11 +61,15 @@ class ODMGeoreferencingCell(ecto.Cell):
runs = []
if not args.use_3dmesh:
runs += [{
# Make sure 2.5D mesh is georeferenced before the 3D mesh
# Because it will be used to calculate a transform
# for the point cloud. If we use the 3D model transform,
# DEMs and orthophoto might not align!
runs.insert(0, {
'georeferencing_dir': tree.odm_25dgeoreferencing,
'texturing_dir': tree.odm_25dtexturing,
'model': os.path.join(tree.odm_25dtexturing, tree.odm_textured_model_obj)
}]
})
for r in runs:
odm_georeferencing_model_obj_geo = os.path.join(r['texturing_dir'], tree.odm_georeferencing_model_obj_geo)
@ -103,7 +108,7 @@ class ODMGeoreferencingCell(ecto.Cell):
if transformPointCloud:
kwargs['pc_params'] = '-inputPointCloudFile {input_pc_file} -outputPointCloudFile {output_pc_file}'.format(**kwargs)
if geo_ref.projection and geo_ref.projection.srs:
if geo_ref and geo_ref.projection and geo_ref.projection.srs:
kwargs['pc_params'] += ' -outputPointCloudSrs %s' % pipes.quote(geo_ref.projection.srs)
else:
log.ODM_WARNING('NO SRS: The output point cloud will not have a SRS.')
@ -157,7 +162,16 @@ class ODMGeoreferencingCell(ecto.Cell):
"--writers.text.keep_unspecified=false ".format(
tree.odm_georeferencing_model_laz,
tree.odm_georeferencing_xyz_file))
# LAS point cloud output
if args.pc_las:
log.ODM_INFO("Creating geo-referenced LAS file")
system.run("pdal translate -i \"{}\" "
"-o \"{}\" ".format(
tree.odm_georeferencing_model_laz,
tree.odm_georeferencing_model_las))
if args.crop > 0:
log.ODM_INFO("Calculating cropping area and generating bounds shapefile from point cloud")
cropper = Cropper(tree.odm_georeferencing, 'odm_georeferenced_model')

Wyświetl plik

@ -13,7 +13,6 @@ from opendm.cropper import Cropper
class ODMOrthoPhotoCell(ecto.Cell):
def declare_params(self, params):
params.declare("resolution", 'Orthophoto resolution in cm / pixel', 5)
params.declare("t_srs", 'Target SRS', None)
params.declare("no_tiled", 'Do not tile tiff', False)
params.declare("compress", 'Compression type', 'DEFLATE')
params.declare("bigtiff", 'Make BigTIFF orthophoto', 'IF_SAFER')
@ -69,7 +68,8 @@ class ODMOrthoPhotoCell(ecto.Cell):
# TODO: we should move this to a more central
# location (perhaps during the dataset initialization)
if georef and not georef.utm_east_offset:
odm_georeferencing_model_txt_geo_file = os.path.join(tree.odm_georeferencing, tree.odm_georeferencing_model_txt_geo)
georeferencing_dir = tree.odm_georeferencing if args.use_3dmesh and not args.skip_3dmodel else tree.odm_25dgeoreferencing
odm_georeferencing_model_txt_geo_file = os.path.join(georeferencing_dir, tree.odm_georeferencing_model_txt_geo)
if io.file_exists(odm_georeferencing_model_txt_geo_file):
georef.extract_offsets(odm_georeferencing_model_txt_geo_file)

Wyświetl plik

@ -1,10 +1,13 @@
import ecto
import sys
import os
from opendm import log
from opendm import io
from opendm import system
from opendm import context
from opendm import gsd
from opendm import point_cloud
class ODMOpenSfMCell(ecto.Cell):
def declare_params(self, params):
@ -135,6 +138,17 @@ class ODMOpenSfMCell(ecto.Cell):
log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
tree.opensfm_reconstruction)
# Check that a reconstruction file has been created
if not io.file_exists(tree.opensfm_reconstruction):
log.ODM_ERROR("The program could not process this dataset using the current settings. "
"Check that the images have enough overlap, "
"that there are enough recognizable features "
"and that the images are in focus. "
"You could also try to increase the --min-num-features parameter."
"The program will now exit.")
sys.exit(1)
# Always export VisualSFM's reconstruction and undistort images
# as we'll use these for texturing (after GSD estimation and resizing)
if not args.ignore_gsd:
@ -158,6 +172,9 @@ class ODMOpenSfMCell(ecto.Cell):
if args.fast_orthophoto:
system.run('PYTHONPATH=%s %s/bin/opensfm export_ply --no-cameras %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
# Filter
point_cloud.filter(os.path.join(tree.opensfm, 'reconstruction.ply'), standard_deviation=args.pc_filter, verbose=args.verbose)
elif args.use_opensfm_dense:
# Undistort images at full scale in JPG
# (TODO: we could compare the size of the PNGs if they are < than depthmap_resolution
@ -167,6 +184,8 @@ class ODMOpenSfMCell(ecto.Cell):
system.run('PYTHONPATH=%s %s/bin/opensfm compute_depthmaps %s' %
(context.pyopencv_path, context.opensfm_path, tree.opensfm))
# Filter
point_cloud.filter(tree.opensfm_model, standard_deviation=args.pc_filter, verbose=args.verbose)
else:
log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' %
tree.opensfm_reconstruction)

Wyświetl plik

@ -5,7 +5,7 @@
# or --force-ccd n will have to be set in the command line (if you need to)
# This line is really important to set up properly
project_path: '' # Example: '/home/user/ODMProjects
project_path: '' # Example: '/home/user/ODMProjects'
# The rest of the settings will default to the values set unless you uncomment and change them
#resize_to: 2048

Wyświetl plik

@ -33,7 +33,7 @@ def setup_module():
def teardown_module():
# Delete generated test directories
dirnames = ['images_resize', 'opensfm', 'pmvs', 'odm_meshing',
dirnames = ['opensfm', 'odm_meshing',
'odm_texturing', 'odm_georeferencing', 'odm_orthophoto']
for n in dirnames:
rmpath = os.path.join(context.tests_data_path, n)
@ -41,30 +41,6 @@ def teardown_module():
shutil.rmtree(rmpath)
class TestResize(unittest.TestCase):
    """
    Tests the resize function
    """

    def setUp(self):
        # rerun resize cell and set params
        options.rerun = 'resize'
        options.resize_to = 1600
        # rebuild app
        self.app, self.plasm = appSetup(options)
        run_plasm(options, self.plasm)

    def test_resize(self):
        # assert each image is sized to the option.resize_to
        # (the larger of height/width must equal the requested size)
        self.assertEquals(max(self.app.resize.outputs.photos[0].height, self.app.resize.outputs.photos[0].width),
                          options.resize_to)

    def test_all_resized(self):
        # assert the number of images in images == number of images in resize
        self.assertEquals(len(self.app.resize.outputs.photos), len(self.app.dataset.outputs.photos))
class TestOpenSfM(unittest.TestCase):
"""
Tests the OpenSfM module
@ -79,28 +55,6 @@ class TestOpenSfM(unittest.TestCase):
self.assertTrue(os.path.isfile(self.app.opensfm.inputs.tree.opensfm_reconstruction))
class TestCMVS(unittest.TestCase):
    """Tests that rerunning from the 'cmvs' cell produces a bundle file."""

    def setUp(self):
        # Rerun the pipeline starting from the cmvs cell.
        options.rerun = 'cmvs'
        self.app, self.plasm = appSetup(options)
        run_plasm(options, self.plasm)

    def test_cmvs(self):
        # The pmvs bundle file is the expected cmvs output artifact.
        self.assertTrue(os.path.isfile(self.app.cmvs.inputs.tree.pmvs_bundle))
class TestPMVS(unittest.TestCase):
    """Tests that rerunning from the 'pmvs' cell produces a model file."""

    def setUp(self):
        # Rerun the pipeline starting from the pmvs cell.
        options.rerun = 'pmvs'
        self.app, self.plasm = appSetup(options)
        run_plasm(options, self.plasm)

    def test_pmvs(self):
        # The pmvs model file is the expected pmvs output artifact.
        self.assertTrue(os.path.isfile(self.app.pmvs.inputs.tree.pmvs_model))
class TestMeshing(unittest.TestCase):
def setUp(self):