diff --git a/.gitignore b/.gitignore index 72092ce..8ddf153 100644 --- a/.gitignore +++ b/.gitignore @@ -18,8 +18,12 @@ [Rr]el[Ww]ith[Dd]eb[Ii]nfo x64/ [Bb]uild2017/ +[Bb]uild2019/ [Bb]uildvs2017/ build.vs2017 +build_ubuntu +build_vscode +build bld/ [Bb]in/ [Oo]bj/ diff --git a/Examples/docker/Dockerfile.base.ubuntu.18.04 b/Examples/docker/Dockerfile.base.ubuntu.18.04 index af35417..2a66d7a 100644 --- a/Examples/docker/Dockerfile.base.ubuntu.18.04 +++ b/Examples/docker/Dockerfile.base.ubuntu.18.04 @@ -42,6 +42,7 @@ RUN apt-get update -y && apt-get install -y \ nasm \ postgresql-client-10 \ git \ + gdb \ && apt-get -y autoremove \ && apt-get clean \ && ln -sf /usr/include/eigen3 /usr/local/include/eigen @@ -105,8 +106,13 @@ RUN cd src && mkdir libfmt-${FMT_VERSION} && tar -xzf ${FMT_VERSION}.tar.gz -C && cd $ROOTDIR && rm -Rf src/libfmt* ## SQLite +#ADD https://www.sqlite.org/2019/sqlite-autoconf-${SQLITE_VERSION}.tar.gz $ROOTDIR/src/ +#RUN cd src && tar -xzf sqlite-autoconf-${SQLITE_VERSION}.tar.gz -C /usr/include/ \ +# && cd $ROOTDIR && rm -Rf src/sqlite* + ADD https://www.sqlite.org/2019/sqlite-autoconf-${SQLITE_VERSION}.tar.gz $ROOTDIR/src/ -RUN cd src && tar -xzf sqlite-autoconf-${SQLITE_VERSION}.tar.gz -C /usr/include/ \ +RUN cd src && tar -xzf sqlite-autoconf-${SQLITE_VERSION}.tar.gz -C /usr/local/ \ + && cp /usr/local/sqlite-autoconf-${SQLITE_VERSION}/sqlite3.c /usr/include/ \ && cd $ROOTDIR && rm -Rf src/sqlite* ## gdal @@ -133,3 +139,12 @@ RUN cmake .. \ && make clean \ && cd $ROOTDIR +## Turtle +WORKDIR $ROOTDIR/src/ +RUN wget https://sourceforge.net/projects/turtle/files/turtle/1.3.1/turtle-1.3.1.tar.gz \ + && tar xzf turtle-1.3.1.tar.gz -C /usr/local/ \ + && cd $ROOTDIR && rm -Rf src/turtle* + +#ADD http://downloads.sourceforge.net/project/turtle/turtle/1.3.1/turtle-1.3.1.tar.bz2 $ROOTDIR/src/ +#RUN tar xvf turtle-1.3.0.tar.bz2 -C /usr/local/ +WORKDIR $ROOTDIR/src diff --git a/README.md b/README.md index e2b7458..5b337a0 100644 --- a/README.md +++ b/README.md @@ -26,216 +26,56 @@ The FLINT is using the lessons learned from first generation tools, to build a n ### Development Environment How-To for Windows -These instructions are for building the FLINT on Windows using Visual Studio 2017 (version 15.9.11). +These instructions are for building the FLINT on Windows using Visual Studio 2017, or Visual Studio 2019. #### Required Installs -##### Windows 8.1 SDK - -- download [Windows 8.1 SDK](http://msdn.microsoft.com/en-US/windows/desktop/bg162891) - ##### CMake -- download [cmake-3.14.3-win64-x64.msi](https://github.com/Kitware/CMake/releases/download/v3.14.3/cmake-3.14.3-win64-x64.msi) - -##### Python 3 - -- download [python-3.6.3-amd64](https://www.python.org/ftp/python/3.6.3/python-3.6.3-amd64.exe) - -##### OpenSSL Library - -- download [Win64OpenSSL-1_1_1b.exe](http://slproweb.com/download/Win64OpenSSL-1_1_1b.exe) -- Install using the defaults and ignore any errors about the VS2008 redistributables. - -#### Required Libraries - -##### Boost C++ Libraries - -While the Boost libraries can be built, for the purpose of this document we will download the pre-built binaries. These can be downloaded from [Prebuilt Windows Boost C++ Libraries](https://www.boost.org/users/download/). - -Current version in use: 1.70.0 (*boost_1_70_0-msvc-14.1-64.exe*). - -##### POCO C++ Libraries - -Source code can be downloaded from [Poco project](https://pocoproject.org/download.html). 
Current version in use '*1.9.0 (Basic Edition)*' download [poco-1.9.0](https://pocoproject.org/releases/poco-1.9.0/poco-1.9.0.zip): - -The **Basic Edition** ([poco-1.9.0.tar.gz](https://pocoproject.org/releases/poco-1.9.0/poco-1.9.0.tar.gz) or [poco-1.9.0.zip](https://pocoproject.org/releases/poco-1.9.0/poco-1.9.0.zip)) only contains the Foundation, JSON, XML, Util and Net libraries, but does not require any external dependencies. - -###### Build instructions - -- Extract the archive into `C:\Development\poco-1.9.0` -- Edit the file “components” and delete the lines: - -``` - CppUnit - CppUnit/WinTestRunner - Data/MySQL - NetSSL/OpenSSL - Crypto - PageCompiler - PageCompiler/File2PageA -``` - -- **Visual Studio 2017:** Open up a command prompt where the archive was extracted and type: - -``` -buildwin 150 build all both x64 -``` - -##### Turtle Library - -This is a C++ mock object library for Boost ([Turtle](https://github.com/mat007/turtle)), used in the test projects. Download at [turtle-1.3.1](https://github.com/mat007/turtle/releases/download/v1.3.1/turtle-1.3.1.zip) - -- Extract the archive into `C:\Development\turtle-1.3.1` - -##### SQLite Library - -[download sqlite-amalgamation-3260000](https://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip) - -- Extract the archive into `C:\Development\sqlite-amalgamation-3260000` - -#### Non-Essential Libraries - -Various FLINT Providers and Modules have been separated into sub-projects. These projects can be excluded/included in builds by setting ***ENABLE_XXX*** flags in cmake (i.e. ***ENABLE_MOJA.MODULES.LIBPQ*** & ***ENABLE_MOJA.MODULES.ZIPPER***). - -By disabling certain of these projects some external libraries will not be required. Making the build process less onerous. - -##### PostgreSQL - -CMake Flag: **ENABLE_MOJA.MODULES.LIBPQ** - -Currently using version 9.5.6-2 (found at http://www.postgresql.org/) - -- [download postgresql-9.5.6-2-windows-x64.exe](http://get.enterprisedb.com/postgresql/postgresql-9.5.6-2-windows-x64.exe) -- This includes LibPQ library which is required. I had some issues with this not being found in the PATH (or the wrong version being found in QGis first). Moved the PostgreSQL version in the environment variable to before the GDAL version. - -##### Zlib - -CMake Flag: **ENABLE_MOJA.MODULES.ZIPPER** - -[download zlib-1.2.11](http://www.zlib.net/zlib1211.zip) - -- Extract the archive into `C:\Development\zlib-1.2.11` -- Run cmake-gui and set options: - - source code: `C:\Development\zlib-1.2.11` - - binaries: `C:\Development\zlib-1.2.11\build` - - CMAKE_INSTALL_PREFIX: `C:\Development\zlib-1.2.11\dist` - +Edit all INSTALL paths (INSTALL_BIN_DIR, etc.) to point to location for CMAKE_INSTALL_PREFIX -- Configure / Generate -- Build Release, Release-Install, Debug, and Debug-Install -- if the global.moja cmake zlib find package doesn't have your install path, set system environment vars: - - `ZLIB_SOURCE=C:\Development\zlib-1.2.11` - - `ZLIBROOT=%ZLIB_SOURCE%\dist` +- download [cmake-3.15.2-win64-x64.msi](https://github.com/Kitware/CMake/releases/download/v3.15.2/cmake-3.15.2-win64-x64.msi) -##### Zipper +#### Using vcpkg to install required libraries -**NEED TO MAKE THIS A MOJA VERSION** +A fork of a *Vcpkg* repository has been created for the FLINT required libraries. 
To build these libraries you can use the following process: -[github for Zipper for in SLEEK-TOOLS](https://github.com/SLEEK-TOOLS/zipper) ++ Clone the Vcpkg repository: https://github.com/moja-global/vcpkg -[git clone link for repository](https://github.com/SLEEK-TOOLS/zipper.git) ++ Start a command shell in the Vcpkg repository folder and use the following commands: -- Install Zlib as described in this document -- Clone the Zipper repository into `C:\Development\zipper` -- make sure to do a recursive clone (--recursive) -- Run cmake-gui and set options: - - source code: `C:\Development\zipper` - - binaries: `C:\Development\zipper\build` - - CMAKE_INSTALL_PREFIX: `C:\Development\zipper\dist` -- **WARNING:** check that the correct zlib (lib & source) is found -- Configure / Generate -- Build Release, Release-Install, Debug, and Debug-Install -- if the global.moja cmake zipper find package doesn't have your install path, Set system environment varibales: - - `ZIPPER_ROOT=C:\Development\zipper` - -[Original Zipper on GitHub](https://github.com/sebastiandev/zipper) - -Changes made in the SLEEK-TOOLS fork: - -Edited `zipper\CMakeLists.txt` and insert this text at line 105: - -```cmake -IF(CMAKE_BUILD_TYPE MATCHES RELEASE) -ELSE() - IF(NOT CMAKE_DEBUG_POSTFIX) - SET(CMAKE_DEBUG_POSTFIX d) - ENDIF() -ENDIF() -``` - -#### Set Environment Variables - -To help the current build system find the required libraries we use Environment Variables. - -Add this to your system environment variables: Control Panel -> System -> Advanced system settings -> Environment Variables -> System variables - -``` -BOOST_ROOT= -``` - -##### for example: - -``` -BOOST_ROOT=C:\Development\boost\boost_1_70_0 -``` - -For FLINT to run (both in and external to the IDE) the various libraries built need to be found. The easiest way is to add the builds you have done to the Path. - -``` -BOOST_LIB_DIR= -POCO_LIB_DIR= -ZLIB_LIB_DIR -ZIPPER_LIB_DIR -``` - -for example: + ```powershell + # bootstrap + bootstrap-vcpkg.bat + + # install packages + vcpkg.exe install boost-test:x64-windows boost-program-options:x64-windows boost-log:x64-windows turtle:x64-windows zipper:x64-windows poco:x64-windows libpq:x64-windows gdal:x64-windows sqlite3:x64-windows boost-ublas:x64-windows + ``` -``` -BOOST_LIB_DIR=C:\Development\boost\boost_1_70_0\lib64-msvc-14.1 -POCO_LIB_DIR=C:\Development\poco-1.9.0\bin64 -``` ++ Once this has completed, start a command shell in you FLINT repository folder. Now use the following commands to create the Visual Studio solution: -The Environment variables can be added to the System Path. + ```powershell + # Create a build folder under the Source folder + cd Source + mkdir build + cd build + + # now create the Visual Studio Solution (2019) + cmake -G "Visual Studio 16 2019" -DCMAKE_INSTALL_PREFIX=C:/Development/Software/moja -DVCPKG_TARGET_TRIPLET=x64-windows -DENABLE_TESTS=OFF -DENABLE_MOJA.MODULES.ZIPPER=OFF -DCMAKE_TOOLCHAIN_FILE=c:\Development\moja-global\vcpkg\scripts\buildsystems\vcpkg.cmake .. + + # OR Visual Studio Solution (2017) + cmake -G "Visual Studio 15 2017" -DCMAKE_INSTALL_PREFIX=C:/Development/Software/moja -DVCPKG_TARGET_TRIPLET=x64-windows -DENABLE_TESTS=OFF -DENABLE_MOJA.MODULES.ZIPPER=OFF -DCMAKE_TOOLCHAIN_FILE=c:\Development\moja-global\vcpkg\scripts\buildsystems\vcpkg.cmake .. + ``` #### Install Moja Libraries -It is possible to use the Visual Studio moja solution to install built versions of the Moja libraries. 
To do this you need to set the CMAKE variable '***CMAKE_INSTALL_PREFIX***' to the install path (i.e. "*C:/Development/Software/moja/moja_develop*"). Then re-generate your Moja project file. +It is possible to use the Visual Studio moja solution to install built versions of the Moja libraries. To do this you need to set the CMAKE variable '***CMAKE_INSTALL_PREFIX***' to your install path (i.e. "*C:/Development/Software/moja*"). -Now, in the Visual Studio solution, select the project INSTALL and build the target you want to install (i.e. Release, Debug, etc.). +#### Make edits to the Visual Studio Solution using CMake -This will build all files required for other projects to use the Moja library (DLLS, LIBS and copy in required header files). - -#### Generate Visual Studio Solution Using CMake - -1. Launch the CMake GUI (current version 3.14.3) -2. Click “Browse Source…” and point to `C:\Development\moja.global\Source` -3. To do an ‘*out-of-source build*’ set the build directory to `C:\Development\moja.global\Source\Build`. This will build everything under the directory you specify, making it easier to cleanup when required. -4. Click “***Configure***” – assuming all libraries and required software has been installed you should have no errors. If there are errors the most likely cause is the cmake packages aren't setup to find in the correct paths. Our versions of these can be found in your cloned git folder for moja.global. So, `C:\Development\moja.global\Source\cmake`. Depending on which library is failing, load the required Find package and check the search paths listed. - -``` -FindLibpq.cmake -FindPoco.cmake -FindTurtle.cmake -``` +1. Launch the CMake GUI +2. In the '*Where to build the binaries*' field click “Browse Build…” and select the folder you created above (i.e. `C:\Development\moja-global\FLINT\Source\build`)`. The '*Where is the source code:*' field should update, if not, set it correctly. +4. You should be able to edit any CMake setting now (i.e. ENABLE flags like `ENABLE_TESTS`), then click “***Configure***” – assuming all libraries and required software has been installed you should have no errors. Now click ***"Generate"*** and the Solution with adjustments should be ready to load into Visual Studio. -Once you have configured you can adjust some of the values to change the project that will be generated: - -``` -BOOST_TEST_REPORTING_LEVEL =[DETAILED|SHORT] -ENABLE_TESTS =[ON|OFF] -RUN_UNIT_TESTS_ON_BUILD =[ON|OFF] - -ENABLE_MOJA.MODULES.ZIPPER =[ON|OFF] -ENABLE_MOJA.MODULES.POCO =[ON|OFF] -ENABLE_MOJA.MODULES.LIBPQ =[ON|OFF] -ENABLE_MOJA.MODULES.GDAL =[ON|OFF] -ENABLE_MOJA.CLI =[ON|OFF] -ENABLE_MOJA.SYSTEMTEST =[ON|OFF] -``` - -By turning the enable flags to `OFF` you can stop certain parts of the project being generated. - -##### Install Other Useful Tools +#### Other Useful Tools ##### SQLIte Studio @@ -252,7 +92,7 @@ Containers are a simple way to build FLINT and all required dependencies. Exampl #### Building the containers -The build has been split into 2\two Dockerfiles, the first to get and build required libraries. The second to get and build the moja FLINT libraries and CLI program. +The build has been split into two Dockerfiles, the first to get and build required libraries. The second to get and build the moja FLINT libraries and CLI program. 
```bash # working from the examples folder "flint/tree/master/Examples/docker" @@ -305,7 +145,7 @@ Configuration file options: moja global welcomes a wide range of contributions as explained in [Contributing document](https://github.com/moja-global/About-moja-global/blob/master/CONTRIBUTING.md) and in the [About moja-global Wiki](https://github.com/moja-global/.github/wiki). - + ## FAQ and Other Questions * You can find FAQs on the [Wiki](https://github.com/moja.global/.github/wiki). @@ -323,7 +163,6 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
Rob de Ligt 📖 🚧 · Mal 💻 · James Leitch 💻 · Max Fellows 💻
- This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome! @@ -333,7 +172,6 @@ This project follows the [all-contributors](https://github.com/all-contributors/ The following people are Maintainers of this repository
Mal 💻 · James Leitch 💻 · Max Fellows 💻
- **Maintainers** review and accept proposed changes **Reviewers** check proposed changes before they go to the Maintainers **Ambassadors** are available to provide training related to this repository diff --git a/Source/.devcontainer/devcontainer.json b/Source/.devcontainer/devcontainer.json new file mode 100644 index 0000000..9af078b --- /dev/null +++ b/Source/.devcontainer/devcontainer.json @@ -0,0 +1,36 @@ +// See https://aka.ms/vscode-remote/devcontainer.json for format details. +{ + // See https://aka.ms/vscode-remote/devcontainer.json for format details. + "name": "Existing Dockerfile", + + // Sets the run context to one level up instead of the .devcontainer folder. + "context": "..", + + // Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename. + "dockerFile": "../docker/Dockerfile.base.ubuntu.18.04", + + // The optional 'runArgs' property can be used to specify additional runtime arguments. + "runArgs": [ + // Uncomment the next line if you want to use Docker from the container. See the docker-in-docker definition for details. + // "-v","/var/run/docker.sock:/var/run/docker.sock", + + // Uncomment the next line if you will be using a ptrace-based debugger like C++, Go, and Rust. + "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" + ], + + // Uncomment the next line if you want to publish any ports. + // "appPort": [], + + // Uncomment the next line if you want to add in default container specific settings.json values + // "settings": { "workbench.colorTheme": "Quiet Light" }, + + // Uncomment the next line to run commands after the container is created. + // "postCreateCommand": "uname -a", + + // Add the IDs of any extensions you want installed in the array below. + "extensions": [ + "vector-of-bool.cmake-tools", + "ms-vscode.cpptools", + "austin.code-gnu-global" + ] +} \ No newline at end of file diff --git a/Source/.gitignore b/Source/.gitignore index bfa408f..ac37f90 100644 --- a/Source/.gitignore +++ b/Source/.gitignore @@ -34,6 +34,7 @@ DartConfiguration.tcl *.vcxproj *.sln *.vcxproj.filters +.vs/ # CTest # ######### @@ -53,6 +54,8 @@ CTestTestfile.cmake [Rr]elease/ x64/ build/ +build_lin/ +build_win/ build2017/ build2017_dc/ bld/ diff --git a/Source/.vscode/c_cpp_properties.json b/Source/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..aa8ca86 --- /dev/null +++ b/Source/.vscode/c_cpp_properties.json @@ -0,0 +1,18 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/gcc", + "cStandard": "c11", + "cppStandard": "c++17", + "intelliSenseMode": "clang-x64", + "configurationProvider": "vector-of-bool.cmake-tools", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + } + ], + "version": 4 +} \ No newline at end of file diff --git a/Source/.vscode/launch.json b/Source/.vscode/launch.json new file mode 100644 index 0000000..ceba5b7 --- /dev/null +++ b/Source/.vscode/launch.json @@ -0,0 +1,29 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. 
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "g++-7 build and debug active file", + "type": "cppdbg", + "request": "launch", + "program": "${fileDirname}/${fileBasenameNoExtension}", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ], + "preLaunchTask": "g++-7 build active file", + "miDebuggerPath": "/usr/bin/gdb" + } + ] +} \ No newline at end of file diff --git a/Source/.vscode/settings.json b/Source/.vscode/settings.json new file mode 100644 index 0000000..0db5873 --- /dev/null +++ b/Source/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "cmake.configureOnOpen": true +} \ No newline at end of file diff --git a/Source/.vscode/tasks.json b/Source/.vscode/tasks.json new file mode 100644 index 0000000..0be46f7 --- /dev/null +++ b/Source/.vscode/tasks.json @@ -0,0 +1,19 @@ +{ + "tasks": [ + { + "type": "shell", + "label": "g++-7 build active file", + "command": "/usr/bin/g++-7", + "args": [ + "-g", + "${file}", + "-o", + "${fileDirname}/${fileBasenameNoExtension}" + ], + "options": { + "cwd": "/usr/bin" + } + } + ], + "version": "2.0.0" +} \ No newline at end of file diff --git a/Source/CMakeLists.txt b/Source/CMakeLists.txt index 7341d0a..dc5e0a5 100644 --- a/Source/CMakeLists.txt +++ b/Source/CMakeLists.txt @@ -1,9 +1,11 @@ -project(moja) +cmake_minimum_required(VERSION 3.10.0) +project(moja VERSION 1.0.6 LANGUAGES CXX) #turn on using solution folders set_property( GLOBAL PROPERTY USE_FOLDERS ON) -cmake_minimum_required(VERSION 3.4.0) +#turn on parallel builds +add_compile_options($<$:/MP>) set(MOJA_VERSION_MAJOR "1") set(MOJA_VERSION_MINOR "0") @@ -11,15 +13,21 @@ set(MOJA_VERSION_PATCH "0") set(MOJA_VERSION_REVISION "0") set(MOJA_VERSION "${MOJA_VERSION_MAJOR}.${MOJA_VERSION_MINOR}.${MOJA_VERSION_PATCH}") +# set the path for custom cmake scripts +set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake) + +# Include some common macros to simpilfy the Moja CMake files +include(MojaMacros) + # Collect the built libraries and include dirs, the will be used to create the moja.cmake file set(Moja_COMPONENTS "") # Allow enabling and disabling components -option(ENABLE_MOJA.MODULES.ZIPPER "moja.modules.zipper" ON) -option(ENABLE_MOJA.MODULES.POCO "moja.modules.poco" OFF) -option(ENABLE_MOJA.MODULES.LIBPQ "moja.modules.libpq" ON) -option(ENABLE_MOJA.MODULES.GDAL "moja.modules.gdal" OFF) -option(ENABLE_MOJA.CLI "moja.cli" ON) +option(ENABLE_MOJA.MODULES.ZIPPER "moja.modules.zipper" OFF) +option(ENABLE_MOJA.MODULES.POCO "moja.modules.poco" OFF) +option(ENABLE_MOJA.MODULES.LIBPQ "moja.modules.libpq" OFF) +option(ENABLE_MOJA.MODULES.GDAL "moja.modules.gdal" OFF) +option(ENABLE_MOJA.CLI "moja.cli" ON) option(ENABLE_MOJA.SYSTEMTEST "moja.systemtest" OFF) if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) @@ -37,10 +45,6 @@ set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) # For Debug build types, append a "d" to the library names. 
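# (e.g. moja.core becomes moja.cored), so Debug and Release builds of the moja libraries can be installed side by side.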
set(CMAKE_DEBUG_POSTFIX "d" CACHE STRING "Set debug library postfix" FORCE) -# set the path for custom cmake scripts -set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake) -#include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/macros.cmake) - option(ENABLE_TESTS "Set to OFF|ON (default is ON) to control build of moja tests" ON) option(RUN_UNIT_TESTS_ON_BUILD "Set to OFF|ON (default is OFF) to control automatic running of moja tests at build time" OFF) option(MOJA_STATIC "Set to OFF|ON (default is OFF) to control build of moja as STATIC library" OFF) @@ -59,7 +63,6 @@ IF (ENABLE_TESTS) include(CTest) set(BOOST_TEST_REPORTING_LEVEL "SHORT" CACHE STRING "Boost unit test reporting level") set_property(CACHE BOOST_TEST_REPORTING_LEVEL PROPERTY STRINGS "SHORT" "DETAILED") - include_directories(moja.test/include) add_subdirectory(moja.test) ENABLE_TESTING() message(STATUS "Building with unittests") @@ -74,10 +77,6 @@ unset(Boost_INCLUDE_DIR CACHE) unset(Boost_LIBRARY_DIRS CACHE) if(MOJA_STATIC) - set(CMAKE_CXX_FLAGS_RELEASE "/MT") - set(CMAKE_CXX_FLAGS_DEBUG "/MTd") - - #set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--kill-at -static-libgcc -static-libstdc++") add_definitions(-DUSE_STATIC_BOOST) set(Boost_USE_STATIC_LIBS ON) set(Boost_USE_STATIC_RUNTIME ON) @@ -87,33 +86,12 @@ else(MOJA_STATIC) set(Boost_USE_STATIC_RUNTIME OFF) endif(MOJA_STATIC) -set(BOOST_COMPONENTS_NEEDED system thread filesystem date_time chrono program_options log log_setup ) -find_package(Boost COMPONENTS REQUIRED ${BOOST_COMPONENTS_NEEDED}) - -if(Boost_FOUND) - if(MOJA_STATIC) - # empty - else(MOJA_STATIC) - include_directories(${Boost_INCLUDE_DIRS}) - link_directories(${Boost_LIBRARY_DIRS}) - endif(MOJA_STATIC) -endif() - if(USE_STATIC_BOOST) message(STATUS "Linking against boost static libraries") else(USE_STATIC_BOOST) message(STATUS "Linking against boost dynamic libraries") endif(USE_STATIC_BOOST) -# Poco -find_package(Poco REQUIRED) - -# Poco -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) -endif() - # OS Detection include(CheckCXXCompilerFlag) @@ -124,14 +102,10 @@ if(CMAKE_SYSTEM MATCHES "Windows") if (CMAKE_C_COMPILER_ID MATCHES "MSVC") message(STATUS "XXX: MS Visual Compiler detected") CHECK_CXX_COMPILER_FLAG("/std:c++17" COMPILER_SUPPORTS_CXX17) - CHECK_CXX_COMPILER_FLAG("/std:c++14" COMPILER_SUPPORTS_CXX14) - #if(COMPILER_SUPPORTS_CXX17) - # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /MP /bigobj /std:c++17") - #else - if(COMPILER_SUPPORTS_CXX14) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /MP /bigobj /std:c++14") + if(COMPILER_SUPPORTS_CXX17) + message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has C++17 support.") else() - message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++14 support. Please use a different C++ compiler.") + message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++17 support. 
Please use a different C++ compiler.") endif() endif (CMAKE_C_COMPILER_ID MATCHES "MSVC") endif(CMAKE_SYSTEM MATCHES "Windows") @@ -139,19 +113,15 @@ endif(CMAKE_SYSTEM MATCHES "Windows") if (CMAKE_SYSTEM MATCHES "Linux" ) add_definitions( -DMOJA_OS_FAMILY_UNIX ) # Standard 'must be' defines - add_definitions( -D_XOPEN_SOURCE=500 -D_REENTRANT -D_THREAD_SAFE -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64) - set(SYSLIBS pthread dl rt) + #add_definitions( -D_XOPEN_SOURCE=500 -D_REENTRANT -D_THREAD_SAFE -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64) + #set(SYSLIBS pthread dl rt) CHECK_CXX_COMPILER_FLAG("-std=c++17" COMPILER_SUPPORTS_CXX17) - CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) - - #if(COMPILER_SUPPORTS_CXX17) - # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -fPIC") - #else - if(COMPILER_SUPPORTS_CXX14) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14 -fPIC") + + if(COMPILER_SUPPORTS_CXX17) + message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has C++17 support.") else() - message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++14 support. Please use a different C++ compiler.") + message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++17 support. Please use a different C++ compiler.") endif() endif(CMAKE_SYSTEM MATCHES "Linux" ) @@ -264,6 +234,42 @@ set(CPACK_SOURCE_IGNORE_FILES include(CPack) add_custom_target(dist COMMAND ${CMAKE_MAKE_PROGRAM} package_source) +############################################################# +# cmake config files + +include(CMakePackageConfigHelpers) +write_basic_package_version_file( + "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}/${PROJECT_NAME}ConfigVersion.cmake" + VERSION ${PROJECT_VERSION} + COMPATIBILITY AnyNewerVersion +) + +# Set config script install location in a location that find_package() will +# look for, which is different on MS Windows than for UNIX +# Note: also set in MOJA_GENERATE_PACKAGE macro in cmake/MojaMacros.cmake +if (WIN32) + set(MojaConfigPackageLocation "cmake") +else() + set(MojaConfigPackageLocation "lib/cmake/${PROJECT_NAME}") +endif() + +configure_file(cmake/${PROJECT_NAME}Config.cmake.in "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}/${PROJECT_NAME}Config.cmake" @ONLY) +install( + FILES + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}/${PROJECT_NAME}Config.cmake + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}/${PROJECT_NAME}ConfigVersion.cmake + DESTINATION + "${MojaConfigPackageLocation}" + COMPONENT + Devel +) + +# Install pkg-config files +install(FILES ${MOJA_PKG_CONFIG_FILES} + DESTINATION lib${LIB_SUFFIX}/pkgconfig) + +export(PACKAGE Moja) + message(STATUS "CMake ${CMAKE_VERSION} successfully configured ${PROJECT_NAME} using ${CMAKE_GENERATOR} generator") message(STATUS "Installation target path: ${CMAKE_INSTALL_PREFIX}") diff --git a/Source/cmake/FindBlosc.cmake b/Source/cmake/FindBlosc.cmake deleted file mode 100644 index 34d4483..0000000 --- a/Source/cmake/FindBlosc.cmake +++ /dev/null @@ -1,86 +0,0 @@ -find_path(BLOSC_INCLUDE_DIR blosc.h) - -find_library(BLOSC_LIBRARY NAMES blosc) - -if (BLOSC_INCLUDE_DIR AND BLOSC_LIBRARY) - set(BLOSC_FOUND TRUE) - message(STATUS "Found blosc library: ${BLOSC_LIBRARY}") -endif () - - -SET(SEARCH_PATH_BLOSC "" CACHE PATH "Additional blosc search path") -SET(BLOSC_DIR_SEARCH - $ENV{BLOSC_ROOT} - ${SEARCH_PATH_BLOSC} - ${BLOSC_DIR_SEARCH} -) -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(BLOSC_DIR_SEARCH - ${BLOSC_DIR_SEARCH} - "C:/Development/c-blosc-1.12.1/dist/" - "C:/Development/Software/c-blosc/" - ) - endif() - if (EXISTS "D:/") - 
SET(BLOSC_DIR_SEARCH - ${BLOSC_DIR_SEARCH} - "D:/Development/c-blosc-1.12.1/dist/" - "D:/Development/Software/c-blosc/" - ) - endif() - - find_path(Blosc_INCLUDE_PATH blosc.h - PATH_SUFFIXES include - PATHS - ${BLOSC_DIR_SEARCH} - ) - - find_library(Blosc_LIB blosc - PATH_SUFFIXES lib - PATHS - ${BLOSC_DIR_SEARCH} - ) -endif() - -set(ADDITIONAL_LIB_DIRS) -if (CMAKE_SYSTEM MATCHES "Linux" ) - if(CMAKE_SIZEOF_VOID_P EQUAL 4) - else() - set(ADDITIONAL_LIB_DIRS "/usr/lib64" "/usr/lib/x86_64-linux-gnu/") - endif() - - SET(BLOSC_LIB_SEARCH - ${ADDITIONAL_LIB_DIRS} - ${BLOSC_LIB_SEARCH} - /usr/local/c-blosc-master/ - /usr/local/lib/ - /usr/lib/ - ) - - find_library(Blosc_LIB blosc - PATHS - ${BLOSC_LIB_SEARCH} - ) - - SET(BLOSC_INCLUDE_SEARCH - ${BLOSC_INCLUDE_SEARCH} - /usr/local/c-blosc-master/ - /usr/local/ - /usr/ - ) - - find_path(Blosc_INCLUDE_PATH blosc.h - PATH_SUFFIXES include - PATHS - ${BLOSC_INCLUDE_SEARCH} - ) - -endif(CMAKE_SYSTEM MATCHES "Linux") - -# handle the QUIETLY and REQUIRED arguments and set BLOSC_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(Blosc DEFAULT_MSG Blosc_INCLUDE_PATH) - -MARK_AS_ADVANCED(Blosc_INCLUDE_PATH) diff --git a/Source/cmake/FindCanFire.cmake b/Source/cmake/FindCanFire.cmake deleted file mode 100644 index da0ab1f..0000000 --- a/Source/cmake/FindCanFire.cmake +++ /dev/null @@ -1,76 +0,0 @@ -SET(SEARCH_PATH_CANFIRE "" CACHE PATH "Additional CanFire search path") -SET(CANFIRE_DIR_SEARCH - $ENV{CANFIRE_ROOT} - ${SEARCH_PATH_CANFIRE} - ${CANFIRE_DIR_SEARCH} -) -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(CANFIRE_DIR_SEARCH - ${CANFIRE_DIR_SEARCH} - "C:/Dev/CanFIREDLL_x64_v0.9/x64/" - "C:/Development/CanFIREDLL_x64_v0.9/x64/" - ) - endif() - if (EXISTS "D:/") - SET(CANFIRE_DIR_SEARCH - ${CANFIRE_DIR_SEARCH} - "D:/Dev/CanFIREDLL_x64_v0.9/x64/" - "D:/Development/CanFIREDLL_x64_v0.9/x64/" - ) - endif() - - find_path(CanFire_INCLUDE_PATH headers/CanFIRE.h - PATH_SUFFIXES include - PATHS - ${CANFIRE_DIR_SEARCH} - ) - - find_library(CanFire_LIB CanFIREdll - PATH_SUFFIXES lib - PATHS - ${CANFIRE_DIR_SEARCH} - ) -endif() - -set(ADDITIONAL_LIB_DIRS) -if (CMAKE_SYSTEM MATCHES "Linux" ) - if(CMAKE_SIZEOF_VOID_P EQUAL 4) - else() - set(ADDITIONAL_LIB_DIRS "/usr/lib64" "/usr/lib/x86_64-linux-gnu/") - endif() - - SET(CANFIRE_LIB_SEARCH - ${ADDITIONAL_LIB_DIRS} - ${CANFIRE_LIB_SEARCH} - /usr/local/CanFIREDLL_x64_v0.9/ - /usr/local/lib/ - /usr/lib/ - ) - - find_library(CanFire_LIB z - PATHS - ${CANFIRE_LIB_SEARCH} - ) - - SET(CANFIRE_INCLUDE_SEARCH - ${CANFIRE_INCLUDE_SEARCH} - /usr/local/CanFIREDLL_x64_v0.9/headers/ - /usr/local/ - /usr/ - ) - - find_path(CanFire_INCLUDE_PATH CanFIRE.h - PATH_SUFFIXES include - PATHS - ${CANFIRE_INCLUDE_SEARCH} - ) - -endif(CMAKE_SYSTEM MATCHES "Linux") - -# handle the QUIETLY and REQUIRED arguments and set CANFIRE_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(CanFire DEFAULT_MSG CanFire_INCLUDE_PATH) - -MARK_AS_ADVANCED(CanFire_INCLUDE_PATH) diff --git a/Source/cmake/FindEigen.cmake b/Source/cmake/FindEigen.cmake deleted file mode 100644 index e646c74..0000000 --- a/Source/cmake/FindEigen.cmake +++ /dev/null @@ -1,59 +0,0 @@ -SET(SEARCH_PATH_EIGEN "" CACHE PATH "Additional Eigen search path") - -SET(EIGEN_DIR_SEARCH - $ENV{EIGEN_ROOT} - ${SEARCH_PATH_EIGEN} - ${EIGEN_DIR_SEARCH} -) - -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(EIGEN_DIR_SEARCH - 
${EIGEN_DIR_SEARCH} - "C:/Development/eigen-3.3.4/" - "C:/Development/eigen-3.3.3/" - "C:/Development/eigen-3.2.9/" - "C:/Development/eigen-3.2.8/" - "C:/Development/eigen-3.2.5/" - "C:/Development/eigen-3.2.4/" - "C:/Development/moja.global/Third Party/" - "C:/Program Files/eigen/" - ) - endif() - - if (EXISTS "D:/") - SET(EIGEN_DIR_SEARCH - ${EIGEN_DIR_SEARCH} - "D:/Development/eigen-3.3.4/" - "D:/Development/eigen-3.3.3/" - "D:/Development/eigen-3.2.9/" - "D:/Development/eigen-3.2.8/" - "D:/Development/eigen-3.2.5/" - "D:/Development/eigen-3.2.4/" - "D:/Development/moja.global/Third Party/" - "D:/Program Files/eigen/" - ) - endif() -endif() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(EIGEN_DIR_SEARCH - ${SEARCH_PATH_EIGEN} - ${EIGEN_DIR_SEARCH} - /usr/local/include/eigen/ - /usr/include/eigen3/ - /usr/local/include/eigen3/ - ) -endif() - -FIND_PATH(Eigen_INCLUDE_PATH Eigen/Eigen - PATHS - ${EIGEN_DIR_SEARCH} -) - -# handle the QUIETLY and REQUIRED arguments and set OPENAL_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(Eigen DEFAULT_MSG Eigen_INCLUDE_PATH) - -MARK_AS_ADVANCED(Eigen_INCLUDE_PATH) \ No newline at end of file diff --git a/Source/cmake/FindLibXL.cmake b/Source/cmake/FindLibXL.cmake deleted file mode 100644 index bef0ca9..0000000 --- a/Source/cmake/FindLibXL.cmake +++ /dev/null @@ -1,73 +0,0 @@ -SET(SEARCH_PATH_LIBXL "" CACHE PATH "Additional libXL search path") -SET(LIBXL_DIR_SEARCH - $ENV{LIBXL_ROOT} - ${SEARCH_PATH_LIBXL} - ${LIBXL_DIR_SEARCH} -) -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(LIBXL_DIR_SEARCH - ${LIBXL_DIR_SEARCH} - "C:/Development/libxl-3.8.1.0" - ) - endif() - if (EXISTS "D:/") - SET(LIBXL_DIR_SEARCH - ${LIBXL_DIR_SEARCH} - "D:/Development/libxl-3.8.1.0" - ) - endif() - - find_path(libXL_INCLUDE_PATH libxl.h - PATH_SUFFIXES include_cpp - PATHS - ${LIBXL_DIR_SEARCH} - ) - - find_library(libXL_LIB libxl - PATH_SUFFIXES lib64 - PATHS - ${LIBXL_DIR_SEARCH} - ) -endif() - -## Have not purchased the library for linux yet! 
-## So disabled Linux cmake part -## -##set(ADDITIONAL_LIB_DIRS) -##if (CMAKE_SYSTEM MATCHES "Linux" ) -## SET(LIBXL_LIB_SEARCH -## ${ADDITIONAL_LIB_DIRS} -## ${LIBXL_LIB_SEARCH} -## /usr/local/libxl/ -## /usr/local/lib/ -## /usr/lib/ -## ) -## -## find_library(libXL_LIB z -## PATHS -## ${LIBXL_LIB_SEARCH} -## ) -## -## SET(ZLIB_INCLUDE_SEARCH -## ${ZLIB_INCLUDE_SEARCH} -## /usr/local/zipper-master/ -## /usr/local/ -## /usr/ -## ) -## -## find_path(libXL_INCLUDE_PATH zlib.h -## PATH_SUFFIXES -## include_cpp -## PATHS -## ${ZLIB_INCLUDE_SEARCH} -## ) -## -##endif(CMAKE_SYSTEM MATCHES "Linux") - -# handle the QUIETLY and REQUIRED arguments and set ZLIB_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(libXL DEFAULT_MSG libXL_INCLUDE_PATH) - -MARK_AS_ADVANCED(libXL_INCLUDE_PATH) diff --git a/Source/cmake/FindLibpq.cmake b/Source/cmake/FindLibpq.cmake deleted file mode 100644 index bbe6141..0000000 --- a/Source/cmake/FindLibpq.cmake +++ /dev/null @@ -1,92 +0,0 @@ -SET(SEARCH_PATH_LIBPQ "" CACHE PATH "Additional Libpq search path") -SET(SEARCH_PATH_LIB_LIBPQ "" CACHE PATH "Additional Libpq library search path") - -SET(LIBPQ_DIR_SEARCH - $ENV{LIBPQ_ROOT} - ${SEARCH_PATH_LIBPQ} - ${LIBPQ_DIR_SEARCH} -) -SET(LIBPQ_LIB_DIR_SEARCH - $ENV{LIBPQ_ROOT} - ${SEARCH_PATH_LIB_LIBPQ} - ${LIBPQ_DIR_SEARCH} -) - -if(CMAKE_SYSTEM MATCHES "Windows") - SET(LIBPQ_LIBNAME "libpq") - if (EXISTS "C:/") - SET(LIBPQ_DIR_SEARCH - ${LIBPQ_DIR_SEARCH} - "C:/Program Files/PostgreSQL/11/include" - "C:/Program Files/PostgreSQL/10/include" - "C:/Program Files/PostgreSQL/9.6/include" - "C:/Development/PG9.5/include" - "C:/Program Files/PostgreSQL/9.5/include" - "C:/Program Files/PostgreSQL/9.4/include" - ) - SET(LIBPQ_LIB_DIR_SEARCH - ${LIBPQ_LIB_DIR_SEARCH} - "C:/Program Files/PostgreSQL/11/lib" - "C:/Program Files/PostgreSQL/10/lib" - "C:/Program Files/PostgreSQL/9.6/lib" - "C:/Development/PG9.5/lib" - "C:/Program Files/PostgreSQL/9.5/lib" - "C:/Program Files/PostgreSQL/9.4/lib" - ) - endif() - if (EXISTS "D:/") - SET(LIBPQ_DIR_SEARCH - ${LIBPQ_DIR_SEARCH} - D:/Development - "D:/Program Files/PostgreSQL/10/include" - "D:/Program Files/PostgreSQL/9.6/include" - "D:/Program Files/PostgreSQL/9.5/include" - "D:/Program Files/PostgreSQL/9.4/include" - ) - SET(LIBPQ_LIB_DIR_SEARCH - ${LIBPQ_LIB_DIR_SEARCH} - "D:/Program Files/PostgreSQL/10/lib" - "D:/Program Files/PostgreSQL/9.6/lib" - "D:/Program Files/PostgreSQL/9.5/lib" - "D:/Program Files/PostgreSQL/9.4/lib" - ) - endif() -endif() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(LIBPQ_LIBNAME "pq") - SET(LIBPQ_DIR_SEARCH - ${LIBPQ_DIR_SEARCH} - /usr/include/postgresql/ - /usr/pgsql-9.5/include - /usr/include/pgsql95/ - ) - SET(LIBPQ_LIB_DIR_SEARCH - ${LIBPQ_LIB_DIR_SEARCH} - /usr/lib/x86_64-linux-gnu/ - /usr/pgsql-9.5/lib - /usr/lib64/ - ) -endif(CMAKE_SYSTEM MATCHES "Linux") - -FIND_PATH(LIBPQ_INCLUDE_PATH libpq-fe.h - PATHS - ${LIBPQ_DIR_SEARCH} -) - -IF(NOT LIBPQ_LIBRARY_DIR) - -FIND_LIBRARY(LIBPQ_LIB NAMES ${LIBPQ_LIBNAME} - PATHS # Look in other places. 
- ${LIBPQ_LIB_DIR_SEARCH} -) -GET_FILENAME_COMPONENT(LIBPQ_LIBRARY_DIR ${LIBPQ_LIB} PATH CACHE) - -ENDIF(NOT LIBPQ_LIBRARY_DIR) - -# handle the QUIETLY and REQUIRED arguments and set OPENAL_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(LIBPQ DEFAULT_MSG LIBPQ_INCLUDE_PATH) - -MARK_AS_ADVANCED(LIBPQ_INCLUDE_PATH) \ No newline at end of file diff --git a/Source/cmake/FindMoja.cmake b/Source/cmake/FindMoja.cmake deleted file mode 100644 index 623ff56..0000000 --- a/Source/cmake/FindMoja.cmake +++ /dev/null @@ -1,321 +0,0 @@ -SET(SEARCH_PATH_MOJA "" CACHE PATH "Additional Moja search path") -SET(SEARCH_PATH_LIB_MOJA "" CACHE PATH "Additional Moja library search path") - -SET(MOJA_INCLUDE_PATH_DESCRIPTION "top-level directory containing the moja include directories. E.g /usr/local/include/ or c:\\moja\\include\\moja_1_0_3") -SET(MOJA_INCLUDE_DIR_MESSAGE "Set the MOJA_INCLUDE_DIR cmake cache entry to the ${MOJA_INCLUDE_PATH_DESCRIPTION}") -SET(MOJA_LIBRARY_PATH_DESCRIPTION "top-level directory containing the moja libraries.") -SET(MOJA_LIBRARY_DIR_MESSAGE "Set the Moja_LIBRARY_DIR cmake cache entry to the ${MOJA_LIBRARY_PATH_DESCRIPTION}") - -SET(SEARCH_PATH_MOJA "" CACHE PATH "Additional Moja search path") - -if(CMAKE_SYSTEM MATCHES "Windows") - - SET(MOJA_DIR_SEARCH - $ENV{MOJA_ROOT} - ${SEARCH_PATH_MOJA} - ${MOJA_DIR_SEARCH} - ) - SET(MOJA_LIB_DIR_SEARCH - $ENV{MOJA_ROOT} - ${SEARCH_PATH_LIB_MOJA} - ${MOJA_DIR_SEARCH} - ) - - SET(MOJA_LIBNAME "moja") - if (EXISTS "C:/") - SET(MOJA_DIR_SEARCH - ${MOJA_DIR_SEARCH} - "C:/Development/moja.global/dist" - "C:/Development/moja.global/dist/lib/Debug" - "C:/Development/moja.global/dist/lib/Release" - "C:/Development/Software/moja/moja_develop" - ) - SET(MOJA_LIB_DIR_SEARCH - ${MOJA_LIB_DIR_SEARCH} - "C:/Development/moja.global/dist" - "C:/Development/moja.global/dist/lib/Debug" - "C:/Development/moja.global/dist/lib/Release" - "C:/Development/Software/moja/moja_develop" - ) - endif() -endif() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - - SET(MOJA_DIR_SEARCH - ${MOJA_DIR_SEARCH} -# /usr/local/include/ - /usr/local/ - ) - SET(MOJA_LIB_DIR_SEARCH - ${MOJA_LIB_DIR_SEARCH} -# /usr/local/lib/ - /usr/local/ - /usr/lib/x86_64-linux-gnu/ - ) -endif(CMAKE_SYSTEM MATCHES "Linux") - -SET(SUFFIX_FOR_INCLUDE_PATH - moja_develop - moja.develop - moja_1_0_5 - moja_1_0_4 - moja_1_0_3 - moja_1_0_1 - moja -) - -SET(SUFFIX_FOR_LIBRARY_PATH - moja_develop/lib - moja_develop/bin - moja.develop/lib - moja.develop/bin - moja_1_0_5/lib - moja_1_0_5/bin - moja_1_0_4/lib - moja_1_0_4/bin - moja_1_0_3/lib - moja_1_0_3/bin - moja_1_0_1/lib - moja_1_0_1/bin - lib - bin - moja/lib - moja/bin -) - -# -# Look for an installation. -# -FIND_PATH( - MOJA_INCLUDE_DIR NAMES - include/moja/itiming.h - PATH_SUFFIXES - ${SUFFIX_FOR_INCLUDE_PATH} - PATHS - # Look in other places. - ${MOJA_INCLUDE_DIR} - ${MOJA_DIR_SEARCH} - DOC - # Help the user find it if we cannot. - "The ${MOJA_INCLUDE_PATH_DESCRIPTION}" -) - -# -# Look for standard unix include paths -# -IF(NOT MOJA_INCLUDE_DIR) - FIND_PATH( - MOJA_INCLUDE_DIR - moja/itiming.h - DOC - "The ${MOJA_INCLUDE_PATH_DESCRIPTION}" - ) -ENDIF(NOT MOJA_INCLUDE_DIR) - -# -# Assume we didn't find it. -# -SET(Moja_FOUND 0) - -# -# Now try to get the include and library path. 
-# -SET(Moja_INCLUDE_DIRS - ${MOJA_INCLUDE_DIR}/include - CACHE PATH - "Location of Moja include files" -) -SET(Moja_FOUND 1) - -IF(NOT MOJA_LIBRARY_DIR) - - FIND_LIBRARY( - MOJA_LIB NAMES - moja.core moja.cored - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS - # Look in other places. - ${Moja_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - DOC - # Help the user find it if we cannot. - "The ${MOJA_LIBRARY_PATH_DESCRIPTION}" - ) - GET_FILENAME_COMPONENT(MOJA_LIBRARY_DIR ${MOJA_LIB} PATH CACHE) - - IF(Moja_LIBRARY_DIR) - # Look for the moja binary path. - SET(Moja_BINARY_DIR ${MOJA_INCLUDE_DIR}) - - IF(Moja_BINARY_DIR AND EXISTS "${Moja_BINARY_DIR}/bin64") - SET(Moja_BINARY_DIRS ${Moja_BINARY_DIR}/bin64 CACHE PATH "Path to Moja binaries") - ENDIF(Moja_BINARY_DIR AND EXISTS "${Moja_BINARY_DIR}/bin64") - ENDIF(Moja_LIBRARY_DIR) - - # Debug lubraries - find_library( - Moja_CORE_DEBUG NAMES - moja.cored - moja.cored_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${MOJA_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - ) - - find_library( - Moja_DATAREPOSITORY_DEBUG NAMES - moja.datarepositoryd - moja.datarepositoryd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${MOJA_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - ) - - find_library( - Moja_FLINT_CONFIGURATION_DEBUG NAMES - moja.flint.configurationd - moja.flint.configurationd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${MOJA_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - ) - - find_library( - Moja_FLINT_DEBUG NAMES - moja.flintd - moja.flintd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${MOJA_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - ) - - # Release lubraries - find_library( - Moja_CORE_RELEASE NAMES - moja.core - moja.core_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${MOJA_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - ) - - find_library( - Moja_DATAREPOSITORY_RELEASE NAMES - moja.datarepository - moja.datarepository_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${MOJA_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - ) - - find_library( - Moja_FLINT_CONFIGURATION_RELEASE NAMES - moja.flint.configuration - moja.flint.configuration_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${MOJA_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - ) - - find_library( - Moja_FLINT_RELEASE NAMES - moja.flint - moja.flint_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. 
- ${MOJA_INCLUDE_DIR} - ${MOJA_LIB_DIR_SEARCH} - ) - - if(Moja_CORE_DEBUG AND NOT Moja_CORE_RELEASE) - set(Moja_CORE ${Moja_CORE_DEBUG} CACHE STRING "Moja core link library text") - endif() - if(Moja_CORE_RELEASE AND NOT Moja_CORE_DEBUG) - set(Moja_CORE ${Moja_CORE_RELEASE} CACHE STRING "Moja core link library text") - endif() - if(Moja_CORE_RELEASE AND Moja_CORE_DEBUG) - set(Moja_CORE debug ${Moja_CORE_DEBUG} - optimized ${Moja_CORE_RELEASE} - CACHE STRING "Moja core link library text") - endif() - - if(Moja_DATAREPOSITORY_DEBUG AND NOT Moja_DATAREPOSITORY_RELEASE) - set(Moja_DATAREPOSITORY ${Moja_DATAREPOSITORY_DEBUG} CACHE STRING "Moja datarepository link library text") - endif() - if(Moja_DATAREPOSITORY_RELEASE AND NOT Moja_DATAREPOSITORY_DEBUG) - set(Moja_DATAREPOSITORY ${Moja_DATAREPOSITORY_RELEASE} CACHE STRING "Moja datarepository link library text") - endif() - if(Moja_DATAREPOSITORY_RELEASE AND Moja_DATAREPOSITORY_DEBUG) - set(Moja_DATAREPOSITORY debug ${Moja_DATAREPOSITORY_DEBUG} - optimized ${Moja_DATAREPOSITORY_RELEASE} - CACHE STRING "Moja datarepository link library text") - endif() - - if(Moja_FLINT_CONFIGURATION_DEBUG AND NOT Moja_FLINT_CONFIGURATION_RELEASE) - set(Moja_FLINT_CONFIGURATION ${Moja_FLINT_CONFIGURATION_DEBUG} CACHE STRING "Moja flint configuration link library text") - endif() - if(Moja_FLINT_CONFIGURATION_RELEASE AND NOT Moja_FLINT_CONFIGURATION_DEBUG) - set(Moja_FLINT_CONFIGURATION ${Moja_FLINT_CONFIGURATION_RELEASE} CACHE STRING "Moja flint configuration link library text") - endif() - if(Moja_FLINT_CONFIGURATION_RELEASE AND Moja_FLINT_CONFIGURATION_DEBUG) - set(Moja_FLINT_CONFIGURATION debug ${Moja_FLINT_CONFIGURATION_DEBUG} - optimized ${Moja_FLINT_CONFIGURATION_RELEASE} - CACHE STRING "Moja flint configuration link library text") - endif() - - if(Moja_FLINT_DEBUG AND NOT Moja_FLINT_RELEASE) - set(Moja_FLINT ${Moja_FLINT_DEBUG} CACHE STRING "Moja flint link library text") - endif() - if(Moja_FLINT_RELEASE AND NOT Moja_FLINT_DEBUG) - set(Moja_FLINT ${Moja_FLINT_RELEASE} CACHE STRING "Moja flint link library text") - endif() - if(Moja_FLINT_RELEASE AND Moja_FLINT_DEBUG) - set(Moja_FLINT debug ${Moja_FLINT_DEBUG} - optimized ${Moja_FLINT_RELEASE} - CACHE STRING "Moja flint link library text") - endif() - - -ENDIF(NOT MOJA_LIBRARY_DIR) - -IF(NOT Moja_FOUND) - IF(Moja_FIND_QUIETLY) - message(STATUS "Moja was not found. ${MOJA_INCLUDE_DIR_MESSAGE}") - ELSE(Moja_FIND_QUIETLY) - IF(Moja_FIND_REQUIRED) - message(FATAL_ERROR "Moja was not found. 
${MOJA_INCLUDE_DIR_MESSAGE}") - ENDIF(Moja_FIND_REQUIRED) - ENDIF(Moja_FIND_QUIETLY) -ENDIF(NOT Moja_FOUND) - -# handle the QUIETLY and REQUIRED arguments and set OPENAL_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(MOJA DEFAULT_MSG MOJA_INCLUDE_DIR) - -MARK_AS_ADVANCED(MOJA_INCLUDE_DIR) \ No newline at end of file diff --git a/Source/cmake/FindMojaCanada.cmake b/Source/cmake/FindMojaCanada.cmake deleted file mode 100644 index 3b7fc27..0000000 --- a/Source/cmake/FindMojaCanada.cmake +++ /dev/null @@ -1,83 +0,0 @@ -SET(SEARCH_PATH_MOJA.CANADA "" CACHE PATH "Additional Moja.Canada search path") -SET(MOJA.CANADA_DIR_SEARCH - $ENV{MOJA.CANADA_ROOT} - ${SEARCH_PATH_MOJA.CANADA} - ${MOJA.CANADA_DIR_SEARCH} -) -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(MOJA.CANADA_DIR_SEARCH - ${MOJA.CANADA_DIR_SEARCH} - "C:/Development/SLEEK-TOOLS/Moja.Canada/dist/" - "C:/Development/Software/Moja.Canada/" - ) - endif() - if (EXISTS "D:/") - SET(MOJA.CANADA_DIR_SEARCH - ${MOJA.CANADA_DIR_SEARCH} - "D:/Development/SLEEK-TOOLS/Moja.Canada/dist/" - "D:/Development/Software/Moja.Canada/" - ) - endif() - - find_path(Moja.Canada_INCLUDE_PATH cbmmodulebase.h - PATH_SUFFIXES - include/moja/modules/cbm - PATHS - ${MOJA.CANADA_DIR_SEARCH} - ) - - find_library(Moja.Canada_LIB_DEBUG moja.modules.cbmd - PATH_SUFFIXES - lib - PATHS - ${MOJA.CANADA_DIR_SEARCH} - ) - - find_library(Moja.Canada_LIB_RELEASE moja.modules.cbm - PATH_SUFFIXES - lib - PATHS - ${MOJA.CANADA_DIR_SEARCH} - ) - - set(Moja.Canada_LIB - debug ${Moja.Canada_LIB_DEBUG} - optimized ${Moja.Canada_LIB_RELEASE} - CACHE STRING "Moja.Canada library") - -endif() - -if (CMAKE_SYSTEM MATCHES "Linux") - SET(MOJA.CANADA_DIR_SEARCH - ${MOJA.CANADA_DIR_SEARCH} - /usr/local/ - /usr/ - ) - - find_path(Moja.Canada_INCLUDE_PATH cbmmodulebase.h - PATH_SUFFIXES - include/moja/modules/cbm - PATHS - ${MOJA.CANADA_DIR_SEARCH} - ) - - find_library(Moja.Canada_LIB moja.modules.cbm - PATH_SUFFIXES - lib - PATHS - ${MOJA.CANADA_DIR_SEARCH} - ) - - # set(Moja.Canada_LIB - # ${Moja.Canada_LIB_RELEASE} - # CACHE STRING "Moja.Canada library") - -endif(CMAKE_SYSTEM MATCHES "Linux") - -# handle the QUIETLY and REQUIRED arguments and set MOJA.CANADA_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(Moja.Canada DEFAULT_MSG Moja.Canada_INCLUDE_PATH) - -MARK_AS_ADVANCED(Moja.Canada_INCLUDE_PATH) diff --git a/Source/cmake/FindMongoDBCPlusPlusDriver.cmake b/Source/cmake/FindMongoDBCPlusPlusDriver.cmake deleted file mode 100644 index 48f9a9a..0000000 --- a/Source/cmake/FindMongoDBCPlusPlusDriver.cmake +++ /dev/null @@ -1,88 +0,0 @@ -SET(SEARCH_PATH_MONGODBCPLUSPLUSDRIVER "" CACHE PATH "Additional MongoDBCPlusPlusDriver search path") - -if(CMAKE_SYSTEM MATCHES "Windows") - - set(MONGODBCPLUSPLUSDRIVER_DIR_SEARCH - $ENV{MONGODBCPLUSPLUSDRIVER_ROOT} - ${SEARCH_PATH_MONGODBCPLUSPLUSDRIVER} - ${MONGODBCPLUSPLUSDRIVER_DIR_SEARCH} - ) - - if (EXISTS "C:/") - set(MONGODBCPLUSPLUSDRIVER_DIR_SEARCH - ${MONGODBCPLUSPLUSDRIVER_DIR_SEARCH} - C:/Development - "C:/Development/moja.global/Third Party" - "C:/Program Files/" - "C:/Program Files/mongo" - "C:/Program Files/mongo-cxx-driver" - ) - endif() - - if (EXISTS "D:/") - set(MONGODBCPLUSPLUSDRIVER_DIR_SEARCH - ${MONGODBCPLUSPLUSDRIVER_DIR_SEARCH} - D:/Development - "D:/Development/moja.global/Third Party" - "D:/Program Files/" - "D:/Program Files/mongo" - "D:/Program 
Files/mongo-cxx-driver" - ) - endif() - -endif() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - set(MONGODBCPLUSPLUSDRIVER_DIR_SEARCH - $ENV{MONGODBCPLUSPLUSDRIVER_ROOT} - ${SEARCH_PATH_MONGODBCPLUSPLUSDRIVER} - ${MONGODBCPLUSPLUSDRIVER_DIR_SEARCH} - /home/vagrant/moja.global/Third Party/ - /usr/ - /Library/Frameworks - /System/Library/Frameworks - /Network/Library/Frameworks - ) -endif(CMAKE_SYSTEM MATCHES "Linux") - -# Add in some path suffixes. These will have to be updated whenever a new Driver version version comes out. -SET(SUFFIX_FOR_INCLUDE_PATH - mongo-cxx-driver-legacy-1.1.0/include - mongo-cxx-driver-legacy-1.0.6/include - mongo-cxx-driver/include -) - -# MESSAGE("SUFFIX_FOR_INCLUDE_PATH: " ${SUFFIX_FOR_INCLUDE_PATH}) - -find_path(MONGODBCPLUSPLUSDRIVER_INCLUDE_PATH mongo/config.h - HINTS - $ENV{MONGODBCPLUSPLUSDRIVER_ROOT} - PATH_SUFFIXES ${SUFFIX_FOR_INCLUDE_PATH} - PATHS - ${MONGODBCPLUSPLUSDRIVER_DIR_SEARCH} -) - -SET(SUFFIX_FOR_LIBRARY_PATH - mongo-cxx-driver-legacy-1.1.0/lib - mongo-cxx-driver-legacy-1.0.6/lib - mongo-cxx-driver/lib -) - -IF(NOT MONGODBCPLUSPLUSDRIVER_LIBRARY_DIR) - -FIND_LIBRARY(MONGODBCPLUSPLUSDRIVER_LIB NAMES mongoclient mongoclient-gd libmongoclient libmongoclient-gd - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${MONGODBCPLUSPLUSDRIVER_DIR_SEARCH} -) -GET_FILENAME_COMPONENT(MONGODBCPLUSPLUSDRIVER_LIBRARY_DIR ${MONGODBCPLUSPLUSDRIVER_LIB} PATH CACHE) - -ENDIF(NOT MONGODBCPLUSPLUSDRIVER_LIBRARY_DIR) - -# handle the QUIETLY and REQUIRED arguments and set OPENAL_FOUND to TRUE if -# all listed variables are TRUE -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(MONGODBCPLUSPLUSDRIVER DEFAULT_MSG MONGODBCPLUSPLUSDRIVER_INCLUDE_PATH) - -mark_as_advanced(MONGODBCPLUSPLUSDRIVER_INCLUDE_PATH) \ No newline at end of file diff --git a/Source/cmake/FindNetCDF.cmake b/Source/cmake/FindNetCDF.cmake deleted file mode 100644 index 240b8a9..0000000 --- a/Source/cmake/FindNetCDF.cmake +++ /dev/null @@ -1,191 +0,0 @@ -# - Find NetCDF -# Find the native NetCDF includes and library -# -# NETCDF_INCLUDE_DIR - user modifiable choice of where netcdf headers are -# NETCDF_LIBRARY - user modifiable choice of where netcdf libraries are -# -# Your package can require certain interfaces to be FOUND by setting these -# -# NETCDF_CXX - require the C++ interface and link the C++ library -# NETCDF_F77 - require the F77 interface and link the fortran library -# NETCDF_F90 - require the F90 interface and link the fortran library -# -# Or equivalently by calling FindNetCDF with a COMPONENTS argument containing one or -# more of "CXX;F77;F90". -# -# When interfaces are requested the user has access to interface specific hints: -# -# NETCDF_${LANG}_INCLUDE_DIR - where to search for interface header files -# NETCDF_${LANG}_LIBRARY - where to search for interface libraries -# -# This module returns these variables for the rest of the project to use. -# -# NETCDF_FOUND - True if NetCDF found including required interfaces (see below) -# NETCDF_LIBRARIES - All netcdf related libraries. -# NETCDF_INCLUDE_DIRS - All directories to include. -# NETCDF_HAS_INTERFACES - Whether requested interfaces were found or not. 
-# NETCDF_${LANG}_INCLUDE_DIRS/NETCDF_${LANG}_LIBRARIES - C/C++/F70/F90 only interface -# -# Normal usage would be: -# set (NETCDF_F90 "YES") -# find_package (NetCDF REQUIRED) -# target_link_libraries (uses_everthing ${NETCDF_LIBRARIES}) -# target_link_libraries (only_uses_f90 ${NETCDF_F90_LIBRARIES}) - -#search starting from user editable cache var -if (NETCDF_INCLUDE_DIR AND NETCDF_LIBRARY) - # Already in cache, be silent - set (NETCDF_FIND_QUIETLY TRUE) -endif () - -set(USE_DEFAULT_PATHS "NO_DEFAULT_PATH") -if(NETCDF_USE_DEFAULT_PATHS) - set(USE_DEFAULT_PATHS "") -endif() - -find_path (NETCDF_INCLUDE_DIR netcdf.h - PATHS - "${NETCDF_DIR}/Debug/include" - "C:/Development/Software/NetCDF/Debug/include" -) -#set(NETCDF_INCLUDE_DIR ${NETCDF_INCLUDE_DIR_DEBUG}) -mark_as_advanced (NETCDF_INCLUDE_DIR) -set (NETCDF_C_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR}) - -find_library (NETCDF_LIBRARY_DEBUG NAMES netcdf - PATHS - "${NETCDF_DIR}/Debug/lib" - "C:/Development/Software/NetCDF/Debug/lib" - HINTS "${NETCDF_INCLUDE_DIR}/Debug/../lib") -#mark_as_advanced (NETCDF_LIBRARY_DEBUG) -#set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY_DEBUG}) - -find_library (NETCDF_LIBRARY_RELEASE NAMES netcdf - PATHS - "${NETCDF_DIR}/Release/lib" - "C:/Development/Software/NetCDF/Release/lib" - HINTS "${NETCDF_INCLUDE_DIR}/Release/../lib") -#mark_as_advanced (NETCDF_LIBRARY_RELEASE) -#set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY_RELEASE}) - -set (NETCDF_LIBRARY debug ${NETCDF_LIBRARY_DEBUG} - optimized ${NETCDF_LIBRARY_RELEASE} - CACHE STRING "NETCDF_LIBRARY libraries") -mark_as_advanced(NETCDF_LIBRARY) -set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY}) - -#start finding requested language components -set (NetCDF_libs "") -set (NetCDF_includes "${NETCDF_INCLUDE_DIR}") - -get_filename_component (NetCDF_lib_dirs "${NETCDF_LIBRARY}" PATH) -set (NETCDF_HAS_INTERFACES "YES") # will be set to NO if we're missing any interfaces - -macro (NetCDF_check_interface lang header libs) - if (NETCDF_${lang}) - #search starting from user modifiable cache var - find_path (NETCDF_${lang}_INCLUDE_DIR NAMES ${header} - HINTS "${NETCDF_INCLUDE_DIR}" - HINTS "${NETCDF_${lang}_ROOT}/include" - ${USE_DEFAULT_PATHS}) - - find_library (NETCDF_${lang}_LIBRARY NAMES ${libs} - HINTS "${NetCDF_lib_dirs}" - HINTS "${NETCDF_${lang}_ROOT}/lib" - ${USE_DEFAULT_PATHS}) - - mark_as_advanced (NETCDF_${lang}_INCLUDE_DIR NETCDF_${lang}_LIBRARY) - - #export to internal varS that rest of project can use directly - set (NETCDF_${lang}_LIBRARIES ${NETCDF_${lang}_LIBRARY}) - set (NETCDF_${lang}_INCLUDE_DIRS ${NETCDF_${lang}_INCLUDE_DIR}) - - if (NETCDF_${lang}_INCLUDE_DIR AND NETCDF_${lang}_LIBRARY) - list (APPEND NetCDF_libs ${NETCDF_${lang}_LIBRARY}) - list (APPEND NetCDF_includes ${NETCDF_${lang}_INCLUDE_DIR}) - else () - set (NETCDF_HAS_INTERFACES "NO") - message (STATUS "Failed to find NetCDF interface for ${lang}") - endif () - endif () -endmacro () - -macro (NetCDF_check_interface_config lang config header libs) - if (NETCDF_${lang}) - #search starting from user modifiable cache var - find_path (NETCDF_${lang}_INCLUDE_DIR_${config} NAMES ${header} - PATHS - "${NETCDF_CXX_DIR}/${Config}/include" - "C:/Development/Software/NCXX/${config}/include" - HINTS "${NETCDF_INCLUDE_DIR}/${config}" - HINTS "${NETCDF_${lang}_ROOT}/${config}/include" - #${USE_DEFAULT_PATHS} - ) - - find_library (NETCDF_${lang}_LIBRARY_${config} NAMES ${libs} - PATHS - "${NETCDF_CXX_DIR}/${config}/lib" - "C:/Development/Software/NCXX/${config}/lib" - HINTS "${NetCDF_lib_dirs}/${config}" - HINTS 
"${NETCDF_${lang}_ROOT}/${config}/lib" - #${USE_DEFAULT_PATHS} - ) - - mark_as_advanced (NETCDF_${lang}_INCLUDE_DIR_${config} NETCDF_${lang}_LIBRARY_${config}) - - if (NETCDF_${lang}_INCLUDE_DIR_${config} AND NETCDF_${lang}_LIBRARY_${config}) - # - else () - set (NETCDF_HAS_INTERFACES "NO") - message (STATUS "Failed to find NetCDF interface for ${lang}") - endif () - endif () -endmacro () - -macro (NetCDF_set_interface_config lang config_release config_debug) - if (NETCDF_${lang}) - - set (NETCDF_${lang}_LIBRARIES debug ${NETCDF_${lang}_LIBRARY_${config_debug}} - optimized ${NETCDF_${lang}_LIBRARY_${config_release}} - CACHE STRING "NETCDF_${lang}_LIBRARIES") - - set (NETCDF_${lang}_INCLUDE_DIRS ${NETCDF_${lang}_INCLUDE_DIR_${config_debug}} - CACHE STRING "NETCDF_${lang}_INCLUDE_DIRS") - - list (APPEND NetCDF_libs ${NETCDF_${lang}_LIBRARIES}) - list (APPEND NetCDF_includes ${NETCDF_${lang}_INCLUDE_DIRS}) - endif () -endmacro () - - -list (FIND NetCDF_FIND_COMPONENTS "CXX" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_CXX 1) -endif () -list (FIND NetCDF_FIND_COMPONENTS "F77" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_F77 1) -endif () -list (FIND NetCDF_FIND_COMPONENTS "F90" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_F90 1) -endif () - -NetCDF_check_interface_config (CXX Debug ncVar.h netcdf-cxx4) -NetCDF_check_interface_config (CXX Release ncVar.h netcdf-cxx4) -NetCDF_set_interface_config (CXX Release Debug) - -#NetCDF_check_interface (CXX netcdfcpp.h netcdf_c++) -NetCDF_check_interface (F77 netcdf.inc netcdff) -NetCDF_check_interface (F90 netcdf.mod netcdff) - -#export accumulated results to internal varS that rest of project can depend on -list (APPEND NetCDF_libs "${NETCDF_C_LIBRARIES}") -set (NETCDF_LIBRARIES ${NetCDF_libs}) -set (NETCDF_INCLUDE_DIRS ${NetCDF_includes}) - -# handle the QUIETLY and REQUIRED arguments and set NETCDF_FOUND to TRUE if -# all listed variables are TRUE -include (FindPackageHandleStandardArgs) -find_package_handle_standard_args (NetCDF - DEFAULT_MSG NETCDF_LIBRARIES NETCDF_INCLUDE_DIRS NETCDF_HAS_INTERFACES) \ No newline at end of file diff --git a/Source/cmake/FindNetCDF.jim.cmake b/Source/cmake/FindNetCDF.jim.cmake deleted file mode 100644 index b1dbcc0..0000000 --- a/Source/cmake/FindNetCDF.jim.cmake +++ /dev/null @@ -1,191 +0,0 @@ -# - Find NetCDF -# Find the native NetCDF includes and library -# -# NETCDF_INCLUDE_DIR - user modifiable choice of where netcdf headers are -# NETCDF_LIBRARY - user modifiable choice of where netcdf libraries are -# -# Your package can require certain interfaces to be FOUND by setting these -# -# NETCDF_CXX - require the C++ interface and link the C++ library -# NETCDF_F77 - require the F77 interface and link the fortran library -# NETCDF_F90 - require the F90 interface and link the fortran library -# -# Or equivalently by calling FindNetCDF with a COMPONENTS argument containing one or -# more of "CXX;F77;F90". -# -# When interfaces are requested the user has access to interface specific hints: -# -# NETCDF_${LANG}_INCLUDE_DIR - where to search for interface header files -# NETCDF_${LANG}_LIBRARY - where to search for interface libraries -# -# This module returns these variables for the rest of the project to use. -# -# NETCDF_FOUND - True if NetCDF found including required interfaces (see below) -# NETCDF_LIBRARIES - All netcdf related libraries. -# NETCDF_INCLUDE_DIRS - All directories to include. -# NETCDF_HAS_INTERFACES - Whether requested interfaces were found or not. 
-# NETCDF_${LANG}_INCLUDE_DIRS/NETCDF_${LANG}_LIBRARIES - C/C++/F70/F90 only interface -# -# Normal usage would be: -# set (NETCDF_F90 "YES") -# find_package (NetCDF REQUIRED) -# target_link_libraries (uses_everthing ${NETCDF_LIBRARIES}) -# target_link_libraries (only_uses_f90 ${NETCDF_F90_LIBRARIES}) - -#search starting from user editable cache var -if (NETCDF_INCLUDE_DIR AND NETCDF_LIBRARY) - # Already in cache, be silent - set (NETCDF_FIND_QUIETLY TRUE) -endif () - -set(USE_DEFAULT_PATHS "NO_DEFAULT_PATH") -if(NETCDF_USE_DEFAULT_PATHS) - set(USE_DEFAULT_PATHS "") -endif() - -#### find_path (NETCDF_INCLUDE_DIR netcdf.h -#### PATHS "${NETCDF_DIR}/include") -#### mark_as_advanced (NETCDF_INCLUDE_DIR) -#### set (NETCDF_C_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR}) - -find_path (NETCDF_INCLUDE_DIR_DEBUG netcdf.h - PATHS - "${NETCDF_DIR}/Debug/include" - "C:/Development/Software/NetCDF/Debug/include" -) -mark_as_advanced (NETCDF_INCLUDE_DIR_DEBUG) -set (NETCDF_C_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR_DEBUG}) - -find_path (NETCDF_INCLUDE_DIR_RELEASE netcdf.h - PATHS - "${NETCDF_DIR}/Release/include" - "C:/Development/Software/NetCDF/Release/include" -) -mark_as_advanced (NETCDF_INCLUDE_DIR_RELEASE) -set (NETCDF_C_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR_RELEASE}) - -#### find_library (NETCDF_LIBRARY NAMES netcdf -#### PATHS "${NETCDF_DIR}/lib" -#### HINTS "${NETCDF_INCLUDE_DIR}/../lib") -#### mark_as_advanced (NETCDF_LIBRARY) -#### set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY}) - -find_library (NETCDF_LIBRARY_DEBUG NAMES netcdf - PATHS - "${NETCDF_DIR}/Debug/lib" - "C:/Development/Software/NetCDF/Debug/lib" - HINTS "${NETCDF_INCLUDE_DIR}/Debug/../lib") -mark_as_advanced (NETCDF_LIBRARY_DEBUG) -set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY_DEBUG}) - -find_library (NETCDF_LIBRARY_RELEASE NAMES netcdf - PATHS - "${NETCDF_DIR}/Release/lib" - "C:/Development/Software/NetCDF/Release/lib" - HINTS "${NETCDF_INCLUDE_DIR}/Release/../lib") -mark_as_advanced (NETCDF_LIBRARY_RELEASE) -set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY_RELEASE}) - -#start finding requested language components -set (NetCDF_libs "") -set (NetCDF_includes "${NETCDF_INCLUDE_DIR}") - -get_filename_component (NetCDF_lib_dirs "${NETCDF_LIBRARY}" PATH) -set (NETCDF_HAS_INTERFACES "YES") # will be set to NO if we're missing any interfaces - -macro (NetCDF_check_interface lang header libs) - if (NETCDF_${lang}) - #search starting from user modifiable cache var - find_path (NETCDF_${lang}_INCLUDE_DIR NAMES ${header} - HINTS "${NETCDF_INCLUDE_DIR}" - HINTS "${NETCDF_${lang}_ROOT}/include" - ${USE_DEFAULT_PATHS}) - - find_library (NETCDF_${lang}_LIBRARY NAMES ${libs} - HINTS "${NetCDF_lib_dirs}" - HINTS "${NETCDF_${lang}_ROOT}/lib" - ${USE_DEFAULT_PATHS}) - - mark_as_advanced (NETCDF_${lang}_INCLUDE_DIR NETCDF_${lang}_LIBRARY) - - #export to internal varS that rest of project can use directly - set (NETCDF_${lang}_LIBRARIES ${NETCDF_${lang}_LIBRARY}) - set (NETCDF_${lang}_INCLUDE_DIRS ${NETCDF_${lang}_INCLUDE_DIR}) - - if (NETCDF_${lang}_INCLUDE_DIR AND NETCDF_${lang}_LIBRARY) - list (APPEND NetCDF_libs ${NETCDF_${lang}_LIBRARY}) - list (APPEND NetCDF_includes ${NETCDF_${lang}_INCLUDE_DIR}) - else () - set (NETCDF_HAS_INTERFACES "NO") - message (STATUS "Failed to find NetCDF interface for ${lang}") - endif () - endif () -endmacro () - -macro (NetCDF_check_interface_config lang config header libs) - if (NETCDF_${lang}) - #search starting from user modifiable cache var - find_path (NETCDF_${lang}_INCLUDE_DIR_${config} NAMES ${header} - PATHS - 
"${NETCDF_CXX_DIR}/${Config}/include" - "C:/Development/Software/NCXX/${config}/include" - HINTS "${NETCDF_INCLUDE_DIR}/${config}" - HINTS "${NETCDF_${lang}_ROOT}/${config}/include" - #${USE_DEFAULT_PATHS} - ) - - find_library (NETCDF_${lang}_LIBRARY_${config} NAMES ${libs} - PATHS - "${NETCDF_CXX_DIR}/${config}/lib" - "C:/Development/Software/NCXX/${config}/lib" - HINTS "${NetCDF_lib_dirs}/${config}" - HINTS "${NETCDF_${lang}_ROOT}/${config}/lib" - #${USE_DEFAULT_PATHS} - ) - - mark_as_advanced (NETCDF_${lang}_INCLUDE_DIR_${config} NETCDF_${lang}_LIBRARY_${config}) - - #export to internal varS that rest of project can use directly - set (NETCDF_${lang}_LIBRARIES_${config} ${NETCDF_${lang}_LIBRARY_${config}}) - set (NETCDF_${lang}_INCLUDE_DIRS_${config} ${NETCDF_${lang}_INCLUDE_DIR_${config}}) - - if (NETCDF_${lang}_INCLUDE_DIR_${config} AND NETCDF_${lang}_LIBRARY_${config}) - list (APPEND NetCDF_libs ${NETCDF_${lang}_LIBRARY_${config}}) - list (APPEND NetCDF_includes ${NETCDF_${lang}_INCLUDE_DIR_${config}}) - else () - set (NETCDF_HAS_INTERFACES "NO") - message (STATUS "Failed to find NetCDF interface for ${lang}") - endif () - endif () -endmacro () - - -list (FIND NetCDF_FIND_COMPONENTS "CXX" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_CXX 1) -endif () -list (FIND NetCDF_FIND_COMPONENTS "F77" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_F77 1) -endif () -list (FIND NetCDF_FIND_COMPONENTS "F90" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_F90 1) -endif () - -NetCDF_check_interface_config (CXX Debug netcdfcpp.h netcdf_c++) -NetCDF_check_interface_config (CXX Release netcdfcpp.h netcdf_c++) -#NetCDF_check_interface (CXX netcdfcpp.h netcdf_c++) -NetCDF_check_interface (F77 netcdf.inc netcdff) -NetCDF_check_interface (F90 netcdf.mod netcdff) - -#export accumulated results to internal varS that rest of project can depend on -list (APPEND NetCDF_libs "${NETCDF_C_LIBRARIES}") -set (NETCDF_LIBRARIES ${NetCDF_libs}) -set (NETCDF_INCLUDE_DIRS ${NetCDF_includes}) - -# handle the QUIETLY and REQUIRED arguments and set NETCDF_FOUND to TRUE if -# all listed variables are TRUE -include (FindPackageHandleStandardArgs) -find_package_handle_standard_args (NetCDF - DEFAULT_MSG NETCDF_LIBRARIES NETCDF_INCLUDE_DIRS NETCDF_HAS_INTERFACES) \ No newline at end of file diff --git a/Source/cmake/FindNetCDF.orig.cmake b/Source/cmake/FindNetCDF.orig.cmake deleted file mode 100644 index d02ee16..0000000 --- a/Source/cmake/FindNetCDF.orig.cmake +++ /dev/null @@ -1,119 +0,0 @@ -# - Find NetCDF -# Find the native NetCDF includes and library -# -# NETCDF_INCLUDE_DIR - user modifiable choice of where netcdf headers are -# NETCDF_LIBRARY - user modifiable choice of where netcdf libraries are -# -# Your package can require certain interfaces to be FOUND by setting these -# -# NETCDF_CXX - require the C++ interface and link the C++ library -# NETCDF_F77 - require the F77 interface and link the fortran library -# NETCDF_F90 - require the F90 interface and link the fortran library -# -# Or equivalently by calling FindNetCDF with a COMPONENTS argument containing one or -# more of "CXX;F77;F90". -# -# When interfaces are requested the user has access to interface specific hints: -# -# NETCDF_${LANG}_INCLUDE_DIR - where to search for interface header files -# NETCDF_${LANG}_LIBRARY - where to search for interface libraries -# -# This module returns these variables for the rest of the project to use. 
-# -# NETCDF_FOUND - True if NetCDF found including required interfaces (see below) -# NETCDF_LIBRARIES - All netcdf related libraries. -# NETCDF_INCLUDE_DIRS - All directories to include. -# NETCDF_HAS_INTERFACES - Whether requested interfaces were found or not. -# NETCDF_${LANG}_INCLUDE_DIRS/NETCDF_${LANG}_LIBRARIES - C/C++/F70/F90 only interface -# -# Normal usage would be: -# set (NETCDF_F90 "YES") -# find_package (NetCDF REQUIRED) -# target_link_libraries (uses_everthing ${NETCDF_LIBRARIES}) -# target_link_libraries (only_uses_f90 ${NETCDF_F90_LIBRARIES}) - -#search starting from user editable cache var -if (NETCDF_INCLUDE_DIR AND NETCDF_LIBRARY) - # Already in cache, be silent - set (NETCDF_FIND_QUIETLY TRUE) -endif () - -set(USE_DEFAULT_PATHS "NO_DEFAULT_PATH") -if(NETCDF_USE_DEFAULT_PATHS) - set(USE_DEFAULT_PATHS "") -endif() - -find_path (NETCDF_INCLUDE_DIR netcdf.h - PATHS "${NETCDF_DIR}/include") -mark_as_advanced (NETCDF_INCLUDE_DIR) -set (NETCDF_C_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR}) - -find_library (NETCDF_LIBRARY NAMES netcdf - PATHS "${NETCDF_DIR}/lib" - HINTS "${NETCDF_INCLUDE_DIR}/../lib") -mark_as_advanced (NETCDF_LIBRARY) - -set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY}) - -#start finding requested language components -set (NetCDF_libs "") -set (NetCDF_includes "${NETCDF_INCLUDE_DIR}") - -get_filename_component (NetCDF_lib_dirs "${NETCDF_LIBRARY}" PATH) -set (NETCDF_HAS_INTERFACES "YES") # will be set to NO if we're missing any interfaces - -macro (NetCDF_check_interface lang header libs) - if (NETCDF_${lang}) - #search starting from user modifiable cache var - find_path (NETCDF_${lang}_INCLUDE_DIR NAMES ${header} - HINTS "${NETCDF_INCLUDE_DIR}" - HINTS "${NETCDF_${lang}_ROOT}/include" - ${USE_DEFAULT_PATHS}) - - find_library (NETCDF_${lang}_LIBRARY NAMES ${libs} - HINTS "${NetCDF_lib_dirs}" - HINTS "${NETCDF_${lang}_ROOT}/lib" - ${USE_DEFAULT_PATHS}) - - mark_as_advanced (NETCDF_${lang}_INCLUDE_DIR NETCDF_${lang}_LIBRARY) - - #export to internal varS that rest of project can use directly - set (NETCDF_${lang}_LIBRARIES ${NETCDF_${lang}_LIBRARY}) - set (NETCDF_${lang}_INCLUDE_DIRS ${NETCDF_${lang}_INCLUDE_DIR}) - - if (NETCDF_${lang}_INCLUDE_DIR AND NETCDF_${lang}_LIBRARY) - list (APPEND NetCDF_libs ${NETCDF_${lang}_LIBRARY}) - list (APPEND NetCDF_includes ${NETCDF_${lang}_INCLUDE_DIR}) - else () - set (NETCDF_HAS_INTERFACES "NO") - message (STATUS "Failed to find NetCDF interface for ${lang}") - endif () - endif () -endmacro () - -list (FIND NetCDF_FIND_COMPONENTS "CXX" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_CXX 1) -endif () -list (FIND NetCDF_FIND_COMPONENTS "F77" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_F77 1) -endif () -list (FIND NetCDF_FIND_COMPONENTS "F90" _nextcomp) -if (_nextcomp GREATER -1) - set (NETCDF_F90 1) -endif () -NetCDF_check_interface (CXX netcdfcpp.h netcdf_c++) -NetCDF_check_interface (F77 netcdf.inc netcdff) -NetCDF_check_interface (F90 netcdf.mod netcdff) - -#export accumulated results to internal varS that rest of project can depend on -list (APPEND NetCDF_libs "${NETCDF_C_LIBRARIES}") -set (NETCDF_LIBRARIES ${NetCDF_libs}) -set (NETCDF_INCLUDE_DIRS ${NetCDF_includes}) - -# handle the QUIETLY and REQUIRED arguments and set NETCDF_FOUND to TRUE if -# all listed variables are TRUE -include (FindPackageHandleStandardArgs) -find_package_handle_standard_args (NetCDF - DEFAULT_MSG NETCDF_LIBRARIES NETCDF_INCLUDE_DIRS NETCDF_HAS_INTERFACES) \ No newline at end of file diff --git a/Source/cmake/FindPQXX.cmake 
b/Source/cmake/FindPQXX.cmake deleted file mode 100644 index 19b104c..0000000 --- a/Source/cmake/FindPQXX.cmake +++ /dev/null @@ -1,80 +0,0 @@ -# Find PostGreSQL C++ library and header file -# Sets -# PQXX_FOUND to 0 or 1 depending on result -# PQXX_INCLUDE_DIRECTORIES to the directory containing pqxx/pqxx -# PQXX_LIBRARIES to the PQXX library (and any dependents required) -# If PQXX_REQUIRED is defined, then a fatal error message will be generated if libpqxx is not found - -SET(SEARCH_PATH_PQXX "" CACHE PATH "Additional PQXX search path") -SET(PQXX_DIR_SEARCH - $ENV{PQXX_ROOT} - ${SEARCH_PATH_PQXX} - ${PQXX_DIR_SEARCH} -) - -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(PQXX_DIR_SEARCH - ${PQXX_DIR_SEARCH} - "C:/Development/SLEEK-TOOLS/libpqxx/dist/" - "C:/Development/Software/libpqxx/" - ) - endif() - if (EXISTS "D:/") - SET(PQXX_DIR_SEARCH - ${PQXX_DIR_SEARCH} - "D:/Development/SLEEK-TOOLS/libpqxx/dist/" - "D:/Development/Software/libpqxx/" - ) - endif() - - find_path(PQXX_INCLUDE_DIRECTORIES pqxx/pqxx - PATH_SUFFIXES - include - PATHS - ${PQXX_DIR_SEARCH} - ) - - find_library(PQXX_LIBRARIES pqxx - PATH_SUFFIXES - lib - PATHS - ${PQXX_DIR_SEARCH} - ) - - set(LibPqxx_LIB - debug ${LibPqxx_LIB_DEBUG} - optimized ${LibPqxx_LIB_RELEASE} - CACHE STRING "PQXX library") - -endif() - -if (CMAKE_SYSTEM MATCHES "Linux") - SET(PQXX_DIR_SEARCH - ${PQXX_DIR_SEARCH} - /usr/local/ - /usr/ - ) - - find_path(PQXX_INCLUDE_DIRECTORIES pqxx/pqxx - PATH_SUFFIXES - include - PATHS - ${PQXX_DIR_SEARCH} - ) - - find_library(PQXX_LIBRARIES pqxx - PATH_SUFFIXES - lib - PATHS - ${PQXX_DIR_SEARCH} - ) - -endif(CMAKE_SYSTEM MATCHES "Linux") - -# handle the QUIETLY and REQUIRED arguments and set PQXX_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(PQXX DEFAULT_MSG PQXX_INCLUDE_DIRECTORIES) - -MARK_AS_ADVANCED(PQXX_INCLUDE_DIRECTORIES) diff --git a/Source/cmake/FindPoco.cmake b/Source/cmake/FindPoco.cmake deleted file mode 100644 index 11408fa..0000000 --- a/Source/cmake/FindPoco.cmake +++ /dev/null @@ -1,407 +0,0 @@ -# - Find the Poco includes and libraries. -# The following variables are set if Poco is found. If Poco is not -# found, Poco_FOUND is set to false. -# Poco_FOUND - True when the Poco include directory is found. -# Poco_INCLUDE_DIRS - the path to where the poco include files are. -# Poco_LIBRARY_DIRS - The path to where the poco library files are. -# Poco_BINARY_DIRS - The path to where the poco dlls are. - -# ---------------------------------------------------------------------------- -# If you have installed Poco in a non-standard location. -# Then you have three options. -# In the following comments, it is assumed that -# points to the root directory of the include directory of Poco. e.g -# If you have put poco in C:\development\Poco then is -# "C:/development/Poco" and in this directory there will be two -# directories called "include" and "lib". -# 1) After CMake runs, set Poco_INCLUDE_DIR to /poco<-version> -# 2) Use CMAKE_INCLUDE_PATH to set a path to /poco<-version>. This will allow FIND_PATH() -# to locate Poco_INCLUDE_DIR by utilizing the PATH_SUFFIXES option. e.g. -# SET(CMAKE_INCLUDE_PATH ${CMAKE_INCLUDE_PATH} "/include") -# 3) Set an environment variable called ${POCO_ROOT} that points to the root of where you have -# installed Poco, e.g. . It is assumed that there is at least a subdirectory called -# Foundation/include/Poco in this path. 
-# -# Note: -# 1) If you are just using the poco headers, then you do not need to use -# Poco_LIBRARY_DIR in your CMakeLists.txt file. -# 2) If Poco has not been installed, then when setting Poco_LIBRARY_DIR -# the script will look for /lib first and, if this fails, then for /stage/lib. -# -# Usage: -# In your CMakeLists.txt file do something like this: -# ... -# # Poco -# FIND_PACKAGE(Poco) -# ... -# INCLUDE_DIRECTORIES(${Poco_INCLUDE_DIRS}) -# LINK_DIRECTORIES(${Poco_LIBRARY_DIR}) -# -# In Windows, we make the assumption that, if the Poco files are installed, the default directory -# will be C:\poco or C:\Program Files\Poco or C:\Programme\Poco. - -SET(POCO_INCLUDE_PATH_DESCRIPTION "top-level directory containing the poco include directories. E.g /usr/local/include/ or c:\\poco\\include\\poco-1.3.2") -SET(POCO_INCLUDE_DIR_MESSAGE "Set the Poco_INCLUDE_DIR cmake cache entry to the ${POCO_INCLUDE_PATH_DESCRIPTION}") -SET(POCO_LIBRARY_PATH_DESCRIPTION "top-level directory containing the poco libraries.") -SET(POCO_LIBRARY_DIR_MESSAGE "Set the Poco_LIBRARY_DIR cmake cache entry to the ${POCO_LIBRARY_PATH_DESCRIPTION}") - -SET(SEARCH_PATH_POCO "" CACHE PATH "Additional Poco search path") - -if(CMAKE_SYSTEM MATCHES "Windows") - - SET(POCO_DIR_SEARCH - $ENV{POCO_ROOT} - ${SEARCH_PATH_POCO} - ${POCO_DIR_SEARCH} - ) - - if (EXISTS "C:/") - SET(POCO_DIR_SEARCH - ${POCO_DIR_SEARCH} - C:/Development - C:/poco - "C:/Program Files/poco" - "C:/Programme/poco" - ) - ENDIF() - - if (EXISTS "D:/") - SET(POCO_DIR_SEARCH - ${POCO_DIR_SEARCH} - D:/Development - D:/poco - "D:/Program Files/poco" - "D:/Programme/poco" - ) - ENDIF() - -ENDIF() - - -if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(POCO_DIR_SEARCH - $ENV{POCO_ROOT} - ${SEARCH_PATH_POCO} - ${POCO_DIR_SEARCH} - /usr/local - ) -ENDIF() - -# Add in some path suffixes. These will have to be updated whenever a new Poco version comes out. -SET(SUFFIX_FOR_INCLUDE_PATH - poco-1.9.0 - poco-1.9.0-all - poco-1.8.1-all - poco-1.7.9-all - poco-1.7.8p3-all - poco-1.7.8-all - poco-1.7.7-all - poco-1.7.4-all - poco-1.7.1-all - poco.develop - poco -) - -SET(SUFFIX_FOR_LIBRARY_PATH - bin64 - lib64 - poco//bin64 - poco//lib64 - poco.develop//bin64 - poco.develop//lib64 -) - -# -# Look for an installation. -# -FIND_PATH( - Poco_INCLUDE_DIR NAMES - Foundation/include/Poco/AbstractCache.h - PATH_SUFFIXES - ${SUFFIX_FOR_INCLUDE_PATH} PATHS - # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - DOC # Help the user find it if we cannot. - "The ${POCO_INCLUDE_PATH_DESCRIPTION}" -) - -IF(NOT Poco_INCLUDE_DIR) - # Look for standard unix include paths - FIND_PATH(Poco_INCLUDE_DIR Poco/Poco.h DOC "The ${POCO_INCLUDE_PATH_DESCRIPTION}") -ENDIF(NOT Poco_INCLUDE_DIR) - -# Assume we didn't find it. -SET(Poco_FOUND 0) - -# Now try to get the include and library path. 
-IF(EXISTS ${Poco_INCLUDE_DIR}) - IF(EXISTS "${Poco_INCLUDE_DIR}/Foundation") - SET(Poco_INCLUDE_DIRS - ${Poco_INCLUDE_DIR}/CppUnit/include - ${Poco_INCLUDE_DIR}/Foundation/include - ${Poco_INCLUDE_DIR}/Data/include - ${Poco_INCLUDE_DIR}/Data/SQLite/include - ${Poco_INCLUDE_DIR}/Data/ODBC/include - ${Poco_INCLUDE_DIR}/MongoDB/include - ${Poco_INCLUDE_DIR}/Net/include - ${Poco_INCLUDE_DIR}/Util/include - ${Poco_INCLUDE_DIR}/XML/include - ${Poco_INCLUDE_DIR}/Zip/include - ${Poco_INCLUDE_DIR}/JSON/include - CACHE PATH "Location of Poco include files" - ) - SET(Poco_FOUND 1) - ELSEIF(EXISTS "${Poco_INCLUDE_DIR}/Poco/Poco.h") - SET(Poco_INCLUDE_DIRS - ${Poco_INCLUDE_DIR} - CACHE PATH "Location of Poco include files" - ) - SET(Poco_FOUND 1) - ENDIF(EXISTS "${Poco_INCLUDE_DIR}/Foundation") - - IF(NOT Poco_LIBRARY_DIR) - FIND_LIBRARY(Poco_FOUNDATION_LIB NAMES PocoFoundation PocoFoundationd - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - DOC # Help the user find it if we cannot. - "The ${POCO_LIBRARY_PATH_DESCRIPTION}" - ) - GET_FILENAME_COMPONENT(Poco_LIBRARY_DIR ${Poco_FOUNDATION_LIB} PATH CACHE) - - IF(Poco_LIBRARY_DIR) - # Look for the poco binary path. - SET(Poco_BINARY_DIR ${Poco_INCLUDE_DIR}) - - IF(Poco_BINARY_DIR AND EXISTS "${Poco_BINARY_DIR}/bin64") - SET(Poco_BINARY_DIRS ${Poco_BINARY_DIR}/bin64 CACHE PATH "Path to Poco binaries") - ENDIF(Poco_BINARY_DIR AND EXISTS "${Poco_BINARY_DIR}/bin64") - ENDIF(Poco_LIBRARY_DIR) - - find_library(Poco_DATA_SQLITE_DEBUG NAMES PocoDataSQLited PocoDataSQLited_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_DATA_SQLITE_RELEASE NAMES PocoDataSQLite PocoDataSQLite_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_DATA_ODBC_DEBUG NAMES PocoDataODBCd PocoDataODBCd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_DATA_ODBC_RELEASE NAMES PocoDataODBC PocoDataODBC_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_FOUNDATION_DEBUG NAMES PocoFoundationd PocoFoundationd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_FOUNDATION_RELEASE NAMES PocoFoundation PocoFoundation_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_NET_DEBUG NAMES PocoNetd PocoNetd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_NET_RELEASE NAMES PocoNet PocoNet_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_MONGODB_DEBUG NAMES PocoMongoDBd PocoMongoDBd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_MONGODB_RELEASE NAMES PocoMongoDB PocoMongoDB_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. 
- ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - - find_library(Poco_DATA_DEBUG NAMES PocoDatad PocoDatad_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_DATA_RELEASE NAMES PocoData PocoData_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_JSON_DEBUG NAMES PocoJSONd PocoJSONd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_JSON_RELEASE NAMES PocoJSON PocoJSON_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_UTIL_DEBUG NAMES PocoUtild PocoUtild_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_UTIL_RELEASE NAMES PocoUtil PocoUtil_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - if(Poco_FOUNDATION_DEBUG AND NOT Poco_FOUNDATION_RELEASE) - set(Poco_FOUNDATION_RELEASE ${Poco_FOUNDATION_DEBUG}) - endif(Poco_FOUNDATION_DEBUG AND NOT Poco_FOUNDATION_RELEASE) - - set( Poco_FOUNDATION debug ${Poco_FOUNDATION_DEBUG} - optimized ${Poco_FOUNDATION_RELEASE} - CACHE STRING "Poco Foundation link library text") - - if(Poco_DATA_SQLITE_DEBUG AND NOT Poco_DATA_SQLITE_RELEASE) - set(Poco_DATA_SQLITE_RELEASE ${Poco_DATA_SQLITE_DEBUG}) - endif(Poco_DATA_SQLITE_DEBUG AND NOT Poco_DATA_SQLITE_RELEASE) - - set( Poco_DATA_SQLITE debug ${Poco_DATA_SQLITE_DEBUG} - optimized ${Poco_DATA_SQLITE_RELEASE} - CACHE STRING "Poco SQLite link library text") - - if(Poco_DATA_ODBC_DEBUG AND NOT Poco_DATA_ODBC_RELEASE) - set(Poco_DATA_ODBC_RELEASE ${Poco_DATA_ODBC_DEBUG}) - endif(Poco_DATA_ODBC_DEBUG AND NOT Poco_DATA_ODBC_RELEASE) - - set( Poco_DATA_ODBC debug ${Poco_DATA_ODBC_DEBUG} - optimized ${Poco_DATA_ODBC_RELEASE} - CACHE STRING "Poco ODBC link library text") - - if(Poco_DATA_DEBUG AND NOT Poco_DATA_RELEASE) - set(Poco_DATA_RELEASE ${Poco_DATA_DEBUG}) - endif(Poco_DATA_DEBUG AND NOT Poco_DATA_RELEASE) - - set( Poco_DATA debug ${Poco_DATA_DEBUG} - optimized ${Poco_DATA_RELEASE} - CACHE STRING "Poco Data link library text") - - if(Poco_JSON_DEBUG AND NOT Poco_JSON_RELEASE) - set(Poco_JSON_RELEASE ${Poco_JSON_DEBUG}) - endif(Poco_JSON_DEBUG AND NOT Poco_JSON_RELEASE) - - set( Poco_JSON debug ${Poco_JSON_DEBUG} - optimized ${Poco_JSON_RELEASE} - CACHE STRING "Poco JSON link library text") - - if(Poco_MONGODB_DEBUG AND NOT Poco_MONGODB_RELEASE) - set(Poco_MONGODB_RELEASE ${Poco_MONGODB_DEBUG}) - endif(Poco_MONGODB_DEBUG AND NOT Poco_MONGODB_RELEASE) - - set( Poco_MONGODB debug ${Poco_MONGODB_DEBUG} - optimized ${Poco_MONGODB_RELEASE} - CACHE STRING "Poco MONGODB link library text") - - if(Poco_NET_DEBUG AND NOT Poco_NET_RELEASE) - set(Poco_NET_RELEASE ${Poco_NET_DEBUG}) - endif(Poco_NET_DEBUG AND NOT Poco_NET_RELEASE) - - set( Poco_NET debug ${Poco_NET_DEBUG} - optimized ${Poco_NET_RELEASE} - CACHE STRING "Poco NET link library text") - - if(Poco_UTIL_DEBUG AND NOT Poco_UTIL_RELEASE) - set(Poco_UTIL_RELEASE ${Poco_UTIL_DEBUG}) - endif(Poco_UTIL_DEBUG AND NOT Poco_UTIL_RELEASE) - - set( Poco_UTIL debug ${Poco_UTIL_DEBUG} - optimized ${Poco_UTIL_RELEASE} - CACHE STRING "Poco Util link library 
text") - - ENDIF(NOT Poco_LIBRARY_DIR) - -IF(NOT Poco_FOUND) - IF(Poco_FIND_QUIETLY) - message(STATUS "Poco was not found. ${POCO_INCLUDE_DIR_MESSAGE}") - ELSE(Poco_FIND_QUIETLY) - IF(Poco_FIND_REQUIRED) - message(FATAL_ERROR "Poco was not found. ${POCO_INCLUDE_DIR_MESSAGE}") - ENDIF(Poco_FIND_REQUIRED) - ENDIF(Poco_FIND_QUIETLY) -ENDIF(NOT Poco_FOUND) - -ENDIF() - - diff --git a/Source/cmake/FindPoco.max.cmake b/Source/cmake/FindPoco.max.cmake deleted file mode 100644 index a974f8f..0000000 --- a/Source/cmake/FindPoco.max.cmake +++ /dev/null @@ -1,358 +0,0 @@ -# - Find the Poco includes and libraries. -# The following variables are set if Poco is found. If Poco is not -# found, Poco_FOUND is set to false. -# Poco_FOUND - True when the Poco include directory is found. -# Poco_INCLUDE_DIRS - the path to where the poco include files are. -# Poco_LIBRARY_DIRS - The path to where the poco library files are. -# Poco_BINARY_DIRS - The path to where the poco dlls are. - -# ---------------------------------------------------------------------------- -# If you have installed Poco in a non-standard location. -# Then you have three options. -# In the following comments, it is assumed that -# points to the root directory of the include directory of Poco. e.g -# If you have put poco in C:\development\Poco then is -# "C:/development/Poco" and in this directory there will be two -# directories called "include" and "lib". -# 1) After CMake runs, set Poco_INCLUDE_DIR to /poco<-version> -# 2) Use CMAKE_INCLUDE_PATH to set a path to /poco<-version>. This will allow FIND_PATH() -# to locate Poco_INCLUDE_DIR by utilizing the PATH_SUFFIXES option. e.g. -# SET(CMAKE_INCLUDE_PATH ${CMAKE_INCLUDE_PATH} "/include") -# 3) Set an environment variable called ${POCO_ROOT} that points to the root of where you have -# installed Poco, e.g. . It is assumed that there is at least a subdirectory called -# Foundation/include/Poco in this path. -# -# Note: -# 1) If you are just using the poco headers, then you do not need to use -# Poco_LIBRARY_DIR in your CMakeLists.txt file. -# 2) If Poco has not been installed, then when setting Poco_LIBRARY_DIR -# the script will look for /lib first and, if this fails, then for /stage/lib. -# -# Usage: -# In your CMakeLists.txt file do something like this: -# ... -# # Poco -# FIND_PACKAGE(Poco) -# ... -# INCLUDE_DIRECTORIES(${Poco_INCLUDE_DIRS}) -# LINK_DIRECTORIES(${Poco_LIBRARY_DIR}) -# -# In Windows, we make the assumption that, if the Poco files are installed, the default directory -# will be C:\poco or C:\Program Files\Poco or C:\Programme\Poco. -# -#include("cmake/FindPkgMacros.cmake") -# -SET(POCO_ROOT $ENV{POCO_ROOT} CACHE PATH "Path to the Poco library root") -SET(POCO_INCLUDE_PATH_DESCRIPTION "top-level directory containing the poco include directories. 
E.g /usr/local/include/ or c:\\poco\\include\\poco-1.3.2") -SET(POCO_INCLUDE_DIR_MESSAGE "Set the Poco_INCLUDE_DIR cmake cache entry to the ${POCO_INCLUDE_PATH_DESCRIPTION}") -SET(POCO_LIBRARY_PATH_DESCRIPTION "top-level directory containing the poco libraries.") -SET(POCO_LIBRARY_DIR_MESSAGE "Set the Poco_LIBRARY_DIR cmake cache entry to the ${POCO_LIBRARY_PATH_DESCRIPTION}") - -if(CMAKE_SYSTEM MATCHES "Windows") - SET(POCO_DIR_SEARCH - ${POCO_ROOT} - ${POCO_DIR_SEARCH} - C:/Development - C:/poco - D:/poco - "C:/Program Files/poco" - "C:/Programme/poco" - "D:/Program Files/poco" - "D:/Programme/poco" - ) -ENDIF() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(POCO_DIR_SEARCH - ${POCO_DIR_SEARCH} - /usr/local - ) -ENDIF() - -# Add in some path suffixes. These will have to be updated whenever a new Poco version comes out. -SET(SUFFIX_FOR_INCLUDE_PATH - poco-1.6.1-all - poco-1.6.0-all -) - -SET(SUFFIX_FOR_LIBRARY_PATH - poco-1.6.1-all/bin64 - poco-1.6.1-all/lib64 - #poco-1.6.1-all/lib/Linux/i686 - #poco-1.6.1-all/lib/Linux/x86_64 - #lib - #lib/Linux/i686 - #lib/Linux/x86_64 -) - -# Look for an installation. -FIND_PATH(Poco_INCLUDE_DIR NAMES Foundation/include/Poco/AbstractCache.h PATH_SUFFIXES ${SUFFIX_FOR_INCLUDE_PATH} PATHS - # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - - # Help the user find it if we cannot. - DOC "The ${POCO_INCLUDE_PATH_DESCRIPTION}" -) - -IF(NOT Poco_INCLUDE_DIR) - # Look for standard unix include paths - FIND_PATH(Poco_INCLUDE_DIR Poco/Poco.h DOC "The ${POCO_INCLUDE_PATH_DESCRIPTION}") -ENDIF(NOT Poco_INCLUDE_DIR) - -IF(NOT Poco_INCLUDE_DIR) - # Fall back to a Poco root set through cmake-gui. - SET(POCO_ROOT CACHE PATH "Poco root") - FIND_PATH(Poco_INCLUDE_DIR NAMES Foundation/include/Poco/AbstractCache.h PATHS ${POCO_ROOT}) - SET(POCO_DIR_SEARCH ${POCO_DIR_SEARCH} ${POCO_ROOT}) - SET(SUFFIX_FOR_LIBRARY_PATH ${SUFFIX_FOR_LIBRARY_PATH} bin64 lib64) -ENDIF(NOT Poco_INCLUDE_DIR) - -# Assume we didn't find it. -SET(Poco_FOUND 0) - - -# Now try to get the include and library path. -IF(EXISTS ${Poco_INCLUDE_DIR}) - IF(EXISTS "${Poco_INCLUDE_DIR}/Foundation") - SET(Poco_INCLUDE_DIRS - ${Poco_INCLUDE_DIR}/CppUnit/include - ${Poco_INCLUDE_DIR}/Foundation/include - ${Poco_INCLUDE_DIR}/Data/include - ${Poco_INCLUDE_DIR}/Data/SQLite/include - ${Poco_INCLUDE_DIR}/MongoDB/include - ${Poco_INCLUDE_DIR}/Net/include - ${Poco_INCLUDE_DIR}/Util/include - ${Poco_INCLUDE_DIR}/XML/include - ${Poco_INCLUDE_DIR}/Zip/include - ${Poco_INCLUDE_DIR}/JSON/include - CACHE PATH "Location of Poco include files" - ) - SET(Poco_FOUND 1) - ELSEIF(EXISTS "${Poco_INCLUDE_DIR}/Poco/Poco.h") - SET(Poco_INCLUDE_DIRS - ${Poco_INCLUDE_DIR} - CACHE PATH "Location of Poco include files" - ) - SET(Poco_FOUND 1) - ENDIF(EXISTS "${Poco_INCLUDE_DIR}/Foundation") - - IF(NOT Poco_LIBRARY_DIR) - FIND_LIBRARY(Poco_FOUNDATION_LIB NAMES PocoFoundation PocoFoundationd - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - DOC # Help the user find it if we cannot. - "The ${POCO_LIBRARY_PATH_DESCRIPTION}" - ) - GET_FILENAME_COMPONENT(Poco_LIBRARY_DIR ${Poco_FOUNDATION_LIB} PATH CACHE) - - IF(Poco_LIBRARY_DIR) - # Look for the poco binary path. 
- SET(Poco_BINARY_DIR ${Poco_INCLUDE_DIR}) - - IF(Poco_BINARY_DIR AND EXISTS "${Poco_BINARY_DIR}/bin64") - SET(Poco_BINARY_DIRS ${Poco_BINARY_DIR}/bin64 CACHE PATH "Path to Poco binaries") - ENDIF(Poco_BINARY_DIR AND EXISTS "${Poco_BINARY_DIR}/bin64") - ENDIF(Poco_LIBRARY_DIR) - - find_library(Poco_DATA_SQLITE_DEBUG NAMES PocoDataSQLited PocoDataSQLited_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_DATA_SQLITE_RELEASE NAMES PocoDataSQLite PocoDataSQLite_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_FOUNDATION_DEBUG NAMES PocoFoundationd PocoFoundationd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_FOUNDATION_RELEASE NAMES PocoFoundation PocoFoundation_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_NET_DEBUG NAMES PocoNetd PocoNetd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_NET_RELEASE NAMES PocoNet PocoNet_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_MONGODB_DEBUG NAMES PocoMongoDBd PocoMongoDBd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_MONGODB_RELEASE NAMES PocoMongoDB PocoMongoDB_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - - find_library(Poco_DATA_DEBUG NAMES PocoDatad PocoDatad_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_DATA_RELEASE NAMES PocoData PocoData_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_JSON_DEBUG NAMES PocoJSONd PocoJSONd_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_JSON_RELEASE NAMES PocoJSON PocoJSON_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_UTIL_DEBUG NAMES PocoUtild PocoUtild_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - debug - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - find_library(Poco_UTIL_RELEASE NAMES PocoUtil PocoUtil_dll - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - release - PATHS # Look in other places. 
- ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - if(Poco_FOUNDATION_DEBUG AND NOT Poco_FOUNDATION_RELEASE) - set(Poco_FOUNDATION_RELEASE ${Poco_FOUNDATION_DEBUG}) - endif(Poco_FOUNDATION_DEBUG AND NOT Poco_FOUNDATION_RELEASE) - - set( Poco_FOUNDATION debug ${Poco_FOUNDATION_DEBUG} - optimized ${Poco_FOUNDATION_RELEASE} - CACHE STRING "Poco Foundation link library text") - - if(Poco_DATA_SQLITE_DEBUG AND NOT Poco_DATA_SQLITE_RELEASE) - set(Poco_DATA_SQLITE_RELEASE ${Poco_DATA_SQLITE_DEBUG}) - endif(Poco_DATA_SQLITE_DEBUG AND NOT Poco_DATA_SQLITE_RELEASE) - - set( Poco_DATA_SQLITE debug ${Poco_DATA_SQLITE_DEBUG} - optimized ${Poco_DATA_SQLITE_RELEASE} - CACHE STRING "Poco SQLite link library text") - - if(Poco_DATA_DEBUG AND NOT Poco_DATA_RELEASE) - set(Poco_DATA_RELEASE ${Poco_DATA_DEBUG}) - endif(Poco_DATA_DEBUG AND NOT Poco_DATA_RELEASE) - - set( Poco_DATA debug ${Poco_DATA_DEBUG} - optimized ${Poco_DATA_RELEASE} - CACHE STRING "Poco Data link library text") - - if(Poco_JSON_DEBUG AND NOT Poco_JSON_RELEASE) - set(Poco_JSON_RELEASE ${Poco_JSON_DEBUG}) - endif(Poco_JSON_DEBUG AND NOT Poco_JSON_RELEASE) - - set( Poco_JSON debug ${Poco_JSON_DEBUG} - optimized ${Poco_JSON_RELEASE} - CACHE STRING "Poco JSON link library text") - - if(Poco_MONGODB_DEBUG AND NOT Poco_MONGODB_RELEASE) - set(Poco_MONGODB_RELEASE ${Poco_MONGODB_DEBUG}) - endif(Poco_MONGODB_DEBUG AND NOT Poco_MONGODB_RELEASE) - - set( Poco_MONGODB debug ${Poco_MONGODB_DEBUG} - optimized ${Poco_MONGODB_RELEASE} - CACHE STRING "Poco MONGODB link library text") - - if(Poco_NET_DEBUG AND NOT Poco_NET_RELEASE) - set(Poco_NET_RELEASE ${Poco_NET_DEBUG}) - endif(Poco_NET_DEBUG AND NOT Poco_NET_RELEASE) - - set( Poco_NET debug ${Poco_NET_DEBUG} - optimized ${Poco_NET_RELEASE} - CACHE STRING "Poco NET link library text") - - if(Poco_UTIL_DEBUG AND NOT Poco_UTIL_RELEASE) - set(Poco_UTIL_RELEASE ${Poco_UTIL_DEBUG}) - endif(Poco_UTIL_DEBUG AND NOT Poco_UTIL_RELEASE) - - set( Poco_UTIL debug ${Poco_UTIL_DEBUG} - optimized ${Poco_UTIL_RELEASE} - CACHE STRING "Poco Util link library text") - - ENDIF(NOT Poco_LIBRARY_DIR) - -IF(NOT Poco_FOUND) - IF(Poco_FIND_QUIETLY) - message(STATUS "Poco was not found. ${POCO_INCLUDE_DIR_MESSAGE}") - ELSE(Poco_FIND_QUIETLY) - IF(Poco_FIND_REQUIRED) - message(FATAL_ERROR "Poco was not found. ${POCO_INCLUDE_DIR_MESSAGE}") - ENDIF(Poco_FIND_REQUIRED) - ENDIF(Poco_FIND_QUIETLY) -ENDIF(NOT Poco_FOUND) - -ENDIF() - - diff --git a/Source/cmake/FindPoco.mt.cmake b/Source/cmake/FindPoco.mt.cmake deleted file mode 100644 index c3e5d09..0000000 --- a/Source/cmake/FindPoco.mt.cmake +++ /dev/null @@ -1,326 +0,0 @@ -# - Find the Poco includes and libraries. -# The following variables are set if Poco is found. If Poco is not -# found, Poco_FOUND is set to false. -# Poco_FOUND - True when the Poco include directory is found. -# Poco_INCLUDE_DIRS - the path to where the poco include files are. -# Poco_LIBRARY_DIRS - The path to where the poco library files are. -# Poco_BINARY_DIRS - The path to where the poco dlls are. - -# ---------------------------------------------------------------------------- -# If you have installed Poco in a non-standard location. -# Then you have three options. -# In the following comments, it is assumed that -# points to the root directory of the include directory of Poco. e.g -# If you have put poco in C:\development\Poco then is -# "C:/development/Poco" and in this directory there will be two -# directories called "include" and "lib". 
-# 1) After CMake runs, set Poco_INCLUDE_DIR to /poco<-version> -# 2) Use CMAKE_INCLUDE_PATH to set a path to /poco<-version>. This will allow FIND_PATH() -# to locate Poco_INCLUDE_DIR by utilizing the PATH_SUFFIXES option. e.g. -# SET(CMAKE_INCLUDE_PATH ${CMAKE_INCLUDE_PATH} "/include") -# 3) Set an environment variable called ${POCO_ROOT} that points to the root of where you have -# installed Poco, e.g. . It is assumed that there is at least a subdirectory called -# Foundation/include/Poco in this path. -# -# Note: -# 1) If you are just using the poco headers, then you do not need to use -# Poco_LIBRARY_DIR in your CMakeLists.txt file. -# 2) If Poco has not been installed, then when setting Poco_LIBRARY_DIR -# the script will look for /lib first and, if this fails, then for /stage/lib. -# -# Usage: -# In your CMakeLists.txt file do something like this: -# ... -# # Poco -# FIND_PACKAGE(Poco) -# ... -# INCLUDE_DIRECTORIES(${Poco_INCLUDE_DIRS}) -# LINK_DIRECTORIES(${Poco_LIBRARY_DIR}) -# -# In Windows, we make the assumption that, if the Poco files are installed, the default directory -# will be C:\poco or C:\Program Files\Poco or C:\Programme\Poco. - -SET(POCO_INCLUDE_PATH_DESCRIPTION "top-level directory containing the poco include directories. E.g /usr/local/include/ or c:\\poco\\include\\poco-1.3.2") -SET(POCO_INCLUDE_DIR_MESSAGE "Set the Poco_INCLUDE_DIR cmake cache entry to the ${POCO_INCLUDE_PATH_DESCRIPTION}") -SET(POCO_LIBRARY_PATH_DESCRIPTION "top-level directory containing the poco libraries.") -SET(POCO_LIBRARY_DIR_MESSAGE "Set the Poco_LIBRARY_DIR cmake cache entry to the ${POCO_LIBRARY_PATH_DESCRIPTION}") - -SET(SEARCH_PATH_POCO "" CACHE PATH "Additional Poco search path") - -if(CMAKE_SYSTEM MATCHES "Windows") - - SET(POCO_DIR_SEARCH - $ENV{POCO_ROOT} - ${SEARCH_PATH_POCO} - ${POCO_DIR_SEARCH} - ) - - if (EXISTS "C:/") - SET(POCO_DIR_SEARCH - ${POCO_DIR_SEARCH} - C:/Development - C:/poco - "C:/Program Files/poco" - "C:/Programme/poco" - ) - ENDIF() - - if (EXISTS "D:/") - SET(POCO_DIR_SEARCH - ${POCO_DIR_SEARCH} - D:/Development - D:/poco - "D:/Program Files/poco" - "D:/Programme/poco" - ) - ENDIF() - -ENDIF() - - -if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(POCO_DIR_SEARCH - $ENV{POCO_ROOT} - ${SEARCH_PATH_POCO} - ${POCO_DIR_SEARCH} - /usr/local - ) -ENDIF() - -# Add in some path suffixes. These will have to be updated whenever a new Poco version comes out. -SET(SUFFIX_FOR_INCLUDE_PATH - poco-1.7.8-all - poco-1.7.7-all - poco-1.7.4-all - poco-1.7.1-all - poco-1.6.1-all - poco-1.6.0-all - poco.develop - poco -) - -SET(SUFFIX_FOR_LIBRARY_PATH -# bin64 - lib64 -# poco//bin64 - poco//lib64 -# poco.develop//bin64 - poco.develop//lib64 -# poco-1.6.1-all.develop/bin64 - poco-1.6.1-all.develop/lib64 -# poco-1.7.1-all/bin64 - poco-1.7.1-all/lib64 -# poco-1.6.1-all/bin64 - poco-1.6.1-all/lib64 - #poco-1.6.1-all/lib/Linux/i686 - #poco-1.6.1-all/lib/Linux/x86_64 - #lib - #lib/Linux/i686 - #lib/Linux/x86_64 -) - -# -# Look for an installation. -# -FIND_PATH(Poco_INCLUDE_DIR NAMES Foundation/include/Poco/AbstractCache.h PATH_SUFFIXES ${SUFFIX_FOR_INCLUDE_PATH} PATHS - - # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - - # Help the user find it if we cannot. - DOC "The ${POCO_INCLUDE_PATH_DESCRIPTION}" -) - -IF(NOT Poco_INCLUDE_DIR) - - # Look for standard unix include paths - FIND_PATH(Poco_INCLUDE_DIR Poco/Poco.h DOC "The ${POCO_INCLUDE_PATH_DESCRIPTION}") - -ENDIF(NOT Poco_INCLUDE_DIR) - -# Assume we didn't find it. 
-SET(Poco_FOUND 0) - -# Now try to get the include and library path. -IF(EXISTS ${Poco_INCLUDE_DIR}) - IF(EXISTS "${Poco_INCLUDE_DIR}/Foundation") - SET(Poco_INCLUDE_DIRS - ${Poco_INCLUDE_DIR}/CppUnit/include - ${Poco_INCLUDE_DIR}/Foundation/include - ${Poco_INCLUDE_DIR}/Data/include - ${Poco_INCLUDE_DIR}/Data/SQLite/include -# ${Poco_INCLUDE_DIR}/Data/PostgreSQL/include - ${Poco_INCLUDE_DIR}/MongoDB/include - ${Poco_INCLUDE_DIR}/Net/include - ${Poco_INCLUDE_DIR}/Util/include - ${Poco_INCLUDE_DIR}/XML/include - ${Poco_INCLUDE_DIR}/Zip/include - ${Poco_INCLUDE_DIR}/JSON/include - CACHE PATH "Location of Poco include files" - ) - SET(Poco_FOUND 1) - ELSEIF(EXISTS "${Poco_INCLUDE_DIR}/Poco/Poco.h") - SET(Poco_INCLUDE_DIRS - ${Poco_INCLUDE_DIR} - CACHE PATH "Location of Poco include files" - ) - SET(Poco_FOUND 1) - ENDIF(EXISTS "${Poco_INCLUDE_DIR}/Foundation") - - IF(NOT Poco_LIBRARY_DIR) - FIND_LIBRARY(Poco_FOUNDATION_LIB NAMES PocoFoundationmt - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - DOC # Help the user find it if we cannot. - "The ${POCO_LIBRARY_PATH_DESCRIPTION}" - ) - GET_FILENAME_COMPONENT(Poco_LIBRARY_DIR ${Poco_FOUNDATION_LIB} PATH CACHE) - - IF(Poco_LIBRARY_DIR) - # Look for the poco binary path. - SET(Poco_BINARY_DIR ${Poco_INCLUDE_DIR}) - - IF(Poco_BINARY_DIR AND EXISTS "${Poco_BINARY_DIR}/bin64") - SET(Poco_BINARY_DIRS ${Poco_BINARY_DIR}/bin64 CACHE PATH "Path to Poco binaries") - ENDIF(Poco_BINARY_DIR AND EXISTS "${Poco_BINARY_DIR}/bin64") - ENDIF(Poco_LIBRARY_DIR) - -# find_library(Poco_DATA_SQLITE_DEBUG NAMES PocoDataSQLited PocoDataSQLited_dll -# PATH_SUFFIXES -# ${SUFFIX_FOR_LIBRARY_PATH} -# PATHS # Look in other places. -# ${Poco_INCLUDE_DIR} -# ${POCO_DIR_SEARCH} -# ) - - find_library(Poco_DATA_SQLITE_RELEASE NAMES PocoDataSQLitemt - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - -# find_library(Poco_FOUNDATION_DEBUG NAMES PocoFoundationd PocoFoundationd_dll -# PATH_SUFFIXES -# ${SUFFIX_FOR_LIBRARY_PATH} -# PATHS # Look in other places. -# ${Poco_INCLUDE_DIR} -# ${POCO_DIR_SEARCH} -# ) - - find_library(Poco_FOUNDATION_RELEASE NAMES PocoFoundationmt - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - -# find_library(Poco_NET_DEBUG NAMES PocoNetd PocoNetd_dll -# PATH_SUFFIXES -# ${SUFFIX_FOR_LIBRARY_PATH} -# PATHS # Look in other places. -# ${Poco_INCLUDE_DIR} -# ${POCO_DIR_SEARCH} -# ) - - find_library(Poco_NET_RELEASE NAMES PocoNetmt - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - -# find_library(Poco_MONGODB_DEBUG NAMES PocoMongoDBd PocoMongoDBd_dll -# PATH_SUFFIXES -# ${SUFFIX_FOR_LIBRARY_PATH} -# PATHS # Look in other places. -# ${Poco_INCLUDE_DIR} -# ${POCO_DIR_SEARCH} -# ) - - find_library(Poco_MONGODB_RELEASE NAMES PocoMongoDBmt - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - -# find_library(Poco_DATA_DEBUG NAMES PocoDatad PocoDatad_dll -# PATH_SUFFIXES -# ${SUFFIX_FOR_LIBRARY_PATH} -# PATHS # Look in other places. -# ${Poco_INCLUDE_DIR} -# ${POCO_DIR_SEARCH} -# ) - - find_library(Poco_DATA_RELEASE NAMES PocoDatamt - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. 
- ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - -# find_library(Poco_JSON_DEBUG NAMES PocoJSONd PocoJSONd_dll -# PATH_SUFFIXES -# ${SUFFIX_FOR_LIBRARY_PATH} -# PATHS # Look in other places. -# ${Poco_INCLUDE_DIR} -# ${POCO_DIR_SEARCH} -# ) - - find_library(Poco_JSON_RELEASE NAMES PocoJSONmt - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - -# find_library(Poco_UTIL_DEBUG NAMES PocoUtild PocoUtild_dll -# PATH_SUFFIXES -# ${SUFFIX_FOR_LIBRARY_PATH} -# PATHS # Look in other places. -# ${Poco_INCLUDE_DIR} -# ${POCO_DIR_SEARCH} -# ) - - find_library(Poco_UTIL_RELEASE NAMES PocoUtilmt - PATH_SUFFIXES - ${SUFFIX_FOR_LIBRARY_PATH} - PATHS # Look in other places. - ${Poco_INCLUDE_DIR} - ${POCO_DIR_SEARCH} - ) - - set( Poco_FOUNDATION ${Poco_FOUNDATION_RELEASE} CACHE STRING "Poco Foundation link library text") - set( Poco_DATA_SQLITE ${Poco_DATA_SQLITE_RELEASE} CACHE STRING "Poco SQLite link library text") - set( Poco_DATA ${Poco_DATA_RELEASE} CACHE STRING "Poco Data link library text") - set( Poco_JSON ${Poco_JSON_RELEASE} CACHE STRING "Poco JSON link library text") - set( Poco_MONGODB ${Poco_MONGODB_RELEASE} CACHE STRING "Poco MONGODB link library text") - set( Poco_NET ${Poco_NET_RELEASE} CACHE STRING "Poco NET link library text") - set( Poco_UTIL ${Poco_UTIL_RELEASE} CACHE STRING "Poco Util link library text") - - ENDIF(NOT Poco_LIBRARY_DIR) - -IF(NOT Poco_FOUND) - IF(Poco_FIND_QUIETLY) - message(STATUS "Poco was not found. ${POCO_INCLUDE_DIR_MESSAGE}") - ELSE(Poco_FIND_QUIETLY) - IF(Poco_FIND_REQUIRED) - message(FATAL_ERROR "Poco was not found. ${POCO_INCLUDE_DIR_MESSAGE}") - ENDIF(Poco_FIND_REQUIRED) - ENDIF(Poco_FIND_QUIETLY) -ENDIF(NOT Poco_FOUND) - -ENDIF() - - diff --git a/Source/cmake/FindSQLite.cmake b/Source/cmake/FindSQLite.cmake deleted file mode 100644 index ec8243a..0000000 --- a/Source/cmake/FindSQLite.cmake +++ /dev/null @@ -1,51 +0,0 @@ -SET(SEARCH_PATH_SQLITE "" CACHE PATH "Additional Sqlite search path") -SET(SQLITE_DIR_SEARCH - $ENV{SQLITE_ROOT} - ${SEARCH_PATH_SQLITE} - ${SQLITE_DIR_SEARCH} -) -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(SQLITE_DIR_SEARCH - ${SQLITE_DIR_SEARCH} - "C:/Development/sqlite-amalgamation-3260000/" - "C:/Development/sqlite-amalgamation-3210000/" - "C:/Development/sqlite-amalgamation-3200100/" - "C:/Development/sqlite-amalgamation-3200000/" - "C:/Development/sqlite-amalgamation-3170000/" - ) - endif() - if (EXISTS "D:/") - SET(SQLITE_DIR_SEARCH - ${SQLITE_DIR_SEARCH} - "D:/Development/sqlite-amalgamation-3260000/" - "D:/Development/sqlite-amalgamation-3210000/" - "D:/Development/sqlite-amalgamation-3200100/" - "D:/Development/sqlite-amalgamation-3200000/" - "D:/Development/sqlite-amalgamation-3170000/" - ) - endif() -endif() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(SQLITE_DIR_SEARCH - ${SQLITE_DIR_SEARCH} - /usr/local/sqlite-autoconf-3260000/ - /usr/local/sqlite-autoconf-3210000/ - /usr/local/sqlite-autoconf-3200100/ - /usr/local/sqlite-autoconf-3200000/ - /usr/local/sqlite-autoconf-3170000/ - ) -endif(CMAKE_SYSTEM MATCHES "Linux") - -find_path(Sqlite_INCLUDE_PATH sqlite3.h - PATHS - ${SQLITE_DIR_SEARCH} -) - -# handle the QUIETLY and REQUIRED arguments and set SQLITE_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(Sqlite DEFAULT_MSG Sqlite_INCLUDE_PATH) - -MARK_AS_ADVANCED(Sqlite_INCLUDE_PATH) diff --git a/Source/cmake/FindSimpleAmqpClient.cmake 
b/Source/cmake/FindSimpleAmqpClient.cmake deleted file mode 100644 index 4381ccf..0000000 --- a/Source/cmake/FindSimpleAmqpClient.cmake +++ /dev/null @@ -1,90 +0,0 @@ -#Find the SimpleAmqpClient library - -INCLUDE(LibFindMacros) - -SET(SIMPLEAMQPCLIENT_DIR_SEARCH - ${SimpleAmqpClient_DIR} - $ENV{SimpleAmqpClient_ROOT} - ${SEARCH_PATH_SimpleAmqpClient} - ${SimpleAmqpClient_DIR_SEARCH} -) - -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(SIMPLEAMQPCLIENT_DIR_SEARCH - ${SIMPLEAMQPCLIENT_DIR_SEARCH} - "C:/Development/Software/SimpleAmqpClient" - "C:/Development/SimpleAmqpClient-2.4.0/" - "C:/Development/SimpleAmqpClient/" - ) - endif() - - if (EXISTS "D:/") - SET(SIMPLEAMQPCLIENT_DIR_SEARCH - ${SIMPLEAMQPCLIENT_DIR_SEARCH} - "D:/Development/Software/SimpleAmqpClient" - "D:/Development/SimpleAmqpClient-2.4.0/" - "D:/Development/SimpleAmqpClient/" - ) - endif() -endif() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(SimpleAmqpClient_LIBNAME "SimpleAmqpClient") - SET(SimpleAmqpClient_DIR_SEARCH - ${SIMPLEAMQPCLIENT_DIR_SEARCH} - /usr/local/include/SimpleAmqpClient/ - ) - SET(SIMPLEAMQPCLIENT_LIB_DIR_SEARCH - ${SIMPLEAMQPCLIENT_LIB_DIR_SEARCH} - /usr/local/lib/ - ) -endif(CMAKE_SYSTEM MATCHES "Linux") - -# Find the include directories -FIND_PATH(SimpleAmqpClient_INCLUDE_DIR - NAMES SimpleAmqpClient/SimpleAmqpClient.h - HINTS src - PATH_SUFFIXES src - PATHS ${SIMPLEAMQPCLIENT_DIR_SEARCH} - ) - -if(CMAKE_SYSTEM MATCHES "Windows") - FIND_LIBRARY(SimpleAmqpClient_LIBRARY_Release - NAMES SimpleAmqpClient.2 - HINTS build/Release - PATH_SUFFIXES - lib - build/Release - - PATHS ${SIMPLEAMQPCLIENT_DIR_SEARCH} - ) - - FIND_LIBRARY(SimpleAmqpClient_LIBRARY_Debug - NAMES SimpleAmqpClient.2d - HINTS build/Debug - PATH_SUFFIXES - lib - build/Debug - PATHS ${SIMPLEAMQPCLIENT_DIR_SEARCH} - ) - - set( SimpleAmqpClient_LIBRARY debug ${SimpleAmqpClient_LIBRARY_Debug} - optimized ${SimpleAmqpClient_LIBRARY_Release} - CACHE STRING "SimpleAmqpClient library text") -endif() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - FIND_LIBRARY(SimpleAmqpClient_LIBRARY - NAMES ${SimpleAmqpClient_LIBNAME} - PATHS ${SIMPLEAMQPCLIENT_LIB_DIR_SEARCH} - ) -endif(CMAKE_SYSTEM MATCHES "Linux") - -# SET(SimpleAmqpClient_PROCESS_INCLUDES SimpleAmqpClient_INCLUDE_DIR) -# SET(SimpleAmqpClient_PROCESS_LIBS SimpleAmqpClient_LIBRARY) - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(SIMPLEAMQPCLIENT DEFAULT_MSG SimpleAmqpClient_INCLUDE_DIR) - -# LIBFIND_PROCESS(SimpleAmqpClient) diff --git a/Source/cmake/FindTBB.cmake b/Source/cmake/FindTBB.cmake deleted file mode 100644 index 907c0e4..0000000 --- a/Source/cmake/FindTBB.cmake +++ /dev/null @@ -1,247 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2015 Justus Calvin -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -# -# FindTBB -# ------- -# -# Find TBB include directories and libraries. -# -# Usage: -# -# find_package(TBB [major[.minor]] [EXACT] -# [QUIET] [REQUIRED] -# [[COMPONENTS] [components...]] -# [OPTIONAL_COMPONENTS components...]) -# -# where the allowed components are tbbmalloc and tbb_preview. Users may modify -# the behavior of this module with the following variables: -# -# * TBB_ROOT_DIR - The base directory the of TBB installation. -# * TBB_INCLUDE_DIR - The directory that contains the TBB headers files. -# * TBB_LIBRARY - The directory that contains the TBB library files. -# * TBB__LIBRARY - The path of the TBB the corresponding TBB library. -# These libraries, if specified, override the -# corresponding library search results, where -# may be tbb, tbb_debug, tbbmalloc, tbbmalloc_debug, -# tbb_preview, or tbb_preview_debug. -# * TBB_USE_DEBUG_BUILD - The debug version of tbb libraries, if present, will -# be used instead of the release version. -# -# Users may modify the behavior of this module with the following environment -# variables: -# -# * TBB_INSTALL_DIR -# * TBBROOT -# * LIBRARY_PATH -# -# This module will set the following variables: -# -# * TBB_FOUND - Set to false, or undefined, if we havent found, or -# dont want to use TBB. -# * TBB__FOUND - If False, optional part of TBB sytem is -# not available. -# * TBB_VERSION - The full version string -# * TBB_VERSION_MAJOR - The major version -# * TBB_VERSION_MINOR - The minor version -# * TBB_INTERFACE_VERSION - The interface version number defined in -# tbb/tbb_stddef.h. -# * TBB__LIBRARY_RELEASE - The path of the TBB release version of -# , where may be tbb, tbb_debug, -# tbbmalloc, tbbmalloc_debug, tbb_preview, or -# tbb_preview_debug. -# * TBB__LIBRARY_DEGUG - The path of the TBB release version of -# , where may be tbb, tbb_debug, -# tbbmalloc, tbbmalloc_debug, tbb_preview, or -# tbb_preview_debug. -# -# The following varibles should be used to build and link with TBB: -# -# * TBB_INCLUDE_DIRS - The include directory for TBB. -# * TBB_LIBRARIES - The libraries to link against to use TBB. -# * TBB_DEFINITIONS - Definitions to use when compiling code that uses TBB. 
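As a minimal sketch (assuming `Source/cmake` is on `CMAKE_MODULE_PATH` and a hypothetical target named `my_app`), a consuming CMakeLists.txt would use the variables documented above roughly like this:

```
# Hypothetical consumer of the FindTBB module described above; 'my_app'
# and the module path are assumptions, not part of this diff.
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/Source/cmake")
find_package(TBB COMPONENTS tbbmalloc)        # sets TBB_FOUND, TBB_INCLUDE_DIRS, TBB_LIBRARIES, TBB_DEFINITIONS
if(TBB_FOUND)
  add_definitions(${TBB_DEFINITIONS})         # e.g. -DTBB_USE_DEBUG=1 when a debug TBB library was selected
  include_directories(${TBB_INCLUDE_DIRS})
  target_link_libraries(my_app ${TBB_LIBRARIES})
endif()
```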
- -include(FindPackageHandleStandardArgs) - -if(NOT TBB_FOUND) - - ################################## - # Check the build type - ################################## - - if(NOT DEFINED TBB_USE_DEBUG_BUILD) - if(CMAKE_BUILD_TYPE MATCHES "[Debug|DEBUG|debug|RelWithDebInfo|RELWITHDEBINFO|relwithdebinfo]") - set(TBB_USE_DEBUG_BUILD TRUE) - else() - set(TBB_USE_DEBUG_BUILD FALSE) - endif() - endif() - - ################################## - # Set the TBB search directories - ################################## - - # Define search paths based on user input and environment variables - set(TBB_SEARCH_DIR ${TBB_ROOT_DIR} $ENV{TBB_INSTALL_DIR} $ENV{TBBROOT}) - - # Define the search directories based on the current platform - if(CMAKE_SYSTEM_NAME STREQUAL "Windows") - set(TBB_DEFAULT_SEARCH_DIR "C:/Development/tbb44_20160526oss" - "C:/Development/tbb44_20160128oss" - "C:/Program Files/Intel/TBB" - "C:/Program Files (x86)/Intel/TBB") - - # Set the target architecture - if(CMAKE_SIZEOF_VOID_P EQUAL 8) - set(TBB_ARCHITECTURE "intel64") - else() - set(TBB_ARCHITECTURE "ia32") - endif() - - # Set the TBB search library path search suffix based on the version of VC - if(WINDOWS_STORE) - set(TBB_LIB_PATH_SUFFIX "lib/${TBB_ARCHITECTURE}/vc11_ui") - elseif(MSVC14) - set(TBB_LIB_PATH_SUFFIX "lib/${TBB_ARCHITECTURE}/vc14") - elseif(MSVC12) - set(TBB_LIB_PATH_SUFFIX "lib/${TBB_ARCHITECTURE}/vc12") - elseif(MSVC11) - set(TBB_LIB_PATH_SUFFIX "lib/${TBB_ARCHITECTURE}/vc11") - elseif(MSVC10) - set(TBB_LIB_PATH_SUFFIX "lib/${TBB_ARCHITECTURE}/vc10") - endif() - - # Add the library path search suffix for the VC independent version of TBB - list(APPEND TBB_LIB_PATH_SUFFIX "lib/${TBB_ARCHITECTURE}/vc_mt") - - elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin") - # OS X - set(TBB_DEFAULT_SEARCH_DIR "/opt/intel/tbb") - - # TODO: Check to see which C++ library is being used by the compiler. - if(NOT ${CMAKE_SYSTEM_VERSION} VERSION_LESS 13.0) - # The default C++ library on OS X 10.9 and later is libc++ - set(TBB_LIB_PATH_SUFFIX "lib/libc++") - else() - set(TBB_LIB_PATH_SUFFIX "lib") - endif() - elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux") - # Linux - set(TBB_DEFAULT_SEARCH_DIR "/usr/include" "/opt/intel/tbb") - - # TODO: Check compiler version to see the suffix should be /gcc4.1 or - # /gcc4.1. For now, assume that the compiler is more recent than - # gcc 4.4.x or later. 
-## if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64") -## set(TBB_LIB_PATH_SUFFIX "lib/intel64/gcc4.4") -## elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$") -## set(TBB_LIB_PATH_SUFFIX "lib/ia32/gcc4.4") -## endif() - endif() - - ################################## - # Find the TBB include dir - ################################## - - find_path(TBB_INCLUDE_DIRS tbb/tbb.h - HINTS ${TBB_INCLUDE_DIR} ${TBB_SEARCH_DIR} - PATHS ${TBB_DEFAULT_SEARCH_DIR} - PATH_SUFFIXES include) - - ################################## - # Find TBB components - ################################## - - # Find each component - foreach(_comp tbb_preview tbbmalloc tbb) - # Search for the libraries - find_library(TBB_${_comp}_LIBRARY_RELEASE ${_comp} - HINTS ${TBB_LIBRARY} ${TBB_SEARCH_DIR} - PATHS ${TBB_DEFAULT_SEARCH_DIR} - PATH_SUFFIXES ${TBB_LIB_PATH_SUFFIX}) - - find_library(TBB_${_comp}_LIBRARY_DEBUG ${_comp}_debug - HINTS ${TBB_LIBRARY} ${TBB_SEARCH_DIR} - PATHS ${TBB_DEFAULT_SEARCH_DIR} ENV LIBRARY_PATH - PATH_SUFFIXES ${TBB_LIB_PATH_SUFFIX}) - - - # Set the library to be used for the component - if(NOT TBB_${_comp}_LIBRARY) - if(TBB_USE_DEBUG_BUILD AND TBB_${_comp}_LIBRARY_DEBUG) - set(TBB_${_comp}_LIBRARY "${TBB_${_comp}_LIBRARY_DEBUG}") - elseif(TBB_${_comp}_LIBRARY_RELEASE) - set(TBB_${_comp}_LIBRARY "${TBB_${_comp}_LIBRARY_RELEASE}") - elseif(TBB_${_comp}_LIBRARY_DEBUG) - set(TBB_${_comp}_LIBRARY "${TBB_${_comp}_LIBRARY_DEBUG}") - endif() - endif() - - # Set the TBB library list and component found variables - if(TBB_${_comp}_LIBRARY) - list(APPEND TBB_LIBRARIES "${TBB_${_comp}_LIBRARY}") - set(TBB_${_comp}_FOUND TRUE) - else() - set(TBB_${_comp}_FOUND FALSE) - endif() - - mark_as_advanced(TBB_${_comp}_LIBRARY_RELEASE) - mark_as_advanced(TBB_${_comp}_LIBRARY_DEBUG) - mark_as_advanced(TBB_${_comp}_LIBRARY) - - endforeach() - - ################################## - # Set compile flags - ################################## - - if(TBB_tbb_LIBRARY MATCHES "debug") - set(TBB_DEFINITIONS "-DTBB_USE_DEBUG=1") - endif() - - ################################## - # Set version strings - ################################## - - if(TBB_INCLUDE_DIRS) - file(READ "${TBB_INCLUDE_DIRS}/tbb/tbb_stddef.h" _tbb_version_file) - string(REGEX REPLACE ".*#define TBB_VERSION_MAJOR ([0-9]+).*" "\\1" - TBB_VERSION_MAJOR "${_tbb_version_file}") - string(REGEX REPLACE ".*#define TBB_VERSION_MINOR ([0-9]+).*" "\\1" - TBB_VERSION_MINOR "${_tbb_version_file}") - string(REGEX REPLACE ".*#define TBB_INTERFACE_VERSION ([0-9]+).*" "\\1" - TBB_INTERFACE_VERSION "${_tbb_version_file}") - set(TBB_VERSION "${TBB_VERSION_MAJOR}.${TBB_VERSION_MINOR}") - endif() - - find_package_handle_standard_args(TBB - REQUIRED_VARS TBB_INCLUDE_DIRS TBB_LIBRARIES - HANDLE_COMPONENTS - VERSION_VAR TBB_VERSION) - - mark_as_advanced(TBB_INCLUDE_DIRS TBB_LIBRARIES) - - unset(TBB_ARCHITECTURE) - unset(TBB_LIB_PATH_SUFFIX) - unset(TBB_DEFAULT_SEARCH_DIR) - -endif() diff --git a/Source/cmake/FindTurtle.cmake b/Source/cmake/FindTurtle.cmake index b67ab04..a6c0537 100644 --- a/Source/cmake/FindTurtle.cmake +++ b/Source/cmake/FindTurtle.cmake @@ -1,56 +1,22 @@ -SET(SEARCH_PATH_TURTLE "" CACHE PATH "Additional Turtle search path") - -SET(TURTLE_DIR_SEARCH - $ENV{TURTLE_ROOT} - ${SEARCH_PATH_TURTLE} - ${TURTLE_DIR_SEARCH} -) -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(TURTLE_DIR_SEARCH - ${TURTLE_DIR_SEARCH} - "C:/Development/turtle/include/" - "C:/Development/turtle-1.3.1/include/" - "C:/Development/turtle-1.3.0/include/" - "C:/Development/turtle-1.2.9/include/" - 
"C:/Development/turtle-1.2.8/include/" - "C:/Development/turtle-1.2.7/include/" - "C:/Development/turtle-1.2.6/include/" - "C:/Program Files/" - "C:/Program Files/turtle/" - ) - endif() - if (EXISTS "D:/") - SET(TURTLE_DIR_SEARCH - ${TURTLE_DIR_SEARCH} - "D:/Development/turtle/include/" - "D:/Development/turtle-1.3.1/include/" - "D:/Development/turtle-1.3.0/include/" - "D:/Development/turtle-1.2.9/include/" - "D:/Development/turtle-1.2.8/include/" - "D:/Development/turtle-1.2.7/include/" - "D:/Development/turtle-1.2.6/include/" - "D:/Program Files/" - "D:/Program Files/turtle/" - ) - endif() -endif() - -if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(TURTLE_DIR_SEARCH - ${TURTLE_DIR_SEARCH} - /usr/local/include - ) -endif(CMAKE_SYSTEM MATCHES "Linux") - -find_path(Turtle_INCLUDE_PATH turtle/mock.hpp - PATHS - ${TURTLE_DIR_SEARCH} +find_path(Turtle_INCLUDE_DIR + NAMES mock.hpp + PATH_SUFFIXES turtle ) # handle the QUIETLY and REQUIRED arguments and set TURTLE_FOUND to TRUE if # all listed variables are TRUE INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(Turtle DEFAULT_MSG Turtle_INCLUDE_PATH) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(Turtle DEFAULT_MSG Turtle_INCLUDE_DIR) + +MARK_AS_ADVANCED(Turtle_FOUND Turtle_INCLUDE_DIR) + +if(Turtle_FOUND) + set(Turtle_INCLUDE_DIRS ${Turtle_INCLUDE_DIR}) -MARK_AS_ADVANCED(Turtle_INCLUDE_PATH) \ No newline at end of file + if(NOT TARGET Turtle::Turtle) + add_library(Turtle::Turtle INTERFACE IMPORTED) + set_target_properties(Turtle::Turtle PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Turtle_INCLUDE_DIR}" + ) + endif() +endif() diff --git a/Source/cmake/FindZipper.cmake b/Source/cmake/FindZipper.cmake index 27f9962..22c04ec 100644 --- a/Source/cmake/FindZipper.cmake +++ b/Source/cmake/FindZipper.cmake @@ -1,83 +1,69 @@ -SET(SEARCH_PATH_ZIPPER "" CACHE PATH "Additional Zipper search path") -SET(ZIPPER_DIR_SEARCH - $ENV{ZIPPER_ROOT} - ${SEARCH_PATH_ZIPPER} - ${ZIPPER_DIR_SEARCH} +find_path(Zipper_INCLUDE_DIR + NAMES zipper.h + PATH_SUFFIXES zipper ) if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(ZIPPER_DIR_SEARCH - ${ZIPPER_DIR_SEARCH} - "C:/Development/SLEEK-TOOLS/zipper/dist/" - "C:/Development/Software/zipper/" - ) - endif() - if (EXISTS "D:/") - SET(ZIPPER_DIR_SEARCH - ${ZIPPER_DIR_SEARCH} - "D:/Development/SLEEK-TOOLS/zipper/dist/" - "D:/Development/Software/zipper/" - ) - endif() - - find_path(Zipper_INCLUDE_PATH zipper/zipper.h - PATH_SUFFIXES - include - PATHS - ${ZIPPER_DIR_SEARCH} - ) - find_library(Zipper_LIB_DEBUG libZipper-staticd - PATH_SUFFIXES - lib - PATHS - ${ZIPPER_DIR_SEARCH} - ) + find_library(Zipper_LIBRARY_DEBUG libZipper-staticd Zipper-staticd + PATH_SUFFIXES lib ) - find_library(Zipper_LIB_RELEASE libZipper-static - PATH_SUFFIXES - lib - PATHS - ${ZIPPER_DIR_SEARCH} - ) + find_library(Zipper_LIBRARY_RELEASE libZipper-static Zipper-static + PATH_SUFFIXES lib ) - set(Zipper_LIB - debug ${Zipper_LIB_DEBUG} - optimized ${Zipper_LIB_RELEASE} - CACHE STRING "Zipper library") + include(SelectLibraryConfigurations) + select_library_configurations(Zipper) + set(Zipper_LIBRARY + debug ${Zipper_LIBRARY_DEBUG} + optimized ${Zipper_LIBRARY_RELEASE} + CACHE STRING "Zipper library") endif() if (CMAKE_SYSTEM MATCHES "Linux" ) - SET(ZIPPER_DIR_SEARCH - ${ZIPPER_DIR_SEARCH} - /usr/local/ - /usr/ - ) - find_path(Zipper_INCLUDE_PATH zipper/zipper.h - PATH_SUFFIXES - include - PATHS - ${ZIPPER_DIR_SEARCH} - ) + find_library(Zipper_LIBRARY Zipper-static + PATH_SUFFIXES lib ) - find_library(Zipper_LIB Zipper-static - 
PATH_SUFFIXES - lib - PATHS - ${ZIPPER_DIR_SEARCH} - ) +endif(CMAKE_SYSTEM MATCHES "Linux") - # set(Zipper_LIB - # ${Zipper_LIB_RELEASE} - # CACHE STRING "Zipper library") +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(Zipper + REQUIRED_VARS Zipper_INCLUDE_DIR Zipper_LIBRARY +) -endif(CMAKE_SYSTEM MATCHES "Linux") +mark_as_advanced(Zipper_FOUND Zipper_INCLUDE_DIR Zipper_LIBRARY) -# handle the QUIETLY and REQUIRED arguments and set ZIPPER_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(Zipper DEFAULT_MSG Zipper_INCLUDE_PATH) +if(Zipper_FOUND) + set(Zipper_INCLUDE_DIRS ${Zipper_INCLUDE_DIR}) + if(NOT Zipper_LIBRARIES) + set(Zipper_LIBRARIES ${Zipper_LIBRARY}) + endif() -MARK_AS_ADVANCED(Zipper_INCLUDE_PATH) + if(NOT TARGET Zipper::Zipper) + add_library(Zipper::Zipper UNKNOWN IMPORTED) + set_target_properties(Zipper::Zipper PROPERTIES + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + INTERFACE_INCLUDE_DIRECTORIES "${Zipper_INCLUDE_DIR}" + ) + + if (Zipper_LIBRARY_RELEASE) + set_property(TARGET Zipper::Zipper APPEND PROPERTY + IMPORTED_CONFIGURATIONS RELEASE) + set_target_properties( Zipper::Zipper PROPERTIES + IMPORTED_LOCATION_RELEASE ${Zipper_LIBRARY_RELEASE} ) + endif () + + if (Zipper_LIBRARY_DEBUG) + set_property(TARGET Zipper::Zipper APPEND PROPERTY + IMPORTED_CONFIGURATIONS DEBUG) + set_target_properties( Zipper::Zipper PROPERTIES + IMPORTED_LOCATION_DEBUG ${Zipper_LIBRARY_DEBUG} ) + endif () + + if(NOT Zipper_LIBRARY_DEBUG AND NOT Zipper_LIBRARY_RELEASE) + set_target_properties( Zipper::Zipper PROPERTIES + IMPORTED_LOCATION ${Zipper_LIBRARY} ) + endif () + + endif() +endif() diff --git a/Source/cmake/FindZlib.cmake b/Source/cmake/FindZlib.cmake deleted file mode 100644 index 6aa96ba..0000000 --- a/Source/cmake/FindZlib.cmake +++ /dev/null @@ -1,76 +0,0 @@ -SET(SEARCH_PATH_ZLIB "" CACHE PATH "Additional Zlib search path") -SET(ZLIB_DIR_SEARCH - $ENV{ZLIB_ROOT} - ${SEARCH_PATH_ZLIB} - ${ZLIB_DIR_SEARCH} -) -if(CMAKE_SYSTEM MATCHES "Windows") - if (EXISTS "C:/") - SET(ZLIB_DIR_SEARCH - ${ZLIB_DIR_SEARCH} - "C:/Development/zlib-1.2.11/dist/" - "C:/Development/Software/zlib/" - ) - endif() - if (EXISTS "D:/") - SET(ZLIB_DIR_SEARCH - ${ZLIB_DIR_SEARCH} - "D:/Development/zlib-1.2.11/dist/" - "D:/Development/Software/zlib/" - ) - endif() - - find_path(Zlib_INCLUDE_PATH zlib.h - PATH_SUFFIXES include - PATHS - ${ZLIB_DIR_SEARCH} - ) - - find_library(Zlib_LIB zlib - PATH_SUFFIXES lib - PATHS - ${ZLIB_DIR_SEARCH} - ) -endif() - -set(ADDITIONAL_LIB_DIRS) -if (CMAKE_SYSTEM MATCHES "Linux" ) - if(CMAKE_SIZEOF_VOID_P EQUAL 4) - else() - set(ADDITIONAL_LIB_DIRS "/usr/lib64" "/usr/lib/x86_64-linux-gnu/") - endif() - - SET(ZLIB_LIB_SEARCH - ${ADDITIONAL_LIB_DIRS} - ${ZLIB_LIB_SEARCH} - /usr/local/zipper-master/ - /usr/local/lib/ - /usr/lib/ - ) - - find_library(Zlib_LIB z - PATHS - ${ZLIB_LIB_SEARCH} - ) - - SET(ZLIB_INCLUDE_SEARCH - ${ZLIB_INCLUDE_SEARCH} - /usr/local/zipper-master/ - /usr/local/ - /usr/ - ) - - find_path(Zlib_INCLUDE_PATH zlib.h - PATH_SUFFIXES include - PATHS - ${ZLIB_INCLUDE_SEARCH} - ) - -endif(CMAKE_SYSTEM MATCHES "Linux") - -# handle the QUIETLY and REQUIRED arguments and set ZLIB_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(Zlib DEFAULT_MSG Zlib_INCLUDE_PATH) - -MARK_AS_ADVANCED(Zlib_INCLUDE_PATH) diff --git a/Source/cmake/MojaMacros.cmake b/Source/cmake/MojaMacros.cmake new file mode 100644 index 
0000000..eb3e446 --- /dev/null +++ b/Source/cmake/MojaMacros.cmake @@ -0,0 +1,130 @@ +#=============================================================================== +# Macros for Package generation +# +# MOJA_GENERATE_PACKAGE - Generates *Config.cmake +# Usage: MOJA_GENERATE_PACKAGE(target_name) +# INPUT: +# target_name the name of the target. e.g. Flint for MojaFlint +# Example: MOJA_GENERATE_PACKAGE(Flint) +macro(MOJA_GENERATE_PACKAGE target_name) +include(CMakePackageConfigHelpers) +write_basic_package_version_file( + "${CMAKE_BINARY_DIR}/${PROJECT_NAME}/${target_name}ConfigVersion.cmake" + VERSION ${PROJECT_VERSION} + COMPATIBILITY AnyNewerVersion +) +export(EXPORT "${target_name}Targets" + FILE "${CMAKE_BINARY_DIR}/${PROJECT_NAME}/${target_name}Targets.cmake" + NAMESPACE "${PROJECT_NAME}::" +) +configure_file("cmake/${target_name}Config.cmake" + "${CMAKE_BINARY_DIR}/${PROJECT_NAME}/${target_name}Config.cmake" + @ONLY +) + +# Set config script install location in a location that find_package() will +# look for, which is different on MS Windows than for UNIX +# Note: also set in root CMakeLists.txt +if (WIN32) + set(MojaConfigPackageLocation "cmake") +else() + set(MojaConfigPackageLocation "lib/cmake/${PROJECT_NAME}") +endif() + +install( + EXPORT "${target_name}Targets" + FILE "${target_name}Targets.cmake" + NAMESPACE "${PROJECT_NAME}::" + DESTINATION "${MojaConfigPackageLocation}" + ) + +install( + FILES + "${CMAKE_BINARY_DIR}/${PROJECT_NAME}/${target_name}Config.cmake" + "${CMAKE_BINARY_DIR}/${PROJECT_NAME}/${target_name}ConfigVersion.cmake" + DESTINATION "${MojaConfigPackageLocation}" + COMPONENT Devel + ) + +endmacro() + +#=============================================================================== +# Macros for simplified installation +# +# MOJA_INSTALL - Install the given target +# Usage: MOJA_INSTALL(target_name) +# INPUT: +# target_name the name of the target. e.g. Flint for MojaFlint +# Example: MOJA_INSTALL(Flint) +macro(MOJA_INSTALL target_name) +install( + DIRECTORY include/moja + DESTINATION include + COMPONENT Devel + ) + +install( + TARGETS "${target_name}" EXPORT "${target_name}Targets" + LIBRARY DESTINATION lib${LIB_SUFFIX} + ARCHIVE DESTINATION lib${LIB_SUFFIX} + RUNTIME DESTINATION bin + INCLUDES DESTINATION include + ) + +if (MSVC) +# install the targets pdb + MOJA_INSTALL_PDB(${target_name}) +endif() + +endmacro() + +#=============================================================================== +# Macros for simplified installation of package not following the Moja standard as CppUnit +# +# SIMPLE_INSTALL - Install the given target +# Usage: SIMPLE_INSTALL(target_name) +# INPUT: +# target_name the name of the target. e.g. CppUnit +# Example: SIMPLE_INSTALL(Flint) +macro(SIMPLE_INSTALL target_name) +install( + DIRECTORY include + DESTINATION include + COMPONENT Devel + PATTERN ".svn" EXCLUDE + ) + +install( + TARGETS "${target_name}" EXPORT "${target_name}Targets" + LIBRARY DESTINATION lib${LIB_SUFFIX} + ARCHIVE DESTINATION lib${LIB_SUFFIX} + RUNTIME DESTINATION bin + INCLUDES DESTINATION include + ) + +if (MSVC) +# install the targets pdb + MOJA_INSTALL_PDB(${target_name}) +endif() + +endmacro() + +# MOJA_INSTALL_PDB - Install the given target's companion pdb file (if present) +# Usage: MOJA_INSTALL_PDB(target_name) +# INPUT: +# target_name the name of the target. e.g. Flint for MojaFlint +# Example: MOJA_INSTALL_PDB(Flint) +# +# This is an internal macro meant only to be used by MOJA_INSTALL. 
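+# Note: a companion .pdb file is only produced for MSVC builds of shared libraries and executables, +# and the OPTIONAL keyword on the install(FILES ...) call below keeps installation from failing when no .pdb was generated.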
+macro(MOJA_INSTALL_PDB target_name) + + get_property(type TARGET ${target_name} PROPERTY TYPE) + if ("${type}" STREQUAL "SHARED_LIBRARY" OR "${type}" STREQUAL "EXECUTABLE") + install( + FILES $<TARGET_PDB_FILE:${target_name}> + DESTINATION bin + COMPONENT Devel + OPTIONAL + ) + endif() +endmacro() \ No newline at end of file diff --git a/Source/cmake/mojaConfig.cmake.in b/Source/cmake/mojaConfig.cmake.in new file mode 100644 index 0000000..5fc30c4 --- /dev/null +++ b/Source/cmake/mojaConfig.cmake.in @@ -0,0 +1,53 @@ +if (CMAKE_VERSION VERSION_LESS 3.10.0) + message(FATAL_ERROR "moja requires at least CMake version 3.10.0") +endif() + +if (NOT moja_FIND_COMPONENTS) + set(moja_NOT_FOUND_MESSAGE "The moja package requires at least one component") + set(moja_FOUND False) + return() +endif() + + +set(_moja_FIND_PARTS_REQUIRED) +if (moja_FIND_REQUIRED) + set(_moja_FIND_PARTS_REQUIRED REQUIRED) +endif() +set(_moja_FIND_PARTS_QUIET) +if (moja_FIND_QUIETLY) + set(_moja_FIND_PARTS_QUIET QUIET) +endif() + +get_filename_component(_moja_install_prefix "${CMAKE_CURRENT_LIST_DIR}" ABSOLUTE) + +set(_moja_NOTFOUND_MESSAGE) + +# Let components find each other, but don't overwrite CMAKE_PREFIX_PATH +set(_moja_CMAKE_PREFIX_PATH_old ${CMAKE_PREFIX_PATH}) +set(CMAKE_PREFIX_PATH ${_moja_install_prefix}) + +foreach(module ${moja_FIND_COMPONENTS}) + find_package(${module} + ${_moja_FIND_PARTS_QUIET} + ${_moja_FIND_PARTS_REQUIRED} + PATHS "${_moja_install_prefix}" NO_DEFAULT_PATH + ) + if (NOT ${module}_FOUND) + if (moja_FIND_REQUIRED_${module}) + set(_moja_NOTFOUND_MESSAGE "${_moja_NOTFOUND_MESSAGE}Failed to find moja component \"${module}\" config file at \"${_moja_install_prefix}/${module}/${module}Config.cmake\"\n") + elseif(NOT moja_FIND_QUIETLY) + message(WARNING "Failed to find moja component \"${module}\" config file at \"${_moja_install_prefix}/${module}/${module}Config.cmake\"") + endif() + endif() + + # For backward compatibility set the LIBRARIES variable + list(APPEND moja_LIBRARIES "moja::${module}") +endforeach() + +# Restore the original CMAKE_PREFIX_PATH value +set(CMAKE_PREFIX_PATH ${_moja_CMAKE_PREFIX_PATH_old}) + +if (_moja_NOTFOUND_MESSAGE) + set(moja_NOT_FOUND_MESSAGE "${_moja_NOTFOUND_MESSAGE}") + set(moja_FOUND False) +endif() diff --git a/Source/docker/Dockerfile.base.ubuntu.18.04 b/Source/docker/Dockerfile.base.ubuntu.18.04 new file mode 100644 index 0000000..2a66d7a --- /dev/null +++ b/Source/docker/Dockerfile.base.ubuntu.18.04 @@ -0,0 +1,150 @@ +# ================================================================================================================== +# +# Docker to ubuntu 18.04 base image (for required libraries) used by flint docker +# +# Building this Docker: +# docker build -f Dockerfile.base.ubuntu.18.04 --build-arg NUM_CPU=4 -t moja/baseimage:ubuntu-18.04 .
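+# NUM_CPU sets the number of parallel make jobs used by the library builds in this image (ARG NUM_CPU, default 1).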
+# +# ================================================================================================================== + +# Ubuntu 18.04 Bionic Beaver +FROM ubuntu:bionic + +LABEL maintainer="info@moja.global" + +ARG DEBIAN_FRONTEND=noninteractive + +WORKDIR $ROOTDIR/ + +# Install basic dependencies +RUN apt-get update -y && apt-get install -y \ + software-properties-common \ + build-essential \ + python3-dev \ + python3-numpy \ + python3-pip \ + libspatialite-dev \ + libeigen3-dev \ + sqlite3 \ + openssl \ + libssl-dev \ + libpq-dev \ + libcurl4-gnutls-dev \ + libproj-dev \ + libxml2-dev \ + libgeos-dev \ + libnetcdf-dev \ + libpoppler-dev \ + libhdf4-alt-dev \ + libhdf5-serial-dev \ + wget \ + bash-completion \ + nasm \ + postgresql-client-10 \ + git \ + gdb \ + && apt-get -y autoremove \ + && apt-get clean \ + && ln -sf /usr/include/eigen3 /usr/local/include/eigen + +# set versions of libraries used and some paths +ENV ROOTDIR /usr/local/ +ENV GDAL_VERSION 2.4.0 +ENV CMAKE_VERSION 3.14.3 +ENV POCO_VERSION 1.9.0 +ENV BOOST_VERSION 1_69_0 +ENV BOOST_VERSION_DOT 1.69.0 +ENV FMT_VERSION 5.3.0 +ENV SQLITE_VERSION 3270200 + +# set environment variables +ENV PATH /usr/local/bin:$PATH +ENV LD_LIBRARY_PATH /usr/local/lib:$LD_LIBRARY_PATH +ENV PYTHONPATH /usr/local/lib:$PYTHONPATH + +ARG NUM_CPU=1 +WORKDIR $ROOTDIR + +## CMake +ADD https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}.tar.gz $ROOTDIR/src/ +RUN cd src && tar -xzf cmake-${CMAKE_VERSION}.tar.gz && cd cmake-${CMAKE_VERSION} \ + && ./bootstrap \ + && make -s -j $NUM_CPU \ + && make install \ + && make clean \ + && cd $ROOTDIR + +# build user-config.jam files +RUN echo "using python : 3.6 : /usr ;" > ~/user-config.jam + +## Poco +WORKDIR $ROOTDIR +#ADD https://pocoproject.org/releases/poco-${POCO_VERSION}/poco-${POCO_VERSION}-all.tar.gz $ROOTDIR/src/ +#RUN cd src && tar -xzf poco-${POCO_VERSION}-all.tar.gz && cd poco-${POCO_VERSION}-all \ +ADD https://pocoproject.org/releases/poco-${POCO_VERSION}/poco-${POCO_VERSION}.tar.gz $ROOTDIR/src/ +RUN cd src && tar -xzf poco-${POCO_VERSION}.tar.gz && cd poco-${POCO_VERSION} \ + && ./configure --omit=Data/ODBC,Data/MySQL,FSM,Redis --no-samples --no-tests \ + && make -s -j $NUM_CPU DEFAULT_TARGET=shared_release \ + && make install \ + && make clean \ + && cd $ROOTDIR + +## Boost +ADD https://dl.bintray.com/boostorg/release/${BOOST_VERSION_DOT}/source/boost_${BOOST_VERSION}.tar.bz2 $ROOTDIR/src/ +RUN cd src && tar --bzip2 -xf boost_${BOOST_VERSION}.tar.bz2 && cd boost_${BOOST_VERSION} \ + && ./bootstrap.sh --prefix=/usr/local \ + && ./b2 -j $NUM_CPU cxxstd=14 install \ + && ./b2 clean \ + && cd $ROOTDIR + +## FMT +ADD https://github.com/fmtlib/fmt/archive/${FMT_VERSION}.tar.gz $ROOTDIR/src/ +RUN cd src && mkdir libfmt-${FMT_VERSION} && tar -xzf ${FMT_VERSION}.tar.gz -C libfmt-${FMT_VERSION} --strip-components=1 && cd libfmt-${FMT_VERSION} \ + && cmake -G"Unix Makefiles" -DCMAKE_INSTALL_PREFIX=/usr/local . 
\ + && make -j $NUM_CPU install \ + && make clean \ + && cd $ROOTDIR && rm -Rf src/libfmt* + +## SQLite +#ADD https://www.sqlite.org/2019/sqlite-autoconf-${SQLITE_VERSION}.tar.gz $ROOTDIR/src/ +#RUN cd src && tar -xzf sqlite-autoconf-${SQLITE_VERSION}.tar.gz -C /usr/include/ \ +# && cd $ROOTDIR && rm -Rf src/sqlite* + +ADD https://www.sqlite.org/2019/sqlite-autoconf-${SQLITE_VERSION}.tar.gz $ROOTDIR/src/ +RUN cd src && tar -xzf sqlite-autoconf-${SQLITE_VERSION}.tar.gz -C /usr/local/ \ + && cp /usr/local/sqlite-autoconf-${SQLITE_VERSION}/sqlite3.c /usr/include/ \ + && cd $ROOTDIR && rm -Rf src/sqlite* + +## gdal +ADD http://download.osgeo.org/gdal/${GDAL_VERSION}/gdal-${GDAL_VERSION}.tar.gz $ROOTDIR/src/ +RUN cd src && tar -xvf gdal-${GDAL_VERSION}.tar.gz && cd gdal-${GDAL_VERSION} \ + && ./configure \ + --with-python --with-spatialite --with-pg --with-curl \ + --with-netcdf --with-hdf5=/usr/lib/x86_64-linux-gnu/hdf5/serial \ + --with-curl \ + && make -j $NUM_CPU && make install && ldconfig \ + && apt-get update -y \ + && apt-get remove -y --purge build-essential \ + && cd $ROOTDIR && cd src/gdal-${GDAL_VERSION}/swig/python \ + && python3 setup.py build \ + && python3 setup.py install \ + && cd $ROOTDIR && rm -Rf src/gdal* + +## Zipper +RUN cd src && git clone --recursive https://github.com/sebastiandev/zipper.git +WORKDIR $ROOTDIR/src/zipper/build +RUN cmake .. \ + && make -s -j $NUM_CPU \ + && make install \ + && make clean \ + && cd $ROOTDIR + +## Turtle +WORKDIR $ROOTDIR/src/ +RUN wget https://sourceforge.net/projects/turtle/files/turtle/1.3.1/turtle-1.3.1.tar.gz \ + && tar xzf turtle-1.3.1.tar.gz -C /usr/local/ \ + && cd $ROOTDIR && rm -Rf src/turtle* + +#ADD http://downloads.sourceforge.net/project/turtle/turtle/1.3.1/turtle-1.3.1.tar.bz2 $ROOTDIR/src/ +#RUN tar xvf turtle-1.3.0.tar.bz2 -C /usr/local/ +WORKDIR $ROOTDIR/src diff --git a/Source/docker/Dockerfile.flint.ubuntu.18.04 b/Source/docker/Dockerfile.flint.ubuntu.18.04 new file mode 100644 index 0000000..fd13c2e --- /dev/null +++ b/Source/docker/Dockerfile.flint.ubuntu.18.04 @@ -0,0 +1,59 @@ +# ================================================================================================================== +# +# Docker to ubuntu 18.04 image for Moja flint libraries and executables +# +# Building this Docker: +# docker build -f Dockerfile.flint.ubuntu.18.04 --build-arg NUM_CPU=4 --build-arg GITHUB_AT=[TOKEN] --build-arg FLINT_BRANCH=[BRANCH] -t moja/flint:ubuntu-18.04 .
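+# GITHUB_AT is a GitHub access token and FLINT_BRANCH the branch of the flint repository to build; both are consumed by the git clone step later in this file.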
+# +# ================================================================================================================== + +FROM moja/baseimage:ubuntu-18.04 + +LABEL maintainer="info@moja.global" + +ARG GITHUB_AT +ARG FLINT_BRANCH +ARG NUM_CPU=1 +ARG DEBIAN_FRONTEND=noninteractive + +ENV SQLITE_VERSION 3270200 +ENV ROOTDIR /usr/local/ + +WORKDIR $ROOTDIR/ + +# set environment variables +ENV PATH /usr/local/bin:$PATH +ENV LD_LIBRARY_PATH /usr/local/lib:$LD_LIBRARY_PATH +ENV PYTHONPATH /usr/local/lib:$PYTHONPATH +ENV CURL_CA_BUNDLE /etc/ssl/certs/ca-certificates.crt +ENV GDAL_DATA=/usr/share/gdal +ENV GDAL_HTTP_VERSION 2 + +ADD https://www.sqlite.org/2019/sqlite-autoconf-${SQLITE_VERSION}.tar.gz $ROOTDIR/src/ +RUN cd src && tar -xzf sqlite-autoconf-${SQLITE_VERSION}.tar.gz -C /usr/local/ \ + && cp /usr/local/sqlite-autoconf-${SQLITE_VERSION}/sqlite3.c /usr/include/ \ + && cd $ROOTDIR && rm -Rf src/sqlite* + + +# GET FLINT +WORKDIR $ROOTDIR/ +RUN cd src && git clone -b ${FLINT_BRANCH} https://${GITHUB_AT}@github.com/moja-global/flint.git flint + +WORKDIR $ROOTDIR/src/flint/Source/build +RUN cmake -DCMAKE_BUILD_TYPE=RELEASE \ + -DCMAKE_INSTALL_PREFIX=/usr/local \ + -DENABLE_MOJA.MODULES.ZIPPER=ON \ + -DENABLE_MOJA.MODULES.POCO=OFF \ + -DENABLE_MOJA.MODULES.LIBPQ=ON \ + -DENABLE_MOJA.MODULES.GDAL=ON \ + -DENABLE_MOJA.CLI=ON \ + -DENABLE_TESTS:BOOL=OFF .. \ + && make -s -j $NUM_CPU \ + && make install \ + && make clean + +WORKDIR $ROOTDIR/src + +RUN ln -s /usr/local/lib/libmoja.modules.* /usr/local/bin +RUN rm -Rf /usr/local/src/* +WORKDIR /tmp/flint_runenv \ No newline at end of file diff --git a/Source/moja.cli/CMakeLists.txt b/Source/moja.cli/CMakeLists.txt index 1e5fbcd..60acf2e 100644 --- a/Source/moja.cli/CMakeLists.txt +++ b/Source/moja.cli/CMakeLists.txt @@ -1,27 +1,6 @@ set(EXENAME "moja.cli") -if(MOJA_STATIC) - set(CMAKE_CXX_FLAGS_RELEASE "/MT") - set(CMAKE_CXX_FLAGS_DEBUG "/MTd") - add_definitions(-DUSE_STATIC_BOOST) - set(Boost_USE_STATIC_LIBS ON) -else(MOJA_STATIC) - add_definitions(-DBOOST_ALL_DYN_LINK) - set(Boost_USE_STATIC_LIBS OFF) -endif(MOJA_STATIC) - -find_package(Boost COMPONENTS system thread filesystem date_time chrono program_options log log_setup REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -include_directories( include ../moja.core/include ../moja.flint/include ../moja.flint.configuration/include ../moja.datarepository/include) - -# Poco -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) -endif() +find_package(Boost COMPONENTS program_options REQUIRED) set(MOJA_EXE_SRC src/moja.cpp @@ -29,22 +8,20 @@ set(MOJA_EXE_SRC add_executable(${EXENAME} ${MOJA_EXE_SRC}) -target_link_libraries( - ${EXENAME} - moja.core - moja.flint - moja.flint.configuration - ${Boost_LIBRARIES} - ${SYSLIBS} - ${Poco_FOUNDATION} - ${Poco_JSON} - ) - -add_dependencies(${EXENAME} moja.core) -add_dependencies(${EXENAME} moja.flint) -add_dependencies(${EXENAME} moja.flint.configuration) +target_link_libraries(${EXENAME} + PRIVATE + moja::moja.flint Boost::program_options +) install(TARGETS ${EXENAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin) + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) + +if(MSVC) + INSTALL( + FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/Debug/${EXENAME}.pdb + DESTINATION ${CMAKE_INSTALL_BINDIR} + CONFIGURATIONS Debug + ) +endif() diff --git 
a/Source/moja.cli/src/moja.cpp b/Source/moja.cli/src/moja.cpp index 1fcd895..4f50b22 100644 --- a/Source/moja.cli/src/moja.cpp +++ b/Source/moja.cli/src/moja.cpp @@ -157,12 +157,11 @@ int main(int argc, char* argv[]) { } if (args.count("logging_config")) { - auto loggingConf = args["logging_config"].as(); - if (!checkFilePath(loggingConf)) { - return EXIT_FAILURE; - } - - moja::Logging::setConfigurationFile(loggingConf); + auto loggingConf = args["logging_config"].as(); + if (!checkFilePath(loggingConf)) { + return EXIT_FAILURE; + } + moja::Logging::setConfigurationFile(loggingConf); } if (!args.count("config")) { @@ -172,18 +171,20 @@ int main(int argc, char* argv[]) { return EXIT_SUCCESS; } - auto configPath = args["config"].as>(); + auto configPath = args["config"].as>(); for (const auto& filePath : configPath) { if (!checkFilePath(filePath)) { return EXIT_FAILURE; } } - - auto configProviderPath = args["config_provider"].as>(); - for (const auto& filePath : configProviderPath) { - if (!checkFilePath(filePath)) { - return EXIT_FAILURE; - } + std::vector configProviderPath; + if (args.count("config_provider")) { + configProviderPath = args["config_provider"].as>(); + for (const auto& filePath : configProviderPath) { + if (!checkFilePath(filePath)) { + return EXIT_FAILURE; + } + } } MOJA_LOG_INFO << "Config has files: " << configPath.size(); @@ -207,13 +208,16 @@ int main(int argc, char* argv[]) { for (auto configFilePath : configPath) { MOJA_LOG_INFO << configFilePath; } - MOJA_LOG_INFO << "Using provider configurations: "; - for (auto configFilePath : configProviderPath) { - MOJA_LOG_INFO << configFilePath; - } - - conf::JSON2ConfigurationProvider jsonConfigProvider{configPath, configProviderPath}; - auto config = jsonConfigProvider.createConfiguration(); + if (!configProviderPath.empty()) { + MOJA_LOG_INFO << "Using provider configurations: "; + for (auto configFilePath : configProviderPath) { + MOJA_LOG_INFO << configFilePath; + } + } + auto config = + configProviderPath.empty() + ? 
conf::JSON2ConfigurationProvider(configPath).createConfiguration() + : conf::JSON2ConfigurationProvider(configPath, configProviderPath).createConfiguration(); MOJA_LOG_INFO << "Using operation manager: " << config->localDomain()->operationManagerObject()->name(); diff --git a/Source/moja.core/CMakeLists.txt b/Source/moja.core/CMakeLists.txt index 581b19a..7c8749f 100644 --- a/Source/moja.core/CMakeLists.txt +++ b/Source/moja.core/CMakeLists.txt @@ -4,26 +4,8 @@ string(TOUPPER "${PACKAGE}" LIBNAME_EXPORT) include(${CMAKE_MODULE_PATH}/generate_product_version.cmake) -if(MOJA_STATIC) - set(CMAKE_CXX_FLAGS_RELEASE "/MT") - set(CMAKE_CXX_FLAGS_DEBUG "/MTd") - add_definitions(-DUSE_STATIC_BOOST) - set(Boost_USE_STATIC_LIBS ON) -else(MOJA_STATIC) - add_definitions(-DBOOST_ALL_DYN_LINK) - set(Boost_USE_STATIC_LIBS OFF) -endif(MOJA_STATIC) - -find_package(Boost COMPONENTS atomic system thread filesystem date_time chrono program_options log log_setup REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -# Poco -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) -endif() +find_package(Poco REQUIRED Foundation JSON) +find_package(Boost COMPONENTS log log_setup REQUIRED) if (MSVC) generate_product_version(ProductVersionFiles @@ -90,7 +72,6 @@ set(MOJA_Core_external_tlx_die_sources external/tlx/die/core.cpp ) - source_group("Header Files\\external\\tlx\\container" FILES ${MOJA_Core_external_tlx_container_headers}) source_group("Header Files\\external\\tlx\\die" FILES ${MOJA_Core_external_tlx_die_headers}) source_group("Source Files\\external\\tlx\\die" FILES ${MOJA_Core_external_tlx_die_sources}) @@ -136,47 +117,50 @@ set(MOJA_Core_sources set(SRCS ${MOJA_Core_sources} ${MOJA_Core_headers} ${MOJA_Core_Enum_headers} ${DATE_SRCS} ${FOLLY_SRCS} ${TLX_SRCS}) add_library(${LIBNAME} ${LIB_MODE} ${SRCS} ${ProductVersionFiles}) +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) +#Set target properties set_target_properties(${LIBNAME} PROPERTIES VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} + OUTPUT_NAME ${LIBNAME} DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS + CXX_EXTENSIONS OFF ) -target_compile_definitions(${LIBNAME} PRIVATE FOLLY_NO_CONFIG) +target_include_directories(${LIBNAME} + PUBLIC + $ + $ + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src +) -target_link_libraries(${LIBNAME} - ${Boost_LIBRARIES} - ${Poco_FOUNDATION} - ${Poco_JSON} - ${SYSLIBS} - ) +target_compile_definitions(${LIBNAME} + PUBLIC + BOOST_ALL_DYN_LINK + PRIVATE + FOLLY_NO_CONFIG +) -target_include_directories(${LIBNAME} - PUBLIC - external - include - ) +target_compile_features(${LIBNAME} PUBLIC cxx_std_14) -install( - DIRECTORY include/moja external - DESTINATION include -) +target_link_libraries(${LIBNAME} + PUBLIC + Boost::log Boost::log_setup Poco::Foundation Poco::JSON + PRIVATE + ${SYSLIBS} + ) -install( - TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin -) +############################################## +# Installation instructions +include(GNUInstallDirs) +install(DIRECTORY external/ DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) -if(MSVC) - INSTALL( - FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/Debug/${LIBNAME}${CMAKE_DEBUG_POSTFIX}.pdb - DESTINATION bin - CONFIGURATIONS Debug - ) -endif() +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) if (ENABLE_TESTS) add_subdirectory( tests ) diff --git a/Source/moja.core/cmake/moja.coreConfig.cmake 
b/Source/moja.core/cmake/moja.coreConfig.cmake new file mode 100644 index 0000000..48101af --- /dev/null +++ b/Source/moja.core/cmake/moja.coreConfig.cmake @@ -0,0 +1,7 @@ +include(CMakeFindDependencyMacro) +#find_dependency(Poco REQUIRED COMPONENTS Foundation JSON) +#find_dependency(Boost COMPONENTS log REQUIRED) + +if(NOT TARGET moja::moja.core) + include("${CMAKE_CURRENT_LIST_DIR}/moja.coreTargets.cmake") +endif() \ No newline at end of file diff --git a/Source/moja.core/include/moja/environment_unix.h b/Source/moja.core/include/moja/environment_unix.h index e532a9a..edde9e1 100644 --- a/Source/moja.core/include/moja/environment_unix.h +++ b/Source/moja.core/include/moja/environment_unix.h @@ -6,10 +6,10 @@ namespace moja { class EnvironmentImpl { -public: - static std::string startProcessFolderImpl(); + public: + static std::string startProcessFolderImpl(); }; -} // namespace moja +} // namespace moja -#endif // MOJA_Core_Environment_UNIX_INCLUDED +#endif // MOJA_Core_Environment_UNIX_INCLUDED diff --git a/Source/moja.core/include/moja/environment_win32.h b/Source/moja.core/include/moja/environment_win32.h index d9083cd..762891a 100644 --- a/Source/moja.core/include/moja/environment_win32.h +++ b/Source/moja.core/include/moja/environment_win32.h @@ -6,10 +6,10 @@ namespace moja { class EnvironmentImpl { -public: - static std::string startProcessFolderImpl(); + public: + static std::string startProcessFolderImpl(); }; -} // namespace moja +} // namespace moja -#endif // MOJA_Core_Environment_WIN32_INCLUDED +#endif // MOJA_Core_Environment_WIN32_INCLUDED diff --git a/Source/moja.core/include/moja/environment_win32u.h b/Source/moja.core/include/moja/environment_win32u.h index 5153dea..edf1026 100644 --- a/Source/moja.core/include/moja/environment_win32u.h +++ b/Source/moja.core/include/moja/environment_win32u.h @@ -6,10 +6,10 @@ namespace moja { class EnvironmentImpl { -public: - static std::string startProcessFolderImpl(); + public: + static std::string startProcessFolderImpl(); }; -} // namespace moja +} // namespace moja -#endif // MOJA_Core_Environment_WIN32U_INCLUDED +#endif // MOJA_Core_Environment_WIN32U_INCLUDED diff --git a/Source/moja.core/include/moja/hash.h b/Source/moja.core/include/moja/hash.h index 9e11129..55f32e2 100644 --- a/Source/moja.core/include/moja/hash.h +++ b/Source/moja.core/include/moja/hash.h @@ -6,10 +6,17 @@ #include -#include - +// clang-format off #include // needed by folly +#include +// clang-format on +namespace folly { +template +struct hasher> { + size_t operator()(const Poco::Nullable& key) const { return key.isNull() ? 0 : Hash()(key.value()); } +}; +} // namespace folly namespace moja { using folly::apply; using folly::Hash; @@ -28,11 +35,6 @@ using folly::hash::hash_range; // using folly::hash::SpookyHashV2; -template -struct hasher> { - size_t operator()(const Poco::Nullable& key) const { return key.isNull() ? 
0 : Hash()(key.value()); } -}; - } // namespace hash } // namespace moja diff --git a/Source/moja.core/src/datetime.cpp b/Source/moja.core/src/datetime.cpp index 6d67d12..a290b67 100644 --- a/Source/moja.core/src/datetime.cpp +++ b/Source/moja.core/src/datetime.cpp @@ -1,4 +1,5 @@ #include "moja/datetime.h" + #include "moja/exception.h" using namespace date; @@ -9,104 +10,104 @@ namespace moja { DateTime::~DateTime() {} DateTime DateTime::now() { - auto tp = system_clock::now(); - auto dp = floor(tp); - DateTime date; - date._date= dp; - date._tod=make_time(duration_cast(tp - dp)); - return date; + auto tp = system_clock::now(); + auto dp = floor(tp); + DateTime date; + date._date = dp; + date._tod = make_time(duration_cast(tp - dp)); + return date; } DateTime& DateTime::operator+=(const Timespan& span) { - add_micro( span.duration() ); - return *this; + add_micro(span.duration()); + return *this; } DateTime& DateTime::addYears(int years) { - _date += date::years(years); - return *this; + _date += date::years(years); + return *this; } // This is more complicated than it appears // Simply adding the days in a month to the current DateTime will not be succifient. -// We need to increment only the month, keeping the day the same (if possible). +// We need to increment only the month, keeping the day the same (if possible). // If the resulting date is not valid (ie going from Jan 31 -> Feb 31 shoudl equal Feb 28 (or 29 in a leap year) -// The AddMonths method calculates the resulting month and year, taking into account leap years and the number of days in a month, -// then adjusts the day part of the resulting DateTime object.If the resulting day is not a valid day in the resulting month, the last -// valid day of the resulting month is used. -// For example, March 31st + 1 month = April 30th, and March 31st - 1 month = February 28 for a non - leap year and February 29 for a leap year. -// The time - of - day part of the resulting DateTime object remains the same as this instance. +// The AddMonths method calculates the resulting month and year, taking into account leap years and the number of days +// in a month, then adjusts the day part of the resulting DateTime object.If the resulting day is not a valid day in the +// resulting month, the last valid day of the resulting month is used. For example, March 31st + 1 month = April 30th, +// and March 31st - 1 month = February 28 for a non - leap year and February 29 for a leap year. The time - of - day +// part of the resulting DateTime object remains the same as this instance. 
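+// Illustrative example (assuming the DateTime(year, month, day) constructor used elsewhere in this codebase): +// DateTime(2019, 1, 31).addMonths(1) is expected to yield 2019-02-28, the last valid day of the resulting month.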
DateTime& DateTime::addMonths(int months) { - _date += date::months(months); - if (!_date.ok()) - _date = _date.year() / _date.month() / last; - return *this; + _date += date::months(months); + if (!_date.ok()) _date = _date.year() / _date.month() / last; + return *this; } DateTime& DateTime::addDays(double days) { - auto tp = sys_days(_date) + _tod.to_duration() + duration_cast(duration(days)); - auto dp = floor(tp); - _date = dp; - _tod = make_time(tp - dp); - return *this; + auto tp = + sys_days(_date) + _tod.to_duration() + duration_cast(duration(days)); + auto dp = floor(tp); + _date = dp; + _tod = make_time(tp - dp); + return *this; } void DateTime::add(duration value) { - auto tod = _tod.to_duration() + value; - if (tod > microseconds{ 0 } && tod < days{ 1 }) { - _tod = make_time(duration_cast(tod)); - } else { - auto tp = time_point_cast(sys_days(_date) + tod); - auto dp = floor(tp); - _date = dp; - _tod = make_time(tp - dp); - } + auto tod = _tod.to_duration() + value; + if (tod > microseconds{0} && tod < days{1}) { + _tod = make_time(duration_cast(tod)); + } else { + auto tp = time_point_cast(sys_days(_date) + tod); + auto dp = floor(tp); + _date = dp; + _tod = make_time(tp - dp); + } } void DateTime::add_micro(duration value) { - auto tod = _tod.to_duration() + value; - if (tod > microseconds{ 0 } && tod < days{ 1 }) { - _tod = make_time(duration_cast(tod)); - } else { - auto tp = sys_days(_date) + tod; - auto dp = floor(tp); - _date = dp; - _tod = make_time(duration_cast(tp - dp)); - } + auto tod = _tod.to_duration() + value; + if (tod > microseconds{0} && tod < days{1}) { + _tod = make_time(duration_cast(tod)); + } else { + auto tp = sys_days(_date) + tod; + auto dp = floor(tp); + _date = dp; + _tod = make_time(duration_cast(tp - dp)); + } } DateTime& DateTime::addHours(double hours) { - add(duration(hours)); - return *this; + add(duration(hours)); + return *this; } DateTime& DateTime::addMinutes(double minutes) { - add(duration(minutes)); - return *this; + add(duration(minutes)); + return *this; } DateTime& DateTime::addSeconds(double seconds) { - add(duration(seconds)); - return *this; + add(duration(seconds)); + return *this; } DateTime& DateTime::addMilliseconds(double milliseconds) { - add(duration(milliseconds)); - return *this; + add(duration(milliseconds)); + return *this; } DateTime& DateTime::addMicroseconds(double microseconds) { - add(duration(microseconds)); - return *this; + add(duration(microseconds)); + return *this; } double DateTime::decimalYear() const { - const auto year = _date.year(); - double days = (sys_days(_date) - sys_days{ year / jan / 0 }).count(); - double daysInYear = DateTime::daysInYear(int(year)); - return int(year) + (days / daysInYear); + const auto year = _date.year(); + double days = (sys_days(_date) - sys_days{year / jan / 0}).count(); + double daysInYear = DateTime::daysInYear(int(year)); + return int(year) + (days / daysInYear); } -} // namespace moja +} // namespace moja diff --git a/Source/moja.core/src/environment.cpp b/Source/moja.core/src/environment.cpp index ad30d81..365e1af 100644 --- a/Source/moja.core/src/environment.cpp +++ b/Source/moja.core/src/environment.cpp @@ -10,8 +10,6 @@ namespace moja { -std::string Environment::startProcessFolder() { - return EnvironmentImpl::startProcessFolderImpl(); -} +std::string Environment::startProcessFolder() { return EnvironmentImpl::startProcessFolderImpl(); } -} // namespace moja +} // namespace moja diff --git a/Source/moja.core/src/environment_unix.cpp 
b/Source/moja.core/src/environment_unix.cpp index 63cf26b..b4b5cba 100644 --- a/Source/moja.core/src/environment_unix.cpp +++ b/Source/moja.core/src/environment_unix.cpp @@ -3,26 +3,26 @@ #include #include -#include #include +#include namespace moja { std::string EnvironmentImpl::startProcessFolderImpl() { - static bool bHaveResult = false; - static char path[1024] = ""; + static bool bHaveResult = false; + static char path[1024] = ""; - if (!bHaveResult) { - if (readlink("/proc/self/exe", path, sizeof(path) - 1) == -1) { - int ErrNo = errno; - // unreachable - return ""; - } - bHaveResult = true; - } - auto folder = Poco::Path(path).parent(); - return folder.toString();; + if (!bHaveResult) { + if (readlink("/proc/self/exe", path, sizeof(path) - 1) == -1) { + int ErrNo = errno; + // unreachable + return ""; + } + bHaveResult = true; + } + auto folder = Poco::Path(path).parent(); + return folder.toString(); + ; } -} // namespace moja - +} // namespace moja diff --git a/Source/moja.core/src/environment_win32.cpp b/Source/moja.core/src/environment_win32.cpp index 08aa70f..adc67fc 100644 --- a/Source/moja.core/src/environment_win32.cpp +++ b/Source/moja.core/src/environment_win32.cpp @@ -1,22 +1,23 @@ #include "moja/environment_win32.h" -#include -#include #include #include +#include +#include + #pragma comment(lib, "psapi.lib") namespace moja { std::string EnvironmentImpl::startProcessFolderImpl() { - static char path[512] = ""; - if (!path[0]) { - // Get directory this executable was launched from. - GetModuleFileNameA(NULL, path, sizeof(path) - 1); - } - auto folder = Poco::Path(path).parent(); - return folder.toString(); + static char path[512] = ""; + if (!path[0]) { + // Get directory this executable was launched from. + GetModuleFileNameA(NULL, path, sizeof(path) - 1); + } + auto folder = Poco::Path(path).parent(); + return folder.toString(); } -} // namespace moja +} // namespace moja diff --git a/Source/moja.core/src/environment_win32u.cpp b/Source/moja.core/src/environment_win32u.cpp index 0f7f9d2..e0ebdbf 100644 --- a/Source/moja.core/src/environment_win32u.cpp +++ b/Source/moja.core/src/environment_win32u.cpp @@ -1,22 +1,23 @@ #include "moja/environment_win32u.h" -#include -#include #include -#include #include +#include + +#include +#include namespace moja { static std::string EnvironmentImpl::startProcessFolderImpl() { - static wchar_t path[512] = ""; - if (!path[0]) { - // Get directory this executable was launched from. - GetModuleFileNameW(NULL, path, sizeof(path) - 1); - } - Poco::UnicodeConverter::toUTF8(path, result); - auto folder = Poco::Path(path).parent(); - return folder.toString(); + static wchar_t path[512] = ""; + if (!path[0]) { + // Get directory this executable was launched from. 
+ GetModuleFileNameW(NULL, path, sizeof(path) - 1); + } + Poco::UnicodeConverter::toUTF8(path, result); + auto folder = Poco::Path(path).parent(); + return folder.toString(); } -} // namespace moja +} // namespace moja diff --git a/Source/moja.core/src/exception.cpp b/Source/moja.core/src/exception.cpp index fbf5039..ea7aa3b 100644 --- a/Source/moja.core/src/exception.cpp +++ b/Source/moja.core/src/exception.cpp @@ -1,4 +1,5 @@ #include "moja/exception.h" + #include namespace moja { @@ -8,68 +9,57 @@ Exception::Exception(int code) : _pNested(0), _code(code) {} Exception::Exception(const std::string& msg, int code) : _msg(msg), _pNested(0), _code(code) {} Exception::Exception(const std::string& msg, const std::string& arg, int code) : _msg(msg), _pNested(0), _code(code) { - if (!arg.empty()) { - _msg.append(": "); - _msg.append(arg); - } + if (!arg.empty()) { + _msg.append(": "); + _msg.append(arg); + } } -Exception::Exception(const std::string& msg, const Exception& nested, int code) : _msg(msg), _pNested(nested.clone()), _code(code) {} +Exception::Exception(const std::string& msg, const Exception& nested, int code) + : _msg(msg), _pNested(nested.clone()), _code(code) {} Exception::Exception(const Exception& exc) : std::exception(exc), _msg(exc._msg), _code(exc._code) { - _pNested = exc._pNested ? exc._pNested->clone() : 0; + _pNested = exc._pNested ? exc._pNested->clone() : 0; } -Exception::~Exception() throw() { - delete _pNested; -} +Exception::~Exception() throw() { delete _pNested; } -Exception& Exception::operator = (const Exception& exc) { - if (&exc != this) { - delete _pNested; - _msg = exc._msg; - _pNested = exc._pNested ? exc._pNested->clone() : 0; - _code = exc._code; - } +Exception& Exception::operator=(const Exception& exc) { + if (&exc != this) { + delete _pNested; + _msg = exc._msg; + _pNested = exc._pNested ? 
exc._pNested->clone() : 0; + _code = exc._code; + } - return *this; + return *this; } -const char* Exception::name() const throw() { - return "Exception"; -} +const char* Exception::name() const throw() { return "Exception"; } -const char* Exception::className() const throw() { - return typeid(*this).name(); -} +const char* Exception::className() const throw() { return typeid(*this).name(); } -const char* Exception::what() const throw() { - return name(); -} +const char* Exception::what() const throw() { return name(); } std::string Exception::displayText() const { - std::string txt = name(); - if (!_msg.empty()) { - txt.append(": "); - txt.append(_msg); - } - return txt; + std::string txt = name(); + if (!_msg.empty()) { + txt.append(": "); + txt.append(_msg); + } + return txt; } void Exception::extendedMessage(const std::string& arg) { - if (!arg.empty()) { - if (!_msg.empty()) _msg.append(": "); - _msg.append(arg); - } + if (!arg.empty()) { + if (!_msg.empty()) _msg.append(": "); + _msg.append(arg); + } } -Exception* Exception::clone() const { - return new Exception(*this); -} +Exception* Exception::clone() const { return new Exception(*this); } -void Exception::rethrow() const { - throw *this; -} +void Exception::rethrow() const { throw *this; } MOJA_IMPLEMENT_EXCEPTION(LogicException, Exception, "Logic exception") MOJA_IMPLEMENT_EXCEPTION(AssertionViolationException, LogicException, "Assertion violation") @@ -121,4 +111,4 @@ MOJA_IMPLEMENT_EXCEPTION(UnknownURISchemeException, RuntimeException, "Unknown U MOJA_IMPLEMENT_EXCEPTION(ApplicationException, Exception, "Application exception") MOJA_IMPLEMENT_EXCEPTION(BadCastException, RuntimeException, "Bad cast exception") -} // namespace moja +} // namespace moja diff --git a/Source/moja.core/src/logging.cpp b/Source/moja.core/src/logging.cpp index 7839ff0..ce20133 100644 --- a/Source/moja.core/src/logging.cpp +++ b/Source/moja.core/src/logging.cpp @@ -5,16 +5,15 @@ #include #include -#include -#include #include #include +#include +#include #include namespace moja { - const std::string Logging::_defaultFileName = "logging.conf"; bool Logging::_explicitConfigurationFileSet = false; bool Logging::_explicitConfigurationTextSet = false; @@ -23,56 +22,54 @@ std::string Logging::_explicitConfigurationText = ""; std::string Logging::_loggingConfigurationFile = "unknown"; void Logging::init() { - namespace logging = boost::log; - namespace attrs = boost::log::attributes; + namespace logging = boost::log; + namespace attrs = boost::log::attributes; - static auto initialized = false; - if (initialized) { return; } + static auto initialized = false; + if (initialized) { + return; + } - logging::register_simple_formatter_factory("Severity"); - logging::register_simple_filter_factory("Severity"); + logging::register_simple_formatter_factory("Severity"); + logging::register_simple_filter_factory("Severity"); - // Determine which log config to load, searching the working folder then the exe folder. 
- if (_explicitConfigurationFileSet && Poco::File(_explicitConfigurationFile).exists()) { - std::ifstream loggingConfig(_explicitConfigurationFile); - logging::init_from_stream(loggingConfig); - _loggingConfigurationFile = _explicitConfigurationFile; - } - else if (_explicitConfigurationTextSet && _explicitConfigurationText.length() > 0) { - std::stringstream s; - s << _explicitConfigurationText; - boost::log::init_from_stream(s); - _loggingConfigurationFile = "internal text"; - } - else { - std::string filenameToCheck = Poco::Path::current() + _defaultFileName; - if (Poco::File(filenameToCheck).exists()) { - std::ifstream loggingConfig(filenameToCheck); - logging::init_from_stream(loggingConfig); - _loggingConfigurationFile = filenameToCheck; - } - else { - filenameToCheck = moja::Environment::startProcessFolder() + _defaultFileName; - if (Poco::File(filenameToCheck).exists()) { - std::ifstream loggingConfig(filenameToCheck); - logging::init_from_stream(loggingConfig); - _loggingConfigurationFile = filenameToCheck; - } - else { - std::stringstream s; - s << "[Sinks.console]" << std::endl; - s << "Destination=Console" << std::endl; - s << "Asynchronous = false" << std::endl; - s << "AutoFlush = true" << std::endl; - s << "Format = \"<%TimeStamp%> (%Severity%) - %Message%\"" << std::endl; - s << "Filter = \"%Severity% >= info\"" << std::endl; - boost::log::init_from_stream(s); - _loggingConfigurationFile = "internal default"; - } - } - } + // Determine which log config to load, searching the working folder then the exe folder. + if (_explicitConfigurationFileSet && Poco::File(_explicitConfigurationFile).exists()) { + std::ifstream loggingConfig(_explicitConfigurationFile); + logging::init_from_stream(loggingConfig); + _loggingConfigurationFile = _explicitConfigurationFile; + } else if (_explicitConfigurationTextSet && _explicitConfigurationText.length() > 0) { + std::stringstream s; + s << _explicitConfigurationText; + boost::log::init_from_stream(s); + _loggingConfigurationFile = "internal text"; + } else { + std::string filenameToCheck = Poco::Path::current() + _defaultFileName; + if (Poco::File(filenameToCheck).exists()) { + std::ifstream loggingConfig(filenameToCheck); + logging::init_from_stream(loggingConfig); + _loggingConfigurationFile = filenameToCheck; + } else { + filenameToCheck = moja::Environment::startProcessFolder() + _defaultFileName; + if (Poco::File(filenameToCheck).exists()) { + std::ifstream loggingConfig(filenameToCheck); + logging::init_from_stream(loggingConfig); + _loggingConfigurationFile = filenameToCheck; + } else { + std::stringstream s; + s << "[Sinks.console]" << std::endl; + s << "Destination=Console" << std::endl; + s << "Asynchronous = false" << std::endl; + s << "AutoFlush = true" << std::endl; + s << "Format = \"<%TimeStamp%> (%Severity%) - %Message%\"" << std::endl; + s << "Filter = \"%Severity% >= info\"" << std::endl; + boost::log::init_from_stream(s); + _loggingConfigurationFile = "internal default"; + } + } + } - logging::core::get()->add_global_attribute("TimeStamp", attrs::local_clock()); + logging::core::get()->add_global_attribute("TimeStamp", attrs::local_clock()); } -} // namespace moja +} // namespace moja diff --git a/Source/moja.core/src/mathex.cpp b/Source/moja.core/src/mathex.cpp index b7d841c..b84d659 100644 --- a/Source/moja.core/src/mathex.cpp +++ b/Source/moja.core/src/mathex.cpp @@ -4,4 +4,4 @@ namespace moja { const double MathEx::k0Plus = 0.000000001; -} // namespace moja +} // namespace moja diff --git 
a/Source/moja.core/src/notificationcenter.cpp b/Source/moja.core/src/notificationcenter.cpp index bbb921c..0fb28d7 100644 --- a/Source/moja.core/src/notificationcenter.cpp +++ b/Source/moja.core/src/notificationcenter.cpp @@ -1,5 +1,3 @@ #include "moja/notificationcenter.h" -namespace moja { - -} // namespace moja +namespace moja {} // namespace moja diff --git a/Source/moja.core/src/pocojsonutils.cpp b/Source/moja.core/src/pocojsonutils.cpp index 2a435f3..d00fe9c 100644 --- a/Source/moja.core/src/pocojsonutils.cpp +++ b/Source/moja.core/src/pocojsonutils.cpp @@ -1,4 +1,5 @@ #include "moja/pocojsonutils.h" + #include "moja/coreexceptions.h" #include "moja/exception.h" @@ -7,8 +8,8 @@ #include #include -using moja::FileNotFoundException; using moja::FileName; +using moja::FileNotFoundException; using moja::NotImplementedException; namespace moja { @@ -17,288 +18,255 @@ namespace moja { // "yyyy/mm/dd" -> DateTime DateTime parseSimpleDate(const std::string& yyyymmdd) { - std::vector tokens; - boost::split(tokens, yyyymmdd, boost::is_any_of("/")); - return DateTime(std::stoi(tokens[0]), - std::stoi(tokens[1]), - std::stoi(tokens[2])); + std::vector tokens; + boost::split(tokens, yyyymmdd, boost::is_any_of("/")); + return DateTime(std::stoi(tokens[0]), std::stoi(tokens[1]), std::stoi(tokens[2])); } // -------------------------------------------------------------------------------------------- DynamicVector parsePocoJSONToDynamic(const Poco::JSON::Array::Ptr& val) { - DynamicVector arrayDocument; - const auto count = val->size(); - for (auto i = 0; i < count; i++) { - - if (val->isObject(i)) { - auto object = val->getObject(i); - auto subDocument = parsePocoJSONToDynamic(object); - arrayDocument.push_back(subDocument); - } - else if (val->isArray(i)) { - auto object = val->getArray(i); - auto subDocument = parsePocoJSONToDynamic(object); - arrayDocument.push_back(subDocument); - } - else { - auto object = val->get(i); - if (object.isArray()) { - auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Array")).str(); - BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); - } - else if (object.isBoolean()) { - auto value = object.extract(); - arrayDocument.push_back(value); - } - else if (object.isDeque()) { - auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Deque")).str(); - BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); - } - else if (object.isEmpty()) { - } - else if (object.isInteger()) { - try { - auto value = object.extract(); - arrayDocument.push_back(value); - } - catch (Poco::BadCastException) { - auto value = object.extract(); - arrayDocument.push_back(value); - } - } - else if (object.isList()) { - auto msg = (boost::format("Unhandled data type in parse of json into dynamic - List")).str(); - BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); - } - else if (object.isNumeric()) { - auto value = object.extract(); - arrayDocument.push_back(value); - } - else if (object.isSigned()) { - auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Signed")).str(); - BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); - } - else if (object.isString()) { - auto value = object.extract(); - arrayDocument.push_back(value); - } - else if (object.isStruct()) { - auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Struct")).str(); - 
BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); - } - else if (object.isVector()) { - auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Vector")).str(); - BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); - } - else { - auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Unexpected type")).str(); - BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); - } - } - } - return arrayDocument; + DynamicVector arrayDocument; + const auto count = val->size(); + for (auto i = 0; i < count; i++) { + if (val->isObject(i)) { + auto object = val->getObject(i); + auto subDocument = parsePocoJSONToDynamic(object); + arrayDocument.push_back(subDocument); + } else if (val->isArray(i)) { + auto object = val->getArray(i); + auto subDocument = parsePocoJSONToDynamic(object); + arrayDocument.push_back(subDocument); + } else { + auto object = val->get(i); + if (object.isArray()) { + auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Array")).str(); + BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); + } else if (object.isBoolean()) { + auto value = object.extract(); + arrayDocument.push_back(value); + } else if (object.isDeque()) { + auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Deque")).str(); + BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); + } else if (object.isEmpty()) { + } else if (object.isInteger()) { + try { + auto value = object.extract(); + arrayDocument.push_back(value); + } catch (Poco::BadCastException) { + auto value = object.extract(); + arrayDocument.push_back(value); + } + } else if (object.isList()) { + auto msg = (boost::format("Unhandled data type in parse of json into dynamic - List")).str(); + BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); + } else if (object.isNumeric()) { + auto value = object.extract(); + arrayDocument.push_back(value); + } else if (object.isSigned()) { + auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Signed")).str(); + BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); + } else if (object.isString()) { + auto value = object.extract(); + arrayDocument.push_back(value); + } else if (object.isStruct()) { + auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Struct")).str(); + BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); + } else if (object.isVector()) { + auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Vector")).str(); + BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); + } else { + auto msg = (boost::format("Unhandled data type in parse of json into dynamic - Unexpected type")).str(); + BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); + } + } + } + return arrayDocument; } // -------------------------------------------------------------------------------------------- DynamicVar parsePocoJSONToDynamic(const Poco::JSON::Object::Ptr& val) { - const auto& data = *(val.get()); + const auto& data = *(val.get()); - // Check for flint dspecial data $date, $time_series etc - if (data.size() == 1) { - auto item = *data.begin(); - if (item.first == "$date") { - const auto& dateStr = item.second.extract(); - DynamicVar result = parseSimpleDate(dateStr); - 
         return result;
-      }
-   }
-   auto document = DynamicObject();
-   for (auto& var : data) {
-      if (val->isObject(var.first)) {
-         const auto& object = var.second.extract();
-         const auto subDocument = parsePocoJSONToDynamic(object);
-         document[var.first] = subDocument;
-      }
-      else if (val->isArray(var.first)) {
-         const auto object = var.second.extract();
-         const auto subDocument = parsePocoJSONToDynamic(object);
-         document[var.first] = subDocument;
-      }
-      else {
-         if (var.second.isArray()) {
-            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Array")).str();
-            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
-         }
-         else if (var.second.isBoolean()) {
-            auto value = var.second.extract();
-            document[var.first] = value;
-         }
-         else if (var.second.isDeque()) {
-            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Deque")).str();
-            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
-         }
-         else if (var.second.isEmpty()) {
-         }
-         else if (var.second.isInteger()) {
-            try {
-               auto value = var.second.extract();
-               document[var.first] = value;
-            }
-            catch (Poco::BadCastException) {
-               auto value = var.second.extract();
-               document[var.first] = value;
-            }
-         }
-         else if (var.second.isList()) {
-            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - List")).str();
-            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
-         }
-         else if (var.second.isNumeric()) {
-            auto value = var.second.extract();
-            document[var.first] = value;
-         }
-         else if (var.second.isSigned()) {
-            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Signed")).str();
-            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
-         }
-         else if (var.second.isString()) {
-            auto value = var.second.extract();
-            document[var.first] = value;
-         }
-         else if (var.second.isStruct()) {
-            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Struct")).str();
-            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
-         }
-         else if (var.second.isVector()) {
-            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Vector")).str();
-            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
-         }
-         else {
-            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Unexpected type")).str();
-            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
-         }
-      }
-   }
-   return document;
+   // Check for flint dspecial data $date, $time_series etc
+   if (data.size() == 1) {
+      auto item = *data.begin();
+      if (item.first == "$date") {
+         const auto& dateStr = item.second.extract();
+         DynamicVar result = parseSimpleDate(dateStr);
+         return result;
+      }
+   }
+   auto document = DynamicObject();
+   for (auto& var : data) {
+      if (val->isObject(var.first)) {
+         const auto& object = var.second.extract();
+         const auto subDocument = parsePocoJSONToDynamic(object);
+         document[var.first] = subDocument;
+      } else if (val->isArray(var.first)) {
+         const auto object = var.second.extract();
+         const auto subDocument = parsePocoJSONToDynamic(object);
+         document[var.first] = subDocument;
+      } else {
+         if (var.second.isArray()) {
+            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Array")).str();
+            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
+         } else if (var.second.isBoolean()) {
+            auto value = var.second.extract();
+            document[var.first] = value;
+         } else if (var.second.isDeque()) {
+            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Deque")).str();
+            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
+         } else if (var.second.isEmpty()) {
+         } else if (var.second.isInteger()) {
+            try {
+               auto value = var.second.extract();
+               document[var.first] = value;
+            } catch (Poco::BadCastException) {
+               auto value = var.second.extract();
+               document[var.first] = value;
+            }
+         } else if (var.second.isList()) {
+            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - List")).str();
+            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
+         } else if (var.second.isNumeric()) {
+            auto value = var.second.extract();
+            document[var.first] = value;
+         } else if (var.second.isSigned()) {
+            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Signed")).str();
+            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
+         } else if (var.second.isString()) {
+            auto value = var.second.extract();
+            document[var.first] = value;
+         } else if (var.second.isStruct()) {
+            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Struct")).str();
+            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
+         } else if (var.second.isVector()) {
+            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Vector")).str();
+            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
+         } else {
+            auto msg = (boost::format("Unhandled data type in parse of json into Dynamic - Unexpected type")).str();
+            BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
+         }
+      }
+   }
+   return document;
 }
 // --------------------------------------------------------------------------------------------
 DynamicVar parsePocoJSONToDynamic(const DynamicVar& data) {
-   if (data.isEmpty())
-      return DynamicVar();
-   if (data.type() == typeid(Poco::JSON::Object::Ptr)) {
-      const auto& object = data.extract();
-      return parsePocoJSONToDynamic(object);
-   }
-   if (data.type() == typeid(Poco::JSON::Array::Ptr)) {
-      const auto& object = data.extract();
-      return parsePocoJSONToDynamic(object);
-   }
-   if (data.isEmpty()) {
-      return DynamicVar();
-   }
-   if (data.isBoolean()) {
-      DynamicVar result = data.extract();
-      return result;
-   }
-   if (data.isString()) {
-      DynamicVar result = data.extract();
-      return result;
-   }
-   if (data.isInteger()) {
-      DynamicVar result = data.extract();
-      return result;
-   }
-   if (data.isNumeric()) {
-      DynamicVar result = data.extract();
-      return result;
-   }
-   auto msg = "Unhandled data type in parse of json into dynamic";
-   BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg));
+   if (data.isEmpty()) return DynamicVar();
+   if (data.type() == typeid(Poco::JSON::Object::Ptr)) {
+      const auto& object = data.extract();
+      return parsePocoJSONToDynamic(object);
+   }
+   if (data.type() == typeid(Poco::JSON::Array::Ptr)) {
+      const auto& object = data.extract();
+      return parsePocoJSONToDynamic(object);
+   }
+   if (data.isEmpty()) {
+      return DynamicVar();
+   }
+   if (data.isBoolean()) {
+      DynamicVar result = data.extract();
+      return result;
+   }
+   if (data.isString()) {
+      DynamicVar result = data.extract();
+      return result;
+   }
+   if (data.isInteger()) {
+      DynamicVar result = data.extract();
+      return result;
+   }
+   if (data.isNumeric()) {
+      DynamicVar result =
data.extract(); + return result; + } + auto msg = "Unhandled data type in parse of json into dynamic"; + BOOST_THROW_EXCEPTION(CoreAssertionViolationException() << moja::AssertMsg(msg)); } // -------------------------------------------------------------------------------------------- DynamicVar parsePocoVarToDynamic(const DynamicVar& var) { - if (var.isEmpty()) { - return DynamicVar(); - } - if (var.isString()) { - return DynamicVar(var.extract()); - } - if (var.isBoolean()) { - return DynamicVar(var.extract()); - } - if (var.isInteger()) { - try { - return DynamicVar(var.extract()); - } - catch (Poco::BadCastException) { - return DynamicVar(var.convert()); - } - } - if (var.isNumeric()) { - return DynamicVar(var.extract()); - } - if (var.type() == typeid(Poco::JSON::Array::Ptr)) { - auto arr = var.extract(); - std::vector values; - for (auto it = arr->begin(); it != arr->end(); ++it) { - values.push_back(parsePocoVarToDynamic(*it)); - } - return values; - } - if (var.isArray()) { - auto arr = var.extract(); - std::vector values; - for (auto value : arr) { - values.push_back(parsePocoVarToDynamic(value)); - } - return values; - } - if (var.isStruct()) { - auto s = var.extract(); - if (s.size() == 1) { // We may have a special data type here - "$date" ... - auto keyValPair = s.begin(); - if (keyValPair->first == "$date") { - // we have found a date - // so are expecting a string as the value - // Expecting { "$date" : "2006/01/01" } - auto dateStr = keyValPair->second.extract(); - DynamicVar result = parseSimpleDate(dateStr); - return result; - } - if (keyValPair->first == "$uint32") { - // we have found a uint32 - // so are expecting a string as the value - // Expecting { "$uint32" : 1158849901 } - int num = keyValPair->second; - DynamicVar result = UInt32(num); - return result; - } - } - DynamicObject result; - for (auto item : s) { - result[item.first] = parsePocoVarToDynamic(item.second); - } - return result; - } + if (var.isEmpty()) { + return DynamicVar(); + } + if (var.isString()) { + return DynamicVar(var.extract()); + } + if (var.isBoolean()) { + return DynamicVar(var.extract()); + } + if (var.isInteger()) { + try { + return DynamicVar(var.extract()); + } catch (Poco::BadCastException) { + return DynamicVar(var.convert()); + } + } + if (var.isNumeric()) { + return DynamicVar(var.extract()); + } + if (var.type() == typeid(Poco::JSON::Array::Ptr)) { + auto arr = var.extract(); + std::vector values; + for (auto it = arr->begin(); it != arr->end(); ++it) { + values.push_back(parsePocoVarToDynamic(*it)); + } + return values; + } + if (var.isArray()) { + auto arr = var.extract(); + std::vector values; + for (auto value : arr) { + values.push_back(parsePocoVarToDynamic(value)); + } + return values; + } + if (var.isStruct()) { + auto s = var.extract(); + if (s.size() == 1) { // We may have a special data type here - "$date" ... 
+ auto keyValPair = s.begin(); + if (keyValPair->first == "$date") { + // we have found a date + // so are expecting a string as the value + // Expecting { "$date" : "2006/01/01" } + auto dateStr = keyValPair->second.extract(); + DynamicVar result = parseSimpleDate(dateStr); + return result; + } + if (keyValPair->first == "$uint32") { + // we have found a uint32 + // so are expecting a string as the value + // Expecting { "$uint32" : 1158849901 } + int num = keyValPair->second; + DynamicVar result = UInt32(num); + return result; + } + } + DynamicObject result; + for (auto item : s) { + result[item.first] = parsePocoVarToDynamic(item.second); + } + return result; + } - try { - auto obj = var.extract(); - DynamicObject result; - for (auto kvp : *obj) { - result[kvp.first] = parsePocoVarToDynamic(kvp.second); - } - return result; - } - catch (std::exception) {} + try { + auto obj = var.extract(); + DynamicObject result; + for (auto kvp : *obj) { + result[kvp.first] = parsePocoVarToDynamic(kvp.second); + } + return result; + } catch (std::exception) { + } - auto msg = (boost::format("Unhandled data type in parse of Poco::Dynamic::Var into Dynamic")).str(); - BOOST_THROW_EXCEPTION(CoreUnhandledType() << moja::TypeErrorMsg(msg)); + auto msg = (boost::format("Unhandled data type in parse of Poco::Dynamic::Var into Dynamic")).str(); + BOOST_THROW_EXCEPTION(CoreUnhandledType() << moja::TypeErrorMsg(msg)); } -} // namespace moja \ No newline at end of file +} // namespace moja \ No newline at end of file diff --git a/Source/moja.core/src/publisher.cpp b/Source/moja.core/src/publisher.cpp index f924c91..f69d5fd 100644 --- a/Source/moja.core/src/publisher.cpp +++ b/Source/moja.core/src/publisher.cpp @@ -3,15 +3,14 @@ namespace moja { void Publisher::AddSubscriber(std::shared_ptr subscriber) { - if (subscriber == nullptr) - throw std::invalid_argument("subscriber"); - _subscribers.push_back(subscriber); + if (subscriber == nullptr) throw std::invalid_argument("subscriber"); + _subscribers.push_back(subscriber); } void Publisher::SendMessage(std::shared_ptr message) const { - for (auto& subscriber : _subscribers) { - subscriber->ReceiveMessage(message); - } + for (auto& subscriber : _subscribers) { + subscriber->ReceiveMessage(message); + } } -} // moja +} // namespace moja diff --git a/Source/moja.core/src/spooky.cpp b/Source/moja.core/src/spooky.cpp deleted file mode 100644 index 34060e3..0000000 --- a/Source/moja.core/src/spooky.cpp +++ /dev/null @@ -1,310 +0,0 @@ -// Spooky Hash -// A 128-bit noncryptographic hash, for checksums and table lookup -// By Bob Jenkins. Public domain. -// Oct 31 2010: published framework, disclaimer ShortHash isn't right -// Nov 7 2010: disabled ShortHash -// Oct 31 2011: replace End, ShortMix, ShortEnd, enable ShortHash again -// April 10 2012: buffer overflow on platforms without unaligned reads -// July 12 2012: was passing out variables in final to in/out in short -// July 30 2012: I reintroduced the buffer overflow -// August 5 2012: SpookyV2: d = should be d += in short hash, and remove extra mix from long hash - -#include "moja/hash/spooky.h" -#include "moja/types.h" - -#include - -#define ALLOW_UNALIGNED_READS 1 - -namespace moja { -namespace hash { - -// -// short hash ... it could be used on any message, -// but it's used by Spooky just for short messages. 
-// -void SpookyHash::Short(const void *message, size_t length, moja::UInt64 *hash1, moja::UInt64 *hash2) { - moja::UInt64 buf[2 * sc_numVars]; - union { - const moja::UInt8 *p8; - moja::UInt32 *p32; - moja::UInt64 *p64; - size_t i; - } u; - - u.p8 = (const moja::UInt8 *)message; - - if (!ALLOW_UNALIGNED_READS && (u.i & 0x7)) { - memcpy(buf, message, length); - u.p64 = buf; - } - - size_t remainder = length % 32; - moja::UInt64 a = *hash1; - moja::UInt64 b = *hash2; - moja::UInt64 c = sc_const; - moja::UInt64 d = sc_const; - - if (length > 15) { - const moja::UInt64 *end = u.p64 + (length / 32) * 4; - - // handle all complete sets of 32 bytes - for (; u.p64 < end; u.p64 += 4) { - c += u.p64[0]; - d += u.p64[1]; - ShortMix(a, b, c, d); - a += u.p64[2]; - b += u.p64[3]; - } - - //Handle the case of 16+ remaining bytes. - if (remainder >= 16) { - c += u.p64[0]; - d += u.p64[1]; - ShortMix(a, b, c, d); - u.p64 += 2; - remainder -= 16; - } - } - - // Handle the last 0..15 bytes, and its length - d += ((moja::UInt64)length) << 56; - switch (remainder) { - case 15: - d += ((moja::UInt64)u.p8[14]) << 48; - case 14: - d += ((moja::UInt64)u.p8[13]) << 40; - case 13: - d += ((moja::UInt64)u.p8[12]) << 32; - case 12: - d += u.p32[2]; - c += u.p64[0]; - break; - case 11: - d += ((moja::UInt64)u.p8[10]) << 16; - case 10: - d += ((moja::UInt64)u.p8[9]) << 8; - case 9: - d += (moja::UInt64)u.p8[8]; - case 8: - c += u.p64[0]; - break; - case 7: - c += ((moja::UInt64)u.p8[6]) << 48; - case 6: - c += ((moja::UInt64)u.p8[5]) << 40; - case 5: - c += ((moja::UInt64)u.p8[4]) << 32; - case 4: - c += u.p32[0]; - break; - case 3: - c += ((moja::UInt64)u.p8[2]) << 16; - case 2: - c += ((moja::UInt64)u.p8[1]) << 8; - case 1: - c += (moja::UInt64)u.p8[0]; - break; - case 0: - c += sc_const; - d += sc_const; - } - ShortEnd(a, b, c, d); - *hash1 = a; - *hash2 = b; -} - -// do the whole hash in one call -void SpookyHash::Hash128(const void *message, size_t length, moja::UInt64 *hash1, moja::UInt64 *hash2) { - if (length < sc_bufSize) { - Short(message, length, hash1, hash2); - return; - } - - moja::UInt64 h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11; - moja::UInt64 buf[sc_numVars]; - moja::UInt64 *end; - union { - const moja::UInt8 *p8; - moja::UInt64 *p64; - size_t i; - } u; - size_t remainder; - - h0 = h3 = h6 = h9 = *hash1; - h1 = h4 = h7 = h10 = *hash2; - h2 = h5 = h8 = h11 = sc_const; - - u.p8 = (const moja::UInt8 *)message; - end = u.p64 + (length / sc_blockSize)*sc_numVars; - - // handle all whole sc_blockSize blocks of bytes - if (ALLOW_UNALIGNED_READS || ((u.i & 0x7) == 0)) { - while (u.p64 < end) { - Mix(u.p64, h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - u.p64 += sc_numVars; - } - } else { - while (u.p64 < end) { - memcpy(buf, u.p64, sc_blockSize); - Mix(buf, h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - u.p64 += sc_numVars; - } - } - - // handle the last partial block of sc_blockSize bytes - remainder = (length - ((const moja::UInt8 *)end - (const moja::UInt8 *)message)); - memcpy(buf, end, remainder); - memset(((moja::UInt8 *)buf) + remainder, 0, sc_blockSize - remainder); - ((moja::UInt8 *)buf)[sc_blockSize - 1] = remainder; - - // do some final mixing - End(buf, h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - *hash1 = h0; - *hash2 = h1; -} - -// init spooky state -void SpookyHash::Init(moja::UInt64 seed1, moja::UInt64 seed2) { - m_length = 0; - m_remainder = 0; - m_state[0] = seed1; - m_state[1] = seed2; -} - -// add a message fragment to the state -void SpookyHash::Update(const 
void *message, size_t length) { - moja::UInt64 h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11; - size_t newLength = length + m_remainder; - moja::UInt8 remainder; - union { - const moja::UInt8 *p8; - moja::UInt64 *p64; - size_t i; - } u; - const moja::UInt64 *end; - - // Is this message fragment too short? If it is, stuff it away. - if (newLength < sc_bufSize) { - memcpy(&((moja::UInt8 *)m_data)[m_remainder], message, length); - m_length = length + m_length; - m_remainder = (moja::UInt8)newLength; - return; - } - - // init the variables - if (m_length < sc_bufSize) { - h0 = h3 = h6 = h9 = m_state[0]; - h1 = h4 = h7 = h10 = m_state[1]; - h2 = h5 = h8 = h11 = sc_const; - } else { - h0 = m_state[0]; - h1 = m_state[1]; - h2 = m_state[2]; - h3 = m_state[3]; - h4 = m_state[4]; - h5 = m_state[5]; - h6 = m_state[6]; - h7 = m_state[7]; - h8 = m_state[8]; - h9 = m_state[9]; - h10 = m_state[10]; - h11 = m_state[11]; - } - m_length = length + m_length; - - // if we've got anything stuffed away, use it now - if (m_remainder) { - moja::UInt8 prefix = sc_bufSize - m_remainder; - memcpy(&(((moja::UInt8 *)m_data)[m_remainder]), message, prefix); - u.p64 = m_data; - Mix(u.p64, h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - Mix(&u.p64[sc_numVars], h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - u.p8 = ((const moja::UInt8 *)message) + prefix; - length -= prefix; - } else { - u.p8 = (const moja::UInt8 *)message; - } - - // handle all whole blocks of sc_blockSize bytes - end = u.p64 + (length / sc_blockSize)*sc_numVars; - remainder = (moja::UInt8)(length - ((const moja::UInt8 *)end - u.p8)); - if (ALLOW_UNALIGNED_READS || (u.i & 0x7) == 0) { - while (u.p64 < end) { - Mix(u.p64, h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - u.p64 += sc_numVars; - } - } else { - while (u.p64 < end) { - memcpy(m_data, u.p8, sc_blockSize); - Mix(m_data, h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - u.p64 += sc_numVars; - } - } - - // stuff away the last few bytes - m_remainder = remainder; - memcpy(m_data, end, remainder); - - // stuff away the variables - m_state[0] = h0; - m_state[1] = h1; - m_state[2] = h2; - m_state[3] = h3; - m_state[4] = h4; - m_state[5] = h5; - m_state[6] = h6; - m_state[7] = h7; - m_state[8] = h8; - m_state[9] = h9; - m_state[10] = h10; - m_state[11] = h11; -} - -// report the hash for the concatenation of all message fragments so far -void SpookyHash::Final(moja::UInt64 *hash1, moja::UInt64 *hash2) { - // init the variables - if (m_length < sc_bufSize) { - *hash1 = m_state[0]; - *hash2 = m_state[1]; - Short(m_data, m_length, hash1, hash2); - return; - } - - const moja::UInt64 *data = (const moja::UInt64 *)m_data; - moja::UInt8 remainder = m_remainder; - - moja::UInt64 h0 = m_state[0]; - moja::UInt64 h1 = m_state[1]; - moja::UInt64 h2 = m_state[2]; - moja::UInt64 h3 = m_state[3]; - moja::UInt64 h4 = m_state[4]; - moja::UInt64 h5 = m_state[5]; - moja::UInt64 h6 = m_state[6]; - moja::UInt64 h7 = m_state[7]; - moja::UInt64 h8 = m_state[8]; - moja::UInt64 h9 = m_state[9]; - moja::UInt64 h10 = m_state[10]; - moja::UInt64 h11 = m_state[11]; - - if (remainder >= sc_blockSize) { - // m_data can contain two blocks; handle any whole first block - Mix(data, h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - data += sc_numVars; - remainder -= sc_blockSize; - } - - // mix in the last partial block, and the length mod sc_blockSize - memset(&((moja::UInt8 *)data)[remainder], 0, (sc_blockSize - remainder)); - - ((moja::UInt8 *)data)[sc_blockSize - 1] = remainder; - - // do some final mixing - 
End(data, h0, h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11); - - *hash1 = h0; - *hash2 = h1; -} - -} // namespace hash -} // namespace moja diff --git a/Source/moja.core/src/timespan.cpp b/Source/moja.core/src/timespan.cpp index 901a0a4..02aa4f2 100644 --- a/Source/moja.core/src/timespan.cpp +++ b/Source/moja.core/src/timespan.cpp @@ -5,94 +5,70 @@ namespace moja { -Timespan::Timespan() : - _span(0) {} +Timespan::Timespan() : _span(0) {} -Timespan::Timespan(std::chrono::microseconds microSeconds) : - _span(microSeconds) {} +Timespan::Timespan(std::chrono::microseconds microSeconds) : _span(microSeconds) {} -Timespan::Timespan(long seconds, long microSeconds) : - _span(std::chrono::seconds{ seconds } + - std::chrono::microseconds{ microSeconds }) {} +Timespan::Timespan(long seconds, long microSeconds) + : _span(std::chrono::seconds{seconds} + std::chrono::microseconds{microSeconds}) {} -Timespan::Timespan(int days, int hours, int minutes, int seconds, int microSeconds) : - _span(std::chrono::microseconds { microSeconds } - + std::chrono::seconds { seconds } - + std::chrono::minutes { minutes } - + std::chrono::hours { hours } - + date::days{ days }) {} +Timespan::Timespan(int days, int hours, int minutes, int seconds, int microSeconds) + : _span(std::chrono::microseconds{microSeconds} + std::chrono::seconds{seconds} + std::chrono::minutes{minutes} + + std::chrono::hours{hours} + date::days{days}) {} -Timespan::Timespan(const Timespan& timespan) : - _span(timespan._span) {} +Timespan::Timespan(const Timespan& timespan) : _span(timespan._span) {} -int Timespan::days() const { - return std::chrono::duration_cast(_span).count(); -} +int Timespan::days() const { return std::chrono::duration_cast(_span).count(); } double Timespan::totalDays() const { - using fdays = std::chrono::duration; - return std::chrono::duration_cast(_span).count(); + using fdays = std::chrono::duration; + return std::chrono::duration_cast(_span).count(); } -Timespan& Timespan::operator = (const Timespan& timespan) { - _span = timespan._span; - return *this; +Timespan& Timespan::operator=(const Timespan& timespan) { + _span = timespan._span; + return *this; } Timespan& Timespan::assign(int days, int hours, int minutes, int seconds, int microSeconds) { - _span = std::chrono::microseconds{ microSeconds } - +std::chrono::seconds{ seconds } - +std::chrono::minutes{ minutes } - +std::chrono::hours{ hours } - +date::days{ days }; - return *this; + _span = std::chrono::microseconds{microSeconds} + std::chrono::seconds{seconds} + std::chrono::minutes{minutes} + + std::chrono::hours{hours} + date::days{days}; + return *this; } Timespan& Timespan::assign(long seconds, long microSeconds) { - _span = std::chrono::seconds { seconds } + - std::chrono::microseconds { microSeconds }; - return *this; + _span = std::chrono::seconds{seconds} + std::chrono::microseconds{microSeconds}; + return *this; } -void Timespan::swap(Timespan& timespan) MOJA_NOEXCEPT -{ - std::swap(_span, timespan._span); -} +void Timespan::swap(Timespan& timespan) MOJA_NOEXCEPT { std::swap(_span, timespan._span); } -Timespan Timespan::operator + (const Timespan& d) const { - return Timespan(_span + d._span); -} +Timespan Timespan::operator+(const Timespan& d) const { return Timespan(_span + d._span); } -Timespan Timespan::operator - (const Timespan& d) const { - return Timespan(_span - d._span); -} +Timespan Timespan::operator-(const Timespan& d) const { return Timespan(_span - d._span); } -Timespan& Timespan::operator += (const Timespan& d) { - _span += d._span; - 
return *this; +Timespan& Timespan::operator+=(const Timespan& d) { + _span += d._span; + return *this; } -Timespan& Timespan::operator -= (const Timespan& d) { - _span -= d._span; - return *this; +Timespan& Timespan::operator-=(const Timespan& d) { + _span -= d._span; + return *this; } -Timespan Timespan::operator+(const std::chrono::microseconds& d) const { - return Timespan(_span + d); -} +Timespan Timespan::operator+(const std::chrono::microseconds& d) const { return Timespan(_span + d); } -Timespan Timespan::operator - (const std::chrono::microseconds& d) const { - return Timespan(_span - d); -} +Timespan Timespan::operator-(const std::chrono::microseconds& d) const { return Timespan(_span - d); } -Timespan& Timespan::operator += (const std::chrono::microseconds& d) { - _span += d; - return *this; +Timespan& Timespan::operator+=(const std::chrono::microseconds& d) { + _span += d; + return *this; } -Timespan& Timespan::operator -= (const std::chrono::microseconds& d) { - _span -= d; - return *this; +Timespan& Timespan::operator-=(const std::chrono::microseconds& d) { + _span -= d; + return *this; } -} // namespace moja +} // namespace moja diff --git a/Source/moja.core/tests/CMakeLists.txt b/Source/moja.core/tests/CMakeLists.txt index 213a81e..46438fb 100644 --- a/Source/moja.core/tests/CMakeLists.txt +++ b/Source/moja.core/tests/CMakeLists.txt @@ -1,15 +1,6 @@ -### Unit test ### set(TESTUNIT "${LIBNAME}.test") -find_package(Boost COMPONENTS system filesystem unit_test_framework REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() +find_package(Boost COMPONENTS unit_test_framework REQUIRED) configure_file(../../templates/unittestdefinition.cpp ${CMAKE_CURRENT_SOURCE_DIR}/src/_unittestdefinition.cpp) @@ -27,21 +18,18 @@ set(TEST_SRCS src/timespantests.cpp ) -add_definitions(-DBOOST_TEST_DYN_LINK) - add_executable(${TESTUNIT} ${TEST_SRCS}) add_test(NAME ${LIBNAME} WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND ${TESTUNIT} --result_code=yes --report_level=no) -target_link_libraries( - ${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS} +target_link_libraries(${TESTUNIT} + PRIVATE + moja::moja.core + Boost::unit_test_framework ) -add_dependencies(${TESTUNIT} moja.core testmodule) +add_dependencies(${TESTUNIT} testmodule) IF (RUN_UNIT_TESTS_ON_BUILD) add_custom_command(TARGET ${TESTUNIT} POST_BUILD COMMAND ${CMAKE_CTEST_COMMAND} @@ -49,30 +37,4 @@ add_custom_command(TARGET ${TESTUNIT} ENDIF () add_subdirectory(testmodule) -### End unit test ### - -### Begin PATH boilerplate for dependent libraries -- adapted from ### -### http://www.cmake.org/pipermail/cmake/2009-May/029464.html ### -### This allows unit tests to run without having to manually add ### -### dependent libraries to the system path. ### - -# Include an entry for each library that needs to be in the system path. -file(TO_NATIVE_PATH "${Boost_LIBRARY_DIR}" boost_lib) - -# Determine which environment variable controls the locating of -# DLL's and set that variable. -if(WIN32) - set(LD_VARNAME "PATH") - set(LD_PATH "${boost_lib};$ENV{PATH}") - - # IMPORTANT NOTE: The set_tests_properties(), below, internally - # stores its name/value pairs with a semicolon delimiter. - # because of this we must protect the semicolons in the path. 
- string(REPLACE ";" "\\;" LD_PATH "${LD_PATH}") -else() - set(LD_VARNAME "LD_LIBRARY_PATH") - set(LD_PATH "${boost_lib}:$ENV{LD_LIBRARY_PATH}") -endif() -set_tests_properties(${LIBNAME} PROPERTIES ENVIRONMENT "${LD_VARNAME}=${LD_PATH}") -### End PATH boilerplate ### diff --git a/Source/moja.core/tests/testmodule/CMakeLists.txt b/Source/moja.core/tests/testmodule/CMakeLists.txt index 9227e43..7119ac4 100644 --- a/Source/moja.core/tests/testmodule/CMakeLists.txt +++ b/Source/moja.core/tests/testmodule/CMakeLists.txt @@ -8,30 +8,24 @@ set(TestModule_sources src/externalmodel.cpp ) -include_directories( include ) - set (SRCS ${TestModule_sources} ${TestModule_headers}) add_library( ${LIBNAME} ${LIB_MODE} ${SRCS} ) +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) + set_target_properties( ${LIBNAME} PROPERTIES VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} DEFINE_SYMBOL TestModule_EXPORTS) -target_link_libraries( - ${LIBNAME} - moja.core - ) - -#install( -# DIRECTORY include/moja -# DESTINATION include -# PATTERN ".svn" EXCLUDE -# ) -# -#install( -# TARGETS ${LIBNAME} -# LIBRARY DESTINATION lib${LIB_SUFFIX} -# ARCHIVE DESTINATION lib${LIB_SUFFIX} -# RUNTIME DESTINATION bin -# ) \ No newline at end of file +target_include_directories(${LIBNAME} + PUBLIC + $ + $ +) + +target_link_libraries(${LIBNAME} + PUBLIC + moja::moja.core +) + diff --git a/Source/moja.datarepository/CMakeLists.txt b/Source/moja.datarepository/CMakeLists.txt index d7f6d04..ff090ac 100644 --- a/Source/moja.datarepository/CMakeLists.txt +++ b/Source/moja.datarepository/CMakeLists.txt @@ -4,37 +4,9 @@ string(TOUPPER "${PACKAGE}" LIBNAME_EXPORT) include(${CMAKE_MODULE_PATH}/generate_product_version.cmake) -if(MOJA_STATIC) - set(CMAKE_CXX_FLAGS_RELEASE "/MT") - set(CMAKE_CXX_FLAGS_DEBUG "/MTd") - add_definitions(-DUSE_STATIC_BOOST) - set(Boost_USE_STATIC_LIBS ON) -else(MOJA_STATIC) - add_definitions(-DBOOST_ALL_DYN_LINK) - set(Boost_USE_STATIC_LIBS OFF) -endif(MOJA_STATIC) - -find_package(Boost) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -# Poco -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) -endif() - -# SQLite -find_package(SQLite) - -include_directories( - include - ../moja.core/include - ../moja.flint.configuration/include - ${Poco_INCLUDE_DIRS} - ${Sqlite_INCLUDE_PATH} - ) +find_package(Poco REQUIRED Foundation JSON) + +find_package(SQLite3 REQUIRED) # Version Info if (MSVC) @@ -57,37 +29,15 @@ configure_file(../templates/exports.h ${CMAKE_CURRENT_SOURCE_DIR}/include/moja/$ set(MOJA_DataRepository_Relational_headers include/moja/${PACKAGE}/providerrelationalsqlite.h -# include/moja/${PACKAGE}/providerrelationalpocopostgresql.h -# include/moja/${PACKAGE}/providerrelationallibpqpostgresql.h ) set(MOJA_DataRepository_Relational_sources src/providerrelationalsqlite.cpp - # src/providerrelationalpocopostgresql.cpp -# src/providerrelationallibpqpostgresql.cpp ) set(MOJA_DataRepository_NoSQL_headers -## include/moja/${PACKAGE}/providernosqlpocomongodb.h -## include/moja/${PACKAGE}/providernosqlpocojson.h ) set(MOJA_DataRepository_NoSQL_sources -## src/providernosqlpocomongodb.cpp -## src/providernosqlpocojson.cpp -) - -set(MOJA_DataRepository_SpatialRasterGDAL_headers - include/moja/${PACKAGE}/providerspatialrastergdal.h -) -set(MOJA_DataRepository_SpatialRasterGDAL_sources - src/providerspatialrastergdal.cpp -) - -set(MOJA_DataRepository_SpatialVectorGDAL_headers - include/moja/${PACKAGE}/providerspatialvectorgdal.h -) 
-set(MOJA_DataRepository_SpatialVectorGDAL_sources - src/providerspatialvectorgdal.cpp ) set(MOJA_DataRepository_SpatialRasterTiled_headers @@ -155,58 +105,51 @@ source_group("header files\\spatialvector" FILES ${MOJA_DataRepository_SpatialV source_group("source files\\spatialvector" FILES ${MOJA_DataRepository_SpatialVectorGDAL_sources}) set(SRCS - ${Sqlite_INCLUDE_PATH}/sqlite3.c ${MOJA_DataRepository_sources} ${MOJA_DataRepository_headers} ${MOJA_DataRepository_Relational_sources} ${MOJA_DataRepository_Relational_headers} ${MOJA_DataRepository_NoSQL_sources} ${MOJA_DataRepository_NoSQL_headers} - ${MOJA_DataRepository_SpatialRasterGDAL_sources} ${MOJA_DataRepository_SpatialRasterGDAL_headers} - ${MOJA_DataRepository_SpatialVectorGDAL_sources} ${MOJA_DataRepository_SpatialVectorGDAL_headers} ${MOJA_DataRepository_SpatialRasterTiled_sources} ${MOJA_DataRepository_SpatialRasterTiled_headers} ${MOJA_DataRepository_SpatialRaster_sources} ${MOJA_DataRepository_SpatialRaster_headers} ) -include_directories( include ) - -add_definitions( -DPOCO_NO_AUTOMATIC_LIBS ) add_library( ${LIBNAME} ${LIB_MODE} ${SRCS} ${ProductVersionFiles}) +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) + +#Set target properties set_target_properties( ${LIBNAME} PROPERTIES VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS) -target_link_libraries( ${LIBNAME} - moja.core - moja.flint.configuration - ${Poco_FOUNDATION} - ${Poco_JSON} +target_include_directories(${LIBNAME} + PUBLIC + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src +) + +target_link_libraries(${LIBNAME} + PUBLIC + moja::moja.core moja::moja.flint.configuration Poco::Foundation Poco::JSON + PRIVATE + SQLite::SQLite3 ) if (CMAKE_SYSTEM MATCHES "Linux" ) - target_link_libraries( ${LIBNAME} dl) + target_link_libraries( ${LIBNAME} + PRIVATE + dl + ) endif(CMAKE_SYSTEM MATCHES "Linux" ) -add_dependencies(${LIBNAME} moja.core moja.flint.configuration) - -install( - DIRECTORY include/moja - DESTINATION include - PATTERN ".svn" EXCLUDE - ) +############################################## +# Installation instructions -install( - TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin - ) +include(GNUInstallDirs) -if(MSVC) - INSTALL( - FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/Debug/${LIBNAME}${CMAKE_DEBUG_POSTFIX}.pdb - DESTINATION bin - CONFIGURATIONS Debug - ) -endif() +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) if (ENABLE_TESTS) add_subdirectory( tests ) diff --git a/Source/moja.datarepository/cmake/moja.datarepositoryConfig.cmake b/Source/moja.datarepository/cmake/moja.datarepositoryConfig.cmake new file mode 100644 index 0000000..07959e2 --- /dev/null +++ b/Source/moja.datarepository/cmake/moja.datarepositoryConfig.cmake @@ -0,0 +1,9 @@ +include(CMakeFindDependencyMacro) + +#find_dependency(Poco REQUIRED COMPONENTS Foundation JSON) +find_dependency(moja REQUIRED COMPONENTS moja.core moja.flint.configuration) + +if(NOT TARGET moja::moja.datarepository) + include("${CMAKE_CURRENT_LIST_DIR}/moja.datarepositoryTargets.cmake") +endif() + diff --git a/Source/moja.datarepository/include/moja/datarepository/metadatarasterreaderinterface.h b/Source/moja.datarepository/include/moja/datarepository/metadatarasterreaderinterface.h deleted file mode 100644 index de0adb4..0000000 --- a/Source/moja.datarepository/include/moja/datarepository/metadatarasterreaderinterface.h +++ /dev/null @@ -1,47 +0,0 @@ -#ifndef 
MOJA_DATAREPOSITORY_METADATARASTERREADERINTERFACE_H_ -#define MOJA_DATAREPOSITORY_METADATARASTERREADERINTERFACE_H_ - -#include "moja/datarepository/_datarepository_exports.h" -#include "moja/utility.h" - -#include -#include -#include -#include -#include -#include - -#include -#include -#include "moja/dynamic.h" - -namespace moja { -namespace datarepository { - -class TileBlockCellIndexer; -struct BlockIdx; - -class DATAREPOSITORY_API MetaDataRasterReader { -public: - MetaDataRasterReader(const std::string& path, const Point& origin, const std::string& prefix) {}; - virtual ~MetaDataRasterReader() = default; - - virtual DynamicObject readMetaData(const BlockIdx& blkIdx, int nSeries, std::vector* block) const = 0; - -protected: -}; - -// -------------------------------------------------------------------------------------------- - -class DATAREPOSITORY_API StackRasterReaderFactoryInterface { -public: - StackRasterReaderFactoryInterface() {} - virtual ~StackRasterReaderFactoryInterface() = default; - virtual std::unique_ptr getRasterReader(const std::string& path, const Point& origin, const std::string& prefix, const TileBlockCellIndexer& indexer) const = 0; -}; - - -} -} // moja::datarepository - -#endif // MOJA_DATAREPOSITORY_METADATARASTERREADERINTERFACE_H_ \ No newline at end of file diff --git a/Source/moja.datarepository/include/moja/datarepository/providermanager.h b/Source/moja.datarepository/include/moja/datarepository/providermanager.h deleted file mode 100644 index 2cb6eb7..0000000 --- a/Source/moja.datarepository/include/moja/datarepository/providermanager.h +++ /dev/null @@ -1,157 +0,0 @@ -// -// ProviderManager.h -// - -#ifndef ProviderManager_INCLUDED -#define ProviderManager_INCLUDED - -#include "moja/datarepository/_datarepository_exports.h" -#include "iproviderinterface.h" -#include "moja/sharedlibrary.h" - -#include -#include - -#if defined(_WIN32) -#define PROVIDERMANAGER_LIB_API __declspec(dllexport) -#endif - -#if !defined(PROVIDERMANAGER_LIB_API) -#if defined (__GNUC__) && (__GNUC__ >= 4) -#define PROVIDERMANAGER_LIB_API __attribute__ ((visibility ("default"))) -#else -#define PROVIDERMANAGER_LIB_API -#endif -#endif - -namespace moja { -namespace datarepository { - -//class IInTApi; -//class ILandUnitController; - -/** -* Function pointer type for InitializeProvider(). -* -* All providers must have an InitializeProvider() function. Usually this is declared automatically using -* the IMPLEMENT_PROVIDER macro below. The function must be declared using as 'extern "C"' so that the -* name remains undecorated. The object returned will be "owned" by the caller, and will be deleted -* by the caller before the provider is unloaded. -*/ -typedef IProviderInterface* (*InitializeProviderFunctionPtr)(const char* providerName); -typedef void(*DestroyProviderFunctionPtr)(void* obj); - -// -------------------------------------------------------------------------------------------- -enum class ProviderType { - Unknown, - Internal, - External -}; - -// -------------------------------------------------------------------------------------------- -class ProviderInfoBase { -public: - /** Constructor */ - ProviderInfoBase(); - virtual ~ProviderInfoBase() { - //std::cout << "[ProviderInfoBase::Destructor]:" << "LoadOrder ("<< LoadOrder << ")"<< std::endl; - }; - - virtual ProviderType GetProviderType() const = 0; - - InitializeProviderFunctionPtr InitializeProvider; - DestroyProviderFunctionPtr DestroyProvider; - - /** The provider object for this provider. 
We actually *own* this provider, so it's lifetime is controlled by the scope of this shared pointer. */ - std::vector ProviderList; - - /** Arbitrary number that encodes the load order of this provider, so we can shut them down in reverse order. */ - Int32 LoadOrder; - - /** count of how many of this type have been instantiate **/ - Int32 ProviderNumber; -}; - -// -------------------------------------------------------------------------------------------- -class ExternalProviderInfo : public ProviderInfoBase { -public: - ExternalProviderInfo(); - virtual ~ExternalProviderInfo() { - //std::cout << "[ExternalProviderInfo::Destructor]" << std::endl; - } - ProviderType GetProviderType() const { return ProviderType::External; } - - std::string OriginalFilename; - - /** File name of this Provider (.dll or .so file name) */ - std::string Filename; - - /** Handle to this Provider (library handle), if it's currently loaded */ - std::unique_ptr Handle; -}; - -// -------------------------------------------------------------------------------------------- -class InternalProviderInfo : public ProviderInfoBase { -public: - InternalProviderInfo(); - virtual ~InternalProviderInfo() { - //std::cout << "[InternalProviderInfo::Destructor]" << std::endl; - } - ProviderType GetProviderType() const { return ProviderType::Internal; } -}; - -// -------------------------------------------------------------------------------------------- -class DATAREPOSITORY_API ProviderManager { -public: - ProviderManager() = default; - ~ProviderManager(); - - /** static that tracks the current load number. Incremented whenever we add a new Provider */ - static Int32 CurrentLoadOrder; - - void AddProvider(ProviderType type, const std::string& ProviderName); - IProviderInterface::Ptr GetProvider(const std::string& providerName); - bool IsProviderLoaded(const std::string& providerName) const; - bool LoadInternalProvider(const std::string& providerName); - bool LoadExternalProvider(const std::string& ProviderName); - - bool UnloadProvider(const std::string& ProviderName, bool isShutdown = false) { return true; } - - typedef std::map> ProviderMap; - -private: - /** Compares file versions between the current executing flint version and the specified library */ - static bool CheckProviderCompatibility(const std::string& filename); - /** Finds Providers matching a given name wildcard. */ - std::map FindProviderPaths(const std::string& NamePattern) const; - std::map FindProviderPathsExact(const std::string& NamePattern) const; - std::map FindProviderPathsInDirectory(const std::string& InDirectoryName, const std::string& NamePattern, bool useSuffix) const; - - /** Map of all Providers. Maps the case-insensitive Provider name to information about that Provider, loaded or not. 
*/ - ProviderMap _providers; -}; - -// -------------------------------------------------------------------------------------------- -// A default minimal implementation of a provider that does nothing at startup and shutdown -class DefaultProviderImpl : public IProviderInterface {}; - -// -------------------------------------------------------------------------------------------- - -#define IMPLEMENT_PROVIDER( ProviderImplClass, ProviderName ) \ - \ - /**/ \ - /* InitializeProvider function, called by provider manager after this provider's DLL has been loaded */ \ - extern "C" PROVIDERMANAGER_LIB_API moja::flint::IProviderInterface* InitializeProvider(const char* providerName) \ - { \ - return new ProviderImplClass(); \ - } \ - /**/ \ - /* DestroyProvider function */ \ - extern "C" PROVIDERMANAGER_LIB_API void DestroyProvider(void* obj) \ - { \ - /* std::cout << "[DestroyProvider]: " << #ProviderImplClass << ", " << #ProviderName << std::endl;*/ \ - delete ((ProviderImplClass*)(obj)); \ - } - -}} //moja::DataRepository -#endif //ProviderManager_INCLUDED diff --git a/Source/moja.datarepository/include/moja/datarepository/providersleeknosqljson.h b/Source/moja.datarepository/include/moja/datarepository/providersleeknosqljson.h deleted file mode 100644 index d1378b5..0000000 --- a/Source/moja.datarepository/include/moja/datarepository/providersleeknosqljson.h +++ /dev/null @@ -1,36 +0,0 @@ -// -// ProviderNoSQLPocoJSON.h -// - -#ifndef ProviderNoSQLPocoJSON_INCLUDED -#define ProviderNoSQLPocoJSON_INCLUDED - -#include "moja/datarepository/iproviderinterface.h" - -#include "moja/Dynamic.h" - -#include - -namespace moja { -namespace datarepository { - -// -------------------------------------------------------------------------------------------- -/** -* SLEEK Implmentation of a NoSQL data provider (using MongoDB as a data source) derived -* from IProviderSpatialVectorInterface. -*/ -class DATAREPOSITORY_API ProviderNoSQLPocoJSON : public IProviderNoSQLInterface { - -public: - ProviderNoSQLPocoJSON(); - virtual ~ProviderNoSQLPocoJSON() {} - - virtual Dynamic GetObject(int id) override; - virtual Dynamic GetObjectStack(const std::string& query) override; - - typedef std::tuple provider_object; -}; - -}} // moja::datarepository - -#endif //ProviderNoSQLPocoJSON_INCLUDED diff --git a/Source/moja.datarepository/include/moja/datarepository/providerspatialrastergdal.h b/Source/moja.datarepository/include/moja/datarepository/providerspatialrastergdal.h deleted file mode 100644 index ff959c7..0000000 --- a/Source/moja.datarepository/include/moja/datarepository/providerspatialrastergdal.h +++ /dev/null @@ -1,30 +0,0 @@ -#ifndef MOJA_DATAREPOSITORY_PROVIDERSPATIALRASTERGDAL_H_ -#define MOJA_DATAREPOSITORY_PROVIDERSPATIALRASTERGDAL_H_ - -#include "moja/datarepository/iproviderspatialrasterinterface.h" - -#include - -#include - -namespace moja { -namespace datarepository { - -// -------------------------------------------------------------------------------------------- -// Moja Implmentation of a Spatial Raster data provider derived from IProviderSpatialRasterInterface. 
- -class DATAREPOSITORY_API ProviderSpatialRasterGDAL : public IProviderSpatialRasterInterface { - public: - explicit ProviderSpatialRasterGDAL(DynamicObject settings) {} - ProviderSpatialRasterGDAL(ProviderSpatialRasterGDAL const&) = delete; - void operator=(ProviderSpatialRasterGDAL const&) = delete; - virtual ~ProviderSpatialRasterGDAL() = default; - - DynamicVar GetValue(const std::string& name, double Latitude, double Longitude) override; - DynamicVar GetValueStack(const std::string& name, double Latitude, double Longitude) override; -}; - -} // namespace datarepository -} // namespace moja - -#endif // MOJA_DATAREPOSITORY_PROVIDERSPATIALRASTERGDAL_H_ diff --git a/Source/moja.datarepository/include/moja/datarepository/providerspatialvectorgdal.h b/Source/moja.datarepository/include/moja/datarepository/providerspatialvectorgdal.h deleted file mode 100644 index 0519a12..0000000 --- a/Source/moja.datarepository/include/moja/datarepository/providerspatialvectorgdal.h +++ /dev/null @@ -1,31 +0,0 @@ -#ifndef MOJA_DATAREPOSITORY_PROVIDERSPATIALVECTORGDAL_H_ -#define MOJA_DATAREPOSITORY_PROVIDERSPATIALVECTORGDAL_H_ - -#include "moja/datarepository/_datarepository_exports.h" -#include "moja/datarepository/iproviderspatialvectorinterface.h" - -#include - -namespace moja { -namespace datarepository { - -// -------------------------------------------------------------------------------------------- -/** - * Moja Implmentation of a Spatial Vector data provider derived from IProviderSpatialVectorInterface. - */ -class DATAREPOSITORY_API ProviderSpatialVectorGDAL : public IProviderSpatialVectorInterface { - public: - ProviderSpatialVectorGDAL(); - virtual ~ProviderSpatialVectorGDAL() {} - - DynamicVar GetValue(const std::string& name, double Latitude, double Longitude) override; - DynamicVar GetValueStack(const std::string& name, double Latitude, double Longitude) override; - - DynamicVar GetAttribute(const std::string& name, double Latitude, double Longitude) override; - DynamicVar GetAttributeStack(const std::string& name, double Latitude, double Longitude) override; -}; - -} // namespace datarepository -} // namespace moja - -#endif // MOJA_DATAREPOSITORY_PROVIDERSPATIALVECTORGDAL_H_ diff --git a/Source/moja.datarepository/src/providermanager.cpp b/Source/moja.datarepository/src/providermanager.cpp deleted file mode 100644 index 0163ff7..0000000 --- a/Source/moja.datarepository/src/providermanager.cpp +++ /dev/null @@ -1,232 +0,0 @@ -// -// ProviderManager.cpp -// - -#include "moja/datarepository/providermanager.h" -#include "moja/environment.h" -#include "moja/sharedlibrary.h" -#include "moja/string.h" -#include "moja/directoryiterator.h" - -#include "moja/datarepository/providersleekspatialraster.h" -#include "moja/datarepository/providersleekspatialvector.h" -#include "moja/datarepository/providersleekrelationalsqlite.h" -#include "moja/datarepository/providersleeknosqlmongodb.h" - -#include -#include - -namespace moja { -namespace datarepository { - -Int32 ProviderManager::CurrentLoadOrder = 1; - -ProviderInfoBase::ProviderInfoBase() { - LoadOrder = ProviderManager::CurrentLoadOrder++; - ProviderNumber = 1; - InitializeProvider = nullptr; - DestroyProvider = nullptr; -}; - -ExternalProviderInfo::ExternalProviderInfo() : Handle(nullptr) {} - -InternalProviderInfo::InternalProviderInfo() {} - -ProviderManager::~ProviderManager() { - // NOTE: It may not be safe to unload providers by this point (static deinitialization), as other - // libraries may have already been unloaded, which means we 
can't safely call clean up methods -} - -void ProviderManager::AddProvider(ProviderType providerType, const std::string& InProviderName) { - // Do we already know about this provider? If not, we'll create information for this provider now. - if (_providers.find(InProviderName) == _providers.end()) { - - std::unique_ptr providerInfo; - switch (providerType) { - case ProviderType::External: { - auto provider = std::make_unique(); - std::map ProviderPathMap = FindProviderPaths(InProviderName); - if (ProviderPathMap.size() == 1) { - // Add this provider to the set of providers that we know about - provider->OriginalFilename = (*std::begin(ProviderPathMap)).second; - provider->Filename = provider->OriginalFilename; - providerInfo = std::move(provider); - } - break; - } - case ProviderType::Internal: { - // Add this provider to the set of providers that we know about - providerInfo = std::make_unique(); - break; - } - } - // Update hash table - _providers[InProviderName] = std::move(providerInfo); - } -} - -std::map ProviderManager::FindProviderPaths(const std::string& NamePattern) const { - return FindProviderPathsInDirectory(moja::Environment::startProcessFolder(), NamePattern, true); -} - -std::map ProviderManager::FindProviderPathsExact(const std::string& NamePattern) const { - return FindProviderPathsInDirectory(moja::Environment::startProcessFolder(), NamePattern, false); -} - -std::map ProviderManager::FindProviderPathsInDirectory(const std::string& directoryName, const std::string& NamePattern, bool useSuffix) const { - - //const char* suf = useSuffix ? "suffex=T] " : "suffex=F] "; - //std::cout << "[FindProviderPathsInDirectory 1," << suf << NamePattern << std::endl; - std::map OutProviderPaths; - - DirectoryIterator dirIterator(directoryName); - DirectoryIterator end; - - std::string prefix = SharedLibrary::prefix(); - std::string suffix = useSuffix ? 
SharedLibrary::suffix() : ""; - //std::cout << "[FindProviderPathsInDirectory 1a," << prefix << NamePattern << suffix << std::endl; - - // Parse all the matching provider names - while (dirIterator != end) { - auto providerPath = Path(dirIterator->path()); - //std::cout << "[FindProviderPathsInDirectory 2]" << dirIterator->path() << std::endl; - auto file = providerPath.getFileName(); - if (icompare(file, 0, prefix.size(), prefix) == 0 - && icompare(file, file.size() - suffix.size(), suffix.size(), suffix) == 0) { - auto it = file.begin() + prefix.size(); - auto end = file.end() - suffix.size(); - std::string providerName; - while (it != end) providerName += *it++; - - bool match = NamePattern == providerName; - //std::cout << "[FindProviderPathsInDirectory 3]" << NamePattern << " ** " << providerName << " ** " << providerPath.toString() << " ** " << match << std::endl; - - if (NamePattern.size() == 0 || NamePattern == providerName) - OutProviderPaths[providerName] = providerPath.toString(); - } - ++dirIterator; - } - // std::cout << "[FindProviderPathsInDirectory 4:finished]" << OutProviderPaths.size() << std::endl; - return OutProviderPaths; -} - -IProviderInterface* CreateInternalProvider(const char* providerName) { - //std::cout << "[InitializeProvider]: " << "Internal, " << providerName << std::endl; - std::string _providerNames(providerName); - if (_providerNames == "ProviderSLEEKSpatialRaster") - return new ProviderSLEEKSpatialRaster(); - else if (_providerNames == "ProviderSLEEKSpatialVector") - return new ProviderSLEEKSpatialVector(); - else if (_providerNames == "ProviderSLEEKRelationalSQLite") - return new ProviderSLEEKRelationalSQLite(); - else if (_providerNames == "ProviderSLEEKNoSQLMongoDB") - return new ProviderSLEEKNoSQLMongoDB(); - - throw LibraryLoadException("Unknown internal provider"); -} - -IProviderInterface::Ptr ProviderManager::GetProvider(const std::string& providerName) { - // Do we even know about this provider? 
- const auto providerInfoIt = _providers.find(providerName); - if (providerInfoIt == std::end(_providers)) - return nullptr; - - std::shared_ptr providerInfo = (*providerInfoIt).second; - if (providerInfo->InitializeProvider == nullptr) - throw LibraryLoadException("Failed to initialise the provider"); - - std::shared_ptr providerInterface; - if (providerInfo->GetProviderType() == ProviderType::Internal) { - providerInterface = std::shared_ptr(providerInfo->InitializeProvider(providerName.c_str())); - } - else { - providerInterface = std::shared_ptr(providerInfo->InitializeProvider(providerName.c_str()), providerInfo->DestroyProvider); - } - if (providerInterface) { - auto provider = std::static_pointer_cast(providerInterface); - // TODO: resolve when landUnitController gets passed to the providerWrapper - //auto landUnitDataHandle = std::make_unique(&landUnitController, provider.get()); - - // TODO: work this part out for Providers - dont need wrapper stuff - //auto landUnitDataHandle = std::make_unique(provider.get()); - //providerInfo->ProviderList.push_back(providerInterface); - //ProviderMetaData metaData; - //metaData.providerInfoId = providerInfo->LoadOrder; - //metaData.providerId = providerInfo->ProviderNumber++; - //metaData.providerName = providerName; - //metaData.providerType = (int)(providerInfo->GetProviderType()); - //providerInterface->StartupProvider(std::move(landUnitDataHandle), metaData); - } - else - throw LibraryLoadException("Failed to initialise the provider"); - - return providerInterface; -} - -bool ProviderManager::LoadInternalProvider(const std::string& providerName) { - // Update our set of known providers, in case we don't already know about this provider - AddProvider(ProviderType::Internal, providerName); - std::shared_ptr providerInfo = _providers[providerName]; - - if (providerInfo->InitializeProvider == nullptr) { - if (providerInfo->GetProviderType() != ProviderType::Internal) - throw LibraryLoadException("Attempt to load provider already loaded as different type"); - - auto internalProviderInfo = std::static_pointer_cast(providerInfo); - internalProviderInfo->InitializeProvider = CreateInternalProvider; - internalProviderInfo->DestroyProvider = nullptr; - } - return true; -} - -bool ProviderManager::LoadExternalProvider(const std::string& providerName) { - - // Update our set of known providers, in case we don't already know about this provider - AddProvider(ProviderType::External, providerName); - - // Grab the provider info. This has the file name of the provider, as well as other info. 
- std::shared_ptr providerInfo = _providers[providerName]; - //std::cout << (providerInfo==nullptr?std::string("null"):std::string("not null")) <InitializeProvider == nullptr) { - if (providerInfo->GetProviderType() != ProviderType::External) - throw LibraryLoadException("Attempt to load provider already loaded as different type"); - - auto externalProviderInfo = std::static_pointer_cast(_providers[providerName]); - //std::cout << externalProviderInfo->Filename <Filename); - - // Clear the handle and set it again below if the provider is successfully loaded - externalProviderInfo->Handle = nullptr; - - // Skip this check if file manager has not yet been initialized - if (ProviderFileToLoad.exists()) { - if (CheckProviderCompatibility(ProviderFileToLoad.path())) { - externalProviderInfo->Handle = std::make_unique(ProviderFileToLoad.path()); - externalProviderInfo->InitializeProvider = (InitializeProviderFunctionPtr)externalProviderInfo->Handle->getSymbol("InitializeProvider"); - externalProviderInfo->DestroyProvider = (DestroyProviderFunctionPtr)externalProviderInfo->Handle->getSymbol("DestroyProvider"); - } - else { - throw LibraryLoadException("Provider not compatible"); - } - } - else { - //std::cout << externalProviderInfo->Filename << std::endl; - //std::cout << ProviderFileToLoad.path() << std::endl; - throw LibraryLoadException("Provider not found"); - } - } - return true; -} - -bool ProviderManager::CheckProviderCompatibility(const std::string& filename) { - //Int32 ProviderApiVersion = MOJA_VERSION; - //if (ProviderApiVersion != MOJA_API_VERSION) { - // return false; - //} - return true; -} - -}} // namespace moja::datarepository diff --git a/Source/moja.datarepository/src/providersleeknosqljson.cpp b/Source/moja.datarepository/src/providersleeknosqljson.cpp deleted file mode 100644 index e504c8e..0000000 --- a/Source/moja.datarepository/src/providersleeknosqljson.cpp +++ /dev/null @@ -1,29 +0,0 @@ -#include "moja/datarepository/providernosqlpocojson.h" - -#include -#include - -namespace moja { -namespace datarepository { - -ProviderNoSQLPocoJSON::ProviderNoSQLPocoJSON() { -} - -Dynamic ProviderNoSQLPocoJSON::GetObject(int id) { - provider_object object1 = std::make_tuple("test1", 1, 42.42, true); - Dynamic x = object1; - return x; -} - -Dynamic ProviderNoSQLPocoJSON::GetObjectStack(const std::string& query) { - provider_object object1 = std::make_tuple("object1", 1, 42.42, true); - provider_object object2 = std::make_tuple("object2", 1, 11.11, false); - - std::vector values; - values.push_back(object1); - values.push_back(object2); - Dynamic x = values; - return x; -} - -}} // namespace moja::datarepository \ No newline at end of file diff --git a/Source/moja.datarepository/src/providerspatialrastergdal.cpp b/Source/moja.datarepository/src/providerspatialrastergdal.cpp deleted file mode 100644 index 9684c27..0000000 --- a/Source/moja.datarepository/src/providerspatialrastergdal.cpp +++ /dev/null @@ -1,19 +0,0 @@ -#include "moja/datarepository/providerspatialrastergdal.h" - -namespace moja { -namespace datarepository { - -DynamicVar ProviderSpatialRasterGDAL::GetValue(const std::string& name, double Latitude, double Longitude) { - double value = 42.42; - DynamicVar x = value; - return x; -}; - -DynamicVar ProviderSpatialRasterGDAL::GetValueStack(const std::string& name, double Latitude, double Longitude) { - double value = 42.42; - DynamicVar x = value; - return x; -}; - -} // namespace datarepository -} // namespace moja \ No newline at end of file diff --git 
a/Source/moja.datarepository/src/providerspatialvectorgdal.cpp b/Source/moja.datarepository/src/providerspatialvectorgdal.cpp deleted file mode 100644 index 69a200c..0000000 --- a/Source/moja.datarepository/src/providerspatialvectorgdal.cpp +++ /dev/null @@ -1,37 +0,0 @@ -#include "moja/datarepository/providerspatialvectorgdal.h" - -#include - -#include - -namespace moja { -namespace datarepository { - -ProviderSpatialVectorGDAL::ProviderSpatialVectorGDAL() {} - -DynamicVar ProviderSpatialVectorGDAL::GetValue(const std::string& name, double Latitude, double Longitude) { - double value = 42.42; - DynamicVar x = value; - return x; -} - -DynamicVar ProviderSpatialVectorGDAL::GetValueStack(const std::string& name, double Latitude, double Longitude) { - std::vector values = {42.42, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9}; - DynamicVar x = values; - return x; -} - -DynamicVar ProviderSpatialVectorGDAL::GetAttribute(const std::string& name, double Latitude, double Longitude) { - double value = 42.42; - DynamicVar x = value; - return x; -} - -DynamicVar ProviderSpatialVectorGDAL::GetAttributeStack(const std::string& name, double Latitude, double Longitude) { - std::vector values = {42.42, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9}; - DynamicVar x = values; - return x; -} - -} // namespace datarepository -} // namespace moja \ No newline at end of file diff --git a/Source/moja.datarepository/tests/CMakeLists.txt b/Source/moja.datarepository/tests/CMakeLists.txt index a04766d..359bc71 100644 --- a/Source/moja.datarepository/tests/CMakeLists.txt +++ b/Source/moja.datarepository/tests/CMakeLists.txt @@ -1,17 +1,6 @@ -### Unit test ### set(TESTUNIT "${LIBNAME}.test") -find_package(Boost COMPONENTS system filesystem unit_test_framework serialization log log_setup REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() - -include_directories(include) +find_package(Boost COMPONENTS unit_test_framework REQUIRED) configure_file(../../templates/unittestdefinition.cpp ${CMAKE_CURRENT_SOURCE_DIR}/src/_unittestdefinition.cpp) @@ -25,68 +14,28 @@ set(TEST_SRCS src/providerspatialrastertiledtests.cpp ) -set(TEST_TEST_DATA -) - -add_definitions(-DBOOST_TEST_DYN_LINK) - add_executable(${TESTUNIT} ${TEST_SRCS} ${TEST_TEST_DATA}) add_test(NAME ${LIBNAME} WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND ${TESTUNIT} --result_code=yes --report_level=no) +target_link_libraries(${TESTUNIT} + PRIVATE + moja::moja.datarepository + moja::moja.test + Boost::unit_test_framework + ) if(WIN32) target_link_libraries(${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS} + PRIVATE wsock32 ws2_32 ) -ELSE() - target_link_libraries( - ${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS} - ) endif() -add_dependencies(${TESTUNIT} moja.datarepository) IF (RUN_UNIT_TESTS_ON_BUILD) add_custom_command(TARGET ${TESTUNIT} POST_BUILD COMMAND ${CMAKE_CTEST_COMMAND} ARGS -C $) ENDIF () -### End unit test ### - -### Begin PATH boilerplate for dependent libraries -- adapted from ### -### http://www.cmake.org/pipermail/cmake/2009-May/029464.html ### -### This allows unit tests to run without having to manually add ### -### dependent libraries to the system path. ### - -# Include an entry for each library that needs to be in the system path. 
-find_path(POCO_BIN NAMES PocoFoundation.dll PocoFoundation64.dll - PATHS ${POCO_PATH}/bin ${POCO_PATH}/bin64 - PATH_SUFFIXES $) - -file(TO_NATIVE_PATH "${Boost_LIBRARY_DIR}" boost_lib) -file(TO_NATIVE_PATH "${POCO_BIN}" poco_bin) - -# Determine which environment variable controls the locating of -# DLL's and set that variable. -if(WIN32) - set(LD_VARNAME "PATH") - set(LD_PATH "${boost_lib};${poco_bin};$ENV{PATH}") - - # IMPORTANT NOTE: The set_tests_properties(), below, internally - # stores its name/value pairs with a semicolon delimiter. - # because of this we must protect the semicolons in the path. - string(REPLACE ";" "\\;" LD_PATH "${LD_PATH}") -else() - set(LD_VARNAME "LD_LIBRARY_PATH") - set(LD_PATH "${boost_lib}:${poco_bin}:$ENV{LD_LIBRARY_PATH}") -endif() -set_tests_properties(${LIBNAME} PROPERTIES ENVIRONMENT "${LD_VARNAME}=${LD_PATH}") -### End PATH boilerplate ### diff --git a/Source/moja.datarepository/tests/src/datarepositorytests.cpp b/Source/moja.datarepository/tests/src/datarepositorytests.cpp index 2f72352..5ea2e0c 100644 --- a/Source/moja.datarepository/tests/src/datarepositorytests.cpp +++ b/Source/moja.datarepository/tests/src/datarepositorytests.cpp @@ -33,7 +33,7 @@ BOOST_AUTO_TEST_CASE(datarepository_Test_Tile_Iteration) { {"layers", moja::DynamicVector({moja::DynamicObject({{"name", "county"}, {"layer_type", "GridLayer"}, {"layer_data", "UInt8"}, - {"layer_path", ".\\data\\SLEEK\\County"}, + {"layer_path", "./data/SLEEK/County"}, {"layer_prefix", "counties_pop"}, {"tileLatSize", 1.0}, {"tileLonSize", 1.0}, @@ -45,7 +45,7 @@ BOOST_AUTO_TEST_CASE(datarepository_Test_Tile_Iteration) { moja::DynamicObject({{"name", "forests"}, {"layer_type", "GridLayer"}, {"layer_data", "UInt8"}, - {"layer_path", ".\\Data\\SLEEK\\Forests"}, + {"layer_path", "./Data/SLEEK/Forests"}, {"layer_prefix", "ke_forests"}, {"tileLatSize", 1.0}, {"tileLonSize", 1.0}, @@ -57,7 +57,7 @@ BOOST_AUTO_TEST_CASE(datarepository_Test_Tile_Iteration) { moja::DynamicObject({{"name", "plantations"}, {"layer_type", "GridLayer"}, {"layer_data", "UInt8"}, - {"layer_path", ".\\Data\\SLEEK\\Plantations"}, + {"layer_path", "./Data/SLEEK/Plantations"}, {"layer_prefix", "ke_tree-plantations"}, {"tileLatSize", 1.0}, {"tileLonSize", 1.0}, diff --git a/Source/moja.flint.configuration/CMakeLists.txt b/Source/moja.flint.configuration/CMakeLists.txt index 76f2305..01a32c4 100644 --- a/Source/moja.flint.configuration/CMakeLists.txt +++ b/Source/moja.flint.configuration/CMakeLists.txt @@ -4,28 +4,7 @@ string(TOUPPER "${PACKAGE}" LIBNAME_EXPORT) include(${CMAKE_MODULE_PATH}/generate_product_version.cmake) -if(MOJA_STATIC) - set(CMAKE_CXX_FLAGS_RELEASE "/MT") - set(CMAKE_CXX_FLAGS_DEBUG "/MTd") - add_definitions(-DUSE_STATIC_BOOST) - set(Boost_USE_STATIC_LIBS ON) -else(MOJA_STATIC) - add_definitions(-DBOOST_ALL_DYN_LINK) - set(Boost_USE_STATIC_LIBS OFF) -endif(MOJA_STATIC) - -find_package(Boost) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -# Poco -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) -endif() - -include_directories(include ../moja.core/include) +find_package(Poco REQUIRED Foundation JSON) # Version Info if (MSVC) @@ -41,10 +20,6 @@ if (MSVC) ) endif () -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) -endif() - # HEADERS AND SOURCE configure_file(../templates/exports.h ${CMAKE_CURRENT_SOURCE_DIR}/include/moja/flint/${PACKAGE}/_${PACKAGE}_exports.h) @@ -116,47 +91,45 @@ set(MOJA_FLINT_Configuration_sources ) set (SRCS 
${MOJA_FLINT_Configuration_sources} ${MOJA_FLINT_Configuration_headers}) -add_definitions( -DPOCO_NO_AUTOMATIC_LIBS ) + add_library(${LIBNAME} ${LIB_MODE} ${SRCS} ${ProductVersionFiles}) +add_library(moja::moja.flint.configuration ALIAS ${LIBNAME}) +#Set target properties set_target_properties(${LIBNAME} PROPERTIES VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS) -target_link_libraries( - ${LIBNAME} - moja.core - ${Poco_FOUNDATION} - ${Poco_JSON} - ) +target_include_directories(${LIBNAME} + PUBLIC + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src +) + +target_link_libraries(${LIBNAME} + PUBLIC + moja::moja.core Poco::Foundation Poco::JSON + PRIVATE + ${SYSLIBS} +) if (CMAKE_SYSTEM MATCHES "Linux") - target_link_libraries(${LIBNAME} dl) + target_link_libraries(${LIBNAME} + PRIVATE + dl + ) endif(CMAKE_SYSTEM MATCHES "Linux") -add_dependencies(${LIBNAME} moja.core) -install( - DIRECTORY include/moja - DESTINATION include - PATTERN ".svn" EXCLUDE -) - -install( - TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin -) +############################################## +# Installation instructions -if(MSVC) - INSTALL( - FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/Debug/${LIBNAME}${CMAKE_DEBUG_POSTFIX}.pdb - DESTINATION bin - CONFIGURATIONS Debug - ) -endif() +include(GNUInstallDirs) +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) if (ENABLE_TESTS) add_subdirectory(tests) diff --git a/Source/moja.flint.configuration/cmake/moja.flint.configurationConfig.cmake b/Source/moja.flint.configuration/cmake/moja.flint.configurationConfig.cmake new file mode 100644 index 0000000..662f36c --- /dev/null +++ b/Source/moja.flint.configuration/cmake/moja.flint.configurationConfig.cmake @@ -0,0 +1,8 @@ +include(CMakeFindDependencyMacro) + +#find_dependency(Poco REQUIRED COMPONENTS Foundation JSON) +find_dependency(moja REQUIRED COMPONENTS moja.core ) + +if(NOT TARGET moja::moja.flint.configuration) + include("${CMAKE_CURRENT_LIST_DIR}/moja.flint.configurationTargets.cmake") +endif() \ No newline at end of file diff --git a/Source/moja.flint.configuration/include/moja/flint/configuration/iteratorbase.h b/Source/moja.flint.configuration/include/moja/flint/configuration/iteratorbase.h deleted file mode 100644 index b2653a2..0000000 --- a/Source/moja.flint.configuration/include/moja/flint/configuration/iteratorbase.h +++ /dev/null @@ -1,47 +0,0 @@ -#ifndef Configuration_IterationBase_INCLUDED -#define Configuration_IterationBase_INCLUDED - -#include "moja/flint/configuration/_configuration_exports.h" - -#include - -namespace moja { - namespace InT { - namespace Configuration { - - enum class LocalDomainIterationType { - NotAnIteration, // i.e Point sim - LandscapeTiles, - AreaOfInterest, - TileIndex, - BlockIndex - }; - - static LocalDomainIterationType convertStrToLocalDomainIterationType(std::string iterationTypeStr) { - if (iterationTypeStr == "LandscapeTiles") - return LocalDomainIterationType::LandscapeTiles; - else if (iterationTypeStr == "AreaOfInterest") - return LocalDomainIterationType::LandscapeTiles; - else if (iterationTypeStr == "TileIndex") - return LocalDomainIterationType::TileIndex; - else if (iterationTypeStr == "BlockIndex") - return LocalDomainIterationType::BlockIndex; - return LocalDomainIterationType::NotAnIteration; - } - - class CONFIGURATION_API IterationBase { - public: - IterationBase(LocalDomainIterationType iterationType); - virtual ~IterationBase() { } - - 
virtual inline LocalDomainIterationType iterationType() const { return _iterationType; } - - private: - LocalDomainIterationType _iterationType; - }; - - } - } -} // namespace moja::flint::Configuration - -#endif // Configuration_IterationBase_INCLUDED diff --git a/Source/moja.flint.configuration/tests/CMakeLists.txt b/Source/moja.flint.configuration/tests/CMakeLists.txt index c4e2345..6e94984 100644 --- a/Source/moja.flint.configuration/tests/CMakeLists.txt +++ b/Source/moja.flint.configuration/tests/CMakeLists.txt @@ -1,21 +1,6 @@ -### Unit test ### set(TESTUNIT "${LIBNAME}.test") -find_package(Boost COMPONENTS system filesystem unit_test_framework REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() - -# Poco -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) -endif() +find_package(Boost COMPONENTS unit_test_framework REQUIRED) configure_file(../../templates/unittestdefinition.cpp ${CMAKE_CURRENT_SOURCE_DIR}/src/_unittestdefinition.cpp) @@ -36,57 +21,20 @@ set(TEST_SRCS src/configcelltests.cpp ) -add_definitions(-DBOOST_TEST_DYN_LINK) - add_executable(${TESTUNIT} ${TEST_SRCS}) add_test(NAME ${LIBNAME} WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND ${TESTUNIT} --result_code=yes --report_level=no) -target_link_libraries( - ${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS} - ${Poco_FOUNDATION} - ${Poco_JSON} +target_link_libraries(${TESTUNIT} + PRIVATE + moja::moja.flint.configuration + moja::moja.test + Boost::unit_test_framework ) -add_dependencies(${TESTUNIT} moja.flint.configuration) IF (RUN_UNIT_TESTS_ON_BUILD) add_custom_command(TARGET ${TESTUNIT} POST_BUILD COMMAND ${CMAKE_CTEST_COMMAND} ARGS -C $) ENDIF () -### End unit test ### - -### Begin PATH boilerplate for dependent libraries -- adapted from ### -### http://www.cmake.org/pipermail/cmake/2009-May/029464.html ### -### This allows unit tests to run without having to manually add ### -### dependent libraries to the system path. ### - -# Include an entry for each library that needs to be in the system path. -find_path(POCO_BIN NAMES PocoFoundation.dll PocoFoundation64.dll - PATHS ${POCO_PATH}/bin ${POCO_PATH}/bin64 - PATH_SUFFIXES $) - -file(TO_NATIVE_PATH "${Boost_LIBRARY_DIR}" boost_lib) -file(TO_NATIVE_PATH "${POCO_BIN}" poco_bin) - -# Determine which environment variable controls the locating of -# DLL's and set that variable. -if(WIN32) - set(LD_VARNAME "PATH") - set(LD_PATH "${boost_lib};${poco_bin};$ENV{PATH}") - - # IMPORTANT NOTE: The set_tests_properties(), below, internally - # stores its name/value pairs with a semicolon delimiter. - # because of this we must protect the semicolons in the path. 
- string(REPLACE ";" "\\;" LD_PATH "${LD_PATH}") -else() - set(LD_VARNAME "LD_LIBRARY_PATH") - set(LD_PATH "${boost_lib}:${poco_bin}:$ENV{LD_LIBRARY_PATH}") -endif() - -set_tests_properties(${LIBNAME} PROPERTIES ENVIRONMENT "${LD_VARNAME}=${LD_PATH}") -### End PATH boilerplate ### diff --git a/Source/moja.flint/CMakeLists.txt b/Source/moja.flint/CMakeLists.txt index 6bc11c7..8e6200a 100644 --- a/Source/moja.flint/CMakeLists.txt +++ b/Source/moja.flint/CMakeLists.txt @@ -21,35 +21,12 @@ if(ENABLE_MOJAPY) message(FATAL_ERROR "Unable to find PythonLibs.") endif() - include_directories(../mojapy/include) endif() include(${CMAKE_MODULE_PATH}/generate_product_version.cmake) -if(MOJA_STATIC) - set(CMAKE_CXX_FLAGS_RELEASE "/MT") - set(CMAKE_CXX_FLAGS_DEBUG "/MTd") - add_definitions(-DUSE_STATIC_BOOST) - set(Boost_USE_STATIC_LIBS ON) -else(MOJA_STATIC) - add_definitions(-DBOOST_ALL_DYN_LINK) - set(Boost_USE_STATIC_LIBS OFF) -endif(MOJA_STATIC) - if (ENABLE_MOJAPY) find_package(Boost COMPONENTS python3 REQUIRED) -else() - find_package(Boost) -endif() - -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -# Poco -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) endif() # Version Info @@ -68,13 +45,6 @@ endif () # HEADERS AND SOURCE -include_directories( - include - ../moja.core/include - ../moja.datarepository/include - ../moja.flint.configuration/include -) - configure_file(../templates/exports.h ${CMAKE_CURRENT_SOURCE_DIR}/include/moja/${PACKAGE}/_${PACKAGE}_exports.h) set(MOJA_INT_GENERATED_HEADERS @@ -129,7 +99,6 @@ set(MOJA_FLINT_FLINTDATA_HEADERS set(MOJA_FLINT_HEADERS include/moja/flint/aspatiallocaldomaincontroller.h -# include/moja/flint/aspatialnosqllocaldomaincontroller.h include/moja/flint/calendarandeventsequencer.h include/moja/flint/calendarandeventflintdatasequencer.h include/moja/flint/calendarsequencer.h @@ -197,6 +166,7 @@ set(MOJA_FLINT_OPERATION_MANAGER_SIMPLE_HEADERS include/moja/flint/operationresultfluxiteratorsimple.h include/moja/flint/operationproportionalsimple.h include/moja/flint/operationstocksimple.h + include/moja/flint/operationresultsimple.h include/moja/flint/operationtransfersimple.h include/moja/flint/poolsimple.h ) @@ -251,9 +221,7 @@ set(MOJA_FLINT_TRANSFORM_SOURCES src/locationidxfromflintdatatransform.cpp src/lookuptransform.cpp src/lookuprandomtransform.cpp -# src/nosqlcollectiontransform.cpp src/sqlquerytransform.cpp -# src/pocomongodbquerytransform.cpp src/sumpoolstransform.cpp ) @@ -265,7 +233,6 @@ set(MOJA_FLINT_FLINTDATA_SOURCES set(MOJA_FLINT_SOURCES src/aspatiallocaldomaincontroller.cpp -# src/aspatialnosqllocaldomaincontroller.cpp src/calendarandeventsequencer.cpp src/calendarandeventflintdatasequencer.cpp src/calendarsequencer.cpp @@ -342,57 +309,50 @@ set (SRCS ${MOJA_FLINT_GENERATED_HEADERS} ${MOJA_FLINT_OPERATION_MANAGER_SIMPLECACHE_HEADERS} ${MOJA_FLINT_OPERATION_MANAGER_SIMPLECACHE_SOURCE} ${MOJA_FLINT_OPERATION_MANAGER_UBLAS_HEADERS} ${MOJA_FLINT_OPERATION_MANAGER_UBLAS_SOURCE}) -#add_definitions(-DPOCO_NO_AUTOMATIC_LIBS -DBOOST_ALL_DYN_LINK) -add_definitions(-DPOCO_NO_AUTOMATIC_LIBS -DUSE_STATIC_BOOST) add_library( ${LIBNAME} ${LIB_MODE} ${SRCS} ${ProductVersionFiles}) +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) + +#Set target properties set_target_properties( ${LIBNAME} PROPERTIES VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS) -set(LINK_LIBRARIES - ${LIBNAME} - moja.core - moja.flint.configuration - 
moja.datarepository - ${Poco_FOUNDATION} - ${Poco_JSON} +target_include_directories(${LIBNAME} + PUBLIC + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src ) +target_link_libraries(${LIBNAME} + PUBLIC + moja::moja.core moja::moja.flint.configuration moja::moja.datarepository + ) + if (ENABLE_MOJAPY) - set(LINK_LIBRARIES ${LINK_LIBRARIES} ${PYTHON_LIBRARIES}) + target_link_libraries(${LIBNAME} + PRIVATE + ${PYTHON_LIBRARIES} + ) endif() -target_link_libraries(${LINK_LIBRARIES}) - if (CMAKE_SYSTEM MATCHES "Linux" ) - target_link_libraries( ${LIBNAME} dl) + target_link_libraries( ${LIBNAME} + PRIVATE + dl + ) endif(CMAKE_SYSTEM MATCHES "Linux" ) +############################################## +# Installation instructions -add_dependencies(${LIBNAME} moja.core moja.flint.configuration) - -install( - DIRECTORY include/moja - DESTINATION include - PATTERN ".svn" EXCLUDE - ) +include(GNUInstallDirs) -install( - TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin - ) - -if(MSVC) - INSTALL( - FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/Debug/${LIBNAME}${CMAKE_DEBUG_POSTFIX}.pdb - DESTINATION bin - CONFIGURATIONS Debug - ) -endif() +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) if (ENABLE_TESTS) add_subdirectory( tests ) diff --git a/Source/moja.flint/cmake/moja.flintConfig.cmake b/Source/moja.flint/cmake/moja.flintConfig.cmake new file mode 100644 index 0000000..a4cf26a --- /dev/null +++ b/Source/moja.flint/cmake/moja.flintConfig.cmake @@ -0,0 +1,8 @@ +include(CMakeFindDependencyMacro) +#find_dependency(Boost 1.70 REQUIRED COMPONENTS log) +find_dependency(moja REQUIRED COMPONENTS moja.core moja.flint.configuration moja.datarepository ) + +if(NOT TARGET moja::moja.flint) + include("${CMAKE_CURRENT_LIST_DIR}/moja.flintTargets.cmake") +endif() + diff --git a/Source/moja.flint/include/moja/flint/aspatialnosqllocaldomaincontroller.h b/Source/moja.flint/include/moja/flint/aspatialnosqllocaldomaincontroller.h deleted file mode 100644 index 0fe4759..0000000 --- a/Source/moja.flint/include/moja/flint/aspatialnosqllocaldomaincontroller.h +++ /dev/null @@ -1,33 +0,0 @@ -#ifndef MOJA_FLINT_ASPATIALNOSQLLOCALDOMAINCONTROLLER_H_ -#define MOJA_FLINT_ASPATIALNOSQLLOCALDOMAINCONTROLLER_H_ -#include "localdomaincontrollerbase.h" -#include "moja/datarepository/iprovidernosqlinterface.h" - -//#include "moja/datarepository/aspatialtileinfocollection.h" -//#include "moja/flint/configuration/configuration.h" -//#include "moja/flint/localdomaincontrollerbase.h" -//#include "moja/flint/spinuplandunitcontroller.h" -//#include - -namespace moja { -namespace flint { - -class FLINT_API AspatialNoSQLLocalDomainController final : public flint::LocalDomainControllerBase { -public: - AspatialNoSQLLocalDomainController(): _count(0) {} - ~AspatialNoSQLLocalDomainController() = default; - - virtual void configure(const flint::configuration::Configuration& config) override; - virtual void run() override; - -private: - std::shared_ptr _provider; - int _count; - DynamicVector _idSet; - //std::unique_ptr _tiles; -}; - -} -} - -#endif // MOJA_FLINT_ASPATIALNOSQLLOCALDOMAINCONTROLLER_H_ \ No newline at end of file diff --git a/Source/moja.flint/include/moja/flint/libraryfactory.h b/Source/moja.flint/include/moja/flint/libraryfactory.h index 818e9c3..87b9d81 100644 --- a/Source/moja.flint/include/moja/flint/libraryfactory.h +++ b/Source/moja.flint/include/moja/flint/libraryfactory.h @@ -6,12 +6,11 @@ namespace moja { namespace flint { -extern "C" int 
getFlintModuleRegistrations(ModuleRegistration* outModuleRegistrations); -extern "C" int getFlintTransformRegistrations(TransformRegistration* outTransformRegistrations); -extern "C" int getFlintFlintDataRegistrations(FlintDataRegistration* outFlintDataRegistrations); -extern "C" int getFlintFlintDataFactoryRegistrations(FlintDataFactoryRegistration* outFlintDataFactoryRegistrations); -extern "C" int getDataRepositoryProviderRegistrations( - DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration); +extern "C" int getFlintModuleRegistrations (ModuleRegistration* outModuleRegistrations); +extern "C" int getFlintTransformRegistrations (TransformRegistration* outTransformRegistrations); +extern "C" int getFlintFlintDataRegistrations (FlintDataRegistration* outFlintDataRegistrations); +extern "C" int getFlintFlintDataFactoryRegistrations (FlintDataFactoryRegistration* outFlintDataFactoryRegistrations); +extern "C" int getProviderRegistrations (DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration); } // namespace flint } // namespace moja diff --git a/Source/moja.flint/include/moja/flint/lmcontrol.h b/Source/moja.flint/include/moja/flint/lmcontrol.h deleted file mode 100644 index eeda825..0000000 --- a/Source/moja.flint/include/moja/flint/lmcontrol.h +++ /dev/null @@ -1,1192 +0,0 @@ -#ifndef LMCONTROL_INCLUDED -#define LMCONTROL_INCLUDED - -#include "moja/flint/flint.h" -#include "moja/flint/modulemanager.h" - -#include -#include - -namespace moja { -namespace flint { - -enum class LMMinInfo { - /// - /// termination requested by user-supplied routine evaluate; - /// - Terminated = -1, - /// - /// improper input parameters; - /// - InproperInput = 0, - /// - /// both actual and predicted relative reductions - /// in the sum of squares are at most ftol; - /// - FTOL = 1, - /// - /// relative error between two consecutive iterates - /// is at most xtol; - /// - XTOL = 2, - /// - /// conditions for FTOL and XTOL both hold - /// - FTOLAndXTOL = 3, - /// - /// the cosine of the angle between fvec and any - /// column of the jacobian is at most gtol in - /// absolute value - /// - GTOLCosine = 4, - /// - /// number of calls to lm_fcn has reached or - /// exceeded maxfev; - /// - MaxCalls = 5, - /// - /// ftol is too small. no further reduction in - /// the sum of squares is possible; - /// - FTOLTooSmall = 6, - /// - /// info = 7 xtol is too small. no further improvement in - /// the approximate solution x is possible; - /// - XTOLTooSmall = 7, - /// - /// gtol is too small. fvec is orthogonal to the - /// columns of the jacobian to machine precision; - /// - GTOLTooSmall = 8, - a = 9, - b = 10 -}; - -/// -/// ADT - control object used in LMMin computing -/// -struct LMControl { - double ftol; /* relative error desired in the sum of squares. */ - double xtol; /* relative error between last two approximations. */ - double gtol; /* orthogonality desired between fvec and its derivs. */ - double epsilon; /* step used to calculate the jacobian. */ - double stepbound; /* initial bound to steps in the outer loop. */ - double fnorm; /* norm of the residue vector fvec. */ - int maxcall; /* maximum number of iterations. */ - int nfev; /* actual number of iterations. */ - LMMinInfo info; /* status of minimization. 
*/ - - LMControl() { - ftol = 1.0e-14; - xtol = 1.0e-14; - gtol = 1.0e-14; - epsilon = 1.0e-14; - stepbound = 100.0; - maxcall = 1000; - } -}; - - -/// -///Project LevenbergMarquardtLeastSquaresFitting -///Release lmfit2.3 -/// -///Based on lmdif and other routines from the public-domain library -///netlib::Minpack, Argonne National Laboratories, March 1980, -///by Burton S. Garbow, Kenneth E. Hillstrom, Jorge J. More. -///C translation by Steve Moshier. Code converted into C++ -///compatible ANSI style and wrapped by Joachim Wuttke, 2004- -/// -///Web sites: http://www.messen-und-deuten.de/lmfit/index.html -/// http://sourceforge.net/projects/lmfit/ -/// -///Bug reports, feature requests, and other comments: mail to -///Joachim Wuttke, (first name).(last name)@messen-und-deuten.de -/// -///File lmmin.c -/// -///Solves or Minimizes the sum of squares of m nonlinear functions -///of n variables. -/// -///Converted to C# by Scott Morken Nov 2010 -/// -/// The type of data to be fitted -class LMMin { - static double Square(double x) { return x * x; } - - /* the following values seem good for an x86: */ - /// - /// resolution of arithmetic - /// - const double LM_MACHEP = 0.555e-16; - /// - /// smallest nonzero number - /// - const double LM_DWARF = 9.9e-324; - /// - /// square should not underflow - /// - const double LM_SQRT_DWARF = 1.0e-160; - /// - /// square should not overflow - /// - const double LM_SQRT_GIANT = 1.0e150; - -public: - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - /// - void lmMinimize(int m_dat, int n_par, double[] par, LmEvaluate evaluate, LmPrintOut printout, LMControl control, double[] xValue, double[] yValue) { - /*** allocate work space. ***/ - double[] fvec, diag, fjac, qtf, wa1, wa2, wa3, wa4; - int[] ipvt; - - int n = n_par; - int m = m_dat; - - fvec = new double[m]; - diag = new double[n]; - qtf = new double[m]; - fjac = new double[n * m]; - wa1 = new double[n]; - wa2 = new double[n]; - wa3 = new double[n]; - wa4 = new double[m]; - ipvt = new int[n]; - control.info = LMMinInfo.a; //9 - - /*** perform fit. ***/ - - control.info = 0; - control.nfev = 0; - - /* this goes through the modified legacy interface: */ - lm_lmdif(m, n, par, fvec, control.ftol, control.xtol, control.gtol, - control.maxcall * (n + 1), control.epsilon, diag, 1, - control.stepbound, control.info, - ref control.nfev, fjac, ipvt, qtf, wa1, wa2, wa3, wa4, - evaluate, printout, xValue, yValue); - - printout(n, par, m, fvec, -1, 0, control.nfev, xValue, yValue); - control.fnorm = lm_enorm(m, 0, fvec); - - if (control.info < 0) - control.info = LMMinInfo::b; //10; - - - } /*** lm_Minimize. ***/ - - /// - ///the purpose of lmdif is to minimize the sum of the squares of - ///m nonlinear functions in n variables by a modification of - ///the levenberg-marquardt algorithm. the user must provide a - ///subroutine evaluate which calculates the functions. the jacobian - ///is then calculated by a forward-difference approximation. - /// - ///the multi-parameter interface lm_lmdif is for users who want - ///full control and flexibility. most users will be better off using - ///the simpler interface lm_minimize provided above. 
- ///the parameters are the same as in the legacy FORTRAN implementation, - ///with the following exceptions: - /// - /// the old parameter ldfjac which gave leading dimension of fjac has - /// been deleted because this C translation makes no use of two- - /// dimensional arrays; - /// the old parameter nprint has been deleted; printout is now controlled - /// by the user-supplied routine *printout; - /// the parameter field *data and the function parameters *evaluate and - /// *printout have been added; they help avoiding global variables. - /// - /// m is a positive integer input variable set to the number - /// of functions. - /// n is a positive integer input variable set to the number - /// of variables. n must not exceed m. - /// x is an array of length n. on input x must contain - /// an initial estimate of the solution vector. on output x - /// contains the final estimate of the solution vector. - /// fvec is an output array of length m which contains - /// the functions evaluated at the output x. - /// ftol is a nonnegative input variable. termination - /// occurs when both the actual and predicted relative - /// reductions in the sum of squares are at most ftol. - /// therefore, ftol measures the relative error desired - /// in the sum of squares. - /// xtol is a nonnegative input variable. termination - /// occurs when the relative error between two consecutive - /// iterates is at most xtol. therefore, xtol measures the - /// relative error desired in the approximate solution. - /// gtol is a nonnegative input variable. termination - /// occurs when the cosine of the angle between fvec and - /// any column of the jacobian is at most gtol in absolute - /// value. therefore, gtol measures the orthogonality - /// desired between the function vector and the columns - /// of the jacobian. - /// maxfev is a positive integer input variable. termination - /// occurs when the number of calls to lm_fcn is at least - /// maxfev by the end of an iteration. - /// epsfcn is an input variable used in determining a suitable - /// step length for the forward-difference approximation. this - /// approximation assumes that the relative errors in the - /// functions are of the order of epsfcn. if epsfcn is less - /// than the machine precision, it is assumed that the relative - /// errors in the functions are of the order of the machine - /// precision. - /// diag is an array of length n. if mode = 1 (see below), diag is - /// internally set. if mode = 2, diag must contain positive entries - /// that serve as multiplicative scale factors for the variables. - /// mode is an integer input variable. if mode = 1, the - /// variables will be scaled internally. if mode = 2, - /// the scaling is specified by the input diag. other - /// values of mode are equivalent to mode = 1. - /// factor is a positive input variable used in determining the - /// initial step bound. this bound is set to the product of - /// factor and the euclidean norm of diag*x if nonzero, or else - /// to factor itself. in most cases factor should lie in the - /// interval (.1,100.0). 100. is a generally recommended value. - /// ndicates the termination status of lm_lmdif - /// nfev is an output variable set to the number of calls to the - /// user-supplied routine *evaluate. - /// fjac is an output m by n array. 
the upper n by n submatrix - /// of fjac contains an upper triangular matrix r with - /// diagonal elements of nonincreasing magnitude such that - /// t t t - /// p *(jac *jac)*p = r *r, - /// where p is a permutation matrix and jac is the final - /// calculated jacobian. column j of p is column ipvt(j) - /// (see below) of the identity matrix. the lower trapezoidal - /// part of fjac contains information generated during - /// the computation of r. - /// ipvt is an integer output array of length n. ipvt - /// defines a permutation matrix p such that jac*p = q*r, - /// where jac is the final calculated jacobian, q is - /// orthogonal (not stored), and r is upper triangular - /// with diagonal elements of nonincreasing magnitude. - /// column j of p is column ipvt(j) of the identity matrix. - /// qtf is an output array of length n which contains - /// the first n elements of the vector (q transpose)*fvec. - /// work array of length n. - /// work array of length n. - /// work array of length n. - /// wa4 is a work array of length m. - /// subroutine which calculates the functions - /// the subroutine which nforms about fit progress - /// the data to be fitted - void lm_lmdif(int m, int n, double* x, double* fvec, double ftol, - double xtol, double gtol, int maxfev, double epsfcn, - double* diag, int mode, double factor, LMMinInfo info, int& nfev, - double* fjac, int* ipvt, double* qtf, double* wa1, - double* wa2, double* wa3, double* wa4, - LmEvaluate evaluate, LmPrintOut printout, - double[] xValue, double[] yValue) { - int i, iter, j; - double actred, delta, dirder, eps, fnorm, fnorm1, gnorm, par, pnorm, - prered, ratio, step, sum, temp, temp1, temp2, temp3, xnorm; - const double p1 = 0.1; - const double p5 = 0.5; - const double p25 = 0.25; - const double p75 = 0.75; - const double p0001 = 1.0e-4; - - nfev = 0; /* function evaluation counter */ - iter = 1; /* outer loop counter */ - par = 0; /* levenberg-marquardt parameter */ - delta = 0; /* to prevent a warning (initialization within if-clause) */ - xnorm = 0; /* ditto */ - temp = std::max(epsfcn, LM_MACHEP); - eps = std::sqrt(temp); /* for calculating the Jacobian by forward differences */ - - /*** lmdif: check input parameters for errors. ***/ - - if ((n <= 0) || (m < n) || (ftol < 0.0) - || (xtol < 0.0) || (gtol < 0.0) || (maxfev <= 0) || (factor <= 0.0)) { - info = LMMinInfo::InproperInput; // invalid parameter - return; - } - if (mode == 2) { /* scaling by diag[] */ - for (j = 0; j < n; j++) { /* check for nonpositive elements */ - if (diag[j] <= 0.0) { - info = LMMinInfo::InproperInput; // invalid parameter - return; - } - } - } -#if BUG - printf("lmdif\n"); -#endif - - /*** lmdif: evaluate function at starting point and calculate norm. ***/ - - info = LMMinInfo::InproperInput; - evaluate(x, m, fvec, xValue, yValue); - printout(n, x, m, fvec, 0, 0, nfev++, xValue, yValue); - if (info < LMMinInfo::InproperInput) - return; - fnorm = lm_enorm(m, 0, fvec); - - /*** lmdif: the outer loop. ***/ - - do { -#if BUG - printf("lmdif/ outer loop iter=%d nfev=%d fnorm=%.10e\n", - iter, nfev, fnorm); -#endif - - /*** outer: calculate the jacobian matrix. 
***/ - - for (j = 0; j < n; j++) { - temp = x[j]; - step = eps * std::abs(temp); - if (step == 0.0) - step = eps; - x[j] = temp + step; - info = LMMinInfo::InproperInput; - evaluate(x, m, wa4, xValue, yValue); - printout(n, x, m, wa4, 1, iter, nfev++, xValue, yValue); - if (info < LMMinInfo::InproperInput) - return; /* user requested break */ - for (i = 0; i < m; i++) /* changed in 2.3, Mark Bydder */ - fjac[j * m + i] = (wa4[i] - fvec[i]) / (x[j] - temp); - x[j] = temp; - } - //#if BUG>1 - // /* DEBUG: print the entire matrix */ - // for (i = 0; i < m; i++) { - // for (j = 0; j < n; j++) - // printf("%.5e ", fjac[j * m + i]); - // printf("\n"); - // } - //#endif - - /*** outer: compute the qr factorization of the jacobian. ***/ - - lm_qrfac(m, n, fjac, true, ipvt, wa1, wa2, wa3); - - if (iter == 1) { /* first iteration */ - if (mode != 2) { - /* diag := norms of the columns of the initial jacobian */ - for (j = 0; j < n; j++) { - diag[j] = wa2[j]; - if (wa2[j] == 0.0) - diag[j] = 1.0; - } - } - /* use diag to scale x, then calculate the norm */ - for (j = 0; j < n; j++) - wa3[j] = diag[j] * x[j]; - xnorm = lm_enorm(n, 0, wa3); - /* initialize the step bound delta. */ - delta = factor * xnorm; - if (delta == 0.0) - delta = factor; - } - - /*** outer: form (q transpose)*fvec and store first n components in qtf. ***/ - - for (i = 0; i < m; i++) - wa4[i] = fvec[i]; - - for (j = 0; j < n; j++) { - temp3 = fjac[j * m + j]; - if (temp3 != 0.0) { - sum = 0; - for (i = j; i < m; i++) - sum += fjac[j * m + i] * wa4[i]; - temp = -sum / temp3; - for (i = j; i < m; i++) - wa4[i] += fjac[j * m + i] * temp; - } - fjac[j * m + j] = wa1[j]; - qtf[j] = wa4[j]; - } - - /** outer: compute norm of scaled gradient and test for convergence. ***/ - - gnorm = 0; - if (fnorm != 0) { - for (j = 0; j < n; j++) { - if (wa2[ipvt[j]] == 0) - continue; - - sum = 0.0; - for (i = 0; i <= j; i++) - sum += fjac[j * m + i] * qtf[i] / fnorm; - gnorm = std::max(gnorm, std::abs(sum / wa2[ipvt[j]])); - } - } - - if (gnorm <= gtol) { - info = LMMinInfo::GTOLCosine; - return; - } - - /*** outer: rescale if necessary. ***/ - - if (mode != 2) { - for (j = 0; j < n; j++) - diag[j] = std::max(diag[j], wa2[j]); - } - - /*** the inner loop. ***/ - do { -#if BUG - printf("lmdif/ inner loop iter=%d nfev=%d\n", iter, nfev); -#endif - - /*** inner: determine the levenberg-marquardt parameter. ***/ - - lm_lmpar(n, fjac, m, ipvt, diag, qtf, delta, par, - wa1, wa2, wa3, wa4); - - /*** inner: store the direction p and x + p; calculate the norm of p. ***/ - - for (j = 0; j < n; j++) { - wa1[j] = -wa1[j]; - wa2[j] = x[j] + wa1[j]; - wa3[j] = diag[j] * wa1[j]; - } - pnorm = lm_enorm(n, 0, wa3); - - /*** inner: on the first iteration, adjust the initial step bound. ***/ - - if (nfev <= 1 + n) - delta = std::min(delta, pnorm); - - /* evaluate the function at x + p and calculate its norm. */ - - info = LMMinInfo::InproperInput; - evaluate(wa2, m, wa4, xValue, yValue); - printout(n, x, m, wa4, 2, iter, nfev++, xValue, yValue); - if (info < LMMinInfo::InproperInput) - return; /* user requested break. */ - - fnorm1 = lm_enorm(m, 0, wa4); -#if BUG - printf("lmdif/ pnorm %.10e fnorm1 %.10e fnorm %.10e" - " delta=%.10e par=%.10e\n", - pnorm, fnorm1, fnorm, delta, par); -#endif - - /*** inner: compute the scaled actual reduction. ***/ - - if (p1 * fnorm1 < fnorm) - actred = 1 - Square(fnorm1 / fnorm); - else - actred = -1; - - /*** inner: compute the scaled predicted reduction and - the scaled directional derivative. 
***/ - - for (j = 0; j < n; j++) { - wa3[j] = 0; - for (i = 0; i <= j; i++) - wa3[i] += fjac[j * m + i] * wa1[ipvt[j]]; - } - temp1 = lm_enorm(n, 0, wa3) / fnorm; - temp2 = std::sqrt(par) * pnorm / fnorm; - prered = Square(temp1) + 2 * Square(temp2); - dirder = -(Square(temp1) + Square(temp2)); - - /*** inner: compute the ratio of the actual to the predicted reduction. ***/ - - ratio = prered != 0 ? actred / prered : 0; -#if BUG - printf("lmdif/ actred=%.10e prered=%.10e ratio=%.10e" - " sq(1)=%.10e sq(2)=%.10e dd=%.10e\n", - actred, prered, prered != 0 ? ratio : 0.0, - SQR(temp1), SQR(temp2), dirder); -#endif - - /*** inner: update the step bound. ***/ - - if (ratio <= p25) { - if (actred >= 0.0) - temp = p5; - else - temp = p5 * dirder / (dirder + p5 * actred); - if (p1 * fnorm1 >= fnorm || temp < p1) - temp = p1; - delta = temp * std::min(delta, pnorm / p1); - par /= temp; - } - else if (par == 0.0 || ratio >= p75) { - delta = pnorm / p5; - par *= p5; - } - - /*** inner: test for successful iteration. ***/ - - if (ratio >= p0001) { - /* yes, success: update x, fvec, and their norms. */ - for (j = 0; j < n; j++) { - x[j] = wa2[j]; - wa2[j] = diag[j] * x[j]; - } - for (i = 0; i < m; i++) - fvec[i] = wa4[i]; - xnorm = lm_enorm(n, 0, wa2); - fnorm = fnorm1; - iter++; - } -#if BUG - else { - printf("ATTN: iteration considered unsuccessful\n"); - } -#endif - - /*** inner: tests for convergence ( otherwise info = 1, 2, or 3 ). ***/ - - info = LMMinInfo::InproperInput; /* do not terminate (unless overwritten by nonzero) */ - if (std::abs(actred) <= ftol && prered <= ftol && p5 * ratio <= 1) - info = LMMinInfo::FTOL; - if (delta <= xtol * xnorm) - info += 2; - if (info != LMMinInfo::InproperInput) - return; - - /*** inner: tests for termination and stringent tolerances. ***/ - - if (nfev >= maxfev) - info = (LMMinInfo)5; - if (std::abs(actred) <= LM_MACHEP && - prered <= LM_MACHEP && p5 * ratio <= 1) - info = (LMMinInfo)6; - if (delta <= LM_MACHEP * xnorm) - info = (LMMinInfo)7; - if (gnorm <= LM_MACHEP) - info = (LMMinInfo)8; - if (info != LMMinInfo::InproperInput) - return; - - /*** inner: end of the loop. repeat if iteration unsuccessful. ***/ - - } while (ratio < p0001); - - /*** outer: end of the loop. ***/ - - } while (true); - - } /*** lm_lmdif. ***/ - - void lm_lmpar(int n, double* r, int ldr, int* ipvt, double* diag, - double* qtb, double delta, double& par, double* x, - double* sdiag, double* wa1, double* wa2) { - /* given an m by n matrix a, an n by n nonsingular diagonal - * matrix d, an m-vector b, and a positive number delta, - * the problem is to detemine a value for the parameter - * par such that if x solves the system - * - * a*x = b , std::sqrt(par)*d*x = 0 , - * - * in the least squares sense, and dxnorm is the euclidean - * norm of d*x, then either par is 0. and - * - * (dxnorm-delta) .le. 0.1*delta , - * - * or par is positive and - * - * abs(dxnorm-delta) .le. 0.1*delta . - * - * this subroutine completes the solution of the problem - * if it is provided with the necessary information from the - * qr factorization, with column pivoting, of a. that is, if - * a*p = q*r, where p is a permutation matrix, q has orthogonal - * columns, and r is an upper triangular matrix with diagonal - * elements of nonincreasing magnitude, then lmpar expects - * the full upper triangle of r, the permutation matrix p, - * and the first n components of (q transpose)*b. on output - * lmpar also provides an upper triangular matrix s such that - * - * t t t - * p *(a *a + par*d*d)*p = s *s . 
- * - * s is employed within lmpar and may be of separate interest. - * - * only a few iterations are generally needed for convergence - * of the algorithm. if, however, the limit of 10 iterations - * is reached, then the output par will contain the best - * value obtained so far. - * - * parameters: - * - * n is a positive integer input variable set to the order of r. - * - * r is an n by n array. on input the full upper triangle - * must contain the full upper triangle of the matrix r. - * on output the full upper triangle is unaltered, and the - * strict lower triangle contains the strict upper triangle - * (transposed) of the upper triangular matrix s. - * - * ldr is a positive integer input variable not less than n - * which specifies the leading dimension of the array r. - * - * ipvt is an integer input array of length n which defines the - * permutation matrix p such that a*p = q*r. column j of p - * is column ipvt(j) of the identity matrix. - * - * diag is an input array of length n which must contain the - * diagonal elements of the matrix d. - * - * qtb is an input array of length n which must contain the first - * n elements of the vector (q transpose)*b. - * - * delta is a positive input variable which specifies an upper - * bound on the euclidean norm of d*x. - * - * par is a nonnegative variable. on input par contains an - * initial estimate of the levenberg-marquardt parameter. - * on output par contains the final estimate. - * - * x is an output array of length n which contains the least - * squares solution of the system a*x = b, std::sqrt(par)*d*x = 0, - * for the output par. - * - * sdiag is an output array of length n which contains the - * diagonal elements of the upper triangular matrix s. - * - * wa1 and wa2 are work arrays of length n. - * - */ - int i, iter, j, nsing; - double dxnorm, fp, fp_old, gnorm, parc, parl, paru; - double sum, temp; - const double p1 = 0.1; - const double p001 = 0.001; - -#if BUG - printf("lmpar\n"); -#endif - - /*** lmpar: compute and store in x the gauss-newton direction. if the - jacobian is rank-deficient, obtain a least squares solution. ***/ - - nsing = n; - for (j = 0; j < n; j++) { - wa1[j] = qtb[j]; - if (r[j * ldr + j] == 0 && nsing == n) - nsing = j; - if (nsing < n) - wa1[j] = 0; - } -#if BUG - printf("nsing %d ", nsing); -#endif - for (j = nsing - 1; j >= 0; j--) { - wa1[j] = wa1[j] / r[j + ldr * j]; - temp = wa1[j]; - for (i = 0; i < j; i++) - wa1[i] -= r[j * ldr + i] * temp; - } - - for (j = 0; j < n; j++) - x[ipvt[j]] = wa1[j]; - - // lmpar: initialize the iteration counter, evaluate the function at the - // origin, and test for acceptance of the gauss-newton direction. - - iter = 0; - for (j = 0; j < n; j++) - wa2[j] = diag[j] * x[j]; - dxnorm = lm_enorm(n, 0, wa2); - fp = dxnorm - delta; - if (fp <= p1 * delta) { -#if BUG - printf("lmpar/ terminate (fp= n) { - for (j = 0; j < n; j++) - wa1[j] = diag[ipvt[j]] * wa2[ipvt[j]] / dxnorm; - - for (j = 0; j < n; j++) { - sum = 0.0; - for (i = 0; i < j; i++) - sum += r[j * ldr + i] * wa1[i]; - wa1[j] = (wa1[j] - sum) / r[j + ldr * j]; - } - temp = lm_enorm(n, 0, wa1); - parl = fp / delta / temp / temp; - } - - // lmpar: calculate an upper bound, paru, for the 0. of the function. 
- - for (j = 0; j < n; j++) { - sum = 0; - for (i = 0; i <= j; i++) - sum += r[j * ldr + i] * qtb[i]; - wa1[j] = sum / diag[ipvt[j]]; - } - gnorm = lm_enorm(n, 0, wa1); - paru = gnorm / delta; - if (paru == 0.0) - paru = LM_DWARF / std::min(delta, p1); - - // lmpar: if the input par lies outside of the interval (parl,paru), - // set par to the closer endpoint. - - par = std::max(par, parl); - par = std::min(par, paru); - if (par == 0.0) - par = gnorm / dxnorm; -#if BUG - printf("lmpar/ parl %.4e par %.4e paru %.4e\n", parl, par, paru); -#endif - - //lmpar: iterate. - - for (;; iter++) { - - // evaluate the function at the current value of par. - - if (par == 0.0) - par = std::max(LM_DWARF, p001 * paru); - temp = std::sqrt(par); - for (j = 0; j < n; j++) - wa1[j] = temp * diag[j]; - lm_qrsolv(n, r, ldr, ipvt, wa1, qtb, x, sdiag, wa2); - for (j = 0; j < n; j++) - wa2[j] = diag[j] * x[j]; - dxnorm = lm_enorm(n, 0, wa2); - fp_old = fp; - fp = dxnorm - delta; - - // if the function is small enough, accept the current value - // of par. also test for the exceptional cases where parl - // is 0. or the number of iterations has reached 10. - - if (std::abs(fp) <= p1 * delta - || (parl == 0.0 && fp <= fp_old && fp_old < 0.0) - || iter == 10) - break; /* the only exit from the iteration. */ - - // compute the Newton correction. - - for (j = 0; j < n; j++) - wa1[j] = diag[ipvt[j]] * wa2[ipvt[j]] / dxnorm; - - for (j = 0; j < n; j++) { - wa1[j] = wa1[j] / sdiag[j]; - for (i = j + 1; i < n; i++) - wa1[i] -= r[j * ldr + i] * wa1[j]; - } - temp = lm_enorm(n, 0, wa1); - parc = fp / delta / temp / temp; - - // depending on the sign of the function, update parl or paru. - - if (fp > 0) - parl = std::max(parl, par); - else if (fp < 0) - paru = std::min(paru, par); - // the case fp==0 is precluded by the break condition - - // compute an improved estimate for par. - - par = std::max(parl, par + parc); - - } - - } // lm_lmpar. - - void lm_qrfac(int m, int n, double* a, bool pivot, int* ipvt, - double* rdiag, double* acnorm, double* wa) { - /* - * this subroutine uses householder transformations with column - * pivoting (optional) to compute a qr factorization of the - * m by n matrix a. that is, qrfac determines an orthogonal - * matrix q, a permutation matrix p, and an upper trapezoidal - * matrix r with diagonal elements of nonincreasing magnitude, - * such that a*p = q*r. the householder transformation for - * column k, k = 1,2,...,min(m,n), is of the form - * - * t - * i - (1/u(k))*u*u - * - * where u has 0.s in the first k-1 positions. the form of - * this transformation and the method of pivoting first - * appeared in the corresponding linpack subroutine. - * - * parameters: - * - * m is a positive integer input variable set to the number - * of rows of a. - * - * n is a positive integer input variable set to the number - * of columns of a. - * - * a is an m by n array. on input a contains the matrix for - * which the qr factorization is to be computed. on output - * the strict upper trapezoidal part of a contains the strict - * upper trapezoidal part of r, and the lower trapezoidal - * part of a contains a factored form of q (the non-trivial - * elements of the u vectors described above). - * - * pivot is a logical input variable. if pivot is set true, - * then column pivoting is enforced. if pivot is set false, - * then no column pivoting is done. - * - * ipvt is an integer output array of length lipvt. ipvt - * defines the permutation matrix p such that a*p = q*r. 
- * column j of p is column ipvt(j) of the identity matrix. - * if pivot is false, ipvt is not referenced. - * - * rdiag is an output array of length n which contains the - * diagonal elements of r. - * - * acnorm is an output array of length n which contains the - * norms of the corresponding columns of the input matrix a. - * if this information is not needed, then acnorm can coincide - * with rdiag. - * - * wa is a work array of length n. if pivot is false, then wa - * can coincide with rdiag. - * - */ - int i, j, k, kmax, MINmn; - double ajnorm, sum, temp; - const double p05 = 0.05; - - /*** qrfac: compute initial column norms and initialize several arrays. ***/ - - for (j = 0; j < n; j++) { - acnorm[j] = lm_enorm(m, j * m, a); - rdiag[j] = acnorm[j]; - wa[j] = rdiag[j]; - if (pivot) - ipvt[j] = j; - } -#if BUG - printf("qrfac\n"); -#endif - - /*** qrfac: reduce a to r with householder transformations. ***/ - - MINmn = std::min(m, n); - for (j = 0; j < MINmn; j++) { - if (!pivot) - goto pivot_ok; - - /** bring the column of largest norm into the pivot position. **/ - - kmax = j; - for (k = j + 1; k < n; k++) - if (rdiag[k] > rdiag[kmax]) - kmax = k; - if (kmax == j) - goto pivot_ok; - - for (i = 0; i < m; i++) { - temp = a[j * m + i]; - a[j * m + i] = a[kmax * m + i]; - a[kmax * m + i] = temp; - } - rdiag[kmax] = rdiag[j]; - wa[kmax] = wa[j]; - k = ipvt[j]; - ipvt[j] = ipvt[kmax]; - ipvt[kmax] = k; - - pivot_ok: - /** compute the Householder transformation to reduce the - j-th column of a to a multiple of the j-th unit vector. **/ - - ajnorm = lm_enorm(m - j, j * m + j, a); - if (ajnorm == 0.0) { - rdiag[j] = 0; - continue; - } - - if (a[j * m + j] < 0.0) - ajnorm = -ajnorm; - for (i = j; i < m; i++) - a[j * m + i] /= ajnorm; - a[j * m + j] += 1; - - /** apply the transformation to the remaining columns - and update the norms. **/ - - for (k = j + 1; k < n; k++) { - sum = 0; - - for (i = j; i < m; i++) - sum += a[j * m + i] * a[k * m + i]; - - temp = sum / a[j + m * j]; - - for (i = j; i < m; i++) - a[k * m + i] -= temp * a[j * m + i]; - - if (pivot && rdiag[k] != 0.0) { - temp = a[m * k + j] / rdiag[k]; - temp = std::max(0.0, 1 - temp * temp); - rdiag[k] *= std::sqrt(temp); - temp = rdiag[k] / wa[k]; - if (p05 * Square(temp) <= LM_MACHEP) { - rdiag[k] = lm_enorm(m - j - 1, m * k + j + 1, a); - wa[k] = rdiag[k]; - } - } - } - - rdiag[j] = -ajnorm; - } - } - - /// - /// given an m by n matrix a, an n by n diagonal matrix d, - /// and an m-vector b, the problem is to determine an x which - /// solves the system - /// - /// a*x = b , d*x = 0 , - /// - /// in the least squares sense. - /// - ///this subroutine completes the solution of the problem - ///if it is provided with the necessary information from the - ///qr factorization, with column pivoting, of a. that is, if - ///a*p = q*r, where p is a permutation matrix, q has orthogonal - ///columns, and r is an upper triangular matrix with diagonal - ///elements of nonincreasing magnitude, then qrsolv expects - ///the full upper triangle of r, the permutation matrix p, - ///and the first n components of (q transpose)*b. the system - /// a*x = b, d*x = 0, is then equivalent to - /// - /// t t - /// r*z = q *b , p *d*p*z = 0 , - /// - /// where x = p*z. if this system does not have full rank, - /// then a least squares solution is obtained. on output qrsolv - /// also provides an upper triangular matrix s such that - /// - /// t t t - /// p *(a *a + d*d)*p = s *s . - /// - ///s is computed within qrsolv and may be of separate interest. 
- /// - /// n is a positive integer input variable set to the order of r. - /// r is an n by n array. on input the full upper triangle - /// must contain the full upper triangle of the matrix r. - /// on output the full upper triangle is unaltered, and the - /// strict lower triangle contains the strict upper triangle - /// (transposed) of the upper triangular matrix s. - /// ldr is a positive integer input variable not less than n - /// which specifies the leading dimension of the array r. - /// ipvt is an integer input array of length n which defines the - /// permutation matrix p such that a*p = q*r. column j of p - /// is column ipvt(j) of the identity matrix. - /// diag is an input array of length n which must contain the - /// diagonal elements of the matrix d. - /// qtb is an input array of length n which must contain the first - /// n elements of the vector (q transpose)*b. - /// x is an output array of length n which contains the least - /// squares solution of the system a*x = b, d*x = 0. - /// sdiag is an output array of length n which contains the - /// diagonal elements of the upper triangular matrix s. - /// wa is a work array of length n. - void lm_qrsolv(int n, double* r, int ldr, int* ipvt, double* diag, - double* qtb, double* x, double* sdiag, double* wa) { - int i, kk, j, k, nsing; - double qtbpj, sum, temp; - double _sin, _cos, _tan, _cot; /* local variables, not functions */ - const double p25 = 0.25; - const double p5 = 0.5; - - /*** qrsolv: copy r and (q transpose)*b to preserve input and initialize s. - in particular, save the diagonal elements of r in x. ***/ - - for (j = 0; j < n; j++) { - for (i = j; i < n; i++) - r[j * ldr + i] = r[i * ldr + j]; - x[j] = r[j * ldr + j]; - wa[j] = qtb[j]; - } -#if BUG - printf("qrsolv\n"); -#endif - - /*** qrsolv: eliminate the diagonal matrix d using a givens rotation. ***/ - - for (j = 0; j < n; j++) { - - /*** qrsolv: prepare the row of d to be eliminated, locating the - diagonal element using p from the qr factorization. ***/ - - if (diag[ipvt[j]] == 0.0) - goto L90; - for (k = j; k < n; k++) - sdiag[k] = 0.0; - sdiag[j] = diag[ipvt[j]]; - - /*** qrsolv: the transformations to eliminate the row of d modify only - a single element of (q transpose)*b beyond the first n, which is - initially 0.. ***/ - - qtbpj = 0.0; - for (k = j; k < n; k++) { - - /** determine a givens rotation which eliminates the - appropriate element in the current row of d. **/ - - if (sdiag[k] == 0.0) - continue; - kk = k + ldr * k; - if (std::abs(r[kk]) < std::abs(sdiag[k])) { - _cot = r[kk] / sdiag[k]; - _sin = p5 / std::sqrt(p25 + p25 * Square(_cot)); - _cos = _sin * _cot; - } - else { - _tan = sdiag[k] / r[kk]; - _cos = p5 / std::sqrt(p25 + p25 * Square(_tan)); - _sin = _cos * _tan; - } - - /** compute the modified diagonal element of r and - the modified element of ((q transpose)*b,0). **/ - - r[kk] = _cos * r[kk] + _sin * sdiag[k]; - temp = _cos * wa[k] + _sin * qtbpj; - qtbpj = -_sin * wa[k] + _cos * qtbpj; - wa[k] = temp; - - /** accumulate the tranformation in the row of s. **/ - - for (i = k + 1; i < n; i++) { - temp = _cos * r[k * ldr + i] + _sin * sdiag[i]; - sdiag[i] = -_sin * r[k * ldr + i] + _cos * sdiag[i]; - r[k * ldr + i] = temp; - } - } - - L90: - /** store the diagonal element of s and restore - the corresponding diagonal element of r. **/ - - sdiag[j] = r[j * ldr + j]; - r[j * ldr + j] = x[j]; - } - - /*** qrsolv: solve the triangular system for z. if the system is - singular, then obtain a least squares solution. 
***/ - - nsing = n; - for (j = 0; j < n; j++) { - if (sdiag[j] == 0.0 && nsing == n) - nsing = j; - if (nsing < n) - wa[j] = 0; - } - - for (j = nsing - 1; j >= 0; j--) { - sum = 0; - for (i = j + 1; i < nsing; i++) - sum += r[j * ldr + i] * wa[i]; - wa[j] = (wa[j] - sum) / sdiag[j]; - } - - /*** qrsolv: permute the components of z back to components of x. ***/ - - for (j = 0; j < n; j++) - x[ipvt[j]] = wa[j]; - - } /*** lm_qrsolv. ***/ - - /// - /// given an n-vector x, this function calculates the euclidean norm of x. - /// - /// the euclidean norm is computed by accumulating the sum of - /// squares in three different sums. the sums of squares for the - /// small and large components are scaled so that no overflows - /// occur. non-destructive underflows are permitted. underflows - /// and overflows do not occur in the computation of the unscaled - /// sum of squares for the intermediate components. - /// the definitions of small, intermediate and large components - /// depend on two constants, LM_SQRT_DWARF and LM_SQRT_GIANT. the main - /// restrictions on these constants are that LM_SQRT_DWARF**2 not - /// underflow and LM_SQRT_GIANT**2 not overflow. - /// - /// Length is a positive integer input variable. - /// The offset into array x. - /// x is an input array of length n. - /// - double lm_enorm(int Length, int Offset, double[] x) { - int i; - double agiant, s1, s2, s3, xabs, x1max, x3max, temp; - - s1 = 0; - s2 = 0; - s3 = 0; - x1max = 0; - x3max = 0; - agiant = LM_SQRT_GIANT / ((double)Length); - - /** sum squares. **/ - for (i = Offset; i < Offset + Length; i++) { - xabs = std::abs(x[i]); - if (xabs > LM_SQRT_DWARF && xabs < agiant) { - /* sum for intermediate components. */ - s2 += xabs * xabs; - continue; - } - - if (xabs > LM_SQRT_DWARF) { - /* sum for large components. */ - if (xabs > x1max) { - temp = x1max / xabs; - s1 = 1 + s1 * Square(temp); - x1max = xabs; - } - else { - temp = xabs / x1max; - s1 += Square(temp); - } - continue; - } - /* sum for small components. */ - if (xabs > x3max) { - temp = x3max / xabs; - s3 = 1 + s3 * Square(temp); - x3max = xabs; - } - else { - if (xabs != 0.0) { - temp = xabs / x3max; - s3 += Square(temp); - } - } - } - - /** calculation of norm. **/ - - if (s1 != 0) - return x1max * std::sqrt(s1 + (s2 / x1max) / x1max); - if (s2 != 0) { - if (s2 >= x3max) - return std::sqrt(s2 * (1 + (x3max / s2) * (x3max * s3))); - else - return std::sqrt(x3max * ((s2 / x3max) + (x3max * s3))); - } - - return x3max * std::sqrt(s3); - - } /*** lm_enorm. 
***/ -}; -} -} // namespace moja::flint -#endif // LMCONTROL_INCLUDED \ No newline at end of file diff --git a/Source/moja.flint/include/moja/flint/matrixublas.h b/Source/moja.flint/include/moja/flint/matrixublas.h index b20d971..cabe1b9 100644 --- a/Source/moja.flint/include/moja/flint/matrixublas.h +++ b/Source/moja.flint/include/moja/flint/matrixublas.h @@ -1,7 +1,6 @@ #ifndef MOJA_FLINT_MATRIXUBLAS_H_ #define MOJA_FLINT_MATRIXUBLAS_H_ -#define BOOST_ALL_NO_LIB #define BOOST_UBLAS_NDEBUG // cuts some time off run // (http://www.boost.org/doc/libs/1_49_0/libs/numeric/ublas/doc/index.htm) diff --git a/Source/moja.flint/include/moja/flint/mojalibapi.h b/Source/moja.flint/include/moja/flint/mojalibapi.h index e865a6f..826039d 100644 --- a/Source/moja.flint/include/moja/flint/mojalibapi.h +++ b/Source/moja.flint/include/moja/flint/mojalibapi.h @@ -58,8 +58,8 @@ struct FlintDataFactoryRegistration { InitializeFlintDataFactoryFunctionPtr initializer; }; -typedef std::shared_ptr (*InitializeDataRepositoryProviderFunctionPtr)( - const DynamicObject&); +typedef std::shared_ptr (*InitializeDataRepositoryProviderFunctionPtr)(const DynamicObject&); + struct DataRepositoryProviderRegistration { const char* providerName; int providerType; @@ -74,11 +74,11 @@ struct DataRepositoryProviderRegistration { * function must be declared using as 'extern "C"' so that the name remains * undecorated. */ -typedef int (*GetModuleRegistrationsFunctionPtr)(ModuleRegistration*); -typedef int (*GetTransformRegistrationsFunctionPtr)(TransformRegistration*); -typedef int (*GetFlintDataRegistrationsFunctionPtr)(FlintDataRegistration*); -typedef int (*GetFlintDataFactoryRegistrationsFunctionPtr)(FlintDataFactoryRegistration*); -typedef int (*GetDataRepositoryProviderRegistrationsFunctionPtr)(DataRepositoryProviderRegistration*); +typedef int (*GetModuleRegistrationsFunctionPtr) (ModuleRegistration*); +typedef int (*GetTransformRegistrationsFunctionPtr) (TransformRegistration*); +typedef int (*GetFlintDataRegistrationsFunctionPtr) (FlintDataRegistration*); +typedef int (*GetFlintDataFactoryRegistrationsFunctionPtr) (FlintDataFactoryRegistration*); +typedef int (*GetDataRepositoryProviderRegistrationsFunctionPtr) (DataRepositoryProviderRegistration*); /** * The module registry maps modules by name to a function wrapper around diff --git a/Source/moja.flint/include/moja/flint/operationresultfluxeigen.h b/Source/moja.flint/include/moja/flint/operationresultfluxeigen.h deleted file mode 100644 index 704213c..0000000 --- a/Source/moja.flint/include/moja/flint/operationresultfluxeigen.h +++ /dev/null @@ -1,54 +0,0 @@ -#ifndef MOJA_FLINT_OPERATIONRESULTFLUXEIGEN_H_ -#define MOJA_FLINT_OPERATIONRESULTFLUXEIGEN_H_ - -#include "moja/flint/_flint_exports.h" -#include "moja/flint/imodule.h" -#include "moja/flint/ioperationresultflux.h" - -namespace moja { -namespace flint { - -class OperationResultFluxEigen : public IOperationResultFlux { -public: - friend class OperationResultFluxIteratorEigen; - - OperationResultFluxEigen() = default; - OperationResultFluxEigen(OperationTransferType transferType, const ModuleMetaData* metaData, int source, int sink, double value); - virtual ~OperationResultFluxEigen() override; - - virtual int source() const override; - virtual int sink() const override; - virtual double value() const override; - - virtual OperationTransferType transferType() const override; - virtual const ModuleMetaData* metaData() const override; - -private: - OperationTransferType _transferType; - const ModuleMetaData* 
_metaData; - int _source; - int _sink; - double _value; -}; - -inline OperationResultFluxEigen::OperationResultFluxEigen(OperationTransferType transferType, const ModuleMetaData* metaData, int source, int sink, double value) - : _transferType(transferType), _metaData(metaData) , _source(source), _sink(sink), _value(value) {} - -inline OperationResultFluxEigen::~OperationResultFluxEigen() {} - -inline int OperationResultFluxEigen::source() const { return _source; } - -inline int OperationResultFluxEigen::sink() const { return _sink; } - -inline double OperationResultFluxEigen::value() const { return _value; } - -inline OperationTransferType OperationResultFluxEigen::transferType() const { return _transferType; } - -inline const ModuleMetaData* OperationResultFluxEigen::metaData() const { return _metaData; } - -typedef std::vector OperationResultFluxEigenNewVector; - -} -} // moja::flint - -#endif // MOJA_FLINT_OPERATIONRESULTFLUXEIGEN_H_ \ No newline at end of file diff --git a/Source/moja.flint/include/moja/flint/operationresultfluxeigenfull.h b/Source/moja.flint/include/moja/flint/operationresultfluxeigenfull.h deleted file mode 100644 index b4eebcf..0000000 --- a/Source/moja.flint/include/moja/flint/operationresultfluxeigenfull.h +++ /dev/null @@ -1,54 +0,0 @@ -#ifndef MOJA_FLINT_OPERATIONRESULTFLUXEIGENFULL_H_ -#define MOJA_FLINT_OPERATIONRESULTFLUXEIGENFULL_H_ - -#include "moja/flint/_flint_exports.h" -#include "moja/flint/imodule.h" -#include "moja/flint/ioperationresultflux.h" - -namespace moja { -namespace flint { - -class OperationResultFluxEigenFull : public IOperationResultFlux { -public: - friend class OperationResultFluxIteratorEigenFull; - - OperationResultFluxEigenFull() = default; - OperationResultFluxEigenFull(OperationTransferType transferType, const ModuleMetaData* metaData, int source, int sink, double value); - virtual ~OperationResultFluxEigenFull() override; - - virtual int source() const override; - virtual int sink() const override; - virtual double value() const override; - - virtual OperationTransferType transferType() const override; - virtual const ModuleMetaData* metaData() const override; - -private: - OperationTransferType _transferType; - const ModuleMetaData* _metaData; - int _source; - int _sink; - double _value; -}; - -inline OperationResultFluxEigenFull::OperationResultFluxEigenFull(OperationTransferType transferType, const ModuleMetaData* metaData, int source, int sink, double value) - : _transferType(transferType), _metaData(metaData) , _source(source), _sink(sink), _value(value) {} - -inline OperationResultFluxEigenFull::~OperationResultFluxEigenFull() {} - -inline int OperationResultFluxEigenFull::source() const { return _source; } - -inline int OperationResultFluxEigenFull::sink() const { return _sink; } - -inline double OperationResultFluxEigenFull::value() const { return _value; } - -inline OperationTransferType OperationResultFluxEigenFull::transferType() const { return _transferType; } - -inline const ModuleMetaData* OperationResultFluxEigenFull::metaData() const { return _metaData; } - -typedef std::vector OperationResultFluxEigenNewVector; - -} -} // moja::flint - -#endif // MOJA_FLINT_OPERATIONRESULTFLUXEIGENFULL_H_ \ No newline at end of file diff --git a/Source/moja.flint/include/moja/flint/operationresultsimple.h b/Source/moja.flint/include/moja/flint/operationresultsimple.h index 54fbdd8..78024ba 100644 --- a/Source/moja.flint/include/moja/flint/operationresultsimple.h +++ b/Source/moja.flint/include/moja/flint/operationresultsimple.h @@ 
-2,10 +2,10 @@ #define MOJA_FLINT_OPERATIONRESULTSIMPLE_H_ #include "moja/flint/ioperationresult.h" -#include "moja/flint/timing.h" #include "moja/flint/operationresultfluxsimple.h" +#include "moja/flint/timing.h" -#include "moja/dynamic.h" +#include namespace moja { namespace flint { @@ -13,44 +13,45 @@ class IOperation; // -------------------------------------------------------------------------------------------- class OperationResultSimple : public IOperationResult { - friend class OperationManagerSimple; - friend class StockOperationSimple; - friend class ProportionalOperationSimple; -public: - typedef std::shared_ptr Ptr; + friend class OperationManagerSimple; + friend class StockOperationSimple; + friend class ProportionalOperationSimple; + + public: + typedef std::shared_ptr Ptr; - explicit OperationResultSimple(IOperation& operation); - virtual ~OperationResultSimple() = default; - OperationResultSimple(const OperationResultSimple&) = delete; + explicit OperationResultSimple(IOperation& operation); + virtual ~OperationResultSimple() = default; + OperationResultSimple(const OperationResultSimple&) = delete; - virtual OperationResultFluxCollection operationResultFluxCollection() override; + virtual OperationResultFluxCollection operationResultFluxCollection() override; - virtual OperationTransferType transferType() const override; - virtual const ModuleMetaData* metaData() const override; - virtual const DynamicVar& dataPackage() const override; - virtual bool hasDataPackage() const override; + virtual OperationTransferType transferType() const override; + virtual const ModuleMetaData* metaData() const override; + virtual const DynamicVar& dataPackage() const override; + virtual bool hasDataPackage() const override; - virtual const Timing& timingWhenApplied() const override { return _timingWhenApplied; }; - virtual void setTimingWhenApplied(const Timing& timing) override { _timingWhenApplied = timing; }; + virtual const Timing& timingWhenApplied() const override { return _timingWhenApplied; }; + virtual void setTimingWhenApplied(const Timing& timing) override { _timingWhenApplied = timing; }; - std::vector& fluxes() { return _fluxes; } + std::vector& fluxes() { return _fluxes; } -private: - void addFlux(int source, int sink, double value); + private: + void addFlux(int source, int sink, double value); -protected: - OperationTransferType _transferType; - const ModuleMetaData* _metaData; - const DynamicVar _dataPackage; - const bool _hasDataPackage; - std::vector _fluxes; + protected: + OperationTransferType _transferType; + const ModuleMetaData* _metaData; + const DynamicVar _dataPackage; + const bool _hasDataPackage; + std::vector _fluxes; - Timing _timingWhenApplied; + Timing _timingWhenApplied; }; #undef USE_INT_ITERATOR -} -} // moja::flint +} // namespace flint +} // namespace moja -#endif // MOJA_FLINT_OPERATIONRESULTSIMPLE_H_ +#endif // MOJA_FLINT_OPERATIONRESULTSIMPLE_H_ diff --git a/Source/moja.flint/include/moja/flint/operationtransfereigen.h b/Source/moja.flint/include/moja/flint/operationtransfereigen.h deleted file mode 100644 index 3743b38..0000000 --- a/Source/moja.flint/include/moja/flint/operationtransfereigen.h +++ /dev/null @@ -1,45 +0,0 @@ -#ifndef MOJA_FLINT_OPERATIONTRANSFEREIGEN_H_ -#define MOJA_FLINT_OPERATIONTRANSFEREIGEN_H_ - -#include "moja/flint/ioperationtransfer.h" - -namespace moja { -namespace flint { - -// -------------------------------------------------------------------------------------------- -class OperationTransferEigen : public 
IOperationTransfer { -public: - OperationTransferEigen() = default; - OperationTransferEigen(OperationTransferType type, int source, int sink, double value, const ModuleMetaData* metaData) : _type(type), _source(source), _sink(sink), _value(value), _metaData(metaData) {} - virtual ~OperationTransferEigen() override; - - virtual int source() const override; - virtual int sink() const override; - virtual double value() const override; - virtual OperationTransferType transferType() const override; - virtual const ModuleMetaData* metaData() const override; - -private: - OperationTransferType _type; - int _source; - int _sink; - double _value; - const ModuleMetaData* _metaData; -}; - -inline OperationTransferEigen::~OperationTransferEigen() {} - -inline int OperationTransferEigen::source() const { return _source; } - -inline int OperationTransferEigen::sink() const { return _sink; } - -inline double OperationTransferEigen::value() const { return _value; } - -inline OperationTransferType OperationTransferEigen::transferType() const { return _type; } - -inline const ModuleMetaData* OperationTransferEigen::metaData() const { return _metaData; } - -} -} // moja::flint - -#endif // MOJA_FLINT_OPERATIONTRANSFEREIGEN_H_ \ No newline at end of file diff --git a/Source/moja.flint/include/moja/flint/operationtransfereigenfull.h b/Source/moja.flint/include/moja/flint/operationtransfereigenfull.h deleted file mode 100644 index 26b70cc..0000000 --- a/Source/moja.flint/include/moja/flint/operationtransfereigenfull.h +++ /dev/null @@ -1,45 +0,0 @@ -#ifndef MOJA_FLINT_OPERATIONTRANSFEREIGENFULL_H_ -#define MOJA_FLINT_OPERATIONTRANSFEREIGENFULL_H_ - -#include "moja/flint/ioperationtransfer.h" - -namespace moja { -namespace flint { - -// -------------------------------------------------------------------------------------------- -class OperationTransferEigenFull : public IOperationTransfer { -public: - OperationTransferEigenFull() = default; - OperationTransferEigenFull(OperationTransferType type, int source, int sink, double value, const ModuleMetaData* metaData) : _type(type), _source(source), _sink(sink), _value(value), _metaData(metaData) {} - virtual ~OperationTransferEigenFull() override; - - virtual int source() const override; - virtual int sink() const override; - virtual double value() const override; - virtual OperationTransferType transferType() const override; - virtual const ModuleMetaData* metaData() const override; - -private: - OperationTransferType _type; - int _source; - int _sink; - double _value; - const ModuleMetaData* _metaData; -}; - -inline OperationTransferEigenFull::~OperationTransferEigenFull() {} - -inline int OperationTransferEigenFull::source() const { return _source; } - -inline int OperationTransferEigenFull::sink() const { return _sink; } - -inline double OperationTransferEigenFull::value() const { return _value; } - -inline OperationTransferType OperationTransferEigenFull::transferType() const { return _type; } - -inline const ModuleMetaData* OperationTransferEigenFull::metaData() const { return _metaData; } - -} -} // moja::flint - -#endif // MOJA_FLINT_OPERATIONTRANSFEREIGENFULL_H_ \ No newline at end of file diff --git a/Source/moja.flint/include/moja/flint/recordaccumulator.h b/Source/moja.flint/include/moja/flint/recordaccumulator.h index 6ef3bca..4067492 100644 --- a/Source/moja.flint/include/moja/flint/recordaccumulator.h +++ b/Source/moja.flint/include/moja/flint/recordaccumulator.h @@ -199,6 +199,8 @@ class RecordAccumulator2 { rec_accu_size_type size() const { return 
_records.size(); } + const rec_accu_vec& records() const { return _records; } + private: Int64 _nextId = 1; rec_accu_set _recordsIdx; @@ -240,7 +242,7 @@ class RecordAccumulatorMap { std::vector getPersistableCollection() const { std::vector persistables; - for (auto rec : _records) { + for (const auto& rec : _records) { persistables.emplace_back(TRecordConv::asPersistable(rec.first, rec.second)); } return persistables; @@ -251,7 +253,7 @@ class RecordAccumulatorMap { std::vector persistables; size_t chunkPosition = 0; for (; (rangeStart != _records.end() && chunkPosition++ < chunkSize); ++rangeStart) { - persistables.push_back(TRecordConv::asPersistable(rangeStart.key(), rangeStart.data())); + persistables.push_back(TRecordConv::asPersistable((*rangeStart).first, (*rangeStart).second)); } return persistables; } @@ -265,6 +267,85 @@ class RecordAccumulatorMap { rec_accu_map _records; }; + +template +class RecordAccumulatorMap2 { + public: + typedef tlx::btree_map rec_accu_map; + typedef typename rec_accu_map::size_type rec_accu_size_type; + + void insert(Int64 id, const TKey& key, const TValue& value) { + // ID has been assigned by user, assume that we can run with this + _nextId = id + 1; // can't guarantee that this will be called in 'id increasing' order but a good guess perhaps + auto newValue = value; + newValue._id = id; + _records.insert(key, newValue); + } + + Int64 accumulate(const TKey& key, const TValue& value) { return accumulate(key, value, _nextId); } + + Int64 accumulate(const TKey& key, const TValue& value, Int64 requestedId) { + auto it = _records.find(key); + if (it != _records.end()) { + it->second += value; + return it->second._id; + } + // First time seeing this key - assign an ID. + _nextId = requestedId + + 1; // can't guarantee that this will be called in 'id increasing' order but a good guess perhaps + auto newValue = value; + newValue._id = requestedId; + _records.insert(std::make_pair(key, newValue)); + return newValue._id; + } + + const rec_accu_map& getRecords() const { return _records; } + + std::vector getPersistableCollection() const { + std::vector persistables; + for (const auto& rec : _records) { + persistables.emplace_back(TRecordConv::asPersistable(rec.first, rec.second)); + } + return persistables; + } + + std::vector getPersistableCollectionRange(typename rec_accu_map::const_iterator& rangeStart, + size_t chunkSize) const { + std::vector persistables; + size_t chunkPosition = 0; + for (; (rangeStart != _records.end() && chunkPosition++ < chunkSize); ++rangeStart) { + persistables.push_back(TRecordConv::asPersistable((*rangeStart).first, (*rangeStart).second)); + } + return persistables; + } + + std::vector getTupleCollection() { + std::vector tuples(_records.size()); + for (const auto& rec : _records) { + tuples.emplace_back(TRecordConv::asTuple(rec.first, rec.second)); + } + return tuples; + } + + std::vector getTupleCollectionRange(typename rec_accu_map::const_iterator& rangeStart, size_t chunkSize) { + std::vector tuples; + size_t chunkPosition = 0; + for (; (rangeStart != _records.end() && chunkPosition++ < chunkSize); ++rangeStart) { + tuples.push_back(TRecordConv::asTuple((*rangeStart).first, (*rangeStart).second)); + } + return tuples; + } + + void clear() { _records.clear(); } + + rec_accu_size_type size() const { return _records.size(); } + + private: + Int64 _nextId = 1; + rec_accu_map _records; +}; + + } // namespace flint } // namespace moja diff --git a/Source/moja.flint/src/aspatialnosqllocaldomaincontroller.cpp 
b/Source/moja.flint/src/aspatialnosqllocaldomaincontroller.cpp deleted file mode 100644 index 9a32662..0000000 --- a/Source/moja.flint/src/aspatialnosqllocaldomaincontroller.cpp +++ /dev/null @@ -1,101 +0,0 @@ -#include "moja/flint/aspatialnosqllocaldomaincontroller.h" - -#include "moja/flint/configuration/localdomain.h" -#include "moja/flint/configuration/spinup.h" -#include "moja/flint/ivariable.h" -#include "moja/flint/configuration/configuration.h" -#include "moja/flint/sequencermodulebase.h" - -#include "moja/datarepository/iprovidernosqlinterface.h" -#include "moja/datarepository/providernosqlpocomongodb.h" - -#include "moja/logging.h" -#include "moja/signals.h" -#include "moja/exception.h" - -#include - -using moja::flint::configuration::LocalDomainType; -using moja::flint::ILocalDomainController; - -namespace moja { -namespace flint { - -void AspatialNoSQLLocalDomainController::configure(const configuration::Configuration& config) { - // Call base class configure - LocalDomainControllerBase::configure(config); - - // Build landscape - const auto& landscapeObject = config.localDomain()->landscapeObject(); - auto iterator = landscapeObject->iterationASpatialIndex(); - _provider = std::static_pointer_cast(_dataRepository.getProvider(landscapeObject->providerName())); - _count = _provider->Count(); - - auto mongoProvider = std::static_pointer_cast(_provider); - - std::string queryStr = "{}"; - std::string fieldsStr = "{\"_id\": 1 }"; - _idSet = mongoProvider->SendQueryRequest(queryStr, fieldsStr); - - //_tiles = std::make_unique(*provider.get(), iterator->maxTileSize(), iterator->tileCacheSize()); -} - -void AspatialNoSQLLocalDomainController::run() { - auto startTime = DateTime::now(); - - startup(); - - _notificationCenter.postNotification(moja::signals::LocalDomainInit); - - MOJA_LOG_DEBUG << "LandUnit Count: " << _count; - - auto current = 0; - for (auto lu : _idSet) { - auto obj = lu.extract(); - auto id = obj["_id"].extract(); - - current++; - MOJA_LOG_DEBUG << "LandUnit Current #: " << current; - - try { - MOJA_LOG_INFO << std::setfill(' ') << std::setw(10) << current << " of " << std::setfill(' ') << std::setw(10) << _count; - MOJA_LOG_DEBUG << "LandUnit Id: " << id; - - //auto idVariable = _landUnitController.getVariable("LandUnitId"); - //idVariable->set_value(id); - - _landUnitController.initialiseData(); - if (!_simulateLandUnit->value()) - continue; - _notificationCenter.postNotification(moja::signals::PreTimingSequence); - if (!_landUnitBuildSuccess->value()) - continue; - _sequencer->Run(_notificationCenter, _landUnitController); - } - catch (const Exception& e) { - MOJA_LOG_FATAL << "[" << current << "]"; - MOJA_LOG_FATAL << e.displayText(); - } - catch (const boost::exception& e) { - MOJA_LOG_FATAL << "[" << current << "]"; - MOJA_LOG_FATAL << boost::diagnostic_information(e); - } - catch (const std::exception& e) { - MOJA_LOG_FATAL << "[" << current << "]"; - MOJA_LOG_FATAL << e.what(); - } - _landUnitController.clearAllOperationResults(); - } - - shutdown(); - - auto endTime = DateTime::now(); - auto ldSpan = endTime - startTime; - MOJA_LOG_INFO << "LocalDomain: Start Time : " << startTime; - MOJA_LOG_INFO << "LocalDomain: Finish Time : " << endTime; - MOJA_LOG_INFO << "LocalDomain: Total Time (seconds) : " << ldSpan.totalSeconds(); -} - -} -} // namespace moja::flint - diff --git a/Source/moja.flint/src/errorscreenwriter.cpp b/Source/moja.flint/src/errorscreenwriter.cpp index 9faf87d..2f71bb4 100644 --- a/Source/moja.flint/src/errorscreenwriter.cpp +++ 
b/Source/moja.flint/src/errorscreenwriter.cpp @@ -1,7 +1,7 @@ #include "moja/flint/errorscreenwriter.h" -#include -#include +#include "moja/flint/ivariable.h" +#include "moja/flint/spatiallocationinfo.h" #include #include diff --git a/Source/moja.flint/src/landunitcontroller.cpp b/Source/moja.flint/src/landunitcontroller.cpp index 55f68ba..1b14b14 100644 --- a/Source/moja.flint/src/landunitcontroller.cpp +++ b/Source/moja.flint/src/landunitcontroller.cpp @@ -1,5 +1,7 @@ #include "moja/flint/landunitcontroller.h" +#include "moja/flint/configuration/configuration.h" +#include "moja/flint/configuration/localdomain.h" #include "moja/flint/flintexceptions.h" #include "moja/flint/ioperation.h" #include "moja/flint/ivariable.h" @@ -7,9 +9,6 @@ #include "moja/flint/operationmanagersimplecache.h" #include "moja/flint/operationmanagerublas.h" -#include -#include - #include #include diff --git a/Source/moja.flint/src/libraryfactory.cpp b/Source/moja.flint/src/libraryfactory.cpp index d884e23..82ea1fd 100644 --- a/Source/moja.flint/src/libraryfactory.cpp +++ b/Source/moja.flint/src/libraryfactory.cpp @@ -2,6 +2,10 @@ // For Internal Library Factory +#include +#include +#include + // Modules #include "moja/flint/aggregatorfilewriter.h" #include "moja/flint/aggregatorfluxstep.h" @@ -102,6 +106,8 @@ std::shared_ptr createEventQueueFactory(const std::string& eventType extern "C" { int getFlintModuleRegistrations(moja::flint::ModuleRegistration* outModuleRegistrations) { + MOJA_LOG_DEBUG << (boost::format("getFlintModuleRegistrations: %s") % "entered").str(); + auto index = 0; outModuleRegistrations[index++] = ModuleRegistration{ "AggregatorLandUnit", []() -> flint::IModule* { @@ -157,10 +163,15 @@ int getFlintModuleRegistrations(moja::flint::ModuleRegistration* outModuleRegist ModuleRegistration{"TestModule2", []() -> flint::IModule* { return new TestModule2(); }}; outModuleRegistrations[index++] = ModuleRegistration{"TestModule3", []() -> flint::IModule* { return new TestModule3(); }}; + + + MOJA_LOG_DEBUG << (boost::format("getFlintModuleRegistrations: %s - %d") % "exit" % index).str(); return index; } int getFlintTransformRegistrations(moja::flint::TransformRegistration* outTransformRegistrations) { + MOJA_LOG_DEBUG << (boost::format("getFlintTransformRegistrations: %s") % "entered").str(); + auto index = 0; outTransformRegistrations[index++] = TransformRegistration{"SQLQueryTransform", []() -> flint::ITransform* { return new SQLQueryTransform(); }}; @@ -181,10 +192,14 @@ int getFlintTransformRegistrations(moja::flint::TransformRegistration* outTransf TransformRegistration{"CompositeTransform", []() -> flint::ITransform* { return new CompositeTransform(); }}; outTransformRegistrations[index++] = TransformRegistration{"SumPoolsTransform", []() -> flint::ITransform* { return new SumPoolsTransform(); }}; + + MOJA_LOG_DEBUG << (boost::format("getFlintTransformRegistrations: %s - %d") % "exit" % index).str(); return index; } int getFlintFlintDataRegistrations(moja::flint::FlintDataRegistration* outFlintDataRegistrations) { + MOJA_LOG_DEBUG << (boost::format("getFlintFlintDataRegistrations: %s") % "entered").str(); + auto index = 0; outFlintDataRegistrations[index++] = FlintDataRegistration{"IdNameCollection", []() -> flint::IFlintData* { return new IdNameCollection(); }}; @@ -202,18 +217,25 @@ int getFlintFlintDataRegistrations(moja::flint::FlintDataRegistration* outFlintD FlintDataRegistration{"EventQueue", []() -> flint::IFlintData* { return new EventQueue(); }}; outFlintDataRegistrations[index++] = 
FlintDataRegistration{ "SimulationUnitDataBase", []() -> flint::IFlintData* { return new SimulationUnitDataBase(); }}; + + MOJA_LOG_DEBUG << (boost::format("getFlintFlintDataRegistrations: %s - %d") % "exit" % index).str(); return index; } int getFlintFlintDataFactoryRegistrations(FlintDataFactoryRegistration* outFlintDataFactoryRegistrations) { + MOJA_LOG_DEBUG << (boost::format("getFlintFlintDataFactoryRegistrations: %s") % "entered").str(); + auto index = 0; outFlintDataFactoryRegistrations[index++] = FlintDataFactoryRegistration{"internal.flint", "EventQueue", &createEventQueueFactory}; - return index; + + MOJA_LOG_DEBUG << (boost::format("getFlintFlintDataFactoryRegistrations: %s - %d") % "exit" % index).str(); + return index; } -int getDataRepositoryProviderRegistrations( - moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration) { +int getProviderRegistrations(DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration) { + MOJA_LOG_DEBUG << (boost::format("getProviderRegistrations: %s") % "entered").str(); + auto index = 0; outDataRepositoryProviderRegistration[index++] = DataRepositoryProviderRegistration{ "RasterTiled", static_cast(datarepository::ProviderTypes::Raster), @@ -226,8 +248,12 @@ int getDataRepositoryProviderRegistrations( [](const DynamicObject& settings) -> std::shared_ptr { return std::make_shared(settings); }}; + + MOJA_LOG_DEBUG << (boost::format("getProviderRegistrations: %s - %d") % "exit" % index).str(); return index; } + } + } // namespace flint } // namespace moja diff --git a/Source/moja.flint/src/librarymanager.cpp b/Source/moja.flint/src/librarymanager.cpp index e74672d..e347d54 100644 --- a/Source/moja.flint/src/librarymanager.cpp +++ b/Source/moja.flint/src/librarymanager.cpp @@ -9,6 +9,10 @@ #include #include +#include +#include +#include + #if defined(ENABLE_MOJAPY) #include "moja/flint/libraryinfopython.h" @@ -39,18 +43,22 @@ const char* libPrefix = "lib"; Int32 LibraryManager::currentLoadOrder = 1; LibraryManager::LibraryManager() { // Load all internal + MOJA_LOG_DEBUG << (boost::format("LibraryManager: %s") % "contructor no args" ).str(); + const auto internalHandles = std::make_shared( "internal.flint", getFlintModuleRegistrations, getFlintTransformRegistrations, getFlintFlintDataRegistrations, - getFlintFlintDataFactoryRegistrations, getDataRepositoryProviderRegistrations); + getFlintFlintDataFactoryRegistrations, getProviderRegistrations); //getDataRepositoryProviderRegistrations); LoadInternalLibrary(internalHandles); } LibraryManager::LibraryManager(std::shared_ptr libraryHandles) { // Load Flint internal modules + MOJA_LOG_DEBUG << (boost::format("LibraryManager: %s") % "contructor no args").str(); + const auto internalHandles = std::make_shared( "internal.flint", getFlintModuleRegistrations, getFlintTransformRegistrations, getFlintFlintDataRegistrations, - getFlintFlintDataFactoryRegistrations, getDataRepositoryProviderRegistrations); + getFlintFlintDataFactoryRegistrations, getProviderRegistrations); LoadInternalLibrary(internalHandles); @@ -67,8 +75,11 @@ LibraryManager::~LibraryManager() { void LibraryManager::AddLibrary(LibraryType libraryType, const std::string& inLibraryName, const std::string& path, const std::string& fileName) { + MOJA_LOG_DEBUG << (boost::format("AddLibrary: %s : %s") % "entered" % inLibraryName).str(); + // Do we already know about this library? 
if (_libraries.find(inLibraryName) != _libraries.end()) { + MOJA_LOG_DEBUG << (boost::format("AddLibrary: %s : %s") % "we know this library" % inLibraryName).str(); return; } @@ -390,10 +401,19 @@ void LibraryManager::RegisterProviders(std::shared_ptr libraryI DataRepositoryProviderRegistration registrations[100]; - const auto count = libraryInfo->libraryHandles->getDataRepositoryProviderRegistrations(registrations); + auto count = -1; + count = libraryInfo->libraryHandles->getDataRepositoryProviderRegistrations(registrations); + MOJA_LOG_DEBUG << (boost::format("RegisterProviders: %s : %s : count %d") % "entered" % libraryName % count).str(); + for (auto i = 0; i < count; i++) { + + MOJA_LOG_DEBUG << (boost::format("RegisterProviders: %s: %d") % "loop" % i).str(); + auto registration = registrations[i]; _providers[std::make_pair(libraryName, registration.providerName)] = libraryInfo; + + MOJA_LOG_DEBUG << (boost::format("RegisterProviders: %s: %s : %s") % "loop" % libraryName % registration.providerName).str(); + auto fp = [registration](const DynamicObject& settings) { return ProviderInterfacePtr(registration.initializer(settings)); }; @@ -402,8 +422,13 @@ } bool LibraryManager::LoadInternalLibrary(std::shared_ptr libraryHandles) { + + MOJA_LOG_DEBUG << (boost::format("LoadInternalLibrary: %s") % "entered").str(); + if (libraryHandles == nullptr) return true; + MOJA_LOG_DEBUG << (boost::format("LoadInternalLibrary: %s : %s") % "calling AddLibrary" % libraryHandles->libraryName).str(); + // Update our set of known libraries, in case we don't already know about this library AddLibrary(LibraryType::Internal, libraryHandles->libraryName); auto libraryInfo = _libraries[libraryHandles->libraryName]; @@ -412,6 +437,8 @@ bool LibraryManager::LoadInternalLibrary(std::shared_ptr li } if (libraryInfo->libraryHandles->getModuleRegistrations == nullptr) { + MOJA_LOG_DEBUG << (boost::format("LoadInternalLibrary: %s") % "calling registrations").str(); + if (libraryInfo->GetLibraryType() != LibraryType::Internal) { throw LibraryLoadException("Attempt to load library already loaded as different type", libraryHandles->libraryName); @@ -420,11 +447,11 @@ bool LibraryManager::LoadInternalLibrary(std::shared_ptr li auto internalLibraryInfo = std::static_pointer_cast(libraryInfo); internalLibraryInfo->libraryHandles = libraryHandles; - RegisterModules(internalLibraryInfo, libraryHandles->libraryName); - RegisterTransforms(internalLibraryInfo, libraryHandles->libraryName); - RegisterFlintData(internalLibraryInfo, libraryHandles->libraryName); - RegisterFlintDataFactory(internalLibraryInfo, libraryHandles->libraryName); - RegisterProviders(internalLibraryInfo, libraryHandles->libraryName); + RegisterModules(internalLibraryInfo, libraryHandles->libraryName); + RegisterTransforms(internalLibraryInfo, libraryHandles->libraryName); + RegisterFlintData(internalLibraryInfo, libraryHandles->libraryName); + RegisterFlintDataFactory(internalLibraryInfo, libraryHandles->libraryName); + RegisterProviders(internalLibraryInfo, libraryHandles->libraryName); } return true; } diff --git a/Source/moja.flint/src/operationresulteigenfull.cpp b/Source/moja.flint/src/operationresulteigenfull.cpp deleted file mode 100644 index 4811921..0000000 --- a/Source/moja.flint/src/operationresulteigenfull.cpp +++ /dev/null @@ -1,37 +0,0 @@ -#include 
"moja/flint/operationresulteigenfull.h" - -#include "moja/flint/operationresultfluxiteratoreigenfull.h" -#include "moja/flint/ioperation.h" - -namespace moja { -namespace flint { - -OperationResultEigenFull::OperationResultEigenFull(IOperation& operation, int poolCount) - : _transferType(operation.transferType()), _metaData(operation.metaData()), _fluxes(poolCount, poolCount), _dataPackage(operation.dataPackage()), _hasDataPackage(operation.hasDataPackage()) { _fluxes.setZero(); } - -OperationResultEigenFull::OperationResultEigenFull(IOperation& operation, Int64 poolCount) - : _transferType(operation.transferType()), _metaData(operation.metaData()), _fluxes(poolCount, poolCount), _dataPackage(operation.dataPackage()), _hasDataPackage(operation.hasDataPackage()) { _fluxes.setZero(); } - -OperationResultFluxCollection OperationResultEigenFull::operationResultFluxCollection() { - auto it = std::make_shared(_transferType, _metaData, _fluxes); - return OperationResultFluxCollection(it); -} - -OperationTransferType OperationResultEigenFull::transferType() const { - return _transferType; -} - -const ModuleMetaData* OperationResultEigenFull::metaData() const { - return _metaData; -} - -const DynamicVar& OperationResultEigenFull::dataPackage() const { - return _dataPackage; -} - -bool OperationResultEigenFull::hasDataPackage() const { - return _hasDataPackage; -} - -} -} // moja::flint diff --git a/Source/moja.flint/tests/CMakeLists.txt b/Source/moja.flint/tests/CMakeLists.txt index 626dd8e..3bf5f88 100644 --- a/Source/moja.flint/tests/CMakeLists.txt +++ b/Source/moja.flint/tests/CMakeLists.txt @@ -1,19 +1,6 @@ -### Unit test ### set(TESTUNIT "${LIBNAME}.test") -find_package(Boost COMPONENTS system filesystem unit_test_framework serialization log REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() - -# HEADERS AND SOURCE - -include_directories( include ../../moja.core/include ../../moja.flint/include ../../moja.datarepository/include) +find_package(Boost COMPONENTS unit_test_framework REQUIRED) configure_file(../../templates/unittestdefinition.cpp ${CMAKE_CURRENT_SOURCE_DIR}/src/_unittestdefinition.cpp) @@ -36,61 +23,26 @@ set(TEST_SRCS src/recordaccumulatorintegrationtests.cpp ) -add_definitions(-DBOOST_ALL_DYN_LINK) - add_executable(${TESTUNIT} ${TEST_SRCS}) add_test(NAME ${LIBNAME} WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND ${TESTUNIT} --result_code=yes --report_level=no) -target_link_libraries( - ${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS} - ) +target_include_directories(${TESTUNIT} + PUBLIC + $ + $ +) -add_dependencies(${TESTUNIT} moja.core) -add_dependencies(${TESTUNIT} moja.datarepository) -add_dependencies(${TESTUNIT} moja.flint) -# add_dependencies(${TESTUNIT} moja.modules.sleek) -#add_dependencies(${TESTUNIT} moja.modules.fullcam) -#add_dependencies(${TESTUNIT} moja.modules.hansen) +target_link_libraries(${TESTUNIT} + PRIVATE + moja::moja.flint + moja::moja.test + Boost::unit_test_framework +) IF (RUN_UNIT_TESTS_ON_BUILD) add_custom_command(TARGET ${TESTUNIT} POST_BUILD COMMAND ${CMAKE_CTEST_COMMAND} ARGS -C $) ENDIF () -### End unit test ### - -### Begin PATH boilerplate for dependent libraries -- adapted from ### -### http://www.cmake.org/pipermail/cmake/2009-May/029464.html ### -### This allows unit tests to run without having to manually add ### -### dependent libraries to the system path. 
### - -# Include an entry for each library that needs to be in the system path. -find_path(POCO_BIN NAMES PocoFoundation.dll PocoFoundation64.dll - PATHS ${POCO_PATH}/bin ${POCO_PATH}/bin64 - PATH_SUFFIXES $) - -file(TO_NATIVE_PATH "${Boost_LIBRARY_DIR}" boost_lib) -file(TO_NATIVE_PATH "${POCO_BIN}" poco_bin) - -# Determine which environment variable controls the locating of -# DLL's and set that variable. -if(WIN32) - set(LD_VARNAME "PATH") - set(LD_PATH "${boost_lib};${poco_bin};$ENV{PATH}") - - # IMPORTANT NOTE: The set_tests_properties(), below, internally - # stores its name/value pairs with a semicolon delimiter. - # because of this we must protect the semicolons in the path. - string(REPLACE ";" "\\;" LD_PATH "${LD_PATH}") -else() - set(LD_VARNAME "LD_LIBRARY_PATH") - set(LD_PATH "${boost_lib}:$ENV{LD_LIBRARY_PATH}") -endif() - -set_tests_properties(${LIBNAME} PROPERTIES ENVIRONMENT "${LD_VARNAME}=${LD_PATH}") -### End PATH boilerplate ### diff --git a/Source/moja.flint/tests/src/operationmanagerublastests.cpp b/Source/moja.flint/tests/src/operationmanagerublastests.cpp index 188a1c1..aa6a26a 100644 --- a/Source/moja.flint/tests/src/operationmanagerublastests.cpp +++ b/Source/moja.flint/tests/src/operationmanagerublastests.cpp @@ -49,62 +49,62 @@ struct UblasFixture { BOOST_FIXTURE_TEST_SUITE(Ublas_operationmanagertests, UblasFixture); -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_NoPoolIteration) { test_NoPoolIteration(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_NoPoolIteration) { test_NoPoolIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_SinglePoolIteration) { test_SinglePoolIteration(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_SinglePoolIteration) { test_SinglePoolIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_MultiplePoolIteration) { test_MultiplePoolIteration(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_MultiplePoolIteration) { test_MultiplePoolIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_NoResultIteration) { test_NoResultIteration(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_NoResultIteration) { test_NoResultIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_SingleResultIteration) { test_SingleResultIteration(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_SingleResultIteration) { test_SingleResultIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_MultipleResultIteration) { test_MultipleResultIteration(manager, module); } +// // 
-------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_MultipleResultIteration) { test_MultipleResultIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_NoResultFluxIteration) { test_NoResultFluxIteration(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_NoResultFluxIteration) { test_NoResultFluxIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_SingleResultFluxIteration) { test_SingleResultFluxIteration(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_SingleResultFluxIteration) { test_SingleResultFluxIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_MultipleResultFluxIteration) { test_MultipleResultFluxIteration(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_MultipleResultFluxIteration) { test_MultipleResultFluxIteration(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_SingleProportionTransfer) { test_SingleProportionTransfer(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_SingleProportionTransfer) { test_SingleProportionTransfer(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_SingleStockTransfer) { test_SingleStockTransfer(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_SingleStockTransfer) { test_SingleStockTransfer(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_DoubleProportionalTransfer) { test_DoubleProportionalTransfer(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_DoubleProportionalTransfer) { test_DoubleProportionalTransfer(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_DoubleStockTransfer) { test_DoubleStockTransfer(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_DoubleStockTransfer) { test_DoubleStockTransfer(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_DoubleStockAndApplyTransfer) { test_DoubleStockAndApplyTransfer(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_DoubleStockAndApplyTransfer) { test_DoubleStockAndApplyTransfer(manager, module); } -// 
-------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_TwoOperationsStockAndProportional) { - test_TwoOperationsStockAndProportional(manager, module); -} +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_TwoOperationsStockAndProportional) { +// test_TwoOperationsStockAndProportional(manager, module); +// } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_Kahan_summation_issues_Proportion) { - test_Kahan_summation_issues_Proportion(manager, module); -} +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_Kahan_summation_issues_Proportion) { +// test_Kahan_summation_issues_Proportion(manager, module); +// } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_Kahan_summation_issues_Proportion_with_SpinUp) { - test_Kahan_summation_issues_Proportion_with_SpinUp(manager, module); -} +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_Kahan_summation_issues_Proportion_with_SpinUp) { +// test_Kahan_summation_issues_Proportion_with_SpinUp(manager, module); +// } #if 0 // don't do performance testing, too slow for blas @@ -125,31 +125,31 @@ BOOST_AUTO_TEST_CASE(Ublas_PerformanceTestCBM) { #endif -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_SubmitOperationAddsToPendingQueue) { SubmitOperationAddsToPendingQueue(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_SubmitOperationAddsToPendingQueue) { SubmitOperationAddsToPendingQueue(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_ClearLastAppliedOperationResults) { ClearLastAppliedOperationResults(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_ClearLastAppliedOperationResults) { ClearLastAppliedOperationResults(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_ApplyOperationsAppendsToCommittedQueue) { - ApplyOperationsAppendsToCommittedQueue(manager, module); -} +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_ApplyOperationsAppendsToCommittedQueue) { +// ApplyOperationsAppendsToCommittedQueue(manager, module); +// } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_ApplyOperationsCorrectlyUpdatesPoolsForSimpleCase) { - ApplyOperationsCorrectlyUpdatesPoolsForSimpleCase(manager, module); -} +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_ApplyOperationsCorrectlyUpdatesPoolsForSimpleCase) { +// ApplyOperationsCorrectlyUpdatesPoolsForSimpleCase(manager, module); +// } -// -------------------------------------------------------------------------------------------- 
-BOOST_AUTO_TEST_CASE(Ublas_ApplyOperationsCorrectlyUpdatesPoolsForComplexCase) { - ApplyOperationsCorrectlyUpdatesPoolsForComplexCase(manager, module); -} +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_ApplyOperationsCorrectlyUpdatesPoolsForComplexCase) { +// ApplyOperationsCorrectlyUpdatesPoolsForComplexCase(manager, module); +// } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_ApplyAndGetOperationsLastApplied) { ApplyAndGetOperationsLastApplied(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_ApplyAndGetOperationsLastApplied) { ApplyAndGetOperationsLastApplied(manager, module); } -// -------------------------------------------------------------------------------------------- -BOOST_AUTO_TEST_CASE(Ublas_ApplyOperationsClearsPendingQueue) { ApplyOperationsClearsPendingQueue(manager, module); } +// // -------------------------------------------------------------------------------------------- +// BOOST_AUTO_TEST_CASE(Ublas_ApplyOperationsClearsPendingQueue) { ApplyOperationsClearsPendingQueue(manager, module); } BOOST_AUTO_TEST_SUITE_END(); diff --git a/Source/moja.modules.gdal/CMakeLists.txt b/Source/moja.modules.gdal/CMakeLists.txt index 54a969c..a5ae4b1 100644 --- a/Source/moja.modules.gdal/CMakeLists.txt +++ b/Source/moja.modules.gdal/CMakeLists.txt @@ -3,22 +3,7 @@ set(LIBNAME "moja.modules.${PACKAGE}") string(REPLACE "." "_" NEW_PACKAGE "${PACKAGE}") string(TOUPPER "${NEW_PACKAGE}" LIBNAME_EXPORT) -find_package(Boost) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(GDAL) -if(GDAL_FOUND) - include_directories(${GDAL_INCLUDE_DIR}) -endif() - -include_directories( - include - ../moja.core/include - ../moja.flint/include - ../moja.datarepository/include -) +find_package(GDAL REQUIRED) configure_file( ../templates/exports.h @@ -79,38 +64,36 @@ set(SRCS ${PROJECT_PROVIDER_HEADERS} ${PROJECT_PROVIDER_SOURCES} ) -add_definitions( -DPOCO_NO_AUTOMATIC_LIBS ) - -add_library( - ${LIBNAME} - ${LIB_MODE} - ${SRCS} -) +add_library(${LIBNAME} ${LIB_MODE} ${SRCS}) +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) +#Set target properties set_target_properties(${LIBNAME} PROPERTIES - VERSION ${MOJA_MULLIONGROUP_VERSION} - SOVERSION ${MOJA_MULLIONGROUP_VERSION_MAJOR} + VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS ) -target_link_libraries( - ${LIBNAME} - moja.core - moja.flint - moja.datarepository - ${Poco_FOUNDATION} - ${Poco_JSON} - ${GDAL_LIBRARY} +target_include_directories(${LIBNAME} + PUBLIC + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src ) -# Set local include path -include_directories(${Poco_INCLUDE_DIRS}) +target_link_libraries(${LIBNAME} + PUBLIC + moja::moja.flint ${GDAL_LIBRARY} +) + +############################################## +# Installation instructions + +include(GNUInstallDirs) -install(TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin) +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) if(ENABLE_TESTS) add_subdirectory(tests) diff --git a/Source/moja.modules.gdal/cmake/moja.modules.gdalConfig.cmake b/Source/moja.modules.gdal/cmake/moja.modules.gdalConfig.cmake new file mode 100644 index 0000000..1adb6da --- /dev/null +++ 
b/Source/moja.modules.gdal/cmake/moja.modules.gdalConfig.cmake @@ -0,0 +1,7 @@ +include(CMakeFindDependencyMacro) +find_dependency(GDAL REQUIRED) +find_dependency(moja REQUIRED COMPONENTS moja.flint ) + +if(NOT TARGET moja::moja.modules.gdal) + include("${CMAKE_CURRENT_LIST_DIR}/moja.modules.gdalTargets.cmake") +endif() \ No newline at end of file diff --git a/Source/moja.modules.gdal/include/moja/modules/gdal/libraryfactory.h b/Source/moja.modules.gdal/include/moja/modules/gdal/libraryfactory.h index 768bdf6..1dc42a8 100644 --- a/Source/moja.modules.gdal/include/moja/modules/gdal/libraryfactory.h +++ b/Source/moja.modules.gdal/include/moja/modules/gdal/libraryfactory.h @@ -7,12 +7,16 @@ namespace moja { namespace modules { namespace gdal { -extern "C" MOJA_LIB_API int getModuleRegistrations (moja::flint::ModuleRegistration* outModuleRegistrations); -extern "C" MOJA_LIB_API int getTransformRegistrations (moja::flint::TransformRegistration* outTransformRegistrations); -extern "C" MOJA_LIB_API int getFlintDataRegistrations (moja::flint::FlintDataRegistration* outFlintDataRegistrations); -extern "C" MOJA_LIB_API int getFlintDataFactoryRegistrations (moja::flint::FlintDataFactoryRegistration* outFlintDataFactoryRegistrations); -extern "C" MOJA_LIB_API int getDataRepositoryProviderRegistrations (moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration); +extern "C" MOJA_LIB_API int getModuleRegistrations(moja::flint::ModuleRegistration* outModuleRegistrations); +extern "C" MOJA_LIB_API int getTransformRegistrations(moja::flint::TransformRegistration* outTransformRegistrations); +extern "C" MOJA_LIB_API int getFlintDataRegistrations(moja::flint::FlintDataRegistration* outFlintDataRegistrations); +extern "C" MOJA_LIB_API int getFlintDataFactoryRegistrations( + moja::flint::FlintDataFactoryRegistration* outFlintDataFactoryRegistrations); +extern "C" MOJA_LIB_API int getDataRepositoryProviderRegistrations( + moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration); -}}} +} // namespace gdal +} // namespace modules +} // namespace moja -#endif // MOJA_MODULES_GDAL_LIBRARYFACTORY_H_ \ No newline at end of file +#endif // MOJA_MODULES_GDAL_LIBRARYFACTORY_H_ \ No newline at end of file diff --git a/Source/moja.modules.gdal/include/moja/modules/gdal/writevariablegeotiff.h b/Source/moja.modules.gdal/include/moja/modules/gdal/writevariablegeotiff.h index bb3f77d..25241ff 100644 --- a/Source/moja.modules.gdal/include/moja/modules/gdal/writevariablegeotiff.h +++ b/Source/moja.modules.gdal/include/moja/modules/gdal/writevariablegeotiff.h @@ -3,176 +3,177 @@ #include "moja/modules/gdal/_modules.gdal_exports.h" -#include "moja/flint/modulebase.h" -#include "moja/flint/ioperationresult.h" -#include "moja/flint/ioperationresultflux.h" -#include "moja/flint/ipool.h" -#include "moja/flint/flux.h" +#include +#include +#include +#include -#include - -#include - -#include #include namespace moja { namespace flint { - class SpatialLocationInfo; -} // namespace moja::flint +class SpatialLocationInfo; +} // namespace flint namespace modules { namespace gdal { class GDAL_API WriteVariableGeotiff : public flint::ModuleBase { -public: - explicit WriteVariableGeotiff(Poco::Mutex& fileHandlingMutex) : ModuleBase(), _fileHandlingMutex(fileHandlingMutex), _useIndexesForFolderName(false), _forceVariableFolderName(true), _applyAreaAdjustment(false){} - virtual ~WriteVariableGeotiff() = default; - - void configure(const DynamicObject& config) override; - void 
subscribe(NotificationCenter& notificationCenter) override; - - // Notification handlers - void onSystemInit() override; - void onLocalDomainInit() override; - void onLocalDomainProcessingUnitInit() override; - void onLocalDomainProcessingUnitShutdown() override; - void onPreTimingSequence() override; - void onTimingInit() override; - void onTimingShutdown() override; - void onOutputStep() override; - void onError(std::string msg) override; - - // --- RAII class for file handle - class FileHandle { - typedef FILE *ptr; - public: - explicit FileHandle(std::string const& name, std::string const& mode = std::string("r")) : - _wrapped_file(fopen(name.c_str(), mode.c_str())) {} - ~FileHandle() { if (_wrapped_file) fclose(_wrapped_file); } - operator ptr() const { return _wrapped_file; } - private: - ptr _wrapped_file; - }; - - // --- Base classs for data layer - class DataSettingsB { - public: - DataSettingsB(Poco::Mutex& fileHandlingMutex, GDALDataType dataType) : - notificationType(OnNotificationType::TimingInit), - _useIndexesForFolderName(false), - _forceVariableFolderName(true), - _applyAreaAdjustment(false), - _subtractPrevValue(false), - _isArray(false), - _arrayIndex(0), - _outputAnnually(false), - _dataType(dataType), - _variable(nullptr), - _pool(), - _fileHandlingMutex(fileHandlingMutex) {} - - virtual ~DataSettingsB() = default; - - virtual void configure(std::string& globalOutputPath, bool useIndexesForFolderName, bool forceVariableFolderName, bool applyAreaAdjustment, const DynamicObject& config) = 0; - virtual void doSystemInit(flint::ILandUnitDataWrapper* _landUnitData) = 0; - virtual void doLocalDomainInit(flint::ILandUnitDataWrapper* _landUnitData) = 0; - virtual void doLocalDomainProcessingUnitInit(std::shared_ptr spatialLocationInfo) = 0; - virtual void doLocalDomainProcessingUnitShutdown(std::shared_ptr spatialLocationInfo) = 0; - virtual void setLUValue(std::shared_ptr spatialLocationInfo, int timestep) = 0; - - virtual void initData(std::shared_ptr spatialLocationInfo, int timeStep) = 0; - - enum class OnNotificationType { - TimingInit, - OutputStep, - TimingShutdown, - Error - }; - - // Set by onNotification string: only - OnNotificationType notificationType; - - protected: - friend class WriteVariableGeotiff; - - // Config settings - std::string _name; // "data_name" - bool _useIndexesForFolderName; - bool _forceVariableFolderName; - bool _applyAreaAdjustment; - bool _subtractPrevValue; - std::string _outputPath; // "output_path" - std::string _variableName; // "variable_name" - std::string _propertyName; // "property_name" - std::vector _poolName; // "pool_name" - std::vector _flux; // "flux" - flux groups to aggregate - std::string _variableDataType; // "variable_data_type" - std::string _onNotification; // when to capture the variable value (which notification method) - TimingInit [default], TimingShutdown, Error - bool _isArray; // "is_array" - int _arrayIndex; // "array_index" - bool _outputAnnually; // Output last step of a year only - int _outputInterval = 1; // output every nth timestep (default: every timestep) - - GDALDataType _dataType; // GDAL Data type - - // Other - const flint::IVariable* _variable; - std::vector _pool; - std::string _tileFolderPath; - Poco::Mutex& _fileHandlingMutex; - }; - - // --- Templated version of Base classs for data layer types - template - class DataSettingsT : public DataSettingsB { - public: - DataSettingsT(Poco::Mutex& fileHandlingMutex, GDALDataType dataType) : DataSettingsB(fileHandlingMutex, dataType) {}; - 
~DataSettingsT() = default; - - virtual void configure(std::string& globalOutputPath, bool useIndexesForFolderName, bool forceVariableFolderName, bool applyAreaAdjustment, const DynamicObject& config) override; - - void doSystemInit(flint::ILandUnitDataWrapper* _landUnitData) override; - void doLocalDomainInit(flint::ILandUnitDataWrapper* _landUnitData) override; - void doLocalDomainProcessingUnitInit(std::shared_ptr spatialLocationInfo) override; - void doLocalDomainProcessingUnitShutdown(std::shared_ptr spatialLocationInfo) override; - T applyValueAdjustment(std::shared_ptr spatialLocationInfo, int timestep, const T val); - void setLUValue(std::shared_ptr spatialLocationInfo, int timestep) override; - void initData(std::shared_ptr spatialLocationInfo, int timeStep) override; - private: - // templated data - T _nodataValue; // "nodata_value" - std::unordered_map> _data; // array of data - flint::ILandUnitDataWrapper* _landUnitData; - - void initializeData(std::shared_ptr spatialLocationInfo, std::vector& data); - void setLUVariableValue(std::shared_ptr spatialLocationInfo, int timestep); - void setLUPoolValue(std::shared_ptr spatialLocationInfo, int timestep); - void setLUFluxValue(std::shared_ptr spatialLocationInfo, int timestep); - void addFlux(const DynamicVar& fluxGroupConfig); - }; - -private: - // Mutexes - Poco::Mutex& _fileHandlingMutex; - - // FlintData - std::shared_ptr _spatialLocationInfo; - - // Other - bool _useIndexesForFolderName; - bool _forceVariableFolderName; - bool _applyAreaAdjustment; - std::string _globalOutputPath; // global "output_path" - std::vector> _dataVecT; // Spatial Output Data Vector - - int getTimestep() const; + public: + enum class data_type + { + unknown = 0, + byte = 1, + u_int16 = 2, + int16 = 3, + u_int32 = 4, + int32 = 5, + float32 = 6, + float64 = 7, + }; + + explicit WriteVariableGeotiff(Poco::Mutex& fileHandlingMutex) + : _fileHandlingMutex(fileHandlingMutex), + _useIndexesForFolderName(false), + _forceVariableFolderName(true), + _applyAreaAdjustment(false) {} + virtual ~WriteVariableGeotiff() = default; + + void configure(const DynamicObject& config) override; + void subscribe(NotificationCenter& notificationCenter) override; + + // Notification handlers + void onSystemInit() override; + void onLocalDomainInit() override; + void onLocalDomainProcessingUnitInit() override; + void onLocalDomainProcessingUnitShutdown() override; + void onPreTimingSequence() override; + void onTimingInit() override; + void onTimingShutdown() override; + void onOutputStep() override; + void onError(std::string msg) override; + + // --- Base class for data layer + class DataSettingsB { + public: + DataSettingsB(Poco::Mutex& fileHandlingMutex, data_type dataType) + : notificationType(OnNotificationType::TimingInit), + _useIndexesForFolderName(false), + _forceVariableFolderName(true), + _applyAreaAdjustment(false), + _subtractPrevValue(false), + _isArray(false), + _arrayIndex(0), + _outputAnnually(false), + _dataType(dataType), + _variable(nullptr), + _fileHandlingMutex(fileHandlingMutex) {} + + virtual ~DataSettingsB() = default; + + virtual void configure(std::string& globalOutputPath, bool useIndexesForFolderName, bool forceVariableFolderName, + bool applyAreaAdjustment, const DynamicObject& config) = 0; + virtual void doSystemInit(flint::ILandUnitDataWrapper* _landUnitData) = 0; + virtual void doLocalDomainInit(flint::ILandUnitDataWrapper* _landUnitData) = 0; + virtual void doLocalDomainProcessingUnitInit( + std::shared_ptr spatialLocationInfo) = 0; + virtual 
void doLocalDomainProcessingUnitShutdown( + std::shared_ptr spatialLocationInfo) = 0; + virtual void setLUValue(std::shared_ptr spatialLocationInfo, int timestep) = 0; + + virtual void initData(std::shared_ptr spatialLocationInfo, int timeStep) = 0; + + enum class OnNotificationType { TimingInit, OutputStep, TimingShutdown, Error }; + + // Set by onNotification string: only + OnNotificationType notificationType; + + protected: + friend class WriteVariableGeotiff; + + // Config settings + std::string _name; // "data_name" + bool _useIndexesForFolderName; + bool _forceVariableFolderName; + bool _applyAreaAdjustment; + bool _subtractPrevValue; + std::string _outputPath; // "output_path" + std::string _variableName; // "variable_name" + std::string _propertyName; // "property_name" + std::vector _poolName; // "pool_name" + std::vector _flux; // "flux" - flux groups to aggregate + std::string _variableDataType; // "variable_data_type" + std::string _onNotification; // when to capture the variable value (which notification method) - TimingInit + // [default], TimingShutdown, Error + bool _isArray; // "is_array" + int _arrayIndex; // "array_index" + bool _outputAnnually; // Output last step of a year only + int _outputInterval = 1; // output every nth timestep (default: every timestep) + + data_type _dataType; + + // Other + const flint::IVariable* _variable; + std::vector _pool; + std::string _tileFolderPath; + Poco::Mutex& _fileHandlingMutex; + }; + + // --- Templated version of Base class for data layer types + template + class DataSettingsT : public DataSettingsB { + public: + DataSettingsT(Poco::Mutex& fileHandlingMutex, data_type dataType) + : DataSettingsB(fileHandlingMutex, dataType){}; + ~DataSettingsT() = default; + + void configure(std::string& globalOutputPath, bool useIndexesForFolderName, bool forceVariableFolderName, + bool applyAreaAdjustment, const DynamicObject& config) override; + + void doSystemInit(flint::ILandUnitDataWrapper* _landUnitData) override; + void doLocalDomainInit(flint::ILandUnitDataWrapper* _landUnitData) override; + void doLocalDomainProcessingUnitInit( + std::shared_ptr spatialLocationInfo) override; + void doLocalDomainProcessingUnitShutdown( + std::shared_ptr spatialLocationInfo) override; + T applyValueAdjustment(std::shared_ptr spatialLocationInfo, int timestep, + const T val); + void setLUValue(std::shared_ptr spatialLocationInfo, int timestep) override; + void initData(std::shared_ptr spatialLocationInfo, int timeStep) override; + + private: + // templated data + T _nodataValue; // "nodata_value" + std::unordered_map> _data; // array of data + flint::ILandUnitDataWrapper* _landUnitData; + + void initializeData(std::shared_ptr spatialLocationInfo, std::vector& data); + void setLUVariableValue(std::shared_ptr spatialLocationInfo, int timestep); + void setLUPoolValue(std::shared_ptr spatialLocationInfo, int timestep); + void setLUFluxValue(std::shared_ptr spatialLocationInfo, int timestep); + void addFlux(const DynamicVar& fluxGroupConfig); + }; + + private: + // Mutexes + Poco::Mutex& _fileHandlingMutex; + + // FlintData + std::shared_ptr _spatialLocationInfo; + + // Other + bool _useIndexesForFolderName; + bool _forceVariableFolderName; + bool _applyAreaAdjustment; + std::string _globalOutputPath; // global "output_path" + std::vector> _dataVecT; // Spatial Output Data Vector + + int getTimestep() const; }; +} // namespace gdal +} // namespace modules +} // namespace moja -} -} -} // namespace moja::modules::gdal - -#endif // 
MOJA_MODULES_GDAL_WRITEVARIABLEGEOTIFF_H_ \ No newline at end of file +#endif // MOJA_MODULES_GDAL_WRITEVARIABLEGEOTIFF_H_ \ No newline at end of file diff --git a/Source/moja.modules.gdal/src/libraryfactory.cpp b/Source/moja.modules.gdal/src/libraryfactory.cpp index 627a5df..e7981ae 100644 --- a/Source/moja.modules.gdal/src/libraryfactory.cpp +++ b/Source/moja.modules.gdal/src/libraryfactory.cpp @@ -1,17 +1,17 @@ #include "moja/modules/gdal/libraryfactory.h" -#include - #include "moja/modules/gdal/writevariablegeotiff.h" +#include + +using moja::flint::DataRepositoryProviderRegistration; +using moja::flint::FlintDataFactoryRegistration; +using moja::flint::FlintDataRegistration; +using moja::flint::IFlintData; using moja::flint::IModule; using moja::flint::ITransform; -using moja::flint::IFlintData; using moja::flint::ModuleRegistration; using moja::flint::TransformRegistration; -using moja::flint::FlintDataRegistration; -using moja::flint::FlintDataFactoryRegistration; -using moja::flint::DataRepositoryProviderRegistration; namespace moja { namespace modules { @@ -19,34 +19,38 @@ namespace gdal { extern "C" { - Poco::Mutex _fileHandlingMutexVarGeotiffWriter; +Poco::Mutex _fileHandlingMutexVarGeotiffWriter; - MOJA_LIB_API int getModuleRegistrations(ModuleRegistration* outModuleRegistrations) { - int index = 0; - outModuleRegistrations[index++] = ModuleRegistration{ "WriteVariableGeotiff", []() -> flint::IModule* { return new WriteVariableGeotiff(_fileHandlingMutexVarGeotiffWriter); } }; - return index; - } - - MOJA_LIB_API int getTransformRegistrations(TransformRegistration* outTransformRegistrations) { - int index = 0; - return index; - } +MOJA_LIB_API int getModuleRegistrations(ModuleRegistration* outModuleRegistrations) { + int index = 0; + outModuleRegistrations[index++] = ModuleRegistration{ + "WriteVariableGeotiff", + []() -> flint::IModule* { return new WriteVariableGeotiff(_fileHandlingMutexVarGeotiffWriter); }}; + return index; +} - MOJA_LIB_API int getFlintDataRegistrations(FlintDataRegistration* outFlintDataRegistrations) { - auto index = 0; - return index; - } +MOJA_LIB_API int getTransformRegistrations(TransformRegistration* outTransformRegistrations) { + int index = 0; + return index; +} - MOJA_LIB_API int getFlintDataFactoryRegistrations(FlintDataFactoryRegistration* outFlintDataFactoryRegistrations) { - auto index = 0; - return index; - } +MOJA_LIB_API int getFlintDataRegistrations(FlintDataRegistration* outFlintDataRegistrations) { + auto index = 0; + return index; +} - MOJA_LIB_API int getDataRepositoryProviderRegistrations(moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration) { - auto index = 0; - return index; - } +MOJA_LIB_API int getFlintDataFactoryRegistrations(FlintDataFactoryRegistration* outFlintDataFactoryRegistrations) { + auto index = 0; + return index; +} +MOJA_LIB_API int getDataRepositoryProviderRegistrations( + moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration) { + auto index = 0; + return index; +} } -}}} +} // namespace gdal +} // namespace modules +} // namespace moja diff --git a/Source/moja.modules.gdal/src/writevariablegeotiff.cpp b/Source/moja.modules.gdal/src/writevariablegeotiff.cpp index 050f824..e9c7e97 100644 --- a/Source/moja.modules.gdal/src/writevariablegeotiff.cpp +++ b/Source/moja.modules.gdal/src/writevariablegeotiff.cpp @@ -1,575 +1,674 @@ -#include +#include "moja/modules/gdal/writevariablegeotiff.h" -#include "moja/flint/ivariable.h" -#include 
"moja/flint/iflintdata.h" -#include "moja/flint/variableandpoolstringbuilder.h" -#include "moja/flint/spatiallocationinfo.h" -#include "moja/flint/ipool.h" +#include "moja/exception.h" -#include "moja/signals.h" -#include "moja/notificationcenter.h" -#include "moja/flint/itiming.h" +#include +#include +#include +#include +#include +#include + +#include +#include #include -#include #include +#include -#include -#include +#include #include #include +#include +#include #include -#include -#include -#include +#include namespace moja { namespace modules { namespace gdal { void WriteVariableGeotiff::configure(const DynamicObject& config) { - _globalOutputPath = config.contains("output_path") ? config["output_path"].convert() : ""; - - _useIndexesForFolderName = false; - if (config.contains("use_indexes_for_folder_name")) { - _useIndexesForFolderName = config["use_indexes_for_folder_name"]; - } - _forceVariableFolderName = true; - if (config.contains("force_variable_folder_name")) { - _forceVariableFolderName = config["force_variable_folder_name"]; - } - _applyAreaAdjustment = false; - if (config.contains("apply_area_adjustment")) { - _applyAreaAdjustment = config["apply_area_adjustment"]; - } - - const auto& items = config["items"].extract(); - - for (auto& item : items) { - const auto& itemConfig = item.extract(); - auto enabled = true; - if (itemConfig.contains("enabled")) - enabled = itemConfig["enabled"]; - if (enabled) { - const auto variableDataType = itemConfig["variable_data_type"].convert(); - - if (variableDataType == "UInt8" ) { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_Byte)); } - else if (variableDataType == "UInt16") { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_UInt16)); } - else if (variableDataType == "Int16" ) { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_Int16)); } - else if (variableDataType == "Int32" ) { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_Int32)); } - else if (variableDataType == "UInt32") { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_UInt32)); } - else if (variableDataType == "Int64" ) { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_Int32)); } - else if (variableDataType == "UInt64") { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_UInt32)); } - else if (variableDataType == "float" ) { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_Float32)); } - else if (variableDataType == "double") { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_Float64)); } - else { _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, GDALDataType::GDT_Int16)); } - - _dataVecT.back()->configure(_globalOutputPath, _useIndexesForFolderName, _forceVariableFolderName, _applyAreaAdjustment, itemConfig); - } - } + _globalOutputPath = config.contains("output_path") ? 
config["output_path"].convert() : ""; + + _useIndexesForFolderName = false; + if (config.contains("use_indexes_for_folder_name")) { + _useIndexesForFolderName = config["use_indexes_for_folder_name"]; + } + _forceVariableFolderName = true; + if (config.contains("force_variable_folder_name")) { + _forceVariableFolderName = config["force_variable_folder_name"]; + } + _applyAreaAdjustment = false; + if (config.contains("apply_area_adjustment")) { + _applyAreaAdjustment = config["apply_area_adjustment"]; + } + + const auto& items = config["items"].extract(); + + for (auto& item : items) { + const auto& itemConfig = item.extract(); + auto enabled = true; + if (itemConfig.contains("enabled")) enabled = itemConfig["enabled"]; + if (enabled) { + const auto variableDataType = itemConfig["variable_data_type"].convert(); + + if (variableDataType == "UInt8") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::byte)); + } else if (variableDataType == "UInt16") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::u_int16)); + } else if (variableDataType == "Int16") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::int16)); + } else if (variableDataType == "Int32") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::int32)); + } else if (variableDataType == "UInt32") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::u_int32)); + } else if (variableDataType == "Int64") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::int32)); + } else if (variableDataType == "UInt64") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::u_int32)); + } else if (variableDataType == "float") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::float32)); + } else if (variableDataType == "double") { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::float64)); + } else { + _dataVecT.emplace_back(std::make_unique>(_fileHandlingMutex, data_type::int16)); + } + + _dataVecT.back()->configure(_globalOutputPath, _useIndexesForFolderName, _forceVariableFolderName, + _applyAreaAdjustment, itemConfig); + } + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::subscribe(NotificationCenter& notificationCenter) { - notificationCenter.subscribe(signals::SystemInit , &WriteVariableGeotiff::onSystemInit , *this); - notificationCenter.subscribe(signals::LocalDomainInit , &WriteVariableGeotiff::onLocalDomainInit , *this); - notificationCenter.subscribe(signals::LocalDomainProcessingUnitInit , &WriteVariableGeotiff::onLocalDomainProcessingUnitInit , *this); - notificationCenter.subscribe(signals::OutputStep , &WriteVariableGeotiff::onOutputStep , *this); - notificationCenter.subscribe(signals::LocalDomainProcessingUnitShutdown , &WriteVariableGeotiff::onLocalDomainProcessingUnitShutdown , *this); - notificationCenter.subscribe(signals::PreTimingSequence , &WriteVariableGeotiff::onPreTimingSequence , *this); - notificationCenter.subscribe(signals::TimingInit , &WriteVariableGeotiff::onTimingInit , *this); - notificationCenter.subscribe(signals::TimingShutdown , &WriteVariableGeotiff::onTimingShutdown , *this); - notificationCenter.subscribe(signals::Error , &WriteVariableGeotiff::onError , *this); + notificationCenter.subscribe(signals::SystemInit, &WriteVariableGeotiff::onSystemInit, *this); + notificationCenter.subscribe(signals::LocalDomainInit, 
&WriteVariableGeotiff::onLocalDomainInit, *this); + notificationCenter.subscribe(signals::LocalDomainProcessingUnitInit, + &WriteVariableGeotiff::onLocalDomainProcessingUnitInit, *this); + notificationCenter.subscribe(signals::OutputStep, &WriteVariableGeotiff::onOutputStep, *this); + notificationCenter.subscribe(signals::LocalDomainProcessingUnitShutdown, + &WriteVariableGeotiff::onLocalDomainProcessingUnitShutdown, *this); + notificationCenter.subscribe(signals::PreTimingSequence, &WriteVariableGeotiff::onPreTimingSequence, *this); + notificationCenter.subscribe(signals::TimingInit, &WriteVariableGeotiff::onTimingInit, *this); + notificationCenter.subscribe(signals::TimingShutdown, &WriteVariableGeotiff::onTimingShutdown, *this); + notificationCenter.subscribe(signals::Error, &WriteVariableGeotiff::onError, *this); } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onSystemInit() { - // build the filename using pools and variable values - flint::VariableAndPoolStringBuilder databaseNameBuilder(_landUnitData.get(), _globalOutputPath); - _globalOutputPath = databaseNameBuilder.result(); + // build the filename using pools and variable values + flint::VariableAndPoolStringBuilder databaseNameBuilder(_landUnitData.get(), _globalOutputPath); + _globalOutputPath = databaseNameBuilder.result(); - for (auto& item : _dataVecT) { - item->doSystemInit(_landUnitData.get()); - } + for (auto& item : _dataVecT) { + item->doSystemInit(_landUnitData.get()); + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onLocalDomainInit() { - _spatialLocationInfo = std::static_pointer_cast( - _landUnitData->getVariable("spatialLocationInfo")->value() - .extract>()); + _spatialLocationInfo = std::static_pointer_cast( + _landUnitData->getVariable("spatialLocationInfo")->value().extract>()); - for (auto& item : _dataVecT) { - item->doLocalDomainInit(_landUnitData.get()); - } + for (auto& item : _dataVecT) { + item->doLocalDomainInit(_landUnitData.get()); + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onLocalDomainProcessingUnitInit() { - for (auto& item : _dataVecT) { - item->doLocalDomainProcessingUnitInit(_spatialLocationInfo); - } + for (auto& item : _dataVecT) { + item->doLocalDomainProcessingUnitInit(_spatialLocationInfo); + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onLocalDomainProcessingUnitShutdown() { - for (auto& item : _dataVecT) { - item->doLocalDomainProcessingUnitShutdown(_spatialLocationInfo); - } + for (auto& item : _dataVecT) { + item->doLocalDomainProcessingUnitShutdown(_spatialLocationInfo); + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onPreTimingSequence() { - for (auto& item : _dataVecT) { - if (item->notificationType == DataSettingsB::OnNotificationType::TimingInit - || item->notificationType == DataSettingsB::OnNotificationType::TimingShutdown - || item->notificationType == DataSettingsB::OnNotificationType::Error) { - item->initData(_spatialLocationInfo, 0); - } - } + for (auto& item : _dataVecT) { + if (item->notificationType == DataSettingsB::OnNotificationType::TimingInit || + item->notificationType == DataSettingsB::OnNotificationType::TimingShutdown || + item->notificationType == 
DataSettingsB::OnNotificationType::Error) { + item->initData(_spatialLocationInfo, 0); + } + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onTimingInit() { - for (auto& item : _dataVecT) { - if (item->notificationType == DataSettingsB::OnNotificationType::TimingInit) - /// if we have timeStep here it's treated like a timeseries of data, which increase data volume massively - // Best to force timestep to 0 for now - item->setLUValue(_spatialLocationInfo, 0); // getTimestep()); - } + for (auto& item : _dataVecT) { + if (item->notificationType == DataSettingsB::OnNotificationType::TimingInit) + /// if we have timeStep here it's treated like a timeseries of data, which increase data volume massively + // Best to force timestep to 0 for now + item->setLUValue(_spatialLocationInfo, 0); // getTimestep()); + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onTimingShutdown() { - for (auto& item : _dataVecT) { - if (item->notificationType == DataSettingsB::OnNotificationType::TimingShutdown) - /// if we have timeStep here it's treated like a timeseries of data, which increase data volume massively - // Best to force timestep to 0 for now - item->setLUValue(_spatialLocationInfo, 0); // getTimestep()); - } + for (auto& item : _dataVecT) { + if (item->notificationType == DataSettingsB::OnNotificationType::TimingShutdown) + /// if we have timeStep here it's treated like a timeseries of data, which increase data volume massively + // Best to force timestep to 0 for now + item->setLUValue(_spatialLocationInfo, 0); // getTimestep()); + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onOutputStep() { - for (auto& item : _dataVecT) { - if (item->notificationType == DataSettingsB::OnNotificationType::OutputStep) { - const auto timing = _landUnitData->timing(); - if (!item->_outputAnnually || timing->curStartDate().month() == 12) - item->setLUValue(_spatialLocationInfo, getTimestep()); - } - } + for (auto& item : _dataVecT) { + if (item->notificationType == DataSettingsB::OnNotificationType::OutputStep) { + const auto timing = _landUnitData->timing(); + if (!item->_outputAnnually || timing->curStartDate().month() == 12) + item->setLUValue(_spatialLocationInfo, getTimestep()); + } + } } // -------------------------------------------------------------------------------------------- void WriteVariableGeotiff::onError(std::string msg) { - for (auto& item : _dataVecT) { - if (item->notificationType == DataSettingsB::OnNotificationType::Error) - /// if we have timeStep here it's treated like a timeseries of data, which increase data volume massively - // Best to force timestep to 0 for now - item->setLUValue(_spatialLocationInfo, 0); // getTimestep()); - } + for (auto& item : _dataVecT) { + if (item->notificationType == DataSettingsB::OnNotificationType::Error) + /// if we have timeStep here it's treated like a timeseries of data, which increase data volume massively + // Best to force timestep to 0 for now + item->setLUValue(_spatialLocationInfo, 0); // getTimestep()); + } } // -------------------------------------------------------------------------------------------- int WriteVariableGeotiff::getTimestep() const { - const auto timing = _landUnitData->timing(); - return timing->step(); + const auto timing = _landUnitData->timing(); + return timing->step(); } - // 
-------------------------------------------------------------------------------------------- template -void WriteVariableGeotiff::DataSettingsT::configure(std::string& globalOutputPath, bool useIndexesForFolderName, bool forceVariableFolderName, bool applyAreaAdjustment, const DynamicObject& config) { - _name = config["data_name"].convert(); - - _useIndexesForFolderName = config.contains("use_indexes_for_folder_name") ? config["use_indexes_for_folder_name"].convert() : useIndexesForFolderName; - _forceVariableFolderName = config.contains("force_variable_folder_name") ? config["force_variable_folder_name"].convert() : forceVariableFolderName; - _applyAreaAdjustment = config.contains("apply_area_adjustment") ? config["apply_area_adjustment"].convert() : applyAreaAdjustment; - _subtractPrevValue = config.contains("subtract_previous_value") ? config["subtract_previous_value"].convert() : false; - _outputPath = config.contains("output_path") ? config["output_path"].convert() : globalOutputPath; - _propertyName = config.contains("property_name") ? config["property_name"].convert() : ""; - _variableName = config.contains("variable_name") ? config["variable_name"].convert() : ""; - _variableDataType = config["variable_data_type"].convert(); - _onNotification = config.contains("on_notification") ? config["on_notification"].convert() : "TimingInit"; - _isArray = config.contains("is_array") ? config["is_array"].convert() : false; - _arrayIndex = config.contains("array_index") ? config["array_index"].convert() : 0; - _outputInterval = config.contains("output_interval") ? config["output_interval"].convert() : 1; - _nodataValue = config.contains("nodata_value") ? config["nodata_value"].convert() - : std::numeric_limits::is_integer ? std::numeric_limits::lowest() - : -pow(10.0, std::numeric_limits::max_exponent10); - - if (config.contains("pool_name")) { - if (config["pool_name"].isVector()) { - for (const auto& s : config["pool_name"]) { - _poolName.push_back(s); - } - } - else { - _poolName.push_back(config["pool_name"].convert()); - } - } - - if (config.contains("flux")) { - if (config["flux"].isVector()) { - for (const auto& fluxGroup : config["flux"]) { - addFlux(fluxGroup); - } - } else { - addFlux(config["flux"]); - } - } - - if (_onNotification == "TimingInit") - notificationType = OnNotificationType::TimingInit; - else if (_onNotification == "TimingShutdown") - notificationType = OnNotificationType::TimingShutdown; - else if (_onNotification == "Error") - notificationType = OnNotificationType::Error; - else if (_onNotification == "OutputStep") { - _outputAnnually = config.contains("output_annual_only") ? config["output_annual_only"].convert() : false; - notificationType = OnNotificationType::OutputStep; - } +void WriteVariableGeotiff::DataSettingsT::configure(std::string& globalOutputPath, bool useIndexesForFolderName, + bool forceVariableFolderName, bool applyAreaAdjustment, + const DynamicObject& config) { + _name = config["data_name"].convert(); + + _useIndexesForFolderName = config.contains("use_indexes_for_folder_name") + ? config["use_indexes_for_folder_name"].convert() + : useIndexesForFolderName; + _forceVariableFolderName = config.contains("force_variable_folder_name") + ? config["force_variable_folder_name"].convert() + : forceVariableFolderName; + _applyAreaAdjustment = + config.contains("apply_area_adjustment") ? config["apply_area_adjustment"].convert() : applyAreaAdjustment; + _subtractPrevValue = + config.contains("subtract_previous_value") ? 
config["subtract_previous_value"].convert() : false; + _outputPath = config.contains("output_path") ? config["output_path"].convert() : globalOutputPath; + _propertyName = config.contains("property_name") ? config["property_name"].convert() : ""; + _variableName = config.contains("variable_name") ? config["variable_name"].convert() : ""; + _variableDataType = config["variable_data_type"].convert(); + _onNotification = + config.contains("on_notification") ? config["on_notification"].convert() : "TimingInit"; + _isArray = config.contains("is_array") ? config["is_array"].convert() : false; + _arrayIndex = config.contains("array_index") ? config["array_index"].convert() : 0; + _outputInterval = config.contains("output_interval") ? config["output_interval"].convert() : 1; + _nodataValue = config.contains("nodata_value") + ? config["nodata_value"].convert() + : std::numeric_limits::is_integer ? std::numeric_limits::lowest() + : T(-pow(10.0, std::numeric_limits::max_exponent10)); + + if (config.contains("pool_name")) { + if (config["pool_name"].isVector()) { + for (const auto& s : config["pool_name"]) { + _poolName.push_back(s); + } + } else { + _poolName.push_back(config["pool_name"].convert()); + } + } + + if (config.contains("flux")) { + if (config["flux"].isVector()) { + for (const auto& fluxGroup : config["flux"]) { + addFlux(fluxGroup); + } + } else { + addFlux(config["flux"]); + } + } + + if (_onNotification == "TimingInit") + notificationType = OnNotificationType::TimingInit; + else if (_onNotification == "TimingShutdown") + notificationType = OnNotificationType::TimingShutdown; + else if (_onNotification == "Error") + notificationType = OnNotificationType::Error; + else if (_onNotification == "OutputStep") { + _outputAnnually = config.contains("output_annual_only") ? config["output_annual_only"].convert() : false; + notificationType = OnNotificationType::OutputStep; + } } // -------------------------------------------------------------------------------------------- template void WriteVariableGeotiff::DataSettingsT::addFlux(const DynamicVar& fluxConfig) { - const auto& fluxGroup = fluxConfig.extract(); - - std::vector sourcePoolNames; - for (const auto& poolName : fluxGroup["from"]) { - sourcePoolNames.push_back(poolName); - } - - std::vector destPoolNames; - for (const auto& poolName : fluxGroup["to"]) { - destPoolNames.push_back(poolName); - } - - auto fluxSource = flint::FluxSource::COMBINED; - if (fluxGroup.contains("flux_source")) { - auto src = fluxGroup["flux_source"].convert(); - fluxSource = boost::iequals(src, "disturbance") ? flint::FluxSource::DISTURBANCE - : boost::iequals(src, "annual_process") ? flint::FluxSource::ANNUAL_PROCESS - : fluxSource; - } - - bool subtract = false; - if (fluxGroup.contains("subtract")) { - subtract = fluxGroup["subtract"]; - } - - _flux.push_back(flint::Flux(sourcePoolNames, destPoolNames, fluxSource, subtract)); + const auto& fluxGroup = fluxConfig.extract(); + + std::vector sourcePoolNames; + for (const auto& poolName : fluxGroup["from"]) { + sourcePoolNames.push_back(poolName); + } + + std::vector destPoolNames; + for (const auto& poolName : fluxGroup["to"]) { + destPoolNames.push_back(poolName); + } + + auto fluxSource = flint::FluxSource::COMBINED; + if (fluxGroup.contains("flux_source")) { + auto src = fluxGroup["flux_source"].convert(); + fluxSource = boost::iequals(src, "disturbance") + ? flint::FluxSource::DISTURBANCE + : boost::iequals(src, "annual_process") ? 
flint::FluxSource::ANNUAL_PROCESS : fluxSource; + } + + bool subtract = false; + if (fluxGroup.contains("subtract")) { + subtract = fluxGroup["subtract"]; + } + + _flux.push_back(flint::Flux(sourcePoolNames, destPoolNames, fluxSource, subtract)); } // -------------------------------------------------------------------------------------------- template void WriteVariableGeotiff::DataSettingsT::doSystemInit(flint::ILandUnitDataWrapper* _landUnitData) { - - flint::VariableAndPoolStringBuilder databaseNameBuilder(_landUnitData, _outputPath); - _outputPath = databaseNameBuilder.result(); - - std::string variableFolder; - if (_forceVariableFolderName) { - variableFolder = (boost::format("%1%%2%") - % Poco::Path::separator() - % _name - ).str(); - } - else { - variableFolder = ""; - } - - Poco::File workingFolder(_outputPath); - const auto spatialOutputFolderPath = (boost::format("%1%%2%") - % workingFolder.path() - % variableFolder - // % Poco::Path::separator() - // % _name - ).str(); - - try { - workingFolder.createDirectories(); - } - catch (Poco::FileExistsException&) { /* Poco has a bug here, exception shouldn't be thrown, has been fixed in 1.7.8 */ } - - Poco::File spatialOutputFolder(spatialOutputFolderPath); - try { - spatialOutputFolder.createDirectories(); - } - catch (Poco::FileExistsException&) { /* Poco has a bug here, exception shouldn't be thrown, has been fixed in 1.7.8 */ } + flint::VariableAndPoolStringBuilder databaseNameBuilder(_landUnitData, _outputPath); + _outputPath = databaseNameBuilder.result(); + + std::string variableFolder; + if (_forceVariableFolderName) { + variableFolder = (boost::format("%1%%2%") % Poco::Path::separator() % _name).str(); + } else { + variableFolder = ""; + } + + Poco::File workingFolder(_outputPath); + const auto spatialOutputFolderPath = (boost::format("%1%%2%") % workingFolder.path() % variableFolder + // % Poco::Path::separator() + // % _name + ) + .str(); + + try { + workingFolder.createDirectories(); + } catch ( + Poco::FileExistsException&) { /* Poco has a bug here, exception shouldn't be thrown, has been fixed in 1.7.8 */ + } + + Poco::File spatialOutputFolder(spatialOutputFolderPath); + try { + spatialOutputFolder.createDirectories(); + } catch ( + Poco::FileExistsException&) { /* Poco has a bug here, exception shouldn't be thrown, has been fixed in 1.7.8 */ + } } // -------------------------------------------------------------------------------------------- template void WriteVariableGeotiff::DataSettingsT::doLocalDomainInit(flint::ILandUnitDataWrapper* _landUnitData) { - this->_landUnitData = _landUnitData; - if (!_poolName.empty()) { - for (std::string s : _poolName) { - _pool.push_back(_landUnitData->getPool(s)); - } - } else if (!_flux.empty()) { - for (auto& flux : _flux) { - flux.init(_landUnitData); - } - } else { - _variable = _landUnitData->getVariable(_variableName); - } + this->_landUnitData = _landUnitData; + if (!_poolName.empty()) { + for (std::string s : _poolName) { + _pool.push_back(_landUnitData->getPool(s)); + } + } else if (!_flux.empty()) { + for (auto& flux : _flux) { + flux.init(_landUnitData); + } + } else { + _variable = _landUnitData->getVariable(_variableName); + } } // -------------------------------------------------------------------------------------------- template void WriteVariableGeotiff::DataSettingsT::doLocalDomainProcessingUnitInit( - std::shared_ptr spatialLocationInfo) { - Poco::File workingFolder(_outputPath); - std::string variableFolder; - - if (_forceVariableFolderName) { - variableFolder = 
(boost::format("%1%%2%") - % Poco::Path::separator() - % _name - ).str(); - } else { - variableFolder = ""; - } - - if (_useIndexesForFolderName) { - _tileFolderPath = (boost::format("%1%%2%%3%%4%") - % workingFolder.path() - // % Poco::Path::separator() - // % _name - % variableFolder - % Poco::Path::separator() - % boost::io::group(std::setfill('0'), std::setw(6), spatialLocationInfo->_tileIdx) - ).str(); - } - else { - _tileFolderPath = (boost::format("%1%%2%%3%%4%%5%_%6%%7%") - % workingFolder.path() - // % Poco::Path::separator() - // % _name - % variableFolder - % Poco::Path::separator() - % (spatialLocationInfo->_tileLatLon.lon < 0 ? "-" : "") - % boost::io::group(std::setfill('0'), std::setw(3), std::abs(spatialLocationInfo->_tileLatLon.lon)) - % (spatialLocationInfo->_tileLatLon.lat < 0 ? "-" : "") - % boost::io::group(std::setfill('0'), std::setw(3), std::abs(spatialLocationInfo->_tileLatLon.lat)) - ).str(); - } - - Poco::File tileFolder(_tileFolderPath); - Poco::Mutex::ScopedLock lock(_fileHandlingMutex); - try { - tileFolder.createDirectories(); - } - catch (Poco::FileExistsException&) { /* Poco has a bug here, exception shouldn't be thrown, has been fixed in 1.7.8 */ } + std::shared_ptr spatialLocationInfo) { + Poco::File workingFolder(_outputPath); + std::string variableFolder; + + if (_forceVariableFolderName) { + variableFolder = (boost::format("%1%%2%") % Poco::Path::separator() % _name).str(); + } else { + variableFolder = ""; + } + + if (_useIndexesForFolderName) { + _tileFolderPath = (boost::format("%1%%2%%3%%4%") % + workingFolder.path() + // % Poco::Path::separator() + // % _name + % variableFolder % Poco::Path::separator() % + boost::io::group(std::setfill('0'), std::setw(6), spatialLocationInfo->_tileIdx)) + .str(); + } else { + _tileFolderPath = + (boost::format("%1%%2%%3%%4%%5%_%6%%7%") % + workingFolder.path() + // % Poco::Path::separator() + // % _name + % variableFolder % Poco::Path::separator() % (spatialLocationInfo->_tileLatLon.lon < 0 ? "-" : "") % + boost::io::group(std::setfill('0'), std::setw(3), std::abs(spatialLocationInfo->_tileLatLon.lon)) % + (spatialLocationInfo->_tileLatLon.lat < 0 ? 
"-" : "") % + boost::io::group(std::setfill('0'), std::setw(3), std::abs(spatialLocationInfo->_tileLatLon.lat))) + .str(); + } + + Poco::File tileFolder(_tileFolderPath); + Poco::Mutex::ScopedLock lock(_fileHandlingMutex); + try { + tileFolder.createDirectories(); + } catch ( + Poco::FileExistsException&) { /* Poco has a bug here, exception shouldn't be thrown, has been fixed in 1.7.8 */ + } } // -------------------------------------------------------------------------------------------- template -void WriteVariableGeotiff::DataSettingsT::initializeData(std::shared_ptr spatialLocationInfo, std::vector& data) { - int numCells = spatialLocationInfo->_cellRows * spatialLocationInfo->_cellCols; - data.resize(numCells, _nodataValue); +void WriteVariableGeotiff::DataSettingsT::initializeData( + std::shared_ptr spatialLocationInfo, std::vector& data) { + int numCells = spatialLocationInfo->_cellRows * spatialLocationInfo->_cellCols; + data.resize(numCells, _nodataValue); } // -------------------------------------------------------------------------------------------- +GDALDataType gdal_type(WriteVariableGeotiff::data_type type) { + switch (type) { + case WriteVariableGeotiff::data_type::int32: + return GDALDataType::GDT_Int32; + case WriteVariableGeotiff::data_type::unknown: + return GDALDataType::GDT_Unknown; + case WriteVariableGeotiff::data_type::byte: + return GDALDataType::GDT_Byte; + case WriteVariableGeotiff::data_type::u_int16: + return GDALDataType::GDT_UInt16; + case WriteVariableGeotiff::data_type::int16: + return GDALDataType::GDT_Int16; + case WriteVariableGeotiff::data_type::u_int32: + return GDALDataType::GDT_UInt32; + case WriteVariableGeotiff::data_type::float32: + return GDALDataType::GDT_Float32; + case WriteVariableGeotiff::data_type::float64: + return GDALDataType::GDT_Float64; + default: + return GDALDataType::GDT_Unknown; + } +} + +struct dataset_closer { + dataset_closer(GDALDataset* dataset) : gdal_dataset_(dataset) {} + + void operator()(GDALRasterBand* band) const { + auto gdal_dataset = band ? 
band->GetDataset() : nullptr; + assert(gdal_dataset == gdal_dataset_); + if (gdal_dataset) { + if (band && band->GetAccess() == GA_Update) { + double min, max, mean, std_dev; + auto try_statistics = band->GetStatistics(FALSE, FALSE, &min, &max, &mean, &std_dev); + if (try_statistics != CE_Warning) { + band->ComputeStatistics(FALSE, &min, &max, &mean, &std_dev, nullptr, nullptr); + band->SetStatistics(min, max, mean, std_dev); + } + } + GDALFlushCache(gdal_dataset); + GDALClose(gdal_dataset); + } + } + GDALDataset* gdal_dataset_; +}; + +static std::shared_ptr create_gdalraster(const Poco::File& path, int rows, int cols, + GDALDataType datatype, double* transform) { + if (GDALGetDriverCount() == 0) { + GDALAllRegister(); + } + + GDALDriver* driver = GetGDALDriverManager()->GetDriverByName("GTiff"); + + char** options = nullptr; + options = CSLSetNameValue(options, "TILED", "YES"); + options = CSLSetNameValue(options, "COMPRESS", "DEFLATE"); + + auto dataset = driver->Create(path.path().c_str(), cols, rows, 1, datatype, options); + if (dataset == nullptr) { + std::ostringstream oss; + oss << "Could not create raster file: " << path.path() << std::endl; + throw ApplicationException(oss.str()); + } + dataset->SetGeoTransform(transform); + OGRSpatialReference srs; + srs.SetWellKnownGeogCS("EPSG:4326"); + char* srs_wkt = nullptr; + srs.exportToWkt(&srs_wkt); + dataset->SetProjection(srs_wkt); + CPLFree(srs_wkt); + auto band = dataset->GetRasterBand(1); + return std::shared_ptr(band, dataset_closer(dataset)); +} template void WriteVariableGeotiff::DataSettingsT::doLocalDomainProcessingUnitShutdown( - std::shared_ptr spatialLocationInfo) { - Poco::Mutex::ScopedLock lock(_fileHandlingMutex); - - typename std::unordered_map>::iterator itPrev; - for (auto it = _data.begin(); it != _data.end(); ++it) { - const auto& timestepData = (*it); - - Poco::File tileFolder(_tileFolderPath); - std::string folderLocStr; - if (_useIndexesForFolderName) { - folderLocStr = (boost::format("%1%_%2%") - % boost::io::group(std::setfill('0'), std::setw(6), spatialLocationInfo->_tileIdx) - % boost::io::group(std::setfill('0'), std::setw(2), spatialLocationInfo->_blockIdx) - ).str(); - } else { - folderLocStr = (boost::format("%1%%2%_%3%%4%_%5%") - % (spatialLocationInfo->_tileLatLon.lon < 0 ? "-" : "") - % boost::io::group(std::setfill('0'), std::setw(3), std::abs(spatialLocationInfo->_tileLatLon.lon)) - % (spatialLocationInfo->_tileLatLon.lat < 0 ? 
"-" : "") - % boost::io::group(std::setfill('0'), std::setw(3), std::abs(spatialLocationInfo->_tileLatLon.lat)) - % boost::io::group(std::setfill('0'), std::setw(2), spatialLocationInfo->_blockIdx) - ).str(); - } - - auto filename = (boost::format("%1%%2%%3%_%4%_%5%.tif") - % tileFolder.path() - % Poco::Path::separator() - % _name - % folderLocStr - % timestepData.first).str(); - - Poco::File block(filename); - if (block.exists()) { - block.remove(false); // delete existing file - } - - int cellRows = spatialLocationInfo->_cellRows; - int cellCols = spatialLocationInfo->_cellCols; - - GDALAllRegister(); - GDALDriver* driver = GetGDALDriverManager()->GetDriverByName("GTiff"); - - char** options = NULL; - options = CSLSetNameValue(options, "COMPRESS", "DEFLATE"); - - GDALDataset* dataset; - dataset = driver->Create(filename.c_str(), cellCols, cellRows, 1, _dataType, options); - double adfGeoTransform[6] = { - spatialLocationInfo->_blockLatLon.lon, 1.0 / spatialLocationInfo->_blockCols / cellCols, 0, - spatialLocationInfo->_blockLatLon.lat, 0, -1.0 / spatialLocationInfo->_blockRows / cellRows - }; - - dataset->SetGeoTransform(adfGeoTransform); - OGRSpatialReference srs; - srs.SetWellKnownGeogCS("EPSG:4326"); - char* srsWkt = NULL; - srs.exportToWkt(&srsWkt); - dataset->SetProjection(srsWkt); - CPLFree(srsWkt); - - GDALRasterBand* band = dataset->GetRasterBand(1); - band->SetNoDataValue(_nodataValue); - - if (_subtractPrevValue) - { - if (it == _data.begin()) { // prev value is 0 - band->RasterIO(GF_Write, 0, 0, cellCols, cellRows, (void*)timestepData.second.data(), cellCols, cellRows, _dataType, 0, 0); - } else { - // I know i have a prevIt - const auto& timestepDataPrev = (*itPrev); - std::vector newData; - - auto it1 = timestepData.second.begin(); - auto it2 = timestepDataPrev.second.begin(); - for (; it1 != timestepData.second.end() && it2 != timestepDataPrev.second.end(); ++it1, ++it2) { - newData.push_back(*it1 - *it2); - } - - band->RasterIO(GF_Write, 0, 0, cellCols, cellRows, (void*)newData.data(), cellCols, cellRows, _dataType, 0, 0); - } - } else { - band->RasterIO(GF_Write, 0, 0, cellCols, cellRows, (void*)timestepData.second.data(), cellCols, cellRows, _dataType, 0, 0); - } - - GDALClose(dataset); - itPrev = it; - } - - _data.clear(); -} + std::shared_ptr spatialLocationInfo) { + Poco::Mutex::ScopedLock lock(_fileHandlingMutex); + + Poco::File tileFolder(_tileFolderPath); + std::string folderLocStr; + if (_useIndexesForFolderName) { + folderLocStr = + (boost::format("%1%_%2%") % boost::io::group(std::setfill('0'), std::setw(6), spatialLocationInfo->_tileIdx) % + boost::io::group(std::setfill('0'), std::setw(2), spatialLocationInfo->_blockIdx)) + .str(); + } else { + folderLocStr = + (boost::format("%1%%2%_%3%%4%_%5%") % (spatialLocationInfo->_tileLatLon.lon < 0 ? "-" : "") % + boost::io::group(std::setfill('0'), std::setw(3), std::abs(spatialLocationInfo->_tileLatLon.lon)) % + (spatialLocationInfo->_tileLatLon.lat < 0 ? 
"-" : "") % + boost::io::group(std::setfill('0'), std::setw(3), std::abs(spatialLocationInfo->_tileLatLon.lat)) % + boost::io::group(std::setfill('0'), std::setw(2), spatialLocationInfo->_blockIdx)) + .str(); + } + + int cellRows = spatialLocationInfo->_cellRows; + int cellCols = spatialLocationInfo->_cellCols; + double adfGeoTransform[6] = {spatialLocationInfo->_blockLatLon.lon, + 1.0 / spatialLocationInfo->_blockCols / cellCols, + 0, + spatialLocationInfo->_blockLatLon.lat, + 0, + -1.0 / spatialLocationInfo->_blockRows / cellRows}; + + typename std::unordered_map>::iterator itPrev; + for (auto it = _data.begin(); it != _data.end(); ++it) { + auto& timestepData = (*it); + + auto filename = (boost::format("%1%%2%%3%_%4%_%5%.tif") % tileFolder.path() % Poco::Path::separator() % _name % + folderLocStr % timestepData.first) + .str(); + + Poco::File block_path(filename); + if (block_path.exists()) { + block_path.remove(false); // delete existing file + } + + auto band = create_gdalraster(block_path, cellRows, cellCols, gdal_type(_dataType), adfGeoTransform); + auto err = band->SetNoDataValue(_nodataValue); + if (err != CE_None) { + std::ostringstream oss; + oss << "Could not set raster file nodata: " << block_path.path() << " (" << _nodataValue << ")" << std::endl; + throw ApplicationException(oss.str()); + } + + if (_subtractPrevValue) { + if (it == _data.begin()) { // prev value is 0 + auto err = band->RasterIO(GF_Write, 0, 0, cellCols, cellRows, timestepData.second.data(), cellCols, + cellRows, gdal_type(_dataType), 0, 0); + if (err != CE_None) { + std::ostringstream oss; + oss << "Could not write to raster file: " << block_path.path() << std::endl; + throw ApplicationException(oss.str()); + } + } else { + // I know i have a prevIt + const auto& timestepDataPrev = (*itPrev); + std::vector newData; + + auto it1 = timestepData.second.begin(); + auto it2 = timestepDataPrev.second.begin(); + for (; it1 != timestepData.second.end() && it2 != timestepDataPrev.second.end(); ++it1, ++it2) { + newData.push_back(*it1 - *it2); + } + + auto err = band->RasterIO(GF_Write, 0, 0, cellCols, cellRows, newData.data(), cellCols, cellRows, + gdal_type(_dataType), 0, 0); + if (err != CE_None) { + std::ostringstream oss; + oss << "Could not write to raster file: " << block_path.path() << std::endl; + throw ApplicationException(oss.str()); + } + } + } else { + auto err = band->RasterIO(GF_Write, 0, 0, cellCols, cellRows, timestepData.second.data(), cellCols, + cellRows, gdal_type(_dataType), 0, 0); + if (err != CE_None) { + std::ostringstream oss; + oss << "Could not write to raster file: " << block_path.path() << std::endl; + throw ApplicationException(oss.str()); + } + } + itPrev = it; + } + + _data.clear(); +} // namespace gdal // -------------------------------------------------------------------------------------------- template -T WriteVariableGeotiff::DataSettingsT::applyValueAdjustment(std::shared_ptr spatialLocationInfo, int timestep, const T val) { - if (_applyAreaAdjustment) { - return T(val * spatialLocationInfo->_landUnitArea); - } - return val; +T WriteVariableGeotiff::DataSettingsT::applyValueAdjustment( + std::shared_ptr spatialLocationInfo, int timestep, const T val) { + if (_applyAreaAdjustment) { + return T(val * spatialLocationInfo->_landUnitArea); + } + return val; } // -------------------------------------------------------------------------------------------- template -void WriteVariableGeotiff::DataSettingsT::setLUValue(std::shared_ptr spatialLocationInfo, int timestep) { - if ((timestep - 
1) % _outputInterval != 0) { - return; - } - - initData(spatialLocationInfo, timestep); - if (_variable != nullptr) { setLUVariableValue(spatialLocationInfo, timestep); } - else if (!_pool.empty()) { setLUPoolValue(spatialLocationInfo, timestep); } - else if (!_flux.empty()) { setLUFluxValue(spatialLocationInfo, timestep); } +void WriteVariableGeotiff::DataSettingsT::setLUValue( + std::shared_ptr spatialLocationInfo, int timestep) { + if ((timestep - 1) % _outputInterval != 0) { + return; + } + + initData(spatialLocationInfo, timestep); + if (_variable != nullptr) { + setLUVariableValue(spatialLocationInfo, timestep); + } else if (!_pool.empty()) { + setLUPoolValue(spatialLocationInfo, timestep); + } else if (!_flux.empty()) { + setLUFluxValue(spatialLocationInfo, timestep); + } } // -------------------------------------------------------------------------------------------- template -void WriteVariableGeotiff::DataSettingsT::setLUVariableValue(std::shared_ptr spatialLocationInfo, int timestep) { - if (_propertyName != "") { - auto flintDataVariable = _variable->value().extract>(); - if (!_isArray) { - auto variablePropertyValue = flintDataVariable->getProperty(_propertyName); - _data[timestep][spatialLocationInfo->_cellIdx] = applyValueAdjustment(spatialLocationInfo, timestep, variablePropertyValue.convert()); - } - } - else { - auto variableValue = _variable->value(); - if (_isArray) { - auto val = variableValue.extract>>()[_arrayIndex]; - if (val.is_initialized()) { - _data[timestep][spatialLocationInfo->_cellIdx] = applyValueAdjustment(spatialLocationInfo, timestep, val.value()); - } - } - else { - if (!variableValue.isEmpty()) - _data[timestep][spatialLocationInfo->_cellIdx] = applyValueAdjustment(spatialLocationInfo, timestep, variableValue.convert()); - } - } +void WriteVariableGeotiff::DataSettingsT::setLUVariableValue( + std::shared_ptr spatialLocationInfo, int timestep) { + if (_propertyName != "") { + auto flintDataVariable = _variable->value().extract>(); + if (!_isArray) { + auto variablePropertyValue = flintDataVariable->getProperty(_propertyName); + _data[timestep][spatialLocationInfo->_cellIdx] = + applyValueAdjustment(spatialLocationInfo, timestep, variablePropertyValue.convert()); + } + } else { + auto variableValue = _variable->value(); + if (_isArray) { + auto val = variableValue.extract>>()[_arrayIndex]; + if (val.is_initialized()) { + _data[timestep][spatialLocationInfo->_cellIdx] = + applyValueAdjustment(spatialLocationInfo, timestep, val.value()); + } + } else { + if (!variableValue.isEmpty()) + _data[timestep][spatialLocationInfo->_cellIdx] = + applyValueAdjustment(spatialLocationInfo, timestep, variableValue.convert()); + } + } } // -------------------------------------------------------------------------------------------- template -void WriteVariableGeotiff::DataSettingsT::setLUPoolValue(std::shared_ptr spatialLocationInfo, int timestep) { - /// TODO: check this. Pool values are always doubles so shouldn't be defined by T - double sum = 0.0; - for (auto p : _pool) { - sum += p->value(); - } - _data[timestep][spatialLocationInfo->_cellIdx] = applyValueAdjustment(spatialLocationInfo, timestep, T(sum)); +void WriteVariableGeotiff::DataSettingsT::setLUPoolValue( + std::shared_ptr spatialLocationInfo, int timestep) { + /// TODO: check this. 
Pool values are always doubles so shouldn't be defined by T + double sum = 0.0; + for (auto p : _pool) { + sum += p->value(); + } + _data[timestep][spatialLocationInfo->_cellIdx] = applyValueAdjustment(spatialLocationInfo, timestep, T(sum)); } // -------------------------------------------------------------------------------------------- template -void WriteVariableGeotiff::DataSettingsT::setLUFluxValue(std::shared_ptr spatialLocationInfo, int timestep) { - double sum = 0.0; - for (auto operationResult : _landUnitData->getOperationLastAppliedIterator()) { - for (auto& flux : _flux) { - sum += flux.calculate(operationResult); - } - } - _data[timestep][spatialLocationInfo->_cellIdx] = applyValueAdjustment(spatialLocationInfo, timestep, T(sum)); +void WriteVariableGeotiff::DataSettingsT::setLUFluxValue( + std::shared_ptr spatialLocationInfo, int timestep) { + double sum = 0.0; + for (auto operationResult : _landUnitData->getOperationLastAppliedIterator()) { + for (auto& flux : _flux) { + sum += flux.calculate(operationResult); + } + } + _data[timestep][spatialLocationInfo->_cellIdx] = applyValueAdjustment(spatialLocationInfo, timestep, T(sum)); } // -------------------------------------------------------------------------------------------- template -void WriteVariableGeotiff::DataSettingsT::initData(std::shared_ptr spatialLocationInfo, int timestep) { - if (_data.find(timestep) == _data.end()) { - _data[timestep] = std::vector(); - initializeData(spatialLocationInfo, _data[timestep]); - } +void WriteVariableGeotiff::DataSettingsT::initData( + std::shared_ptr spatialLocationInfo, int timestep) { + if (_data.find(timestep) == _data.end()) { + _data[timestep] = std::vector(); + initializeData(spatialLocationInfo, _data[timestep]); + } } -} -} -} // namespace moja::modules::gdal +} // namespace gdal +} // namespace modules +} // namespace moja diff --git a/Source/moja.modules.gdal/tests/CMakeLists.txt b/Source/moja.modules.gdal/tests/CMakeLists.txt index 45652d2..7e070a1 100644 --- a/Source/moja.modules.gdal/tests/CMakeLists.txt +++ b/Source/moja.modules.gdal/tests/CMakeLists.txt @@ -1,35 +1,7 @@ ### Unit test ### set(TESTUNIT "${LIBNAME}.test") -find_package(Boost COMPONENTS system filesystem unit_test_framework REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -# Poco -find_package(Poco) -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) -endif() - -find_package(GDAL) -if(GDAL_FOUND) - include_directories(${GDAL_INCLUDE_DIR}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() - -include_directories(${TURTLE_INCLUDE_PATH}) -include_directories( - include - ../moja.core/include - ../moja.datarepository/include - ../moja.flint.configuration/include -) +find_package(Boost COMPONENTS unit_test_framework REQUIRED) configure_file(../../templates/unittestdefinition.cpp ${CMAKE_CURRENT_SOURCE_DIR}/src/_unittestdefinition.cpp) @@ -37,54 +9,18 @@ set(TEST_SRCS src/_unittestdefinition.cpp ) -add_definitions(-DBOOST_TEST_DYN_LINK) - add_executable(${TESTUNIT} ${TEST_SRCS}) add_test(NAME ${LIBNAME} WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND ${TESTUNIT} --result_code=yes --report_level=no) target_link_libraries(${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${Poco_FOUNDATION} - ${GDAL_LIBRARY} - ${SYSLIBS}) + PRIVATE + moja::moja.modules.gdal Boost::unit_test_framework + ) IF (RUN_UNIT_TESTS_ON_BUILD) add_custom_command(TARGET ${TESTUNIT} POST_BUILD 
COMMAND ${CMAKE_CTEST_COMMAND} ARGS -C $) ENDIF () -### End unit test ### - -### Begin PATH boilerplate for dependent libraries -- adapted from ### -### http://www.cmake.org/pipermail/cmake/2009-May/029464.html ### -### This allows unit tests to run without having to manually add ### -### dependent libraries to the system path. ### - -# Include an entry for each library that needs to be in the system path. -find_path(POCO_BIN NAMES PocoFoundation.dll PocoFoundation64.dll - PATHS ${POCO_PATH}/bin ${POCO_PATH}/bin64 - PATH_SUFFIXES $) - -file(TO_NATIVE_PATH "${Boost_LIBRARY_DIR}" boost_lib) -file(TO_NATIVE_PATH "${POCO_BIN}" poco_bin) - -# Determine which environment variable controls the locating of -# DLL's and set that variable. -if(WIN32) - set(LD_VARNAME "PATH") - set(LD_PATH "${boost_lib};${poco_bin};$ENV{PATH}") - - # IMPORTANT NOTE: The set_tests_properties(), below, internally - # stores its name/value pairs with a semicolon delimiter. - # because of this we must protect the semicolons in the path. - string(REPLACE ";" "\\;" LD_PATH "${LD_PATH}") -else() - set(LD_VARNAME "LD_LIBRARY_PATH") - set(LD_PATH "${boost_lib}:$ENV{LD_LIBRARY_PATH}") -endif() - -set_tests_properties(${LIBNAME} PROPERTIES ENVIRONMENT "${LD_VARNAME}=${LD_PATH}") -### End PATH boilerplate ### diff --git a/Source/moja.modules.libpq/CMakeLists.txt b/Source/moja.modules.libpq/CMakeLists.txt index 34dc67e..fb694e9 100644 --- a/Source/moja.modules.libpq/CMakeLists.txt +++ b/Source/moja.modules.libpq/CMakeLists.txt @@ -3,24 +3,8 @@ set(LIBNAME "moja.modules.${PACKAGE}") string(REPLACE "." "_" NEW_PACKAGE "${PACKAGE}") string(TOUPPER "${NEW_PACKAGE}" LIBNAME_EXPORT) -find_package(Boost) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -#for Moja -#find_package(Moja) - # Libpq -find_package(Libpq) - -include_directories( - include - ../moja.core/include - ../moja.flint/include - ../moja.datarepository/include - ${LIBPQ_INCLUDE_PATH} -) +find_package(PostgreSQL REQUIRED) configure_file( ../templates/exports.h @@ -81,50 +65,36 @@ set(SRCS ${PROJECT_PROVIDER_HEADERS} ${PROJECT_PROVIDER_SOURCES} ) -add_definitions( -DPOCO_NO_AUTOMATIC_LIBS ) - -add_library( - ${LIBNAME} - ${LIB_MODE} - ${SRCS} - ) +add_library( ${LIBNAME} ${LIB_MODE} ${SRCS} ) +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) +#Set target properties set_target_properties(${LIBNAME} PROPERTIES - VERSION ${MOJA_MULLIONGROUP_VERSION} - SOVERSION ${MOJA_MULLIONGROUP_VERSION_MAJOR} + VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS - ) +) -#add_dependencies( -# ${LIBNAME} -#) - -target_link_libraries( - ${LIBNAME} - moja.core - moja.flint - moja.datarepository - ${LIBPQ_LIB} - ${Poco_FOUNDATION} - ${Poco_JSON} +target_include_directories(${LIBNAME} + PUBLIC + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src ) -# Set local include path -include_directories(${Poco_INCLUDE_DIRS}) +target_link_libraries(${LIBNAME} + PUBLIC + moja::moja.flint PostgreSQL::PostgreSQL +) -install(TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin) +############################################## +# Installation instructions - if(MSVC) - INSTALL( - FILES ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/Debug/${LIBNAME}${CMAKE_DEBUG_POSTFIX}.pdb - DESTINATION bin - CONFIGURATIONS Debug - ) -endif() +include(GNUInstallDirs) + +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) if(ENABLE_TESTS) add_subdirectory(tests) diff --git 
a/Source/moja.modules.libpq/cmake/moja.modules.libpqConfig.cmake b/Source/moja.modules.libpq/cmake/moja.modules.libpqConfig.cmake new file mode 100644 index 0000000..7710d89 --- /dev/null +++ b/Source/moja.modules.libpq/cmake/moja.modules.libpqConfig.cmake @@ -0,0 +1,8 @@ + +include(CMakeFindDependencyMacro) +find_dependency(PostgreSQL REQUIRED) +find_dependency(moja REQUIRED COMPONENTS moja.flint ) + +if(NOT TARGET moja::moja.modules.libpq) + include("${CMAKE_CURRENT_LIST_DIR}/moja::moja.modules.libpqTargets.cmake") +endif() diff --git a/Source/moja.modules.libpq/tests/CMakeLists.txt b/Source/moja.modules.libpq/tests/CMakeLists.txt index 53497e6..38edac4 100644 --- a/Source/moja.modules.libpq/tests/CMakeLists.txt +++ b/Source/moja.modules.libpq/tests/CMakeLists.txt @@ -1,15 +1,6 @@ -### Unit test ### set(TESTUNIT "${LIBNAME}.test") -find_package(Boost COMPONENTS system filesystem unit_test_framework REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() +find_package(Boost COMPONENTS unit_test_framework REQUIRED) configure_file(../../templates/unittestdefinition.cpp ${CMAKE_CURRENT_SOURCE_DIR}/src/_unittestdefinition.cpp) @@ -18,65 +9,25 @@ set(TEST_SRCS src/providerrelationallibpqpostgresqltests.cpp ) -add_definitions(-DBOOST_TEST_DYN_LINK) - add_executable(${TESTUNIT} ${TEST_SRCS}) add_test(NAME ${LIBNAME} WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND ${TESTUNIT} --result_code=yes --report_level=no) +target_link_libraries(${TESTUNIT} + PRIVATE + moja::moja.modules.libpq Boost::unit_test_framework + ) + if(WIN32) target_link_libraries(${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS} - wsock32 - ws2_32 - ) -ELSE() - target_link_libraries( - ${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS} + PRIVATE + wsock32 ws2_32 ) endif() -add_dependencies(${TESTUNIT} moja.core moja.flint moja.datarepository) IF (RUN_UNIT_TESTS_ON_BUILD) add_custom_command(TARGET ${TESTUNIT} POST_BUILD COMMAND ${CMAKE_CTEST_COMMAND} ARGS -C $) ENDIF () -### End unit test ### - -### Begin PATH boilerplate for dependent libraries -- adapted from ### -### http://www.cmake.org/pipermail/cmake/2009-May/029464.html ### -### This allows unit tests to run without having to manually add ### -### dependent libraries to the system path. ### - -# Include an entry for each library that needs to be in the system path. -find_path(POCO_BIN NAMES PocoFoundation.dll PocoFoundation64.dll - PATHS ${POCO_PATH}/bin ${POCO_PATH}/bin64 - PATH_SUFFIXES $) - -file(TO_NATIVE_PATH "${Boost_LIBRARY_DIR}" boost_lib) -file(TO_NATIVE_PATH "${POCO_BIN}" poco_bin) - -# Determine which environment variable controls the locating of -# DLL's and set that variable. -if(WIN32) - set(LD_VARNAME "PATH") - set(LD_PATH "${boost_lib};${poco_bin};$ENV{PATH}") - - # IMPORTANT NOTE: The set_tests_properties(), below, internally - # stores its name/value pairs with a semicolon delimiter. - # because of this we must protect the semicolons in the path. 
- string(REPLACE ";" "\\;" LD_PATH "${LD_PATH}") -else() - set(LD_VARNAME "LD_LIBRARY_PATH") - set(LD_PATH "${boost_lib}:$ENV{LD_LIBRARY_PATH}") -endif() - -set_tests_properties(${LIBNAME} PROPERTIES ENVIRONMENT "${LD_VARNAME}=${LD_PATH}") -### End PATH boilerplate ### diff --git a/Source/moja.modules.poco/CMakeLists.txt b/Source/moja.modules.poco/CMakeLists.txt index adc83c2..1ee0f35 100644 --- a/Source/moja.modules.poco/CMakeLists.txt +++ b/Source/moja.modules.poco/CMakeLists.txt @@ -3,28 +3,21 @@ set(LIBNAME "moja.modules.${PACKAGE}") string(REPLACE "." "_" NEW_PACKAGE "${PACKAGE}") string(TOUPPER "${NEW_PACKAGE}" LIBNAME_EXPORT) -find_package(Boost) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -#for Moja -#find_package(Moja) - # Poco -find_package(Poco REQUIRED) - -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) - include_directories(${Poco_INCLUDE_DIRS}) -endif() - -include_directories( - include - ../moja.core/include - ../moja.flint/include - ../moja.datarepository/include -) +find_package(Poco REQUIRED DataSQLite MongoDB) + +#if (MSVC) +# generate_product_version(ProductVersionFiles +# NAME ${LIBNAME} +# FILE_DESCRIPTION "moja modules poco" +# VERSION_MAJOR ${MOJA_VERSION_MAJOR} +# VERSION_MINOR ${MOJA_VERSION_MINOR} +# VERSION_PATCH ${MOJA_VERSION_PATCH} +# VERSION_REVISION ${MOJA_VERSION_REVISION} +# COMPANY_NAME "moja global" +## VERSION_REVISION ${BUILD_ID} +# ) +#endif () configure_file( ../templates/exports.h @@ -67,13 +60,11 @@ set(PROJECT_FLINTDATA_SOURCES set(PROJECT_PROVIDER_HEADERS include/moja/modules/${PACKAGE}/providernosqlpocomongodb.h include/moja/modules/${PACKAGE}/providernosqlpocojson.h -# include/moja/modules/${PACKAGE}/providerrelationalpocopostgresql.h ) set(PROJECT_PROVIDER_SOURCES src/providernosqlpocomongodb.cpp src/providernosqlpocojson.cpp -# src/providerrelationalpocopostgresql.cpp ) source_group("header files\\other" FILES ${PROJECT_HEADERS}) @@ -95,42 +86,35 @@ set(SRCS ${PROJECT_PROVIDER_HEADERS} ${PROJECT_PROVIDER_SOURCES} ) -add_definitions( -DPOCO_NO_AUTOMATIC_LIBS ) - -add_library( - ${LIBNAME} - ${LIB_MODE} - ${SRCS} - ) +add_library(${LIBNAME} ${LIB_MODE} ${SRCS} ) +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) set_target_properties(${LIBNAME} PROPERTIES - VERSION ${MOJA_MULLIONGROUP_VERSION} - SOVERSION ${MOJA_MULLIONGROUP_VERSION_MAJOR} + VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS - ) +) -target_link_libraries( - ${LIBNAME} - moja.core - moja.flint - moja.datarepository - ${Poco_FOUNDATION} - ${Poco_DATA} - ${Poco_NET} - ${Poco_MONGODB} - ${Poco_JSON} - ${Poco_DATA_SQLITE} - ${Poco_DATA_POSTGRESQL} +target_include_directories(${LIBNAME} + PUBLIC + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src +) + +target_link_libraries(${LIBNAME} + PUBLIC + moja::moja.flint Poco::DataSQLite Poco::MongoDB ) -# Set local include path -include_directories(${Poco_INCLUDE_DIRS}) +############################################## +# Installation instructions + +include(GNUInstallDirs) -install(TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin) +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) if(ENABLE_TESTS) add_subdirectory(tests) diff --git a/Source/moja.modules.poco/cmake/moja.modules.pocoConfig.cmake b/Source/moja.modules.poco/cmake/moja.modules.pocoConfig.cmake new file mode 100644 index 0000000..e80a687 --- /dev/null +++ 
b/Source/moja.modules.poco/cmake/moja.modules.pocoConfig.cmake @@ -0,0 +1,9 @@ + +include(CMakeFindDependencyMacro) +find_dependency(Poco REQUIRED COMPONENTS DataSQLite MongoDB) +find_dependency(moja REQUIRED COMPONENTS moja.flint ) + +if(NOT TARGET moja::moja.modules.poco) + include("${MojaModulesPoco_CMAKE_DIR}/moja.modules.pocoTargets.cmake") +endif() + diff --git a/Source/moja.modules.poco/include/moja/modules/poco/experimental/providerrelationalpocopostgresql.h b/Source/moja.modules.poco/include/moja/modules/poco/experimental/providerrelationalpocopostgresql.h deleted file mode 100644 index 5806882..0000000 --- a/Source/moja.modules.poco/include/moja/modules/poco/experimental/providerrelationalpocopostgresql.h +++ /dev/null @@ -1,52 +0,0 @@ -#ifndef MOJA_MODULES_POCO_PROVIDERRELATIONALPOCOPOSTGRESQL_H_ -#define MOJA_MODULES_POCO_PROVIDERRELATIONALPOCOPOSTGRESQL_H_ - -//#if defined(_WIN32) -//#include -//#include -//#endif - -#if 0 -https://www.postgresql.org/docs/9.0/static/populate.html - -https://www.postgresql.org/docs/current/static/libpq-copy.html - -http://stackoverflow.com/questions/10510539/simple-postgresql-libpq-code-too-slow - -#endif - -#include "moja/datarepository/_datarepository_exports.h" -#include "moja/datarepository/iproviderrelationalinterface.h" - -#include -#include "Poco/LRUCache.h" -#include "Poco/Data/PostgreSQL/Utility.h" -#include "Poco/Post - -//Parameters for testing -#define POSTGRESQL_USER "postgres" -#define POSTGRESQL_PWD "admin" -#define POSTGRESQL_HOST "localhost" -#define POSTGRESQL_PORT "5432" -#define POSTGRESQL_DB "postgres" - -namespace moja { -namespace datarepository { - -class DATAREPOSITORY_API ProviderRelationalPocoPostgreSQL : public IProviderRelationalInterface{ -public: - - explicit ProviderRelationalPocoPostgreSQL(DynamicObject settings); - virtual ~ProviderRelationalPocoPostgreSQL(); - DynamicVar GetDataSet(const std::string& query) const override; - -private: - PGconn * _conn = 0; - mutable PGresult * pResult = 0; - std::string _dbConnString; - mutable Poco::LRUCache _cache; -}; - -}} // moja::datarepository - -#endif // MOJA_MODULES_POCO_PROVIDERRELATIONALPOCOPOSTGRESQL_H_ \ No newline at end of file diff --git a/Source/moja.modules.poco/include/moja/modules/poco/libraryfactory.h b/Source/moja.modules.poco/include/moja/modules/poco/libraryfactory.h index 26e8c67..8df130c 100644 --- a/Source/moja.modules.poco/include/moja/modules/poco/libraryfactory.h +++ b/Source/moja.modules.poco/include/moja/modules/poco/libraryfactory.h @@ -7,13 +7,16 @@ namespace moja { namespace modules { namespace poco { +extern "C" MOJA_LIB_API int getModuleRegistrations(moja::flint::ModuleRegistration* outModuleRegistrations); +extern "C" MOJA_LIB_API int getTransformRegistrations(moja::flint::TransformRegistration* outTransformRegistrations); +extern "C" MOJA_LIB_API int getFlintDataRegistrations(moja::flint::FlintDataRegistration* outFlintDataRegistrations); +extern "C" MOJA_LIB_API int getFlintDataFactoryRegistrations( + moja::flint::FlintDataFactoryRegistration* outFlintDataFactoryRegistrations); +extern "C" MOJA_LIB_API int getDataRepositoryProviderRegistrations( + moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration); -extern "C" MOJA_LIB_API int getModuleRegistrations (moja::flint::ModuleRegistration* outModuleRegistrations); -extern "C" MOJA_LIB_API int getTransformRegistrations (moja::flint::TransformRegistration* outTransformRegistrations); -extern "C" MOJA_LIB_API int getFlintDataRegistrations 
(moja::flint::FlintDataRegistration* outFlintDataRegistrations); -extern "C" MOJA_LIB_API int getFlintDataFactoryRegistrations (moja::flint::FlintDataFactoryRegistration* outFlintDataFactoryRegistrations); -extern "C" MOJA_LIB_API int getDataRepositoryProviderRegistrations (moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration); +} // namespace poco +} // namespace modules +} // namespace moja -}}} - -#endif // MOJA_MODULES_POCO_LIBRARYFACTORY_H_ \ No newline at end of file +#endif // MOJA_MODULES_POCO_LIBRARYFACTORY_H_ \ No newline at end of file diff --git a/Source/moja.modules.poco/include/moja/modules/poco/nosqlcollectiontransform.h b/Source/moja.modules.poco/include/moja/modules/poco/nosqlcollectiontransform.h index 172d78e..40b0476 100644 --- a/Source/moja.modules.poco/include/moja/modules/poco/nosqlcollectiontransform.h +++ b/Source/moja.modules.poco/include/moja/modules/poco/nosqlcollectiontransform.h @@ -2,49 +2,54 @@ #define MOJA_MODULES_POCO_NOSQLCOLLECTIONTRANSFORM_H_ #include "moja/modules/poco/_modules.poco_exports.h" -#include "moja/flint/itransform.h" + +#include #include #include namespace moja { namespace datarepository { - class IProviderNoSQLInterface; +class IProviderNoSQLInterface; } namespace flint { - class IVariable; +class IVariable; } namespace modules { namespace poco { -/// This transform will cache an entire nosql collection and allow the use to get a document based on a specified key value +/// This transform will cache an entire nosql collection and allow the use to get a document based on a specified key +/// value class POCO_API NoSQLCollectionTransform : public flint::ITransform { -public: - NoSQLCollectionTransform(); - NoSQLCollectionTransform(const NoSQLCollectionTransform&) = delete; + public: + NoSQLCollectionTransform(); + NoSQLCollectionTransform(const NoSQLCollectionTransform&) = delete; - void configure(DynamicObject config, const flint::ILandUnitController& landUnitController, moja::datarepository::DataRepository& dataRepository) override; + void configure(DynamicObject config, const flint::ILandUnitController& landUnitController, + moja::datarepository::DataRepository& dataRepository) override; - NoSQLCollectionTransform& operator=(const NoSQLCollectionTransform&) = delete; - void controllerChanged(const flint::ILandUnitController& controller) override; - const DynamicVar& value() const override; + NoSQLCollectionTransform& operator=(const NoSQLCollectionTransform&) = delete; + void controllerChanged(const flint::ILandUnitController& controller) override; + const DynamicVar& value() const override; -private: - DynamicObject _config; - const flint::ILandUnitController* _landUnitController; - moja::datarepository::DataRepository* _dataRepository; - std::shared_ptr _provider; + private: + DynamicObject _config; + const flint::ILandUnitController* _landUnitController; + moja::datarepository::DataRepository* _dataRepository; + std::shared_ptr _provider; - std::string _keyName; - std::string _keyValueVariable; - const flint::IVariable* _keyVariable; - std::unordered_map _collection; + std::string _keyName; + std::string _keyValueVariable; + const flint::IVariable* _keyVariable; + std::unordered_map _collection; - mutable DynamicVar _dataSet; + mutable DynamicVar _dataSet; }; -}}} +} // namespace poco +} // namespace modules +} // namespace moja -#endif // MOJA_MODULES_POCO_NOSQLCOLLECTIONTRANSFORM_H_ \ No newline at end of file +#endif // MOJA_MODULES_POCO_NOSQLCOLLECTIONTRANSFORM_H_ \ No newline at end of file diff 
--git a/Source/moja.modules.poco/include/moja/modules/poco/pocomongodbquerytransform.h b/Source/moja.modules.poco/include/moja/modules/poco/pocomongodbquerytransform.h index 2111bad..f38311b 100644 --- a/Source/moja.modules.poco/include/moja/modules/poco/pocomongodbquerytransform.h +++ b/Source/moja.modules.poco/include/moja/modules/poco/pocomongodbquerytransform.h @@ -2,21 +2,23 @@ #define MOJA_MODULES_POCO_POCOMONGODBQUERYTRANSFORM_H_ #include "moja/modules/poco/_modules.poco_exports.h" -#include "moja/flint/itransform.h" + +#include + +#include #include #include -#include "moja/dynamic.h" namespace moja { namespace datarepository { - class DataRepository; +class DataRepository; } namespace flint { - class IPool; - class IVariable; - class ILandUnitController; -} +class IPool; +class IVariable; +class ILandUnitController; +} // namespace flint namespace modules { namespace poco { @@ -24,46 +26,49 @@ namespace poco { class ProviderNoSQLPocoMongoDB; class POCO_API PocoMongoDBQueryTransform : public flint::ITransform { -public: - PocoMongoDBQueryTransform(); - PocoMongoDBQueryTransform(const PocoMongoDBQueryTransform&) = delete; - - void configure(DynamicObject config, const flint::ILandUnitController& landUnitController, moja::datarepository::DataRepository& dataRepository) override; - - PocoMongoDBQueryTransform& operator=(const PocoMongoDBQueryTransform&) = delete; - void controllerChanged(const flint::ILandUnitController& controller) override; - const DynamicVar& value() const override; - -private: - enum class ResultType { - SingleColumnSingleRow, - SingleColumnMultiRow, - MultiColumnSingleRow, - MultiColumnMultiRow, - Empty - }; - - DynamicObject _config; - const flint::ILandUnitController* _landUnitController; - const std::string varMarker = "var"; - const std::string poolMarker = "pool"; - datarepository::DataRepository* _dataRepository; - std::shared_ptr _provider; - - std::string _queryStr; - std::string _filterStr; - - std::vector> _variables; - std::vector _pools; - - mutable DynamicVar _dataSet; - - static std::vector extractTokens(const std::string& tokenType, const std::string& query); - static void replaceTokens(const std::string& tokenType, std::string& query, std::vector values); - static std::string formatVariableValues(const flint::IVariable& var, DynamicVar& property); - static ResultType detectResultType(const DynamicVector&); + public: + PocoMongoDBQueryTransform(); + PocoMongoDBQueryTransform(const PocoMongoDBQueryTransform&) = delete; + + void configure(DynamicObject config, const flint::ILandUnitController& landUnitController, + moja::datarepository::DataRepository& dataRepository) override; + + PocoMongoDBQueryTransform& operator=(const PocoMongoDBQueryTransform&) = delete; + void controllerChanged(const flint::ILandUnitController& controller) override; + const DynamicVar& value() const override; + + private: + enum class ResultType { + SingleColumnSingleRow, + SingleColumnMultiRow, + MultiColumnSingleRow, + MultiColumnMultiRow, + Empty + }; + + DynamicObject _config; + const flint::ILandUnitController* _landUnitController; + const std::string varMarker = "var"; + const std::string poolMarker = "pool"; + datarepository::DataRepository* _dataRepository; + std::shared_ptr _provider; + + std::string _queryStr; + std::string _filterStr; + + std::vector> _variables; + std::vector _pools; + + mutable DynamicVar _dataSet; + + static std::vector extractTokens(const std::string& tokenType, const std::string& query); + static void replaceTokens(const std::string& tokenType, 
std::string& query, std::vector values); + static std::string formatVariableValues(const flint::IVariable& var, DynamicVar& property); + static ResultType detectResultType(const DynamicVector&); }; -}}} +} // namespace poco +} // namespace modules +} // namespace moja -#endif // MOJA_MODULES_POCO_POCOMONGODBQUERYTRANSFORM_H_ \ No newline at end of file +#endif // MOJA_MODULES_POCO_POCOMONGODBQUERYTRANSFORM_H_ \ No newline at end of file diff --git a/Source/moja.modules.poco/include/moja/modules/poco/pocomongoutils.h b/Source/moja.modules.poco/include/moja/modules/poco/pocomongoutils.h index 09d09ce..dbbfc0c 100644 --- a/Source/moja.modules.poco/include/moja/modules/poco/pocomongoutils.h +++ b/Source/moja.modules.poco/include/moja/modules/poco/pocomongoutils.h @@ -2,26 +2,29 @@ #define MOJA_MODULES_POCO_MONGOUTILS_H_ #include "moja/modules/poco/_modules.poco_exports.h" -#include "moja/dynamic.h" + +#include #include -#include -#include #include +#include +#include namespace moja { namespace modules { namespace poco { -DynamicVector POCO_API ConvertPocoMongoDocumentToDynamic(Poco::MongoDB::Array::Ptr& arr); -DynamicObject POCO_API ConvertPocoMongoDocumentToDynamic(Poco::MongoDB::Document::Ptr& document); -void POCO_API ConvertPocoMongoDocumentToDynamic(DynamicVar& dynamic, Poco::MongoDB::Document::Vector& documents); -void POCO_API ConvertPocoMongoDocumentToDynamic(DynamicVector& vec, Poco::MongoDB::Document::Vector& documents); +DynamicVector POCO_API ConvertPocoMongoDocumentToDynamic(Poco::MongoDB::Array::Ptr& arr); +DynamicObject POCO_API ConvertPocoMongoDocumentToDynamic(Poco::MongoDB::Document::Ptr& document); +void POCO_API ConvertPocoMongoDocumentToDynamic(DynamicVar& dynamic, Poco::MongoDB::Document::Vector& documents); +void POCO_API ConvertPocoMongoDocumentToDynamic(DynamicVector& vec, Poco::MongoDB::Document::Vector& documents); Poco::MongoDB::Document::Ptr POCO_API parsePocoJSONToMongoDBObj(Poco::DynamicAny& data); -Poco::MongoDB::Array::Ptr POCO_API parsePocoJSONToMongoDBObj(Poco::JSON::Array::Ptr& val); +Poco::MongoDB::Array::Ptr POCO_API parsePocoJSONToMongoDBObj(Poco::JSON::Array::Ptr& val); Poco::MongoDB::Document::Ptr POCO_API parsePocoJSONToMongoDBObj(Poco::JSON::Object::Ptr& val); -}}} +} // namespace poco +} // namespace modules +} // namespace moja -#endif // MOJA_MODULES_POCO_MONGOUTILS_H_ \ No newline at end of file +#endif // MOJA_MODULES_POCO_MONGOUTILS_H_ \ No newline at end of file diff --git a/Source/moja.modules.poco/include/moja/modules/poco/providernosqlpocojson.h b/Source/moja.modules.poco/include/moja/modules/poco/providernosqlpocojson.h index 4e8d67f..402392b 100644 --- a/Source/moja.modules.poco/include/moja/modules/poco/providernosqlpocojson.h +++ b/Source/moja.modules.poco/include/moja/modules/poco/providernosqlpocojson.h @@ -2,9 +2,10 @@ #define MOJA_MODULES_POCO_PROVIDERNOSQLPOCOJSON_H_ #include "moja/modules/poco/_modules.poco_exports.h" -#include "moja/datarepository/iprovidernosqlinterface.h" -#include "moja/dynamic.h" +#include + +#include #include @@ -16,29 +17,30 @@ namespace poco { // -------------------------------------------------------------------------------------------- /** -* Moja Implmentation of a NoSQL data provider (using MongoDB as a data source) derived -* from IProviderSpatialVectorInterface. -*/ + * Moja Implmentation of a NoSQL data provider (using MongoDB as a data source) derived + * from IProviderSpatialVectorInterface. 
+ */ class POCO_API ProviderNoSQLPocoJSON : public datarepository::IProviderNoSQLInterface { + public: + explicit ProviderNoSQLPocoJSON(DynamicObject settings); + virtual ~ProviderNoSQLPocoJSON() {} -public: - explicit ProviderNoSQLPocoJSON(DynamicObject settings); - virtual ~ProviderNoSQLPocoJSON() {} - - virtual DynamicVector GetDataSet(const std::string& query) const override; + virtual DynamicVector GetDataSet(const std::string& query) const override; - virtual int Count() const override; + virtual int Count() const override; -private: - Poco::File _file; - Poco::Timestamp _lastModified; + private: + Poco::File _file; + Poco::Timestamp _lastModified; - std::string _filePath; - std::string _jsonStr; + std::string _filePath; + std::string _jsonStr; - DynamicVar _data; + DynamicVar _data; }; -}}} +} // namespace poco +} // namespace modules +} // namespace moja -#endif // MOJA_MODULES_POCO_PROVIDERNOSQLPOCOJSON_H_ +#endif // MOJA_MODULES_POCO_PROVIDERNOSQLPOCOJSON_H_ diff --git a/Source/moja.modules.poco/include/moja/modules/poco/providernosqlpocomongodb.h b/Source/moja.modules.poco/include/moja/modules/poco/providernosqlpocomongodb.h index d4902f6..24928da 100644 --- a/Source/moja.modules.poco/include/moja/modules/poco/providernosqlpocomongodb.h +++ b/Source/moja.modules.poco/include/moja/modules/poco/providernosqlpocomongodb.h @@ -2,12 +2,13 @@ #define MOJA_MODULES_POCO_PROVIDERNOSQLPOCOMONGODB_H_ #include "moja/modules/poco/_modules.poco_exports.h" -#include "moja/datarepository/iprovidernosqlinterface.h" + +#include #include -#include #include +#include #include @@ -16,40 +17,43 @@ namespace modules { namespace poco { /** -* Moja Implmentation of a NoSQL data provider (using MongoDB as a data source) derived -* from IProviderSpatialVectorInterface. -*/ + * Moja Implmentation of a NoSQL data provider (using MongoDB as a data source) derived + * from IProviderSpatialVectorInterface. 
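Aside on the two NoSQL providers declared in these headers: callers fetch documents through IProviderNoSQLInterface::GetDataSet and then index the returned Dynamic documents themselves. The sketch below mirrors the caching pattern NoSQLCollectionTransform uses later in this patch; the provider instance and the "soil_id" key name are assumptions for illustration only.

```
// Fetch every document from a NoSQL provider and index it by a key field,
// mirroring the caching pattern NoSQLCollectionTransform uses later in this
// patch. The provider instance and the "soil_id" key are illustrative only.
#include <moja/datarepository/iprovidernosqlinterface.h>
#include <moja/dynamic.h>

#include <unordered_map>

void cacheByKeySketch(const moja::datarepository::IProviderNoSQLInterface& provider,
                      std::unordered_map<int, moja::DynamicVar>& cache) {
    auto documents = provider.GetDataSet("{}");  // "{}" selects the whole collection
    for (auto& item : documents) {
        int key = item["soil_id"];               // placeholder key field
        cache[key] = item;
    }
}
```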
+ */ class POCO_API ProviderNoSQLPocoMongoDB : public datarepository::IProviderNoSQLInterface { -public: - typedef std::shared_ptr Ptr; - - ProviderNoSQLPocoMongoDB() = default; - explicit ProviderNoSQLPocoMongoDB(DynamicObject settings); - virtual ~ProviderNoSQLPocoMongoDB(); - - DynamicVector GetDataSet(const std::string& query) const override; - - int Count() const override; - - virtual DynamicVector SendCmdRequest(const std::string& command) const; - virtual int SendCountRequest() const; - virtual DynamicVector SendDeleteRequest(const std::string& query) const; - virtual DynamicVector SendInsertRequest(const std::string& query) const; - virtual DynamicVector SendQueryRequest(const std::string& query, int numberToReturn = 100) const; - virtual DynamicVector SendQueryRequest(const std::string& query, const std::string& fields, int numberToReturn = 100) const; - virtual DynamicVector SendUpdateRequest(const std::string& query) const; - -private: - std::string _host; - int _port; - std::string _collection; - std::string _database; - - mutable bool _connected; - mutable Poco::MongoDB::Connection _connection; - mutable Poco::LRUCache _cache; + public: + typedef std::shared_ptr Ptr; + + ProviderNoSQLPocoMongoDB() = default; + explicit ProviderNoSQLPocoMongoDB(DynamicObject settings); + virtual ~ProviderNoSQLPocoMongoDB(); + + DynamicVector GetDataSet(const std::string& query) const override; + + int Count() const override; + + virtual DynamicVector SendCmdRequest(const std::string& command) const; + virtual int SendCountRequest() const; + virtual DynamicVector SendDeleteRequest(const std::string& query) const; + virtual DynamicVector SendInsertRequest(const std::string& query) const; + virtual DynamicVector SendQueryRequest(const std::string& query, int numberToReturn = 100) const; + virtual DynamicVector SendQueryRequest(const std::string& query, const std::string& fields, + int numberToReturn = 100) const; + virtual DynamicVector SendUpdateRequest(const std::string& query) const; + + private: + std::string _host; + int _port; + std::string _collection; + std::string _database; + + mutable bool _connected; + mutable Poco::MongoDB::Connection _connection; + mutable Poco::LRUCache _cache; }; -}}} +} // namespace poco +} // namespace modules +} // namespace moja -#endif // MOJA_MODULES_POCO_PROVIDERNOSQLPOCOMONGODB_H_ +#endif // MOJA_MODULES_POCO_PROVIDERNOSQLPOCOMONGODB_H_ diff --git a/Source/moja.modules.poco/src/experimental/providerrelationalpocopostgresql.cpp b/Source/moja.modules.poco/src/experimental/providerrelationalpocopostgresql.cpp deleted file mode 100644 index 5e8c45b..0000000 --- a/Source/moja.modules.poco/src/experimental/providerrelationalpocopostgresql.cpp +++ /dev/null @@ -1,303 +0,0 @@ -#include "moja/modules/poco/providerrelationalpocopostgresql.h" -#include "moja/datarepository/datarepositoryexceptions.h" -#include "Poco/Data/PostgreSQL/Connector.h" -#include -#include - -namespace moja { -namespace datarepository { - -static void noticeProcessor(void *arg, const char *message) { - //MOJA_LOG_INFO << message; -} - -static void noticeReceiver(void *arg, const PGresult *res) { - //MOJA_LOG_INFO << message; -} - -#if 0 -const char* COL_TEXT = "TEXT"; -const char* COL_VARCHAR = "VARCHAR"; -const char* COL_INTEGER = "INTEGER"; -const char* COL_FLOAT = "FLOAT"; - -/* pg_type.h has more codes for DataColumnTypes kindly refer the codes for more data types*/ -enum class DataColumnType { - BOOLEAN = 16, - CHARACTER = 18, - NAME = 19, - INTEGER = 23, - STRING = 25, - JSONOID = 114, - 
FLOAT = 700, //FLOAT4 in db -- single precision floating point - DOUBLE = 701, // FLOAT8 in db -- double precision floating point - MONEY = 790, - VARCHAR2 = 1042, - VARCHAROID = 1043, - REGPROCOID = 24, - OIDOID = 26, - INT2OID = 21, - INT8OID = 20, //BigInt - POINTOID = 600, // geometric point - LSEGOID = 601, // geometric line segment - BOXOID = 603, // geometric box - POLYGONOID = 604, // geometric polygon - LINEOID = 628 //geometric line -}; -#endif - -enum class ColumnType { - BOOLEAN, - CHARACTER, - NAME, - INTEGER, - STRING, - JSONOID, - FLOAT, - DOUBLE, - MONEY, - VARCHAR, - VARCHAR2, - OIDOID, - REGPROCOID, - INT2OID, - INT8OID -}; - - -ProviderRelationalPocoPostgreSQL::ProviderRelationalPocoPostgreSQL(DynamicObject settings) : - _cache(10000) { - - _dbConnString = settings["connectionstring"].convert(); - Poco::Data::PostgreSQL::Connector::registerConnector(); - _conn = PQconnectdb(_dbConnString.c_str()); - - /* Check to see that the backend connection was successfully made */ - if (PQstatus(_conn) != CONNECTION_OK) { - std::string msg = (boost::format("Database connection failed - '%1%' with connection string '%2%") % PQerrorMessage(_conn) % _dbConnString).str(); - MOJA_LOG_ERROR << "ProviderRelationalPocoPostgreSQL: " << msg; - BOOST_THROW_EXCEPTION(ConnectionFailedException() << ConnectionError(msg)); - } - PQsetNoticeReceiver(_conn, noticeReceiver, nullptr); - PQsetNoticeProcessor(_conn, noticeProcessor, nullptr); -} - -ProviderRelationalPocoPostgreSQL::~ProviderRelationalPocoPostgreSQL() { - PQfinish(_conn); - Poco::Data::PostgreSQL::Connector::unregisterConnector(); -} - -Dynamic ProviderRelationalPocoPostgreSQL::GetDataSet(const std::string& query) const { - auto cachedValue = _cache.get(query); - if (!cachedValue.isNull()) { - return *cachedValue; - } - - std::vector result; - - /// - results from this query come back as text, hence the Poco::tryParseXXX methods below - /// We can get results as binary, currently text works but Int, doubles, floats are all in network byte order - /// which we need to convert to local byte order. 
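Context for the experimental provider being deleted here: it wrapped libpq's text-format results, parsing each cell according to its PostgreSQL type OID. The sketch below shows the bare libpq round trip the provider built on, using only calls that appear in the removed code; the connection string and the query are placeholders.

```
// Bare libpq text-result loop, for context on the provider removed above.
// Only libpq calls that appear in the removed code are used; the connection
// string and the query are placeholders.
#include <libpq-fe.h>

#include <cstdio>

int main() {
    PGconn* conn = PQconnectdb("host=localhost dbname=postgres user=postgres");
    if (PQstatus(conn) != CONNECTION_OK) {
        std::fprintf(stderr, "connection failed: %s\n", PQerrorMessage(conn));
        PQfinish(conn);
        return 1;
    }

    PGresult* res = PQexec(conn, "SELECT 1 AS one, 'two' AS two");
    if (PQresultStatus(res) == PGRES_TUPLES_OK) {
        for (int row = 0; row < PQntuples(res); ++row) {
            for (int col = 0; col < PQnfields(res); ++col) {
                // Cells come back as text by default; the removed provider
                // parsed them into typed Dynamic values using PQftype().
                std::printf("%s=%s ", PQfname(res, col), PQgetvalue(res, row, col));
            }
            std::printf("\n");
        }
    }
    PQclear(res);
    PQfinish(conn);
    return 0;
}
```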
- /// (http://www.postgresql.org/docs/current/static/libpq-example.html) - pResult = PQexec(_conn, query.c_str()); - - //// - this query call gets results in binary format - - //const char *paramValues[1]; - //paramValues[0] = "joe's place"; - //pResult = PQexecParams( - // _conn, - // query.c_str(), - // 0, /* one param */ - // NULL, /* let the backend deduce param type */ - // paramValues, - // NULL, /* don't need param lengths since text */ - // NULL, /* default to all text params */ - // 1); /* ask for binary results */ - - int resultcode = PQresultStatus(pResult); - - switch (resultcode) { - case PGRES_EMPTY_QUERY: - //std::cout << "EMPTY QUERY" << std::endl; - break; - case PGRES_COMMAND_OK: - //std::cout << "QUERY SUCCESS AND NO DATA FOUND" << std::endl; - break; - case PGRES_TUPLES_OK: { - //std::cout << "Number of tuples returned :::: " << PQntuples(pResult) << std::endl; - - int nRows = PQntuples(pResult); // # of rows - int nCols = PQnfields(pResult); // # of cols - std::vector columnTypes(nCols); - std::vector columnNames(nCols); - - if (PQntuples(pResult) != 0) { - for (int j = 0; j < nCols; j++) { - std::string name = PQfname(pResult, j); - columnNames[j] = name; - - int columnType = PQftype(pResult, j); - if (columnType != 16 - && columnType != 18 - && columnType != 19 - && columnType != 20 - && columnType != 21 - && columnType != 23 - && columnType != 24 - && columnType != 25 - && columnType != 26 - && columnType != 114 - && columnType != 700 - && columnType != 701 - && columnType != 790 - && columnType != 1042 - && columnType != 1043) { - auto columnTypeInt = PQfformat(pResult, j); - if (columnTypeInt == 0) { //handling only text data - columnType = 25; - } - } - - switch (columnType) { - case 16: columnTypes[j] = ColumnType::BOOLEAN; break; - case 18: columnTypes[j] = ColumnType::CHARACTER; break; - case 19: columnTypes[j] = ColumnType::NAME; break; - case 20: columnTypes[j] = ColumnType::INT8OID; break; - case 21: columnTypes[j] = ColumnType::INT2OID; break; - case 23: columnTypes[j] = ColumnType::INTEGER; break; - case 24: columnTypes[j] = ColumnType::REGPROCOID; break; - case 25: columnTypes[j] = ColumnType::STRING; break; - case 26: columnTypes[j] = ColumnType::OIDOID; break; - case 114: columnTypes[j] = ColumnType::JSONOID; break; - case 700: columnTypes[j] = ColumnType::FLOAT; break; - case 701: columnTypes[j] = ColumnType::DOUBLE; break; - case 790: columnTypes[j] = ColumnType::MONEY; break; - case 1042: columnTypes[j] = ColumnType::VARCHAR2; break; - case 1043: columnTypes[j] = ColumnType::VARCHAR; break; - default: - PQclear(pResult); - throw std::runtime_error("Unsupported column type"); - } - } // end for loop j - - for (int i = 0; i < nRows; i++) { - DynamicObject row; - for (int k = 0; k < nCols; k++) { - if (PQgetisnull(pResult, i, k)) { - //std::cout << "value is null" << std::endl; - row.insert(columnNames[k], Dynamic()); - } else { - //std::cout << "Fetch Data" << std::endl; - auto rawValue = PQgetvalue(pResult, i, k); - - switch (columnTypes[k]) { - case ColumnType::BOOLEAN: { - bool value; - if ('t' == *rawValue) { - value = true; - } else { - value = false; - } - row.insert(columnNames[k], value); - break; - } - case ColumnType::CHARACTER: row.insert(columnNames[k], rawValue); break; - case ColumnType::NAME: row.insert(columnNames[k], rawValue); break; - case ColumnType::INTEGER: { - Poco::Int32 data; - if (!Poco::NumberParser::tryParse(rawValue, data)) - row.insert(columnNames[k], Dynamic()); - else { - int value = data; - row.insert(columnNames[k], 
value); - } - break; - } - case ColumnType::JSONOID: row.insert(columnNames[k], rawValue); break; - case ColumnType::FLOAT: { - double data; - if (!Poco::NumberParser::tryParseFloat(rawValue, data)) - row.insert(columnNames[k], Dynamic()); - else { - float value = data; - row.insert(columnNames[k], value); - } - break; - } - case ColumnType::DOUBLE: { - auto x = static_cast(rawValue); - - double value; - if (!Poco::NumberParser::tryParseFloat(rawValue, value)) - row.insert(columnNames[k], Dynamic()); - else { - row.insert(columnNames[k], value); - } - break; - } - case ColumnType::MONEY: row.insert(columnNames[k], rawValue); break; - case ColumnType::VARCHAR2: row.insert(columnNames[k], rawValue); break; - case ColumnType::STRING: - case ColumnType::VARCHAR: { - auto value = rawValue == nullptr ? Dynamic() : Dynamic(std::string(static_cast(rawValue))); - row.insert(columnNames[k], value); - break; - } - case ColumnType::OIDOID: row.insert(columnNames[k], rawValue); break; - case ColumnType::REGPROCOID: row.insert(columnNames[k], rawValue); break; - case ColumnType::INT2OID: row.insert(columnNames[k], rawValue); break; - case ColumnType::INT8OID: { - Poco::Int64 data; - if (!Poco::NumberParser::tryParse64(rawValue, data)) - row.insert(columnNames[k], Dynamic()); - else { - Int64 value = data; - row.insert(columnNames[k], value); - } - break; - } - default: - throw std::runtime_error("Unsupported column type"); - } - } - } // end for loop k -- cols - result.push_back(row); - } // end for loop i -- rows - - } - break; - } - case PGRES_COPY_OUT: - break; - case PGRES_COPY_IN: - break; - case PGRES_BAD_RESPONSE: - break; - case PGRES_NONFATAL_ERROR: - break; - case PGRES_FATAL_ERROR: { - std::string msg = (boost::format("PostgreSQL fatal error - '%1%' with connection string '%2%") % PQerrorMessage(_conn) % _dbConnString).str(); - MOJA_LOG_ERROR << "ProviderRelationalPocoPostgreSQL: " << msg; - BOOST_THROW_EXCEPTION(QueryException() << SQL(query) << SQLError(msg)); - break; - } - case PGRES_COPY_BOTH: - break; - case PGRES_SINGLE_TUPLE: - MOJA_LOG_ERROR << "ProviderRelationalPocoPostgreSQL: " << "Not sure what to do with PGRES_SINGLE_TUPLE"; - //std::cout << PQgetvalue(pResult, 0, 0) << std::endl; - break; - default: - break; - } // end switch resultcode - - PQclear(pResult); - - Dynamic dataset{ result }; - _cache.add(query, dataset); - return dataset; - -} -} -} // moja::datarepository diff --git a/Source/moja.modules.poco/src/libraryfactory.cpp b/Source/moja.modules.poco/src/libraryfactory.cpp index 13af58e..5b8bbe7 100644 --- a/Source/moja.modules.poco/src/libraryfactory.cpp +++ b/Source/moja.modules.poco/src/libraryfactory.cpp @@ -5,8 +5,8 @@ // Transforms -#include "moja/modules/poco/pocomongodbquerytransform.h" #include "moja/modules/poco/nosqlcollectiontransform.h" +#include "moja/modules/poco/pocomongodbquerytransform.h" // Flint Data @@ -17,14 +17,14 @@ // Other +using moja::flint::DataRepositoryProviderRegistration; +using moja::flint::FlintDataFactoryRegistration; +using moja::flint::FlintDataRegistration; +using moja::flint::IFlintData; using moja::flint::IModule; using moja::flint::ITransform; -using moja::flint::IFlintData; using moja::flint::ModuleRegistration; using moja::flint::TransformRegistration; -using moja::flint::FlintDataRegistration; -using moja::flint::FlintDataFactoryRegistration; -using moja::flint::DataRepositoryProviderRegistration; namespace moja { namespace modules { @@ -32,37 +32,50 @@ namespace poco { extern "C" { - MOJA_LIB_API int 
getModuleRegistrations(ModuleRegistration* outModuleRegistrations) { - int index = 0; - return index; - } - - MOJA_LIB_API int getTransformRegistrations(TransformRegistration* outTransformRegistrations) { - int index = 0; - //outTransformRegistrations[index++] = ModuleRegistration{ "WriteSpatialAWSS3", []() -> flint::IModule* { return new base::WriteVariableGrid(std::make_shared()); } }; - - outTransformRegistrations[index++] = TransformRegistration{ "PocoMongoDBQueryTransform", []() -> flint::ITransform* { return new PocoMongoDBQueryTransform(); } }; - outTransformRegistrations[index++] = TransformRegistration{ "NoSQLCollectionTransform", []() -> flint::ITransform* { return new NoSQLCollectionTransform(); } }; - return index; - } - - MOJA_LIB_API int getFlintDataRegistrations(FlintDataRegistration* outFlintDataRegistrations) { - auto index = 0; - return index; - } - - MOJA_LIB_API int getFlintDataFactoryRegistrations(FlintDataFactoryRegistration* outFlintDataFactoryRegistrations) { - auto index = 0; - return index; - } - - MOJA_LIB_API int getDataRepositoryProviderRegistrations(moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration) { - auto index = 0; - outDataRepositoryProviderRegistration[index++] = DataRepositoryProviderRegistration{ "PocoJSON", static_cast(datarepository::ProviderTypes::Relational), [](const DynamicObject& settings) ->std::shared_ptr { return std::make_shared(settings); } }; - outDataRepositoryProviderRegistration[index++] = DataRepositoryProviderRegistration{ "PocoMongoDB", static_cast(datarepository::ProviderTypes::Relational), [](const DynamicObject& settings) ->std::shared_ptr { return std::make_shared(settings); } }; - return index; - } +MOJA_LIB_API int getModuleRegistrations(ModuleRegistration* outModuleRegistrations) { + int index = 0; + return index; +} + +MOJA_LIB_API int getTransformRegistrations(TransformRegistration* outTransformRegistrations) { + int index = 0; + // outTransformRegistrations[index++] = ModuleRegistration{ "WriteSpatialAWSS3", []() -> flint::IModule* { return + // new base::WriteVariableGrid(std::make_shared()); } }; + outTransformRegistrations[index++] = TransformRegistration{ + "PocoMongoDBQueryTransform", []() -> flint::ITransform* { return new PocoMongoDBQueryTransform(); }}; + outTransformRegistrations[index++] = TransformRegistration{ + "NoSQLCollectionTransform", []() -> flint::ITransform* { return new NoSQLCollectionTransform(); }}; + return index; +} + +MOJA_LIB_API int getFlintDataRegistrations(FlintDataRegistration* outFlintDataRegistrations) { + auto index = 0; + return index; +} + +MOJA_LIB_API int getFlintDataFactoryRegistrations(FlintDataFactoryRegistration* outFlintDataFactoryRegistrations) { + auto index = 0; + return index; +} + +MOJA_LIB_API int getDataRepositoryProviderRegistrations( + moja::flint::DataRepositoryProviderRegistration* outDataRepositoryProviderRegistration) { + auto index = 0; + outDataRepositoryProviderRegistration[index++] = DataRepositoryProviderRegistration{ + "PocoJSON", static_cast(datarepository::ProviderTypes::Relational), + [](const DynamicObject& settings) -> std::shared_ptr { + return std::make_shared(settings); + }}; + outDataRepositoryProviderRegistration[index++] = DataRepositoryProviderRegistration{ + "PocoMongoDB", static_cast(datarepository::ProviderTypes::Relational), + [](const DynamicObject& settings) -> std::shared_ptr { + return std::make_shared(settings); + }}; + return index; +} } -}}} +} // namespace poco +} // namespace modules +} // namespace 
moja diff --git a/Source/moja.modules.poco/src/nosqlcollectiontransform.cpp b/Source/moja.modules.poco/src/nosqlcollectiontransform.cpp index 2364096..3e0fb8b 100644 --- a/Source/moja.modules.poco/src/nosqlcollectiontransform.cpp +++ b/Source/moja.modules.poco/src/nosqlcollectiontransform.cpp @@ -1,11 +1,11 @@ #include "moja/modules/poco/nosqlcollectiontransform.h" -#include "moja/flint/flintexceptions.h" -#include "moja/flint/ivariable.h" -#include "moja/flint/ilandunitcontroller.h" - #include "moja/modules/poco/providernosqlpocomongodb.h" +#include +#include +#include + #include using moja::datarepository::IProviderNoSQLInterface; @@ -14,38 +14,42 @@ namespace moja { namespace modules { namespace poco { -NoSQLCollectionTransform::NoSQLCollectionTransform() : _landUnitController(nullptr), _dataRepository(nullptr), _keyVariable(nullptr) {} - -void NoSQLCollectionTransform::configure(DynamicObject config, const flint::ILandUnitController& landUnitController, moja::datarepository::DataRepository& dataRepository) { - _config = config; - _keyName = config["key_name"].convert(); - _keyValueVariable = config["key_value_name"].convert(); - _landUnitController = &landUnitController; - _dataRepository = &dataRepository; - - _keyVariable = _landUnitController->getVariable(_keyValueVariable); - - auto mongodbSettings = _config["mogo_settings"].extract(); - _provider = std::make_shared(mongodbSettings); - auto arr = _provider->GetDataSet("{}"); - for (auto& item : arr) { - int keyValue = item[_keyName]; - _collection[keyValue] = item; - } - _dataSet = DynamicObject(); +NoSQLCollectionTransform::NoSQLCollectionTransform() + : _landUnitController(nullptr), _dataRepository(nullptr), _keyVariable(nullptr) {} + +void NoSQLCollectionTransform::configure(DynamicObject config, const flint::ILandUnitController& landUnitController, + moja::datarepository::DataRepository& dataRepository) { + _config = config; + _keyName = config["key_name"].convert(); + _keyValueVariable = config["key_value_name"].convert(); + _landUnitController = &landUnitController; + _dataRepository = &dataRepository; + + _keyVariable = _landUnitController->getVariable(_keyValueVariable); + + auto mongodbSettings = _config["mogo_settings"].extract(); + _provider = std::make_shared(mongodbSettings); + auto arr = _provider->GetDataSet("{}"); + for (auto& item : arr) { + int keyValue = item[_keyName]; + _collection[keyValue] = item; + } + _dataSet = DynamicObject(); } void NoSQLCollectionTransform::controllerChanged(const flint::ILandUnitController& controller) { - configure(_config, controller, *_dataRepository); + configure(_config, controller, *_dataRepository); }; const DynamicVar& NoSQLCollectionTransform::value() const { - int lookupVal = _keyVariable->value(); - auto it = _collection.find(lookupVal); - if (it != _collection.end()) { - return it->second; - } - return _dataSet; + int lookupVal = _keyVariable->value(); + auto it = _collection.find(lookupVal); + if (it != _collection.end()) { + return it->second; + } + return _dataSet; } -}}} +} // namespace poco +} // namespace modules +} // namespace moja diff --git a/Source/moja.modules.poco/src/pocomongodbquerytransform.cpp b/Source/moja.modules.poco/src/pocomongodbquerytransform.cpp index 50162ee..1a705b6 100644 --- a/Source/moja.modules.poco/src/pocomongodbquerytransform.cpp +++ b/Source/moja.modules.poco/src/pocomongodbquerytransform.cpp @@ -1,15 +1,16 @@ #include "moja/modules/poco/pocomongodbquerytransform.h" -#include "moja/flint/flintexceptions.h" -#include 
"moja/flint/ioperationmanager.h" -#include "moja/flint/ilandunitcontroller.h" -#include "moja/flint/ipool.h" -#include "moja/flint/ivariable.h" - -#include "moja/datarepository/datarepository.h" #include "moja/modules/poco/providernosqlpocomongodb.h" -#include "moja/dynamic.h" +#include +#include +#include +#include +#include + +#include + +#include #include @@ -21,81 +22,81 @@ namespace poco { PocoMongoDBQueryTransform::PocoMongoDBQueryTransform() : _landUnitController(nullptr), _dataRepository(nullptr) {} -void PocoMongoDBQueryTransform::configure(DynamicObject config, const flint::ILandUnitController& landUnitController, moja::datarepository::DataRepository& dataRepository) { - _config = config; - _queryStr = config["queryString"].convert(); - _filterStr = config["filterString"].convert(); - _landUnitController = &landUnitController; - _dataRepository = &dataRepository; - - const std::string providerName = config["provider"]; - _provider = std::static_pointer_cast(_dataRepository->getProvider(providerName)); - - // Search string for pool names. - _pools.clear(); - auto poolNames = extractTokens(poolMarker, _queryStr); - for (auto name : poolNames) { - _pools.push_back(landUnitController.operationManager()->getPool(name)); - } - - // Search string for variable names. - _variables.clear(); - auto varNames = extractTokens(varMarker, _queryStr); - for (auto var : varNames) { - auto propertySeparator = var.find_first_of('.'); - if (propertySeparator != std::string::npos) { - // Reference to a multi-property variable. - auto prop = var.substr(propertySeparator + 1, var.length()); - var = var.substr(0, propertySeparator); - auto val = landUnitController.getVariable(var); - _variables.push_back(std::make_tuple(val, prop)); - } - else { - // Reference to a single-value variable. - auto val = landUnitController.getVariable(var); - _variables.push_back(std::make_tuple(val, DynamicVar())); - } - } +void PocoMongoDBQueryTransform::configure(DynamicObject config, const flint::ILandUnitController& landUnitController, + moja::datarepository::DataRepository& dataRepository) { + _config = config; + _queryStr = config["queryString"].convert(); + _filterStr = config["filterString"].convert(); + _landUnitController = &landUnitController; + _dataRepository = &dataRepository; + + const std::string providerName = config["provider"]; + _provider = std::static_pointer_cast(_dataRepository->getProvider(providerName)); + + // Search string for pool names. + _pools.clear(); + auto poolNames = extractTokens(poolMarker, _queryStr); + for (auto name : poolNames) { + _pools.push_back(landUnitController.operationManager()->getPool(name)); + } + + // Search string for variable names. + _variables.clear(); + auto varNames = extractTokens(varMarker, _queryStr); + for (auto var : varNames) { + auto propertySeparator = var.find_first_of('.'); + if (propertySeparator != std::string::npos) { + // Reference to a multi-property variable. + auto prop = var.substr(propertySeparator + 1, var.length()); + var = var.substr(0, propertySeparator); + auto val = landUnitController.getVariable(var); + _variables.push_back(std::make_tuple(val, prop)); + } else { + // Reference to a single-value variable. 
+ auto val = landUnitController.getVariable(var); + _variables.push_back(std::make_tuple(val, DynamicVar())); + } + } } void PocoMongoDBQueryTransform::controllerChanged(const flint::ILandUnitController& controller) { - configure(_config, controller, *_dataRepository); + configure(_config, controller, *_dataRepository); }; const DynamicVar& PocoMongoDBQueryTransform::value() const { - // Build string here from current variable and pool values. - std::vector _variablesValues; - for (auto v : _variables) { - const flint::IVariable* var; - DynamicVar property; - std::tie(var, property) = v; - auto values = formatVariableValues(*var, property); - _variablesValues.push_back(values); - } - - std::vector _poolValues; - for (auto p : _pools) { - std::stringstream ss; - ss << std::setprecision(15) << p->value(); - _poolValues.push_back(ss.str()); - } - - auto query(_queryStr); - replaceTokens(varMarker, query, _variablesValues); - replaceTokens(poolMarker, query, _poolValues); - - auto itemCount = _provider->Count(); - auto result = _provider->SendQueryRequest(query, _filterStr, itemCount); - switch (detectResultType(result)) { - case PocoMongoDBQueryTransform::ResultType::MultiColumnMultiRow: { - _dataSet = result; - break; - } - case PocoMongoDBQueryTransform::ResultType::MultiColumnSingleRow: { - _dataSet = result[0]; - break; - } -#if 0 // TODO: Fix this + // Build string here from current variable and pool values. + std::vector _variablesValues; + for (auto v : _variables) { + const flint::IVariable* var; + DynamicVar property; + std::tie(var, property) = v; + auto values = formatVariableValues(*var, property); + _variablesValues.push_back(values); + } + + std::vector _poolValues; + for (auto p : _pools) { + std::stringstream ss; + ss << std::setprecision(15) << p->value(); + _poolValues.push_back(ss.str()); + } + + auto query(_queryStr); + replaceTokens(varMarker, query, _variablesValues); + replaceTokens(poolMarker, query, _poolValues); + + auto itemCount = _provider->Count(); + auto result = _provider->SendQueryRequest(query, _filterStr, itemCount); + switch (detectResultType(result)) { + case PocoMongoDBQueryTransform::ResultType::MultiColumnMultiRow: { + _dataSet = result; + break; + } + case PocoMongoDBQueryTransform::ResultType::MultiColumnSingleRow: { + _dataSet = result[0]; + break; + } +#if 0 // TODO: Fix this case PocoMongoDBQueryTransform::ResultType::SingleColumnMultiRow: { auto results = result.extract(); std::vector dataSet; @@ -111,119 +112,115 @@ const DynamicVar& PocoMongoDBQueryTransform::value() const { break; } #endif - default: - case PocoMongoDBQueryTransform::ResultType::Empty: { - _dataSet = DynamicVar(); - break; - } - } - - return _dataSet; + default: + case PocoMongoDBQueryTransform::ResultType::Empty: { + _dataSet = DynamicVar(); + break; + } + } + + return _dataSet; } PocoMongoDBQueryTransform::ResultType PocoMongoDBQueryTransform::detectResultType(const DynamicVector& results) { - - if (results.size() == 0) { - return PocoMongoDBQueryTransform::ResultType::Empty; - } - - auto numRows = results.size(); - auto numCols = results[0].size(); - - if (numCols > 1) { - return numRows > 1 ? PocoMongoDBQueryTransform::ResultType::MultiColumnMultiRow - : PocoMongoDBQueryTransform::ResultType::MultiColumnSingleRow; - } - return numRows > 1 ? 
PocoMongoDBQueryTransform::ResultType::SingleColumnMultiRow - : PocoMongoDBQueryTransform::ResultType::SingleColumnSingleRow; + if (results.size() == 0) { + return PocoMongoDBQueryTransform::ResultType::Empty; + } + + auto numRows = results.size(); + auto numCols = results[0].size(); + + if (numCols > 1) { + return numRows > 1 ? PocoMongoDBQueryTransform::ResultType::MultiColumnMultiRow + : PocoMongoDBQueryTransform::ResultType::MultiColumnSingleRow; + } + return numRows > 1 ? PocoMongoDBQueryTransform::ResultType::SingleColumnMultiRow + : PocoMongoDBQueryTransform::ResultType::SingleColumnSingleRow; } std::string PocoMongoDBQueryTransform::formatVariableValues(const flint::IVariable& var, DynamicVar& property) { - - auto& varValue = var.value(); - std::vector values; - if (property.isEmpty()) { - // Single-value variable reference. - if (varValue.isVector()) { - auto varVector = varValue.extract>(); - for (auto value : varVector) { - values.push_back(value); - } - } - else { - values.push_back(varValue); - } - } - else { - // Multi-value variable reference. - std::string propertyName = property; - if (varValue.isVector()) { - auto varVector = varValue.extract>(); - for (auto value : varVector) { - values.push_back(value[propertyName]); - } - } - else { - if (varValue.isStruct()) { - values.push_back(varValue[propertyName]); - } - } - } - - std::vector strings; - for (auto value : values) { - auto str = value.convert(); - if (value.isString()) { - // Enclose string values in SQL single quotes. - str = "\"" + str + "\""; - } - strings.push_back(str); - } - return boost::algorithm::join(strings, ", "); + auto& varValue = var.value(); + std::vector values; + if (property.isEmpty()) { + // Single-value variable reference. + if (varValue.isVector()) { + auto varVector = varValue.extract>(); + for (auto value : varVector) { + values.push_back(value); + } + } else { + values.push_back(varValue); + } + } else { + // Multi-value variable reference. + std::string propertyName = property; + if (varValue.isVector()) { + auto varVector = varValue.extract>(); + for (auto value : varVector) { + values.push_back(value[propertyName]); + } + } else { + if (varValue.isStruct()) { + values.push_back(varValue[propertyName]); + } + } + } + + std::vector strings; + for (auto value : values) { + auto str = value.convert(); + if (value.isString()) { + // Enclose string values in SQL single quotes. + str = "\"" + str + "\""; + } + strings.push_back(str); + } + return boost::algorithm::join(strings, ", "); } // Searching for tokens of the form "{:name[.property]}", for example "{pool:totalCM}" // or "{var:Fractions.clay}" and return token names. 
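The comment above describes the token syntax the transform substitutes into MongoDB queries, for example "{pool:totalCM}" and "{var:Fractions.clay}". The standalone sketch below mirrors that scanning logic outside the class (extractTokens itself is a private static member) and prints the token names found in a made-up query; it is an illustration, not the library code.

```
// Illustrative re-implementation of the scan described above: find token names
// of the form "{<type>:<name>}" in a query string. extractTokens() itself is a
// private static member, so this standalone sketch mirrors its logic instead
// of calling it; the query text is made up.
#include <iostream>
#include <string>
#include <vector>

std::vector<std::string> extractTokensSketch(const std::string& tokenType, const std::string& query) {
    const std::string tokenStart = "{" + tokenType + ":";
    const char tokenEnd = '}';
    std::vector<std::string> tokens;
    auto startPos = query.find(tokenStart);
    while (startPos != std::string::npos) {
        const auto valueStart = startPos + tokenStart.length();
        const auto endPos = query.find(tokenEnd, startPos + 1);
        if (endPos != std::string::npos && endPos > valueStart) {
            tokens.push_back(query.substr(valueStart, endPos - valueStart));
            startPos = query.find(tokenStart, endPos + 1);
        } else {
            // No closing brace for this token; skip past it.
            startPos = query.find(tokenStart, startPos + 1);
        }
    }
    return tokens;
}

int main() {
    const std::string query = "{ \"clay\": {var:Fractions.clay}, \"cm\": {pool:totalCM} }";
    for (const auto& name : extractTokensSketch("var", query)) std::cout << "var: " << name << "\n";
    for (const auto& name : extractTokensSketch("pool", query)) std::cout << "pool: " << name << "\n";
    // Prints "var: Fractions.clay" and "pool: totalCM".
}
```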
-std::vector PocoMongoDBQueryTransform::extractTokens(const std::string& tokenType, const std::string& query) { - - std::string tokenStart = "{" + tokenType + ":"; - char tokenEnd = '}'; - auto tokenStartLen = tokenStart.length(); - auto tokenStartPos = query.find(tokenStart); // look for a token start - - std::vector tokens; - while (tokenStartPos != std::string::npos) { - auto tokenValueStartPos = tokenStartPos + tokenStartLen; - auto tokenEndPos = query.find(tokenEnd, tokenStartPos + 1); // look for the end of the token - if (tokenEndPos != std::string::npos && tokenEndPos > tokenValueStartPos) { - auto tokenValueLen = tokenEndPos - tokenValueStartPos; - tokens.push_back(query.substr(tokenValueStartPos, tokenValueLen)); - tokenStartPos = query.find(tokenStart, tokenEndPos + 1); - } - else { // didn't find an end token marker so ignore this token - tokenStartPos = query.find(tokenStart, tokenStartPos + 1); - } - } - - return tokens; +std::vector PocoMongoDBQueryTransform::extractTokens(const std::string& tokenType, + const std::string& query) { + std::string tokenStart = "{" + tokenType + ":"; + char tokenEnd = '}'; + auto tokenStartLen = tokenStart.length(); + auto tokenStartPos = query.find(tokenStart); // look for a token start + + std::vector tokens; + while (tokenStartPos != std::string::npos) { + auto tokenValueStartPos = tokenStartPos + tokenStartLen; + auto tokenEndPos = query.find(tokenEnd, tokenStartPos + 1); // look for the end of the token + if (tokenEndPos != std::string::npos && tokenEndPos > tokenValueStartPos) { + auto tokenValueLen = tokenEndPos - tokenValueStartPos; + tokens.push_back(query.substr(tokenValueStartPos, tokenValueLen)); + tokenStartPos = query.find(tokenStart, tokenEndPos + 1); + } else { // didn't find an end token marker so ignore this token + tokenStartPos = query.find(tokenStart, tokenStartPos + 1); + } + } + + return tokens; } -void PocoMongoDBQueryTransform::replaceTokens(const std::string& tokenType, std::string& query, std::vector values) { - std::vector tokens; - std::string tokenStart = "{" + tokenType + ":"; - auto tokenEnd = '}'; - - auto tokenPos = query.find(tokenStart); // look for a token start - for (auto value : values) { - auto tokenEndPos = query.find(tokenEnd, tokenPos + 1); // look for the end of the token - if (tokenEndPos != std::string::npos) { - query.replace(tokenPos, tokenEndPos - tokenPos + 1, value); - tokenPos = query.find(tokenStart, tokenPos + value.length()); - } - else { // didn't find an end token marker so ignore this token - tokenPos = query.find(tokenStart, tokenPos + 1); - } - } +void PocoMongoDBQueryTransform::replaceTokens(const std::string& tokenType, std::string& query, + std::vector values) { + std::vector tokens; + std::string tokenStart = "{" + tokenType + ":"; + auto tokenEnd = '}'; + + auto tokenPos = query.find(tokenStart); // look for a token start + for (auto value : values) { + auto tokenEndPos = query.find(tokenEnd, tokenPos + 1); // look for the end of the token + if (tokenEndPos != std::string::npos) { + query.replace(tokenPos, tokenEndPos - tokenPos + 1, value); + tokenPos = query.find(tokenStart, tokenPos + value.length()); + } else { // didn't find an end token marker so ignore this token + tokenPos = query.find(tokenStart, tokenPos + 1); + } + } } -}}} +} // namespace poco +} // namespace modules +} // namespace moja diff --git a/Source/moja.modules.poco/src/pocomongoutils.cpp b/Source/moja.modules.poco/src/pocomongoutils.cpp index acda80e..7580c4e 100644 --- 
a/Source/moja.modules.poco/src/pocomongoutils.cpp +++ b/Source/moja.modules.poco/src/pocomongoutils.cpp @@ -1,20 +1,22 @@ #include "moja/modules/poco/pocomongoutils.h" -#include "moja/datarepository/datarepositoryexceptions.h" -#include "moja/dynamic.h" -#include +#include + +#include + +#include #include +#include #include -#include #include +using moja::datarepository::ConnectionFailedException; +using moja::datarepository::FileName; using moja::datarepository::FileNotFoundException; using moja::datarepository::NotImplementedException; -using moja::datarepository::FileName; using moja::datarepository::QueryException; using moja::datarepository::SQL; -using moja::datarepository::ConnectionFailedException; namespace moja { namespace modules { @@ -23,271 +25,258 @@ namespace poco { // -------------------------------------------------------------------------------------------- DynamicVector ConvertPocoMongoDocumentToDynamic(Poco::MongoDB::Array::Ptr& arr) { - DynamicVector vec; - auto count = arr->size(); - for (int i = 0; i < count; i++) { - if (arr->isType(i)) { - auto x = arr->get(i); - vec.push_back(ConvertPocoMongoDocumentToDynamic(x)); - } - else if (arr->isType(i)) { - auto a = arr->get(i); - vec.push_back(ConvertPocoMongoDocumentToDynamic(a)); - } - else if (arr->isType(i)) { - vec.push_back(arr->get(i)); - } - else if (arr->isType(i)) { - vec.push_back(arr->get(i)); - } - else if (arr->isType(i)) { - vec.push_back(arr->get(i)); - } - else if (arr->isType(i)) { - vec.push_back(arr->get(i)); - } - else if (arr->isType(i)) { - vec.push_back(arr->get(i)->toString()); - } - } - return vec; + DynamicVector vec; + auto count = arr->size(); + for (int i = 0; i < count; i++) { + if (arr->isType(i)) { + auto x = arr->get(i); + vec.push_back(ConvertPocoMongoDocumentToDynamic(x)); + } else if (arr->isType(i)) { + auto a = arr->get(i); + vec.push_back(ConvertPocoMongoDocumentToDynamic(a)); + } else if (arr->isType(i)) { + vec.push_back(arr->get(i)); + } else if (arr->isType(i)) { + vec.push_back(arr->get(i)); + } else if (arr->isType(i)) { + vec.push_back(arr->get(i)); + } else if (arr->isType(i)) { + vec.push_back(arr->get(i)); + } else if (arr->isType(i)) { + vec.push_back(arr->get(i)->toString()); + } + } + return vec; } DynamicObject ConvertPocoMongoDocumentToDynamic(Poco::MongoDB::Document::Ptr& document) { - DynamicObject result; + DynamicObject result; - std::vector elementNames; - document->elementNames(elementNames); + std::vector elementNames; + document->elementNames(elementNames); - for (auto& elementName : elementNames) { + for (auto& elementName : elementNames) { + if (document->isType(elementName)) { + auto subDocument = document->get(elementName); + result[elementName] = ConvertPocoMongoDocumentToDynamic(subDocument); + } else if (document->isType(elementName)) { + auto arr = document->get(elementName); + result[elementName] = ConvertPocoMongoDocumentToDynamic(arr); + } else if (document->isType(elementName)) { + auto objId = document->get(elementName); + result[elementName] = objId->toString(); + } else if (document->isType(elementName)) { + result[elementName] = document->get(elementName); + } else if (document->isType(elementName)) { + result[elementName] = document->get(elementName); + } else if (document->isType(elementName)) { + result[elementName] = document->get(elementName); + } else if (document->isType(elementName)) { + result[elementName] = document->get(elementName); + } + } - if (document->isType(elementName)) { - auto subDocument = document->get(elementName); - 
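Aside on pocomongoutils: ConvertPocoMongoDocumentToDynamic walks a Poco::MongoDB document element by element and produces moja Dynamic values. A hedged usage sketch follows; the field names and values are invented, and it assumes string and double elements are among the scalar types the conversion handles.

```
// Usage sketch for ConvertPocoMongoDocumentToDynamic(): build a small BSON
// document by hand and convert it into moja Dynamic values. Field names and
// values are invented, and this assumes string and double elements are among
// the scalar types the conversion handles.
#include "moja/modules/poco/pocomongoutils.h"

#include <Poco/MongoDB/Document.h>

#include <iostream>
#include <string>

int main() {
    Poco::MongoDB::Document::Ptr doc = new Poco::MongoDB::Document();
    doc->add("name", std::string("clay loam"));  // string element
    doc->add("fraction", 0.37);                  // double element

    auto converted = moja::modules::poco::ConvertPocoMongoDocumentToDynamic(doc);
    std::cout << converted["name"].convert<std::string>() << ": "
              << converted["fraction"].convert<double>() << "\n";
    return 0;
}
```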
result[elementName] = ConvertPocoMongoDocumentToDynamic(subDocument); - } - else if (document->isType(elementName)) { - auto arr = document->get(elementName); - result[elementName] = ConvertPocoMongoDocumentToDynamic(arr); - } - else if (document->isType(elementName)) { - auto objId = document->get(elementName); - result[elementName] = objId->toString(); - } - else if (document->isType(elementName)) { - result[elementName] = document->get(elementName); - } - else if (document->isType(elementName)) { - result[elementName] = document->get(elementName); - } - else if (document->isType(elementName)) { - result[elementName] = document->get(elementName); - } - else if (document->isType(elementName)) { - result[elementName] = document->get(elementName); - } - } - - return result; + return result; } void ConvertPocoMongoDocumentToDynamic(DynamicVar& dynamic, Poco::MongoDB::Document::Vector& documents) { - - // TODO: look into it - // Not sure on this so will always return a Vector of documents - //if (documents.size() == 1) { - // dynamic = ConvertPocoMongoDocumentToDynamic(documents[0]); - //} - if (documents.size() >= 1) { - DynamicVector vec; - for (auto& document : documents) { - vec.push_back(ConvertPocoMongoDocumentToDynamic(document)); - } - dynamic = vec; - } - else { - dynamic = DynamicObject(); - } + // TODO: look into it + // Not sure on this so will always return a Vector of documents + // if (documents.size() == 1) { + // dynamic = ConvertPocoMongoDocumentToDynamic(documents[0]); + //} + if (documents.size() >= 1) { + DynamicVector vec; + for (auto& document : documents) { + vec.push_back(ConvertPocoMongoDocumentToDynamic(document)); + } + dynamic = vec; + } else { + dynamic = DynamicObject(); + } } void ConvertPocoMongoDocumentToDynamic(DynamicVector& vec, Poco::MongoDB::Document::Vector& documents) { - - // TODO: look into it - // Not sure on this so will always return a Vector of documents - //if (documents.size() == 1) { - // dynamic = ConvertPocoMongoDocumentToDynamic(documents[0]); - //} - if (documents.size() >= 1) { - for (auto& document : documents) { - vec.push_back(ConvertPocoMongoDocumentToDynamic(document)); - } - } - else { - } + // TODO: look into it + // Not sure on this so will always return a Vector of documents + // if (documents.size() == 1) { + // dynamic = ConvertPocoMongoDocumentToDynamic(documents[0]); + //} + if (documents.size() >= 1) { + for (auto& document : documents) { + vec.push_back(ConvertPocoMongoDocumentToDynamic(document)); + } + } else { + } } // -------------------------------------------------------------------------------------------- Poco::MongoDB::Array::Ptr parsePocoJSONToMongoDBObj(Poco::JSON::Array::Ptr& val) { - Poco::MongoDB::Array::Ptr arrayDocument = new Poco::MongoDB::Array(); - auto count = val->size(); - for (auto i = 0; i < count; i++) { - auto indexStr = (boost::format("%1%") % i).str(); + Poco::MongoDB::Array::Ptr arrayDocument = new Poco::MongoDB::Array(); + auto count = val->size(); + for (auto i = 0; i < count; i++) { + auto indexStr = (boost::format("%1%") % i).str(); - if (val->isObject(i)) { - auto object = val->getObject(i); - auto subDocument = parsePocoJSONToMongoDBObj(object); - arrayDocument->add(indexStr, subDocument); - } - else if (val->isArray(i)) { - auto object = val->getArray(i); - auto subDocument = parsePocoJSONToMongoDBObj(object); - arrayDocument->add(indexStr, subDocument); - } - else { - auto object = val->get(i); - if (object.isArray()) { - auto msg = (boost::format("Unhandled data type in parse of json 
into poco::mongodb::document - Array")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (object.isBoolean()) { - auto value = object.extract(); - arrayDocument->add(indexStr, value); - } - else if (object.isDeque()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Deque")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (object.isEmpty()) { - } - else if (object.isInteger()) { - auto value = object.extract(); - arrayDocument->add(indexStr, value); - } - else if (object.isList()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - List")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (object.isNumeric()) { - auto value = object.extract(); - arrayDocument->add(indexStr, value); - } - else if (object.isSigned()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Signed")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (object.isString()) { - auto value = object.extract(); - // TODO: Check other types here, Date etc... - if (value.find("ObjectId(") != std::string::npos) { - auto oidStr = value.substr(9, 24); - Poco::MongoDB::ObjectId::Ptr oid = new Poco::MongoDB::ObjectId(oidStr); - arrayDocument->add(indexStr, oid); - } - else - arrayDocument->add(indexStr, value); - } - else if (object.isStruct()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Struct")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (object.isVector()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Vector")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Unexpected type")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - } - } - return arrayDocument; + if (val->isObject(i)) { + auto object = val->getObject(i); + auto subDocument = parsePocoJSONToMongoDBObj(object); + arrayDocument->add(indexStr, subDocument); + } else if (val->isArray(i)) { + auto object = val->getArray(i); + auto subDocument = parsePocoJSONToMongoDBObj(object); + arrayDocument->add(indexStr, subDocument); + } else { + auto object = val->get(i); + if (object.isArray()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Array")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (object.isBoolean()) { + auto value = object.extract(); + arrayDocument->add(indexStr, value); + } else if (object.isDeque()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Deque")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (object.isEmpty()) { + } else if (object.isInteger()) { + auto value = 
object.extract(); + arrayDocument->add(indexStr, value); + } else if (object.isList()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - List")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (object.isNumeric()) { + auto value = object.extract(); + arrayDocument->add(indexStr, value); + } else if (object.isSigned()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Signed")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (object.isString()) { + auto value = object.extract(); + // TODO: Check other types here, Date etc... + if (value.find("ObjectId(") != std::string::npos) { + auto oidStr = value.substr(9, 24); + Poco::MongoDB::ObjectId::Ptr oid = new Poco::MongoDB::ObjectId(oidStr); + arrayDocument->add(indexStr, oid); + } else + arrayDocument->add(indexStr, value); + } else if (object.isStruct()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Struct")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (object.isVector()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Vector")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Unexpected type")) + .str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } + } + } + return arrayDocument; } Poco::MongoDB::Document::Ptr parsePocoJSONToMongoDBObj(Poco::JSON::Object::Ptr& val) { - Poco::MongoDB::Document::Ptr document(new Poco::MongoDB::Document()); - auto& data = *(val.get()); + Poco::MongoDB::Document::Ptr document(new Poco::MongoDB::Document()); + auto& data = *(val.get()); - for (auto& var : data) { - if (val->isObject(var.first)) { - auto object = var.second.extract(); - auto subDocument = parsePocoJSONToMongoDBObj(object); - document->add(var.first, subDocument); - } - else if (val->isArray(var.first)) { - auto object = var.second.extract(); - auto subDocument = parsePocoJSONToMongoDBObj(object); - document->add(var.first, subDocument); - } - else { - if (var.second.isArray()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Array")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (var.second.isBoolean()) { - auto value = var.second.extract(); - document->add(var.first, value); - } - else if (var.second.isDeque()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Deque")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (var.second.isEmpty()) { - } - else if (var.second.isInteger()) { - auto value = var.second.extract(); - document->add(var.first, value); - } - else if (var.second.isList()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - List")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << 
moja::datarepository::AssertMsg(msg)); - } - else if (var.second.isNumeric()) { - auto value = var.second.extract(); - document->add(var.first, value); - } - else if (var.second.isSigned()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Signed")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (var.second.isString()) { - auto value = var.second.extract(); - // TODO: Check other types here, Date etc... - if (value.find("ObjectId(") != std::string::npos) { - auto oidStr = value.substr(9, 24); - Poco::MongoDB::ObjectId::Ptr oid = new Poco::MongoDB::ObjectId(oidStr); - document->add(var.first, oid); - } - else - document->add(var.first, value); - } - else if (var.second.isStruct()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Struct")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else if (var.second.isVector()) { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Vector")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - else { - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Unexpected type")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); - } - } - } - return document; + for (auto& var : data) { + if (val->isObject(var.first)) { + auto object = var.second.extract(); + auto subDocument = parsePocoJSONToMongoDBObj(object); + document->add(var.first, subDocument); + } else if (val->isArray(var.first)) { + auto object = var.second.extract(); + auto subDocument = parsePocoJSONToMongoDBObj(object); + document->add(var.first, subDocument); + } else { + if (var.second.isArray()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Array")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (var.second.isBoolean()) { + auto value = var.second.extract(); + document->add(var.first, value); + } else if (var.second.isDeque()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Deque")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (var.second.isEmpty()) { + } else if (var.second.isInteger()) { + auto value = var.second.extract(); + document->add(var.first, value); + } else if (var.second.isList()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - List")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (var.second.isNumeric()) { + auto value = var.second.extract(); + document->add(var.first, value); + } else if (var.second.isSigned()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Signed")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (var.second.isString()) { + auto value = var.second.extract(); + // TODO: Check other types here, Date etc... 
+ if (value.find("ObjectId(") != std::string::npos) { + auto oidStr = value.substr(9, 24); + Poco::MongoDB::ObjectId::Ptr oid = new Poco::MongoDB::ObjectId(oidStr); + document->add(var.first, oid); + } else + document->add(var.first, value); + } else if (var.second.isStruct()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Struct")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else if (var.second.isVector()) { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Vector")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } else { + auto msg = + (boost::format("Unhandled data type in parse of json into poco::mongodb::document - Unexpected type")) + .str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() + << moja::datarepository::AssertMsg(msg)); + } + } + } + return document; } Poco::MongoDB::Document::Ptr parsePocoJSONToMongoDBObj(Poco::DynamicAny& data) { - if (data.type() == typeid(Poco::JSON::Object::Ptr)) { - auto object = data.extract(); - return parsePocoJSONToMongoDBObj(object); - } - if (data.type() == typeid(Poco::JSON::Array::Ptr)) { - auto object = data.extract(); - return parsePocoJSONToMongoDBObj(object); - } - auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document")).str(); - BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); + if (data.type() == typeid(Poco::JSON::Object::Ptr)) { + auto object = data.extract(); + return parsePocoJSONToMongoDBObj(object); + } + if (data.type() == typeid(Poco::JSON::Array::Ptr)) { + auto object = data.extract(); + return parsePocoJSONToMongoDBObj(object); + } + auto msg = (boost::format("Unhandled data type in parse of json into poco::mongodb::document")).str(); + BOOST_THROW_EXCEPTION(datarepository::AssertionViolationException() << moja::datarepository::AssertMsg(msg)); } -}}} \ No newline at end of file +} // namespace poco +} // namespace modules +} // namespace moja \ No newline at end of file diff --git a/Source/moja.modules.poco/src/providernosqlpocojson.cpp b/Source/moja.modules.poco/src/providernosqlpocojson.cpp index 0733460..a4c46bb 100644 --- a/Source/moja.modules.poco/src/providernosqlpocojson.cpp +++ b/Source/moja.modules.poco/src/providernosqlpocojson.cpp @@ -1,55 +1,54 @@ #include "moja/modules/poco/providernosqlpocojson.h" -#include "moja/datarepository/datarepositoryexceptions.h" -#include "moja/pocojsonutils.h" -#include "moja/dynamic.h" +#include +#include +#include + +#include #include #include -#include #include -#include #include +#include -using moja::datarepository::FileNotFoundException; using moja::datarepository::FileName; +using moja::datarepository::FileNotFoundException; namespace moja { namespace modules { namespace poco { ProviderNoSQLPocoJSON::ProviderNoSQLPocoJSON(DynamicObject settings) - : _filePath(settings["file_path"].convert()) { - - _file = Poco::File(_filePath); - if (!_file.exists()) { - BOOST_THROW_EXCEPTION(FileNotFoundException() << FileName(_filePath)); - } - _lastModified = _file.getLastModified(); - std::ostringstream ostr; - if (_file.exists()) - { - Poco::FileInputStream fis(_filePath); - Poco::StreamCopier::copyStream(fis, ostr); - fis.close(); - } - _jsonStr = ostr.str(); - - Poco::JSON::Parser jsonParser; - auto parsedJSON 
= jsonParser.parse(ostr.str()); - auto parsedResult = jsonParser.result(); - _data = parsePocoJSONToDynamic(parsedResult); + : _filePath(settings["file_path"].convert()) { + _file = Poco::File(_filePath); + if (!_file.exists()) { + BOOST_THROW_EXCEPTION(FileNotFoundException() << FileName(_filePath)); + } + _lastModified = _file.getLastModified(); + std::ostringstream ostr; + if (_file.exists()) { + Poco::FileInputStream fis(_filePath); + Poco::StreamCopier::copyStream(fis, ostr); + fis.close(); + } + _jsonStr = ostr.str(); + + Poco::JSON::Parser jsonParser; + auto parsedJSON = jsonParser.parse(ostr.str()); + auto parsedResult = jsonParser.result(); + _data = parsePocoJSONToDynamic(parsedResult); } DynamicVector ProviderNoSQLPocoJSON::GetDataSet(const std::string& query) const { - DynamicVector results; - results.push_back(_data); - return results; + DynamicVector results; + results.push_back(_data); + return results; } -int ProviderNoSQLPocoJSON::Count() const { - return 0; -} +int ProviderNoSQLPocoJSON::Count() const { return 0; } -}}} \ No newline at end of file +} // namespace poco +} // namespace modules +} // namespace moja \ No newline at end of file diff --git a/Source/moja.modules.poco/src/providernosqlpocomongodb.cpp b/Source/moja.modules.poco/src/providernosqlpocomongodb.cpp index b6ec805..4e2a7f5 100644 --- a/Source/moja.modules.poco/src/providernosqlpocomongodb.cpp +++ b/Source/moja.modules.poco/src/providernosqlpocomongodb.cpp @@ -1,26 +1,26 @@ #include "moja/modules/poco/providernosqlpocomongodb.h" + #include "moja/modules/poco/pocomongoutils.h" -#include "moja/datarepository/datarepositoryexceptions.h" -#include "moja/logging.h" +#include +#include #include #include -#include #include -#include +#include #include #include #include +using moja::datarepository::ConnectionFailedException; +using moja::datarepository::FileName; using moja::datarepository::FileNotFoundException; using moja::datarepository::NotImplementedException; -using moja::datarepository::FileName; using moja::datarepository::QueryException; using moja::datarepository::SQL; -using moja::datarepository::ConnectionFailedException; namespace moja { namespace modules { @@ -29,86 +29,83 @@ namespace poco { //#define POCO_MONGO_DEBUG_INFO ProviderNoSQLPocoMongoDB::ProviderNoSQLPocoMongoDB(DynamicObject settings) : _connected(false), _cache(10000) { - _host = settings["host"].convert(); - _port = settings["port"]; - _collection = settings["collection"].convert(); - _database = settings["database"].convert(); - - try { - _connection.connect(_host, _port); - _connected = true; - } - catch (Poco::Net::ConnectionRefusedException&) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } - catch (...) 
{ - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + _host = settings["host"].convert(); + _port = settings["port"]; + _collection = settings["collection"].convert(); + _database = settings["database"].convert(); + + try { + _connection.connect(_host, _port); + _connected = true; + } catch (Poco::Net::ConnectionRefusedException&) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } catch (...) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } } ProviderNoSQLPocoMongoDB::~ProviderNoSQLPocoMongoDB() { - if (_connected) { - _connection.disconnect(); - _connected = false; - } + if (_connected) { + _connection.disconnect(); + _connected = false; + } } DynamicVector ProviderNoSQLPocoMongoDB::GetDataSet(const std::string& query) const { - auto cachedValue = _cache.get(query); - if (!cachedValue.isNull()) { - return *cachedValue; - } + auto cachedValue = _cache.get(query); + if (!cachedValue.isNull()) { + return *cachedValue; + } - try { - DynamicVector result; + try { + DynamicVector result; - // >> PARSE JSON << - Poco::JSON::Parser jsonParser; - auto parsedJSON = jsonParser.parse(query); - auto parsedResult = jsonParser.result(); - auto document = parsePocoJSONToMongoDBObj(parsedResult); + // >> PARSE JSON << + Poco::JSON::Parser jsonParser; + auto parsedJSON = jsonParser.parse(query); + auto parsedResult = jsonParser.result(); + auto document = parsePocoJSONToMongoDBObj(parsedResult); - // >> CONNECT << - if (!_connected) { - _connection.connect(_host, _port); - _connected = true; - } + // >> CONNECT << + if (!_connected) { + _connection.connect(_host, _port); + _connected = true; + } - Poco::MongoDB::Database db(_database); - auto queryPtr = db.createQueryRequest(_collection); + Poco::MongoDB::Database db(_database); + auto queryPtr = db.createQueryRequest(_collection); - queryPtr->selector() = *document; + queryPtr->selector() = *document; - Poco::MongoDB::ResponseMessage response; - _connection.sendRequest(*queryPtr, response); + Poco::MongoDB::ResponseMessage response; + _connection.sendRequest(*queryPtr, response); #if defined(POCO_MONGO_DEBUG_INFO) - // >> DEBUG << - auto str = queryPtr->selector().toString(); - MOJA_LOG_DEBUG << "*****\tProviderNoSQLPocoMongoDB:GetDataSet:resloved query =\t" << str; + // >> DEBUG << + auto str = queryPtr->selector().toString(); + MOJA_LOG_DEBUG << "*****\tProviderNoSQLPocoMongoDB:GetDataSet:resloved query =\t" << str; #endif - // >> CHECK RESPONSE << - if (response.documents().size() > 0) - ConvertPocoMongoDocumentToDynamic(result, response.documents()); - - _cache.add(query, result); - return result; - } - catch (Poco::Net::ConnectionRefusedException&) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } - catch (...) 
{ - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + // >> CHECK RESPONSE << + if (response.documents().size() > 0) ConvertPocoMongoDocumentToDynamic(result, response.documents()); + + _cache.add(query, result); + return result; + } catch (Poco::Net::ConnectionRefusedException&) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } catch (...) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } } -int ProviderNoSQLPocoMongoDB::Count() const { - return SendCountRequest(); -} +int ProviderNoSQLPocoMongoDB::Count() const { return SendCountRequest(); } /// /// For a list of commands see: https://docs.mongodb.org/manual/reference/command/ @@ -117,202 +114,202 @@ int ProviderNoSQLPocoMongoDB::Count() const { /// { "create": "newCollectionName" } /// DynamicVector ProviderNoSQLPocoMongoDB::SendCmdRequest(const std::string& command) const { - try { - DynamicVector result; + try { + DynamicVector result; - // >> PARSE JSON << - Poco::JSON::Parser jsonParser; - auto parsedJSON = jsonParser.parse(command); - auto parsedResult = jsonParser.result(); - auto document = parsePocoJSONToMongoDBObj(parsedResult); + // >> PARSE JSON << + Poco::JSON::Parser jsonParser; + auto parsedJSON = jsonParser.parse(command); + auto parsedResult = jsonParser.result(); + auto document = parsePocoJSONToMongoDBObj(parsedResult); - // >> CONNECT << - if (!_connected) { - _connection.connect(_host, _port); - _connected = true; - } + // >> CONNECT << + if (!_connected) { + _connection.connect(_host, _port); + _connected = true; + } - Poco::MongoDB::Database db(_database); - auto cmdPtr = db.createCommand(); - cmdPtr->selector() = *document; + Poco::MongoDB::Database db(_database); + auto cmdPtr = db.createCommand(); + cmdPtr->selector() = *document; - Poco::MongoDB::ResponseMessage response; - _connection.sendRequest(*cmdPtr, response); + Poco::MongoDB::ResponseMessage response; + _connection.sendRequest(*cmdPtr, response); #if defined(POCO_MONGO_DEBUG_INFO) - // >> DEBUG << - auto str = cmdPtr->selector().toString(); - MOJA_LOG_DEBUG << "*****\tProviderNoSQLPocoMongoDB:SendCmdRequest:resloved query =\t" << str; + // >> DEBUG << + auto str = cmdPtr->selector().toString(); + MOJA_LOG_DEBUG << "*****\tProviderNoSQLPocoMongoDB:SendCmdRequest:resloved query =\t" << str; #endif - // >> CHECK RESPONSE << - if (response.documents().size() > 0) - ConvertPocoMongoDocumentToDynamic(result, response.documents()); - - return result; - } - catch (...) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + // >> CHECK RESPONSE << + if (response.documents().size() > 0) ConvertPocoMongoDocumentToDynamic(result, response.documents()); + + return result; + } catch (...) 
{ + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } }; int ProviderNoSQLPocoMongoDB::SendCountRequest() const { - try { - if (!_connected) { - _connection.connect(_host, _port); - _connected = true; - } - - Poco::MongoDB::Database db(_database); - auto countRequest = db.createCountRequest(_collection); - Poco::MongoDB::ResponseMessage response; - _connection.sendRequest(*countRequest, response); - - if (response.documents().size() > 0) { - auto doc = response.documents()[0]; - return doc->get("n"); - } - - return -1; - } - catch (...) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + try { + if (!_connected) { + _connection.connect(_host, _port); + _connected = true; + } + + Poco::MongoDB::Database db(_database); + auto countRequest = db.createCountRequest(_collection); + Poco::MongoDB::ResponseMessage response; + _connection.sendRequest(*countRequest, response); + + if (response.documents().size() > 0) { + auto doc = response.documents()[0]; + return doc->get("n"); + } + + return -1; + } catch (...) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } }; DynamicVector ProviderNoSQLPocoMongoDB::SendDeleteRequest(const std::string& query) const { - try { - DynamicVector result; - return result; - } - catch (...) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + try { + DynamicVector result; + return result; + } catch (...) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } }; DynamicVector ProviderNoSQLPocoMongoDB::SendInsertRequest(const std::string& query) const { - try { - DynamicVector result; - return result; - } - catch (...) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + try { + DynamicVector result; + return result; + } catch (...) 
{ + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } }; -DynamicVector ProviderNoSQLPocoMongoDB::SendQueryRequest(const std::string& query, int numberToReturn) const { - try { - DynamicVector result; +DynamicVector ProviderNoSQLPocoMongoDB::SendQueryRequest(const std::string& query, int numberToReturn) const { + try { + DynamicVector result; - // >> PARSE QUERY JSON << - Poco::JSON::Parser jsonParser; - auto parsedJSON = jsonParser.parse(query); - auto parsedResult = jsonParser.result(); - auto document = parsePocoJSONToMongoDBObj(parsedResult); + // >> PARSE QUERY JSON << + Poco::JSON::Parser jsonParser; + auto parsedJSON = jsonParser.parse(query); + auto parsedResult = jsonParser.result(); + auto document = parsePocoJSONToMongoDBObj(parsedResult); - // >> CONNECT << - if (!_connected) { - _connection.connect(_host, _port); - _connected = true; - } + // >> CONNECT << + if (!_connected) { + _connection.connect(_host, _port); + _connected = true; + } - Poco::MongoDB::Database db(_database); - auto queryPtr = db.createQueryRequest(_collection); - queryPtr->setNumberToReturn(numberToReturn); + Poco::MongoDB::Database db(_database); + auto queryPtr = db.createQueryRequest(_collection); + queryPtr->setNumberToReturn(numberToReturn); - queryPtr->selector() = *document; + queryPtr->selector() = *document; - Poco::MongoDB::ResponseMessage response; - _connection.sendRequest(*queryPtr, response); + Poco::MongoDB::ResponseMessage response; + _connection.sendRequest(*queryPtr, response); #if defined(POCO_MONGO_DEBUG_INFO) - // >> DEBUG << - auto str = queryPtr->selector().toString(); - MOJA_LOG_DEBUG << "*****\tProviderNoSQLPocoMongoDB:SendQueryRequest:resloved query =\t" << str; + // >> DEBUG << + auto str = queryPtr->selector().toString(); + MOJA_LOG_DEBUG << "*****\tProviderNoSQLPocoMongoDB:SendQueryRequest:resloved query =\t" << str; #endif - // >> CHECK RESPONSE << - if (response.documents().size() > 0) - ConvertPocoMongoDocumentToDynamic(result, response.documents()); - - return result; - } - catch (Poco::Net::ConnectionRefusedException&) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } - catch (...) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + // >> CHECK RESPONSE << + if (response.documents().size() > 0) ConvertPocoMongoDocumentToDynamic(result, response.documents()); + + return result; + } catch (Poco::Net::ConnectionRefusedException&) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } catch (...) 
{ + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } } -DynamicVector ProviderNoSQLPocoMongoDB::SendQueryRequest(const std::string& query, const std::string& fields, int numberToReturn) const { - try { - DynamicVector result; +DynamicVector ProviderNoSQLPocoMongoDB::SendQueryRequest(const std::string& query, const std::string& fields, + int numberToReturn) const { + try { + DynamicVector result; - // >> PARSE QUERY JSON << - Poco::JSON::Parser jsonParser; - auto parsedJSON = jsonParser.parse(query); - auto parsedResult = jsonParser.result(); - auto document = parsePocoJSONToMongoDBObj(parsedResult); + // >> PARSE QUERY JSON << + Poco::JSON::Parser jsonParser; + auto parsedJSON = jsonParser.parse(query); + auto parsedResult = jsonParser.result(); + auto document = parsePocoJSONToMongoDBObj(parsedResult); - // >> PARSE FIELDS JSON << - Poco::JSON::Parser jsonParser2; - auto parsedFieldsJSON = jsonParser2.parse(fields); - auto parsedFieldsResult = jsonParser2.result(); - auto documentFields = parsePocoJSONToMongoDBObj(parsedFieldsResult); + // >> PARSE FIELDS JSON << + Poco::JSON::Parser jsonParser2; + auto parsedFieldsJSON = jsonParser2.parse(fields); + auto parsedFieldsResult = jsonParser2.result(); + auto documentFields = parsePocoJSONToMongoDBObj(parsedFieldsResult); - // >> CONNECT << - if (!_connected) { - _connection.connect(_host, _port); - _connected = true; - } + // >> CONNECT << + if (!_connected) { + _connection.connect(_host, _port); + _connected = true; + } - Poco::MongoDB::Database db(_database); - auto queryPtr = db.createQueryRequest(_collection); - queryPtr->setNumberToReturn(numberToReturn); + Poco::MongoDB::Database db(_database); + auto queryPtr = db.createQueryRequest(_collection); + queryPtr->setNumberToReturn(numberToReturn); - queryPtr->selector() = *document; - queryPtr->returnFieldSelector() = *documentFields; + queryPtr->selector() = *document; + queryPtr->returnFieldSelector() = *documentFields; - Poco::MongoDB::ResponseMessage response; - _connection.sendRequest(*queryPtr, response); + Poco::MongoDB::ResponseMessage response; + _connection.sendRequest(*queryPtr, response); #if defined(POCO_MONGO_DEBUG_INFO) - // >> DEBUG << - auto str = queryPtr->selector().toString(); - MOJA_LOG_DEBUG << "*****\tProviderNoSQLPocoMongoDB:SendQueryRequest:resloved query =\t" << str; + // >> DEBUG << + auto str = queryPtr->selector().toString(); + MOJA_LOG_DEBUG << "*****\tProviderNoSQLPocoMongoDB:SendQueryRequest:resloved query =\t" << str; #endif - // >> CHECK RESPONSE << - if (response.documents().size() > 0) - ConvertPocoMongoDocumentToDynamic(result, response.documents()); - - return result; - } - catch (Poco::Net::ConnectionRefusedException&) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } - catch (...) 
{ - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + // >> CHECK RESPONSE << + if (response.documents().size() > 0) ConvertPocoMongoDocumentToDynamic(result, response.documents()); + + return result; + } catch (Poco::Net::ConnectionRefusedException&) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } catch (...) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } }; DynamicVector ProviderNoSQLPocoMongoDB::SendUpdateRequest(const std::string& query) const { - try { - DynamicVector result; - return result; - } - catch (...) { - auto connection_error_message = (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); - BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); - } + try { + DynamicVector result; + return result; + } catch (...) { + auto connection_error_message = + (boost::format("host %1%, port=%2%, database=%3%") % _host % _port % _database).str(); + BOOST_THROW_EXCEPTION(ConnectionFailedException() << datarepository::ConnectionError(connection_error_message)); + } }; #undef POCO_MONGO_DEBUG_INFO -}}} \ No newline at end of file +} // namespace poco +} // namespace modules +} // namespace moja \ No newline at end of file diff --git a/Source/moja.modules.poco/tests/CMakeLists.txt b/Source/moja.modules.poco/tests/CMakeLists.txt index 76ede67..ba3befc 100644 --- a/Source/moja.modules.poco/tests/CMakeLists.txt +++ b/Source/moja.modules.poco/tests/CMakeLists.txt @@ -1,26 +1,9 @@ ### Unit test ### set(TESTUNIT "${LIBNAME}.test") -find_package(Boost COMPONENTS system filesystem unit_test_framework REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() +find_package(Boost COMPONENTS unit_test_framework REQUIRED) find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() - -#for Moja -#find_package(Moja) - -include_directories(${TURTLE_INCLUDE_PATH}) -include_directories( - include - ../moja.core/include - ../moja.datarepository/include - ../moja.flint.configuration/include -) configure_file(../../templates/unittestdefinition.cpp ${CMAKE_CURRENT_SOURCE_DIR}/src/_unittestdefinition.cpp) @@ -30,34 +13,26 @@ set(TEST_SRCS src/providernosqlpocojsontests.cpp src/providernosqlpocomongodbtests.cpp src/providerrelationalsqlitetests.cpp -# src/providerrelationalpocopostgresqltests.cpp ) set(TEST_TEST_DATA data/providernosqlpocomongodbtestsdata_trees.json ) -add_definitions(-DBOOST_TEST_DYN_LINK) - add_executable(${TESTUNIT} ${TEST_SRCS} ${TEST_TEST_DATA}) add_test(NAME ${LIBNAME} WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND ${TESTUNIT} --result_code=yes --report_level=no) +target_link_libraries(${TESTUNIT} + PRIVATE + moja::moja.modules.poco Boost::unit_test_framework +) + if(WIN32) target_link_libraries(${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS} - wsock32 - ws2_32 - ) -ELSE() - target_link_libraries( - ${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - 
${SYSLIBS} + PRIVATE + wsock32 ws2_32 ) endif() @@ -68,49 +43,3 @@ add_custom_command(TARGET ${TESTUNIT} POST_BUILD COMMAND ${CMAKE_CTEST_COMMAND} ARGS -C $) ENDIF () -### End unit test ### - -### Begin PATH boilerplate for dependent libraries -- adapted from ### -### http://www.cmake.org/pipermail/cmake/2009-May/029464.html ### -### This allows unit tests to run without having to manually add ### -### dependent libraries to the system path. ### - -# Include an entry for each library that needs to be in the system path. -find_path(POCO_BIN - NAMES - PocoFoundation.dll - PocoFoundation64.dll - PocoJSON64.dll - PocoJSON64d.dll - PocoData64.dll - PocoData64d.dll - PocoMongoDB64.dll - PocoMongoDB64d.dll - PocoNet64.dll - PocoNet64d.dll - PATHS - ${POCO_PATH}/bin ${POCO_PATH}/bin64 - PATH_SUFFIXES - $ - ) - -file(TO_NATIVE_PATH "${Boost_LIBRARY_DIR}" boost_lib) -file(TO_NATIVE_PATH "${POCO_BIN}" poco_bin) - -# Determine which environment variable controls the locating of -# DLL's and set that variable. -if(WIN32) - set(LD_VARNAME "PATH") - set(LD_PATH "${boost_lib};${poco_bin};$ENV{PATH}") - - # IMPORTANT NOTE: The set_tests_properties(), below, internally - # stores its name/value pairs with a semicolon delimiter. - # because of this we must protect the semicolons in the path. - string(REPLACE ";" "\\;" LD_PATH "${LD_PATH}") -else() - set(LD_VARNAME "LD_LIBRARY_PATH") - set(LD_PATH "${boost_lib}:$ENV{LD_LIBRARY_PATH}") -endif() - -set_tests_properties(${LIBNAME} PROPERTIES ENVIRONMENT "${LD_VARNAME}=${LD_PATH}") -### End PATH boilerplate ### diff --git a/Source/moja.modules.zipper/CMakeLists.txt b/Source/moja.modules.zipper/CMakeLists.txt index f75853e..f0ef3d4 100644 --- a/Source/moja.modules.zipper/CMakeLists.txt +++ b/Source/moja.modules.zipper/CMakeLists.txt @@ -3,24 +3,8 @@ set(LIBNAME "moja.modules.${PACKAGE}") string(REPLACE "." 
"_" NEW_PACKAGE "${PACKAGE}") string(TOUPPER "${NEW_PACKAGE}" LIBNAME_EXPORT) -find_package(Boost) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - # Zipper -find_package(Zipper) -include_directories(${Zipper_INCLUDE_PATH}) - -find_package(Zlib) -include_directories(${Zlib_INCLUDE_PATH}) - -include_directories( - include - ../moja.core/include - ../moja.flint/include - ../moja.datarepository/include -) +find_package(Zipper REQUIRED) configure_file( ../templates/exports.h @@ -81,39 +65,37 @@ set(SRCS ${PROJECT_PROVIDER_HEADERS} ${PROJECT_PROVIDER_SOURCES} ) -add_definitions( -DPOCO_NO_AUTOMATIC_LIBS ) - -add_library( - ${LIBNAME} - ${LIB_MODE} - ${SRCS} - ) +add_library(${LIBNAME} ${LIB_MODE} ${SRCS}) +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) set_target_properties(${LIBNAME} PROPERTIES - VERSION ${MOJA_MULLIONGROUP_VERSION} - SOVERSION ${MOJA_MULLIONGROUP_VERSION_MAJOR} + VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS - ) +) -target_link_libraries( - ${LIBNAME} - moja.core - moja.flint - moja.datarepository - ${Zlib_LIB} - ${Zipper_LIB} - ${Poco_FOUNDATION} - ${Poco_JSON} +target_include_directories(${LIBNAME} + PUBLIC + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src ) -# Set local include path -include_directories(${Poco_INCLUDE_DIRS}) +target_link_libraries(${LIBNAME} + PUBLIC + moja::moja.flint + PRIVATE + Zipper::Zipper +) + +############################################## +# Installation instructions + +include(GNUInstallDirs) -install(TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin) +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) if(ENABLE_TESTS) add_subdirectory(tests) diff --git a/Source/moja.modules.zipper/cmake/moja.modules.zipperConfig.cmake b/Source/moja.modules.zipper/cmake/moja.modules.zipperConfig.cmake new file mode 100644 index 0000000..24b3d1e --- /dev/null +++ b/Source/moja.modules.zipper/cmake/moja.modules.zipperConfig.cmake @@ -0,0 +1,7 @@ +include(CMakeFindDependencyMacro) +find_dependency(zipper) +find_dependency(moja REQUIRED COMPONENTS moja.flint ) + +if(NOT TARGET moja::moja.modules.zipper) + include("${MojaModulesPoco_CMAKE_DIR}/moja.modules.zipperTargets.cmake") +endif() \ No newline at end of file diff --git a/Source/moja.modules.zipper/tests/CMakeLists.txt b/Source/moja.modules.zipper/tests/CMakeLists.txt index 51b81b0..95f09ea 100644 --- a/Source/moja.modules.zipper/tests/CMakeLists.txt +++ b/Source/moja.modules.zipper/tests/CMakeLists.txt @@ -1,26 +1,6 @@ -### Unit test ### set(TESTUNIT "${LIBNAME}.test") -find_package(Boost COMPONENTS system filesystem unit_test_framework REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() - -#for Moja -#find_package(Moja) - -include_directories(${TURTLE_INCLUDE_PATH}) -include_directories( - include - ../moja.core/include - ../moja.datarepository/include - ../moja.flint.configuration/include -) +find_package(Boost COMPONENTS unit_test_framework REQUIRED) configure_file(../../templates/unittestdefinition.cpp ${CMAKE_CURRENT_SOURCE_DIR}/src/_unittestdefinition.cpp) @@ -29,52 +9,21 @@ set(TEST_SRCS src/providerspatialrastertiledtests.cpp ) -add_definitions(-DBOOST_TEST_DYN_LINK) - add_executable(${TESTUNIT} ${TEST_SRCS}) add_test(NAME ${LIBNAME} WORKING_DIRECTORY ${CMAKE_RUNTIME_OUTPUT_DIRECTORY} COMMAND ${TESTUNIT} 
--result_code=yes --report_level=no) target_link_libraries(${TESTUNIT} - ${LIBNAME} - ${Boost_LIBRARIES} - ${SYSLIBS}) + PRIVATE + moja::moja.modules.zipper Boost::unit_test_framework + ) IF (RUN_UNIT_TESTS_ON_BUILD) add_custom_command(TARGET ${TESTUNIT} POST_BUILD COMMAND ${CMAKE_CTEST_COMMAND} ARGS -C $) ENDIF () -### End unit test ### - -### Begin PATH boilerplate for dependent libraries -- adapted from ### -### http://www.cmake.org/pipermail/cmake/2009-May/029464.html ### -### This allows unit tests to run without having to manually add ### -### dependent libraries to the system path. ### - -# Include an entry for each library that needs to be in the system path. -find_path(POCO_BIN NAMES PocoFoundation.dll PocoFoundation64.dll - PATHS ${POCO_PATH}/bin ${POCO_PATH}/bin64 - PATH_SUFFIXES $) - -file(TO_NATIVE_PATH "${Boost_LIBRARY_DIR}" boost_lib) -file(TO_NATIVE_PATH "${POCO_BIN}" poco_bin) -# Determine which environment variable controls the locating of -# DLL's and set that variable. -if(WIN32) - set(LD_VARNAME "PATH") - set(LD_PATH "${boost_lib};${poco_bin};$ENV{PATH}") - # IMPORTANT NOTE: The set_tests_properties(), below, internally - # stores its name/value pairs with a semicolon delimiter. - # because of this we must protect the semicolons in the path. - string(REPLACE ";" "\\;" LD_PATH "${LD_PATH}") -else() - set(LD_VARNAME "LD_LIBRARY_PATH") - set(LD_PATH "${boost_lib}:$ENV{LD_LIBRARY_PATH}") -endif() -set_tests_properties(${LIBNAME} PROPERTIES ENVIRONMENT "${LD_VARNAME}=${LD_PATH}") -### End PATH boilerplate ### diff --git a/Source/moja.modules.zipper/tests/src/providerspatialrastertiledtests.cpp b/Source/moja.modules.zipper/tests/src/providerspatialrastertiledtests.cpp index 4c9bf05..6bc7e6d 100644 --- a/Source/moja.modules.zipper/tests/src/providerspatialrastertiledtests.cpp +++ b/Source/moja.modules.zipper/tests/src/providerspatialrastertiledtests.cpp @@ -30,9 +30,9 @@ using Poco::FileOutputStream; struct ProviderZipperSpatialRasterTiledTestsFixture { - const std::string test_tile_path = "./data/TestTile/TestTile_034_001.blk"; - const std::string test_stack_path = "./data/TestTile/TestStack_034_001.blk"; - const std::string test_tile_zip_path = "./data/TestTileZip.zip"; + const std::string test_tile_path = "/workspaces/FLINT/Source/build/bin/data/TestTile/TestTile_034_001.blk"; + const std::string test_stack_path = "/workspaces/FLINT/Source/build/bin/data/TestTile/TestStack_034_001.blk"; + const std::string test_tile_zip_path = "/workspaces/FLINT/Source/build/bin/data/TestTileZip.zip"; moja::DynamicObject settings; @@ -70,7 +70,7 @@ struct ProviderZipperSpatialRasterTiledTestsFixture { { "name", std::string("testdata") }, { "layer_type", std::string("GridLayer") }, { "layer_data", std::string("Int32") }, - { "layer_path", std::string("./data/TestTile") }, + { "layer_path", std::string("/workspaces/FLINT/Source/build/bin/data/TestTile") }, { "layer_prefix", std::string("TestTile") }, { "tileLatSize", 1.0 }, { "tileLonSize", 1.0 }, @@ -84,7 +84,7 @@ struct ProviderZipperSpatialRasterTiledTestsFixture { { "name", "teststack" }, { "layer_type", "StackLayer" }, { "layer_data", "UInt8" }, - { "layer_path", "./data/TestTile" }, + { "layer_path", "/workspaces/FLINT/Source/build/bin/data/TestTile" }, { "layer_prefix", "TestStack" }, { "nLayers", 14 }, { "tileLatSize", 1.0 }, @@ -99,7 +99,7 @@ struct ProviderZipperSpatialRasterTiledTestsFixture { { "name", "testdatazip" }, { "layer_type", "GridLayer" }, { "layer_data", "Int32" }, - { "layer_path", "./data/TestTileZip" }, + { 
"layer_path", "/workspaces/FLINT/Source/build/bin/data/TestTileZip" }, { "layer_prefix", "TestTileZip" }, { "tileLatSize", 1.0 }, { "tileLonSize", 1.0 }, diff --git a/Source/moja.systemtest/CMakeLists.txt b/Source/moja.systemtest/CMakeLists.txt index a60ef93..c47d86a 100644 --- a/Source/moja.systemtest/CMakeLists.txt +++ b/Source/moja.systemtest/CMakeLists.txt @@ -1,36 +1,9 @@ set(EXENAME "moja.systemtest") -find_package(Boost COMPONENTS system thread filesystem date_time chrono program_options log log_setup REQUIRED) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() - -# MongoDBCPlusPlus Driver -find_package(MongoDBCPlusPlusDriver) -if(MONGODBCPLUSPLUSDRIVER_FOUND) - # MESSAGE("${EXENAME}: ${PACKAGE}: MONGODBCPLUSPLUSDRIVER_FOUND: ${MONGODBCPLUSPLUSDRIVER_FOUND}") - - include_directories(${MONGODBCPLUSPLUSDRIVER_INCLUDE_PATH}) - link_directories(${MONGODBCPLUSPLUSDRIVER_LIBRARY_DIR}) -endif() - -find_package(Libpq) -if(LIBPQ_FOUND) - include_directories(${LIBPQ_INCLUDE_PATH}) - link_directories(${LIBPQ_LIBRARY_DIR}) -endif() - -include_directories( include ../moja.core/include ../moja.flint/include ../moja.datarepository/include ../moja.flint.configuration/include ${Poco_INCLUDE_DIRS}) -include_directories(${Sqlite_INCLUDE_PATH}) - -if(Poco_FOUND) - link_directories(${Poco_BINARY_DIRS}) -endif() +find_package(Boost COMPONENTS system REQUIRED) +find_package(Poco REQUIRED Foundation) +find_package(libmongocxx) +find_package(PostgreSQL REQUIRED) set(MOJA_EXE_HDRS include/moja.systemtest.h @@ -40,28 +13,15 @@ set(MOJA_EXE_SRC src/moja.systemtest.cpp ) -add_definitions(-DBOOST_ALL_DYN_LINK) - add_executable(${EXENAME} ${MOJA_EXE_HDRS} ${MOJA_EXE_SRC}) +target_include_directories(${EXENAME} + PRIVATE + $ +) + target_link_libraries(${EXENAME} - moja.core - moja.flint - moja.flint.configuration - ${Boost_LIBRARIES} - ${SYSLIBS} - ${Poco_FOUNDATION} -# ${Poco_DATA} - ${Poco_NET} - ${Poco_JSON} -# ${Poco_DATA_SQLITE} -# ${Poco_MONGODB} - ${LIBPQ_LIB} + PRIVATE + moja::moja.flint Boost::system Poco::Foundation PostgreSQL::PostgreSQL ) -add_dependencies(${EXENAME} moja.core) -add_dependencies(${EXENAME} moja.flint) -add_dependencies(${EXENAME} moja.flint.configuration) - -# Set local include path -include_directories(${Poco_INCLUDE_DIRS}) diff --git a/Source/moja.test/CMakeLists.txt b/Source/moja.test/CMakeLists.txt index bd25f98..9f5d9c3 100644 --- a/Source/moja.test/CMakeLists.txt +++ b/Source/moja.test/CMakeLists.txt @@ -2,28 +2,9 @@ set(PACKAGE "test") set(LIBNAME "moja.${PACKAGE}") string(TOUPPER "${PACKAGE}" LIBNAME_EXPORT) -if(MOJA_STATIC) - set(CMAKE_CXX_FLAGS_RELEASE "/MT") - set(CMAKE_CXX_FLAGS_DEBUG "/MTd") - add_definitions(-DUSE_STATIC_BOOST) - set(Boost_USE_STATIC_LIBS ON) -else(MOJA_STATIC) - add_definitions(-DBOOST_ALL_DYN_LINK) - set(Boost_USE_STATIC_LIBS OFF) -endif(MOJA_STATIC) - -find_package(Boost) -if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIRS}) -endif() - -find_package(Turtle) -if(TURTLE_FOUND) - include_directories(${Turtle_INCLUDE_PATH}) -endif() - -include_directories( include ../moja.core/include ../moja.flint/include ../moja.datarepository/include) +find_package(Boost COMPONENTS unit_test_framework REQUIRED) +find_package(Turtle REQUIRED) configure_file(../templates/exports.h ${CMAKE_CURRENT_SOURCE_DIR}/include/moja/${PACKAGE}/_${PACKAGE}_exports.h) @@ -62,30 +43,31 @@ set(MOJA_Test_headers ) set(MOJA_Test_sources - src/moja.test.cpp) + 
#src/moja.test.cpp + ) set(SRCS ${MOJA_Test_sources} ${MOJA_Test_headers}) -add_library(${LIBNAME} ${LIB_MODE} ${SRCS}) -set_target_properties(${LIBNAME} - PROPERTIES - VERSION ${MOJA_VERSION} SOVERSION ${MOJA_VERSION_MAJOR} - DEFINE_SYMBOL ${LIBNAME_EXPORT}_EXPORTS) +add_library(${LIBNAME} INTERFACE) +if(MSVC) + add_custom_target(${LIBNAME}.headers SOURCES ${SRCS}) +endif() +add_library(${PROJECT_NAME}::${LIBNAME} ALIAS ${LIBNAME}) -target_link_libraries( - ${LIBNAME} - moja.core - moja.flint - moja.flint.configuration - moja.datarepository - ) +target_include_directories(${LIBNAME} + INTERFACE + $ + $ +) + +target_link_libraries(${LIBNAME} INTERFACE moja::moja.flint Turtle::Turtle) + +############################################## +# Installation instructions + +include(GNUInstallDirs) -install(DIRECTORY include/moja - DESTINATION include - PATTERN ".svn" EXCLUDE) +MOJA_INSTALL(${LIBNAME}) +MOJA_GENERATE_PACKAGE(${LIBNAME}) -install(TARGETS ${LIBNAME} - LIBRARY DESTINATION lib${LIB_SUFFIX} - ARCHIVE DESTINATION lib${LIB_SUFFIX} - RUNTIME DESTINATION bin) diff --git a/Source/moja.test/cmake/moja.testConfig.cmake b/Source/moja.test/cmake/moja.testConfig.cmake new file mode 100644 index 0000000..5b8c1d5 --- /dev/null +++ b/Source/moja.test/cmake/moja.testConfig.cmake @@ -0,0 +1,7 @@ + +include(CMakeFindDependencyMacro) +find_dependency(moja REQUIRED COMPONENTS moja.flint ) + +if(NOT TARGET moja::moja.test) + include("${CMAKE_CURRENT_LIST_DIR}/moja.testTargets.cmake") +endif() diff --git a/Source/moja.test/include/moja/test/mocktiming.h b/Source/moja.test/include/moja/test/mocktiming.h index d2ac309..6f905b3 100644 --- a/Source/moja.test/include/moja/test/mocktiming.h +++ b/Source/moja.test/include/moja/test/mocktiming.h @@ -1,17 +1,18 @@ #ifndef MOJA_TEST_MOCKTIMING_H_ #define MOJA_TEST_MOCKTIMING_H_ -#include +#include #include + namespace moja { namespace test { -MOCK_BASE_CLASS(MockTiming, ITiming) { +MOCK_BASE_CLASS(MockTiming, flint::ITiming) { MockTiming() = default; - MOCK_METHOD(stepping, 0, TimeStepping()); + MOCK_METHOD(stepping, 0, flint::TimeStepping()); MOCK_METHOD(startDate, 0, DateTime()); MOCK_METHOD(endDate, 0, DateTime()); @@ -45,7 +46,7 @@ MOCK_BASE_CLASS(MockTiming, ITiming) { MOCK_METHOD(setSubStep, 1, void(int)); MOCK_METHOD(setIsFullStep, 1, void(bool)); - MOCK_METHOD(setStepping, 1, void(TimeStepping)); + MOCK_METHOD(setStepping, 1, void(flint::TimeStepping)); MOCK_METHOD(init, 0, void()); };
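
The pocomongoutils and providernosqlpocomongodb hunks above reformat the JSON-to-MongoDB conversion helpers and the provider that drives them. As a minimal sketch of how those pieces combine outside the provider class — the host, port, database, collection and query values here are placeholders, not taken from the patch — the round trip from a JSON selector string to a `DynamicVector` of results looks roughly like this:

```cpp
#include "moja/modules/poco/pocomongoutils.h"

#include <moja/dynamic.h>

#include <Poco/JSON/Parser.h>
#include <Poco/MongoDB/Connection.h>
#include <Poco/MongoDB/Database.h>
#include <Poco/MongoDB/ResponseMessage.h>

#include <iostream>
#include <string>

int main() {
   using namespace moja::modules::poco;

   // Placeholder connection details (assumptions, not values from the patch).
   const std::string host = "localhost";
   const int port = 27017;

   // Parse a JSON selector and convert it to a Poco::MongoDB document,
   // following the same steps as ProviderNoSQLPocoMongoDB::GetDataSet above.
   Poco::JSON::Parser parser;
   parser.parse(R"({ "species": "oak" })");  // hypothetical query
   auto parsed = parser.result();
   auto selector = parsePocoJSONToMongoDBObj(parsed);

   // Send the query and convert the response documents back to moja dynamics.
   Poco::MongoDB::Connection connection;
   connection.connect(host, port);

   Poco::MongoDB::Database db("flint");            // placeholder database name
   auto request = db.createQueryRequest("trees");  // placeholder collection name
   request->selector() = *selector;

   Poco::MongoDB::ResponseMessage response;
   connection.sendRequest(*request, response);

   moja::DynamicVector result;
   if (response.documents().size() > 0) ConvertPocoMongoDocumentToDynamic(result, response.documents());

   std::cout << "documents returned: " << result.size() << std::endl;
   return 0;
}
```

Error handling is omitted here; in the provider itself the same sequence is wrapped in the `ConnectionFailedException` try/catch blocks shown in the hunks above.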
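
Similarly, the providernosqlpocojson.cpp hunk shows the JSON-file provider reading the whole file at construction and returning the parsed contents as a single row from `GetDataSet`. A hedged usage sketch — the file path is hypothetical, and it assumes `DynamicObject` accepts the brace-initialised key/value pairs used in the zipper test fixture above:

```cpp
#include "moja/modules/poco/providernosqlpocojson.h"

#include <moja/dynamic.h>

#include <iostream>
#include <string>

int main() {
   // "file_path" is the only setting the constructor reads; the path is a placeholder.
   moja::DynamicObject settings({{"file_path", std::string("data/example.json")}});

   moja::modules::poco::ProviderNoSQLPocoJSON provider(settings);

   // The query argument is currently unused: the provider pushes the parsed
   // file contents into the result vector as a single element.
   auto rows = provider.GetDataSet("");
   std::cout << "rows: " << rows.size() << std::endl;
   return 0;
}
```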