Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • asapo/asapo
  • joao.alvim/asapo
  • philipp.middendorf/asapo
  • stefan.dietrich/asapo
4 results
Show changes
Showing
with 6316 additions and 45 deletions
# Minimal slapd configuration for the ASAPO test/registry LDAP server.
# NOTE(review): two near-identical configs appear back to back here (the
# second adds modulepath/moduleload for the BDB backend); this looks like
# diff residue from a revision compare -- verify which copy is effective.
include /etc/openldap/schema/core.schema
include /etc/openldap/schema/cosine.schema
include /etc/openldap/schema/nis.schema
# NOTE(review): slapd access directives are evaluated first-match, so
# 'access to * by * write' already grants everyone -- including anonymous
# binds allowed below -- write access, leaving the manage/read rules
# unreachable. Presumably intentional for an isolated test setup; do not
# use as-is in production.
access to * by * write
access to * by * manage
access to * by * read
allow bind_anon_cred
allow bind_anon_dn
allow update_anon
database bdb
suffix "ou=rgy,o=desy,c=de"
# read https://github.com/cristal-ise/kernel/wiki/Install-OpenLDAP
include /etc/openldap/schema/core.schema
include /etc/openldap/schema/cosine.schema
include /etc/openldap/schema/nis.schema
# load the BDB backend module used by the 'database bdb' directive below
modulepath /usr/lib/ldap
moduleload back_bdb.la
access to * by * write
access to * by * manage
access to * by * read
allow bind_anon_cred
allow bind_anon_dn
allow update_anon
database bdb
suffix "ou=rgy,o=desy,c=de"
......@@ -4,7 +4,7 @@ cd $1
wget https://curl.haxx.se/download/curl-7.58.0.tar.gz
tar xzf curl-7.58.0.tar.gz
cd curl-7.58.0
./configure --without-ssl --disable-shared --disable-manual --disable-ares --disable-cookies \
./configure --without-ssl --disable-shared --disable-manual --disable-ares \
--disable-crypto-auth --disable-ipv6 --disable-proxy --disable-unix-sockets \
--without-libidn --without-librtmp --without-zlib --disable-ldap \
--disable-libcurl-option --prefix=`pwd`/../
......
:: Build and install a static libcurl (winssl) with MSVC 2019 on Windows.
:: download and untar libcurl sources to dir
:: https://curl.haxx.se/download/curl-7.58.0.tar.gz
:: set directory with libcurl sources
SET dir=c:\tmp\curl-7.58.0
:: set directory where libcurl should be installed
SET install_dir=c:\Curl
:: remember the invocation directory (not used later; kept for compatibility)
set mypath=%cd%
:: set up the 64-bit MSVC build environment
call "c:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvars64.bat"
cd /d %dir%\winbuild
:: NOTE(review): '#' is NOT a comment character in batch -- the original
:: '#nmake.exe ...' line would have been executed as a (failing) command.
:: Kept disabled with '::'; remove the '::' below to actually run the build:
:: nmake.exe /f Makefile.vc mode=static VC=16 MACHINE=X64 RTLIBCFG=static
:: copy headers and the static library into the install location
xcopy /isvy %dir%\builds\libcurl-vc16-X64-release-static-ipv6-sspi-winssl\include %install_dir%\include
xcopy /isvy %dir%\builds\libcurl-vc16-X64-release-static-ipv6-sspi-winssl\lib %install_dir%\lib
:: the winbuild produces libcurl_a.lib; rename to the conventional libcurl.lib
rename %install_dir%\lib\libcurl_a.lib libcurl.lib
:: Build and install libbson and the mongoc driver with MSVC on Windows.
:: download and untar mongoc driver sources to dir
:: https://github.com/mongodb/mongo-c-driver/releases/download/1.9.0/mongo-c-driver-1.9.0.tar.gz
:: https://github.com/mongodb/mongo-c-driver/releases/download/1.15.2/mongo-c-driver-1.15.2.tar.gz
:: set directory with mongoc driver sources
:: NOTE(review): both the old (1.9.0) and new (1.15.2) source dirs are set;
:: the second SET wins. This looks like diff residue -- verify intended version.
SET dir=c:\tmp\mongo-c-driver-1.9.0
SET dir=c:\tmp\mongo-c-driver-1.15.2
:: remember the invocation directory (not used later; kept for compatibility)
set mypath=%cd%
cd /d %dir%
:: install libbson
cd src\libbson
:: configure a static-runtime (/MT) release build installed under C:\mongo-c-driver
cmake "-DCMAKE_INSTALL_PREFIX=C:\mongo-c-driver" ^
"-DCMAKE_BUILD_TYPE=Release" ^
"-DCMAKE_C_FLAGS_RELEASE=/MT"
cmake --build . --config Release
cmake --build . --target install --config Release
:: install mongoc
cd %dir%
cmake "-DCMAKE_INSTALL_PREFIX=C:\mongo-c-driver" ^
......
#!/usr/bin/env bash
# Build the mongo-c-driver from source inside the folder given as $1.
set -e
# require the working/install folder as the first argument
if [[ p$1 == "p" ]]; then
echo "install folder missing"
exit 1
fi
cd $1
# NOTE(review): the lines below contain BOTH the old 1.9.0 (autotools) build
# flow and the new 1.17.2 (cmake) build flow -- this looks like merged diff
# residue. As written the script builds 1.9.0 first, then downloads and
# builds 1.17.2 from inside the 1.9.0 tree; confirm which flow is intended.
wget https://github.com/mongodb/mongo-c-driver/releases/download/1.9.0/mongo-c-driver-1.9.0.tar.gz
tar xzf mongo-c-driver-1.9.0.tar.gz
cd mongo-c-driver-1.9.0
# static, minimal-feature build: no SSL/SASL, no automatic init/cleanup
./configure --disable-automatic-init-and-cleanup --enable-static=yes --enable-shared=no --enable-examples=no --enable-ssl=no --enable-sasl=no
make
wget https://github.com/mongodb/mongo-c-driver/releases/download/1.17.2/mongo-c-driver-1.17.2.tar.gz
tar xzf mongo-c-driver-1.17.2.tar.gz
cd mongo-c-driver-1.17.2
#you can add -DENABLE_ZSTD=OFF to disable lzstd
cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_SSL=OFF -DENABLE_SASL=OFF -DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF -DMONGOC_ENABLE_STATIC=ON .
make -j 4
#sudo make install
......
wget https://raw.githubusercontent.com/cesanta/mongoose/7.4/mongoose.c
wget https://raw.githubusercontent.com/cesanta/mongoose/7.4/mongoose.h
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
......@@ -210,15 +210,15 @@ inline DiyFp GetCachedPowerByIndex(size_t index) {
};
static const int16_t kCachedPowers_E[] = {
-1220, -1193, -1166, -1140, -1113, -1087, -1060, -1034, -1007, -980,
-954, -927, -901, -874, -847, -821, -794, -768, -741, -715,
-688, -661, -635, -608, -582, -555, -529, -502, -475, -449,
-422, -396, -369, -343, -316, -289, -263, -236, -210, -183,
-157, -130, -103, -77, -50, -24, 3, 30, 56, 83,
109, 136, 162, 189, 216, 242, 269, 295, 322, 348,
375, 402, 428, 455, 481, 508, 534, 561, 588, 614,
641, 667, 694, 720, 747, 774, 800, 827, 853, 880,
907, 933, 960, 986, 1013, 1039, 1066
};
-954, -927, -901, -874, -847, -821, -794, -768, -741, -715,
-688, -661, -635, -608, -582, -555, -529, -502, -475, -449,
-422, -396, -369, -343, -316, -289, -263, -236, -210, -183,
-157, -130, -103, -77, -50, -24, 3, 30, 56, 83,
109, 136, 162, 189, 216, 242, 269, 295, 322, 348,
375, 402, 428, 455, 481, 508, 534, 561, 588, 614,
641, 667, 694, 720, 747, 774, 800, 827, 853, 880,
907, 933, 960, 986, 1013, 1039, 1066
};
return DiyFp(kCachedPowers_F[index], kCachedPowers_E[index]);
}
......
......@@ -50,7 +50,7 @@ RAPIDJSON_DIAG_OFF(switch - enum)
0); //!< Represents an invalid index in GenericRegex::State::out, out1
static const SizeType kRegexInvalidRange = ~SizeType(0);
//! Regular expression engine with subset of ECMAscript grammar.
//! Regular expression engine with subset of ECMAscript grammar.
/*!
Supported regular expression syntax:
- \c ab Concatenation
......
......@@ -1223,8 +1223,7 @@ class GenericReader {
}
i = i * 10 + static_cast<unsigned>(s.TakePush() - '0');
significandDigit++;
}
else
} else
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
if (RAPIDJSON_UNLIKELY(i >= 429496729)) { // 2^32 - 1 = 4294967295
if (RAPIDJSON_LIKELY(i != 429496729 || s.Peek() > '5')) {
......@@ -1265,8 +1264,7 @@ class GenericReader {
}
i64 = i64 * 10 + static_cast<unsigned>(s.TakePush() - '0');
significandDigit++;
}
else
} else
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
if (RAPIDJSON_UNLIKELY(i64 >= RAPIDJSON_UINT64_C2(0x19999999, 0x99999999))) // 2^64 - 1 = 18446744073709551615
if (RAPIDJSON_LIKELY(i64 != RAPIDJSON_UINT64_C2(0x19999999, 0x99999999) || s.Peek() > '5')) {
......
......@@ -680,7 +680,7 @@ inline T* make_ptr(T* ptr, std::size_t) {
/**
\rst
A buffer supporting a subset of ``std::vector``'s operations.
A buffer supporting a subset of ``std::vector``'s operations.
\endrst
*/
template <typename T>
......@@ -2372,19 +2372,19 @@ template <std::size_t N>
struct ArgArray<N, true/*IsPacked*/> {
typedef Value Type[N > 0 ? N : 1];
template <typename Formatter, typename T>
static Value make(const T& value) {
template <typename Formatter, typename T>
static Value make(const T& value) {
#ifdef __clang__
Value result = MakeValue<Formatter>(value);
// Workaround a bug in Apple LLVM version 4.2 (clang-425.0.28) of clang:
// https://github.com/fmtlib/fmt/issues/276
(void)result.custom.format;
return result;
Value result = MakeValue<Formatter>(value);
// Workaround a bug in Apple LLVM version 4.2 (clang-425.0.28) of clang:
// https://github.com/fmtlib/fmt/issues/276
(void)result.custom.format;
return result;
#else
return MakeValue<Formatter>(value);
return MakeValue<Formatter>(value);
#endif
}
};
}
};
template <std::size_t N>
struct ArgArray<N, false/*IsPacked*/> {
......
## Develop
See [documentation](docs/site/changelog/Develop.md).
## 22.10.0
IMPROVEMENTS
* Clients try to use ethernet over infiniband if available to retrieve data from the receiver.
BUILD CHANGES
* Repository and CI moved to DESY Gitlab.
## 22.03.0
FEATURES
* Monitoring: Added detailed monitoring and pipeline visualization
* Consumer API: return kDataNotInCache/AsapoDataNotInCacheError error if data is not in cache and cannot be on disk (due to the ingest mode producer used)
IMPROVEMENTS
* renamed and hid C++ macros from client code
BUG FIXES
* Producer API: fixed bug segfault in Python code when sending data object which memory is from some other object
VERSION COMPATIBILITY
* Previous C consumer & producer clients will break due to two extra parameters for instance id and pipeline step id in *asapo_create_source_credentials*.
INTERNAL
* Do not return error when receiver cannot get slot in shared cache - just allocate own memory slot
## 21.12.0
FEATURES
* Consumer API: Get last within consumer group returns message only once
* Producer API: An option to write raw data to core filesystem directly
* Consumer/Producer API - packages for Debian 11.1, wheel for Python 3.9
* Consumer/Producer API - dropped Python 2 support for wheels and packages for new Debian/CentOS versions
INTERNAL
* Improved logging - tags for beamline, beamtime, ...
* Updated orchestration tools to latest version
## 21.09.0
FEATURES
* Producer API: C client
* Introduce a token to send data in "raw" mode without LDAP authorization
IMPROVEMENTS
* Allow using ASAPO for commissioning beamtimes
* Implement token revocation
* Updated website
BUG FIXES
* Consumer/Producer API: fixed bug with "_" in stream name
INTERNAL
* Improved authorization service caching
* Added profiling for Go services
* Added metrics and alerts for asapo services
## 21.06.0
FEATURES
* Consumer API: C client
* Producer API: An option to automatically generate message id (use sparingly, reduced performance possible)
IMPROVEMENTS
* Consumer/Producer API - allow any characters in source/stream/group names
* Consumer/Producer API - introduce stream metadata
* Consumer API - an option to auto discovery of data folder when consumer client uses file transfer service (has_filesystem=False)
* Improved build procedure - shared libraries, added pkg-config and cmake config for asapo clients
BUG FIXES
* Consumer API: multiple consumers from same group receive stream finished error
* Consumer API: return ServiceUnavailable instead of Unauthorized in case an authorization service is unreachable
## 21.03.3
BUG FIXES
* Consumer API: fix return error type when sending acknowledgement second time
* Producer API: fix GetStreamInfo/stream_info and GetLastStream/last_stream for datasets
## 21.03.2
FEATURES
* implemented possibility to delete stream (only metadata, not files yet)
IMPROVEMENTS
* Consumer API - retry file delivery/reading with timeout (can be useful for the case the file arrives after its metadata was ingested, e.g. for slow NFS transfer, ...)
BUG FIXES
* Consumer API: fix race condition in GetStreamList/get_stream_list
* Producer API: fix segfault in send_stream_finished_flag
* Producer API: fix deadlock in producer timeout
## 21.03.1
BUG FIXES
* Core services: fix LDAP authorization for raw data type Producers
## 21.03.0
IMPROVEMENTS
* Producer API - queue limits in Python, for C++ return original data in error custom data
* Consumer API - add GetCurrentDatasetCount/get_current_dataset_count function with option to include or exclude incomplete datasets
* Consumer API - GetStreamList/get_stream_list - can filter finished/unfinished streams now
* Producer/Consumer API - StreamInfo structure/Python dictionary include more information (is stream finished or not, ...)
* Switch to JWT tokens (token has more symbols, expiration time, can be revoked and there are two type of tokens - with read/write access rights)
* Improved versioning. Producer/Consumer API - introduce GetVersionInfo/get_version_info, compatibility check between clients and server
BREAKING CHANGES
* Consumer API (C++ only)- GetStreamList has now extra argument StreamFilter
* Consumer/Producer libraries need to be updated due to protocol changes
## 20.12.0
FEATURES
* implemented possibility to send data without writing to database (no need of consecutive indexes, etc. but will not be able to consume such data)
* allow to return incomplete datasets (without error if one sets minimum dataset size, otherwise with "partial data" error)
IMPROVEMENTS
* Consumer API - change behavior of GetLast/get_last - do not change current pointer after call
* Consumer API - add interrupt_current_operation to allow interrupting (from a separate thread) long consumer operation
* Producer API - return original data in callback payload.
* Producer API - allow to set queue limits (number of pending requests and/or max memory), reject new requests if reached the limits
* building rpm, deb and exe packages for client libs
BREAKING CHANGES
* Consumer API - get_next_dataset, get_last_dataset, get_dataset_by_id return dictionary with 'id','expected_size','content' fields, not tuple (id,content) as before
* Consumer API - remove group_id argument from get_last/get_by_id/get_last_dataset/get_dataset_by_id functions
* Producer API - changed meaning of subsets (subset_id replaced with dataset_substream and this means now id of the image within a subset (e.g. module number for multi-module detector)), message_id is now a global id of a multi-set data (e.g. multi-image id)
#### renaming - general
* stream -> data_source, substream -> stream
* use millisecond everywhere for timeout/delay
* use term `message` for blob of information we send around, rename related structs, parameters, ...
* C++ - get rid of duplicate functions with default stream
#### renaming - Producer API
* SendData/send_data -> Send/send
* SendXX/send_xx -> swap parameters (stream to the end)
* id_in_subset -> dataset_substream
* subset_size -> dataset_size (and in general replace subset with dataset)
#### renaming - Consumer API
* broker -> consumer
* SetLastReadMarker/set_lastread_marker -> swap arguments
* GetUnacknowledgedTupleIds/get_unacknowledged_tuple_ids -> GetUnacknowledgedMessages/get_unacknowledged_messages
* GetLastAcknowledgedTulpeId/get_last_acknowledged_tuple_id -> GetLastAcknowledgedMessage/get_last_acknowledged_message
* GetUnacknowledgedMessages, -> swap parameters (stream to the end)
BUG FIXES
* fix memory leak bug in Python consumer library (lead to problems when creating many consumer instances)
## 20.09.1
FEATURES
* New function GetLastStream/last_stream in Producer API - returns info for a stream which was created last
IMPROVEMENTS
* Each message automatically gets a timestamp (nanoseconds from Linux epoch) at the moment it is being inserted to a database
* GetStreamList/get_stream_list returns now sorted (by timestamp of the earliest message) list of streams. Parameter `from` allows to limit the list
BREAKING CHANGES
* GetStreamList/get_stream_list returns now not an array of strings, but array of StreamInfos/dictionaries
## 20.09.0
FEATURES
* implemented negative acknowledges and data redelivery - data will be redelivered automatically for get_next calls if it is not acknowledged during a given period or a consumer sent a negative acknowledge
* introduced data source types - "raw" data is written to beamline filesystem and this can only be done from a certain IPs (detector PC,..),
"processed" data is written to core filesystem. File paths must now start with `raw/` or `processed/`
* Added RDMA support for the communication between consumer and receiver. (Improves transfer speeds while using less CPU resources)
Requires LibFabric v1.11.0
Receiver must have network mode 'Fabric' enabled and RDMAable AdvertiseURI. See config `DataServer.{AdvertiseURI, NetworkMode}`
* Added 'ASAPO_PRINT_FALLBACK_REASON' as an environment variable for the consumer in order to get a message why TCP was used
* Added new consumer broker API call 'ForceNoRdma' to always use TCP and ignore any RDMA capabilities
* Added new consumer broker API call 'CurrentConnectionType' to see what connection type is currently used
BUG FIXES
* fix data query images when beamtime_id starts with number
BREAKING CHANGES
* an extra parameter in producer constructor for data source type
* path of the files that are send from producer to asapo must start with `raw/` for raw source type or `processed/` for processed source type, otherwise the files will not be written and an error will be sent back
## 20.06.3
BUG FIXES
* fix retrieve_data in Python modules for data ingested using metadata only mode
* fix asapo orchestration image stabilize nginx and update fluentd configuration to follow Nomad jobs log rotation
## 20.06.2
BUG FIXES
* file size obtained automatically when retrieving remote data ingested using metadata only mode
## 20.06.1
IMPROVEMENTS
* allow arbitrary group id
## 20.06.0
FEATURES
* implemented acknowledges - one can acknowledge a message, get last acknowledged tuple id, get list of unacknowledged tuple ids
* implement getting stream info (contains last id) by producer client (not need to have consumer client)
IMPROVEMENTS
* change behavior when trying to get data from a stream that does not exist - return EndOfStream instead of WrongInput
* change behavior of GetLastXX/get_lastXX functions - current pointer is not being set to the end of a stream after this command anymore
* stream name added to producer callback output for Python
* added simple C++ examples
BUG FIXES
* check message ids should be positive
## 20.03.0
FEATURES
* introduced streams for producer/consumer
* introduced timeout for producer requests
* producer accepts "auto" for beamtime, will automatically select a current one for a given beamline
* introduced file transfer service - possibility for consumer clients to receive data also in case filesystem is inaccessible
IMPROVEMENTS
* switch to MongoDB 4.2
* API documentation is available for C++ and Python
* switch to using cmake 3.7+
* error messages in Python as Python strings, not byte objects
BUG FIXES
* consumer operation timeout - take duration of the operation into account
* giving warning/error on attempt to send data/metadata with same id
......@@ -7,12 +7,11 @@ if(ASTYLE_EXECUTABLE)
${ASTYLE_EXECUTABLE} -i
--exclude=${PROJECT_BINARY_DIR}
--recursive -n --style=google --indent=spaces=4 --max-code-length=120
--max-instatement-indent=50 --pad-oper --align-pointer=type
--max-instatement-indent=50 --pad-oper --align-pointer=type --quiet
"${PROJECT_SOURCE_DIR}/*.cpp" "${PROJECT_SOURCE_DIR}/*.h"
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
VERBATIM
WORKING_DIRECTORY ..
VERBATIM
)
else()
message(WARNING "Unable to find astyle. Code formatting will be skipped")
endif()
# Honor <Package>_ROOT variables in find_package() calls (policy CMP0074).
set(CMAKE_POLICY_DEFAULT_CMP0074 NEW) #allow use package_ROOT variable to find package
# Select which Python client packages to produce (empty string = none);
# only meaningful when the Python bindings themselves are being built.
if (BUILD_PYTHON)
set(BUILD_PYTHON_PACKAGES "" CACHE STRING "which python packages to build")
set_property(CACHE BUILD_PYTHON_PACKAGES PROPERTY STRINGS source rpm deb win)
endif ()
# Locate libcurl; LIBCURL_DIR may point at a custom/static build produced by
# the dependency scripts in this repository.
set(CMAKE_PREFIX_PATH "${LIBCURL_DIR}")
find_package(CURL REQUIRED)
message(STATUS "Found cURL libraries: ${CURL_LIBRARIES}")
message(STATUS "cURL include: ${CURL_INCLUDE_DIRS}")
if (CURL_FOUND) #old FindCURL versions do not create CURL::libcurl target, so we do it here if CURL::libcurl is missing
if (NOT TARGET CURL::libcurl)
# Wrap the legacy FindCURL result variables in an imported target so the
# rest of the build can uniformly link against CURL::libcurl.
add_library(CURL::libcurl UNKNOWN IMPORTED)
set_target_properties(CURL::libcurl PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${CURL_INCLUDE_DIRS}")
set_target_properties(CURL::libcurl PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES "C"
IMPORTED_LOCATION "${CURL_LIBRARIES}")
endif ()
endif ()
# rdkafka is only needed by the server-side components, not by client libraries.
if (NOT BUILD_CLIENTS_ONLY)
find_package(RdKafka REQUIRED)
message(STATUS "Found rdkafka++ libraries: ${RDKAFKA_LIBRARIES}")
message(STATUS "rdkafka++ include dir : ${RDKAFKA_INCLUDE_DIR}")
if (WIN32)
message(STATUS "rdkafka++ binary dir (dll): ${RDKAFKA_BIN_DIR}")
endif ()
endif ()
# python is needed anyway, even if no Python packages are build (e.g. to parse test results)
# A user-supplied interpreter (cache variable or the Python_EXECUTABLE
# environment variable) takes precedence over auto-discovery below.
if(NOT Python_EXECUTABLE)
set(Python_EXECUTABLE $ENV{Python_EXECUTABLE})
endif()
if(NOT Python_EXECUTABLE)
set(python_components Interpreter)
if(BUILD_PYTHON)
# Building the Python bindings additionally needs headers/libs and NumPy.
list(APPEND python_components Development NumPy)
endif()
find_package(Python COMPONENTS REQUIRED ${python_components})
# Guard against find_package() picking up a Python 2 interpreter.
if (NOT Python_EXECUTABLE MATCHES "python3")
message(FATAL_ERROR "Expected python3, found ${Python_EXECUTABLE}")
endif ()
endif()
message(STATUS "Using Python: ${Python_EXECUTABLE}")
include(libfabric)
# Language standards used project-wide.
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_C_STANDARD 99)
# Compiler-specific settings: static MSVC runtime on Windows; warnings
# (optionally promoted to errors via STOP_BUILD_ON_WARNINGS) on GNU/Clang.
IF(WIN32)
set(CMAKE_CXX_FLAGS_DEBUG "/MTd")
set(CMAKE_CXX_FLAGS_RELEASE "/MT")
add_definitions(-DWIN32)
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
ELSEIF(CMAKE_C_COMPILER_ID STREQUAL "GNU")
add_compile_options(-Wall -Wextra -pedantic -Wconversion -Wno-missing-field-initializers -fPIC)
IF (STOP_BUILD_ON_WARNINGS)
add_compile_options(-Werror)
ENDIF()
# Statically link the GNU runtime so executables run on hosts with an
# older libstdc++ -- presumably for portable binary packages; confirm.
SET( CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libgcc -static-libstdc++")
ELSEIF(CMAKE_C_COMPILER_ID MATCHES "Clang")
IF (STOP_BUILD_ON_WARNINGS)
add_compile_options(-Werror)
ENDIF()
add_compile_options(-Wall -Wextra -pedantic -Wconversion)
ENDIF(WIN32)
# Shared C++ headers used by all components.
set (ASAPO_CXX_COMMON_INCLUDE_DIR ${PROJECT_SOURCE_DIR}/common/cpp/include)
# NOTE(review): Threads is searched twice -- once here and once (REQUIRED)
# in the WIN32 branch below; the first call looks redundant.
find_package (Threads)
#TODO: Better way then GLOBAL PROPERTY
IF(WIN32)
find_package(Threads REQUIRED)
SET(ASAPO_COMMON_IO_LIBRARIES ${CMAKE_THREAD_LIBS_INIT} wsock32 ws2_32)
ELSEIF(UNIX)
SET(ASAPO_COMMON_IO_LIBRARIES Threads::Threads)
ENDIF(WIN32)
# Expose the IO link libraries to subdirectories via a global property.
SET_PROPERTY(GLOBAL PROPERTY ASAPO_COMMON_IO_LIBRARIES ${ASAPO_COMMON_IO_LIBRARIES})
# Homebrew installs libraries under /usr/local/lib on (Intel) macOS.
if (APPLE)
link_directories("/usr/local/lib")
endif()
# Optional RDMA support via libfabric, enabled with ENABLE_LIBFABRIC.
SET(ASAPO_COMMON_FABRIC_LIBRARIES ${ASAPO_COMMON_IO_LIBRARIES})
IF(ENABLE_LIBFABRIC)
find_package(LibFabric)
if(NOT LIBFABRIC_LIBRARY)
message(FATAL_ERROR "Did not find libfabric")
endif()
message(STATUS "LibFabric support enabled")
message(STATUS "LIB_FABRIC: Path: ${LIBFABRIC_LIBRARY} Include: ${LIBFABRIC_INCLUDE_DIR}")
add_definitions(-DLIBFABRIC_ENABLED)
# dl is added alongside libfabric -- presumably for dlopen-based loading
# of the fabric library at runtime; confirm against the C++ sources.
SET(ASAPO_COMMON_FABRIC_LIBRARIES ${ASAPO_COMMON_FABRIC_LIBRARIES} dl)
IF(ENABLE_LIBFABRIC_LOCALHOST)
message(STATUS "LIB_FABRIC: Enabled emulated RDMA when localhost is used. Should only be used for tests.")
# NOTE(review): "LIBFARBIC" is misspelled, but the C++ code presumably
# checks the same misspelled macro -- do not "fix" one side alone.
add_definitions(-DLIBFARBIC_ALLOW_LOCALHOST)
ENDIF()
ENDIF()
SET_PROPERTY(GLOBAL PROPERTY ASAPO_COMMON_FABRIC_LIBRARIES ${ASAPO_COMMON_FABRIC_LIBRARIES})
# ---- packaging (CPack) ----
# Release suffix distinguishes packages built for different distros.
if(NOT PACKAGE_RELEASE_SUFFIX)
set(PACKAGE_RELEASE_SUFFIX linux)
endif()
set(CPACK_PACKAGE_VERSION ${ASAPO_VERSION})
set(CPACK_PACKAGE_RELEASE ${PACKAGE_RELEASE_SUFFIX})
set(CPACK_PACKAGE_CONTACT "IT")
set(CPACK_PACKAGE_VENDOR "DESY")
IF (WIN32)
install(FILES ${CURL_LIBRARIES} DESTINATION lib)
set(CPACK_PACKAGE_FILE_NAME "asapo-${CPACK_PACKAGE_VERSION}.${CMAKE_SYSTEM_PROCESSOR}")
else()
# On Unix, ship the static curl library (renamed to libasapo-curl) so client
# packages are self-contained; PACK_STATIC_CURL_LIB overrides the detected one.
get_filename_component(CURLLIB_EXT ${CURL_LIBRARIES} EXT)
if(DEFINED PACK_STATIC_CURL_LIB)
install(FILES ${PACK_STATIC_CURL_LIB} DESTINATION lib RENAME ${CMAKE_STATIC_LIBRARY_PREFIX}asapo-curl${CMAKE_STATIC_LIBRARY_SUFFIX})
elseif("R${CMAKE_STATIC_LIBRARY_SUFFIX}" STREQUAL "R${CURLLIB_EXT}")
# only bundle the detected curl library if it is itself a static archive
install(FILES ${CURL_LIBRARIES} DESTINATION lib RENAME ${CMAKE_STATIC_LIBRARY_PREFIX}asapo-curl${CURLLIB_EXT})
endif()
set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX})
set(CPACK_PACKAGE_FILE_NAME "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}-${CPACK_PACKAGE_RELEASE}.${CMAKE_SYSTEM_PROCESSOR}")
endif()
set(CPACK_DEBIAN_PACKAGE_DEPENDS "libcurl4-openssl-dev")
set(CPACK_RPM_PACKAGE_REQUIRES "libcurl-devel")
include(CPack)
# Stamp build-time and version information into generated source files.
string(TIMESTAMP TIMESTAMP "%H:%M:%S %d.%m.%Y UTC" UTC)
# NOTE(review): these configure_file() calls write into the SOURCE tree
# (PROJECT_SOURCE_DIR), not the binary dir -- generated files in the source
# tree can dirty the checkout; consider generating into CMAKE_BINARY_DIR.
configure_file(
${PROJECT_SOURCE_DIR}/common/cpp/include/asapo/common/internal/version.h.in
${PROJECT_SOURCE_DIR}/common/cpp/include/asapo/common/internal/version.h @ONLY
)
configure_file(
${PROJECT_SOURCE_DIR}/common/go/src/asapo_common/version/version_lib.go.in
${PROJECT_SOURCE_DIR}/common/go/src/asapo_common/version/version_lib.go @ONLY
)