Commit 36ad4de8 authored by dgasull

Released new elastic-dataClay 1.4 + Changed demos accordingly

parent 89e485a7
/.pydevproject
/.project
/tmp/
/elastic-dataclay.iml
/.idea/
/.contractids/
FROM ubuntu:18.04
FROM bscdataclay/client:2.5.dev-alpine
ARG WORKING_DIR=/elastic
ARG PYNAMESPACE=ElasticNS
ARG JAVANAMESPACE=ElasticJavaNS
ARG DATASET=ElasticDS
ARG USER=ElasticUser
ARG PASS=ElasticPass
ARG DATACLAY_VIRTUAL_ENV=/dataclay_venv
ARG DATACLAY_PYVER=3.7
ARG DATACLAY_PIP_VERSION=3
ARG CACHEBUST=1
ARG FIWARENAMESPACE=FiwareNS
ARG DEFAULT_DATASET=ElasticDS
ARG DEFAULT_USER=ElasticUser
ARG DEFAULT_PASS=ElasticPass
ARG DC_SHARED_VOLUME=/srv/dataclay/shared
ENV WORKING_DIR=${WORKING_DIR} \
DC_SHARED_VOLUME=${DC_SHARED_VOLUME} \
DATACLAYCLIENTCONFIG=${WORKING_DIR}/cfgfiles/client.properties \
DATACLAYGLOBALCONFIG=${WORKING_DIR}/cfgfiles/global.properties \
DATACLAYSESSIONCONFIG=${WORKING_DIR}/cfgfiles/session.properties \
DATASET=${DEFAULT_DATASET} \
JAVANAMESPACE=${JAVANAMESPACE} \
FIWARENAMESPACE=${FIWARENAMESPACE} \
PYNAMESPACE=${PYNAMESPACE} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
JAVASTUBSPATH=${WORKING_DIR}/stubs/java_stubs \
PYSTUBSPATH=${WORKING_DIR}/stubs/python_stubs
RUN apk --no-cache --update add maven
ENV DATACLAY_JAR=/home/dataclayusr/dataclay/dataclay.jar
ENV ELASTIC_HOME=/elastic
ENV DATACLAYCLIENTCONFIG=/elastic/cfgfiles/client.properties
ENV DATACLAYGLOBALCONFIG=/elastic/cfgfiles/global.properties
ENV DATACLAYSESSIONCONFIG=/elastic/cfgfiles/session.properties
ENV JAVASTUBSPATH=/elastic/stubs/java_stubs
ENV PYSTUBSPATH=/elastic/stubs/python_stubs
# Install packages:
RUN apt-get update \
&& apt-get install --no-install-recommends -y --allow-unauthenticated openjdk-8-jdk \
maven python${DATACLAY_PYVER} gcc libyaml-dev libpython${DATACLAY_PYVER}-dev build-essential curl libpython2.7-dev \
python${DATACLAY_PYVER}-dev python${DATACLAY_PIP_VERSION}-pip \
python${DATACLAY_PIP_VERSION}-setuptools \
&& rm -rf /var/lib/apt/lists/*
ENV DATACLAY_JAR="/root/.m2/repository/es/bsc/dataclay/dataclay/2.4/dataclay-2.4-jar-with-dependencies.jar"
# Get dataClay maven dependency and set env
RUN mvn -DgroupId=es.bsc.dataclay -DartifactId=dataclay -Dversion=2.4 -Dclassifier=jar-with-dependencies dependency:get
# To force a rebuild from this point onwards, change this build argument
ARG CACHEBUST
# Copy configuration files for connecting to dataClay
COPY ./cfgfiles ${ELASTIC_HOME}/cfgfiles
# Wait for dataclay to be alive (max retries 10 and 5 seconds per retry)
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.WaitForDataClayToBeAlive 10 5
# Register account
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewAccount ${USER} ${PASS}
# Register datacontract
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewDataContract ${USER} ${PASS} ${DATASET} ${USER}
########################### java model ###########################
# Register namespace
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewNamespace ${USER} ${PASS} ${JAVANAMESPACE} java
# Register java model
COPY ./nfrtool-model/model ${ELASTIC_HOME}/nfrtool-model/model
RUN cd ${ELASTIC_HOME}/nfrtool-model/model && mvn package
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewModel ${USER} ${PASS} ${JAVANAMESPACE} ${ELASTIC_HOME}/nfrtool-model/model/target/classes
# Access namespace
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.AccessNamespace ${USER} ${PASS} ${JAVANAMESPACE}
# Get java stubs
RUN mkdir -p ${JAVASTUBSPATH}
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.GetStubs ${USER} ${PASS} ${JAVANAMESPACE} ${JAVASTUBSPATH}
########################### python model ###########################
# Register namespace
RUN java -cp $DATACLAY_JAR es.bsc.dataclay.tool.NewNamespace ${USER} ${PASS} ${PYNAMESPACE} python
# Create virtual environment
RUN pip${DATACLAY_PIP_VERSION} install --upgrade pip
RUN pip${DATACLAY_PIP_VERSION} install wheel
RUN pip${DATACLAY_PIP_VERSION} install virtualenv
RUN python${DATACLAY_PIP_VERSION} -m virtualenv --python=/usr/bin/python${DATACLAY_PYVER} ${DATACLAY_VIRTUAL_ENV}
ENV PATH="$DATACLAY_VIRTUAL_ENV/bin:$PATH"
RUN python${DATACLAY_PYVER} --version
# Install dataClay
RUN pip${DATACLAY_PIP_VERSION} install dataClay
# Copy models
COPY ./events-model/model ${ELASTIC_HOME}/events-model/model
# Register python models
RUN python -m dataclay.tool register_model ${USER} ${PASS} ${PYNAMESPACE} ${ELASTIC_HOME}/events-model/model/src
COPY ./cfgfiles ${WORKING_DIR}/cfgfiles
COPY initialize-dataclay.sh .
# Get stubs
RUN mkdir -p ${PYSTUBSPATH}
RUN CONTRACTID=`java -cp $DATACLAY_JAR es.bsc.dataclay.tool.AccessNamespace ${USER} ${PASS} ${PYNAMESPACE} | tail -1` \
&& python${DATACLAY_PYVER} -m dataclay.tool get_stubs ${USER} ${PASS} ${CONTRACTID} ${PYSTUBSPATH}
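# Sketch (assumption, not part of the original file): CONTRACTID above only exists inside that
# single RUN instruction; to let client images reuse it, the same RUN (or initialize-dataclay.sh)
# could persist it to the volume that deploy.sh mounts, e.g.:
#   && echo ${CONTRACTID} > /contractids/events-contractid
# That is the file later copied by elastic.pyclient.Dockerfile.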
# Models
COPY ./nfrtool-model/model ${WORKING_DIR}/nfrtool-model/model
COPY ./events-model/model ${WORKING_DIR}/events-model/model
COPY ./fiware-model/model ${WORKING_DIR}/fiware-model/model
ENTRYPOINT ["./initialize-dataclay.sh"]
#!/bin/bash
#===================================================================================
#
# FILE: deploy.sh
#
# USAGE: deploy.sh
#
# DESCRIPTION: Deploy ELASTIC dataClay into DockerHub
#
# OPTIONS: --
# REQUIREMENTS: ---
# BUGS: ---
# NOTES: ---
# AUTHOR: dgasull@bsc.es
# COMPANY: Barcelona Supercomputing Center (BSC)
# VERSION: 1.0
#===================================================================================
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
SUPPORTED_PYTHON_VERSIONS=(3.6 3.7 3.8)
echo "Welcome to ELASTIC-dataClay build script, please which version do you want to build?"
read VERSION
echo "Going to build:"
echo " -- elasticeuh2020/dataclay-pyclient:${VERSION} "
echo " -- elasticeuh2020/dataclay-javaclient:${VERSION} "
echo " -- elasticeuh2020/dataclay-logicmodule:${VERSION} "
echo " -- elasticeuh2020/dataclay-dspython:${VERSION} "
echo " -- elasticeuh2020/dataclay-dsjava:${VERSION} "
echo " -- elasticeuh2020/kafka:${VERSION} "
echo " -- elasticeuh2020/mqtt-to-dataclay:${VERSION} "
read -r -p "Are you sure? [y/N] " response
if [[ "$response" =~ ^([yY][eE][sS]|[yY])$ ]]
then
SECONDS=0
## Build and start dataClay
pushd $SCRIPTDIR/dataclay
docker-compose kill
docker-compose down -v #sanity check
docker-compose up -d
popd
#
## BUILD ####
pushd $SCRIPTDIR
echo " ==== Cleaning previous images ==== "
docker images | grep elasticeuh2020 | awk '{print $3}' | xargs docker rmi -f
echo " ==== Building dataClay initializer ==== "
docker build --network dataclay_default --no-cache -t elasticeuh2020/dataclay-initializer .
echo " ==== Running dataClay initializer ==== "
rm -rf ./contractids
mkdir -p ./contractids
docker run --rm --network dataclay_default \
-v `pwd`/contractids:/contractids/:rw \
elasticeuh2020/dataclay-initializer
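# Sketch (assumption): once the initializer exits, ./contractids should hold the contract id
# files consumed by the client images, which could be checked here, e.g.:
#   cat ./contractids/events-contractid ./contractids/fiware-contractid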
# Create new images based on the running containers
echo " ===== Building docker elasticeuh2020/dataclay-dsjava:${VERSION} ====="
ELASTIC_DSJAVA_CONTAINER_ID=$(docker commit dataclay_dsjava_1)
docker tag $ELASTIC_DSJAVA_CONTAINER_ID elasticeuh2020/dataclay-dsjava:${VERSION}
echo " ===== Building docker elasticeuh2020/dataclay-logicmodule:${VERSION} ====="
# Prepare the container for export (needed in order to use docker commit)
docker exec -t dataclay_logicmodule_1 /prepare_to_export.sh
ELASTIC_LOGICMODULE_CONTAINER_ID=$(docker commit dataclay_logicmodule_1)
docker tag $ELASTIC_LOGICMODULE_CONTAINER_ID elasticeuh2020/dataclay-logicmodule:${VERSION}
echo " ===== Retrieving execution classes into $SCRIPTDIR/tmp/deploy ====="
rm -rf $SCRIPTDIR/tmp/
mkdir -p $SCRIPTDIR/tmp/deploy
docker cp dataclay_dspython_1:/home/dataclayusr/dataclay/deploy/ $SCRIPTDIR/tmp/
echo " ===== Building docker elasticeuh2020/dataclay-javaclient:${VERSION} ====="
docker build --rm -f elastic.javaclient.Dockerfile --no-cache \
-t elasticeuh2020/dataclay-javaclient:$VERSION .
echo " ===== Retrieving execution classes into $SCRIPTDIR/tmp/deploy and $SCRIPTDIR/tmp/execClasses ====="
rm -rf $SCRIPTDIR/tmp/
mkdir -p $SCRIPTDIR/tmp/deploy
docker cp dataclay_dspython_1:/home/dataclayusr/dataclay/deploy/ $SCRIPTDIR/tmp/
for PYTHON_VERSION in "${SUPPORTED_PYTHON_VERSIONS[@]}"; do
DATACLAY_PYVER="${PYTHON_VERSION//./}"
IMG_VER=${VERSION}.py${DATACLAY_PYVER}
echo " ===== Building docker elasticeuh2020/dataclay-pyclient:${IMG_VER} ====="
docker build --rm -f elastic.pyclient.Dockerfile --no-cache \
--build-arg DATACLAY_PYVER="py${DATACLAY_PYVER}" \
--build-arg PYTHON_VERSION="$PYTHON_VERSION" \
-t elasticeuh2020/dataclay-pyclient:${IMG_VER} .
done
# default tag
docker tag elasticeuh2020/dataclay-pyclient:${VERSION}.py37 elasticeuh2020/dataclay-pyclient:${VERSION}
for PYTHON_VERSION in "${SUPPORTED_PYTHON_VERSIONS[@]}"; do
DATACLAY_PYVER="${PYTHON_VERSION//./}"
IMG_VER=${VERSION}.py${DATACLAY_PYVER}
echo " ===== Building docker elasticeuh2020/dataclay-dspython:${IMG_VER} ====="
docker build --rm -f elastic.dspython.Dockerfile --no-cache \
--build-arg DATACLAY_PYVER="py${DATACLAY_PYVER}" \
-t elasticeuh2020/dataclay-dspython:${IMG_VER} .
done
# default tag
docker tag elasticeuh2020/dataclay-dspython:${VERSION}.py37 elasticeuh2020/dataclay-dspython:${VERSION}
echo " ===== Building docker elasticeuh2020/kafka:${VERSION} ====="
docker build --rm -f kafka.Dockerfile --no-cache -t elasticeuh2020/kafka:$VERSION .
echo " ===== Building docker elasticeuh2020/mqtt-to-dataclay:${VERSION} ====="
docker build --rm -f elastic.mqtt.bridge.Dockerfile --no-cache -t elasticeuh2020/mqtt-to-dataclay:$VERSION .
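# Sketch (assumption, not in the original script): since the header describes deployment to
# DockerHub, the freshly tagged images could be pushed at this point, e.g.:
#   for img in dataclay-dsjava dataclay-logicmodule dataclay-javaclient kafka mqtt-to-dataclay; do
#     docker push elasticeuh2020/${img}:${VERSION}
#   done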
rm -rf ./contractids
popd
#######################################################################################
echo " ===== Stopping dataClay ====="
pushd $SCRIPTDIR/dataclay
docker-compose kill
docker-compose down -v #sanity check
popd
echo " ===== Cleaning images ====="
docker rmi -f elasticeuh2020/dataclay-initializer
docker images | grep elasticeuh2020
duration=$SECONDS
echo "$(($duration / 60)) minutes and $(($duration % 60)) seconds elapsed."
echo "ELASTIC dataClay deployment FINISHED! "
else
echo "Aborting"
fi
version: '3.4'
services:
logicmodule:
image: "bscdataclay/logicmodule:2.4"
image: "bscdataclay/logicmodule:2.5.dev-alpine"
command: "${COMMAND_OPTS}"
ports:
- "11034:11034"
@@ -10,9 +6,6 @@ services:
- LOGICMODULE_HOST=logicmodule
- DATACLAY_ADMIN_USER=admin
- DATACLAY_ADMIN_PASSWORD=admin
volumes:
- ./prop/global.properties:/home/dataclayusr/dataclay/cfgfiles/global.properties:ro
- ./prop/log4j2.xml:/home/dataclayusr/dataclay/logging/log4j2.xml:ro
stop_grace_period: 5m
healthcheck:
interval: 5s
@@ -20,7 +17,7 @@ services:
test: ["CMD-SHELL", "/home/dataclayusr/dataclay/health/health_check.sh"]
dsjava:
image: "bscdataclay/dsjava:2.4"
image: "bscdataclay/dsjava:2.5.dev-alpine"
command: "${COMMAND_OPTS}"
ports:
- "2127:2127"
@@ -31,9 +28,6 @@ services:
- DATASERVICE_JAVA_PORT_TCP=2127
- LOGICMODULE_PORT_TCP=11034
- LOGICMODULE_HOST=logicmodule
volumes:
- ./prop/global.properties:/home/dataclayusr/dataclay/cfgfiles/global.properties:ro
- ./prop/log4j2.xml:/home/dataclayusr/dataclay/logging/log4j2.xml:ro
stop_grace_period: 5m
healthcheck:
interval: 5s
@@ -41,7 +35,7 @@ services:
test: ["CMD-SHELL", "/home/dataclayusr/dataclay/health/health_check.sh"]
dspython:
image: "bscdataclay/dspython:2.4"
image: "bscdataclay/dspython:2.5.dev-alpine"
command: "${COMMAND_OPTS}"
ports:
- "6867:6867"
@@ -53,8 +47,6 @@ services:
- LOGICMODULE_PORT_TCP=11034
- LOGICMODULE_HOST=logicmodule
- DEBUG=False
volumes:
- ./prop/global.properties:/home/dataclayusr/dataclay/cfgfiles/global.properties:ro
stop_grace_period: 5m
healthcheck:
interval: 5s
CHECK_LOG4J_DEBUG=false
<?xml version="1.0" encoding="UTF-8"?>
<Configuration monitorInterval="60" status="off">
<Appenders>
<Console name="ConsoleAppender" target="SYSTEM_OUT">
<PatternLayout
pattern="%d{ISO8601} %p [%c] [%t] [%C{1}:%L] %m%n"></PatternLayout>
</Console>
</Appenders>
<Loggers>
<!-- Runtime -->
<Logger name="ClientRuntime" level="off" />
<Logger name="ClientManagementLib" level="off" />
<Logger name="DataClayRuntime" level="off" />
<Logger name="DataServiceRuntime" level="off" />
<Logger name="DataClayObjectLoader" level="off" />
<Logger name="DataClayObject" level="off" /> <!-- This is very verbose! -->
<!-- Data service -->
<Logger name="DataService" level="off" />
<Logger name="ExecutionEnvironmentSrv" level="off" />
<!-- Lockers -->
<Logger name="dataclay.util.classloaders.SyncClass" level="off" />
<Logger name="dataclay.heap.LockerPool" level="off" />
<Logger name="LockerPool" level="off" />
<Logger name="dataclay.util.classloaders.ClassLockers"
level="off" />
<!-- Garbage collection -->
<Logger name="GlobalGC" level="off" />
<Logger name="heap.HeapManager" level="off" />
<Logger name="ReferenceCounting" level="off" />
<Logger name="StorageLocation" level="off" />
<!-- Logic module -->
<Logger name="LogicModule" level="off" />
<Logger name="LMDB" level="off" />
<Logger name="managers" level="off" />
<Logger name="MetaDataService.db" level="off" />
<Logger name="MetaDataService" level="off" />
<!-- Communication -->
<Logger name="io.grpc" level="off" />
<Logger name="io.netty" level="off" />
<Logger name="NettyClientHandler" level="off" />
<Logger name="grpc.client" level="off" />
<Logger name="grpc.client.logicmodule" level="off" />
<Logger name="communication.LogicModule.service" level="off" />
<!-- Databases -->
<Logger name="dataclay.dbhandler" level="off" />
<Logger name="dbhandler.PostgresConnection" level="off" />
<Logger name="org.apache.commons.dbcp2" level="off" />
<Logger name="PostgresHandler" level="off" />
<Logger name="SQLHandler" level="off" />
<!-- Misc -->
<Logger name="util" level="off" />
<Logger name="exceptions" level="off" />
<Logger name="Paraver" level="info" />
<Logger name="DataClaySerializationLib" level="off" />
<Logger name="DataClayDeserializationLib" level="off" />
<!-- ROOT LOGGER -->
<Root level="off">
<AppenderRef ref="ConsoleAppender" />
</Root>
</Loggers>
</Configuration>
#!/bin/bash
#===================================================================================
#
# FILE: deploy.sh
#
# USAGE: deploy.sh
#
# DESCRIPTION: Build and push the Python requirements image to DockerHub
#
# OPTIONS: ---
# REQUIREMENTS: ---
# BUGS: ---
# NOTES: ---
# AUTHOR: dgasull@bsc.es
# COMPANY: Barcelona Supercomputing Center (BSC)
# VERSION: 1.0
#===================================================================================
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
echo "Welcome to ELASTIC-dataClay deployment script, please which version do you want to publish?"
read VERSION
echo "Going to publish:"
echo " -- elasticeuh2020/dataclay-pyclient:${VERSION}-requirements"
read -r -p "Are you sure? [y/N] " response
if [[ "$response" =~ ^([yY][eE][sS]|[yY])$ ]]
then
REQUIRED_DOCKER_VERSION=19
export PLATFORMS=linux/amd64,linux/arm64
############################# Prepare docker builder #############################
printf "Checking if docker version >= $REQUIRED_DOCKER_VERSION..."
version=$(docker version --format '{{.Server.Version}}' | cut -d. -f1)
if [ "$version" -lt "$REQUIRED_DOCKER_VERSION" ]; then
echo "ERROR: Docker version is less than $REQUIRED_DOCKER_VERSION"
exit 1
fi
printf "OK\n"
# prepare architectures
docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
#docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
docker run --rm -t arm64v8/ubuntu uname -m
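# If the binfmt/QEMU registration above worked, the arm64v8/ubuntu test run should print
# "aarch64"; a small guard could abort otherwise (sketch, not in the original script), e.g.:
#   [ "$(docker run --rm arm64v8/ubuntu uname -m)" = "aarch64" ] || { echo "ARM64 emulation not available"; exit 1; }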
DOCKER_BUILDER=$(docker buildx create)
docker buildx use $DOCKER_BUILDER
echo "Checking buildx with available platforms to simulate..."
docker buildx inspect --bootstrap
BUILDER_PLATFORMS=$(docker buildx inspect --bootstrap | grep Platforms | awk -F":" '{print $2}')
IFS=',' read -ra BUILDER_PLATFORMS_ARRAY <<< "$BUILDER_PLATFORMS"
IFS=',' read -ra SUPPORTED_PLATFORMS_ARRAY <<< "$PLATFORMS"
echo "Builder created with platforms: ${BUILDER_PLATFORMS_ARRAY[@]}"
# Check that every requested platform is supported by the builder
for i in "${SUPPORTED_PLATFORMS_ARRAY[@]}"
do
FOUND=false
SUP_PLATFORM=`echo $i | sed 's/ *$//g'` #remove spaces
printf "Checking if platform $i can be simulated by buildx..."
for j in "${BUILDER_PLATFORMS_ARRAY[@]}"
do
B_PLATFORM=`echo $j | sed 's/ *$//g'` #remove spaces
if [ "$SUP_PLATFORM" == "$B_PLATFORM" ]; then
FOUND=true
break
fi
done
if [ "$FOUND" = false ] ; then
echo "ERROR: missing support for $i in buildx builder."
echo " Check https://github.com/multiarch/qemu-user-static for more information on how to simulate architectures"
exit 1
fi
printf "OK\n"
done
#######################################################################################
SECONDS=0
# NOTE: we build a separate requirements image so the dependency installation is not repeated on every build (it can be slow for ARM64)
echo " ===== Building docker elasticeuh2020/dataclay-pyclient:${VERSION}-requirements ====="
echo " WARNING!!!!!!!!!!!!!!!!!!: It may take a lot of time for ARM64 arch "
docker buildx build -f elastic.pyrequirements.Dockerfile \
--cache-to=type=registry,ref=elasticeuh2020/dataclay-pyclient:buildxcache${VERSION}-requirements,mode=max \
--cache-from=type=registry,ref=elasticeuh2020/dataclay-pyclient:buildxcache${VERSION}-requirements \
-t elasticeuh2020/dataclay-pyclient:${VERSION}-requirements --platform $PLATFORMS --push .
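# Sketch: after --push completes, the multi-arch manifest can be checked with the standard
# buildx subcommand, e.g.:
#   docker buildx imagetools inspect elasticeuh2020/dataclay-pyclient:${VERSION}-requirements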
docker buildx rm $DOCKER_BUILDER
duration=$SECONDS
echo "$(($duration / 60)) minutes and $(($duration % 60)) seconds elapsed."
echo "ELASTIC dataClay deployment FINISHED! "
else
echo "Aborting"
fi
FROM bscdataclay/dsjava:2.4
# Install packages:
RUN apt-get update \
&& apt-get install --no-install-recommends -y --allow-unauthenticated curl wget jq mosquitto mosquitto-clients >/dev/null \
&& rm -rf /var/lib/apt/lists/*
# KAFKA
ENV KAFKA_VERSION=2.4.1
ENV SCALA_VERSION=2.12
ENV KAFKA_HOME=/opt/kafka
ENV PATH=${PATH}:${KAFKA_HOME}/bin
RUN mkdir -p ${KAFKA_HOME}
# Download kafka
RUN url=$(curl --stderr /dev/null "https://www.apache.org/dyn/closer.cgi?path=/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz&as_json=1" | jq -r '"\(.preferred)\(.path_info)"') \
&& wget -q "${url}" -O /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz \
&& tar -xzf /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz --directory ${KAFKA_HOME} --strip-components=1
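# For reference (descriptive note, not in the original): the jq filter above joins .preferred
# (the chosen Apache mirror) with .path_info, producing a URL of the form
#   <mirror>/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz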
FROM bscdataclay/dsjava:2.5.dev-alpine
COPY ./tmp/execClasses ${DATACLAY_HOME}/execClasses
FROM bscdataclay/dspython:2.4
ARG DATACLAY_PYVER=py37
FROM bscdataclay/dspython:2.5.${DATACLAY_PYVER}.dev-alpine
# Install packages:
RUN apt-get update \
&& apt-get install --no-install-recommends -y --allow-unauthenticated curl wget jq mosquitto mosquitto-clients >/dev/null \
&& rm -rf /var/lib/apt/lists/*
# KAFKA
ENV KAFKA_VERSION=2.4.1
ENV SCALA_VERSION=2.12
ENV KAFKA_HOME=/opt/kafka
ENV PATH=${PATH}:${KAFKA_HOME}/bin
RUN mkdir -p ${KAFKA_HOME}
# Download kafka
RUN url=$(curl --stderr /dev/null "https://www.apache.org/dyn/closer.cgi?path=/kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz&as_json=1" | jq -r '"\(.preferred)\(.path_info)"') \
&& wget -q "${url}" -O /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz \
&& tar -xzf /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz --directory ${KAFKA_HOME} --strip-components=1
RUN pip install kafka-python
COPY ./tmp/deploy ${DATACLAY_HOME}/deploy
# Execute
# Don't use CMD, in order to keep compatibility with the Singularity container generator
ENTRYPOINT ["dataclay-python-entry-point", "-m", "dataclay.executionenv.server"]
FROM maven:3.6.3-openjdk-11-slim
ARG JDK=8
FROM bscdataclay/logicmodule:2.5.jdk${JDK}.dev-alpine
FROM maven:3.6.1-jdk-8-alpine
ENV ELASTIC_HOME=/elastic
ENV DATACLAYCLIENTCONFIG=/elastic/cfgfiles/client.properties
ENV DATACLAYGLOBALCONFIG=/elastic/cfgfiles/global.properties
ENV DATACLAYSESSIONCONFIG=/elastic/cfgfiles/session.properties
ENV JAVASTUBSPATH=/elastic/stubs
ENV DATACLAY_JAR_MVN="/root/.m2/repository/es/bsc/dataclay/dataclay/2.4/dataclay-2.4-jar-with-dependencies.jar"
ENV DATACLAY_JAR="/elastic/dataclay.jar"
ENV STUBS_JAR=${ELASTIC_HOME}/stubs.jar
ARG WORKING_DIR=/elastic
ARG DEFAULT_NAMESPACE=ElasticJavaNS
ARG DEFAULT_USER=ElasticUser
ARG DEFAULT_PASS=ElasticPass
ARG JDK=8
ENV DATACLAYCLIENTCONFIG=${WORKING_DIR}/cfgfiles/client.properties \
DATACLAYGLOBALCONFIG=${WORKING_DIR}/cfgfiles/global.properties \
DATACLAYSESSIONCONFIG=${WORKING_DIR}/cfgfiles/session.properties \
NAMESPACE=${DEFAULT_NAMESPACE} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
DATACLAY_JAR=${WORKING_DIR}/dataclay.jar \
STUBSPATH=${WORKING_DIR}/stubs \
STUBS_JAR=${WORKING_DIR}/stubs.jar
# Prepare
WORKDIR ${ELASTIC_HOME}
WORKDIR ${WORKING_DIR}
# Get dataClay maven dependency and set env
RUN mvn -DgroupId=es.bsc.dataclay -DartifactId=dataclay -Dversion=2.4 -Dclassifier=jar-with-dependencies dependency:get
# Copy the JAR to a fixed path so applications can retrieve it easily via docker cp
# and so it can be run with java directly instead of through maven
RUN cp $DATACLAY_JAR_MVN $DATACLAY_JAR
# Get dataClay
COPY --from=0 /home/dataclayusr/dataclay/dataclay.jar ${DATACLAY_JAR}
# Get stubs
COPY ./cfgfiles/ ${ELASTIC_HOME}/cfgfiles/
COPY ./tmp/stubs/java_stubs ${JAVASTUBSPATH}
# Package stubs
RUN jar cvf ${STUBS_JAR} -C ${JAVASTUBSPATH} .
# Install stubs in local repository to use it as a pom dependency
RUN mvn install:install-file -Dfile=${STUBS_JAR} -DgroupId=es.bsc.dataclay \
-DartifactId=nfrtool-dataclay-stubs -Dversion=latest -Dpackaging=jar -DcreateChecksum=true
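# Sketch (assumption): a Java application building on this image can now declare the stubs as a
# normal pom dependency (groupId es.bsc.dataclay, artifactId nfrtool-dataclay-stubs, version
# latest); the local install above could be verified offline with, e.g.:
#   mvn -o dependency:get -DgroupId=es.bsc.dataclay -DartifactId=nfrtool-dataclay-stubs -Dversion=latest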
# Get initializers
COPY ./cfgfiles/ ${WORKING_DIR}/cfgfiles/
COPY entrypoints/javaclient /dataclay-client
# Run
ENTRYPOINT ["mvn"]
ENTRYPOINT ["/dataclay-client/entrypoint.sh"]
# Provide command (java main class...)
CMD ["--version"]
FROM ubuntu:18.04
RUN apt-get -y update
RUN apt-get install -y sqlite3 libsqlite3-dev
FROM alpine:3
RUN apk --no-cache --update add sqlite
COPY ./tmp/LM.sqlite /tmp/dataclay/dump.sql
RUN mkdir -p "/dataclay/storage"
RUN sqlite3 "/dataclay/storage/LM" ".read /tmp/dataclay/dump.sql"
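# Sketch (assumption, path taken from the restore command above): the ./tmp/LM.sqlite dump read
# here would typically be produced from the running logicmodule container before this build, e.g.:
#   docker exec dataclay_logicmodule_1 sqlite3 /dataclay/storage/LM .dump > tmp/LM.sqlite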
FROM bscdataclay/logicmodule:2.4
FROM bscdataclay/logicmodule:2.5.dev-alpine
COPY --from=0 /dataclay/storage/LM /dataclay/storage/LM
# The command can contain additional options for the Java Virtual Machine and
# must contain a class to be executed.
ENTRYPOINT ["dataclay-java-entry-point", "es.bsc.dataclay.logic.server.LogicModuleSrv"]
FROM elasticeuh2020/dataclay-pyclient:1.4
FROM nuvlabox/mqtt-fiware-bridge:0.0.1
# Install packages:
RUN apt-get update && apt-get install --no-install-recommends -y gcc libc-dev && rm -rf /var/lib/apt/lists/*
# Get environment from arguments
ENV WORKING_DIR=/opt/nuvlabox
ARG DEFAULT_NAMESPACE=FiwareNS
ARG DEFAULT_USER=ElasticUser
ARG DEFAULT_PASS=ElasticPass
ENV DATACLAYCLIENTCONFIG=${WORKING_DIR}/cfgfiles/client.properties \
DATACLAYGLOBALCONFIG=${WORKING_DIR}/cfgfiles/global.properties \
DATACLAYSESSIONCONFIG=${WORKING_DIR}/cfgfiles/session.properties \
NAMESPACE=${DEFAULT_NAMESPACE} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
STUBSPATH=/elastic/stubs/fiware
COPY --from=0 /dataclay-client/fiware-contractid /srv/dataclay/shared/contractid
# Install the latest dataClay version
RUN pip install --upgrade pip
RUN pip install --pre --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple dataClay
# Copy app
COPY mqtt-to-dataclay/ ${WORKING_DIR}
RUN mkdir -p ${STUBSPATH}
# Run
ENTRYPOINT ["python","-u","mqtt_to_dataclay.py"]
ARG REQ_VERSION=1.0
FROM elasticeuh2020/dataclay-pyclient:${REQ_VERSION}-requirements
#FROM nuvlabox/mqtt-fiware-bridge:0.0.1
ARG DATACLAY_PYVER
ARG PYTHON_VERSION
FROM bscdataclay/dspython:2.5.${DATACLAY_PYVER}.dev-alpine
FROM python:${PYTHON_VERSION}-alpine
ENV ELASTIC_HOME=/elastic
ENV DATACLAYCLIENTCONFIG=/elastic/cfgfiles/client.properties
ENV DATACLAYGLOBALCONFIG=/elastic/cfgfiles/global.properties
ENV DATACLAYSESSIONCONFIG=/elastic/cfgfiles/session.properties
ENV PYSTUBSPATH=/elastic/stubs
ARG WORKING_DIR=/elastic
ARG DEFAULT_NAMESPACE=ElasticNS
ARG DEFAULT_USER=ElasticUser
ARG DEFAULT_PASS=ElasticPass
# Install packages:
RUN apt-get update \
&& apt-get install --no-install-recommends -y build-essential mosquitto mosquitto-clients \
&& rm -rf /var/lib/apt/lists/*
ENV DATACLAYCLIENTCONFIG=${WORKING_DIR}/cfgfiles/client.properties \
DATACLAYGLOBALCONFIG=${WORKING_DIR}/cfgfiles/global.properties \
DATACLAYSESSIONCONFIG=${WORKING_DIR}/cfgfiles/session.properties \
NAMESPACE=${DEFAULT_NAMESPACE} \
USER=${DEFAULT_USER} \
PASS=${DEFAULT_PASS} \
STUBSPATH=${WORKING_DIR}/stubs
# Prepare
WORKDIR ${ELASTIC_HOME}
WORKDIR ${WORKING_DIR}
# Get python stubs
RUN mkdir -p ${PYSTUBSPATH}
COPY ./cfgfiles/ ${ELASTIC_HOME}/cfgfiles/
COPY ./tmp/stubs/python_stubs ${PYSTUBSPATH}
# =============== INSTALL DATACLAY =================== #
RUN apk add libstdc++
ENV DATACLAY_VIRTUAL_ENV=${WORKING_DIR}/dataclay_venv
COPY --from=0 /home/dataclayusr/dataclay/dataclay_venv ${DATACLAY_VIRTUAL_ENV}
ENV PATH="$DATACLAY_VIRTUAL_ENV/bin:$PATH"
# check dataclay is installed
RUN python --version
RUN python -c "import dataclay; print('import ok')"
# install mqtt
RUN python -m pip install paho-mqtt
# ================================== #
# Get dc initializer
COPY ./cfgfiles/ ${WORKING_DIR}/cfgfiles/
COPY entrypoints/pyclient /dataclay-client/
COPY ./contractids/events-contractid /dataclay-client/events-contractid
COPY ./contractids/fiware-contractid /dataclay-client/fiware-contractid
# Run
ENTRYPOINT ["python3", "-u"]
ENTRYPOINT ["/dataclay-client/entrypoint.sh"]
# Provide python command (main class...)
CMD ["--version"]
FROM python:3.7.5-slim-stretch
# ============ REQUIREMENTS ================== #
# Install
RUN apt-get update \
&& apt-get install --no-install-recommends -y --allow-unauthenticated \
gcc libyaml-dev build-essential curl >/dev/null \
&& rm -rf /var/lib/apt/lists/*
RUN pip3 install --upgrade pip
# Install dataClay
RUN pip3 install dataClay
# Install MQTT client
# RUN pip3 install paho-mqtt